gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal; import java.io.Externalizable; import java.io.IOException; import java.io.InvalidObjectException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.ObjectStreamException; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ExecutorService; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManager; import org.apache.ignite.internal.managers.collision.GridCollisionManager; import org.apache.ignite.internal.managers.communication.GridIoManager; import org.apache.ignite.internal.managers.deployment.GridDeploymentManager; import org.apache.ignite.internal.managers.discovery.GridDiscoveryManager; import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager; import 
org.apache.ignite.internal.managers.failover.GridFailoverManager; import org.apache.ignite.internal.managers.indexing.GridIndexingManager; import org.apache.ignite.internal.managers.loadbalancer.GridLoadBalancerManager; import org.apache.ignite.internal.managers.swapspace.GridSwapSpaceManager; import org.apache.ignite.internal.processors.affinity.GridAffinityProcessor; import org.apache.ignite.internal.processors.cache.CacheConflictResolutionManager; import org.apache.ignite.internal.processors.cache.GridCacheProcessor; import org.apache.ignite.internal.processors.cache.portable.CacheObjectPortableProcessorImpl; import org.apache.ignite.internal.processors.cacheobject.IgniteCacheObjectProcessor; import org.apache.ignite.internal.processors.clock.GridClockSource; import org.apache.ignite.internal.processors.clock.GridClockSyncProcessor; import org.apache.ignite.internal.processors.clock.GridJvmClockSource; import org.apache.ignite.internal.processors.closure.GridClosureProcessor; import org.apache.ignite.internal.processors.cluster.ClusterProcessor; import org.apache.ignite.internal.processors.continuous.GridContinuousProcessor; import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor; import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor; import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter; import org.apache.ignite.internal.processors.igfs.IgfsHelper; import org.apache.ignite.internal.processors.igfs.IgfsProcessorAdapter; import org.apache.ignite.internal.processors.job.GridJobProcessor; import org.apache.ignite.internal.processors.jobmetrics.GridJobMetricsProcessor; import org.apache.ignite.internal.processors.nodevalidation.DiscoveryNodeValidationProcessor; import org.apache.ignite.internal.processors.offheap.GridOffHeapProcessor; import org.apache.ignite.internal.processors.platform.PlatformProcessor; import org.apache.ignite.internal.processors.plugin.IgnitePluginProcessor; import 
org.apache.ignite.internal.processors.port.GridPortProcessor; import org.apache.ignite.internal.processors.query.GridQueryProcessor; import org.apache.ignite.internal.processors.resource.GridResourceProcessor; import org.apache.ignite.internal.processors.rest.GridRestProcessor; import org.apache.ignite.internal.processors.schedule.IgniteScheduleProcessorAdapter; import org.apache.ignite.internal.processors.security.GridSecurityProcessor; import org.apache.ignite.internal.processors.segmentation.GridSegmentationProcessor; import org.apache.ignite.internal.processors.service.GridServiceProcessor; import org.apache.ignite.internal.processors.session.GridTaskSessionProcessor; import org.apache.ignite.internal.processors.task.GridTaskProcessor; import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor; import org.apache.ignite.internal.util.IgniteExceptionRegistry; import org.apache.ignite.internal.util.spring.IgniteSpringHelper; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.plugin.PluginNotFoundException; import org.apache.ignite.plugin.PluginProvider; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.IgniteSystemProperties.IGNITE_DAEMON; import static org.apache.ignite.internal.IgniteComponentType.SPRING; /** * Implementation of kernal context. */ @GridToStringExclude public class GridKernalContextImpl implements GridKernalContext, Externalizable { /** */ private static final long serialVersionUID = 0L; /** */ private static final ThreadLocal<String> stash = new ThreadLocal<>(); /* * Managers. 
* ======== */ /** */ @GridToStringExclude private GridDeploymentManager depMgr; /** */ @GridToStringExclude private GridIoManager ioMgr; /** */ @GridToStringExclude private GridDiscoveryManager discoMgr; /** */ @GridToStringExclude private GridCheckpointManager cpMgr; /** */ @GridToStringExclude private GridEventStorageManager evtMgr; /** */ @GridToStringExclude private GridFailoverManager failoverMgr; /** */ @GridToStringExclude private GridCollisionManager colMgr; /** */ @GridToStringExclude private GridLoadBalancerManager loadMgr; /** */ @GridToStringExclude private GridSecurityProcessor authProc; /** */ @GridToStringExclude private GridSwapSpaceManager swapspaceMgr; /** */ @GridToStringExclude private GridIndexingManager indexingMgr; /* * Processors. * ========== */ /** */ @GridToStringInclude private GridQueryProcessor qryProc; /** */ @GridToStringInclude private GridTaskProcessor taskProc; /** */ @GridToStringInclude private GridJobProcessor jobProc; /** */ @GridToStringInclude private GridTimeoutProcessor timeProc; /** */ @GridToStringInclude private GridClockSyncProcessor clockSyncProc; /** */ @GridToStringInclude private GridResourceProcessor rsrcProc; /** */ @GridToStringInclude private GridJobMetricsProcessor metricsProc; /** */ @GridToStringInclude private GridClosureProcessor closProc; /** */ @GridToStringInclude private GridServiceProcessor svcProc; /** */ @GridToStringInclude private GridCacheProcessor cacheProc; /** */ @GridToStringInclude private GridTaskSessionProcessor sesProc; /** */ @GridToStringInclude private GridPortProcessor portProc; /** */ @GridToStringInclude private GridOffHeapProcessor offheapProc; /** */ @GridToStringInclude private IgniteScheduleProcessorAdapter scheduleProc; /** */ @GridToStringInclude private GridRestProcessor restProc; /** */ @GridToStringInclude private DataStreamProcessor dataLdrProc; /** */ @GridToStringInclude private IgfsProcessorAdapter igfsProc; /** */ @GridToStringInclude private IgfsHelper igfsHelper; /** 
*/ @GridToStringInclude private GridSegmentationProcessor segProc; /** */ @GridToStringInclude private GridAffinityProcessor affProc; /** */ @GridToStringExclude private GridContinuousProcessor contProc; /** */ @GridToStringExclude private HadoopProcessorAdapter hadoopProc; /** */ @GridToStringExclude private IgnitePluginProcessor pluginProc; /** */ @GridToStringExclude private IgniteCacheObjectProcessor cacheObjProc; /** */ @GridToStringExclude private PlatformProcessor platformProc; /** */ @GridToStringExclude private IgniteSpringHelper spring; /** */ @GridToStringExclude private ClusterProcessor cluster; /** */ @GridToStringExclude private DataStructuresProcessor dataStructuresProc; /** */ @GridToStringExclude private List<GridComponent> comps = new LinkedList<>(); /** */ @GridToStringExclude protected ExecutorService execSvc; /** */ @GridToStringExclude protected ExecutorService sysExecSvc; /** */ @GridToStringExclude private ExecutorService p2pExecSvc; /** */ @GridToStringExclude private ExecutorService mgmtExecSvc; /** */ @GridToStringExclude private ExecutorService igfsExecSvc; /** */ @GridToStringExclude protected ExecutorService restExecSvc; /** */ @GridToStringExclude private Map<String, Object> attrs = new HashMap<>(); /** */ private IgniteEx grid; /** */ private ExecutorService utilityCachePool; /** */ private ExecutorService marshCachePool; /** */ private IgniteConfiguration cfg; /** */ private GridKernalGateway gw; /** Network segmented flag. */ private volatile boolean segFlag; /** Time source. */ private GridClockSource clockSrc = new GridJvmClockSource(); /** Performance suggestions. */ private final GridPerformanceSuggestions perf = new GridPerformanceSuggestions(); /** Marshaller context. */ private MarshallerContextImpl marshCtx; /** */ private ClusterNode locNode; /** */ private volatile boolean disconnected; /** * No-arg constructor is required by externalization. */ public GridKernalContextImpl() { // No-op. 
} /** * Creates new kernal context. * * @param log Logger. * @param grid Grid instance managed by kernal. * @param cfg Grid configuration. * @param gw Kernal gateway. * @param utilityCachePool Utility cache pool. * @param marshCachePool Marshaller cache pool. * @param execSvc Public executor service. * @param sysExecSvc System executor service. * @param p2pExecSvc P2P executor service. * @param mgmtExecSvc Management executor service. * @param igfsExecSvc IGFS executor service. * @param restExecSvc REST executor service. * @param plugins Plugin providers. * @throws IgniteCheckedException In case of error. */ @SuppressWarnings("TypeMayBeWeakened") protected GridKernalContextImpl( GridLoggerProxy log, IgniteEx grid, IgniteConfiguration cfg, GridKernalGateway gw, ExecutorService utilityCachePool, ExecutorService marshCachePool, ExecutorService execSvc, ExecutorService sysExecSvc, ExecutorService p2pExecSvc, ExecutorService mgmtExecSvc, ExecutorService igfsExecSvc, ExecutorService restExecSvc, List<PluginProvider> plugins) throws IgniteCheckedException { assert grid != null; assert cfg != null; assert gw != null; this.grid = grid; this.cfg = cfg; this.gw = gw; this.utilityCachePool = utilityCachePool; this.marshCachePool = marshCachePool; this.execSvc = execSvc; this.sysExecSvc = sysExecSvc; this.p2pExecSvc = p2pExecSvc; this.mgmtExecSvc = mgmtExecSvc; this.igfsExecSvc = igfsExecSvc; this.restExecSvc = restExecSvc; marshCtx = new MarshallerContextImpl(plugins); try { spring = SPRING.create(false); } catch (IgniteCheckedException ignored) { if (log != null && log.isDebugEnabled()) log.debug("Failed to load spring component, will not be able to extract userVersion from " + "META-INF/ignite.xml."); } } /** {@inheritDoc} */ @Override public Iterator<GridComponent> iterator() { return comps.iterator(); } /** {@inheritDoc} */ @Override public List<GridComponent> components() { return Collections.unmodifiableList(comps); } /** * @param comp Manager to add. 
*/ public void add(GridComponent comp) { add(comp, true); } /** * @param comp Manager to add. * @param addToList If {@code true} component is added to components list. */ public void add(GridComponent comp, boolean addToList) { assert comp != null; /* * Managers. * ======== */ if (comp instanceof GridDeploymentManager) depMgr = (GridDeploymentManager)comp; else if (comp instanceof GridIoManager) ioMgr = (GridIoManager)comp; else if (comp instanceof GridDiscoveryManager) discoMgr = (GridDiscoveryManager)comp; else if (comp instanceof GridCheckpointManager) cpMgr = (GridCheckpointManager)comp; else if (comp instanceof GridEventStorageManager) evtMgr = (GridEventStorageManager)comp; else if (comp instanceof GridFailoverManager) failoverMgr = (GridFailoverManager)comp; else if (comp instanceof GridCollisionManager) colMgr = (GridCollisionManager)comp; else if (comp instanceof GridSecurityProcessor) authProc = (GridSecurityProcessor)comp; else if (comp instanceof GridLoadBalancerManager) loadMgr = (GridLoadBalancerManager)comp; else if (comp instanceof GridSwapSpaceManager) swapspaceMgr = (GridSwapSpaceManager)comp; else if (comp instanceof GridIndexingManager) indexingMgr = (GridIndexingManager)comp; /* * Processors. 
* ========== */ else if (comp instanceof GridTaskProcessor) taskProc = (GridTaskProcessor)comp; else if (comp instanceof GridJobProcessor) jobProc = (GridJobProcessor)comp; else if (comp instanceof GridTimeoutProcessor) timeProc = (GridTimeoutProcessor)comp; else if (comp instanceof GridClockSyncProcessor) clockSyncProc = (GridClockSyncProcessor)comp; else if (comp instanceof GridResourceProcessor) rsrcProc = (GridResourceProcessor)comp; else if (comp instanceof GridJobMetricsProcessor) metricsProc = (GridJobMetricsProcessor)comp; else if (comp instanceof GridCacheProcessor) cacheProc = (GridCacheProcessor)comp; else if (comp instanceof GridTaskSessionProcessor) sesProc = (GridTaskSessionProcessor)comp; else if (comp instanceof GridPortProcessor) portProc = (GridPortProcessor)comp; else if (comp instanceof GridClosureProcessor) closProc = (GridClosureProcessor)comp; else if (comp instanceof GridServiceProcessor) svcProc = (GridServiceProcessor)comp; else if (comp instanceof IgniteScheduleProcessorAdapter) scheduleProc = (IgniteScheduleProcessorAdapter)comp; else if (comp instanceof GridSegmentationProcessor) segProc = (GridSegmentationProcessor)comp; else if (comp instanceof GridAffinityProcessor) affProc = (GridAffinityProcessor)comp; else if (comp instanceof GridRestProcessor) restProc = (GridRestProcessor)comp; else if (comp instanceof DataStreamProcessor) dataLdrProc = (DataStreamProcessor)comp; else if (comp instanceof IgfsProcessorAdapter) igfsProc = (IgfsProcessorAdapter)comp; else if (comp instanceof GridOffHeapProcessor) offheapProc = (GridOffHeapProcessor)comp; else if (comp instanceof GridContinuousProcessor) contProc = (GridContinuousProcessor)comp; else if (comp instanceof HadoopProcessorAdapter) hadoopProc = (HadoopProcessorAdapter)comp; else if (comp instanceof IgniteCacheObjectProcessor) cacheObjProc = (IgniteCacheObjectProcessor)comp; else if (comp instanceof IgnitePluginProcessor) pluginProc = (IgnitePluginProcessor)comp; else if (comp instanceof 
GridQueryProcessor) qryProc = (GridQueryProcessor)comp; else if (comp instanceof DataStructuresProcessor) dataStructuresProc = (DataStructuresProcessor)comp; else if (comp instanceof ClusterProcessor) cluster = (ClusterProcessor)comp; else if (comp instanceof PlatformProcessor) platformProc = (PlatformProcessor)comp; else if (!(comp instanceof DiscoveryNodeValidationProcessor)) assert (comp instanceof GridPluginComponent) : "Unknown manager class: " + comp.getClass(); if (addToList) comps.add(comp); } /** * @param helper Helper to add. */ public void addHelper(Object helper) { assert helper != null; if (helper instanceof IgfsHelper) igfsHelper = (IgfsHelper)helper; else assert false : "Unknown helper class: " + helper.getClass(); } /** {@inheritDoc} */ @Override public boolean isStopping() { return ((IgniteKernal)grid).isStopping(); } /** {@inheritDoc} */ @Override public UUID localNodeId() { if (locNode != null) return locNode.id(); if (discoMgr != null) locNode = discoMgr.localNode(); return locNode != null ? 
locNode.id() : config().getNodeId(); } /** {@inheritDoc} */ @Override public String gridName() { return cfg.getGridName(); } /** {@inheritDoc} */ @Override public GridKernalGateway gateway() { return gw; } /** {@inheritDoc} */ @Override public IgniteEx grid() { return grid; } /** {@inheritDoc} */ @Override public IgniteConfiguration config() { return cfg; } /** {@inheritDoc} */ @Override public GridTaskProcessor task() { return taskProc; } /** {@inheritDoc} */ @Override public GridJobProcessor job() { return jobProc; } /** {@inheritDoc} */ @Override public GridTimeoutProcessor timeout() { return timeProc; } /** {@inheritDoc} */ @Override public GridClockSyncProcessor clockSync() { return clockSyncProc; } /** {@inheritDoc} */ @Override public GridResourceProcessor resource() { return rsrcProc; } /** {@inheritDoc} */ @Override public GridJobMetricsProcessor jobMetric() { return metricsProc; } /** {@inheritDoc} */ @Override public GridCacheProcessor cache() { return cacheProc; } /** {@inheritDoc} */ @Override public GridTaskSessionProcessor session() { return sesProc; } /** {@inheritDoc} */ @Override public GridClosureProcessor closure() { return closProc; } /** {@inheritDoc} */ @Override public GridServiceProcessor service() { return svcProc; } /** {@inheritDoc} */ @Override public GridPortProcessor ports() { return portProc; } /** {@inheritDoc} */ @Override public GridOffHeapProcessor offheap() { return offheapProc; } /** {@inheritDoc} */ @Override public IgniteScheduleProcessorAdapter schedule() { return scheduleProc; } /** {@inheritDoc} */ @Override public GridDeploymentManager deploy() { return depMgr; } /** {@inheritDoc} */ @Override public GridIoManager io() { return ioMgr; } /** {@inheritDoc} */ @Override public GridDiscoveryManager discovery() { return discoMgr; } /** {@inheritDoc} */ @Override public GridCheckpointManager checkpoint() { return cpMgr; } /** {@inheritDoc} */ @Override public GridEventStorageManager event() { return evtMgr; } /** {@inheritDoc} 
*/ @Override public GridFailoverManager failover() { return failoverMgr; } /** {@inheritDoc} */ @Override public GridCollisionManager collision() { return colMgr; } /** {@inheritDoc} */ @Override public GridSecurityProcessor security() { return authProc; } /** {@inheritDoc} */ @Override public GridLoadBalancerManager loadBalancing() { return loadMgr; } /** {@inheritDoc} */ @Override public GridSwapSpaceManager swap() { return swapspaceMgr; } /** {@inheritDoc} */ @Override public GridIndexingManager indexing() { return indexingMgr; } /** {@inheritDoc} */ @Override public GridAffinityProcessor affinity() { return affProc; } /** {@inheritDoc} */ @Override public GridRestProcessor rest() { return restProc; } /** {@inheritDoc} */ @Override public GridSegmentationProcessor segmentation() { return segProc; } /** {@inheritDoc} */ @SuppressWarnings("unchecked") @Override public <K, V> DataStreamProcessor<K, V> dataStream() { return (DataStreamProcessor<K, V>)dataLdrProc; } /** {@inheritDoc} */ @Override public IgfsProcessorAdapter igfs() { return igfsProc; } /** {@inheritDoc} */ @Override public IgfsHelper igfsHelper() { return igfsHelper; } /** {@inheritDoc} */ @Override public GridContinuousProcessor continuous() { return contProc; } /** {@inheritDoc} */ @Override public HadoopProcessorAdapter hadoop() { return hadoopProc; } /** {@inheritDoc} */ @Override public ExecutorService utilityCachePool() { return utilityCachePool; } /** {@inheritDoc} */ @Override public ExecutorService marshallerCachePool() { return marshCachePool; } /** {@inheritDoc} */ @Override public IgniteCacheObjectProcessor cacheObjects() { return cacheObjProc; } /** {@inheritDoc} */ @Override public GridQueryProcessor query() { return qryProc; } /** {@inheritDoc} */ @Override public DataStructuresProcessor dataStructures() { return dataStructuresProc; } /** {@inheritDoc} */ @Override public IgniteLogger log() { return config().getGridLogger(); } /** {@inheritDoc} */ @Override public IgniteLogger 
log(Class<?> cls) { return config().getGridLogger().getLogger(cls); } /** {@inheritDoc} */ @Override public void markSegmented() { segFlag = true; } /** {@inheritDoc} */ @Override public boolean segmented() { return segFlag; } /** {@inheritDoc} */ @Override public GridClockSource timeSource() { return clockSrc; } /** * Sets time source. For test purposes only. * * @param clockSrc Time source. */ public void timeSource(GridClockSource clockSrc) { this.clockSrc = clockSrc; } /** {@inheritDoc} */ @Override public GridPerformanceSuggestions performance() { return perf; } /** {@inheritDoc} */ @Override public void printMemoryStats() { X.println(">>> "); X.println(">>> Grid memory stats [grid=" + gridName() + ']'); for (GridComponent comp : comps) comp.printMemoryStats(); } /** {@inheritDoc} */ @Override public boolean isDaemon() { return config().isDaemon() || "true".equalsIgnoreCase(System.getProperty(IGNITE_DAEMON)); } /** {@inheritDoc} */ @Override public String userVersion(ClassLoader ldr) { return spring != null ? spring.userVersion(ldr, log()) : U.DFLT_USER_VERSION; } /** {@inheritDoc} */ @Override public PluginProvider pluginProvider(String name) throws PluginNotFoundException { PluginProvider plugin = pluginProc.pluginProvider(name); if (plugin == null) throw new PluginNotFoundException(name); return plugin; } /** {@inheritDoc} */ @SuppressWarnings("unchecked") @Nullable @Override public <T> T createComponent(Class<T> cls) { T res = pluginProc.createComponent(cls); if (res != null) return res; if (cls.equals(IgniteCacheObjectProcessor.class)) return (T)new CacheObjectPortableProcessorImpl(this); if (cls.equals(CacheConflictResolutionManager.class)) return null; throw new IgniteException("Unsupported component type: " + cls); } /** * @return Plugin manager. 
*/ @Override public IgnitePluginProcessor plugins() { return pluginProc; } /** {@inheritDoc} */ @Override public void writeExternal(ObjectOutput out) throws IOException { U.writeString(out, grid.name()); } /** {@inheritDoc} */ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { stash.set(U.readString(in)); } /** * Reconstructs object on unmarshalling. * * @return Reconstructed object. * @throws ObjectStreamException Thrown in case of unmarshalling error. */ protected Object readResolve() throws ObjectStreamException { try { return IgnitionEx.gridx(stash.get()).context(); } catch (IllegalStateException e) { throw U.withCause(new InvalidObjectException(e.getMessage()), e); } finally { stash.remove(); } } /** {@inheritDoc} */ @Override public ExecutorService getExecutorService() { return execSvc; } /** {@inheritDoc} */ @Override public ExecutorService getSystemExecutorService() { return sysExecSvc; } /** {@inheritDoc} */ @Override public ExecutorService getManagementExecutorService() { return mgmtExecSvc; } /** {@inheritDoc} */ @Override public ExecutorService getPeerClassLoadingExecutorService() { return p2pExecSvc; } /** {@inheritDoc} */ @Override public ExecutorService getIgfsExecutorService() { return igfsExecSvc; } /** {@inheritDoc} */ @Override public ExecutorService getRestExecutorService() { return restExecSvc; } /** {@inheritDoc} */ @Override public IgniteExceptionRegistry exceptionRegistry() { return IgniteExceptionRegistry.get(); } /** {@inheritDoc} */ @Override public Object nodeAttribute(String key) { return attrs.get(key); } /** {@inheritDoc} */ @Override public boolean hasNodeAttribute(String key) { return attrs.containsKey(key); } /** {@inheritDoc} */ @Override public Object addNodeAttribute(String key, Object val) { return attrs.put(key, val); } /** {@inheritDoc} */ @Override public Map<String, Object> nodeAttributes() { return attrs; } /** {@inheritDoc} */ @Override public ClusterProcessor cluster() { 
return cluster; } /** {@inheritDoc} */ @Override public MarshallerContextImpl marshallerContext() { return marshCtx; } /** {@inheritDoc} */ @Override public boolean clientNode() { return cfg.isClientMode() || cfg.isDaemon(); } /** {@inheritDoc} */ @Override public boolean clientDisconnected() { if (locNode == null) locNode = discoMgr != null ? discoMgr.localNode() : null; return locNode != null ? (locNode.isClient() && disconnected) : false; } /** {@inheritDoc} */ @Override public PlatformProcessor platform() { return platformProc; } /** * @param disconnected Disconnected flag. */ void disconnected(boolean disconnected) { this.disconnected = disconnected; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridKernalContextImpl.class, this); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security.authorization.permission; import java.lang.reflect.Field; import java.security.Principal; import javax.annotation.Nonnull; import javax.jcr.security.AccessControlList; import javax.jcr.security.AccessControlManager; import com.google.common.collect.ImmutableSet; import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils; import org.apache.jackrabbit.oak.AbstractSecurityTest; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.namepath.NamePathMapper; import org.apache.jackrabbit.oak.plugins.memory.PropertyStates; import org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants; import org.apache.jackrabbit.oak.plugins.tree.impl.ImmutableTree; import org.apache.jackrabbit.oak.plugins.version.ReadOnlyVersionManager; import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration; import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider; import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions; import 
org.apache.jackrabbit.oak.spi.security.authorization.permission.TreePermission; import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.util.NodeUtil; import org.apache.jackrabbit.oak.plugins.tree.TreeUtil; import org.junit.Test; import static com.google.common.base.Preconditions.checkNotNull; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class VersionTreePermissionTest extends AbstractSecurityTest implements NodeTypeConstants { private ReadOnlyVersionManager vMgr; private PermissionProvider pp; private Tree testTree; private Field vpField; private Field tpImplTree; @Override public void before() throws Exception { super.before(); NodeUtil testNode = new NodeUtil(root.getTree("/")).addChild("test", NT_OAK_UNSTRUCTURED); testNode.addChild("a", NT_OAK_UNSTRUCTURED).addChild("b", NT_OAK_UNSTRUCTURED).addChild("c", NT_OAK_UNSTRUCTURED); TreeUtil.addMixin(testNode.getTree(), MIX_VERSIONABLE, root.getTree(NODE_TYPES_PATH), null); AccessControlManager acMgr = getAccessControlManager(root); AccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, "/test"); acl.addAccessControlEntry(EveryonePrincipal.getInstance(), AccessControlUtils.privilegesFromNames(acMgr, PrivilegeConstants.JCR_READ)); acMgr.setPolicy("/test", acl); root.commit(); // create a structure in the version storage testNode.setBoolean(JCR_ISCHECKEDOUT, false); root.commit(); testNode.setBoolean(JCR_ISCHECKEDOUT, true); root.commit(); testTree = testNode.getTree(); vMgr = ReadOnlyVersionManager.getInstance(root, NamePathMapper.DEFAULT); pp = getConfig(AuthorizationConfiguration.class).getPermissionProvider(root, root.getContentSession().getWorkspaceName(), ImmutableSet.<Principal>of(EveryonePrincipal.getInstance())); 
assertTrue(pp instanceof PermissionProviderImpl); vpField = VersionTreePermission.class.getDeclaredField("versionablePermission"); vpField.setAccessible(true); Class cls = Class.forName(CompiledPermissionImpl.class.getName() + "$TreePermissionImpl"); tpImplTree = cls.getDeclaredField("tree"); tpImplTree.setAccessible(true); } @Override public void after() throws Exception { try { root.refresh(); Tree t = root.getTree("/test"); if (t.exists()) { t.remove(); root.commit(); } } finally { super.after(); } } private static TreePermission getVersionPermission(Root root, PermissionProvider pp, String path) { Tree t = root.getTree("/"); TreePermission tp = pp.getTreePermission(t, TreePermission.EMPTY); for (String name : PathUtils.elements(path)) { t = t.getChild(name); tp = pp.getTreePermission(t, tp); } return tp; } private void assertVersionPermission(@Nonnull TreePermission tp, @Nonnull String expectedPath, boolean canRead) throws Exception { assertTrue(tp instanceof VersionTreePermission); assertEquals(canRead, tp.canRead()); assertEquals(canRead, tp.canRead(PropertyStates.createProperty("any", "Value"))); assertEquals(canRead, tp.isGranted(Permissions.READ)); assertEquals(canRead, tp.isGranted(Permissions.READ, PropertyStates.createProperty("any", "Value"))); assertEquals(canRead, tp.canReadProperties()); assertFalse(tp.canReadAll()); VersionTreePermission vtp = (VersionTreePermission) tp; TreePermission delegatee = (TreePermission) vpField.get(vtp); Tree delegateeTree = (Tree) tpImplTree.get(delegatee); assertEquals(expectedPath, delegateeTree.getPath()); } @Test public void testGetTreePermission() throws Exception { Tree versionHistory = checkNotNull(vMgr.getVersionHistory(testTree)); String expectedPath = "/test"; TreePermission tp = getVersionPermission(root, pp, versionHistory.getPath()); assertVersionPermission(tp, expectedPath, true); Tree vTree = versionHistory.getChild("1.0"); assertTrue(vTree.exists()); tp = pp.getTreePermission(vTree, tp); 
assertVersionPermission(tp, expectedPath, true); Tree frozen = vTree.getChild(JCR_FROZENNODE); assertTrue(frozen.exists()); tp = pp.getTreePermission(frozen, tp); assertVersionPermission(tp, expectedPath, true); Tree t = frozen; for (String name : new String[] {"a", "b", "c"}) { t = t.getChild(name); expectedPath = PathUtils.concat(expectedPath, name); tp = pp.getTreePermission(t, tp); assertVersionPermission(tp, expectedPath, true); } } @Test public void testGetChild() throws Exception { Tree versionHistory = checkNotNull(vMgr.getVersionHistory(testTree)); ImmutableTree t = (ImmutableTree) getRootProvider().createReadOnlyRoot(root).getTree("/"); TreePermission tp = pp.getTreePermission(t, TreePermission.EMPTY); for (String name : PathUtils.elements(versionHistory.getPath())) { t = t.getChild(name); tp = tp.getChildPermission(name, t.getNodeState()); } String expectedPath = "/test"; assertVersionPermission(tp, "/test", true); NodeState ns = t.getChild("1.0").getNodeState(); tp = tp.getChildPermission("1.0", ns); assertVersionPermission(tp, "/test", true); ns = ns.getChildNode(JCR_FROZENNODE); tp = tp.getChildPermission(JCR_FROZENNODE, ns); assertVersionPermission(tp, "/test", true); for (String name : new String[] {"a", "b", "c"}) { ns = ns.getChildNode(name); expectedPath = PathUtils.concat(expectedPath, name); tp = tp.getChildPermission(name, ns); assertVersionPermission(tp, expectedPath, true); } } @Test public void testVersionableRemoved() throws Exception { Tree versionHistory = checkNotNull(vMgr.getVersionHistory(testTree)); testTree.remove(); root.commit(); pp.refresh(); TreePermission tp = getVersionPermission(root, pp, versionHistory.getPath()); assertVersionPermission(tp, "/", false); Tree vTree = versionHistory.getChild("1.0"); tp = pp.getTreePermission(vTree, tp); assertVersionPermission(tp, "/", false); Tree frozen = vTree.getChild(JCR_FROZENNODE); assertTrue(frozen.exists()); tp = pp.getTreePermission(frozen, tp); assertVersionPermission(tp, "/", 
false); Tree t = frozen; String expectedPath = "/"; for (String name : new String[] {"a", "b", "c"}) { t = t.getChild(name); expectedPath = PathUtils.concat(expectedPath, name); tp = pp.getTreePermission(t, tp); assertVersionPermission(tp, expectedPath, false); } } @Test public void testVersionableChildRemoved() throws Exception { root.getTree("/test/a/b/c").remove(); root.commit(); pp.refresh(); Tree versionHistory = checkNotNull(vMgr.getVersionHistory(testTree)); String frozenCPath = PathUtils.concat(versionHistory.getPath(), "1.0", JCR_FROZENNODE, "a/b/c"); TreePermission tp = getVersionPermission(root, pp, frozenCPath); assertVersionPermission(tp, "/test/a/b/c", true); root.getTree("/test/a").remove(); root.commit(); pp.refresh(); tp = getVersionPermission(root, pp, frozenCPath); assertVersionPermission(tp, "/test/a/b/c", true); } @Test public void testVersionableChildRemoved2() throws Exception { root.getTree("/test/a/b").remove(); root.commit(); pp.refresh(); Tree versionHistory = checkNotNull(vMgr.getVersionHistory(testTree)); String frozenAPath = PathUtils.concat(versionHistory.getPath(), "1.0", JCR_FROZENNODE, "a"); TreePermission tp = getVersionPermission(root, pp, frozenAPath); assertVersionPermission(tp, "/test/a", true); Tree frozenB = root.getTree(frozenAPath).getChild("b"); tp = pp.getTreePermission(frozenB, tp); assertVersionPermission(tp, "/test/a/b", true); Tree frozenC = frozenB.getChild("c"); tp = pp.getTreePermission(frozenC, tp); assertVersionPermission(tp, "/test/a/b/c", true); } }
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * */ package com.google.mystery.web; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; import javax.inject.Inject; import org.apache.commons.text.WordUtils; import com.google.actions.api.ActionContext; import com.google.actions.api.response.ResponseBuilder; import com.google.actions.api.response.helperintent.NewSurface; import com.google.actions.api.response.helperintent.SelectionCarousel; import com.google.api.services.actions_fulfillment.v2.model.BasicCard; import com.google.api.services.actions_fulfillment.v2.model.Button; import com.google.api.services.actions_fulfillment.v2.model.CarouselSelectCarouselItem; import com.google.api.services.actions_fulfillment.v2.model.HtmlResponse; import com.google.api.services.actions_fulfillment.v2.model.Image; import com.google.api.services.actions_fulfillment.v2.model.LinkOutSuggestion; import com.google.api.services.actions_fulfillment.v2.model.MediaObject; import com.google.api.services.actions_fulfillment.v2.model.MediaResponse; import com.google.api.services.actions_fulfillment.v2.model.OpenUrlAction; import com.google.api.services.actions_fulfillment.v2.model.OptionInfo; import com.google.api.services.actions_fulfillment.v2.model.SimpleResponse; import 
com.google.api.services.actions_fulfillment.v2.model.Suggestion;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.mystery.actions.SessionManager;
import com.google.mystery.actions.messages.MessageException;
import com.google.mystery.actions.messages.MessagesManager;
import com.google.mystery.actions.model.SherlockContext;
import com.google.mystery.actions.model.SherlockRequest;
import com.google.mystery.actions.model.SherlockResponse;
import com.google.mystery.actions.model.SherlockResponseCard;
import com.google.mystery.actions.model.SherlockScreenData.Type;
import com.google.mystery.assets.AssetsManager;
import com.google.mystery.config.SherlockConfig;
import com.google.mystery.data.model.Clue;
import com.google.mystery.data.model.Session;
import com.google.mystery.data.model.Session.State;
import com.google.mystery.data.model.Story;

/**
 * Translates a computed {@link SherlockResponse} into a Dialogflow v2 webhook
 * {@link ResponseBuilder}: Interactive Canvas HTML responses when the surface supports it,
 * otherwise plain text / media / carousel responses, plus contexts, suggestions, user storage
 * and end-of-conversation handling.
 *
 * @author ilyaplatonov
 */
public class DFV2ResponseGenerator {
  @Inject
  private SherlockConfig config;
  @Inject
  private SessionManager sessionManager;
  @Inject
  private AssetsManager assetsManager;
  @Inject
  private MessagesManager messagesManager;

  /** logger. */
  private Logger logger = Logger.getLogger(this.getClass().getName());

  /**
   * Populates {@code webhookResponse} from {@code response}.
   *
   * <p>Canvas-capable requests are rendered as HTML responses; all others fall back to
   * audio-story or text handling plus chip suggestions. Contexts, user storage, surface
   * transfer and conversation-end flags are applied in every case.
   *
   * @param request incoming request (carries surface capabilities and session id)
   * @param response computed game response to render
   * @param dialogflowSession Dialogflow session identifier (currently unused beyond contexts)
   * @param webhookResponse builder the rendered response is added to
   */
  public void generate(SherlockRequest request, SherlockResponse response,
      String dialogflowSession, ResponseBuilder webhookResponse) {
    if (!handleCanvas(request, response, webhookResponse)) {
      if (response.getStoryAudioURL() == null) {
        handleNoAudioStory(response, webhookResponse);
      } else {
        handleAudioStory(response, webhookResponse);
      }
      handleSuggestions(request, response, webhookResponse);
    }
    handleContexts(response, dialogflowSession, webhookResponse);
    handleUserStorage(request.getSessionid(), webhookResponse);
    handleOtherSurface(response, webhookResponse);
    handleEndConversation(response, webhookResponse);
  }

  /**
   * Renders an Interactive Canvas response when the request supports it.
   *
   * @return true if a canvas response was produced (caller must then skip the
   *         non-canvas rendering path), false otherwise
   */
  private boolean handleCanvas(SherlockRequest request, SherlockResponse response,
      ResponseBuilder webhookResponse) {
    if (!request.isCanvas()) {
      return false;
    }
    if (response.getStoryAudioURL() != null) {
      addText(webhookResponse, response.getPrestoryText());
      handleStoryCanvas(request, response, webhookResponse);
      return true;
    } else if (response.getScreenData() != null) {
      if (response.getScreenData().getType() == Type.MAP) {
        addText(webhookResponse, response.getCombinedText());
        handleMapCanvas(request, response, webhookResponse);
        return true;
      } else if (response.getScreenData().getType() == Type.NEWSPAPER) {
        addText(webhookResponse, response.getCombinedText());
        handleCanvasNewspaper(request, response, webhookResponse);
        return true;
      } else if (response.getScreenData().getType() == Type.MENU) {
        handleMenu(response, webhookResponse);
        return true;
      } else if (response.getScreenData().getType() == Type.BASE) {
        handleBaseCanvas(response, webhookResponse);
        return true;
      }
    } else if (!request.hasBrowser() || response.getImage() == null
        || response.getLinkOut() == null) {
      // No explicit screen data: use the base canvas unless the response is a
      // browser-targeted image+link combination.
      handleBaseCanvas(response, webhookResponse);
      return true;
    }
    return false;
  }

  /**
   * Rendering initial welcome menu.
   */
  protected void handleMenu(SherlockResponse response, ResponseBuilder webhookResponse) {
    addText(webhookResponse, response.getCombinedText());
    Map<String, Object> data = new HashMap<>();
    data.put("title", "Sherlock Mysteries");
    data.put("backgroundImage",
        "https://storage.googleapis.com/" + config.getBucketName() + "/images/background.png");
    setSuggestions(data, response);
    webhookResponse.add(new HtmlResponse().setSuppressMic(false)
        .setUrl(config.getUrl("/static/canvas/menu.html").toString()).setUpdatedState(data));
  }

  /**
   * Base canvas support: renders text, optional image and optional card list into the
   * generic {@code base.html} canvas page.
   *
   * @param response response to render
   * @param webhookResponse builder the HTML response is added to
   */
  protected void handleBaseCanvas(SherlockResponse response, ResponseBuilder webhookResponse) {
    addText(webhookResponse, response.getCombinedText());
    Map<String, Object> data = new HashMap<>();
    if (response.getScreenData() == null || response.getScreenData().isShowText()) {
      data.put("html", SherlockResponse.textToHtml(response.getCombinedText()));
    }
    if (response.getImage() != null) {
      data.put("image", response.getImage().toString());
    }
    if (response.getCards() != null && !response.getCards().isEmpty()) {
      JsonArray cardsArray = new JsonArray();
      if (response.getTitle() != null) {
        data.put("cardsTitle", response.getTitle());
      }
      data.put("cards", cardsArray);
      for (SherlockResponseCard card : response.getCards()) {
        JsonObject cardElement = new JsonObject();
        cardElement.addProperty("title", card.getTitle());
        if (card.getImageUrl() != null) {
          cardElement.addProperty("image", card.getImageUrl().toString());
        } else {
          cardElement.addProperty("image", config.getPlaceholderUrl().toString());
        }
        cardElement.addProperty("description", card.getDescription());
        cardElement.addProperty("query", card.getKey());
        cardsArray.add(cardElement);
      }
    }
    setSuggestions(data, response);
    setShowQuickMenu(data);
    webhookResponse.add(new HtmlResponse().setSuppressMic(false)
        .setUrl(config.getUrl("/static/canvas/base.html").toString()).setUpdatedState(data));
  }

  /** Renders the full newspaper (all articles) into the {@code story.html} canvas page. */
  protected void handleCanvasNewspaper(SherlockRequest request, SherlockResponse response,
      ResponseBuilder webhookResponse) {
    Map<String, Object> data = new HashMap<>();
    data.put("title", "Newspaper");
    List<Story> articles = assetsManager.getAllAricles(request.getCaseDataId());
    String html = "";
    try {
      html = messagesManager.message("readAllArticlesHTML", ImmutableMap.of("articles", articles));
    } catch (MessageException | IOException e) {
      // Best effort: fall back to an empty article body rather than failing the response.
      logger.log(Level.SEVERE, "Error rendering html", e);
    }
    data.put("html", html);
    if (response.getImage() != null) {
      data.put("image", response.getImage().toString());
    }
    setSuggestions(data, response, "Continue");
    setShowQuickMenu(data);
    webhookResponse.add(new HtmlResponse().setSuppressMic(false)
        .setUrl(config.getUrl("/static/canvas/story.html").toString()).setUpdatedState(data));
  }

  /**
   * Renders the case map canvas: one marker per visited location (plus the case
   * introduction), parsed from each story's "lat,long" string.
   */
  protected void handleMapCanvas(SherlockRequest request, SherlockResponse response,
      ResponseBuilder webhookResponse) {
    Session session = sessionManager.getSession();
    List<Story> stories = new ArrayList<>();
    stories.add(assetsManager.getStory(request.getCaseDataId(), Story.CASE_INTRODUCTION_ID));
    for (String location : session.getLocationsBacklog()) {
      stories.add(assetsManager.getStory(request.getCaseDataId(), location));
    }
    JsonArray markers = new JsonArray();
    for (Story story : stories) {
      try {
        if (story.getLatlong() != null) {
          String[] split = story.getLatlong().split(",");
          if (split.length == 2) {
            JsonObject marker = new JsonObject();
            marker.addProperty("title", story.getTitle());
            marker.addProperty("lat", Double.parseDouble(split[0].trim()));
            marker.addProperty("long", Double.parseDouble(split[1].trim()));
            if (Story.LOCATION.equals(story.getType())) {
              marker.addProperty("address", WordUtils.capitalize(story.getId()));
            }
            marker.addProperty("id", story.getId());
            markers.add(marker);
          }
        }
      } catch (NumberFormatException e) {
        // Skip a story with a malformed lat/long instead of dropping the whole map.
        logger.log(Level.WARNING, "Error lat/long parsing", e);
      }
    }
    Map<String, Object> data = new HashMap<>();
    data.put("locations", markers);
    setSuggestions(data, response, "Continue");
    setShowQuickMenu(data);
    webhookResponse
        .add(new HtmlResponse().setUrl(config.getUrl("/static/canvas/map.html").toString())
            .setSuppressMic(true).setUpdatedState(data));
  }

  /** Enables the canvas quick menu for any session past the initial NEW state. */
  private void setShowQuickMenu(Map<String, Object> data) {
    if (sessionManager.getSession().getState() != State.NEW) {
      data.put("showQuickMenu", Boolean.TRUE);
    }
  }

  /**
   * Puts the suggestion chip list into canvas state: {@code extra} chips first, then the
   * response's own suggestions.
   */
  private void setSuggestions(Map<String, Object> data, SherlockResponse response,
      String... extra) {
    List<String> suggestions = new ArrayList<>();
    for (String s : extra) {
      suggestions.add(s);
    }
    if (response.getSuggestions() != null) {
      for (String s : response.getSuggestions()) {
        suggestions.add(s);
      }
    }
    data.put("suggestions", suggestions);
  }

  /**
   * Handling canvas API response elements
   */
  private void handleStoryCanvas(SherlockRequest request, SherlockResponse response,
      ResponseBuilder webhookResponse) {
    Map<String, Object> data = new HashMap<>();
    data.put("title", response.getTitle());
    if (response.getStoryAudioURL() != null) {
      data.put("audio", response.getStoryAudioURL().toString());
    }
    data.put("html", SherlockResponse.textToHtml(response.getStoryText()));
    if (response.getImage() != null) {
      data.put("image", response.getImage().toString());
    }
    if (response.getHint() != null) {
      data.put("hint", response.getHint().getHint());
    }
    setSuggestions(data, response, "Continue");
    setShowQuickMenu(data);
    if (!response.getClues().isEmpty()) {
      if (response.getTitle() != null) {
        data.put("cardsTitle", "New Clues");
      }
      JsonArray cardsArray = new JsonArray();
      for (Clue clue : response.getClues()) {
        JsonObject cardElement = new JsonObject();
        cardElement.addProperty("title", clue.getName());
        if (clue.getImageUrl() != null) {
          cardElement.addProperty("image", clue.getImageUrl().toString());
        } else {
          cardElement.addProperty("image", config.getPlaceholderUrl().toString());
        }
        cardElement.addProperty("description", clue.getDescription());
        cardElement.addProperty("query", String.format("Show %s", clue.getName()));
        cardsArray.add(cardElement);
      }
      data.put("cards", cardsArray);
    }
    webhookResponse
        .add(new HtmlResponse().setUrl(config.getUrl("/static/canvas/story.html").toString())
            .setSuppressMic(true).setUpdatedState(data));
    if (!Strings.isNullOrEmpty(response.getAfterstoryText())) {
      // setting text to show up on next request
      sessionManager.setFollowupText(response.getAfterstoryText());
    }
  }

  /** Renders the story as an audio MediaResponse (non-canvas surfaces). */
  private void handleAudioStory(SherlockResponse response, ResponseBuilder webhookResponse) {
    if (response.getPrestoryText().trim().length() > 0) {
      String text = SherlockResponse.textToText(response.getPrestoryText());
      addText(webhookResponse, text);
    }
    MediaObject mediaObject = new MediaObject();
    mediaObject.setContentUrl(response.getStoryAudioURL().toString())
        .setName(response.getTitle());
    if (response.getImage() == null) {
      mediaObject.setLargeImage(new Image().setUrl(config.getPlaceholderUrl().toString())
          .setAccessibilityText(Optional.of(response.getTitle()).orElse("media")));
    } else {
      mediaObject.setLargeImage(new Image().setUrl(response.getImage().toString())
          .setAccessibilityText(response.getImageAlt()));
    }
    webhookResponse.add(
        new MediaResponse().setMediaObjects(ImmutableList.of(mediaObject)).setMediaType("AUDIO"));
    if (!Strings.isNullOrEmpty(response.getAfterstoryText())) {
      // setting text to show up on next request
      sessionManager.setFollowupText(response.getAfterstoryText());
    }
  }

  /**
   * Renders a text-only story: splits long text into two simple responses (the Assistant
   * limit per bubble is 640 chars), inserts the image card between them, and appends a
   * selection carousel when there are multiple cards and no image.
   */
  private void handleNoAudioStory(SherlockResponse response, ResponseBuilder webhookResponse) {
    String[] splittedText;
    if (Strings.isNullOrEmpty(response.getAfterstoryText())) {
      splittedText = splitTextIntoTwo(response.getCombinedText(), 640);
    } else {
      if (response.getCombinedText().length() > 640 || response.getImage() != null) {
        splittedText = new String[] {response.getPreAndText(), response.getAfterstoryText()};
      } else {
        splittedText = new String[] {response.getCombinedText()};
      }
    }
    if (splittedText.length > 0) {
      addText(webhookResponse, splittedText[0]);
    }
    if (response.getImage() != null) {
      addImage(response, webhookResponse);
    }
    for (int i = 1; i < splittedText.length; i++) {
      addText(webhookResponse, splittedText[i]);
    }
    if (response.getImage() == null && !response.getCards().isEmpty()) {
      if (response.getCards().size() > 1) {
        List<SherlockResponseCard> responseCards = response.getCards();
        // The Assistant carousel supports at most 10 items.
        if (responseCards.size() > 10) {
          responseCards = responseCards.subList(0, 10);
          logger.warning("Got more that 10 carusel cards");
        }
        List<CarouselSelectCarouselItem> items = new ArrayList<>();
        // BUG FIX: iterate the truncated list; the original iterated response.getCards()
        // which ignored the 10-item cap computed above.
        for (SherlockResponseCard sherlockCard : responseCards) {
          CarouselSelectCarouselItem item = new CarouselSelectCarouselItem();
          item.setTitle(sherlockCard.getTitle());
          item.setDescription(limitLines(sherlockCard.getDescription(), 4));
          URL imageUrl = sherlockCard.getImageUrl();
          if (imageUrl == null) {
            imageUrl = config.getPlaceholderUrl();
          }
          String imageAlt = sherlockCard.getImageAlt();
          if (imageAlt == null) {
            imageAlt = sherlockCard.getTitle();
          }
          item.setImage(new Image().setUrl(imageUrl.toString()).setAccessibilityText(imageAlt));
          item.setOptionInfo(new OptionInfo().setKey(sherlockCard.getKey())
              .setSynonyms(sherlockCard.getSynonyms()));
          items.add(item);
        }
        webhookResponse.add(new SelectionCarousel().setItems(items));
      }
    }
  }

  /** Adds outgoing Dialogflow contexts; a missing lifespan defaults to 3 turns. */
  private void handleContexts(SherlockResponse response, String dialogflowSession,
      ResponseBuilder webhookResponse) {
    for (SherlockContext responseContext : response.getOutContexts()) {
      ActionContext actionContext = new ActionContext(responseContext.getName(),
          responseContext.getLifespan() == null ? 3 : responseContext.getLifespan());
      if (response.getContextParameters() != null) {
        actionContext.setParameters(response.getContextParameters());
      }
      webhookResponse.add(actionContext);
    }
  }

  /**
   * Adds suggestion chips (max 8, each at most 20 chars — longer ones are dropped with a
   * warning) and an optional link-out suggestion.
   */
  private void handleSuggestions(SherlockRequest request, SherlockResponse response,
      ResponseBuilder webhookResponse) {
    if (!response.getEndConversation()) {
      List<String> suggestionsList = Lists.newArrayList(response.getSuggestions());
      // if we have link, but do not support browser we suggest to open Investigation Log.
      if (response.getLinkOut() != null && !request.hasBrowser()) {
        suggestionsList.add("Investigation Log");
      }
      if (suggestionsList.size() > 8) {
        suggestionsList = suggestionsList.subList(0, 7);
      }
      for (String s : suggestionsList) {
        if (s.length() > 20) {
          Logger.getLogger(this.getClass().getName())
              .warning("Suggestion is too long to use: " + s);
        } else {
          webhookResponse.add(new Suggestion().setTitle(s));
        }
      }
    }
    if (response.getLinkOut() != null) {
      webhookResponse.add(new LinkOutSuggestion().setDestinationName(response.getLinkOutTitle())
          .setOpenUrlAction(new OpenUrlAction().setUrl(response.getLinkOut().toString())));
    }
  }

  /** Adds the response image as a basic card, with an optional link-out button. */
  private void addImage(SherlockResponse response, ResponseBuilder webhookResponse) {
    Image image = new Image().setUrl(response.getImage().toString())
        .setAccessibilityText(response.getImageAlt());
    BasicCard basicCard = new BasicCard().setImage(image);
    if (response.getLinkOut() != null) {
      basicCard.setButtons(ImmutableList.of(new Button().setTitle(response.getLinkOutTitle())
          .setOpenUrlAction(new OpenUrlAction().setUrl(response.getLinkOut().toString()))));
    }
    webhookResponse.add(basicCard);
  }

  /**
   * Adds one simple response with display text (truncated to the 640-char limit) and
   * matching SSML.
   */
  private void addText(ResponseBuilder webhookResponse, String string) {
    String text = SherlockResponse.textToText(string);
    if (text.length() > 640) {
      text = text.substring(0, 640);
    }
    String ssml = "<speak>" + SherlockResponse.textToSsml(string) + "</speak>";
    webhookResponse.add(new SimpleResponse().setDisplayText(text).setSsml(ssml));
  }

  /** Ends the conversation when the response requests it. */
  private void handleEndConversation(SherlockResponse response,
      ResponseBuilder webhookResponse) {
    if (response.getEndConversation()) {
      webhookResponse.endConversation();
    }
  }

  /** Building transfer to their device response. */
  private void handleOtherSurface(SherlockResponse response, ResponseBuilder webhookResponse) {
    if (response.getOtherSurfaceTitle() != null && response.getOtherSurfaceMessage() != null) {
      webhookResponse.add(new NewSurface().setNotificationTitle(response.getOtherSurfaceTitle())
          .setContext(response.getOtherSurfaceMessage())
          .setCapability("actions.intent.NEW_SURFACE"));
    }
  }

  /** Setting userStorage object */
  private void handleUserStorage(String sessionid, ResponseBuilder webhookResponse) {
    webhookResponse.getUserStorage().put("sessionid", sessionid);
  }

  /**
   * Splits text into at most two parts on paragraph ("\n\n") boundaries so the first part
   * fits within {@code limit} characters (the first paragraph is always kept even if it
   * alone exceeds the limit).
   *
   * @param text text to split
   * @param limit soft character limit for the first part
   * @return one-element array when no split is needed, otherwise {first, second}
   */
  public static String[] splitTextIntoTwo(String text, int limit) {
    List<String> splitted = Splitter.on("\n\n").trimResults().splitToList(text.trim());
    int firstNum = 0;
    int size = 0;
    for (; firstNum < splitted.size(); firstNum++) {
      size = size + splitted.get(firstNum).length() + 2;
      if (size - 2 > limit) {
        break;
      }
    }
    if (firstNum == 0) {
      firstNum++;
    }
    if (firstNum == splitted.size()) {
      return new String[] {text.trim()};
    }
    String first = Joiner.on("\n\n").join(splitted.subList(0, firstNum));
    String second = Joiner.on("\n\n").join(splitted.subList(firstNum, splitted.size()));
    return new String[] {first, second};
  }

  /**
   * Limiting number of lines for given string.
   *
   * <p>NOTE(review): when the string exceeds {@code limit} lines only the FIRST line is
   * returned, not the first {@code limit} lines — looks intentional for short card
   * descriptions, but confirm before relying on it.
   */
  public static String limitLines(String string, int limit) {
    String[] split = string.split("\n");
    if (split.length > limit) {
      return split[0];
    }
    return string;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.distributed.internal.membership.gms; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.net.InetAddress; import org.jgroups.util.UUID; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.internal.inet.LocalHostUtil; import org.apache.geode.internal.serialization.BufferDataOutputStream; import org.apache.geode.internal.serialization.DSFIDSerializer; import org.apache.geode.internal.serialization.DSFIDSerializerFactory; import org.apache.geode.internal.serialization.DeserializationContext; import org.apache.geode.internal.serialization.KnownVersion; import org.apache.geode.internal.serialization.SerializationContext; import org.apache.geode.internal.serialization.VersionedDataInputStream; import org.apache.geode.test.junit.categories.SecurityTest; @Category({SecurityTest.class}) public class GMSMemberDataJUnitTest { private 
DSFIDSerializer dsfidSerializer;

  @Before
  public void setup() {
    dsfidSerializer = new DSFIDSerializerFactory().create();
  }

  @Test
  public void testEqualsNotSameType() {
    GMSMemberData member = new GMSMemberData();
    assertThat(member).isNotEqualTo("Not a GMSMemberData");
  }

  @Test
  public void testEqualsIsSame() {
    GMSMemberData member = new GMSMemberData();
    assertThat(member).isEqualTo(member);
  }

  @Test
  public void testCompareToIsSame() {
    GMSMemberData member = new GMSMemberData();
    UUID uuid = new UUID(0, 0);
    member.setUUID(uuid);
    assertThat(member.compareTo(member)).isZero();
  }

  /**
   * Builds a GMSMemberData with a mocked InetAddress returning the given raw address
   * bytes, plus the given view id and jgroups UUID halves.
   */
  private GMSMemberData createGMSMember(byte[] inetAddress, int viewId, long msb, long lsb) {
    GMSMemberData member = new GMSMemberData();
    InetAddress addr1 = mock(InetAddress.class);
    when(addr1.getAddress()).thenReturn(inetAddress);
    member.setInetAddr(addr1);
    member.setVmViewId(viewId);
    member.setUUID(new UUID(msb, lsb));
    return member;
  }

  @Test
  public void testCompareToInetAddressIsLongerThan() {
    GMSMemberData member1 = createGMSMember(new byte[] {1, 1, 1, 1, 1}, 1, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1, 1, 1, 1}, 1, 1, 1);
    assertThat(member1.compareTo(member2)).isGreaterThan(0);
  }

  @Test
  public void testShallowMemberEquals() {
    GMSMemberData member1 = createGMSMember(new byte[] {1, 1, 1, 1, 1}, 1, 1, 1);
    GMSMemberData member2 =
        new GMSMemberData(member1.getInetAddress(), member1.getMembershipPort(),
            member1.getVersionOrdinal(), member1.getUuidMostSignificantBits(),
            member1.getUuidLeastSignificantBits(), member1.getVmViewId());
    assertThat(member1.compareTo(member2)).isZero();
  }

  @Test
  public void testShallowMemberNotEquals() {
    GMSMemberData member1 = createGMSMember(new byte[] {1, 1, 1, 1, 1}, 1, 1, 1);
    // Same identity fields except a different view id (100) — must not be equal.
    GMSMemberData member2 =
        new GMSMemberData(member1.getInetAddress(), member1.getMembershipPort(),
            member1.getVersionOrdinal(), member1.getUuidMostSignificantBits(),
            member1.getUuidLeastSignificantBits(), 100);
    assertThat(member1).isNotEqualTo(member2);
  }

  @Test
  public void testCompareToInetAddressIsShorterThan() {
    GMSMemberData member1 = createGMSMember(new byte[] {1, 1, 1, 1}, 1, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1, 1, 1, 1, 1}, 1, 1, 1);
    assertThat(member1.compareTo(member2)).isLessThan(0);
  }

  @Test
  public void testCompareToInetAddressIsGreater() {
    GMSMemberData member1 = createGMSMember(new byte[] {1, 2, 1, 1, 1}, 1, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1, 1, 1, 1, 1}, 1, 1, 1);
    assertThat(member1.compareTo(member2)).isGreaterThan(0);
  }

  @Test
  public void testCompareToInetAddressIsLessThan() {
    GMSMemberData member1 = createGMSMember(new byte[] {1, 1, 1, 1, 1}, 1, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1, 2, 1, 1, 1}, 1, 1, 1);
    assertThat(member1.compareTo(member2)).isLessThan(0);
  }

  @Test
  public void testCompareToMyViewIdLarger() {
    GMSMemberData member1 = createGMSMember(new byte[] {1}, 2, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1}, 1, 1, 1);
    assertThat(member1.compareTo(member2)).isGreaterThan(0);
  }

  @Test
  public void testCompareToTheirViewIdLarger() {
    GMSMemberData member1 = createGMSMember(new byte[] {1}, 1, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1}, 2, 1, 1);
    assertThat(member1.compareTo(member2)).isLessThan(0);
  }

  @Test
  public void testCompareToMyMSBLarger() {
    GMSMemberData member1 = createGMSMember(new byte[] {1}, 1, 2, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1}, 1, 1, 1);
    assertThat(member1.compareTo(member2)).isGreaterThan(0);
  }

  @Test
  public void testCompareToTheirMSBLarger() {
    GMSMemberData member1 = createGMSMember(new byte[] {1}, 1, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1}, 1, 2, 1);
    assertThat(member1.compareTo(member2)).isLessThan(0);
  }

  @Test
  public void testCompareToMyLSBLarger() {
    GMSMemberData member1 = createGMSMember(new byte[] {1}, 1, 1, 2);
    GMSMemberData member2 = createGMSMember(new byte[] {1}, 1, 1, 1);
    assertThat(member1.compareTo(member2)).isGreaterThan(0);
  }

  @Test
  public void testCompareToTheirLSBLarger() {
    GMSMemberData member1 = createGMSMember(new byte[] {1}, 1, 1, 1);
    GMSMemberData member2 = createGMSMember(new byte[] {1}, 1, 1, 2);
    assertThat(member1.compareTo(member2)).isLessThan(0);
  }

  @Test
  public void testGetUUIDReturnsNullWhenUUIDIs0() {
    GMSMemberData member = new GMSMemberData();
    UUID uuid = new UUID(0, 0);
    member.setUUID(uuid);
    assertThat(member.getUUID()).isNull();
  }

  @Test
  public void testGetUUID() {
    GMSMemberData member = new GMSMemberData();
    UUID uuid = new UUID(1, 1);
    member.setUUID(uuid);
    assertThat(member.getUUID()).isNotNull();
  }

  /**
   * <p>
   * GEODE-2875 - adds vmKind to on-wire form of GMSMemberData.writeEssentialData
   * </p>
   * <p>
   * This must be backward-compatible with Geode 1.0 (Version.GFE_90)
   * </p>
   *
   */
  @Test
  public void testGMSMemberBackwardCompatibility() throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    GMSMemberData member = new GMSMemberData();
    member.setInetAddr(LocalHostUtil.getLocalHost());
    DataOutput dataOutput = new DataOutputStream(baos);
    SerializationContext serializationContext =
        dsfidSerializer.createSerializationContext(dataOutput);
    member.writeEssentialData(dataOutput, serializationContext);

    // vmKind should be transmitted to a member with the current version
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInput dataInput = new DataInputStream(bais);
    DeserializationContext deserializationContext =
        dsfidSerializer.createDeserializationContext(dataInput);
    GMSMemberData newMember = new GMSMemberData();
    newMember.readEssentialData(dataInput, deserializationContext);
    assertThat(newMember.getVmKind()).isEqualTo(member.getVmKind());
    assertThat(newMember.getInetAddress()).isNotNull();
    assertThat(newMember.getInetAddress().getHostAddress()).isEqualTo(newMember.getHostName());

    // vmKind should not be transmitted to a member with version GFE_90 or earlier.
    // BUG FIX: the original deserialized baos.toByteArray() — the stale CURRENT-version
    // bytes written above — so the GFE_90 round-trip was never actually exercised. Read
    // back the bytes written to the GFE_90 stream instead, with a context bound to it.
    BufferDataOutputStream out90 = new BufferDataOutputStream(KnownVersion.GFE_90);
    serializationContext = dsfidSerializer.createSerializationContext(out90);
    member.writeEssentialData(out90, serializationContext);
    bais = new ByteArrayInputStream(out90.toByteArray());
    DataInputStream stream = new DataInputStream(bais);
    deserializationContext = dsfidSerializer.createDeserializationContext(stream);
    dataInput = new VersionedDataInputStream(stream, KnownVersion.GFE_90);
    newMember = new GMSMemberData();
    newMember.readEssentialData(dataInput, deserializationContext);
    assertThat(newMember.getVmKind()).isZero();
  }
}
/** * Sencha GXT 4.0.1 - Sencha for GWT * Copyright (c) 2006-2016, Sencha Inc. * * licensing@sencha.com * http://www.sencha.com/products/gxt/license/ * * ================================================================================ * Evaluation/Trial License * ================================================================================ * This version of Sencha GXT is licensed commercially for a limited period for * evaluation purposes only. Production use or use beyond the applicable evaluation * period is prohibited under this license. * * Please see the Sencha GXT Licensing page at: * http://www.sencha.com/products/gxt/license/ * * For clarification or additional options, please contact: * licensing@sencha.com * ================================================================================ * * * * * * * * ================================================================================ * Disclaimer * ================================================================================ * THIS SOFTWARE IS DISTRIBUTED "AS-IS" WITHOUT ANY WARRANTIES, CONDITIONS AND * REPRESENTATIONS WHETHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE * IMPLIED WARRANTIES AND CONDITIONS OF MERCHANTABILITY, MERCHANTABLE QUALITY, * FITNESS FOR A PARTICULAR PURPOSE, DURABILITY, NON-INFRINGEMENT, PERFORMANCE AND * THOSE ARISING BY STATUTE OR FROM CUSTOM OR USAGE OF TRADE OR COURSE OF DEALING. 
 * ================================================================================ */
package com.sencha.gxt.explorer.client.grid;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import com.google.gwt.cell.client.Cell.Context;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Element;
import com.google.gwt.editor.client.Editor.Path;
import com.google.gwt.event.logical.shared.SelectionEvent;
import com.google.gwt.event.logical.shared.SelectionHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.i18n.shared.DateTimeFormat;
import com.google.gwt.i18n.shared.DateTimeFormat.PredefinedFormat;
import com.google.gwt.safecss.shared.SafeStyles;
import com.google.gwt.safecss.shared.SafeStylesUtils;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.user.client.Random;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.Widget;
import com.sencha.gxt.cell.core.client.ProgressBarCell;
import com.sencha.gxt.cell.core.client.ResizeCell;
import com.sencha.gxt.cell.core.client.SliderCell;
import com.sencha.gxt.cell.core.client.TextButtonCell;
import com.sencha.gxt.cell.core.client.form.ComboBoxCell;
import com.sencha.gxt.cell.core.client.form.ComboBoxCell.TriggerAction;
import com.sencha.gxt.cell.core.client.form.DateCell;
import com.sencha.gxt.core.client.Style.Side;
import com.sencha.gxt.core.client.ValueProvider;
import com.sencha.gxt.core.client.resources.CommonStyles;
import com.sencha.gxt.data.shared.LabelProvider;
import com.sencha.gxt.data.shared.ListStore;
import com.sencha.gxt.data.shared.ModelKeyProvider;
import com.sencha.gxt.data.shared.PropertyAccess;
import com.sencha.gxt.examples.resources.client.TestData;
import com.sencha.gxt.examples.resources.client.model.Plant;
import com.sencha.gxt.explorer.client.app.ui.ExampleContainer;
import com.sencha.gxt.explorer.client.model.Example.Detail;
import com.sencha.gxt.theme.base.client.colorpalette.ColorPaletteBaseAppearance;
import com.sencha.gxt.widget.core.client.ColorPaletteCell;
import com.sencha.gxt.widget.core.client.ColorPaletteCell.ColorPaletteAppearance;
import com.sencha.gxt.widget.core.client.ContentPanel;
import com.sencha.gxt.widget.core.client.button.TextButton;
import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack;
import com.sencha.gxt.widget.core.client.event.CellSelectionEvent;
import com.sencha.gxt.widget.core.client.event.ColumnWidthChangeEvent;
import com.sencha.gxt.widget.core.client.event.ColumnWidthChangeEvent.ColumnWidthChangeHandler;
import com.sencha.gxt.widget.core.client.event.SelectEvent;
import com.sencha.gxt.widget.core.client.event.SelectEvent.SelectHandler;
import com.sencha.gxt.widget.core.client.form.DateTimePropertyEditor;
import com.sencha.gxt.widget.core.client.grid.ColumnConfig;
import com.sencha.gxt.widget.core.client.grid.ColumnModel;
import com.sencha.gxt.widget.core.client.grid.Grid;
import com.sencha.gxt.widget.core.client.info.Info;
import com.sencha.gxt.widget.core.client.tips.ToolTipConfig;

/**
 * Explorer example showing a {@link Grid} whose columns render interactive
 * widgets-as-cells (text button, date picker, combo box, color palette,
 * slider and progress bar) rather than plain text.
 */
@Detail(
    name = "Cell Grid",
    category = "Grid",
    icon = "cellgrid",
    classes = Plant.class,
    maxHeight = CellGridExample.MAX_HEIGHT,
    maxWidth = CellGridExample.MAX_WIDTH,
    minHeight = CellGridExample.MIN_HEIGHT,
    minWidth = CellGridExample.MIN_WIDTH
)
public class CellGridExample implements IsWidget, EntryPoint {

  // Hex color values offered by the color palette column.
  private static final String[] COLORS = new String[]{"161616", "002241", "006874", "82a700", "bbc039", "f3f1cd"};

  // Generated property-access bindings for the Plant model.
  interface PlaceProperties extends PropertyAccess<Plant> {
    ValueProvider<Plant, Date> available();

    // The plant's name doubles as the store key.
    @Path("name")
    ModelKeyProvider<Plant> key();

    ValueProvider<Plant, String> name();

    ValueProvider<Plant, Integer> difficulty();

    ValueProvider<Plant, Double> progress();

    ValueProvider<Plant, String> color();

    ValueProvider<Plant, String> light();
  }

  protected static final int MAX_HEIGHT = 600;
  protected static final int MAX_WIDTH = 900;
  protected static final int MIN_HEIGHT = 320;
  protected static final int MIN_WIDTH = 900;

  private static final PlaceProperties properties = GWT.create(PlaceProperties.class);

  private ListStore<Plant> store;
  // Lazily built in asWidget(); null until first call.
  private ContentPanel panel;

  /**
   * Keeps a {@link ResizeCell}'s rendered width in sync with its column: when
   * the watched column is resized, the cell width is updated and every visible
   * cell in that column is re-rendered in place.
   */
  public class CellColumnResizer<M, T> implements ColumnWidthChangeHandler {
    private Grid<M> grid;
    private ColumnConfig<M, T> column;
    private ResizeCell<T> cell;

    public CellColumnResizer(Grid<M> grid, ColumnConfig<M, T> column, ResizeCell<T> cell) {
      this.grid = grid;
      this.column = column;
      this.cell = cell;
    }

    @Override
    public void onColumnWidthChange(ColumnWidthChangeEvent event) {
      // Only react to width changes of the column this resizer watches.
      if (column == event.getColumnConfig()) {
        int w = event.getColumnConfig().getWidth();
        int rows = store.size();
        int col = grid.getColumnModel().indexOf(column);
        // 20px smaller than the column — presumably cell padding/border allowance; TODO confirm.
        cell.setWidth(w - 20);
        // NOTE(review): shadows the outer 'store' field; both refer to the grid's store here.
        ListStore<M> store = grid.getStore();
        for (int i = 0; i < rows; i++) {
          M p = grid.getStore().get(i);

          // option 1
          // could be better for force fit where all columns are resized
          // would need to run deferred using DelayedTask to ensure only run once
          // grid.getStore().update(p);

          // option 2
          Element parent = grid.getView().getCell(i, col);
          if (parent != null) {
            // Re-render the cell's HTML directly into the existing DOM element.
            parent = parent.getFirstChildElement();
            SafeHtmlBuilder sb = new SafeHtmlBuilder();
            cell.render(new Context(i, col, store.getKeyProvider().getKey(p)), column.getValueProvider().getValue(p), sb);
            parent.setInnerSafeHtml(sb.toSafeHtml());
          }
        }
      }
    }
  }

  /**
   * Builds (on first call) and returns the example panel: a grid of plants
   * whose columns use interactive cells, plus Reset/Save buttons that reject
   * or commit pending store changes.
   */
  @Override
  public Widget asWidget() {
    if (panel == null) {
      // reduce the padding on text element as we have widgets in the cells
      SafeStyles btnPaddingStyle = SafeStylesUtils.fromTrustedString("padding: 1px 3px 0;");
      SafeStyles fieldPaddingStyle = SafeStylesUtils.fromTrustedString("padding: 2px 3px;");

      ColumnConfig<Plant, String> nameColumn = new ColumnConfig<Plant, String>(properties.name(), 100, "Name");
      // IMPORTANT we want the text element (cell parent) to only be as wide as
      // the cell and not fill the cell
      nameColumn.setColumnTextClassName(CommonStyles.get().inlineBlock());
      nameColumn.setColumnTextStyle(btnPaddingStyle);

      // "Name" column renders a clickable text button per row.
      TextButtonCell button = new TextButtonCell();
      button.addSelectHandler(new SelectHandler() {
        @Override
        public void onSelect(SelectEvent event) {
          Context c = event.getContext();
          int row = c.getIndex();
          Plant p = store.get(row);
          Info.display("Event", "The " + p.getName() + " was clicked.");
        }
      });
      nameColumn.setCell(button);

      // "Date" column renders an inline date picker.
      DateCell dateCell = new DateCell();
      dateCell.getDatePicker().addValueChangeHandler(new ValueChangeHandler<Date>() {
        @Override
        public void onValueChange(ValueChangeEvent<Date> event) {
          Info.display("Date Selected",
              "You selected " + DateTimeFormat.getFormat(PredefinedFormat.DATE_SHORT).format(event.getValue()));
        }
      });
      dateCell.setPropertyEditor(new DateTimePropertyEditor(DateTimeFormat.getFormat(PredefinedFormat.DATE_SHORT)));

      ColumnConfig<Plant, Date> availableColumn = new ColumnConfig<Plant, Date>(properties.available(), 160, "Date");
      availableColumn.setColumnTextStyle(fieldPaddingStyle);
      availableColumn.setCell(dateCell);

      // Backing store for the "Light" combo box; the string itself is the key.
      ListStore<String> lights = new ListStore<String>(new ModelKeyProvider<String>() {
        @Override
        public String getKey(String item) {
          return item;
        }
      });
      lights.add("Mostly Shady");
      lights.add("Mostly Sunny");
      lights.add("Shade");
      lights.add("Sunny");
      lights.add("Sun or Shade");

      ColumnConfig<Plant, String> lightColumn = new ColumnConfig<Plant, String>(properties.light(), 130, "Light");
      lightColumn.setColumnTextStyle(fieldPaddingStyle);

      ComboBoxCell<String> lightCombo = new ComboBoxCell<String>(lights, new LabelProvider<String>() {
        @Override
        public String getLabel(String item) {
          return item;
        }
      });
      lightCombo.addSelectionHandler(new SelectionHandler<String>() {
        @Override
        public void onSelection(SelectionEvent<String> event) {
          CellSelectionEvent<String> sel = (CellSelectionEvent<String>) event;
          Plant p = store.get(sel.getContext().getIndex());
          Info.display("Lightness Selected", p.getName() + " selected " + event.getSelectedItem());
        }
      });
      lightCombo.setTriggerAction(TriggerAction.ALL);
      lightCombo.setForceSelection(true);
      lightCombo.setWidth(120);
      lightColumn.setCell(lightCombo);

      ColumnConfig<Plant, String> colorColumn = new ColumnConfig<Plant, String>(properties.color(), 150, "Color");
      colorColumn.setColumnTextStyle(fieldPaddingStyle);

      // This next line only works with any appearance that extends from Base
      ColorPaletteBaseAppearance appearance = GWT.create(ColorPaletteAppearance.class);
      appearance.setColumnCount(6);

      // handlesSelection() returns true so the palette consumes its own clicks.
      ColorPaletteCell colorPalette = new ColorPaletteCell(appearance, COLORS, COLORS) {
        @Override
        public boolean handlesSelection() {
          return true;
        }
      };
      colorPalette.addSelectionHandler(new SelectionHandler<String>() {
        @Override
        public void onSelection(SelectionEvent<String> event) {
          Info.display("Color Selected", "You selected " + event.getSelectedItem());
        }
      });
      colorColumn.setCell(colorPalette);

      ColumnConfig<Plant, Integer> difficultyColumn = new ColumnConfig<Plant, Integer>(properties.difficulty(), 150, "Durability");
      difficultyColumn.setColumnTextStyle(fieldPaddingStyle);

      // Custom slider tooltip configuration, which displays the tooltip to the right of the control.
      ToolTipConfig tooltipConfig = new ToolTipConfig();
      tooltipConfig.setAnchor(Side.LEFT);
      tooltipConfig.setAnchorArrow(false);
      tooltipConfig.setMouseOffsetX(25);
      tooltipConfig.setMouseOffsetY(0);
      tooltipConfig.setDismissDelay(1000);

      SliderCell slider = new SliderCell() {
        @Override
        public boolean handlesSelection() {
          return true;
        }
      };
      slider.setToolTipConfig(tooltipConfig);
      slider.setWidth(140);
      difficultyColumn.setCell(slider);

      final ColumnConfig<Plant, Double> progressColumn = new ColumnConfig<Plant, Double>(properties.progress(), 150, "Progress");
      progressColumn.setColumnTextStyle(fieldPaddingStyle);

      final ProgressBarCell progress = new ProgressBarCell() {
        @Override
        public boolean handlesSelection() {
          return true;
        }
      };
      progress.setProgressText("{0}% Complete");
      progress.setWidth(140);
      progressColumn.setCell(progress);

      List<ColumnConfig<Plant, ?>> columns = new ArrayList<ColumnConfig<Plant, ?>>();
      columns.add(nameColumn);
      columns.add(availableColumn);
      columns.add(lightColumn);
      columns.add(colorColumn);
      columns.add(difficultyColumn);
      columns.add(progressColumn);

      ColumnModel<Plant> cm = new ColumnModel<Plant>(columns);

      // Seed each sample plant with a random color.
      // NOTE(review): only the first 4 of the 6 COLORS entries can be chosen here — confirm intent.
      List<Plant> plants = new ArrayList<Plant>(TestData.getPlants());
      for (Plant p : plants) {
        p.setColor(COLORS[Random.nextInt(4)]);
      }

      store = new ListStore<Plant>(properties.key());
      store.addAll(plants);

      final Grid<Plant> grid = new Grid<Plant>(store, cm);
      grid.getView().setAutoExpandColumn(nameColumn);
      grid.getView().setTrackMouseOver(false);
      // Keep the progress bar cell's width tracking its column width.
      grid.getColumnModel().addColumnWidthChangeHandler(new CellColumnResizer<Plant, Double>(grid, progressColumn, progress));

      panel = new ContentPanel();
      panel.setHeading("Cell Grid");
      panel.add(grid);
      panel.setButtonAlign(BoxLayoutPack.CENTER);
      // Reset discards pending edits; Save commits them to the store.
      panel.addButton(new TextButton("Reset", new SelectHandler() {
        @Override
        public void onSelect(SelectEvent event) {
          store.rejectChanges();
        }
      }));
      panel.addButton(new TextButton("Save", new SelectHandler() {
        @Override
        public void onSelect(SelectEvent event) {
          store.commitChanges();
        }
      }));
    }
    return panel;
  }

  /** Standalone entry point: hosts this example in an ExampleContainer. */
  @Override
  public void onModuleLoad() {
    new ExampleContainer(this)
        .setMaxHeight(MAX_HEIGHT)
        .setMaxWidth(MAX_WIDTH)
        .setMinHeight(MIN_HEIGHT)
        .setMinWidth(MIN_WIDTH)
        .doStandalone();
  }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.cloudstack.api.response; import java.util.Date; import java.util.Map; import com.google.gson.annotations.SerializedName; import org.apache.cloudstack.api.ApiConstants; import org.apache.cloudstack.api.BaseResponse; import org.apache.cloudstack.api.EntityReference; import com.cloud.serializer.Param; import com.cloud.storage.StoragePool; import com.cloud.storage.StoragePoolStatus; @EntityReference(value = StoragePool.class) public class StoragePoolResponse extends BaseResponse { @SerializedName("id") @Param(description = "the ID of the storage pool") private String id; @SerializedName("zoneid") @Param(description = "the Zone ID of the storage pool") private String zoneId; @SerializedName(ApiConstants.ZONE_NAME) @Param(description = "the Zone name of the storage pool") private String zoneName; @SerializedName("podid") @Param(description = "the Pod ID of the storage pool") private String podId; @SerializedName("podname") @Param(description = "the Pod name of the storage pool") private String podName; @SerializedName("name") @Param(description = "the name of the storage pool") private String name; @SerializedName("ipaddress") @Param(description = "the IP address of the 
storage pool") private String ipAddress; @SerializedName("path") @Param(description = "the storage pool path") private String path; @SerializedName("created") @Param(description = "the date and time the storage pool was created") private Date created; @SerializedName("type") @Param(description = "the storage pool type") private String type; @SerializedName("clusterid") @Param(description = "the ID of the cluster for the storage pool") private String clusterId; @SerializedName("clustername") @Param(description = "the name of the cluster for the storage pool") private String clusterName; @SerializedName("disksizetotal") @Param(description = "the total disk size of the storage pool") private Long diskSizeTotal; @SerializedName("disksizeallocated") @Param(description = "the host's currently allocated disk size") private Long diskSizeAllocated; @SerializedName("disksizeused") @Param(description = "the host's currently used disk size") private Long diskSizeUsed; @SerializedName("capacityiops") @Param(description = "IOPS CloudStack can provision from this storage pool") private Long capacityIops; @SerializedName("tags") @Param(description = "the tags for the storage pool") private String tags; @SerializedName(ApiConstants.STATE) @Param(description = "the state of the storage pool") private StoragePoolStatus state; @SerializedName(ApiConstants.SCOPE) @Param(description = "the scope of the storage pool") private String scope; @SerializedName("overprovisionfactor") @Param(description = "the overprovisionfactor for the storage pool", since = "4.4") private String overProvisionFactor; @SerializedName(ApiConstants.HYPERVISOR) @Param(description = "the hypervisor type of the storage pool") private String hypervisor; @SerializedName("suitableformigration") @Param(description = "true if this pool is suitable to migrate a volume," + " false otherwise") private Boolean suitableForMigration; @SerializedName(ApiConstants.STORAGE_CAPABILITIES) @Param(description = "the storage pool 
capabilities") private Map<String, String> caps; public Map<String, String> getCaps() { return caps; } public void setCaps(Map<String, String> cap) { this.caps = cap; } /** * @return the scope */ public String getScope() { return scope; } /** * @param scope the scope to set */ public void setScope(String scope) { this.scope = scope; } public String getHypervisor() { return hypervisor; } public void setHypervisor(String hypervisor) { this.hypervisor = hypervisor; } @Override public String getObjectId() { return this.getId(); } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getZoneId() { return zoneId; } public void setZoneId(String zoneId) { this.zoneId = zoneId; } public String getZoneName() { return zoneName; } public void setZoneName(String zoneName) { this.zoneName = zoneName; } public String getPodId() { return podId; } public void setPodId(String podId) { this.podId = podId; } public String getPodName() { return podName; } public void setPodName(String podName) { this.podName = podName; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getIpAddress() { return ipAddress; } public void setIpAddress(String ipAddress) { this.ipAddress = ipAddress; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public Date getCreated() { return created; } public void setCreated(Date created) { this.created = created; } public String getType() { return type; } public void setType(String type) { this.type = type; } public String getClusterId() { return clusterId; } public void setClusterId(String clusterId) { this.clusterId = clusterId; } public String getClusterName() { return clusterName; } public void setClusterName(String clusterName) { this.clusterName = clusterName; } public Long getDiskSizeTotal() { return diskSizeTotal; } public void setDiskSizeTotal(Long diskSizeTotal) { this.diskSizeTotal = diskSizeTotal; } 
public Long getDiskSizeAllocated() { return diskSizeAllocated; } public void setDiskSizeAllocated(Long diskSizeAllocated) { this.diskSizeAllocated = diskSizeAllocated; } public Long getDiskSizeUsed() { return diskSizeUsed; } public void setDiskSizeUsed(Long diskSizeUsed) { this.diskSizeUsed = diskSizeUsed; } public Long getCapacityIops() { return capacityIops; } public void setCapacityIops(Long capacityIops) { this.capacityIops = capacityIops; } public String getTags() { return tags; } public void setTags(String tags) { this.tags = tags; } public StoragePoolStatus getState() { return state; } public void setState(StoragePoolStatus state) { this.state = state; } public void setSuitableForMigration(Boolean suitableForMigration) { this.suitableForMigration = suitableForMigration; } public void setOverProvisionFactor(String overProvisionFactor) { this.overProvisionFactor = overProvisionFactor; } }
package se.wahlstromstekniska.acetest.authorizationserver; import java.io.InputStream; import java.io.StringWriter; import java.util.ArrayList; import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; import org.jose4j.jwk.EllipticCurveJsonWebKey; import org.json.JSONArray; import org.json.JSONObject; /** * Reads properties and authorization servers keys for signing and encryption. * WARNING: Class uses a insecure key file based key storage and this is NOT recommended * to be used in production. * * @author erikw * */ public class ServerConfiguration { final static Logger logger = Logger.getLogger(ServerConfiguration.class); private static ServerConfiguration instance = null; private static JSONObject properties = null; private ArrayList<ResourceServer> resourceServers = new ArrayList<ResourceServer>(); private ArrayList<Client> clients = new ArrayList<Client>(); private int coapPort = 5683; private int coapsPort = 5684; private String pskIdentity = null; private String pskKey = null; private EllipticCurveJsonWebKey signAndEncryptKey = null; private String configFilePath = "/authorizationserver.json"; protected ServerConfiguration() { try { logger.info("Loading authorization server configuration."); InputStream configIS = ServerConfiguration.class.getResourceAsStream(configFilePath); StringWriter configWriter = new StringWriter(); IOUtils.copy(configIS, configWriter, "UTF-8"); setProperties(new JSONObject(configWriter.toString())); // load resource servers logger.debug("Loading configured resource servers."); JSONArray rsList = getProperties().getJSONObject("authorizationserverconfig").getJSONArray("resourceservers"); for (int i=0; i<rsList.length(); i++) { JSONObject item = rsList.getJSONObject(i); String aud = item.getString("aud"); ResourceServer rs = new ResourceServer(aud); rs.setClientId(item.getString("clientId")); rs.setClientSecret(item.getString("clientSecret")); String csp = item.getString("csp"); rs.setCsp(csp); String tokenFormat = 
item.getString("tokenformat"); if("JWT".equals(tokenFormat)) { rs.setTokenFormat(ResourceServer.TOKEN_FORMAT_JWT); } else { rs.setTokenFormat(ResourceServer.TOKEN_FORMAT_CWT); } String transportEncryption = item.getString("transportEncryption"); if("dtls-psk".equals(transportEncryption)) { rs.setTransportEncryption(ResourceServer.TRANSPORT_ENCRYPTION_DTLS_PSK); } if("dtls-rpk".equals(transportEncryption)) { rs.setTransportEncryption(ResourceServer.TRANSPORT_ENCRYPTION_DTLS_RPK); } if("dtls-cert".equals(transportEncryption)) { rs.setTransportEncryption(ResourceServer.TRANSPORT_ENCRYPTION_DTLS_CERT); } if("oscon".equals(transportEncryption)) { rs.setTransportEncryption(ResourceServer.TRANSPORT_ENCRYPTION_OSCON); } String scopes = item.getString("scopes"); rs.setScopes(scopes); JSONArray authorizedClients = item.getJSONArray("authorizedClients"); for (int c=0; c<authorizedClients.length(); c++) { String client = authorizedClients.getString(c); rs.addAuthorizedClient(client); } String rpk = item.getJSONObject("serverKey").toString(); EllipticCurveJsonWebKey rpkJWK = (EllipticCurveJsonWebKey) EllipticCurveJsonWebKey.Factory.newPublicJwk(rpk); rs.setRPK(rpkJWK); resourceServers.add(rs); } // load clients logger.debug("Loading configured clients."); JSONArray clientList = getProperties().getJSONObject("authorizationserverconfig").getJSONArray("clients"); for (int i=0; i<clientList.length(); i++) { JSONObject item = clientList.getJSONObject(i); String clientID = item.getString("clientId"); String clientSecret = item.getString("clientSecret"); String encryptionKey = item.getJSONObject("encryptionKey").toString(); EllipticCurveJsonWebKey jwk = (EllipticCurveJsonWebKey) EllipticCurveJsonWebKey.Factory.newPublicJwk(encryptionKey); clients.add(new Client(clientID, clientSecret, jwk)); } // load port(s) config logger.debug("Loading ports resource servers."); 
setCoapPort(getProperties().getJSONObject("authorizationserverconfig").getJSONObject("authorizationserver").getInt("coapPort")); setCoapsPort(getProperties().getJSONObject("authorizationserverconfig").getJSONObject("authorizationserver").getInt("coapsPort")); // load psk identity used to connect to AS securely from the client logger.debug("Loading PSK."); setPskKey(getProperties().getJSONObject("authorizationserverconfig").getJSONObject("authorizationserver").getString("pskKey")); setPskIdentity(getProperties().getJSONObject("authorizationserverconfig").getJSONObject("authorizationserver").getString("pskIdentity")); // load sign and encryption key logger.debug("Loading sign and encryption key."); String key = getProperties().getJSONObject("authorizationserverconfig").getJSONObject("authorizationserver").getJSONObject("signAndEncryptKey").toString(); setSignAndEncryptKey((EllipticCurveJsonWebKey) EllipticCurveJsonWebKey.Factory.newPublicJwk(key.toString())); } catch (Exception e) { logger.fatal("Failed to parse configuration file: " + configFilePath); logger.fatal(e); logger.fatal("Run the system setup project. 
It will automatically create a dummy configuraton to get you started."); System.exit(0); } } public static ServerConfiguration getInstance() { if(instance == null) { try { instance = new ServerConfiguration(); } catch (Exception e) { logger.fatal("Could not read properties file.", e); } } return instance; } public ResourceServer getResourceServerWithClientId(String clientId) { ResourceServer foundRS = null; if(clientId != null && clientId.trim().length() != 0) { for (ResourceServer rs : resourceServers) { if(rs.getClientId().equals(clientId.trim())) { foundRS = rs; } } } return foundRS; } public ResourceServer getResourceServerWithAud(String aud) { ResourceServer foundRS = null; if(aud != null && aud.trim().length() != 0) { for (ResourceServer rs : resourceServers) { if(rs.getAud().equals(aud.trim())) { foundRS = rs; } } } return foundRS; } public ArrayList<ResourceServer> getResourceServers() { return resourceServers; } public Client getClient(String clientId) { Client found = null; for (Client client : clients) { if(client.getClient_id().equals(clientId)) { found = client; } } return found; } public ArrayList<Client> getClients() { return clients; } public static JSONObject getProperties() { return properties; } public static void setProperties(JSONObject properties) { ServerConfiguration.properties = properties; } public int getCoapPort() { return coapPort; } public void setCoapPort(int coapPort) { this.coapPort = coapPort; } public int getCoapsPort() { return coapsPort; } public void setCoapsPort(int coapsPort) { this.coapsPort = coapsPort; } public String getPskIdentity() { return pskIdentity; } public void setPskIdentity(String pskIdentity) { this.pskIdentity = pskIdentity; } public String getPskKey() { return pskKey; } public void setPskKey(String pskKey) { this.pskKey = pskKey; } public EllipticCurveJsonWebKey getSignAndEncryptKey() { return signAndEncryptKey; } public void setSignAndEncryptKey(EllipticCurveJsonWebKey signAndEncryptKey) { 
this.signAndEncryptKey = signAndEncryptKey; } }
/* * ServeStream: A HTTP stream browser/player for Android * Copyright 2014 William Seemann * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sourceforge.servestream.utils; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Arrays; import java.util.Formatter; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import org.xml.sax.SAXException; import wseemann.media.jplaylistparser.exception.JPlaylistParserException; import wseemann.media.jplaylistparser.parser.AutoDetectParser; import wseemann.media.jplaylistparser.playlist.Playlist; import wseemann.media.jplaylistparser.playlist.PlaylistEntry; import net.sourceforge.servestream.R; import net.sourceforge.servestream.bean.UriBean; import net.sourceforge.servestream.provider.Media; import net.sourceforge.servestream.service.IMediaPlaybackService; import net.sourceforge.servestream.service.MediaPlaybackService; import net.sourceforge.servestream.transport.AbsTransport; import net.sourceforge.servestream.transport.TransportFactory; import android.app.Activity; import android.app.Service; import android.content.ComponentName; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Context; import android.content.ContextWrapper; import android.content.Intent; import 
android.content.ServiceConnection; import android.content.SharedPreferences; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.os.Handler; import android.os.Message; import android.os.RemoteException; import android.text.format.Time; import android.util.Log; import android.widget.Toast; public class MusicUtils { public interface Defs { public final static int OPEN_URL = 0; public final static int ADD_TO_PLAYLIST = 1; public final static int USE_AS_RINGTONE = 2; public final static int PLAYLIST_SELECTED = 3; public final static int NEW_PLAYLIST = 4; public final static int PLAY_SELECTION = 5; public final static int GOTO_START = 6; public final static int GOTO_PLAYBACK = 7; public final static int PARTY_SHUFFLE = 8; public final static int SHUFFLE_ALL = 9; public final static int DELETE_ITEM = 10; public final static int SCAN_DONE = 11; public final static int QUEUE = 12; public final static int EFFECTS_PANEL = 13; public final static int CHILD_MENU_BASE = 14; // this should be the last item } public static IMediaPlaybackService sService = null; private static HashMap<Context, ServiceBinder> sConnectionMap = new HashMap<Context, ServiceBinder>(); public static class ServiceToken { ContextWrapper mWrappedContext; ServiceToken(ContextWrapper context) { mWrappedContext = context; } } public static ServiceToken bindToService(Activity context) { return bindToService(context, null); } public static ServiceToken bindToService(Activity context, ServiceConnection callback) { Activity realActivity = context.getParent(); if (realActivity == null) { realActivity = context; } ContextWrapper cw = new ContextWrapper(realActivity); cw.startService(new Intent(cw, MediaPlaybackService.class)); ServiceBinder sb = new ServiceBinder(callback); if (cw.bindService((new Intent()).setClass(cw, MediaPlaybackService.class), sb, 0)) { sConnectionMap.put(cw, sb); return new ServiceToken(cw); } Log.e("Music", "Failed to bind to service"); return 
null; } public static ServiceToken bindToService(Service context, ServiceConnection callback) { ContextWrapper cw = new ContextWrapper(context); cw.startService(new Intent(cw, MediaPlaybackService.class)); ServiceBinder sb = new ServiceBinder(callback); if (cw.bindService((new Intent()).setClass(cw, MediaPlaybackService.class), sb, 0)) { sConnectionMap.put(cw, sb); return new ServiceToken(cw); } Log.e("Music", "Failed to bind to service"); return null; } public static void unbindFromService(ServiceToken token) { if (token == null) { Log.e("MusicUtils", "Trying to unbind with null token"); return; } ContextWrapper cw = token.mWrappedContext; ServiceBinder sb = sConnectionMap.remove(cw); if (sb == null) { Log.e("MusicUtils", "Trying to unbind for unknown Context"); return; } cw.unbindService(sb); if (sConnectionMap.isEmpty()) { // presumably there is nobody interested in the service at this point, // so don't hang on to the ServiceConnection sService = null; } } private static class ServiceBinder implements ServiceConnection { ServiceConnection mCallback; ServiceBinder(ServiceConnection callback) { mCallback = callback; } public void onServiceConnected(ComponentName className, android.os.IBinder service) { sService = IMediaPlaybackService.Stub.asInterface(service); if (mCallback != null) { mCallback.onServiceConnected(className, service); } } public void onServiceDisconnected(ComponentName className) { if (mCallback != null) { mCallback.onServiceDisconnected(className); } sService = null; } } public static long getCurrentAudioId() { if (MusicUtils.sService != null) { try { return sService.getAudioId(); } catch (RemoteException ex) { } } return -1; } public static AddToCurrentPlaylistAsyncTask addToCurrentPlaylistFromURL(Context context, UriBean uri, Handler handler) { AddToCurrentPlaylistAsyncTask playlistTask = new AddToCurrentPlaylistAsyncTask(context, uri, handler); playlistTask.execute(); return playlistTask; } public static void addToCurrentPlaylist(Context 
context, long [] list) { if (list.length == 0 || sService == null) { Log.d("MusicUtils", "attempt to play empty song list"); // Don't try to play empty playlists. Nothing good will come of it. String message = context.getString(R.string.emptyplaylist, list.length); Toast.makeText(context, message, Toast.LENGTH_SHORT).show(); return; } try { sService.enqueue(list, MediaPlaybackService.LAST); String message = context.getResources().getQuantityString( R.plurals.NNNtrackstoplaylist, list.length, Integer.valueOf(list.length)); Toast.makeText(context, message, Toast.LENGTH_SHORT).show(); } catch (RemoteException ex) { } } public static class AddToCurrentPlaylistAsyncTask extends AsyncTask<Void, Void, Void> { Context mContext = null; private UriBean mUri; Handler mHandler = null; public AddToCurrentPlaylistAsyncTask(Context context, UriBean uri, Handler handler) { super(); mContext = context; mUri = uri; mHandler = handler; } @Override protected Void doInBackground(Void... arg0) { long [] list = new long[0]; AbsTransport transport = TransportFactory.getTransport(mUri.getProtocol()); transport.setUri(mUri); try { transport.connect(); if (transport.getContentType() != null && !transport.getContentType().contains("text/html")) { list = MusicUtils.getFilesInPlaylist(mContext, mUri.getScrubbedUri().toString(), transport.getContentType(), transport.getConnection()); } } catch (Exception e) { e.printStackTrace(); } finally { transport.close(); } Message msg = new Message(); msg.obj = list; mHandler.sendMessage(msg); return null; } } public static Cursor query(Context context, Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder, int limit) { try { ContentResolver resolver = context.getContentResolver(); if (resolver == null) { return null; } if (limit > 0) { uri = uri.buildUpon().appendQueryParameter("limit", "" + limit).build(); } return resolver.query(uri, projection, selection, selectionArgs, sortOrder); } catch 
// NOTE(review): the first tokens below are the tail of a method whose opening
// lies before this chunk — a catch clause that swallows
// UnsupportedOperationException and returns null. Kept byte-identical.
(UnsupportedOperationException ex) {
    return null;
}
}

/**
 * Convenience overload of query() that applies no row limit (limit == 0).
 */
public static Cursor query(Context context, Uri uri, String[] projection,
        String selection, String[] selectionArgs, String sortOrder) {
    return query(context, uri, projection, selection, selectionArgs, sortOrder, 0);
}

/* Try to use String.format() as little as possible, because it creates a
 * new Formatter every time you call it, which is very inefficient.
 * Reusing an existing Formatter more than tripled the speed of
 * makeTimeString().
 * This Formatter/StringBuilder are also used by makeAlbumSongsLabel()
 */
// NOTE(review): shared mutable static state — not thread-safe; presumably only
// ever touched from the UI thread. TODO confirm.
private static StringBuilder sFormatBuilder = new StringBuilder();
private static Formatter sFormatter = new Formatter(sFormatBuilder, Locale.getDefault());
private static final Object[] sTimeArgs = new Object[5];

/**
 * Formats a duration in seconds using the short (&lt; 1 hour) or long
 * (&gt;= 1 hour) duration format resource.
 */
public static String makeTimeString(Context context, long secs) {
    String durationformat = context.getString(
            secs < 3600 ? R.string.durationformatshort : R.string.durationformatlong);

    /* Provide multiple arguments so the format can be changed easily
     * by modifying the xml.
     */
    sFormatBuilder.setLength(0);

    final Object[] timeArgs = sTimeArgs;
    timeArgs[0] = secs / 3600;      // whole hours
    timeArgs[1] = secs / 60;        // total minutes (not minutes-within-hour)
    timeArgs[2] = (secs / 60) % 60; // minutes within the hour
    timeArgs[3] = secs;             // total seconds
    timeArgs[4] = secs % 60;        // seconds within the minute

    return sFormatter.format(durationformat, timeArgs).toString();
}

/** Plays the list starting at position, clearing the activity stack above the player. */
public static void playAll(Context context, long [] list, int position) {
    playAll(context, list, position, false, Intent.FLAG_ACTIVITY_CLEAR_TOP);
}

/** Same as playAll(), but launches the player UI in a new task (for Service callers). */
public static void playAllFromService(Context context, long [] list, int position) {
    playAll(context, list, position, false, Intent.FLAG_ACTIVITY_NEW_TASK);
}

/**
 * Hands the song list to the playback service and then launches the playback
 * activity with the supplied Intent flags (the launch happens in 'finally',
 * so it occurs even on RemoteException or early return).
 */
private static void playAll(Context context, long [] list, int position, boolean force_shuffle, int flags) {
    // NOTE(review): this branch also fires when sService is null, yet the toast
    // claims an empty playlist — slightly misleading message in that case.
    if (list.length == 0 || sService == null) {
        Log.d("MusicUtils", "attempt to play empty song list");
        // Don't try to play empty playlists. Nothing good will come of it.
        String message = context.getString(R.string.emptyplaylist, list.length);
        Toast.makeText(context, message, Toast.LENGTH_SHORT).show();
        return;
    }
    try {
        if (force_shuffle) {
            sService.setShuffleMode(MediaPlaybackService.SHUFFLE_ON);
        }
        long curid = sService.getAudioId();
        int curpos = sService.getQueuePosition();
        if (position != -1 && curpos == position && curid == list[position]) {
            // The selected file is the file that's currently playing;
            // figure out if we need to restart with a new playlist,
            // or just launch the playback activity.
            long [] playlist = sService.getQueue();
            if (Arrays.equals(list, playlist)) {
                // we don't need to set a new list, but we should resume playback if needed
                sService.play();
                return; // the 'finally' block will still run
            }
        }
        if (position < 0) {
            position = 0;
        }
        sService.open(list, force_shuffle ? -1 : position);
    } catch (RemoteException ex) {
        // NOTE(review): remote failure is silently swallowed; the player UI is
        // still launched below. Presumably intentional best-effort behavior.
    } finally {
        Intent intent = new Intent("net.sourceforge.servestream.PLAYBACK_VIEWER")
            .setFlags(flags);
        context.startActivity(intent);
    }
}

// Shared zero-length result so callers never receive null.
private final static long [] sEmptyList = new long[0];

/**
 * Parses the given stream as a playlist and returns a media-store row id for
 * each entry. If parsing fails for any reason, the uri itself is treated as a
 * single-track playlist. The input stream is always closed.
 */
public static long [] getFilesInPlaylist(Context context, String uri, String contentType, InputStream is) {
    if (uri == null) {
        return sEmptyList;
    }

    AutoDetectParser parser = new AutoDetectParser(); // Should auto-detect!
    Playlist playlist = new Playlist();
    try {
        parser.parse(uri, contentType, is, playlist);
    } catch (IOException e) {
        playlist = null;
    } catch (SAXException e) {
        playlist = null;
    } catch (JPlaylistParserException e) {
        playlist = null;
    } finally {
        Utils.closeInputStream(is);
    }

    // Fall back to treating the uri as a single track when parsing failed.
    if (playlist == null) {
        playlist = new Playlist();
        PlaylistEntry playlistEntry = new PlaylistEntry();
        playlistEntry.set(PlaylistEntry.URI, uri);
        playlistEntry.set(PlaylistEntry.TRACK, "1");
        playlist.add(playlistEntry);
    }

    return addFilesToMediaStore(context, playlist);
}

/**
 * Stores a single uri in the media store as a one-track playlist and returns
 * its row id (wrapped in an array).
 */
public static long [] storeFile(Context context, String uri) {
    if (uri == null) {
        return sEmptyList;
    }

    Playlist playlist = new Playlist();
    // NOTE(review): redundant re-assignment — the instance created just above
    // is immediately discarded.
    playlist = new Playlist();
    PlaylistEntry playlistEntry = new PlaylistEntry();
    playlistEntry.set(PlaylistEntry.URI, uri);
    playlistEntry.set(PlaylistEntry.TRACK, "1");
    playlist.add(playlistEntry);

    return addFilesToMediaStore(context, playlist);
}

/**
 * Ensures every playlist entry has a row in the media store, bulk-inserting
 * any that are missing, and returns the row id for each entry in playlist
 * order. Recurses once after a successful insert to pick up the new ids.
 */
private static long [] addFilesToMediaStore(Context context, Playlist playlist) {
    if (playlist == null || playlist.getPlaylistEntries().size() == 0) {
        return sEmptyList;
    }

    List<ContentValues> contentValues = new ArrayList<ContentValues>();
    ContentResolver contentResolver = context.getContentResolver();
    // Existing rows, keyed by uri -> row _ID.
    Map<String, Integer> uriList = retrieveAllRows(context);

    long [] list = new long[playlist.getPlaylistEntries().size()];

    // process the returned media files
    for (int i = 0; i < playlist.getPlaylistEntries().size(); i++) {
        long id = -1;
        String uri = null;

        try {
            uri = URLDecoder.decode(playlist.getPlaylistEntries().get(i).get(PlaylistEntry.URI), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            ex.printStackTrace();
            // Fall back to the undecoded uri.
            uri = playlist.getPlaylistEntries().get(i).get(PlaylistEntry.URI);
        }

        if (uriList.get(uri) != null) {
            id = uriList.get(uri);
            list[i] = id;
        } else {
            // the item doesn't exist, lets put it into the list to be inserted
            ContentValues value = new ContentValues();
            value.put(Media.MediaColumns.URI, uri);

            if (playlist.getPlaylistEntries().get(i).get(PlaylistEntry.PLAYLIST_METADATA) != null) {
                value.put(Media.MediaColumns.TITLE, playlist.getPlaylistEntries().get(i).get(PlaylistEntry.PLAYLIST_METADATA));
            }

            contentValues.add(value);
        }
    }

    if (contentValues.size() > 0) {
        ContentValues [] values = new ContentValues[contentValues.size()];
        values = contentValues.toArray(values);
        int numInserted = contentResolver.bulkInsert(Media.MediaColumns.CONTENT_URI, values);
        if (numInserted > 0) {
            /*uriList = retrieveAllRows(context);
            for (int i = 0; i < mediaFiles.size(); i++) {
                if (uriList.get(mediaFiles.get(i).getUrl()) != null) {
                    int id = uriList.get(mediaFiles.get(i).getUrl());
                    list[i] = id;
                }
            }*/
            // Re-run the lookup so the newly inserted rows get real row ids.
            list = addFilesToMediaStore(context, playlist);
        }
    }

    return list;
}

/**
 * Returns a map of every row in the media table, keyed by uri with the
 * row _ID as the value.
 */
private static Map<String, Integer> retrieveAllRows(Context context) {
    Map<String, Integer> list = new HashMap<String, Integer>();

    // Form an array specifying which columns to return.
    String [] projection = new String [] { Media.MediaColumns._ID, Media.MediaColumns.URI };

    // Get the base URI for the Media Files table in the Media content provider.
    Uri mediaFile = Media.MediaColumns.CONTENT_URI;

    // Make the query.
    // NOTE(review): ContentResolver.query() may return null, which would NPE on
    // moveToNext() — presumably the local provider never does. TODO confirm.
    Cursor cursor = context.getContentResolver().query(mediaFile, projection, null, null, null);

    while (cursor.moveToNext()) {
        int uriColumn = cursor.getColumnIndex(Media.MediaColumns.URI);
        int idColumn = cursor.getColumnIndex(Media.MediaColumns._ID);
        String uri = cursor.getString(uriColumn);
        int id = cursor.getInt(idColumn);
        list.put(uri, id);
    }

    cursor.close();

    return list;
}

/**
 * Returns the filesystem id of external storage, or -1 when the
 * "read_external_storage" preference is unset or the id cannot be read.
 */
public static int getCardId(Context context) {
    // NOTE(review): world-readable/writeable preferences are insecure and
    // deprecated on modern Android; kept byte-identical here.
    SharedPreferences preferences = context.getSharedPreferences("Music",
            Activity.MODE_WORLD_READABLE | Activity.MODE_WORLD_WRITEABLE);
    boolean canRead = preferences.getBoolean("read_external_storage", false);

    int id = -1;
    if (canRead) {
        ContentResolver res = context.getContentResolver();
        Cursor c = res.query(Uri.parse("content://media/external/fs_id"), null, null, null, null);
        if (c != null) {
            c.moveToFirst();
            id = c.getInt(0);
            c.close();
        }
    }
    return id;
}

/** One timestamped record in the in-memory debug-log ring buffer. */
static class LogEntry {
    Object item; // logged payload; Exceptions get a stack trace on dump
    long time;   // wall-clock time the entry was created

    LogEntry(Object o) {
        item = o;
        time = System.currentTimeMillis();
    }

    /** Writes "&lt;time&gt; : &lt;item&gt;" (or the exception stack trace) to out. */
    void dump(PrintWriter out) {
        sTime.set(time);
        out.print(sTime.toString() + " : ");
        if (item instanceof Exception) {
            ((Exception)item).printStackTrace(out);
        } else {
            out.println(item);
        }
    }
}

// Fixed-size ring buffer holding the most recent 100 debug entries.
private static LogEntry[] sMusicLog = new LogEntry[100];
private static int sLogPtr = 0;         // next slot to overwrite
private static Time sTime = new Time(); // scratch formatter shared by LogEntry.dump()

/** Appends an entry to the ring buffer, overwriting the oldest when full. */
public static void debugLog(Object o) {
    sMusicLog[sLogPtr] = new LogEntry(o);
    sLogPtr++;
    if (sLogPtr >= sMusicLog.length) {
        sLogPtr = 0;
    }
}

/** Dumps the ring buffer to out, oldest entry first. */
static void debugDump(PrintWriter out) {
    for (int i = 0; i < sMusicLog.length; i++) {
        int idx = (sLogPtr + i);
        if (idx >= sMusicLog.length) {
            idx -= sMusicLog.length;
        }
        LogEntry entry = sMusicLog[idx];
        if (entry != null) {
            entry.dump(out);
        }
    }
}
}
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package controllers; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import models.Entries; import models.Entry; import models.Journal; /** * * @author Max */ public class EntryController implements Serializable{ private String filePath; private Journal oldJournal; //private Journal journal; private Journal journal; public EntryController() { } public EntryController(String filePath, Journal journal) { super(); this.filePath = filePath; this.journal = journal; } public void setFilePath(String filePath) throws Exception{ this.filePath = filePath; JAXBContext jc = JAXBContext.newInstance(Journal.class); Unmarshaller u = jc.createUnmarshaller(); FileInputStream fin = new FileInputStream(filePath); journal = (Journal) u.unmarshal(fin); this.oldJournal = journal; fin.close(); } public void updateXML(Journal journal, String filePath) throws Exception{ this.journal = journal; this.filePath = filePath; JAXBContext jc = JAXBContext.newInstance(Journal.class); Marshaller m = jc.createMarshaller(); m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true); FileOutputStream fout = new FileOutputStream(filePath); m.marshal(journal, fout); fout.close(); } public void saveEntries() throws JAXBException, IOException{ for(Entry e : journal.getEntries()){ if(oldJournal.getEntries().contains(e)){ for(Entry o : oldJournal.getEntries()){ if(o.getUserID() == e.getUserID() && o.getJournalID() == e.getJournalID() && o.getEntryID() == e.getEntryID()){ o.replaceEntry(e); 
} } } else{ oldJournal.addEntry(e); } } JAXBContext jc = JAXBContext.newInstance(Journal.class); Marshaller m = jc.createMarshaller(); m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true); FileOutputStream fout = new FileOutputStream(filePath); m.marshal(oldJournal, fout); fout.close(); } public int getNewEntryID(){ if(journal.getEntries().size() > 0){ int finalID = journal.getEntries().get(journal.getEntries().size() - 1).getEntryID(); return finalID + 1; } else{ return 1; } } public ArrayList<Entry> getEntries(){ return journal.getEntries(); } public ArrayList<Entry> getEntriesForJournal(int userID, int journalID){ ArrayList<Entry> journalEntries = new ArrayList<Entry>(); for(Entry e : journal.getEntries()){ if(e.getUserID() == userID && e.getJournalID() == journalID){ journalEntries.add(e); } } return journalEntries; } public Entry getEntryByID(int entryID){ Entries journalEntries = new Entries(); for(Entry e : journal.getEntries()){ if(e.getEntryID() == entryID){ return e; } } return null; } public ArrayList<Entry> getByDate(Date date){ Calendar c1 = Calendar.getInstance(); c1.setTime(date); Calendar c2 = Calendar.getInstance(); ArrayList<Entry> dateEntries = new ArrayList<Entry>(); for(Entry e : journal.getEntries()){ c2.setTime(e.getDateCreated()); if(c1.get(Calendar.YEAR) == c2.get(Calendar.YEAR) && c1.get(Calendar.DAY_OF_YEAR) == c2.get(Calendar.DAY_OF_YEAR)){ dateEntries.add(e); } } return dateEntries; } public ArrayList<Entry> getByTitle(String title){ ArrayList<Entry> titleEntries = new ArrayList<Entry>(); for(Entry e : journal.getEntries()){ if(e.getTitle().contains(title)){ titleEntries.add(e); } } return titleEntries; } public ArrayList<Entry> getAllEntries(){ ArrayList<Entry> journalEntries = new ArrayList<Entry>(); if(journal.getEntries().size() > 0){ for(Entry e : journal.getEntries()){ journalEntries.add(e); } } return journalEntries; } public ArrayList<Entry> getHiddenEntries(){ ArrayList<Entry> journalEntries = new ArrayList<Entry>(); 
if(journal.getEntries().size() > 0){ for(Entry e : journal.getEntries()){ if(e.getFlag().equals("hidden")){ journalEntries.add(e); } } } return journalEntries; } public ArrayList<Entry> getNonHiddenEntries(){ ArrayList<Entry> journalEntries = new ArrayList<Entry>(); if(journal.getEntries().size() > 0){ for(Entry e : journal.getEntries()){ if(e.getFlag().equals("visible")){ journalEntries.add(e); } } } return journalEntries; } public Journal getJournal(){ return this.journal; } public void setJournal(Journal journal){ this.journal = journal; } public void hideEntry(int entryID){ Entry entry = getEntryByID(entryID); if(entry.getFlag().equals("hidden")){ entry.setFlag("visible"); } else if(entry.getFlag().equals("visible")){ entry.setFlag("hidden"); } } public ArrayList<Entry> sortByTitle(ArrayList<Entry> e){ e.sort(Comparator.comparing(Entry::getTitleLowercase)); return e; } public ArrayList<Entry> sortByTitleDesc(ArrayList<Entry> e){ e.sort(Comparator.comparing(Entry::getTitleLowercase, (s1, s2) -> { return s2.compareTo(s1); })); return e; } }
/*
 * Copyright 2011-2013 the original author or authors.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package de.schildbach.wallet.goldcoin.ui;

import java.math.BigInteger;

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Parcelable;
import android.text.Editable;
import android.text.InputType;
import android.text.SpannableStringBuilder;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;

import com.google.goldcoin.core.Utils;

import de.schildbach.wallet.goldcoin.Constants;
import de.schildbach.wallet.goldcoin.util.WalletUtils;
import de.schildbach.wallet.goldcoin.R;

/**
 * Compound widget showing a currency amount in a TextView/EditText with a
 * currency-code drawable on the left and a delete/context button overlaid on
 * the right compound drawable.
 *
 * @author Andreas Schildbach
 */
public final class CurrencyAmountView extends FrameLayout {

    /** Callbacks for text changes, IME "done" and focus transitions. */
    public static interface Listener {
        void changed();

        void done();

        void focusChanged(final boolean hasFocus);
    }

    // Colors and drawables resolved from resources in init().
    private int significantColor, lessSignificantColor, errorColor;
    private Drawable deleteButtonDrawable, contextButtonDrawable;
    private CurrencyCodeDrawable currencyCodeDrawable;

    // Number of decimal places used when formatting amounts.
    private int precision = Constants.GLD_PRECISION;
    private boolean amountSigned = false;
    // When true, insignificant digits are rendered in a smaller span.
    private boolean smallerInsignificant = true;
    private boolean validateAmount = true;

    private TextView textView;  // first child from the layout; may be an EditText
    private View contextButton; // transparent click target over the right drawable
    private Listener listener;
    private OnClickListener contextButtonClickListener;

    public CurrencyAmountView(final Context context) {
        super(context);
        init(context);
    }

    public CurrencyAmountView(final Context context, final AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    /** Resolves colors and the delete-button drawable; shared by both constructors. */
    private void init(final Context context) {
        final Resources resources = context.getResources();
        significantColor = resources.getColor(R.color.fg_significant);
        lessSignificantColor = resources.getColor(R.color.fg_less_significant);
        errorColor = resources.getColor(R.color.fg_error);
        deleteButtonDrawable = resources.getDrawable(R.drawable.ic_input_delete);
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();

        final Context context = getContext();

        // The amount field is expected to be the first child declared in XML.
        textView = (TextView) getChildAt(0);
        textView.setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL);
        textView.setHintTextColor(lessSignificantColor);
        setHint(null);
        // Only validate when the field is actually editable.
        setValidateAmount(textView instanceof EditText);

        final TextViewListener textViewListener = new TextViewListener();
        textView.addTextChangedListener(textViewListener);
        textView.setOnFocusChangeListener(textViewListener);
        textView.setOnEditorActionListener(textViewListener);

        // Invisible view sized (in onMeasure) to cover the right compound drawable.
        contextButton = new View(context) {
            @Override
            protected void onMeasure(final int wMeasureSpec, final int hMeasureSpec) {
                setMeasuredDimension(textView.getCompoundPaddingRight(), textView.getMeasuredHeight());
            }
        };
        final LayoutParams chooseViewParams = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
        chooseViewParams.gravity = Gravity.RIGHT;
        contextButton.setLayoutParams(chooseViewParams);
        this.addView(contextButton);

        // NOTE(review): a goldcoin wallet passing CURRENCY_CODE_LITECOIN looks
        // like a fork leftover — TODO confirm the constant's actual value.
        setCurrencyCode(Constants.CURRENCY_CODE_LITECOIN);

        updateAppearance();
    }

    /** Sets (or clears, when null) the currency-code drawable shown left of the amount. */
    public void setCurrencyCode(final String currencyCode) {
        if (currencyCode != null) {
            final float textSize = textView.getTextSize();
            // Shrink the code to 20/24 of the text size when insignificant digits are smaller.
            final float smallerTextSize = textSize * (smallerInsignificant ? (20f / 24f) : 1);
            currencyCodeDrawable = new CurrencyCodeDrawable(currencyCode, smallerTextSize, lessSignificantColor, textSize * 0.37f);
        } else {
            currencyCodeDrawable = null;
        }

        updateAppearance();
    }

    public void setPrecision(final int precision) {
        this.precision = precision;
    }

    public void setAmountSigned(final boolean amountSigned) {
        this.amountSigned = amountSigned;
    }

    public void setSmallerInsignificant(final boolean smallerInsignificant) {
        this.smallerInsignificant = smallerInsignificant;
    }

    public void setValidateAmount(final boolean validateAmount) {
        this.validateAmount = validateAmount;
    }

    /** Installs a drawable + click handler shown on the right while the field is empty. */
    public void setContextButton(final int contextButtonResId, final OnClickListener contextButtonClickListener) {
        this.contextButtonDrawable = getContext().getResources().getDrawable(contextButtonResId);
        this.contextButtonClickListener = contextButtonClickListener;

        updateAppearance();
    }

    public void setListener(final Listener listener) {
        this.listener = listener;
    }

    /** Returns the entered amount in nanocoins, or null when the text is not valid. */
    public BigInteger getAmount() {
        if (isValidAmount())
            return Utils.toNanoCoins(textView.getText().toString().trim());
        else
            return null;
    }

    /** Formats and displays the given amount; null clears the field. */
    public void setAmount(final BigInteger amount) {
        if (amount != null)
            textView.setText(amountSigned ? WalletUtils.formatValue(amount, Constants.CURRENCY_PLUS_SIGN, Constants.CURRENCY_MINUS_SIGN, precision)
                    : WalletUtils.formatValue(amount, precision));
        else
            textView.setText(null);
    }

    /** Shows the given amount (or "0.00" when null) as the hint text. */
    public void setHint(final BigInteger amount) {
        final SpannableStringBuilder hint;
        if (amount != null)
            hint = new SpannableStringBuilder(WalletUtils.formatValue(amount, precision));
        else
            hint = new SpannableStringBuilder("0.00");

        WalletUtils.formatSignificant(hint, smallerInsignificant ? WalletUtils.SMALLER_SPAN : null);
        textView.setHint(hint);
    }

    @Override
    public void setEnabled(final boolean enabled) {
        super.setEnabled(enabled);

        textView.setEnabled(enabled);

        updateAppearance();
    }

    public void setTextColor(final int color) {
        significantColor = color;

        updateAppearance();
    }

    /** Toggles strike-through rendering of the amount text. */
    public void setStrikeThru(final boolean strikeThru) {
        if (strikeThru)
            textView.setPaintFlags(textView.getPaintFlags() | Paint.STRIKE_THRU_TEXT_FLAG);
        else
            textView.setPaintFlags(textView.getPaintFlags() & ~Paint.STRIKE_THRU_TEXT_FLAG);
    }

    /** True when the field holds a parseable, non-negative amount; empty text is invalid. */
    private boolean isValidAmount() {
        final String amount = textView.getText().toString().trim();

        try {
            if (amount.length() > 0) {
                final BigInteger nanoCoins = Utils.toNanoCoins(amount);
                if (nanoCoins.signum() >= 0)
                    return true;
            }
        } catch (final Exception x) {
            // Parse failure simply means "not valid"; fall through to return false.
        }

        return false;
    }

    // Clears the field and refocuses it when the delete button is tapped.
    private final OnClickListener deleteClickListener = new OnClickListener() {
        public void onClick(final View v) {
            textView.setText(null);
            textView.requestFocus();
        }
    };

    /**
     * Refreshes compound drawables, the right-hand button behavior, and the
     * text color (error color when validation fails).
     */
    private void updateAppearance() {
        final boolean enabled = textView.isEnabled();

        contextButton.setEnabled(enabled);

        final String amount = textView.getText().toString().trim();

        if (enabled && amount.length() > 0) {
            // Non-empty text: show the delete button on the right.
            textView.setCompoundDrawablesWithIntrinsicBounds(currencyCodeDrawable, null, deleteButtonDrawable, null);
            contextButton.setOnClickListener(deleteClickListener);
        } else if (enabled && contextButtonDrawable != null) {
            // Empty text with a configured context action: show it instead.
            textView.setCompoundDrawablesWithIntrinsicBounds(currencyCodeDrawable, null, contextButtonDrawable, null);
            contextButton.setOnClickListener(contextButtonClickListener);
        } else {
            textView.setCompoundDrawablesWithIntrinsicBounds(currencyCodeDrawable, null, null, null);
            contextButton.setOnClickListener(null);
        }

        contextButton.requestLayout();

        textView.setTextColor(!validateAmount || isValidAmount() ? significantColor : errorColor);
    }

    @Override
    protected Parcelable onSaveInstanceState() {
        final Bundle state = new Bundle();
        state.putParcelable("super_state", super.onSaveInstanceState());
        state.putSerializable("amount", getAmount());
        return state;
    }

    @Override
    protected void onRestoreInstanceState(final Parcelable state) {
        if (state instanceof Bundle) {
            final Bundle bundle = (Bundle) state;
            super.onRestoreInstanceState(bundle.getParcelable("super_state"));
            setAmount((BigInteger) bundle.getSerializable("amount"));
        } else {
            super.onRestoreInstanceState(state);
        }
    }

    /** Combined text/focus/IME listener driving validation, reformatting and callbacks. */
    private final class TextViewListener implements TextWatcher, OnFocusChangeListener, OnEditorActionListener {
        public void afterTextChanged(final Editable s) {
            // workaround for German keyboards
            final String original = s.toString();
            final String replaced = original.replace(',', '.');
            if (!replaced.equals(original)) {
                s.clear();
                s.append(replaced);
            }

            WalletUtils.formatSignificant(s, smallerInsignificant ? WalletUtils.SMALLER_SPAN : null);
        }

        public void beforeTextChanged(final CharSequence s, final int start, final int count, final int after) {
        }

        public void onTextChanged(final CharSequence s, final int start, final int before, final int count) {
            updateAppearance();
            if (listener != null)
                listener.changed();
        }

        public void onFocusChange(final View v, final boolean hasFocus) {
            if (!hasFocus) {
                // Canonicalize the displayed amount when the field loses focus.
                final BigInteger amount = getAmount();
                if (amount != null)
                    setAmount(amount);
            }

            if (listener != null)
                listener.focusChanged(hasFocus);
        }

        public boolean onEditorAction(final TextView v, final int actionId, final KeyEvent event) {
            if (actionId == EditorInfo.IME_ACTION_DONE && listener != null)
                listener.done();

            return false;
        }
    }
}
/* * File: MatrixBasedTermSimilarityNetwork.java * Authors: Justin Basilico * Company: Sandia National Laboratories * Project: Cognitive Foundry * * Copyright March 18, 2009, Sandia Corporation. * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive * license for use of this work by or on behalf of the U.S. Government. Export * of this program may require a license from the United States Government. * See CopyrightHistory.txt for complete details. * */ package gov.sandia.cognition.text.term.relation; import gov.sandia.cognition.math.matrix.DimensionalityMismatchException; import gov.sandia.cognition.math.matrix.Matrix; import gov.sandia.cognition.math.matrix.VectorEntry; import gov.sandia.cognition.text.relation.RelationNetwork; import gov.sandia.cognition.text.term.IndexedTerm; import gov.sandia.cognition.text.term.Term; import gov.sandia.cognition.text.term.TermIndex; import java.util.Collections; import java.util.LinkedHashSet; import java.util.Set; /** * A relation network between terms based on their similarity. The similarity * values are stored in an underlying matrix. * * @author Justin Basilico * @since 3.0 */ public class MatrixBasedTermSimilarityNetwork implements RelationNetwork<IndexedTerm, IndexedTermSimilarityRelation> { /** The index of terms. */ protected TermIndex termIndex; /** The similarities between terms. */ protected Matrix similarities; /** * Creates a new {@code MatrixBasedTermSimilarityNetwork}. * * @param termIndex * The index of terms that contains the nodes of the network. * @param similarities * The square matrix of similarities between terms. Must have a number * of rows and columns equal to the number of terms in the term index. 
*/ public MatrixBasedTermSimilarityNetwork( final TermIndex termIndex, final Matrix similarities) { super(); if (similarities.getNumRows() != termIndex.getTermCount() || similarities.getNumColumns() != termIndex.getTermCount()) { throw new DimensionalityMismatchException( "the number of terms in the term index must match the " + "dimensions of the square similarities matrix"); } this.setTermIndex(termIndex); this.setSimilarities(similarities); } /** * Gets the similarity between the two given terms. * * @param sourceTerm * The source term. * @param targetTerm * The target term. * @return * The similarity between the two given terms if both exist in the * term index. Otherwise, 0.0 is returned. */ public double getSimilarity( final Term sourceTerm, final Term targetTerm) { final IndexedTerm source = this.termIndex.getIndexedTerm(sourceTerm); final IndexedTerm target = this.termIndex.getIndexedTerm(targetTerm); if (source == null || target == null) { return 0.0; } else { return this.getSimilarity(source, target); } } /** * Gets the similarity between the two given terms. * * @param source * The source term. * @param target * The target term. * @return * The similarity between the two given terms if both exist in the * term index. Otherwise, 0.0 is returned. */ public double getSimilarity( final IndexedTerm source, final IndexedTerm target) { if (source == null || target == null) { return 0.0; } else { // TODO: Should we enforce that the indexed terms are valid here? IE: That they // actually match with the term index. return this.getSimilarity(source.getIndex(), target.getIndex()); } } /** * Gets the similarity between the two given terms. * * @param sourceIndex * The index of the source term. * @param targetIndex * The index of the target term. * @return * The similarity between the two given terms if both exist in the * term index. Otherwise, 0.0 is returned. 
*/ public double getSimilarity( final int sourceIndex, final int targetIndex) { // TODO: Should we enforce the indices here or just let the matrix barf on them? return this.similarities.getElement(sourceIndex, targetIndex); } public int getObjectCount() { return this.termIndex.getTermCount(); } public Set<IndexedTerm> getObjects() { return new LinkedHashSet<IndexedTerm>(this.termIndex.getTerms()); } public boolean isObject( final Object o) { return o != null && o instanceof IndexedTerm && this.termIndex.hasIndexedTerm((IndexedTerm) o); } public boolean hasRelation( final IndexedTerm source, final IndexedTerm target) { return source != null && target != null && this.getSimilarity(source, target) != 0.0; } public IndexedTermSimilarityRelation getRelation( final IndexedTerm source, final IndexedTerm target) { if (source == null || target == null) { return null; } final int sourceIndex = source.getIndex(); final int targetIndex = target.getIndex(); final double similarity = this.similarities.getElement( sourceIndex, targetIndex); if (similarity != 0.0) { return new IndexedTermSimilarityRelation( source, target, similarity); } else { return null; } } public Set<IndexedTermSimilarityRelation> getAllRelations( final IndexedTerm source, final IndexedTerm target) { // This is a singleton relationship. 
final IndexedTermSimilarityRelation relation = this.getRelation(source, target); if (relation == null) { return Collections.emptySet(); } else { return Collections.singleton(relation); } } public IndexedTerm getRelationSource( final IndexedTermSimilarityRelation relation) { if (relation == null) { return null; } else { return relation.getSource(); } } public IndexedTerm getRelationTarget( final IndexedTermSimilarityRelation relation) { if (relation == null) { return null; } else { return relation.getTarget(); } } public Set<IndexedTermSimilarityRelation> relationsOf( final IndexedTerm term) { final LinkedHashSet<IndexedTermSimilarityRelation> result = new LinkedHashSet<IndexedTermSimilarityRelation>(); // Using the linked hash set will remove a redundant self-relation. result.addAll(this.relationsFrom(term)); result.addAll(this.relationsTo(term)); return result; } public Set<IndexedTermSimilarityRelation> relationsFrom( final IndexedTerm source) { final int sourceIndex = source.getIndex(); final LinkedHashSet<IndexedTermSimilarityRelation> result = new LinkedHashSet<IndexedTermSimilarityRelation>(); // Walk the rows of the matrix to get the relation. for (VectorEntry entry : this.similarities.getRow(sourceIndex)) { final double similarity = entry.getValue(); if (similarity != 0.0) { final IndexedTerm target = this.termIndex.getIndexedTerm( entry.getIndex()); result.add(new IndexedTermSimilarityRelation(source, target, similarity)); } // else - We ignore zero similarities. } return result; } public Set<IndexedTermSimilarityRelation> relationsTo( final IndexedTerm target) { final int targetIndex = target.getIndex(); final LinkedHashSet<IndexedTermSimilarityRelation> result = new LinkedHashSet<IndexedTermSimilarityRelation>(); // Walk the columns of the matrix to get the relation. 
for (VectorEntry entry : this.similarities.getColumn(targetIndex)) { final double similarity = entry.getValue(); if (similarity != 0.0) { final IndexedTerm source = this.termIndex.getIndexedTerm( entry.getIndex()); result.add(new IndexedTermSimilarityRelation(source, target, similarity)); } // else - We ignore zero similarities. } return result; } /** * Gets the index of terms. * * @return * The index of terms. */ public TermIndex getTermIndex() { return this.termIndex; } /** * Sets the index of terms. * * @param termIndex * The index of terms. */ protected void setTermIndex( final TermIndex termIndex) { this.termIndex = termIndex; } /** * Gets the similarities between terms. * * @return * The similarities between terms. */ public Matrix getSimilarities() { return this.similarities; } /** * Gets the similarities between terms. * * @param similarities * The similarities between terms. */ protected void setSimilarities( final Matrix similarities) { this.similarities = similarities; } }
/* * %CopyrightBegin% * * Copyright Ericsson AB 2000-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. * * %CopyrightEnd% */ package com.ericsson.otp.erlang; /** * <p> * Provides a simple mechanism for exchanging messages with Erlang processes or * other instances of this class. * </p> * * <p> * Each mailbox is associated with a unique {@link OtpErlangPid pid} that * contains information necessary for delivery of messages. When sending * messages to named processes or mailboxes, the sender pid is made available to * the recipient of the message. When sending messages to other mailboxes, the * recipient can only respond if the sender includes the pid as part of the * message contents. The sender can determine his own pid by calling * {@link #self self()}. * </p> * * <p> * Mailboxes can be named, either at creation or later. Messages can be sent to * named mailboxes and named Erlang processes without knowing the * {@link OtpErlangPid pid} that identifies the mailbox. This is neccessary in * order to set up initial communication between parts of an application. Each * mailbox can have at most one name. * </p> * * <p> * Since this class was intended for communication with Erlang, all of the send * methods take {@link OtpErlangObject OtpErlangObject} arguments. 
However this * class can also be used to transmit arbitrary Java objects (as long as they * implement one of java.io.Serializable or java.io.Externalizable) by * encapsulating the object in a {@link OtpErlangBinary OtpErlangBinary}. * </p> * * <p> * Messages to remote nodes are externalized for transmission, and as a result * the recipient receives a <b>copy</b> of the original Java object. To ensure * consistent behaviour when messages are sent between local mailboxes, such * messages are cloned before delivery. * </p> * * <p> * Additionally, mailboxes can be linked in much the same way as Erlang * processes. If a link is active when a mailbox is {@link #close closed}, any * linked Erlang processes or OtpMboxes will be sent an exit signal. As well, * exit signals will be (eventually) sent if a mailbox goes out of scope and its * {@link #finalize finalize()} method called. However due to the nature of * finalization (i.e. Java makes no guarantees about when {@link #finalize * finalize()} will be called) it is recommended that you always explicitly * close mailboxes if you are using links instead of relying on finalization to * notify other parties in a timely manner. * </p> * * When retrieving messages from a mailbox that has received an exit signal, an * {@link OtpErlangExit OtpErlangExit} exception will be raised. Note that the * exception is queued in the mailbox along with other messages, and will not be * raised until it reaches the head of the queue and is about to be retrieved. 
* </p> * */ public class OtpMbox { OtpNode home; OtpErlangPid self; GenericQueue queue; String name; Links links; // package constructor: called by OtpNode:createMbox(name) // to create a named mbox OtpMbox(final OtpNode home, final OtpErlangPid self, final String name) { this.self = self; this.home = home; this.name = name; queue = new GenericQueue(); links = new Links(10); } // package constructor: called by OtpNode:createMbox() // to create an anonymous OtpMbox(final OtpNode home, final OtpErlangPid self) { this(home, self, null); } /** * <p> * Get the identifying {@link OtpErlangPid pid} associated with this * mailbox. * </p> * * <p> * The {@link OtpErlangPid pid} associated with this mailbox uniquely * identifies the mailbox and can be used to address the mailbox. You can * send the {@link OtpErlangPid pid} to a remote communicating part so that * he can know where to send his response. * </p> * * @return the self pid for this mailbox. */ public OtpErlangPid self() { return self; } /** * <p> * Register or remove a name for this mailbox. Registering a name for a * mailbox enables others to send messages without knowing the * {@link OtpErlangPid pid} of the mailbox. A mailbox can have at most one * name; if the mailbox already had a name, calling this method will * supercede that name. * </p> * * @param name * the name to register for the mailbox. Specify null to * unregister the existing name from this mailbox. * * @return true if the name was available, or false otherwise. */ public synchronized boolean registerName(final String name) { return home.registerName(name, this); } /** * Get the registered name of this mailbox. * * @return the registered name of this mailbox, or null if the mailbox had * no registerd name. */ public String getName() { return name; } /** * Block until a message arrives for this mailbox. * * @return an {@link OtpErlangObject OtpErlangObject} representing the body * of the next message waiting in this mailbox. 
* * @exception OtpErlangDecodeException * if the message can not be decoded. * * @exception OtpErlangExit * if a linked {@link OtpErlangPid pid} has exited or has * sent an exit signal to this mailbox. */ public OtpErlangObject receive() throws OtpErlangExit, OtpErlangDecodeException { try { return receiveMsg().getMsg(); } catch (final OtpErlangExit e) { throw e; } catch (final OtpErlangDecodeException f) { throw f; } } /** * Wait for a message to arrive for this mailbox. * * @param timeout * the time, in milliseconds, to wait for a message before * returning null. * * @return an {@link OtpErlangObject OtpErlangObject} representing the body * of the next message waiting in this mailbox. * * @exception OtpErlangDecodeException * if the message can not be decoded. * * @exception OtpErlangExit * if a linked {@link OtpErlangPid pid} has exited or has * sent an exit signal to this mailbox. */ public OtpErlangObject receive(final long timeout) throws OtpErlangExit, OtpErlangDecodeException { try { final OtpMsg m = receiveMsg(timeout); if (m != null) { return m.getMsg(); } } catch (final OtpErlangExit e) { throw e; } catch (final OtpErlangDecodeException f) { throw f; } catch (final InterruptedException g) { } return null; } /** * Block until a message arrives for this mailbox. * * @return a byte array representing the still-encoded body of the next * message waiting in this mailbox. * * @exception OtpErlangExit * if a linked {@link OtpErlangPid pid} has exited or has * sent an exit signal to this mailbox. * */ public OtpInputStream receiveBuf() throws OtpErlangExit { return receiveMsg().getMsgBuf(); } /** * Wait for a message to arrive for this mailbox. * * @param timeout * the time, in milliseconds, to wait for a message before * returning null. * * @return a byte array representing the still-encoded body of the next * message waiting in this mailbox. 
* * @exception OtpErlangExit * if a linked {@link OtpErlangPid pid} has exited or has * sent an exit signal to this mailbox. * * @exception InterruptedException * if no message if the method times out before a message * becomes available. */ public OtpInputStream receiveBuf(final long timeout) throws InterruptedException, OtpErlangExit { final OtpMsg m = receiveMsg(timeout); if (m != null) { return m.getMsgBuf(); } return null; } /** * Block until a message arrives for this mailbox. * * @return an {@link OtpMsg OtpMsg} containing the header information as * well as the body of the next message waiting in this mailbox. * * @exception OtpErlangExit * if a linked {@link OtpErlangPid pid} has exited or has * sent an exit signal to this mailbox. * */ public OtpMsg receiveMsg() throws OtpErlangExit { final OtpMsg m = (OtpMsg) queue.get(); switch (m.type()) { case OtpMsg.exitTag: case OtpMsg.exit2Tag: try { final OtpErlangObject o = m.getMsg(); throw new OtpErlangExit(o, m.getSenderPid()); } catch (final OtpErlangDecodeException e) { throw new OtpErlangExit("unknown", m.getSenderPid()); } default: return m; } } /** * Wait for a message to arrive for this mailbox. * * @param timeout * the time, in milliseconds, to wait for a message. * * @return an {@link OtpMsg OtpMsg} containing the header information as * well as the body of the next message waiting in this mailbox. * * @exception OtpErlangExit * if a linked {@link OtpErlangPid pid} has exited or has * sent an exit signal to this mailbox. * * @exception InterruptedException * if no message if the method times out before a message * becomes available. 
*/ public OtpMsg receiveMsg(final long timeout) throws InterruptedException, OtpErlangExit { final OtpMsg m = (OtpMsg) queue.get(timeout); if (m == null) { return null; } switch (m.type()) { case OtpMsg.exitTag: case OtpMsg.exit2Tag: try { final OtpErlangObject o = m.getMsg(); throw new OtpErlangExit(o, m.getSenderPid()); } catch (final OtpErlangDecodeException e) { throw new OtpErlangExit("unknown", m.getSenderPid()); } default: return m; } } /** * Send a message to a remote {@link OtpErlangPid pid}, representing either * another {@link OtpMbox mailbox} or an Erlang process. * * @param to * the {@link OtpErlangPid pid} identifying the intended * recipient of the message. * * @param msg * the body of the message to send. * */ public void send(final OtpErlangPid to, final OtpErlangObject msg) { try { final String node = to.node(); if (node.equals(home.node())) { home.deliver(new OtpMsg(to, (OtpErlangObject) msg.clone())); } else { final OtpCookedConnection conn = home.getConnection(node); if (conn == null) { return; } conn.send(self, to, msg); } } catch (final Exception e) { } } /** * Send a message to a named mailbox created from the same node as this * mailbox. * * @param name * the registered name of recipient mailbox. * * @param msg * the body of the message to send. * */ public void send(final String name, final OtpErlangObject msg) { home.deliver(new OtpMsg(self, name, (OtpErlangObject) msg.clone())); } /** * Send a message to a named mailbox created from another node. * * @param name * the registered name of recipient mailbox. * * @param node * the name of the remote node where the recipient mailbox is * registered. * * @param msg * the body of the message to send. 
* */ public void send(final String name, final String node, final OtpErlangObject msg) { try { final String currentNode = home.node(); if (node.equals(currentNode)) { send(name, msg); } else if (node.indexOf('@', 0) < 0 && node.equals(currentNode.substring(0, currentNode .indexOf('@', 0)))) { send(name, msg); } else { // other node final OtpCookedConnection conn = home.getConnection(node); if (conn == null) { return; } conn.send(self, name, msg); } } catch (final Exception e) { } } /** * Close this mailbox with the given reason. * * <p> * After this operation, the mailbox will no longer be able to receive * messages. Any delivered but as yet unretrieved messages can still be * retrieved however. * </p> * * <p> * If there are links from this mailbox to other {@link OtpErlangPid pids}, * they will be broken when this method is called and exit signals will be * sent. * </p> * * @param reason * an Erlang term describing the reason for the exit. */ public void exit(final OtpErlangObject reason) { home.closeMbox(this, reason); } /** * Equivalent to <code>exit(new OtpErlangAtom(reason))</code>. * </p> * * @see #exit(OtpErlangObject) */ public void exit(final String reason) { exit(new OtpErlangAtom(reason)); } /** * <p> * Send an exit signal to a remote {@link OtpErlangPid pid}. This method * does not cause any links to be broken, except indirectly if the remote * {@link OtpErlangPid pid} exits as a result of this exit signal. * </p> * * @param to * the {@link OtpErlangPid pid} to which the exit signal * should be sent. * * @param reason * an Erlang term indicating the reason for the exit. */ // it's called exit, but it sends exit2 public void exit(final OtpErlangPid to, final OtpErlangObject reason) { exit(2, to, reason); } /** * <p> * Equivalent to <code>exit(to, new * OtpErlangAtom(reason))</code>. 
* </p> * * @see #exit(OtpErlangPid, OtpErlangObject) */ public void exit(final OtpErlangPid to, final String reason) { exit(to, new OtpErlangAtom(reason)); } // this function used internally when "process" dies // since Erlang discerns between exit and exit/2. private void exit(final int arity, final OtpErlangPid to, final OtpErlangObject reason) { try { final String node = to.node(); if (node.equals(home.node())) { home.deliver(new OtpMsg(OtpMsg.exitTag, self, to, reason)); } else { final OtpCookedConnection conn = home.getConnection(node); if (conn == null) { return; } switch (arity) { case 1: conn.exit(self, to, reason); break; case 2: conn.exit2(self, to, reason); break; } } } catch (final Exception e) { } } /** * <p> * Link to a remote mailbox or Erlang process. Links are idempotent, calling * this method multiple times will not result in more than one link being * created. * </p> * * <p> * If the remote process subsequently exits or the mailbox is closed, a * subsequent attempt to retrieve a message through this mailbox will cause * an {@link OtpErlangExit OtpErlangExit} exception to be raised. Similarly, * if the sending mailbox is closed, the linked mailbox or process will * receive an exit signal. * </p> * * <p> * If the remote process cannot be reached in order to set the link, the * exception is raised immediately. * </p> * * @param to * the {@link OtpErlangPid pid} representing the object to * link to. * * @exception OtpErlangExit * if the {@link OtpErlangPid pid} referred to does not * exist or could not be reached. 
* */ public void link(final OtpErlangPid to) throws OtpErlangExit { try { final String node = to.node(); if (node.equals(home.node())) { if (!home.deliver(new OtpMsg(OtpMsg.linkTag, self, to))) { throw new OtpErlangExit("noproc", to); } } else { final OtpCookedConnection conn = home.getConnection(node); if (conn != null) { conn.link(self, to); } else { throw new OtpErlangExit("noproc", to); } } } catch (final OtpErlangExit e) { throw e; } catch (final Exception e) { } links.addLink(self, to); } /** * <p> * Remove a link to a remote mailbox or Erlang process. This method removes * a link created with {@link #link link()}. Links are idempotent; calling * this method once will remove all links between this mailbox and the * remote {@link OtpErlangPid pid}. * </p> * * @param to * the {@link OtpErlangPid pid} representing the object to * unlink from. * */ public void unlink(final OtpErlangPid to) { links.removeLink(self, to); try { final String node = to.node(); if (node.equals(home.node())) { home.deliver(new OtpMsg(OtpMsg.unlinkTag, self, to)); } else { final OtpCookedConnection conn = home.getConnection(node); if (conn != null) { conn.unlink(self, to); } } } catch (final Exception e) { } } /** * <p> * Create a connection to a remote node. * </p> * * <p> * Strictly speaking, this method is not necessary simply to set up a * connection, since connections are created automatically first time a * message is sent to a {@link OtpErlangPid pid} on the remote node. * </p> * * <p> * This method makes it possible to wait for a node to come up, however, or * check that a node is still alive. * </p> * * <p> * This method calls a method with the same name in {@link OtpNode#ping * Otpnode} but is provided here for convenience. * </p> * * @param node * the name of the node to ping. * * @param timeout * the time, in milliseconds, before reporting failure. 
*/ public boolean ping(final String node, final long timeout) { return home.ping(node, timeout); } /** * <p> * Get a list of all known registered names on the same {@link OtpNode node} * as this mailbox. * </p> * * <p> * This method calls a method with the same name in {@link OtpNode#getNames * Otpnode} but is provided here for convenience. * </p> * * @return an array of Strings containing all registered names on this * {@link OtpNode node}. */ public String[] getNames() { return home.getNames(); } /** * Determine the {@link OtpErlangPid pid} corresponding to a registered name * on this {@link OtpNode node}. * * <p> * This method calls a method with the same name in {@link OtpNode#whereis * Otpnode} but is provided here for convenience. * </p> * * @return the {@link OtpErlangPid pid} corresponding to the registered * name, or null if the name is not known on this node. */ public OtpErlangPid whereis(final String name) { return home.whereis(name); } /** * Close this mailbox. * * <p> * After this operation, the mailbox will no longer be able to receive * messages. Any delivered but as yet unretrieved messages can still be * retrieved however. * </p> * * <p> * If there are links from this mailbox to other {@link OtpErlangPid pids}, * they will be broken when this method is called and exit signals with * reason 'normal' will be sent. * </p> * * <p> * This is equivalent to {@link #exit(String) exit("normal")}. * </p> */ public void close() { home.closeMbox(this); } @Override protected void finalize() { close(); queue.flush(); } /** * Determine if two mailboxes are equal. * * @return true if both Objects are mailboxes with the same identifying * {@link OtpErlangPid pids}. */ @Override public boolean equals(final Object o) { if (!(o instanceof OtpMbox)) { return false; } final OtpMbox m = (OtpMbox) o; return m.self.equals(self); } @Override public int hashCode() { return self.hashCode(); } /* * called by OtpNode to deliver message to this mailbox. 
* * About exit and exit2: both cause exception to be raised upon receive(). * However exit (not 2) causes any link to be removed as well, while exit2 * leaves any links intact. */ void deliver(final OtpMsg m) { switch (m.type()) { case OtpMsg.linkTag: links.addLink(self, m.getSenderPid()); break; case OtpMsg.unlinkTag: links.removeLink(self, m.getSenderPid()); break; case OtpMsg.exitTag: links.removeLink(self, m.getSenderPid()); queue.put(m); break; case OtpMsg.exit2Tag: default: queue.put(m); break; } } // used to break all known links to this mbox void breakLinks(final OtpErlangObject reason) { final Link[] l = links.clearLinks(); if (l != null) { final int len = l.length; for (int i = 0; i < len; i++) { exit(1, l[i].remote(), reason); } } } }
/* * #%L * wcm.io * %% * Copyright (C) 2014 wcm.io * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package io.wcm.handler.mediasource.dam.impl; import java.util.Date; import java.util.List; import org.apache.commons.io.FilenameUtils; import org.apache.sling.api.adapter.SlingAdaptable; import org.apache.sling.api.resource.Resource; import org.apache.sling.api.resource.ValueMap; import org.jetbrains.annotations.NotNull; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.wcm.handler.media.CropDimension; import io.wcm.handler.media.MediaArgs; import io.wcm.handler.media.MediaFileType; import io.wcm.handler.media.Rendition; import io.wcm.handler.media.format.MediaFormat; import io.wcm.handler.url.UrlHandler; import io.wcm.sling.commons.adapter.AdaptTo; import io.wcm.wcm.commons.caching.ModificationDate; /** * {@link Rendition} implementation for DAM asset renditions. 
*/ class DamRendition extends SlingAdaptable implements Rendition { private final DamContext damContext; private final MediaArgs mediaArgs; private final RenditionMetadata rendition; private boolean fallback; /** * @param cropDimension Crop dimension * @param mediaArgs Media args * @param damContext DAM context objects */ DamRendition(CropDimension cropDimension, Integer rotation, MediaArgs mediaArgs, DamContext damContext) { this.damContext = damContext; this.mediaArgs = mediaArgs; RenditionMetadata resolvedRendition = null; // if no transformation parameters are given find non-transformed matching rendition if (cropDimension == null && rotation == null) { RenditionHandler renditionHandler = new DefaultRenditionHandler(damContext); resolvedRendition = renditionHandler.getRendition(mediaArgs); } else { // try to match with all transformations that are configured RenditionHandler renditionHandler = new TransformedRenditionHandler(cropDimension, rotation, damContext); resolvedRendition = renditionHandler.getRendition(mediaArgs); // if no match was found check against renditions without applying the explicit cropping if (resolvedRendition == null && cropDimension != null) { if (rotation != null) { renditionHandler = new TransformedRenditionHandler(null, rotation, damContext); resolvedRendition = renditionHandler.getRendition(mediaArgs); } else { renditionHandler = new DefaultRenditionHandler(damContext); resolvedRendition = renditionHandler.getRendition(mediaArgs); } fallback = true; } } // if no match was found and auto-cropping is enabled, try to build a transformed rendition // with automatically devised cropping parameters if (resolvedRendition == null && mediaArgs.isAutoCrop()) { DamAutoCropping autoCropping = new DamAutoCropping(damContext.getAsset(), mediaArgs); List<CropDimension> autoCropDimensions = autoCropping.calculateAutoCropDimensions(); for (CropDimension autoCropDimension : autoCropDimensions) { RenditionHandler renditionHandler = new 
TransformedRenditionHandler(autoCropDimension, rotation, damContext); resolvedRendition = renditionHandler.getRendition(mediaArgs); if (resolvedRendition != null) { break; } } } this.rendition = resolvedRendition; } @Override public String getUrl() { if (this.rendition == null) { return null; } String url = null; if (!mediaArgs.isDynamicMediaDisabled() && damContext.isDynamicMediaEnabled() && damContext.isDynamicMediaAsset()) { // if DM is enabled: try to get rendition URL from dynamic media String dynamicMediaPath = this.rendition.getDynamicMediaPath(this.mediaArgs.isContentDispositionAttachment(), damContext); if (dynamicMediaPath != null) { String productionAssetUrl = damContext.getDynamicMediaServerUrl(); if (productionAssetUrl != null) { url = productionAssetUrl + dynamicMediaPath; } } } if (url == null && (!damContext.isDynamicMediaEnabled() || !damContext.isDynamicMediaAemFallbackDisabled())) { // Render renditions in AEM: build externalized URL UrlHandler urlHandler = AdaptTo.notNull(damContext, UrlHandler.class); String mediaPath = this.rendition.getMediaPath(this.mediaArgs.isContentDispositionAttachment()); url = urlHandler.get(mediaPath).urlMode(this.mediaArgs.getUrlMode()) .buildExternalResourceUrl(this.rendition.adaptTo(Resource.class)); } return url; } @Override public String getPath() { if (this.rendition != null) { return this.rendition.getRendition().getPath(); } else { return null; } } @Override public String getFileName() { if (this.rendition != null) { return this.rendition.getFileName(this.mediaArgs.isContentDispositionAttachment()); } else { return null; } } @Override public String getFileExtension() { return FilenameUtils.getExtension(getFileName()); } @Override public long getFileSize() { if (this.rendition != null) { return this.rendition.getFileSize(); } else { return 0L; } } @Override public String getMimeType() { if (this.rendition != null) { return this.rendition.getMimeType(); } else { return null; } } @Override public Date 
getModificationDate() { if (this.rendition != null) { return ModificationDate.get(this.rendition.getRendition().adaptTo(Resource.class)); } else { return null; } } @Override public MediaFormat getMediaFormat() { if (this.rendition != null) { return this.rendition.getMediaFormat(); } else { return null; } } @Override @SuppressWarnings("null") @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") public @NotNull ValueMap getProperties() { if (this.rendition != null) { return this.rendition.getRendition().adaptTo(Resource.class).getValueMap(); } else { return ValueMap.EMPTY; } } @Override public boolean isImage() { return MediaFileType.isImage(getFileExtension()); } @Override public boolean isBrowserImage() { return MediaFileType.isBrowserImage(getFileExtension()); } @Override public boolean isVectorImage() { return MediaFileType.isVectorImage(getFileExtension()); } @Override @SuppressWarnings("deprecation") public boolean isFlash() { return MediaFileType.isFlash(getFileExtension()); } @Override public boolean isDownload() { return !isImage() && !isFlash(); } @Override public long getWidth() { if (this.rendition != null) { return this.rendition.getWidth(); } else { return 0; } } @Override public long getHeight() { if (this.rendition != null) { return this.rendition.getHeight(); } else { return 0; } } @Override public boolean isFallback() { return fallback; } @Override @SuppressWarnings("null") public <AdapterType> AdapterType adaptTo(Class<AdapterType> type) { if (this.rendition != null) { AdapterType result = this.rendition.adaptTo(type); if (result != null) { return result; } } return super.adaptTo(type); } @Override public String toString() { if (rendition != null) { return rendition.toString(); } return super.toString(); } }
package net.sourceforge.mayfly.acceptance;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

/**
 * To make this work: <pre>
   - Install postgres server and start it running
   - As a database superuser (perhaps "postgres"), run
       createuser --createdb --no-adduser -P mayflytest
     and supply a password of mayflytest.
       createdb mayflytest
   - Also edit pg_hba.conf to have a line such as:
     "host all all 127.0.0.1/32 trust"
     (which basically means that connections from localhost
     don't need to authenticate themselves; I'm not sure how this
     relates to passwords).
   </pre>
 */
public class PostgresDialect extends Dialect {

    @Override
    public Connection openConnection() throws Exception {
        Class.forName("org.postgresql.Driver");
        Connection bootstrapConnection = DriverManager.getConnection(
            "jdbc:postgresql:", "mayflytest", "mayflytest");
        try {
            SqlTestCase.execute("DROP DATABASE test", bootstrapConnection);
        }
        catch (SQLException databaseDoesNotExist) {
            // expected on the first run; nothing to drop
        }
        SqlTestCase.execute("CREATE DATABASE test", bootstrapConnection);
        bootstrapConnection.close();
        return openAdditionalConnection();
    }

    @Override
    public Connection openAdditionalConnection() throws SQLException {
        return DriverManager.getConnection(
            "jdbc:postgresql:test", "mayflytest", "mayflytest");
    }

    @Override
    public void shutdown(Connection connection) throws Exception {
        connection.close();
        Connection teardownConnection = DriverManager.getConnection(
            "jdbc:postgresql:", "mayflytest", "mayflytest");
        // The connection.close() above is needed for this to work, but
        // doesn't complete immediately.  So we need the retries.
        executeWithRetries("DROP DATABASE test", teardownConnection);
        teardownConnection.close();
    }

    /** Execute sql, retrying up to 10 times (100 ms apart) on SQLException. */
    private void executeWithRetries(String sql, Connection connection)
    throws Exception {
        int tries = 0;
        while (true) {
            try {
                SqlTestCase.execute(sql, connection);
                break;
            }
            catch (SQLException e) {
                if (tries == 10) {
                    throw e;
                }
                ++tries;
                Thread.sleep(100);
            }
        }
    }

    @Override
    public boolean fromIsOptional() {
        return true;
    }

    @Override
    public boolean canHaveLimitWithoutOrderBy() {
        // The postgres manual warns that the results may not be
        // meaningful, but postgres doesn't throw an error.
        return true;
    }

    @Override
    public boolean isReservedWord(String word) {
        return "offset".equalsIgnoreCase(word);
    }

    @Override
    public boolean canOrderByExpression(boolean isAggregate) {
        return true;
    }

    @Override
    public boolean whereCanReferToColumnAlias() {
        return false;
    }

    // True for postgres 8.0.7, false for postgres 8.1.4
//    public boolean canHaveHavingWithoutGroupBy() {
//        return true;
//    }

    // Seems to be false for postgres 8.1.4, true for 8.2.5
//    public boolean aggregateAsteriskIsForCountOnly() {
//        // I didn't really look into just what postgres
//        // does for this case.
//        return false;
//    }

    /*
     * As of Postgres 8.1.8, this is true.
     */
//    public boolean errorIfUpdateToAggregate(boolean rowsPresent) {
//        /* Some versions of Postgres apparently can crash - CVE-2006-5540 */
//
//        if (rowsPresent) {
//            // false for 8.1.4.  Probably true for some future version.
//            return false;
//        }
//        else {
//            /* This one is already true, I guess, although the message
//               is "ctid is NULL" which doesn't really make it clear to
//               me that Posgres is winning on purpose rather than by accident.
//             */
//            return true;
//        }
//    }

    @Override
    public boolean nullSortsLower() {
        return false;
    }

    @Override
    public boolean detectsSyntaxErrorsInPrepareStatement() {
        return false;
    }

    @Override
    public boolean backslashInAStringIsAnEscape() {
        /*
         * "our long-term plan to transition to SQL-standard
         * string literal rules, wherein backslash is
         * not a special character."
         * http://www.postgresql.org/docs/techdocs.50
         */
        return true;
    }

    @Override
    public boolean trailingSpacesConsultedInComparisons() {
        return true;
    }

    @Override
    public boolean schemasMissing() {
        // Haven't really looked too much at what postgres has
        // for schemas.  "create schema authorization mayflytest"
        // seemed to get somewhere but "set schema" didn't work was
        // about as far as I got.
        return true;
    }

    @Override
    public boolean numberOfValuesMustMatchNumberOfColumns() {
        return false;
    }

    @Override
    public boolean canInsertNoValues() {
        /* The hibernate dialect makes it look like the postgres syntax
           is "insert into foo default values" which actually seems
           fairly sensible.  Verify this. */
        return false;
    }

    @Override
    public boolean disallowNullsInExpressions() {
        return false;
    }

    @Override
    public boolean disallowNullOnRightHandSideOfIn() {
        return false;
    }

    @Override
    public boolean haveTinyint() {
        return false;
    }

    @Override
    public boolean expressionsAreTypeLong() {
        return false;
    }

    @Override
    public String binaryTypeName() {
        return "bytea";
    }

    @Override
    public boolean blobTypeWorks() {
        /* The error I'm getting is:
           Bad value for type int: \001\003\377\220
           I guess this is just a postgres bug (why would the type
           be "int" when we declare it as bytea?).  This is
           postgres 8.1.8-1.fc6 as shipped in Fedora. */
        return false;
    }

    @Override
    public boolean canGetBytesOnNumber() {
        return true;
    }

    @Override
    public boolean canMixStringAndInteger() {
        return true;
    }

    @Override
    public boolean canSetStringOnDecimalColumn() {
        return false;
    }

    @Override
    public boolean haveDropTableFooIfExists() {
        return false;
    }

    @Override
    public boolean haveDropTableIfExistsFoo() {
        return false;
    }

    @Override
    public boolean haveModifyColumn() {
        return false;
    }

    @Override
    public boolean canDropLastColumn() {
        return true;
    }

    @Override
    public boolean haveDropForeignKey() {
        return false;
    }

    @Override
    public boolean defaultValueCanBeExpression() {
        return true;
    }

    @Override
    public boolean allowDateInTimestampColumn() {
        return true;
    }

    @Override
    public boolean allowTimestampInDateColumn() {
        return true;
    }

    @Override
    public boolean haveSequencySerial() {
        return true;
    }

    @Override
    public String identityType() {
        return "serial primary key";
    }

    /**
     * According to discussion on postgres mailing lists, they plan on
     * adding sql200x syntax only when they can give it sql200x semantics.
     */
    @Override
    public boolean haveSql2003AutoIncrement() {
        return false;
    }

    @Override
    public boolean allowHexForBinary() {
        /* Postgres does have the x'00' syntax but it just seems to
           be for BIT VARYING(x) which doesn't seem to behave quite
           like BYTEA (or BLOB/BINARY in other databases). */
        return false;
    }

    @Override
    public String lastIdentityValueQuery(String table, String column) {
        // Postgres serial columns are backed by a sequence named
        // <table>_<column>_seq; plain concatenation replaces the
        // needless synchronized StringBuffer chain used before.
        return "select currval('" + table + "_" + column + "_seq')";
    }

    @Override
    public boolean autoCommitMustBeOffToCallRollback() {
        return false;
    }

    @Override
    public boolean allowOrderByOnDelete() {
        return false;
    }

    @Override
    public boolean metaDataProblemWithUppercaseTableName() {
        return true;
    }

    @Override
    public String productName() {
        return "PostgreSQL";
    }

    @Override
    public boolean deleteAllRowsIsSmartAboutForeignKeys() {
        return true;
    }

    @Override
    public boolean callJavaMethodAsStoredProcedure() {
        return false;
    }

    @Override
    public boolean haveDropIndexOn() {
        return false;
    }

}
// Copyright (c) 2000, 2001  Per M.A. Bothner.
// This is free software;  for terms and warranty disclaimer see ./COPYING.

package gnu.expr;
import gnu.bytecode.*;
import gnu.kawa.reflect.OccurrenceType;
import gnu.kawa.reflect.SingletonType;
import gnu.kawa.lispexpr.LangPrimType;

/**
 * A Target which is some variable that implements gnu.lists.Consumer.
 */

public class ConsumerTarget extends Target
{
  // Local variable holding the Consumer that values are written to.
  Variable consumer;
  // True iff `consumer` is the current CallContext's consumer field
  // (set only by makeContextTarget).
  boolean isContextTarget;
  Type type;

  public ConsumerTarget(Variable consumer)
  {
    this.consumer = consumer;
    this.type = Type.objectType;
  }

  public ConsumerTarget(Variable consumer, Type type)
  {
    this.consumer = consumer;
    this.type = type;
  }

  // Lazily-created variant of this target that accepts a single item.
  private ConsumerTarget singleTarget;

  /** Get equivalent target but which only accepts a single item. */
  public ConsumerTarget getSingleTarget ()
  {
    if (singleTarget == null)
      {
        Type base;
        // Reuse this target's base type if it is already a known
        // single-occurrence type; otherwise fall back to SingletonType.
        if (! (type instanceof OccurrenceType)
            || ! (OccurrenceType.itemCountIsOne
                  (base = ((OccurrenceType) type).getBase())))
          base = SingletonType.getInstance();
        singleTarget = new ConsumerTarget(consumer, base);
        singleTarget.isContextTarget = this.isContextTarget;
      }
    return singleTarget;
  }

  public Variable getConsumerVariable () { return consumer; }

  /** True iff this target is the current CallContext's current Consumer. */
  public final boolean isContextTarget () { return isContextTarget; }

  /** Make a Target that uses the current CallContext's current Consumer.
   * Loads the CallContext's consumer field into a fresh local variable
   * "$result" and wraps it in a ConsumerTarget flagged as a context target.
   */
  public static Target makeContextTarget (Compilation comp, Type type)
  {
    CodeAttr code = comp.getCode();
    comp.loadCallContext();
    code.emitGetField(Compilation.typeCallContext
                      .getDeclaredField("consumer"));
    Scope scope = code.getCurrentScope();
    Variable result
      = scope.addVariable(code, Compilation.typeConsumer, "$result");
    code.emitStore(result);
    ConsumerTarget target = new ConsumerTarget(result, type);
    target.isContextTarget = true;
    return target;
  }

  // Compile exp into a gnu.lists.Values via Values.make()/canonicalize().
  public static void compileUsingValues (Expression exp, Compilation comp,
                                         Target target)
  {
    ClassType typeValues = Compilation.typeValues;
    compileUsingConsumer(exp, comp, target,
                         typeValues.getDeclaredMethod("make", 0),
                         typeValues.getDeclaredMethod("canonicalize", 0));
  }

  /** Compile an expression using a temporary Consumer, if needed. */
  public static void compileUsingConsumer (Expression exp, Compilation comp,
                                           Target target)
  {
    if (target instanceof IgnoreTarget
        || target instanceof ConsumerTarget)
      exp.compile(comp, target);
    else
      compileUsingValues(exp, comp, target);
  }

  // Compile exp with its values sent to a Consumer created by makeMethod;
  // afterwards resultMethod (if non-null) is invoked on the consumer and its
  // result is passed on to `target`.
  public static void compileUsingConsumer (Expression exp, Compilation comp,
                                           Target target, Method makeMethod,
                                           Method resultMethod)
  {
    CodeAttr code = comp.getCode();
    Scope scope = code.pushScope();
    Type ctype;
    // NOTE(review): identity comparison of the method name against the
    // interned literal "<init>" — appears to rely on constant-pool interning;
    // preserved as-is.
    if (makeMethod.getName() == "<init>")
      {
        ClassType cltype = makeMethod.getDeclaringClass();
        ctype = cltype;
        code.emitNew(cltype);
        code.emitDup(ctype);
        code.emitInvoke(makeMethod);
      }
    else
      {
        ctype = makeMethod.getReturnType();
        code.emitInvokeStatic(makeMethod);
      }
    Variable consumer = scope.addVariable(code, ctype, null);
    ConsumerTarget ctarget = new ConsumerTarget(consumer, exp.getType());
    code.emitStore(consumer);
    exp.compile(comp, ctarget);
    code.emitLoad(consumer);
    if (resultMethod != null)
      code.emitInvoke(resultMethod);
    code.popScope();
    target.compileFromStack(comp, exp.getType());
  }

  public void compileFromStack(Compilation comp, Type stackType)
  {
    compileFromStack(comp, stackType, -1);
  }

  /** Write stack value to Consumer.
   * @param consumerPushed if -1, then Consumer has not been pushed;
   * if 1, Consumer was pushed before value, and value is a known singleton;
   * if 0, Consumer was pushed before value, otherwise.
   */
  void compileFromStack(Compilation comp, Type stackType, int consumerPushed)
  {
    CodeAttr code = comp.getCode();
    String methodName = null;
    Method method = null;
    ClassType methodClass = Compilation.typeConsumer;
    Type methodArg = null;
    // islong: value occupies two stack slots (long/double), which changes
    // how the consumer reference is inserted beneath it (no simple swap).
    boolean islong = false;
    char sig;
    Type ttype = getType();
    if (! stackType.isVoid())
      {
        StackTarget.convert(comp, stackType, ttype);
        stackType = ttype;
      }
    // We don't want to push a character as an int (which is its
    // implementation type) since it isn't an integer.  So we box it.
    if (stackType instanceof LangPrimType
        && (stackType == LangPrimType.characterType
            || stackType == LangPrimType.characterOrEofType))
      {
        stackType.emitCoerceToObject(code);
        stackType = Type.objectType;
      }
    Type implType = stackType.getImplementationType();
    if (implType instanceof PrimType)
      {
        // Select the Consumer (or Sequences) write method from the JVM
        // signature character of the primitive type.
        sig = implType.getSignature().charAt(0);
        switch (sig)
          {
          case 'I':
            if (stackType == LangPrimType.unsignedIntType)
              {
                methodName = "writeUInt";
                methodClass = typeSequences;
                break;
              }
            // ... otherwise fall through ...
          case 'B':  case 'S':
            methodName = "writeInt";
            methodArg = Type.intType;
            break;
          case 'J':
            if (stackType == LangPrimType.unsignedLongType)
              {
                methodName = "writeULong";
                methodClass = typeSequences;
              }
            else
              {
                methodName = "writeLong";
                methodArg = Type.longType;
              }
            islong = true;
            break;
          case 'F':
            methodName = "writeFloat";
            methodArg = Type.floatType;
            break;
          case 'D':
            methodName = "writeDouble";
            methodArg = Type.doubleType;
            islong = true;
            break;
          case 'C':
            /* #ifdef JAVA5 */
            methodName = "append";
            methodArg = Type.charType;
            /* #else */
            // methodName = "write";
            // methodArg = Type.intType;
            /* #endif */
            break;
          case 'Z':
            methodName = "writeBoolean";
            methodArg = Type.booleanType;
            break;
          case 'V':
            // void produces no value; nothing to write.
            return;
          }
      }
    else
      {
        sig = '\0';
        if (consumerPushed == 1
            || OccurrenceType.itemCountIsOne(implType))
          {
            // Known single item: a plain writeObject suffices.
            methodName = "writeObject";
            methodArg = Type.pointer_type;
          }
        else
          {
            // Possibly multiple values: delegate to Values.writeValues.
            method = (Compilation.typeValues
                      .getDeclaredMethod("writeValues", 2));
            code.emitLoad(consumer);
            if (consumerPushed == 0) // ??? Seems wrong - Never used.
              code.emitSwap();
            code.emitInvokeStatic(method);
            return;
          }
      }
    // Arrange the stack as (..., consumer, value) before the invoke.
    if (consumerPushed >= 0)
      {
        if (methodClass == typeSequences)
          throw new InternalError();
      }
    else if (methodClass == typeSequences)
      {
        code.emitLoad(consumer);
      }
    else if (islong)
      {
        // Two-slot value: stash it in a temp so the consumer can be
        // loaded underneath it (swap only works on single slots).
        code.pushScope();
        Variable temp = code.addLocal(implType);
        code.emitStore(temp);
        code.emitLoad(consumer);
        code.emitLoad(temp);
        code.popScope();
      }
    else
      {
        code.emitLoad(consumer);
        code.emitSwap();
      }
    if (methodClass == typeSequences)
      {
        // Sequences helpers take (value, consumer) — 2 arguments.
        method = methodClass.getDeclaredMethod(methodName, 2);
      }
    else if (method == null && methodName != null)
      {
        Type[] methodArgs = { methodArg };
        method = methodClass.getDeclaredMethod(methodName, methodArgs);
      }
    if (method != null)
      code.emitInvoke(method);
    if (sig == 'C')
      code.emitPop(1); // Pop consumer result.
  }

  public boolean compileWrite (Expression exp, Compilation comp)
  {
    Type stackType = exp.getType();
    Type implType = stackType.getImplementationType();
    // Types that are written via a 1-argument Consumer method (plain
    // primitives or known single items) can load the consumer first and
    // skip the stack rearrangement in compileFromStack.
    if ((implType instanceof PrimType && ! implType.isVoid()
         && stackType != LangPrimType.characterType
         && stackType != LangPrimType.characterOrEofType
         && stackType != LangPrimType.unsignedLongType
         && stackType != LangPrimType.unsignedIntType)
	|| gnu.kawa.reflect.OccurrenceType.itemCountIsOne(implType))
      {
	// Optimization to avoid a 'swap'.
	comp.getCode().emitLoad(this.consumer);
        Type ttype = this.type;
	exp.compile(comp, StackTarget.getInstance(ttype));
	compileFromStack(comp, ttype, 1);
	return true;
      }
    return false;
  }

  public Type getType() { return type; }

  public static final ClassType typeSequences
    = ClassType.make("gnu.lists.Sequences");
}
/* * Copyright 2016 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config.materials.git; import com.thoughtworks.go.config.materials.ScmMaterial; import com.thoughtworks.go.config.materials.ScmMaterialConfig; import com.thoughtworks.go.config.materials.SubprocessExecutionContext; import com.thoughtworks.go.domain.MaterialInstance; import com.thoughtworks.go.domain.materials.*; import com.thoughtworks.go.domain.materials.git.GitCommand; import com.thoughtworks.go.domain.materials.git.GitMaterialInstance; import com.thoughtworks.go.domain.materials.svn.MaterialUrl; import com.thoughtworks.go.server.transaction.TransactionSynchronizationManager; import com.thoughtworks.go.util.GoConstants; import com.thoughtworks.go.util.StringUtil; import com.thoughtworks.go.util.command.InMemoryStreamConsumer; import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer; import com.thoughtworks.go.util.command.UrlArgument; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.math.NumberUtils; import org.apache.log4j.Logger; import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationAdapter; import java.io.File; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.thoughtworks.go.util.ExceptionUtils.bomb; import static com.thoughtworks.go.util.ExceptionUtils.bombIfFailedToRunCommandLine; 
import static com.thoughtworks.go.util.FileUtil.createParentFolderIfNotExist; import static com.thoughtworks.go.util.FileUtil.deleteDirectoryNoisily; import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer; import static java.lang.String.format; /** * Understands configuration for git version control */ public class GitMaterial extends ScmMaterial { private static final Logger LOG = Logger.getLogger(GitMaterial.class); public static final int UNSHALLOW_TRYOUT_STEP = 100; public static final int DEFAULT_SHALLOW_CLONE_DEPTH = 2; private UrlArgument url; private String branch = GitMaterialConfig.DEFAULT_BRANCH; private boolean shallowClone = false; private String submoduleFolder; //TODO: use iBatis to set the type for us, and we can get rid of this field. public static final String TYPE = "GitMaterial"; private static final Pattern GIT_VERSION_PATTERN = Pattern.compile(".*\\s+(\\d(\\.\\d)+).*"); private static final String ERR_GIT_NOT_FOUND = "Failed to find 'git' on your PATH. Please ensure 'git' is executable by the Go Server and on the Go Agents where this material will be used."; public static final String ERR_GIT_OLD_VERSION = "Please install Git-core 1.6 or above. 
"; public GitMaterial(String url) { super(TYPE); this.url = new UrlArgument(url); } public GitMaterial(String url, boolean shallowClone) { this(url, null, null, shallowClone); } public GitMaterial(String url, String branch) { this(url); if (branch != null) { this.branch = branch; } } public GitMaterial(String url, String branch, String folder) { this(url, branch); this.folder = folder; } public GitMaterial(String url, String branch, String folder, Boolean shallowClone) { this(url, branch, folder); if (shallowClone != null) { this.shallowClone = shallowClone; } } public GitMaterial(GitMaterialConfig config) { this(config.getUrl(), config.getBranch(), config.getFolder(), config.isShallowClone()); this.autoUpdate = config.getAutoUpdate(); this.filter = config.rawFilter(); this.name = config.getName(); this.submoduleFolder = config.getSubmoduleFolder(); this.invertFilter = config.getInvertFilter(); } @Override public MaterialConfig config() { return new GitMaterialConfig(url, branch, submoduleFolder, autoUpdate, filter, invertFilter, folder, name, shallowClone); } public List<Modification> latestModification(File baseDir, final SubprocessExecutionContext execCtx) { return getGit(baseDir, DEFAULT_SHALLOW_CLONE_DEPTH, execCtx).latestModification(); } public List<Modification> modificationsSince(File baseDir, Revision revision, final SubprocessExecutionContext execCtx) { GitCommand gitCommand = getGit(baseDir, DEFAULT_SHALLOW_CLONE_DEPTH, execCtx); if(!execCtx.isGitShallowClone()) { fullyUnshallow(gitCommand, ProcessOutputStreamConsumer.inMemoryConsumer()); } if (gitCommand.containsRevisionInBranch(revision)) { return gitCommand.modificationsSince(revision); } else { return latestModification(baseDir, execCtx); } } public MaterialInstance createMaterialInstance() { return new GitMaterialInstance(url.forCommandline(), branch, submoduleFolder, UUID.randomUUID().toString()); } @Override protected void appendCriteria(Map<String, Object> parameters) { 
parameters.put(ScmMaterialConfig.URL, url.forCommandline()); parameters.put("branch", branch); } @Override protected void appendAttributes(Map<String, Object> parameters) { parameters.put("url", url); parameters.put("branch", branch); parameters.put("shallowClone", shallowClone); } public void updateTo(ProcessOutputStreamConsumer outputStreamConsumer, File baseDir, RevisionContext revisionContext, final SubprocessExecutionContext execCtx) { Revision revision = revisionContext.getLatestRevision(); try { outputStreamConsumer.stdOutput(format("[%s] Start updating %s at revision %s from %s", GoConstants.PRODUCT_NAME, updatingTarget(), revision.getRevision(), url)); File workingDir = execCtx.isServer() ? baseDir : workingdir(baseDir); GitCommand git = git(outputStreamConsumer, workingDir, revisionContext.numberOfModifications() + 1, execCtx); git.fetch(outputStreamConsumer); unshallowIfNeeded(git, outputStreamConsumer, revisionContext.getOldestRevision(), baseDir); git.resetWorkingDir(outputStreamConsumer, revision); outputStreamConsumer.stdOutput(format("[%s] Done.\n", GoConstants.PRODUCT_NAME)); } catch (Exception e) { bomb(e); } } public ValidationBean checkConnection(final SubprocessExecutionContext execCtx) { try { GitCommand.checkConnection(url, branch, execCtx.getDefaultEnvironmentVariables()); return ValidationBean.valid(); } catch (Exception e) { try { return handleException(e, GitCommand.version(execCtx.getDefaultEnvironmentVariables())); } catch (Exception notInstallGitException) { return ValidationBean.notValid(ERR_GIT_NOT_FOUND); } } } public ValidationBean handleException(Exception e, String gitVersionConsoleOut) { ValidationBean defaultResponse = ValidationBean.notValid(e.getMessage()); try { if (!isVersionOnedotSixOrHigher(gitVersionConsoleOut)) { return ValidationBean.notValid(ERR_GIT_OLD_VERSION + gitVersionConsoleOut); } else { return defaultResponse; } } catch (Exception ex) { return defaultResponse; } } boolean isVersionOnedotSixOrHigher(String 
hgout) { String hgVersion = parseGitVersion(hgout); Float aFloat = NumberUtils.createFloat(hgVersion.subSequence(0, 3).toString()); return aFloat >= 1.6; } private String parseGitVersion(String hgOut) { String[] lines = hgOut.split("\n"); String firstLine = lines[0]; Matcher m = GIT_VERSION_PATTERN.matcher(firstLine); if (m.matches()) { return m.group(1); } else { throw bomb("can not parse hgout : " + hgOut); } } private GitCommand getGit(File workingdir, int preferredCloneDepth, SubprocessExecutionContext executionContext) { InMemoryStreamConsumer output = inMemoryConsumer(); try { return git(output, workingdir, preferredCloneDepth, executionContext); } catch (Exception e) { throw bomb(e.getMessage() + " " + output.getStdError(), e); } } private GitCommand git(ProcessOutputStreamConsumer outputStreamConsumer, final File workingFolder, int preferredCloneDepth, SubprocessExecutionContext executionContext) throws Exception { if (isSubmoduleFolder()) { return new GitCommand(getFingerprint(), new File(workingFolder.getPath()), GitMaterialConfig.DEFAULT_BRANCH, true, executionContext.getDefaultEnvironmentVariables()); } GitCommand gitCommand = new GitCommand(getFingerprint(), workingFolder, getBranch(), false, executionContext.getDefaultEnvironmentVariables()); if (!isGitRepository(workingFolder) || isRepositoryChanged(gitCommand, workingFolder)) { if (LOG.isDebugEnabled()) { LOG.debug("Invalid git working copy or repository changed. 
Delete folder: " + workingFolder); } deleteDirectoryNoisily(workingFolder); } createParentFolderIfNotExist(workingFolder); if (!workingFolder.exists()) { TransactionSynchronizationManager txManager = new TransactionSynchronizationManager(); if (txManager.isActualTransactionActive()) { txManager.registerSynchronization(new TransactionSynchronizationAdapter() { @Override public void afterCompletion(int status) { if (status != TransactionSynchronization.STATUS_COMMITTED) { FileUtils.deleteQuietly(workingFolder); } } }); } int cloneDepth = shallowClone ? preferredCloneDepth : Integer.MAX_VALUE; int returnValue; if(executionContext.isServer()) { returnValue = gitCommand.cloneWithNoCheckout(outputStreamConsumer, url.forCommandline()); } else { returnValue = gitCommand.clone(outputStreamConsumer, url.forCommandline(), cloneDepth); } bombIfFailedToRunCommandLine(returnValue, "Failed to run git clone command"); } return gitCommand; } // Unshallow local repo to include a revision operating on via two step process: // First try to fetch forward 100 level with "git fetch -depth 100". If revision still missing, // unshallow the whole repo with "git fetch --2147483647". 
private void unshallowIfNeeded(GitCommand gitCommand, ProcessOutputStreamConsumer streamConsumer, Revision revision, File workingDir) { if (gitCommand.isShallow() && !gitCommand.containsRevisionInBranch(revision)) { gitCommand.unshallow(streamConsumer, UNSHALLOW_TRYOUT_STEP); if (gitCommand.isShallow() && !gitCommand.containsRevisionInBranch(revision)) { fullyUnshallow(gitCommand, streamConsumer); } } } private void fullyUnshallow(GitCommand gitCommand, ProcessOutputStreamConsumer streamConsumer) { if(gitCommand.isShallow()) { gitCommand.unshallow(streamConsumer, Integer.MAX_VALUE); } } private boolean isSubmoduleFolder() { return getSubmoduleFolder() != null; } private boolean isGitRepository(File workingFolder) { return new File(workingFolder, ".git").isDirectory(); } private boolean isRepositoryChanged(GitCommand command, File workingDirectory) { UrlArgument currentWorkingUrl = command.workingRepositoryUrl(); if (LOG.isTraceEnabled()) { LOG.trace("Current repository url of [" + workingDirectory + "]: " + currentWorkingUrl); LOG.trace("Target repository url: " + url); } return !MaterialUrl.sameUrl(url.forCommandline(), currentWorkingUrl.forCommandline()) || !isBranchEqual(command) || (!shallowClone && command.isShallow()); } private boolean isBranchEqual(GitCommand command) { return branchWithDefault().equals(command.getCurrentBranch()); } /** * @deprecated Breaks encapsulation really badly. But we need it for IBatis :-( */ public String getUrl() { return url.forCommandline(); } public UrlArgument getUrlArgument() { return url; } public String getLongDescription() { return String.format("URL: %s, Branch: %s", url.forDisplay(), branch); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } if (!super.equals(o)) { return false; } GitMaterial that = (GitMaterial) o; if (branch != null ? 
!branch.equals(that.branch) : that.branch != null) { return false; } if (submoduleFolder != null ? !submoduleFolder.equals(that.submoduleFolder) : that.submoduleFolder != null) { return false; } if (url != null ? !url.equals(that.url) : that.url != null) { return false; } return true; } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + (url != null ? url.hashCode() : 0); result = 31 * result + (branch != null ? branch.hashCode() : 0); result = 31 * result + (submoduleFolder != null ? submoduleFolder.hashCode() : 0); return result; } protected String getLocation() { return url.forDisplay(); } public String getTypeForDisplay() { return "Git"; } public String getBranch() { return this.branch; } public String getSubmoduleFolder() { return submoduleFolder; } public void setSubmoduleFolder(String submoduleFolder) { this.submoduleFolder = submoduleFolder; } public String getUserName() { return null; } public String getPassword() { return null; } public String getEncryptedPassword() { return null; } public boolean isCheckExternals() { return false; } public boolean isShallowClone() { return shallowClone; } @Override public String getShortRevision(String revision) { if (revision == null) return null; if (revision.length() < 7) return revision; return revision.substring(0, 7); } @Override public Map<String, Object> getAttributes(boolean addSecureFields) { Map<String, Object> materialMap = new HashMap<>(); materialMap.put("type", "git"); Map<String, Object> configurationMap = new HashMap<>(); if (addSecureFields) { configurationMap.put("url", url.forCommandline()); } else { configurationMap.put("url", url.forDisplay()); } configurationMap.put("branch", branch); configurationMap.put("shallow-clone", shallowClone); materialMap.put("git-configuration", configurationMap); return materialMap; } public Class getInstanceType() { return GitMaterialInstance.class; } @Override public String toString() { return "GitMaterial{" + "url=" + url + ", 
branch='" + branch + '\'' + ", submoduleFolder='" + submoduleFolder + '\'' + ", shallowClone=" + shallowClone + '}'; } @Override public void updateFromConfig(MaterialConfig materialConfig) { super.updateFromConfig(materialConfig); this.shallowClone = ((GitMaterialConfig) materialConfig).isShallowClone(); } public GitMaterial withShallowClone(boolean value) { GitMaterialConfig config = (GitMaterialConfig) config(); config.setShallowClone(value); return new GitMaterial(config); } public String branchWithDefault() { return StringUtil.isBlank(branch) ? GitMaterialConfig.DEFAULT_BRANCH : branch; } }
/* TarToSeqFile.java - Convert tar files into Hadoop SequenceFiles. * * Copyright (C) 2008 Stuart Sierra * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * http:www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.altlaw.hadoop; /* From ant.jar, http://ant.apache.org/ */ import org.apache.tools.bzip2.CBZip2InputStream; import org.apache.tools.tar.TarEntry; import org.apache.tools.tar.TarInputStream; /* From hadoop-*-core.jar, http://hadoop.apache.org/ * Developed with Hadoop 0.16.3. */ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.IOException; import java.util.zip.GZIPInputStream; /** Utility to convert tar files into Hadoop SequenceFiles. The tar * files may be compressed with GZip or BZip2. The output * SequenceFile will be stored with BLOCK compression. Each key (a * Text) in the SequenceFile is the name of the file in the tar * archive, and its value (a BytesWritable) is the contents of the * file. * * <p>This class can be run at the command line; run without * arguments to get usage instructions. 
* * @author Stuart Sierra (mail@stuartsierra.com) * @see <a href="http://hadoop.apache.org/core/docs/r0.16.3/api/org/apache/hadoop/io/SequenceFile.html">SequenceFile</a> * @see <a href="http://hadoop.apache.org/core/docs/r0.16.3/api/org/apache/hadoop/io/Text.html">Text</a> * @see <a href="http://hadoop.apache.org/core/docs/r0.16.3/api/org/apache/hadoop/io/BytesWritable.html">BytesWritable</a> */ public class TarToSeqFile { private File inputFile; private File outputFile; private LocalSetup setup; private boolean activate_block_compression = false; public void activate_block_compression(boolean compression_switch) { activate_block_compression = compression_switch; } protected static final Logger logger = Logger.getLogger(TarToSeqFile.class); /** Sets up Configuration and LocalFileSystem instances for * Hadoop. Throws Exception if they fail. Does not load any * Hadoop XML configuration files, just sets the minimum * configuration necessary to use the local file system. */ public TarToSeqFile() throws Exception { setup = new LocalSetup(); } /** Sets the input tar file. */ public void setInput(File inputFile) { this.inputFile = inputFile; } /** Sets the output SequenceFile. */ public void setOutput(File outputFile) { this.outputFile = outputFile; } /** Performs the conversion. 
*/ public void execute() throws Exception { TarInputStream input = null; SequenceFile.Writer output = null; try { input = openInputFile(); output = openOutputFile(activate_block_compression); TarEntry entry; while ((entry = input.getNextEntry()) != null) { if (entry.isDirectory()) { logger.info("Skipping directory "+entry.getName() ); continue; } logger.info("Processing file "+entry.getName() +" size:"+entry.getSize()); String filename = entry.getName(); byte[] data = TarToSeqFile.getBytes(input, entry.getSize()); Text key = new Text(filename); Text value = new Text(data); output.append(key, value); } } finally { if (input != null) { input.close(); } if (output != null) { output.close(); } } } private TarInputStream openInputFile() throws Exception { InputStream fileStream = new FileInputStream(inputFile); String name = inputFile.getName(); InputStream theStream = null; if (name.endsWith(".tar.gz") || name.endsWith(".tgz")) { theStream = new GZIPInputStream(fileStream); } else if (name.endsWith(".tar.bz2") || name.endsWith(".tbz2")) { /* Skip the "BZ" header added by bzip2. */ fileStream.skip(2); theStream = new CBZip2InputStream(fileStream); } else { /* Assume uncompressed tar file. */ theStream = fileStream; } return new TarInputStream(theStream); } private SequenceFile.Writer openOutputFile(boolean enable_compression) throws Exception { Path outputPath = new Path(outputFile.getAbsolutePath()); if(!enable_compression) return SequenceFile.createWriter(setup.getLocalFileSystem(), setup.getConf(), outputPath, Text.class, Text.class, SequenceFile.CompressionType.NONE); else return SequenceFile.createWriter(setup.getLocalFileSystem(), setup.getConf(), outputPath, Text.class, Text.class, SequenceFile.CompressionType.RECORD); } /** Reads all bytes from the current entry in the tar file and * returns them as a byte array. 
* * @see http://www.exampledepot.com/egs/java.io/File2ByteArray.html */ private static byte[] getBytes(TarInputStream input, long size) throws Exception { if (size > Integer.MAX_VALUE) { throw new Exception("A file in the tar archive is too large."); } int length = (int)size; byte[] bytes = new byte[length]; int offset = 0; int numRead = 0; while (offset < bytes.length && (numRead = input.read(bytes, offset, bytes.length - offset)) >= 0) { offset += numRead; } if (offset < bytes.length) { throw new IOException("A file in the tar archive could not be completely read."); } return bytes; } /** Runs the converter at the command line. */ public static void main(String[] args) { if (args.length > 3) { exitWithHelp(); } try { TarToSeqFile me = new TarToSeqFile(); if (args[0].compareTo("-c") == 0){ logger.info("Turning on compression"); me.setInput(new File(args[1])); me.setOutput(new File(args[2])); me.activate_block_compression(true); } else { logger.info("Arg[0] is "+args[0]); logger.info("No compression will be used"); me.setInput(new File(args[0])); me.setOutput(new File(args[1])); me.activate_block_compression(false); } me.execute(); } catch (Exception e) { e.printStackTrace(); exitWithHelp(); } } public static void exitWithHelp() { System.err.println("Usage: java org.altlaw.hadoop.TarToSeqFile [-c] <tarfile> <output>\n\n" + "<tarfile> may be GZIP or BZIP2 compressed, must have a\n" + "recognizable extension .tar, .tar.gz, .tgz, .tar.bz2, or .tbz2."+ "\nUse -c option to enable record compression"); System.exit(1); } }
package com.jlabar.runstats;

import java.util.Arrays;
import java.util.List;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PointF;
import android.view.View;

import com.jlabar.runstats.domain.accumulators.IAccumulator;
import com.jlabar.runstats.domain.groups.IGroup;
import com.jlabar.runstats.domain.order.IGroupComparator;
import com.jlabar.runstats.domain.sorters.IGroupSorter;

/**
 * Draws accumulated group values as a chart with labelled X/Y axes and
 * unit-of-measurement captions. Call {@link #update} to supply data; the
 * view redraws itself on the next layout pass.
 */
public class BarChart extends View {

    // Paint for the unit-of-measurement captions along the axes.
    private final Paint _unitOfMeasurementPaint = new Paint() {
        {
            setTextSize(22);
            setColor(Color.WHITE);
        }
    };

    // Paint for the individual axis tick labels.
    private final Paint _axisValuePaint = new Paint() {
        {
            setTextSize(16);
            setColor(Color.WHITE);
        }
    };

    // Paint for the axis lines themselves.
    private final Paint _axisLinePaint = new Paint() {
        {
            setColor(Color.WHITE);
            setStrokeCap(Cap.ROUND);
            setStrokeJoin(Join.BEVEL);
            setStrokeWidth(4.0f);
            setStyle(Style.STROKE);
        }
    };

    private final float _padding = 6.0f;

    private IAccumulator _accumulator;
    private IGroupSorter _sorter;
    private IGroupComparator _orderer;
    private List<IGroup> _groups;

    // Corner points of the Y-axis label panel (left strip) and the X-axis
    // label panel (bottom strip); recomputed on every draw.
    private final PointF _yLabelPanelTopLeft = new PointF(_padding, _padding);
    private final PointF _yLabelPanelTopRight = new PointF(_padding, _padding);
    private final PointF _yLabelPanelBottomLeft = new PointF(_padding, _padding);
    private final PointF _yLabelPanelBottomRight = new PointF(_padding, _padding);
    private final PointF _xLabelPanelTopLeft = new PointF(_padding, _padding);
    private final PointF _xLabelPanelTopRight = new PointF(_padding, _padding);
    private final PointF _xLabelPanelBottomLeft = new PointF(_padding, _padding);
    private final PointF _xLabelPanelBottomRight = new PointF(_padding, _padding);

    // Reused for the axis poly-line to avoid per-frame allocation.
    private final Path _path = new Path();

    public BarChart(Context context) {
        super(context);
        _accumulator = null;
        _sorter = null;
        _groups = null;
    }

    /** Supplies new data/strategies and schedules a redraw. */
    public void update(
            IAccumulator currentAccumulator,
            IGroupSorter currentSorter,
            IGroupComparator currentOrderer,
            List<IGroup> currentGroups) {
        _accumulator = currentAccumulator;
        _sorter = currentSorter;
        _groups = currentGroups;
        _orderer = currentOrderer;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        canvas.drawColor(Color.BLACK);
        if (_sorter == null || _accumulator == null || _groups == null || _orderer == null) {
            return;  // nothing to draw until update() has been called
        }
        Util.sort(_groups, _orderer);
        double[] yAxisSteps = getStepsForY();
        String[] xAxisSteps = getStepsForX();
        calculateAxisPanels(canvas, yAxisSteps, xAxisSteps);
        writeYAxisLabels(canvas, yAxisSteps);
        writeXAxisLabels(canvas, xAxisSteps);
        drawAxisLines(canvas);
        writeUnitOfMeasurementLabels(canvas);
        // FIX: the original ended with an unmatched canvas.restore() here. Every
        // helper above saves/restores in balanced pairs, so that extra restore
        // popped a state this method never pushed, underflowing the canvas
        // save stack. It has been removed.
    }

    /** Draws the unit-of-measurement captions: horizontally under the X axis,
     * rotated 90 degrees beside the Y axis. (Renamed from the original
     * writeUintOfMeasurementLabels typo; the method is private.) */
    private void writeUnitOfMeasurementLabels(Canvas canvas) {
        float labelLength = _unitOfMeasurementPaint.measureText(_sorter.getDisplayUnitOfMeasurement());
        float labelX = (((_xLabelPanelTopRight.x - _xLabelPanelTopLeft.x) / 2.0f)
                + _xLabelPanelTopLeft.x - (labelLength / 2.0f));
        float labelY = canvas.getHeight() - _padding;
        canvas.drawText(_sorter.getDisplayUnitOfMeasurement(), labelX, labelY, _unitOfMeasurementPaint);

        labelLength = _unitOfMeasurementPaint.measureText(_accumulator.getDisplayUnitOfMeasurement());
        labelX = _padding + _unitOfMeasurementPaint.getTextSize();
        labelY = _yLabelPanelTopLeft.y
                - (((_yLabelPanelTopLeft.y - _yLabelPanelBottomLeft.y) / 2.0f) - (labelLength / 2.0f));
        canvas.save();
        canvas.rotate(-90, labelX, labelY);
        canvas.drawText(_accumulator.getDisplayUnitOfMeasurement(), labelX, labelY, _unitOfMeasurementPaint);
        canvas.restore();
    }

    /** Draws the Y axis and X axis as one connected poly-line. */
    private void drawAxisLines(Canvas canvas) {
        _path.reset();
        _path.moveTo(_yLabelPanelTopRight.x, _yLabelPanelTopRight.y);
        _path.lineTo(_yLabelPanelBottomRight.x, _yLabelPanelBottomRight.y);
        _path.lineTo(_xLabelPanelTopRight.x, _xLabelPanelTopRight.y);
        canvas.drawPath(_path, _axisLinePaint);
    }

    /** Draws each X-axis label rotated 90 degrees, evenly spaced across the panel. */
    private void writeXAxisLabels(Canvas canvas, String[] xAxisSteps) {
        final float xLabelPanelWidth = (_xLabelPanelTopRight.x - _xLabelPanelTopLeft.x);
        final float pixelsPerLabelValue = xLabelPanelWidth / (float) xAxisSteps.length;
        float xLabelIter = _xLabelPanelTopLeft.x + (pixelsPerLabelValue / 2.0f)
                - (_axisValuePaint.getTextSize() / 2.0f);
        for (String s : xAxisSteps) {
            float labelHeight = _axisValuePaint.measureText(s);
            float labelY = _xLabelPanelTopRight.y + _padding + labelHeight;
            canvas.save();
            canvas.rotate(-90, xLabelIter, labelY);
            canvas.drawText(s, xLabelIter, labelY, _axisValuePaint);
            canvas.restore();
            xLabelIter += pixelsPerLabelValue;
        }
    }

    /** Draws the Y-axis value labels, right-aligned against the axis line. */
    private void writeYAxisLabels(Canvas canvas, double[] yAxisSteps) {
        final float yLabelPanelHeight = (_yLabelPanelTopRight.y - _yLabelPanelBottomRight.y);
        final float pixelsPerLabelValue = yLabelPanelHeight / (float) yAxisSteps.length;
        float yLabelIter = _yLabelPanelBottomRight.y + (pixelsPerLabelValue / 2.0f)
                + (_axisValuePaint.getTextSize() / 2.0f);
        for (double d : yAxisSteps) {
            String value = _accumulator.getDisplayValue(d);
            float valueWidth = _axisValuePaint.measureText(value);
            float xLabelValue = _yLabelPanelTopRight.x - valueWidth - _padding;
            canvas.drawText(value, xLabelValue, yLabelIter, _axisValuePaint);
            yLabelIter += pixelsPerLabelValue;
        }
    }

    /** Sizes the Y-axis (left) and X-axis (bottom) label panels from the
     * widest label in each direction plus the caption strip. */
    private void calculateAxisPanels(Canvas canvas, double[] yAxisSteps, String[] xAxisSteps) {
        final float unitOfMeasLabelHeight = _padding + _padding + _unitOfMeasurementPaint.getTextSize();
        float xLabelPanelHeight = 0.0f;
        float yLabelPanelWidth = 0.0f;
        for (double d : yAxisSteps) {
            float width = _padding + _padding + _axisValuePaint.measureText(_accumulator.getDisplayValue(d));
            if (width > yLabelPanelWidth) {
                yLabelPanelWidth = width;
            }
        }
        // X labels are drawn rotated, so their measured width becomes the panel height.
        for (String s : xAxisSteps) {
            float width = _padding + _padding + _axisValuePaint.measureText(s);
            if (width > xLabelPanelHeight) {
                xLabelPanelHeight = width;
            }
        }
        _yLabelPanelTopLeft.x = _padding + _padding + _axisValuePaint.getTextSize();
        _yLabelPanelTopRight.x = _yLabelPanelTopLeft.x + yLabelPanelWidth;
        _yLabelPanelBottomLeft.x = _yLabelPanelTopLeft.x;
        _yLabelPanelBottomLeft.y = canvas.getHeight() - unitOfMeasLabelHeight - xLabelPanelHeight;
        _yLabelPanelBottomRight.x = _yLabelPanelTopRight.x;
        _yLabelPanelBottomRight.y = _yLabelPanelBottomLeft.y;
        _xLabelPanelTopLeft.x = _yLabelPanelBottomRight.x;
        _xLabelPanelTopLeft.y = _yLabelPanelBottomRight.y;
        _xLabelPanelTopRight.x = canvas.getWidth() - _padding;
        _xLabelPanelTopRight.y = _xLabelPanelTopLeft.y;
        _xLabelPanelBottomLeft.x = _xLabelPanelTopLeft.x;
        _xLabelPanelBottomLeft.y = canvas.getHeight() - unitOfMeasLabelHeight;
        _xLabelPanelBottomRight.x = _xLabelPanelTopRight.x;
        _xLabelPanelBottomRight.y = _xLabelPanelBottomLeft.y;
    }

    /** Picks up to 18 group descriptions, sampled evenly across the group list. */
    private String[] getStepsForX() {
        final int steps = 18;
        if (steps > _groups.size()) {
            String[] values = new String[_groups.size()];
            for (int i = 0; i < values.length; i++) {
                values[i] = _groups.get(i).getDescription();
            }
            return values;
        } else {
            // FIX: the original computed `_groups.size() / (steps - 1)` with
            // INTEGER division before assigning to double, truncating the stride.
            // That skewed the sampling toward the front of the list and usually
            // never selected the final group. Use floating-point division over
            // the index range [0, size-1] so the first and last groups are
            // always included.
            double increment = (_groups.size() - 1) / (double) (steps - 1);
            String[] values = new String[steps];
            for (int i = 0; i < values.length; i++) {
                int index = (int) Math.round(i * increment);
                values[i] = _groups.get(Math.min(index, _groups.size() - 1)).getDescription();
            }
            return values;
        }
    }

    /** Builds the sorted Y-axis tick values: the raw group values when few,
     * otherwise 10 evenly spaced values from 0 to the maximum. */
    private double[] getStepsForY() {
        final int steps = 9;
        if (steps >= _groups.size()) {
            double[] values = new double[_groups.size()];
            for (int i = 0; i < values.length; i++) {
                values[i] = _groups.get(i).getValue();
            }
            Arrays.sort(values);
            return values;
        } else {
            // FIX: Double.MIN_VALUE is the smallest POSITIVE double, not the most
            // negative one, so the original max-search was wrong whenever every
            // group value was <= 0. NEGATIVE_INFINITY is the correct identity.
            double maxValue = Double.NEGATIVE_INFINITY;
            for (IGroup group : _groups) {
                if (group.getValue() > maxValue) {
                    maxValue = group.getValue();
                }
            }
            double[] values = new double[steps + 1];
            values[0] = 0.0;
            for (int i = 1; i < steps; i++) {
                values[i] = maxValue * ((double) i / (double) steps);
            }
            values[steps] = maxValue;
            Arrays.sort(values);
            return values;
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode.ha; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.net.URI; import java.util.Iterator; import java.util.List; import java.util.function.Supplier; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.HAUtil; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster; import org.apache.hadoop.hdfs.qjournal.server.JournalTestUtil; import org.apache.hadoop.hdfs.server.namenode.NNStorage; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Lists; import static org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter.getFileInfo; import static org.apache.hadoop.hdfs.qjournal.client.QuorumJournalManager.QJM_RPC_MAX_TXNS_KEY; import org.junit.After; import org.junit.Before; import org.junit.Test; 
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;

/**
 * Test cases for in progress tailing edit logs by
 * the standby node.
 */
public class TestStandbyInProgressTail {
  private static final Logger LOG =
      LoggerFactory.getLogger(TestStandbyInProgressTail.class);
  private Configuration conf;
  private MiniQJMHACluster qjmhaCluster;
  private MiniDFSCluster cluster;
  // nn0 is the initial active NameNode, nn1 the standby that does the tailing.
  private NameNode nn0;
  private NameNode nn1;

  @Before
  public void startUp() throws IOException {
    conf = new Configuration();
    // Set period of tail edits to a large value (20 mins) for test purposes,
    // so automatic tailing never fires and each test drives doTailEdits() itself
    conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 20 * 60);
    conf.setBoolean(DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_KEY, true);
    conf.setInt(DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
        500);
    // Set a very small limit of transactions per journal RPC call, so that
    // tailing a segment requires multiple RPC round-trips
    conf.setInt(QJM_RPC_MAX_TXNS_KEY, 3);
    HAUtil.setAllowStandbyReads(conf, true);
    qjmhaCluster = new MiniQJMHACluster.Builder(conf).build();
    cluster = qjmhaCluster.getDfsCluster();

    // Get NameNodes from the cluster for manual control later in the tests
    nn0 = cluster.getNameNode(0);
    nn1 = cluster.getNameNode(1);
  }

  @After
  public void tearDown() throws IOException {
    if (qjmhaCluster != null) {
      qjmhaCluster.shutdown();
    }
  }

  @Test
  public void testDefault() throws Exception {
    // Rebuild the cluster with in-progress tailing DISABLED to verify the
    // default (non-in-progress) behavior.
    if (qjmhaCluster != null) {
      qjmhaCluster.shutdown();
    }
    conf = new Configuration();
    // Set period of tail edits to a large value (20 mins) for test purposes
    conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 20 * 60);
    conf.setBoolean(DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_KEY, false);
    HAUtil.setAllowStandbyReads(conf, true);
    qjmhaCluster = new MiniQJMHACluster.Builder(conf).build();
    cluster = qjmhaCluster.getDfsCluster();

    try {
      // During HA startup, both nodes should be in
      // standby and we shouldn't have any edits files
      // in any edits directory!
      List<URI> allDirs = Lists.newArrayList();
      allDirs.addAll(cluster.getNameDirs(0));
      allDirs.addAll(cluster.getNameDirs(1));
      assertNoEditFiles(allDirs);

      // Set the first NN to active, make sure it creates edits
      // in its own dirs and the shared dir. The standby
      // should still have no edits!
      cluster.transitionToActive(0);

      assertEditFiles(cluster.getNameDirs(0),
          NNStorage.getInProgressEditsFileName(1));
      assertNoEditFiles(cluster.getNameDirs(1));

      cluster.getNameNode(0).getRpcServer().mkdirs("/test",
          FsPermission.createImmutable((short) 0755), true);

      cluster.getNameNode(1).getNamesystem().getEditLogTailer().doTailEdits();

      // StandbyNameNode should not finish tailing in-progress logs
      assertNull(getFileInfo(cluster.getNameNode(1),
          "/test", true, false, false));

      // Restarting the standby should not finalize any edits files
      // in the shared directory when it starts up!
      cluster.restartNameNode(1);

      assertEditFiles(cluster.getNameDirs(0),
          NNStorage.getInProgressEditsFileName(1));
      assertNoEditFiles(cluster.getNameDirs(1));

      // Additionally it should not have applied any in-progress logs
      // at start-up -- otherwise, it would have read half-way into
      // the current log segment, and on the next roll, it would have to
      // either replay starting in the middle of the segment (not allowed)
      // or double-replay the edits (incorrect).
      assertNull(getFileInfo(cluster.getNameNode(1),
          "/test", true, false, false));

      cluster.getNameNode(0).getRpcServer().mkdirs("/test2",
          FsPermission.createImmutable((short) 0755), true);

      // If we restart NN0, it'll come back as standby, and we can
      // transition NN1 to active and make sure it reads edits correctly.
      cluster.restartNameNode(0);
      cluster.transitionToActive(1);

      // NN1 should have both the edits that came before its restart,
      // and the edits that came after its restart.
      assertNotNull(getFileInfo(cluster.getNameNode(1),
          "/test", true, false, false));
      assertNotNull(getFileInfo(cluster.getNameNode(1),
          "/test2", true, false, false));
    } finally {
      if (qjmhaCluster != null) {
        qjmhaCluster.shutdown();
      }
    }
  }

  @Test
  public void testSetup() throws Exception {
    // During HA startup, both nodes should be in
    // standby and we shouldn't have any edits files
    // in any edits directory!
    List<URI> allDirs = Lists.newArrayList();
    allDirs.addAll(cluster.getNameDirs(0));
    allDirs.addAll(cluster.getNameDirs(1));
    assertNoEditFiles(allDirs);

    // Set the first NN to active, make sure it creates edits
    // in its own dirs and the shared dir. The standby
    // should still have no edits!
    cluster.transitionToActive(0);

    assertEditFiles(cluster.getNameDirs(0),
        NNStorage.getInProgressEditsFileName(1));
    assertNoEditFiles(cluster.getNameDirs(1));

    cluster.getNameNode(0).getRpcServer().mkdirs("/test",
        FsPermission.createImmutable((short) 0755), true);
    waitForFileInfo(nn1, "/test");

    // Restarting the standby should not finalize any edits files
    // in the shared directory when it starts up!
    cluster.restartNameNode(1);

    assertEditFiles(cluster.getNameDirs(0),
        NNStorage.getInProgressEditsFileName(1));
    assertNoEditFiles(cluster.getNameDirs(1));

    // Because we're using in-progress tailer, this should not be null
    assertNotNull(getFileInfo(cluster.getNameNode(1),
        "/test", true, false, false));

    cluster.getNameNode(0).getRpcServer().mkdirs("/test2",
        FsPermission.createImmutable((short) 0755), true);

    // If we restart NN0, it'll come back as standby, and we can
    // transition NN1 to active and make sure it reads edits correctly.
    cluster.restartNameNode(0);
    cluster.transitionToActive(1);

    // NN1 should have both the edits that came before its restart,
    // and the edits that came after its restart.
    assertNotNull(getFileInfo(cluster.getNameNode(1),
        "/test", true, false, false));
    assertNotNull(getFileInfo(cluster.getNameNode(1),
        "/test2", true, false, false));
  }

  @Test
  public void testHalfStartInProgressTail() throws Exception {
    // Set the first NN to active, make sure it creates edits
    // in its own dirs and the shared dir. The standby
    // should still have no edits!
    cluster.transitionToActive(0);

    assertEditFiles(cluster.getNameDirs(0),
        NNStorage.getInProgressEditsFileName(1));
    assertNoEditFiles(cluster.getNameDirs(1));

    cluster.getNameNode(0).getRpcServer().mkdirs("/test",
        FsPermission.createImmutable((short) 0755), true);
    // StandbyNameNode should tail the in-progress edit
    waitForFileInfo(nn1, "/test");

    // Create a new edit and finalized it
    cluster.getNameNode(0).getRpcServer().mkdirs("/test2",
        FsPermission.createImmutable((short) 0755), true);
    nn0.getRpcServer().rollEditLog();
    // StandbyNameNode should pick up the now-finalized edit when it tails
    // (waitForFileInfo triggers doTailEdits on the standby)
    waitForFileInfo(nn1, "/test2");

    // Create a new in-progress edit and let SBNN do the tail
    cluster.getNameNode(0).getRpcServer().mkdirs("/test3",
        FsPermission.createImmutable((short) 0755), true);

    // StandbyNameNode should tail the finalized edit and the new in-progress
    waitForFileInfo(nn1, "/test", "/test2", "/test3");
  }

  @Test
  public void testInitStartInProgressTail() throws Exception {
    // Set the first NN to active, make sure it creates edits
    // in its own dirs and the shared dir. The standby
    // should still have no edits!
    cluster.transitionToActive(0);

    assertEditFiles(cluster.getNameDirs(0),
        NNStorage.getInProgressEditsFileName(1));
    assertNoEditFiles(cluster.getNameDirs(1));

    cluster.getNameNode(0).getRpcServer().mkdirs("/test",
        FsPermission.createImmutable((short) 0755), true);
    cluster.getNameNode(0).getRpcServer().mkdirs("/test2",
        FsPermission.createImmutable((short) 0755), true);
    nn0.getRpcServer().rollEditLog();
    cluster.getNameNode(0).getRpcServer().mkdirs("/test3",
        FsPermission.createImmutable((short) 0755), true);

    // Nothing is visible on the standby until it tails for the first time
    assertNull(getFileInfo(nn1, "/test", true, false, false));
    assertNull(getFileInfo(nn1, "/test2", true, false, false));
    assertNull(getFileInfo(nn1, "/test3", true, false, false));

    // StandbyNameNode should tail the finalized edit and the new in-progress
    waitForFileInfo(nn1, "/test", "/test2", "/test3");
  }

  @Test
  public void testNewStartInProgressTail() throws Exception {
    cluster.transitionToActive(0);

    assertEditFiles(cluster.getNameDirs(0),
        NNStorage.getInProgressEditsFileName(1));
    assertNoEditFiles(cluster.getNameDirs(1));

    cluster.getNameNode(0).getRpcServer().mkdirs("/test",
        FsPermission.createImmutable((short) 0755), true);
    cluster.getNameNode(0).getRpcServer().mkdirs("/test2",
        FsPermission.createImmutable((short) 0755), true);
    waitForFileInfo(nn1, "/test", "/test2");
    nn0.getRpcServer().rollEditLog();
    cluster.getNameNode(0).getRpcServer().mkdirs("/test3",
        FsPermission.createImmutable((short) 0755), true);

    // StandbyNameNode should tail the finalized edit and the new in-progress
    waitForFileInfo(nn1, "/test", "/test2", "/test3");
  }

  /**
   * Test that Standby Node tails multiple segments while catching up
   * during the transition to Active.
   */
  @Test
  public void testUndertailingWhileFailover() throws Exception {
    cluster.transitionToActive(0);
    cluster.waitActive(0);

    String p = "/testFailoverWhileTailingWithoutCache/";
    mkdirs(nn0, p + 0, p + 1, p + 2, p + 3, p + 4);
    nn0.getRpcServer().rollEditLog(); // create segment 1
    mkdirs(nn0, p + 5, p + 6, p + 7, p + 8, p + 9);
    nn0.getRpcServer().rollEditLog(); // create segment 2
    mkdirs(nn0, p + 10, p + 11, p + 12, p + 13, p + 14);
    nn0.getRpcServer().rollEditLog(); // create segment 3

    // Fail over; the new active must catch up across all three segments
    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);
    cluster.waitActive(1);

    waitForFileInfo(nn1, p + 0, p + 1, p + 14);
  }

  @Test
  public void testNonUniformConfig() throws Exception {
    // Test case where some NNs (in this case the active NN) in the cluster
    // do not have in-progress tailing enabled.
    Configuration newConf = cluster.getNameNode(0).getConf();
    newConf.setBoolean(
        DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_KEY, false);
    cluster.restartNameNode(0);
    cluster.transitionToActive(0);

    cluster.getNameNode(0).getRpcServer().mkdirs("/test",
        FsPermission.createImmutable((short) 0755), true);
    cluster.getNameNode(0).getRpcServer().rollEdits();

    waitForFileInfo(nn1, "/test");
  }

  @Test
  public void testEditsServedViaCache() throws Exception {
    cluster.transitionToActive(0);
    cluster.waitActive(0);

    mkdirs(nn0, "/test", "/test2");
    nn0.getRpcServer().rollEditLog();
    for (int idx = 0; idx < qjmhaCluster.getJournalCluster().getNumNodes();
        idx++) {
      File[] startingEditFile = qjmhaCluster.getJournalCluster()
          .getCurrentDir(idx, DFSUtil.getNamenodeNameServiceId(conf))
          .listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
              // Matches the first, finalized edits segment (txid 1-N)
              return name.matches("edits_0+1-[0-9]+");
            }
          });
      assertNotNull(startingEditFile);
      assertEquals(1, startingEditFile.length);
      // Delete this edit file to ensure that edits can't be served via the
      // streaming mechanism - RPC/cache-based only
      startingEditFile[0].delete();
    }
    // Ensure edits were not tailed before the edit files were deleted;
    // quick spot check of a single dir
    assertNull(getFileInfo(nn1, "/tmp0", false, false, false));

    waitForFileInfo(nn1, "/test", "/test2");
  }

  @Test
  public void testCorruptJournalCache() throws Exception {
    cluster.transitionToActive(0);
    cluster.waitActive(0);

    // Shut down one JN so there is only a quorum remaining to make it easier
    // to manage the remaining two
    qjmhaCluster.getJournalCluster().getJournalNode(0).stopAndJoin(0);

    mkdirs(nn0, "/test", "/test2");
    JournalTestUtil.corruptJournaledEditsCache(1,
        qjmhaCluster.getJournalCluster().getJournalNode(1)
            .getJournal(DFSUtil.getNamenodeNameServiceId(conf)));

    nn0.getRpcServer().rollEditLog();

    waitForFileInfo(nn1, "/test", "/test2");

    mkdirs(nn0, "/test3", "/test4");
    JournalTestUtil.corruptJournaledEditsCache(3,
        qjmhaCluster.getJournalCluster().getJournalNode(2)
            .getJournal(DFSUtil.getNamenodeNameServiceId(conf)));

    waitForFileInfo(nn1, "/test3", "/test4");
  }

  @Test
  public void testTailWithoutCache() throws Exception {
    qjmhaCluster.shutdown();
    // Effectively disable the cache by setting its size too small to be used
    conf.setInt(DFSConfigKeys.DFS_JOURNALNODE_EDIT_CACHE_SIZE_KEY, 1);
    qjmhaCluster = new MiniQJMHACluster.Builder(conf).build();
    cluster = qjmhaCluster.getDfsCluster();
    cluster.transitionToActive(0);
    cluster.waitActive(0);
    nn0 = cluster.getNameNode(0);
    nn1 = cluster.getNameNode(1);

    mkdirs(nn0, "/test", "/test2");
    nn0.getRpcServer().rollEditLog();

    mkdirs(nn0, "/test3", "/test4");

    // Skip the last directory; the JournalNodes' idea of the committed
    // txn ID may not have been updated to include it yet
    waitForFileInfo(nn1, "/test", "/test2", "/test3");
  }

  /**
   * Check that no edits files are present in the given storage dirs.
   */
  private static void assertNoEditFiles(Iterable<URI> dirs) throws IOException {
    // Passing no file names asserts an empty glob match
    assertEditFiles(dirs);
  }

  /**
   * Check that the given list of edits files are present in the given storage
   * dirs.
   */
  private static void assertEditFiles(Iterable<URI> dirs, String... files)
      throws IOException {
    for (URI u : dirs) {
      File editDirRoot = new File(u.getPath());
      File editDir = new File(editDirRoot, "current");
      GenericTestUtils.assertExists(editDir);
      if (files.length == 0) {
        LOG.info("Checking no edit files exist in " + editDir);
      } else {
        LOG.info("Checking for following edit files in " + editDir
            + ": " + Joiner.on(",").join(files));
      }

      GenericTestUtils.assertGlobEquals(editDir, "edits_.*", files);
    }
  }

  /**
   * Create the given directories on the provided NameNode.
   */
  private static void mkdirs(NameNode nameNode, String... dirNames)
      throws Exception {
    for (String dirName : dirNames) {
      nameNode.getRpcServer().mkdirs(dirName,
          FsPermission.createImmutable((short) 0755), true);
    }
  }

  /**
   * Wait up to 1 second until the given NameNode is aware of the existence
   * of all of the provided fileNames. Triggers doTailEdits() on the standby
   * on every poll.
   */
  private static void waitForFileInfo(NameNode standbyNN, String... fileNames)
      throws Exception {
    List<String> remainingFiles = Lists.newArrayList(fileNames);
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        try {
          standbyNN.getNamesystem().getEditLogTailer().doTailEdits();
          for (Iterator<String> it = remainingFiles.iterator(); it.hasNext();) {
            // Remove files as they become visible so each is checked once
            if (getFileInfo(standbyNN, it.next(), true, false, false) == null) {
              return false;
            } else {
              it.remove();
            }
          }
          return true;
        } catch (IOException|InterruptedException e) {
          throw new AssertionError("Exception while waiting: " + e);
        }
      }
    }, 10, 1000);
  }
}
package com.dropbox.core.android;

/*
 * This software is provided 'as-is', without any express or implied
 * warranty. In no event will Google be held liable for any damages
 * arising from the use of this software.
 *
 * Permission is granted to anyone to use this software for any purpose,
 * including commercial applications, and to alter it and redistribute it
 * freely, as long as the origin is not misrepresented.
 */

import android.os.Build;
import android.os.Process;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.security.Provider;
import java.security.SecureRandom;
import java.security.SecureRandomSpi;

/**
 * Older versions of Android have a SecureRandom that isn't actually secure. This
 * class implements a workaround. Call the static {@link #get()} method to get a
 * secure SecureRandom instance.
 *
 * <p>
 * This workaround code was recommended by Google in a
 * <a href="http://android-developers.blogspot.com.es/2013/08/some-securerandom-thoughts.html">euphemistically-titled blog post</a>.
 * Our code is slightly different because we're a library so we don't want to change
 * global JVM settings.
 * </p>
 */
public final class FixedSecureRandom extends SecureRandom {
    private static final long serialVersionUID = 0L;

    /**
     * Returns a {@link SecureRandom} that is safe to use on this device:
     * the platform default on API levels above Jelly Bean MR2, or this
     * /dev/urandom-backed workaround on older, affected releases.
     */
    public static SecureRandom get() {
        if (Build.VERSION.SDK_INT > VERSION_CODE_JELLY_BEAN_MR2) {
            // This version of Android doesn't have the issue.
            return new SecureRandom();
        } else {
            return new FixedSecureRandom();
        }
    }

    // Android 4.3 (Jelly Bean MR2). The workaround applies at and below this
    // API level; releases above it are treated as fixed (see get()).
    private static final int VERSION_CODE_JELLY_BEAN_MR2 = 18;

    // Device-identifying bytes mixed into the seed; computed once at class load.
    private static final byte[] BUILD_FINGERPRINT_AND_DEVICE_SERIAL =
            getBuildFingerprintAndDeviceSerial();

    private FixedSecureRandom() {
        super(new LinuxPrngSecureRandomSpi(), new LinuxPrngSecureRandomProvider());
    }

    /**
     * {@code Provider} of {@code SecureRandom} engines which pass through
     * all requests to the Linux PRNG.
     */
    private static class LinuxPrngSecureRandomProvider extends Provider {
        private static final long serialVersionUID = 0L;

        public LinuxPrngSecureRandomProvider() {
            super("LinuxPRNG", 1.0, "A Linux-specific random number provider that uses"
                    + " /dev/urandom");
            // Although /dev/urandom is not a SHA-1 PRNG, some apps
            // explicitly request a SHA1PRNG SecureRandom and we thus need to
            // prevent them from getting the default implementation whose output
            // may have low entropy.
            put("SecureRandom.SHA1PRNG", LinuxPrngSecureRandomSpi.class.getName());
            put("SecureRandom.SHA1PRNG ImplementedIn", "Software");
        }
    }

    /**
     * {@link SecureRandomSpi} which passes all requests to the Linux PRNG
     * ({@code /dev/urandom}).
     */
    public static class LinuxPrngSecureRandomSpi extends SecureRandomSpi {
        private static final long serialVersionUID = 0L;

        /*
         * IMPLEMENTATION NOTE: Requests to generate bytes and to mix in a seed
         * are passed through to the Linux PRNG (/dev/urandom). Instances of
         * this class seed themselves by mixing in the current time, PID, UID,
         * build fingerprint, and hardware serial number (where available) into
         * Linux PRNG.
         *
         * Concurrency: Read requests to the underlying Linux PRNG are
         * serialized (on sLock) to ensure that multiple threads do not get
         * duplicated PRNG output.
         */

        private static final File URANDOM_FILE = new File("/dev/urandom");

        // Guards lazy opening of the shared urandom streams below.
        private static final Object sLock = new Object();

        /**
         * Input stream for reading from Linux PRNG or {@code null} if not yet
         * opened.
         *
         * @GuardedBy("sLock")
         */
        private static DataInputStream sUrandomIn;

        /**
         * Output stream for writing to Linux PRNG or {@code null} if not yet
         * opened.
         *
         * @GuardedBy("sLock")
         */
        private static OutputStream sUrandomOut;

        /**
         * Whether this engine instance has been seeded. This is needed because
         * each instance needs to seed itself if the client does not explicitly
         * seed it.
         */
        private boolean mSeeded;

        @Override
        protected void engineSetSeed(byte[] bytes) {
            try {
                OutputStream out;
                synchronized (sLock) {
                    out = getUrandomOutputStream();
                }
                out.write(bytes);
                out.flush();
            } catch (IOException e) {
                // On a small fraction of devices /dev/urandom is not writable.
                // Log and ignore. Writing the seed is best-effort; /dev/urandom
                // output remains usable without it.
                Log.w(LinuxPrngSecureRandomSpi.class.getSimpleName(),
                        "Failed to mix seed into " + URANDOM_FILE);
            } finally {
                // Mark seeded even on failure so engineNextBytes does not
                // retry (and re-log) on every call.
                mSeeded = true;
            }
        }

        @Override
        protected void engineNextBytes(byte[] bytes) {
            if (!mSeeded) {
                // Mix in the device- and invocation-specific seed.
                engineSetSeed(generateSeed());
            }

            try {
                DataInputStream in;
                synchronized (sLock) {
                    in = getUrandomInputStream();
                }
                // Serialize reads on the shared stream so two threads never
                // receive overlapping PRNG output.
                synchronized (in) {
                    in.readFully(bytes);
                }
            } catch (IOException e) {
                throw new SecurityException(
                        "Failed to read from " + URANDOM_FILE, e);
            }
        }

        @Override
        protected byte[] engineGenerateSeed(int size) {
            byte[] seed = new byte[size];
            engineNextBytes(seed);
            return seed;
        }

        private DataInputStream getUrandomInputStream() {
            synchronized (sLock) {
                if (sUrandomIn == null) {
                    // NOTE: Consider inserting a BufferedInputStream between
                    // DataInputStream and FileInputStream if you need higher
                    // PRNG output performance and can live with future PRNG
                    // output being pulled into this process prematurely.
                    try {
                        sUrandomIn = new DataInputStream(
                                new FileInputStream(URANDOM_FILE));
                    } catch (IOException e) {
                        throw new SecurityException("Failed to open "
                                + URANDOM_FILE + " for reading", e);
                    }
                }
                return sUrandomIn;
            }
        }

        private OutputStream getUrandomOutputStream() throws IOException {
            synchronized (sLock) {
                if (sUrandomOut == null) {
                    sUrandomOut = new FileOutputStream(URANDOM_FILE);
                }
                return sUrandomOut;
            }
        }
    }

    /**
     * Generates a device- and invocation-specific seed to be mixed into the
     * Linux PRNG.
     */
    private static byte[] generateSeed() {
        try {
            ByteArrayOutputStream seedBuffer = new ByteArrayOutputStream();
            DataOutputStream seedBufferOut = new DataOutputStream(seedBuffer);
            seedBufferOut.writeLong(System.currentTimeMillis());
            seedBufferOut.writeLong(System.nanoTime());
            seedBufferOut.writeInt(Process.myPid());
            seedBufferOut.writeInt(Process.myUid());
            seedBufferOut.write(BUILD_FINGERPRINT_AND_DEVICE_SERIAL);
            seedBufferOut.close();
            return seedBuffer.toByteArray();
        } catch (IOException e) {
            throw new SecurityException("Failed to generate seed", e);
        }
    }

    /**
     * Gets the hardware serial number of this device.
     *
     * @return serial number or {@code null} if not available.
     */
    private static String getDeviceSerialNumber() {
        // We're using the Reflection API because Build.SERIAL is only available
        // since API Level 9 (Gingerbread, Android 2.3).
        try {
            return (String) Build.class.getField("SERIAL").get(null);
        } catch (Exception ignored) {
            return null;
        }
    }

    private static byte[] getBuildFingerprintAndDeviceSerial() {
        StringBuilder result = new StringBuilder();
        String fingerprint = Build.FINGERPRINT;
        if (fingerprint != null) {
            result.append(fingerprint);
        }
        String serial = getDeviceSerialNumber();
        if (serial != null) {
            result.append(serial);
        }
        try {
            // The charset is named as a String (not StandardCharsets) because
            // this class must run on API levels below 19.
            return result.toString().getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException("UTF-8 encoding not supported");
        }
    }
}
package net.ros.client.render; import com.google.common.collect.HashBasedTable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Table; import net.minecraft.block.Block; import net.minecraft.block.BlockDirectional; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.block.model.*; import net.minecraft.client.renderer.texture.TextureAtlasSprite; import net.minecraft.client.renderer.vertex.DefaultVertexFormats; import net.minecraft.entity.EntityLivingBase; import net.minecraft.item.ItemStack; import net.minecraft.util.EnumFacing; import net.minecraft.util.ResourceLocation; import net.minecraft.world.World; import net.minecraftforge.client.model.ModelLoader; import net.minecraftforge.client.model.ModelLoaderRegistry; import net.minecraftforge.common.model.TRSRTransformation; import net.minecraftforge.common.property.IExtendedBlockState; import net.ros.client.render.model.ModelCacheManager; import net.ros.client.render.model.obj.PipeOBJStates; import net.ros.client.render.model.obj.ROSOBJState; import net.ros.client.render.model.obj.StateProperties; import net.ros.common.block.BlockPipeBase; import net.ros.common.grid.node.IBlockPipe; import net.ros.common.grid.node.PipeSize; import org.apache.commons.lang3.tuple.Pair; import javax.annotation.Nonnull; import javax.vecmath.Matrix4f; import java.util.ArrayList; import java.util.EnumMap; import java.util.List; import java.util.Map; public class ModelPipeCover implements IBakedModel { private final Table<ROSOBJState, EnumFacing, CompositeBakedModel> CACHE = HashBasedTable.create(); private final ResourceLocation modelLocation; private final BlockPipeBase pipeBlock; private final Block coverBlock; public ModelPipeCover(ResourceLocation modelLocation, Block coverBlock, BlockPipeBase pipeBlock) { this.modelLocation = modelLocation; this.pipeBlock = pipeBlock; this.coverBlock = 
coverBlock; } @Nonnull @Override public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand) { return getModel(((IExtendedBlockState) state).getClean(), (ROSOBJState) ((IExtendedBlockState) state).getUnlistedProperties() .get(StateProperties.VISIBILITY_PROPERTY).get(), state.getValue(BlockDirectional.FACING)).getQuads(state, face, rand); } private CompositeBakedModel getModel(IBlockState coverState, ROSOBJState pipeState, EnumFacing coverFacing) { ModelManager modelManager = Minecraft.getMinecraft().getRenderItem().getItemModelMesher().getModelManager(); if (CACHE.contains(pipeState, coverFacing)) { return CACHE.get(pipeState, coverFacing); } else { IBakedModel coverModel; try { coverModel = ModelLoaderRegistry.getModel(modelLocation) .process(ImmutableMap.of("flip-v", "true")) .bake(TRSRTransformation.from(coverFacing.getOpposite()), DefaultVertexFormats.ITEM, ModelLoader.defaultTextureGetter()); } catch (Exception e) { e.printStackTrace(); coverModel = modelManager.getMissingModel(); } Pair<PipeSize, String> pipeVariantKey = PipeOBJStates.getVariantKey(pipeState); if (pipeBlock != null && pipeBlock.getPipeType().getSize() == PipeSize.SMALL && !pipeVariantKey.getValue().startsWith("c")) pipeState = PipeOBJStates.getVisibilityState(pipeVariantKey.getKey(), "c" + pipeVariantKey.getValue()); CompositeBakedModel model = new CompositeBakedModel(coverState, ModelCacheManager.getPipeQuads(pipeBlock, pipeState), coverModel, Minecraft.getMinecraft().getBlockRendererDispatcher() .getModelForState(pipeBlock.getDefaultState())); CACHE.put(pipeState, coverFacing, model); return model; } } @Nonnull @Override public ItemOverrideList getOverrides() { return itemHandler; } @Override public boolean isAmbientOcclusion() { return false; } @Override public boolean isGui3d() { return true; } @Override public boolean isBuiltInRenderer() { return false; } @Nonnull @Override public TextureAtlasSprite getParticleTexture() { return 
Minecraft.getMinecraft().getTextureMapBlocks().getAtlasSprite("minecraft:blocks/dirt"); } @Nonnull @Override public ItemCameraTransforms getItemCameraTransforms() { return ItemCameraTransforms.DEFAULT; } private static class CompositeBakedModel implements IBakedModel { private IBakedModel pipeModel; private final List<BakedQuad> genQuads; private final Map<EnumFacing, List<BakedQuad>> faceQuads = new EnumMap<>(EnumFacing.class); CompositeBakedModel(IBlockState coverState, List<BakedQuad> pipeQuads, IBakedModel coverModel, IBakedModel pipeModel) { this.pipeModel = pipeModel; ImmutableList.Builder<BakedQuad> genBuilder = ImmutableList.builder(); for (EnumFacing e: EnumFacing.VALUES) faceQuads.put(e, new ArrayList<>()); coverModel.getQuads(coverState, null, 0).forEach(genBuilder::add); for (EnumFacing e: EnumFacing.VALUES) coverModel.getQuads(coverState, e, 0).forEach(faceQuads.get(e)::add); genBuilder.addAll(pipeQuads); genQuads = genBuilder.build(); } @Nonnull @Override public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand) { return face == null ? genQuads : faceQuads.get(face); } @Override public boolean isAmbientOcclusion() { return pipeModel.isAmbientOcclusion(); } @Override public boolean isGui3d() { return pipeModel.isGui3d(); } @Override public boolean isBuiltInRenderer() { return pipeModel.isBuiltInRenderer(); } @Nonnull @Override public TextureAtlasSprite getParticleTexture() { return pipeModel.getParticleTexture(); } @Nonnull @Override public ItemOverrideList getOverrides() { return ItemOverrideList.NONE; } @Override public Pair<? 
extends IBakedModel, Matrix4f> handlePerspective(ItemCameraTransforms.TransformType cameraTransformType) { return Pair.of(this, pipeModel.handlePerspective(cameraTransformType).getRight()); } } private final ItemOverrideList itemHandler = new ItemOverrideList(ImmutableList.of()) { @Nonnull @Override public IBakedModel handleItemState(@Nonnull IBakedModel model, ItemStack stack, World world, EntityLivingBase entity) { return ModelPipeCover.this.getModel(coverBlock.getDefaultState(), PipeOBJStates.getVisibilityState(pipeBlock.getPipeType().getSize(), ((IBlockPipe) pipeBlock).getPipeType().getSize() == PipeSize.SMALL, EnumFacing.WEST, EnumFacing.EAST), EnumFacing.NORTH); } }; }
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.codec.http; import io.netty.buffer.ByteBuf; import io.netty.util.CharsetUtil; import static io.netty.handler.codec.http.HttpConstants.SP; /** * The response code and its description of HTTP or its derived protocols, such as * <a href="http://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol">RTSP</a> and * <a href="http://en.wikipedia.org/wiki/Internet_Content_Adaptation_Protocol">ICAP</a>. 
*/ public class HttpResponseStatus implements Comparable<HttpResponseStatus> { /** * 100 Continue */ public static final HttpResponseStatus CONTINUE = new HttpResponseStatus(100, "Continue", true); /** * 101 Switching Protocols */ public static final HttpResponseStatus SWITCHING_PROTOCOLS = new HttpResponseStatus(101, "Switching Protocols", true); /** * 102 Processing (WebDAV, RFC2518) */ public static final HttpResponseStatus PROCESSING = new HttpResponseStatus(102, "Processing", true); /** * 200 OK */ public static final HttpResponseStatus OK = new HttpResponseStatus(200, "OK", true); /** * 201 Created */ public static final HttpResponseStatus CREATED = new HttpResponseStatus(201, "Created", true); /** * 202 Accepted */ public static final HttpResponseStatus ACCEPTED = new HttpResponseStatus(202, "Accepted", true); /** * 203 Non-Authoritative Information (since HTTP/1.1) */ public static final HttpResponseStatus NON_AUTHORITATIVE_INFORMATION = new HttpResponseStatus(203, "Non-Authoritative Information", true); /** * 204 No Content */ public static final HttpResponseStatus NO_CONTENT = new HttpResponseStatus(204, "No Content", true); /** * 205 Reset Content */ public static final HttpResponseStatus RESET_CONTENT = new HttpResponseStatus(205, "Reset Content", true); /** * 206 Partial Content */ public static final HttpResponseStatus PARTIAL_CONTENT = new HttpResponseStatus(206, "Partial Content", true); /** * 207 Multi-Status (WebDAV, RFC2518) */ public static final HttpResponseStatus MULTI_STATUS = new HttpResponseStatus(207, "Multi-Status", true); /** * 300 Multiple Choices */ public static final HttpResponseStatus MULTIPLE_CHOICES = new HttpResponseStatus(300, "Multiple Choices", true); /** * 301 Moved Permanently */ public static final HttpResponseStatus MOVED_PERMANENTLY = new HttpResponseStatus(301, "Moved Permanently", true); /** * 302 Found */ public static final HttpResponseStatus FOUND = new HttpResponseStatus(302, "Found", true); /** * 303 See Other 
(since HTTP/1.1) */
    public static final HttpResponseStatus SEE_OTHER = new HttpResponseStatus(303, "See Other", true);

    /** 304 Not Modified */
    public static final HttpResponseStatus NOT_MODIFIED = new HttpResponseStatus(304, "Not Modified", true);

    /** 305 Use Proxy (since HTTP/1.1) */
    public static final HttpResponseStatus USE_PROXY = new HttpResponseStatus(305, "Use Proxy", true);

    /** 307 Temporary Redirect (since HTTP/1.1) */
    public static final HttpResponseStatus TEMPORARY_REDIRECT = new HttpResponseStatus(307, "Temporary Redirect", true);

    /** 400 Bad Request */
    public static final HttpResponseStatus BAD_REQUEST = new HttpResponseStatus(400, "Bad Request", true);

    /** 401 Unauthorized */
    public static final HttpResponseStatus UNAUTHORIZED = new HttpResponseStatus(401, "Unauthorized", true);

    /** 402 Payment Required */
    public static final HttpResponseStatus PAYMENT_REQUIRED = new HttpResponseStatus(402, "Payment Required", true);

    /** 403 Forbidden */
    public static final HttpResponseStatus FORBIDDEN = new HttpResponseStatus(403, "Forbidden", true);

    /** 404 Not Found */
    public static final HttpResponseStatus NOT_FOUND = new HttpResponseStatus(404, "Not Found", true);

    /** 405 Method Not Allowed */
    public static final HttpResponseStatus METHOD_NOT_ALLOWED = new HttpResponseStatus(405, "Method Not Allowed", true);

    /** 406 Not Acceptable */
    public static final HttpResponseStatus NOT_ACCEPTABLE = new HttpResponseStatus(406, "Not Acceptable", true);

    /** 407 Proxy Authentication Required */
    public static final HttpResponseStatus PROXY_AUTHENTICATION_REQUIRED =
            new HttpResponseStatus(407, "Proxy Authentication Required", true);

    /** 408 Request Timeout */
    public static final HttpResponseStatus REQUEST_TIMEOUT = new HttpResponseStatus(408, "Request Timeout", true);

    /** 409 Conflict */
    public static final HttpResponseStatus CONFLICT = new HttpResponseStatus(409, "Conflict", true);

    /** 410 Gone */
    public static final HttpResponseStatus GONE = new HttpResponseStatus(410, "Gone", true);

    /** 411 Length Required */
    public static final HttpResponseStatus LENGTH_REQUIRED = new HttpResponseStatus(411, "Length Required", true);

    /** 412 Precondition Failed */
    public static final HttpResponseStatus PRECONDITION_FAILED = new HttpResponseStatus(412, "Precondition Failed", true);

    /** 413 Request Entity Too Large */
    public static final HttpResponseStatus REQUEST_ENTITY_TOO_LARGE =
            new HttpResponseStatus(413, "Request Entity Too Large", true);

    /** 414 Request-URI Too Long */
    public static final HttpResponseStatus REQUEST_URI_TOO_LONG =
            new HttpResponseStatus(414, "Request-URI Too Long", true);

    /** 415 Unsupported Media Type */
    public static final HttpResponseStatus UNSUPPORTED_MEDIA_TYPE =
            new HttpResponseStatus(415, "Unsupported Media Type", true);

    /** 416 Requested Range Not Satisfiable */
    public static final HttpResponseStatus REQUESTED_RANGE_NOT_SATISFIABLE =
            new HttpResponseStatus(416, "Requested Range Not Satisfiable", true);

    /** 417 Expectation Failed */
    public static final HttpResponseStatus EXPECTATION_FAILED = new HttpResponseStatus(417, "Expectation Failed", true);

    /** 422 Unprocessable Entity (WebDAV, RFC4918) */
    public static final HttpResponseStatus UNPROCESSABLE_ENTITY =
            new HttpResponseStatus(422, "Unprocessable Entity", true);

    /** 423 Locked (WebDAV, RFC4918) */
    public static final HttpResponseStatus LOCKED = new HttpResponseStatus(423, "Locked", true);

    /** 424 Failed Dependency (WebDAV, RFC4918) */
    public static final HttpResponseStatus FAILED_DEPENDENCY = new HttpResponseStatus(424, "Failed Dependency", true);

    /** 425 Unordered Collection (WebDAV, RFC3648) */
    public static final HttpResponseStatus UNORDERED_COLLECTION =
            new HttpResponseStatus(425, "Unordered Collection", true);

    /** 426 Upgrade Required (RFC2817) */
    public static final HttpResponseStatus UPGRADE_REQUIRED = new HttpResponseStatus(426, "Upgrade Required", true);

    /** 428 Precondition Required (RFC6585) */
    public static final HttpResponseStatus PRECONDITION_REQUIRED =
            new HttpResponseStatus(428, "Precondition Required", true);

    /** 429 Too Many Requests (RFC6585) */
    public static final HttpResponseStatus TOO_MANY_REQUESTS = new HttpResponseStatus(429, "Too Many Requests", true);

    /** 431 Request Header Fields Too Large (RFC6585) */
    public static final HttpResponseStatus REQUEST_HEADER_FIELDS_TOO_LARGE =
            new HttpResponseStatus(431, "Request Header Fields Too Large", true);

    /** 500 Internal Server Error */
    public static final HttpResponseStatus INTERNAL_SERVER_ERROR =
            new HttpResponseStatus(500, "Internal Server Error", true);

    /** 501 Not Implemented */
    public static final HttpResponseStatus NOT_IMPLEMENTED = new HttpResponseStatus(501, "Not Implemented", true);

    /** 502 Bad Gateway */
    public static final HttpResponseStatus BAD_GATEWAY = new HttpResponseStatus(502, "Bad Gateway", true);

    /** 503 Service Unavailable */
    public static final HttpResponseStatus SERVICE_UNAVAILABLE =
            new HttpResponseStatus(503, "Service Unavailable", true);

    /** 504 Gateway Timeout */
    public static final HttpResponseStatus GATEWAY_TIMEOUT = new HttpResponseStatus(504, "Gateway Timeout", true);

    /** 505 HTTP Version Not Supported */
    public static final HttpResponseStatus HTTP_VERSION_NOT_SUPPORTED =
            new HttpResponseStatus(505, "HTTP Version Not Supported", true);

    /** 506 Variant Also Negotiates (RFC2295) */
    public static final HttpResponseStatus VARIANT_ALSO_NEGOTIATES =
            new HttpResponseStatus(506, "Variant Also Negotiates", true);

    /** 507 Insufficient Storage (WebDAV, RFC4918) */
    public static final HttpResponseStatus INSUFFICIENT_STORAGE =
            new HttpResponseStatus(507, "Insufficient Storage", true);

    /** 510 Not Extended (RFC2774) */
    public static final HttpResponseStatus NOT_EXTENDED = new HttpResponseStatus(510, "Not Extended", true);

    /** 511 Network Authentication Required (RFC6585) */
    public static final HttpResponseStatus NETWORK_AUTHENTICATION_REQUIRED =
            new HttpResponseStatus(511, "Network Authentication Required", true);

    /**
     * Returns the {@link HttpResponseStatus} represented by the specified code.
     * If the specified code is a standard HTTP getStatus code, a cached instance
     * will be returned. Otherwise, a new instance will be returned.
     */
    public static HttpResponseStatus valueOf(int code) {
        // Standard codes map to the cached, pre-encoded constants declared above.
        switch (code) {
        case 100: return CONTINUE;
        case 101: return SWITCHING_PROTOCOLS;
        case 102: return PROCESSING;
        case 200: return OK;
        case 201: return CREATED;
        case 202: return ACCEPTED;
        case 203: return NON_AUTHORITATIVE_INFORMATION;
        case 204: return NO_CONTENT;
        case 205: return RESET_CONTENT;
        case 206: return PARTIAL_CONTENT;
        case 207: return MULTI_STATUS;
        case 300: return MULTIPLE_CHOICES;
        case 301: return MOVED_PERMANENTLY;
        case 302: return FOUND;
        case 303: return SEE_OTHER;
        case 304: return NOT_MODIFIED;
        case 305: return USE_PROXY;
        case 307: return TEMPORARY_REDIRECT;
        case 400: return BAD_REQUEST;
        case 401: return UNAUTHORIZED;
        case 402: return PAYMENT_REQUIRED;
        case 403: return FORBIDDEN;
        case 404: return NOT_FOUND;
        case 405: return METHOD_NOT_ALLOWED;
        case 406: return NOT_ACCEPTABLE;
        case 407: return PROXY_AUTHENTICATION_REQUIRED;
        case 408: return REQUEST_TIMEOUT;
        case 409: return CONFLICT;
        case 410: return GONE;
        case 411: return LENGTH_REQUIRED;
        case 412: return PRECONDITION_FAILED;
        case 413: return REQUEST_ENTITY_TOO_LARGE;
        case 414: return REQUEST_URI_TOO_LONG;
        case 415: return UNSUPPORTED_MEDIA_TYPE;
        case 416: return REQUESTED_RANGE_NOT_SATISFIABLE;
        case 417: return EXPECTATION_FAILED;
        case 422: return UNPROCESSABLE_ENTITY;
        case 423: return LOCKED;
        case 424: return FAILED_DEPENDENCY;
        case 425: return UNORDERED_COLLECTION;
        case 426: return UPGRADE_REQUIRED;
        case 428: return PRECONDITION_REQUIRED;
        case 429: return TOO_MANY_REQUESTS;
        case 431: return REQUEST_HEADER_FIELDS_TOO_LARGE;
        case 500: return INTERNAL_SERVER_ERROR;
        case 501: return NOT_IMPLEMENTED;
        case 502: return BAD_GATEWAY;
        case 503: return SERVICE_UNAVAILABLE;
        case 504: return GATEWAY_TIMEOUT;
        case 505: return HTTP_VERSION_NOT_SUPPORTED;
        case 506: return VARIANT_ALSO_NEGOTIATES;
        case 507: return INSUFFICIENT_STORAGE;
        case 510: return NOT_EXTENDED;
        case 511: return NETWORK_AUTHENTICATION_REQUIRED;
        }

        // Non-standard code: synthesize a reason phrase from the status class.
        final String reasonPhrase;
        if (code < 100) {
            reasonPhrase = "Unknown Status";
        } else if (code < 200) {
            reasonPhrase = "Informational";
        } else if (code < 300) {
            reasonPhrase = "Successful";
        } else if (code < 400) {
            reasonPhrase = "Redirection";
        } else if (code < 500) {
            reasonPhrase = "Client Error";
        } else if (code < 600) {
            reasonPhrase = "Server Error";
        } else {
            reasonPhrase = "Unknown Status";
        }
        return new HttpResponseStatus(code, reasonPhrase + " (" + code + ')');
    }

    private final int code;

    private final String reasonPhrase;

    // Pre-encoded "<code> <reasonPhrase>" in US-ASCII for the cached constants;
    // null for instances created through the public constructor.
    private final byte[] bytes;

    /**
     * Creates a new instance with the specified {@code code} and its
     * {@code reasonPhrase}.
     */
    public HttpResponseStatus(int code, String reasonPhrase) {
        this(code, reasonPhrase, false);
    }

    /**
     * @param bytes when {@code true}, pre-encodes the status line fragment so
     *              {@link #encode(ByteBuf)} can write it without re-encoding.
     * @throws IllegalArgumentException if {@code code} is negative or the
     *         reason phrase contains CR/LF (header-injection hazard)
     * @throws NullPointerException if {@code reasonPhrase} is {@code null}
     */
    private HttpResponseStatus(int code, String reasonPhrase, boolean bytes) {
        if (code < 0) {
            throw new IllegalArgumentException(
                    "code: " + code + " (expected: 0+)");
        }

        if (reasonPhrase == null) {
            throw new NullPointerException("reasonPhrase");
        }

        for (int i = 0; i < reasonPhrase.length(); i ++) {
            char c = reasonPhrase.charAt(i);
            // Check prohibited characters.
            switch (c) {
                case '\n': case '\r':
                    throw new IllegalArgumentException(
                            "reasonPhrase contains one of the following prohibited characters: " +
                                    "\\r\\n: " + reasonPhrase);
            }
        }

        this.code = code;
        this.reasonPhrase = reasonPhrase;
        if (bytes) {
            this.bytes = (code + " " + reasonPhrase).getBytes(CharsetUtil.US_ASCII);
        } else {
            this.bytes = null;
        }
    }

    /**
     * Returns the code of this {@link HttpResponseStatus}.
     */
    public int code() {
        return code;
    }

    /**
     * Returns the reason phrase of this {@link HttpResponseStatus}.
     */
    public String reasonPhrase() {
        return reasonPhrase;
    }

    @Override
    public int hashCode() {
        return code();
    }

    /**
     * Equality of {@link HttpResponseStatus} only depends on {@link #code()}. The
     * reason phrase is not considered for equality.
     */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof HttpResponseStatus)) {
            return false;
        }

        return code() == ((HttpResponseStatus) o).code();
    }

    /**
     * Ordering of {@link HttpResponseStatus} only depends on {@link #code()}. The
     * reason phrase is not considered for ordering.
     */
    @Override
    public int compareTo(HttpResponseStatus o) {
        // Integer.compare instead of the subtraction idiom: subtraction is
        // overflow-prone in general and flagged by static analysis, even
        // though codes are validated non-negative here.
        return Integer.compare(code(), o.code());
    }

    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder(reasonPhrase.length() + 5);
        buf.append(code);
        buf.append(' ');
        buf.append(reasonPhrase);
        return buf.toString();
    }

    /** Writes "{@code <code> SP <reasonPhrase>}" to {@code buf}, using the pre-encoded bytes when available. */
    void encode(ByteBuf buf) {
        if (bytes == null) {
            HttpHeaders.encodeAscii0(String.valueOf(code()), buf);
            buf.writeByte(SP);
            HttpHeaders.encodeAscii0(String.valueOf(reasonPhrase()), buf);
        } else {
            buf.writeBytes(bytes);
        }
    }
}
package org.apache.cloud.rdf.web.sail;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.setup.MockMvcBuilders.standaloneSetup;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.query.resultio.QueryResultIO;
import org.eclipse.rdf4j.query.resultio.TupleQueryResultFormat;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.ResultActions;

/**
 * Spring MVC integration tests for {@link RdfController} against an
 * Accumulo-backed {@link Repository}: exercises the load and query endpoints
 * and verifies that column-visibility labels ({@code conf.cv}) set at load
 * time are enforced against the query-time authorizations ({@code query.auth}).
 */
@RunWith(SpringJUnit4ClassRunner.class)
@WebAppConfiguration
@ContextConfiguration({"/controllerIntegrationTest-accumulo.xml", "/controllerIntegrationTest-root.xml"})
public class RdfControllerAccumuloTest {

    private MockMvc mockMvc;

    @Autowired
    private RdfController controller;

    @Autowired
    private Repository repository;

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /** Builds the standalone MockMvc and seeds the repository with the shared test data set. */
    @Before
    public void setup() {
        this.mockMvc = standaloneSetup(controller).build();

        // try-with-resources releases the connection even when the load fails;
        // the original code closed it only on the happy path.
        try (RepositoryConnection con = repository.getConnection()) {
            con.add(getClass().getResourceAsStream("/test.nt"), "", RDFFormat.NTRIPLES);
        } catch (Exception e) {
            // Fail the test run; the cause is preserved on the wrapper.
            throw new RuntimeException(e);
        }
    }

    @Test
    public void emptyQuery() throws Exception {
        mockMvc.perform(get("/queryrdf?query="))
                .andExpect(status().isOk());
    }

    @Test
    public void emptyQueryXMLFormat() throws Exception {
        this.mockMvc.perform(get("/queryrdf")
                .param("query", "SELECT * WHERE { ?s a <http://mynamespace/ProductType> . }")
                .param("query.resultformat", "xml"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.TEXT_XML));
    }

    /**
     * Loads two statements with visibilities A&amp;B and B&amp;C, then checks
     * the visible row count for increasingly permissive authorization sets.
     */
    @Test
    public void loadDataWithVisibilities() throws Exception {
        this.mockMvc.perform(post("/loadrdf")
                .content("<http://loadDataWithVisibilities/AB> <http://loadDataWithVisibilities#pred1> \"loadDataWithVisibilities_AB\" . ")
                .param("format", "N-Triples")
                .param("conf.cv", "A&B"))
                .andExpect(status().isOk());
        this.mockMvc.perform(post("/loadrdf")
                .content("<http://loadDataWithVisibilities/BC> <http://loadDataWithVisibilities#pred1> \"loadDataWithVisibilities_BC\" . ")
                .param("format", "N-Triples")
                .param("conf.cv", "B&C"))
                .andExpect(status().isOk());

        ResultActions actions;

        // No authorizations: neither A&B nor B&C is satisfied.
        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://loadDataWithVisibilities#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 0);

        // "A" alone does not satisfy A&B.
        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "A")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://loadDataWithVisibilities#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 0);

        // "A,B" satisfies A&B only.
        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "A,B")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://loadDataWithVisibilities#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 1);

        // "B,C" satisfies B&C only.
        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "B,C")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://loadDataWithVisibilities#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 1);

        // "A,B,C" satisfies both labels.
        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "A,B,C")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://loadDataWithVisibilities#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 2);
    }

    /**
     * Parses the SPARQL/XML response and asserts it holds a single binding
     * whose value equals {@code count}.
     */
    private static void validateCount(MockHttpServletResponse response, int count) throws Exception {
        String rstString = response.getContentAsString();
        // Explicit charset: the bare getBytes() used the platform default,
        // which can garble non-ASCII content on some hosts.
        TupleQueryResult result = QueryResultIO.parseTuple(
                new ByteArrayInputStream(rstString.getBytes(StandardCharsets.UTF_8)), TupleQueryResultFormat.SPARQL);
        assertEquals(1, result.getBindingNames().size());
        String binding = result.getBindingNames().get(0);
        assertTrue(result.hasNext());
        BindingSet bs = result.next();
        assertEquals(Integer.toString(count), bs.getBinding(binding).getValue().stringValue());
    }

    /**
     * Same visibility matrix as {@link #loadDataWithVisibilities()}, but the
     * data is inserted through SPARQL UPDATE requests instead of /loadrdf.
     */
    @Test
    public void updateQueryWithVisibilities() throws Exception {
        this.mockMvc.perform(get("/queryrdf")
                .param("query", "INSERT DATA { <http://mynamespace/ProductType1_AB> <http://mynamespace#pred1> \"test_AB\" }")
                .param("conf.cv", "A&B"))
                .andExpect(status().isOk());
        this.mockMvc.perform(get("/queryrdf")
                .param("query", "INSERT DATA { <http://mynamespace/ProductType1_BC> <http://mynamespace#pred1> \"test_BC\" }")
                .param("conf.cv", "B&C"))
                .andExpect(status().isOk());

        ResultActions actions;

        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://mynamespace#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 0);

        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "A")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://mynamespace#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 0);

        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "A,B")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://mynamespace#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 1);

        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "B,C")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://mynamespace#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 1);

        actions = this.mockMvc.perform(get("/queryrdf")
                .param("query.resultformat", "xml")
                .param("query.auth", "A,B,C")
                .param("query", "SELECT (COUNT(?s) as ?c) WHERE {?s <http://mynamespace#pred1> ?o}"))
                .andExpect(status().isOk());
        validateCount(actions.andReturn().getResponse(), 2);
    }
}
package com.mapswithme.maps.editor;

import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.CallSuper;
import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.support.design.widget.TextInputLayout;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SwitchCompat;
import android.text.InputType;
import android.text.TextUtils;
import android.util.SparseArray;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import com.mapswithme.maps.Framework;
import com.mapswithme.maps.R;
import com.mapswithme.maps.base.BaseMwmFragment;
import com.mapswithme.maps.bookmarks.data.Metadata.MetadataType;
import com.mapswithme.maps.dialog.EditTextDialogFragment;
import com.mapswithme.maps.editor.data.LocalizedName;
import com.mapswithme.maps.editor.data.LocalizedStreet;
import com.mapswithme.maps.editor.data.TimeFormatUtils;
import com.mapswithme.maps.editor.data.Timetable;
import com.mapswithme.util.Constants;
import com.mapswithme.util.Graphics;
import com.mapswithme.util.InputUtils;
import com.mapswithme.util.StringUtils;
import com.mapswithme.util.UiUtils;
import org.solovyev.android.views.llm.LinearLayoutManager;

/**
 * UI for editing a single map object: name(s), address, and metadata fields
 * (phone, website, email, cuisine, operator, wifi, opening hours).  Reads the
 * current values from, and writes edits back to, the native {@code Editor}
 * (all {@code Editor.native*} calls cross into JNI).  Hosted inside an
 * {@link EditorHostFragment}, which drives navigation between sub-editors.
 */
public class EditorFragment extends BaseMwmFragment implements View.OnClickListener,
                                                               EditTextDialogFragment.EditTextDialogInterface
{
  // Arguments key: adapter position of the last edited name; when present,
  // the names list is expanded and scrolled to that item on (re)creation.
  final static String LAST_INDEX_OF_NAMES_ARRAY = "LastIndexOfNamesArray";

  private TextView mCategory;
  private View mCardName;
  private View mCardAddress;
  private View mCardMetadata;

  private RecyclerView mNamesView;

  // Keeps the names caption (count + expand/collapse arrow) in sync with any
  // change to the names adapter.
  private final RecyclerView.AdapterDataObserver mNamesObserver = new RecyclerView.AdapterDataObserver()
  {
    @Override
    public void onChanged()
    {
      refreshNamesCaption();
    }

    @Override
    public void onItemRangeChanged(int positionStart, int itemCount)
    {
      refreshNamesCaption();
    }

    @Override
    public void onItemRangeInserted(int positionStart, int itemCount)
    {
      refreshNamesCaption();
    }

    @Override
    public void onItemRangeRemoved(int positionStart, int itemCount)
    {
      refreshNamesCaption();
    }

    @Override
    public void onItemRangeMoved(int fromPosition, int toPosition, int itemCount)
    {
      refreshNamesCaption();
    }
  };

  private MultilanguageAdapter mNamesAdapter;
  private TextView mNamesCaption;
  private TextView mAddLanguage;
  private TextView mMoreLanguages;

  private TextView mStreet;
  private EditText mHouseNumber;
  private EditText mZipcode;
  private View mBlockLevels;
  private EditText mBuildingLevels;
  private EditText mPhone;
  private EditText mWebsite;
  private EditText mEmail;
  private TextView mCuisine;
  private EditText mOperator;
  private SwitchCompat mWifi;

  // TextInputLayouts wrapping the EditTexts above; used to show inline
  // validation errors next to each field.
  private TextInputLayout mInputHouseNumber;
  private TextInputLayout mInputBuildingLevels;
  private TextInputLayout mInputZipcode;
  private TextInputLayout mInputPhone;
  private TextInputLayout mInputWebsite;
  private TextInputLayout mInputEmail;

  private View mEmptyOpeningHours;
  private TextView mOpeningHours;
  private View mEditOpeningHours;
  private EditText mDescription;
  // Maps MetadataType ints to their editable view blocks (7 entries appended in initViews).
  private final SparseArray<View> mMetaBlocks = new SparseArray<>(7);
  private TextView mReset;

  private EditorHostFragment mParent;

  @Nullable
  @Override
  public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState)
  {
    return inflater.inflate(R.layout.fragment_editor, container, false);
  }

  /**
   * Binds the views and fills every field with the current values held by the
   * native editor, attaching per-field validators that surface errors as the
   * user types.
   */
  @CallSuper
  @Override
  public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState)
  {
    mParent = (EditorHostFragment) getParentFragment();

    initViews(view);

    mCategory.setText(Editor.nativeGetCategory());
    final LocalizedStreet street = Editor.nativeGetStreet();
    mStreet.setText(street.defaultName);

    mHouseNumber.setText(Editor.nativeGetHouseNumber());
    mHouseNumber.addTextChangedListener(new StringUtils.SimpleTextWatcher()
    {
      @Override
      public void onTextChanged(CharSequence s, int start, int before, int count)
      {
        // 0 clears the error; otherwise the error string resource is shown.
        UiUtils.setInputError(mInputHouseNumber, Editor.nativeIsHouseValid(s.toString()) ? 0 : R.string.error_enter_correct_house_number);
      }
    });

    mZipcode.setText(Editor.nativeGetZipCode());
    mZipcode.addTextChangedListener(new StringUtils.SimpleTextWatcher()
    {
      @Override
      public void onTextChanged(CharSequence s, int start, int before, int count)
      {
        UiUtils.setInputError(mInputZipcode, Editor.nativeIsZipcodeValid(s.toString()) ? 0 : R.string.error_enter_correct_zip_code);
      }
    });

    mBuildingLevels.setText(Editor.nativeGetBuildingLevels());
    mBuildingLevels.addTextChangedListener(new StringUtils.SimpleTextWatcher()
    {
      @Override
      public void onTextChanged(CharSequence s, int start, int before, int count)
      {
        UiUtils.setInputError(mInputBuildingLevels, Editor.nativeIsLevelValid(s.toString()) ? 0 : R.string.error_enter_correct_storey_number);
      }
    });

    mPhone.setText(Editor.nativeGetPhone());
    mPhone.addTextChangedListener(new StringUtils.SimpleTextWatcher()
    {
      @Override
      public void onTextChanged(CharSequence s, int start, int before, int count)
      {
        UiUtils.setInputError(mInputPhone, Editor.nativeIsPhoneValid(s.toString()) ? 0 : R.string.error_enter_correct_phone);
      }
    });

    mWebsite.setText(Editor.nativeGetWebsite());
    mWebsite.addTextChangedListener(new StringUtils.SimpleTextWatcher()
    {
      @Override
      public void onTextChanged(CharSequence s, int start, int before, int count)
      {
        UiUtils.setInputError(mInputWebsite, Editor.nativeIsWebsiteValid(s.toString()) ? 0 : R.string.error_enter_correct_web);
      }
    });

    mEmail.setText(Editor.nativeGetEmail());
    mEmail.addTextChangedListener(new StringUtils.SimpleTextWatcher()
    {
      @Override
      public void onTextChanged(CharSequence s, int start, int before, int count)
      {
        UiUtils.setInputError(mInputEmail, Editor.nativeIsEmailValid(s.toString()) ? 0 : R.string.error_enter_correct_email);
      }
    });

    mCuisine.setText(Editor.nativeGetFormattedCuisine());
    mOperator.setText(Editor.nativeGetOperator());
    mWifi.setChecked(Editor.nativeHasWifi());
    refreshOpeningTime();
    refreshEditableFields();
    refreshResetButton();
  }

  @Override
  public void onSaveInstanceState(Bundle outState)
  {
    super.onSaveInstanceState(outState);
    // Push the current field values into the native editor so they survive
    // recreation.
    setEdits();
  }

  /**
   * Validates all fields and, if valid, writes them to the native editor.
   *
   * @return true when all fields validated and were stored; false otherwise.
   */
  boolean setEdits()
  {
    if (!validateFields())
      return false;

    Editor.nativeSetHouseNumber(mHouseNumber.getText().toString());
    Editor.nativeSetZipCode(mZipcode.getText().toString());
    Editor.nativeSetBuildingLevels(mBuildingLevels.getText().toString());
    Editor.nativeSetPhone(mPhone.getText().toString());
    Editor.nativeSetWebsite(mWebsite.getText().toString());
    Editor.nativeSetEmail(mEmail.getText().toString());
    Editor.nativeSetHasWifi(mWifi.isChecked());
    Editor.nativeSetOperator(mOperator.getText().toString());
    Editor.nativeSetNames(mParent.getNamesAsArray());

    return true;
  }

  /** Returns the trimmed free-form description text ("more" card). */
  @NonNull
  protected String getDescription()
  {
    return mDescription.getText().toString().trim();
  }

  /**
   * Runs the native validators over every editable field; on the first
   * failure, focuses the offending field, raises the keyboard and returns
   * false.  Address fields are checked only when the address is editable.
   */
  private boolean validateFields()
  {
    if (Editor.nativeIsAddressEditable())
    {
      if (!Editor.nativeIsHouseValid(mHouseNumber.getText().toString()))
      {
        mHouseNumber.requestFocus();
        InputUtils.showKeyboard(mHouseNumber);
        return false;
      }

      if (!Editor.nativeIsLevelValid(mBuildingLevels.getText().toString()))
      {
        mBuildingLevels.requestFocus();
        InputUtils.showKeyboard(mBuildingLevels);
        return false;
      }
    }

    if (!Editor.nativeIsZipcodeValid(mZipcode.getText().toString()))
    {
      mZipcode.requestFocus();
      InputUtils.showKeyboard(mZipcode);
      return false;
    }

    if (!Editor.nativeIsPhoneValid(mPhone.getText().toString()))
    {
      mPhone.requestFocus();
      InputUtils.showKeyboard(mPhone);
      return false;
    }

    if (!Editor.nativeIsWebsiteValid(mWebsite.getText().toString()))
    {
      mWebsite.requestFocus();
      InputUtils.showKeyboard(mWebsite);
      return false;
    }

    if (!Editor.nativeIsEmailValid(mEmail.getText().toString()))
    {
      mEmail.requestFocus();
      InputUtils.showKeyboard(mEmail);
      return false;
    }

    return validateNames();
  }

  /** Validates every localized name; focuses the first invalid row and returns false. */
  private boolean validateNames()
  {
    for (int pos = 0; pos < mNamesAdapter.getItemCount(); pos++)
    {
      LocalizedName localizedName = mNamesAdapter.getNameAtPos(pos);
      if (Editor.nativeIsNameValid(localizedName.name))
        continue;

      View nameView = mNamesView.getChildAt(pos);
      nameView.requestFocus();
      InputUtils.showKeyboard(nameView);

      return false;
    }
    return true;
  }

  /** Shows/hides the name, address and per-metadata blocks according to what the native editor allows. */
  private void refreshEditableFields()
  {
    UiUtils.showIf(Editor.nativeIsNameEditable(), mCardName);
    UiUtils.showIf(Editor.nativeIsAddressEditable(), mCardAddress);
    UiUtils.showIf(Editor.nativeIsBuilding() && !Editor.nativeIsPointType(), mBlockLevels);

    final int[] editableMeta = Editor.nativeGetEditableFields();
    if (editableMeta.length == 0)
    {
      UiUtils.hide(mCardMetadata);
      return;
    }

    // Hide everything, then re-show only the blocks whose MetadataType is
    // reported editable.
    for (int i = 0; i < mMetaBlocks.size(); i++)
      UiUtils.hide(mMetaBlocks.valueAt(i));

    boolean anyEditableMeta = false;
    for (int type : editableMeta)
    {
      final View metaBlock = mMetaBlocks.get(type);
      if (metaBlock == null)
        continue;

      anyEditableMeta = true;
      UiUtils.show(metaBlock);
    }
    UiUtils.showIf(anyEditableMeta, mCardMetadata);
  }

  /** Renders the opening hours, or the "empty" placeholder when the string cannot be parsed into timetables. */
  private void refreshOpeningTime()
  {
    final Timetable[] timetables = OpeningHours.nativeTimetablesFromString(Editor.nativeGetOpeningHours());
    if (timetables == null)
    {
      UiUtils.show(mEmptyOpeningHours);
      UiUtils.hide(mOpeningHours, mEditOpeningHours);
    }
    else
    {
      UiUtils.hide(mEmptyOpeningHours);
      UiUtils.setTextAndShow(mOpeningHours, TimeFormatUtils.formatTimetables(timetables));
      UiUtils.show(mEditOpeningHours);
    }
  }

  /**
   * Wires up the multilanguage names list.  When the fragment arguments carry
   * LAST_INDEX_OF_NAMES_ARRAY, additional languages are expanded and the list
   * is scrolled to that entry after the first layout pass.
   */
  private void initNamesView(final View view)
  {
    mNamesCaption = (TextView) view.findViewById(R.id.show_additional_names);
    mNamesCaption.setOnClickListener(this);

    mAddLanguage = (TextView) view.findViewById(R.id.add_langs);
    mAddLanguage.setOnClickListener(this);

    mMoreLanguages = (TextView) view.findViewById(R.id.more_names);
    mMoreLanguages.setOnClickListener(this);

    mNamesView = (RecyclerView) view.findViewById(R.id.recycler);
    mNamesView.setNestedScrollingEnabled(false);
    mNamesView.setLayoutManager(new LinearLayoutManager(getActivity()));
    mNamesAdapter = new MultilanguageAdapter(mParent);
    mNamesView.setAdapter(mNamesAdapter);
    mNamesAdapter.registerAdapterDataObserver(mNamesObserver);

    final Bundle args = getArguments();
    if (args == null || !args.containsKey(LAST_INDEX_OF_NAMES_ARRAY))
    {
      showAdditionalNames(false);
      return;
    }
    showAdditionalNames(true);
    // Scroll to the last edited name once the list has been laid out.
    UiUtils.waitLayout(mNamesView, new ViewTreeObserver.OnGlobalLayoutListener()
    {
      @Override
      public void onGlobalLayout()
      {
        LinearLayoutManager lm = (LinearLayoutManager) mNamesView.getLayoutManager();
        int position = args.getInt(LAST_INDEX_OF_NAMES_ARRAY);

        View nameItem = lm.findViewByPosition(position);

        int cvNameTop = mCardName.getTop();
        int nameItemTop = nameItem.getTop();

        view.scrollTo(0, cvNameTop + nameItemTop);

        // TODO(mgsergio): Uncomment if focus and keyboard are required.
        // TODO(mgsergio): Keyboard doesn't want to hide. Only pressing back button works.
        // View nameItemInput = nameItem.findViewById(R.id.input);
        // nameItemInput.requestFocus();
        // InputUtils.showKeyboard(nameItemInput);
      }
    });
  }

  /** Finds all views, hooks up click listeners and registers each metadata block in mMetaBlocks. */
  private void initViews(View view)
  {
    final View categoryBlock = view.findViewById(R.id.category);
    categoryBlock.setOnClickListener(this);
    // TODO show icon and fill it when core will implement that
    UiUtils.hide(categoryBlock.findViewById(R.id.icon));
    mCategory = (TextView) categoryBlock.findViewById(R.id.name);
    mCardName = view.findViewById(R.id.cv__name);
    mCardAddress = view.findViewById(R.id.cv__address);
    mCardMetadata = view.findViewById(R.id.cv__metadata);
    initNamesView(view);

    // Address
    view.findViewById(R.id.block_street).setOnClickListener(this);
    mStreet = (TextView) view.findViewById(R.id.street);
    View blockHouseNumber = view.findViewById(R.id.block_building);
    // Icon resource 0 means "no icon" for the address rows.
    mHouseNumber = findInputAndInitBlock(blockHouseNumber, 0, R.string.house_number);
    mInputHouseNumber = (TextInputLayout) blockHouseNumber.findViewById(R.id.custom_input);
    View blockZipcode = view.findViewById(R.id.block_zipcode);
    mZipcode = findInputAndInitBlock(blockZipcode, 0, R.string.editor_zip_code);
    mInputZipcode = (TextInputLayout) blockZipcode.findViewById(R.id.custom_input);

    // Details
    mBlockLevels = view.findViewById(R.id.block_levels);
    mBuildingLevels = findInputAndInitBlock(mBlockLevels, 0, getString(R.string.editor_storey_number, 25));
    mBuildingLevels.setInputType(InputType.TYPE_CLASS_NUMBER);
    mInputBuildingLevels = (TextInputLayout) mBlockLevels.findViewById(R.id.custom_input);
    View blockPhone = view.findViewById(R.id.block_phone);
    mPhone = findInputAndInitBlock(blockPhone, R.drawable.ic_phone, R.string.phone);
    mPhone.setInputType(InputType.TYPE_CLASS_PHONE);
    mInputPhone = (TextInputLayout) blockPhone.findViewById(R.id.custom_input);
    View blockWeb = view.findViewById(R.id.block_website);
    mWebsite = findInputAndInitBlock(blockWeb, R.drawable.ic_website, R.string.website);
    mWebsite.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_URI);
    mInputWebsite = (TextInputLayout) blockWeb.findViewById(R.id.custom_input);
    View blockEmail = view.findViewById(R.id.block_email);
    mEmail = findInputAndInitBlock(blockEmail, R.drawable.ic_email, R.string.email);
    mEmail.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_EMAIL_ADDRESS);
    mInputEmail = (TextInputLayout) blockEmail.findViewById(R.id.custom_input);
    View blockCuisine = view.findViewById(R.id.block_cuisine);
    blockCuisine.setOnClickListener(this);
    mCuisine = (TextView) view.findViewById(R.id.cuisine);
    View blockOperator = view.findViewById(R.id.block_operator);
    mOperator = findInputAndInitBlock(blockOperator, R.drawable.ic_operator, R.string.editor_operator);
    View blockWifi = view.findViewById(R.id.block_wifi);
    mWifi = (SwitchCompat) view.findViewById(R.id.sw__wifi);
    blockWifi.setOnClickListener(this);
    View blockOpeningHours = view.findViewById(R.id.block_opening_hours);
    mEditOpeningHours = blockOpeningHours.findViewById(R.id.edit_opening_hours);
    mEditOpeningHours.setOnClickListener(this);
    mEmptyOpeningHours = blockOpeningHours.findViewById(R.id.empty_opening_hours);
    mEmptyOpeningHours.setOnClickListener(this);
    mOpeningHours = (TextView) blockOpeningHours.findViewById(R.id.opening_hours);
    mOpeningHours.setOnClickListener(this);
    final View cardMore = view.findViewById(R.id.cv__more);
    mDescription = findInput(cardMore);
    cardMore.findViewById(R.id.about_osm).setOnClickListener(this);
    mReset = (TextView) view.findViewById(R.id.reset);
    mReset.setOnClickListener(this);

    mMetaBlocks.append(MetadataType.FMD_OPEN_HOURS.toInt(), blockOpeningHours);
    mMetaBlocks.append(MetadataType.FMD_PHONE_NUMBER.toInt(), blockPhone);
    mMetaBlocks.append(MetadataType.FMD_WEBSITE.toInt(), blockWeb);
    mMetaBlocks.append(MetadataType.FMD_EMAIL.toInt(), blockEmail);
    mMetaBlocks.append(MetadataType.FMD_CUISINE.toInt(), blockCuisine);
    mMetaBlocks.append(MetadataType.FMD_OPERATOR.toInt(), blockOperator);
    mMetaBlocks.append(MetadataType.FMD_INTERNET.toInt(), blockWifi);
  }

  /** Returns the EditText with id R.id.input inside the given block. */
  private static EditText findInput(View blockWithInput)
  {
    return (EditText) blockWithInput.findViewById(R.id.input);
  }

  private EditText findInputAndInitBlock(View blockWithInput, @DrawableRes int icon, @StringRes int hint)
  {
    return findInputAndInitBlock(blockWithInput, icon, getString(hint));
  }

  /** Sets the block's icon and hint and returns its inner EditText. */
  private static EditText findInputAndInitBlock(View blockWithInput, @DrawableRes int icon, String hint)
  {
    ((ImageView) blockWithInput.findViewById(R.id.icon)).setImageResource(icon);
    final TextInputLayout input = (TextInputLayout) blockWithInput.findViewById(R.id.custom_input);
    input.setHint(hint);
    return (EditText) input.findViewById(R.id.input);
  }

  /** Central click dispatcher; most rows delegate to the host fragment's sub-editors. */
  @Override
  public void onClick(View v)
  {
    switch (v.getId())
    {
    case R.id.edit_opening_hours:
    case R.id.empty_opening_hours:
    case R.id.opening_hours:
      mParent.editTimetable();
      break;
    case R.id.block_wifi:
      mWifi.toggle();
      break;
    case R.id.block_street:
      mParent.editStreet();
      break;
    case R.id.block_cuisine:
      mParent.editCuisine();
      break;
    case R.id.category:
      mParent.editCategory();
      break;
    case R.id.more_names:
    case R.id.show_additional_names:
      // Refuse to collapse the list while it contains an invalid name.
      if (mNamesAdapter.areAdditionalLanguagesShown() && !validateNames())
        break;
      showAdditionalNames(!mNamesAdapter.areAdditionalLanguagesShown());
      break;
    case R.id.add_langs:
      mParent.addLanguage();
      break;
    case R.id.about_osm:
      startActivity(new Intent((Intent.ACTION_VIEW), Uri.parse(Constants.Url.OSM_ABOUT)));
      break;
    case R.id.reset:
      reset();
      break;
    }
  }

  private void showAdditionalNames(boolean show)
  {
    mNamesAdapter.showAdditionalLanguages(show);

    refreshNamesCaption();
  }

  /** Updates the caption arrow and the add-language/more-names affordances after any names change. */
  private void refreshNamesCaption()
  {
    if (mNamesAdapter.getNamesCount() <= mNamesAdapter.getMandatoryNamesCount())
      setNamesArrow(0 /* arrowResourceId */); // bind arrow with empty resource (do not draw arrow)
    else if (mNamesAdapter.areAdditionalLanguagesShown())
      setNamesArrow(R.drawable.ic_expand_less);
    else
      setNamesArrow(R.drawable.ic_expand_more);

    boolean showAddLanguage = mNamesAdapter.getNamesCount() <= mNamesAdapter.getMandatoryNamesCount() ||
                              mNamesAdapter.areAdditionalLanguagesShown();

    UiUtils.showIf(showAddLanguage, mAddLanguage);
    UiUtils.showIf(!showAddLanguage, mMoreLanguages);
  }

  // Bind arrow in the top right corner of names caption with needed resource.
  private void setNamesArrow(@DrawableRes int arrowResourceId)
  {
    if (arrowResourceId == 0)
    {
      mNamesCaption.setCompoundDrawablesWithIntrinsicBounds(null, null, null, null);
      return;
    }

    mNamesCaption.setCompoundDrawablesWithIntrinsicBounds(
        null, null,
        Graphics.tint(getActivity(), arrowResourceId, R.attr.iconTint),
        null);
  }

  /** Picks the reset-button label from the object's edit status; hidden entirely while adding a new object. */
  private void refreshResetButton()
  {
    if (mParent.addingNewObject())
    {
      UiUtils.hide(mReset);
      return;
    }

    if (Editor.nativeIsMapObjectUploaded())
    {
      mReset.setText(R.string.editor_place_doesnt_exist);
      return;
    }

    switch (Editor.nativeGetMapObjectStatus())
    {
    case Editor.CREATED:
      mReset.setText(R.string.editor_remove_place_button);
      break;
    case Editor.MODIFIED:
      mReset.setText(R.string.editor_reset_edits_button);
      break;
    case Editor.UNTOUCHED:
      mReset.setText(R.string.editor_place_doesnt_exist);
      break;
    case Editor.DELETED:
      throw new IllegalStateException("Can't delete already deleted feature.");
    case Editor.OBSOLETE:
      throw new IllegalStateException("Obsolete objects cannot be reverted.");
    }
  }

  /** Reset action: rollback local edits, or report the place as non-existent when already uploaded/untouched. */
  private void reset()
  {
    if (Editor.nativeIsMapObjectUploaded())
    {
      placeDoesntExist();
      return;
    }

    switch (Editor.nativeGetMapObjectStatus())
    {
    case Editor.CREATED:
      rollback(Editor.CREATED);
      break;
    case Editor.MODIFIED:
      rollback(Editor.MODIFIED);
      break;
    case Editor.UNTOUCHED:
      placeDoesntExist();
      break;
    case Editor.DELETED:
      throw new IllegalStateException("Can't delete already deleted feature.");
    case Editor.OBSOLETE:
      throw new IllegalStateException("Obsolete objects cannot be reverted.");
    }
  }

  /** Confirms and rolls back the native edits, then navigates back via the host fragment. */
  private void rollback(@Editor.FeatureStatus int status)
  {
    int title;
    int message;
    if (status == Editor.CREATED)
    {
      title = R.string.editor_remove_place_button;
      message = R.string.editor_remove_place_message;
    }
    else
    {
      title = R.string.editor_reset_edits_button;
      message = R.string.editor_reset_edits_message;
    }
    // NOTE(review): the *message* resource is passed to setTitle() while the
    // *title* resource becomes the positive-button label — looks deliberate
    // (short confirmation text used as the dialog title), but verify.
    new AlertDialog.Builder(getActivity()).setTitle(message)
                                          .setPositiveButton(getString(title).toUpperCase(), new DialogInterface.OnClickListener()
                                          {
                                            @Override
                                            public void onClick(DialogInterface dialog, int which)
                                            {
                                              Editor.nativeRollbackMapObject();
                                              Framework.nativePokeSearchInViewport();
                                              mParent.onBackPressed();
                                            }
                                          })
                                          .setNegativeButton(getString(R.string.cancel).toUpperCase(), null)
                                          .show();
  }

  /** Opens the "place doesn't exist" report dialog (free-form comment). */
  private void placeDoesntExist()
  {
    EditTextDialogFragment.show(getString(R.string.editor_place_doesnt_exist), "", getString(R.string.editor_comment_hint),
                                getString(R.string.editor_report_problem_send_button), getString(R.string.cancel), this);
  }

  /** Sends the "doesn't exist" report to the native editor, then navigates back. */
  @NonNull
  @Override
  public EditTextDialogFragment.OnTextSaveListener getSaveTextListener()
  {
    return text -> {
      Editor.nativePlaceDoesNotExist(text);
      mParent.onBackPressed();
    };
  }

  /** The report comment must be non-empty. */
  @NonNull
  @Override
  public EditTextDialogFragment.Validator getValidator()
  {
    return (activity, text) -> !TextUtils.isEmpty(text);
  }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mgn.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object of the DescribeVcenterClients operation: a page of vCenter clients plus an
 * optional pagination token for retrieving the next page.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mgn-2020-02-26/DescribeVcenterClients" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeVcenterClientsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** List of items returned by DescribeVcenterClients. */
    private java.util.List<VcenterClient> items;

    /** Next pagination token returned from DescribeVcenterClients. */
    private String nextToken;

    /**
     * Returns the list of items returned by DescribeVcenterClients.
     *
     * @return the item list, or {@code null} if none has been set
     */
    public java.util.List<VcenterClient> getItems() {
        return items;
    }

    /**
     * Replaces the list of items returned by DescribeVcenterClients.
     * A defensive copy of the supplied collection is stored.
     *
     * @param items
     *        the new item collection, or {@code null} to clear the list
     */
    public void setItems(java.util.Collection<VcenterClient> items) {
        this.items = (items == null) ? null : new java.util.ArrayList<VcenterClient>(items);
    }

    /**
     * Appends the given items to the existing list (creating the list if absent).
     * Use {@link #setItems(java.util.Collection)} or {@link #withItems(java.util.Collection)} if you want to override
     * the existing values instead.
     *
     * @param items
     *        items to append
     * @return this object, so that method calls can be chained together
     */
    public DescribeVcenterClientsResult withItems(VcenterClient... items) {
        if (this.items == null) {
            this.items = new java.util.ArrayList<VcenterClient>(items.length);
        }
        for (VcenterClient item : items) {
            this.items.add(item);
        }
        return this;
    }

    /**
     * Replaces the list of items returned by DescribeVcenterClients.
     *
     * @param items
     *        the new item collection, or {@code null} to clear the list
     * @return this object, so that method calls can be chained together
     */
    public DescribeVcenterClientsResult withItems(java.util.Collection<VcenterClient> items) {
        setItems(items);
        return this;
    }

    /**
     * Sets the next pagination token returned from DescribeVcenterClients.
     *
     * @param nextToken
     *        the pagination token
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the next pagination token returned from DescribeVcenterClients.
     *
     * @return the pagination token, or {@code null} if there are no further pages
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the next pagination token and returns this object for chaining.
     *
     * @param nextToken
     *        the pagination token
     * @return this object, so that method calls can be chained together
     */
    public DescribeVcenterClientsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getItems() != null)
            sb.append("Items: ").append(getItems()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof DescribeVcenterClientsResult))
            return false;
        DescribeVcenterClientsResult that = (DescribeVcenterClientsResult) obj;
        // Field-wise comparison; Objects.equals handles the null/null and null/non-null cases
        // exactly like the generated xor-style checks.
        return java.util.Objects.equals(getItems(), that.getItems())
                && java.util.Objects.equals(getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated code: 31*(31*1 + h(items)) + h(nextToken).
        int hash = 31 + ((getItems() == null) ? 0 : getItems().hashCode());
        return 31 * hash + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
    }

    @Override
    public DescribeVcenterClientsResult clone() {
        try {
            return (DescribeVcenterClientsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.runtime.util; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.lang.ArrayUtils; import org.apache.sysml.parser.Expression.ValueType; import org.apache.sysml.runtime.matrix.data.FrameBlock; import org.apache.sysml.runtime.matrix.data.MatrixIndexes; import org.apache.sysml.runtime.matrix.data.NumItemsByEachReducerMetaData; import org.apache.sysml.runtime.matrix.data.Pair; import org.apache.sysml.runtime.matrix.mapred.IndexedMatrixValue; public class UtilFunctions { //for accurate cast of double values to int and long //IEEE754: binary64 (double precision) eps = 2^(-53) = 1.11 * 10^(-16) //(same epsilon as used for matrix index cast in R) public static double DOUBLE_EPS = Math.pow(2, -53); //prime numbers for old hash function (divide prime close to max int, //because it determines the max hash domain size public static final long ADD_PRIME1 = 99991; public static final int DIVIDE_PRIME = 1405695061; public static int intHashCode(int key1, int key2) { return 31 * (31 + key1) + key2; } public static int longHashCode(long key1) { return (int)(key1^(key1>>>32)); } /** * Returns 
the hash code for a long-long pair. This is the default * hash function for the keys of a distributed matrix in MR/Spark. * * @param key1 first long key * @param key2 second long key * @return hash code */ public static int longHashCode(long key1, long key2) { //basic hash mixing of two longs hashes (similar to //Arrays.hashCode(long[]) but w/o array creation/copy) int h = 31 + (int)(key1 ^ (key1 >>> 32)); return h*31 + (int)(key2 ^ (key2 >>> 32)); } /** * Returns the hash code for a long-long-long triple. This is the default * hash function for the keys of a distributed matrix in MR/Spark. * * @param key1 first long key * @param key2 second long key * @param key3 third long key * @return hash code */ public static int longHashCode(long key1, long key2, long key3) { //basic hash mixing of three longs hashes (similar to //Arrays.hashCode(long[]) but w/o array creation/copy) int h1 = 31 + (int)(key1 ^ (key1 >>> 32)); int h2 = h1*31 + (int)(key2 ^ (key2 >>> 32)); return h2*31 + (int)(key3 ^ (key3 >>> 32)); } public static int nextIntPow2( int in ) { int expon = (in==0) ? 0 : 32-Integer.numberOfLeadingZeros(in-1); long pow2 = pow(2, expon); return (int)((pow2>Integer.MAX_VALUE)?Integer.MAX_VALUE : pow2); } public static long pow(int base, int exp) { return (base==2 && 0 <= exp && exp < 63) ? 1L << exp : (long)Math.pow(base, exp); } /** * Computes the 1-based block index based on the global cell index and block size meta * data. See computeCellIndex for the inverse operation. * * @param cellIndex global cell index * @param blockSize block size * @return 1-based block index */ public static long computeBlockIndex(long cellIndex, int blockSize) { return (cellIndex-1)/blockSize + 1; } /** * Computes the 0-based cell-in-block index based on the global cell index and block * size meta data. See computeCellIndex for the inverse operation. 
* * @param cellIndex global cell index * @param blockSize block size * @return 0-based cell-in-block index */ public static int computeCellInBlock(long cellIndex, int blockSize) { return (int) ((cellIndex-1)%blockSize); } /** * Computes the global 1-based cell index based on the block index, block size meta data, * and specific 0-based in-block cell index. * * NOTE: this is equivalent to cellIndexCalculation. * * @param blockIndex block index * @param blockSize block size * @param cellInBlock 0-based cell-in-block index * @return global 1-based cell index */ public static long computeCellIndex( long blockIndex, int blockSize, int cellInBlock ) { return (blockIndex-1)*blockSize + 1 + cellInBlock; } /** * Computes the actual block size based on matrix dimension, block index, and block size * meta data. For boundary blocks, the actual block size is less or equal than the block * size meta data; otherwise they are identical. * * @param len matrix dimension * @param blockIndex block index * @param blockSize block size metadata * @return actual block size */ public static int computeBlockSize( long len, long blockIndex, long blockSize ) { long remain = len - (blockIndex-1)*blockSize; return (int)Math.min(blockSize, remain); } public static boolean isInBlockRange( MatrixIndexes ix, int brlen, int bclen, long rl, long ru, long cl, long cu ) { long bRLowerIndex = (ix.getRowIndex()-1)*brlen + 1; long bRUpperIndex = ix.getRowIndex()*brlen; long bCLowerIndex = (ix.getColumnIndex()-1)*bclen + 1; long bCUpperIndex = ix.getColumnIndex()*bclen; if(rl > bRUpperIndex || ru < bRLowerIndex) { return false; } else if(cl > bCUpperIndex || cu < bCLowerIndex) { return false; } else { return true; } } public static boolean isInFrameBlockRange( Long ix, int brlen, long rl, long ru ) { if(rl > ix+brlen-1 || ru < ix) return false; else return true; } public static boolean isInBlockRange( MatrixIndexes ix, int brlen, int bclen, IndexRange ixrange ) { return isInBlockRange(ix, brlen, bclen, 
ixrange.rowStart, ixrange.rowEnd, ixrange.colStart, ixrange.colEnd); } public static boolean isInFrameBlockRange( Long ix, int brlen, int bclen, IndexRange ixrange ) { return isInFrameBlockRange(ix, brlen, ixrange.rowStart, ixrange.rowEnd); } // Reused by both MR and Spark for performing zero out public static IndexRange getSelectedRangeForZeroOut(IndexedMatrixValue in, int blockRowFactor, int blockColFactor, IndexRange indexRange) { IndexRange tempRange = new IndexRange(-1, -1, -1, -1); long topBlockRowIndex=UtilFunctions.computeBlockIndex(indexRange.rowStart, blockRowFactor); int topRowInTopBlock=UtilFunctions.computeCellInBlock(indexRange.rowStart, blockRowFactor); long bottomBlockRowIndex=UtilFunctions.computeBlockIndex(indexRange.rowEnd, blockRowFactor); int bottomRowInBottomBlock=UtilFunctions.computeCellInBlock(indexRange.rowEnd, blockRowFactor); long leftBlockColIndex=UtilFunctions.computeBlockIndex(indexRange.colStart, blockColFactor); int leftColInLeftBlock=UtilFunctions.computeCellInBlock(indexRange.colStart, blockColFactor); long rightBlockColIndex=UtilFunctions.computeBlockIndex(indexRange.colEnd, blockColFactor); int rightColInRightBlock=UtilFunctions.computeCellInBlock(indexRange.colEnd, blockColFactor); //no overlap if(in.getIndexes().getRowIndex()<topBlockRowIndex || in.getIndexes().getRowIndex()>bottomBlockRowIndex || in.getIndexes().getColumnIndex()<leftBlockColIndex || in.getIndexes().getColumnIndex()>rightBlockColIndex) { tempRange.set(-1,-1,-1,-1); return tempRange; } //get the index range inside the block tempRange.set(0, in.getValue().getNumRows()-1, 0, in.getValue().getNumColumns()-1); if(topBlockRowIndex==in.getIndexes().getRowIndex()) tempRange.rowStart=topRowInTopBlock; if(bottomBlockRowIndex==in.getIndexes().getRowIndex()) tempRange.rowEnd=bottomRowInBottomBlock; if(leftBlockColIndex==in.getIndexes().getColumnIndex()) tempRange.colStart=leftColInLeftBlock; if(rightBlockColIndex==in.getIndexes().getColumnIndex()) 
tempRange.colEnd=rightColInRightBlock; return tempRange; } // Reused by both MR and Spark for performing zero out public static IndexRange getSelectedRangeForZeroOut(Pair<Long, FrameBlock> in, int blockRowFactor, int blockColFactor, IndexRange indexRange, long lSrcRowIndex, long lDestRowIndex) { int iRowStart, iRowEnd, iColStart, iColEnd; if(indexRange.rowStart <= lDestRowIndex) iRowStart = 0; else iRowStart = (int) (indexRange.rowStart - in.getKey()); iRowEnd = (int) Math.min(indexRange.rowEnd - lSrcRowIndex, blockRowFactor)-1; iColStart = UtilFunctions.computeCellInBlock(indexRange.colStart, blockColFactor); iColEnd = UtilFunctions.computeCellInBlock(indexRange.colEnd, blockColFactor); return new IndexRange(iRowStart, iRowEnd, iColStart, iColEnd); } public static long getTotalLength(NumItemsByEachReducerMetaData metadata) { long[] counts=metadata.getNumItemsArray(); long total=0; for(long count: counts) total+=count; return total; } public static long getLengthForInterQuantile(NumItemsByEachReducerMetaData metadata, double p) { long total = UtilFunctions.getTotalLength(metadata); long lpos=(long)Math.ceil(total*p);//lower bound is inclusive long upos=(long)Math.ceil(total*(1-p));//upper bound is inclusive return upos-lpos+1; } /** * JDK8 floating decimal double parsing, which is generally faster * than &lt;JDK8 parseDouble and works well in multi-threaded tasks. 
* * @param str string to parse to double * @return double value */ public static double parseToDouble(String str) { //return FloatingDecimal.parseDouble(str); return Double.parseDouble(str); } public static int parseToInt( String str ) { int ret = -1; if( str.contains(".") ) ret = toInt( Double.parseDouble(str) ); else ret = Integer.parseInt(str); return ret; } public static long parseToLong( String str ) { long ret = -1; if( str.contains(".") ) ret = toLong( Double.parseDouble(str) ); else ret = Long.parseLong(str); return ret; } public static int toInt( double val ) { return (int) Math.floor( val + DOUBLE_EPS ); } public static long toLong( double val ) { return (long) Math.floor( val + DOUBLE_EPS ); } public static int toInt(Object obj) { return (obj instanceof Long) ? ((Long)obj).intValue() : ((Integer)obj).intValue(); } public static long getSeqLength(double from, double to, double incr) { return getSeqLength(from, to, incr, true); } public static long getSeqLength(double from, double to, double incr, boolean check) { //Computing the length of a sequence with 1 + floor((to-from)/incr) //can lead to incorrect results due to round-off errors in case of //a very small increment. Hence, we use a different formulation //that exhibits better numerical stability by avoiding the subtraction //of numbers of different magnitude. 
if( check && (Double.isNaN(from) || Double.isNaN(to) || Double.isNaN(incr) || (from > to && incr > 0) || (from < to && incr < 0)) ) { throw new RuntimeException("Invalid seq parameters: ("+from+", "+to+", "+incr+")"); } return 1L + (long) Math.floor(to/incr - from/incr); } /** * Obtain sequence list * * @param low lower bound (inclusive) * @param up upper bound (inclusive) * @param incr increment * @return list of integers */ public static List<Integer> getSeqList(int low, int up, int incr) { ArrayList<Integer> ret = new ArrayList<Integer>(); for( int i=low; i<=up; i+=incr ) ret.add(i); return ret; } /** * Obtain sequence array * * @param low lower bound (inclusive) * @param up upper bound (inclusive) * @param incr increment * @return array of integers */ public static int[] getSeqArray(int low, int up, int incr) { int len = (int) getSeqLength(low, up, incr); int[] ret = new int[len]; for( int i=0, val=low; i<len; i++, val+=incr ) ret[i] = val; return ret; } public static int roundToNext(int val, int factor) { //round up to next non-zero multiple of factor int pval = Math.max(val, factor); return ((pval + factor-1) / factor) * factor; } public static Object doubleToObject(ValueType vt, double in) { return doubleToObject(vt, in, true); } public static Object doubleToObject(ValueType vt, double in, boolean sparse) { if( in == 0 && sparse) return null; switch( vt ) { case STRING: return String.valueOf(in); case BOOLEAN: return (in!=0); case INT: return UtilFunctions.toLong(in); case DOUBLE: return in; default: throw new RuntimeException("Unsupported value type: "+vt); } } public static Object stringToObject(ValueType vt, String in) { if( in == null ) return null; switch( vt ) { case STRING: return in; case BOOLEAN: return Boolean.parseBoolean(in); case INT: return Long.parseLong(in); case DOUBLE: return Double.parseDouble(in); default: throw new RuntimeException("Unsupported value type: "+vt); } } public static double objectToDouble(ValueType vt, Object in) { if( in 
== null ) return 0; switch( vt ) { case STRING: return !((String)in).isEmpty() ? Double.parseDouble((String)in) : 0; case BOOLEAN: return ((Boolean)in)?1d:0d; case INT: return (Long)in; case DOUBLE: return (Double)in; default: throw new RuntimeException("Unsupported value type: "+vt); } } public static String objectToString( Object in ) { return (in !=null) ? in.toString() : null; } /** * Convert object to string * * @param in object * @param ignoreNull If this flag has set, it will ignore null. This flag is mainly used in merge functionality to override data with "null" data. * @return string representation of object */ public static String objectToString( Object in, boolean ignoreNull ) { String strReturn = objectToString(in); if( strReturn == null ) return strReturn; else if (ignoreNull){ if(in instanceof Double && ((Double)in).doubleValue() == 0.0) return null; else if(in instanceof Long && ((Long)in).longValue() == 0) return null; else if(in instanceof Boolean && ((Boolean)in).booleanValue() == false) return null; else if(in instanceof String && ((String)in).trim().length() == 0) return null; else return strReturn; } else return strReturn; } public static Object objectToObject(ValueType vt, Object in) { if( in instanceof Double && vt == ValueType.DOUBLE || in instanceof Long && vt == ValueType.INT || in instanceof Boolean && vt == ValueType.BOOLEAN || in instanceof String && vt == ValueType.STRING ) return in; //quick path to avoid double parsing else return stringToObject(vt, objectToString(in) ); } public static Object objectToObject(ValueType vt, Object in, boolean ignoreNull ) { String str = objectToString(in, ignoreNull); if (str==null || vt == ValueType.STRING) return str; else return stringToObject(vt, str); } public static int compareTo(ValueType vt, Object in1, Object in2) { if(in1 == null && in2 == null) return 0; else if(in1 == null) return -1; else if(in2 == null) return 1; switch( vt ) { case STRING: return ((String)in1).compareTo((String)in2); 
case BOOLEAN: return ((Boolean)in1).compareTo((Boolean)in2); case INT: return ((Long)in1).compareTo((Long)in2); case DOUBLE: return ((Double)in1).compareTo((Double)in2); default: throw new RuntimeException("Unsupported value type: "+vt); } } /** * Compares two version strings of format x.y.z, where x is major, * y is minor, and z is maintenance release. * * @param version1 first version string * @param version2 second version string * @return 1 if version1 greater, -1 if version2 greater, 0 if equal */ public static int compareVersion( String version1, String version2 ) { String[] partsv1 = version1.split("\\."); String[] partsv2 = version2.split("\\."); int len = Math.min(partsv1.length, partsv2.length); for( int i=0; i<partsv1.length && i<len; i++ ) { Integer iv1 = Integer.parseInt(partsv1[i]); Integer iv2 = Integer.parseInt(partsv2[i]); if( iv1.compareTo(iv2) != 0 ) return iv1.compareTo(iv2); } return 0; //equal } public static boolean isIntegerNumber( String str ) { byte[] c = str.getBytes(); for( int i=0; i<c.length; i++ ) if( c[i] < 48 || c[i] > 57 ) return false; return true; } public static byte max( byte[] array ) { byte ret = Byte.MIN_VALUE; for( int i=0; i<array.length; i++ ) ret = (array[i]>ret)?array[i]:ret; return ret; } public static String unquote(String s) { if (s != null && s.length() >=2 && ((s.startsWith("\"") && s.endsWith("\"")) || (s.startsWith("'") && s.endsWith("'")))) { s = s.substring(1, s.length() - 1); } return s; } public static String quote(String s) { return "\"" + s + "\""; } /** * Parses a memory size with optional g/m/k quantifiers into its * number representation. 
* * @param arg memory size as readable string * @return byte count of memory size */ public static long parseMemorySize(String arg) { if ( arg.endsWith("g") || arg.endsWith("G") ) return Long.parseLong(arg.substring(0,arg.length()-1)) * 1024 * 1024 * 1024; else if ( arg.endsWith("m") || arg.endsWith("M") ) return Long.parseLong(arg.substring(0,arg.length()-1)) * 1024 * 1024; else if( arg.endsWith("k") || arg.endsWith("K") ) return Long.parseLong(arg.substring(0,arg.length()-1)) * 1024; else return Long.parseLong(arg.substring(0,arg.length())); } /** * Format a memory size with g/m/k quantifiers into its * number representation. * * @param arg byte count of memory size * @return memory size as readable string */ public static String formatMemorySize(long arg) { if (arg >= 1024 * 1024 * 1024) return String.format("%d GB", arg/(1024*1024*1024)); else if (arg >= 1024 * 1024) return String.format("%d MB", arg/(1024*1024)); else if (arg >= 1024) return String.format("%d KB", arg/(1024)); else return String.format("%d", arg); } public static double getDouble(Object obj) { return (obj instanceof Double) ? (Double)obj : Double.parseDouble(obj.toString()); } public static boolean isNonZero(Object obj) { if( obj instanceof Double ) return ((Double) obj) != 0; else { //avoid expensive double parsing String sobj = obj.toString(); return (!sobj.equals("0") && !sobj.equals("0.0")); } } public static ValueType[] nCopies(int n, ValueType vt) { ValueType[] ret = new ValueType[n]; Arrays.fill(ret, vt); return ret; } public static int frequency(ValueType[] schema, ValueType vt) { int count = 0; for( ValueType tmp : schema ) count += tmp.equals(vt) ? 1 : 0; return count; } public static ValueType[] copyOf(ValueType[] schema1, ValueType[] schema2) { return (ValueType[]) ArrayUtils.addAll(schema1, schema2); } public static int countNonZeros(double[] data, int pos, int len) { int ret = 0; for( int i=pos; i<pos+len; i++ ) ret += (data[i] != 0) ? 
1 : 0; return ret; } public static boolean containsZero(double[] data, int pos, int len) { for( int i=pos; i<pos+len; i++ ) if( data[i] == 0 ) return true; return false; } @SafeVarargs public static <T> Set<T> asSet(T[]... inputs) { Set<T> ret = new HashSet<>(); for( T[] input : inputs ) for( T element : input ) ret.add(element); return ret; } }
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.net; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.IOException; import java.security.AccessController; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.apache.harmony.luni.internal.nls.Messages; import org.apache.harmony.luni.util.PriviAction; /** * Default implementation for {@code ProxySelector}. */ @SuppressWarnings("unchecked") class ProxySelectorImpl extends ProxySelector { private static final int HTTP_PROXY_PORT = 80; private static final int HTTPS_PROXY_PORT = 443; private static final int FTP_PROXY_PORT = 80; private static final int SOCKS_PROXY_PORT = 1080; // Net properties read from net.properties file. 
private static Properties netProps = null; // read net.properties file static { AccessController.doPrivileged(new java.security.PrivilegedAction() { public Object run() { File f = new File(System.getProperty("java.home") //$NON-NLS-1$ + File.separator + "lib" + File.separator //$NON-NLS-1$ + "net.properties"); //$NON-NLS-1$ if (f.exists()) { try { FileInputStream fis = new FileInputStream(f); InputStream is = new BufferedInputStream(fis); netProps = new Properties(); netProps.load(is); is.close(); } catch (IOException e) { } } return null; } }); } public ProxySelectorImpl() { super(); } @Override public void connectFailed(URI uri, SocketAddress sa, IOException ioe) { if (null == uri || null == sa || null == ioe) { // luni.4D=Argument must not be null" throw new IllegalArgumentException(Messages.getString("luni.4D")); //$NON-NLS-1$ } } @Override public List<Proxy> select(URI uri) { // argument check if (null == uri) { // luni.4D=Argument must not be null throw new IllegalArgumentException(Messages.getString("luni.4D")); //$NON-NLS-1$ } // check scheme String scheme = uri.getScheme(); if (null == scheme) { throw new IllegalArgumentException(); } String host = uri.getHost(); Proxy proxy = Proxy.NO_PROXY; if ("http".equals(scheme)) { //$NON-NLS-1$ proxy = selectHttpProxy(host); } else if ("https".equals(scheme)) { //$NON-NLS-1$ proxy = selectHttpsProxy(); } else if ("ftp".equals(scheme)) { //$NON-NLS-1$ proxy = selectFtpProxy(host); } else if ("socket".equals(scheme)) { //$NON-NLS-1$ proxy = selectSocksProxy(); } List<Proxy> proxyList = new ArrayList<Proxy>(1); proxyList.add(proxy); return proxyList; } /* * Gets proxy for http request. 1. gets from "http.proxyHost", then gets * port from "http.proxyPort", or from "proxyPort" if "http.proxyPort" is * unavailable. 2. gets from "proxyHost" if 1 is unavailable,then get port * from "proxyPort", or from "http.proxyPort" if "proxyPort" is unavailable. * 3. gets from "socksProxyHost" if 2 is unavailable. 
*/ private Proxy selectHttpProxy(String uriHost) { String host; String port = null; Proxy.Type type = Proxy.Type.DIRECT; String nonProxyHosts = getSystemProperty("http.nonProxyHosts"); //$NON-NLS-1$ // if host is in non proxy host list, returns Proxy.NO_PROXY if (isNonProxyHost(uriHost, nonProxyHosts)) { return Proxy.NO_PROXY; } host = getSystemProperty("http.proxyHost"); //$NON-NLS-1$ if (null != host) { // case 1: http.proxyHost is set, use exact http proxy type = Proxy.Type.HTTP; port = getSystemPropertyOrAlternative("http.proxyPort", //$NON-NLS-1$ "proxyPort", String.valueOf(HTTP_PROXY_PORT)); //$NON-NLS-1$ } else if ((host = getSystemProperty("proxyHost", null)) != null) { //$NON-NLS-1$ // case 2: proxyHost is set, use exact http proxy type = Proxy.Type.HTTP; port = getSystemPropertyOrAlternative("proxyPort", //$NON-NLS-1$ "http.proxyPort", String.valueOf(HTTP_PROXY_PORT)); //$NON-NLS-1$ } else if ((host = getSystemProperty("socksProxyHost")) != null) { //$NON-NLS-1$ // case 3: use socks proxy instead type = Proxy.Type.SOCKS; port = getSystemProperty( "socksProxyPort", String.valueOf(SOCKS_PROXY_PORT)); //$NON-NLS-1$ } int defaultPort = (type == Proxy.Type.SOCKS) ? SOCKS_PROXY_PORT : HTTP_PROXY_PORT; return createProxy(type, host, port, defaultPort); } /* * Gets proxy for https request. */ private Proxy selectHttpsProxy() { String host; String port = null; Proxy.Type type = Proxy.Type.DIRECT; host = getSystemProperty("https.proxyHost"); //$NON-NLS-1$ if (null != host) { // case 1: use exact https proxy type = Proxy.Type.HTTP; port = getSystemProperty( "https.proxyPort", String.valueOf(HTTPS_PROXY_PORT)); //$NON-NLS-1$ } else { host = getSystemProperty("socksProxyHost"); //$NON-NLS-1$ if (null != host) { // case 2: use socks proxy instead type = Proxy.Type.SOCKS; port = getSystemProperty( "socksProxyPort", String.valueOf(SOCKS_PROXY_PORT)); //$NON-NLS-1$ } } int defaultPort = (type == Proxy.Type.SOCKS) ? 
SOCKS_PROXY_PORT : HTTPS_PROXY_PORT;
        return createProxy(type, host, port, defaultPort);
    }

    /*
     * Gets the proxy for an ftp request. Hosts listed in ftp.nonProxyHosts
     * are contacted directly; otherwise an explicit ftp proxy is preferred,
     * falling back to the SOCKS proxy when no ftp proxy is configured.
     */
    private Proxy selectFtpProxy(String uriHost) {
        String host;
        String port = null;
        Proxy.Type type = Proxy.Type.DIRECT;
        String nonProxyHosts = getSystemProperty("ftp.nonProxyHosts"); //$NON-NLS-1$
        // if host is in non proxy host list, returns Proxy.NO_PROXY
        if (isNonProxyHost(uriHost, nonProxyHosts)) {
            return Proxy.NO_PROXY;
        }

        host = getSystemProperty("ftp.proxyHost"); //$NON-NLS-1$
        if (null != host) {
            // case 1: use exact ftp proxy
            type = Proxy.Type.HTTP;
            port = getSystemProperty(
                    "ftp.proxyPort", String.valueOf(FTP_PROXY_PORT)); //$NON-NLS-1$
        } else {
            host = getSystemProperty("socksProxyHost"); //$NON-NLS-1$
            if (null != host) {
                // case 2: use socks proxy instead
                type = Proxy.Type.SOCKS;
                port = getSystemProperty(
                        "socksProxyPort", String.valueOf(SOCKS_PROXY_PORT)); //$NON-NLS-1$
            }
        }
        int defaultPort = (type == Proxy.Type.SOCKS) ? SOCKS_PROXY_PORT
                : FTP_PROXY_PORT;
        return createProxy(type, host, port, defaultPort);
    }

    /*
     * Gets the proxy for a socks request: the configured SOCKS proxy, or a
     * direct connection when socksProxyHost is not set.
     */
    private Proxy selectSocksProxy() {
        String host;
        String port = null;
        Proxy.Type type = Proxy.Type.DIRECT;

        host = getSystemProperty("socksProxyHost"); //$NON-NLS-1$
        if (null != host) {
            type = Proxy.Type.SOCKS;
            port = getSystemProperty(
                    "socksProxyPort", String.valueOf(SOCKS_PROXY_PORT)); //$NON-NLS-1$
        }
        return createProxy(type, host, port, SOCKS_PROXY_PORT);
    }

    /*
     * Checks whether the host needs a proxy. Returns true if it doesn't need
     * one, i.e. the host matches the nonProxyHosts pattern. In that pattern
     * '*' is a wildcard; every other character (including the conventional
     * '|' separator, which becomes regex alternation) is kept as-is.
     */
    private boolean isNonProxyHost(String host, String nonProxyHosts) {
        // host unknown or nonProxyHosts not set: the host is never excluded
        if (null == host || null == nonProxyHosts) {
            return false;
        }
        // Construct a regex from nonProxyHosts: quote '.', expand '*' to '.*'
        int length = nonProxyHosts.length();
        StringBuilder buf = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            char ch = nonProxyHosts.charAt(i);
            switch (ch) {
                case '.':
                    buf.append("\\."); //$NON-NLS-1$
                    break;
                case '*':
                    buf.append(".*"); //$NON-NLS-1$
                    break;
                default:
                    buf.append(ch);
            }
        }
        // check whether the host matches the nonProxyHosts pattern
        return host.matches(buf.toString());
    }

    /*
     * Creates a Proxy from "type", "host" and "port". A DIRECT type yields
     * Proxy.NO_PROXY; a null or unparsable port falls back to defaultPort.
     */
    private Proxy createProxy(Proxy.Type type, String host, String port,
            int defaultPort) {
        if (type == Proxy.Type.DIRECT) {
            return Proxy.NO_PROXY;
        }
        int iPort;
        try {
            // Integer.parseInt also throws NumberFormatException for a null
            // argument, so a missing port falls through to the default too.
            iPort = Integer.parseInt(port);
        } catch (NumberFormatException e) {
            iPort = defaultPort;
        }
        // unresolved address: proxy selection itself must not trigger DNS
        return new Proxy(type, InetSocketAddress.createUnresolved(host, iPort));
    }

    /*
     * Gets a system property, privileged operation. If the value of the
     * property is null or the empty String, it returns null.
     */
    private String getSystemProperty(final String property) {
        return getSystemProperty(property, null);
    }

    /*
     * Gets a system property, privileged operation. If the value of the
     * property is null or the empty String, it consults the netProps
     * configuration and finally falls back to defaultValue.
     */
    private String getSystemProperty(final String property,
            final String defaultValue) {
        String value = AccessController.doPrivileged(new PriviAction<String>(
                property));
        if (null == value || "".equals(value)) { //$NON-NLS-1$
            value = (netProps != null)
                    ? netProps.getProperty(property, defaultValue)
                    : defaultValue;
        }
        return value;
    }

    /*
     * Gets a system property, privileged operation. If the value of the "key"
     * property is null, the value of the "alternativeKey" property is used
     * instead. Finally, if the value is still null, it returns defaultValue.
     */
    private String getSystemPropertyOrAlternative(final String key,
            final String alternativeKey, final String defaultValue) {
        String value = getSystemProperty(key);
        if (value == null) {
            value = getSystemProperty(alternativeKey);
            if (null == value) {
                value = defaultValue;
            }
        }
        return value;
    }
}
/** * Generated with Acceleo */ package org.wso2.developerstudio.eclipse.gmf.esb.components; // Start of user code for imports import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.util.BasicDiagnostic; import org.eclipse.emf.common.util.Diagnostic; import org.eclipse.emf.common.util.WrappedException; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.emf.ecore.util.EcoreUtil; import org.eclipse.emf.eef.runtime.api.notify.EStructuralFeatureNotificationFilter; import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent; import org.eclipse.emf.eef.runtime.api.notify.NotificationFilter; import org.eclipse.emf.eef.runtime.context.PropertiesEditingContext; import org.eclipse.emf.eef.runtime.context.impl.EObjectPropertiesEditionContext; import org.eclipse.emf.eef.runtime.context.impl.EReferencePropertiesEditionContext; import org.eclipse.emf.eef.runtime.impl.components.SinglePartPropertiesEditingComponent; import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent; import org.eclipse.emf.eef.runtime.impl.utils.EEFConverterUtil; import org.eclipse.emf.eef.runtime.policies.PropertiesEditingPolicy; import org.eclipse.emf.eef.runtime.policies.impl.CreateEditingPolicy; import org.eclipse.emf.eef.runtime.providers.PropertiesEditingProvider; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerFilter; import org.wso2.developerstudio.eclipse.gmf.esb.CommentMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ConditionalRouterMediatorAdditionalOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.parts.ConditionalRouterMediatorAdditionalOutputConnectorPropertiesEditionPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository; // End of user code /** * * */ public class ConditionalRouterMediatorAdditionalOutputConnectorPropertiesEditionComponent extends SinglePartPropertiesEditingComponent { public static String BASE_PART = "Base"; //$NON-NLS-1$ /** * Settings for commentMediators ReferencesTable */ protected ReferencesTableSettings commentMediatorsSettings; /** * Default constructor * */ public ConditionalRouterMediatorAdditionalOutputConnectorPropertiesEditionComponent(PropertiesEditingContext editingContext, EObject conditionalRouterMediatorAdditionalOutputConnector, String editing_mode) { super(editingContext, conditionalRouterMediatorAdditionalOutputConnector, editing_mode); parts = new String[] { BASE_PART }; repositoryKey = EsbViewsRepository.class; partKey = EsbViewsRepository.ConditionalRouterMediatorAdditionalOutputConnector.class; } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#initPart(java.lang.Object, int, org.eclipse.emf.ecore.EObject, * org.eclipse.emf.ecore.resource.ResourceSet) * */ public void initPart(Object key, int kind, EObject elt, ResourceSet allResource) { setInitializing(true); if (editingPart != null && key == partKey) { editingPart.setContext(elt, allResource); final ConditionalRouterMediatorAdditionalOutputConnector conditionalRouterMediatorAdditionalOutputConnector = (ConditionalRouterMediatorAdditionalOutputConnector)elt; final ConditionalRouterMediatorAdditionalOutputConnectorPropertiesEditionPart basePart = (ConditionalRouterMediatorAdditionalOutputConnectorPropertiesEditionPart)editingPart; // init values if (isAccessible(EsbViewsRepository.ConditionalRouterMediatorAdditionalOutputConnector.Properties.commentMediators)) { commentMediatorsSettings = new ReferencesTableSettings(conditionalRouterMediatorAdditionalOutputConnector, EsbPackage.eINSTANCE.getOutputConnector_CommentMediators()); 
basePart.initCommentMediators(commentMediatorsSettings); } // init filters if (isAccessible(EsbViewsRepository.ConditionalRouterMediatorAdditionalOutputConnector.Properties.commentMediators)) { basePart.addFilterToCommentMediators(new ViewerFilter() { /** * {@inheritDoc} * * @see org.eclipse.jface.viewers.ViewerFilter#select(org.eclipse.jface.viewers.Viewer, java.lang.Object, java.lang.Object) */ public boolean select(Viewer viewer, Object parentElement, Object element) { return (element instanceof String && element.equals("")) || (element instanceof CommentMediator); //$NON-NLS-1$ } }); // Start of user code for additional businessfilters for commentMediators // End of user code } // init values for referenced views // init filters for referenced views } setInitializing(false); } /** * {@inheritDoc} * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#associatedFeature(java.lang.Object) */ public EStructuralFeature associatedFeature(Object editorKey) { if (editorKey == EsbViewsRepository.ConditionalRouterMediatorAdditionalOutputConnector.Properties.commentMediators) { return EsbPackage.eINSTANCE.getOutputConnector_CommentMediators(); } return super.associatedFeature(editorKey); } /** * {@inheritDoc} * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updateSemanticModel(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent) * */ public void updateSemanticModel(final IPropertiesEditionEvent event) { ConditionalRouterMediatorAdditionalOutputConnector conditionalRouterMediatorAdditionalOutputConnector = (ConditionalRouterMediatorAdditionalOutputConnector)semanticObject; if (EsbViewsRepository.ConditionalRouterMediatorAdditionalOutputConnector.Properties.commentMediators == event.getAffectedEditor()) { if (event.getKind() == PropertiesEditionEvent.ADD) { EReferencePropertiesEditionContext context = new EReferencePropertiesEditionContext(editingContext, this, commentMediatorsSettings, 
editingContext.getAdapterFactory()); PropertiesEditingProvider provider = (PropertiesEditingProvider)editingContext.getAdapterFactory().adapt(semanticObject, PropertiesEditingProvider.class); if (provider != null) { PropertiesEditingPolicy policy = provider.getPolicy(context); if (policy instanceof CreateEditingPolicy) { policy.execute(); } } } else if (event.getKind() == PropertiesEditionEvent.EDIT) { EObjectPropertiesEditionContext context = new EObjectPropertiesEditionContext(editingContext, this, (EObject) event.getNewValue(), editingContext.getAdapterFactory()); PropertiesEditingProvider provider = (PropertiesEditingProvider)editingContext.getAdapterFactory().adapt((EObject) event.getNewValue(), PropertiesEditingProvider.class); if (provider != null) { PropertiesEditingPolicy editionPolicy = provider.getPolicy(context); if (editionPolicy != null) { editionPolicy.execute(); } } } else if (event.getKind() == PropertiesEditionEvent.REMOVE) { commentMediatorsSettings.removeFromReference((EObject) event.getNewValue()); } else if (event.getKind() == PropertiesEditionEvent.MOVE) { commentMediatorsSettings.move(event.getNewIndex(), (CommentMediator) event.getNewValue()); } } } /** * {@inheritDoc} * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updatePart(org.eclipse.emf.common.notify.Notification) */ public void updatePart(Notification msg) { super.updatePart(msg); if (editingPart.isVisible()) { ConditionalRouterMediatorAdditionalOutputConnectorPropertiesEditionPart basePart = (ConditionalRouterMediatorAdditionalOutputConnectorPropertiesEditionPart)editingPart; if (EsbPackage.eINSTANCE.getOutputConnector_CommentMediators().equals(msg.getFeature()) && isAccessible(EsbViewsRepository.ConditionalRouterMediatorAdditionalOutputConnector.Properties.commentMediators)) basePart.updateCommentMediators(); } } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#getNotificationFilters() 
*/ @Override protected NotificationFilter[] getNotificationFilters() { NotificationFilter filter = new EStructuralFeatureNotificationFilter( EsbPackage.eINSTANCE.getOutputConnector_CommentMediators() ); return new NotificationFilter[] {filter,}; } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#validateValue(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent) * */ public Diagnostic validateValue(IPropertiesEditionEvent event) { Diagnostic ret = Diagnostic.OK_INSTANCE; if (event.getNewValue() != null) { try { } catch (IllegalArgumentException iae) { ret = BasicDiagnostic.toDiagnostic(iae); } catch (WrappedException we) { ret = BasicDiagnostic.toDiagnostic(we); } } return ret; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.transforms.reflect; import com.google.auto.value.AutoValue; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Collections; import java.util.List; import java.util.Map; import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.DoFn.StateId; import org.apache.beam.sdk.transforms.DoFn.TimerId; import org.apache.beam.sdk.transforms.reflect.DoFnSignature.Parameter.RestrictionTrackerParameter; import org.apache.beam.sdk.transforms.reflect.DoFnSignature.Parameter.StateParameter; import org.apache.beam.sdk.transforms.reflect.DoFnSignature.Parameter.TimerParameter; import org.apache.beam.sdk.transforms.reflect.DoFnSignature.Parameter.WindowParameter; import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.Timer; import org.apache.beam.sdk.util.TimerSpec; import org.apache.beam.sdk.util.state.State; import org.apache.beam.sdk.util.state.StateSpec; import 
org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TypeDescriptor;

/**
 * Describes the signature of a {@link DoFn}, in particular, which features it uses, which extra
 * context it requires, types of the input and output elements, etc.
 *
 * <p>See <a href="https://s.apache.org/a-new-dofn">A new DoFn</a>.
 */
@AutoValue
public abstract class DoFnSignature {

  /** Class of the original {@link DoFn} from which this signature was produced. */
  public abstract Class<? extends DoFn<?, ?>> fnClass();

  /** Whether this {@link DoFn} does a bounded amount of work per element. */
  public abstract PCollection.IsBounded isBoundedPerElement();

  /** Details about this {@link DoFn}'s {@link DoFn.ProcessElement} method. */
  public abstract ProcessElementMethod processElement();

  /** Details about the state cells that this {@link DoFn} declares. Immutable. */
  public abstract Map<String, StateDeclaration> stateDeclarations();

  /** Details about this {@link DoFn}'s {@link DoFn.StartBundle} method. */
  @Nullable
  public abstract BundleMethod startBundle();

  /** Details about this {@link DoFn}'s {@link DoFn.FinishBundle} method. */
  @Nullable
  public abstract BundleMethod finishBundle();

  /** Details about this {@link DoFn}'s {@link DoFn.Setup} method. */
  @Nullable
  public abstract LifecycleMethod setup();

  /** Details about this {@link DoFn}'s {@link DoFn.Teardown} method. */
  @Nullable
  public abstract LifecycleMethod teardown();

  /** Timer declarations present on the {@link DoFn} class. Immutable. */
  public abstract Map<String, TimerDeclaration> timerDeclarations();

  /** Details about this {@link DoFn}'s {@link DoFn.GetInitialRestriction} method. */
  @Nullable
  public abstract GetInitialRestrictionMethod getInitialRestriction();

  /** Details about this {@link DoFn}'s {@link DoFn.SplitRestriction} method. */
  @Nullable
  public abstract SplitRestrictionMethod splitRestriction();

  /** Details about this {@link DoFn}'s {@link DoFn.GetRestrictionCoder} method. */
  @Nullable
  public abstract GetRestrictionCoderMethod getRestrictionCoder();

  /** Details about this {@link DoFn}'s {@link DoFn.NewTracker} method. */
  @Nullable
  public abstract NewTrackerMethod newTracker();

  /** Details about this {@link DoFn}'s {@link DoFn.OnTimer} methods. */
  @Nullable
  public abstract Map<String, OnTimerMethod> onTimerMethods();

  /** @deprecated use {@link #usesState()}, it's cleaner */
  @Deprecated
  public boolean isStateful() {
    // Delegate so the deprecated alias can never drift from usesState().
    return usesState();
  }

  /** Whether the {@link DoFn} described by this signature uses state. */
  public boolean usesState() {
    return !stateDeclarations().isEmpty();
  }

  /** Whether the {@link DoFn} described by this signature uses timers. */
  public boolean usesTimers() {
    return !timerDeclarations().isEmpty();
  }

  static Builder builder() {
    return new AutoValue_DoFnSignature.Builder();
  }

  /** Builder for a {@link DoFnSignature}, assembled incrementally by the analyzer. */
  @AutoValue.Builder
  abstract static class Builder {
    abstract Builder setFnClass(Class<? extends DoFn<?, ?>> fnClass);
    abstract Builder setIsBoundedPerElement(PCollection.IsBounded isBounded);
    abstract Builder setProcessElement(ProcessElementMethod processElement);
    abstract Builder setStartBundle(BundleMethod startBundle);
    abstract Builder setFinishBundle(BundleMethod finishBundle);
    abstract Builder setSetup(LifecycleMethod setup);
    abstract Builder setTeardown(LifecycleMethod teardown);
    abstract Builder setGetInitialRestriction(GetInitialRestrictionMethod getInitialRestriction);
    abstract Builder setSplitRestriction(SplitRestrictionMethod splitRestriction);
    abstract Builder setGetRestrictionCoder(GetRestrictionCoderMethod getRestrictionCoder);
    abstract Builder setNewTracker(NewTrackerMethod newTracker);
    abstract Builder setStateDeclarations(Map<String, StateDeclaration> stateDeclarations);
    abstract Builder setTimerDeclarations(Map<String, TimerDeclaration> timerDeclarations);
    abstract Builder setOnTimerMethods(Map<String, OnTimerMethod> onTimerMethods);
    abstract DoFnSignature build();
  }

  /** A method delegated to an annotated method of an underlying {@link DoFn}. */
  public interface DoFnMethod {
    /** The annotated method itself. */
    Method targetMethod();
  }

  /**
   * A method delegated to an annotated method of an underlying {@link DoFn} that accepts a dynamic
   * list of parameters.
   */
  public interface MethodWithExtraParameters extends DoFnMethod {
    /**
     * Types of optional parameters of the annotated method, in the order they appear.
     *
     * <p>Validation that these are allowed is external to this class.
     */
    List<Parameter> extraParameters();

    /** The type of window expected by this method, if any. */
    @Nullable
    TypeDescriptor<? extends BoundedWindow> windowT();
  }

  /** A descriptor for an optional parameter of the {@link DoFn.ProcessElement} method. */
  public abstract static class Parameter {

    // Private as no extensions other than those nested here are permitted
    private Parameter() {}

    /**
     * Performs case analysis on this {@link Parameter}, processing it with the appropriate
     * {@link Cases#dispatch} case of the provided {@link Cases} object.
     */
    public <ResultT> ResultT match(Cases<ResultT> cases) {
      // This could be done with reflection, but since the number of cases is small and known,
      // they are simply inlined.
      if (this instanceof ContextParameter) {
        return cases.dispatch((ContextParameter) this);
      } else if (this instanceof ProcessContextParameter) {
        return cases.dispatch((ProcessContextParameter) this);
      } else if (this instanceof OnTimerContextParameter) {
        return cases.dispatch((OnTimerContextParameter) this);
      } else if (this instanceof WindowParameter) {
        return cases.dispatch((WindowParameter) this);
      } else if (this instanceof RestrictionTrackerParameter) {
        return cases.dispatch((RestrictionTrackerParameter) this);
      } else if (this instanceof StateParameter) {
        return cases.dispatch((StateParameter) this);
      } else if (this instanceof TimerParameter) {
        return cases.dispatch((TimerParameter) this);
      } else {
        throw new IllegalStateException(
            String.format("Attempt to case match on unknown %s subclass %s",
                Parameter.class.getCanonicalName(), this.getClass().getCanonicalName()));
      }
    }

    /** An interface for destructuring a {@link Parameter}. */
    public interface Cases<ResultT> {
      ResultT dispatch(ContextParameter p);
      ResultT dispatch(ProcessContextParameter p);
      ResultT dispatch(OnTimerContextParameter p);
      ResultT dispatch(WindowParameter p);
      ResultT dispatch(RestrictionTrackerParameter p);
      ResultT dispatch(StateParameter p);
      ResultT dispatch(TimerParameter p);

      /** A base class for a visitor with a default method for cases it is not interested in. */
      abstract class WithDefault<ResultT> implements Cases<ResultT> {

        /** Handles every case not overridden by a subclass. */
        protected abstract ResultT dispatchDefault(Parameter p);

        @Override
        public ResultT dispatch(ContextParameter p) {
          return dispatchDefault(p);
        }

        @Override
        public ResultT dispatch(ProcessContextParameter p) {
          return dispatchDefault(p);
        }

        @Override
        public ResultT dispatch(OnTimerContextParameter p) {
          return dispatchDefault(p);
        }

        @Override
        public ResultT dispatch(WindowParameter p) {
          return dispatchDefault(p);
        }

        @Override
        public ResultT dispatch(RestrictionTrackerParameter p) {
          return dispatchDefault(p);
        }

        @Override
        public ResultT dispatch(StateParameter p) {
          return dispatchDefault(p);
        }

        @Override
        public ResultT dispatch(TimerParameter p) {
          return dispatchDefault(p);
        }
      }
    }

    // These parameter descriptors carry no data, so a single shared instance of each suffices.
    private static final ContextParameter CONTEXT_PARAMETER =
        new AutoValue_DoFnSignature_Parameter_ContextParameter();
    private static final ProcessContextParameter PROCESS_CONTEXT_PARAMETER =
        new AutoValue_DoFnSignature_Parameter_ProcessContextParameter();
    private static final OnTimerContextParameter ON_TIMER_CONTEXT_PARAMETER =
        new AutoValue_DoFnSignature_Parameter_OnTimerContextParameter();

    /** Returns a {@link ContextParameter}. */
    public static ContextParameter context() {
      return CONTEXT_PARAMETER;
    }

    /** Returns a {@link ProcessContextParameter}. */
    public static ProcessContextParameter processContext() {
      return PROCESS_CONTEXT_PARAMETER;
    }

    /** Returns a {@link OnTimerContextParameter}. */
    public static OnTimerContextParameter onTimerContext() {
      return ON_TIMER_CONTEXT_PARAMETER;
    }

    /** Returns a {@link WindowParameter}. */
    public static WindowParameter boundedWindow(TypeDescriptor<? extends BoundedWindow> windowT) {
      return new AutoValue_DoFnSignature_Parameter_WindowParameter(windowT);
    }

    /** Returns a {@link RestrictionTrackerParameter}. */
    public static RestrictionTrackerParameter restrictionTracker(TypeDescriptor<?> trackerT) {
      return new AutoValue_DoFnSignature_Parameter_RestrictionTrackerParameter(trackerT);
    }

    /** Returns a {@link StateParameter} referring to the given {@link StateDeclaration}. */
    public static StateParameter stateParameter(StateDeclaration decl) {
      return new AutoValue_DoFnSignature_Parameter_StateParameter(decl);
    }

    /** Returns a {@link TimerParameter} referring to the given {@link TimerDeclaration}. */
    public static TimerParameter timerParameter(TimerDeclaration decl) {
      return new AutoValue_DoFnSignature_Parameter_TimerParameter(decl);
    }

    /**
     * Descriptor for a {@link Parameter} of type {@link DoFn.Context}.
     *
     * <p>All such descriptors are equal.
     */
    @AutoValue
    public abstract static class ContextParameter extends Parameter {
      ContextParameter() {}
    }

    /**
     * Descriptor for a {@link Parameter} of type {@link DoFn.ProcessContext}.
     *
     * <p>All such descriptors are equal.
     */
    @AutoValue
    public abstract static class ProcessContextParameter extends Parameter {
      ProcessContextParameter() {}
    }

    /**
     * Descriptor for a {@link Parameter} of type {@link DoFn.OnTimerContext}.
     *
     * <p>All such descriptors are equal.
     */
    @AutoValue
    public abstract static class OnTimerContextParameter extends Parameter {
      OnTimerContextParameter() {}
    }

    /**
     * Descriptor for a {@link Parameter} of type {@link BoundedWindow}.
     *
     * <p>Two such descriptors are equal if they carry the same {@link #windowT()}.
     */
    @AutoValue
    public abstract static class WindowParameter extends Parameter {
      WindowParameter() {}

      public abstract TypeDescriptor<? extends BoundedWindow> windowT();
    }

    /**
     * Descriptor for a {@link Parameter} of a subclass of {@link RestrictionTracker}.
     *
     * <p>Two such descriptors are equal if they carry the same {@link #trackerT()}.
     */
    @AutoValue
    public abstract static class RestrictionTrackerParameter extends Parameter {
      // Package visible for AutoValue
      RestrictionTrackerParameter() {}

      public abstract TypeDescriptor<?> trackerT();
    }

    /**
     * Descriptor for a {@link Parameter} of a subclass of {@link State}, with an id indicated by
     * its {@link StateId} annotation.
     *
     * <p>All descriptors for the same declared state are equal.
     */
    @AutoValue
    public abstract static class StateParameter extends Parameter {
      // Package visible for AutoValue
      StateParameter() {}

      public abstract StateDeclaration referent();
    }

    /**
     * Descriptor for a {@link Parameter} of type {@link Timer}, with an id indicated by
     * its {@link TimerId} annotation.
     */
    @AutoValue
    public abstract static class TimerParameter extends Parameter {
      // Package visible for AutoValue
      TimerParameter() {}

      public abstract TimerDeclaration referent();
    }
  }

  /** Describes a {@link DoFn.ProcessElement} method. */
  @AutoValue
  public abstract static class ProcessElementMethod implements MethodWithExtraParameters {
    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    /** Types of optional parameters of the annotated method, in the order they appear. */
    @Override
    public abstract List<Parameter> extraParameters();

    /** Concrete type of the {@link RestrictionTracker} parameter, if present. */
    @Nullable
    public abstract TypeDescriptor<?> trackerT();

    /** The window type used by this method, if any. */
    @Nullable
    public abstract TypeDescriptor<? extends BoundedWindow> windowT();

    static ProcessElementMethod create(
        Method targetMethod,
        List<Parameter> extraParameters,
        TypeDescriptor<?> trackerT,
        @Nullable TypeDescriptor<? extends BoundedWindow> windowT) {
      return new AutoValue_DoFnSignature_ProcessElementMethod(
          targetMethod, Collections.unmodifiableList(extraParameters), trackerT, windowT);
    }

    /**
     * Whether this {@link DoFn} observes - directly or indirectly - the window that an element
     * resides in.
     *
     * <p>{@link State} and {@link Timer} parameters indirectly observe the window, because
     * they are each scoped to a single window.
     */
    public boolean observesWindow() {
      return Iterables.any(
          extraParameters(),
          Predicates.or(
              Predicates.instanceOf(WindowParameter.class),
              Predicates.instanceOf(TimerParameter.class),
              Predicates.instanceOf(StateParameter.class)));
    }

    /**
     * Whether this {@link DoFn} is <a href="https://s.apache.org/splittable-do-fn">splittable</a>.
     */
    public boolean isSplittable() {
      return Iterables.any(
          extraParameters(), Predicates.instanceOf(RestrictionTrackerParameter.class));
    }
  }

  /** Describes a {@link DoFn.OnTimer} method. */
  @AutoValue
  public abstract static class OnTimerMethod implements MethodWithExtraParameters {

    /** The id on the method's {@link DoFn.TimerId} annotation. */
    public abstract String id();

    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    /** The window type used by this method, if any. */
    @Nullable
    public abstract TypeDescriptor<? extends BoundedWindow> windowT();

    /** Types of optional parameters of the annotated method, in the order they appear. */
    @Override
    public abstract List<Parameter> extraParameters();

    static OnTimerMethod create(
        Method targetMethod,
        String id,
        TypeDescriptor<? extends BoundedWindow> windowT,
        List<Parameter> extraParameters) {
      return new AutoValue_DoFnSignature_OnTimerMethod(
          id, targetMethod, windowT, Collections.unmodifiableList(extraParameters));
    }
  }

  /**
   * Describes a timer declaration; a field of type {@link TimerSpec} annotated with
   * {@link DoFn.TimerId}.
   */
  @AutoValue
  public abstract static class TimerDeclaration {
    public abstract String id();

    public abstract Field field();

    static TimerDeclaration create(String id, Field field) {
      return new AutoValue_DoFnSignature_TimerDeclaration(id, field);
    }
  }

  /** Describes a {@link DoFn.StartBundle} or {@link DoFn.FinishBundle} method. */
  @AutoValue
  public abstract static class BundleMethod implements DoFnMethod {
    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    static BundleMethod create(Method targetMethod) {
      return new AutoValue_DoFnSignature_BundleMethod(targetMethod);
    }
  }

  /**
   * Describes a state declaration; a field of type {@link StateSpec} annotated with
   * {@link DoFn.StateId}.
   */
  @AutoValue
  public abstract static class StateDeclaration {
    public abstract String id();

    public abstract Field field();

    public abstract TypeDescriptor<? extends State> stateType();

    static StateDeclaration create(
        String id, Field field, TypeDescriptor<? extends State> stateType) {
      // The field may be private on the user's DoFn; make it readable by the runner up front.
      field.setAccessible(true);
      return new AutoValue_DoFnSignature_StateDeclaration(id, field, stateType);
    }
  }

  /** Describes a {@link DoFn.Setup} or {@link DoFn.Teardown} method. */
  @AutoValue
  public abstract static class LifecycleMethod implements DoFnMethod {
    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    static LifecycleMethod create(Method targetMethod) {
      return new AutoValue_DoFnSignature_LifecycleMethod(targetMethod);
    }
  }

  /** Describes a {@link DoFn.GetInitialRestriction} method. */
  @AutoValue
  public abstract static class GetInitialRestrictionMethod implements DoFnMethod {
    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    /** Type of the returned restriction. */
    public abstract TypeDescriptor<?> restrictionT();

    static GetInitialRestrictionMethod create(Method targetMethod, TypeDescriptor<?> restrictionT) {
      return new AutoValue_DoFnSignature_GetInitialRestrictionMethod(targetMethod, restrictionT);
    }
  }

  /** Describes a {@link DoFn.SplitRestriction} method. */
  @AutoValue
  public abstract static class SplitRestrictionMethod implements DoFnMethod {
    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    /** Type of the restriction taken and returned. */
    public abstract TypeDescriptor<?> restrictionT();

    static SplitRestrictionMethod create(Method targetMethod, TypeDescriptor<?> restrictionT) {
      return new AutoValue_DoFnSignature_SplitRestrictionMethod(targetMethod, restrictionT);
    }
  }

  /** Describes a {@link DoFn.NewTracker} method. */
  @AutoValue
  public abstract static class NewTrackerMethod implements DoFnMethod {
    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    /** Type of the input restriction. */
    public abstract TypeDescriptor<?> restrictionT();

    /** Type of the returned {@link RestrictionTracker}. */
    public abstract TypeDescriptor<?> trackerT();

    static NewTrackerMethod create(
        Method targetMethod, TypeDescriptor<?> restrictionT, TypeDescriptor<?> trackerT) {
      return new AutoValue_DoFnSignature_NewTrackerMethod(targetMethod, restrictionT, trackerT);
    }
  }

  /** Describes a {@link DoFn.GetRestrictionCoder} method. */
  @AutoValue
  public abstract static class GetRestrictionCoderMethod implements DoFnMethod {
    /** The annotated method itself. */
    @Override
    public abstract Method targetMethod();

    /** Type of the returned {@link Coder}. */
    public abstract TypeDescriptor<?> coderT();

    static GetRestrictionCoderMethod create(Method targetMethod, TypeDescriptor<?> coderT) {
      return new AutoValue_DoFnSignature_GetRestrictionCoderMethod(targetMethod, coderT);
    }
  }
}
/* * Copyright 2013-2014 Richard M. Hightower * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * __________ _____ __ .__ * \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____ * | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\ * | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ > * |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ / * \/ \/ \/ \/ \/ \//_____/ * ____. ___________ _____ ______________.___. * | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | | * | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | | * /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ | * \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______| * \/ \/ \/ \/ \/ \/ */ package org.boon.template; /* * Copyright 2013-2014 Richard M. Hightower * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* * __________ _____ __ .__ * \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____ * | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\ * | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ > * |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ / * \/ \/ \/ \/ \/ \//_____/ * ____. ___________ _____ ______________.___. * | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | | * | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | | * /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ | * \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______| * \/ \/ \/ \/ \/ \/ */ import org.boon.Str; import org.boon.core.reflection.FastStringUtils; import org.boon.primitive.CharScanner; import org.boon.template.support.Token; import org.boon.template.support.TokenTypes; import java.util.ArrayList; import java.util.List; import static org.boon.Boon.putl; import static org.boon.Boon.puts; /** * @author Rick Hightower * <p/> * This supports handlebar templates, freemarker-like jstl and JSTL-like templates. 
* <p/> * <pre> * {{#if foo}} * * {{foo}} //Escaped * * {{{foo}}} //Unescaped * {{/if foo}} * * {{#with foo}} * * {{foo}} //Escaped * {{{foo}}} /Unescaped * * {{/with}} * * {{#each fruits}} * {{this}} * {{/each}} * </pre> * <p/> * see http://www.tutorialspoint.com/jsp/jsp_standard_tag_library.htm * see http://www.tutorialspoint.com/jsp/jstl_core_foreach_tag.htm * see http://docs.oracle.com/javaee/5/jstl/1.1/docs/tlddocs/c/forEach.html * <pre> * <c:if test="foo"> * * ${fn:escapeXml(foo)} //Escaped * ${foo} //Unescaped * * </c:if> * * <c:with item="foo"> * * ${fn:escapeXml(foo)} //Escaped * ${foo} //Unescaped * * </c:with> * * <c:forEach items="fruits"> * * ${item} * * </c:forEach> * * <c:forEach var="window" items="${windows}"> * <c:out value="${window}"/> * </c:forEach> * * </pre> * <p/> * Freemarker like * <pre> * <#if foo> * * ${fn:escapeXml(foo)} //Escaped * ${foo} //Unescaped * * </#if> * * <#with foo> * * ${fn:escapeXml(foo)} //Escaped * ${foo} //Unescaped * * </#with> * * <#list fruits as fruit> * * ${fruit} * * </#list> * * * <#list fruits> * * ${item} * * </#list> * * * </pre> * <p/> * Freemarker has an expression language for if and such. * There are no plans to add a full expression langauge to this jstl. * The same goes for JSTL-like. Simple boolean expressions and calls to functions but no logic. * <p/> * Velocity-like * <pre> * <ul> * #foreach( $product in $allProducts ) * <li>$product</li> * #end * </ul> * * #if( $foo ) * <strong>Velocity!</strong> * #end * </pre> * <p/> * Velocity and Freemarker have if and else, else if. * Handlebars has if / else. Handlebars also has unless. * JSTL just has if. * <p/> * Velocity, JSTL and Freemarker have full expression languages. * Handlebars does not. * <p/> * The plan (currently) is for this templating to have no expressions. * You have property paths that are either true or false only, and you can call functions * that return true or false. Or rather true-y or falsey (null = false, empty list = false, etc.). 
 * Python style true or false which is more or less what handlebars/mustache do.
 * So this will be a stupid jstl, i.e., no logic other than true/false.
 * <p/>
 * Start of block characters
 * <pre>
 * Freemarker-like
 *
 * <#if =   '<#'
 *
 * Velocity-like
 *
 * #if =   '#'
 *
 * JSTL-like
 *
 * <c:if =  '<c:if'
 *
 * Mustache
 * {{#if  = '{{'
 *
 * </pre>
 * <p/>
 * <p/>
 * END of start block character
 * <pre>
 * Freemarker-like
 *
 * <#if blah> = '>'
 *           ^
 *           |
 *
 * Velocity-like
 *
 * #if<SPACE> = ' '
 *           ^
 *           |
 *
 * JSTL-like
 *
 * <c:if test="foo"> = '>'
 *                  ^
 *                  |
 *
 * Mustache
 * {{#if } = '>'
 *         ^
 *         |
 *
 *
 * </pre>
 * <p/>
 * This parser merely delivers up the stuff between start and stop.
 * Then there are handlers to further divide up the strings as the syntax varies a lot of how
 * expression of {{if}}, {{each}}, and {{with}} are handled.
 * <p/>
 * Start of expression characters
 * <pre>
 *
 * Freemarker-like
 *
 * ${
 *
 * Velocity-like
 *
 * $
 *
 * JSTL-like
 *
 * ${
 *
 * Mustache/Handlebar
 * {{
 *
 *
 * Mustache/Handlebar
 * {{{
 *
 * </pre>
 * <p/>
 * <p/>
 * End of expression characters
 * <pre>
 *
 * Freemarker-like
 *
 * }
 *
 * Velocity-like
 *
 * <SPACE>
 *
 * JSTL-like
 *
 * }
 *
 * Mustache/Handlebar
 * }}
 *
 *
 * Mustache/Handlebar (NOT HANDLED BY PARSER, HANDLED BY LOOKUP)
 * }}}
 *
 * </pre>
 * <p/>
 * Rather than treat mustache as having two expressions (tried that), I will treat it as one.
 * Then the lookupWithDefault mechanism will handle the other case.
 * <p/>
 * Velocity allows $vice and ${vice}maniac forms. Rather than handling two forms,
 * all parsers treat $vice as an expression. So that $foo is always an expression.
 * This would mean for mustache you would have $vice or {{vice}}maniac.
 * The caveat being that $expression handling is a flag so you can turn it off for handlebars by default.
 * <p/>
 * One of the goals is to have the ability to write scripts 100% compatible with Handlebars.
 * <p/>
 * There is not a similar goal with Velocity, JSTL, and Freemarker.
* You can only ever write scripts that are similar to JSTL not true JSTL scripts. * You can only ever write scripts that are similar to Velocity not true Velocity scripts. * You can only ever write scripts that are similar to Freemarker not true Freemarker scripts. * The goal is the ability to write scripts that are 100% Handlebar compatible and can be rendered by browser or Java. * <p/> * The goal is for BoonTemplate is to be a superset of Handlebars and always only a subset of JSTL, * Velocity and Freemarker. * <p/> * Order of importance: * <ol> * <li><Handlebar compliance</li>, * <li>JSTL style support</li>, * <li>Freemarker style support</li> * <li> and then Velocity style support</li> * </ol> * <p/> * Handlebars has the advantage of the expression, and commands start with the same character, which * will help with parsing. * <p/> * <code><pre> * <p/> * if (sameStart) { * look for start char of command and expression * } else { * look for start char of expression or command #NOTE this takes longer * } * </pre></code> * <p/> * Also we need to support comments. * <p/> * <pre> * Velocity like multiline * ##. * .## * * Freemarker like * <#-- * --> * * Handlebar like * {{! * }} * * JSTL like (I made this one up JSTL does not have comments JSP does. :) * <c:comment * > * </pre> * <p/> * Input: * char[] * <p/> * Output array of tokens in IndexOverlay style * <pre> * * Array item: TemplateToken (Block or Text or Expression or Comment, startIndex, stopIndex) * * </pre> * <p/> * There is no logic in this parser. Just an array of token positions. * It is up to BoonTemplate on how to interpret those tokens. 
*/ public class BoonCoreTemplateParser implements TemplateParser { char charArray[]; int index; char ch; private List<Token> tokenList = new ArrayList<>(); @Override public void parse(String string) { this.charArray = FastStringUtils.toCharArray(string); this.index = 0; tokenList.clear(); processLoop(); } private void processLoop() { Token text = Token.text(index, -1); for (; index < charArray.length; index++) { ch = charArray[index]; if (ch == '<') { if (CharScanner.matchChars(TokenTypes.COMMAND_START.jstlStyle(), index, this.charArray)) { text = textToken(text); index += TokenTypes.COMMAND_START.jstlStyle().length; handleCommand(); } } else if (ch == '$') { char ch1 = charArray[index + 1]; if (ch1 == '{') { if (CharScanner.matchChars(TokenTypes.EXPRESSION_START.jstlStyle(), index, this.charArray)) { text = textToken(text); index += TokenTypes.EXPRESSION_START.jstlStyle().length; handleCurlyExpression(); text = Token.text(index, -1); index--; } } else { text = textToken(text); index++; handleExpression(null); text = Token.text(index, -1); index--; } } else { if (text == null) { text = Token.text(index, -1); } } } if (text != null) { text.stop(charArray.length); this.tokenList.add(text); } } private void handleCurlyExpression() { int startIndex = index; index = CharScanner.findChars(TokenTypes.EXPRESSION_END.jstlStyle(), index, charArray); if (index > 0) { this.tokenList.add(Token.expression(startIndex, index)); index += TokenTypes.EXPRESSION_END.jstlStyle().length; } } private void handleExpression(String term) { int startIndex = index; index = CharScanner.findWhiteSpace(index, charArray); if (term != null) { if (index == -1) { index = startIndex; index = CharScanner.findChars(term.toCharArray(), index, charArray); } } if (index == -1) { index = charArray.length; } this.tokenList.add(Token.expression(startIndex, index)); } private void handleCommand() { int startIndex = index; boolean noBody = false; index = 
CharScanner.findChars(TokenTypes.COMMAND_END_START.jstlStyle(), index, charArray); if (index == -1 ) { return; } int foundIndex = CharScanner.findChars(TokenTypes.COMMAND_START_TAG_END.jstlStyle(), index-1, charArray); if (foundIndex!=-1) { noBody = true; } if (noBody) { index--; } //Add this command start to the token list. this.tokenList.add(Token.commandStart(startIndex, index)); index += TokenTypes.COMMAND_END_START.jstlStyle().length; if (noBody) { tokenList.add(Token.commandBody(index, index)); return; } Token commandBody = Token.commandBody(index, index); tokenList.add(commandBody); Token text = Token.text(index, -1); for (; index< charArray.length; index++) { ch = charArray[index]; if (ch=='<') { if (CharScanner.matchChars(TokenTypes.COMMAND_START.jstlStyle(), index, this.charArray)) { text = textToken(text); index+= TokenTypes.COMMAND_START.jstlStyle().length; handleCommand(); } else if (CharScanner.matchChars(TokenTypes.COMMAND_START_END.jstlStyle(), index, this.charArray)) { text = textToken(text); commandBody.stop(index); index++; index = CharScanner.findChar('>', index, charArray); break; } } else if (ch=='$') { char ch1 = charArray[index+1]; if (ch1 == '{') { if (CharScanner.matchChars(TokenTypes.EXPRESSION_START.jstlStyle(), index, this.charArray)) { text = textToken(text); index += TokenTypes.EXPRESSION_START.jstlStyle().length; handleCurlyExpression(); text = Token.text(index, -1); index--; } } else { text = textToken(text); index++; handleExpression("</"); text = Token.text(index, -1); index--; } } else { if (text == null) { text = Token.text(index, -1); } } } if (commandBody.stop() == -1) { commandBody.stop(index); } if (text!=null) { text.stop(charArray.length); this.tokenList.add( text ); } } private Token textToken(Token text) { if (text != null) { text.stop(index); if (text.start() != text.stop()) { this.tokenList.add(text); } text = null; } return text; } @Override public List<Token> getTokenList() { return tokenList; } @Override public void 
displayTokens(String template) { for (Token token : this.getTokenList()) { puts("token", token, Str.slc(template, token.start(), token.stop())); } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class is used to store a list of StoreFamilyRevision for a column
 * family in zookeeper.
 *
 */
/**
 * Autogenerated by Thrift Compiler (0.7.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 */
package org.apache.hcatalog.hbase.snapshot.transaction.thrift;

import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// NOTE(review): Thrift-generated struct; prefer regenerating from the .thrift
// definition over hand-editing this file.
public class StoreFamilyRevisionList implements org.apache.thrift.TBase<StoreFamilyRevisionList, StoreFamilyRevisionList._Fields>, java.io.Serializable, Cloneable {
    // Wire-level descriptors for the struct and its single list field (id 1).
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StoreFamilyRevisionList");
    private static final org.apache.thrift.protocol.TField REVISION_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("revisionList", org.apache.thrift.protocol.TType.LIST, (short) 1);

    public List<StoreFamilyRevision> revisionList; // required

    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
        REVISION_LIST((short) 1, "revisionList");

        // Lookup table from field name to enum constant, built once at class load.
        private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

        static {
            for (_Fields field : EnumSet.allOf(_Fields.class)) {
                byName.put(field.getFieldName(), field);
            }
        }

        /**
         * Find the _Fields constant that matches fieldId, or null if its not found.
         */
        public static _Fields findByThriftId(int fieldId) {
            switch (fieldId) {
                case 1: // REVISION_LIST
                    return REVISION_LIST;
                default:
                    return null;
            }
        }

        /**
         * Find the _Fields constant that matches fieldId, throwing an exception
         * if it is not found.
         */
        public static _Fields findByThriftIdOrThrow(int fieldId) {
            _Fields fields = findByThriftId(fieldId);
            if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
            return fields;
        }

        /**
         * Find the _Fields constant that matches name, or null if its not found.
         */
        public static _Fields findByName(String name) {
            return byName.get(name);
        }

        private final short _thriftId;
        private final String _fieldName;

        _Fields(short thriftId, String fieldName) {
            _thriftId = thriftId;
            _fieldName = fieldName;
        }

        public short getThriftFieldId() {
            return _thriftId;
        }

        public String getFieldName() {
            return _fieldName;
        }
    }

    // isset id assignments

    // Immutable field-metadata map registered with the Thrift runtime for
    // reflective access to this struct's schema.
    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;

    static {
        Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
        tmpMap.put(_Fields.REVISION_LIST, new org.apache.thrift.meta_data.FieldMetaData("revisionList", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
                new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, StoreFamilyRevision.class))));
        metaDataMap = Collections.unmodifiableMap(tmpMap);
        org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(StoreFamilyRevisionList.class, metaDataMap);
    }

    public StoreFamilyRevisionList() {
    }

    public StoreFamilyRevisionList(
        List<StoreFamilyRevision> revisionList) {
        this();
        this.revisionList = revisionList;
    }

    /**
     * Performs a deep copy on <i>other</i>.
     */
    public StoreFamilyRevisionList(StoreFamilyRevisionList other) {
        if (other.isSetRevisionList()) {
            List<StoreFamilyRevision> __this__revisionList = new ArrayList<StoreFamilyRevision>();
            for (StoreFamilyRevision other_element : other.revisionList) {
                __this__revisionList.add(new StoreFamilyRevision(other_element));
            }
            this.revisionList = __this__revisionList;
        }
    }

    public StoreFamilyRevisionList deepCopy() {
        return new StoreFamilyRevisionList(this);
    }

    @Override
    public void clear() {
        this.revisionList = null;
    }

    public int getRevisionListSize() {
        return (this.revisionList == null) ? 0 : this.revisionList.size();
    }

    public java.util.Iterator<StoreFamilyRevision> getRevisionListIterator() {
        return (this.revisionList == null) ? null : this.revisionList.iterator();
    }

    // Lazily creates the backing list on first append.
    public void addToRevisionList(StoreFamilyRevision elem) {
        if (this.revisionList == null) {
            this.revisionList = new ArrayList<StoreFamilyRevision>();
        }
        this.revisionList.add(elem);
    }

    public List<StoreFamilyRevision> getRevisionList() {
        return this.revisionList;
    }

    public StoreFamilyRevisionList setRevisionList(List<StoreFamilyRevision> revisionList) {
        this.revisionList = revisionList;
        return this;
    }

    public void unsetRevisionList() {
        this.revisionList = null;
    }

    /** Returns true if field revisionList is set (has been assigned a value) and false otherwise */
    public boolean isSetRevisionList() {
        return this.revisionList != null;
    }

    public void setRevisionListIsSet(boolean value) {
        if (!value) {
            this.revisionList = null;
        }
    }

    // NOTE(review): the unchecked cast below is inherent to this generated,
    // reflective-style setter API.
    public void setFieldValue(_Fields field, Object value) {
        switch (field) {
            case REVISION_LIST:
                if (value == null) {
                    unsetRevisionList();
                } else {
                    setRevisionList((List<StoreFamilyRevision>) value);
                }
                break;
        }
    }

    public Object getFieldValue(_Fields field) {
        switch (field) {
            case REVISION_LIST:
                return getRevisionList();
        }
        throw new IllegalStateException();
    }

    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
        if (field == null) {
            throw new IllegalArgumentException();
        }
        switch (field) {
            case REVISION_LIST:
                return isSetRevisionList();
        }
        throw new IllegalStateException();
    }

    @Override
    public boolean equals(Object that) {
        if (that == null)
            return false;
        if (that instanceof StoreFamilyRevisionList)
            return this.equals((StoreFamilyRevisionList) that);
        return false;
    }

    public boolean equals(StoreFamilyRevisionList that) {
        if (that == null)
            return false;

        // Two instances are equal when both have the field unset, or both have
        // it set to equal lists.
        boolean this_present_revisionList = true && this.isSetRevisionList();
        boolean that_present_revisionList = true && that.isSetRevisionList();
        if (this_present_revisionList || that_present_revisionList) {
            if (!(this_present_revisionList && that_present_revisionList))
                return false;
            if (!this.revisionList.equals(that.revisionList))
                return false;
        }

        return true;
    }

    // NOTE(review): generated Thrift 0.7 code returns a constant hash. This is
    // legal under the equals/hashCode contract but degrades hash-based
    // collections; if that matters, regenerate with a newer Thrift compiler
    // rather than hand-editing.
    @Override
    public int hashCode() {
        return 0;
    }

    public int compareTo(StoreFamilyRevisionList other) {
        if (!getClass().equals(other.getClass())) {
            return getClass().getName().compareTo(other.getClass().getName());
        }

        int lastComparison = 0;
        StoreFamilyRevisionList typedOther = (StoreFamilyRevisionList) other;

        // Unset sorts before set; otherwise delegate to the helper's list compare.
        lastComparison = Boolean.valueOf(isSetRevisionList()).compareTo(typedOther.isSetRevisionList());
        if (lastComparison != 0) {
            return lastComparison;
        }
        if (isSetRevisionList()) {
            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.revisionList, typedOther.revisionList);
            if (lastComparison != 0) {
                return lastComparison;
            }
        }
        return 0;
    }

    public _Fields fieldForId(int fieldId) {
        return _Fields.findByThriftId(fieldId);
    }

    /** Deserializes this struct from the given Thrift protocol, skipping unknown fields. */
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField field;
        iprot.readStructBegin();
        while (true) {
            field = iprot.readFieldBegin();
            if (field.type == org.apache.thrift.protocol.TType.STOP) {
                break;
            }
            switch (field.id) {
                case 1: // REVISION_LIST
                    if (field.type == org.apache.thrift.protocol.TType.LIST) {
                        {
                            org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
                            this.revisionList = new ArrayList<StoreFamilyRevision>(_list0.size);
                            for (int _i1 = 0; _i1 < _list0.size; ++_i1) {
                                StoreFamilyRevision _elem2; // required
                                _elem2 = new StoreFamilyRevision();
                                _elem2.read(iprot);
                                this.revisionList.add(_elem2);
                            }
                            iprot.readListEnd();
                        }
                    } else {
                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
                    }
                    break;
                default:
                    org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
            }
            iprot.readFieldEnd();
        }
        iprot.readStructEnd();

        // check for required fields of primitive type, which can't be checked in the validate method
        validate();
    }

    /** Serializes this struct to the given Thrift protocol; a null list writes no field. */
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
        validate();

        oprot.writeStructBegin(STRUCT_DESC);
        if (this.revisionList != null) {
            oprot.writeFieldBegin(REVISION_LIST_FIELD_DESC);
            {
                oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, this.revisionList.size()));
                for (StoreFamilyRevision _iter3 : this.revisionList) {
                    _iter3.write(oprot);
                }
                oprot.writeListEnd();
            }
            oprot.writeFieldEnd();
        }
        oprot.writeFieldStop();
        oprot.writeStructEnd();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("StoreFamilyRevisionList(");
        boolean first = true;

        sb.append("revisionList:");
        if (this.revisionList == null) {
            sb.append("null");
        } else {
            sb.append(this.revisionList);
        }
        first = false;
        sb.append(")");
        return sb.toString();
    }

    public void validate() throws org.apache.thrift.TException {
        // check for required fields
    }

    // Java serialization delegates to the Thrift compact protocol, wrapping
    // Thrift failures in IOException.
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
        try {
            write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
        } catch (org.apache.thrift.TException te) {
            throw new java.io.IOException(te);
        }
    }

    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
        try {
            read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
        } catch (org.apache.thrift.TException te) {
            throw new java.io.IOException(te);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.vault.rcp.impl; import java.util.Arrays; import java.util.List; import java.util.UUID; import javax.jcr.Credentials; import javax.jcr.Repository; import javax.jcr.RepositoryException; import javax.jcr.Session; import javax.jcr.SimpleCredentials; import org.apache.jackrabbit.spi2dav.ConnectionOptions; import org.apache.jackrabbit.vault.davex.DAVExRepositoryFactory; import org.apache.jackrabbit.vault.fs.api.PathFilterSet; import org.apache.jackrabbit.vault.fs.api.ProgressTrackerListener; import org.apache.jackrabbit.vault.fs.api.RepositoryAddress; import org.apache.jackrabbit.vault.fs.api.WorkspaceFilter; import org.apache.jackrabbit.vault.fs.config.ConfigurationException; import org.apache.jackrabbit.vault.fs.config.DefaultWorkspaceFilter; import org.apache.jackrabbit.vault.fs.filter.DefaultPathFilter; import org.apache.jackrabbit.vault.rcp.RcpTask; import org.apache.jackrabbit.vault.util.RepositoryCopier; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonCreator; import 
com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * {@code RcpTask}... a remote-copy task that copies a subtree from a source
 * repository (reached via DAVEx) into the destination repository on a
 * background thread. Jackson annotations allow task (de)serialization;
 * source credentials are excluded from serialization.
 */
@JsonAutoDetect(getterVisibility = JsonAutoDetect.Visibility.NONE,
    isGetterVisibility = JsonAutoDetect.Visibility.NONE,
    setterVisibility = JsonAutoDetect.Visibility.NONE,
    creatorVisibility = JsonAutoDetect.Visibility.ANY,
    fieldVisibility = JsonAutoDetect.Visibility.ANY)
public class RcpTaskImpl implements Runnable, RcpTask {

    /** default logger */
    private static final Logger log = LoggerFactory.getLogger(RcpTaskImpl.class);

    private final String id;

    private final RepositoryAddress src;

    @JsonIgnore
    private Credentials srcCreds;

    private final String dst;

    private final boolean recursive;

    private List<String> excludes;

    private transient Result result;

    private final RepositoryCopier rcp;

    private transient Thread thread;

    private transient Session srcSession;

    private transient Session dstSession;

    /** classloader used in the thread executing the task */
    private transient ClassLoader classLoader;

    WorkspaceFilter filter;

    private final ConnectionOptions connectionOptions;

    /** Immutable state/error pair reported by {@link #getResult()}. */
    private static final class ResultImpl implements RcpTask.Result {

        private final State state;

        private final Throwable throwable;

        public ResultImpl(State state) {
            this(state, null);
        }

        public ResultImpl(State state, Throwable throwable) {
            super();
            this.state = state;
            this.throwable = throwable;
        }

        @Override
        public State getState() {
            return state;
        }

        @Override
        public Throwable getThrowable() {
            return throwable;
        }
    }

    /**
     * Creates a task whose workspace filter is derived from a list of
     * exclude paths.
     *
     * @throws ConfigurationException if an exclude path is not a valid filter pattern
     */
    public RcpTaskImpl(ClassLoader classLoader, RepositoryAddress src, ConnectionOptions connectionOptions, Credentials srcCreds, String dst,
            String id, List<String> excludes, @Nullable Boolean recursive) throws ConfigurationException {
        this(classLoader, src, connectionOptions, srcCreds, dst, id, createFilterForExcludes(excludes), recursive);
        this.excludes = excludes;
    }

    /**
     * Canonical constructor (also used by Jackson). A null/empty {@code id}
     * gets a random UUID; a null {@code recursive} defaults to false.
     */
    @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
    public RcpTaskImpl(@JsonProperty("classLoader") ClassLoader dynLoader,
            @JsonProperty("source") RepositoryAddress src,
            @JsonProperty("connectionOptions") ConnectionOptions connectionOptions,
            @JsonProperty("srcCreds") Credentials srcCreds,
            @JsonProperty("destination") String dst,
            @JsonProperty("id") String id,
            @JsonProperty("filter") WorkspaceFilter srcFilter,
            @JsonProperty("recursive") @Nullable Boolean recursive) {
        this.src = src;
        this.dst = dst;
        this.srcCreds = srcCreds;
        this.id = id == null || id.length() == 0 ? UUID.randomUUID().toString() : id;
        this.recursive = recursive != null ? recursive : false;
        this.classLoader = dynLoader;
        this.connectionOptions = connectionOptions;
        this.filter = srcFilter;
        rcp = new RepositoryCopier();
        // Progress is forwarded to the logger.
        rcp.setTracker(new ProgressTrackerListener() {
            public void onMessage(Mode mode, String action, String path) {
                log.info("{} {}", action, path);
            }

            public void onError(Mode mode, String path, Exception e) {
                log.error("{} {}", path, e.toString());
            }
        });
        if (srcFilter != null) {
            rcp.setSourceFilter(srcFilter);
        }
        result = new ResultImpl(Result.State.NEW);
    }

    // additional constructor for editing existing tasks, all arguments are optional except the first one
    public RcpTaskImpl(@NotNull RcpTaskImpl oldTask, @Nullable RepositoryAddress src, @Nullable ConnectionOptions connectionOptions,
            @Nullable Credentials srcCreds, @Nullable String dst, @Nullable List<String> excludes, @Nullable WorkspaceFilter srcFilter,
            @Nullable Boolean recursive) {
        this.src = src != null ? src : oldTask.src;
        this.connectionOptions = connectionOptions != null ? connectionOptions : oldTask.connectionOptions;
        this.dst = dst != null ? dst : oldTask.dst;
        this.srcCreds = srcCreds != null ? srcCreds : oldTask.srcCreds;
        this.id = oldTask.id;
        this.recursive = recursive != null ? recursive : oldTask.recursive;
        this.excludes = excludes != null ? excludes : oldTask.excludes;
        this.filter = srcFilter != null ? srcFilter : oldTask.filter;
        // leave all other fields untouched
        this.classLoader = oldTask.classLoader;
        this.rcp = oldTask.rcp;
        this.result = oldTask.result;
    }

    /**
     * Builds a workspace filter with one filter set rooted at "/" carrying an
     * exclude for each given path.
     */
    private static WorkspaceFilter createFilterForExcludes(List<String> excludes) throws ConfigurationException {
        // could be done better
        DefaultWorkspaceFilter srcFilter = new DefaultWorkspaceFilter();
        PathFilterSet filterSet = new PathFilterSet("/");
        for (String path : excludes) {
            filterSet.addExclude(new DefaultPathFilter(path));
        }
        // BUGFIX: the filter set was built but never attached to the returned
        // workspace filter, so all excludes were silently ignored.
        srcFilter.add(filterSet);
        return srcFilter;
    }

    public void setClassLoader(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public RepositoryCopier getRcp() {
        return rcp;
    }

    /**
     * Aborts the copier and waits (up to 3 x 10s) for the worker thread to die,
     * interrupting it if needed, then logs out both sessions. Always returns true.
     */
    @Override
    public boolean stop() {
        // wait for thread
        if (result.getState() != Result.State.STOPPED && result.getState() != Result.State.STOPPING
                && result.getState() != Result.State.NEW) {
            rcp.abort();
            int cnt = 3;
            while (thread != null && thread.isAlive() && cnt-- > 0) {
                result = new ResultImpl(Result.State.STOPPING);
                log.info("Stopping task {}...", id);
                try {
                    thread.join(10000);
                } catch (InterruptedException e) {
                    log.error("Error while waiting for thread: " + thread.getName(), e);
                    thread.interrupt();
                }
                if (thread.isAlive()) {
                    // try to interrupt the thread
                    thread.interrupt();
                }
            }
            result = new ResultImpl(Result.State.STOPPED);
            thread = null;
            if (srcSession != null) {
                srcSession.logout();
                srcSession = null;
            }
            if (dstSession != null) {
                dstSession.logout();
                dstSession = null;
            }
            log.info("Stopping task {}...done", id);
        }
        return true;
    }

    /**
     * Opens source and destination sessions and launches the worker thread.
     *
     * @param session destination session to impersonate
     * @throws RepositoryException   if either session cannot be established
     * @throws IllegalStateException if the task is already running or stopping
     */
    @Override
    public boolean start(Session session) throws RepositoryException {
        if (result.getState() == Result.State.RUNNING || result.getState() == Result.State.STOPPING) {
            throw new IllegalStateException("Unable to start task " + id + ". wrong state = " + result.getState());
        }
        // clone session
        dstSession = session.impersonate(new SimpleCredentials(session.getUserID(), new char[0]));
        // The task classloader must be active while connecting to the source.
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(classLoader);
        try {
            srcSession = getSourceSession(src);
        } finally {
            Thread.currentThread().setContextClassLoader(oldLoader);
        }
        thread = new Thread(this, "Vault RCP Task - " + id);
        thread.setContextClassLoader(classLoader);
        thread.start();
        return true;
    }

    /** Creates a DAVEx repository for {@code src} and logs in (workspace-aware). */
    private Session getSourceSession(RepositoryAddress src) throws RepositoryException {
        DAVExRepositoryFactory factory = new DAVExRepositoryFactory();
        Repository srcRepo;
        try {
            srcRepo = factory.createRepository(src, connectionOptions);
        } catch (RepositoryException e) {
            log.error("Error while retrieving src repository {}: {}", src, e.toString());
            throw e;
        }
        try {
            String wsp = src.getWorkspace();
            if (wsp == null) {
                return srcRepo.login(srcCreds);
            } else {
                return srcRepo.login(srcCreds, wsp);
            }
        } catch (RepositoryException e) {
            log.error("Error while logging in src repository {}: {}", src, e.toString());
            throw e;
        }
    }

    /** Worker body: runs the copy and records the terminal state (with any error). */
    public void run() {
        result = new ResultImpl(Result.State.RUNNING);
        log.info("Starting repository copy task id={}. From {} to {}.", new Object[] { id, src.toString(), dst });
        try {
            rcp.copy(srcSession, src.getPath(), dstSession, dst, recursive);
            result = new ResultImpl(Result.State.ENDED);
        } catch (Throwable e) {
            // Catch Throwable so the task always reaches a terminal state.
            log.error("Error while executing RCP task {}", getId(), e);
            result = new ResultImpl(Result.State.ENDED, e);
        }
        // todo: notify manager that we ended.
    }

    @Override
    public Result getResult() {
        return result;
    }

    @Override
    public RepositoryAddress getSource() {
        return src;
    }

    @Override
    public ConnectionOptions getConnectionOptions() {
        return connectionOptions;
    }

    Credentials getSourceCredentials() {
        return srcCreds;
    }

    public void setSourceCredentials(Credentials srcCreds) {
        this.srcCreds = srcCreds;
    }

    @Override
    public String getDestination() {
        return dst;
    }

    @Override
    public boolean isRecursive() {
        return recursive;
    }

    @Override
    public List<String> getExcludes() {
        return excludes;
    }

    @Override
    public WorkspaceFilter getFilter() {
        return filter;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((dst == null) ? 0 : dst.hashCode());
        result = prime * result + ((excludes == null) ? 0 : excludes.hashCode());
        result = prime * result + ((filter == null) ? 0 : filter.hashCode());
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        result = prime * result + (recursive ? 1231 : 1237);
        result = prime * result + ((src == null) ? 0 : src.hashCode());
        result = prime * result + ((srcCreds == null) ? 0 : srcCreds.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        RcpTaskImpl other = (RcpTaskImpl) obj;
        if (dst == null) {
            if (other.dst != null)
                return false;
        } else if (!dst.equals(other.dst))
            return false;
        if (excludes == null) {
            if (other.excludes != null)
                return false;
        } else if (!excludes.equals(other.excludes))
            return false;
        if (!areFiltersEqual(filter, other.filter)) {
            return false;
        }
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        if (recursive != other.recursive)
            return false;
        if (src == null) {
            if (other.src != null)
                return false;
        } else if (!src.equals(other.src))
            return false;
        if (!areCredentialsEqual(srcCreds, other.srcCreds)) {
            return false;
        }
        // equals for RCP
        if (!areRepositoryCopiersEqual(rcp, other.rcp)) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "RcpTaskImpl [" + (id != null ? "id=" + id + ", " : "") + (src != null ? "src=" + src + ", " : "")
                + (srcCreds != null ? "srcCreds=" + srcCreds + ", " : "") + (dst != null ? "dst=" + dst + ", " : "") + "recursive="
                + recursive + ", " + (excludes != null ? "excludes=" + excludes + ", " : "")
                + (filter != null ? "filter=" + filter.getSourceAsString() + ", " : "")
                + (rcp != null ? "rcp=" + repositoryCopierToString(rcp) + ", " : "") + "]";
    }

    /** @param credentials
     * @return */
    static boolean areCredentialsEqual(Credentials credentials, Credentials otherCredentials) {
        if (credentials == null || otherCredentials == null) {
            if (otherCredentials != null || credentials != null) {
                return false;
            }
        } else {
            if (credentials.getClass() != otherCredentials.getClass()) {
                return false;
            }
            // Only SimpleCredentials can be compared field by field.
            if (!(credentials instanceof SimpleCredentials)) {
                throw new IllegalArgumentException("Only equality check for SimpleCredentials supported!");
            }
            SimpleCredentials simpleCredentials = SimpleCredentials.class.cast(credentials);
            SimpleCredentials simpleOtherCredentials = SimpleCredentials.class.cast(otherCredentials);
            if (!Arrays.equals(simpleCredentials.getPassword(), simpleOtherCredentials.getPassword())) {
                return false;
            }
            if (!simpleCredentials.getUserID().equals(simpleOtherCredentials.getUserID())) {
                return false;
            }
            if (!Arrays.equals(simpleCredentials.getAttributeNames(), simpleOtherCredentials.getAttributeNames())) {
                return false;
            }
            for (String attributeName : simpleCredentials.getAttributeNames()) {
                if (!simpleCredentials.getAttribute(attributeName).equals(simpleOtherCredentials.getAttribute(attributeName))) {
                    return false;
                }
            }
        }
        return true;
    }

    /** Cannot rely on RepositoryCopier.equals() as not implemented in older versions of FileVault */
    static boolean areRepositoryCopiersEqual(RepositoryCopier rcp, RepositoryCopier otherRcp) {
        if (rcp == null || otherRcp == null) {
            if (otherRcp != null || rcp != null) {
                return false;
            }
        } else {
            if (rcp.getBatchSize() != otherRcp.getBatchSize()) {
                return false;
            }
            if (rcp.getThrottle() != otherRcp.getThrottle()) {
                return false;
            }
            if (rcp.isOnlyNewer() != otherRcp.isOnlyNewer()) {
                return false;
            }
            if (rcp.isUpdate() != otherRcp.isUpdate()) {
                return false;
            }
            if (rcp.isNoOrdering() != otherRcp.isNoOrdering()) {
                return false;
            }
        }
        return true;
    }

    /** Cannot rely on RepositoryCopier.equals() as not implemented in older versions of FileVault
     */
    static boolean areFiltersEqual(WorkspaceFilter filter, WorkspaceFilter otherFilter) {
        if (filter == null || otherFilter == null) {
            if (otherFilter != null || filter != null) {
                return false;
            }
        } else {
            if (!filter.getSourceAsString().equals(otherFilter.getSourceAsString())) {
                return false;
            }
        }
        return true;
    }

    /** Stable textual form of the copier's tunable settings (for {@link #toString()}). */
    static String repositoryCopierToString(RepositoryCopier rcp) {
        return "RepositoryCopier [batchSize=" + rcp.getBatchSize() + ", onlyNewer=" + rcp.isOnlyNewer() + ", update=" + rcp.isUpdate()
                + ", noOrdering=" + rcp.isNoOrdering() + ", throttle=" + rcp.getThrottle() + "]";
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tephra.persist; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.primitives.Longs; import it.unimi.dsi.fastutil.longs.LongArrayList; import org.apache.hadoop.conf.Configuration; import org.apache.tephra.ChangeId; import org.apache.tephra.Transaction; import org.apache.tephra.TransactionConflictException; import org.apache.tephra.TransactionManager; import org.apache.tephra.TransactionType; import org.apache.tephra.TxConstants; import org.apache.tephra.metrics.TxMetricsCollector; import org.apache.tephra.util.TransactionEditUtil; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Random; import java.util.Set; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * Commons tests to run 
against the {@link TransactionStateStorage} implementations. */ public abstract class AbstractTransactionStateStorageTest { private static final Logger LOG = LoggerFactory.getLogger(AbstractTransactionStateStorageTest.class); private static Random random = new Random(); protected abstract Configuration getConfiguration(String testName) throws IOException; protected abstract AbstractTransactionStateStorage getStorage(Configuration conf); @Test public void testSnapshotPersistence() throws Exception { Configuration conf = getConfiguration("testSnapshotPersistence"); TransactionSnapshot snapshot = createRandomSnapshot(); TransactionStateStorage storage = getStorage(conf); try { storage.startAndWait(); storage.writeSnapshot(snapshot); TransactionSnapshot readSnapshot = storage.getLatestSnapshot(); assertNotNull(readSnapshot); assertEquals(snapshot, readSnapshot); } finally { storage.stopAndWait(); } } @Test public void testLogWriteAndRead() throws Exception { Configuration conf = getConfiguration("testLogWriteAndRead"); // create some random entries List<TransactionEdit> edits = TransactionEditUtil.createRandomEdits(100); TransactionStateStorage storage = getStorage(conf); try { long now = System.currentTimeMillis(); storage.startAndWait(); TransactionLog log = storage.createLog(now); for (TransactionEdit edit : edits) { log.append(edit); } log.close(); Collection<TransactionLog> logsToRead = storage.getLogsSince(now); // should only be our one log assertNotNull(logsToRead); assertEquals(1, logsToRead.size()); TransactionLogReader logReader = logsToRead.iterator().next().getReader(); assertNotNull(logReader); List<TransactionEdit> readEdits = Lists.newArrayListWithExpectedSize(edits.size()); TransactionEdit nextEdit; while ((nextEdit = logReader.next()) != null) { readEdits.add(nextEdit); } logReader.close(); assertEquals(edits.size(), readEdits.size()); for (int i = 0; i < edits.size(); i++) { LOG.info("Checking edit " + i); assertEquals(edits.get(i), 
readEdits.get(i)); } } finally { storage.stopAndWait(); } } @Test public void testTransactionManagerPersistence() throws Exception { Configuration conf = getConfiguration("testTransactionManagerPersistence"); conf.setInt(TxConstants.Manager.CFG_TX_CLEANUP_INTERVAL, 0); // no cleanup thread // start snapshot thread, but with long enough interval so we only get snapshots on shutdown conf.setInt(TxConstants.Manager.CFG_TX_SNAPSHOT_INTERVAL, 600); TransactionStateStorage storage = null; TransactionStateStorage storage2 = null; TransactionStateStorage storage3 = null; try { storage = getStorage(conf); TransactionManager txManager = new TransactionManager(conf, storage, new TxMetricsCollector()); txManager.startAndWait(); // TODO: replace with new persistence tests final byte[] a = { 'a' }; final byte[] b = { 'b' }; // Start and invalidate a transaction Transaction invalid = txManager.startShort("clientTx"); txManager.invalidate(invalid.getTransactionId()); // start a tx1, add a change A and commit Transaction tx1 = txManager.startShort("client1"); txManager.canCommit(tx1.getTransactionId(), Collections.singleton(a)); txManager.commit(tx1.getTransactionId(), tx1.getWritePointer()); // start a tx2 and add a change B Transaction tx2 = txManager.startShort("client2"); txManager.canCommit(tx2.getTransactionId(), Collections.singleton(b)); // start a tx3 Transaction tx3 = txManager.startShort("client3"); // restart txManager.stopAndWait(); TransactionSnapshot origState = txManager.getCurrentState(); LOG.info("Orig state: " + origState); Thread.sleep(100); // starts a new tx manager storage2 = getStorage(conf); txManager = new TransactionManager(conf, storage2, new TxMetricsCollector()); txManager.startAndWait(); // check that the reloaded state matches the old TransactionSnapshot newState = txManager.getCurrentState(); LOG.info("New state: " + newState); assertEquals(origState, newState); // Verify that the invalid transaction list matches Transaction checkTx = 
txManager.startShort(); Assert.assertEquals(origState.getInvalid(), Longs.asList(checkTx.getInvalids())); txManager.abort(checkTx); txManager.abort(invalid); // commit tx2 txManager.commit(tx2.getTransactionId(), tx2.getWritePointer()); // start another transaction, must be greater than tx3 Transaction tx4 = txManager.startShort(); Assert.assertTrue(tx4.getTransactionId() > tx3.getTransactionId()); // tx1 must be visble from tx2, but tx3 and tx4 must not Assert.assertTrue(tx2.isVisible(tx1.getTransactionId())); Assert.assertFalse(tx2.isVisible(tx3.getTransactionId())); Assert.assertFalse(tx2.isVisible(tx4.getTransactionId())); // add same change for tx3 try { txManager.canCommit(tx3.getTransactionId(), Collections.singleton(b)); Assert.fail("canCommit() should have failed"); } catch (TransactionConflictException e) { // expected } // check visibility with new xaction Transaction tx5 = txManager.startShort(); Assert.assertTrue(tx5.isVisible(tx1.getTransactionId())); Assert.assertTrue(tx5.isVisible(tx2.getTransactionId())); Assert.assertFalse(tx5.isVisible(tx3.getTransactionId())); Assert.assertFalse(tx5.isVisible(tx4.getTransactionId())); // can commit tx3? 
txManager.abort(tx3); txManager.abort(tx4); txManager.abort(tx5); // start new tx and verify its exclude list is empty Transaction tx6 = txManager.startShort(); Assert.assertFalse(tx6.hasExcludes()); txManager.abort(tx6); // now start 5 x claim size transactions Transaction tx = txManager.startShort(); for (int i = 1; i < 50; i++) { tx = txManager.startShort(); } origState = txManager.getCurrentState(); Thread.sleep(100); // simulate crash by starting a new tx manager without a stopAndWait storage3 = getStorage(conf); txManager = new TransactionManager(conf, storage3, new TxMetricsCollector()); txManager.startAndWait(); // verify state again matches (this time should include WAL replay) newState = txManager.getCurrentState(); assertEquals(origState, newState); // get a new transaction and verify it is greater Transaction txAfter = txManager.startShort(); Assert.assertTrue(txAfter.getTransactionId() > tx.getTransactionId()); } finally { if (storage != null) { storage.stopAndWait(); } if (storage2 != null) { storage2.stopAndWait(); } if (storage3 != null) { storage3.stopAndWait(); } } } /** * Tests whether the committed set is advanced properly on WAL replay. 
*/ @Test public void testCommittedSetClearing() throws Exception { Configuration conf = getConfiguration("testCommittedSetClearing"); conf.setInt(TxConstants.Manager.CFG_TX_CLEANUP_INTERVAL, 0); // no cleanup thread conf.setInt(TxConstants.Manager.CFG_TX_SNAPSHOT_INTERVAL, 0); // no periodic snapshots TransactionStateStorage storage1 = null; TransactionStateStorage storage2 = null; try { storage1 = getStorage(conf); TransactionManager txManager = new TransactionManager (conf, storage1, new TxMetricsCollector()); txManager.startAndWait(); // TODO: replace with new persistence tests final byte[] a = { 'a' }; final byte[] b = { 'b' }; // start a tx1, add a change A and commit Transaction tx1 = txManager.startShort(); txManager.canCommit(tx1.getTransactionId(), Collections.singleton(a)); txManager.commit(tx1.getTransactionId(), tx1.getWritePointer()); // start a tx2 and add a change B Transaction tx2 = txManager.startShort(); txManager.canCommit(tx2.getTransactionId(), Collections.singleton(b)); // start a tx3 Transaction tx3 = txManager.startShort(); TransactionSnapshot origState = txManager.getCurrentState(); LOG.info("Orig state: " + origState); // simulate a failure by starting a new tx manager without stopping first storage2 = getStorage(conf); txManager = new TransactionManager(conf, storage2, new TxMetricsCollector()); txManager.startAndWait(); // check that the reloaded state matches the old TransactionSnapshot newState = txManager.getCurrentState(); LOG.info("New state: " + newState); assertEquals(origState, newState); } finally { if (storage1 != null) { storage1.stopAndWait(); } if (storage2 != null) { storage2.stopAndWait(); } } } /** * Tests removal of old snapshots and old transaction logs. 
*/ @Test public void testOldFileRemoval() throws Exception { Configuration conf = getConfiguration("testOldFileRemoval"); TransactionStateStorage storage = null; try { storage = getStorage(conf); storage.startAndWait(); long now = System.currentTimeMillis(); long writePointer = 1; Collection<Long> invalid = Lists.newArrayList(); NavigableMap<Long, TransactionManager.InProgressTx> inprogress = Maps.newTreeMap(); Map<Long, Set<ChangeId>> committing = Maps.newHashMap(); Map<Long, Set<ChangeId>> committed = Maps.newHashMap(); TransactionSnapshot snapshot = new TransactionSnapshot(now, 0, writePointer++, invalid, inprogress, committing, committed); TransactionEdit dummyEdit = TransactionEdit.createStarted(1, 0, Long.MAX_VALUE, TransactionType.SHORT); // write snapshot 1 storage.writeSnapshot(snapshot); TransactionLog log = storage.createLog(now); log.append(dummyEdit); log.close(); snapshot = new TransactionSnapshot(now + 1, 0, writePointer++, invalid, inprogress, committing, committed); // write snapshot 2 storage.writeSnapshot(snapshot); log = storage.createLog(now + 1); log.append(dummyEdit); log.close(); snapshot = new TransactionSnapshot(now + 2, 0, writePointer++, invalid, inprogress, committing, committed); // write snapshot 3 storage.writeSnapshot(snapshot); log = storage.createLog(now + 2); log.append(dummyEdit); log.close(); snapshot = new TransactionSnapshot(now + 3, 0, writePointer++, invalid, inprogress, committing, committed); // write snapshot 4 storage.writeSnapshot(snapshot); log = storage.createLog(now + 3); log.append(dummyEdit); log.close(); snapshot = new TransactionSnapshot(now + 4, 0, writePointer++, invalid, inprogress, committing, committed); // write snapshot 5 storage.writeSnapshot(snapshot); log = storage.createLog(now + 4); log.append(dummyEdit); log.close(); snapshot = new TransactionSnapshot(now + 5, 0, writePointer++, invalid, inprogress, committing, committed); // write snapshot 6 storage.writeSnapshot(snapshot); log = 
storage.createLog(now + 5); log.append(dummyEdit); log.close(); List<String> allSnapshots = storage.listSnapshots(); LOG.info("All snapshots: " + allSnapshots); assertEquals(6, allSnapshots.size()); List<String> allLogs = storage.listLogs(); LOG.info("All logs: " + allLogs); assertEquals(6, allLogs.size()); long oldestKept = storage.deleteOldSnapshots(3); assertEquals(now + 3, oldestKept); allSnapshots = storage.listSnapshots(); LOG.info("All snapshots: " + allSnapshots); assertEquals(3, allSnapshots.size()); storage.deleteLogsOlderThan(oldestKept); allLogs = storage.listLogs(); LOG.info("All logs: " + allLogs); assertEquals(3, allLogs.size()); } finally { if (storage != null) { storage.stopAndWait(); } } } @Test public void testLongTxnEditReplay() throws Exception { Configuration conf = getConfiguration("testLongTxnEditReplay"); TransactionStateStorage storage = null; try { storage = getStorage(conf); storage.startAndWait(); // Create long running txns. Abort one of them, invalidate another, invalidate and abort the last. 
long time1 = System.currentTimeMillis(); long wp1 = time1 * TxConstants.MAX_TX_PER_MS; TransactionEdit edit1 = TransactionEdit.createStarted(wp1, wp1 - 10, time1 + 100000, TransactionType.LONG); TransactionEdit edit2 = TransactionEdit.createAborted(wp1, TransactionType.LONG, null); long time2 = time1 + 100; long wp2 = time2 * TxConstants.MAX_TX_PER_MS; TransactionEdit edit3 = TransactionEdit.createStarted(wp2, wp2 - 10, time2 + 100000, TransactionType.LONG); TransactionEdit edit4 = TransactionEdit.createInvalid(wp2); long time3 = time1 + 200; long wp3 = time3 * TxConstants.MAX_TX_PER_MS; TransactionEdit edit5 = TransactionEdit.createStarted(wp3, wp3 - 10, time3 + 100000, TransactionType.LONG); TransactionEdit edit6 = TransactionEdit.createInvalid(wp3); TransactionEdit edit7 = TransactionEdit.createAborted(wp3, TransactionType.LONG, null); // write transaction edits TransactionLog log = storage.createLog(time1); log.append(edit1); log.append(edit2); log.append(edit3); log.append(edit4); log.append(edit5); log.append(edit6); log.append(edit7); log.close(); // Start transaction manager TransactionManager txm = new TransactionManager(conf, storage, new TxMetricsCollector()); txm.startAndWait(); try { // Verify that all txns are in invalid list. TransactionSnapshot snapshot1 = txm.getCurrentState(); assertEquals(ImmutableList.of(wp1, wp2, wp3), snapshot1.getInvalid()); assertEquals(0, snapshot1.getInProgress().size()); assertEquals(0, snapshot1.getCommittedChangeSets().size()); assertEquals(0, snapshot1.getCommittedChangeSets().size()); } finally { txm.stopAndWait(); } } finally { if (storage != null) { storage.stopAndWait(); } } } @Test public void testTruncateInvalidTxEditReplay() throws Exception { Configuration conf = getConfiguration("testTruncateInvalidTxEditReplay"); TransactionStateStorage storage = null; try { storage = getStorage(conf); storage.startAndWait(); // Create some txns, and invalidate all of them. 
long time1 = System.currentTimeMillis(); long wp1 = time1 * TxConstants.MAX_TX_PER_MS; TransactionEdit edit1 = TransactionEdit.createStarted(wp1, wp1 - 10, time1 + 100000, TransactionType.LONG); TransactionEdit edit2 = TransactionEdit.createInvalid(wp1); long time2 = time1 + 100; long wp2 = time2 * TxConstants.MAX_TX_PER_MS; TransactionEdit edit3 = TransactionEdit.createStarted(wp2, wp2 - 10, time2 + 10000, TransactionType.SHORT); TransactionEdit edit4 = TransactionEdit.createInvalid(wp2); long time3 = time1 + 2000; long wp3 = time3 * TxConstants.MAX_TX_PER_MS; TransactionEdit edit5 = TransactionEdit.createStarted(wp3, wp3 - 10, time3 + 100000, TransactionType.LONG); TransactionEdit edit6 = TransactionEdit.createInvalid(wp3); long time4 = time1 + 2100; long wp4 = time4 * TxConstants.MAX_TX_PER_MS; TransactionEdit edit7 = TransactionEdit.createStarted(wp4, wp4 - 10, time4 + 10000, TransactionType.SHORT); TransactionEdit edit8 = TransactionEdit.createInvalid(wp4); // remove wp1 and wp3 from invalid list TransactionEdit edit9 = TransactionEdit.createTruncateInvalidTx(ImmutableSet.of(wp1, wp3)); // truncate invalid transactions before time3 TransactionEdit edit10 = TransactionEdit.createTruncateInvalidTxBefore(time3); // write transaction edits TransactionLog log = storage.createLog(time1); log.append(edit1); log.append(edit2); log.append(edit3); log.append(edit4); log.append(edit5); log.append(edit6); log.append(edit7); log.append(edit8); log.append(edit9); log.append(edit10); log.close(); // Start transaction manager TransactionManager txm = new TransactionManager(conf, storage, new TxMetricsCollector()); txm.startAndWait(); try { // Only wp4 should be in invalid list. 
TransactionSnapshot snapshot = txm.getCurrentState(); assertEquals(ImmutableList.of(wp4), snapshot.getInvalid()); assertEquals(0, snapshot.getInProgress().size()); assertEquals(0, snapshot.getCommittedChangeSets().size()); assertEquals(0, snapshot.getCommittedChangeSets().size()); } finally { txm.stopAndWait(); } } finally { if (storage != null) { storage.stopAndWait(); } } } /** * Generates a new snapshot object with semi-randomly populated values. This does not necessarily accurately * represent a typical snapshot's distribution of values, as we only set an upper bound on pointer values. * * We generate a new snapshot with the contents: * <ul> * <li>readPointer = 1M + (random % 1M)</li> * <li>writePointer = readPointer + 1000</li> * <li>waterMark = writePointer + 1000</li> * <li>inProgress = one each for (writePointer - 500)..writePointer, ~ 5% "long" transaction</li> * <li>invalid = 100 randomly distributed, 0..1M</li> * <li>committing = one each, (readPointer + 1)..(readPointer + 100)</li> * <li>committed = one each, (readPointer - 1000)..readPointer</li> * </ul> * @return a new snapshot of transaction state. 
*/ private TransactionSnapshot createRandomSnapshot() { // limit readPointer to a reasonable range, but make it > 1M so we can assign enough keys below long readPointer = (Math.abs(random.nextLong()) % 1000000L) + 1000000L; long writePointer = readPointer + 1000L; // generate in progress -- assume last 500 write pointer values NavigableMap<Long, TransactionManager.InProgressTx> inProgress = Maps.newTreeMap(); long startPointer = writePointer - 500L; for (int i = 0; i < 500; i++) { long currentTime = System.currentTimeMillis(); // make some "long" transactions if (i % 20 == 0) { inProgress.put(startPointer + i, new TransactionManager.InProgressTx(startPointer - 1, currentTime + TimeUnit.DAYS.toSeconds(1), TransactionManager.InProgressType.LONG)); } else { inProgress.put(startPointer + i, new TransactionManager.InProgressTx(startPointer - 1, currentTime + 300000L, TransactionManager.InProgressType.SHORT)); } } // make 100 random invalid IDs LongArrayList invalid = new LongArrayList(); for (int i = 0; i < 100; i++) { invalid.add(Math.abs(random.nextLong()) % 1000000L); } // make 100 committing entries, 10 keys each Map<Long, Set<ChangeId>> committing = Maps.newHashMap(); for (int i = 0; i < 100; i++) { committing.put(readPointer + i, generateChangeSet(10)); } // make 1000 committed entries, 10 keys each long startCommitted = readPointer - 1000L; NavigableMap<Long, Set<ChangeId>> committed = Maps.newTreeMap(); for (int i = 0; i < 1000; i++) { committed.put(startCommitted + i, generateChangeSet(10)); } return new TransactionSnapshot(System.currentTimeMillis(), readPointer, writePointer, invalid, inProgress, committing, committed); } private Set<ChangeId> generateChangeSet(int numEntries) { Set<ChangeId> changes = Sets.newHashSet(); for (int i = 0; i < numEntries; i++) { byte[] bytes = new byte[8]; random.nextBytes(bytes); changes.add(new ChangeId(bytes)); } return changes; } }
/**
 * Copyright (C) 2009-2013 Dell, Inc.
 *
 * ====================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ====================================================================
 */
package org.dasein.cloud.opsource;

import java.util.*;

import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.dc.DataCenter;
import org.dasein.cloud.dc.DataCenterServices;
import org.dasein.cloud.dc.Region;
import org.dasein.cloud.util.APITrace;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import javax.annotation.Nonnull;

/**
 * Data center services for OpSource clouds: each OpSource "location" is exposed as a region
 * with exactly one synthetic data center carrying the same id.
 */
public class OpSourceLocation implements DataCenterServices {
    private OpSource provider = null;

    OpSourceLocation(OpSource provider) {
        this.provider = provider;
    }

    @Override
    public DataCenter getDataCenter(String dataCenterId) throws InternalException, CloudException {
        APITrace.begin(provider, "DC.getDataCenter");
        try {
            // program to the interface; the original cast to ArrayList unnecessarily
            Collection<Region> regions = listRegions();
            if( regions == null ) {
                return null;
            }
            for( Region region : regions ) {
                for( DataCenter dc : listDataCenters(region.getProviderRegionId()) ) {
                    if( dc.getProviderDataCenterId().equals(dataCenterId) ) {
                        return dc;
                    }
                }
            }
            return null;
        }
        finally {
            APITrace.end();
        }
    }

    @Override
    public String getProviderTermForDataCenter(Locale locale) {
        return "Data Center";
    }

    @Override
    public String getProviderTermForRegion(Locale locale) {
        return "Location";
    }

    @Override
    public Region getRegion(String regionId) throws InternalException, CloudException {
        APITrace.begin(provider, "DC.getRegion");
        try {
            Collection<Region> regions = listRegions();
            if( regions != null ) {
                for( Region region : regions ) {
                    if( regionId.equals(region.getProviderRegionId()) ) {
                        return region;
                    }
                }
            }
            return null;
        }
        finally {
            APITrace.end();
        }
    }

    @Override
    public @Nonnull Collection<DataCenter> listDataCenters(@Nonnull String regionId) throws InternalException, CloudException {
        APITrace.begin(provider, "DC.listDataCenters");
        try {
            Region region = this.getRegion(regionId);

            if( region == null ) {
                throw new CloudException("No such region with regionId -> " + regionId);
            }
            // OpSource has no separate data center concept; synthesize one per region
            DataCenter dc = new DataCenter();
            dc.setActive(true);
            dc.setAvailable(true);
            dc.setName(region.getName() + " (DC)");
            dc.setRegionId(regionId);
            dc.setProviderDataCenterId(regionId);
            return Collections.singletonList(dc);
        }
        finally {
            APITrace.end();
        }
    }

    @Override
    public Collection<Region> listRegions() throws InternalException, CloudException {
        APITrace.begin(provider, "DC.listRegions");
        try {
            ArrayList<Region> list = new ArrayList<Region>();
            HashMap<Integer, Param> parameters = new HashMap<Integer, Param>();

            parameters.put(0, new Param(OpSource.LOCATION_BASE_PATH, null));

            String cloudName = getCloudNameFromEndpoint();
            if( cloudName == null ) {
                // could not map the configured endpoint to a known cloud; query the configured endpoint directly
                Document doc = CallCache.getInstance().getAPICall(OpSource.LOCATION_BASE_PATH, provider, parameters, "");
                String sNS = extractNamespace(doc);
                NodeList blocks = doc.getElementsByTagName(sNS + "datacenterWithLimits");
                if( blocks != null ) {
                    for( int i = 0; i < blocks.getLength(); i++ ) {
                        Region region = toRegion(blocks.item(i), sNS);
                        if( region != null ) {
                            list.add(region);
                            provider.setRegionEndpoint(region.getProviderRegionId(), provider.getContext().getEndpoint());
                        }
                    }
                }
            }
            else {
                // query every known endpoint of this cloud and aggregate the regions
                HashMap<String, ArrayList<String>> endpointMap = provider.getProivderEndpointMap();
                ArrayList<String> currentCloudEndpoints = endpointMap.get(cloudName);
                for( String endpoint : currentCloudEndpoints ) {
                    try {
                        // Locale.ENGLISH: default-locale lower-casing breaks scheme detection
                        // under locales such as tr_TR ("HTTP" would not lower-case to "http")
                        String t = endpoint.toLowerCase(Locale.ENGLISH);
                        if( !(t.startsWith("http://") || t.startsWith("https://") || t.matches("^[a-z]+://.*")) ) {
                            endpoint = "https://" + endpoint;
                        }
                        OpSourceMethod method = new OpSourceMethod(provider,
                                provider.buildUrlWithEndpoint(endpoint, null, true, parameters),
                                provider.getBasicRequestParameters(OpSource.Content_Type_Value_Single_Para, "GET", null));
                        Document doc = method.invoke();
                        String sNS = extractNamespace(doc);
                        NodeList blocks = doc.getElementsByTagName(sNS + "datacenterWithLimits");
                        if( blocks != null ) {
                            for( int i = 0; i < blocks.getLength(); i++ ) {
                                Region region = toRegion(blocks.item(i), sNS);
                                if( region != null ) {
                                    list.add(region);
                                    provider.setRegionEndpoint(region.getProviderRegionId(), endpoint);
                                }
                            }
                        }
                    }
                    catch( Exception ignored ) {
                        /*
                            If this fails it is likely a 401 authentication error against the endpoint. Rather than
                            getting a nice XML API error response however, OpSource returns the default apache htaccess
                            401 error so it fails to parse and throws an exception. We're not really interested in this
                            exception as some accounts legitimately don't have access to all the endpoints, so this is
                            deliberately best-effort: skip the endpoint and continue.
                        */
                    }
                }
            }
            return list;
        }
        finally {
            APITrace.end();
        }
    }

    /**
     * Extracts the namespace prefix (including the trailing ':') of the document's root tag,
     * or "" when the tag is unqualified. Replaces the original inline substring + dead
     * IndexOutOfBoundsException catch (substring(0, 0) never throws).
     */
    private String extractNamespace(Document doc) {
        String tag = doc.getDocumentElement().getTagName();
        int idx = tag.indexOf(":");

        return (idx < 0 ? "" : tag.substring(0, idx + 1));
    }

    /**
     * Maps the configured endpoint URL back to the name of a known cloud.
     *
     * @return the name of the cloud whose endpoint list contains the configured endpoint, or null
     */
    public String getCloudNameFromEndpoint() {
        String endpoint = provider.getEndpoint(null);

        // strip the scheme and any trailing API path before comparing
        endpoint = endpoint.substring(endpoint.indexOf("://") + 3);
        if( endpoint.contains("/oec/0.9/") ) {
            endpoint = endpoint.substring(0, endpoint.indexOf("/oec/0.9/"));
        }
        for( Map.Entry<String, ArrayList<String>> entry : provider.getProivderEndpointMap().entrySet() ) {
            for( String candidate : entry.getValue() ) {
                if( candidate.equals(endpoint) ) {
                    return entry.getKey();
                }
            }
        }
        return null;
    }

    /**
     * Converts a {@code datacenterWithLimits} XML node into a {@link Region}.
     *
     * @param region the XML node to convert, may be null
     * @param nameSpace the namespace prefix (including ':') of the document's elements, or ""
     * @return the converted region, or null if the input node was null
     */
    public Region toRegion(Node region, String nameSpace) throws CloudException {
        if( region == null ) {
            return null;
        }
        NodeList data = region.getChildNodes();
        String country = "US";
        Region r = new Region();

        for( int i = 0; i < data.getLength(); i++ ) {
            Node item = data.item(i);

            if( item.getNodeType() == Node.TEXT_NODE ) {
                continue;
            }
            // guard: an empty element has no first child (the original NPE'd on such input)
            Node child = item.getFirstChild();
            String value = (child == null ? null : child.getNodeValue());

            if( item.getNodeName().equals(nameSpace + "location") ) {
                r.setProviderRegionId(value);
            }
            else if( item.getNodeName().equals(nameSpace + "displayName") ) {
                r.setName(value);
            }
            else if( item.getNodeName().equals(nameSpace + "country") && value != null ) {
                country = value;
            }
        }
        r.setActive(true);
        r.setAvailable(true);
        if( country.equals("US") ) {
            r.setJurisdiction("US");
        }
        else if( country.equals("Australia") ) {
            r.setJurisdiction("AU");
        }
        else if( country.equals("South Africa") ) {
            r.setJurisdiction("ZA");
        }
        else {
            // the only case where the jurisdiction is not a direct country code
            r.setJurisdiction("EU");
        }
        return r;
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.gateway; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards.NodeGatewayStartedShards; import org.elasticsearch.index.shard.ShardStateMetaData; import java.util.ArrayList; 
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * The primary shard allocator allocates unassigned primary shards to nodes that hold
 * valid copies of the unassigned primaries. It does this by iterating over all unassigned
 * primary shards in the routing table and fetching shard metadata from each node in the cluster
 * that holds a copy of the shard. The shard metadata from each node is compared against the
 * set of valid allocation IDs and for all valid shard copies (if any), the primary shard allocator
 * executes the allocation deciders to chose a copy to assign the primary shard to.
 *
 * Note that the PrimaryShardAllocator does *not* allocate primaries on index creation
 * (see {@link org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator}),
 * nor does it allocate primaries when a primary shard failed and there is a valid replica
 * copy that can immediately be promoted to primary, as this takes place in {@link RoutingNodes#failShard}.
 */
public abstract class PrimaryShardAllocator extends AbstractComponent {

    // Validates the "initial_shards" setting value: either one of the known
    // keywords or an integer literal; returns the value unchanged.
    private static final Function<String, String> INITIAL_SHARDS_PARSER = (value) -> {
        switch (value) {
            case "quorum":
            case "quorum-1":
            case "half":
            case "one":
            case "full":
            case "full-1":
            case "all-1":
            case "all":
                return value;
            default:
                Integer.parseInt(value); // it can be parsed that's all we care here?
                return value;
        }
    };

    // Node-scoped setting controlling how many shard copies must be found before a
    // version-based (pre-5.0) primary allocation is attempted; defaults to "quorum"
    // (falling back to the legacy "gateway.local.initial_shards" key).
    public static final Setting<String> NODE_INITIAL_SHARDS_SETTING =
        new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"),
            INITIAL_SHARDS_PARSER, Property.Dynamic, Property.NodeScope);

    // Index-scoped override of the node-level setting above; deprecated because it is
    // only consulted for legacy version-based allocation.
    @Deprecated
    public static final Setting<String> INDEX_RECOVERY_INITIAL_SHARDS_SETTING =
        new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) ,
            INITIAL_SHARDS_PARSER, Property.Dynamic, Property.IndexScope);

    /**
     * Creates the allocator with the node-level settings.
     *
     * @param settings node settings; the effective "gateway.initial_shards" value is
     *                 logged at debug level for diagnostics
     */
    public PrimaryShardAllocator(Settings settings) {
        super(settings);
        logger.debug("using initial_shards [{}]", NODE_INITIAL_SHARDS_SETTING.get(settings));
    }

    /**
     * Iterates the unassigned primaries of the routing table and, for each one that
     * recovers from an existing store or a snapshot, either assigns it to a node
     * holding a valid copy, or removes it from the current allocation round with an
     * appropriate {@link AllocationStatus}.
     *
     * @param allocation the current routing allocation; mutated via its unassigned
     *                   iterator (initialize / removeAndIgnore)
     */
    public void allocateUnassigned(RoutingAllocation allocation) {
        final RoutingNodes routingNodes = allocation.routingNodes();
        final MetaData metaData = allocation.metaData();
        final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator();
        while (unassignedIterator.hasNext()) {
            final ShardRouting shard = unassignedIterator.next();

            if (shard.primary() == false) {
                continue;
            }

            // Only primaries recovering from an existing on-disk copy or a snapshot are
            // handled here; other recovery types are left to other allocators.
            if (shard.recoverySource().getType() != RecoverySource.Type.EXISTING_STORE &&
                shard.recoverySource().getType() != RecoverySource.Type.SNAPSHOT) {
                continue;
            }

            final AsyncShardFetch.FetchResult<NodeGatewayStartedShards> shardState = fetchData(shard, allocation);
            if (shardState.hasData() == false) {
                // Shard state is still being fetched asynchronously; skip this round.
                logger.trace("{}: ignoring allocation, still fetching shard started state", shard);
                allocation.setHasPendingAsyncFetch();
                unassignedIterator.removeAndIgnore(AllocationStatus.FETCHING_SHARD_DATA, allocation.changes());
                continue;
            }

            // don't create a new IndexSetting object for every shard as this could cause a lot of garbage
            // on cluster restart if we allocate a boat load of shards
            final IndexMetaData indexMetaData = metaData.getIndexSafe(shard.index());
            final Set<String> inSyncAllocationIds = indexMetaData.inSyncAllocationIds(shard.id());
            final boolean snapshotRestore = shard.recoverySource().getType() == RecoverySource.Type.SNAPSHOT;
            final boolean recoverOnAnyNode = recoverOnAnyNode(indexMetaData);

            final NodeShardsResult nodeShardsResult;
            final boolean enoughAllocationsFound;

            if (inSyncAllocationIds.isEmpty()) {
                assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_5_0_0_alpha1) :
                    "trying to allocated a primary with an empty allocation id set, but index is new";
                // when we load an old index (after upgrading cluster) or restore a snapshot of an old index
                // fall back to old version-based allocation mode
                // Note that once the shard has been active, lastActiveAllocationIds will be non-empty
                nodeShardsResult = buildVersionBasedNodeShardsResult(shard, snapshotRestore || recoverOnAnyNode,
                    allocation.getIgnoreNodes(shard.shardId()), shardState);
                if (snapshotRestore || recoverOnAnyNode) {
                    enoughAllocationsFound = nodeShardsResult.allocationsFound > 0;
                } else {
                    enoughAllocationsFound = isEnoughVersionBasedAllocationsFound(indexMetaData, nodeShardsResult);
                }
                logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}", shard.index(),
                    shard.id(), Version.V_5_0_0_alpha1, nodeShardsResult.allocationsFound, shard);
            } else {
                assert inSyncAllocationIds.isEmpty() == false;
                // use allocation ids to select nodes
                nodeShardsResult = buildAllocationIdBasedNodeShardsResult(shard, snapshotRestore || recoverOnAnyNode,
                    allocation.getIgnoreNodes(shard.shardId()), inSyncAllocationIds, shardState);
                enoughAllocationsFound = nodeShardsResult.orderedAllocationCandidates.size() > 0;
                logger.debug("[{}][{}]: found {} allocation candidates of {} based on allocation ids: [{}]", shard.index(),
                    shard.id(), nodeShardsResult.orderedAllocationCandidates.size(), shard, inSyncAllocationIds);
            }

            if (enoughAllocationsFound == false){
                if (snapshotRestore) {
                    // let BalancedShardsAllocator take care of allocating this shard
                    logger.debug("[{}][{}]: missing local data, will restore from [{}]",
                        shard.index(), shard.id(), shard.recoverySource());
                } else if (recoverOnAnyNode) {
                    // let BalancedShardsAllocator take care of allocating this shard
                    logger.debug("[{}][{}]: missing local data, recover from any node", shard.index(), shard.id());
                } else {
                    // we can't really allocate, so ignore it and continue
                    unassignedIterator.removeAndIgnore(AllocationStatus.NO_VALID_SHARD_COPY, allocation.changes());
                    logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}]",
                        shard.index(), shard.id(), nodeShardsResult.allocationsFound);
                }
                continue;
            }

            final NodesToAllocate nodesToAllocate = buildNodesToAllocate(
                allocation, nodeShardsResult.orderedAllocationCandidates, shard, false
            );
            if (nodesToAllocate.yesNodeShards.isEmpty() == false) {
                // Candidates are ordered best-first; pick the first YES decision.
                NodeGatewayStartedShards nodeShardState = nodesToAllocate.yesNodeShards.get(0);
                logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation",
                    shard.index(), shard.id(), shard, nodeShardState.getNode());
                unassignedIterator.initialize(nodeShardState.getNode().getId(), nodeShardState.allocationId(),
                    ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, allocation.changes());
            } else if (nodesToAllocate.throttleNodeShards.isEmpty() == true && nodesToAllocate.noNodeShards.isEmpty() == false) {
                // The deciders returned a NO decision for all nodes with shard copies, so we check if primary shard
                // can be force-allocated to one of the nodes.
                final NodesToAllocate nodesToForceAllocate = buildNodesToAllocate(
                    allocation, nodeShardsResult.orderedAllocationCandidates, shard, true
                );
                if (nodesToForceAllocate.yesNodeShards.isEmpty() == false) {
                    NodeGatewayStartedShards nodeShardState = nodesToForceAllocate.yesNodeShards.get(0);
                    logger.debug("[{}][{}]: allocating [{}] to [{}] on forced primary allocation",
                        shard.index(), shard.id(), shard, nodeShardState.getNode());
                    unassignedIterator.initialize(nodeShardState.getNode().getId(), nodeShardState.allocationId(),
                        ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, allocation.changes());
                } else if (nodesToForceAllocate.throttleNodeShards.isEmpty() == false) {
                    logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on forced primary allocation",
                        shard.index(), shard.id(), shard, nodesToForceAllocate.throttleNodeShards);
                    unassignedIterator.removeAndIgnore(AllocationStatus.DECIDERS_THROTTLED, allocation.changes());
                } else {
                    logger.debug("[{}][{}]: forced primary allocation denied [{}]", shard.index(), shard.id(), shard);
                    unassignedIterator.removeAndIgnore(AllocationStatus.DECIDERS_NO, allocation.changes());
                }
            } else {
                // we are throttling this, but we have enough to allocate to this node, ignore it for now
                logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation",
                    shard.index(), shard.id(), shard, nodesToAllocate.throttleNodeShards);
                unassignedIterator.removeAndIgnore(AllocationStatus.DECIDERS_THROTTLED, allocation.changes());
            }
        }
    }

    /**
     * Builds a list of nodes. If matchAnyShard is set to false, only nodes that have an allocation id matching
     * lastActiveAllocationIds are added to the list. Otherwise, any node that has a shard is added to the list, but
     * entries with matching allocation id are always at the front of the list.
     */
    protected NodeShardsResult buildAllocationIdBasedNodeShardsResult(ShardRouting shard, boolean matchAnyShard,
        Set<String> ignoreNodes, Set<String> lastActiveAllocationIds,
        AsyncShardFetch.FetchResult<NodeGatewayStartedShards> shardState) {
        // Linked lists so we can addFirst for copies that were primaries (preferred).
        LinkedList<NodeGatewayStartedShards> matchingNodeShardStates = new LinkedList<>();
        LinkedList<NodeGatewayStartedShards> nonMatchingNodeShardStates = new LinkedList<>();
        int numberOfAllocationsFound = 0;
        for (NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
            DiscoveryNode node = nodeShardState.getNode();
            String allocationId = nodeShardState.allocationId();

            if (ignoreNodes.contains(node.getId())) {
                continue;
            }

            if (nodeShardState.storeException() == null) {
                if (allocationId == null && nodeShardState.legacyVersion() == ShardStateMetaData.NO_VERSION) {
                    logger.trace("[{}] on node [{}] has no shard state information", shard, nodeShardState.getNode());
                } else if (allocationId != null) {
                    assert nodeShardState.legacyVersion() == ShardStateMetaData.NO_VERSION :
                        "Allocation id and legacy version cannot be both present";
                    logger.trace("[{}] on node [{}] has allocation id [{}]", shard, nodeShardState.getNode(), allocationId);
                } else {
                    logger.trace("[{}] on node [{}] has no allocation id, out-dated shard (shard state version: [{}])",
                        shard, nodeShardState.getNode(), nodeShardState.legacyVersion());
                }
            } else {
                final String finalAllocationId = allocationId;
                // An unopenable store invalidates the copy: treat it as having no allocation id.
                logger.trace((Supplier<?>) () -> new ParameterizedMessage(
                    "[{}] on node [{}] has allocation id [{}] but the store can not be opened, treating as no allocation id",
                    shard, nodeShardState.getNode(), finalAllocationId), nodeShardState.storeException());
                allocationId = null;
            }

            if (allocationId != null) {
                numberOfAllocationsFound++;
                if (lastActiveAllocationIds.contains(allocationId)) {
                    if (nodeShardState.primary()) {
                        matchingNodeShardStates.addFirst(nodeShardState);
                    } else {
                        matchingNodeShardStates.addLast(nodeShardState);
                    }
                } else if (matchAnyShard) {
                    if (nodeShardState.primary()) {
                        nonMatchingNodeShardStates.addFirst(nodeShardState);
                    } else {
                        nonMatchingNodeShardStates.addLast(nodeShardState);
                    }
                }
            }
        }

        // In-sync (matching) copies always precede non-matching ones.
        List<NodeGatewayStartedShards> nodeShardStates = new ArrayList<>();
        nodeShardStates.addAll(matchingNodeShardStates);
        nodeShardStates.addAll(nonMatchingNodeShardStates);

        if (logger.isTraceEnabled()) {
            logger.trace("{} candidates for allocation: {}", shard,
                nodeShardStates.stream().map(s -> s.getNode().getName()).collect(Collectors.joining(", ")));
        }
        return new NodeShardsResult(nodeShardStates, numberOfAllocationsFound);
    }

    /**
     * used by old version-based allocation
     */
    private boolean isEnoughVersionBasedAllocationsFound(IndexMetaData indexMetaData, NodeShardsResult nodeShardsResult) {
        // check if the counts meets the minimum set
        int requiredAllocation = 1;
        // if we restore from a repository one copy is more then enough
        String initialShards = INDEX_RECOVERY_INITIAL_SHARDS_SETTING.get(indexMetaData.getSettings(), settings);
        if ("quorum".equals(initialShards)) {
            if (indexMetaData.getNumberOfReplicas() > 1) {
                requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1;
            }
        } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
            if (indexMetaData.getNumberOfReplicas() > 2) {
                requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2);
            }
        } else if ("one".equals(initialShards)) {
            requiredAllocation = 1;
        } else if ("full".equals(initialShards) || "all".equals(initialShards)) {
            requiredAllocation = indexMetaData.getNumberOfReplicas() + 1;
        } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
            if (indexMetaData.getNumberOfReplicas() > 1) {
                requiredAllocation = indexMetaData.getNumberOfReplicas();
            }
        } else {
            // Numeric value: the parser guaranteed it is a valid integer.
            requiredAllocation = Integer.parseInt(initialShards);
        }

        return nodeShardsResult.allocationsFound >= requiredAllocation;
    }

    /**
     * Split the list of node shard states into groups yes/no/throttle based on allocation deciders
     */
    private NodesToAllocate buildNodesToAllocate(RoutingAllocation allocation, List<NodeGatewayStartedShards> nodeShardStates,
                                                 ShardRouting shardRouting, boolean forceAllocate) {
        List<NodeGatewayStartedShards> yesNodeShards = new ArrayList<>();
        List<NodeGatewayStartedShards> throttledNodeShards = new ArrayList<>();
        List<NodeGatewayStartedShards> noNodeShards = new ArrayList<>();
        for (NodeGatewayStartedShards nodeShardState : nodeShardStates) {
            RoutingNode node = allocation.routingNodes().node(nodeShardState.getNode().getId());
            if (node == null) {
                // Node holding the copy has left the cluster.
                continue;
            }

            Decision decision = forceAllocate ? allocation.deciders().canForceAllocatePrimary(shardRouting, node, allocation) :
                allocation.deciders().canAllocate(shardRouting, node, allocation);
            if (decision.type() == Decision.Type.THROTTLE) {
                throttledNodeShards.add(nodeShardState);
            } else if (decision.type() == Decision.Type.NO) {
                noNodeShards.add(nodeShardState);
            } else {
                yesNodeShards.add(nodeShardState);
            }
        }
        return new NodesToAllocate(Collections.unmodifiableList(yesNodeShards),
            Collections.unmodifiableList(throttledNodeShards), Collections.unmodifiableList(noNodeShards));
    }

    /**
     * Builds a list of previously started shards. If matchAnyShard is set to false, only shards with the highest shard version are added to
     * the list. Otherwise, any existing shard is added to the list, but entries with highest version are always at the front of the list.
     */
    NodeShardsResult buildVersionBasedNodeShardsResult(ShardRouting shard, boolean matchAnyShard, Set<String> ignoreNodes,
                                                       AsyncShardFetch.FetchResult<NodeGatewayStartedShards> shardState) {
        final List<NodeGatewayStartedShards> allocationCandidates = new ArrayList<>();
        int numberOfAllocationsFound = 0;
        long highestVersion = ShardStateMetaData.NO_VERSION;
        for (NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
            long version = nodeShardState.legacyVersion();
            DiscoveryNode node = nodeShardState.getNode();

            if (ignoreNodes.contains(node.getId())) {
                continue;
            }

            if (nodeShardState.storeException() == null) {
                if (version == ShardStateMetaData.NO_VERSION && nodeShardState.allocationId() == null) {
                    logger.trace("[{}] on node [{}] has no shard state information", shard, nodeShardState.getNode());
                } else if (version != ShardStateMetaData.NO_VERSION) {
                    assert nodeShardState.allocationId() == null : "Allocation id and legacy version cannot be both present";
                    logger.trace("[{}] on node [{}] has version [{}] of shard", shard, nodeShardState.getNode(), version);
                } else {
                    // shard was already selected in a 5.x cluster as primary for recovery, was initialized (and wrote a new state file) but
                    // did not make it to STARTED state before the cluster crashed (otherwise list of active allocation ids would be
                    // non-empty and allocation id - based allocation mode would be chosen).
                    // Prefer this shard copy again.
                    version = Long.MAX_VALUE;
                    logger.trace("[{}] on node [{}] has allocation id [{}]", shard, nodeShardState.getNode(), nodeShardState.allocationId());
                }
            } else {
                final long finalVerison = version;
                // when there is an store exception, we disregard the reported version and assign it as no version (same as shard does not exist)
                logger.trace((Supplier<?>) () -> new ParameterizedMessage(
                    "[{}] on node [{}] has version [{}] but the store can not be opened, treating no version",
                    shard, nodeShardState.getNode(), finalVerison), nodeShardState.storeException());
                version = ShardStateMetaData.NO_VERSION;
            }

            if (version != ShardStateMetaData.NO_VERSION) {
                numberOfAllocationsFound++;
                // If we've found a new "best" candidate, clear the
                // current candidates and add it
                if (version > highestVersion) {
                    highestVersion = version;
                    if (matchAnyShard == false) {
                        allocationCandidates.clear();
                    }
                    allocationCandidates.add(nodeShardState);
                } else if (version == highestVersion) {
                    // If the candidate is the same, add it to the
                    // list, but keep the current candidate
                    allocationCandidates.add(nodeShardState);
                }
            }
        }
        // sort array so the node with the highest version is at the beginning
        CollectionUtil.timSort(allocationCandidates, Comparator.comparing(NodeGatewayStartedShards::legacyVersion).reversed());

        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder("[");
            for (NodeGatewayStartedShards n : allocationCandidates) {
                sb.append("[").append(n.getNode().getName()).append("]").append(" -> ").append(n.legacyVersion()).append(", ");
            }
            sb.append("]");
            logger.trace("{} candidates for allocation: {}", shard, sb.toString());
        }

        return new NodeShardsResult(Collections.unmodifiableList(allocationCandidates), numberOfAllocationsFound);
    }

    /**
     * Return {@code true} if the index is configured to allow shards to be
     * recovered on any node
     */
    private boolean recoverOnAnyNode(IndexMetaData metaData) {
        return (IndexMetaData.isOnSharedFilesystem(metaData.getSettings()) || IndexMetaData.isOnSharedFilesystem(this.settings))
            && IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING.get(metaData.getSettings(), this.settings);
    }

    /**
     * Fetches (possibly asynchronously) the started-shard state for the given shard
     * from all data nodes; implemented by concrete gateway allocators.
     */
    protected abstract AsyncShardFetch.FetchResult<NodeGatewayStartedShards> fetchData(ShardRouting shard, RoutingAllocation allocation);

    /**
     * Result of candidate discovery: the ordered (best-first) list of node shard
     * states plus the raw count of copies found.
     */
    static class NodeShardsResult {
        public final List<NodeGatewayStartedShards> orderedAllocationCandidates;
        public final int allocationsFound;

        public NodeShardsResult(List<NodeGatewayStartedShards> orderedAllocationCandidates, int allocationsFound) {
            this.orderedAllocationCandidates = orderedAllocationCandidates;
            this.allocationsFound = allocationsFound;
        }
    }

    /**
     * Candidate node shard states partitioned by allocation decider verdict
     * (YES / THROTTLE / NO); all three lists are unmodifiable.
     */
    static class NodesToAllocate {
        final List<NodeGatewayStartedShards> yesNodeShards;
        final List<NodeGatewayStartedShards> throttleNodeShards;
        final List<NodeGatewayStartedShards> noNodeShards;

        public NodesToAllocate(List<NodeGatewayStartedShards> yesNodeShards, List<NodeGatewayStartedShards> throttleNodeShards,
                               List<NodeGatewayStartedShards> noNodeShards) {
            this.yesNodeShards = yesNodeShards;
            this.throttleNodeShards = throttleNodeShards;
            this.noNodeShards = noNodeShards;
        }
    }
}
package com.uservoice.uservoicesdk.ui; import java.util.ArrayList; import java.util.List; import android.content.Context; import android.content.Intent; import android.support.v4.app.FragmentActivity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.TextView; import com.uservoice.uservoicesdk.Config; import com.uservoice.uservoicesdk.R; import com.uservoice.uservoicesdk.Session; import com.uservoice.uservoicesdk.UserVoice; import com.uservoice.uservoicesdk.activity.ContactActivity; import com.uservoice.uservoicesdk.activity.ForumActivity; import com.uservoice.uservoicesdk.activity.SearchActivity; import com.uservoice.uservoicesdk.flow.InitManager; import com.uservoice.uservoicesdk.model.Article; import com.uservoice.uservoicesdk.model.BaseModel; import com.uservoice.uservoicesdk.model.Forum; import com.uservoice.uservoicesdk.model.Suggestion; import com.uservoice.uservoicesdk.model.Topic; public class PortalAdapter extends SearchAdapter<BaseModel> implements AdapterView.OnItemClickListener { public static int SCOPE_ALL = 0; public static int SCOPE_ARTICLES = 1; public static int SCOPE_IDEAS = 2; private static int KB_HEADER = 0; private static int FORUM = 1; private static int TOPIC = 2; private static int LOADING = 3; private static int CONTACT = 4; private static int ARTICLE = 5; private static int POWERED_BY = 6; private LayoutInflater inflater; private final FragmentActivity context; private boolean configLoaded = false; private List<Integer> staticRows; private List<Article> articles; public PortalAdapter(FragmentActivity context) { this.context = context; inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); new InitManager(context, new Runnable() { @Override public void run() { configLoaded = true; notifyDataSetChanged(); loadForum(); loadTopics(); } }).init(); } private List<Topic> getTopics() { return 
Session.getInstance().getTopics(); } private boolean shouldShowArticles() { return Session.getInstance().getConfig(context).getTopicId() != -1 || (getTopics() != null && getTopics().isEmpty()); } private void loadForum() { Config config = Session.getInstance().getConfig(context); if (config.shouldShowForum() || config.shouldShowPostIdea()) { Forum.loadForum(context, Session.getInstance().getConfig(context).getForumId(), new DefaultCallback<Forum>(context) { @Override public void onModel(Forum model) { Session.getInstance().setForum(model); notifyDataSetChanged(); } }); } } private void loadTopics() { final DefaultCallback<List<Article>> articlesCallback = new DefaultCallback<List<Article>>(context) { @Override public void onModel(List<Article> model) { Session.getInstance().setTopics(new ArrayList<Topic>()); articles = model; notifyDataSetChanged(); } }; if (Session.getInstance().getConfig(context).getTopicId() != -1) { Article.loadPageForTopic(context, Session.getInstance().getConfig(context).getTopicId(), 1, articlesCallback); } else { Topic.loadTopics(context, new DefaultCallback<List<Topic>>(context) { @Override public void onModel(List<Topic> model) { if (model.isEmpty()) { Session.getInstance().setTopics(model); Article.loadPage(context, 1, articlesCallback); } else { ArrayList<Topic> topics = new ArrayList<Topic>(model); topics.add(Topic.allArticlesTopic(context)); Session.getInstance().setTopics(topics); notifyDataSetChanged(); } } }); } } private void computeStaticRows() { if (staticRows == null) { staticRows = new ArrayList<Integer>(); Config config = Session.getInstance().getConfig(context); if (config.shouldShowContactUs()) staticRows.add(CONTACT); if (config.shouldShowForum()) staticRows.add(FORUM); if (config.shouldShowKnowledgeBase()) staticRows.add(KB_HEADER); } } @Override public int getCount() { if (!configLoaded) { return 1; } else { computeStaticRows(); int rows = staticRows.size(); if 
(Session.getInstance().getConfig(context).shouldShowKnowledgeBase()) { if (getTopics() == null || (shouldShowArticles() && articles == null)) { rows += 1; } else { rows += shouldShowArticles() ? articles.size() : getTopics().size(); } } if (!Session.getInstance().getClientConfig().isWhiteLabel()) { rows += 1; } return rows; } } public List<BaseModel> getScopedSearchResults() { if (scope == SCOPE_ALL) { return searchResults; } else if (scope == SCOPE_ARTICLES) { List<BaseModel> articles = new ArrayList<BaseModel>(); for (BaseModel model : searchResults) { if (model instanceof Article) articles.add(model); } return articles; } else if (scope == SCOPE_IDEAS) { List<BaseModel> ideas = new ArrayList<BaseModel>(); for (BaseModel model : searchResults) { if (model instanceof Suggestion) ideas.add(model); } return ideas; } return null; } @Override public Object getItem(int position) { computeStaticRows(); if (position < staticRows.size() && staticRows.get(position) == FORUM) return Session.getInstance().getForum(); else if (getTopics() != null && !shouldShowArticles() && position >= staticRows.size() && position - staticRows.size() < getTopics().size()) return getTopics().get(position - staticRows.size()); else if (articles != null && shouldShowArticles() && position >= staticRows.size() && position - staticRows.size() < articles.size()) return articles.get(position - staticRows.size()); return null; } @Override public long getItemId(int position) { return position; } @Override public boolean isEnabled(int position) { if (!configLoaded) return false; computeStaticRows(); if (position < staticRows.size()) { int type = staticRows.get(position); if (type == KB_HEADER || type == LOADING) return false; } return true; } @Override protected void searchResultsUpdated() { int articleResults = 0; int ideaResults = 0; for (BaseModel model : searchResults) { if (model instanceof Article) articleResults += 1; else ideaResults += 1; } ((SearchActivity) 
context).updateScopedSearch(searchResults.size(), articleResults, ideaResults); } @Override public View getView(int position, View convertView, ViewGroup parent) { View view = convertView; int type = getItemViewType(position); if (view == null) { if (type == LOADING) view = inflater.inflate(R.layout.uv_loading_item, null); else if (type == FORUM) view = inflater.inflate(R.layout.uv_text_item, null); else if (type == KB_HEADER) view = inflater.inflate(R.layout.uv_header_item_light, null); else if (type == TOPIC) view = inflater.inflate(R.layout.uv_text_item, null); else if (type == CONTACT) view = inflater.inflate(R.layout.uv_text_item, null); else if (type == ARTICLE) view = inflater.inflate(R.layout.uv_text_item, null); else if (type == POWERED_BY) view = inflater.inflate(R.layout.uv_powered_by_item, null); } if (type == FORUM) { TextView textView = (TextView) view.findViewById(R.id.uv_text); textView.setText(R.string.uv_feedback_forum); TextView text2 = (TextView) view.findViewById(R.id.uv_text2); text2.setText(Utils.getQuantityString(text2, R.plurals.uv_ideas, Session.getInstance().getForum().getNumberOfOpenSuggestions())); } else if (type == KB_HEADER) { TextView textView = (TextView) view.findViewById(R.id.uv_header_text); textView.setText(R.string.uv_knowledge_base); } else if (type == TOPIC) { Topic topic = (Topic) getItem(position); TextView textView = (TextView) view.findViewById(R.id.uv_text); textView.setText(topic.getName()); textView = (TextView) view.findViewById(R.id.uv_text2); if (topic.getId() == -1) { textView.setVisibility(View.GONE); } else { textView.setVisibility(View.VISIBLE); textView.setText(String.format("%d %s", topic.getNumberOfArticles(), context.getResources().getQuantityString(R.plurals.uv_articles, topic.getNumberOfArticles()))); } } else if (type == CONTACT) { TextView textView = (TextView) view.findViewById(R.id.uv_text); textView.setText(R.string.uv_contact_us); view.findViewById(R.id.uv_text2).setVisibility(View.GONE); } else if 
(type == ARTICLE) { TextView textView = (TextView) view.findViewById(R.id.uv_text); Article article = (Article) getItem(position); textView.setText(article.getTitle()); } else if (type == POWERED_BY) { TextView textView = (TextView) view.findViewById(R.id.uv_version); textView.setText(context.getString(R.string.uv_android_sdk) + " v" + UserVoice.getVersion()); } View divider = view.findViewById(R.id.uv_divider); if (divider != null) divider.setVisibility((position == getCount() - 2 && getItemViewType(getCount() - 1) == POWERED_BY) || position == getCount() - 1 ? View.GONE : View.VISIBLE); if (type == FORUM) divider.setVisibility(View.GONE); return view; } @Override public int getViewTypeCount() { return 7; } @Override public int getItemViewType(int position) { if (!configLoaded) return LOADING; computeStaticRows(); if (position < staticRows.size()) { int type = staticRows.get(position); if (type == FORUM && Session.getInstance().getForum() == null) return LOADING; return type; } if (Session.getInstance().getConfig(context).shouldShowKnowledgeBase()) { if (getTopics() == null || (shouldShowArticles() && articles == null)) { if (position - staticRows.size() == 0) return LOADING; } else if (shouldShowArticles() && position - staticRows.size() < articles.size()) { return ARTICLE; } else if (!shouldShowArticles() && position - staticRows.size() < getTopics().size()) { return TOPIC; } } return POWERED_BY; } public void onItemClick(AdapterView<?> parent, View view, int position, long id) { int type = getItemViewType(position); if (type == CONTACT) { context.startActivity(new Intent(context, ContactActivity.class)); } else if (type == FORUM) { context.startActivity(new Intent(context, ForumActivity.class)); } else if (type == TOPIC || type == ARTICLE) { Utils.showModel(context, (BaseModel) getItem(position)); } } }
package com.fasterxml.jackson.core.json.async;

import java.io.IOException;

import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.core.async.AsyncTestBase;
import com.fasterxml.jackson.core.testsupport.AsyncReaderWrapper;

/**
 * Tests for the non-blocking (async) JSON parser covering nested Objects and
 * Arrays. Each scenario is replayed with several chunk sizes (feeding the
 * parser 100, 3, or 1 byte(s) at a time) and input offsets, to exercise token
 * decoding across arbitrary chunk boundaries.
 */
public class AsyncSimpleNestedTest extends AsyncTestBase
{
    private final JsonFactory JSON_F = new JsonFactory();

    /*
    /**********************************************************************
    /* Test methods, success
    /**********************************************************************
     */

    // Object containing an array, empty containers and an empty field name,
    // parsed with varying feed sizes and padding offsets.
    public void testStuffInObject() throws Exception
    {
        byte[] data = _jsonDoc(aposToQuotes(
                "{'foobar':[1,2,-999],'emptyObject':{},'emptyArray':[], 'other':{'':null} }"));
        JsonFactory f = JSON_F;
        _testStuffInObject(f, data, 0, 100);
        _testStuffInObject(f, data, 0, 3);
        _testStuffInObject(f, data, 0, 1);
        _testStuffInObject(f, data, 1, 100);
        _testStuffInObject(f, data, 1, 3);
        _testStuffInObject(f, data, 1, 1);
    }

    // Walks the full token stream of the document above, asserting every token,
    // name and value; then re-parses and closes mid-stream to verify close()
    // semantics (isClosed() true, nextToken() null).
    private void _testStuffInObject(JsonFactory f, byte[] data, int offset, int readSize) throws IOException
    {
        AsyncReaderWrapper r = asyncForBytes(f, readSize, data, offset);
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        // START_OBJECT carries no buffered text characters.
        assertFalse(r.parser().hasTextCharacters());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals("foobar", r.currentName());
        assertToken(JsonToken.START_ARRAY, r.nextToken());
        assertEquals("[", r.currentText());
        assertToken(JsonToken.VALUE_NUMBER_INT, r.nextToken());
        assertEquals(1, r.getIntValue());
        assertToken(JsonToken.VALUE_NUMBER_INT, r.nextToken());
        assertEquals(2, r.getIntValue());
        assertToken(JsonToken.VALUE_NUMBER_INT, r.nextToken());
        assertEquals(-999, r.getIntValue());
        assertToken(JsonToken.END_ARRAY, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals("emptyObject", r.currentName());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals("emptyArray", r.currentName());
        assertToken(JsonToken.START_ARRAY, r.nextToken());
        assertToken(JsonToken.END_ARRAY, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals("other", r.currentName());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        // Empty String is a legal field name.
        assertEquals("", r.currentName());
        assertToken(JsonToken.VALUE_NULL, r.nextToken());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.END_OBJECT, r.nextToken());

        // another twist: close in the middle, verify
        r = asyncForBytes(f, readSize, data, offset);
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        r.parser().close();
        assertTrue(r.parser().isClosed());
        assertNull(r.parser().nextToken());
    }

    // Array containing scalars, a small object, a nested array and another
    // object, again across several chunk sizes and offsets.
    public void testStuffInArray() throws Exception
    {
        byte[] data = _jsonDoc(aposToQuotes("[true,{'moreStuff':0},[null],{'extraOrdinary':23}]"));
        JsonFactory f = JSON_F;
        _testStuffInArray(f, data, 0, 100);
        _testStuffInArray(f, data, 0, 3);
        _testStuffInArray(f, data, 0, 1);
        _testStuffInArray(f, data, 3, 100);
        _testStuffInArray(f, data, 3, 3);
        _testStuffInArray(f, data, 3, 1);
    }

    // Full token-stream walk for the array document above.
    private void _testStuffInArray(JsonFactory f, byte[] data, int offset, int readSize) throws IOException
    {
        AsyncReaderWrapper r = asyncForBytes(f, readSize, data, offset);
        assertToken(JsonToken.START_ARRAY, r.nextToken());
        assertFalse(r.parser().hasTextCharacters());
        assertToken(JsonToken.VALUE_TRUE, r.nextToken());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertEquals("{", r.currentText());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals("moreStuff", r.currentName());
        assertToken(JsonToken.VALUE_NUMBER_INT, r.nextToken());
        assertEquals(0L, r.getLongValue());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.START_ARRAY, r.nextToken());
        assertToken(JsonToken.VALUE_NULL, r.nextToken());
        assertToken(JsonToken.END_ARRAY, r.nextToken());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals("extraOrdinary", r.currentName());
        assertToken(JsonToken.VALUE_NUMBER_INT, r.nextToken());
        assertEquals(23, r.getIntValue());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.END_ARRAY, r.nextToken());
    }

    // Field names containing multi-byte Unicode, in short and long variants,
    // to exercise the symbol-table / name-decoding paths separately.
    final static String SHORT_NAME = String.format("u-%s", UNICODE_SEGMENT);
    final static String LONG_NAME = String.format("Unicode-with-some-longer-name-%s", UNICODE_SEGMENT);

    public void testStuffInArray2() throws Exception
    {
        byte[] data = _jsonDoc(aposToQuotes(String.format(
                "[{'%s':true},{'%s':false},{'%s':true},{'%s':false}]",
                SHORT_NAME, LONG_NAME, LONG_NAME, SHORT_NAME)));
        JsonFactory f = JSON_F;
        _testStuffInArray2(f, data, 0, 100);
        _testStuffInArray2(f, data, 0, 3);
        _testStuffInArray2(f, data, 0, 1);
        _testStuffInArray2(f, data, 3, 100);
        _testStuffInArray2(f, data, 3, 3);
        _testStuffInArray2(f, data, 3, 1);
    }

    // Verifies both Unicode names decode correctly on every repetition
    // (second occurrence of each name hits the symbol-table lookup path).
    private void _testStuffInArray2(JsonFactory f, byte[] data, int offset, int readSize) throws IOException
    {
        AsyncReaderWrapper r = asyncForBytes(f, readSize, data, offset);
        assertToken(JsonToken.START_ARRAY, r.nextToken());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals(SHORT_NAME, r.currentName());
        assertToken(JsonToken.VALUE_TRUE, r.nextToken());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals(LONG_NAME, r.currentName());
        assertToken(JsonToken.VALUE_FALSE, r.nextToken());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals(LONG_NAME, r.currentName());
        assertToken(JsonToken.VALUE_TRUE, r.nextToken());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        assertToken(JsonToken.FIELD_NAME, r.nextToken());
        assertEquals(SHORT_NAME, r.currentName());
        assertToken(JsonToken.VALUE_FALSE, r.nextToken());
        assertToken(JsonToken.END_OBJECT, r.nextToken());
        assertToken(JsonToken.END_ARRAY, r.nextToken());
    }

    /*
    /**********************************************************************
    /* Test methods, fail checking
    /**********************************************************************
     */

    // '[' closed with '}' must fail regardless of chunking.
    public void testMismatchedArray() throws Exception
    {
        byte[] data = _jsonDoc(aposToQuotes("[ }"));
        JsonFactory f = JSON_F;
        _testMismatchedArray(f, data, 0, 99);
        _testMismatchedArray(f, data, 0, 3);
        _testMismatchedArray(f, data, 0, 2);
        _testMismatchedArray(f, data, 0, 1);
        _testMismatchedArray(f, data, 1, 3);
        _testMismatchedArray(f, data, 1, 1);
    }

    private void _testMismatchedArray(JsonFactory f, byte[] data, int offset, int readSize) throws IOException
    {
        AsyncReaderWrapper r = asyncForBytes(f, readSize, data, offset);
        assertToken(JsonToken.START_ARRAY, r.nextToken());
        try {
            r.nextToken();
            fail("Should not pass");
        } catch (JsonParseException e) {
            verifyException(e, "Unexpected close marker '}': expected ']'");
        }
    }

    // '{' closed with ']' must fail regardless of chunking.
    public void testMismatchedObject() throws Exception
    {
        byte[] data = _jsonDoc(aposToQuotes("{ ]"));
        JsonFactory f = JSON_F;
        _testMismatchedObject(f, data, 0, 99);
        _testMismatchedObject(f, data, 0, 3);
        _testMismatchedObject(f, data, 0, 2);
        _testMismatchedObject(f, data, 0, 1);
        _testMismatchedObject(f, data, 1, 3);
        _testMismatchedObject(f, data, 1, 1);
    }

    private void _testMismatchedObject(JsonFactory f, byte[] data, int offset, int readSize) throws IOException
    {
        AsyncReaderWrapper r = asyncForBytes(f, readSize, data, offset);
        assertToken(JsonToken.START_OBJECT, r.nextToken());
        try {
            r.nextToken();
            fail("Should not pass");
        } catch (JsonParseException e) {
            verifyException(e, "Unexpected close marker ']': expected '}'");
        }
    }
}
/*
 * Copyright (c) 1999, 2000, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/**
 *  @test
 *  @bug 4248728
 *  @summary Test ReferenceType.allLineLocations
 *  @author Gordon Hirsch
 *
 *  @library scaffold
 *  @run build JDIScaffold VMConnection
 *  @run compile -g RefTypes.java
 *  @run build AllLineLocations
 *
 *  @run main AllLineLocations RefTypes
 */
import com.sun.jdi.*;
import com.sun.jdi.event.*;
import com.sun.jdi.request.*;

import java.util.List;

/**
 * JDI regression test: verifies ReferenceType.allLineLocations and
 * locationsOfLine for classes whose methods are abstract and/or native
 * (which contribute no line table entries), against a debuggee defined in
 * RefTypes.java. Raw List types are intentional — this test predates generics.
 */
public class AllLineLocations extends JDIScaffold {
    final String[] args;

    public static void main(String args[]) throws Exception {
        new AllLineLocations(args).startTests();
    }

    AllLineLocations(String args[]) {
        super();
        this.args = args;
    }

    protected void runTests() throws Exception {
        connect(args);
        waitForVMStart();

        /*
         * Get to a point where the classes are loaded.
         */
        BreakpointEvent bp = resumeTo("RefTypes", "loadClasses", "()V");
        stepOut(bp.thread());

        /*
         * These classes should have no line numbers, except for
         * one in the implicit constructor.
         */
        ReferenceType rt = findReferenceType("AllAbstract");
        if (rt == null) {
            throw new Exception("AllAbstract: not loaded");
        }
        List list = rt.allLineLocations();
        // Only the implicit <init> contributes a line location.
        if (list.size() != 1) {
            throw new Exception("AllAbstract: incorrect number of line locations");
        }
        // Line 5000 does not exist in any source; must match nothing.
        if (rt.locationsOfLine(5000).size() != 0) {
            throw new Exception("AllAbstract: incorrect locationsOfLine");
        }
        Method method = findMethod(rt, "<init>", "()V");
        if (method == null) {
            throw new Exception("AllAbstract.<init> not found");
        }
        List list2 = method.allLineLocations();
        // Per-method locations of <init> must equal the whole type's locations.
        if (!list2.equals(list)) {
            throw new Exception("AllAbstract: line locations in wrong method");
        }
        if (method.locationsOfLine(5000).size() != 0) {
            throw new Exception("AllAbstract: incorrect locationsOfLine");
        }
        System.out.println("AllAbstract: passed");

        // Same checks for a class whose methods are all native.
        rt = findReferenceType("AllNative");
        if (rt == null) {
            throw new Exception("AllNative: not loaded");
        }
        list = rt.allLineLocations();
        if (list.size() != 1) {
            throw new Exception("AllNative: incorrect number of line locations");
        }
        if (rt.locationsOfLine(5000).size() != 0) {
            throw new Exception("AllNative: incorrect locationsOfLine");
        }
        method = findMethod(rt, "<init>", "()V");
        if (method == null) {
            throw new Exception("AllNative.<init> not found");
        }
        list2 = method.allLineLocations();
        if (!list2.equals(list)) {
            throw new Exception("AllNative: line locations in wrong method");
        }
        if (method.locationsOfLine(5000).size() != 0) {
            throw new Exception("AllNative: incorrect locationsOfLine");
        }
        System.out.println("AllNative: passed");

        // An interface has no implicit constructor, so zero locations expected.
        rt = findReferenceType("Interface");
        if (rt == null) {
            throw new Exception("Interface: not loaded");
        }
        list = rt.allLineLocations();
        if (list.size() != 0) {
            throw new Exception("Interface: locations reported for abstract methods");
        }
        System.out.println("Interface: passed");

        /*
         * These classes have line numbers in one method and
         * in the implicit constructor.
         */
        rt = findReferenceType("Abstract");
        if (rt == null) {
            throw new Exception("Abstract: not loaded");
        }
        list = rt.allLineLocations();
        if (list.size() != 5) {
            throw new Exception("Abstract: incorrect number of line locations");
        }
        method = findMethod(rt, "b", "()V");
        if (method == null) {
            throw new Exception("Abstract.b not found");
        }
        list2 = method.allLineLocations();
        // NOTE: removeAll mutates `list` in place — after this, `list` holds
        // only the locations NOT in method b.
        list.removeAll(list2);

        // Remaining location should be in constructor
        if ((list.size() != 1) ||
            !(((Location)list.get(0)).method().name().equals("<init>"))) {
            throw new Exception("Abstract: line locations in wrong method");
        }
        // Line 20 is a known line inside Abstract.b in RefTypes.java.
        if (method.locationsOfLine(20).size() != 1) {
            throw new Exception("Abstract method: incorrect locationsOfLine");
        }
        if (method.locationsOfLine(5000).size() != 0) {
            throw new Exception("Abstract method: incorrect locationsOfLine");
        }
        // Abstract method a has no code, hence no locations at all.
        method = findMethod(rt, "a", "()V");
        if (method.locationsOfLine(5000).size() != 0) {
            throw new Exception("Abstract method: incorrect locationsOfLine");
        }
        System.out.println("Abstract: passed");

        rt = findReferenceType("Native");
        if (rt == null) {
            throw new Exception("Native: not loaded");
        }
        list = rt.allLineLocations();
        if (list.size() != 5) {
            throw new Exception("Native: incorrect number of line locations");
        }
        if (rt.locationsOfLine(5000).size() != 0) {
            throw new Exception("Native: incorrect locationsOfLine");
        }
        method = findMethod(rt, "b", "()V");
        if (method == null) {
            throw new Exception("Native.b not found");
        }
        list2 = method.allLineLocations();
        // As above: strip method b's locations, constructor's must remain.
        list.removeAll(list2);

        // Remaining location should be in constructor
        if ((list.size() != 1) ||
            !(((Location)list.get(0)).method().name().equals("<init>"))) {
            throw new Exception("Native: line locations in wrong method");
        }
        // Line 30 is a known line inside Native.b in RefTypes.java.
        if (method.locationsOfLine(30).size() != 1) {
            throw new Exception("Native method: incorrect locationsOfLine");
        }
        if (method.locationsOfLine(5000).size() != 0) {
            throw new Exception("Native method: incorrect locationsOfLine");
        }
        // Native method a has no line table.
        method = findMethod(rt, "a", "()V");
        if (method.locationsOfLine(5000).size() != 0) {
            throw new Exception("Native method: incorrect locationsOfLine");
        }
        System.out.println("Native: passed");

        rt = findReferenceType("AbstractAndNative");
        if (rt == null) {
            throw new Exception("AbstractAndNative: not loaded");
        }
        list = rt.allLineLocations();
        if (list.size() != 5) {
            throw new Exception("AbstractAndNative: incorrect number of line locations");
        }
        if (rt.locationsOfLine(5000).size() != 0) {
            throw new Exception("AbstractAndNative: incorrect locationsOfLine");
        }
        method = findMethod(rt, "c", "()V");
        if (method == null) {
            throw new Exception("AbstractAndNative.c not found");
        }
        list2 = method.allLineLocations();
        list.removeAll(list2);

        // Remaining location should be in constructor
        if ((list.size() != 1) ||
            !(((Location)list.get(0)).method().name().equals("<init>"))) {
            throw new Exception("AbstractAndNative: line locations in wrong method");
        }
        System.out.println("AbstractAndNative: passed");

        // Allow application to complete
        resumeToVMDeath();
    }
}
/*
 * Copyright 2005 Joe Walker
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.directwebremoting.util;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * A wrapper around Jetty and Grizzly Ajax Continuations.
 * All container access goes through reflection so this class loads (and simply
 * reports "unavailable") when neither container is on the classpath.
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 */
public class Continuation
{
    /**
     * Fish the continuation out of the request if it exists.
     * On any failure {@link #proxy} is left null and {@link #isAvailable()}
     * returns false; the failure itself is only logged at debug level.
     * @param request The http request
     */
    public Continuation(HttpServletRequest request)
    {
        Object tempContinuation = null;
        try
        {
            if (isJetty())
            {
                // Get Continuation through Jetty API
                Class<?> jContinuation = LocalUtil.classForName("org.eclipse.jetty.continuation.ContinuationSupport");
                Method jMethod = jContinuation.getMethod("getContinuation", ServletRequest.class);
                // Static method: null receiver.
                tempContinuation = jMethod.invoke(null, request);
            }
            else if (isGrizzly())
            {
                // Get Continuation through Grizzly API
                Class<?> gContinuation = LocalUtil.classForName("com.sun.grizzly.Continuation");
                Method gMethod = gContinuation.getMethod("getContinuation");
                tempContinuation = gMethod.invoke(null, (Object[]) null);
            }
        }
        catch (Throwable ignored)
        {
            // Deliberately best-effort: absence of a continuation is handled
            // by callers via isAvailable().
            log.debug("Throwable caught when trying to get server Continuation", ignored);
        }
        proxy = tempContinuation;
    }

    /**
     * Are continuations working?
     * If this method returns false then all the other methods will fail.
     * @return true if Continuations are working
     */
    public boolean isAvailable()
    {
        return proxy != null;
    }

    /**
     * Suspend the thread for a maximum of sleepTime milliseconds
     * @throws Exception If reflection breaks
     */
    public void suspend() throws Exception
    {
        try
        {
            if (isJetty())
            {
                // Jetty suspend() takes no arguments.
                suspendMethod.invoke(proxy);
            }
            else if (isGrizzly())
            {
                suspendMethod.invoke(proxy, 60000); // Suspend for 1 minute?
            }
        }
        catch (InvocationTargetException ex)
        {
            rethrowWithoutWrapper(ex);
        }
    }

    /**
     * Resume a continuation.
     * For Jetty: does not work like a real continuation because it restarts
     * the http request.
     * Only resumes when the continuation is actually suspended (or when the
     * container gives us no way to ask, as with Grizzly where
     * isSuspendedMethod is null).
     * @throws Exception If reflection breaks
     */
    public void resume() throws Exception
    {
        try
        {
            if (null == isSuspendedMethod || ((Boolean)isSuspendedMethod.invoke(proxy)).booleanValue())
            {
                resumeMethod.invoke(proxy);
            }
        }
        catch (InvocationTargetException ex)
        {
            rethrowWithoutWrapper(ex);
        }
    }

    /**
     * We shouldn't be catching Jetty RetryRequests so we re-throw them.
     * Matches by class name so no compile-time Jetty dependency is needed.
     * @param th The exception to test for continuation-ness
     */
    public static void rethrowIfContinuation(Throwable th)
    {
        Throwable ex = th;

        if (ex instanceof InvocationTargetException)
        {
            ex = ((InvocationTargetException) ex).getTargetException();
        }

        // Allow Jetty RequestRetry exception to propagate to container!
        if ("org.eclipse.jetty.RetryRequest".equals(ex.getClass().getName()))
        {
            throw (RuntimeException) ex;
        }
    }

    /**
     * Unwrap an InvocationTargetException
     * @param ex The exception to unwrap
     * @return Nothing. This method will not complete normally
     * @throws Exception If reflection breaks
     */
    private static Object rethrowWithoutWrapper(InvocationTargetException ex) throws Exception
    {
        Throwable target = ex.getTargetException();
        if (target instanceof Exception)
        {
            throw (Exception) target;
        }

        if (target instanceof Error)
        {
            throw (Error) target;
        }

        throw ex;
    }

    /**
     * The real continuation object
     */
    private final Object proxy;

    /**
     * The log stream
     */
    private static final Log log = LogFactory.getLog(Continuation.class);

    /**
     * Jetty code used by reflection to allow it to run outside of Jetty
     */
    protected static final Class<?> continuationClass;

    /**
     * How we suspend the continuation
     */
    protected static final Method suspendMethod;

    /**
     * How we resume the continuation
     */
    protected static final Method resumeMethod;

    /**
     * Is this Continuation suspended?
     */
    protected static final Method isSuspendedMethod;

    /**
     * Are we using Jetty at all?
     */
    protected static boolean isJetty = false;

    /**
     * Are we using Grizzly at all?
     */
    protected static boolean isGrizzly = false;

    /**
     * Can we use Jetty/Grizzly?
     * Detection order: Jetty first, then Grizzly, else plain Servlet API.
     */
    static
    {
        Class<?> tempContinuationClass = null;
        try
        {
            try
            {
                tempContinuationClass = LocalUtil.classForName("org.eclipse.jetty.continuation.Continuation");
                isJetty = true;
            }
            catch (Exception ex)
            {
                Class<?> gContinuation = LocalUtil.classForName("com.sun.grizzly.Continuation");
                Method gMethod = gContinuation.getMethod("getContinuation");
                // NOTE(review): the receiver argument is ignored for static
                // methods, so passing gMethod here works, but null would be
                // the conventional receiver — confirm getContinuation is static.
                tempContinuationClass = gMethod.invoke(gMethod).getClass();
                isGrizzly = true;
            }
        }
        catch (Exception ex)
        {
            isJetty = false;
            log.debug("No Jetty or Grizzly Continuation class, using standard Servlet API");
        }
        continuationClass = tempContinuationClass;

        if (isJetty())
        {
            suspendMethod = LocalUtil.getMethod(continuationClass, "suspend");
            isSuspendedMethod = LocalUtil.getMethod(continuationClass, "isSuspended");
        }
        else if (isGrizzly)
        {
            // Grizzly's suspend takes a timeout in milliseconds.
            suspendMethod = LocalUtil.getMethod(continuationClass, "suspend", long.class);
            isSuspendedMethod = null;
        }
        else
        {
            suspendMethod = null;
            isSuspendedMethod = null;
        }
        // NOTE(review): this runs even when continuationClass is null (neither
        // container found) — presumably LocalUtil.getMethod tolerates a null
        // class; verify, since a NullPointerException here would be an
        // ExceptionInInitializerError.
        resumeMethod = LocalUtil.getMethod(continuationClass, "resume");
    }

    /**
     * @return True if we have detected Jetty classes
     */
    public static boolean isJetty()
    {
        return isJetty;
    }

    /**
     * @return True if we have detected Grizzly classes
     */
    public static boolean isGrizzly()
    {
        return isGrizzly;
    }
}
/*
 * Copyright 2012-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.mongo.embedded;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import de.flapdoodle.embed.mongo.Command;
import de.flapdoodle.embed.mongo.MongodExecutable;
import de.flapdoodle.embed.mongo.MongodStarter;
import de.flapdoodle.embed.mongo.config.DownloadConfigBuilder;
import de.flapdoodle.embed.mongo.config.ExtractedArtifactStoreBuilder;
import de.flapdoodle.embed.mongo.config.IMongodConfig;
import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
import de.flapdoodle.embed.mongo.config.Net;
import de.flapdoodle.embed.mongo.config.RuntimeConfigBuilder;
import de.flapdoodle.embed.mongo.distribution.Feature;
import de.flapdoodle.embed.mongo.distribution.IFeatureAwareVersion;
import de.flapdoodle.embed.process.config.IRuntimeConfig;
import de.flapdoodle.embed.process.config.io.ProcessOutput;
import de.flapdoodle.embed.process.io.Processors;
import de.flapdoodle.embed.process.io.Slf4jLevel;
import de.flapdoodle.embed.process.io.progress.Slf4jProgressListener;
import de.flapdoodle.embed.process.runtime.Network;
import de.flapdoodle.embed.process.store.ArtifactStoreBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.data.mongo.MongoClientDependsOnBeanFactoryPostProcessor;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertySource;
import org.springframework.util.Assert;

/**
 * {@link EnableAutoConfiguration Auto-configuration} for Embedded Mongo.
 *
 * Starts a flapdoodle-managed mongod process before the regular Mongo
 * auto-configuration runs, and publishes the actual port under
 * {@code local.mongo.port} when a random port was requested.
 *
 * @author Henryk Konsek
 * @author Andy Wilkinson
 * @since 1.3.0
 */
@Configuration
@EnableConfigurationProperties({ MongoProperties.class, EmbeddedMongoProperties.class })
@AutoConfigureBefore(MongoAutoConfiguration.class)
@ConditionalOnClass({ Mongo.class, MongodStarter.class })
public class EmbeddedMongoAutoConfiguration {

	@Autowired
	private MongoProperties properties;

	@Autowired
	private EmbeddedMongoProperties embeddedProperties;

	@Autowired
	private ApplicationContext context;

	// Optional user-supplied runtime config; null means use flapdoodle defaults.
	@Autowired(required = false)
	private IRuntimeConfig runtimeConfig;

	/**
	 * Default runtime config that routes mongod process output to an SLF4J
	 * logger named "&lt;this package&gt;.EmbeddedMongo" (stdout at INFO,
	 * stderr at ERROR, console at DEBUG). Only created when SLF4J is present
	 * and no IRuntimeConfig bean was supplied by the user.
	 */
	@Bean
	@ConditionalOnMissingBean
	@ConditionalOnClass(Logger.class)
	public IRuntimeConfig embeddedMongoRuntimeConfig() {
		Logger logger = LoggerFactory
				.getLogger(getClass().getPackage().getName() + ".EmbeddedMongo");
		ProcessOutput processOutput = new ProcessOutput(
				Processors.logTo(logger, Slf4jLevel.INFO),
				Processors.logTo(logger, Slf4jLevel.ERROR), Processors.named(
						"[console>]", Processors.logTo(logger, Slf4jLevel.DEBUG)));
		return new RuntimeConfigBuilder().defaultsWithLogger(Command.MongoD, logger)
				.processOutput(processOutput).artifactStore(getArtifactStore(logger))
				.build();
	}

	// Artifact store whose download progress is also reported through SLF4J.
	private ArtifactStoreBuilder getArtifactStore(Logger logger) {
		return new ExtractedArtifactStoreBuilder().defaults(Command.MongoD)
				.download(new DownloadConfigBuilder().defaultsForCommand(Command.MongoD)
						.progressListener(new Slf4jProgressListener(logger)));
	}

	/**
	 * The mongod process itself; started on context startup and stopped on
	 * shutdown via the bean lifecycle attributes.
	 * When port 0 was configured (pick-a-free-port), the port actually chosen
	 * is published to the environment so clients can discover it.
	 */
	@Bean(initMethod = "start", destroyMethod = "stop")
	@ConditionalOnMissingBean
	public MongodExecutable embeddedMongoServer(IMongodConfig mongodConfig)
			throws IOException {
		// NOTE(review): getPort() maps a null property to DEFAULT_PORT, so this
		// branch is taken only when spring.data.mongodb.port is explicitly 0.
		if (getPort() == 0) {
			publishPortInfo(mongodConfig.net().getPort());
		}
		MongodStarter mongodStarter = getMongodStarter(this.runtimeConfig);
		return mongodStarter.prepare(mongodConfig);
	}

	// Null runtime config falls back to flapdoodle's default starter.
	private MongodStarter getMongodStarter(IRuntimeConfig runtimeConfig) {
		if (runtimeConfig == null) {
			return MongodStarter.getDefaultInstance();
		}
		return MongodStarter.getInstance(runtimeConfig);
	}

	/**
	 * The mongod process configuration: requested version/features, and a
	 * network binding on the configured host/port — or a free port resolved
	 * eagerly here when port 0 (or a negative value) was configured.
	 */
	@Bean
	@ConditionalOnMissingBean
	public IMongodConfig embeddedMongoConfiguration() throws IOException {
		IFeatureAwareVersion featureAwareVersion = new ToStringFriendlyFeatureAwareVersion(
				this.embeddedProperties.getVersion(),
				this.embeddedProperties.getFeatures());
		MongodConfigBuilder builder = new MongodConfigBuilder()
				.version(featureAwareVersion);
		if (getPort() > 0) {
			builder.net(new Net(getHost().getHostAddress(), getPort(),
					Network.localhostIsIPv6()));
		}
		else {
			builder.net(new Net(getHost().getHostAddress(),
					Network.getFreeServerPort(getHost()), Network.localhostIsIPv6()));
		}
		return builder.build();
	}

	// Configured port, defaulting to MongoProperties.DEFAULT_PORT when unset.
	private int getPort() {
		if (this.properties.getPort() == null) {
			return MongoProperties.DEFAULT_PORT;
		}
		return this.properties.getPort();
	}

	// Configured host, defaulting to the loopback address (IPv6 ::1 or IPv4
	// 127.0.0.1 depending on what localhost resolves to).
	private InetAddress getHost() throws UnknownHostException {
		if (this.properties.getHost() == null) {
			return InetAddress.getByAddress(Network.localhostIsIPv6()
					? new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }
					: new byte[] { 127, 0, 0, 1 });
		}
		return InetAddress.getByName(this.properties.getHost());
	}

	private void publishPortInfo(int port) {
		setPortProperty(this.context, port);
	}

	// Records "local.mongo.port" in this context's environment and walks up
	// the parent chain so enclosing contexts see it too.
	private void setPortProperty(ApplicationContext currentContext, int port) {
		if (currentContext instanceof ConfigurableApplicationContext) {
			MutablePropertySources sources = ((ConfigurableApplicationContext) currentContext)
					.getEnvironment().getPropertySources();
			getMongoPorts(sources).put("local.mongo.port", port);
		}
		if (currentContext.getParent() != null) {
			setPortProperty(currentContext.getParent(), port);
		}
	}

	// Lazily creates (and registers with highest precedence) the "mongo.ports"
	// property source, returning its backing map for direct mutation.
	@SuppressWarnings("unchecked")
	private Map<String, Object> getMongoPorts(MutablePropertySources sources) {
		PropertySource<?> propertySource = sources.get("mongo.ports");
		if (propertySource == null) {
			propertySource = new MapPropertySource("mongo.ports",
					new HashMap<String, Object>());
			sources.addFirst(propertySource);
		}
		return (Map<String, Object>) propertySource.getSource();
	}

	/**
	 * Additional configuration to ensure that {@link MongoClient} beans depend
	 * on the {@code embeddedMongoServer} bean.
	 */
	@Configuration
	@ConditionalOnClass(MongoClient.class)
	protected static class EmbeddedMongoDependencyConfiguration
			extends MongoClientDependsOnBeanFactoryPostProcessor {

		public EmbeddedMongoDependencyConfiguration() {
			super("embeddedMongoServer");
		}

	}

	/**
	 * A workaround for the lack of a {@code toString} implementation on
	 * {@code GenericFeatureAwareVersion}.
	 */
	private final static class ToStringFriendlyFeatureAwareVersion
			implements IFeatureAwareVersion {

		private final String version;

		private final Set<Feature> features;

		// features == null is normalized to an empty set; version is mandatory.
		private ToStringFriendlyFeatureAwareVersion(String version,
				Set<Feature> features) {
			Assert.notNull(version, "version must not be null");
			this.version = version;
			this.features = (features == null ? Collections.<Feature>emptySet()
					: features);
		}

		@Override
		public String asInDownloadPath() {
			return this.version;
		}

		@Override
		public boolean enabled(Feature feature) {
			return this.features.contains(feature);
		}

		@Override
		public String toString() {
			return this.version;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + this.features.hashCode();
			result = prime * result + this.version.hashCode();
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj) {
				return true;
			}
			if (obj == null) {
				return false;
			}
			// Same concrete class: compare by version + features. Otherwise
			// fall through to identity comparison (which is false here, since
			// this != obj was already checked).
			if (getClass() == obj.getClass()) {
				ToStringFriendlyFeatureAwareVersion other = (ToStringFriendlyFeatureAwareVersion) obj;
				boolean equals = true;
				equals &= this.features.equals(other.features);
				equals &= this.version.equals(other.version);
				return equals;
			}
			return super.equals(obj);
		}

	}

}
/* * The MIT License (MIT) * Copyright (c) 2007-2015 by Institute for Computational Biomedicine, * Weill Medical College of Cornell University. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.broad.igv.goby; import com.google.protobuf.ByteString; import edu.cornell.med.icb.goby.alignments.Alignments; import edu.cornell.med.icb.goby.alignments.EntryFlagHelper; import it.unimi.dsi.fastutil.bytes.ByteArrayList; import it.unimi.dsi.fastutil.bytes.ByteList; import it.unimi.dsi.fastutil.objects.ObjectArrayList; import it.unimi.dsi.lang.MutableString; import org.apache.log4j.Logger; import org.broad.igv.data.CharArrayList; import org.broad.igv.feature.LocusScore; import org.broad.igv.feature.Strand; import org.broad.igv.prefs.Constants; import org.broad.igv.prefs.PreferencesManager; import org.broad.igv.sam.*; import org.broad.igv.track.WindowFunction; import java.awt.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; /** * A Facade to a <a href="http://goby.campagnelab.org">Goby</a> alignment entry. The facade exposes * <a href="http://goby.campagnelab.org">Goby</a> alignment entries in the format expected by * IGV. Since <a href="http://goby.campagnelab.org">Goby</a> does not store read sequences, * we retrieve the reference sequence on the fly from IGV and transform it to produce the read bases. * <p/> * For further information about Goby, or to obtain sample alignment files, see http://goby.campagnelab.org * * @author Fabien Campagne * Date: Jun 29, 2010 * Time: 12:07:52 PM */ public class GobyAlignment implements Alignment { /** * Used to log debug and informational messages. */ private static final Logger LOG = Logger.getLogger(GobyAlignment.class); protected final Alignments.AlignmentEntry entry; private final GobyAlignmentIterator iterator; protected AlignmentBlockImpl[] block = new AlignmentBlockImpl[1]; protected AlignmentBlockImpl[] insertionBlock; private CharArrayList gapTypes = null; private List<Gap> gaps = null; private static final ReadMate unmappedMate = new ReadMate("*", -1, false, true); private Comparator<? 
super AlignmentBlock> blockComparator = new Comparator<AlignmentBlock>() { public int compare(AlignmentBlock alignmentBlock, AlignmentBlock alignmentBlock1) { return alignmentBlock.getStart() - alignmentBlock1.getStart(); } }; /** * Construct the facade for an iterator and entry. * * @param iterator Used to retrieve chromosome names from target indices. * @param entry Alignement entry (from Goby protocol buffer Alignment.entries). */ public GobyAlignment(final GobyAlignmentIterator iterator, final Alignments.AlignmentEntry entry) { this.iterator = iterator; this.entry = entry; buildBlocks(entry); } private boolean hasReadInsertion(String from) { return from.length() > 0 && from.charAt(0) == '-'; } /** * Construct alignment blocks from the Goby alignment entry. This method uses the convention that '=' denotes a match to the reference. * <p/> * Conventions for storing sequence variations in Goby alignments are described * <a href="http://tinyurl.com/goby-sequence-variations">here</a> * * @param alignmentEntry The Goby alignment entry to use */ public void buildBlocks(Alignments.AlignmentEntry alignmentEntry) { ObjectArrayList<AlignmentBlock> blocks = new ObjectArrayList<AlignmentBlock>(); ObjectArrayList<AlignmentBlock> insertionBlocks = new ObjectArrayList<AlignmentBlock>(); int start = alignmentEntry.getPosition(); ByteArrayList bases = new ByteArrayList(); ByteArrayList scores = new ByteArrayList(); int readLength = alignmentEntry.getQueryLength(); byte[] readBases = new byte[readLength]; byte[] readQual = new byte[readLength]; Arrays.fill(readBases, (byte) '='); if (alignmentEntry.hasReadQualityScores()) { readQual = alignmentEntry.getReadQualityScores().toByteArray(); } else { Arrays.fill(readQual, (byte) 40); } int j = 0; int insertedBases = 0; int deletedBases = 0; final int leftPadding = alignmentEntry.getQueryPosition(); boolean showSoftClipped = PreferencesManager.getPreferences().getAsBoolean(Constants.SAM_SHOW_SOFT_CLIPPED); if (showSoftClipped && 
entry.hasSoftClippedBasesLeft()) { int clipLength = entry.getSoftClippedBasesLeft().length(); addSoftClipBlock(blocks, Math.max(0, entry.getPosition() - clipLength), entry.getSoftClippedBasesLeft(), readQual, entry.hasSoftClippedQualityLeft(), entry.getSoftClippedQualityLeft().toByteArray(), 0); } for (Alignments.SequenceVariation var : alignmentEntry.getSequenceVariationsList()) { final String from = var.getFrom(); final int fromLength = from.length(); final String to = var.getTo(); final int toLength = from.length(); final int sequenceVariationLength = Math.max(fromLength, toLength); final ByteString toQuality = var.getToQuality(); if (hasReadInsertion(from)) { bases.clear(); scores.clear(); for (int i = 0; i < sequenceVariationLength; i++) { final char toChar = i >= toLength ? '-' : to.charAt(i); int size = toQuality.size(); final byte qual = size > 0 && i < size ? toQuality.byteAt(i) : 40; bases.add((byte) toChar); scores.add(qual); deletedBases++; } addBlock(insertionBlocks, alignmentEntry.getPosition() + var.getPosition(), bases, scores); bases.clear(); scores.clear(); } else if (!to.contains("-")) { for (int i = 0; i < toLength; i++) { final int offset = j + var.getPosition() + i - 1 + leftPadding - insertedBases; if (offset > 0 && offset < readBases.length) { readBases[offset] = (byte) to.charAt(i); if (i < toQuality.size()) { readQual[offset] = toQuality.byteAt(i); } } } } else { // has read deletion: insertedBases++; } } int pos = start; int matchLength = alignmentEntry.getQueryAlignedLength() - deletedBases; int endAlignmentRefPosition = matchLength + start; bases.clear(); scores.clear(); int maxIndex = Math.min(readBases.length, readQual.length); while (pos < endAlignmentRefPosition) { final int index = pos - start + leftPadding; if (index < maxIndex) { bases.add(readBases[index]); scores.add(readQual[index]); } else { break; } ++pos; } addBlock(blocks, start, bases, scores); blocks = introduceDeletions(blocks, entry); if (showSoftClipped && 
entry.hasSoftClippedBasesRight()) { int targetAlignedLength = entry.getTargetAlignedLength(); addSoftClipBlock(blocks, entry.getPosition() + targetAlignedLength, entry.getSoftClippedBasesRight(), readQual, entry.hasSoftClippedQualityRight(), entry.getSoftClippedQualityRight().toByteArray(), entry.getQueryAlignedLength() + entry.getSoftClippedBasesLeft().length()); } block = blocks.toArray(new AlignmentBlockImpl[blocks.size()]); Arrays.sort(block, blockComparator); insertionBlock = insertionBlocks.toArray(new AlignmentBlockImpl[insertionBlocks.size()]); Arrays.sort(insertionBlock, blockComparator); ObjectArrayList<GobyAlignment> list = null; if (alignmentEntry.hasSplicedForwardAlignmentLink() || alignmentEntry.hasSplicedBackwardAlignmentLink()) { // if has a forward link, store a reference to this alignment in the reader (which represents the window scope) list = iterator.cacheSpliceComponent(this); if (list.size() > 1 && spliceListIsValid(list)) { final GobyAlignment spliceHeadAlignment = list.get(0); ObjectArrayList<AlignmentBlock> splicedBlocks = new ObjectArrayList<AlignmentBlock>(); splicedBlocks.addAll(ObjectArrayList.wrap(spliceHeadAlignment.block)); splicedBlocks.addAll(blocks); spliceHeadAlignment.block = splicedBlocks.toArray(new AlignmentBlockImpl[splicedBlocks.size()]); ObjectArrayList<AlignmentBlock> splicedInsertionBlocks = new ObjectArrayList<AlignmentBlock>(); splicedInsertionBlocks.addAll(ObjectArrayList.wrap(spliceHeadAlignment.insertionBlock)); splicedInsertionBlocks.addAll(insertionBlocks); spliceHeadAlignment.insertionBlock = splicedInsertionBlocks.toArray(new AlignmentBlockImpl[splicedInsertionBlocks.size()]); if (spliceHeadAlignment.gapTypes == null) { spliceHeadAlignment.gapTypes = new CharArrayList(10); } spliceHeadAlignment.gapTypes.add(SAMAlignment.SKIPPED_REGION); // Since the previous alignment carries this information, we clear up block and insertionBlock // in this alignment, but keep any softClips: this.block = keepSoftClips(block); 
this.insertionBlock = new AlignmentBlockImpl[0]; } } block = removeNulls(block); } private AlignmentBlockImpl[] removeNulls(AlignmentBlockImpl[] block) { int nullCount = 0; for (int i = 0; i < block.length; i++) { AlignmentBlock alignmentBlock = block[i]; if (alignmentBlock == null) { nullCount++; } } if (nullCount == 0) { // nothing to filter return block; } else { int newLength = block.length - nullCount; AlignmentBlockImpl[] result = new AlignmentBlockImpl[newLength]; int j = 0; for (int i = 0; i < result.length; i++) { result[i] = block[j++]; } return result; } } private AlignmentBlockImpl[] keepSoftClips(AlignmentBlockImpl[] blocks) { int numSoftCLippedBlocks = 0; for (AlignmentBlock block : blocks) { if (block.isSoftClipped()) numSoftCLippedBlocks++; } AlignmentBlockImpl[] tmp = new AlignmentBlockImpl[numSoftCLippedBlocks]; int j = 0; for (int i = 0; i < numSoftCLippedBlocks; i++) { AlignmentBlockImpl block = blocks[j++]; if (block.isSoftClipped()) { tmp[i] = block; } } return tmp; } private void addSoftClipBlock(ObjectArrayList<AlignmentBlock> blocks, int position, String softClippedBasesLeft, byte[] readQualScores, boolean hasSoftClippedQuality, byte[] softClippedQuality, int j) { final int length = softClippedBasesLeft.length(); byte[] bases = new byte[length]; byte[] scores = new byte[length]; for (int i = 0; i < length; i++) { bases[i] = (byte) softClippedBasesLeft.charAt(i); scores[i] = hasSoftClippedQuality ? softClippedQuality[i] : readQualScores[j++]; } final AlignmentBlockImpl alignmentBlock = new AlignmentBlockImpl( position, bases, scores); alignmentBlock.setSoftClipped(true); blocks.add(alignmentBlock); } /** * Verify that the list has an appropriate unbroken chain of back links. 
* * @param list the list of splices to validate * @return true if the list has an unbroken chain of back links */ boolean spliceListIsValid(final ObjectArrayList<GobyAlignment> list) { if (list != null && list.size() > 1) { Alignments.AlignmentEntry prevEntry = list.get(0).entry; for (int i = 1; i < list.size(); i++) { Alignments.AlignmentEntry currentEntry = list.get(i).entry; if (!currentEntry.hasSplicedBackwardAlignmentLink()) return false; else { Alignments.RelatedAlignmentEntry currentBackwardLink = currentEntry.getSplicedBackwardAlignmentLink(); if ((prevEntry.getQueryIndex() != currentEntry.getQueryIndex()) || (prevEntry.getFragmentIndex() != currentBackwardLink.getFragmentIndex()) || (prevEntry.getPosition() != currentBackwardLink.getPosition()) || (prevEntry.getTargetIndex() != currentBackwardLink.getTargetIndex())) { return false; } } prevEntry = currentEntry; } } return true; } /** * This method splits blocks whose boundaries contain a read deletion. * * @param blocks * @param alignmentEntry * @return */ private ObjectArrayList<AlignmentBlock> introduceDeletions(ObjectArrayList<AlignmentBlock> blocks, Alignments.AlignmentEntry alignmentEntry) { ObjectArrayList<AlignmentBlock> newBlocks = new ObjectArrayList<AlignmentBlock>(); for (Alignments.SequenceVariation var : alignmentEntry.getSequenceVariationsList()) { for (AlignmentBlock block : blocks) { if (!block.isSoftClipped()) { final int vrPos = var.getPosition() + entry.getPosition(); if (hasReadDeletion(var) && vrPos >= block.getStart() && vrPos <= block.getEnd()) { ByteList leftBases = new ByteArrayList(block.getBases()); ByteList leftScores = new ByteArrayList(block.getQualities()); ByteList rightBases = new ByteArrayList(block.getBases()); ByteList rightScores = new ByteArrayList(block.getQualities()); int deletionPosition = var.getPosition() - 1; leftBases = leftBases.subList(0, deletionPosition); rightBases = rightBases.subList(deletionPosition, rightBases.size()); leftScores = 
leftScores.subList(0, deletionPosition); rightScores = rightScores.subList(deletionPosition, rightScores.size()); AlignmentBlock left = new AlignmentBlockImpl(block.getStart(), leftBases.toByteArray(new byte[leftBases.size()]), leftScores.toByteArray(new byte[leftScores.size()])); AlignmentBlock right = new AlignmentBlockImpl(block.getStart() + leftBases.size() + var.getFrom().length(), rightBases.toByteArray(new byte[rightBases.size()]), rightScores.toByteArray(new byte[rightScores.size()])); blocks.remove(block); newBlocks.add(left); newBlocks.add(right); } } } } newBlocks.addAll(blocks); return newBlocks; } private boolean hasReadDeletion(Alignments.SequenceVariation var) { return (var.getTo().contains("-")); } private int addBlock(ObjectArrayList<AlignmentBlock> blocks, int start, ByteArrayList bases, ByteArrayList scores) { blocks.add( new AlignmentBlockImpl(start, bases.toByteArray(new byte[bases.size()]), scores.toByteArray(new byte[scores.size()]))); start += bases.size(); bases.clear(); scores.clear(); return start; } /** * Transform the read index into a readname: * * @return */ public String getReadName() { return Integer.toString(entry.getQueryIndex()); } public String getReadSequence() { return "read-sequence"; } /** * Get the reference id from the iterator, prepend "chr". */ public String getChr() { return getChromosome(entry.getTargetIndex()); } @Override public String getContig() { return getChr(); } /** * Get the reference id from the iterator, prepend "chr". 
* * @param targetIndex Returns the chromosome id */ public String getChromosome(int targetIndex) { return "chr" + iterator.getId(targetIndex).toString(); } public int getAlignmentStart() { // //LOG.info("getAlignmentStart"); return entry.getPosition(); } public boolean contains(double location) { return location >= getStart() && location < getEnd(); } public AlignmentBlock[] getAlignmentBlocks() { // //LOG.info("getAlignmentBlocks"); return block; } public AlignmentBlock[] getInsertions() { //LOG.info("getInsertions"); return insertionBlock; } @Override public List<Gap> getGaps() { if(gaps == null && gapTypes != null && gapTypes.getSize() > 0 && block.length > 1) { gaps = new ArrayList<Gap>(gapTypes.getSize()); char[] types = gapTypes.toArray(); AlignmentBlock leftBlock = block[0]; for(int i=1; i<block.length; i++ ) { AlignmentBlock rightBlock = block[i]; int gapStart = leftBlock.getEnd(); int nBases = rightBlock.getStart() - gapStart; char type = types.length <= i ? types[i-1] : 'N'; if(type == SAMAlignment.SKIPPED_REGION) { gaps.add(new SpliceGap(gapStart, nBases, type, leftBlock.getLength(), rightBlock.getLength())); } else { gaps.add(new Gap(gapStart, nBases, type)); } leftBlock = rightBlock; } } return gaps; } public String getCigarString() { //LOG.info("getCigarString"); return null; } public int getInferredInsertSize() { if (entry.hasInsertSize()) { return entry.getInsertSize(); } else return 0; } public int getMappingQuality() { if (entry.hasMappingQuality()) { return entry.getMappingQuality(); } else { return 255; } } /** * Returns the mate for a paired-end read. Please note that this method will return an unmapped * mate for any single end read as well. Do check if the read is paired before calling getMate(). * * @return The mate, or a constant unmapped mate (for single end reads, or paired end where the mate is not found). 
*/ public ReadMate getMate() { if (entry.hasPairAlignmentLink()) { Alignments.RelatedAlignmentEntry link = entry.getPairAlignmentLink(); String mateChr = getChromosome(link.getTargetIndex()); int mateStart = link.getPosition(); boolean mateNegativeStrand = EntryFlagHelper.isMateReverseStrand(entry); boolean isReadUnmappedFlag = EntryFlagHelper.isReadUnmapped(entry); final ReadMate mate = new ReadMate(mateChr, mateStart, mateNegativeStrand, isReadUnmappedFlag); return mate; } else { return unmappedMate; } } public boolean isProperPair() { if (entry.hasPairFlags()) { return EntryFlagHelper.isProperlyPaired(entry); } else return false; } public boolean isMapped() { return true; } public boolean isPaired() { if (entry.hasPairFlags()) { return EntryFlagHelper.isPaired(entry); } else return false; } public boolean isNegativeStrand() { // //LOG.info("isNegativeStrand"); return entry.getMatchingReverseStrand(); } public boolean isDuplicate() { //LOG.info("isDuplicate"); return false; } public int getAlignmentEnd() { //LOG.info("getAlignmentEnd"); return entry.getPosition() + entry.getTargetAlignedLength(); } public String getSample() { //LOG.info("getSample"); return null; } public String getReadGroup() { //LOG.info("getReadGroup"); return null; } public String getLibrary() { //LOG.info("getReadGroup"); return null; } public Object getAttribute(String key) { //LOG.info("getAttribute"); return null; } /** * Return the strand of the read marked "first-in-pair" for a paired alignment. This method can return * Strand.NONE if the end marked first is unmapped. * * @return strand of first-of-pair */ public Strand getFirstOfPairStrand() { if (isPaired()) { if (isFirstOfPair()) { return isNegativeStrand() ? Strand.NEGATIVE : Strand.POSITIVE; } else { ReadMate mate = getMate(); if (mate.isMapped() && isProperPair()) { return mate.isNegativeStrand() ? Strand.NEGATIVE : Strand.POSITIVE; } else { return Strand.NONE; } } } else { return isNegativeStrand() ? 
Strand.NEGATIVE : Strand.POSITIVE; } } /** * Return the strand of the read marked "second-in-pair" for a paired alignment. The strand is * undefined (Strand.NONE) for non-paired alignments * * @return strand of second-of-pair */ public Strand getSecondOfPairStrand() { if (isPaired()) { if (isSecondOfPair()) { return isNegativeStrand() ? Strand.NEGATIVE : Strand.POSITIVE; } else { ReadMate mate = getMate(); if (mate.isMapped() && isProperPair()) { return mate.isNegativeStrand() ? Strand.NEGATIVE : Strand.POSITIVE; } else { return Strand.NONE; } } } else { // Undefined for non-paired alignments return Strand.NONE; } } public void setMateSequence(String sequence) { //LOG.info("setMateSequence"); } public String getPairOrientation() { //LOG.info("getPairOrientation"); String pairOrientation = ""; if (EntryFlagHelper.isPaired(entry) && !EntryFlagHelper.isMateUnmapped(entry) && entry.getTargetIndex() == entry.getPairAlignmentLink().getTargetIndex()) { char s1 = EntryFlagHelper.isReadReverseStrand(entry) ? 'R' : 'F'; char s2 = EntryFlagHelper.isMateReverseStrand(entry) ? 
'R' : 'F'; char o1 = ' '; char o2 = ' '; char[] tmp = new char[4]; if (EntryFlagHelper.isFirstInPair(entry)) { o1 = '1'; o2 = '2'; } else if (EntryFlagHelper.isSecondInPair(entry)) { o1 = '2'; o2 = '1'; } if (getInferredInsertSize() > 0) { tmp[0] = s1; tmp[1] = o1; tmp[2] = s2; tmp[3] = o2; } else { tmp[2] = s1; tmp[3] = o1; tmp[0] = s2; tmp[1] = o2; } pairOrientation = new String(tmp); } return pairOrientation; } public boolean isSmallInsert() { //LOG.info("isSmallInsert"); return false; } /** * Return true if this read failed vendor quality checks */ public boolean isVendorFailedRead() { return false; } /** * Return the default color with which to render this alignment * * @return */ public Color getColor() { return null; } public int getStart() { // //LOG.info("getStart"); return entry.getPosition(); } public int getEnd() { // //LOG.info("getEnd"); if (block.length == 0) return getStart(); else { int last = block.length - 1; if (block[last] == null) { // System.out.println("STOP"); return entry.getPosition() + entry.getTargetAlignedLength(); } return block[last].getEnd(); } } public void setStart( int start) { // //LOG.info("setStart"); throw new UnsupportedOperationException("setStart is not supported"); //To change body of implemented methods use File | Settings | File Templates. } public void setEnd( int end) { throw new UnsupportedOperationException("setEnd is not supported"); //To change body of implemented methods use File | Settings | File Templates. } public float getScore() { //LOG.info("getScore"); return entry.getScore(); } public LocusScore copy() { return this; } /** * This method is provide to provide as a hook for customizing the text that is copied to the clipboard. * The default behavior is to just copy the tooltip text. 
* * @param location * @param mouseX * @return */ public String getClipboardString(double location, int mouseX) { return getValueString(location, mouseX, null); } public String getValueString(double position, int mouseX, WindowFunction windowFunction) { // //LOG.info("getValueString"); MutableString buffer = new MutableString(); buffer.append(entry.toString()); buffer.replace("\n", "<br>"); if (this.isPaired()) { buffer.append("----------------------" + "<br>"); buffer.append("Mate start = " + getMate().positionString() + "<br>"); buffer.append("Mate is mapped = " + (getMate().isMapped() ? "yes" : "no") + "<br>"); //buf.append("Pair is proper = " + (getProperPairFlag() ? "yes" : "no") + "<br>"); if (getChr().equals(getMate().getChr())) { buffer.append("Insert size = " + getInferredInsertSize() + "<br>"); } if (getPairOrientation().length() > 0) { buffer.append("Pair orientation = " + getPairOrientation() + "<br>"); } if (isFirstOfPair()) { buffer.append("First of pair <br>"); } if (isSecondOfPair()) { buffer.append("Second of pair <br>"); } } return buffer.toString(); } public byte getBase(double position) { int basePosition = (int) position; for (AlignmentBlock block : getAlignmentBlocks()) { if (block.contains(basePosition)) { int offset = basePosition - block.getStart(); byte base = block.getBase(offset); return base; } } return 0; } public byte getPhred(double position) { int basePosition = (int) position; for (AlignmentBlock block : getAlignmentBlocks()) { if (block.contains(basePosition)) { int offset = basePosition - block.getStart(); byte score = block.getQuality(offset); return score; } } return 0; } /** * Return true if this alignment is marked "first in pair". Added to suppor bisulfite sequencing mode. */ public boolean isFirstOfPair() { return EntryFlagHelper.isFirstInPair(entry); } /** * Return true if this alignment is marked "second in pair". Added to suppor bisulfite sequencing mode. 
*/ public boolean isSecondOfPair() { return EntryFlagHelper.isSecondInPair(entry); } public Strand getReadStrand() { return isNegativeStrand() ? Strand.NEGATIVE : Strand.POSITIVE; } @Override public void finish() { } @Override public boolean isPrimary() { return !EntryFlagHelper.isNotPrimaryAlignment(entry); } @Override public boolean isSupplementary() { // The SAM 0x0800 tag return false; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sqoop.mapreduce;

import java.io.IOException;

import org.apache.avro.file.DataFileConstants;
import org.apache.avro.mapred.AvroJob;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.sqoop.util.PerfCounters;

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.config.ConfigurationHelper;
import com.cloudera.sqoop.io.CodecMap;
import com.cloudera.sqoop.manager.ImportJobContext;
import com.cloudera.sqoop.mapreduce.JobBase;
import com.cloudera.sqoop.orm.TableClassName;
import com.cloudera.sqoop.util.ImportException;

/**
 * Base class for running an import MapReduce job.
 * Allows dependency injection, etc, for easy customization of import job types.
 */
public class ImportJobBase extends JobBase {

  // Context describing the table/query being imported; may be null for the
  // no-arg constructors (subclasses then supply it differently).
  private ImportJobContext context;

  public static final Log LOG = LogFactory.getLog(
      ImportJobBase.class.getName());

  public ImportJobBase() {
    this(null);
  }

  public ImportJobBase(final SqoopOptions opts) {
    this(opts, null, null, null, null);
  }

  /**
   * Full dependency-injection constructor: the mapper, input and output formats
   * may each be null, in which case JobBase defaults apply.
   */
  public ImportJobBase(final SqoopOptions opts,
      final Class<? extends Mapper> mapperClass,
      final Class<? extends InputFormat> inputFormatClass,
      final Class<? extends OutputFormat> outputFormatClass,
      final ImportJobContext context) {
    super(opts, mapperClass, inputFormatClass, outputFormatClass);
    this.context = context;
  }

  /**
   * Configure the output format to use for the job.
   */
  @Override
  protected void configureOutputFormat(Job job, String tableName,
      String tableClassName) throws ClassNotFoundException, IOException {

    job.setOutputFormatClass(getOutputFormatClass());

    if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
      // SequenceFile values are instances of the generated record class.
      job.getConfiguration().set("mapred.output.value.class", tableClassName);
    }

    if (options.shouldUseCompression()) {
      FileOutputFormat.setCompressOutput(job, true);

      String codecName = options.getCompressionCodec();
      Class<? extends CompressionCodec> codecClass;
      if (codecName == null) {
        // Default to gzip when no codec was requested explicitly.
        codecClass = GzipCodec.class;
      } else {
        Configuration conf = job.getConfiguration();
        codecClass = CodecMap.getCodec(codecName, conf).getClass();
      }
      FileOutputFormat.setOutputCompressorClass(job, codecClass);

      if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
        SequenceFileOutputFormat.setOutputCompressionType(job,
            CompressionType.BLOCK);
      }

      // SQOOP-428: Avro expects not a fully qualified class name but a "short"
      // name instead (e.g. "snappy") and it needs to be set in a custom
      // configuration option called "avro.output.codec".
      // The default codec is "deflate".
      if (options.getFileLayout() == SqoopOptions.FileLayout.AvroDataFile) {
        if (codecName != null) {
          String shortName =
            CodecMap.getCodecShortNameByName(codecName, job.getConfiguration());
          // Avro only knows about "deflate" and not "default"
          if (shortName.equalsIgnoreCase("default")) {
            shortName = "deflate";
          }
          job.getConfiguration().set(AvroJob.OUTPUT_CODEC, shortName);
        } else {
          job.getConfiguration()
            .set(AvroJob.OUTPUT_CODEC, DataFileConstants.DEFLATE_CODEC);
        }
      }
    }

    Path outputPath = context.getDestination();
    FileOutputFormat.setOutputPath(job, outputPath);
  }

  /**
   * Actually run the MapReduce job.
   */
  @Override
  protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
      InterruptedException {

    PerfCounters perfCounters = new PerfCounters();
    perfCounters.startClock();

    boolean success = job.waitForCompletion(true);
    perfCounters.stopClock();

    Counters jobCounters = job.getCounters();
    // If the job has been retired, these may be unavailable.
    if (null == jobCounters) {
      displayRetiredJobNotice(LOG);
    } else {
      perfCounters.addBytes(jobCounters.getGroup("FileSystemCounters")
        .findCounter("HDFS_BYTES_WRITTEN").getValue());
      LOG.info("Transferred " + perfCounters.toString());
      long numRecords = ConfigurationHelper.getNumMapOutputRecords(job);
      LOG.info("Retrieved " + numRecords + " records.");
    }
    return success;
  }


  /**
   * Run an import job to read a table in to HDFS.
   *
   * @param tableName  the database table to read; may be null if a free-form
   * query is specified in the SqoopOptions, and the ImportJobBase subclass
   * supports free-form queries.
   * @param ormJarFile the Jar file to insert into the dcache classpath.
   * (may be null)
   * @param splitByCol the column of the database table to use to split
   * the import
   * @param conf A fresh Hadoop Configuration to use to build an MR job.
   * @throws IOException if the job encountered an IO problem
   * @throws ImportException if the job failed unexpectedly or was
   * misconfigured.
   */
  public void runImport(String tableName, String ormJarFile, String splitByCol,
      Configuration conf) throws IOException, ImportException {

    if (null != tableName) {
      LOG.info("Beginning import of " + tableName);
    } else {
      LOG.info("Beginning query import.");
    }

    String tableClassName =
        new TableClassName(options).getClassForTable(tableName);
    // Generated ORM jar must be on the classpath before the job is built.
    loadJars(conf, ormJarFile, tableClassName);

    try {
      Job job = new Job(conf);

      // Set the external jar to use for the job.
      job.getConfiguration().set("mapred.jar", ormJarFile);

      propagateOptionsToJob(job);
      configureInputFormat(job, tableName, tableClassName, splitByCol);
      configureOutputFormat(job, tableName, tableClassName);
      configureMapper(job, tableName, tableClassName);
      configureNumTasks(job);
      cacheJars(job, getContext().getConnManager());

      jobSetup(job);
      setJob(job);
      boolean success = runJob(job);
      if (!success) {
        throw new ImportException("Import job failed!");
      }
    } catch (InterruptedException ie) {
      throw new IOException(ie);
    } catch (ClassNotFoundException cnfe) {
      throw new IOException(cnfe);
    } finally {
      // Always restore the original classloader state.
      unloadJars();
    }
  }

  /**
   * Open-ended "setup" routine that is called after the job is configured
   * but just before it is submitted to MapReduce. Subclasses may override
   * if necessary.
   */
  protected void jobSetup(Job job) throws IOException, ImportException {
  }

  protected ImportJobContext getContext() {
    return context;
  }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver14; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import com.google.common.collect.ImmutableSet; import java.util.List; import com.google.common.collect.ImmutableList; import org.jboss.netty.buffer.ChannelBuffer; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFFlowDeleteVer14 implements OFFlowDelete { private static final Logger logger = LoggerFactory.getLogger(OFFlowDeleteVer14.class); // version: 1.4 final static byte WIRE_VERSION = 5; final static int MINIMUM_LENGTH = 56; private final static long DEFAULT_XID = 0x0L; private final static U64 DEFAULT_COOKIE = U64.ZERO; private final static U64 DEFAULT_COOKIE_MASK = U64.ZERO; private final static TableId 
DEFAULT_TABLE_ID = TableId.ZERO; private final static int DEFAULT_IDLE_TIMEOUT = 0x0; private final static int DEFAULT_HARD_TIMEOUT = 0x0; private final static int DEFAULT_PRIORITY = 0x0; private final static OFBufferId DEFAULT_BUFFER_ID = OFBufferId.NO_BUFFER; private final static OFPort DEFAULT_OUT_PORT = OFPort.ANY; private final static OFGroup DEFAULT_OUT_GROUP = OFGroup.ANY; private final static Set<OFFlowModFlags> DEFAULT_FLAGS = ImmutableSet.<OFFlowModFlags>of(); private final static int DEFAULT_IMPORTANCE = 0x0; private final static Match DEFAULT_MATCH = OFFactoryVer14.MATCH_WILDCARD_ALL; private final static List<OFInstruction> DEFAULT_INSTRUCTIONS = ImmutableList.<OFInstruction>of(); // OF message fields private final long xid; private final U64 cookie; private final U64 cookieMask; private final TableId tableId; private final int idleTimeout; private final int hardTimeout; private final int priority; private final OFBufferId bufferId; private final OFPort outPort; private final OFGroup outGroup; private final Set<OFFlowModFlags> flags; private final int importance; private final Match match; private final List<OFInstruction> instructions; // // Immutable default instance final static OFFlowDeleteVer14 DEFAULT = new OFFlowDeleteVer14( DEFAULT_XID, DEFAULT_COOKIE, DEFAULT_COOKIE_MASK, DEFAULT_TABLE_ID, DEFAULT_IDLE_TIMEOUT, DEFAULT_HARD_TIMEOUT, DEFAULT_PRIORITY, DEFAULT_BUFFER_ID, DEFAULT_OUT_PORT, DEFAULT_OUT_GROUP, DEFAULT_FLAGS, DEFAULT_IMPORTANCE, DEFAULT_MATCH, DEFAULT_INSTRUCTIONS ); // package private constructor - used by readers, builders, and factory OFFlowDeleteVer14(long xid, U64 cookie, U64 cookieMask, TableId tableId, int idleTimeout, int hardTimeout, int priority, OFBufferId bufferId, OFPort outPort, OFGroup outGroup, Set<OFFlowModFlags> flags, int importance, Match match, List<OFInstruction> instructions) { if(cookie == null) { throw new NullPointerException("OFFlowDeleteVer14: property cookie cannot be null"); } if(cookieMask == null) { 
throw new NullPointerException("OFFlowDeleteVer14: property cookieMask cannot be null"); } if(tableId == null) { throw new NullPointerException("OFFlowDeleteVer14: property tableId cannot be null"); } if(bufferId == null) { throw new NullPointerException("OFFlowDeleteVer14: property bufferId cannot be null"); } if(outPort == null) { throw new NullPointerException("OFFlowDeleteVer14: property outPort cannot be null"); } if(outGroup == null) { throw new NullPointerException("OFFlowDeleteVer14: property outGroup cannot be null"); } if(flags == null) { throw new NullPointerException("OFFlowDeleteVer14: property flags cannot be null"); } if(match == null) { throw new NullPointerException("OFFlowDeleteVer14: property match cannot be null"); } if(instructions == null) { throw new NullPointerException("OFFlowDeleteVer14: property instructions cannot be null"); } this.xid = xid; this.cookie = cookie; this.cookieMask = cookieMask; this.tableId = tableId; this.idleTimeout = idleTimeout; this.hardTimeout = hardTimeout; this.priority = priority; this.bufferId = bufferId; this.outPort = outPort; this.outGroup = outGroup; this.flags = flags; this.importance = importance; this.match = match; this.instructions = instructions; } // Accessors for OF message fields @Override public OFVersion getVersion() { return OFVersion.OF_14; } @Override public OFType getType() { return OFType.FLOW_MOD; } @Override public long getXid() { return xid; } @Override public U64 getCookie() { return cookie; } @Override public U64 getCookieMask() { return cookieMask; } @Override public TableId getTableId() { return tableId; } @Override public OFFlowModCommand getCommand() { return OFFlowModCommand.DELETE; } @Override public int getIdleTimeout() { return idleTimeout; } @Override public int getHardTimeout() { return hardTimeout; } @Override public int getPriority() { return priority; } @Override public OFBufferId getBufferId() { return bufferId; } @Override public OFPort getOutPort() { return outPort; } 
@Override public OFGroup getOutGroup() { return outGroup; }
    @Override public Set<OFFlowModFlags> getFlags() { return flags; }
    @Override public Match getMatch() { return match; }
    @Override public List<OFInstruction> getInstructions() { return instructions; }
    // "actions" only exists in pre-1.1 flow-mods; 1.4 uses instructions instead.
    @Override public List<OFAction> getActions()throws UnsupportedOperationException { throw new UnsupportedOperationException("Property actions not supported in version 1.4"); }
    @Override public int getImportance() { return importance; }

    // Returns a builder pre-seeded with this message's values.
    public OFFlowDelete.Builder createBuilder() { return new BuilderWithParent(this); }

    // Builder that copies unset properties from an existing parent message.
    // Each property carries a companion *Set flag so build() can tell "explicitly
    // set" apart from "fall back to parentMessage".
    static class BuilderWithParent implements OFFlowDelete.Builder {
        final OFFlowDeleteVer14 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean cookieSet;
        private U64 cookie;
        private boolean cookieMaskSet;
        private U64 cookieMask;
        private boolean tableIdSet;
        private TableId tableId;
        private boolean idleTimeoutSet;
        private int idleTimeout;
        private boolean hardTimeoutSet;
        private int hardTimeout;
        private boolean prioritySet;
        private int priority;
        private boolean bufferIdSet;
        private OFBufferId bufferId;
        private boolean outPortSet;
        private OFPort outPort;
        private boolean outGroupSet;
        private OFGroup outGroup;
        private boolean flagsSet;
        private Set<OFFlowModFlags> flags;
        private boolean importanceSet;
        private int importance;
        private boolean matchSet;
        private Match match;
        private boolean instructionsSet;
        private List<OFInstruction> instructions;

        BuilderWithParent(OFFlowDeleteVer14 parentMessage) { this.parentMessage = parentMessage; }

        @Override public OFVersion getVersion() { return OFVersion.OF_14; }
        @Override public OFType getType() { return OFType.FLOW_MOD; }
        @Override public long getXid() { return xid; }
        @Override public OFFlowDelete.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; }
        @Override public U64 getCookie() { return cookie; }
        @Override public OFFlowDelete.Builder setCookie(U64 cookie) { this.cookie = cookie; this.cookieSet = true; return this; }
        @Override public U64 getCookieMask() { return cookieMask; }
        @Override public OFFlowDelete.Builder setCookieMask(U64 cookieMask) { this.cookieMask = cookieMask; this.cookieMaskSet = true; return this; }
        @Override public TableId getTableId() { return tableId; }
        @Override public OFFlowDelete.Builder setTableId(TableId tableId) { this.tableId = tableId; this.tableIdSet = true; return this; }
        @Override public OFFlowModCommand getCommand() { return OFFlowModCommand.DELETE; }
        @Override public int getIdleTimeout() { return idleTimeout; }
        @Override public OFFlowDelete.Builder setIdleTimeout(int idleTimeout) { this.idleTimeout = idleTimeout; this.idleTimeoutSet = true; return this; }
        @Override public int getHardTimeout() { return hardTimeout; }
        @Override public OFFlowDelete.Builder setHardTimeout(int hardTimeout) { this.hardTimeout = hardTimeout; this.hardTimeoutSet = true; return this; }
        @Override public int getPriority() { return priority; }
        @Override public OFFlowDelete.Builder setPriority(int priority) { this.priority = priority; this.prioritySet = true; return this; }
        @Override public OFBufferId getBufferId() { return bufferId; }
        @Override public OFFlowDelete.Builder setBufferId(OFBufferId bufferId) { this.bufferId = bufferId; this.bufferIdSet = true; return this; }
        @Override public OFPort getOutPort() { return outPort; }
        @Override public OFFlowDelete.Builder setOutPort(OFPort outPort) { this.outPort = outPort; this.outPortSet = true; return this; }
        @Override public OFGroup getOutGroup() { return outGroup; }
        @Override public OFFlowDelete.Builder setOutGroup(OFGroup outGroup) { this.outGroup = outGroup; this.outGroupSet = true; return this; }
        @Override public Set<OFFlowModFlags> getFlags() { return flags; }
        @Override public OFFlowDelete.Builder setFlags(Set<OFFlowModFlags> flags) { this.flags = flags; this.flagsSet = true; return this; }
        @Override public Match getMatch() { return match; }
        @Override public OFFlowDelete.Builder setMatch(Match match) { this.match = match; this.matchSet = true; return this; }
        @Override public List<OFInstruction> getInstructions() { return instructions; }
        @Override public OFFlowDelete.Builder setInstructions(List<OFInstruction> instructions) { this.instructions = instructions; this.instructionsSet = true; return this; }
        @Override public List<OFAction> getActions()throws UnsupportedOperationException { throw new UnsupportedOperationException("Property actions not supported in version 1.4"); }
        @Override public OFFlowDelete.Builder setActions(List<OFAction> actions) throws UnsupportedOperationException { throw new UnsupportedOperationException("Property actions not supported in version 1.4"); }
        @Override public int getImportance() { return importance; }
        @Override public OFFlowDelete.Builder setImportance(int importance) { this.importance = importance; this.importanceSet = true; return this; }

        // Resolves each property (explicit value or parent's value), re-validates
        // the mandatory ones, and constructs the immutable message.
        @Override
        public OFFlowDelete build() {
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            U64 cookie = this.cookieSet ? this.cookie : parentMessage.cookie;
            if(cookie == null)
                throw new NullPointerException("Property cookie must not be null");
            U64 cookieMask = this.cookieMaskSet ? this.cookieMask : parentMessage.cookieMask;
            if(cookieMask == null)
                throw new NullPointerException("Property cookieMask must not be null");
            TableId tableId = this.tableIdSet ? this.tableId : parentMessage.tableId;
            if(tableId == null)
                throw new NullPointerException("Property tableId must not be null");
            int idleTimeout = this.idleTimeoutSet ? this.idleTimeout : parentMessage.idleTimeout;
            int hardTimeout = this.hardTimeoutSet ? this.hardTimeout : parentMessage.hardTimeout;
            int priority = this.prioritySet ? this.priority : parentMessage.priority;
            OFBufferId bufferId = this.bufferIdSet ? this.bufferId : parentMessage.bufferId;
            if(bufferId == null)
                throw new NullPointerException("Property bufferId must not be null");
            OFPort outPort = this.outPortSet ? this.outPort : parentMessage.outPort;
            if(outPort == null)
                throw new NullPointerException("Property outPort must not be null");
            OFGroup outGroup = this.outGroupSet ? this.outGroup : parentMessage.outGroup;
            if(outGroup == null)
                throw new NullPointerException("Property outGroup must not be null");
            Set<OFFlowModFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            int importance = this.importanceSet ? this.importance : parentMessage.importance;
            Match match = this.matchSet ? this.match : parentMessage.match;
            if(match == null)
                throw new NullPointerException("Property match must not be null");
            List<OFInstruction> instructions = this.instructionsSet ? this.instructions : parentMessage.instructions;
            if(instructions == null)
                throw new NullPointerException("Property instructions must not be null");

            return new OFFlowDeleteVer14(
                xid, cookie, cookieMask, tableId, idleTimeout, hardTimeout, priority, bufferId, outPort, outGroup, flags, importance, match, instructions
            );
        }
    }

    // From-scratch builder: unset properties fall back to the class DEFAULT_*
    // constants instead of a parent message.
    static class Builder implements OFFlowDelete.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean cookieSet;
        private U64 cookie;
        private boolean cookieMaskSet;
        private U64 cookieMask;
        private boolean tableIdSet;
        private TableId tableId;
        private boolean idleTimeoutSet;
        private int idleTimeout;
        private boolean hardTimeoutSet;
        private int hardTimeout;
        private boolean prioritySet;
        private int priority;
        private boolean bufferIdSet;
        private OFBufferId bufferId;
        private boolean outPortSet;
        private OFPort outPort;
        private boolean outGroupSet;
        private OFGroup outGroup;
        private boolean flagsSet;
        private Set<OFFlowModFlags> flags;
        private boolean importanceSet;
        private int importance;
        private boolean matchSet;
        private Match match;
        private boolean instructionsSet;
        private List<OFInstruction> instructions;

        @Override public OFVersion getVersion() { return OFVersion.OF_14; }
        @Override public OFType getType() { return OFType.FLOW_MOD; }
        @Override public long getXid() { return xid; }
        @Override public OFFlowDelete.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; }
        @Override public U64 getCookie() { return cookie; }
        @Override public OFFlowDelete.Builder setCookie(U64 cookie) { this.cookie = cookie; this.cookieSet = true; return this; }
        @Override public U64 getCookieMask() { return cookieMask; }
        @Override public OFFlowDelete.Builder setCookieMask(U64 cookieMask) { this.cookieMask = cookieMask; this.cookieMaskSet = true; return this; }
        @Override public TableId getTableId() { return tableId; }
        @Override public OFFlowDelete.Builder setTableId(TableId tableId) { this.tableId = tableId; this.tableIdSet = true; return this; }
        @Override public OFFlowModCommand getCommand() { return OFFlowModCommand.DELETE; }
        @Override public int getIdleTimeout() { return idleTimeout; }
        @Override public OFFlowDelete.Builder setIdleTimeout(int idleTimeout) { this.idleTimeout = idleTimeout; this.idleTimeoutSet = true; return this; }
        @Override public int getHardTimeout() { return hardTimeout; }
        @Override public OFFlowDelete.Builder setHardTimeout(int hardTimeout) { this.hardTimeout = hardTimeout; this.hardTimeoutSet = true; return this; }
        @Override public int getPriority() { return priority; }
        @Override public OFFlowDelete.Builder setPriority(int priority) { this.priority = priority; this.prioritySet = true; return this; }
        @Override public OFBufferId getBufferId() { return bufferId; }
        @Override public OFFlowDelete.Builder setBufferId(OFBufferId bufferId) { this.bufferId = bufferId; this.bufferIdSet = true; return this; }
        @Override public OFPort getOutPort() { return outPort; }
        @Override public OFFlowDelete.Builder setOutPort(OFPort outPort) { this.outPort = outPort; this.outPortSet = true; return this; }
        @Override public OFGroup getOutGroup() { return outGroup; }
        @Override public OFFlowDelete.Builder setOutGroup(OFGroup outGroup) { this.outGroup = outGroup; this.outGroupSet = true; return this; }
        @Override public Set<OFFlowModFlags> getFlags() { return flags; }
        @Override public OFFlowDelete.Builder setFlags(Set<OFFlowModFlags> flags) { this.flags = flags; this.flagsSet = true; return this; }
        @Override public Match getMatch() { return match; }
        @Override public OFFlowDelete.Builder setMatch(Match match) { this.match = match; this.matchSet = true; return this; }
        @Override public List<OFInstruction> getInstructions() { return instructions; }
        @Override public OFFlowDelete.Builder setInstructions(List<OFInstruction> instructions) { this.instructions = instructions; this.instructionsSet = true; return this; }
        @Override public List<OFAction> getActions()throws UnsupportedOperationException { throw new UnsupportedOperationException("Property actions not supported in version 1.4"); }
        @Override public OFFlowDelete.Builder setActions(List<OFAction> actions) throws UnsupportedOperationException { throw new UnsupportedOperationException("Property actions not supported in version 1.4"); }
        @Override public int getImportance() { return importance; }
        @Override public OFFlowDelete.Builder setImportance(int importance) { this.importance = importance; this.importanceSet = true; return this; }

        // Resolves each property (explicit value or DEFAULT_*), validates the
        // mandatory ones, and constructs the immutable message.
        @Override
        public OFFlowDelete build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            U64 cookie = this.cookieSet ? this.cookie : DEFAULT_COOKIE;
            if(cookie == null)
                throw new NullPointerException("Property cookie must not be null");
            U64 cookieMask = this.cookieMaskSet ? this.cookieMask : DEFAULT_COOKIE_MASK;
            if(cookieMask == null)
                throw new NullPointerException("Property cookieMask must not be null");
            TableId tableId = this.tableIdSet ? this.tableId : DEFAULT_TABLE_ID;
            if(tableId == null)
                throw new NullPointerException("Property tableId must not be null");
            int idleTimeout = this.idleTimeoutSet ? this.idleTimeout : DEFAULT_IDLE_TIMEOUT;
            int hardTimeout = this.hardTimeoutSet ? this.hardTimeout : DEFAULT_HARD_TIMEOUT;
            int priority = this.prioritySet ? this.priority : DEFAULT_PRIORITY;
            OFBufferId bufferId = this.bufferIdSet ? this.bufferId : DEFAULT_BUFFER_ID;
            if(bufferId == null)
                throw new NullPointerException("Property bufferId must not be null");
            OFPort outPort = this.outPortSet ? this.outPort : DEFAULT_OUT_PORT;
            if(outPort == null)
                throw new NullPointerException("Property outPort must not be null");
            OFGroup outGroup = this.outGroupSet ? this.outGroup : DEFAULT_OUT_GROUP;
            if(outGroup == null)
                throw new NullPointerException("Property outGroup must not be null");
            Set<OFFlowModFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            int importance = this.importanceSet ? this.importance : DEFAULT_IMPORTANCE;
            Match match = this.matchSet ? this.match : DEFAULT_MATCH;
            if(match == null)
                throw new NullPointerException("Property match must not be null");
            List<OFInstruction> instructions = this.instructionsSet ? this.instructions : DEFAULT_INSTRUCTIONS;
            if(instructions == null)
                throw new NullPointerException("Property instructions must not be null");

            return new OFFlowDeleteVer14(
                xid, cookie, cookieMask, tableId, idleTimeout, hardTimeout, priority, bufferId, outPort, outGroup, flags, importance, match, instructions
            );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes one OFPT_FLOW_MOD/DELETE message from the buffer. Returns
    // null (with the reader index rewound) when the buffer does not yet hold
    // the complete message; throws OFParseError on any header mismatch.
    static class Reader implements OFMessageReader<OFFlowDelete> {
        @Override
        public OFFlowDelete readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 5
            byte version = bb.readByte();
            if(version != (byte) 0x5)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
            // fixed value property type == 14
            byte type = bb.readByte();
            if(type != (byte) 0xe)
                throw new OFParseError("Wrong type: Expected=OFType.FLOW_MOD(14), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            U64 cookie = U64.ofRaw(bb.readLong());
            U64 cookieMask = U64.ofRaw(bb.readLong());
            TableId tableId = TableId.readByte(bb);
            // fixed value property command == 3
            short command = bb.readByte();
            if(command != (short) 0x3)
                throw new OFParseError("Wrong command: Expected=OFFlowModCommand.DELETE(3), got="+command);
            int idleTimeout = U16.f(bb.readShort());
            int hardTimeout = U16.f(bb.readShort());
            int priority = U16.f(bb.readShort());
            OFBufferId bufferId = OFBufferId.of(bb.readInt());
            OFPort outPort = OFPort.read4Bytes(bb);
            OFGroup outGroup = OFGroup.read4Bytes(bb);
            Set<OFFlowModFlags> flags = OFFlowModFlagsSerializerVer14.readFrom(bb);
            int importance = U16.f(bb.readShort());
            Match match = ChannelUtilsVer14.readOFMatch(bb);
            // instructions occupy whatever remains of the declared message length
            List<OFInstruction> instructions = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFInstructionVer14.READER);

            OFFlowDeleteVer14 flowDeleteVer14 = new OFFlowDeleteVer14(
                xid, cookie, cookieMask, tableId, idleTimeout, hardTimeout, priority, bufferId, outPort, outGroup, flags, importance, match, instructions
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", flowDeleteVer14);
            return flowDeleteVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFFlowDeleteVer14Funnel FUNNEL = new OFFlowDeleteVer14Funnel();

    // Feeds all message fields into a Guava hashing sink. Field widths here
    // (e.g. putInt for the 16-bit timeouts) need not match the wire format —
    // only internal consistency of the hash matters.
    static class OFFlowDeleteVer14Funnel implements Funnel<OFFlowDeleteVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFFlowDeleteVer14 message, PrimitiveSink sink) {
            // fixed value property version = 5
            sink.putByte((byte) 0x5);
            // fixed value property type = 14
            sink.putByte((byte) 0xe);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            message.cookie.putTo(sink);
            message.cookieMask.putTo(sink);
            message.tableId.putTo(sink);
            // fixed value property command = 3
            sink.putShort((short) 0x3);
            sink.putInt(message.idleTimeout);
            sink.putInt(message.hardTimeout);
            sink.putInt(message.priority);
            message.bufferId.putTo(sink);
            message.outPort.putTo(sink);
            message.outGroup.putTo(sink);
            OFFlowModFlagsSerializerVer14.putTo(message.flags, sink);
            sink.putInt(message.importance);
            message.match.putTo(sink);
            FunnelUtils.putList(message.instructions, sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializes the message in OpenFlow 1.4 wire format, back-patching the
    // 16-bit length field once the variable-length tail has been written.
    static class Writer implements OFMessageWriter<OFFlowDeleteVer14> {
        @Override
        public void write(ChannelBuffer bb, OFFlowDeleteVer14 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 5
            bb.writeByte((byte) 0x5);
            // fixed value property type = 14
            bb.writeByte((byte) 0xe);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            bb.writeLong(message.cookie.getValue());
            bb.writeLong(message.cookieMask.getValue());
            message.tableId.writeByte(bb);
            // fixed value property command = 3
            bb.writeByte((short) 0x3);
            bb.writeShort(U16.t(message.idleTimeout));
            bb.writeShort(U16.t(message.hardTimeout));
            bb.writeShort(U16.t(message.priority));
            bb.writeInt(message.bufferId.getInt());
            message.outPort.write4Bytes(bb);
            message.outGroup.write4Bytes(bb);
            OFFlowModFlagsSerializerVer14.writeTo(bb, message.flags);
            bb.writeShort(U16.t(message.importance));
            message.match.writeTo(bb);
            ChannelUtils.writeList(bb, message.instructions);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFFlowDeleteVer14(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("cookie=").append(cookie);
        b.append(", ");
        b.append("cookieMask=").append(cookieMask);
        b.append(", ");
        b.append("tableId=").append(tableId);
        b.append(", ");
        b.append("idleTimeout=").append(idleTimeout);
        b.append(", ");
        b.append("hardTimeout=").append(hardTimeout);
        b.append(", ");
        b.append("priority=").append(priority);
        b.append(", ");
        b.append("bufferId=").append(bufferId);
        b.append(", ");
        b.append("outPort=").append(outPort);
        b.append(", ");
        b.append("outGroup=").append(outGroup);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("importance=").append(importance);
        b.append(", ");
        b.append("match=").append(match);
        b.append(", ");
        b.append("instructions=").append(instructions);
        b.append(")");
        return b.toString();
    }

    // Value equality over every message field (fields are final, so equal
    // messages stay equal).
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFFlowDeleteVer14 other = (OFFlowDeleteVer14) obj;

        if( xid != other.xid)
            return false;
        if (cookie == null) {
            if (other.cookie != null)
                return false;
        } else if (!cookie.equals(other.cookie))
            return false;
        if (cookieMask == null) {
            if (other.cookieMask != null)
                return false;
        } else if (!cookieMask.equals(other.cookieMask))
            return false;
        if (tableId == null) {
            if (other.tableId != null)
                return false;
        } else if (!tableId.equals(other.tableId))
            return false;
        if( idleTimeout != other.idleTimeout)
            return false;
        if( hardTimeout != other.hardTimeout)
            return false;
        if( priority != other.priority)
            return false;
        if (bufferId == null) {
            if (other.bufferId != null)
                return false;
        } else if (!bufferId.equals(other.bufferId))
            return false;
        if (outPort == null) {
            if (other.outPort != null)
                return false;
        } else if (!outPort.equals(other.outPort))
            return false;
        if (outGroup == null) {
            if (other.outGroup != null)
                return false;
        } else if (!outGroup.equals(other.outGroup))
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if( importance != other.importance)
            return false;
        if (match == null) {
            if (other.match != null)
                return false;
        } else if (!match.equals(other.match))
            return false;
        if (instructions == null) {
            if (other.instructions != null)
                return false;
        } else if (!instructions.equals(other.instructions))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // NOTE(review): the xid term discards the running result (generated-code
        // quirk: "prime * ..." rather than "prime * result + ..."). The hash is
        // still consistent with equals(), so this is left untouched.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((cookie == null) ? 0 : cookie.hashCode());
        result = prime * result + ((cookieMask == null) ? 0 : cookieMask.hashCode());
        result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
        result = prime * result + idleTimeout;
        result = prime * result + hardTimeout;
        result = prime * result + priority;
        result = prime * result + ((bufferId == null) ? 0 : bufferId.hashCode());
        result = prime * result + ((outPort == null) ? 0 : outPort.hashCode());
        result = prime * result + ((outGroup == null) ? 0 : outGroup.hashCode());
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + importance;
        result = prime * result + ((match == null) ? 0 : match.hashCode());
        result = prime * result + ((instructions == null) ? 0 : instructions.hashCode());
        return result;
    }
}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import static org.junit.Assert.*; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.TimestampsFilter; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; /** * Run tests related to {@link TimestampsFilter} using HBase client APIs. * Sets up the HBase mini cluster once at start. Each creates a table * named for the method and does its stuff against that. 
*/ @Category({MediumTests.class, ClientTests.class}) public class TestTimestampsFilter { private static final Log LOG = LogFactory.getLog(TestTimestampsFilter.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); /** * @throws java.lang.Exception */ @BeforeClass public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } /** * @throws java.lang.Exception */ @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { // Nothing to do. } /** * @throws java.lang.Exception */ @After public void tearDown() throws Exception { // Nothing to do. } /** * Test from client side for TimestampsFilter. * * The TimestampsFilter provides the ability to request cells (KeyValues) * whose timestamp/version is in the specified list of timestamps/version. * * @throws Exception */ @Test public void testTimestampsFilter() throws Exception { byte [] TABLE = Bytes.toBytes("testTimestampsFilter"); byte [] FAMILY = Bytes.toBytes("event_log"); byte [][] FAMILIES = new byte[][] { FAMILY }; Cell kvs[]; // create table; set versions to max... Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE); for (int rowIdx = 0; rowIdx < 5; rowIdx++) { for (int colIdx = 0; colIdx < 5; colIdx++) { // insert versions 201..300 putNVersions(ht, FAMILY, rowIdx, colIdx, 201, 300); // insert versions 1..100 putNVersions(ht, FAMILY, rowIdx, colIdx, 1, 100); } } // do some verification before flush verifyInsertedValues(ht, FAMILY); TEST_UTIL.flush(); // do some verification after flush verifyInsertedValues(ht, FAMILY); // Insert some more versions after flush. These should be in memstore. // After this we should have data in both memstore & HFiles. 
for (int rowIdx = 0; rowIdx < 5; rowIdx++) { for (int colIdx = 0; colIdx < 5; colIdx++) { putNVersions(ht, FAMILY, rowIdx, colIdx, 301, 400); putNVersions(ht, FAMILY, rowIdx, colIdx, 101, 200); } } for (int rowIdx = 0; rowIdx < 5; rowIdx++) { for (int colIdx = 0; colIdx < 5; colIdx++) { kvs = getNVersions(ht, FAMILY, rowIdx, colIdx, Arrays.asList(505L, 5L, 105L, 305L, 205L)); assertEquals(4, kvs.length); checkOneCell(kvs[0], FAMILY, rowIdx, colIdx, 305); checkOneCell(kvs[1], FAMILY, rowIdx, colIdx, 205); checkOneCell(kvs[2], FAMILY, rowIdx, colIdx, 105); checkOneCell(kvs[3], FAMILY, rowIdx, colIdx, 5); } } // Request an empty list of versions using the Timestamps filter; // Should return none. kvs = getNVersions(ht, FAMILY, 2, 2, new ArrayList<Long>()); assertEquals(0, kvs == null? 0: kvs.length); // // Test the filter using a Scan operation // Scan rows 0..4. For each row, get all its columns, but only // those versions of the columns with the specified timestamps. Result[] results = scanNVersions(ht, FAMILY, 0, 4, Arrays.asList(6L, 106L, 306L)); assertEquals("# of rows returned from scan", 5, results.length); for (int rowIdx = 0; rowIdx < 5; rowIdx++) { kvs = results[rowIdx].rawCells(); // each row should have 5 columns. // And we have requested 3 versions for each. assertEquals("Number of KeyValues in result for row:" + rowIdx, 3*5, kvs.length); for (int colIdx = 0; colIdx < 5; colIdx++) { int offset = colIdx * 3; checkOneCell(kvs[offset + 0], FAMILY, rowIdx, colIdx, 306); checkOneCell(kvs[offset + 1], FAMILY, rowIdx, colIdx, 106); checkOneCell(kvs[offset + 2], FAMILY, rowIdx, colIdx, 6); } } ht.close(); } @Test public void testMultiColumns() throws Exception { byte [] TABLE = Bytes.toBytes("testTimestampsFilterMultiColumns"); byte [] FAMILY = Bytes.toBytes("event_log"); byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... 
Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE); Put p = new Put(Bytes.toBytes("row")); p.add(FAMILY, Bytes.toBytes("column0"), 3, Bytes.toBytes("value0-3")); p.add(FAMILY, Bytes.toBytes("column1"), 3, Bytes.toBytes("value1-3")); p.add(FAMILY, Bytes.toBytes("column2"), 1, Bytes.toBytes("value2-1")); p.add(FAMILY, Bytes.toBytes("column2"), 2, Bytes.toBytes("value2-2")); p.add(FAMILY, Bytes.toBytes("column2"), 3, Bytes.toBytes("value2-3")); p.add(FAMILY, Bytes.toBytes("column3"), 2, Bytes.toBytes("value3-2")); p.add(FAMILY, Bytes.toBytes("column4"), 1, Bytes.toBytes("value4-1")); p.add(FAMILY, Bytes.toBytes("column4"), 2, Bytes.toBytes("value4-2")); p.add(FAMILY, Bytes.toBytes("column4"), 3, Bytes.toBytes("value4-3")); ht.put(p); ArrayList<Long> timestamps = new ArrayList<Long>(); timestamps.add(new Long(3)); TimestampsFilter filter = new TimestampsFilter(timestamps); Get g = new Get(Bytes.toBytes("row")); g.setFilter(filter); g.setMaxVersions(); g.addColumn(FAMILY, Bytes.toBytes("column2")); g.addColumn(FAMILY, Bytes.toBytes("column4")); Result result = ht.get(g); for (Cell kv : result.listCells()) { System.out.println("found row " + Bytes.toString(CellUtil.cloneRow(kv)) + ", column " + Bytes.toString(CellUtil.cloneQualifier(kv)) + ", value " + Bytes.toString(CellUtil.cloneValue(kv))); } assertEquals(result.listCells().size(), 2); assertTrue(CellUtil.matchingValue(result.listCells().get(0), Bytes.toBytes("value2-3"))); assertTrue(CellUtil.matchingValue(result.listCells().get(1), Bytes.toBytes("value4-3"))); ht.close(); } /** * Test TimestampsFilter in the presence of version deletes. * * @throws Exception */ @Test public void testWithVersionDeletes() throws Exception { // first test from memstore (without flushing). testWithVersionDeletes(false); // run same test against HFiles (by forcing a flush). 
testWithVersionDeletes(true); } private void testWithVersionDeletes(boolean flushTables) throws IOException { byte [] TABLE = Bytes.toBytes("testWithVersionDeletes_" + (flushTables ? "flush" : "noflush")); byte [] FAMILY = Bytes.toBytes("event_log"); byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE); // For row:0, col:0: insert versions 1 through 5. putNVersions(ht, FAMILY, 0, 0, 1, 5); // delete version 4. deleteOneVersion(ht, FAMILY, 0, 0, 4); if (flushTables) { TEST_UTIL.flush(); } // request a bunch of versions including the deleted version. We should // only get back entries for the versions that exist. Cell kvs[] = getNVersions(ht, FAMILY, 0, 0, Arrays.asList(2L, 3L, 4L, 5L)); assertEquals(3, kvs.length); checkOneCell(kvs[0], FAMILY, 0, 0, 5); checkOneCell(kvs[1], FAMILY, 0, 0, 3); checkOneCell(kvs[2], FAMILY, 0, 0, 2); ht.close(); } private void verifyInsertedValues(Table ht, byte[] cf) throws IOException { for (int rowIdx = 0; rowIdx < 5; rowIdx++) { for (int colIdx = 0; colIdx < 5; colIdx++) { // ask for versions that exist. Cell[] kvs = getNVersions(ht, cf, rowIdx, colIdx, Arrays.asList(5L, 300L, 6L, 80L)); assertEquals(4, kvs.length); checkOneCell(kvs[0], cf, rowIdx, colIdx, 300); checkOneCell(kvs[1], cf, rowIdx, colIdx, 80); checkOneCell(kvs[2], cf, rowIdx, colIdx, 6); checkOneCell(kvs[3], cf, rowIdx, colIdx, 5); // ask for versions that do not exist. kvs = getNVersions(ht, cf, rowIdx, colIdx, Arrays.asList(101L, 102L)); assertEquals(0, kvs == null? 0: kvs.length); // ask for some versions that exist and some that do not. 
kvs = getNVersions(ht, cf, rowIdx, colIdx, Arrays.asList(1L, 300L, 105L, 70L, 115L)); assertEquals(3, kvs.length); checkOneCell(kvs[0], cf, rowIdx, colIdx, 300); checkOneCell(kvs[1], cf, rowIdx, colIdx, 70); checkOneCell(kvs[2], cf, rowIdx, colIdx, 1); } } } /** * Assert that the passed in KeyValue has expected contents for the * specified row, column & timestamp. */ private void checkOneCell(Cell kv, byte[] cf, int rowIdx, int colIdx, long ts) { String ctx = "rowIdx=" + rowIdx + "; colIdx=" + colIdx + "; ts=" + ts; assertEquals("Row mismatch which checking: " + ctx, "row:"+ rowIdx, Bytes.toString(CellUtil.cloneRow(kv))); assertEquals("ColumnFamily mismatch while checking: " + ctx, Bytes.toString(cf), Bytes.toString(CellUtil.cloneFamily(kv))); assertEquals("Column qualifier mismatch while checking: " + ctx, "column:" + colIdx, Bytes.toString(CellUtil.cloneQualifier(kv))); assertEquals("Timestamp mismatch while checking: " + ctx, ts, kv.getTimestamp()); assertEquals("Value mismatch while checking: " + ctx, "value-version-" + ts, Bytes.toString(CellUtil.cloneValue(kv))); } /** * Uses the TimestampFilter on a Get to request a specified list of * versions for the row/column specified by rowIdx & colIdx. * */ private Cell[] getNVersions(Table ht, byte[] cf, int rowIdx, int colIdx, List<Long> versions) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Filter filter = new TimestampsFilter(versions); Get get = new Get(row); get.addColumn(cf, column); get.setFilter(filter); get.setMaxVersions(); Result result = ht.get(get); return result.rawCells(); } /** * Uses the TimestampFilter on a Scan to request a specified list of * versions for the rows from startRowIdx to endRowIdx (both inclusive). 
*/ private Result[] scanNVersions(Table ht, byte[] cf, int startRowIdx, int endRowIdx, List<Long> versions) throws IOException { byte startRow[] = Bytes.toBytes("row:" + startRowIdx); byte endRow[] = Bytes.toBytes("row:" + endRowIdx + 1); // exclusive Filter filter = new TimestampsFilter(versions); Scan scan = new Scan(startRow, endRow); scan.setFilter(filter); scan.setMaxVersions(); ResultScanner scanner = ht.getScanner(scan); return scanner.next(endRowIdx - startRowIdx + 1); } /** * Insert in specific row/column versions with timestamps * versionStart..versionEnd. */ private void putNVersions(Table ht, byte[] cf, int rowIdx, int colIdx, long versionStart, long versionEnd) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Put put = new Put(row); put.setDurability(Durability.SKIP_WAL); for (long idx = versionStart; idx <= versionEnd; idx++) { put.add(cf, column, idx, Bytes.toBytes("value-version-" + idx)); } ht.put(put); } /** * For row/column specified by rowIdx/colIdx, delete the cell * corresponding to the specified version. */ private void deleteOneVersion(Table ht, byte[] cf, int rowIdx, int colIdx, long version) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Delete del = new Delete(row); del.deleteColumn(cf, column, version); ht.delete(del); } }
/**
 * NOTE(review): EMF-generated model code. Structure and method bodies are
 * owned by the generator ({@code @generated} markers); edits outside the
 * begin/end-user-doc regions are discarded on regeneration, so only the
 * formatting and comments are touched here.
 */
package org.mar9000.pe.ecore.impl;

import java.util.Collection;
import java.util.List;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.mar9000.pe.ecore.PEElement;
import org.mar9000.pe.ecore.PEElementAttribute;
import org.mar9000.pe.ecore.PEElementCardinality;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>PE Element</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link org.mar9000.pe.ecore.impl.PEElementImpl#getName <em>Name</em>}</li>
 *   <li>{@link org.mar9000.pe.ecore.impl.PEElementImpl#getAttributes <em>Attributes</em>}</li>
 *   <li>{@link org.mar9000.pe.ecore.impl.PEElementImpl#getCardinality <em>Cardinality</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public abstract class PEElementImpl extends MinimalEObjectImpl.Container implements PEElement {
	/**
	 * The default value of the '{@link #getName() <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getName()
	 * @generated
	 * @ordered
	 */
	protected static final String NAME_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getName()
	 * @generated
	 * @ordered
	 */
	protected String name = NAME_EDEFAULT;

	/**
	 * The cached value of the '{@link #getAttributes() <em>Attributes</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * Lazily created by {@link #getAttributes()}; may be {@code null} until first access.
	 * <!-- end-user-doc -->
	 * @see #getAttributes()
	 * @generated
	 * @ordered
	 */
	protected EList<PEElementAttribute> attributes;

	/**
	 * The default value of the '{@link #getCardinality() <em>Cardinality</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getCardinality()
	 * @generated
	 * @ordered
	 */
	protected static final PEElementCardinality CARDINALITY_EDEFAULT = PEElementCardinality.MANDATORY;

	/**
	 * The cached value of the '{@link #getCardinality() <em>Cardinality</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getCardinality()
	 * @generated
	 * @ordered
	 */
	protected PEElementCardinality cardinality = CARDINALITY_EDEFAULT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected PEElementImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return EcorePackageImpl.Literals.PE_ELEMENT;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getName() {
		return name;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the name and fires a SET notification when notification is required.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setName(String newName) {
		String oldName = name;
		name = newName;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, EcorePackageImpl.PE_ELEMENT__NAME, oldName, name));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public List<PEElementAttribute> getAttributes() {
		if (attributes == null) {
			attributes = new EObjectContainmentEList<PEElementAttribute>(PEElementAttribute.class, this, EcorePackageImpl.PE_ELEMENT__ATTRIBUTES);
		}
		return attributes;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public PEElementCardinality getCardinality() {
		return cardinality;
	}

	/**
	 * <!-- begin-user-doc -->
	 * A {@code null} argument is normalized to {@link #CARDINALITY_EDEFAULT}
	 * (standard EMF behavior for enum-typed attributes).
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setCardinality(PEElementCardinality newCardinality) {
		PEElementCardinality oldCardinality = cardinality;
		cardinality = newCardinality == null ? CARDINALITY_EDEFAULT : newCardinality;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, EcorePackageImpl.PE_ELEMENT__CARDINALITY, oldCardinality, cardinality));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case EcorePackageImpl.PE_ELEMENT__ATTRIBUTES:
				return ((InternalEList<?>)getAttributes()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case EcorePackageImpl.PE_ELEMENT__NAME:
				return getName();
			case EcorePackageImpl.PE_ELEMENT__ATTRIBUTES:
				return getAttributes();
			case EcorePackageImpl.PE_ELEMENT__CARDINALITY:
				return getCardinality();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case EcorePackageImpl.PE_ELEMENT__NAME:
				setName((String)newValue);
				return;
			case EcorePackageImpl.PE_ELEMENT__ATTRIBUTES:
				getAttributes().clear();
				getAttributes().addAll((Collection<? extends PEElementAttribute>)newValue);
				return;
			case EcorePackageImpl.PE_ELEMENT__CARDINALITY:
				setCardinality((PEElementCardinality)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case EcorePackageImpl.PE_ELEMENT__NAME:
				setName(NAME_EDEFAULT);
				return;
			case EcorePackageImpl.PE_ELEMENT__ATTRIBUTES:
				getAttributes().clear();
				return;
			case EcorePackageImpl.PE_ELEMENT__CARDINALITY:
				setCardinality(CARDINALITY_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case EcorePackageImpl.PE_ELEMENT__NAME:
				return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
			case EcorePackageImpl.PE_ELEMENT__ATTRIBUTES:
				return attributes != null && !attributes.isEmpty();
			case EcorePackageImpl.PE_ELEMENT__CARDINALITY:
				return cardinality != CARDINALITY_EDEFAULT;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (name: ");
		result.append(name);
		result.append(", cardinality: ");
		result.append(cardinality);
		result.append(')');
		return result.toString();
	}

} //PEElementImpl
/***************************************************************************
 * Use this class to store ForceChanges, ForceErrors and SCPChanges in a
 * vector and get them when you want. It is most useful if you don't want
 * to worry about your code running in a multi-threaded environment and
 * about vrpn possibly delivering device updates at arbitrary times.
 *
 * The Listener can be configured to buffer and return either all
 * updates or only the last (the most recent) update. Force messages,
 * SCP messages and force-error messages are buffered independently.
 * By default, the Listener keeps only the last update of each.
 *
 * It is not intended that Listeners be shared among objects. Each
 * entity in a program that is interested in hearing about updates
 * from some vrpn Force device (and which wishes to use this Listener
 * mechanism) should create its own listener (even if multiple entities
 * wish to hear from the same device).
 ***************************************************************************/

package vrpn;

import java.util.Vector;

public class ForceDeviceRemoteListener
	implements ForceDeviceRemote.ForceChangeListener,
				ForceDeviceRemote.ForceErrorListener,
				ForceDeviceRemote.SCPChangeListener
{
	/** Buffering mode: keep every update received. */
	public static final int ALL_UPDATES = 0;
	/** Buffering mode: keep only the most recent update (the default). */
	public static final int LAST_UPDATE = 1;


	/**
	 * Creates a listener for the given device and registers it for force,
	 * force-error and SCP change callbacks. All three buffers start in
	 * LAST_UPDATE mode.
	 */
	public ForceDeviceRemoteListener(ForceDeviceRemote force)
	{
		forceUpdates = new Vector();
		errorUpdates = new Vector();
		scpUpdates = new Vector();

		forceBufferMode = LAST_UPDATE;
		errorBufferMode = LAST_UPDATE;
		scpBufferMode = LAST_UPDATE;

		force.addForceChangeListener(this);
		force.addForceErrorListener(this);
		force.addSCPChangeListener(this);
	}


	/**
	 * Sets the buffering mode of the Listener to record and return only
	 * the most recent (the last, the latest) ForceChange.
	 */
	public synchronized void setModeLastForceUpdate()
	{
		forceBufferMode = LAST_UPDATE;
		// Collapse any backlog down to just the newest element.
		if (!forceUpdates.isEmpty())
		{
			Object temp = forceUpdates.lastElement();
			forceUpdates.removeAllElements();
			forceUpdates.addElement(temp);
		}
	}


	/**
	 * Sets the buffering mode of the Listener to record and return only
	 * the most recent (the last, the latest) ForceError.
	 */
	public synchronized void setModeLastForceErrorUpdate()
	{
		errorBufferMode = LAST_UPDATE;
		if (!errorUpdates.isEmpty())
		{
			Object temp = errorUpdates.lastElement();
			errorUpdates.removeAllElements();
			errorUpdates.addElement(temp);
		}
	}


	/**
	 * Sets the buffering mode of the Listener to record and return only
	 * the most recent (the last, the latest) SCPChange.
	 */
	public synchronized void setModeLastSCPUpdate()
	{
		scpBufferMode = LAST_UPDATE;
		if (!scpUpdates.isEmpty())
		{
			Object temp = scpUpdates.lastElement();
			scpUpdates.removeAllElements();
			scpUpdates.addElement(temp);
		}
	}


	/**
	 * Sets the buffering mode of the Listener to record and return all
	 * ForceChanges, beginning at the time this mode is first enabled.
	 */
	public synchronized void setModeAllForceUpdates()
	{
		if( forceBufferMode == LAST_UPDATE )
		{
			forceUpdates.removeAllElements( );
		}
		forceBufferMode = ALL_UPDATES;
	}


	/**
	 * Sets the buffering mode of the Listener to record and return all
	 * ForceErrors, beginning at the time this mode is first enabled.
	 */
	public synchronized void setModeAllForceErrorUpdates()
	{
		if( errorBufferMode == LAST_UPDATE )
		{
			errorUpdates.removeAllElements( );
		}
		errorBufferMode = ALL_UPDATES;
	}


	/**
	 * Sets the buffering mode of the Listener to record and return all
	 * SCPChanges, beginning at the time this mode is first enabled.
	 */
	public synchronized void setModeAllSCPUpdates()
	{
		// BUGFIX: previously tested forceBufferMode here; the SCP buffer
		// must be reset based on the SCP mode, not the force mode.
		if( scpBufferMode == LAST_UPDATE )
		{
			scpUpdates.removeAllElements( );
		}
		scpBufferMode = ALL_UPDATES;
	}


	/**
	 * @return ForceDeviceRemoteListener.ALL_UPDATES if the Listener is recording and
	 * returning all ForceChanges; ForceDeviceRemoteListener.LAST_UPDATE if only the
	 * latest ForceChanges.
	 */
	public synchronized int getModeForceUpdate()
	{
		return forceBufferMode;
	}


	/**
	 * @return ForceDeviceRemoteListener.ALL_UPDATES if the Listener is recording and
	 * returning all ForceErrors; ForceDeviceRemoteListener.LAST_UPDATE if only the
	 * latest ForceErrors.
	 */
	public synchronized int getModeForceErrorUpdate()
	{
		return errorBufferMode;
	}


	/**
	 * @return ForceDeviceRemoteListener.ALL_UPDATES if the Listener is recording and
	 * returning all SCPUpdates; ForceDeviceRemoteListener.LAST_UPDATE if only the
	 * latest SCPUpdates.
	 */
	public synchronized int getModeSCPUpdate()
	{
		return scpBufferMode;
	}


	/**
	 * This method retrieves the buffered ForceChanges from the Listener.
	 * If the buffering mode is LAST_UPDATE, the last update received will
	 * be returned (note that, in this mode, successive calls to getForceUpdate()
	 * may return the same ForceChange if no new changes were received in the
	 * interim). If the buffering mode is ALL_UPDATES, all changes
	 * received since the last call to getForceUpdate() (or since ALL_UPDATES
	 * mode was enabled) will be returned.
	 * @return A Vector containing the buffered ForceChanges. The number of
	 * ForceChanges returned will depend on the buffering mode. If there are
	 * no ForceChanges buffered, an empty Vector will be returned.
	 * @see #setModeLastForceUpdate
	 * @see #setModeAllForceUpdates
	 */
	public synchronized Vector getForceUpdate()
	{
		Vector v = new Vector( );
		if( forceUpdates.isEmpty() )
		{
			return v;
		}

		if( forceBufferMode == LAST_UPDATE )
		{
			// LAST_UPDATE mode keeps the element buffered (not consumed).
			v.addElement( forceUpdates.lastElement() );
		}
		else if( forceBufferMode == ALL_UPDATES )
		{
			// ALL_UPDATES mode drains the buffer.
			for( int i = 0; i < forceUpdates.size(); i++ )
			{
				v.addElement( forceUpdates.elementAt(i) );
			}
			forceUpdates.removeAllElements();
		}
		return v;
	} // end method getForceUpdate()


	/**
	 * This method retrieves the buffered ForceErrors from the Listener.
	 * If the buffering mode is LAST_UPDATE, the last update received will
	 * be returned (note that, in this mode, successive calls to getForceErrorUpdate()
	 * may return the same ForceError if no new changes were received in the
	 * interim). If the buffering mode is ALL_UPDATES, all changes
	 * received since the last call to getForceErrorUpdate() (or since ALL_UPDATES
	 * mode was enabled) will be returned.
	 * @return A Vector containing the buffered ForceErrors. The number of
	 * ForceErrors returned will depend on the buffering mode. If there are
	 * no ForceErrors buffered, an empty Vector will be returned.
	 * @see #setModeLastForceErrorUpdate
	 * @see #setModeAllForceErrorUpdates
	 */
	public synchronized Vector getForceErrorUpdate()
	{
		Vector v = new Vector( );
		if( errorUpdates.isEmpty() )
		{
			return v;
		}

		if( errorBufferMode == LAST_UPDATE )
		{
			v.addElement( errorUpdates.lastElement() );
		}
		else if( errorBufferMode == ALL_UPDATES )
		{
			for( int i = 0; i < errorUpdates.size(); i++ )
			{
				v.addElement( errorUpdates.elementAt(i) );
			}
			errorUpdates.removeAllElements();
		}
		return v;
	} // end method getForceErrorUpdate()


	/**
	 * This method retrieves the buffered SCPChanges from the Listener.
	 * If the buffering mode is LAST_UPDATE, the last update received will
	 * be returned (note that, in this mode, successive calls to getSCPUpdate()
	 * may return the same SCPChange if no new changes were received in the
	 * interim). If the buffering mode is ALL_UPDATES, all changes
	 * received since the last call to getSCPUpdate() (or since ALL_UPDATES
	 * mode was enabled) will be returned.
	 * @return A Vector containing the buffered SCPChanges. The number of
	 * SCPChanges returned will depend on the buffering mode. If there are
	 * no SCPChanges buffered, an empty Vector will be returned.
	 * @see #setModeLastSCPUpdate
	 * @see #setModeAllSCPUpdates
	 */
	public synchronized Vector getSCPUpdate()
	{
		Vector v = new Vector( );
		if( scpUpdates.isEmpty() )
		{
			return v;
		}

		if( scpBufferMode == LAST_UPDATE )
		{
			v.addElement( scpUpdates.lastElement() );
		}
		// BUGFIX: previously tested errorBufferMode here, so in ALL_UPDATES
		// mode the SCP buffer was neither returned nor drained whenever the
		// error mode differed.
		else if( scpBufferMode == ALL_UPDATES )
		{
			for( int i = 0; i < scpUpdates.size(); i++ )
			{
				v.addElement( scpUpdates.elementAt(i) );
			}
			scpUpdates.removeAllElements();
		}
		return v;
	} // end method getSCPUpdate()


	/**
	 * @return The last (most recent, latest) ForceChange received. This function
	 * returns <code>null</code> if no updates have been received. Note that
	 * successive calls to getLastForceUpdate() may return the same ForceChange
	 * if no updates were received in the interim.
	 */
	public synchronized ForceDeviceRemote.ForceChange getLastForceUpdate()
	{
		if( forceUpdates.isEmpty( ) )
			return null;
		return (ForceDeviceRemote.ForceChange) forceUpdates.lastElement();
	}


	/**
	 * @return The last (most recent, latest) ForceError received. This function
	 * returns <code>null</code> if no updates have been received. Note that
	 * successive calls to getLastErrorUpdate() may return the same ForceError
	 * if no updates were received in the interim.
	 */
	public synchronized ForceDeviceRemote.ForceError getLastErrorUpdate()
	{
		if( errorUpdates.isEmpty( ) )
			return null;
		return (ForceDeviceRemote.ForceError) errorUpdates.lastElement();
	}


	/**
	 * @return The last (most recent, latest) SCPChange received. This function
	 * returns <code>null</code> if no updates have been received. Note that
	 * successive calls to getLastSCPUpdate() may return the same SCPChange
	 * if no updates were received in the interim.
	 */
	public synchronized ForceDeviceRemote.SCPChange getLastSCPUpdate()
	{
		if( scpUpdates.isEmpty( ) )
			return null;
		return (ForceDeviceRemote.SCPChange) scpUpdates.lastElement();
	}


	/**
	 * This is the handler that the ForceDeviceRemote instance will call to deliver
	 * force change messages. This method is not intended to be called by user code.
	 */
	public synchronized void forceUpdate (ForceDeviceRemote.ForceChange f,
										  ForceDeviceRemote force)
	{
		if( forceBufferMode == LAST_UPDATE )
		{
			forceUpdates.removeAllElements();
		}
		forceUpdates.addElement(f);
	}


	/**
	 * This is the handler that the ForceDeviceRemote instance will call to deliver
	 * force error messages. This method is not intended to be called by user code.
	 */
	public synchronized void forceError (ForceDeviceRemote.ForceError e,
										 ForceDeviceRemote force)
	{
		if( errorBufferMode == LAST_UPDATE )
		{
			errorUpdates.removeAllElements();
		}
		errorUpdates.addElement(e);
	}


	/**
	 * This is the handler that the ForceDeviceRemote instance will call to deliver
	 * SCP change messages. This method is not intended to be called by user code.
	 */
	public synchronized void scpUpdate (ForceDeviceRemote.SCPChange s,
										ForceDeviceRemote force )
	{
		if( scpBufferMode == LAST_UPDATE )
		{
			scpUpdates.removeAllElements();
		}
		scpUpdates.addElement(s);
	}


	// Buffers for the three independent message streams (raw Vector kept for
	// source compatibility with existing subclasses of this pre-generics API).
	protected Vector forceUpdates;
	protected Vector errorUpdates;
	protected Vector scpUpdates;

	// Per-stream buffering modes: ALL_UPDATES or LAST_UPDATE.
	protected int forceBufferMode;
	protected int errorBufferMode;
	protected int scpBufferMode;

} // end class ForceDeviceRemoteListener
package com.reactlibrary;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Rect;
import android.os.Environment;
import android.util.Log;

import androidx.annotation.Nullable;

import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import com.googlecode.tesseract.android.ResultIterator;
import com.googlecode.tesseract.android.TessBaseAPI;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/**
 * React Native bridge module around the Tesseract Android API.
 * Copies language .traineddata files from the app assets to external
 * storage on first use, then runs OCR on a background thread, resolving
 * or rejecting the supplied JS Promise.
 */
public class TesseractOcrModule extends ReactContextBaseJavaModule {

    private final ReactApplicationContext reactContext;

    private static final String KEY_ALLOW_LIST = "allowlist";
    private static final String KEY_DENY_LIST = "denylist";
    private static final String KEY_TOKEN_LEVEL = "level";

    private static final String TESS_FILES_DIRECTORY = "tessdata";
    private static final String TESS_FILES_EXTENSION = ".traineddata";

    // NOTE(review): mutable statics initialized per-instance in the
    // constructor; kept as-is for interface compatibility, guarded by the
    // contains() check below so repeated construction is idempotent.
    private static String DATA_PATH = Environment.getExternalStorageDirectory().toString();
    private static String TESS_FILES_PATH;

    private TessBaseAPI tesseract;

    public TesseractOcrModule(ReactApplicationContext reactContext) {
        super(reactContext);
        this.reactContext = reactContext;

        if (!DATA_PATH.contains(reactContext.getPackageName())) {
            DATA_PATH += File.separator + reactContext.getPackageName();
        }
        TESS_FILES_PATH = DATA_PATH + File.separator + TESS_FILES_DIRECTORY;
    }

    @Override
    public String getName() {
        return "TesseractOcr";
    }

    /**
     * Cancels an in-flight recognition and releases the Tesseract instance.
     */
    @ReactMethod
    public void stop(Promise promise) {
        Log.d(getName(), "stop");
        try {
            tesseract.stop();
            tesseract.end();
            promise.resolve("Recognition cancelled successfully");
        } catch (Exception e) {
            Log.e(getName(), "Could not stop recognition. " + e.toString(), e);
            promise.reject("Could not stop recognition", e.toString());
        }
    }

    /**
     * Runs OCR on the image at {@code imageSource} and resolves the promise
     * with the full recognized text.
     */
    @ReactMethod
    public void recognize(String imageSource, final String lang,
                          @Nullable final ReadableMap tessOptions, final Promise promise) {
        Log.d(getName(), "recognize");
        try {
            if (shouldCopyTrainedFile(lang)) {
                prepareTrainedFilesDirectory();
                copyTrainedFile(lang);
            }

            final Bitmap bitmap = getBitmap(imageSource);
            if (bitmap != null) {
                new Thread() {
                    @Override
                    public void run() {
                        // BUGFIX: the worker body is wrapped in try/catch —
                        // previously any exception here killed the thread and
                        // the JS promise never settled.
                        try {
                            tesseract = createTesseractAPI(lang, tessOptions);
                            tesseract.setImage(bitmap);
                            tesseract.getHOCRText(0); // triggers recognition + progress events
                            String recognizedText = tesseract.getUTF8Text();
                            tesseract.end();
                            promise.resolve(recognizedText);
                        } catch (Exception e) {
                            Log.e(getName(), "Could not recognize text. " + e.toString(), e);
                            promise.reject("Could not recognize text", e.toString());
                        }
                    }
                }.start();
            } else {
                throw new IOException("Could not decode a file path into a bitmap.");
            }
        } catch (IOException e) {
            Log.e(getName(), "Could not access trained files. " + e.toString(), e);
            promise.reject("Could not access trained files", e.toString());
        } catch (Exception e) {
            Log.e(getName(), "Could not recognize text. " + e.toString(), e);
            promise.reject("Could not recognize text", e.toString());
        }
    }

    /**
     * Runs OCR and resolves the promise with an array of tokens, one per
     * iterator level ("word" by default), each with text, confidence and
     * bounding box.
     */
    @ReactMethod
    public void recognizeTokens(String imageSource, final String lang,
                                @Nullable final ReadableMap tessOptions, final Promise promise) {
        Log.d(getName(), "recognizeTokens");
        try {
            if (shouldCopyTrainedFile(lang)) {
                prepareTrainedFilesDirectory();
                copyTrainedFile(lang);
            }

            final int iteratorLevel = getIteratorLevel(
                    tessOptions != null ? tessOptions.getString(KEY_TOKEN_LEVEL) : "word");
            final Bitmap bitmap = getBitmap(imageSource);
            if (bitmap != null) {
                new Thread() {
                    @Override
                    public void run() {
                        // BUGFIX: same promise-never-settles fix as recognize().
                        try {
                            tesseract = createTesseractAPI(lang, tessOptions);
                            tesseract.setImage(bitmap);
                            tesseract.getHOCRText(0);

                            WritableArray tokens = Arguments.createArray();
                            WritableMap tempMap;
                            WritableMap bounding;
                            ResultIterator iterator = tesseract.getResultIterator();
                            iterator.begin();
                            do {
                                bounding = Arguments.createMap();
                                tempMap = Arguments.createMap();

                                Rect rect = iterator.getBoundingRect(iteratorLevel);
                                bounding.putInt("bottom", rect.bottom);
                                bounding.putInt("left", rect.left);
                                bounding.putInt("right", rect.right);
                                bounding.putInt("top", rect.top);

                                tempMap.putString("token", iterator.getUTF8Text(iteratorLevel));
                                tempMap.putDouble("confidence", iterator.confidence(iteratorLevel));
                                tempMap.putMap("bounding", bounding);

                                tokens.pushMap(tempMap);
                            } while (iterator.next(iteratorLevel));
                            iterator.delete();
                            tesseract.end();
                            promise.resolve(tokens);
                        } catch (Exception e) {
                            Log.e(getName(), "Could not recognize text. " + e.toString(), e);
                            promise.reject("Could not recognize text", e.toString());
                        }
                    }
                }.start();
            } else {
                throw new IOException("Could not decode a file path into a bitmap.");
            }
        } catch (IOException e) {
            Log.e(getName(), "Could not access trained files. " + e.toString(), e);
            promise.reject("Could not access trained files", e.toString());
        } catch (Exception e) {
            Log.e(getName(), "Could not recognize text. " + e.toString(), e);
            promise.reject("Could not recognize text", e.toString());
        }
    }

    /** Builds a TessBaseAPI with progress reporting and optional allow/deny lists. */
    private TessBaseAPI createTesseractAPI(String lang, @Nullable final ReadableMap tessOptions) {
        TessBaseAPI tessBaseAPI = new TessBaseAPI(createProgressNotifier());
        tessBaseAPI.init(DATA_PATH + File.separator, lang);

        if (tessOptions != null) {
            // Allow List - List of characters you want to detect
            if (tessOptions.hasKey(KEY_ALLOW_LIST)
                    && tessOptions.getString(KEY_ALLOW_LIST) != null
                    && !tessOptions.getString(KEY_ALLOW_LIST).isEmpty()) {
                Log.d(getName(), KEY_ALLOW_LIST + " " + tessOptions.getString(KEY_ALLOW_LIST));
                tessBaseAPI.setVariable(TessBaseAPI.VAR_CHAR_WHITELIST,
                        tessOptions.getString(KEY_ALLOW_LIST));
            }

            // Deny List - List of characters you DON'T want to detect
            if (tessOptions.hasKey(KEY_DENY_LIST)
                    && tessOptions.getString(KEY_DENY_LIST) != null
                    && !tessOptions.getString(KEY_DENY_LIST).isEmpty()) {
                Log.d(getName(), KEY_DENY_LIST + " " + tessOptions.getString(KEY_DENY_LIST));
                tessBaseAPI.setVariable(TessBaseAPI.VAR_CHAR_BLACKLIST,
                        tessOptions.getString(KEY_DENY_LIST));
            }
        }

        return tessBaseAPI;
    }

    /** Forwards native progress callbacks to the JS event emitter. */
    private TessBaseAPI.ProgressNotifier createProgressNotifier() {
        return new TessBaseAPI.ProgressNotifier() {
            @Override
            public void onProgressValues(TessBaseAPI.ProgressValues progressValues) {
                Log.d(getName(), "progress " + String.valueOf(progressValues.getPercent()));
                onProgress(progressValues.getPercent());
            }
        };
    }

    /** Emits an "onProgressChange" event with the current percentage to JS. */
    private void onProgress(int percent) {
        Log.d(getName(), "onProgressChange " + Integer.toString(percent));
        WritableMap payload = Arguments.createMap();
        payload.putInt("percent", percent);
        this.reactContext
                .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
                .emit("onProgressChange", payload);
    }

    /** Maps a level name ("block"/"paragraph"/"symbol"/"line"/"word") to the Tesseract constant. */
    private int getIteratorLevel(String level) {
        switch (level) {
            case "block":
                return TessBaseAPI.PageIteratorLevel.RIL_BLOCK;
            case "paragraph":
                return TessBaseAPI.PageIteratorLevel.RIL_PARA;
            case "symbol":
                return TessBaseAPI.PageIteratorLevel.RIL_SYMBOL;
            case "line":
                return TessBaseAPI.PageIteratorLevel.RIL_TEXTLINE;
            default: // word
                return TessBaseAPI.PageIteratorLevel.RIL_WORD;
        }
    }

    /**
     * Decodes a local file path (optionally prefixed with "file://") into a
     * Bitmap. Remote URLs are rejected.
     */
    private Bitmap getBitmap(String imageSource) throws Exception {
        String path = imageSource.startsWith("file://")
                ? imageSource.replace("file://", "") : imageSource;

        if (path.startsWith("http://") || path.startsWith("https://")) {
            // TODO: support remote files
            throw new Exception("Cannot select remote files");
        }

        return BitmapFactory.decodeFile(path, new BitmapFactory.Options());
    }

    /** @return true when the trained-data file for {@code lang} is not yet on disk. */
    private boolean shouldCopyTrainedFile(String lang) {
        Log.d(getName(), "should copy " + lang + " trained files?");
        String filePath = TESS_FILES_PATH + File.separator + lang + TESS_FILES_EXTENSION;
        File file = new File(filePath);
        return !file.exists();
    }

    /** Creates the tessdata directory on external storage if missing. */
    private void prepareTrainedFilesDirectory() throws IOException {
        Log.d(getName(), "prepare trained files directory");
        File dir = new File(TESS_FILES_PATH);
        if (!dir.exists()) {
            if (!dir.mkdirs()) {
                Log.e(getName(), "Could not create directory, please make sure the app has write permission");
                throw new IOException("Could not create directory");
            }
        }
    }

    /** Copies the bundled trained-data asset for {@code lang} into the tessdata directory. */
    private void copyTrainedFile(String lang) throws IOException {
        Log.d(getName(), "copy tesseract data file (" + lang + ")");
        String assetPath = TESS_FILES_DIRECTORY + File.separator + lang + TESS_FILES_EXTENSION;
        String newAssetPath = DATA_PATH + File.separator + assetPath;
        copyAsset(assetPath, newAssetPath);
    }

    /**
     * Streams an asset to a file on disk.
     * BUGFIX: uses try-with-resources — the original left both streams open
     * (leaked file descriptors) if any read/write threw.
     */
    private void copyAsset(String from, String to) throws IOException {
        Log.d(getName(), "copy asset " + from + " to " + to);
        try (InputStream in = reactContext.getAssets().open(from);
             OutputStream out = new FileOutputStream(to)) {
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.deltaspike.data.api.criteria; import java.util.Collection; import java.util.List; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.JoinType; import javax.persistence.criteria.Path; import javax.persistence.criteria.Predicate; import javax.persistence.metamodel.CollectionAttribute; import javax.persistence.metamodel.ListAttribute; import javax.persistence.metamodel.MapAttribute; import javax.persistence.metamodel.PluralAttribute; import javax.persistence.metamodel.SetAttribute; import javax.persistence.metamodel.SingularAttribute; /** * Criteria API utilities. * * @param <C> Entity type. * @param <R> Result type. */ public interface Criteria<C, R> { /** * Executes the query and returns the result list. * @return List of entities matching the query. */ List<R> getResultList(); /** * Executes the query which has a single result. * @return Entity matching the search query. */ R getSingleResult(); /** * Creates a JPA query object to be executed. * @return A {@link TypedQuery} object ready to return results. */ TypedQuery<R> createQuery(); /** * Boolean OR with another Criteria. 
* @param criteria The right side of the boolean OR. * @return Fluent API: Criteria instance. */ Criteria<C, R> or(Criteria<C, R>... criteria); /** * Boolean OR with another Criteria. * @param criteria The right side of the boolean OR. * @return Fluent API: Criteria instance. */ Criteria<C, R> or(Collection<Criteria<C, R>> criteria); /** * Join an attribute with another Criteria. * @param att The attribute to join. * @param criteria The join criteria. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> join(SingularAttribute<? super C, P> att, Criteria<P, P> criteria); /** * Join a collection attribute with another Criteria. * @param att The attribute to join. * @param criteria The join criteria. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> join(ListAttribute<? super C, P> att, Criteria<P, P> criteria); /** * Join a collection attribute with another Criteria. * @param att The attribute to join. * @param criteria The join criteria. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> join(CollectionAttribute<? super C, P> att, Criteria<P, P> criteria); /** * Join a collection attribute with another Criteria. * @param att The attribute to join. * @param criteria The join criteria. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> join(SetAttribute<? super C, P> att, Criteria<P, P> criteria); /** * Join a collection attribute with another Criteria. * @param att The attribute to join. * @param criteria The join criteria. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> join(MapAttribute<? super C, E, P> att, Criteria<P, P> criteria); /** * Fetch join an attribute. * @param att The attribute to fetch. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> fetch(SingularAttribute<? super C, P> att); /** * Fetch join an attribute. * @param att The attribute to fetch. * @param joinType The JoinType to use. * @return Fluent API: Criteria instance. 
*/ <P, E> Criteria<C, R> fetch(SingularAttribute<? super C, P> att, JoinType joinType); /** * Fetch join an attribute. * @param att The attribute to fetch. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> fetch(PluralAttribute<? super C, P, E> att); /** * Fetch join an attribute. * @param att The attribute to fetch. * @param joinType The JoinType to use. * @return Fluent API: Criteria instance. */ <P, E> Criteria<C, R> fetch(PluralAttribute<? super C, P, E> att, JoinType joinType); /** * Apply sorting by an attribute, ascending direction. * @param att The attribute to order for. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> orderAsc(SingularAttribute<? super C, P> att); /** * Apply sorting by an attribute, descending direction. * @param att The attribute to order for. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> orderDesc(SingularAttribute<? super C, P> att); /** * Create a select query. * @param resultClass The query result class. * @param selection List of selects (attributes, scalars...) * @return Fluent API: Criteria instance. */ <N> Criteria<C, N> select(Class<N> resultClass, QuerySelection<? super C, ?>... selection); /** * Create a select query. * @param selection List of selects (attributes, scalars...) * @return Fluent API: Criteria instance. */ Criteria<C, Object[]> select(QuerySelection<? super C, ?>... selection); /** * Apply a distinct on the query. * @return Fluent API: Criteria instance. */ Criteria<C, R> distinct(); /** * Equals predicate. * @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> eq(SingularAttribute<? super C, P> att, P value); /** * Not Equals predicate. * @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> notEq(SingularAttribute<? super C, P> att, P value); /** * Like predicate. 
* @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> like(SingularAttribute<? super C, String> att, String value); /** * Not like predicate. * @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> notLike(SingularAttribute<? super C, String> att, String value); /** * Less than predicate. * @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P extends Number> Criteria<C, R> lt(SingularAttribute<? super C, P> att, P value); /** * Less than or equals predicate. * @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P extends Comparable<? super P>> Criteria<C, R> ltOrEq(SingularAttribute<? super C, P> att, P value); /** * Greater than predicate. * @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P extends Number> Criteria<C, R> gt(SingularAttribute<? super C, P> att, P value); /** * Greater than or equals predicate. * @param att The attribute to compare with. * @param value The comparison value. * @return Fluent API: Criteria instance. */ <P extends Comparable<? super P>> Criteria<C, R> gtOrEq(SingularAttribute<? super C, P> att, P value); /** * Between predicate. * @param att The attribute to compare with. * @param lower The lower bound comparison value. * @param upper The upper bound comparison value. * @return Fluent API: Criteria instance. */ <P extends Comparable<? super P>> Criteria<C, R> between(SingularAttribute<? super C, P> att, P lower, P upper); /** * IsNull predicate. * @param att The null attribute. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> isNull(SingularAttribute<? super C, P> att); /** * NotNull predicate. * @param att The non-null attribute. 
* @return Fluent API: Criteria instance. */ <P> Criteria<C, R> notNull(SingularAttribute<? super C, P> att); /** * Empty predicate. * @param att The collection attribute to check for emptiness. * @return Fluent API: Criteria instance. */ <P extends Collection<?>> Criteria<C, R> empty(SingularAttribute<? super C, P> att); /** * Not empty predicate. * @param att The collection attribute to check for non-emptiness. * @return Fluent API: Criteria instance. */ <P extends Collection<?>> Criteria<C, R> notEmpty(SingularAttribute<? super C, P> att); /** * In predicate. * @param att The attribute to check for. * @param values The values for the in predicate. * @return Fluent API: Criteria instance. */ <P> Criteria<C, R> in(SingularAttribute<? super C, P> att, P... values); /** * Return the list of predicates applicable for this Criteria instance. * @param builder A CriteriaBuilder used to instantiate the Predicates. * @param path Current path. * @return List of predicates applicable to this Criteria. */ List<Predicate> predicates(CriteriaBuilder builder, Path<C> path); }
/* The MIT License (MIT) Copyright (c) 2015 Terence Parr, Hanzhou Shi, Shuai Yuan, Yuanyuan Zhang Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package wich.semantics; import org.antlr.symtab.Type; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.TerminalNode; import wich.parser.WichParser; import wich.parser.WichParser.ExprContext; import wich.semantics.symbols.WBuiltInTypeSymbol; import java.util.Collections; import java.util.List; import java.util.function.Function; import java.util.function.Predicate; import static wich.parser.WichParser.ADD; import static wich.parser.WichParser.AND; import static wich.parser.WichParser.DIV; import static wich.parser.WichParser.EQUAL_EQUAL; import static wich.parser.WichParser.GE; import static wich.parser.WichParser.GT; import static wich.parser.WichParser.LE; import static wich.parser.WichParser.LT; import static wich.parser.WichParser.MUL; import static wich.parser.WichParser.NOT_EQUAL; import static wich.parser.WichParser.OR; import static wich.parser.WichParser.SUB; import static wich.semantics.SymbolTable._boolean; import static wich.semantics.SymbolTable._float; import static wich.semantics.SymbolTable._int; import static wich.semantics.SymbolTable._string; import static wich.semantics.SymbolTable._vector; public class TypeHelper { protected static final WBuiltInTypeSymbol[][][] opResultTypeMap = new WBuiltInTypeSymbol[WichParser.tokenNames.length+1][][]; protected static final WBuiltInTypeSymbol[][][] operandPromotionMap = new WBuiltInTypeSymbol[WichParser.tokenNames.length+1][][]; // ---------------------------- Result Type Table ---------------------------- // *, / - protected static final WBuiltInTypeSymbol[][] arithmeticResultTable = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {_int, _float, null, _vector, null}, /*float*/ {_float, _float, null, _vector, null}, /*string*/ {null, null, null, null, null}, /*vector*/ {_vector, _vector, null, _vector, null}, /*boolean*/ {null, null, null, null, null} }; // + protected static final WBuiltInTypeSymbol[][] arithmeticStrResultTable = new 
WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {_int, _float, _string, _vector, null}, /*float*/ {_float, _float, _string, _vector, null}, /*string*/ {_string, _string, _string, _string, null}, /*vector*/ {_vector, _vector, _string, _vector, null}, /*boolean*/ {null, null, null, null, null} }; // <, <=, >, >= protected static final WBuiltInTypeSymbol[][] relationalResultTable = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {_boolean, _boolean, null, null, null}, /*float*/ {_boolean, _boolean, null, null, null}, /*string*/ {null, null, _boolean, null, null}, /*vector*/ {null, null, null, null, null}, /*boolean*/ {null, null, null, null, null} }; // ==, != (also assign =) protected static final WBuiltInTypeSymbol[][] equalityResultTable = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {_boolean, _boolean, null, null, null}, /*float*/ {_boolean, _boolean, null, null, null}, /*string*/ {null, null, _boolean, null, null}, /*vector*/ {null, null, null, _boolean, null}, /*boolean*/ {null, null, null, null, _boolean} }; // and, or protected static final WBuiltInTypeSymbol[][] logicalResultTable = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {null, null, null, null, null}, /*float*/ {null, null, null, null, null}, /*string*/ {null, null, null, null, null}, /*vector*/ {null, null, null, null, null}, /*boolean*/ {null, null, null, null, _boolean} }; // ---------------------------- Type Promotion Table ---------------------------- // *, /, - protected static final WBuiltInTypeSymbol[][] arithmeticPromoteFromTo = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {null, _float, null, _vector, null}, /*float*/ {null, null, null, _vector, null}, /*string*/ {null, null, null, null, null}, /*vector*/ {null, null, null, null, null}, /*boolean*/ {null, null, null, null, null} }; // + protected static final WBuiltInTypeSymbol[][] 
arithmeticStrPromoteFromTo = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {null, _float, _string, _vector, null}, /*float*/ {null, null, _string, _vector, null}, /*string*/ {null, null, null, null, null}, /*vector*/ {null, null, _string, null, null}, /*boolean*/ {null, null, null, null, null} }; // <, <=, >, >= protected static final WBuiltInTypeSymbol[][] relationalPromoteFromTo = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {null, _float, null, null, null}, /*float*/ {null, null, null, null, null}, /*string*/ {null, null, null, null, null}, /*vector*/ {null, null, null, null, null}, /*boolean*/ {null, null, null, null, null} }; // ==, != (also assign =) protected static final WBuiltInTypeSymbol[][] equalityPromoteFromTo = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {null, _float, null, null, null}, /*float*/ {null, null, null, null, null}, /*string*/ {null, null, null, null, null}, /*vector*/ {null, null, null, null, null}, /*boolean*/ {null, null, null, null, null} }; // and, or protected static final WBuiltInTypeSymbol[][] logicalPromoteFromTo = new WBuiltInTypeSymbol[][] { /* int float string vector boolean */ /*int*/ {null, null, null, null, null}, /*float*/ {null, null, null, null, null}, /*string*/ {null, null, null, null, null}, /*vector*/ {null, null, null, null, null}, /*boolean*/ {null, null, null, null, null} }; static { // register result tables. 
opResultTypeMap[MUL] = arithmeticResultTable; opResultTypeMap[SUB] = arithmeticResultTable; opResultTypeMap[DIV] = arithmeticResultTable; opResultTypeMap[ADD] = arithmeticStrResultTable; opResultTypeMap[LT] = relationalResultTable; opResultTypeMap[LE] = relationalResultTable; opResultTypeMap[GT] = relationalResultTable; opResultTypeMap[GE] = relationalResultTable; opResultTypeMap[EQUAL_EQUAL] = equalityResultTable; opResultTypeMap[NOT_EQUAL] = equalityResultTable; opResultTypeMap[AND] = logicalResultTable; opResultTypeMap[OR] = logicalResultTable; // register promote tables. operandPromotionMap[MUL] = arithmeticPromoteFromTo; operandPromotionMap[SUB] = arithmeticPromoteFromTo; operandPromotionMap[DIV] = arithmeticPromoteFromTo; operandPromotionMap[ADD] = arithmeticStrPromoteFromTo; operandPromotionMap[LT] = relationalPromoteFromTo; operandPromotionMap[LE] = relationalPromoteFromTo; operandPromotionMap[GT] = relationalPromoteFromTo; operandPromotionMap[GE] = relationalPromoteFromTo; operandPromotionMap[EQUAL_EQUAL] = equalityPromoteFromTo; operandPromotionMap[NOT_EQUAL] = equalityPromoteFromTo; operandPromotionMap[AND] = logicalPromoteFromTo; operandPromotionMap[OR] = logicalPromoteFromTo; } /** This method is the general helper method used to calculate result type. * You should use the method in SymbolTable based on this method. 
*/ public static Type getResultType(int op, ExprContext le, ExprContext re) { int li = le.exprType.getTypeIndex(); int ri = re.exprType.getTypeIndex(); Type resultType = opResultTypeMap[op][li][ri]; if (resultType == null) { return SymbolTable.INVALID_TYPE; } le.promoteToType = operandPromotionMap[op][li][ri]; re.promoteToType = operandPromotionMap[op][ri][li]; return resultType; } public static Type getPromoteType(int op, WichParser.OpContext ctx) { ExprContext le = ctx.expr(0); ExprContext re = ctx.expr(1); int li = le.exprType.getTypeIndex(); int ri = re.exprType.getTypeIndex(); Type leftToRight = operandPromotionMap[op][li][ri]; Type rightToLeft = operandPromotionMap[op][ri][li]; if (leftToRight != null) return leftToRight; if (rightToLeft != null) return rightToLeft; return null; } /** This method is used to promote type in assignment. * Returns a boolean indicating whether the assignment is legal. * The expr type info must be available before this method works. */ public static boolean isLegalAssign(Type ltype, ExprContext expr) { return ltype==expr.exprType || ltype==expr.promoteToType; } /** This method is used to promote type during type annotation */ public static void promote(ExprContext elem, Type targetType) { if (elem.exprType != null && elem.promoteToType != targetType) { // elem.exprType may not be known int selfIndex = elem.exprType.getTypeIndex(); elem.promoteToType = equalityPromoteFromTo[selfIndex][targetType.getTypeIndex()]; } } public static String dumpWithType(ParserRuleContext tree) { if (tree == null) return ""; return String.valueOf(process(tree.children, (t) -> t instanceof TerminalNode, (t) -> "", (t) -> dumpNonTerminal((ParserRuleContext) t))); } private static String dumpNonTerminal(ParserRuleContext ctx) { StringBuilder sb = new StringBuilder(); if (ctx instanceof ExprContext || ctx instanceof WichParser.Call_exprContext) { sb.append(dumpExprWithType(ctx)); } else { sb.append(dumpWithType(ctx)); } return sb.toString(); } protected 
static String dumpExprWithType(ParserRuleContext ctx) { StringBuilder sb = new StringBuilder(); if (ctx instanceof WichParser.AtomContext) { sb.append(dumpPrimaryWithType((WichParser.AtomContext) ctx)); } else if (ctx instanceof WichParser.OpContext) { sb.append(dumpOpWithType((WichParser.OpContext) ctx)); } else if (ctx instanceof WichParser.CallContext) { sb.append(dumpCallWithType((WichParser.CallContext) ctx)); } else if (ctx instanceof WichParser.Call_exprContext){ sb.append(dumpCallWithType((WichParser.Call_exprContext) ctx)); } else { ExprContext exprCtx = (ExprContext) ctx; sb.append(exprCtx.getText()).append(":").append(getPrintType(exprCtx)).append("\n"); sb.append(process(exprCtx.children, (t)->t instanceof ExprContext, (t)->dumpExprWithType((ParserRuleContext) t), (t)->"")); } return sb.toString(); } protected static String dumpOpWithType(WichParser.OpContext ctx) { return String.valueOf(process(ctx.children, (t) -> t instanceof ExprContext, (t) -> dumpExprWithType((ParserRuleContext) t), (t) -> "")) + ctx.operator().getText() + ":" + getPrintType(ctx) + "\n"; } protected static String dumpCallWithType(WichParser.CallContext ctx) { WichParser.Expr_listContext args = ctx.call_expr().expr_list(); return ctx.getText() + ":" + getPrintType(ctx) + "\n" + String.valueOf(process(args!=null ? args.children : Collections.emptyList(), (t) -> t instanceof ExprContext, (t) -> dumpExprWithType((ParserRuleContext) t), (t) -> "")); } //overloading to dump call statement with type protected static String dumpCallWithType(WichParser.Call_exprContext ctx) { WichParser.Expr_listContext args = ctx.expr_list(); return ctx.getText() + ":" + getPrintType(ctx) + "\n" + String.valueOf(process(args!=null ? 
args.children : Collections.emptyList(), (t) -> t instanceof ExprContext, (t) -> dumpExprWithType((ParserRuleContext) t), (t) -> "")); } protected static String dumpPrimaryWithType(WichParser.AtomContext ctx) { StringBuilder sb = new StringBuilder(); sb.append(ctx.getText()).append(":").append(getPrintType(ctx)).append("\n"); if (ctx.primary() instanceof WichParser.VectorContext) { WichParser.VectorContext vectorContext = (WichParser.VectorContext) ctx.primary(); sb.append(process(vectorContext.expr_list().expr(), (t) -> true, TypeHelper::dumpExprWithType, (t) -> "")); } return sb.toString(); } protected static String getPrintType(ExprContext ctx) { StringBuilder sb = new StringBuilder(); sb.append(ctx.exprType.getName()); if (ctx.promoteToType != null) { sb.append(" => ").append(ctx.promoteToType); } return sb.toString(); } //overloading to get print type for call statement protected static String getPrintType(WichParser.Call_exprContext ctx) { StringBuilder sb = new StringBuilder(); sb.append(ctx.exprType.getName()); if (ctx.promoteToType != null) { sb.append(" => ").append(ctx.promoteToType); } return sb.toString(); } protected static <T, R> StringBuilder process(List<T> children, Predicate<T> pred, Function<T, R> func1, Function<T, R> func2) { StringBuilder sb = new StringBuilder(); for (T child : children) { if (pred.test(child)) { sb.append(func1.apply(child)); } else { sb.append(func2.apply(child)); } } return sb; } public static boolean typesAreCompatible(ExprContext elem, Type type) { return elem.exprType == type || elem.promoteToType == type; } }
package org.apache.lucene.search; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; /** The abstract base class for queries. <p>Instantiable subclasses are: <ul> <li> {@link TermQuery} <li> {@link MultiTermQuery} <li> {@link BooleanQuery} <li> {@link WildcardQuery} <li> {@link PhraseQuery} <li> {@link PrefixQuery} <li> {@link MultiPhraseQuery} <li> {@link FuzzyQuery} <li> {@link TermRangeQuery} <li> {@link NumericRangeQuery} <li> {@link org.apache.lucene.search.spans.SpanQuery} </ul> <p>A parser for queries is contained in: <ul> <li>{@link org.apache.lucene.queryParser.QueryParser QueryParser} </ul> */ public abstract class Query implements java.io.Serializable, Cloneable { private float boost = 1.0f; // query boost factor /** Sets the boost for this query clause to <code>b</code>. Documents * matching this clause will (in addition to the normal weightings) have * their score multiplied by <code>b</code>. */ public void setBoost(float b) { boost = b; } /** Gets the boost for this clause. 
Documents matching * this clause will (in addition to the normal weightings) have their score * multiplied by <code>b</code>. The boost is 1.0 by default. */ public float getBoost() { return boost; } /** Prints a query to a string, with <code>field</code> assumed to be the * default field and omitted. * <p>The representation used is one that is supposed to be readable * by {@link org.apache.lucene.queryParser.QueryParser QueryParser}. However, * there are the following limitations: * <ul> * <li>If the query was created by the parser, the printed * representation may not be exactly what was parsed. For example, * characters that need to be escaped will be represented without * the required backslash.</li> * <li>Some of the more complicated queries (e.g. span queries) * don't have a representation that can be parsed by QueryParser.</li> * </ul> */ public abstract String toString(String field); /** Prints a query to a string. */ @Override public String toString() { return toString(""); } /** * Expert: Constructs an appropriate Weight implementation for this query. * * <p> * Only implemented by primitive queries, which re-write to themselves. */ public Weight createWeight(Searcher searcher) throws IOException { throw new UnsupportedOperationException(); } /** * Expert: Constructs and initializes a Weight for a top-level query. */ public Weight weight(Searcher searcher) throws IOException { Query query = searcher.rewrite(this); Weight weight = query.createWeight(searcher); float sum = weight.sumOfSquaredWeights(); float norm = getSimilarity(searcher).queryNorm(sum); if (Float.isInfinite(norm) || Float.isNaN(norm)) norm = 1.0f; weight.normalize(norm); return weight; } /** Expert: called to re-write queries into primitive queries. For example, * a PrefixQuery will be rewritten into a BooleanQuery that consists * of TermQuerys. */ public Query rewrite(IndexReader reader) throws IOException { return this; } /** Expert: called when re-writing queries under MultiSearcher. 
* * Create a single query suitable for use by all subsearchers (in 1-1 * correspondence with queries). This is an optimization of the OR of * all queries. We handle the common optimization cases of equal * queries and overlapping clauses of boolean OR queries (as generated * by MultiTermQuery.rewrite()). * Be careful overriding this method as queries[0] determines which * method will be called and is not necessarily of the same type as * the other queries. */ public Query combine(Query[] queries) { HashSet<Query> uniques = new HashSet<Query>(); for (int i = 0; i < queries.length; i++) { Query query = queries[i]; BooleanClause[] clauses = null; // check if we can split the query into clauses boolean splittable = (query instanceof BooleanQuery); if(splittable){ BooleanQuery bq = (BooleanQuery) query; splittable = bq.isCoordDisabled(); clauses = bq.getClauses(); for (int j = 0; splittable && j < clauses.length; j++) { splittable = (clauses[j].getOccur() == BooleanClause.Occur.SHOULD); } } if(splittable){ for (int j = 0; j < clauses.length; j++) { uniques.add(clauses[j].getQuery()); } } else { uniques.add(query); } } // optimization: if we have just one query, just return it if(uniques.size() == 1){ return uniques.iterator().next(); } BooleanQuery result = new BooleanQuery(true); for (final Query query : uniques) result.add(query, BooleanClause.Occur.SHOULD); return result; } /** * Expert: adds all terms occurring in this query to the terms set. Only * works if this query is in its {@link #rewrite rewritten} form. * * @throws UnsupportedOperationException if this query is not yet rewritten */ public void extractTerms(Set<Term> terms) { // needs to be implemented by query subclasses throw new UnsupportedOperationException(); } /** Expert: merges the clauses of a set of BooleanQuery's into a single * BooleanQuery. * *<p>A utility for use by {@link #combine(Query[])} implementations. */ public static Query mergeBooleanQueries(BooleanQuery... 
queries) { HashSet<BooleanClause> allClauses = new HashSet<BooleanClause>(); for (BooleanQuery booleanQuery : queries) { for (BooleanClause clause : booleanQuery) { allClauses.add(clause); } } boolean coordDisabled = queries.length==0? false : queries[0].isCoordDisabled(); BooleanQuery result = new BooleanQuery(coordDisabled); for(BooleanClause clause2 : allClauses) { result.add(clause2); } return result; } /** Expert: Returns the Similarity implementation to be used for this query. * Subclasses may override this method to specify their own Similarity * implementation, perhaps one that delegates through that of the Searcher. * By default the Searcher's Similarity implementation is returned.*/ public Similarity getSimilarity(Searcher searcher) { return searcher.getSimilarity(); } /** Returns a clone of this query. */ @Override public Object clone() { try { return super.clone(); } catch (CloneNotSupportedException e) { throw new RuntimeException("Clone not supported: " + e.getMessage()); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Float.floatToIntBits(boost); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Query other = (Query) obj; if (Float.floatToIntBits(boost) != Float.floatToIntBits(other.boost)) return false; return true; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder.component.dsl; import javax.annotation.Generated; import org.apache.camel.Component; import org.apache.camel.builder.component.AbstractComponentBuilder; import org.apache.camel.builder.component.ComponentBuilder; import org.apache.camel.component.aws2.kinesis.Kinesis2Component; /** * Consume and produce records from and to AWS Kinesis Streams using AWS SDK * version 2.x. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.ComponentDslMojo") public interface Aws2KinesisComponentBuilderFactory { /** * AWS 2 Kinesis (camel-aws2-kinesis) * Consume and produce records from and to AWS Kinesis Streams using AWS SDK * version 2.x. * * Category: cloud,messaging * Since: 3.2 * Maven coordinates: org.apache.camel:camel-aws2-kinesis * * @return the dsl builder */ static Aws2KinesisComponentBuilder aws2Kinesis() { return new Aws2KinesisComponentBuilderImpl(); } /** * Builder for the AWS 2 Kinesis component. */ interface Aws2KinesisComponentBuilder extends ComponentBuilder<Kinesis2Component> { /** * Amazon Kinesis client to use for all requests for this endpoint. 
* * The option is a: * &lt;code&gt;software.amazon.awssdk.services.kinesis.KinesisClient&lt;/code&gt; type. * * Group: common * * @param amazonKinesisClient the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder amazonKinesisClient( software.amazon.awssdk.services.kinesis.KinesisClient amazonKinesisClient) { doSetProperty("amazonKinesisClient", amazonKinesisClient); return this; } /** * This option will set the CBOR_ENABLED property during the execution. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: true * Group: common * * @param cborEnabled the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder cborEnabled(boolean cborEnabled) { doSetProperty("cborEnabled", cborEnabled); return this; } /** * Component configuration. * * The option is a: * &lt;code&gt;org.apache.camel.component.aws2.kinesis.Kinesis2Configuration&lt;/code&gt; type. * * Group: common * * @param configuration the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder configuration( org.apache.camel.component.aws2.kinesis.Kinesis2Configuration configuration) { doSetProperty("configuration", configuration); return this; } /** * Set the need for overidding the endpoint. This option needs to be * used in combination with uriEndpointOverride option. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: common * * @param overrideEndpoint the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder overrideEndpoint( boolean overrideEndpoint) { doSetProperty("overrideEndpoint", overrideEndpoint); return this; } /** * To define a proxy host when instantiating the Kinesis client. * * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type. 
* * Group: common * * @param proxyHost the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder proxyHost(java.lang.String proxyHost) { doSetProperty("proxyHost", proxyHost); return this; } /** * To define a proxy port when instantiating the Kinesis client. * * The option is a: &lt;code&gt;java.lang.Integer&lt;/code&gt; type. * * Group: common * * @param proxyPort the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder proxyPort( java.lang.Integer proxyPort) { doSetProperty("proxyPort", proxyPort); return this; } /** * To define a proxy protocol when instantiating the Kinesis client. * * The option is a: * &lt;code&gt;software.amazon.awssdk.core.Protocol&lt;/code&gt; type. * * Default: HTTPS * Group: common * * @param proxyProtocol the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder proxyProtocol( software.amazon.awssdk.core.Protocol proxyProtocol) { doSetProperty("proxyProtocol", proxyProtocol); return this; } /** * The region in which Kinesis Firehose client needs to work. When using * this parameter, the configuration will expect the lowercase name of * the region (for example ap-east-1) You'll need to use the name * Region.EU_WEST_1.id(). * * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type. * * Group: common * * @param region the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder region(java.lang.String region) { doSetProperty("region", region); return this; } /** * If we want to trust all certificates in case of overriding the * endpoint. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: common * * @param trustAllCertificates the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder trustAllCertificates( boolean trustAllCertificates) { doSetProperty("trustAllCertificates", trustAllCertificates); return this; } /** * Set the overriding uri endpoint. 
This option needs to be used in * combination with overrideEndpoint option. * * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type. * * Group: common * * @param uriEndpointOverride the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder uriEndpointOverride( java.lang.String uriEndpointOverride) { doSetProperty("uriEndpointOverride", uriEndpointOverride); return this; } /** * Set whether the Kinesis client should expect to load credentials * through a default credentials provider or to expect static * credentials to be passed in. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: common * * @param useDefaultCredentialsProvider the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder useDefaultCredentialsProvider( boolean useDefaultCredentialsProvider) { doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider); return this; } /** * Allows for bridging the consumer to the Camel routing Error Handler, * which mean any exceptions occurred while the consumer is trying to * pickup incoming messages, or the likes, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer will use the org.apache.camel.spi.ExceptionHandler to deal * with exceptions, that will be logged at WARN or ERROR level and * ignored. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: consumer * * @param bridgeErrorHandler the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder bridgeErrorHandler( boolean bridgeErrorHandler) { doSetProperty("bridgeErrorHandler", bridgeErrorHandler); return this; } /** * Defines where in the Kinesis stream to start getting records. * * The option is a: * &lt;code&gt;software.amazon.awssdk.services.kinesis.model.ShardIteratorType&lt;/code&gt; type. 
* * Default: TRIM_HORIZON * Group: consumer * * @param iteratorType the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder iteratorType( software.amazon.awssdk.services.kinesis.model.ShardIteratorType iteratorType) { doSetProperty("iteratorType", iteratorType); return this; } /** * Maximum number of records that will be fetched in each poll. * * The option is a: &lt;code&gt;int&lt;/code&gt; type. * * Default: 1 * Group: consumer * * @param maxResultsPerRequest the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder maxResultsPerRequest( int maxResultsPerRequest) { doSetProperty("maxResultsPerRequest", maxResultsPerRequest); return this; } /** * The sequence number to start polling from. Required if iteratorType * is set to AFTER_SEQUENCE_NUMBER or AT_SEQUENCE_NUMBER. * * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type. * * Group: consumer * * @param sequenceNumber the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder sequenceNumber( java.lang.String sequenceNumber) { doSetProperty("sequenceNumber", sequenceNumber); return this; } /** * Define what will be the behavior in case of shard closed. Possible * value are ignore, silent and fail. In case of ignore a message will * be logged and the consumer will restart from the beginning,in case of * silent there will be no logging and the consumer will start from the * beginning,in case of fail a ReachedClosedStateException will be * raised. * * The option is a: * &lt;code&gt;org.apache.camel.component.aws2.kinesis.Kinesis2ShardClosedStrategyEnum&lt;/code&gt; type. 
* * Default: ignore * Group: consumer * * @param shardClosed the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder shardClosed( org.apache.camel.component.aws2.kinesis.Kinesis2ShardClosedStrategyEnum shardClosed) { doSetProperty("shardClosed", shardClosed); return this; } /** * Defines which shardId in the Kinesis stream to get records from. * * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type. * * Group: consumer * * @param shardId the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder shardId(java.lang.String shardId) { doSetProperty("shardId", shardId); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: producer * * @param lazyStartProducer the value to set * @return the dsl builder */ default Aws2KinesisComponentBuilder lazyStartProducer( boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether autowiring is enabled. This is used for automatic autowiring * options (the option must be marked as autowired) by looking up in the * registry to find if there is a single instance of matching type, * which then gets configured on the component. This can be used for * automatic configuring JDBC data sources, JMS connection factories, * AWS Clients, etc. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. 
         *
         * Default: true
         * Group: advanced
         *
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default Aws2KinesisComponentBuilder autowiredEnabled(
                boolean autowiredEnabled) {
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }
        /**
         * Amazon AWS Access Key.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param accessKey the value to set
         * @return the dsl builder
         */
        default Aws2KinesisComponentBuilder accessKey(java.lang.String accessKey) {
            doSetProperty("accessKey", accessKey);
            return this;
        }
        /**
         * Amazon AWS Secret Key.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param secretKey the value to set
         * @return the dsl builder
         */
        default Aws2KinesisComponentBuilder secretKey(java.lang.String secretKey) {
            doSetProperty("secretKey", secretKey);
            return this;
        }
    }

    // Concrete builder: applies each DSL-recorded property either directly on the
    // component or on its lazily created Kinesis2Configuration.
    // NOTE(review): this class looks auto-generated (component-dsl); prefer fixing the
    // generator metadata over hand-editing this file.
    class Aws2KinesisComponentBuilderImpl
            extends
                AbstractComponentBuilder<Kinesis2Component>
            implements
                Aws2KinesisComponentBuilder {
        @Override
        protected Kinesis2Component buildConcreteComponent() {
            return new Kinesis2Component();
        }
        // Returns the component's configuration, creating and attaching an empty
        // one first if the component does not have one yet.
        private org.apache.camel.component.aws2.kinesis.Kinesis2Configuration getOrCreateConfiguration(
                org.apache.camel.component.aws2.kinesis.Kinesis2Component component) {
            if (component.getConfiguration() == null) {
                component.setConfiguration(new org.apache.camel.component.aws2.kinesis.Kinesis2Configuration());
            }
            return component.getConfiguration();
        }
        // Dispatches a named option to the matching setter; returns false for
        // unknown names so the caller can report/ignore them.
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            case "amazonKinesisClient": getOrCreateConfiguration((Kinesis2Component) component).setAmazonKinesisClient((software.amazon.awssdk.services.kinesis.KinesisClient) value); return true;
            case "cborEnabled": getOrCreateConfiguration((Kinesis2Component) component).setCborEnabled((boolean) value); return true;
            case "configuration": ((Kinesis2Component) component).setConfiguration((org.apache.camel.component.aws2.kinesis.Kinesis2Configuration) value); return true;
            case "overrideEndpoint": getOrCreateConfiguration((Kinesis2Component) component).setOverrideEndpoint((boolean) value); return true;
            case "proxyHost": getOrCreateConfiguration((Kinesis2Component) component).setProxyHost((java.lang.String) value); return true;
            case "proxyPort": getOrCreateConfiguration((Kinesis2Component) component).setProxyPort((java.lang.Integer) value); return true;
            case "proxyProtocol": getOrCreateConfiguration((Kinesis2Component) component).setProxyProtocol((software.amazon.awssdk.core.Protocol) value); return true;
            case "region": getOrCreateConfiguration((Kinesis2Component) component).setRegion((java.lang.String) value); return true;
            case "trustAllCertificates": getOrCreateConfiguration((Kinesis2Component) component).setTrustAllCertificates((boolean) value); return true;
            case "uriEndpointOverride": getOrCreateConfiguration((Kinesis2Component) component).setUriEndpointOverride((java.lang.String) value); return true;
            case "useDefaultCredentialsProvider": getOrCreateConfiguration((Kinesis2Component) component).setUseDefaultCredentialsProvider((boolean) value); return true;
            case "bridgeErrorHandler": ((Kinesis2Component) component).setBridgeErrorHandler((boolean) value); return true;
            case "iteratorType": getOrCreateConfiguration((Kinesis2Component) component).setIteratorType((software.amazon.awssdk.services.kinesis.model.ShardIteratorType) value); return true;
            case "maxResultsPerRequest": getOrCreateConfiguration((Kinesis2Component) component).setMaxResultsPerRequest((int) value); return true;
            case "sequenceNumber": getOrCreateConfiguration((Kinesis2Component) component).setSequenceNumber((java.lang.String) value); return true;
            case "shardClosed": getOrCreateConfiguration((Kinesis2Component) component).setShardClosed((org.apache.camel.component.aws2.kinesis.Kinesis2ShardClosedStrategyEnum) value); return true;
            case "shardId": getOrCreateConfiguration((Kinesis2Component) component).setShardId((java.lang.String) value); return true;
            case "lazyStartProducer": ((Kinesis2Component) component).setLazyStartProducer((boolean) value); return true;
            case "autowiredEnabled": ((Kinesis2Component) component).setAutowiredEnabled((boolean) value); return true;
            case "accessKey": getOrCreateConfiguration((Kinesis2Component) component).setAccessKey((java.lang.String) value); return true;
            case "secretKey": getOrCreateConfiguration((Kinesis2Component) component).setSecretKey((java.lang.String) value); return true;
            default: return false;
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io.hbase; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.Set; import java.util.TreeSet; import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.hadoop.SerializableConfiguration; import org.apache.beam.sdk.io.range.ByteKey; import org.apache.beam.sdk.io.range.ByteKeyRange; import org.apache.beam.sdk.io.range.ByteKeyRangeTracker; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.RegionLoad; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.BufferedMutator; import org.apache.hadoop.hbase.client.BufferedMutatorParams; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.util.Bytes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A bounded source and sink for HBase. * * <p>For more information, see the online documentation at <a * href="https://hbase.apache.org/">HBase</a>. * * <h3>Reading from HBase</h3> * * <p>The HBase source returns a set of rows from a single table, returning a {@code * PCollection<Result>}. * * <p>To configure a HBase source, you must supply a table id and a {@link Configuration} to * identify the HBase instance. By default, {@link HBaseIO.Read} will read all rows in the table. * The row range to be read can optionally be restricted using with a {@link Scan} object or using * the {@link HBaseIO.Read#withKeyRange}, and a {@link Filter} using {@link * HBaseIO.Read#withFilter}, for example: * * <pre>{@code * // Scan the entire table. * p.apply("read", * HBaseIO.read() * .withConfiguration(configuration) * .withTableId("table")); * * // Filter data using a HBaseIO Scan * Scan scan = ... 
 * p.apply("read",
 *     HBaseIO.read()
 *         .withConfiguration(configuration)
 *         .withTableId("table"))
 *         .withScan(scan));
 *
 * // Scan a prefix of the table.
 * ByteKeyRange keyRange = ...;
 * p.apply("read",
 *     HBaseIO.read()
 *         .withConfiguration(configuration)
 *         .withTableId("table")
 *         .withKeyRange(keyRange));
 *
 * // Scan a subset of rows that match the specified row filter.
 * p.apply("filtered read",
 *     HBaseIO.read()
 *         .withConfiguration(configuration)
 *         .withTableId("table")
 *         .withFilter(filter));
 * }</pre>
 *
 * <h3>Writing to HBase</h3>
 *
 * <p>The HBase sink executes a set of row mutations on a single table. It takes as input a {@link
 * PCollection PCollection&lt;Mutation&gt;}, where each {@link Mutation} represents an idempotent
 * transformation on a row.
 *
 * <p>To configure a HBase sink, you must supply a table id and a {@link Configuration} to identify
 * the HBase instance, for example:
 *
 * <pre>{@code
 * Configuration configuration = ...;
 * PCollection<Mutation> data = ...;
 *
 * data.apply("write",
 *     HBaseIO.write()
 *         .withConfiguration(configuration)
 *         .withTableId("table"));
 * }</pre>
 *
 * <h3>Experimental</h3>
 *
 * <p>The design of the API for HBaseIO is currently related to the BigtableIO one, it can evolve or
 * be different in some aspects, but the idea is that users can easily migrate from one to the other
 * .
 */
@Experimental(Experimental.Kind.SOURCE_SINK)
public class HBaseIO {
  private static final Logger LOG = LoggerFactory.getLogger(HBaseIO.class);

  /** Disallow construction of utility class. */
  private HBaseIO() {}

  /**
   * Creates an uninitialized {@link HBaseIO.Read}. Before use, the {@code Read} must be initialized
   * with a {@link HBaseIO.Read#withConfiguration(Configuration)} that specifies the HBase instance,
   * and a {@link HBaseIO.Read#withTableId tableId} that specifies which table to read. A {@link
   * Filter} may also optionally be specified using {@link HBaseIO.Read#withFilter}.
   */
  @Experimental
  public static Read read() {
    // Starts with no configuration, an empty table id and a full-table Scan;
    // the with* methods below refine these immutably.
    return new Read(null, "", new SerializableScan(new Scan()));
  }

  /**
   * A {@link PTransform} that reads from HBase. See the class-level Javadoc on {@link HBaseIO} for
   * more information.
   *
   * @see HBaseIO
   */
  public static class Read extends PTransform<PBegin, PCollection<Result>> {
    /**
     * Returns a new {@link HBaseIO.Read} that will read from the HBase instance indicated by the
     * given configuration.
     */
    public Read withConfiguration(Configuration configuration) {
      checkNotNull(configuration, "conf");
      return new Read(new SerializableConfiguration(configuration), tableId, serializableScan);
    }

    /**
     * Returns a new {@link HBaseIO.Read} that will read from the specified table.
     *
     * <p>Does not modify this object.
     */
    public Read withTableId(String tableId) {
      checkNotNull(tableId, "tableId");
      return new Read(serializableConfiguration, tableId, serializableScan);
    }

    /**
     * Returns a new {@link HBaseIO.Read} that will filter the rows read from HBase using the given
     * scan.
     *
     * <p>Does not modify this object.
     */
    public Read withScan(Scan scan) {
      checkNotNull(scan, "scan");
      return new Read(serializableConfiguration, tableId, new SerializableScan(scan));
    }

    /**
     * Returns a new {@link HBaseIO.Read} that will filter the rows read from HBase using the given
     * row filter.
     *
     * <p>Does not modify this object.
     */
    public Read withFilter(Filter filter) {
      checkNotNull(filter, "filter");
      // NOTE(review): setFilter mutates the Scan held by the current serializableScan
      // before the copy is taken in withScan — confirm this aliasing is intended.
      return withScan(serializableScan.get().setFilter(filter));
    }

    /**
     * Returns a new {@link HBaseIO.Read} that will read only rows in the specified range.
     *
     * <p>Does not modify this object.
     */
    public Read withKeyRange(ByteKeyRange keyRange) {
      checkNotNull(keyRange, "keyRange");
      byte[] startRow = keyRange.getStartKey().getBytes();
      byte[] stopRow = keyRange.getEndKey().getBytes();
      return withScan(serializableScan.get().setStartRow(startRow).setStopRow(stopRow));
    }

    /**
     * Returns a new {@link HBaseIO.Read} that will read only rows in the specified range.
     *
     * <p>Does not modify this object.
     */
    public Read withKeyRange(byte[] startRow, byte[] stopRow) {
      checkNotNull(startRow, "startRow");
      checkNotNull(stopRow, "stopRow");
      ByteKeyRange keyRange =
          ByteKeyRange.of(ByteKey.copyFrom(startRow), ByteKey.copyFrom(stopRow));
      return withKeyRange(keyRange);
    }

    private Read(
        SerializableConfiguration serializableConfiguration,
        String tableId,
        SerializableScan serializableScan) {
      this.serializableConfiguration = serializableConfiguration;
      this.tableId = tableId;
      this.serializableScan = serializableScan;
    }

    @Override
    public PCollection<Result> expand(PBegin input) {
      // Size is estimated lazily by the source (null here) on first use.
      HBaseSource source = new HBaseSource(this, null /* estimatedSizeBytes */);
      return input.getPipeline().apply(org.apache.beam.sdk.io.Read.from(source));
    }

    @Override
    public void validate(PipelineOptions options) {
      checkArgument(serializableConfiguration != null, "Configuration not provided");
      checkArgument(!tableId.isEmpty(), "Table ID not specified");
      try (Connection connection =
          ConnectionFactory.createConnection(serializableConfiguration.get())) {
        Admin admin = connection.getAdmin();
        checkArgument(
            admin.tableExists(TableName.valueOf(tableId)), "Table %s does not exist", tableId);
      } catch (IOException e) {
        // Deliberately best-effort: a connectivity problem at validation time should
        // not fail pipeline construction, only log a warning.
        LOG.warn("Error checking whether table {} exists; proceeding.", tableId, e);
      }
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      super.populateDisplayData(builder);
      builder.add(DisplayData.item("configuration", serializableConfiguration.get().toString()));
      builder.add(DisplayData.item("tableId", tableId));
      builder.addIfNotNull(DisplayData.item("scan", serializableScan.get().toString()));
    }

    public String getTableId() {
      return tableId;
    }

    public Configuration getConfiguration() {
      return serializableConfiguration.get();
    }

    /** Returns the range of keys that will be read from the table. */
    public ByteKeyRange getKeyRange() {
      byte[] startRow = serializableScan.get().getStartRow();
      byte[] stopRow = serializableScan.get().getStopRow();
      return ByteKeyRange.of(ByteKey.copyFrom(startRow), ByteKey.copyFrom(stopRow));
    }

    private final SerializableConfiguration serializableConfiguration;
    private final String tableId;
    private final SerializableScan serializableScan;
  }

  /** Bounded source over one HBase table/scan; splits along region boundaries. */
  static class HBaseSource extends BoundedSource<Result> {
    private final Read read;
    // Cached size estimate; null until computed by getEstimatedSizeBytes.
    @Nullable private Long estimatedSizeBytes;

    HBaseSource(Read read, @Nullable Long estimatedSizeBytes) {
      this.read = read;
      this.estimatedSizeBytes = estimatedSizeBytes;
    }

    // Returns a copy of this source whose scan starts at startKey (used by dynamic splitting).
    HBaseSource withStartKey(ByteKey startKey) throws IOException {
      checkNotNull(startKey, "startKey");
      Read newRead =
          new Read(
              read.serializableConfiguration,
              read.tableId,
              new SerializableScan(
                  new Scan(read.serializableScan.get()).setStartRow(startKey.getBytes())));
      return new HBaseSource(newRead, estimatedSizeBytes);
    }

    // Returns a copy of this source whose scan stops at endKey (used by dynamic splitting).
    HBaseSource withEndKey(ByteKey endKey) throws IOException {
      checkNotNull(endKey, "endKey");
      Read newRead =
          new Read(
              read.serializableConfiguration,
              read.tableId,
              new SerializableScan(
                  new Scan(read.serializableScan.get()).setStopRow(endKey.getBytes())));
      return new HBaseSource(newRead, estimatedSizeBytes);
    }

    @Override
    public long getEstimatedSizeBytes(PipelineOptions pipelineOptions) throws Exception {
      // Compute once and cache; propagated to derived sources via the constructor.
      if (estimatedSizeBytes == null) {
        estimatedSizeBytes = estimateSizeBytes();
        LOG.debug(
            "Estimated size {} bytes for table {} and scan {}",
            estimatedSizeBytes,
            read.tableId,
            read.serializableScan.get());
      }
      return estimatedSizeBytes;
    }

    /**
     * This estimates the real size, it can be the compressed size depending on the HBase
     * configuration.
     */
    private long estimateSizeBytes() throws Exception {
      // This code is based on RegionSizeCalculator in hbase-server
      long estimatedSizeBytes = 0L;
      Configuration configuration = this.read.serializableConfiguration.get();
      try (Connection connection = ConnectionFactory.createConnection(configuration)) {
        // filter regions for the given table/scan
        List<HRegionLocation> regionLocations = getRegionLocations(connection);
        // builds set of regions who are part of the table scan
        // (byte[] has identity equals, so a comparator-based TreeSet is required)
        Set<byte[]> tableRegions = new TreeSet<>(Bytes.BYTES_COMPARATOR);
        for (HRegionLocation regionLocation : regionLocations) {
          tableRegions.add(regionLocation.getRegionInfo().getRegionName());
        }
        // calculate estimated size for the regions: sum store-file sizes (reported
        // in MB by the cluster status) for every region touched by the scan
        Admin admin = connection.getAdmin();
        ClusterStatus clusterStatus = admin.getClusterStatus();
        Collection<ServerName> servers = clusterStatus.getServers();
        for (ServerName serverName : servers) {
          ServerLoad serverLoad = clusterStatus.getLoad(serverName);
          for (RegionLoad regionLoad : serverLoad.getRegionsLoad().values()) {
            byte[] regionId = regionLoad.getName();
            if (tableRegions.contains(regionId)) {
              // MB -> bytes; long arithmetic avoids int overflow for large regions
              long regionSizeBytes = regionLoad.getStorefileSizeMB() * 1_048_576L;
              estimatedSizeBytes += regionSizeBytes;
            }
          }
        }
      }
      return estimatedSizeBytes;
    }

    // Returns the regions whose key interval overlaps the scan's [startRow, stopRow)
    // range; empty start/stop rows mean an unbounded side.
    private List<HRegionLocation> getRegionLocations(Connection connection) throws Exception {
      final Scan scan = read.serializableScan.get();
      byte[] startRow = scan.getStartRow();
      byte[] stopRow = scan.getStopRow();
      final List<HRegionLocation> regionLocations = new ArrayList<>();
      final boolean scanWithNoLowerBound = startRow.length == 0;
      final boolean scanWithNoUpperBound = stopRow.length == 0;
      TableName tableName = TableName.valueOf(read.tableId);
      RegionLocator regionLocator = connection.getRegionLocator(tableName);
      List<HRegionLocation> tableRegionInfos = regionLocator.getAllRegionLocations();
      for (HRegionLocation regionLocation : tableRegionInfos) {
        final byte[] startKey = regionLocation.getRegionInfo().getStartKey();
        final byte[] endKey = regionLocation.getRegionInfo().getEndKey();
        // An empty region end key marks the last region of the table.
        boolean isLastRegion = endKey.length == 0;
        // filters regions who are part of the scan: keep the region iff
        // scan.start < region.end AND scan.stop > region.start
        if ((scanWithNoLowerBound || isLastRegion || Bytes.compareTo(startRow, endKey) < 0)
            && (scanWithNoUpperBound || Bytes.compareTo(stopRow, startKey) > 0)) {
          regionLocations.add(regionLocation);
        }
      }
      return regionLocations;
    }

    // Produces one sub-source per overlapping region, clamping each region's key span
    // to the scan's own range. numSplits is only used as a capacity hint for the list;
    // the actual number of sources equals regionLocations.size().
    private List<HBaseSource> splitBasedOnRegions(
        List<HRegionLocation> regionLocations, int numSplits) throws Exception {
      final Scan scan = read.serializableScan.get();
      byte[] startRow = scan.getStartRow();
      byte[] stopRow = scan.getStopRow();
      final List<HBaseSource> sources = new ArrayList<>(numSplits);
      final boolean scanWithNoLowerBound = startRow.length == 0;
      final boolean scanWithNoUpperBound = stopRow.length == 0;
      for (HRegionLocation regionLocation : regionLocations) {
        final byte[] startKey = regionLocation.getRegionInfo().getStartKey();
        final byte[] endKey = regionLocation.getRegionInfo().getEndKey();
        boolean isLastRegion = endKey.length == 0;
        String host = regionLocation.getHostnamePort();
        // Split range = intersection of the region's [startKey, endKey) and the
        // scan's [startRow, stopRow).
        final byte[] splitStart =
            (scanWithNoLowerBound || Bytes.compareTo(startKey, startRow) >= 0)
                ? startKey
                : startRow;
        final byte[] splitStop =
            (scanWithNoUpperBound || Bytes.compareTo(endKey, stopRow) <= 0) && !isLastRegion
                ? endKey
                : stopRow;
        LOG.debug(
            "{} {} {} {} {}",
            sources.size(),
            host,
            read.tableId,
            Bytes.toString(splitStart),
            Bytes.toString(splitStop));
        // We need to create a new copy of the scan and read to add the new ranges
        Scan newScan = new Scan(scan).setStartRow(splitStart).setStopRow(splitStop);
        Read newRead =
            new Read(read.serializableConfiguration, read.tableId, new SerializableScan(newScan));
        sources.add(new HBaseSource(newRead, estimatedSizeBytes));
      }
      return sources;
    }

    @Override
    public List<? extends BoundedSource<Result>> split(
        long desiredBundleSizeBytes, PipelineOptions options) throws Exception {
      LOG.debug("desiredBundleSize {} bytes", desiredBundleSizeBytes);
      long estimatedSizeBytes = getEstimatedSizeBytes(options);
      int numSplits = 1;
      if (estimatedSizeBytes > 0 && desiredBundleSizeBytes > 0) {
        numSplits = (int) Math.ceil((double) estimatedSizeBytes / desiredBundleSizeBytes);
      }
      try (Connection connection = ConnectionFactory.createConnection(read.getConfiguration())) {
        List<HRegionLocation> regionLocations = getRegionLocations(connection);
        // At least one split per region, i.e. max(numSplits, #regions).
        int realNumSplits = numSplits < regionLocations.size() ? regionLocations.size() : numSplits;
        LOG.debug("Suggested {} bundle(s) based on size", numSplits);
        LOG.debug("Suggested {} bundle(s) based on number of regions", regionLocations.size());
        final List<HBaseSource> sources = splitBasedOnRegions(regionLocations, realNumSplits);
        LOG.debug("Split into {} bundle(s)", sources.size());
        // NOTE(review): numSplits is initialized to 1 and only ever increased, so this
        // condition is always true and the singletonList fallback below is dead code.
        if (numSplits >= 1) {
          return sources;
        }
        return Collections.singletonList(this);
      }
    }

    @Override
    public BoundedReader<Result> createReader(PipelineOptions pipelineOptions) throws IOException {
      return new HBaseReader(this);
    }

    @Override
    public void validate() {
      read.validate(null /* input */);
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      read.populateDisplayData(builder);
    }

    @Override
    public Coder<Result> getOutputCoder() {
      return HBaseResultCoder.of();
    }
  }

  /** Reader over an {@link HBaseSource}; supports dynamic work rebalancing via a range tracker. */
  private static class HBaseReader extends BoundedSource.BoundedReader<Result> {
    private HBaseSource source;
    private Connection connection;
    private ResultScanner scanner;
    private Iterator<Result> iter;
    private Result current;
    // Tracks consumed key range; also arbitrates splitAtFraction.
    private final ByteKeyRangeTracker rangeTracker;
    private long recordsReturned;

    HBaseReader(HBaseSource source) {
      this.source = source;
      Scan scan = source.read.serializableScan.get();
      ByteKeyRange range =
          ByteKeyRange.of(
              ByteKey.copyFrom(scan.getStartRow()), ByteKey.copyFrom(scan.getStopRow()));
      rangeTracker = ByteKeyRangeTracker.of(range);
    }
    @Override
    public boolean start() throws IOException {
      HBaseSource source = getCurrentSource();
      Configuration configuration = source.read.serializableConfiguration.get();
      String tableId = source.read.tableId;
      connection = ConnectionFactory.createConnection(configuration);
      TableName tableName = TableName.valueOf(tableId);
      Table table = connection.getTable(tableName);
      // [BEAM-2319] We have to clone the Scan because the underlying scanner may mutate it.
      Scan scanClone = new Scan(source.read.serializableScan.get());
      scanner = table.getScanner(scanClone);
      iter = scanner.iterator();
      return advance();
    }

    @Override
    public Result getCurrent() throws NoSuchElementException {
      return current;
    }

    @Override
    public boolean advance() throws IOException {
      if (!iter.hasNext()) {
        // Scanner exhausted: mark the range done (markDone returns false).
        return rangeTracker.markDone();
      }
      final Result next = iter.next();
      // A record is returned only if its key is still inside the (possibly shrunk)
      // tracked range; otherwise the range is marked done.
      boolean hasRecord =
          rangeTracker.tryReturnRecordAt(true, ByteKey.copyFrom(next.getRow()))
              || rangeTracker.markDone();
      if (hasRecord) {
        current = next;
        ++recordsReturned;
      }
      return hasRecord;
    }

    @Override
    public void close() throws IOException {
      LOG.debug("Closing reader after reading {} records.", recordsReturned);
      if (scanner != null) {
        scanner.close();
        scanner = null;
      }
      if (connection != null) {
        connection.close();
        connection = null;
      }
    }

    @Override
    public synchronized HBaseSource getCurrentSource() {
      return source;
    }

    @Override
    public final Double getFractionConsumed() {
      return rangeTracker.getFractionConsumed();
    }

    @Override
    public final long getSplitPointsConsumed() {
      return rangeTracker.getSplitPointsConsumed();
    }

    @Override
    @Nullable
    public final synchronized HBaseSource splitAtFraction(double fraction) {
      // Interpolate a split key at the requested fraction of the remaining range;
      // return null (meaning "split refused") on any failure.
      ByteKey splitKey;
      try {
        splitKey = rangeTracker.getRange().interpolateKey(fraction);
      } catch (RuntimeException e) {
        LOG.info(
            "{}: Failed to interpolate key for fraction {}.",
            rangeTracker.getRange(),
            fraction,
            e);
        return null;
      }
      LOG.info("Proposing to split {} at fraction {} (key {})", rangeTracker, fraction, splitKey);
      HBaseSource primary;
      HBaseSource residual;
      try {
        primary = source.withEndKey(splitKey);
        residual = source.withStartKey(splitKey);
      } catch (Exception e) {
        LOG.info(
            "{}: Interpolating for fraction {} yielded invalid split key {}.",
            rangeTracker.getRange(),
            fraction,
            splitKey,
            e);
        return null;
      }
      // The tracker must also accept the split before we commit to it.
      if (!rangeTracker.trySplitAtPosition(splitKey)) {
        return null;
      }
      this.source = primary;
      return residual;
    }
  }

  /**
   * Creates an uninitialized {@link HBaseIO.Write}. Before use, the {@code Write} must be
   * initialized with a {@link HBaseIO.Write#withConfiguration(Configuration)} that specifies the
   * destination HBase instance, and a {@link HBaseIO.Write#withTableId tableId} that specifies
   * which table to write.
   */
  public static Write write() {
    return new Write(null /* SerializableConfiguration */, "");
  }

  /**
   * A {@link PTransform} that writes to HBase. See the class-level Javadoc on {@link HBaseIO} for
   * more information.
   *
   * @see HBaseIO
   */
  public static class Write extends PTransform<PCollection<Mutation>, PDone> {
    /**
     * Returns a new {@link HBaseIO.Write} that will write to the HBase instance indicated by the
     * given Configuration, and using any other specified customizations.
     *
     * <p>Does not modify this object.
     */
    public Write withConfiguration(Configuration configuration) {
      checkNotNull(configuration, "conf");
      return new Write(new SerializableConfiguration(configuration), tableId);
    }

    /**
     * Returns a new {@link HBaseIO.Write} that will write to the specified table.
     *
     * <p>Does not modify this object.
     */
    public Write withTableId(String tableId) {
      checkNotNull(tableId, "tableId");
      return new Write(serializableConfiguration, tableId);
    }

    private Write(SerializableConfiguration serializableConfiguration, String tableId) {
      this.serializableConfiguration = serializableConfiguration;
      this.tableId = tableId;
    }

    @Override
    public PDone expand(PCollection<Mutation> input) {
      input.apply(ParDo.of(new HBaseWriterFn(tableId, serializableConfiguration)));
      return PDone.in(input.getPipeline());
    }

    @Override
    public void validate(PipelineOptions options) {
      checkArgument(serializableConfiguration != null, "Configuration not specified");
      checkArgument(!tableId.isEmpty(), "Table ID not specified");
      try (Connection connection =
          ConnectionFactory.createConnection(serializableConfiguration.get())) {
        Admin admin = connection.getAdmin();
        checkArgument(
            admin.tableExists(TableName.valueOf(tableId)), "Table %s does not exist", tableId);
      } catch (IOException e) {
        // Deliberately best-effort: log and proceed rather than failing construction.
        LOG.warn("Error checking whether table {} exists; proceeding.", tableId, e);
      }
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      super.populateDisplayData(builder);
      builder.add(DisplayData.item("configuration", serializableConfiguration.get().toString()));
      builder.add(DisplayData.item("tableId", tableId));
    }

    public String getTableId() {
      return tableId;
    }

    public Configuration getConfiguration() {
      return serializableConfiguration.get();
    }

    private final String tableId;
    private final SerializableConfiguration serializableConfiguration;

    /**
     * DoFn that buffers mutations with a {@link BufferedMutator}: one connection per
     * fn instance (setup/teardown), one mutator per bundle (startBundle/finishBundle),
     * flushed at bundle end.
     */
    private class HBaseWriterFn extends DoFn<Mutation, Void> {

      public HBaseWriterFn(String tableId, SerializableConfiguration serializableConfiguration) {
        this.tableId = checkNotNull(tableId, "tableId");
        this.serializableConfiguration =
            checkNotNull(serializableConfiguration, "serializableConfiguration");
      }

      @Setup
      public void setup() throws Exception {
        connection = ConnectionFactory.createConnection(serializableConfiguration.get());
      }

      @StartBundle
      public void startBundle(StartBundleContext c) throws IOException {
        BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(tableId));
        mutator = connection.getBufferedMutator(params);
        recordsWritten = 0;
      }

      @ProcessElement
      public void processElement(ProcessContext c) throws Exception {
        mutator.mutate(c.element());
        ++recordsWritten;
      }

      @FinishBundle
      public void finishBundle() throws Exception {
        // Flush guarantees all buffered mutations are durable before the bundle commits.
        mutator.flush();
        LOG.debug("Wrote {} records", recordsWritten);
      }

      @Teardown
      public void tearDown() throws Exception {
        if (mutator != null) {
          mutator.close();
          mutator = null;
        }
        if (connection != null) {
          connection.close();
          connection = null;
        }
      }

      @Override
      public void populateDisplayData(DisplayData.Builder builder) {
        builder.delegate(Write.this);
      }

      private final String tableId;
      private final SerializableConfiguration serializableConfiguration;
      private Connection connection;
      private BufferedMutator mutator;
      private long recordsWritten;
    }
  }
}
package org.apereo.cas.services.web.view; import org.apache.commons.lang3.StringUtils; import org.apereo.cas.CasProtocolConstants; import org.apereo.cas.CasViewConstants; import org.apereo.cas.authentication.Authentication; import org.apereo.cas.authentication.ProtocolAttributeEncoder; import org.apereo.cas.authentication.RememberMeCredential; import org.apereo.cas.authentication.principal.Principal; import org.apereo.cas.authentication.principal.Service; import org.apereo.cas.services.RegisteredService; import org.apereo.cas.services.RegisteredServiceAttributeReleasePolicy; import org.apereo.cas.services.ServicesManager; import org.apereo.cas.util.CollectionUtils; import org.apereo.cas.validation.Assertion; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.servlet.view.AbstractView; import java.time.ZonedDateTime; import java.util.Collection; import java.util.Enumeration; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; /** * Abstract class to handle retrieving the Assertion from the model. * * @author Scott Battaglia * @since 3.1 */ public abstract class AbstractCasView extends AbstractView { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractCasView.class); /** * Indicate whether this view will be generating the success response or not. * By default, the view is treated as a failure. */ protected final boolean successResponse; /** * The attribute encoder instance. */ protected final ProtocolAttributeEncoder protocolAttributeEncoder; /** * The Services manager. */ protected final ServicesManager servicesManager; /** * authentication context attribute name. 
*/ protected final String authenticationContextAttribute; public AbstractCasView(final boolean successResponse, final ProtocolAttributeEncoder protocolAttributeEncoder, final ServicesManager servicesManager, final String authenticationContextAttribute) { this.successResponse = successResponse; this.protocolAttributeEncoder = protocolAttributeEncoder; this.servicesManager = servicesManager; this.authenticationContextAttribute = authenticationContextAttribute; } /** * Gets the assertion from the model. * * @param model the model * @return the assertion from */ protected Assertion getAssertionFrom(final Map<String, Object> model) { return (Assertion) model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_ASSERTION); } /** * Gets error code from. * * @param model the model * @return the error code from */ protected String getErrorCodeFrom(final Map<String, Object> model) { return model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_ERROR_CODE).toString(); } /** * Gets error description from. * * @param model the model * @return the error description from */ protected String getErrorDescriptionFrom(final Map<String, Object> model) { return model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_ERROR_DESCRIPTION).toString(); } /** * Gets the PGT from the model. * * @param model the model * @return the pgt id */ protected String getProxyGrantingTicketId(final Map<String, Object> model) { return (String) model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_PROXY_GRANTING_TICKET); } /** * Gets the PGT-IOU from the model. * * @param model the model * @return the pgt-iou id */ protected String getProxyGrantingTicketIou(final Map<String, Object> model) { return (String) model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_PROXY_GRANTING_TICKET_IOU); } /** * Gets the authentication from the model. 
* * @param model the model * @return the assertion from * @since 4.1.0 */ protected Authentication getPrimaryAuthenticationFrom(final Map<String, Object> model) { return getAssertionFrom(model).getPrimaryAuthentication(); } /** * Gets model attributes. * * @param model the model * @return the model attributes */ protected Map<String, Object> getModelAttributes(final Map<String, Object> model) { return (Map<String, Object>) model.get(CasProtocolConstants.VALIDATION_CAS_MODEL_ATTRIBUTE_NAME_ATTRIBUTES); } /** * Gets authentication attributes from the primary authentication object. * * @param model the model * @return the authentication attribute */ protected Map<String, Object> getAuthenticationAttributes(final Map<String, Object> model) { final Authentication authn = getPrimaryAuthenticationFrom(model); return authn.getAttributes(); } /** * Gets an authentication attribute from the primary authentication object. * * @param model the model * @param attributeName the attribute name * @return the authentication attribute */ protected String getAuthenticationAttribute(final Map<String, Object> model, final String attributeName) { final Authentication authn = getPrimaryAuthenticationFrom(model); return (String) authn.getAttributes().get(attributeName); } /** * Gets the principal from the model. * * @param model the model * @return the assertion from * @since 4.1.0 */ protected Principal getPrincipal(final Map<String, Object> model) { return getPrimaryAuthenticationFrom(model).getPrincipal(); } /** * Gets principal attributes. * Single-valued attributes are converted to a collection * so the review can easily loop through all. 
* * @param model the model * @return the attributes * @see #convertAttributeValuesToMultiValuedObjects(java.util.Map) * @since 4.1.0 */ protected Map<String, Object> getPrincipalAttributesAsMultiValuedAttributes(final Map<String, Object> model) { return convertAttributeValuesToMultiValuedObjects(getPrincipal(model).getAttributes()); } /** * Gets authentication attributes. * Single-valued attributes are converted to a collection * so the review can easily loop through all. * * @param model the model * @return the attributes * @see #convertAttributeValuesToMultiValuedObjects(java.util.Map) * @since 4.1.0 */ protected Map<String, Object> getAuthenticationAttributesAsMultiValuedAttributes(final Map<String, Object> model) { return convertAttributeValuesToMultiValuedObjects(getPrimaryAuthenticationFrom(model).getAttributes()); } /** * Is remember me authentication? * looks at the authentication object to find {@link RememberMeCredential#AUTHENTICATION_ATTRIBUTE_REMEMBER_ME} * and expects the assertion to also note a new login session. * * @param model the model * @return true if remember-me, false if otherwise. */ protected boolean isRememberMeAuthentication(final Map<String, Object> model) { final Map<String, Object> authnAttributes = getAuthenticationAttributesAsMultiValuedAttributes(model); final Collection authnMethod = (Collection) authnAttributes.get(RememberMeCredential.AUTHENTICATION_ATTRIBUTE_REMEMBER_ME); return authnMethod != null && authnMethod.contains(Boolean.TRUE) && isAssertionBackedByNewLogin(model); } /** * Gets satisfied multifactor authentication provider. 
* * @param model the model * @return the satisfied multifactor authentication provider */ protected String getSatisfiedMultifactorAuthenticationProviderId(final Map<String, Object> model) { if (StringUtils.isNotBlank(authenticationContextAttribute) && model.containsKey(this.authenticationContextAttribute)) { return model.get(this.authenticationContextAttribute).toString(); } return null; } /** * Is assertion backed by new login? * * @param model the model * @return true/false. */ protected boolean isAssertionBackedByNewLogin(final Map<String, Object> model) { return getAssertionFrom(model).isFromNewLogin(); } /** * Convert attribute values to multi valued objects. * * @param attributes the attributes * @return the map of attributes to return */ private static Map<String, Object> convertAttributeValuesToMultiValuedObjects(final Map<String, Object> attributes) { final Set<Map.Entry<String, Object>> entries = attributes.entrySet(); return entries.stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> { final Object value = entry.getValue(); if (value instanceof Collection || value instanceof Map || value instanceof Object[] || value instanceof Iterator || value instanceof Enumeration) { return value; } return CollectionUtils.wrap(value); })); } /** * Gets authentication date. * * @param model the model * @return the authentication date * @since 4.1.0 */ protected ZonedDateTime getAuthenticationDate(final Map<String, Object> model) { return getPrimaryAuthenticationFrom(model).getAuthenticationDate(); } /** * Gets validated service from the model. * * @param model the model * @return the validated service from */ protected Service getServiceFrom(final Map<String, Object> model) { return (Service) model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_SERVICE); } /** * Gets chained authentications. * Note that the last index in the list always describes the primary authentication * event. All others in the chain should denote proxies. 
Per the CAS protocol, * when authentication has proceeded through multiple proxies, * the order in which the proxies were traversed MUST be reflected in the response. * The most recently-visited proxy MUST be the first proxy listed, and all the * other proxies MUST be shifted down as new proxies are added. * * @param model the model * @return the chained authentications */ protected Collection<Authentication> getChainedAuthentications(final Map<String, Object> model) { final Assertion assertion = getAssertionFrom(model); final List<Authentication> chainedAuthentications = assertion.getChainedAuthentications(); return chainedAuthentications.stream().limit(chainedAuthentications.size() - 1).collect(Collectors.toList()); } /** * Decide if credential password should be released as attribute. * The credential must have been cached as an authentication attribute * and the attribute release policy must be allowed to release the * attribute. * * @param attributes the attributes * @param model the model * @param service the service */ protected void decideIfCredentialPasswordShouldBeReleasedAsAttribute(final Map<String, Object> attributes, final Map<String, Object> model, final RegisteredService service) { final RegisteredServiceAttributeReleasePolicy policy = service.getAttributeReleasePolicy(); final boolean isAuthorized = policy != null && policy.isAuthorizedToReleaseCredentialPassword(); decideAttributeReleaseBasedOnServiceAttributePolicy(attributes, getAuthenticationAttribute(model, CasViewConstants.MODEL_ATTRIBUTE_NAME_PRINCIPAL_CREDENTIAL), CasViewConstants.MODEL_ATTRIBUTE_NAME_PRINCIPAL_CREDENTIAL, service, isAuthorized); } /** * Decide if PGT should be released as attribute. * The PGT must have been cached as an authentication attribute * and the attribute release policy must be allowed to release the * attribute. 
* * @param attributes the attributes * @param model the model * @param service the service */ protected void decideIfProxyGrantingTicketShouldBeReleasedAsAttribute(final Map<String, Object> attributes, final Map<String, Object> model, final RegisteredService service) { final RegisteredServiceAttributeReleasePolicy policy = service.getAttributeReleasePolicy(); final boolean isAuthorized = policy != null && policy.isAuthorizedToReleaseProxyGrantingTicket(); decideAttributeReleaseBasedOnServiceAttributePolicy(attributes, getProxyGrantingTicketId(model), CasViewConstants.MODEL_ATTRIBUTE_NAME_PROXY_GRANTING_TICKET, service, isAuthorized); } /** * Decide attribute release based on service attribute policy. * * @param attributes the attributes * @param attributeValue the attribute value * @param attributeName the attribute name * @param service the service * @param doesAttributePolicyAllow does attribute policy allow release of this attribute? */ protected void decideAttributeReleaseBasedOnServiceAttributePolicy(final Map<String, Object> attributes, final String attributeValue, final String attributeName, final RegisteredService service, final boolean doesAttributePolicyAllow) { if (StringUtils.isNotBlank(attributeValue)) { LOGGER.debug("Obtained [{}] as an authentication attribute", attributeName); if (doesAttributePolicyAllow) { LOGGER.debug("Obtained [{}] is passed to the CAS validation payload", attributeName); attributes.put(attributeName, CollectionUtils.wrap(attributeValue)); } else { LOGGER.debug("Attribute release policy for [{}] does not authorize the release of [{}]", service.getServiceId(), attributeName); attributes.remove(attributeName); } } else { LOGGER.trace("[{}] is not available and will not be released to the validation response.", attributeName); } } /** * Put into model. 
* * @param model the model * @param key the key * @param value the value */ protected void putIntoModel(final Map<String, Object> model, final String key, final Object value) { model.put(key, value); } /** * Put all into model. * * @param model the model * @param values the values */ protected void putAllIntoModel(final Map<String, Object> model, final Map<String, Object> values) { model.putAll(values); } public ProtocolAttributeEncoder getProtocolAttributeEncoder() { return this.protocolAttributeEncoder; } public ServicesManager getServicesManager() { return this.servicesManager; } public String getAuthenticationContextAttribute() { return authenticationContextAttribute; } }
/**
 * Copyright 2014-2019 Riccardo Massera (TheCoder4.Eu), Dario D'Urzo and Stephan Rauh (http://www.beyondjava.net).
 *
 * This file is part of BootsFaces.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.bootsfaces.component.dateTimePicker;

import javax.faces.context.FacesContext;
import javax.faces.event.AbortProcessingException;
import javax.faces.event.ComponentSystemEvent;
import javax.faces.event.ListenerFor;
import javax.faces.event.ListenersFor;
import javax.faces.event.PostAddToViewEvent;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import javax.el.ValueExpression;
import javax.faces.component.FacesComponent;
import javax.faces.component.UIComponent;
// NOTE(review): duplicate import of javax.faces.context.FacesContext (also imported
// above) — legal Java, but could be removed in a future cleanup.
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.ConverterException;

import net.bootsfaces.C;
import net.bootsfaces.component.ajax.IAJAXComponent;
import net.bootsfaces.component.ajax.IAJAXComponent2;
import net.bootsfaces.listeners.AddResourcesListener;
import net.bootsfaces.render.IContentDisabled;
import net.bootsfaces.render.IResponsive;
import net.bootsfaces.render.IResponsiveLabel;
import net.bootsfaces.render.Tooltip;
import net.bootsfaces.utils.BsfUtils;
import net.bootsfaces.utils.LocaleUtils;

/** This class holds the attributes of &lt;b:dateTimePicker /&gt;. */
@ListenersFor({ @ListenerFor(systemEventClass = PostAddToViewEvent.class) })
@FacesComponent(DateTimePicker.COMPONENT_TYPE)
public class DateTimePicker extends DateTimePickerCore
		implements net.bootsfaces.render.IHasTooltip, IResponsive, IAJAXComponent, IAJAXComponent2, IResponsiveLabel {

	public static final String COMPONENT_TYPE = C.BSFCOMPONENT + ".dateTimePicker.DateTimePicker";

	public static final String COMPONENT_FAMILY = C.BSFCOMPONENT;

	public static final String DEFAULT_RENDERER = "net.bootsfaces.component.dateTimePicker.DateTimePicker";

	// DOM events this component exposes to f:ajax / b:ajax.
	private static final Collection<String> EVENT_NAMES = Collections
			.unmodifiableCollection(Arrays.asList("blur", "change", "click", "dblclick", "focus", "keydown",
					"keypress", "keyup", "mousedown", "mousemove", "mouseout", "mouseover", "mouseup", "select"));

	public DateTimePicker() {
		// Register the CSS/JS resources the datetimepicker widget needs;
		// moment.js must be loaded, and the picker script must come after jQuery.
		Tooltip.addResourceFiles();
		AddResourcesListener.addExtCSSResource("bootstrap-datetimepicker.min.css");
		AddResourcesListener.addBasicJSResource(C.BSF_LIBRARY, "js/moment-with-locales.min.js");
		AddResourcesListener.addResourceToHeadButAfterJQuery(C.BSF_LIBRARY, "js/bootstrap-datetimepicker.min.js");
		setRendererType(DEFAULT_RENDERER);
	}

	// PostAddToViewEvent hook: when auto-update is on, re-render this component
	// on every postback by adding its client id to the partial render ids.
	public void processEvent(ComponentSystemEvent event) throws AbortProcessingException {
		if (isAutoUpdate()) {
			if (FacesContext.getCurrentInstance().isPostback()) {
				FacesContext.getCurrentInstance().getPartialViewContext().getRenderIds().add(getClientId());
			}
			super.processEvent(event);
		}
	}

	public String getFamily() {
		return COMPONENT_FAMILY;
	}

	// Maps the logical "dtchange" event to the widget's "dp.change" jQuery event.
	@Override
	public Map<String, String> getJQueryEvents() {
		Map<String, String> result = new HashMap<String, String>();
		result.put("dtchange", "dp.change");
		return result;
	}

	/**
	 * Returns the subset of the parameter list of jQuery and other non-standard JS
	 * callbacks which is sent to the server via AJAX. If there's no parameter list
	 * for a certain event, the default is simply null.
	 *
	 * @return A hash map containing the events. May be null.
	 */
	@Override
	public Map<String, String> getJQueryEventParameterListsForAjax() {
		return null;
	}

	/**
	 * Returns the parameter list of jQuery and other non-standard JS callbacks. If
	 * there's no parameter list for a certain event, the default is simply "event".
	 *
	 * @return A hash map containing the events. May be null.
	 */
	@Override
	public Map<String, String> getJQueryEventParameterLists() {
		return null;
	}

	public Collection<String> getEventNames() {
		return EVENT_NAMES;
	}

	public String getDefaultEventName() {
		return "click";
	}

	/**
	 * Manage EL-expression for snake-case attributes
	 */
	public void setValueExpression(String name, ValueExpression binding) {
		name = BsfUtils.snakeCaseToCamelCase(name);
		super.setValueExpression(name, binding);
	}

	/**
	 * Converts the date from the moment.js format to a java.util.Date.
	 * Order of precedence: null/blank returns null; a user-supplied converter
	 * wins; otherwise the moment.js format is translated to a SimpleDateFormat
	 * pattern, with an auto-parser fallback before reporting a conversion error.
	 */
	@Override
	public Object getConvertedValue(FacesContext context, Object submittedValue) throws ConverterException {
		if (submittedValue == null) {
			return null;
		}
		String val = (String) submittedValue;
		// If the Trimmed submitted value is empty, return null
		if (val.trim().length() == 0) {
			return null;
		}
		Converter converter = this.getConverter();
		// If the user supplied a converter, use it
		if (converter != null) {
			return converter.getAsObject(context, this, val);
		}
		// Else we use our own converter
		Locale sloc = BsfUtils.selectLocale(context.getViewRoot().getLocale(), this.getLocale(), this);
		String momentJSFormat = BsfUtils.selectMomentJSDateTimeFormat(sloc, this.getFormat(), this.isShowDate(),
				this.isShowTime());
		String javaFormat = LocaleUtils.momentToJavaFormat(momentJSFormat);

		Calendar cal = Calendar.getInstance(sloc);
		SimpleDateFormat format = new SimpleDateFormat(javaFormat, sloc);
		format.setTimeZone(cal.getTimeZone());
		try {
			cal.setTime(format.parse(val));
			return cal.getTime();
		} catch (ParseException e) {
			// FIRST STEP GONE: TRY THE AUTO-PARSER
			try {
				cal.setTime(LocaleUtils.autoParseDateFormat(val));
				return cal.getTime();
			} catch (Exception pe) {
				// Both parsers failed: mark the component invalid and raise a
				// converter exception with the standard JSF date-conversion message.
				this.setValid(false);
				throw new ConverterException(BsfUtils.getMessage("javax.faces.converter.DateTimeConverter.DATE", val,
						javaFormat, BsfUtils.getLabel(context, this)));
			}
		}
	}

	/**
	 * Boolean value to specify if the widget is disabled.
	 * <P>
	 * The component is also considered disabled when any ancestor implementing
	 * IContentDisabled has content-disabled set.
	 *
	 * @return Returns the value of the attribute, or false, if it hasn't been set
	 *         by the JSF file.
	 */
	public boolean isDisabled() {
		if (super.isDisabled())
			return true;
		UIComponent ancestor = getParent();
		while (ancestor != null) {
			if (ancestor instanceof IContentDisabled) {
				if (((IContentDisabled) ancestor).isContentDisabled()) {
					return true;
				}
			}
			ancestor = ancestor.getParent();
		}
		return false;
	}

	// Icon setters: each registers the Font Awesome resource (version 5 for the
	// brand/regular/light/solid variants) before delegating to the generated core.
	public void setIconAwesome(String _iconAwesome) {
		AddResourcesListener.setNeedsFontsAwesome(this);
		super.setIconAwesome(_iconAwesome);
	}

	@Override
	public void setIconBrand(boolean _iconBrand) {
		if (_iconBrand) {
			AddResourcesListener.setFontAwesomeVersion(5, this);
		}
		super.setIconBrand(_iconBrand);
	}

	@Override
	public void setIconRegular(boolean _iconRegular) {
		if (_iconRegular) {
			AddResourcesListener.setFontAwesomeVersion(5, this);
		}
		super.setIconRegular(_iconRegular);
	}

	@Override
	public void setIconLight(boolean _iconLight) {
		if (_iconLight) {
			AddResourcesListener.setFontAwesomeVersion(5, this);
		}
		super.setIconLight(_iconLight);
	}

	@Override
	public void setIconSolid(boolean _iconSolid) {
		if (_iconSolid) {
			AddResourcesListener.setFontAwesomeVersion(5, this);
		}
		super.setIconSolid(_iconSolid);
	}
}
/*
 * Copyright 2015 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.codec.impl;

import java.util.EnumMap;

import org.onosproject.codec.CodecContext;
import org.onosproject.net.OchSignal;
import org.onosproject.net.flow.criteria.Criterion;
import org.onosproject.net.flow.criteria.EthCriterion;
import org.onosproject.net.flow.criteria.EthTypeCriterion;
import org.onosproject.net.flow.criteria.IPCriterion;
import org.onosproject.net.flow.criteria.IPDscpCriterion;
import org.onosproject.net.flow.criteria.IPEcnCriterion;
import org.onosproject.net.flow.criteria.IPProtocolCriterion;
import org.onosproject.net.flow.criteria.IPv6ExthdrFlagsCriterion;
import org.onosproject.net.flow.criteria.IPv6FlowLabelCriterion;
import org.onosproject.net.flow.criteria.IPv6NDLinkLayerAddressCriterion;
import org.onosproject.net.flow.criteria.IPv6NDTargetAddressCriterion;
import org.onosproject.net.flow.criteria.IcmpCodeCriterion;
import org.onosproject.net.flow.criteria.IcmpTypeCriterion;
import org.onosproject.net.flow.criteria.Icmpv6CodeCriterion;
import org.onosproject.net.flow.criteria.Icmpv6TypeCriterion;
import org.onosproject.net.flow.criteria.MetadataCriterion;
import org.onosproject.net.flow.criteria.MplsCriterion;
import org.onosproject.net.flow.criteria.OchSignalCriterion;
import org.onosproject.net.flow.criteria.OchSignalTypeCriterion;
import org.onosproject.net.flow.criteria.OduSignalIdCriterion;
import org.onosproject.net.flow.criteria.OduSignalTypeCriterion;
import org.onosproject.net.flow.criteria.PortCriterion;
import org.onosproject.net.flow.criteria.SctpPortCriterion;
import org.onosproject.net.flow.criteria.TcpPortCriterion;
import org.onosproject.net.flow.criteria.TunnelIdCriterion;
import org.onosproject.net.flow.criteria.UdpPortCriterion;
import org.onosproject.net.flow.criteria.VlanIdCriterion;
import org.onosproject.net.flow.criteria.VlanPcpCriterion;

import com.fasterxml.jackson.databind.node.ObjectNode;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Encode portion of the criterion codec.
 * Dispatches on {@link Criterion.Type} through an EnumMap of per-type
 * formatters; each formatter writes that criterion's fields onto a JSON node.
 */
public final class EncodeCriterionCodecHelper {

    private final Criterion criterion;
    private final CodecContext context;

    // One formatter per criterion type, populated in the constructor.
    private final EnumMap<Criterion.Type, CriterionTypeFormatter> formatMap;

    /**
     * Creates an encoder object for a criterion.
     * Initializes the formatter lookup map for the criterion subclasses.
     *
     * @param criterion Criterion to encode
     * @param context context of the JSON encoding
     */
    public EncodeCriterionCodecHelper(Criterion criterion, CodecContext context) {
        this.criterion = criterion;
        this.context = context;

        formatMap = new EnumMap<>(Criterion.Type.class);

        formatMap.put(Criterion.Type.IN_PORT, new FormatInPort());
        formatMap.put(Criterion.Type.IN_PHY_PORT, new FormatInPort());
        formatMap.put(Criterion.Type.METADATA, new FormatMetadata());
        formatMap.put(Criterion.Type.ETH_DST, new FormatEth());
        formatMap.put(Criterion.Type.ETH_SRC, new FormatEth());
        formatMap.put(Criterion.Type.ETH_TYPE, new FormatEthType());
        formatMap.put(Criterion.Type.VLAN_VID, new FormatVlanVid());
        formatMap.put(Criterion.Type.VLAN_PCP, new FormatVlanPcp());
        formatMap.put(Criterion.Type.IP_DSCP, new FormatIpDscp());
        formatMap.put(Criterion.Type.IP_ECN, new FormatIpEcn());
        formatMap.put(Criterion.Type.IP_PROTO, new FormatIpProto());
        formatMap.put(Criterion.Type.IPV4_SRC, new FormatIp());
        formatMap.put(Criterion.Type.IPV4_DST, new FormatIp());
        formatMap.put(Criterion.Type.TCP_SRC, new FormatTcp());
        formatMap.put(Criterion.Type.TCP_DST, new FormatTcp());
        formatMap.put(Criterion.Type.UDP_SRC, new FormatUdp());
        formatMap.put(Criterion.Type.UDP_DST, new FormatUdp());
        formatMap.put(Criterion.Type.SCTP_SRC, new FormatSctp());
        formatMap.put(Criterion.Type.SCTP_DST, new FormatSctp());
        formatMap.put(Criterion.Type.ICMPV4_TYPE, new FormatIcmpV4Type());
        formatMap.put(Criterion.Type.ICMPV4_CODE, new FormatIcmpV4Code());
        formatMap.put(Criterion.Type.IPV6_SRC, new FormatIp());
        formatMap.put(Criterion.Type.IPV6_DST, new FormatIp());
        formatMap.put(Criterion.Type.IPV6_FLABEL, new FormatIpV6FLabel());
        formatMap.put(Criterion.Type.ICMPV6_TYPE, new FormatIcmpV6Type());
        formatMap.put(Criterion.Type.ICMPV6_CODE, new FormatIcmpV6Code());
        formatMap.put(Criterion.Type.IPV6_ND_TARGET, new FormatV6NDTarget());
        formatMap.put(Criterion.Type.IPV6_ND_SLL, new FormatV6NDTll());
        formatMap.put(Criterion.Type.IPV6_ND_TLL, new FormatV6NDTll());
        formatMap.put(Criterion.Type.MPLS_LABEL, new FormatMplsLabel());
        formatMap.put(Criterion.Type.IPV6_EXTHDR, new FormatIpV6Exthdr());
        formatMap.put(Criterion.Type.OCH_SIGID, new FormatOchSigId());
        formatMap.put(Criterion.Type.OCH_SIGTYPE, new FormatOchSigType());
        formatMap.put(Criterion.Type.TUNNEL_ID, new FormatTunnelId());
        formatMap.put(Criterion.Type.DUMMY, new FormatDummyType());
        formatMap.put(Criterion.Type.ODU_SIGID, new FormatOduSignalId());
        formatMap.put(Criterion.Type.ODU_SIGTYPE, new FormatOduSignalType());
        // Currently unimplemented
        formatMap.put(Criterion.Type.ARP_OP, new FormatUnknown());
        formatMap.put(Criterion.Type.ARP_SPA, new FormatUnknown());
        formatMap.put(Criterion.Type.ARP_TPA, new FormatUnknown());
        formatMap.put(Criterion.Type.ARP_SHA, new FormatUnknown());
        formatMap.put(Criterion.Type.ARP_THA, new FormatUnknown());
        formatMap.put(Criterion.Type.MPLS_TC, new FormatUnknown());
        formatMap.put(Criterion.Type.MPLS_BOS, new FormatUnknown());
        formatMap.put(Criterion.Type.PBB_ISID, new FormatUnknown());
        formatMap.put(Criterion.Type.UNASSIGNED_40, new FormatUnknown());
        formatMap.put(Criterion.Type.PBB_UCA, new FormatUnknown());
        formatMap.put(Criterion.Type.TCP_FLAGS, new FormatUnknown());
        formatMap.put(Criterion.Type.ACTSET_OUTPUT, new FormatUnknown());
        formatMap.put(Criterion.Type.PACKET_TYPE, new FormatUnknown());
    }

    // Strategy interface: each implementation writes one criterion type's
    // fields onto the given JSON object and returns it.
    private interface CriterionTypeFormatter {
        ObjectNode encodeCriterion(ObjectNode root, Criterion criterion);
    }

    // Fallback for unimplemented criterion types: emits no extra fields.
    private static class FormatUnknown implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            return root;
        }
    }

    private static class FormatInPort implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final PortCriterion portCriterion = (PortCriterion) criterion;
            return root.put(CriterionCodec.PORT, portCriterion.port().toLong());
        }
    }

    private static class FormatMetadata implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final MetadataCriterion metadataCriterion = (MetadataCriterion) criterion;
            return root.put(CriterionCodec.METADATA, metadataCriterion.metadata());
        }
    }

    // Shared by ETH_SRC and ETH_DST.
    private static class FormatEth implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final EthCriterion ethCriterion = (EthCriterion) criterion;
            return root.put(CriterionCodec.MAC, ethCriterion.mac().toString());
        }
    }

    private static class FormatEthType implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final EthTypeCriterion ethTypeCriterion = (EthTypeCriterion) criterion;
            return root.put(CriterionCodec.ETH_TYPE, ethTypeCriterion.ethType().toShort());
        }
    }

    private static class FormatVlanVid implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final VlanIdCriterion vlanIdCriterion = (VlanIdCriterion) criterion;
            return root.put(CriterionCodec.VLAN_ID, vlanIdCriterion.vlanId().toShort());
        }
    }

    private static class FormatVlanPcp implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final VlanPcpCriterion vlanPcpCriterion = (VlanPcpCriterion) criterion;
            return root.put(CriterionCodec.PRIORITY, vlanPcpCriterion.priority());
        }
    }

    private static class FormatIpDscp implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPDscpCriterion ipDscpCriterion = (IPDscpCriterion) criterion;
            return root.put(CriterionCodec.IP_DSCP, ipDscpCriterion.ipDscp());
        }
    }

    private static class FormatIpEcn implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPEcnCriterion ipEcnCriterion = (IPEcnCriterion) criterion;
            return root.put(CriterionCodec.IP_ECN, ipEcnCriterion.ipEcn());
        }
    }

    private static class FormatIpProto implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPProtocolCriterion iPProtocolCriterion = (IPProtocolCriterion) criterion;
            return root.put(CriterionCodec.PROTOCOL, iPProtocolCriterion.protocol());
        }
    }

    // Shared by IPV4_SRC/IPV4_DST/IPV6_SRC/IPV6_DST.
    private static class FormatIp implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPCriterion iPCriterion = (IPCriterion) criterion;
            return root.put(CriterionCodec.IP, iPCriterion.ip().toString());
        }
    }

    private static class FormatTcp implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final TcpPortCriterion tcpPortCriterion = (TcpPortCriterion) criterion;
            return root.put(CriterionCodec.TCP_PORT, tcpPortCriterion.tcpPort().toInt());
        }
    }

    private static class FormatUdp implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final UdpPortCriterion udpPortCriterion = (UdpPortCriterion) criterion;
            return root.put(CriterionCodec.UDP_PORT, udpPortCriterion.udpPort().toInt());
        }
    }

    private static class FormatSctp implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final SctpPortCriterion sctpPortCriterion = (SctpPortCriterion) criterion;
            return root.put(CriterionCodec.SCTP_PORT, sctpPortCriterion.sctpPort().toInt());
        }
    }

    private static class FormatIcmpV4Type implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IcmpTypeCriterion icmpTypeCriterion = (IcmpTypeCriterion) criterion;
            return root.put(CriterionCodec.ICMP_TYPE, icmpTypeCriterion.icmpType());
        }
    }

    private static class FormatIcmpV4Code implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IcmpCodeCriterion icmpCodeCriterion = (IcmpCodeCriterion) criterion;
            return root.put(CriterionCodec.ICMP_CODE, icmpCodeCriterion.icmpCode());
        }
    }

    private static class FormatIpV6FLabel implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPv6FlowLabelCriterion ipv6FlowLabelCriterion = (IPv6FlowLabelCriterion) criterion;
            return root.put(CriterionCodec.FLOW_LABEL, ipv6FlowLabelCriterion.flowLabel());
        }
    }

    private static class FormatIcmpV6Type implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final Icmpv6TypeCriterion icmpv6TypeCriterion = (Icmpv6TypeCriterion) criterion;
            return root.put(CriterionCodec.ICMPV6_TYPE, icmpv6TypeCriterion.icmpv6Type());
        }
    }

    private static class FormatIcmpV6Code implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final Icmpv6CodeCriterion icmpv6CodeCriterion = (Icmpv6CodeCriterion) criterion;
            return root.put(CriterionCodec.ICMPV6_CODE, icmpv6CodeCriterion.icmpv6Code());
        }
    }

    private static class FormatV6NDTarget implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPv6NDTargetAddressCriterion ipv6NDTargetAddressCriterion
                    = (IPv6NDTargetAddressCriterion) criterion;
            return root.put(CriterionCodec.TARGET_ADDRESS, ipv6NDTargetAddressCriterion.targetAddress().toString());
        }
    }

    // Shared by IPV6_ND_SLL and IPV6_ND_TLL (both carry a link-layer address).
    private static class FormatV6NDTll implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPv6NDLinkLayerAddressCriterion ipv6NDLinkLayerAddressCriterion
                    = (IPv6NDLinkLayerAddressCriterion) criterion;
            return root.put(CriterionCodec.MAC, ipv6NDLinkLayerAddressCriterion.mac().toString());
        }
    }

    private static class FormatMplsLabel implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final MplsCriterion mplsCriterion = (MplsCriterion) criterion;
            return root.put(CriterionCodec.LABEL, mplsCriterion.label().toInt());
        }
    }

    private static class FormatIpV6Exthdr implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final IPv6ExthdrFlagsCriterion exthdrCriterion = (IPv6ExthdrFlagsCriterion) criterion;
            return root.put(CriterionCodec.EXT_HDR_FLAGS, exthdrCriterion.exthdrFlags());
        }
    }

    private static class FormatOchSigId implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            // Och signal id is a structured value: emitted as a nested JSON object.
            OchSignal ochSignal = ((OchSignalCriterion) criterion).lambda();
            ObjectNode child = root.putObject(CriterionCodec.OCH_SIGNAL_ID);

            child.put(CriterionCodec.GRID_TYPE, ochSignal.gridType().name());
            child.put(CriterionCodec.CHANNEL_SPACING, ochSignal.channelSpacing().name());
            // NOTE(review): SPACING_MULIPLIER is the constant's spelling as declared
            // in CriterionCodec; renaming it would have to happen there first.
            child.put(CriterionCodec.SPACING_MULIPLIER, ochSignal.spacingMultiplier());
            child.put(CriterionCodec.SLOT_GRANULARITY, ochSignal.slotGranularity());

            return root;
        }
    }

    private static class FormatOchSigType implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final OchSignalTypeCriterion ochSignalTypeCriterion = (OchSignalTypeCriterion) criterion;
            return root.put(CriterionCodec.OCH_SIGNAL_TYPE, ochSignalTypeCriterion.signalType().name());
        }
    }

    private static class FormatTunnelId implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final TunnelIdCriterion tunnelIdCriterion = (TunnelIdCriterion) criterion;
            return root.put(CriterionCodec.TUNNEL_ID, tunnelIdCriterion.tunnelId());
        }
    }

    private static class FormatOduSignalId implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final OduSignalIdCriterion oduSignalIdCriterion = (OduSignalIdCriterion) criterion;
            return root.put(CriterionCodec.ODU_SIGNAL_ID, oduSignalIdCriterion.oduSignalId().toString());
        }
    }

    private static class FormatOduSignalType implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            final OduSignalTypeCriterion oduSignalTypeCriterion = (OduSignalTypeCriterion) criterion;
            return root.put(CriterionCodec.ODU_SIGNAL_TYPE, oduSignalTypeCriterion.signalType().name());
        }
    }

    // Non-static (unlike the other formatters): kept as declared upstream.
    private class FormatDummyType implements CriterionTypeFormatter {
        @Override
        public ObjectNode encodeCriterion(ObjectNode root, Criterion criterion) {
            checkNotNull(criterion, "Criterion cannot be null");
            return root.put(CriterionCodec.TYPE, criterion.type().toString());
        }
    }

    /**
     * Encodes a criterion into a JSON node.
* * @return encoded JSON object for the given criterion */ public ObjectNode encode() { final ObjectNode result = context.mapper().createObjectNode() .put(CriterionCodec.TYPE, criterion.type().toString()); CriterionTypeFormatter formatter = checkNotNull( formatMap.get(criterion.type()), "No formatter found for criterion type " + criterion.type().toString()); return formatter.encodeCriterion(result, criterion); } }
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.demo.travel.account; import org.kuali.rice.testtools.selenium.WebDriverLegacyITBase; import org.openqa.selenium.By; import org.apache.commons.lang.RandomStringUtils; import org.junit.Test; /** * @author Kuali Rice Team (rice.collab@kuali.org) */ public class DemoTravelAccountMaintenanceEditAft extends WebDriverLegacyITBase { /** * /kr-krad/maintenance?methodToCall=maintenanceEdit&number=a14&dataObjectClassName=org.kuali.rice.krad.demo.travel.dataobject.TravelAccount&hideReturnLink=true */ public static final String BOOKMARK_URL = "/kr-krad/maintenance?methodToCall=maintenanceEdit&number=a14&dataObjectClassName=org.kuali.rice.krad.demo.travel.dataobject.TravelAccount&hideReturnLink=true"; /** * Description field */ public static final String DESCRIPTION_FIELD = "document.documentHeader.documentDescription"; /** * Explanation field */ public static final String EXPLANATION_FIELD = "document.documentHeader.explanation"; /** * Organization document number field */ public static final String ORGANIZATION_DOCUMENT_NUMBER_FIELD = "document.documentHeader.organizationDocumentNumber"; /** * Travel sub account field */ public static final String SUB_ACCOUNT_FIELD = "newCollectionLines['document.newMaintainableObject.dataObject.subAccounts'].subAccount"; /** * Travel sub account name field */ public static final String SUB_ACCOUNT_NAME_FIELD = 
"newCollectionLines['document.newMaintainableObject.dataObject.subAccounts'].subAccountName"; /** * Subsidized percent */ public static final String SUBSIDIZED_PERCENT_FIELD = "document.newMaintainableObject.dataObject.subsidizedPercent"; /** * Date created. */ public static final String DATE_CREATED_FIELD = "document.newMaintainableObject.dataObject.createDate"; /** * Fiscal officer ID */ public static final String FISCAL_OFFICER_ID_FIELD = "document.newMaintainableObject.dataObject.foId"; @Override public String getBookmarkUrl() { return BOOKMARK_URL; } protected void navigate() throws Exception { waitAndClickDemoLink(); waitAndClickByLinkText("Travel Account Maintenance (Edit)"); } protected void testTravelAccountMaintenanceEdit() throws Exception { waitAndTypeByName("document.documentHeader.documentDescription", "Travel Account Edit"+RandomStringUtils.randomAlphabetic(2)); // Verify that adding a duplicate Sub Account is not allowed. String subAccountDuplicate = "A"; waitAndTypeByName(SUB_ACCOUNT_FIELD, subAccountDuplicate); waitAndTypeByName("newCollectionLines['document.newMaintainableObject.dataObject.subAccounts'].subAccountName", "Sub Account 1"+RandomStringUtils.randomAlphabetic(2)); waitAndClickButtonByText("Add"); waitForProgressAddingLine(); String errorMessage []={"Duplicate Sub Accounts (Travel Sub Account Number) are not allowed."}; assertTextPresent(errorMessage); // Verify that adding a duplicate Sub Account and Sub Account Name is not allowed. 
clearTextByName(SUB_ACCOUNT_FIELD); waitAndTypeByName(SUB_ACCOUNT_FIELD, subAccountDuplicate); clearTextByName("newCollectionLines['document.newMaintainableObject.dataObject.subAccounts'].subAccountName"); waitAndTypeByName("newCollectionLines['document.newMaintainableObject.dataObject.subAccounts'].subAccountName", "Sub Account A"); waitAndClickButtonByText("Add"); waitForProgressAddingLine(); String errorMessage2 []={"Duplicate Sub Accounts (Travel Sub Account Number) are not allowed."}; assertTextPresent(errorMessage2); //Check for LookUp search waitAndClickByXpath("//button[@class='btn btn-default uif-action icon-search']"); gotoLightBox(); waitAndClickSearchByText(); waitAndClickReturnValue(); waitAndClickByXpath("//a/span[contains(text(),'Ad Hoc Recipients')]"); waitAndClickByXpath("//div[@data-parent='Uif-AdHocPersonCollection']/div/div/button[@class='btn btn-default uif-action icon-search']"); gotoLightBox(); waitAndClickSearchByText(); waitAndClickReturnValue(); waitAndClickByXpath("//div[@data-parent='CollectionGroup_AdHocWorkgroup']/div/div/button[@class='btn btn-default uif-action icon-search']"); gotoLightBox(); waitAndClickSearchByText(); waitAndClickReturnValue(); // Add a new sub account String subAccount = "Z1" + RandomStringUtils.randomAlphabetic(2); clearTextByName(SUB_ACCOUNT_FIELD); waitAndTypeByName(SUB_ACCOUNT_FIELD, subAccount); clearTextByName("newCollectionLines['document.newMaintainableObject.dataObject.subAccounts'].subAccountName"); waitAndTypeByName("newCollectionLines['document.newMaintainableObject.dataObject.subAccounts'].subAccountName", "Sub Account 1"+RandomStringUtils.randomAlphabetic(2)); waitForElementPresentByXpath("//input[@name='document.newMaintainableObject.dataObject.number' and @value='a14']"); waitForElementPresentByXpath("//input[@name='document.newMaintainableObject.dataObject.name' and @value='Travel Account 14']"); waitForElementPresentByXpath("//input[@name='document.newMaintainableObject.dataObject.foId' and 
@value='fran']"); waitAndClickButtonByText("Add"); waitForProgressAddingLine(); waitForElementPresentByXpath("//a[contains(text(),subAccount)]"); saveSuccessfully(); assertTextPresent("SAVED"); submitSuccessfully(); waitAndClickButtonByText("Reload"); waitForProgressLoading(); checkForDocErrorKrad(); waitForTextPresent("Document was successfully reloaded."); assertTextPresent("FINAL"); } protected void testTravelAccountMaintenanceEditBlanketApprove() throws Exception { waitAndTypeByName("document.documentHeader.documentDescription", "Travel Account Edit"+RandomStringUtils.randomAlphabetic(2)); clearTextByName("document.newMaintainableObject.dataObject.subsidizedPercent"); waitAndTypeByName("document.newMaintainableObject.dataObject.subsidizedPercent", "42"); clickBlanketApproveAndEditDocAgain(); if(!isElementPresentByXpath("//input[@name='document.newMaintainableObject.dataObject.subsidizedPercent' and @value='42']")) { jiraAwareFail("BlanketApprove was not successful. subsidizedPercent should be 42"); } waitAndTypeByName("document.documentHeader.documentDescription", "Travel Account Edit" + RandomStringUtils.randomAlphabetic(2)); clearTextByName("document.newMaintainableObject.dataObject.subsidizedPercent"); waitAndClickBlanketApprove(); waitAndClickConfirmBlanketApproveOk(); // waitAndClickByXpath("/html/body/form/div/div[2]/main/div/section[1]/div/div/div[2]/button[2]"); acceptAlertIfPresent(); } protected void testTravelAccountMaintenanceEditBlanketApproveAdHocComplete() throws Exception { waitAndTypeByName("document.documentHeader.documentDescription", "Travel Account Edit" + RandomStringUtils.randomAlphabetic(2)); //add add hoc user with complete action waitAndClickByLinkText("Ad Hoc Recipients"); waitAndSelectByName("newCollectionLines['document.adHocRoutePersons'].actionRequested", "COMPLETE"); waitAndTypeByName("newCollectionLines['document.adHocRoutePersons'].id", "dev1"); waitAndClickById("Uif-AdHocPersonCollection_add"); //verify blanket approve fails 
with correct message waitAndClickBlanketApprove(); waitAndClickConfirmBlanketApproveOk(); waitForTextPresent("Blanket Approve cannot be performed when adhoc route for completion request is newly added"); //remove add hoc user waitAndClickById("Uif-AdHocPersonCollection_del_line0"); //add add hoc group with complete action waitAndSelectByName("newCollectionLines['document.adHocRouteWorkgroups'].actionRequested", "COMPLETE"); waitAndTypeByName("newCollectionLines['document.adHocRouteWorkgroups'].recipientName", "Kuali Developers"); waitAndClickById("CollectionGroup_AdHocWorkgroup_add"); waitAndClickBlanketApprove(); //verify blanket approve fails with correct message waitAndClickBlanketApprove(); waitAndClickConfirmBlanketApproveOk(); waitForTextPresent("Blanket Approve cannot be performed when adhoc route for completion request is newly added"); } protected void testTravelAccountMaintenanceEditXss() throws Exception { waitAndTypeByName(DESCRIPTION_FIELD,"\"/><script>alert('!')</script>"); waitAndTypeByName(EXPLANATION_FIELD,"\"/><script>alert('!')</script>"); waitAndTypeByName(ORGANIZATION_DOCUMENT_NUMBER_FIELD,"\"/><script>alert('!')</script>"); waitAndTypeByName(SUB_ACCOUNT_FIELD,"blah"); waitAndTypeByName(SUB_ACCOUNT_NAME_FIELD,"\"/><script>alert('!')</script>"); waitAndTypeByName(SUBSIDIZED_PERCENT_FIELD,"\"/><script>alert('!')</script>"); // waitAndTypeByName(DATE_CREATED_FIELD,"\"/><script>alert('!')</script>"); // no longer an input field // waitAndTypeByName(FISCAL_OFFICER_ID_FIELD,"\"/><script>alert('!')</script>"); waitAndClickSaveByText(); Thread.sleep(1000); if(isAlertPresent()) { fail("XSS vulnerability identified."); } } protected boolean isAlertPresent() { try { driver.switchTo().alert(); return true; } // try catch (Exception Ex) { return false; } // catch } protected void testEditFiscalOfficer() throws Exception { String currentFiscalOfficer = findElement(By.name("document.newMaintainableObject.dataObject.foId")).getAttribute("value"); String 
newFiscalOfficer = "fran"; if ("fran".equals(currentFiscalOfficer)) { newFiscalOfficer = "eric"; } checkForRequiredFields(); changeFiscalOfficer(newFiscalOfficer); // change eric back to fran changeFiscalOfficer(currentFiscalOfficer); } protected void testSubAccountOperations() throws Exception { waitForElementNotPresent(By.xpath("//button[contains(text(),'Delete')]")); waitAndTypeLabeledInput("Travel Sub Account Number:", "A"); waitAndTypeLabeledInput("Sub Account Name:", "Sub Account A"); waitAndClickButtonByExactText("Add"); waitForTextPresent("Duplicate Sub Accounts (Travel Sub Account Number) are not allowed."); } private void changeFiscalOfficer(String newUser) throws Exception { waitAndTypeByName("document.documentHeader.documentDescription", "Edit Fiscal Officer to " + newUser + " " + RandomStringUtils.randomAlphabetic(2)); clearTextByName("document.newMaintainableObject.dataObject.foId"); waitAndTypeByName("document.newMaintainableObject.dataObject.foId", newUser); clickBlanketApproveAndEditDocAgain(); if(!isElementPresentByXpath("//input[@name='document.newMaintainableObject.dataObject.foId' and @value='" + newUser + "']")) { jiraAwareFail("Fiscal Officer Not Changed to " + newUser); } } private void checkForRequiredFields() throws Exception{ waitForElementPresentByXpath("//label[contains(text(),'Description')]/span[contains(text(),'*')]"); waitForElementPresentByXpath("//label[contains(text(),'Travel Account Number:')]/span[contains(text(),'*')]"); waitForElementPresentByXpath("//label[contains(text(),'Travel Account Name:')]/span[contains(text(),'*')]"); waitForElementPresentByXpath("//label[contains(text(),'Travel Account Type Code:')]/span[contains(text(),'*')]"); waitForElementPresentByXpath("//label[contains(text(),'Date Created:')]/span[contains(text(),'*')]"); waitForElementPresentByXpath("//label[contains(text(),'Travel Sub Account Number:')]/span[contains(text(),'*')]"); waitForElementPresentByXpath("//label[contains(text(),'Sub Account 
Name:')]/span[contains(text(),'*')]"); jGrowl("Verify required messages are displayed"); clearTextByName("document.newMaintainableObject.dataObject.name"); waitAndClickSubmitByText(); waitForElementPresentByXpath("//h3[@id='pageValidationHeader' and contains(text(),'This page has 2 errors')]"); waitForElementPresentByXpath("//ul[@id='pageValidationList']/li/a[contains(text(),'Document Overview: 1 error')]"); waitForElementPresentByXpath("//ul[@id='pageValidationList']/li/a[contains(text(),'Account Information: 1 error')]"); waitForElementPresentByXpath("//div[@class='uif-messageCount' and contains(text(),' 1 error')]"); String requiredMessage []={"Description: Required","Travel Account Name: Required"}; assertTextPresent(requiredMessage); waitAndClickSaveByText(); assertTextPresent(requiredMessage); waitAndClickBlanketApprove(); assertTextPresent(requiredMessage); waitForElementPresentByXpath("//div[@data-label='Date Created']"); waitAndTypeByName("document.newMaintainableObject.dataObject.name","Travel Account 14"); } private void clickBlanketApproveAndEditDocAgain() throws Exception{ waitAndClickBlanketApprove(); //click on confirmation message waitAndClickConfirmBlanketApproveOk(); acceptAlertIfPresent(); waitForProgressLoading(); checkForDocErrorKrad(); // Redirected to Home page after Blanket Approve https://jira.kuali.org/browse/KULRICE-13042 waitAndClickDemoLink(); acceptAlertIfPresent(); waitAndClickByLinkText("Travel Account Maintenance (Edit)"); } @Test public void testDemoTravelAccountMaintenanceEditBookmark() throws Exception { testTravelAccountMaintenanceEdit(); passed(); } @Test public void testDemoTravelAccountMaintenanceEditNav() throws Exception { testTravelAccountMaintenanceEdit(); passed(); } @Test public void testDemoTravelAccountMaintenanceEditBlanketApproveBookmark() throws Exception { testTravelAccountMaintenanceEditBlanketApprove(); passed(); } @Test public void testDemoTravelAccountMaintenanceEditBlanketApproveNav() throws Exception { 
testTravelAccountMaintenanceEditBlanketApprove(); passed(); } @Test public void testDemoTravelAccountMaintenanceEditXssBookmark() throws Exception { testTravelAccountMaintenanceEditXss(); passed(); } @Test public void testDemoTravelAccountMaintenanceEditXssNav() throws Exception { testTravelAccountMaintenanceEditXss(); passed(); } @Test public void testDemoTravelAccountMaintenanceEditFiscalOfficerBookmark() throws Exception { testEditFiscalOfficer(); passed(); } @Test public void testDemoTravelAccountMaintenanceEditFiscalOfficerNav() throws Exception { testEditFiscalOfficer(); passed(); } @Test public void testDemoTravelAccountMaintenanceSubAccountOperationsBookmark() throws Exception { testSubAccountOperations(); passed(); } @Test public void testDemoTravelAccountMaintenanceSubAccountOperationsNav() throws Exception { testSubAccountOperations(); passed(); } @Test public void testTravelAccountMaintenanceEditBlanketApproveAdHocCompleteBookmark() throws Exception { testTravelAccountMaintenanceEditBlanketApproveAdHocComplete(); passed(); } @Test public void testTravelAccountMaintenanceEditBlanketApproveAdHocCompleteNav() throws Exception { testTravelAccountMaintenanceEditBlanketApproveAdHocComplete(); passed(); } }
/** * Copyright 2015 StreamSets Inc. * * Licensed under the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.datacollector.definition; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.streamsets.datacollector.config.ConfigDefinition; import com.streamsets.datacollector.config.ModelDefinition; import com.streamsets.datacollector.config.ModelType; import com.streamsets.datacollector.el.ElConstantDefinition; import com.streamsets.datacollector.el.ElFunctionDefinition; import com.streamsets.pipeline.api.ListBeanModel; import com.streamsets.pipeline.api.ConfigDef; import com.streamsets.pipeline.api.ConfigDefBean; import com.streamsets.pipeline.api.impl.ErrorMessage; import com.streamsets.pipeline.api.impl.Utils; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; public abstract class ConfigDefinitionExtractor { private static final ConfigDefinitionExtractor EXTRACTOR = new ConfigDefinitionExtractor() {}; public static ConfigDefinitionExtractor get() { return EXTRACTOR; } public List<ErrorMessage> validate(Class klass, 
List<String> stageGroups, Object contextMsg) { return validate("", klass, stageGroups, true, false, false, contextMsg); } public List<ErrorMessage> validateComplexField(String configPrefix, Class klass, List<String> stageGroups, Object contextMsg) { return validate(configPrefix, klass, stageGroups, true, false, true, contextMsg); } private List<ErrorMessage> validate(String configPrefix, Class klass, List<String> stageGroups, boolean validateDependencies, boolean isBean, boolean isComplexField, Object contextMsg) { List<ErrorMessage> errors = new ArrayList<>(); boolean noConfigs = true; for (Field field : klass.getFields()) { if (field.getAnnotation(ConfigDef.class) != null && field.getAnnotation(ConfigDefBean.class) != null) { errors.add(new ErrorMessage(DefinitionError.DEF_152, contextMsg, field.getName())); } else { if (field.getAnnotation(ConfigDef.class) != null || field.getAnnotation(ConfigDefBean.class) != null) { if (Modifier.isStatic(field.getModifiers())) { errors.add(new ErrorMessage(DefinitionError.DEF_151, contextMsg, klass.getSimpleName(), field.getName())); } if (Modifier.isFinal(field.getModifiers())) { errors.add(new ErrorMessage(DefinitionError.DEF_154, contextMsg, klass.getSimpleName(), field.getName())); } } if (field.getAnnotation(ConfigDef.class) != null) { noConfigs = false; List<ErrorMessage> subErrors = validateConfigDef(configPrefix, stageGroups, field, isComplexField, Utils.formatL("{} Field='{}'", contextMsg, field.getName())); errors.addAll(subErrors); } else if (field.getAnnotation(ConfigDefBean.class) != null) { noConfigs = false; List<ErrorMessage> subErrors = validateConfigDefBean(configPrefix + field.getName() + ".", field, stageGroups, isComplexField, Utils.formatL("{} BeanField='{}'", contextMsg, field.getName())); errors.addAll(subErrors); } } } if (isBean && noConfigs) { errors.add(new ErrorMessage(DefinitionError.DEF_160, contextMsg)); } if (errors.isEmpty() & validateDependencies) { 
errors.addAll(validateDependencies(getConfigDefinitions(configPrefix, klass, stageGroups, contextMsg), contextMsg)); } return errors; } private static String resolveGroup(List<String> parentGroups, String group, Object contextMsg, List<ErrorMessage> errors) { if (group.startsWith("#")) { try { int pos = Integer.parseInt(group.substring(1).trim()); if (pos >= 0 && pos < parentGroups.size()) { group = parentGroups.get(pos); } else { errors.add(new ErrorMessage(DefinitionError.DEF_163, contextMsg, pos, parentGroups.size() - 1)); } } catch (NumberFormatException ex) { errors.add(new ErrorMessage(DefinitionError.DEF_164, contextMsg, ex.toString())); } } else { if (!parentGroups.contains(group)) { errors.add(new ErrorMessage(DefinitionError.DEF_165, contextMsg, group, parentGroups)); } } return group; } public static List<String> getGroups(Field field, List<String> parentGroups, Object contextMsg, List<ErrorMessage> errors) { List<String> list = new ArrayList<>(); ConfigDefBean configDefBean = field.getAnnotation(ConfigDefBean.class); if (configDefBean != null) { String[] groups = configDefBean.groups(); if (groups.length > 0) { for (String group : groups) { list.add(resolveGroup(parentGroups, group, contextMsg, errors)); } } else { // no groups in the annotation, we propagate all parent groups then list.addAll(parentGroups); } } else { throw new IllegalArgumentException(Utils.format("{} is not annotated with ConfigDefBean", contextMsg)); } return list; } private List<ConfigDefinition> getConfigDefinitions(String configPrefix, Class klass, List<String> stageGroups, Object contextMsg) { List<ConfigDefinition> defs = new ArrayList<>(); for (Field field : klass.getFields()) { if (field.getAnnotation(ConfigDef.class) != null) { defs.add(extractConfigDef(configPrefix, stageGroups, field, Utils.formatL("{} Field='{}'", contextMsg, field.getName()))); } else if (field.getAnnotation(ConfigDefBean.class) != null) { List<String> beanGroups = getGroups(field, stageGroups, 
contextMsg, new ArrayList<ErrorMessage>()); defs.addAll(extract(configPrefix + field.getName() + ".", field.getType(), beanGroups, true, Utils.formatL("{} BeanField='{}'", contextMsg, field.getName()))); } } return defs; } public List<ConfigDefinition> extract(Class klass, List<String> stageGroups, Object contextMsg) { return extract("", klass, stageGroups, contextMsg); } public List<ConfigDefinition> extract(String configPrefix, Class klass, List<String> stageGroups, Object contextMsg) { List<ConfigDefinition> defs = extract(configPrefix, klass, stageGroups, false, contextMsg); resolveDependencies("", defs, contextMsg); return defs; } private List<ConfigDefinition> extract(String configPrefix, Class klass, List<String> stageGroups, boolean isBean, Object contextMsg) { List<ErrorMessage> errors = validate(configPrefix, klass, stageGroups, false, isBean, false, contextMsg); if (errors.isEmpty()) { return getConfigDefinitions(configPrefix, klass, stageGroups, contextMsg); } else { throw new IllegalArgumentException(Utils.format("Invalid ConfigDefinition: {}", errors)); } } private List<ErrorMessage> validateDependencies(List<ConfigDefinition> defs, Object contextMsg) { List<ErrorMessage> errors = new ArrayList<>(); Map<String, ConfigDefinition> definitionsMap = new HashMap<>(); for (ConfigDefinition def : defs) { definitionsMap.put(def.getName(), def); } for (ConfigDefinition def : defs) { if (!def.getDependsOn().isEmpty()) { ConfigDefinition dependsOnDef = definitionsMap.get(def.getDependsOn()); if (dependsOnDef == null) { errors.add(new ErrorMessage(DefinitionError.DEF_153, contextMsg, def.getName(), def.getDependsOn())); } else { // evaluate dependsOn triggers ConfigDef annotation = def.getConfigField().getAnnotation(ConfigDef.class); for (String trigger : annotation.triggeredByValue()) { errors.addAll(ConfigValueExtractor.get().validate(dependsOnDef.getConfigField(), dependsOnDef.getType(), trigger, contextMsg, true)); } } } } return errors; } void 
resolveDependencies(String configPrefix, List<ConfigDefinition> defs, Object contextMsg) { Map<String, ConfigDefinition> definitionsMap = new HashMap<>(); for (ConfigDefinition def : defs) { definitionsMap.put(def.getName(), def); } for (ConfigDefinition def : defs) { if (!def.getDependsOn().isEmpty()) { ConfigDefinition dependsOnDef = definitionsMap.get(def.getDependsOn()); // evaluate dependsOn triggers ConfigDef annotation = def.getConfigField().getAnnotation(ConfigDef.class); List<Object> triggers = new ArrayList<>(); for (String trigger : annotation.triggeredByValue()) { triggers.add(ConfigValueExtractor.get().extract(dependsOnDef.getConfigField(), dependsOnDef.getType(), trigger, contextMsg, true)); } def.setTriggeredByValues(triggers); } } // compute dependsOnChain for (ConfigDefinition def : defs) { ConfigDefinition tempConfigDef = def; Map<String, List<Object>> dependsOnMap = new HashMap<>(); while(tempConfigDef != null && tempConfigDef.getDependsOn() != null && !tempConfigDef.getDependsOn().isEmpty()) { dependsOnMap.put(tempConfigDef.getDependsOn(), tempConfigDef.getTriggeredByValues()); tempConfigDef = definitionsMap.get(tempConfigDef.getDependsOn()); } if(!dependsOnMap.isEmpty()) { def.setDependsOnMap(dependsOnMap); } } } List<ErrorMessage> validateConfigDef(String configPrefix, List<String> stageGroups, Field field, boolean isComplexField, Object contextMsg) { List<ErrorMessage> errors = new ArrayList<>(); ConfigDef annotation = field.getAnnotation(ConfigDef.class); errors.addAll(ConfigValueExtractor.get().validate(field, annotation, contextMsg)); if (annotation.type() == ConfigDef.Type.MODEL && field.getAnnotation(ListBeanModel.class) != null && isComplexField) { errors.add(new ErrorMessage(DefinitionError.DEF_161, contextMsg, field.getName())); } else { List<ErrorMessage> modelErrors = ModelDefinitionExtractor.get().validate(configPrefix + field.getName() + ".", field, contextMsg); if (modelErrors.isEmpty()) { ModelDefinition model = 
ModelDefinitionExtractor.get().extract(configPrefix + field.getName() + ".", field, contextMsg); errors.addAll(validateELFunctions(annotation, model, contextMsg)); errors.addAll(validateELConstants(annotation, model, contextMsg)); } else { errors.addAll(modelErrors); } if (annotation.type() != ConfigDef.Type.NUMBER && (annotation.min() != Long.MIN_VALUE || annotation.max() != Long.MAX_VALUE)) { errors.add(new ErrorMessage(DefinitionError.DEF_155, contextMsg, field.getName())); } errors.addAll(validateDependsOnName(configPrefix, annotation.dependsOn(), Utils.formatL("{} Field='{}'", contextMsg, field.getName()))); } return errors; } @SuppressWarnings("unchecked") List<ErrorMessage> validateConfigDefBean(String configPrefix, Field field, List<String> stageGroups, boolean isComplexField, Object contextMsg) { List<ErrorMessage> errors = new ArrayList<>(); Class klass = field.getType(); try { if (klass.isPrimitive()) { errors.add(new ErrorMessage(DefinitionError.DEF_162, contextMsg, klass.getSimpleName())); } else { klass.getConstructor(); List<String> beanGroups = getGroups(field, stageGroups, contextMsg, errors); errors.addAll(validate(configPrefix, klass, beanGroups, false, true, isComplexField, contextMsg)); } } catch (NoSuchMethodException ex) { errors.add(new ErrorMessage(DefinitionError.DEF_156, contextMsg, klass.getSimpleName())); } return errors; } ConfigDefinition extractConfigDef(String configPrefix, List<String> stageGroups, Field field, Object contextMsg) { List<ErrorMessage> errors = validateConfigDef(configPrefix, stageGroups, field, false, contextMsg); if (errors.isEmpty()) { ConfigDefinition def = null; ConfigDef annotation = field.getAnnotation(ConfigDef.class); if (annotation != null) { String name = field.getName(); ConfigDef.Type type = annotation.type(); String label = annotation.label(); String description = annotation.description(); Object defaultValue = ConfigValueExtractor.get().extract(field, annotation, contextMsg); boolean required = 
annotation.required(); String group = annotation.group(); group = resolveGroup(stageGroups, group, contextMsg, errors); String fieldName = field.getName(); String dependsOn = resolveDependsOn(configPrefix, annotation.dependsOn()); List<Object> triggeredByValues = null; // done at resolveDependencies() invocation ModelDefinition model = ModelDefinitionExtractor.get().extract(configPrefix + field.getName() + ".", field, contextMsg); if (model != null) { defaultValue = model.getModelType().prepareDefault(defaultValue); } int displayPosition = annotation.displayPosition(); List<ElFunctionDefinition> elFunctionDefinitions = getELFunctions(annotation, model, contextMsg); List<ElConstantDefinition> elConstantDefinitions = getELConstants(annotation, model ,contextMsg); List<Class> elDefs = new ImmutableList.Builder().add(annotation.elDefs()).add(ELDefinitionExtractor.DEFAULT_EL_DEFS).build(); long min = annotation.min(); long max = annotation.max(); String mode = (annotation.mode() != null) ? getMimeString(annotation.mode()) : null; int lines = annotation.lines(); ConfigDef.Evaluation evaluation = annotation.evaluation(); Map<String, List<Object>> dependsOnMap = null; // done at resolveDependencies() invocation def = new ConfigDefinition(field, configPrefix + name, type, label, description, defaultValue, required, group, fieldName, model, dependsOn, triggeredByValues, displayPosition, elFunctionDefinitions, elConstantDefinitions, min, max, mode, lines, elDefs, evaluation, dependsOnMap); } return def; } else { throw new IllegalArgumentException(Utils.format("Invalid ConfigDefinition: {}", errors)); } } private List<ErrorMessage> validateDependsOnName(String configPrefix, String dependsOn, Object contextMsg) { List<ErrorMessage> errors = new ArrayList<>(); if (!dependsOn.isEmpty()) { if (dependsOn.startsWith("^")) { if (dependsOn.substring(1).contains("^")) { errors.add(new ErrorMessage(DefinitionError.DEF_157, contextMsg)); } } else if (dependsOn.endsWith("^")) { boolean 
gaps = false; for (int i = dependsOn.indexOf("^"); !gaps && i < dependsOn.length(); i++) { gaps = dependsOn.charAt(i) != '^'; } if (gaps) { errors.add(new ErrorMessage(DefinitionError.DEF_158, contextMsg)); } else { int relativeCount = dependsOn.length() - dependsOn.indexOf("^"); int dotCount = configPrefix.split("\\.").length; if (relativeCount > dotCount) { errors.add(new ErrorMessage(DefinitionError.DEF_159, contextMsg, relativeCount, dotCount, configPrefix)); } } } } return errors; } private String resolveDependsOn(String configPrefix, String dependsOn) { if (!dependsOn.isEmpty()) { if (dependsOn.startsWith("^")) { //is absolute from the top dependsOn = dependsOn.substring(1); } else if (dependsOn.endsWith("^")) { configPrefix = configPrefix.substring(0, configPrefix.length() - 1); //is relative backwards based on the ^ count int relativeCount = dependsOn.length() - dependsOn.indexOf("^"); while (relativeCount > 0) { int pos = configPrefix.lastIndexOf("."); configPrefix = (pos == -1) ? 
"" : configPrefix.substring(0, pos); relativeCount--; } if (!configPrefix.isEmpty()) { configPrefix += "."; } dependsOn = configPrefix + dependsOn.substring(0, dependsOn.indexOf("^")); } else { dependsOn = configPrefix + dependsOn; } } return dependsOn; } private String getMimeString(ConfigDef.Mode mode) { switch(mode) { case JSON: return "application/json"; case PLAIN_TEXT: return "text/plain"; case PYTHON: return "text/x-python"; case JAVASCRIPT: return "text/javascript"; case RUBY: return "text/x-ruby"; case JAVA: return "text/x-java"; case GROOVY: return "text/x-groovy"; case SCALA: return "text/x-scala"; case SQL: return "text/x-sql"; default: return null; } } private static final Set<ConfigDef.Type> TYPES_SUPPORTING_ELS = ImmutableSet.of( ConfigDef.Type.LIST, ConfigDef.Type.MAP, ConfigDef.Type.NUMBER, ConfigDef.Type.STRING, ConfigDef.Type.TEXT); private static final Set<ModelType> MODELS_SUPPORTING_ELS = ImmutableSet.of(ModelType.PREDICATE); private List<ErrorMessage> validateELFunctions(ConfigDef annotation,ModelDefinition model, Object contextMsg) { List<ErrorMessage> errors; if (TYPES_SUPPORTING_ELS.contains(annotation.type()) || (annotation.type() == ConfigDef.Type.MODEL && MODELS_SUPPORTING_ELS.contains(model.getModelType()))) { errors = ELDefinitionExtractor.get().validateFunctions(annotation.elDefs(), contextMsg); } else { errors = new ArrayList<>(); } return errors; } private List<ElFunctionDefinition> getELFunctions(ConfigDef annotation,ModelDefinition model, Object contextMsg) { List<ElFunctionDefinition> functions = Collections.emptyList(); if (TYPES_SUPPORTING_ELS.contains(annotation.type()) || (annotation.type() == ConfigDef.Type.MODEL && MODELS_SUPPORTING_ELS.contains(model.getModelType()))) { functions = ELDefinitionExtractor.get().extractFunctions(annotation.elDefs(), contextMsg); } return functions; } private List<ErrorMessage> validateELConstants(ConfigDef annotation, ModelDefinition model, Object contextMsg) { List<ErrorMessage> errors; if 
(TYPES_SUPPORTING_ELS.contains(annotation.type()) || (annotation.type() == ConfigDef.Type.MODEL && MODELS_SUPPORTING_ELS.contains(model.getModelType()))) { errors = ELDefinitionExtractor.get().validateConstants(annotation.elDefs(), contextMsg); } else { errors = new ArrayList<>(); } return errors; } private List<ElConstantDefinition> getELConstants(ConfigDef annotation, ModelDefinition model, Object contextMsg) { List<ElConstantDefinition> functions = Collections.emptyList(); if (TYPES_SUPPORTING_ELS.contains(annotation.type()) || (annotation.type() == ConfigDef.Type.MODEL && MODELS_SUPPORTING_ELS.contains(model.getModelType()))) { functions = ELDefinitionExtractor.get().extractConstants(annotation.elDefs(), contextMsg); } return functions; } }
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.networkconnectivity.v1alpha1;

import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Typed wrapper for the resource name pattern
 * {@code projects/{project}/locations/{location}/spokes/{spoke}}.
 *
 * <p>Instances are immutable; use {@link #newBuilder()} / {@link #of} to create and
 * {@link #parse} to convert back from the formatted string form.
 */
@Generated("by gapic-generator-java")
public class SpokeName implements ResourceName {
  private static final PathTemplate PROJECT_LOCATION_SPOKE =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/spokes/{spoke}");

  // Lazily built via double-checked locking in getFieldValuesMap(); volatile makes the
  // published ImmutableMap safely visible to other threads.
  private volatile Map<String, String> fieldValuesMap;

  private final String project;
  private final String location;
  private final String spoke;

  @Deprecated
  protected SpokeName() {
    project = null;
    location = null;
    spoke = null;
  }

  private SpokeName(Builder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    spoke = Preconditions.checkNotNull(builder.getSpoke());
  }

  public String getProject() {
    return project;
  }

  public String getLocation() {
    return location;
  }

  public String getSpoke() {
    return spoke;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Creates a SpokeName from its three path components; none may be null. */
  public static SpokeName of(String project, String location, String spoke) {
    return newBuilder().setProject(project).setLocation(location).setSpoke(spoke).build();
  }

  /** Formats the three components directly into the resource-name string. */
  public static String format(String project, String location, String spoke) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setSpoke(spoke)
        .build()
        .toString();
  }

  /**
   * Parses a formatted resource-name string back into a SpokeName.
   *
   * @return {@code null} for an empty input string
   * @throws com.google.api.pathtemplate.ValidationException if the string does not match the
   *     pattern
   */
  public static SpokeName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PROJECT_LOCATION_SPOKE.validatedMatch(
            formattedString, "SpokeName.parse: formattedString not in valid format");
    return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("spoke"));
  }

  public static List<SpokeName> parseList(List<String> formattedStrings) {
    List<SpokeName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  /** Null elements are rendered as empty strings, mirroring {@link #parse} on "". */
  public static List<String> toStringList(List<SpokeName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (SpokeName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_SPOKE.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (spoke != null) {
            fieldMapBuilder.put("spoke", spoke);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return PROJECT_LOCATION_SPOKE.instantiate(
        "project", project, "location", location, "spoke", spoke);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // FIX: guard was "o != null || getClass() == o.getClass()" — with '||' a null
    // argument falls through to o.getClass() (NPE) and any non-null object of a
    // different class passes the guard and fails the cast (ClassCastException).
    // The Object.equals contract requires returning false in both cases, so the
    // conditions must be combined with '&&'.
    if (o != null && getClass() == o.getClass()) {
      SpokeName that = ((SpokeName) o);
      return Objects.equals(this.project, that.project)
          && Objects.equals(this.location, that.location)
          && Objects.equals(this.spoke, that.spoke);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(spoke);
    return h;
  }

  /** Builder for projects/{project}/locations/{location}/spokes/{spoke}. */
  public static class Builder {
    private String project;
    private String location;
    private String spoke;

    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getSpoke() {
      return spoke;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setSpoke(String spoke) {
      this.spoke = spoke;
      return this;
    }

    private Builder(SpokeName spokeName) {
      this.project = spokeName.project;
      this.location = spokeName.location;
      this.spoke = spokeName.spoke;
    }

    public SpokeName build() {
      return new SpokeName(this);
    }
  }
}
/** */ package bpsim.impl; import bpsim.BpsimPackage; import bpsim.ParameterValue; import bpsim.ResultType; import com.google.gwt.user.client.rpc.GwtTransient; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.EObjectImpl; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Parameter Value</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link bpsim.impl.ParameterValueImpl#getInstance <em>Instance</em>}</li> * <li>{@link bpsim.impl.ParameterValueImpl#getResult <em>Result</em>}</li> * <li>{@link bpsim.impl.ParameterValueImpl#getValidFor <em>Valid For</em>}</li> * </ul> * * @generated */ public class ParameterValueImpl extends EObjectImpl implements ParameterValue { /** * The default value of the '{@link #getInstance() <em>Instance</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getInstance() * @generated * @ordered */ protected static final String INSTANCE_EDEFAULT = null; /** * The cached value of the '{@link #getInstance() <em>Instance</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getInstance() * @generated * @ordered */ @GwtTransient protected String instance = INSTANCE_EDEFAULT; /** * The default value of the '{@link #getResult() <em>Result</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getResult() * @generated * @ordered */ protected static final ResultType RESULT_EDEFAULT = ResultType.MIN; /** * The cached value of the '{@link #getResult() <em>Result</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getResult() * @generated * @ordered */ @GwtTransient protected ResultType result = RESULT_EDEFAULT; /** * This is true if the Result attribute has been set. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ @GwtTransient protected boolean resultESet; /** * The default value of the '{@link #getValidFor() <em>Valid For</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getValidFor() * @generated * @ordered */ protected static final String VALID_FOR_EDEFAULT = null; /** * The cached value of the '{@link #getValidFor() <em>Valid For</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getValidFor() * @generated * @ordered */ @GwtTransient protected String validFor = VALID_FOR_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ParameterValueImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return BpsimPackage.Literals.PARAMETER_VALUE; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String getInstance() { return instance; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void setInstance(String newInstance) { String oldInstance = instance; instance = newInstance; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.PARAMETER_VALUE__INSTANCE, oldInstance, instance)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public ResultType getResult() { return result; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void setResult(ResultType newResult) { ResultType oldResult = result; result = newResult == null ? 
RESULT_EDEFAULT : newResult; boolean oldResultESet = resultESet; resultESet = true; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.PARAMETER_VALUE__RESULT, oldResult, result, !oldResultESet)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void unsetResult() { ResultType oldResult = result; boolean oldResultESet = resultESet; result = RESULT_EDEFAULT; resultESet = false; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.UNSET, BpsimPackage.PARAMETER_VALUE__RESULT, oldResult, RESULT_EDEFAULT, oldResultESet)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean isSetResult() { return resultESet; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String getValidFor() { return validFor; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void setValidFor(String newValidFor) { String oldValidFor = validFor; validFor = newValidFor; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.PARAMETER_VALUE__VALID_FOR, oldValidFor, validFor)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case BpsimPackage.PARAMETER_VALUE__INSTANCE: return getInstance(); case BpsimPackage.PARAMETER_VALUE__RESULT: return getResult(); case BpsimPackage.PARAMETER_VALUE__VALID_FOR: return getValidFor(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case BpsimPackage.PARAMETER_VALUE__INSTANCE: setInstance((String)newValue); return; case BpsimPackage.PARAMETER_VALUE__RESULT: setResult((ResultType)newValue); return; case 
BpsimPackage.PARAMETER_VALUE__VALID_FOR: setValidFor((String)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case BpsimPackage.PARAMETER_VALUE__INSTANCE: setInstance(INSTANCE_EDEFAULT); return; case BpsimPackage.PARAMETER_VALUE__RESULT: unsetResult(); return; case BpsimPackage.PARAMETER_VALUE__VALID_FOR: setValidFor(VALID_FOR_EDEFAULT); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case BpsimPackage.PARAMETER_VALUE__INSTANCE: return INSTANCE_EDEFAULT == null ? instance != null : !INSTANCE_EDEFAULT.equals(instance); case BpsimPackage.PARAMETER_VALUE__RESULT: return isSetResult(); case BpsimPackage.PARAMETER_VALUE__VALID_FOR: return VALID_FOR_EDEFAULT == null ? validFor != null : !VALID_FOR_EDEFAULT.equals(validFor); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuilder result = new StringBuilder(super.toString()); result.append(" (instance: "); result.append(instance); result.append(", result: "); if (resultESet) result.append(result); else result.append("<unset>"); result.append(", validFor: "); result.append(validFor); result.append(')'); return result.toString(); } } //ParameterValueImpl
/**
 * Copyright 2008-2010 Digital Enterprise Research Institute (DERI)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sindice.rdfcommons.tripleconverter;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.RDFWriter;
import org.openrdf.rio.ntriples.NTriplesParser;
import org.openrdf.rio.ntriples.NTriplesWriter;
import org.openrdf.rio.rdfxml.RDFXMLParser;
import org.openrdf.rio.rdfxml.RDFXMLWriter;
import org.openrdf.rio.trig.TriGParser;
import org.openrdf.rio.trig.TriGWriter;
import org.openrdf.rio.turtle.TurtleParser;
import org.openrdf.rio.turtle.TurtleWriter;
import org.sindice.rdfcommons.adapter.sesame.nquads.NQuadsParser;
import org.sindice.rdfcommons.adapter.sesame.nquads.NQuadsWriter;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;

/**
 * This converter is able to transform an input triple format in
 * an output triple format. Supports the main triple formats.
 *
 * @author Michele Mostarda (mostarda@fbk.eu)
 */
public class TripleConverter {

    public static final String RDFXML_FORMAT   = "RFXML";
    public static final String NTRIPLES_FORMAT = "NTriples";
    public static final String TRIG_FORMAT     = "Trig";
    public static final String TURTLE_FORMAT   = "Turtle";
    public static final String NQUADS_FORMAT   = "NQuads";

    public static final String[] INPUT_FORMATS = new String[]{
        RDFXML_FORMAT, NTRIPLES_FORMAT, TRIG_FORMAT, TURTLE_FORMAT, NQUADS_FORMAT
    };

    public static final String[] OUTPUT_FORMATS = new String[]{
        RDFXML_FORMAT, NTRIPLES_FORMAT, TRIG_FORMAT, TURTLE_FORMAT, NQUADS_FORMAT
    };

    public static final String DEFAULT_BASE_URI = "http://sindice.com/base-uri";

    /** Command-line entry point: delegates to {@link #processCommand(String[])}. */
    public static void main(String[] args) {
        final TripleConverter tripleConverter = new TripleConverter();
        try {
            tripleConverter.processCommand(args);
        } catch (Exception e) {
            System.err.println("ERROR: " + e.getMessage());
            e.printStackTrace();
        }
    }

    private final Options options;

    private final CommandLineParser parser = new GnuParser();

    /**
     * Declares the supported command-line options:
     * -if (required input format), -of (required output format),
     * -bu (optional base URI), -O (optional output file; defaults to stdout).
     */
    @SuppressWarnings({"AccessStaticViaInstance"})
    public TripleConverter() {
        final Option inputFormat = OptionBuilder
                .withLongOpt("input-format")
                .withDescription(
                        String.format("Supported formats are: %s", Arrays.toString(INPUT_FORMATS))
                )
                .hasArg()
                .isRequired()
                .create("if");
        final Option outputFormat = OptionBuilder
                .withLongOpt("output-format")
                .withDescription(
                        String.format("Supported formats are: %s", Arrays.toString(OUTPUT_FORMATS))
                )
                .hasArg()
                .isRequired()
                .create("of");
        final Option baseURI = OptionBuilder
                .withLongOpt("base-uri")
                .withDescription(
                        String.format("The base uri: default '%s'", DEFAULT_BASE_URI)
                )
                .hasArg()
                .create("bu");
        final Option outputFile = OptionBuilder
                .withLongOpt("output-file")
                .withDescription("The output file")
                .hasArg()
                .isRequired(false)
                .create("O");
        options = new Options();
        options.addOption(inputFormat);
        options.addOption(outputFormat);
        options.addOption(outputFile);
        options.addOption(baseURI);
    }

    /**
     * Parses the command line, wires the requested parser to the requested writer
     * and streams the single input-file argument through them.
     *
     * @throws RuntimeException on any usage or I/O error (see {@link #printUsageAndExit(String)}).
     */
    public void processCommand(String[] args) {
        // Parsing command line. printUsageAndExit() always throws, so cl is non-null below.
        CommandLine cl = null;
        try {
            cl = parser.parse(options, args);
        } catch (ParseException pe) {
            printUsageAndExit(pe.getMessage());
        }

        // Input and output formats.
        final String inputFormat = cl.getOptionValue("if");
        final String outputFormat = cl.getOptionValue("of");

        final boolean toFile = cl.hasOption("O");
        OutputStream os = null;
        if (toFile) {
            try {
                os = new BufferedOutputStream(
                        new FileOutputStream(cl.getOptionValue("O"), false)
                );
            } catch (FileNotFoundException fnfe) {
                printUsageAndExit("Invalid output file.");
            }
        } else {
            os = System.out;
        }

        final RDFParser rdfParser = getParser(inputFormat);
        final RDFWriter rdfWriter = getWriter(outputFormat, os);

        final String baseURI;
        if (cl.hasOption("bu")) {
            baseURI = cl.getOptionValue("bu");
            try {
                new URI(baseURI); // validation only; the parsed instance is discarded.
            } catch (URISyntaxException uriase) {
                printUsageAndExit(
                        String.format("Invalid base-uri: '%s'", baseURI)
                );
            }
        } else {
            baseURI = DEFAULT_BASE_URI;
        }

        // Input file.
        if (cl.getArgs().length != 1) {
            printUsageAndExit("Expected one argument.");
        }
        final File inputFile = new File(cl.getArgs()[0]);
        if (!inputFile.exists() || inputFile.isDirectory()) {
            printUsageAndExit("Invalid input file.");
        }

        // Configure RDF parser.
        rdfParser.setRDFHandler(rdfWriter);
        rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
        rdfParser.setStopAtFirstError(false);

        // Perform parsing.
        BufferedInputStream bis;
        try {
            bis = new BufferedInputStream(new FileInputStream(inputFile));
        } catch (FileNotFoundException fnfe) {
            throw new RuntimeException("Error while opening file.", fnfe);
        }
        try {
            rdfParser.parse(bis, baseURI);
        } catch (Exception e) {
            throw new RuntimeException("Error while parsing file.", e);
        } finally {
            try {
                // Closing the outermost stream also closes the underlying FileInputStream.
                bis.close();
            } catch (IOException ioe) {
                throw new RuntimeException("Error while closing file buffer.", ioe);
            }
            // FIX: the output stream was previously never flushed or closed, so the tail of
            // the BufferedOutputStream could be lost when writing to a file. Close file
            // streams; only flush stdout (it must stay open for the rest of the JVM).
            try {
                if (toFile) {
                    os.close();
                } else {
                    os.flush();
                }
            } catch (IOException ioe) {
                throw new RuntimeException("Error while closing output stream.", ioe);
            }
        }
    }

    /** Prints CLI usage to stdout, then aborts by throwing with the given error message. */
    private void printUsageAndExit(String error) {
        final HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                150,
                String.format("%s OPTIONS <inputfile>", this.getClass().getName()),
                "Supported options are:",
                options,
                "\n"
        );
        System.out.flush();
        throw new RuntimeException(error);
    }

    /**
     * @return the Sesame parser for the given (case-insensitive) input format name.
     * @throws IllegalArgumentException for an unknown format.
     */
    private RDFParser getParser(String inputFormat) {
        if (RDFXML_FORMAT.equalsIgnoreCase(inputFormat)) {
            return new RDFXMLParser();
        }
        if (NTRIPLES_FORMAT.equalsIgnoreCase(inputFormat)) {
            return new NTriplesParser();
        }
        if (TURTLE_FORMAT.equalsIgnoreCase(inputFormat)) {
            return new TurtleParser();
        }
        if (TRIG_FORMAT.equalsIgnoreCase(inputFormat)) {
            return new TriGParser();
        }
        if (NQUADS_FORMAT.equalsIgnoreCase(inputFormat)) {
            return new NQuadsParser();
        }
        throw new IllegalArgumentException(
                String.format("Invalid format %s", inputFormat)
        );
    }

    /**
     * @return the Sesame writer for the given (case-insensitive) output format name,
     *         writing to {@code os}.
     * @throws IllegalArgumentException for an unknown format.
     */
    private RDFWriter getWriter(String outputFormat, OutputStream os) {
        if (RDFXML_FORMAT.equalsIgnoreCase(outputFormat)) {
            return new RDFXMLWriter(os);
        }
        if (NTRIPLES_FORMAT.equalsIgnoreCase(outputFormat)) {
            return new NTriplesWriter(os);
        }
        if (TURTLE_FORMAT.equalsIgnoreCase(outputFormat)) {
            return new TurtleWriter(os);
        }
        if (TRIG_FORMAT.equalsIgnoreCase(outputFormat)) {
            return new TriGWriter(os);
        }
        if (NQUADS_FORMAT.equalsIgnoreCase(outputFormat)) {
            return new NQuadsWriter(os);
        }
        throw new IllegalArgumentException(
                String.format("Invalid format %s", outputFormat)
        );
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.vcs.log.data;

import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.CommonProcessors;
import com.intellij.util.Function;
import com.intellij.util.io.*;
import com.intellij.vcs.log.*;
import com.intellij.vcs.log.impl.FatalErrorHandler;
import com.intellij.vcs.log.impl.HashImpl;
import com.intellij.vcs.log.impl.VcsRefImpl;
import com.intellij.vcs.log.util.PersistentUtil;
import com.intellij.vcs.log.util.StorageId;
import gnu.trove.TObjectIntHashMap;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

/**
 * Supports the int <-> Hash and int <-> VcsRef persistent mappings.
 */
public final class VcsLogStorageImpl implements Disposable, VcsLogStorage {
  @NotNull private static final Logger LOG = Logger.getInstance(VcsLogStorage.class);
  @NotNull @NonNls private static final String HASHES_STORAGE = "hashes";
  @NotNull @NonNls private static final String REFS_STORAGE = "refs";
  @NotNull @NonNls private static final String STORAGE = "storage";
  @NotNull public static final VcsLogStorage EMPTY = new EmptyLogStorage();

  public static final int VERSION = 8;
  public static final int NO_INDEX = -1;
  private static final int REFS_VERSION = 2;

  @NotNull private final MyPersistentBTreeEnumerator myCommitIdEnumerator;
  @NotNull private final PersistentEnumerator<VcsRef> myRefsEnumerator;
  @NotNull private final FatalErrorHandler myExceptionReporter;
  private volatile boolean myDisposed = false;

  /**
   * Opens (or resets, if broken) the two persistent enumerators backing this storage
   * and registers this instance for disposal under {@code parent}.
   */
  public VcsLogStorageImpl(@NotNull Project project,
                           @NotNull Map<VirtualFile, VcsLogProvider> logProviders,
                           @NotNull FatalErrorHandler exceptionReporter,
                           @NotNull Disposable parent) throws IOException {
    myExceptionReporter = exceptionReporter;

    // Roots are sorted by path so the index a root gets in MyCommitIdKeyDescriptor is stable.
    List<VirtualFile> roots = StreamEx.ofKeys(logProviders).sortedBy(VirtualFile::getPath).toList();

    String logId = PersistentUtil.calcLogId(project, logProviders);
    MyCommitIdKeyDescriptor commitIdKeyDescriptor = new MyCommitIdKeyDescriptor(roots);
    StorageId hashesStorageId = new StorageId(project.getName(), HASHES_STORAGE, logId, VERSION);
    myCommitIdEnumerator = IOUtil.openCleanOrResetBroken(() -> new MyPersistentBTreeEnumerator(hashesStorageId, commitIdKeyDescriptor),
                                                         hashesStorageId.getStorageFile(STORAGE).toFile());

    VcsRefKeyDescriptor refsKeyDescriptor = new VcsRefKeyDescriptor(logProviders, commitIdKeyDescriptor);
    StorageId refsStorageId = new StorageId(project.getName(), REFS_STORAGE, logId, VERSION + REFS_VERSION);
    myRefsEnumerator = IOUtil.openCleanOrResetBroken(() -> new PersistentEnumerator<>(refsStorageId.getStorageFile(STORAGE),
                                                                                      refsKeyDescriptor, Page.PAGE_SIZE, null,
                                                                                      refsStorageId.getVersion()),
                                                     refsStorageId.getStorageFile(STORAGE).toFile());
    Disposer.register(parent, this);
  }

  /** Returns a function mapping a commit index to its Hash, or null for unknown indices. */
  @NotNull
  public static Function<Integer, Hash> createHashGetter(@NotNull VcsLogStorage storage) {
    return commitIndex -> {
      CommitId commitId = storage.getCommitId(commitIndex);
      if (commitId == null) return null;
      return commitId.getHash();
    };
  }

  @Nullable
  private CommitId doGetCommitId(int index) throws IOException {
    return myCommitIdEnumerator.valueOf(index);
  }

  private int getOrPut(@NotNull Hash hash, @NotNull VirtualFile root) throws IOException {
    return myCommitIdEnumerator.enumerate(new CommitId(hash, root));
  }

  /** Returns the index for the commit, enumerating it on first access; NO_INDEX on I/O failure. */
  @Override
  public int getCommitIndex(@NotNull Hash hash, @NotNull VirtualFile root) {
    checkDisposed();
    try {
      return getOrPut(hash, root);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return NO_INDEX;
  }

  /** Returns the commit for the index, or null (after reporting) if unknown or on I/O failure. */
  @Override
  @Nullable
  public CommitId getCommitId(int commitIndex) {
    checkDisposed();
    try {
      CommitId commitId = doGetCommitId(commitIndex);
      if (commitId == null) {
        myExceptionReporter.consume(this, new RuntimeException("Unknown commit index: " + commitIndex));
      }
      return commitId;
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return null;
  }

  @Override
  public boolean containsCommit(@NotNull CommitId id) {
    checkDisposed();
    try {
      return myCommitIdEnumerator.contains(id);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return false;
  }

  /** Iterates all stored commits until {@code consumer} returns false. */
  @Override
  public void iterateCommits(@NotNull Predicate<? super CommitId> consumer) {
    checkDisposed();
    try {
      myCommitIdEnumerator.iterateData(new CommonProcessors.FindProcessor<>() {
        @Override
        protected boolean accept(CommitId commitId) {
          // FindProcessor stops when accept() returns true, i.e. when the consumer declines.
          return !consumer.test(commitId);
        }
      });
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
  }

  @Override
  public int getRefIndex(@NotNull VcsRef ref) {
    checkDisposed();
    try {
      return myRefsEnumerator.enumerate(ref);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return NO_INDEX;
  }

  @Nullable
  @Override
  public VcsRef getVcsRef(int refIndex) {
    checkDisposed();
    try {
      return myRefsEnumerator.valueOf(refIndex);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
      return null;
    }
  }

  /** Forces both enumerators to write pending data to disk. */
  @Override
  public void flush() {
    checkDisposed();
    myCommitIdEnumerator.force();
    myRefsEnumerator.force();
  }

  @Override
  public void dispose() {
    myDisposed = true;
    // FIX: both close() calls used to share one try block, so an IOException from the
    // first enumerator skipped closing the second one. Close each independently so a
    // failure in one cannot leak the other.
    try {
      myCommitIdEnumerator.close();
    }
    catch (IOException e) {
      LOG.warn(e);
    }
    try {
      myRefsEnumerator.close();
    }
    catch (IOException e) {
      LOG.warn(e);
    }
  }

  private void checkDisposed() {
    if (myDisposed) throw new ProcessCanceledException();
  }

  /** Serializes CommitId as (hash bytes, root index); relies on the stable sorted roots list. */
  private static class MyCommitIdKeyDescriptor implements KeyDescriptor<CommitId> {
    @NotNull private final List<? extends VirtualFile> myRoots;
    @NotNull private final TObjectIntHashMap<VirtualFile> myRootsReversed;

    MyCommitIdKeyDescriptor(@NotNull List<? extends VirtualFile> roots) {
      myRoots = roots;

      myRootsReversed = new TObjectIntHashMap<>();
      for (int i = 0; i < roots.size(); i++) {
        myRootsReversed.put(roots.get(i), i);
      }
    }

    @Override
    public void save(@NotNull DataOutput out, CommitId value) throws IOException {
      ((HashImpl)value.getHash()).write(out);
      out.writeInt(myRootsReversed.get(value.getRoot()));
    }

    @Override
    public CommitId read(@NotNull DataInput in) throws IOException {
      Hash hash = HashImpl.read(in);
      VirtualFile root = myRoots.get(in.readInt());
      if (root == null) return null;
      return new CommitId(hash, root);
    }

    @Override
    public int getHashCode(CommitId value) {
      return value.hashCode();
    }

    @Override
    public boolean isEqual(CommitId val1, CommitId val2) {
      return val1.equals(val2);
    }
  }

  /** Null-object implementation backing {@link #EMPTY}; index lookups throw. */
  private static class EmptyLogStorage implements VcsLogStorage {
    @Override
    public int getCommitIndex(@NotNull Hash hash, @NotNull VirtualFile root) {
      return 0;
    }

    @NotNull
    @Override
    public CommitId getCommitId(int commitIndex) {
      throw new UnsupportedOperationException("Illegal access to empty hash map by index " + commitIndex);
    }

    @Override
    public boolean containsCommit(@NotNull CommitId id) {
      return false;
    }

    @Override
    public void iterateCommits(@NotNull Predicate<? super CommitId> consumer) {
    }

    @Override
    public int getRefIndex(@NotNull VcsRef ref) {
      return 0;
    }

    @Nullable
    @Override
    public VcsRef getVcsRef(int refIndex) {
      throw new UnsupportedOperationException("Illegal access to empty ref map by index " + refIndex);
    }

    @Override
    public void flush() {
    }
  }

  /** Serializes VcsRef as (commit id, name, provider-specific ref type). */
  private static class VcsRefKeyDescriptor implements KeyDescriptor<VcsRef> {
    @NotNull private final Map<VirtualFile, VcsLogProvider> myLogProviders;
    @NotNull private final KeyDescriptor<CommitId> myCommitIdKeyDescriptor;

    VcsRefKeyDescriptor(@NotNull Map<VirtualFile, VcsLogProvider> logProviders,
                        @NotNull KeyDescriptor<CommitId> commitIdKeyDescriptor) {
      myLogProviders = logProviders;
      myCommitIdKeyDescriptor = commitIdKeyDescriptor;
    }

    @Override
    public int getHashCode(@NotNull VcsRef value) {
      return value.hashCode();
    }

    @Override
    public boolean isEqual(@NotNull VcsRef val1, @NotNull VcsRef val2) {
      return val1.equals(val2);
    }

    @Override
    public void save(@NotNull DataOutput out, @NotNull VcsRef value) throws IOException {
      myCommitIdKeyDescriptor.save(out, new CommitId(value.getCommitHash(), value.getRoot()));
      IOUtil.writeUTF(out, value.getName());
      myLogProviders.get(value.getRoot()).getReferenceManager().serialize(out, value.getType());
    }

    @Override
    public VcsRef read(@NotNull DataInput in) throws IOException {
      CommitId commitId = myCommitIdKeyDescriptor.read(in);
      if (commitId == null) throw new IOException("Can not read commit id for reference");
      String name = IOUtil.readUTF(in);
      VcsRefType type = myLogProviders.get(commitId.getRoot()).getReferenceManager().deserialize(in);
      return new VcsRefImpl(commitId.getHash(), name, type, commitId.getRoot());
    }
  }

  private static final class MyPersistentBTreeEnumerator extends PersistentBTreeEnumerator<CommitId> {
    MyPersistentBTreeEnumerator(@NotNull StorageId storageId, @NotNull KeyDescriptor<CommitId> commitIdKeyDescriptor)
      throws IOException {
      super(storageId.getStorageFile(STORAGE), commitIdKeyDescriptor, Page.PAGE_SIZE, new StorageLockContext(true),
            storageId.getVersion());
    }

    public boolean contains(@NotNull CommitId id) throws IOException {
      return tryEnumerate(id) != NULL_ID;
    }
  }
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2019 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.quickstart;

import java.awt.Cursor;
import java.awt.Desktop;
import java.awt.Font;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.font.TextAttribute;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.extension.help.ExtensionHelp;
import org.zaproxy.zap.utils.DesktopUtils;
import org.zaproxy.zap.utils.DisplayUtils;
import org.zaproxy.zap.view.LayoutHelper;

/**
 * Quick Start sub panel that lists "learn more" resources: local documentation
 * (the bundled getting-started guide PDF and the installed help add-on) when
 * available, plus a fixed set of online links (website, FAQ, user group, ...).
 */
public class LearnMorePanel extends QuickStartSubPanel {

    private static final long serialVersionUID = 1L;

    // Online fallback/reference URLs; local resources are preferred when installed.
    private static final String WEBSITE_LINK = "https://www.zaproxy.org/";
    private static final String FAQ_LINK = "https://www.zaproxy.org/faq/";
    private static final String GETTING_STARTED_LINK = "https://www.zaproxy.org/getting-started/";
    private static final String USER_GROUP_LINK = "https://groups.google.com/group/zaproxy-users";
    private static final String USER_GUIDE_LINK = "https://www.zaproxy.org/docs/desktop/";
    private static final String ZAP_IN_TEN_LINK = "https://www.alldaydevops.com/zap-in-ten";

    // Lazily built in getContentPanel(); null until first requested.
    private JPanel contentPanel;
    // Bottom filler label, re-added by replacePadding() at row paddingY.
    private JLabel lowerPadding;
    // Grid row at which the bottom padding is placed.
    private int paddingY;

    public LearnMorePanel(ExtensionQuickStart extension, QuickStartPanel qsp) {
        super(extension, qsp);
    }

    @Override
    public String getTitleKey() {
        return "quickstart.learn.panel.title";
    }

    /**
     * Builds the short description shown above the links, followed by a spacer row.
     */
    @Override
    public JPanel getDescriptionPanel() {
        JPanel panel = new QuickStartBackgroundPanel();
        panel.add(
                QuickStartHelper.getWrappedLabel("quickstart.learn.panel.message1"),
                LayoutHelper.getGBC(0, 0, 2, 1.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5)));
        panel.add(
                new JLabel(" "),
                LayoutHelper.getGBC(
                        0, 2, 2, 1.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); // Spacer
        return panel;
    }

    /**
     * Returns a label rendering the given text underlined (link-style),
     * via the UNDERLINE text attribute on a derived font.
     */
    private JLabel ulJLabel(String text) {
        JLabel label = new JLabel(text);
        Font font = label.getFont();
        Map<TextAttribute, Object> attributes = new HashMap<>(font.getAttributes());
        attributes.put(TextAttribute.UNDERLINE, TextAttribute.UNDERLINE_ON);
        label.setFont(font.deriveFont(attributes));
        return label;
    }

    /**
     * Lazily builds the links panel. Local resources (guide PDF, help add-on)
     * are only listed when actually present/readable; otherwise the equivalent
     * online link is added in the online section instead.
     */
    @Override
    public JPanel getContentPanel() {
        if (contentPanel == null) {
            contentPanel = new QuickStartBackgroundPanel();
            int formPanelY = 0;

            ExtensionHelp extHelp =
                    Control.getSingleton().getExtensionLoader().getExtension(ExtensionHelp.class);
            boolean isHelpAvailable = extHelp != null && extHelp.isHelpAvailable();

            boolean isGuideAvailable = Constant.messages.containsKey("gettingStarted.file");
            File guide = null;
            if (isGuideAvailable) {
                guide =
                        new File(
                                Constant.getZapHome()
                                        + File.separator
                                        + "lang"
                                        + File.separator
                                        + Constant.messages.getString("gettingStarted.file"));
                if (!guide.canRead()) {
                    isGuideAvailable = false;
                }
            }
            // Keep the compiler happy
            final File finalGuide = guide;

            if (isHelpAvailable || isGuideAvailable) {
                contentPanel.add(
                        new JLabel(Constant.messages.getString("quickstart.links.local")),
                        LayoutHelper.getGBC(
                                0, formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5)));
                contentPanel.add(
                        new JLabel(""),
                        LayoutHelper.getGBC(
                                2,
                                formPanelY,
                                1,
                                1.0D,
                                DisplayUtils.getScaledInsets(5, 5, 5, 5))); // Spacer

                if (isGuideAvailable) {
                    JLabel qsLabel =
                            ulJLabel(Constant.messages.getString("quickstart.link.startguide"));
                    qsLabel.setIcon(ExtensionQuickStart.PDF_DOC_ICON);
                    qsLabel.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
                    qsLabel.addMouseListener(
                            new MouseAdapter() {
                                @Override
                                public void mouseClicked(MouseEvent e) {
                                    // Open the local PDF with the platform default viewer;
                                    // warn (rather than fail silently) if that is not possible.
                                    try {
                                        Desktop.getDesktop().open(finalGuide);
                                    } catch (Exception ex) {
                                        View.getSingleton()
                                                .showWarningDialog(
                                                        LearnMorePanel.this,
                                                        Constant.messages.getString(
                                                                "quickstart.link.warning.nostartguide",
                                                                ex.getMessage()));
                                    }
                                }
                            });
                    contentPanel.add(
                            qsLabel,
                            LayoutHelper.getGBC(
                                    1,
                                    ++formPanelY,
                                    1,
                                    0.0D,
                                    DisplayUtils.getScaledInsets(5, 5, 5, 5)));
                }
                if (isHelpAvailable) {
                    JLabel helpLabel =
                            ulJLabel(Constant.messages.getString("quickstart.link.userguide"));
                    helpLabel.setIcon(ExtensionHelp.getHelpIcon());
                    helpLabel.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
                    helpLabel.addMouseListener(
                            new MouseAdapter() {
                                @Override
                                public void mouseClicked(MouseEvent e) {
                                    ExtensionHelp.showHelp();
                                }
                            });
                    contentPanel.add(
                            helpLabel,
                            LayoutHelper.getGBC(
                                    1,
                                    ++formPanelY,
                                    1,
                                    0.0D,
                                    DisplayUtils.getScaledInsets(5, 5, 5, 5)));
                }
            }

            contentPanel.add(
                    new JLabel(Constant.messages.getString("quickstart.links.online")),
                    LayoutHelper.getGBC(
                            0, ++formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5)));
            contentPanel.add(
                    new JLabel(""),
                    LayoutHelper.getGBC(
                            2,
                            formPanelY,
                            1,
                            1.0D,
                            DisplayUtils.getScaledInsets(5, 5, 5, 5))); // Spacer

            contentPanel.add(
                    getOnlineLink("quickstart.link.website", WEBSITE_LINK),
                    LayoutHelper.getGBC(
                            1, ++formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5)));
            contentPanel.add(
                    getOnlineLink("quickstart.link.zapinten", ZAP_IN_TEN_LINK),
                    LayoutHelper.getGBC(
                            1, ++formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5)));
            if (!isGuideAvailable) {
                // Link to the online version instead
                contentPanel.add(
                        getOnlineLink("quickstart.link.startguide", GETTING_STARTED_LINK),
                        LayoutHelper.getGBC(
                                1,
                                ++formPanelY,
                                1,
                                0.0D,
                                DisplayUtils.getScaledInsets(5, 5, 5, 5)));
                // TODO move link if/when we detect the add-on is installed
            }
            if (!isHelpAvailable) {
                // Link to the online version instead
                contentPanel.add(
                        getOnlineLink("quickstart.link.userguide", USER_GUIDE_LINK),
                        LayoutHelper.getGBC(
                                1,
                                ++formPanelY,
                                1,
                                0.0D,
                                DisplayUtils.getScaledInsets(5, 5, 5, 5)));
                // TODO move link if/when we detect the add-on is installed
            }
            contentPanel.add(
                    getOnlineLink("quickstart.link.usergroup", USER_GROUP_LINK),
                    LayoutHelper.getGBC(
                            1, ++formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5)));
            contentPanel.add(
                    getOnlineLink("quickstart.link.faq", FAQ_LINK),
                    LayoutHelper.getGBC(
                            1, ++formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5)));

            // Remember where the filler row goes, then (re)install it.
            paddingY = ++formPanelY;
            this.replacePadding();
        }
        return contentPanel;
    }

    /**
     * Returns an underlined label with the online-doc icon that opens the
     * given URL in the default browser when clicked.
     */
    private JLabel getOnlineLink(String key, String url) {
        JLabel label = ulJLabel(Constant.messages.getString(key));
        label.setIcon(ExtensionQuickStart.ONLINE_DOC_ICON);
        label.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
        label.addMouseListener(
                new MouseAdapter() {
                    @Override
                    public void mouseClicked(MouseEvent e) {
                        DesktopUtils.openUrlInBrowser(url);
                    }
                });
        return label;
    }

    /**
     * Re-adds the bottom padding label at the current paddingY row so the
     * links stay top-aligned (removing the previous instance first, if any).
     */
    private void replacePadding() {
        if (contentPanel != null) {
            // this may or may not be present
            if (this.lowerPadding == null) {
                lowerPadding = new JLabel("");
            } else {
                contentPanel.remove(this.lowerPadding);
            }
            contentPanel.add(
                    lowerPadding,
                    LayoutHelper.getGBC(0, paddingY, 1, 0.0D, 1.0D)); // Padding at bottom
        }
    }

    @Override
    public ImageIcon getIcon() {
        return ExtensionQuickStart.HELP_ICON;
    }

    @Override
    public JPanel getFooterPanel() {
        // No footer for this sub panel.
        return null;
    }
}
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-gwt' project.
// Copyright 2019 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//	  http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.gwt.client.app;

import de.esoco.data.element.DataElementList;
import de.esoco.data.process.ProcessState;

import de.esoco.ewt.build.ContainerBuilder;
import de.esoco.ewt.component.Container;
import de.esoco.ewt.dialog.MessageBox;
import de.esoco.ewt.style.AlignedPosition;
import de.esoco.ewt.style.StyleData;

import de.esoco.gwt.client.res.EsocoGwtResources;
import de.esoco.gwt.client.ui.PanelManager;

/********************************************************************
 * A standard root panel manager for applications with a main process. The
 * panel itself has no own UI; it inlines a {@link ProcessPanelManager} that
 * renders the application's single main process, and logs the user out when
 * that process finishes.
 *
 * @author eso
 */
public class GwtProcessAppRootPanel
	extends GwtApplicationPanelManager<Container, GwtApplicationPanelManager<?, ?>>
{
	//~ Instance fields --------------------------------------------------------

	// Cleared on dispose(); returned by getUserData().
	private DataElementList rUserData;

	// Manager of the currently displayed process; null when no process shown.
	private ProcessPanelManager aProcessPanel;

	//~ Constructors -----------------------------------------------------------

	/***************************************
	 * Creates a new instance.
	 */
	public GwtProcessAppRootPanel()
	{
		super(null, EsocoGwtResources.INSTANCE.css().gaRootPanel());

		// only used for re-authentication, initial login is process-based
		setLoginMode(LoginMode.DIALOG);
	}

	//~ Methods ----------------------------------------------------------------

	/***************************************
	 * {@inheritDoc}
	 *
	 * <p>Shows the message as an error notification; nDisplayTime is not used
	 * by this implementation.</p>
	 */
	@Override
	public void displayMessage(String sMessage, int nDisplayTime)
	{
		MessageBox.showNotification(
			getContainer().getView(),
			"$tiErrorMessage",
			sMessage,
			MessageBox.ICON_ERROR);
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	public void dispose()
	{
		rUserData = null;

		removeApplicationPanel();
		super.dispose();
	}

	/***************************************
	 * Returns the process panel manager that is used to display the root
	 * process.
	 *
	 * @return The process panel manager
	 */
	public ProcessPanelManager getProcessPanel()
	{
		return aProcessPanel;
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	public void updateUI()
	{
		if (aProcessPanel != null)
		{
			aProcessPanel.updateUI();
		}
	}

	/***************************************
	 * {@inheritDoc}
	 *
	 * <p>Starts with a (non-reauthenticating) login instead of building any
	 * components directly.</p>
	 */
	@Override
	protected void addComponents()
	{
		login(false);
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	@SuppressWarnings("unchecked")
	protected ContainerBuilder<Container> createContainer(
		ContainerBuilder<?> rBuilder,
		StyleData			rStyleData)
	{
		// as the root panel only displays a process and therefore has no
		// own UI just return parent builder to inline the process panel in
		// the main application view
		return (ContainerBuilder<Container>) rBuilder;
	}

	/***************************************
	 * Creates the process panel that will be used to render the application
	 * process. Can be overridden to return a different instance than the
	 * default implementation {@link ProcessPanelManager}.
	 *
	 * @param  rProcessState The current process state
	 *
	 * @return A new process panel manager instance
	 */
	protected ProcessPanelManager createProcessPanel(ProcessState rProcessState)
	{
		ProcessPanelManager aProcessPanelManager =
			new ProcessPanelManager(this, rProcessState.getName(), false, true);

		aProcessPanelManager.setDisableOnInteraction(true);

		return aProcessPanelManager;
	}

	/***************************************
	 * {@inheritDoc}
	 *
	 * <p>A finished process is forwarded to {@link #processFinished(PanelManager,
	 * ProcessState)}; otherwise a new process panel is built and given the
	 * state to render.</p>
	 */
	@Override
	protected void displayProcess(ProcessState rProcessState)
	{
		if (rProcessState.isFinished())
		{
			processFinished(null, rProcessState);
		}
		else
		{
			aProcessPanel = createProcessPanel(rProcessState);

			aProcessPanel.buildIn(this, AlignedPosition.CENTER);
			aProcessPanel.handleCommandResult(rProcessState);
		}
	}

	/***************************************
	 * Overridden to check whether process panels are currently open.
	 *
	 * @see GwtApplicationPanelManager#getCloseWarning()
	 */
	@Override
	protected String getCloseWarning()
	{
		return aProcessPanel != null ? "$msgWindowCloseWarning" : null;
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	protected Container getProcessContainer()
	{
		return getContainer();
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	protected DataElementList getUserData()
	{
		return rUserData;
	}

	/***************************************
	 * Overridden to perform the error handling for process executions. Errors
	 * are delegated to the process panel when one is open, else displayed as
	 * a generic failure message.
	 *
	 * @see GwtApplicationPanelManager#handleError(Throwable)
	 */
	@Override
	protected void handleError(Throwable eCaught)
	{
		if (aProcessPanel != null)
		{
			aProcessPanel.handleError(eCaught);
		}
		else
		{
			displayMessage("$msgServiceCallFailed", MESSAGE_DISPLAY_TIME);
		}
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	protected void logout()
	{
		dispose();
		checkAuthentication();
	}

	/***************************************
	 * Overridden to execute the application process for login.
	 *
	 * @see GwtApplicationPanelManager#performLogin(boolean)
	 */
	@Override
	protected void performLogin(boolean bReauthenticate)
	{
		if (bReauthenticate)
		{
			super.performLogin(true);
		}
		else
		{
			executeMainApplicationProcess();
		}
	}

	/***************************************
	 * {@inheritDoc}
	 *
	 * <p>The end of the main process ends the session: logs out.</p>
	 */
	@Override
	protected void processFinished(
		PanelManager<?, ?> rPanelManager,
		ProcessState	   rProcessState)
	{
		logout();
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	protected void processUpdated(
		PanelManager<?, ?> rPanelManager,
		ProcessState	   rProcessState)
	{
		// not needed as there is only one application process
	}

	/***************************************
	 * {@inheritDoc}
	 */
	@Override
	protected void removeApplicationPanel()
	{
		if (aProcessPanel != null)
		{
			aProcessPanel.dispose();
			aProcessPanel = null;
		}

		removeAllComponents();
	}
}
/*L * Copyright Oracle Inc * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cadsr-cgmdr-nci-uk/LICENSE.txt for details. */ /* * eXist Open Source Native XML Database * Copyright (C) 2007 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * $Id$ * * @author Pierrick Brihaye <pierrick.brihaye@free.fr> */ package org.exist.indexing.spatial; import java.io.File; import java.io.StringReader; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import junit.framework.TestCase; import org.exist.EXistException; import org.exist.dom.NodeSet; import org.exist.indexing.spatial.AbstractGMLJDBCIndex.SpatialOperator; import org.exist.memtree.SAXAdapter; import org.exist.security.xacml.AccessContext; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.xmldb.IndexQueryService; import org.exist.xquery.XQuery; import org.exist.xquery.value.Sequence; import org.geotools.gml.GMLFilterDocument; import org.geotools.gml.GMLFilterGeometry; import org.geotools.gml.GMLHandlerJTS; import org.xml.sax.InputSource; import org.xml.sax.XMLReader; import 
org.xml.sax.helpers.XMLFilterImpl; import org.xmldb.api.DatabaseManager; import org.xmldb.api.base.Collection; import org.xmldb.api.base.Database; import org.xmldb.api.base.XMLDBException; import org.xmldb.api.modules.CollectionManagementService; import org.xmldb.api.modules.XMLResource; import com.vividsolutions.jts.geom.Geometry; /** * */ public class GMLIndexTest extends TestCase { private final static String FILES[] = { "port-talbot.gml" }; static File existDir; static { String existHome = System.getProperty("exist.home"); existDir = existHome==null ? new File(".") : new File(existHome); } private final static File RESOURCE_DIR_DIR = new File(existDir, "extensions/indexes/spatial/test/resources"); private static final String TEST_COLLECTION_NAME = "test-spatial-index"; private static String COLLECTION_CONFIG = "<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" + " <index>" + " <gml/>" + " </index>" + " <validation mode=\"no\"/> " + "</collection>"; String IN_MEMORY_GML = "<gml:Polygon xmlns:gml = 'http://www.opengis.net/gml' srsName='osgb:BNG'>" + " <gml:outerBoundaryIs><gml:LinearRing><gml:coordinates>" + "278515.400,187060.450 278515.150,187057.950 278516.350,187057.150 " + "278546.700,187054.000 278580.550,187050.900 278609.500,187048.100 " + "278609.750,187051.250 278574.750,187054.650 278544.950,187057.450 " + "278515.400,187060.450 " + " </gml:coordinates></gml:LinearRing></gml:outerBoundaryIs>" + "</gml:Polygon>"; String WKT_POLYGON = "POLYGON ((-3.7530493069563913 51.5695210244188, " + "-3.7526220716233705 51.569500427086325, -3.752191300029012 51.569481679670055, " + "-3.7516853221460167 51.5694586575048, -3.751687839470607 51.569430291017945, " + "-3.752106350923544 51.56944922336166, -3.752595638781826 51.5694697950237, " + "-3.753034464037513 51.56949156828257, -3.753052048201362 51.56949850020053, " + "-3.7530493069563913 51.5695210244188))"; private Database database; private Collection testCollection; private Geometry 
currentGeometry; public void testIndexDocument() { BrokerPool pool = null; DBBroker broker = null; try { for (int i = 0; i < FILES.length; i++) { XMLResource doc = (XMLResource) testCollection.createResource( FILES[i], "XMLResource" ); //Doh ! Setting a new content doesn't remove the old one if any ! if (testCollection.getResource(FILES[i]) != null) testCollection.removeResource(doc); doc.setContent(new File(RESOURCE_DIR_DIR, FILES[i])); testCollection.storeResource(doc); assertNotNull(testCollection.getResource(FILES[i])); } pool = BrokerPool.getInstance(); assertNotNull(pool); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); assertNotNull(broker); GMLHSQLIndexWorker indexWorker = (GMLHSQLIndexWorker)broker.getIndexController().getWorkerByIndexId(AbstractGMLJDBCIndex.ID); //Unplugged if (indexWorker == null) { System.out.println("No spatial index found"); } else { try { Connection conn = null; try { conn = indexWorker.acquireConnection(); for (int i = 0; i < FILES.length; i++) { XMLResource doc = (XMLResource) testCollection.getResource(FILES[i]); PreparedStatement ps = conn.prepareStatement( "SELECT * FROM " + GMLHSQLIndex.TABLE_NAME + " WHERE DOCUMENT_URI = ?;" ); ps.setString(1, testCollection.getName() + "/" + doc.getDocumentId()); ResultSet rs = ps.executeQuery(); while (rs.next()) {} int count = rs.getRow(); System.out.println(count + " geometries in the index"); ps.close(); assertTrue(count > 0); } } finally { indexWorker.releaseConnection(conn); } } catch (SQLException e) { e.printStackTrace(); fail(e.getMessage()); } } } catch (XMLDBException e) { e.printStackTrace(); fail(e.getMessage()); } catch (EXistException e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } public void testCheckIndex() { BrokerPool pool = null; DBBroker broker = null; try { pool = BrokerPool.getInstance(); assertNotNull(pool); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); AbstractGMLJDBCIndex index = 
(AbstractGMLJDBCIndex)pool.getIndexManager().getIndexById(AbstractGMLJDBCIndex.ID); //Unplugged if (index == null) System.out.println("No spatial index found"); else assertTrue(index.checkIndex(broker)); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } public void testScanIndex() { BrokerPool pool = null; DBBroker broker = null; try { pool = BrokerPool.getInstance(); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); XQuery xquery = broker.getXQueryService(); assertNotNull(xquery); Sequence seq = xquery.execute( "declare namespace gml = 'http://www.opengis.net/gml'; " + "declare function local:key-callback($term as xs:string, $data as xs:int+) as element() { " + " <entry>" + " <term>{$term}</term>" + " <frequency>{$data[1]}</frequency>" + " <documents>{$data[2]}</documents>" + " <position>{$data[3]}</position>" + " </entry> " + "}; " + //"util:index-keys(//gml:*, '', util:function('local:key-callback', 2), 1000, 'spatial-index')[entry/frequency > 1] ", "util:index-keys(//gml:*, '', util:function('local:key-callback', 2), 1000, 'spatial-index') ", null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 1); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } public void testLowLevelSearch() { GMLHandlerJTS geometryHandler = new GeometryHandler(); GMLFilterGeometry geometryFilter = new GMLFilterGeometry(geometryHandler); GMLFilterDocument handler = new GMLFilterDocument(geometryFilter); BrokerPool pool = null; DBBroker broker = null; try { pool = BrokerPool.getInstance(); assertNotNull(pool); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); assertNotNull(broker); AbstractGMLJDBCIndexWorker indexWorker = (AbstractGMLJDBCIndexWorker)broker.getIndexController().getWorkerByIndexId(AbstractGMLJDBCIndex.ID); //Unplugged if (indexWorker == null) System.out.println("No spatial index found"); else { 
SAXParserFactory factory = SAXParserFactory.newInstance(); factory.setNamespaceAware(true); InputSource src = new InputSource(new StringReader(IN_MEMORY_GML)); SAXParser parser = factory.newSAXParser(); XMLReader reader = parser.getXMLReader(); SAXAdapter adapter = new SAXAdapter(); reader.setContentHandler(handler); reader.setProperty("http://xml.org/sax/properties/lexical-handler", adapter); reader.parse(src); Geometry EPSG4326_geometry = indexWorker.transformGeometry(currentGeometry, "osgb:BNG", "EPSG:4326"); assertNotNull(EPSG4326_geometry); System.out.println(EPSG4326_geometry); NodeSet ns = indexWorker.search(broker, null, EPSG4326_geometry, SpatialOperator.EQUALS); assertTrue(ns.getLength() > 0); ns = indexWorker.search(broker, null, EPSG4326_geometry, SpatialOperator.DISJOINT); assertTrue(ns.getLength() > 0); ns = indexWorker.search(broker, null, EPSG4326_geometry, SpatialOperator.INTERSECTS); assertTrue(ns.getLength() > 0); ns = indexWorker.search(broker, null, EPSG4326_geometry, SpatialOperator.TOUCHES); //assertTrue(ns.getLength() > 0); ns = indexWorker.search(broker, null, EPSG4326_geometry, SpatialOperator.CROSSES); //assertTrue(ns.getLength() > 0); ns = indexWorker.search(broker, null, EPSG4326_geometry, SpatialOperator.WITHIN); assertTrue(ns.getLength() > 0); ns = indexWorker.search(broker, null, EPSG4326_geometry, SpatialOperator.CONTAINS); assertTrue(ns.getLength() > 0); //ns = ((GMLIndexWorker)index.getWorker()).search(broker, EPSG4326_geometry, SpatialOperator.OVERLAPS); //assertTrue(ns.getLength() > 0); } } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } public void testHighLevelSearch() { BrokerPool pool = null; DBBroker broker = null; try { pool = BrokerPool.getInstance(); assertNotNull(pool); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); XQuery xquery = broker.getXQueryService(); assertNotNull(xquery); String query = "import module namespace 
spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:equals(//gml:*, //gml:Point[gml:coordinates[. = '278697.450,187740.900']])"; Sequence seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:disjoint(//gml:*, //gml:Point[gml:coordinates[. = '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:intersects(//gml:*, //gml:Point[gml:coordinates[. = '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:touches(//gml:*, //gml:Point[gml:coordinates[. = '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); //assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:crosses(//gml:*, //gml:Point[gml:coordinates[. 
= '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); //assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:within(//gml:*, //gml:Point[gml:coordinates[. = '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:contains(//gml:*, //gml:Point[gml:coordinates[. = '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:overlaps(//gml:*, //gml:Point[gml:coordinates[. 
= '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); //assertTrue(seq.getItemCount() > 0); //Tests with empty sequences query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:equals(//gml:*, ())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:overlaps((), //gml:Point[gml:coordinates[. = '278697.450,187740.900']])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); //In-memory test query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:equals(//gml:*, " + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } public void testGeometricProperties() { BrokerPool pool = null; DBBroker broker = null; try { pool = BrokerPool.getInstance(); assertNotNull(pool); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); XQuery xquery = broker.getXQueryService(); assertNotNull(xquery); String query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getWKT(//gml:Polygon[1])"; Sequence seq = 
xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getWKB(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMinX(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMaxX(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMinY(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMaxY(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 
'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getCentroidX(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getCentroidY(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getArea(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326WKT(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326WKB(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + 
"spatial:getEPSG4326MinX(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326MaxX(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326MinY(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326MaxY(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326CentroidX(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326CentroidY(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); 
assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326Area(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getSRS(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getGeometryType(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:isClosed(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:isSimple(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 
'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:isValid(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); //Tests with empty sequences query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getWKT(())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getArea(())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); //In-memory tests query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getWKT(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getWKB(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMinX(" + 
IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMaxX(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMinY(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getMaxY(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getCentroidX(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getCentroidY(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module 
namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getArea(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326WKT(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326WKB(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326MinX(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326MaxX(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + 
"declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326MinY(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326MaxY(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326CentroidX(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326CentroidY(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getEPSG4326Area(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getSRS(" + IN_MEMORY_GML + ")"; seq = 
xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getGeometryType(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:isClosed(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:isSimple(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:isValid(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } public void testGMLProducers() { BrokerPool pool = null; DBBroker broker = null; try { pool = BrokerPool.getInstance(); assertNotNull(pool); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); XQuery xquery = broker.getXQueryService(); assertNotNull(xquery); String query = "import module 
namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:transform(//gml:Polygon[1], 'EPSG:4326')"; Sequence seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getWKT(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:buffer(//gml:Polygon[1], 100)"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:buffer(//gml:Polygon[1], 100, 1)"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getBbox(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml 
= 'http://www.opengis.net/gml'; " + "spatial:convexHull(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:boundary(//gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:intersection(//gml:Polygon[1], //gml:Polygon[2])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:union(//gml:Polygon[1], //gml:Polygon[2])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:difference(//gml:Polygon[1], //gml:Polygon[2])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:symetricDifference(//gml:Polygon[1], //gml:Polygon[2])"; seq = 
xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); //Tests with empty sequences query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:transform((), 'EPSG:4326')"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getWKT(())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:buffer((), 100)"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:buffer((), 100, 1)"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getBbox(())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 
'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:convexHull(())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:boundary(())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:union((), ())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:union(//gml:Polygon[1], ())"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 1); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:union((), //gml:Polygon[1])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 1); //In-memory tests query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:transform(" + IN_MEMORY_GML + ", 'EPSG:4326')"; seq = 
xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:buffer(" + IN_MEMORY_GML + ", 100)"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:buffer(" + IN_MEMORY_GML + ", 100, 1)"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:getBbox(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:convexHull(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:boundary(" + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace 
spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:intersection(" + IN_MEMORY_GML + ", //gml:Polygon[2])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:union(" + IN_MEMORY_GML + ", //gml:Polygon[2])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:difference(" + IN_MEMORY_GML + ", //gml:Polygon[2])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:symetricDifference(" + IN_MEMORY_GML + ", //gml:Polygon[2])"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:intersection(//gml:Polygon[1]," + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 
'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:union(//gml:Polygon[1]," + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:difference(//gml:Polygon[1]," + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "spatial:symetricDifference(//gml:Polygon[1]," + IN_MEMORY_GML + ")"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } public void testUpdate() { BrokerPool pool = null; DBBroker broker = null; try { pool = BrokerPool.getInstance(); assertNotNull(pool); broker = pool.get(org.exist.security.SecurityManager.SYSTEM_USER); XQuery xquery = broker.getXQueryService(); assertNotNull(xquery); String query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "(# exist:force-index-use #) { " + "spatial:getArea(//gml:Polygon[1]) " + "}"; Sequence seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 1); String area1 = seq.toString(); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 
'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "update value //gml:Polygon[1]/gml:outerBoundaryIs/gml:LinearRing/gml:coordinates " + "(: strip decimals :) " + "with fn:replace(//gml:Polygon[1], '(\\d+).(\\d+)', '$1')"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 0); query = "import module namespace spatial='http://exist-db.org/xquery/spatial' " + "at 'java:org.exist.examples.indexing.spatial.module.SpatialModule'; " + "declare namespace gml = 'http://www.opengis.net/gml'; " + "(# exist:force-index-use #) { " + "spatial:getArea(//gml:Polygon[1])" + "}"; seq = xquery.execute(query, null, AccessContext.TEST); assertNotNull(seq); assertTrue(seq.getItemCount() == 1); String area2 = seq.toString(); assertFalse(area1.equals(area2)); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { pool.release(broker); } } protected void setUp() { try { // initialize driver Class cl = Class.forName("org.exist.xmldb.DatabaseImpl"); database = (Database) cl.newInstance(); database.setProperty("create-database", "true"); DatabaseManager.registerDatabase(database); Collection root = DatabaseManager.getCollection("xmldb:exist://" + DBBroker.ROOT_COLLECTION, "admin", null); CollectionManagementService service = (CollectionManagementService) root.getService( "CollectionManagementService", "1.0"); testCollection = root.getChildCollection(TEST_COLLECTION_NAME); if (testCollection == null) { testCollection = service.createCollection(TEST_COLLECTION_NAME); assertNotNull(testCollection); IndexQueryService idxConf = (IndexQueryService) testCollection.getService("IndexQueryService", "1.0"); idxConf.configureCollection(COLLECTION_CONFIG); } } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch 
(XMLDBException e) { e.printStackTrace(); } } protected void tearDown() { try { DatabaseManager.deregisterDatabase(database); } catch (XMLDBException e) { e.printStackTrace(); } } private class GeometryHandler extends XMLFilterImpl implements GMLHandlerJTS { public void geometry(Geometry geometry) { currentGeometry = geometry; } } }
/* * The MIT License (MIT) * * Copyright (c) 2020 Ziver Koc * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package zutil; import java.util.*; /** * This is a utility class that will generate timestamps from a Cron formatted String. * <br> * A cron string consists of 5 to 6 sections separated by a space: * <ol> * <li>Minute in Hour</li> * <li>Hour in a Day</li> * <li>Day of Month</li> * <li>Month</li> * <li>Day of Week</li> * <li>Year</li> * </ol> * Each section is defined by a number or by special characters: * <ul> * <li>*: any value, wildcard</li> * <li>,: separator for multiple expressions, e.g. 3,4,5</li> * <li>-: defines a range, e.g. 3-6</li> * <li>&#47;: step values, e.g. *&#47;10 (every tenth)</li> * <li>?: sets value as start time, when the cron was initialized. 
NOT SUPPORTED</li> * </ul> * <br> * Examples (from Cron Format Specification): * <ul> * <li>"* * * * * *": Each minute</li> * <li>"0 0 * * * *": Daily at midnight</li> * <li>"* * * 1,2,3 * *": Each minute in January, February or March</li> * </ul> * <br> * Note that this class will only calculate the next cron up to 50 years in the future and not more. * * @see <a href="http://www.nncron.ru/help/EN/working/cron-format.htm">Cron Format Specification</a> * @see <a href="https://crontab.guru/">Cron calculator</a> * @see <a href="http://stackoverflow.com/a/322058/833746">Stackoverflow implementation reference</a> */ public class CronTimer implements Iterator<Long>, Iterable<Long>{ private TimeZone timeZone; private int[] minutes; private int[] hours; private int[] dayOfMonths; private int[] months; private int[] dayOfWeeks; private int[] years; /** * A Constructor that takes a String containing 5 (or 6 for extended) individual fields in Cron format */ public CronTimer(String cron) { String[] arr = cron.split("\\s"); if (arr.length < 5 || arr.length > 6) throw new IllegalArgumentException( "String must contain between 5-6 fields, but got(" + arr.length + " fields): " + cron); init(arr[0], arr[1], arr[2], arr[3], arr[4], (arr.length>5 ? 
arr[5]: "*")); } /** * A Constructor that takes separate Strings for each field */ public CronTimer(String minute, String hour, String dayOfMonth, String monthOfYear, String dayOfWeek) { this(minute, hour, dayOfMonth, monthOfYear, dayOfWeek, "*"); } /** * A Constructor that takes separate Strings for each field with an extended year field */ public CronTimer(String minute, String hour, String dayOfMonth, String monthOfYear, String dayOfWeek, String year) { init(minute, hour, dayOfMonth, monthOfYear, dayOfWeek, year); } private void init(String minute, String hour, String dayOfMonth, String monthOfYear, String dayOfWeek, String year) { minutes = ArrayUtil.toIntArray(getRange(minute, 0, 59)); hours = ArrayUtil.toIntArray(getRange(hour, 0, 23)); dayOfMonths = ArrayUtil.toIntArray(getRange(dayOfMonth, 1, 31)); months = ArrayUtil.toIntArray(getRange(monthOfYear, 1, 12)); dayOfWeeks = ArrayUtil.toIntArray(getRange(dayOfWeek, 1, 7)); years = ArrayUtil.toIntArray(getRange(year, 1970, Calendar.getInstance().get(Calendar.YEAR)+30)); } protected static List<Integer> getRange(String str, int from, int to) { if (str == null || str.isEmpty()) return Collections.emptyList(); List<Integer> list = new LinkedList<>(); String[] commaArr = str.split(","); if (commaArr.length > 1) { for (String section : commaArr) list.addAll(getRange(section, from, to)); } else { String[] divisionArr = str.split("/", 2); if (divisionArr.length == 2) { float divider = Integer.parseInt(divisionArr[1]); for (Integer i : getRange(divisionArr[0], from, to)) { if (i % divider == 0) list.add(i); } } else { String[] rangeArr; if (str.equals("*")) rangeArr = new String[]{"" +from, "" +to}; else rangeArr = str.split("-", 2); if (rangeArr.length == 2) { int rangeFrom = Integer.parseInt(rangeArr[0]); int rangeTo = Integer.parseInt(rangeArr[1]); if (from > rangeFrom || rangeTo > to) throw new IllegalArgumentException("Invalid range " + rangeFrom + "-" + rangeTo + " must be between: " + from + "-" + to); for (int i 
= rangeFrom; i <= rangeTo; ++i) list.add(i); } else { int value = Integer.parseInt(str); if (from > value || value > to) throw new IllegalArgumentException("Valid values are between " + from + "-" + to + " but got: " + value); list.add(value); } } } return list; } /** * Set the TimeZone that should be used by the cron algorithm */ public void setTimeZone(TimeZone timeZone) { this.timeZone = timeZone; } @Override public boolean hasNext() { return true; } @Override public Iterator<Long> iterator() { return this; } @Override public void remove() { throw new UnsupportedOperationException(); } /** * @return the next timestamp that triggers this cron timer from now, * -1 if there is no more future trigger points. */ @Override public Long next() { return next(System.currentTimeMillis()); } /** * @param fromTimestamp the timestamp offset to check the trigger from. Should be in MS * @return the next timestamp that triggers this cron timer from the given timestamp, * -1 if there is no more future trigger points. 
*/ public Long next(long fromTimestamp) { Calendar cal = getCalendar(fromTimestamp); cal.set(Calendar.MILLISECOND, 0); cal.set(Calendar.SECOND, 0); cal.add(Calendar.MINUTE, 1); // skipp current time while (true) { int index; int year = cal.get(Calendar.YEAR); index = Arrays.binarySearch(years, year); if (index < 0) { // index not found in array if (Math.abs(index) > years.length) return -1L; // We have reach the limit no more years left else cal.set(Calendar.YEAR, years[Math.abs(index + 1)]); cal.set(Calendar.MONTH, months[0] - 1); cal.set(Calendar.DAY_OF_MONTH, dayOfMonths[0]); cal.set(Calendar.HOUR_OF_DAY, hours[0]); cal.set(Calendar.MINUTE, minutes[0]); continue; } int month = cal.get(Calendar.MONTH); // month ids are between 0-11 :( index = Arrays.binarySearch(months, month + 1); if (index < 0) { // index not found in array if (Math.abs(index) > months.length) { cal.set(Calendar.MONTH, months[0] - 1); cal.add(Calendar.YEAR, 1); } else cal.set(Calendar.MONTH, months[Math.abs(index + 1)] - 1); cal.set(Calendar.DAY_OF_MONTH, dayOfMonths[0]); cal.set(Calendar.HOUR_OF_DAY, hours[0]); cal.set(Calendar.MINUTE, minutes[0]); continue; } int day = cal.get(Calendar.DAY_OF_MONTH); index = Arrays.binarySearch(dayOfMonths, day); if (index < 0) { // index not found in array if (Math.abs(index) > dayOfMonths.length || // check if month have that many days in it dayOfMonths[Math.abs(index + 1)] > cal.getActualMaximum(Calendar.DAY_OF_MONTH)) { cal.set(Calendar.DAY_OF_MONTH, dayOfMonths[0]); cal.add(Calendar.MONTH, 1); } else cal.set(Calendar.DAY_OF_MONTH, dayOfMonths[Math.abs(index + 1)]); cal.set(Calendar.HOUR_OF_DAY, hours[0]); cal.set(Calendar.MINUTE, minutes[0]); continue; } // Calendar DAY_OF_WEEK is weird so we need to convert it to a logical number int dayOfWeek = getDayOfWeekID(cal.get(Calendar.DAY_OF_WEEK)); index = Arrays.binarySearch(dayOfWeeks, dayOfWeek); if (index < 0) { // index not found in array if (Math.abs(index) > dayOfWeeks.length) { 
cal.set(Calendar.DAY_OF_WEEK, getDayOfWeekEnum(dayOfWeeks[0])); cal.add(Calendar.WEEK_OF_YEAR, 1); } else cal.set(Calendar.DAY_OF_WEEK, getDayOfWeekEnum(dayOfWeeks[Math.abs(index + 1)])); cal.set(Calendar.HOUR_OF_DAY, hours[0]); cal.set(Calendar.MINUTE, minutes[0]); continue; } int hour = cal.get(Calendar.HOUR_OF_DAY); index = Arrays.binarySearch(hours, hour); if (index < 0) { // not found in array if (Math.abs(index) > hours.length) { cal.set(Calendar.HOUR_OF_DAY, hours[0]); cal.add(Calendar.DAY_OF_MONTH, 1); } else cal.set(Calendar.HOUR_OF_DAY, hours[Math.abs(index + 1)]); cal.set(Calendar.MINUTE, minutes[0]); continue; } int minute = cal.get(Calendar.MINUTE); index = Arrays.binarySearch(minutes, minute); if (index < 0) { // not found in array if (Math.abs(index) > minutes.length) { cal.set(Calendar.MINUTE, minutes[0]); cal.add(Calendar.HOUR_OF_DAY, 1); } else cal.set(Calendar.MINUTE, minutes[Math.abs(index + 1)]); continue; } // If we reach the end that means that we got match for all parameters break; } return cal.getTimeInMillis(); } protected Calendar getCalendar(long timestamp) { Calendar cal = Calendar.getInstance(); if (timeZone != null) cal.setTimeZone(timeZone); cal.setTimeInMillis(timestamp); return cal; } /** * Converts Calendar DAY_OF_WEEK enum to id starting from 1 (Monday) to 7 (Sunday) */ private int getDayOfWeekID(int calDayOfWeek) { switch (calDayOfWeek) { case Calendar.MONDAY: return 1; case Calendar.TUESDAY: return 2; case Calendar.WEDNESDAY: return 3; case Calendar.THURSDAY: return 4; case Calendar.FRIDAY: return 5; case Calendar.SATURDAY: return 6; case Calendar.SUNDAY: return 7; } return -1; } private int getDayOfWeekEnum(int dayId) { switch (dayId) { case 1: return Calendar.MONDAY; case 2: return Calendar.TUESDAY; case 3: return Calendar.WEDNESDAY; case 4: return Calendar.THURSDAY; case 5: return Calendar.FRIDAY; case 6: return Calendar.SATURDAY; case 7: return Calendar.SUNDAY; } return -1; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.felix.dm.test.bundle.annotation.aspect;

import org.apache.felix.dm.DependencyManager;
import org.apache.felix.dm.annotation.api.AspectService;
import org.apache.felix.dm.annotation.api.Component;
import org.apache.felix.dm.annotation.api.Destroy;
import org.apache.felix.dm.annotation.api.Init;
import org.apache.felix.dm.annotation.api.Inject;
import org.apache.felix.dm.annotation.api.ServiceDependency;
import org.apache.felix.dm.annotation.api.Stop;
import org.apache.felix.dm.test.bundle.annotation.sequencer.Sequencer;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;

/**
 * Test fixture that builds a chain of three aspects around a {@link ServiceInterface}
 * provider. Aspects are ordered by ranking, highest first, so an invocation flows
 * ServiceAspect3 (30) -> ServiceAspect2 (20) -> ServiceAspect1 (10) -> ServiceProvider;
 * the Sequencer step numbers (2,3,4,6) record that order. Steps are driven externally
 * through the Sequencer service each class depends on.
 */
public class AspectChainTest {
    /** Service contract decorated by the aspect chain. */
    public interface ServiceInterface {
        public void invoke(Runnable run);
    }

    /** The original (non-aspect) service; last in the invocation chain (step 6). */
    @Component
    public static class ServiceProvider implements ServiceInterface {
        @ServiceDependency(filter="(name=AspectChainTest.ServiceProvider)")
        protected Sequencer m_sequencer;

        // Injected by reflection.
        protected ServiceRegistration m_sr;

        @Init
        void init() {
            System.out.println("ServiceProvider.init");
        }

        @Destroy
        void destroy() {
            System.out.println("ServiceProvider.destroy");
        }

        public void invoke(Runnable run) {
            run.run();
            m_sequencer.step(6);
        }
    }

    /**
     * Middle aspect (ranking 20, step 3). Also verifies that @Inject auto-configuration
     * only populates annotated fields, never the plain *NotInjected twins.
     */
    @AspectService(ranking = 20)
    public static class ServiceAspect2 implements ServiceInterface {
        @ServiceDependency(filter="(name=AspectChainTest.ServiceAspect2)")
        protected Sequencer m_sequencer;

        // Injected by reflection.
        private volatile ServiceInterface m_parentService;

        // Check auto config injections
        @Inject
        BundleContext m_bc;
        BundleContext m_bcNotInjected;

        @Inject
        DependencyManager m_dm;
        DependencyManager m_dmNotInjected;

        @Inject
        org.apache.felix.dm.Component m_component;
        org.apache.felix.dm.Component m_componentNotInjected;

        @Init
        void init() {
            System.out.println("ServiceAspect2.init");
        }

        @Destroy
        void destroy() {
            System.out.println("ServiceAspect2.destroy");
        }

        public void invoke(Runnable run) {
            checkInjectedFields();
            m_sequencer.step(3);
            m_parentService.invoke(run); // delegate down the chain
        }

        // Reports a failure through the sequencer if any @Inject field is missing
        // or any non-annotated twin field was (wrongly) populated.
        private void checkInjectedFields() {
            if (m_bc == null) {
                m_sequencer.throwable(new Exception("Bundle Context not injected"));
                return;
            }
            if (m_bcNotInjected != null) {
                m_sequencer.throwable(new Exception("Bundle Context must not be injected"));
                return;
            }
            if (m_dm == null) {
                m_sequencer.throwable(new Exception("DependencyManager not injected"));
                return;
            }
            if (m_dmNotInjected != null) {
                m_sequencer.throwable(new Exception("DependencyManager must not be injected"));
                return;
            }
            if (m_component == null) {
                m_sequencer.throwable(new Exception("Component not injected"));
                return;
            }
            if (m_componentNotInjected != null) {
                m_sequencer.throwable(new Exception("Component must not be injected"));
                return;
            }
        }
    }

    /** Outermost aspect (ranking 30, step 2); parent injected via the "add" callback. */
    @AspectService(ranking = 30, added="add")
    public static class ServiceAspect3 implements ServiceInterface {
        @ServiceDependency(filter="(name=AspectChainTest.ServiceAspect3)")
        protected Sequencer m_sequencer;

        // Injected using add callback.
        private volatile ServiceInterface m_parentService;

        @Init
        void init() {
            System.out.println("ServiceAspect3.init");
        }

        @Destroy
        void destroy() {
            System.out.println("ServiceAspect3.destroy");
        }

        void add(ServiceInterface si) {
            m_parentService = si;
        }

        public void invoke(Runnable run) {
            m_sequencer.step(2);
            m_parentService.invoke(run);
        }
    }

    /**
     * Innermost aspect (ranking 10, step 4); also records its shutdown lifecycle:
     * step 7 on @Stop and step 8 when its parent service is removed.
     */
    @AspectService(ranking = 10, added="added", removed="removed")
    public static class ServiceAspect1 implements ServiceInterface {
        @ServiceDependency(filter="(name=AspectChainTest.ServiceAspect1)")
        protected Sequencer m_sequencer;

        // Injected by reflection.
        private volatile ServiceInterface m_parentService;

        @Init
        void init() {
            System.out.println("ServiceAspect1.init");
        }

        @Destroy
        void destroy() {
            System.out.println("ServiceAspect1.destroy");
        }

        void added(ServiceInterface si) {
            m_parentService = si;
        }

        @Stop
        void stop() {
            m_sequencer.step(7);
        }

        void removed(ServiceInterface si) {
            m_sequencer.step(8);
        }

        public void invoke(Runnable run) {
            m_sequencer.step(4);
            m_parentService.invoke(run);
        }
    }

    /**
     * Consumer that triggers the whole chain from its own thread: waits for step 1,
     * then invokes the (aspect-decorated) service with a Runnable firing step 5.
     */
    @Component
    public static class ServiceConsumer implements Runnable {
        @ServiceDependency(filter = "(name=AspectChainTest.ServiceConsumer)")
        protected Sequencer m_sequencer;

        @ServiceDependency
        private volatile ServiceInterface m_service;

        private Thread m_thread;

        @Init
        public void init() {
            m_thread = new Thread(this, "ServiceConsumer");
            m_thread.start();
        }

        public void run() {
            m_sequencer.waitForStep(1, 2000);
            m_service.invoke(new Runnable() {
                public void run() {
                    m_sequencer.step(5);
                }
            });
        }

        @Destroy
        void destroy() {
            m_thread.interrupt();
            try {
                m_thread.join();
            } catch (InterruptedException e) {
                // intentionally ignored: we are shutting down anyway
            }
        }
    }
}
package de.pascaldierich.watchdog.ui.fragments;

import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.Switch;
import android.widget.Toast;

import java.util.ArrayList;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnCheckedChanged;
import butterknife.OnTextChanged;
import de.pascaldierich.domain.executor.impl.ThreadExecutor;
import de.pascaldierich.model.SupportedNetworks;
import de.pascaldierich.model.domainmodels.Observable;
import de.pascaldierich.model.domainmodels.Site;
import de.pascaldierich.threading.MainThreadImpl;
import de.pascaldierich.watchdog.R;
import de.pascaldierich.watchdog.presenter.fragments.setobservable.Presenter;
import de.pascaldierich.watchdog.presenter.fragments.setobservable.SetObservablePresenter;
import de.pascaldierich.watchdog.ui.activities.SetObservableActivity;
import hugo.weaving.DebugLog;

/**
 * Fragment for creating/editing an Observable and its associated network Sites.
 * Acts as the View for {@link SetObservablePresenter} and as the
 * {@link SetObservableActivity.SetObservableCallback} towards its hosting Activity.
 */
public class SetObservableFragment extends Fragment
        implements SetObservablePresenter.View, SetObservableActivity.SetObservableCallback {

    /* Instantiation */
    private Presenter mPresenter;
    private View mRootView;

    /* Layout */
    @Nullable
    @BindView(R.id.setObservable_progressBar)
    ProgressBar mProgressBar;
    @Nullable
    @BindView(R.id.setObservable_textName)
    EditText mTextName;

    // YouTube
    @Nullable
    @BindView(R.id.setObservable_textYouTubeName)
    EditText mTextYouTube;
    @Nullable
    @BindView(R.id.switch_YouTube)
    Switch mSwitchYouTube;

    @Override
    public void onCreate(Bundle savedInstance) {
        super.onCreate(savedInstance);
        setHasOptionsMenu(false);

        mPresenter = Presenter.onCreate(ThreadExecutor.getInstance(), MainThreadImpl.getInstance(),
                savedInstance, this);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstance) {
        mRootView = inflater.inflate(R.layout.fragment_set_observable, container, false);
        ButterKnife.bind(this, mRootView);
        return mRootView;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (savedInstanceState != null) {
            // FIX: previously this wrote the resource *key* strings into mTextName (twice,
            // clobbering the display name with the YouTube key) instead of restoring the
            // values saved in onSaveInstanceState. Read the saved values back under the
            // same keys, and route the YouTube name to the YouTube field.
            mTextName.setText(savedInstanceState.getString(
                    getContext().getString(R.string.instanceState_displayName)));
            mTextYouTube.setText(savedInstanceState.getString(
                    getContext().getString(R.string.instanceState_youtubeName)));
        }
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Persist current user input so it survives configuration changes.
        outState.putString(getContext().getString(R.string.instanceState_displayName),
                mTextName.getText().toString());
        outState.putString(getContext().getString(R.string.instanceState_youtubeName),
                mTextYouTube.getText().toString());
    }

    /* Initial Methods */

    @Override
    public void onStart() {
        super.onStart();
        mPresenter.onStart();
    }

    /**
     * returns the args the fragment got created with.
     * Args contains (if != null) an Observable which should get edited.
     * <b>Note:</b> key := R.string.parcelable_observable
     * <p/>
     *
     * @return args, Bundle: if Bundle != null it contains an Observable-Object
     */
    @Nullable
    @Override
    public Bundle getArgumentsBundle() {
        return this.getArguments();
    }

    @Override
    public Context getContext() {
        return getActivity();
    }

    /** Shows a generic error toast. */
    @Override
    public void showError() {
        Toast.makeText(getContext(), getContext().getString(R.string.error_unknown),
                Toast.LENGTH_SHORT).show();
    }

    /* View Methods for Presenter */

    /**
     * changes the visibility of the progressBar (toggles VISIBLE <-> INVISIBLE)
     */
    @Override
    public void changeProgressVisibility() {
        if (mProgressBar.getVisibility() == View.VISIBLE) {
            mProgressBar.setVisibility(View.INVISIBLE);
        } else {
            mProgressBar.setVisibility(View.VISIBLE);
        }
    }

    /**
     * show given Observable if exists
     * <p/>
     *
     * @param observable, Observable: existing Observable from model
     */
    @Override
    public void setObservable(@NonNull Observable observable) {
        mTextName.setText(observable.getDisplayName());
        if (observable.getGotThumbnail()) {
            // TODO(review): thumbnail handling not implemented yet — intentionally empty?
        }
    }

    /**
     * show loaded Sites if Observable set
     * <p/>
     *
     * @param sites, ArrayList<Sites>: related Sites for set Observable
     */
    @Override
    public void setSites(@NonNull ArrayList<Site> sites) {
        for (int i = 0; i < sites.size(); i++) {
            switch (sites.get(i).getSite()) {
                case SupportedNetworks.YOUTUBE: {
                    mTextYouTube.setText(getContext().getString(
                            R.string.setObservableActivity_editText_site_defined));
                }
                // [...] <-- further networks go here once supported
            }
        }
    }

    @Override
    public void showErrorMessage(String errorMessage) {
        Toast.makeText(getContext(), errorMessage, Toast.LENGTH_SHORT).show();
    }

    /**
     * returns the Text set in the EditText for specific Network
     * <p/>
     *
     * @param network, String: Name of the Network for which the EditText works
     * @return user-input, String: Channel name
     * @throws NullPointerException, if not usable input
     */
    @Override
    public String getTextNetwork(String network) throws NullPointerException {
        switch (network) {
            case SupportedNetworks.YOUTUBE: {
                return mTextYouTube.getText().toString();
            }
            // [..]
        }
        throw new NullPointerException();
    }

    /**
     * change font-color of TextField for specific Site
     * <p/>
     *
     * @param site,  String: Site for which TextField the color get changed
     * @param color, int: colorCode
     */
    @Override
    public void setTextColor(@SupportedNetworks String site, int color) {
        switch (site) {
            case SupportedNetworks.YOUTUBE: {
                mTextYouTube.setTextColor(color);
                break;
            }
            // [..] <- when there will be more supported networks
        }
    }

    /**
     * return the Text from Name-Field
     * <p/>
     *
     * @return user-input, String: Name of Observable
     * @throws NullPointerException, if not usable input
     */
    @Override
    public String getTextDisplayName() throws NullPointerException {
        return mTextName.getText().toString();
    }

    @Override
    public void setTextDisplayName(String displayName) {
        mTextName.setText(displayName);
    }

    @Override
    public void setTextYouTubeName(String youTubeName) {
        mTextYouTube.setText(youTubeName);
    }

    /* View-Listener */

    /**
     * gets called by state-change for switch YouTube
     * calls Presenter to check for id
     * <p/>
     *
     * @param checked, boolean
     */
    @DebugLog
    @OnCheckedChanged(R.id.switch_YouTube)
    void onSwitchChangedYouTube(boolean checked) {
        mPresenter.onStateChanged(SupportedNetworks.YOUTUBE, checked);
    }

    /**
     * Debounced text listener for the YouTube channel field: warns at the 20 char
     * limit and, after a 1s delay, forwards the input to the presenter if the
     * YouTube switch is enabled.
     */
    @DebugLog
    @OnTextChanged(value = R.id.setObservable_textYouTubeName,
            callback = OnTextChanged.Callback.AFTER_TEXT_CHANGED)
    void onAfterTextChangedYouTube(final Editable newText) {
        if (newText.length() == 20) {
            Toast.makeText(getContext(),
                    getContext().getString(R.string.setObservableActivity_editText_charErrorMessage),
                    Toast.LENGTH_SHORT).show();
        }
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                if (mSwitchYouTube.isChecked())
                    mPresenter.onInputChanged(SupportedNetworks.YOUTUBE, newText.toString());
            }
        }, 1000);
    }

    /* SetObservable Callback from SetObservableActivity.class */

    /**
     * Checks if user-input is correct
     * <p/>
     *
     * @return boolean, true -> input workable
     */
    @Override
    public boolean inputVerified() {
        return mPresenter.inputVerified();
    }

    /**
     * returns the Observable set by User
     * <p/>
     *
     * @return Observable, null
     */
    @Override
    public Observable getObservableCallback() {
        return mPresenter.getObservableCallback();
    }

    /**
     * return the Site-Collection
     * <p/>
     *
     * @return Site-Collection
     */
    @Override
    public ArrayList<Site> getSitesCallback() {
        return mPresenter.getSitesCallback();
    }
}
package com.perpetumobile.bit.config;

import java.io.BufferedInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.StringTokenizer;
import java.util.Properties;

import com.perpetumobile.bit.android.util.FileUtil;
import com.perpetumobile.bit.util.Util;

/**
 * Property store loaded from one or more config files. A file may pull in
 * further files through the {@value #CONFIG_KEY_INCLUDE} key (a
 * {@value #CONFIG_DELIMITER_DEFAULT}-separated list); includes are resolved
 * recursively with cycle protection.
 *
 * @author Zoran Dukic
 */
public class ConfigProperties {
    /** Separator used between file names in an include list. */
    final static public String CONFIG_DELIMITER_DEFAULT = ";";
    /** Property key whose value names additional config files to load. */
    final static public String CONFIG_KEY_INCLUDE = "$include";

    protected Properties properties = null;

    public ConfigProperties() {
        properties = new Properties();
    }

    /**
     * Create ConfigProperties and load properties from the given file.
     *
     * @param fileName config file name (may itself be a delimiter-separated list)
     * @throws ConfigPropertiesException if a file is missing or cannot be read
     */
    public ConfigProperties(String fileName) throws ConfigPropertiesException {
        properties = new Properties();
        loadProperties(properties, fileName);
    }

    // Central choke point for raising load errors; subclasses may override.
    protected void throwConfigPropertiesException(String msg) throws ConfigPropertiesException {
        throw new ConfigPropertiesException(msg);
    }

    /**
     * Opens the named config file below the versioned config directory.
     */
    protected InputStream getInputStream(String fileName) throws IOException {
        String filePath = Config.CONFIG_PROPERTIES_VERSION_DIRECTORY_PATH() + "/" + fileName;
        return new BufferedInputStream(FileUtil.getFileInputStream(filePath));
    }

    /** Merges all properties from another instance into this one. */
    protected void putAll(ConfigProperties src) {
        properties.putAll(src.properties);
    }

    protected void loadProperties(Properties result, String fileList) throws ConfigPropertiesException {
        loadProperties(result, fileList, null);
    }

    /**
     * Loads every file in the delimiter-separated list into {@code result},
     * then recursively loads any {@value #CONFIG_KEY_INCLUDE} files.
     *
     * @param includeArrayList files already loaded; guards against include cycles
     * @throws ConfigPropertiesException if a file is missing or fails to load
     */
    protected void loadProperties(Properties result, String fileList, ArrayList<String> includeArrayList)
            throws ConfigPropertiesException {
        if (includeArrayList == null) {
            includeArrayList = new ArrayList<String>();
        }
        StringTokenizer includeStringTokenizer = new StringTokenizer(fileList, CONFIG_DELIMITER_DEFAULT);
        while (includeStringTokenizer.hasMoreTokens()) {
            String configFileName = includeStringTokenizer.nextToken();
            if (!includeArrayList.contains(configFileName)) {
                includeArrayList.add(configFileName);
                // FIX: try-with-resources — the stream was previously leaked when
                // Properties.load threw, since close() was not in a finally block.
                try (InputStream in = getInputStream(configFileName)) {
                    if (in != null) {
                        result.load(in);
                    } else {
                        throwConfigPropertiesException("Config file '" + configFileName + "' not found.");
                    }
                } catch (IOException e) {
                    // FIX: include the underlying cause; it was silently dropped before.
                    throwConfigPropertiesException("Config.loadProperties exception for '"
                            + configFileName + "': " + e);
                }
                String include = result.getProperty(CONFIG_KEY_INCLUDE);
                if (include != null && !include.equals("")) {
                    // Clear the key first so nested loads don't re-trigger this include.
                    result.setProperty(CONFIG_KEY_INCLUDE, "");
                    loadProperties(result, include, includeArrayList);
                }
            }
        }
    }

    /**
     * Returns a property value for a given key.
     */
    private String getPropertyImpl(String key) {
        return properties.getProperty(key);
    }

    /**
     * Returns a property value for a given key.
     * Returns the defaultValue if the key is not specified.
     */
    public String getProperty(String key, String defaultValue) {
        String result = getPropertyImpl(key);
        if (result == null) {
            result = defaultValue;
        }
        return result;
    }

    /**
     * @see #getProperty(String, String)
     */
    public int getIntProperty(String key, int defaultValue) {
        return Util.toInt(getPropertyImpl(key), defaultValue);
    }

    /**
     * @see #getProperty(String, String)
     */
    public long getLongProperty(String key, long defaultValue) {
        return Util.toLong(getPropertyImpl(key), defaultValue);
    }

    /**
     * @see #getProperty(String, String)
     */
    public float getFloatProperty(String key, float defaultValue) {
        return Util.toFloat(getPropertyImpl(key), defaultValue);
    }

    /**
     * @see #getProperty(String, String)
     */
    public double getDoubleProperty(String key, double defaultValue) {
        return Util.toDouble(getPropertyImpl(key), defaultValue);
    }

    /**
     * @see #getProperty(String, String)
     */
    public boolean getBooleanProperty(String key, boolean defaultValue) {
        return Util.toBoolean(getPropertyImpl(key), defaultValue);
    }

    /**
     * Returns a property value for a given classKey.key.
     */
    private String getClassPropertyImpl(String classKey, String key) {
        // FIX: StringBuilder instead of StringBuffer — no shared mutation, so the
        // synchronized StringBuffer buys nothing here.
        StringBuilder buf = new StringBuilder(classKey);
        buf.append('.');
        buf.append(key);
        return getPropertyImpl(buf.toString());
    }

    /**
     * Returns a property value for a given classKey.key.
     * Returns a property value for a given key if classKey.key is not specified.
     * Returns the defaultValue otherwise.
     */
    public String getClassProperty(String classKey, String key, String defaultValue) {
        String strResult = getClassPropertyImpl(classKey, key);
        if (strResult == null) {
            strResult = getProperty(key, defaultValue);
        }
        return strResult;
    }

    /**
     * @see #getClassProperty(String, String, String)
     */
    public int getIntClassProperty(String classKey, String key, int defaultValue) {
        String strResult = getClassPropertyImpl(classKey, key);
        int result = defaultValue;
        if (strResult != null) {
            result = Util.toInt(strResult, defaultValue);
        } else {
            result = getIntProperty(key, defaultValue);
        }
        return result;
    }

    /**
     * @see #getClassProperty(String, String, String)
     */
    public long getLongClassProperty(String classKey, String key, long defaultValue) {
        String strResult = getClassPropertyImpl(classKey, key);
        long result = defaultValue;
        if (strResult != null) {
            result = Util.toLong(strResult, defaultValue);
        } else {
            result = getLongProperty(key, defaultValue);
        }
        return result;
    }

    /**
     * @see #getClassProperty(String, String, String)
     */
    public float getFloatClassProperty(String classKey, String key, float defaultValue) {
        String strResult = getClassPropertyImpl(classKey, key);
        float result = defaultValue;
        if (strResult != null) {
            result = Util.toFloat(strResult, defaultValue);
        } else {
            result = getFloatProperty(key, defaultValue);
        }
        return result;
    }

    /**
     * @see #getClassProperty(String, String, String)
     */
    public double getDoubleClassProperty(String classKey, String key, double defaultValue) {
        String strResult = getClassPropertyImpl(classKey, key);
        double result = defaultValue;
        if (strResult != null) {
            result = Util.toDouble(strResult, defaultValue);
        } else {
            result = getDoubleProperty(key, defaultValue);
        }
        return result;
    }

    /**
     * @see #getClassProperty(String, String, String)
     */
    public boolean getBooleanClassProperty(String classKey, String key, boolean defaultValue) {
        String strResult = getClassPropertyImpl(classKey, key);
        boolean result = defaultValue;
        if (strResult != null) {
            result = Util.toBoolean(strResult, defaultValue);
        } else {
            result = getBooleanProperty(key, defaultValue);
        }
        return result;
    }
}
/*
 * Copyright (c) 2012, 2014, Credit Suisse (Anatole Tresch), Werner Keil and others by the @author tag.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.javamoney.moneta.convert;

import java.io.InputStream;
import java.math.MathContext;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import javax.money.CurrencyUnit;
import javax.money.Monetary;
import javax.money.MonetaryException;
import javax.money.convert.ConversionContextBuilder;
import javax.money.convert.ConversionQuery;
import javax.money.convert.CurrencyConversionException;
import javax.money.convert.ExchangeRate;
import javax.money.convert.ProviderContext;
import javax.money.convert.RateType;
import javax.money.spi.Bootstrap;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.javamoney.moneta.spi.AbstractRateProvider;
import org.javamoney.moneta.spi.DefaultNumberValue;
import org.javamoney.moneta.spi.LoaderService;

/**
 * Base to all Europe Central Bank implementation.
 *
 * All ECB feeds quote rates against EUR, so any base->term conversion that does
 * not involve EUR is derived as base -> EUR -> term (see createExchangeRate).
 *
 * NOTE(review): {@code LocalDate} and {@code ExchangeRateBuilder} are referenced
 * without a visible import here — presumably same-package types or an import lost
 * from this view; confirm before moving this file.
 *
 * @author otaviojava
 */
abstract class ECBAbstractRateProvider extends AbstractRateProvider implements LoaderService.LoaderListener {

    static final String BASE_CURRENCY_CODE = "EUR";

    /**
     * Base currency of the loaded rates is always EUR.
     */
    public static final CurrencyUnit BASE_CURRENCY = Monetary.getCurrency(BASE_CURRENCY_CODE);

    /**
     * Historic exchange rates, rate timestamp as UTC long.
     */
    // Keyed by quote date; inner map is keyed by currency code, values quoted vs EUR.
    protected final Map<LocalDate, Map<String, ExchangeRate>> rates = new ConcurrentHashMap<>();

    /**
     * Parser factory.
     */
    private final SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();

    // Human-readable status of the last load attempt; read when reporting failures.
    protected volatile String loadState;

    // Released (counted down) once the first data set has been loaded; getExchangeRate
    // blocks on this up to 30s.
    protected volatile CountDownLatch loadLock = new CountDownLatch(1);

    public ECBAbstractRateProvider(ProviderContext context) {
        super(context);
        saxParserFactory.setNamespaceAware(false);
        saxParserFactory.setValidating(false);
        // NOTE(review): getDataId() is an overridable method invoked from the
        // constructor — subclasses must return a constant-safe value here.
        LoaderService loader = Bootstrap.getService(LoaderService.class);
        loader.addLoaderListener(this, getDataId());
        loader.loadDataAsync(getDataId());
    }

    /** Identifier of the ECB data feed this provider consumes. */
    public abstract String getDataId();

    /**
     * LoaderService callback: parses the newly delivered XML stream into {@link #rates}
     * and releases {@link #loadLock} so waiting conversions can proceed.
     */
    @Override
    public void newDataLoaded(String data, InputStream is) {
        try {
            final int oldSize = this.rates.size();
            SAXParser parser = saxParserFactory.newSAXParser();
            parser.parse(is, new ECBRateReader(rates, getContext()));
            int newSize = this.rates.size();
            loadState = "Loaded " + getDataId() + " exchange rates for days:" + (newSize - oldSize);
            LOGGER.info(loadState);
            loadLock.countDown();
        } catch (Exception e) {
            loadState = "Last Error during data load: " + e.getMessage();
            throw new IllegalArgumentException("Failed to load ECB data provided.", e);
        }
    }

    /**
     * Resolves the candidate quote dates for a query: the requested date (from a
     * Calendar/GregorianCalendar attribute, else the newest loaded date) plus the
     * three preceding days as fallbacks for weekends/holidays.
     */
    protected LocalDate[] getTargetDates(ConversionQuery query) {
        if (rates.isEmpty()) {
            return new LocalDate[0];
        }
        LocalDate date;
        Calendar cal = query.get(GregorianCalendar.class);
        if (cal == null) {
            cal = query.get(Calendar.class);
        }
        if (cal == null) {
            // No explicit date requested: use the most recent loaded quote date.
            List<LocalDate> dates = new ArrayList<>(rates.keySet());
            Collections.sort(dates);
            date = dates.get(dates.size() - 1);
        } else {
            date = LocalDate.from(cal);
        }
        return new LocalDate[]{date, date.minusDays(1), date.minusDays(2), date.minusDays(3)};
    }

    /**
     * Looks up (or derives via EUR) the exchange rate for the query. Blocks up to
     * 30s for the initial data load; returns null when no rate is available for
     * any candidate date.
     *
     * @throws MonetaryException if the initial load did not complete or the wait
     *                           was interrupted
     */
    @Override
    public ExchangeRate getExchangeRate(ConversionQuery query) {
        Objects.requireNonNull(query);
        try {
            if (loadLock.await(30, TimeUnit.SECONDS)) {
                if (rates.isEmpty()) {
                    return null;
                }
                if (!isAvailable(query)) {
                    return null;
                }
                LocalDate selectedDate = null;
                Map<String, ExchangeRate> targets = null;
                // Walk the candidate dates, newest first, until one has data.
                for (LocalDate date : getTargetDates(query)) {
                    targets = this.rates.get(date);
                    if (targets != null) {
                        selectedDate = date;
                        break;
                    }
                }
                if (targets == null) {
                    return null;
                }
                ExchangeRateBuilder builder = getBuilder(query, selectedDate);
                ExchangeRate sourceRate = targets.get(query.getBaseCurrency()
                        .getCurrencyCode());
                ExchangeRate target = targets
                        .get(query.getCurrency().getCurrencyCode());
                return createExchangeRate(query, builder, sourceRate, target);
            } else {
                // Lets wait for a successful load only once, then answer requests as data is present.
                loadLock.countDown();
                throw new MonetaryException("Failed to load currency conversion data: " + loadState);
            }
        } catch (InterruptedException e) {
            throw new MonetaryException("Failed to load currency conversion data: Load task has been interrupted.", e);
        }
    }

    /**
     * Combines the EUR-quoted rates into the requested base->term rate. Four cases:
     * EUR->EUR (identity), term==EUR (invert base rate), base==EUR (direct), and
     * the general case derived as base -> EUR -> term via two recursive lookups.
     */
    private ExchangeRate createExchangeRate(ConversionQuery query,
                                            ExchangeRateBuilder builder, ExchangeRate sourceRate,
                                            ExchangeRate target) {
        if (areBothBaseCurrencies(query)) {
            builder.setFactor(DefaultNumberValue.ONE);
            return builder.build();
        } else if (BASE_CURRENCY_CODE.equals(query.getCurrency().getCurrencyCode())) {
            if (sourceRate == null) {
                return null;
            }
            return reverse(sourceRate);
        } else if (BASE_CURRENCY_CODE.equals(query.getBaseCurrency()
                .getCurrencyCode())) {
            return target;
        } else {
            // Get Conversion base as derived rate: base -> EUR -> term
            ExchangeRate rate1 = getExchangeRate(
                    query.toBuilder().setTermCurrency(Monetary.getCurrency(BASE_CURRENCY_CODE)).build());
            ExchangeRate rate2 = getExchangeRate(
                    query.toBuilder().setBaseCurrency(Monetary.getCurrency(BASE_CURRENCY_CODE))
                            .setTermCurrency(query.getCurrency()).build());
            if (rate1 != null && rate2 != null) {
                builder.setFactor(multiply(rate1.getFactor(), rate2.getFactor()));
                builder.setRateChain(rate1, rate2);
                return builder.build();
            }
            // NOTE(review): sourceRate may be null here, which would turn this
            // CurrencyConversionException into a NullPointerException — confirm.
            throw new CurrencyConversionException(query.getBaseCurrency(),
                    query.getCurrency(), sourceRate.getContext());
        }
    }

    // True when both sides of the query are EUR (identity conversion).
    private boolean areBothBaseCurrencies(ConversionQuery query) {
        return BASE_CURRENCY_CODE.equals(query.getBaseCurrency().getCurrencyCode()) &&
                BASE_CURRENCY_CODE.equals(query.getCurrency().getCurrencyCode());
    }

    // Pre-populates a builder with a HISTORIC context carrying the quote date.
    private ExchangeRateBuilder getBuilder(ConversionQuery query, LocalDate localDate) {
        ExchangeRateBuilder builder = new ExchangeRateBuilder(
                ConversionContextBuilder.create(getContext(), RateType.HISTORIC)
                        .set(localDate).set("LocalDate", localDate.toString()).build());
        builder.setBase(query.getBaseCurrency());
        builder.setTerm(query.getCurrency());
        return builder;
    }

    // Swaps base/term and inverts the factor (1/rate) of the given rate.
    private ExchangeRate reverse(ExchangeRate rate) {
        if (rate == null) {
            throw new IllegalArgumentException("Rate null is not reversible.");
        }
        return new ExchangeRateBuilder(rate).setRate(rate).setBase(rate.getCurrency()).setTerm(rate.getBaseCurrency())
                .setFactor(divide(DefaultNumberValue.ONE, rate.getFactor(), MathContext.DECIMAL64)).build();
    }
}
/**
 * Copyright 2005-2015 Red Hat, Inc.
 * <p/>
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package io.fabric8.forge.addon.utils;

import java.util.Stack;

import javax.xml.namespace.NamespaceContext;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

/**
 * A {@link XMLStreamWriter} that do not write namespaces and support indent.
 * <p/>
 * All namespace-related calls are either suppressed entirely or rewritten with
 * empty namespace URIs before being forwarded to the wrapped writer. Indentation
 * is implemented as a small state machine (SEEN_NOTHING / SEEN_ELEMENT / SEEN_DATA)
 * so that text content is never broken up by inserted whitespace.
 * The writer can additionally skip selected attributes and records the number of
 * elements written plus the root element name for later inspection.
 */
public class JaxbNoNamespaceWriter implements XMLStreamWriter {

    // The real writer everything is forwarded to.
    private final XMLStreamWriter delegate;
    // Number of spaces per indent level; 0 disables the post-declaration newline.
    private final int indent;

    // Identity markers for the indentation state machine.
    private static final Object SEEN_NOTHING = new Object();
    private static final Object SEEN_ELEMENT = new Object();
    private static final Object SEEN_DATA = new Object();

    // Current state: one of the three markers above.
    private Object state;
    // One saved state per open element; restored when the element ends.
    private final Stack<Object> stateStack = new Stack<>();
    // Whitespace emitted once per nesting level (built from 'indent' spaces).
    // NOTE(review): stays null when indent == 0, yet doIndent() is still invoked from
    // onStartElement/onEmptyElement for nested elements — presumably this class is only
    // used with indent > 0; confirm against callers.
    private String indentStep;
    // Current element nesting depth.
    private int depth;
    // Count of start/empty elements written so far.
    private int elements;
    // Local name of the first element written (the document root), or null.
    private String rootElementName;
    // Comma separated attribute local-names to skip, or null to skip none.
    private String skipAttributes;

    /**
     * Creates a writer without indentation.
     *
     * @param delegate the real writer to forward to
     */
    public JaxbNoNamespaceWriter(XMLStreamWriter delegate) {
        this(delegate, 0);
    }

    /**
     * Creates a writer with the given indentation.
     *
     * @param delegate the real writer to forward to
     * @param indent   number of spaces per indent level (0 = no indent)
     */
    public JaxbNoNamespaceWriter(XMLStreamWriter delegate, int indent) {
        this.delegate = delegate;
        this.indent = indent;
        this.state = SEEN_NOTHING;
        if (indent > 0) {
            // pre-build the per-level indent string once
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < indent; i++) {
                sb.append(" ");
            }
            indentStep = sb.toString();
        }
    }

    // Called before a start element is forwarded: saves the current state, emits a
    // newline + indent for non-root elements and increases the nesting depth.
    private void onStartElement() throws XMLStreamException {
        this.stateStack.push(SEEN_ELEMENT);
        this.state = SEEN_NOTHING;
        if (this.depth > 0) {
            delegate.writeCharacters("\n");
        }
        this.doIndent();
        ++this.depth;
    }

    // Called before an end element is forwarded: decreases depth and, when the last
    // thing seen was a child element (not text), emits a closing newline + indent.
    private void onEndElement() throws XMLStreamException {
        --this.depth;
        if (this.state == SEEN_ELEMENT) {
            delegate.writeCharacters("\n");
            this.doIndent();
        }
        this.state = this.stateStack.pop();
    }

    // Called before an empty element is forwarded: depth is unchanged because an
    // empty element opens and closes in one call.
    private void onEmptyElement() throws XMLStreamException {
        this.state = SEEN_ELEMENT;
        if (this.depth > 0) {
            delegate.writeCharacters("\n");
        }
        this.doIndent();
    }

    // Emits indentStep once per current nesting level.
    private void doIndent() throws XMLStreamException {
        if (this.depth > 0) {
            for (int i = 0; i < this.depth; ++i) {
                delegate.writeCharacters(this.indentStep);
            }
        }
    }

    @Override
    public void writeStartElement(String localName) throws XMLStreamException {
        onStartElement();
        delegate.writeStartElement(localName);
        elements++;
        // remember the first element as the document root
        if (rootElementName == null) {
            rootElementName = localName;
        }
    }

    @Override
    public void writeStartElement(String namespaceURI, String localName) throws XMLStreamException {
        onStartElement();
        // we do not want to write namespaces
        delegate.writeStartElement("", localName);
        elements++;
        if (rootElementName == null) {
            rootElementName = localName;
        }
    }

    @Override
    public void writeStartElement(String prefix, String localName, String namespaceURI) throws XMLStreamException {
        onStartElement();
        // we do not want to write namespaces
        delegate.writeStartElement("", localName, "");
        elements++;
        if (rootElementName == null) {
            rootElementName = localName;
        }
    }

    @Override
    public void writeEmptyElement(String namespaceURI, String localName) throws XMLStreamException {
        onEmptyElement();
        // we do not want to write namespaces
        delegate.writeEmptyElement("", localName);
    }

    @Override
    public void writeEmptyElement(String prefix, String localName, String namespaceURI) throws XMLStreamException {
        onEmptyElement();
        // we do not want to write namespaces
        delegate.writeEmptyElement("", localName, "");
    }

    @Override
    public void writeEmptyElement(String localName) throws XMLStreamException {
        onEmptyElement();
        delegate.writeEmptyElement(localName);
    }

    @Override
    public void writeEndElement() throws XMLStreamException {
        onEndElement();
        delegate.writeEndElement();
    }

    @Override
    public void writeEndDocument() throws XMLStreamException {
        delegate.writeEndDocument();
    }

    @Override
    public void close() throws XMLStreamException {
        delegate.close();
    }

    @Override
    public void flush() throws XMLStreamException {
        delegate.flush();
    }

    @Override
    public void writeAttribute(String localName, String value) throws XMLStreamException {
        // honour the skip list before forwarding
        if (skipAttributes != null && isSkipAttribute(localName)) {
            return;
        }
        delegate.writeAttribute(localName, value);
    }

    @Override
    public void writeAttribute(String prefix, String namespaceURI, String localName, String value) throws XMLStreamException {
        // xsi-prefixed attributes (e.g. xsi:schemaLocation) are dropped entirely
        if ("xsi".equals(prefix)) {
            // skip xsi namespace
        } else {
            if (skipAttributes != null && isSkipAttribute(localName)) {
                return;
            }
            // we do not want to write namespaces
            delegate.writeAttribute(prefix, "", localName, value);
        }
    }

    @Override
    public void writeAttribute(String namespaceURI, String localName, String value) throws XMLStreamException {
        if (skipAttributes != null && isSkipAttribute(localName)) {
            return;
        }
        // we do not want to write namespaces
        delegate.writeAttribute("", localName, value);
    }

    @Override
    public void writeNamespace(String prefix, String namespaceURI) throws XMLStreamException {
        // we do not want to write namespaces
    }

    @Override
    public void writeDefaultNamespace(String namespaceURI) throws XMLStreamException {
        // we do not want to write namespaces
    }

    @Override
    public void writeComment(String data) throws XMLStreamException {
        delegate.writeComment(data);
    }

    @Override
    public void writeProcessingInstruction(String target) throws XMLStreamException {
        delegate.writeProcessingInstruction(target);
    }

    @Override
    public void writeProcessingInstruction(String target, String data) throws XMLStreamException {
        delegate.writeProcessingInstruction(target, data);
    }

    @Override
    public void writeCData(String data) throws XMLStreamException {
        // text content suppresses the closing-tag indentation
        state = SEEN_DATA;
        delegate.writeCData(data);
    }

    @Override
    public void writeDTD(String dtd) throws XMLStreamException {
        delegate.writeDTD(dtd);
    }

    @Override
    public void writeEntityRef(String name) throws XMLStreamException {
        delegate.writeEntityRef(name);
    }

    @Override
    public void writeStartDocument() throws XMLStreamException {
        delegate.writeStartDocument();
        // newline after the XML declaration when indenting
        if (indent > 0) {
            delegate.writeCharacters("\n");
        }
    }

    @Override
    public void writeStartDocument(String version) throws XMLStreamException {
        delegate.writeStartDocument(version);
        if (indent > 0) {
            delegate.writeCharacters("\n");
        }
    }

    @Override
    public void writeStartDocument(String encoding, String version) throws XMLStreamException {
        delegate.writeStartDocument(encoding, version);
        if (indent > 0) {
            delegate.writeCharacters("\n");
        }
    }

    @Override
    public void writeCharacters(String text) throws XMLStreamException {
        // text content suppresses the closing-tag indentation
        state = SEEN_DATA;
        delegate.writeCharacters(text);
    }

    @Override
    public void writeCharacters(char[] text, int start, int len) throws XMLStreamException {
        state = SEEN_DATA;
        delegate.writeCharacters(text, start, len);
    }

    @Override
    public String getPrefix(String uri) throws XMLStreamException {
        return delegate.getPrefix(uri);
    }

    @Override
    public void setPrefix(String prefix, String uri) throws XMLStreamException {
        delegate.setPrefix(prefix, uri);
    }

    @Override
    public void setDefaultNamespace(String uri) throws XMLStreamException {
        delegate.setDefaultNamespace(uri);
    }

    @Override
    public void setNamespaceContext(NamespaceContext context) throws XMLStreamException {
        delegate.setNamespaceContext(context);
    }

    @Override
    public NamespaceContext getNamespaceContext() {
        return delegate.getNamespaceContext();
    }

    @Override
    public Object getProperty(String name) throws IllegalArgumentException {
        return delegate.getProperty(name);
    }

    public String getSkipAttributes() {
        return skipAttributes;
    }

    /**
     * Sets a comma separated list of attribute names to skip writing.
     */
    public void setSkipAttributes(String skipAttributes) {
        this.skipAttributes = skipAttributes;
    }

    /**
     * Number of elements in the XML
     */
    public int getElements() {
        return elements;
    }

    /**
     * The root element name
     */
    public String getRootElementName() {
        return rootElementName;
    }

    // true when localName matches one of the comma separated names in skipAttributes
    // (callers must ensure skipAttributes != null before invoking).
    private boolean isSkipAttribute(String localName) {
        for (String att : skipAttributes.split(",")) {
            if (localName.equals(att)) {
                return true;
            }
        }
        return false;
    }
}
package org.javasimon.jdbc4;

import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.*;
import java.util.Calendar;
import java.util.Map;

/**
 * Simon JDBC proxy callable statement implementation class.
 * <p/>
 * Every {@link CallableStatement}-specific method below is a plain pass-through to the
 * wrapped real statement; any monitoring is presumably handled by the
 * {@link SimonPreparedStatement} superclass — see its implementation for details.
 *
 * @author Radovan Sninsky
 * @author <a href="mailto:virgo47@gmail.com">Richard "Virgo" Richter</a>
 * @since 2.4
 */
public final class SimonCallableStatement extends SimonPreparedStatement implements CallableStatement {
	// The wrapped real callable statement all calls are delegated to
	// (same object as the one passed to super, kept here with its concrete type).
	private CallableStatement stmt;

	/**
	 * Class constructor, initializes Simons (lifespan, active) related to statement.
	 *
	 * @param conn database connection (simon impl.)
	 * @param stmt real callable statement
	 * @param sql sql command
	 * @param prefix hierarchy prefix for statement Simons
	 * @param sqlNormalizerFactory factory to map queries to Simon keys
	 */
	SimonCallableStatement(Connection conn, CallableStatement stmt, String sql, String prefix, SqlNormalizerFactory sqlNormalizerFactory) {
		super(conn, stmt, sql, prefix, sqlNormalizerFactory);
		this.stmt = stmt;
	}

	// --- Pure delegation below: each method forwards to the wrapped statement unchanged. ---

	@Override public void registerOutParameter(int i, int i1) throws SQLException { stmt.registerOutParameter(i, i1); }
	@Override public void registerOutParameter(int i, int i1, int i2) throws SQLException { stmt.registerOutParameter(i, i1, i2); }
	@Override public boolean wasNull() throws SQLException { return stmt.wasNull(); }
	@Override public String getString(int i) throws SQLException { return stmt.getString(i); }
	@Override public boolean getBoolean(int i) throws SQLException { return stmt.getBoolean(i); }
	@Override public byte getByte(int i) throws SQLException { return stmt.getByte(i); }
	@Override public short getShort(int i) throws SQLException { return stmt.getShort(i); }
	@Override public int getInt(int i) throws SQLException { return stmt.getInt(i); }
	@Override public long getLong(int i) throws SQLException { return stmt.getLong(i); }
	@Override public float getFloat(int i) throws SQLException { return stmt.getFloat(i); }
	@Override public double getDouble(int i) throws SQLException { return stmt.getDouble(i); }
	// Deprecated in JDBC; kept because the interface requires it.
	@Deprecated @Override public BigDecimal getBigDecimal(int i, int i1) throws SQLException { return stmt.getBigDecimal(i, i1); }
	@Override public byte[] getBytes(int i) throws SQLException { return stmt.getBytes(i); }
	@Override public Date getDate(int i) throws SQLException { return stmt.getDate(i); }
	@Override public Time getTime(int i) throws SQLException { return stmt.getTime(i); }
	@Override public Timestamp getTimestamp(int i) throws SQLException { return stmt.getTimestamp(i); }
	@Override public Object getObject(int i) throws SQLException { return stmt.getObject(i); }
	@Override public BigDecimal getBigDecimal(int i) throws SQLException { return stmt.getBigDecimal(i); }
	@Override public Object getObject(int i, Map<String, Class<?>> stringClassMap) throws SQLException { return stmt.getObject(i, stringClassMap); }
	@Override public Ref getRef(int i) throws SQLException { return stmt.getRef(i); }
	@Override public Blob getBlob(int i) throws SQLException { return stmt.getBlob(i); }
	@Override public Clob getClob(int i) throws SQLException { return stmt.getClob(i); }
	@Override public Array getArray(int i) throws SQLException { return stmt.getArray(i); }
	@Override public Date getDate(int i, Calendar calendar) throws SQLException { return stmt.getDate(i, calendar); }
	@Override public Time getTime(int i, Calendar calendar) throws SQLException { return stmt.getTime(i, calendar); }
	@Override public Timestamp getTimestamp(int i, Calendar calendar) throws SQLException { return stmt.getTimestamp(i, calendar); }
	@Override public void registerOutParameter(int i, int i1, String s) throws SQLException { stmt.registerOutParameter(i, i1, s); }
	@Override public void registerOutParameter(String s, int i) throws SQLException { stmt.registerOutParameter(s, i); }
	@Override public void registerOutParameter(String s, int i, int i1) throws SQLException { stmt.registerOutParameter(s, i, i1); }
	@Override public void registerOutParameter(String s, int i, String s1) throws SQLException { stmt.registerOutParameter(s, i, s1); }
	@Override public URL getURL(int i) throws SQLException { return stmt.getURL(i); }
	@Override public void setURL(String s, URL url) throws SQLException { stmt.setURL(s, url); }
	@Override public void setNull(String s, int i) throws SQLException { stmt.setNull(s, i); }
	@Override public void setBoolean(String s, boolean b) throws SQLException { stmt.setBoolean(s, b); }
	@Override public void setByte(String s, byte b) throws SQLException { stmt.setByte(s, b); }
	@Override public void setShort(String s, short i) throws SQLException { stmt.setShort(s, i); }
	@Override public void setInt(String s, int i) throws SQLException { stmt.setInt(s, i); }
	@Override public void setLong(String s, long l) throws SQLException { stmt.setLong(s, l); }
	@Override public void setFloat(String s, float v) throws SQLException { stmt.setFloat(s, v); }
	@Override public void setDouble(String s, double v) throws SQLException { stmt.setDouble(s, v); }
	@Override public void setBigDecimal(String s, BigDecimal bigDecimal) throws SQLException { stmt.setBigDecimal(s, bigDecimal); }
	@Override public void setString(String s, String s1) throws SQLException { stmt.setString(s, s1); }
	@Override public void setBytes(String s, byte[] bytes) throws SQLException { stmt.setBytes(s, bytes); }
	@Override public void setDate(String s, Date date) throws SQLException { stmt.setDate(s, date); }
	@Override public void setTime(String s, Time time) throws SQLException { stmt.setTime(s, time); }
	@Override public void setTimestamp(String s, Timestamp timestamp) throws SQLException { stmt.setTimestamp(s, timestamp); }
	@Override public void setAsciiStream(String s, InputStream inputStream, int i) throws SQLException { stmt.setAsciiStream(s, inputStream, i); }
	@Override public void setBinaryStream(String s, InputStream inputStream, int i) throws SQLException { stmt.setBinaryStream(s, inputStream, i); }
	@Override public void setObject(String s, Object o, int i, int i1) throws SQLException { stmt.setObject(s, o, i, i1); }
	@Override public void setObject(String s, Object o, int i) throws SQLException { stmt.setObject(s, o, i); }
	@Override public void setObject(String s, Object o) throws SQLException { stmt.setObject(s, o); }
	@Override public void setCharacterStream(String s, Reader reader, int i) throws SQLException { stmt.setCharacterStream(s, reader, i); }
	@Override public void setDate(String s, Date date, Calendar calendar) throws SQLException { stmt.setDate(s, date, calendar); }
	@Override public void setTime(String s, Time time, Calendar calendar) throws SQLException { stmt.setTime(s, time, calendar); }
	@Override public void setTimestamp(String s, Timestamp timestamp, Calendar calendar) throws SQLException { stmt.setTimestamp(s, timestamp, calendar); }
	@Override public void setNull(String s, int i, String s1) throws SQLException { stmt.setNull(s, i, s1); }
	@Override public String getString(String s) throws SQLException { return stmt.getString(s); }
	@Override public boolean getBoolean(String s) throws SQLException { return stmt.getBoolean(s); }
	@Override public byte getByte(String s) throws SQLException { return stmt.getByte(s); }
	@Override public short getShort(String s) throws SQLException { return stmt.getShort(s); }
	@Override public int getInt(String s) throws SQLException { return stmt.getInt(s); }
	@Override public long getLong(String s) throws SQLException { return stmt.getLong(s); }
	@Override public float getFloat(String s) throws SQLException { return stmt.getFloat(s); }
	@Override public double getDouble(String s) throws SQLException { return stmt.getDouble(s); }
	@Override public byte[] getBytes(String s) throws SQLException { return stmt.getBytes(s); }
	@Override public Date getDate(String s) throws SQLException { return stmt.getDate(s); }
	@Override public Time getTime(String s) throws SQLException { return stmt.getTime(s); }
	@Override public Timestamp getTimestamp(String s) throws SQLException { return stmt.getTimestamp(s); }
	@Override public Object getObject(String s) throws SQLException { return stmt.getObject(s); }
	@Override public BigDecimal getBigDecimal(String s) throws SQLException { return stmt.getBigDecimal(s); }
	@Override public Object getObject(String s, Map<String, Class<?>> stringClassMap) throws SQLException { return stmt.getObject(s, stringClassMap); }
	@Override public Ref getRef(String s) throws SQLException { return stmt.getRef(s); }
	@Override public Blob getBlob(String s) throws SQLException { return stmt.getBlob(s); }
	@Override public Clob getClob(String s) throws SQLException { return stmt.getClob(s); }
	@Override public Array getArray(String s) throws SQLException { return stmt.getArray(s); }
	@Override public Date getDate(String s, Calendar calendar) throws SQLException { return stmt.getDate(s, calendar); }
	@Override public Time getTime(String s, Calendar calendar) throws SQLException { return stmt.getTime(s, calendar); }
	@Override public Timestamp getTimestamp(String s, Calendar calendar) throws SQLException { return stmt.getTimestamp(s, calendar); }
	@Override public URL getURL(String s) throws SQLException { return stmt.getURL(s); }
	@Override public RowId getRowId(int i) throws SQLException { return stmt.getRowId(i); }
	@Override public RowId getRowId(String s) throws SQLException { return stmt.getRowId(s); }
	@Override public void setRowId(String s, RowId rowId) throws SQLException { stmt.setRowId(s, rowId); }
	@Override public void setNString(String s, String s1) throws SQLException { stmt.setNString(s, s1); }
	@Override public void setNCharacterStream(String s, Reader reader, long l) throws SQLException { stmt.setNCharacterStream(s, reader, l); }
	@Override public void setNClob(String s, NClob nClob) throws SQLException { stmt.setNClob(s, nClob); }
	@Override public void setClob(String s, Reader reader, long l) throws SQLException { stmt.setClob(s, reader, l); }
	@Override public void setBlob(String s, InputStream inputStream, long l) throws SQLException { stmt.setBlob(s, inputStream, l); }
	@Override public void setNClob(String s, Reader reader, long l) throws SQLException { stmt.setNClob(s, reader, l); }
	@Override public NClob getNClob(int i) throws SQLException { return stmt.getNClob(i); }
	@Override public NClob getNClob(String s) throws SQLException { return stmt.getNClob(s); }
	@Override public void setSQLXML(String s, SQLXML sqlxml) throws SQLException { stmt.setSQLXML(s, sqlxml); }
	@Override public SQLXML getSQLXML(int i) throws SQLException { return stmt.getSQLXML(i); }
	@Override public SQLXML getSQLXML(String s) throws SQLException { return stmt.getSQLXML(s); }
	@Override public String getNString(int i) throws SQLException { return stmt.getNString(i); }
	@Override public String getNString(String s) throws SQLException { return stmt.getNString(s); }
	@Override public Reader getNCharacterStream(int i) throws SQLException { return stmt.getNCharacterStream(i); }
	@Override public Reader getNCharacterStream(String s) throws SQLException { return stmt.getNCharacterStream(s); }
	@Override public Reader getCharacterStream(int i) throws SQLException { return stmt.getCharacterStream(i); }
	@Override public Reader getCharacterStream(String s) throws SQLException { return stmt.getCharacterStream(s); }
	@Override public void setBlob(String s, Blob blob) throws SQLException { stmt.setBlob(s, blob); }
	@Override public void setClob(String s, Clob clob) throws SQLException { stmt.setClob(s, clob); }
	@Override public void setAsciiStream(String s, InputStream inputStream, long l) throws SQLException { stmt.setAsciiStream(s, inputStream, l); }
	@Override public void setBinaryStream(String s, InputStream inputStream, long l) throws SQLException { stmt.setBinaryStream(s, inputStream, l); }
	@Override public void setCharacterStream(String s, Reader reader, long l) throws SQLException { stmt.setCharacterStream(s, reader, l); }
	@Override public void setAsciiStream(String s, InputStream inputStream) throws SQLException { stmt.setAsciiStream(s, inputStream); }
	@Override public void setBinaryStream(String s, InputStream inputStream) throws SQLException { stmt.setBinaryStream(s, inputStream); }
	@Override public void setCharacterStream(String s, Reader reader) throws SQLException { stmt.setCharacterStream(s, reader); }
	@Override public void setNCharacterStream(String s, Reader reader) throws SQLException { stmt.setNCharacterStream(s, reader); }
	@Override public void setClob(String s, Reader reader) throws SQLException { stmt.setClob(s, reader); }
	@Override public void setBlob(String s, InputStream inputStream) throws SQLException { stmt.setBlob(s, inputStream); }
	@Override public void setNClob(String s, Reader reader) throws SQLException { stmt.setNClob(s, reader); }
	@Override public <T> T getObject(int parameterIndex, Class<T> type) throws SQLException { return stmt.getObject(parameterIndex, type); }
	@Override public <T> T getObject(String parameterName, Class<T> type) throws SQLException { return stmt.getObject(parameterName, type); }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.spi.discovery.tcp;

import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import org.apache.ignite.Ignite;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.managers.GridManagerAdapter;
import org.apache.ignite.internal.managers.discovery.GridDiscoveryManager;
import org.apache.ignite.internal.util.GridConcurrentHashSet;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.util.worker.GridWorker;
import org.apache.ignite.spi.IgniteSpiOperationTimeoutException;
import org.apache.ignite.spi.IgniteSpiOperationTimeoutHelper;
import org.apache.ignite.spi.communication.CommunicationSpi;
import org.apache.ignite.spi.communication.tcp.internal.GridNioServerWrapper;
import org.apache.ignite.spi.discovery.DiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;

import static org.apache.ignite.events.EventType.EVT_NODE_FAILED;
import static org.apache.ignite.events.EventType.EVT_NODE_SEGMENTED;

/**
 * Tests discovery behaviour when individual nodes experience simulated network failures
 * (broken sockets, suspended network threads).
 */
public class TcpDiscoveryNetworkIssuesTest extends GridCommonAbstractTest {
    /** Fixed discovery ports so a node's port can be derived from its instance name. */
    private static final int NODE_0_PORT = 47500;

    /** */
    private static final int NODE_1_PORT = 47501;

    /** */
    private static final int NODE_2_PORT = 47502;

    /** */
    private static final int NODE_3_PORT = 47503;

    /** */
    private static final int NODE_4_PORT = 47504;

    /** */
    private static final int NODE_5_PORT = 47505;

    /** Instance names encode the discovery port after the '-' (see getConfiguration). */
    private static final String NODE_0_NAME = "node00-" + NODE_0_PORT;

    /** */
    private static final String NODE_1_NAME = "node01-" + NODE_1_PORT;

    /** */
    private static final String NODE_2_NAME = "node02-" + NODE_2_PORT;

    /** */
    private static final String NODE_3_NAME = "node03-" + NODE_3_PORT;

    /** */
    private static final String NODE_4_NAME = "node04-" + NODE_4_PORT;

    /** */
    private static final String NODE_5_NAME = "node05-" + NODE_5_PORT;

    /** Shared IP finder for all test nodes. */
    private TcpDiscoveryVmIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** When non-null, the next started node uses this (usually fault-injecting) SPI. */
    private TcpDiscoverySpi specialSpi;

    /** When true, the local discovery port is parsed from the instance name suffix. */
    private boolean usePortFromNodeName;

    /** Connection recovery timeout to apply; negative means "leave the SPI default". */
    private int connectionRecoveryTimeout = -1;

    /** Failure detection timeout applied to every started node. */
    private int failureDetectionTimeout = 2_000;

    /** Indices of nodes that fired EVT_NODE_SEGMENTED (filled by listeners). */
    private final GridConcurrentHashSet<Integer> segmentedNodes = new GridConcurrentHashSet<>();

    /** {@inheritDoc} */
    @Override protected void afterTest() {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        // Inject the fault-simulating SPI if one was prepared for this node.
        TcpDiscoverySpi spi = (specialSpi != null) ? specialSpi : new TcpDiscoverySpi();

        if (usePortFromNodeName)
            spi.setLocalPort(Integer.parseInt(igniteInstanceName.split("-")[1]));

        spi.setIpFinder(ipFinder);

        if (connectionRecoveryTimeout >= 0)
            spi.setConnectionRecoveryTimeout(connectionRecoveryTimeout);

        cfg.setFailureDetectionTimeout(failureDetectionTimeout);

        cfg.setDiscoverySpi(spi);

        cfg.setIncludeEventTypes(EVT_NODE_SEGMENTED);

        cfg.setSystemWorkerBlockedTimeout(10_000);

        return cfg;
    }

    /**
     * Test scenario: some node (let's call it IllN) in the middle experience network issues: its previous cannot see it,
     * and the node cannot see two nodes in front of it.
     *
     * IllN is considered failed by other nodes in topology but IllN manages to connect to topology and
     * sends StatusCheckMessage with non-empty failedNodes collection.
     *
     * Expected outcome: IllN eventually segments from topology, other healthy nodes work normally.
     *
     * @see <a href="https://issues.apache.org/jira/browse/IGNITE-11364">IGNITE-11364</a>
     * for more details about actual bug.
     */
    @Test
    public void testServerGetsSegmentedOnBecomeDangling() throws Exception {
        usePortFromNodeName = true;

        connectionRecoveryTimeout = 0;

        AtomicBoolean networkBroken = new AtomicBoolean(false);

        IgniteEx ig0 = startGrid(NODE_0_NAME);

        IgniteEx ig1 = startGrid(NODE_1_NAME);

        // Node 2 gets a SPI that, once networkBroken is set, cannot read receipts from node 3
        // and cannot open sockets to node 4 — making it the "ill" node in the ring.
        specialSpi = new TcpDiscoverySpi() {
            @Override protected int readReceipt(Socket sock, long timeout) throws IOException {
                if (networkBroken.get() && sock.getPort() == NODE_3_PORT)
                    throw new SocketTimeoutException("Read timed out");

                return super.readReceipt(sock, timeout);
            }

            @Override protected Socket openSocket(InetSocketAddress sockAddr,
                IgniteSpiOperationTimeoutHelper timeoutHelper) throws IOException, IgniteSpiOperationTimeoutException {
                if (networkBroken.get() && sockAddr.getPort() == NODE_4_PORT)
                    throw new SocketTimeoutException("connect timed out");

                return super.openSocket(sockAddr, timeoutHelper);
            }
        };

        Ignite ig2 = startGrid(NODE_2_NAME);

        AtomicBoolean illNodeSegmented = new AtomicBoolean(false);

        ig2.events().localListen((e) -> {
            illNodeSegmented.set(true);

            return false;
        }, EVT_NODE_SEGMENTED);

        specialSpi = null;

        startGrid(NODE_3_NAME);

        startGrid(NODE_4_NAME);

        startGrid(NODE_5_NAME);

        // Cut node 1's link to its next ring neighbour, then enable node 2's socket faults.
        breakDiscoConnectionToNext(ig1);

        networkBroken.set(true);

        GridTestUtils.waitForCondition(illNodeSegmented::get, 10_000);

        assertTrue(illNodeSegmented.get());

        // The healthy coordinator must not retain the ill node in its failedNodes collection.
        Map failedNodes = getFailedNodesCollection(ig0);

        assertTrue(String.format("Failed nodes is expected to be empty, but contains %s nodes.", failedNodes.size()),
            failedNodes.isEmpty());
    }

    /**
     * Ensures sequential failure of two nodes has no additional issues.
     */
    @Test
    public void testSequentialFailTwoNodes() throws Exception {
        simulateFailureOfTwoNodes(true);
    }

    /**
     * Ensures sequential failure of two nodes has no additional issues.
     */
    @Test
    public void testNotSequentialFailTwoNodes() throws Exception {
        simulateFailureOfTwoNodes(false);
    }

    /**
     * Suspends network threads on two nodes (adjacent in the ring or not), waits for the
     * cluster to register both failures, then verifies no healthy node got segmented.
     */
    private void simulateFailureOfTwoNodes(boolean sequentionally) throws Exception {
        failureDetectionTimeout = 1000;

        int gridCnt = 7;

        startGrids(gridCnt);

        awaitPartitionMapExchange();

        final CountDownLatch failLatch = new CountDownLatch(2);

        for (int i = 0; i < gridCnt; i++) {
            ignite(i).events().localListen(evt -> {
                failLatch.countDown();

                return true;
            }, EVT_NODE_FAILED);

            int nodeIdx = i;

            ignite(i).events().localListen(evt -> {
                segmentedNodes.add(nodeIdx);

                return true;
            }, EVT_NODE_SEGMENTED);
        }

        // Node 2 always fails; its partner is the ring neighbour (3) or a non-adjacent node (4).
        Set<Integer> failedNodes = new HashSet<>();

        failedNodes.add(2);

        if (sequentionally)
            failedNodes.add(3);
        else
            failedNodes.add(4);

        failedNodes.forEach(idx -> processNetworkThreads(ignite(idx), Thread::suspend));

        try {
            failLatch.await(10, TimeUnit.SECONDS);
        }
        finally {
            // Always resume the suspended threads so afterTest() can stop the grids cleanly.
            failedNodes.forEach(idx -> processNetworkThreads(ignite(idx), Thread::resume));
        }

        for (int i = 0; i < gridCnt; i++) {
            if (!failedNodes.contains(i))
                assertFalse(segmentedNodes.contains(i));
        }
    }

    /**
     * @param ig Ignite instance to get failedNodes collection from.
     * @return the SPI-internal failedNodes map, obtained reflectively.
     */
    private Map getFailedNodesCollection(IgniteEx ig) {
        GridDiscoveryManager disco = ig.context().discovery();

        Object spis = GridTestUtils.getFieldValue(disco, GridManagerAdapter.class, "spis");

        return GridTestUtils.getFieldValue(((Object[])spis)[0], "impl", "failedNodes");
    }

    /**
     * Breaks connectivity of passed server node to its next to simulate network failure.
     *
     * @param ig Ignite instance which connection to next node has to be broken.
     */
    private void breakDiscoConnectionToNext(IgniteEx ig) throws Exception {
        GridDiscoveryManager disco = ig.context().discovery();

        Object spis = GridTestUtils.getFieldValue(disco, GridManagerAdapter.class, "spis");

        // Closing the ring message worker's output stream severs the link to the next node.
        OutputStream out = GridTestUtils.getFieldValue(((Object[])spis)[0], "impl", "msgWorker", "out");

        out.close();
    }

    /**
     * Simulates network failure on certain node.
     */
    private void processNetworkThreads(Ignite ignite, Consumer<Thread> proc) {
        DiscoverySpi disco = ignite.configuration().getDiscoverySpi();

        ServerImpl serverImpl = U.field(disco, "impl");

        for (Thread thread : serverImpl.threads())
            proc.accept(thread);

        CommunicationSpi<?> comm = ignite.configuration().getCommunicationSpi();

        GridNioServerWrapper nioServerWrapper = U.field(comm, "nioSrvWrapper");

        for (GridWorker worker : nioServerWrapper.nio().workers())
            proc.accept(worker.runner());
    }
}
/* * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test * @bug 7113275 * @summary compatibility issue with MD2 trust anchor and old X509TrustManager * * SunJSSE does not support dynamic system properties, no way to re-use * system properties in samevm/agentvm mode. 
 * @run main/othervm MD2InTrustAnchor PKIX TLSv1.1
 * @run main/othervm MD2InTrustAnchor SunX509 TLSv1.1
 * @run main/othervm MD2InTrustAnchor PKIX TLSv1.2
 * @run main/othervm MD2InTrustAnchor SunX509 TLSv1.2
 */

import java.net.*;
import java.util.*;
import java.io.*;
import javax.net.ssl.*;
import java.security.KeyStore;
import java.security.KeyFactory;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.spec.*;
import java.security.interfaces.*;
import java.util.Base64;

/**
 * Regression test for JDK-7113275: verifies that a trust anchor whose
 * self-signature uses the (weak) MD2 digest can still be used to validate a
 * peer certificate chain, as long as the MD2-signed anchor itself is never
 * sent on the wire. A TLS server and client are run in the same VM; both
 * authenticate (client auth is required) using a certificate signed by the
 * MD2 trust anchor.
 */
public class MD2InTrustAnchor {

    /*
     * =============================================================
     * Set the various variables needed for the tests, then
     * specify what tests to run on each side.
     */

    /*
     * Should we run the client or server in a separate thread?
     * Both sides can throw exceptions, but do you have a preference
     * as to which side should be the main thread.
     */
    static boolean separateServerThread = false;

    /*
     * Certificates and key used in the test.
     */

    // The trust anchor: a self-signed CA certificate whose signature
    // algorithm is md2WithRSAEncryption.
    static String trustedCertStr =
        "-----BEGIN CERTIFICATE-----\n" +
        "MIICkjCCAfugAwIBAgIBADANBgkqhkiG9w0BAQIFADA7MQswCQYDVQQGEwJVUzEN\n" +
        "MAsGA1UEChMESmF2YTEdMBsGA1UECxMUU3VuSlNTRSBUZXN0IFNlcml2Y2UwHhcN\n" +
        "MTExMTE4MTExNDA0WhcNMzIxMDI4MTExNDA0WjA7MQswCQYDVQQGEwJVUzENMAsG\n" +
        "A1UEChMESmF2YTEdMBsGA1UECxMUU3VuSlNTRSBUZXN0IFNlcml2Y2UwgZ8wDQYJ\n" +
        "KoZIhvcNAQEBBQADgY0AMIGJAoGBAPGyB9tugUGgxtdeqe0qJEwf9x1Gy4BOi1yR\n" +
        "wzDZY4H5LquvIfQ2V3J9X1MQENVsFvkvp65ZcFcy+ObOucXUUPFcd/iw2DVb5QXA\n" +
        "ffyeVqWD56GPi8Qe37wrJO3L6fBhN9oxp/BbdRLgjU81zx8qLEyPODhPMxV4OkcA\n" +
        "SDwZTSxxAgMBAAGjgaUwgaIwHQYDVR0OBBYEFLOAtr/YrYj9H04EDLA0fd14jisF\n" +
        "MGMGA1UdIwRcMFqAFLOAtr/YrYj9H04EDLA0fd14jisFoT+kPTA7MQswCQYDVQQG\n" +
        "EwJVUzENMAsGA1UEChMESmF2YTEdMBsGA1UECxMUU3VuSlNTRSBUZXN0IFNlcml2\n" +
        "Y2WCAQAwDwYDVR0TAQH/BAUwAwEB/zALBgNVHQ8EBAMCAQYwDQYJKoZIhvcNAQEC\n" +
        "BQADgYEAr8ExpXu/FTIRiMzPm0ubqwME4lniilwQUiEOD/4DbksNjEIcUyS2hIk1\n" +
        "qsmjJz3SHBnwhxl9dhJVwk2tZLkPGW86Zn0TPVRsttK4inTgCC9GFGeqQBdrU/uf\n" +
        "lipBzXWljrfbg4N/kK8m2LabtKUMMnGysM8rN0Fx2PYm5xxGvtM=\n" +
        "-----END CERTIFICATE-----";

    // The end-entity certificate issued by the above trust anchor,
    // signed with MD5 (md5WithRSAEncryption).
    static String targetCertStr =
        "-----BEGIN CERTIFICATE-----\n" +
        "MIICeDCCAeGgAwIBAgIBAjANBgkqhkiG9w0BAQQFADA7MQswCQYDVQQGEwJVUzEN\n" +
        "MAsGA1UEChMESmF2YTEdMBsGA1UECxMUU3VuSlNTRSBUZXN0IFNlcml2Y2UwHhcN\n" +
        "MTExMTE4MTExNDA2WhcNMzEwODA1MTExNDA2WjBPMQswCQYDVQQGEwJVUzENMAsG\n" +
        "A1UEChMESmF2YTEdMBsGA1UECxMUU3VuSlNTRSBUZXN0IFNlcml2Y2UxEjAQBgNV\n" +
        "BAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAwDnm96mw\n" +
        "fXCH4bgXk1US0VcJsQVxUtGMyncAveMuzBzNzOmKZPeqyYX1Fuh4q+cuza03WTJd\n" +
        "G9nOkNr364e3Rn1aaHjCMcBmFflObnGnhhufNmIGYogJ9dJPmhUVPEVAXrMG+Ces\n" +
        "NKy2E8woGnLMrqu6yiuTClbLBPK8fWzTXrECAwEAAaN4MHYwCwYDVR0PBAQDAgPo\n" +
        "MB0GA1UdDgQWBBSdRrpocLPJXyGfDmMWJrcEf29WGDAfBgNVHSMEGDAWgBSzgLa/\n" +
        "2K2I/R9OBAywNH3deI4rBTAnBgNVHSUEIDAeBggrBgEFBQcDAQYIKwYBBQUHAwIG\n" +
        "CCsGAQUFBwMDMA0GCSqGSIb3DQEBBAUAA4GBAKJ71ZiCUykkJrCLYUxlFlhvUcr9\n" +
        "sTcOc67QdroW5f412NI15SXWDiley/JOasIiuIFPjaJBjOKoHOvTjG/snVu9wEgq\n" +
        "YNR8dPsO+NM8r79C6jO+Jx5fYAC7os2XxS75h3NX0ElJcbwIXGBJ6xRrsFh/BGYH\n" +
        "yvudOlX4BkVR0l1K\n" +
        "-----END CERTIFICATE-----";

    // Private key matching targetCertStr, in PKCS#8 format (base64,
    // MIME line breaks — decoded below with Base64.getMimeDecoder()).
    static String targetPrivateKey =
        "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMA55vepsH1wh+G4\n" +
        "F5NVEtFXCbEFcVLRjMp3AL3jLswczczpimT3qsmF9RboeKvnLs2tN1kyXRvZzpDa\n" +
        "9+uHt0Z9Wmh4wjHAZhX5Tm5xp4YbnzZiBmKICfXST5oVFTxFQF6zBvgnrDSsthPM\n" +
        "KBpyzK6rusorkwpWywTyvH1s016xAgMBAAECgYEAn9bF3oRkdDoBU0i/mcww5I+K\n" +
        "SH9tFt+WQbiojjz9ac49trkvUfu7MO1Jui2+QbrvaSkyj+HYGFOJd1wMsPXeB7ck\n" +
        "5mOIYV4uZK8jfNMSQ8v0tFEeIPp5lKdw1XnrQfSe+abo2eL5Lwso437Y4s3w37+H\n" +
        "aY3d76hR5qly+Ys+Ww0CQQDjeOoX89d/xhRqGXKjCx8ImE/dPmsI8O27cwtKrDYJ\n" +
        "6t0v/xryVIdvOYcRBvKnqEogOH7T1kI+LnWKUTJ2ehJ7AkEA2FVloPVqCehXcc7e\n" +
        "z3TDpU9w1B0JXklcV5HddYsRqp9RukN/VK4szKE7F1yoarIUtfE9Lr9082Jwyp3M\n" +
        "L11xwwJBAKsZ+Hur3x0tUY29No2Nf/pnFyvEF57SGwA0uPmiL8Ol9lpz+UDudDEl\n" +
        "hIM6Rqv12kwCMuQE9i7vo1o3WU3k5KECQEqhg1L49yD935TqiiFFpe0Ur9btQXse\n" +
        "kdXAA4d2d5zGI7q/aGD9SYU6phkUJSHR16VA2RuUfzMrpb+wmm1IrmMCQFtLoKRT\n" +
        "A5kokFb+E3Gplu29tJvCUpfwgBFRS+wmkvtiaU/tiyDcVgDO+An5DwedxxdVzqiE\n" +
        "njWHoKY3axDQ8OU=\n";

    // Password protecting the key entry in the in-memory keystore.
    static char passphrase[] = "passphrase".toCharArray();

    /*
     * Is the server ready to serve?
     */
    volatile static boolean serverReady = false;

    /*
     * Turn on SSL debugging?
     */
    static boolean debug = false;

    /*
     * Define the server side of the test.
     *
     * If the server prematurely exits, serverReady will be set to true
     * to avoid infinite hangs.
     */
    void doServerSide() throws Exception {
        SSLContext context = generateSSLContext(trustedCertStr, targetCertStr,
                targetPrivateKey);
        SSLServerSocketFactory sslssf = context.getServerSocketFactory();
        SSLServerSocket sslServerSocket =
            (SSLServerSocket)sslssf.createServerSocket(serverPort);
        // Client authentication is mandatory, so the handshake exercises
        // certificate validation in both directions.
        sslServerSocket.setNeedClientAuth(true);
        // serverPort was 0 ("any free port"); capture the actual port
        // before signalling the client.
        serverPort = sslServerSocket.getLocalPort();

        // NOTE(review): unlike the client, the server does not call
        // setEnabledProtocols(), so it accepts any protocol the default
        // context enables — presumably intentional; confirm against the
        // upstream version of this test.

        /*
         * Signal Client, we're ready for his connect.
         */
        serverReady = true;

        SSLSocket sslSocket = (SSLSocket)sslServerSocket.accept();
        InputStream sslIS = sslSocket.getInputStream();
        OutputStream sslOS = sslSocket.getOutputStream();

        // One-byte request/response exchange forces the handshake to
        // complete (or fail) before the sockets are closed.
        sslIS.read();
        sslOS.write('A');
        sslOS.flush();

        sslSocket.close();
    }

    /*
     * Define the client side of the test.
     *
     * If the server prematurely exits, serverReady will be set to true
     * to avoid infinite hangs.
     */
    void doClientSide() throws Exception {
        /*
         * Wait for server to get started.
         */
        while (!serverReady) {
            Thread.sleep(50);
        }

        SSLContext context = generateSSLContext(trustedCertStr, targetCertStr,
                targetPrivateKey);
        SSLSocketFactory sslsf = context.getSocketFactory();
        SSLSocket sslSocket =
            (SSLSocket)sslsf.createSocket("localhost", serverPort);

        // enable the specified TLS protocol
        sslSocket.setEnabledProtocols(new String[] {tlsProtocol});

        InputStream sslIS = sslSocket.getInputStream();
        OutputStream sslOS = sslSocket.getOutputStream();

        sslOS.write('B');
        sslOS.flush();
        sslIS.read();

        sslSocket.close();
    }

    /*
     * =============================================================
     * The remainder is just support stuff
     */

    private static String tmAlgorithm;  // trust manager algorithm (args[0])
    private static String tlsProtocol;  // TLS protocol version (args[1])

    // Reads the TrustManagerFactory algorithm and TLS protocol from the
    // jtreg @run command line.
    private static void parseArguments(String[] args) {
        tmAlgorithm = args[0];
        tlsProtocol = args[1];
    }

    /**
     * Builds an SSLContext from the given PEM strings: the trusted CA cert
     * goes into an in-memory JKS keystore as a certificate entry, and (when
     * keyCertStr is non-null) the end-entity cert plus its PKCS#8 key form
     * a one-element key entry. Note the chain deliberately omits the MD2
     * anchor so it is never sent to the peer.
     */
    private static SSLContext generateSSLContext(String trustedCertStr,
            String keyCertStr, String keySpecStr) throws Exception {

        // generate certificate from cert string
        CertificateFactory cf = CertificateFactory.getInstance("X.509");

        // create a key store
        KeyStore ks = KeyStore.getInstance("JKS");
        ks.load(null, null);

        // import the trused cert
        Certificate trusedCert = null;
        ByteArrayInputStream is = null;
        if (trustedCertStr != null) {
            is = new ByteArrayInputStream(trustedCertStr.getBytes());
            trusedCert = cf.generateCertificate(is);
            is.close();

            ks.setCertificateEntry("RSA Export Signer", trusedCert);
        }

        if (keyCertStr != null) {
            // generate the private key.
            PKCS8EncodedKeySpec priKeySpec = new PKCS8EncodedKeySpec(
                    Base64.getMimeDecoder().decode(keySpecStr));
            KeyFactory kf = KeyFactory.getInstance("RSA");
            RSAPrivateKey priKey =
                    (RSAPrivateKey)kf.generatePrivate(priKeySpec);

            // generate certificate chain
            is = new ByteArrayInputStream(keyCertStr.getBytes());
            Certificate keyCert = cf.generateCertificate(is);
            is.close();

            // It's not allowed to send MD2 signed certificate to peer,
            // even it may be a trusted certificate. Then we will not
            // place the trusted certficate in the chain.
            Certificate[] chain = new Certificate[1];
            chain[0] = keyCert;

            // import the key entry.
            ks.setKeyEntry("Whatever", priKey, passphrase, chain);
        }

        // create SSL context
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(tmAlgorithm);
        tmf.init(ks);

        SSLContext ctx = SSLContext.getInstance(tlsProtocol);
        if (keyCertStr != null && !keyCertStr.isEmpty()) {
            KeyManagerFactory kmf =
                    KeyManagerFactory.getInstance("NewSunX509");
            kmf.init(ks, passphrase);

            ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
            // Drop the keystore reference once the context owns the key
            // material.
            ks = null;
        } else {
            ctx.init(null, tmf.getTrustManagers(), null);
        }

        return ctx;
    }

    // use any free port by default
    volatile int serverPort = 0;

    volatile Exception serverException = null;
    volatile Exception clientException = null;

    public static void main(String[] args) throws Exception {
        if (debug)
            System.setProperty("javax.net.debug", "all");

        /*
         * Get the customized arguments.
         */
        parseArguments(args);

        /*
         * Start the tests.
         */
        new MD2InTrustAnchor();
    }

    Thread clientThread = null;
    Thread serverThread = null;

    /*
     * Primary constructor, used to drive remainder of the test.
     *
     * Fork off the other side, then do your work.
     */
    MD2InTrustAnchor() throws Exception {
        try {
            if (separateServerThread) {
                startServer(true);
                startClient(false);
            } else {
                startClient(true);
                startServer(false);
            }
        } catch (Exception e) {
            // swallow for now. Show later
        }

        /*
         * Wait for other side to close down.
         */
        if (separateServerThread) {
            serverThread.join();
        } else {
            clientThread.join();
        }

        /*
         * When we get here, the test is pretty much over.
         * Which side threw the error?
         */
        Exception local;
        Exception remote;
        String whichRemote;

        if (separateServerThread) {
            remote = serverException;
            local = clientException;
            whichRemote = "server";
        } else {
            remote = clientException;
            local = serverException;
            whichRemote = "client";
        }

        /*
         * If both failed, return the curthread's exception, but also
         * print the remote side Exception
         */
        if ((local != null) && (remote != null)) {
            System.out.println(whichRemote + " also threw:");
            remote.printStackTrace();
            System.out.println();
            throw local;
        }

        if (remote != null) {
            throw remote;
        }

        if (local != null) {
            throw local;
        }
    }

    void startServer(boolean newThread) throws Exception {
        if (newThread) {
            serverThread = new Thread() {
                public void run() {
                    try {
                        doServerSide();
                    } catch (Exception e) {
                        /*
                         * Our server thread just died.
                         *
                         * Release the client, if not active already...
                         */
                        System.err.println("Server died...");
                        serverReady = true;
                        serverException = e;
                    }
                }
            };
            serverThread.start();
        } else {
            try {
                doServerSide();
            } catch (Exception e) {
                serverException = e;
            } finally {
                // Always release the client, even on failure, so the test
                // cannot hang forever waiting on serverReady.
                serverReady = true;
            }
        }
    }

    void startClient(boolean newThread) throws Exception {
        if (newThread) {
            clientThread = new Thread() {
                public void run() {
                    try {
                        doClientSide();
                    } catch (Exception e) {
                        /*
                         * Our client thread just died.
                         */
                        System.err.println("Client died...");
                        clientException = e;
                    }
                }
            };
            clientThread.start();
        } else {
            try {
                doClientSide();
            } catch (Exception e) {
                clientException = e;
            }
        }
    }
}
/** * Copyright MaDgIK Group 2010 - 2015. */ package madgik.exareme.master.engine.executor.remote; import madgik.exareme.common.app.engine.AdpDBDMOperator; import madgik.exareme.common.app.engine.AdpDBSelectOperator; import madgik.exareme.common.consts.AdpDBArtPlanGeneratorConsts; import madgik.exareme.common.optimizer.OperatorBehavior; import madgik.exareme.common.schema.BuildIndex; import madgik.exareme.common.schema.DropIndex; import madgik.exareme.master.client.AdpDBClientProperties; import madgik.exareme.master.engine.AdpDBQueryExecutionPlan; import madgik.exareme.master.engine.parser.SemanticException; import madgik.exareme.master.queryProcessor.graph.ConcreteOperator; import madgik.exareme.master.queryProcessor.graph.Link; import madgik.exareme.master.queryProcessor.optimizer.scheduler.OperatorAssignment; import madgik.exareme.utils.encoding.Base64Util; import madgik.exareme.utils.properties.AdpDBProperties; import madgik.exareme.worker.art.executionPlan.entity.OperatorEntity; import madgik.exareme.worker.art.executionPlan.parser.expression.Operator; import madgik.exareme.worker.art.executionPlan.parser.expression.OperatorLink; import madgik.exareme.worker.art.executionPlan.parser.expression.Parameter; import madgik.exareme.worker.art.executionPlan.parser.expression.PlanExpression; import org.apache.log4j.Logger; import java.io.IOException; import java.util.*; /** * @author herald * @author John Chronis * @author Vaggelis Nikolopoulos */ public class AdpDBArtPlanGenerator { // Characteristics public static final OperatorBehavior SELECT_BEHAVIOR = OperatorBehavior.store_and_forward; public static final OperatorBehavior DM_BEHAVIOR = OperatorBehavior.store_and_forward; public static final int SELECT_MEMORY = AdpDBProperties.getAdpDBProps().getInt("db.engine.memory.select"); public static final int DM_MEMORY = AdpDBProperties.getAdpDBProps().getInt("db.engine.memory.dm"); public static final int BUFFER_SIZE_MB = 
AdpDBProperties.getAdpDBProps().getInt("db.engine.memory.bufferMB"); private static final Logger log = Logger.getLogger(AdpDBArtPlanGenerator.class); private AdpDBArtPlanGenerator() { } public static void generateJsonPlan(List<String> containers, AdpDBQueryExecutionPlan plan, HashMap<String, String> categoryMessageMap, PlanExpression planExpression, AdpDBClientProperties props) throws IOException { if (plan.getQueryOperators().isEmpty() == false) { generateRunQueryPlan(containers, plan, categoryMessageMap, planExpression, props); return; } if (plan.getDataManipulationOperators().isEmpty() == false) { generateDataManipulationQueryPlan(containers, plan, categoryMessageMap, planExpression); return; } throw new SemanticException("Plan is empty"); } private static void generateRunQueryPlan(List<String> containers, AdpDBQueryExecutionPlan plan, Map<String, String> categoryMessageMap, PlanExpression planExpression, AdpDBClientProperties props) throws IOException { log.debug("Generating Query plan ... 
"); // Create operators for (ConcreteOperator cop : plan.getGraph().getOperators()) { OperatorAssignment oa = plan.getSchedulingResult().operatorAssigments.get(cop.opID); String container = containers.get(oa.container); AdpDBSelectOperator dbOperator = plan.getQueryOperators().get(cop.opID); // dbOperator.printStatistics(cop.operatorName); if (dbOperator == null) { throw new SemanticException("DB Operator not found for " + oa.operatorName); } String opSerialized = Base64Util.encodeBase64(dbOperator); String operatorCode = null; String operatorCategory = ""; switch (dbOperator.getType()) { case runQuery: operatorCode = AdpDBArtPlanGeneratorConsts.EXECUTE_SELECT; operatorCategory = AdpDBArtPlanGeneratorConsts.EXECUTE_SELECT_CATEGORY + dbOperator .getOutputTables().iterator().next(); break; case tableUnionReplicator: operatorCode = AdpDBArtPlanGeneratorConsts.TABLE_UNION_REPLICATOR; operatorCategory = AdpDBArtPlanGeneratorConsts.TABLE_UNION_REPLICATOR_CATEGORY + dbOperator .getOutputTables().iterator().next(); break; case tableInput: operatorCode = AdpDBArtPlanGeneratorConsts.TABLE_UNION_REPLICATOR; operatorCategory = AdpDBArtPlanGeneratorConsts.TABLE_UNION_REPLICATOR_CATEGORY + dbOperator .getOutputTables().iterator().next(); break; } categoryMessageMap.put(operatorCategory, dbOperator.getQuery().getComments() + dbOperator.getQuery().getQuery()); LinkedList<Parameter> parameters = new LinkedList<>(); parameters .add(new Parameter(OperatorEntity.BEHAVIOR_PARAM, SELECT_BEHAVIOR.toString())); parameters.add(new Parameter(OperatorEntity.CATEGORY_PARAM, operatorCategory)); parameters .add(new Parameter((OperatorEntity.MEMORY_PARAM), String.valueOf(SELECT_MEMORY))); planExpression.addOperator(new Operator(cop.operatorName, AdpDBArtPlanGeneratorConsts.LIB_PATH + "." 
+ operatorCode, parameters, "{" + opSerialized + "};", container, null)); } for (Link link : plan.getGraph().getLinks()) { OperatorAssignment from = plan.getSchedulingResult().operatorAssigments.get(link.from.opID); OperatorAssignment to = plan.getSchedulingResult().operatorAssigments.get(link.to.opID); AdpDBSelectOperator fromDbOp = plan.getQueryOperators().get(from.getOpID()); AdpDBSelectOperator toDbOp = plan.getQueryOperators().get(to.getOpID()); String fromC = containers.get(from.container); String toC = containers.get(to.container); // log.debug("FromC :" + fromC + " toC :" + toC); String table = fromDbOp.getOutputTables().iterator().next(); BitSet common = AdpDBSelectOperator.findCommonPartitions(fromDbOp, toDbOp, table); // log.debug( // "Partitions in common : (" // + from.operatorName + " -> " // + to.operatorName + ") :" // + common + " ( " + common.cardinality() +" )"); if (common.cardinality() > 1) { throw new SemanticException("Operators do not have only one partition in common!"); } int commonPartition = -1; if (common.cardinality() == 0) { if (props.isTreeEnabled() == false) { throw new SemanticException( "Operators do not have only one partition in common!"); } List<Integer> outputs = fromDbOp.getOutputPartitions(table); if (outputs.size() != 1) { throw new SemanticException("Tree reduction error!"); } commonPartition = outputs.get(0); } else { commonPartition = common.nextSetBit(0); } // Connect the operators LinkedList<Parameter> parameters = new LinkedList<>(); parameters.add(new Parameter(AdpDBArtPlanGeneratorConsts.BUFFER_TABLE_NAME, table)); parameters.add(new Parameter(AdpDBArtPlanGeneratorConsts.BUFFER_TABLE_PART_NAME, String.valueOf(commonPartition))); for (Operator op : planExpression.operatorList) { // log.debug("**DEBUG-- " + op.operatorName); if (op.operatorName == from.operatorName) { op.addLinkParam(to.operatorName, parameters); } } // log.debug("adding Operator link, " + from.operatorName + " -> " + to.operatorName); 
planExpression.addOperatorConnect( new OperatorLink(from.operatorName, to.operatorName, fromC, parameters)); } // log.debug(planExpression.toString()); } private static void generateDataManipulationQueryPlan(List<String> containers, AdpDBQueryExecutionPlan plan, Map<String, String> categoryMessageMap, PlanExpression planExpression) throws IOException { log.debug("Generating DM plan ... "); for (ConcreteOperator cop : plan.getGraph().getOperators()) { OperatorAssignment oa = plan.getSchedulingResult().operatorAssigments.get(cop.opID); String container = containers.get(oa.container); AdpDBDMOperator dmOperator = plan.getDataManipulationOperators().get(cop.opID); if (dmOperator == null) { throw new SemanticException("DM Operator not found for " + oa.operatorName); } String opSerialized = Base64Util.encodeBase64(dmOperator); String operatorCategory = ""; switch (dmOperator.getType()) { case buildIndex: operatorCategory = AdpDBArtPlanGeneratorConsts.BUILD_INDEX_CATEGORY + ((BuildIndex) dmOperator .getDMQuery()).getIndexName(); break; case dropIndex: operatorCategory = AdpDBArtPlanGeneratorConsts.DROP_INDEX_CATEGORY + ((DropIndex) dmOperator .getDMQuery()).getIndexName(); break; case dropTable: operatorCategory = AdpDBArtPlanGeneratorConsts.DROP_TABLE_CATEGORY + dmOperator.getDMQuery() .getTable(); break; } categoryMessageMap.put(operatorCategory, dmOperator.getDMQuery().getComments() + dmOperator.getDMQuery().getQuery()); LinkedList<Parameter> parameters = new LinkedList<>(); parameters.add(new Parameter(OperatorEntity.BEHAVIOR_PARAM, DM_BEHAVIOR.toString())); parameters.add(new Parameter(OperatorEntity.CATEGORY_PARAM, operatorCategory)); parameters.add(new Parameter(OperatorEntity.MEMORY_PARAM, String.valueOf(DM_MEMORY))); planExpression.addOperator(new Operator(cop.operatorName, AdpDBArtPlanGeneratorConsts.LIB_PATH + "." + AdpDBArtPlanGeneratorConsts.EXECUTE_DM, parameters, "{" + opSerialized + "};", container, null)); } } }
package com.github.czyzby.kiwi.util.gdx.collection.pooled; import java.util.Iterator; import com.badlogic.gdx.utils.Pool; import com.github.czyzby.kiwi.util.common.Nullables; /** {@link java.util.LinkedList} equivalent for LibGDX applications. As opposed to * {@link com.badlogic.gdx.utils.PooledLinkedList}, this list allows to use custom node pools to share them among * multiple list instances and implements {@link Iterable} interface. * <p> * List's nodes are pooled to limit object creation (hence the name). Iterator is stored and reused - every * {@link #iterator()} call returns the same object. All provided methods are constant-time operations - costly * operations, like removal by index, were simply not included. * <p> * This list does NOT implement any collection-related interfaces, because LibGDX (sadly) does not provide any, and * standard Java collection interfaces contain operations that would be highly inefficient in case of a linked list * (like accessing or removing an element by its numeric index) and non-generic methods for backwards compatibility. It * was assumed that no common collection interface is better than misleading API, especially since LibGDX collections do * not share any interface either. * <p> * Usage examples: FIFO queue:<blockquote> * * <pre> * list.add(element); * list.addLast(element); // add(T) alias. * while (list.isNotEmpty()) { * E element = list.removeLast(); * } * </pre> * * </blockquote>LIFO queue: <blockquote> * * <pre> * while (list.isNotEmpty()) { * E element = list.removeFirst(); * } * </pre> * * </blockquote>List modifications during iteration: <blockquote> * * <pre> * for (E element : list) { * // Inserting a value after the element: * list.insertAfter(value); * // Removing element: * list.remove(); * } * </pre> * * </blockquote>See {@link PooledListIterator} docs for more informations about modifying the collection during * iteration. * * @author MJ * @param <T> type of stored values. 
*/ public class PooledList<T> implements Iterable<T> { /** Used by the default constructor and factory methods. Has no limit. NOT thread-safe - in multi-thread * applications, a custom node pool should be used (either a thread-safe pool or a separate pool for each * thread). */ @SuppressWarnings({ "rawtypes", "unchecked" }) public static final Pool<Node<?>> DEFAULT_POOL = new NodePool(); private final Pool<Node<T>> pool; private final PooledListIterator<T> iterator = new PooledListIterator<T>(this); private int size; // Both head and tail are never null. If head == tail, list is empty. private final Node<T> head; private Node<T> tail; /** Creates a new {@link PooledList} using the {@link #DEFAULT_POOL}. */ public PooledList() { this(DEFAULT_POOL); } /** @param pool will be used as the custom node pool. * @see #newNodePool(int, int) */ @SuppressWarnings("unchecked") public PooledList(final Pool<Node<?>> pool) { this.pool = (Pool<Node<T>>) (Object) pool; head = tail = this.pool.obtain(); } /** @return a new, empty {@link PooledList} instance using the {@link #DEFAULT_POOL}. */ public static <Type> PooledList<Type> newList() { return new PooledList<Type>(); } /** @param pool custom node pool. * @return a new, empty {@link PooledList} instance using the passed node pool. */ public static <Type> PooledList<Type> newList(final Pool<Node<?>> pool) { return new PooledList<Type>(pool); } /** @param elements will be added to the list. None of them can be null. * @return a new {@link PooledList} instance using the {@link #DEFAULT_POOL}. */ public static <Type> PooledList<Type> of(final Type... elements) { return new PooledList<Type>().addAll(elements); } /** @param pool custom node pool. * @param elements will be added to the list. None of them can be null. * @return a new {@link PooledList} instance using the passed node pool. */ public static <Type> PooledList<Type> of(final Pool<Node<?>> pool, final Type... 
elements) { return new PooledList<Type>(pool).addAll(elements); } /** @param elements will be iterated over and added to the list. * @return a new {@link PooledList} instance using the {@link #DEFAULT_POOL}. */ public static <Type> PooledList<Type> copyOf(final Iterable<Type> elements) { return new PooledList<Type>().addAll(elements); } /** @param pool custom node pool. * @param elements will be iterated over and added to the list. * @return a new {@link PooledList} instance using the passed node pool. */ public static <Type> PooledList<Type> copyOf(final Pool<Node<?>> pool, final Iterable<Type> elements) { return new PooledList<Type>(pool).addAll(elements); } /** @param initialCapacity initial size of the free objects array. Will be resized if needed. * @param max the maximum number of free objects to store in this pool. If free objects array size matches max, * freed objects are rejected and garbage collected. * @return a new node pool of the selected size. */ @SuppressWarnings({ "rawtypes", "unchecked" }) public static Pool<Node<?>> newNodePool(final int initialCapacity, final int max) { return new NodePool(initialCapacity, max); } /** @param list can be null. * @return true if list is null or has no elements. */ public static boolean isEmpty(final PooledList<?> list) { return list == null || list.isEmpty(); } /** @param list can be null. * @return true if list is not null and has any elements. */ public static boolean isNotEmpty(final PooledList<?> list) { return list != null && list.isNotEmpty(); } /** @return amount of elements in the list. */ public int size() { return size; } /** @return true if list has no elements. */ public boolean isEmpty() { return size == 0; } /** @return true if list has any elements. */ public boolean isNotEmpty() { return size > 0; } /** @return head (first element) of the list. Null if list is empty. */ public T getFirst() { return isNotEmpty() ? head.next.element : null; } /** @return tail (last element) of the list. 
Null if list is empty. */ public T getLast() { return tail.element; } /** @param element will replace the current head value. Previous head value will be removed. Cannot be null. * @return previous head value. * @throws IllegalStateException if list is empty. */ public T setFirst(final T element) { Nullables.requireNotNull(element); if (isEmpty()) { throw new IllegalStateException("List is empty. Cannot change head."); } final T previous = head.next.element; head.next.element = element; return previous; } /** @param element will replace current tail value. Previous tail value will be removed. Cannot be null. * @return previous tail value. * @throws IllegalStateException if list is empty. */ public T setLast(final T element) { Nullables.requireNotNull(element); if (isEmpty()) { throw new IllegalStateException("List is empty. Cannot change tail."); } final T previous = tail.element; tail.element = element; return previous; } /** @param element will be added as the last element in the list. Cannot be null. * @return this, for chaining. */ public PooledList<T> add(final T element) { addLast(element); return this; } /** @param elements will be added to the list. None of them can be null. * @return this, for chaining. */ public PooledList<T> addAll(final T... elements) { for (final T element : elements) { addLast(element); } return this; } /** @param elements will be added to the list. None of them can be null. * @return this, for chaining. */ public PooledList<T> addAll(final Iterable<T> elements) { for (final T element : elements) { addLast(element); } return this; } /** @param element will be added as the last element in the list. Cannot be null. * @see #add(Object) */ public void addLast(final T element) { insertAfter(tail, element); } /** @param element will be added as the first element in the list. Cannot be null. */ public void addFirst(final T element) { insertAfter(head, element); } /** @return value of the first element that got removed. Null if list is empty. 
*/ public T removeFirst() { return isEmpty() ? null : remove(head.next); } /** @return value of last element that got removed. Null if list is empty. */ public T removeLast() { return isEmpty() ? null : remove(tail); } /** @return internally stored reusable {@link PooledListIterator}. */ @Override public PooledListIterator<T> iterator() { return iterator.reset(); } /** @return a new instance of {@link PooledListIterator}. As opposed to {@link #iterator()} method, this method * returns a new instance of the iterator, allowing to (for example) iterate over the list in a nested * for-each loop. However, keep in mind that removal or insertion during nested iteration is not advised. */ public PooledListIterator<T> iterate() { return new PooledListIterator<T>(this).reset(); } /** Operation valid ONLY during iteration over the list using default, cached iterator ({@link #iterator()} method). * * @return value of the removed element. */ public T remove() { final Node<T> previous = iterator.currentNode.previous; final T element = remove(iterator.currentNode); iterator.currentNode = previous; return element; } private T remove(final Node<T> node) { final T element = node.element; node.previous.next = node.next; if (node.next != null) { node.next.previous = node.previous; } else { tail = node.previous; } node.free(pool); size--; return element; } /** Operation valid ONLY during iteration over the list using default, cached iterator ({@link #iterator()} method). * Note that this operation might cause an infinite loop - be careful when inserting values during iteration. * * @param element will be inserted after the element that is currently processed by the iterator. Will be processed * as the next element during current iteration. 
* @see #skip() */ public void insertAfter(final T element) { insertAfter(iterator.currentNode, element); } protected void insertAfter(final Node<T> after, final T element) { Nullables.requireNotNull(element); final Node<T> node = pool.obtain(); node.element = element; node.next = after.next; node.previous = after; after.next = node; if (node.next != null) { node.next.previous = node; } else { tail = node; } size++; } /** Operation valid ONLY during iteration over the list using default, cached iterator ({@link #iterator()} method). * * @param element will be inserted before the element that is currently processed by the iterator. Will be ignored * in the current iteration. */ public void insertBefore(final T element) { insertBefore(iterator.currentNode, element); } protected void insertBefore(final Node<T> before, final T element) { Nullables.requireNotNull(element); final Node<T> node = pool.obtain(); node.element = element; node.next = before; node.previous = before.previous; before.previous = node; node.previous.next = node; size++; } /** Operation valid ONLY during iteration over the list using default, cached iterator ({@link #iterator()} method). * Changes current iteration pointer. * * @see PooledListIterator#skip() */ public void skip() { iterator.skip(); } /** Operation valid ONLY during iteration over the list using default, cached iterator ({@link #iterator()} method). * * @param element will replace the value of current node. * @return previous element value. */ public T replace(final T element) { return iterator.replace(element); } /** @return direct reference to list's pool. */ public Pool<Node<T>> getPool() { return pool; } /** Removes all elements of the list. Frees all nodes to the pool. */ public void clear() { Node<T> node = head.next, next; size = 0; head.reset(); // Clearing references. tail = head; iterator.currentNode = head; while (node != null) { next = node.next; node.free(pool); node = next; } } /** Clears the list. 
Instead of returning the nodes to the pool, it simply clears references to them, allowing them * to be garbage-collected. Invoke this method only if you don't plan on using {@link PooledList} with the selected * pool anymore or if the pool has a max value that is already achieved (or lesser than total amount of nodes in the * list), in which case nodes would be garbage-collected as well. * * @see #clear() */ public void purge() { head.reset(); tail = head; size = 0; iterator.currentNode = head; } /** Represents a single node in the {@link PooledList}. Stores an element and references to its node neighbors. * * @author MJ * * @param <T> type of stored element. */ public static final class Node<T> { private T element; private Node<T> previous; private Node<T> next; /** Clears node data. */ public void reset() { element = null; previous = null; next = null; } /** @param pool returns node to this pool. */ public void free(final Pool<Node<T>> pool) { reset(); pool.free(this); } /** @return stored element. */ public T getElement() { return element; } protected void setElement(final T element) { this.element = element; } /** @return previous node in the list. */ public Node<T> getPrevious() { return previous; } protected void setPrevious(final Node<T> previous) { this.previous = previous; } /** @return next node in the list. */ public Node<T> getNext() { return next; } protected void setNext(final Node<T> next) { this.next = next; } } /** Allows to iterate over a {@link PooledList}. Provides operations that allow to modify the list: * {@link PooledListIterator#remove()}, {@link PooledListIterator#insert(Object)}, * {@link PooledListIterator#insertBefore(Object)} and {@link PooledListIterator#replace(Object)}. Implements * {@link Iterable} (resetting and returning itself on {@link PooledListIterator#iterator()} call) for extra * utility. * * @author MJ * * @param <T> type of stored values. 
*/ public static class PooledListIterator<T> implements Iterator<T>, Iterable<T> { private final PooledList<T> list; private Node<T> currentNode; /** @param list will be iterated over. */ public PooledListIterator(final PooledList<T> list) { this.list = list; } /** Resets the iterator (starting iteration from head) and returns it. * * @return this. */ @Override public Iterator<T> iterator() { return reset(); } @Override public boolean hasNext() { return currentNode.next != null; } @Override public T next() { currentNode = currentNode.next; return currentNode.element; } /** A null-safe method that skips the current iteration element. As this changes pointer to the current value, * modifying operations (like {@link #remove()}) are generally not safe to use after invoking this method. * <p> * This method should be called after {@link #insert(Object)} if you want to omit the next inserted value during * iteration. */ public void skip() { if (hasNext()) { currentNode = currentNode.next; } } @Override public void remove() { final Node<T> previous = currentNode.previous; if (previous == null) { throw new IllegalStateException("next() has to be called before removing a value."); } list.remove(currentNode); currentNode = previous; } /** @param element will replace current element value. Cannot be null. * @return previous element value. */ public T replace(final T element) { Nullables.requireNotNull(element); final T previous = currentNode.element; currentNode.element = element; return previous; } /** If this is the main, cached iterator of the list, {@link PooledList#insertAfter(Object)} can be used * instead. This method should not be invoked before {@link #next()}. * * @param element will be inserted after the current element. Will be the next element during this iteration. 
* @see #skip() */ public void insert(final T element) { list.insertAfter(currentNode, element); } /** If this is the main, cached iterator of the list, {@link PooledList#insertBefore(Object)} can be used * instead. This method cannot not be invoked before {@link #next()}. * * @param element will be inserted before the current element. Will be ignored during current iteration. */ public void insertBefore(final T element) { list.insertBefore(currentNode, element); } /** Starts iteration from list's head (first element). * * @return this, for chaining. */ public PooledListIterator<T> reset() { currentNode = list.head; return this; } } /** Default implementation of a {@link Pool} storing {@link PooledList}'s {@link Node nodes}. * * @author MJ * * @param <T> type of stored values. */ public static class NodePool<T> extends Pool<Node<T>> { /** Creates a new pool with default initial size and no max value. */ public NodePool() { this(16, Integer.MAX_VALUE); } /** @param initialCapacity initial size of the free objects array. Will be resized if needed. * @param max the maximum number of free objects to store in this pool. If free objects array size matches max, * freed objects are rejected and garbage collected. */ public NodePool(final int initialCapacity, final int max) { super(initialCapacity, max); } /** @return a new pool with default size. */ public static <T> Pool<Node<T>> newPool() { return new NodePool<T>(); } @Override @SuppressWarnings({ "rawtypes", "unchecked" }) protected Node<T> newObject() { return new Node(); } } }
package net.morimekta.providence.util;

/**
 * Dummy struct that is just empty. Can be used to &quot;ignore&quot; everything.
 */
// NOTE(review): generated by providence-maven-plugin — do not edit by hand;
// regenerate from the .thrift/.providence definition instead.
@SuppressWarnings("unused")
@javax.annotation.Generated("providence-maven-plugin")
@javax.annotation.concurrent.Immutable
public class Empty
        implements net.morimekta.providence.PMessage<Empty,Empty._Field>,
                   Comparable<Empty>,
                   java.io.Serializable,
                   net.morimekta.providence.serializer.binary.BinaryWriter {
    private final static long serialVersionUID = 6224063400190903929L;

    // Lazily computed hash-code cache; 0 means "not computed yet".
    private volatile transient int tHashCode;

    // Transient object used during java deserialization.
    private transient Empty tSerializeInstance;

    private Empty(_Builder builder) {
    }

    @Override
    public boolean has(int key) {
        // The struct has no fields, so no key is ever present.
        switch(key) {
            default: return false;
        }
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T get(int key) {
        // The struct has no fields, so every lookup yields null.
        switch(key) {
            default: return null;
        }
    }

    @Override
    public boolean equals(Object o) {
        // All Empty instances are equal: there is no state to compare.
        if (o == this) return true;
        if (o == null || !o.getClass().equals(getClass())) return false;
        return true;
    }

    @Override
    public int hashCode() {
        if (tHashCode == 0) {
            tHashCode = java.util.Objects.hash(
                    Empty.class);
        }
        return tHashCode;
    }

    @Override
    public String toString() {
        return "putil.Empty" + asString();
    }

    @Override
    @javax.annotation.Nonnull
    public String asString() {
        StringBuilder out = new StringBuilder();
        out.append("{");
        out.append('}');
        return out.toString();
    }

    @Override
    public int compareTo(Empty other) {
        // Unused local is generator scaffolding; with no fields every instance compares equal.
        int c;
        return 0;
    }

    // Custom java serialization: delegates the payload to providence's binary format.
    private void writeObject(java.io.ObjectOutputStream oos) throws java.io.IOException {
        oos.defaultWriteObject();
        net.morimekta.providence.serializer.BinarySerializer serializer = new net.morimekta.providence.serializer.BinarySerializer(false);
        serializer.serialize(oos, this);
    }

    private void readObject(java.io.ObjectInputStream ois) throws java.io.IOException, ClassNotFoundException {
        ois.defaultReadObject();
        net.morimekta.providence.serializer.BinarySerializer serializer = new net.morimekta.providence.serializer.BinarySerializer(false);
        tSerializeInstance = serializer.deserialize(ois, kDescriptor);
    }

    // Replaces the stream-created instance with the one deserialized in readObject.
    private Object readResolve() throws java.io.ObjectStreamException {
        return tSerializeInstance;
    }

    @Override
    public int writeBinary(net.morimekta.util.io.BigEndianBinaryWriter writer) throws java.io.IOException {
        // No fields: the binary form is just the single stop byte (0).
        int length = 0;
        length += writer.writeByte((byte) 0);
        return length;
    }

    @javax.annotation.Nonnull
    @Override
    public _Builder mutate() {
        return new _Builder(this);
    }

    // Field enumeration — intentionally empty, since the struct declares no fields.
    public enum _Field implements net.morimekta.providence.descriptor.PField {
        ;
        private final int mId;
        private final net.morimekta.providence.descriptor.PRequirement mRequired;
        private final String mName;
        private final net.morimekta.providence.descriptor.PDescriptorProvider mTypeProvider;
        private final net.morimekta.providence.descriptor.PValueProvider<?> mDefaultValue;

        _Field(int id, net.morimekta.providence.descriptor.PRequirement required, String name, net.morimekta.providence.descriptor.PDescriptorProvider typeProvider, net.morimekta.providence.descriptor.PValueProvider<?> defaultValue) {
            mId = id;
            mRequired = required;
            mName = name;
            mTypeProvider = typeProvider;
            mDefaultValue = defaultValue;
        }

        @Override
        public int getId() { return mId; }

        @Override
        public net.morimekta.providence.descriptor.PRequirement getRequirement() { return mRequired; }

        @Override
        public net.morimekta.providence.descriptor.PDescriptor getDescriptor() { return mTypeProvider.descriptor(); }

        @Override
        public String getName() { return mName; }

        @Override
        public boolean hasDefaultValue() { return mDefaultValue != null; }

        @Override
        public Object getDefaultValue() {
            return hasDefaultValue() ? mDefaultValue.get() : null;
        }

        @Override
        public String toString() {
            return net.morimekta.providence.descriptor.PField.asString(this);
        }

        /**
         * @param id Field name
         * @return The identified field or null
         */
        public static _Field findById(int id) {
            // Empty switch: no field ids exist, so this always falls through to null.
            switch (id) {
            }
            return null;
        }

        /**
         * @param name Field name
         * @return The named field or null
         */
        public static _Field findByName(String name) {
            // Empty switch: no field names exist, so this always falls through to null.
            switch (name) {
            }
            return null;
        }

        /**
         * @param id Field name
         * @return The identified field
         * @throws IllegalArgumentException If no such field
         */
        public static _Field fieldForId(int id) {
            _Field field = findById(id);
            if (field == null) {
                throw new IllegalArgumentException("No such field id " + id + " in putil.Empty");
            }
            return field;
        }

        /**
         * @param name Field name
         * @return The named field
         * @throws IllegalArgumentException If no such field
         */
        public static _Field fieldForName(String name) {
            _Field field = findByName(name);
            if (field == null) {
                throw new IllegalArgumentException("No such field \"" + name + "\" in putil.Empty");
            }
            return field;
        }
    }

    @javax.annotation.Nonnull
    public static net.morimekta.providence.descriptor.PStructDescriptorProvider<Empty,_Field> provider() {
        return new _Provider();
    }

    @Override
    @javax.annotation.Nonnull
    public net.morimekta.providence.descriptor.PStructDescriptor<Empty,_Field> descriptor() {
        return kDescriptor;
    }

    // Shared struct descriptor; assigned once in the static initializer below.
    public static final net.morimekta.providence.descriptor.PStructDescriptor<Empty,_Field> kDescriptor;

    private static class _Descriptor extends net.morimekta.providence.descriptor.PStructDescriptor<Empty,_Field> {
        public _Descriptor() {
            super("putil", "Empty", _Builder::new, true);
        }

        @Override
        @javax.annotation.Nonnull
        public _Field[] getFields() {
            return _Field.values();
        }

        @Override
        @javax.annotation.Nullable
        public _Field findFieldByName(String name) {
            return _Field.findByName(name);
        }

        @Override
        @javax.annotation.Nullable
        public _Field findFieldById(int id) {
            return _Field.findById(id);
        }
    }

    static {
        kDescriptor = new _Descriptor();
    }

    private final static class _Provider extends net.morimekta.providence.descriptor.PStructDescriptorProvider<Empty,_Field> {
        @Override
        public net.morimekta.providence.descriptor.PStructDescriptor<Empty,_Field> descriptor() {
            return kDescriptor;
        }
    }

    /**
     * Make a <code>putil.Empty</code> builder.
     * @return The builder instance.
     */
    public static _Builder builder() {
        return new _Builder();
    }

    /**
     * Dummy struct that is just empty. Can be used to &quot;ignore&quot; everything.
     */
    public static class _Builder extends net.morimekta.providence.PMessageBuilder<Empty,_Field> implements net.morimekta.providence.serializer.binary.BinaryReader {
        // Both bit sets are sized 0 — there are no fields to track.
        private java.util.BitSet optionals;
        private java.util.BitSet modified;

        /**
         * Make a putil.Empty builder instance.
         */
        public _Builder() {
            optionals = new java.util.BitSet(0);
            modified = new java.util.BitSet(0);
        }

        /**
         * Make a mutating builder off a base putil.Empty.
         *
         * @param base The base Empty
         */
        public _Builder(Empty base) {
            this();
        }

        @javax.annotation.Nonnull
        @Override
        public _Builder merge(Empty from) {
            // Nothing to merge: the struct has no fields.
            return this;
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) return true;
            if (o == null || !o.getClass().equals(getClass())) return false;
            return true;
        }

        @Override
        public int hashCode() {
            return java.util.Objects.hash(
                    Empty.class, optionals);
        }

        @Override
        @SuppressWarnings("unchecked")
        public net.morimekta.providence.PMessageBuilder mutator(int key) {
            switch (key) {
                default: throw new IllegalArgumentException("Not a message field ID: " + key);
            }
        }

        @javax.annotation.Nonnull
        @Override
        @SuppressWarnings("unchecked")
        public _Builder set(int key, Object value) {
            if (value == null) return clear(key);
            switch (key) {
                default: break;
            }
            return this;
        }

        @Override
        public boolean isSet(int key) {
            switch (key) {
                default: break;
            }
            return false;
        }

        @Override
        public boolean isModified(int key) {
            switch (key) {
                default: break;
            }
            return false;
        }

        @Override
        public _Builder addTo(int key, Object value) {
            switch (key) {
                default: break;
            }
            return this;
        }

        @javax.annotation.Nonnull
        @Override
        public _Builder clear(int key) {
            switch (key) {
                default: break;
            }
            return this;
        }

        @Override
        public boolean valid() {
            return true;
        }

        @Override
        public void validate() {
        }

        @javax.annotation.Nonnull
        @Override
        public net.morimekta.providence.descriptor.PStructDescriptor<Empty,_Field> descriptor() {
            return kDescriptor;
        }

        @Override
        public void readBinary(net.morimekta.util.io.BigEndianBinaryReader reader, boolean strict) throws java.io.IOException {
            // Consume (and discard) any field entries until the stop byte (0) is read;
            // unknown fields are skipped via readFieldValue with a null descriptor.
            byte type = reader.expectByte();
            while (type != 0) {
                int field = reader.expectShort();
                switch (field) {
                    default: {
                        net.morimekta.providence.serializer.binary.BinaryFormatUtils.readFieldValue(reader, new net.morimekta.providence.serializer.binary.BinaryFormatUtils.FieldInfo(field, type), null, false);
                        break;
                    }
                }
                type = reader.expectByte();
            }
        }

        @Override
        public Empty build() {
            return new Empty(this);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.TreeSet; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key; import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.NodeStore; import 
org.apache.jackrabbit.oak.stats.Clock; import org.jetbrains.annotations.NotNull; import org.junit.Test; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import static com.google.common.collect.ImmutableList.copyOf; import static org.apache.jackrabbit.oak.plugins.document.Collection.NODES; import static org.apache.jackrabbit.oak.plugins.document.MongoBlobGCTest.randomStream; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.DOC_SIZE_THRESHOLD; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.NUM_REVS_THRESHOLD; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.PREV_SPLIT_FACTOR; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.SplitDocType; import static org.apache.jackrabbit.oak.plugins.document.TestUtils.NO_BINARY; import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation.Type.REMOVE_MAP_ENTRY; import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation.Type.SET_MAP_ENTRY; import static org.apache.jackrabbit.oak.plugins.document.util.Utils.isCommitted; import static org.apache.jackrabbit.oak.plugins.memory.BinaryPropertyState.binaryProperty; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Check correct splitting of documents (OAK-926 & OAK-1342). 
*/ public class DocumentSplitTest extends BaseDocumentMKTest { @Test public void splitRevisions() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); Set<Revision> revisions = Sets.newHashSet(); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/")); assertNotNull(doc); revisions.addAll(doc.getLocalRevisions().keySet()); revisions.add(Revision.fromString(mk.commit("/", "+\"foo\":{}+\"bar\":{}", null, null))); // create nodes while (revisions.size() <= NodeDocument.NUM_REVS_THRESHOLD) { revisions.add(Revision.fromString(mk.commit("/", "+\"foo/node-" + revisions.size() + "\":{}" + "+\"bar/node-" + revisions.size() + "\":{}", null, null))); } mk.runBackgroundOperations(); String head = mk.getHeadRevision(); doc = store.find(NODES, Utils.getIdFromPath("/")); assertNotNull(doc); Map<Revision, String> revs = doc.getLocalRevisions(); // one remaining in the local revisions map assertEquals(1, revs.size()); for (Revision rev : revisions) { assertTrue(doc.containsRevision(rev)); assertTrue(isCommitted(ns.getCommitValue(rev, doc))); } // check if document is still there assertNotNull(ns.getNode(Path.ROOT, RevisionVector.fromString(head))); NodeDocument prevDoc = Iterators.getOnlyElement(doc.getAllPreviousDocs()); assertThat(prevDoc.getSplitDocType(), either(is(SplitDocType.DEFAULT)).or(is(SplitDocType.DEFAULT_NO_BRANCH))); mk.commit("/", "+\"baz\":{}", null, null); ns.setAsyncDelay(0); mk.backgroundWrite(); } @Test public void splitDeleted() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); Set<Revision> revisions = Sets.newHashSet(); mk.commit("/", "+\"foo\":{}", null, null); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(doc); revisions.addAll(doc.getLocalRevisions().keySet()); boolean create = false; while (revisions.size() <= NodeDocument.NUM_REVS_THRESHOLD) { if (create) { revisions.add(Revision.fromString(mk.commit("/", 
"+\"foo\":{}", null, null))); } else { revisions.add(Revision.fromString(mk.commit("/", "-\"foo\"", null, null))); } create = !create; } mk.runBackgroundOperations(); String head = mk.getHeadRevision(); doc = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(doc); Map<Revision, String> deleted = doc.getLocalDeleted(); // one remaining in the local deleted map assertEquals(1, deleted.size()); for (Revision rev : revisions) { assertTrue("document should contain revision (or have revision in commit root path):" + rev, doc.containsRevision(rev) || doc.getCommitRootPath(rev) != null); assertTrue(isCommitted(ns.getCommitValue(rev, doc))); } DocumentNodeState node = ns.getNode(Path.fromString("/foo"), RevisionVector.fromString(head)); // check status of node if (create) { assertNull(node); } else { assertNotNull(node); } } @Test public void splitCommitRoot() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); mk.commit("/", "+\"foo\":{}+\"bar\":{}", null, null); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(doc); Set<Revision> commitRoots = Sets.newHashSet(); commitRoots.addAll(doc.getLocalCommitRoot().keySet()); // create nodes while (commitRoots.size() <= NodeDocument.NUM_REVS_THRESHOLD) { commitRoots.add(Revision.fromString(mk.commit("/", "^\"foo/prop\":" + commitRoots.size() + "^\"bar/prop\":" + commitRoots.size(), null, null))); } mk.runBackgroundOperations(); doc = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(doc); Map<Revision, String> commits = doc.getLocalCommitRoot(); // two remaining in the local commit root map // the first _commitRoot entry for the _deleted when the node was created // the second _commitRoot entry for the most recent prop change assertEquals(2, commits.size()); for (Revision rev : commitRoots) { assertTrue(isCommitted(ns.getCommitValue(rev, doc))); } } @Test public void splitPropertyRevisions() throws Exception { DocumentStore 
store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); mk.commit("/", "+\"foo\":{}", null, null); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(doc); Set<Revision> revisions = Sets.newHashSet(); // create nodes while (revisions.size() <= NodeDocument.NUM_REVS_THRESHOLD) { revisions.add(Revision.fromString(mk.commit("/", "^\"foo/prop\":" + revisions.size(), null, null))); } mk.runBackgroundOperations(); doc = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(doc); Map<Revision, String> localRevs = doc.getLocalRevisions(); // one remaining in the local revisions map assertEquals(1, localRevs.size()); for (Revision rev : revisions) { assertTrue(isCommitted(ns.getCommitValue(rev, doc))); } // all revisions in the prop map Map<Revision, String> valueMap = doc.getValueMap("prop"); assertEquals((long) revisions.size(), valueMap.size()); // one remaining revision in the local map valueMap = doc.getLocalMap("prop"); assertEquals(1L, valueMap.size()); } @Test public void cluster() { MemoryDocumentStore ds = new MemoryDocumentStore(); MemoryBlobStore bs = new MemoryBlobStore(); DocumentMK.Builder builder; builder = new DocumentMK.Builder(); builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0); DocumentMK mk1 = builder.setClusterId(1).open(); mk1.commit("/", "+\"test\":{\"prop1\":0}", null, null); // make sure the new node is visible to other DocumentMK instances mk1.backgroundWrite(); builder = new DocumentMK.Builder(); builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0); DocumentMK mk2 = builder.setClusterId(2).open(); builder = new DocumentMK.Builder(); builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0); DocumentMK mk3 = builder.setClusterId(3).open(); for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) { mk1.commit("/", "^\"test/prop1\":" + i, null, null); mk2.commit("/", "^\"test/prop2\":" + i, null, null); mk3.commit("/", "^\"test/prop3\":" + i, null, null); } 
mk1.runBackgroundOperations(); mk2.runBackgroundOperations(); mk3.runBackgroundOperations(); NodeDocument doc = ds.find(NODES, Utils.getIdFromPath("/test")); assertNotNull(doc); Map<Revision, String> revs = doc.getLocalRevisions(); assertEquals(3, revs.size()); revs = doc.getValueMap("_revisions"); assertEquals(3 * NodeDocument.NUM_REVS_THRESHOLD, revs.size()); Revision previous = null; for (Map.Entry<Revision, String> entry : revs.entrySet()) { if (previous != null) { assertTrue(previous.compareRevisionTimeThenClusterId(entry.getKey()) > 0); } previous = entry.getKey(); } mk1.dispose(); mk2.dispose(); mk3.dispose(); } @Test // OAK-1233 public void manyRevisions() { final int numMKs = 3; MemoryDocumentStore ds = new MemoryDocumentStore(); MemoryBlobStore bs = new MemoryBlobStore(); List<Set<String>> changes = new ArrayList<Set<String>>(); List<DocumentMK> mks = new ArrayList<DocumentMK>(); for (int i = 1; i <= numMKs; i++) { DocumentMK.Builder builder = new DocumentMK.Builder(); builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0); DocumentMK mk = builder.setClusterId(i).open(); mks.add(mk); changes.add(new HashSet<String>()); if (i == 1) { mk.commit("/", "+\"test\":{}", null, null); mk.runBackgroundOperations(); } } List<String> propNames = Arrays.asList("prop1", "prop2", "prop3"); Random random = new Random(0); for (int i = 0; i < 1000; i++) { int mkIdx = random.nextInt(mks.size()); // pick mk DocumentMK mk = mks.get(mkIdx); DocumentNodeStore ns = mk.getNodeStore(); // pick property name to update String name = propNames.get(random.nextInt(propNames.size())); // need to sync? 
for (int j = 0; j < changes.size(); j++) { Set<String> c = changes.get(j); if (c.contains(name)) { syncMKs(mks, j); c.clear(); break; } } // read current value NodeDocument doc = ds.find(NODES, Utils.getIdFromPath("/test")); assertNotNull(doc); RevisionVector head = ns.getHeadRevision(); Revision lastRev = ns.getPendingModifications().get(Path.fromString("/test")); DocumentNodeState n = doc.getNodeAtRevision(mk.getNodeStore(), head, lastRev); assertNotNull(n); String value = n.getPropertyAsString(name); // set or increment if (value == null) { value = String.valueOf(0); } else { value = String.valueOf(Integer.parseInt(value) + 1); } mk.commit("/test", "^\"" + name + "\":" + value, null, null); changes.get(mkIdx).add(name); } for (DocumentMK mk : mks) { mk.dispose(); } } @Test public void commitRootInPrevious() { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); mk.commit("/", "+\"test\":{\"node\":{}}", null, null); mk.commit("/test", "+\"foo\":{}+\"bar\":{}", null, null); mk.commit("/test", "^\"foo/prop\":0^\"bar/prop\":0", null, null); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test/foo")); assertNotNull(doc); String rev = null; for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) { rev = mk.commit("/test/foo", "^\"prop\":" + i, null, null); } ns.runBackgroundOperations(); doc = store.find(NODES, Utils.getIdFromPath("/test/foo")); assertNotNull(doc); DocumentNodeState node = doc.getNodeAtRevision(ns, RevisionVector.fromString(rev), null); assertNotNull(node); } @Test public void testSplitDocNoChild() throws Exception{ DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); mk.commit("/", "+\"test\":{\"node\":{}}", null, null); mk.commit("/test", "+\"foo\":{}+\"bar\":{}", null, null); for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) { mk.commit("/test/foo", "^\"prop\":" + i, null, null); } ns.runBackgroundOperations(); NodeDocument doc = store.find(NODES, 
Utils.getIdFromPath("/test/foo")); List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs()); assertEquals(1, prevDocs.size()); assertEquals(SplitDocType.DEFAULT_LEAF, prevDocs.get(0).getSplitDocType()); } @Test public void testSplitPropAndCommitOnly() throws Exception{ DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder b1 = ns.getRoot().builder(); b1.child("test").child("foo").child("bar"); ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY); //Commit on a node which has a child and where the commit root // is parent for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) { b1 = ns.getRoot().builder(); b1.child("test").child("foo").setProperty("prop",i); b1.child("test").setProperty("prop",i); ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY); } ns.runBackgroundOperations(); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test/foo")); List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs()); assertEquals(1, prevDocs.size()); assertEquals(SplitDocType.COMMIT_ROOT_ONLY, prevDocs.get(0).getSplitDocType()); } @Test public void splitDocWithHasBinary() throws Exception{ DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder b1 = ns.getRoot().builder(); b1.child("test").child("foo").setProperty("binaryProp",ns.createBlob(randomStream(1, 4096))); ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY); //Commit on a node which has a child and where the commit root // is parent for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) { b1 = ns.getRoot().builder(); b1.child("test").child("foo").setProperty("prop",i); ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY); } ns.runBackgroundOperations(); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test/foo")); List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs()); assertEquals(1, prevDocs.size()); //Check for hasBinary assertTrue(doc.hasBinary()); 
assertTrue(prevDocs.get(0).hasBinary()); } @Test public void cascadingSplit() { cascadingSplit("/test/node"); } @Test public void cascadingSplitLongPath() { Path p = Path.ROOT; while (!Utils.isLongPath(p)) { p = new Path(p, "long-path-element"); } cascadingSplit(p.toString()); } private void cascadingSplit(String path) { // use a store without sync delay mk.dispose(); mk = new DocumentMK.Builder().setAsyncDelay(0).open(); DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); String rev = null; String p = "/"; for (String name : PathUtils.elements(path)) { rev = mk.commit(p, "+\"" + name + "\":{}", rev, null); p = PathUtils.concat(p, name); } List<String> revs = Lists.newArrayList(); for (int i = 0; i < NodeDocument.PREV_SPLIT_FACTOR + 1; i++) { NodeDocument doc = store.find(NODES, Utils.getIdFromPath(path)); assertNotNull(doc); assertEquals(i, doc.getPreviousRanges().size()); for (int j = 0; j < NodeDocument.NUM_REVS_THRESHOLD; j++) { int value = (i * NodeDocument.NUM_REVS_THRESHOLD + j); rev = mk.commit(path, "^\"prop\":" + value, rev, null); revs.add(rev); } ns.runBackgroundOperations(); } NodeDocument doc = store.find(NODES, Utils.getIdFromPath(path)); assertNotNull(doc); assertEquals(2, doc.getPreviousRanges().size()); List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs()); //1 intermediate and 11 previous doc assertEquals(1 + 11, prevDocs.size()); assertTrue(Iterables.any(prevDocs, new Predicate<NodeDocument>() { @Override public boolean apply(NodeDocument input) { return input.getSplitDocType() == SplitDocType.INTERMEDIATE; } })); for (String s : revs) { Revision r = Revision.fromString(s); if (doc.getLocalRevisions().containsKey(r)) { continue; } Iterable<NodeDocument> prev = doc.getPreviousDocs("prop", r); assertEquals(1, Iterables.size(prev)); for (NodeDocument d : prev) { assertTrue(d.containsRevision(r)); } } int numPrev = 0; for (NodeDocument prev : doc.getPreviousDocs("prop", null)) { numPrev++; 
assertTrue(!prev.getValueMap("prop").isEmpty()); } assertEquals(2, numPrev); Revision previous = null; int numValues = 0; Map<Revision, String> valueMap = doc.getValueMap("prop"); for (Map.Entry<Revision, String> entry : valueMap.entrySet()) { if (previous != null) { assertTrue(previous.compareRevisionTime(entry.getKey()) > 0); } previous = entry.getKey(); numValues++; assertEquals(entry.getValue(), valueMap.get(entry.getKey())); } assertEquals(revs.size(), numValues); assertEquals(revs.size(), valueMap.size()); assertNotNull(doc.getNodeAtRevision(ns, RevisionVector.fromString(rev), null)); } @Test public void mainPath() { Revision r = Revision.fromString("r1-0-1"); for (String p : new String[]{"/", "/test", "/test/path"}) { Path path = Path.fromString(p); DocumentStore store = mk.getDocumentStore(); NodeDocument doc = new NodeDocument(store); String id = Utils.getPreviousIdFor(path, r, 0); doc.put(NodeDocument.ID, id); assertEquals(path, doc.getMainPath()); } } // OAK-1692 @Test public void cascadingWithSplitRatio() { String id = Utils.getIdFromPath("/test"); mk.commit("/", "+\"test\":{}", null, null); DocumentStore store = mk.getDocumentStore(); int clusterId = mk.getNodeStore().getClusterId(); UpdateOp op = new UpdateOp(id, false); // create some baggage from another cluster node for (int i = 0; i < 4000; i++) { Revision r = Revision.newRevision(2); op.setMapEntry("prop", r, "some long test value with many characters"); NodeDocument.setRevision(op, r, "c"); } store.findAndUpdate(NODES, op); NodeDocument doc = store.find(NODES, id); assertNotNull(doc); assertTrue(doc.getMemory() > DOC_SIZE_THRESHOLD); // some fake previous doc references to trigger UpdateOp // for an intermediate document TreeSet<Revision> prev = Sets.newTreeSet(StableRevisionComparator.INSTANCE); for (int i = 0; i < PREV_SPLIT_FACTOR; i++) { Revision low = Revision.newRevision(clusterId); Revision high = Revision.newRevision(clusterId); prev.add(high); NodeDocument.setPrevious(op, new 
Range(high, low, 0)); } store.findAndUpdate(NODES, op); doc = store.find(NODES, id); assertNotNull(doc); List<UpdateOp> splitOps = Lists.newArrayList(doc.split( mk.getNodeStore(), mk.getNodeStore().getHeadRevision(), NO_BINARY)); assertEquals(2, splitOps.size()); // first update op is for the new intermediate doc op = splitOps.get(0); String newPrevId = Utils.getPreviousIdFor(Path.fromString("/test"), prev.last(), 1); assertEquals(newPrevId, op.getId()); // second update op is for the main document op = splitOps.get(1); assertEquals(id, op.getId()); for (Map.Entry<Key, Operation> entry : op.getChanges().entrySet()) { Revision r = entry.getKey().getRevision(); assertNotNull(r); assertEquals(clusterId, r.getClusterId()); if (entry.getKey().getName().equals("_prev")) { if (entry.getValue().type == REMOVE_MAP_ENTRY) { assertTrue(prev.contains(r)); } else if (entry.getValue().type == SET_MAP_ENTRY) { assertEquals(newPrevId, Utils.getPreviousIdFor(Path.fromString("/test"), r, 1)); } else { fail("unexpected update operation " + entry); } } else { fail("unexpected update operation " + entry); } } } // OAK-1770 @Test public void splitRevisionsManyClusterNodes() { int numClusterNodes = 5; String id = Utils.getIdFromPath("/test"); mk.commit("/", "+\"test\":{}", null, null); DocumentStore store = mk.getDocumentStore(); int clusterId = mk.getNodeStore().getClusterId(); List<Revision> revs = Lists.newArrayList(); UpdateOp op = new UpdateOp(id, false); for (int i = 0; i < numClusterNodes; i++) { // create some commits for each cluster node for (int j = 0; j < NUM_REVS_THRESHOLD; j++) { Revision r = Revision.newRevision(i + 1); if (clusterId == r.getClusterId()) { revs.add(r); } op.setMapEntry("prop", r, "value"); NodeDocument.setRevision(op, r, "c"); } } store.findAndUpdate(NODES, op); NodeDocument doc = store.find(NODES, id); assertNotNull(doc); // must split document and create a previous document starting at // the second most recent revision List<UpdateOp> splitOps = 
Lists.newArrayList(doc.split( mk.getNodeStore(), mk.getNodeStore().getHeadRevision(), NO_BINARY)); assertEquals(2, splitOps.size()); String prevId = Utils.getPreviousIdFor(Path.fromString("/test"), revs.get(revs.size() - 2), 0); assertEquals(prevId, splitOps.get(0).getId()); assertEquals(id, splitOps.get(1).getId()); } // OAK-1794 @Test public void keepRevisionsForMostRecentChanges() throws Exception { DocumentStore store = mk.getDocumentStore(); NodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.setProperty("foo", -1); builder.setProperty("bar", -1); ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); for (int i = 0; i < NUM_REVS_THRESHOLD; i++) { builder = ns.getRoot().builder(); builder.setProperty("foo", i); ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); } mk.runBackgroundOperations(); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/")); assertNotNull(doc); // the local _revisions map must still contain the entry for // the initial 'bar' property Map<Revision, String> valueMap = doc.getValueMap("bar"); assertFalse(valueMap.isEmpty()); Revision r = valueMap.keySet().iterator().next(); assertTrue(doc.getLocalRevisions().containsKey(r)); // but also the previous document must contain the revision List<NodeDocument> prevDocs = Lists.newArrayList(doc.getAllPreviousDocs()); assertEquals(1, prevDocs.size()); NodeDocument prev = prevDocs.get(0); assertTrue(prev.getLocalRevisions().containsKey(r)); } // OAK-1794 @Test public void keepCommitRootForMostRecentChanges() throws Exception { DocumentStore store = mk.getDocumentStore(); NodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.setProperty("p", -1); NodeBuilder test = builder.child("test"); test.setProperty("foo", -1); test.setProperty("bar", -1); ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); for (int i = 0; i < NUM_REVS_THRESHOLD; i++) { builder = ns.getRoot().builder(); builder.setProperty("p", i); test = 
builder.child("test"); test.setProperty("foo", i); ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); } mk.runBackgroundOperations(); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test")); assertNotNull(doc); // the local _commitRoot map must still contain the entry for // the initial 'bar' property Map<Revision, String> valueMap = doc.getValueMap("bar"); assertFalse(valueMap.isEmpty()); Revision r = valueMap.keySet().iterator().next(); assertTrue(doc.getLocalCommitRoot().containsKey(r)); // but also the previous document must contain the commitRoot entry List<NodeDocument> prevDocs = Lists.newArrayList(doc.getAllPreviousDocs()); assertEquals(1, prevDocs.size()); NodeDocument prev = prevDocs.get(0); assertTrue(prev.getLocalCommitRoot().containsKey(r)); } @Test(expected = IllegalArgumentException.class) public void splitPreviousDocument() { NodeDocument doc = new NodeDocument(mk.getDocumentStore()); doc.put(NodeDocument.ID, Utils.getIdFromPath("/test")); doc.put(NodeDocument.SD_TYPE, NodeDocument.SplitDocType.DEFAULT.type); RevisionVector head = mk.getNodeStore().getHeadRevision(); SplitOperations.forDocument(doc, DummyRevisionContext.INSTANCE, head, NO_BINARY, NUM_REVS_THRESHOLD); } @Test public void readLocalCommitInfo() throws Exception { final Set<String> readSet = Sets.newHashSet(); DocumentStore store = new MemoryDocumentStore() { @Override public <T extends Document> T find(Collection<T> collection, String key, int maxCacheAge) { readSet.add(key); return super.find(collection, key, maxCacheAge); } }; DocumentNodeStore ns = new DocumentMK.Builder() .setDocumentStore(store).setAsyncDelay(0).getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.child("test"); ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); for (int i = 0; i < NUM_REVS_THRESHOLD; i++) { builder = ns.getRoot().builder(); builder.setProperty("p", i); builder.child("test").setProperty("p", i); builder.child("test").setProperty("q", i); ns.merge(builder, 
EmptyHook.INSTANCE, CommitInfo.EMPTY); } builder = ns.getRoot().builder(); builder.child("test").removeProperty("q"); ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); ns.runBackgroundOperations(); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test")); assertNotNull(doc); readSet.clear(); // must not access previous document of /test doc.getNodeAtRevision(ns, ns.getHeadRevision(), null); for (String id : Sets.newHashSet(readSet)) { doc = store.find(NODES, id); assertNotNull(doc); if (doc.isSplitDocument() && !doc.getMainPath().equals("/")) { fail("must not access previous document: " + id); } } ns.dispose(); } // OAK-2528 @Test public void commitRootForChildrenFlag() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); for (int i = 0; i < NUM_REVS_THRESHOLD * 2; i++) { NodeBuilder builder = ns.getRoot().builder(); builder.child("test").child("child-" + i); merge(ns, builder); } ns.runBackgroundOperations(); NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test")); assertNotNull(doc); assertTrue(doc.getLocalCommitRoot().size() < NUM_REVS_THRESHOLD); } // OAK-3333 @Test public void purgeAllButMostRecentCommitRoot() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns1 = mk.getNodeStore(); NodeBuilder builder1 = ns1.getRoot().builder(); builder1.child("test"); merge(ns1, builder1); ns1.runBackgroundOperations(); DocumentNodeStore ns2 = new DocumentMK.Builder().setDocumentStore(store) .setAsyncDelay(0).setClusterId(ns1.getClusterId() + 1).getNodeStore(); // prevent merge retries ns2.setMaxBackOffMillis(0); assertTrue(ns2.getRoot().hasChildNode("test")); NodeBuilder builder2 = ns2.getRoot().builder(); builder2.child("test").remove(); for (int i = 0; i < NUM_REVS_THRESHOLD * 2; i++) { builder1 = ns1.getRoot().builder(); builder1.child("test").child("child-" + i); merge(ns1, builder1); } ns1.runBackgroundOperations(); try { merge(ns2, builder2); fail("merge 
must fail with CommitFailedException"); } catch (CommitFailedException e) { // expected } ns2.dispose(); } // OAK-4050 @Test public void purgeAllButMostRecentCommittedCommitRoot() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns1 = mk.getNodeStore(); NodeBuilder builder1 = ns1.getRoot().builder(); builder1.child("test"); merge(ns1, builder1); ns1.runBackgroundOperations(); DocumentNodeStore ns2 = new DocumentMK.Builder().setDocumentStore(store) .setAsyncDelay(0).setClusterId(ns1.getClusterId() + 1).getNodeStore(); // prevent merge retries ns2.setMaxBackOffMillis(0); assertTrue(ns2.getRoot().hasChildNode("test")); NodeBuilder builder2 = ns2.getRoot().builder(); builder2.child("test").remove(); for (int i = 0; i < NUM_REVS_THRESHOLD * 2; i++) { builder1 = ns1.getRoot().builder(); builder1.child("test").child("child-" + i); merge(ns1, builder1); } // create a _commitRoot entry for a revision, which is not committed UpdateOp op = new UpdateOp(Utils.getIdFromPath("/test"), false); NodeDocument.setCommitRoot(op, ns1.newRevision(), 0); store.findAndUpdate(NODES, op); ns1.runBackgroundOperations(); try { merge(ns2, builder2); fail("merge must fail with CommitFailedException"); } catch (CommitFailedException e) { // expected } ns2.dispose(); } // OAK-3081 @Test public void removeGarbage() throws Exception { final DocumentStore store = mk.getDocumentStore(); final DocumentNodeStore ns = mk.getNodeStore(); final List<Exception> exceptions = Lists.newArrayList(); final List<RevisionVector> revisions = Lists.newArrayList(); Thread t = new Thread(new Runnable() { @Override public void run() { try { for (int i = 0; i < 200; i++) { NodeBuilder builder = ns.getRoot().builder(); builder.child("foo").child("node").child("node").child("node").child("node"); builder.child("bar").child("node").child("node").child("node").child("node"); merge(ns, builder); revisions.add(ns.getHeadRevision()); builder = ns.getRoot().builder(); 
builder.child("foo").child("node").remove(); builder.child("bar").child("node").remove(); merge(ns, builder); revisions.add(ns.getHeadRevision()); } } catch (CommitFailedException e) { exceptions.add(e); } } }); t.start(); // Use a revision context, which wraps the DocumentNodeStore and // randomly delays calls to get the head revision RevisionContext rc = new TestRevisionContext(ns); while (t.isAlive()) { for (String id : ns.getSplitCandidates()) { RevisionVector head = ns.getHeadRevision(); NodeDocument doc = store.find(NODES, id); List<UpdateOp> ops = SplitOperations.forDocument(doc, rc, head, NO_BINARY, NUM_REVS_THRESHOLD); Set<Revision> removed = Sets.newHashSet(); Set<Revision> added = Sets.newHashSet(); for (UpdateOp op : ops) { for (Map.Entry<Key, Operation> e : op.getChanges().entrySet()) { if (!"_deleted".equals(e.getKey().getName())) { continue; } Revision r = e.getKey().getRevision(); if (e.getValue().type == Operation.Type.REMOVE_MAP_ENTRY) { removed.add(r); } else if (e.getValue().type == Operation.Type.SET_MAP_ENTRY) { added.add(r); } } } removed.removeAll(added); assertTrue("SplitOperations must not remove committed changes: " + removed, removed.isEmpty()); } // perform the actual cleanup ns.runBackgroundOperations(); } // check documents below /foo and /bar // the _deleted map must contain all revisions for (NodeDocument doc : Utils.getAllDocuments(store)) { if (doc.isSplitDocument() || Utils.getDepthFromId(doc.getId()) < 2) { continue; } Set<Revision> revs = Sets.newHashSet(); for (RevisionVector rv : revisions) { Iterables.addAll(revs, rv); } revs.removeAll(doc.getValueMap("_deleted").keySet()); assertTrue("Missing _deleted entries on " + doc.getId() + ": " + revs, revs.isEmpty()); } } @Test public void splitDocumentWithBinary() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.child("foo"); merge(ns, builder); // use more than 4k of 
binary data (OAK-5205) PropertyState binary = binaryProperty("p", randomBytes(5 * 1024)); for (int i = 0; i < 10; i++) { builder = ns.getRoot().builder(); builder.child("foo").setProperty(binary); merge(ns, builder); ns.runBackgroundOperations(); } NodeDocument foo = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(foo); List<NodeDocument> prevDocs = copyOf(foo.getAllPreviousDocs()); // all but most recent value are moved to individual previous docs assertEquals(9, prevDocs.size()); } @Test public void noBinarySplitWhenRemoved() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); // use more than 4k of binary data (OAK-5205) PropertyState binary = binaryProperty("p", randomBytes(5 * 1024)); builder.child("foo").setProperty(binary); merge(ns, builder); builder = ns.getRoot().builder(); builder.child("foo").remove(); merge(ns, builder); ns.runBackgroundOperations(); // must not create split document in this case. 
See OAK-5010 NodeDocument foo = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(foo); assertEquals(0, foo.getPreviousRanges().size()); // re-create it builder = ns.getRoot().builder(); builder.child("foo"); merge(ns, builder); ns.runBackgroundOperations(); // now the old binary value must be moved to a previous document foo = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(foo); List<NodeDocument> prevDocs = copyOf(foo.getAllPreviousDocs()); assertEquals(1, prevDocs.size()); } // OAK-5205 @Test public void noSplitForSmallBinary() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.child("foo"); merge(ns, builder); for (int i = 0; i < 10; i++) { builder = ns.getRoot().builder(); builder.child("foo").setProperty( binaryProperty("p", ("value" + i).getBytes())); merge(ns, builder); ns.runBackgroundOperations(); } NodeDocument foo = store.find(NODES, Utils.getIdFromPath("/foo")); assertNotNull(foo); List<NodeDocument> prevDocs = copyOf(foo.getAllPreviousDocs()); // must not create split documents for small binaries less 4k assertEquals(0, prevDocs.size()); } @Test public void nonSplittableBigDocument() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.child("foo"); merge(ns, builder); String id = Utils.getIdFromPath("/foo"); int num = 0; while (store.find(NODES, id).getMemory() < DOC_SIZE_THRESHOLD) { builder = ns.getRoot().builder(); for (int i = 0; i < 50; i++) { builder.child("foo").setProperty("p" + num++, "some value as payload for the document"); } merge(ns, builder); } Iterable<UpdateOp> splitOps = store.find(NODES, id) .split(ns, ns.getHeadRevision(), NO_BINARY); assertEquals(0, Iterables.size(splitOps)); } @Test public void splitWithBranchCommit() throws Exception { DocumentStore store = mk.getDocumentStore(); 
DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.child("foo"); merge(ns, builder); String branch = mk.branch(null); branch = mk.commit("/foo", "^\"p\":\"value\"", branch, null); mk.merge(branch, null); String id = Utils.getIdFromPath("/foo"); NodeDocument doc = store.find(NODES, id); assertNotNull(doc); assertThat(doc.getLocalBranchCommits(), is(not(empty()))); for (int i = 0; i < 5; i++) { builder = ns.getRoot().builder(); builder.child("foo").setProperty("p", "value-" + i); merge(ns, builder); } ns.runBackgroundOperations(); doc = store.find(NODES, id); for (UpdateOp op : SplitOperations.forDocument(doc, ns, ns.getHeadRevision(), NO_BINARY, 5)) { store.createOrUpdate(NODES, op); } doc = store.find(NODES, id); // must have a previous document now assertThat(doc.getPreviousRanges().keySet(), hasSize(1)); // branch commit entry moved to previous document assertThat(doc.getLocalBranchCommits(), is(empty())); NodeDocument prev = doc.getAllPreviousDocs().next(); assertThat(prev.getLocalBranchCommits(), is(not(empty()))); } @Test public void splitDefaultNoBranch() throws Exception { DocumentStore store = mk.getDocumentStore(); DocumentNodeStore ns = mk.getNodeStore(); NodeBuilder builder = ns.getRoot().builder(); builder.child("foo").child("bar"); merge(ns, builder); for (int i = 0; i < 5; i++) { builder = ns.getRoot().builder(); builder.child("foo").setProperty("p", "value-" + i); merge(ns, builder); } ns.runBackgroundOperations(); String id = Utils.getIdFromPath("/foo"); NodeDocument doc = store.find(NODES, id); assertNotNull(doc); for (UpdateOp op : SplitOperations.forDocument(doc, ns, ns.getHeadRevision(), NO_BINARY, 5)) { store.createOrUpdate(NODES, op); } doc = store.find(NODES, id); // must have a previous document now assertThat(doc.getPreviousRanges().keySet(), is(not(empty()))); Iterator<NodeDocument> it = doc.getAllPreviousDocs(); while (it.hasNext()) { assertEquals(SplitDocType.DEFAULT_NO_BRANCH, 
it.next().getSplitDocType()); } } private static class TestRevisionContext implements RevisionContext { private final RevisionContext rc; TestRevisionContext(RevisionContext rc) { this.rc = rc; } @Override public UnmergedBranches getBranches() { return rc.getBranches(); } @Override public UnsavedModifications getPendingModifications() { return rc.getPendingModifications(); } @Override public int getClusterId() { return rc.getClusterId(); } @NotNull @Override public RevisionVector getHeadRevision() { try { Thread.sleep((long) (Math.random() * 100)); } catch (InterruptedException e) { // ignore } return rc.getHeadRevision(); } @NotNull @Override public Revision newRevision() { return rc.newRevision(); } @NotNull @Override public Clock getClock() { return rc.getClock(); } @Override public String getCommitValue(@NotNull Revision changeRevision, @NotNull NodeDocument doc) { return rc.getCommitValue(changeRevision, doc); } } private static NodeState merge(NodeStore store, NodeBuilder root) throws CommitFailedException { return store.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY); } private void syncMKs(List<DocumentMK> mks, int idx) { mks.get(idx).runBackgroundOperations(); for (int i = 0; i < mks.size(); i++) { if (idx != i) { mks.get(i).runBackgroundOperations(); } } } private byte[] randomBytes(int num) { Random random = new Random(42); byte[] data = new byte[num]; random.nextBytes(data); return data; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.indexing.kafka.supervisor;

import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory;
import org.apache.druid.indexing.kafka.KafkaIndexTaskClientFactory;
import org.apache.druid.indexing.kafka.KafkaIndexTaskModule;
import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
import org.apache.druid.indexing.overlord.TaskMaster;
import org.apache.druid.indexing.overlord.TaskStorage;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable;
import org.apache.druid.server.metrics.DruidMonitorSchedulerConfig;
import org.apache.druid.server.metrics.NoopServiceEmitter;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;

/**
 * Serde round-trip tests for {@link KafkaSupervisorSpec}: verifies that a
 * minimal JSON spec deserializes with expected values, that reserialization
 * is stable, and that suspend/resume spec copies flip only the suspended flag.
 */
public class KafkaSupervisorSpecTest
{
  // Shared minimal supervisor spec; previously duplicated verbatim in both
  // test methods. Both tests only need a spec that parses with four
  // aggregators, the "metrics" topic, and no explicit tuningConfig/context.
  private static final String SPEC_JSON = "{\n"
                                          + " \"type\": \"kafka\",\n"
                                          + " \"dataSchema\": {\n"
                                          + " \"dataSource\": \"metrics-kafka\",\n"
                                          + " \"parser\": {\n"
                                          + " \"type\": \"string\",\n"
                                          + " \"parseSpec\": {\n"
                                          + " \"format\": \"json\",\n"
                                          + " \"timestampSpec\": {\n"
                                          + " \"column\": \"timestamp\",\n"
                                          + " \"format\": \"auto\"\n"
                                          + " },\n"
                                          + " \"dimensionsSpec\": {\n"
                                          + " \"dimensions\": [],\n"
                                          + " \"dimensionExclusions\": [\n"
                                          + " \"timestamp\",\n"
                                          + " \"value\"\n"
                                          + " ]\n"
                                          + " }\n"
                                          + " }\n"
                                          + " },\n"
                                          + " \"metricsSpec\": [\n"
                                          + " {\n"
                                          + " \"name\": \"count\",\n"
                                          + " \"type\": \"count\"\n"
                                          + " },\n"
                                          + " {\n"
                                          + " \"name\": \"value_sum\",\n"
                                          + " \"fieldName\": \"value\",\n"
                                          + " \"type\": \"doubleSum\"\n"
                                          + " },\n"
                                          + " {\n"
                                          + " \"name\": \"value_min\",\n"
                                          + " \"fieldName\": \"value\",\n"
                                          + " \"type\": \"doubleMin\"\n"
                                          + " },\n"
                                          + " {\n"
                                          + " \"name\": \"value_max\",\n"
                                          + " \"fieldName\": \"value\",\n"
                                          + " \"type\": \"doubleMax\"\n"
                                          + " }\n"
                                          + " ],\n"
                                          + " \"granularitySpec\": {\n"
                                          + " \"type\": \"uniform\",\n"
                                          + " \"segmentGranularity\": \"HOUR\",\n"
                                          + " \"queryGranularity\": \"NONE\"\n"
                                          + " }\n"
                                          + " },\n"
                                          + " \"ioConfig\": {\n"
                                          + " \"topic\": \"metrics\",\n"
                                          + " \"consumerProperties\": {\n"
                                          + " \"bootstrap.servers\": \"localhost:9092\"\n"
                                          + " },\n"
                                          + " \"taskCount\": 1\n"
                                          + " }\n"
                                          + "}";

  private final ObjectMapper mapper;

  public KafkaSupervisorSpecTest()
  {
    mapper = new DefaultObjectMapper();
    // Null injectables are sufficient here: the spec is only (de)serialized,
    // never used to actually create a supervisor.
    mapper.setInjectableValues(
        new InjectableValues.Std()
            .addValue(TaskStorage.class, null)
            .addValue(TaskMaster.class, null)
            .addValue(IndexerMetadataStorageCoordinator.class, null)
            .addValue(KafkaIndexTaskClientFactory.class, null)
            .addValue(ObjectMapper.class, mapper)
            .addValue(ServiceEmitter.class, new NoopServiceEmitter())
            .addValue(DruidMonitorSchedulerConfig.class, null)
            .addValue(RowIngestionMetersFactory.class, null)
            .addValue(ExprMacroTable.class.getName(), LookupEnabledTestExprMacroTable.INSTANCE)
    );
    mapper.registerModules((Iterable<Module>) new KafkaIndexTaskModule().getJacksonModules());
  }

  /**
   * Deserializes {@link #SPEC_JSON}, checks parsed values and defaults, and
   * verifies that serialize → deserialize → serialize is a fixed point.
   */
  @Test
  public void testSerde() throws IOException
  {
    KafkaSupervisorSpec spec = mapper.readValue(SPEC_JSON, KafkaSupervisorSpec.class);

    Assert.assertNotNull(spec);
    Assert.assertNotNull(spec.getDataSchema());
    Assert.assertEquals(4, spec.getDataSchema().getAggregators().length);
    Assert.assertNotNull(spec.getIoConfig());
    Assert.assertEquals("metrics", spec.getIoConfig().getTopic());
    Assert.assertNotNull(spec.getTuningConfig());
    Assert.assertNull(spec.getContext());
    Assert.assertFalse(spec.isSuspended());

    String serialized = mapper.writeValueAsString(spec);

    // expect default values populated in reserialized string
    Assert.assertTrue(serialized.contains("\"tuningConfig\":{"));
    Assert.assertTrue(serialized.contains("\"indexSpec\":{"));
    Assert.assertTrue(serialized.contains("\"suspended\":false"));

    KafkaSupervisorSpec spec2 = mapper.readValue(serialized, KafkaSupervisorSpec.class);

    String stable = mapper.writeValueAsString(spec2);

    Assert.assertEquals(serialized, stable);
  }

  /**
   * Verifies {@code createSuspendedSpec()} / {@code createRunningSpec()}
   * copies round-trip through JSON with the suspended flag set accordingly.
   */
  @Test
  public void testSuspendResume() throws IOException
  {
    KafkaSupervisorSpec spec = mapper.readValue(SPEC_JSON, KafkaSupervisorSpec.class);

    Assert.assertNotNull(spec);
    Assert.assertNotNull(spec.getDataSchema());
    Assert.assertEquals(4, spec.getDataSchema().getAggregators().length);
    Assert.assertNotNull(spec.getIoConfig());
    Assert.assertEquals("metrics", spec.getIoConfig().getTopic());
    Assert.assertNotNull(spec.getTuningConfig());
    Assert.assertNull(spec.getContext());
    Assert.assertFalse(spec.isSuspended());

    String suspendedSerialized = mapper.writeValueAsString(spec.createSuspendedSpec());

    // expect default values populated in reserialized string
    Assert.assertTrue(suspendedSerialized.contains("\"tuningConfig\":{"));
    Assert.assertTrue(suspendedSerialized.contains("\"indexSpec\":{"));
    Assert.assertTrue(suspendedSerialized.contains("\"suspended\":true"));

    KafkaSupervisorSpec suspendedSpec = mapper.readValue(suspendedSerialized, KafkaSupervisorSpec.class);

    Assert.assertTrue(suspendedSpec.isSuspended());

    String runningSerialized = mapper.writeValueAsString(spec.createRunningSpec());

    KafkaSupervisorSpec runningSpec = mapper.readValue(runningSerialized, KafkaSupervisorSpec.class);

    Assert.assertFalse(runningSpec.isSuspended());
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.execution;

import com.google.common.collect.Ordering;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.concurrent.ThreadPoolExecutorMBean;
import io.airlift.log.Logger;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.trino.ExceededCpuLimitException;
import io.trino.ExceededScanLimitException;
import io.trino.Session;
import io.trino.execution.QueryExecution.QueryOutputInfo;
import io.trino.execution.StateMachine.StateChangeListener;
import io.trino.memory.ClusterMemoryManager;
import io.trino.server.BasicQueryInfo;
import io.trino.server.protocol.Slug;
import io.trino.spi.QueryId;
import io.trino.spi.TrinoException;
import io.trino.sql.planner.Plan;
import org.weakref.jmx.Managed;
import org.weakref.jmx.Nested;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;

import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.util.concurrent.Futures.immediateFailedFuture;
import static io.airlift.concurrent.Threads.threadsNamed;
import static io.trino.SystemSessionProperties.getQueryMaxCpuTime;
import static io.trino.SystemSessionProperties.getQueryMaxScanPhysicalBytes;
import static io.trino.execution.QueryState.RUNNING;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;

/**
 * Coordinator-side {@link QueryManager}: registers query executions with a
 * {@link QueryTracker}, exposes per-query lookup/listener/cancel operations,
 * and periodically enforces memory, CPU-time, and scanned-bytes limits on
 * running queries via a background management executor.
 */
@ThreadSafe
public class SqlQueryManager
        implements QueryManager
{
    private static final Logger log = Logger.get(SqlQueryManager.class);

    private final ClusterMemoryManager memoryManager;
    private final QueryTracker<QueryExecution> queryTracker;

    // Server-wide limits from QueryManagerConfig; session-level limits are
    // combined with these at enforcement time (see enforceCpuLimits/enforceScanLimits).
    private final Duration maxQueryCpuTime;
    private final Optional<DataSize> maxQueryScanPhysicalBytes;

    private final ExecutorService queryExecutor;
    private final ThreadPoolExecutorMBean queryExecutorMBean;
    private final ScheduledExecutorService queryManagementExecutor;
    private final ThreadPoolExecutorMBean queryManagementExecutorMBean;

    @Inject
    public SqlQueryManager(ClusterMemoryManager memoryManager, QueryManagerConfig queryManagerConfig)
    {
        this.memoryManager = requireNonNull(memoryManager, "memoryManager is null");

        this.maxQueryCpuTime = queryManagerConfig.getQueryMaxCpuTime();
        this.maxQueryScanPhysicalBytes = queryManagerConfig.getQueryMaxScanPhysicalBytes();

        this.queryExecutor = newCachedThreadPool(threadsNamed("query-scheduler-%s"));
        this.queryExecutorMBean = new ThreadPoolExecutorMBean((ThreadPoolExecutor) queryExecutor);
        this.queryManagementExecutor = newScheduledThreadPool(queryManagerConfig.getQueryManagerExecutorPoolSize(), threadsNamed("query-management-%s"));
        this.queryManagementExecutorMBean = new ThreadPoolExecutorMBean((ThreadPoolExecutor) queryManagementExecutor);

        // The tracker shares the management executor for its own periodic work.
        this.queryTracker = new QueryTracker<>(queryManagerConfig, queryManagementExecutor);
    }

    /**
     * Starts the tracker and schedules the once-per-second limit-enforcement
     * task. Each check is isolated in its own try/catch so a failure in one
     * enforcement pass cannot suppress the others or kill the scheduled task.
     */
    @PostConstruct
    public void start()
    {
        queryTracker.start();
        queryManagementExecutor.scheduleWithFixedDelay(() -> {
            try {
                enforceMemoryLimits();
            }
            catch (Throwable e) {
                log.error(e, "Error enforcing memory limits");
            }

            try {
                enforceCpuLimits();
            }
            catch (Throwable e) {
                log.error(e, "Error enforcing query CPU time limits");
            }

            try {
                enforceScanLimits();
            }
            catch (Throwable e) {
                log.error(e, "Error enforcing query scan bytes limits");
            }
        }, 1, 1, TimeUnit.SECONDS);
    }

    /**
     * Stops the tracker first, then shuts down both executors immediately.
     */
    @PreDestroy
    public void stop()
    {
        queryTracker.stop();
        queryManagementExecutor.shutdownNow();
        queryExecutor.shutdownNow();
    }

    /**
     * Returns basic info for all tracked queries; queries whose info fetch
     * throws a RuntimeException are silently omitted from the result.
     */
    @Override
    public List<BasicQueryInfo> getQueries()
    {
        return queryTracker.getAllQueries().stream()
                .map(queryExecution -> {
                    try {
                        return queryExecution.getBasicQueryInfo();
                    }
                    catch (RuntimeException ignored) {
                        // query may have been concurrently removed/failed; skip it
                        return null;
                    }
                })
                .filter(Objects::nonNull)
                .collect(toImmutableList());
    }

    @Override
    public void addOutputInfoListener(QueryId queryId, Consumer<QueryOutputInfo> listener)
    {
        requireNonNull(listener, "listener is null");

        queryTracker.getQuery(queryId).addOutputInfoListener(listener);
    }

    @Override
    public void outputTaskFailed(TaskId taskId, Throwable failure)
    {
        queryTracker.getQuery(taskId.getQueryId()).outputTaskFailed(taskId, failure);
    }

    @Override
    public void addStateChangeListener(QueryId queryId, StateChangeListener<QueryState> listener)
    {
        requireNonNull(listener, "listener is null");

        queryTracker.getQuery(queryId).addStateChangeListener(listener);
    }

    /**
     * Future completing when the query leaves {@code currentState}; fails
     * immediately with NoSuchElementException if the query is unknown.
     */
    @Override
    public ListenableFuture<QueryState> getStateChange(QueryId queryId, QueryState currentState)
    {
        return queryTracker.tryGetQuery(queryId)
                .map(query -> query.getStateChange(currentState))
                .orElseGet(() -> immediateFailedFuture(new NoSuchElementException()));
    }

    @Override
    public BasicQueryInfo getQueryInfo(QueryId queryId)
    {
        return queryTracker.getQuery(queryId).getBasicQueryInfo();
    }

    @Override
    public QueryInfo getFullQueryInfo(QueryId queryId)
            throws NoSuchElementException
    {
        return queryTracker.getQuery(queryId).getQueryInfo();
    }

    @Override
    public Session getQuerySession(QueryId queryId)
            throws NoSuchElementException
    {
        return queryTracker.getQuery(queryId).getSession();
    }

    @Override
    public Slug getQuerySlug(QueryId queryId)
    {
        return queryTracker.getQuery(queryId).getSlug();
    }

    public Plan getQueryPlan(QueryId queryId)
    {
        return queryTracker.getQuery(queryId).getQueryPlan();
    }

    public void addFinalQueryInfoListener(QueryId queryId, StateChangeListener<QueryInfo> stateChangeListener)
    {
        queryTracker.getQuery(queryId).addFinalQueryInfoListener(stateChangeListener);
    }

    @Override
    public QueryState getQueryState(QueryId queryId)
    {
        return queryTracker.getQuery(queryId).getState();
    }

    /** No-op for unknown queries (tryGetQuery, not getQuery). */
    @Override
    public void recordHeartbeat(QueryId queryId)
    {
        queryTracker.tryGetQuery(queryId)
                .ifPresent(QueryExecution::recordHeartbeat);
    }

    /**
     * Registers and starts a query execution. Fails if the query id is
     * already registered. The final-info listener expires the query from
     * the tracker, which is required to avoid leaking tracked queries.
     */
    @Override
    public void createQuery(QueryExecution queryExecution)
    {
        requireNonNull(queryExecution, "queryExecution is null");

        if (!queryTracker.addQuery(queryExecution)) {
            throw new TrinoException(GENERIC_INTERNAL_ERROR, format("Query %s already registered", queryExecution.getQueryId()));
        }

        queryExecution.addFinalQueryInfoListener(finalQueryInfo -> {
            // execution MUST be added to the expiration queue or there will be a leak
            queryTracker.expireQuery(queryExecution.getQueryId());
        });

        queryExecution.start();
    }

    @Override
    public void failQuery(QueryId queryId, Throwable cause)
    {
        requireNonNull(cause, "cause is null");

        queryTracker.tryGetQuery(queryId)
                .ifPresent(query -> query.fail(cause));
    }

    @Override
    public void cancelQuery(QueryId queryId)
    {
        log.debug("Cancel query %s", queryId);

        queryTracker.tryGetQuery(queryId)
                .ifPresent(QueryExecution::cancelQuery);
    }

    @Override
    public void cancelStage(StageId stageId)
    {
        requireNonNull(stageId, "stageId is null");

        log.debug("Cancel stage %s", stageId);

        queryTracker.tryGetQuery(stageId.getQueryId())
                .ifPresent(query -> query.cancelStage(stageId));
    }

    @Managed(description = "Query scheduler executor")
    @Nested
    public ThreadPoolExecutorMBean getExecutor()
    {
        return queryExecutorMBean;
    }

    @Managed(description = "Query query management executor")
    @Nested
    public ThreadPoolExecutorMBean getManagementExecutor()
    {
        return queryManagementExecutorMBean;
    }

    /**
     * Enforce memory limits at the query level
     */
    private void enforceMemoryLimits()
    {
        // Only RUNNING queries are handed to the memory manager.
        List<QueryExecution> runningQueries = queryTracker.getAllQueries().stream()
                .filter(query -> query.getState() == RUNNING)
                .collect(toImmutableList());
        memoryManager.process(runningQueries, this::getQueries);
    }

    /**
     * Enforce query CPU time limits
     */
    private void enforceCpuLimits()
    {
        for (QueryExecution query : queryTracker.getAllQueries()) {
            Duration cpuTime = query.getTotalCpuTime();
            Duration sessionLimit = getQueryMaxCpuTime(query.getSession());
            // Effective limit is the tighter of the server-wide and session limits.
            Duration limit = Ordering.natural().min(maxQueryCpuTime, sessionLimit);
            if (cpuTime.compareTo(limit) > 0) {
                query.fail(new ExceededCpuLimitException(limit));
            }
        }
    }

    /**
     * Enforce query scan physical bytes limits
     */
    private void enforceScanLimits()
    {
        for (QueryExecution query : queryTracker.getAllQueries()) {
            Optional<DataSize> limitOpt = getQueryMaxScanPhysicalBytes(query.getSession());
            if (maxQueryScanPhysicalBytes.isPresent()) {
                // Combine with the server limit: take the min when both are set,
                // otherwise fall back to the server limit alone.
                limitOpt = limitOpt
                        .flatMap(sessionLimit ->
                                maxQueryScanPhysicalBytes.map(serverLimit ->
                                        Ordering.natural().min(serverLimit, sessionLimit)))
                        .or(() -> maxQueryScanPhysicalBytes);
            }

            limitOpt.ifPresent(limit -> {
                DataSize scan = query.getBasicQueryInfo().getQueryStats().getPhysicalInputDataSize();
                if (scan.compareTo(limit) > 0) {
                    query.fail(new ExceededScanLimitException(limit));
                }
            });
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed.dht.preloader;

import org.apache.ignite.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.util.tostring.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.plugin.extensions.communication.*;

import java.io.*;
import java.nio.*;
import java.util.*;

/**
 * Partition supply message. Carries rebalanced cache entries, grouped by
 * partition, from a supplier node to a demander node.
 */
public class GridDhtPartitionSupplyMessage extends GridCacheMessage implements GridCacheDeployable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Worker ID ({@code -1} until assigned by the supply constructor). */
    private int workerId = -1;

    /** Update sequence. */
    private long updateSeq;

    /** Acknowledgement flag. */
    private boolean ack;

    /** Partitions that have been fully sent. */
    @GridDirectCollection(int.class)
    private Collection<Integer> last;

    /** Partitions which were not found. */
    @GridToStringInclude
    @GridDirectCollection(int.class)
    private Collection<Integer> missed;

    /** Entries, keyed by partition number. */
    @GridDirectMap(keyType = int.class, valueType = CacheEntryInfoCollection.class)
    private Map<Integer, CacheEntryInfoCollection> infos = new HashMap<>();

    /** Message size estimate in bytes; not transferred over the wire. */
    @GridDirectTransient
    private int msgSize;

    /**
     * @param workerId Worker ID.
     * @param updateSeq Update sequence for this node.
     * @param cacheId Cache ID.
     */
    GridDhtPartitionSupplyMessage(int workerId, long updateSeq, int cacheId) {
        assert workerId >= 0;
        assert updateSeq > 0;

        this.cacheId = cacheId;
        this.updateSeq = updateSeq;
        this.workerId = workerId;
    }

    /**
     * Empty constructor required for {@link Externalizable}.
     */
    public GridDhtPartitionSupplyMessage() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public boolean allowForStartup() {
        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean ignoreClassErrors() {
        return true;
    }

    /**
     * @return Worker ID.
     */
    int workerId() {
        return workerId;
    }

    /**
     * @return Update sequence.
     */
    long updateSequence() {
        return updateSeq;
    }

    /**
     * Marks this message for acknowledgment.
     */
    void markAck() {
        ack = true;
    }

    /**
     * @return Acknowledgement flag.
     */
    boolean ack() {
        return ack;
    }

    /**
     * @return Partitions that have been fully sent (never {@code null};
     *      empty set when none were marked).
     */
    Collection<Integer> last() {
        return last == null ? Collections.<Integer>emptySet() : last;
    }

    /**
     * @param p Partition which was fully sent.
     */
    void last(int p) {
        if (last == null)
            last = new HashSet<>();

        if (last.add(p)) {
            msgSize += 4; // Account for the int partition id added to 'last'.

            // If partition is empty, we need to add it.
            if (!infos.containsKey(p)) {
                CacheEntryInfoCollection infoCol = new CacheEntryInfoCollection();

                infoCol.init();

                infos.put(p, infoCol);
            }
        }
    }

    /**
     * @param p Missed partition.
     */
    void missed(int p) {
        if (missed == null)
            missed = new HashSet<>();

        if (missed.add(p))
            msgSize += 4; // Account for the int partition id added to 'missed'.
    }

    /**
     * @return Missed partitions (never {@code null}; empty set when none).
     */
    Collection<Integer> missed() {
        return missed == null ? Collections.<Integer>emptySet() : missed;
    }

    /**
     * @return Entries.
     */
    Map<Integer, CacheEntryInfoCollection> infos() {
        return infos;
    }

    /**
     * @return Message size.
     */
    int messageSize() {
        return msgSize;
    }

    /**
     * Adds a marshalled entry for the given partition and updates the size estimate.
     *
     * @param p Partition.
     * @param info Entry to add.
     * @param ctx Cache context.
     * @throws IgniteCheckedException If failed.
     */
    void addEntry(int p, GridCacheEntryInfo info, GridCacheContext ctx) throws IgniteCheckedException {
        assert info != null;

        marshalInfo(info, ctx);

        msgSize += info.marshalledSize(ctx);

        CacheEntryInfoCollection infoCol = infos.get(p);

        if (infoCol == null) {
            msgSize += 4; // Account for the int partition key of a new map entry.

            infos.put(p, infoCol = new CacheEntryInfoCollection());

            infoCol.init();
        }

        infoCol.add(info);
    }

    /**
     * Same as {@link #addEntry} but additionally asserts that the entry carries a
     * key (or key bytes) and a value before marshalling.
     *
     * @param p Partition.
     * @param info Entry to add.
     * @param ctx Cache context.
     * @throws IgniteCheckedException If failed.
     */
    void addEntry0(int p, GridCacheEntryInfo info, GridCacheContext ctx) throws IgniteCheckedException {
        assert info != null;
        assert (info.key() != null || info.keyBytes() != null);
        assert info.value() != null;

        // Need to call this method to initialize info properly.
        marshalInfo(info, ctx);

        msgSize += info.marshalledSize(ctx);

        CacheEntryInfoCollection infoCol = infos.get(p);

        if (infoCol == null) {
            msgSize += 4; // Account for the int partition key of a new map entry.

            infos.put(p, infoCol = new CacheEntryInfoCollection());

            infoCol.init();
        }

        infoCol.add(info);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("ForLoopReplaceableByForEach")
    @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        super.finishUnmarshal(ctx, ldr);

        GridCacheContext cacheCtx = ctx.cacheContext(cacheId);

        // Unmarshal every entry of every partition with the cache's class loader.
        for (CacheEntryInfoCollection col : infos().values()) {
            List<GridCacheEntryInfo> entries = col.infos();

            for (int i = 0; i < entries.size(); i++)
                entries.get(i).unmarshal(cacheCtx, ldr);
        }
    }

    /**
     * @return Number of entries in message.
     */
    public int size() {
        return infos.size();
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!super.writeTo(buf, writer))
            return false;

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // Intentional fall-through between cases: writing resumes from the
        // saved state when the buffer fills up mid-message.
        switch (writer.state()) {
            case 3:
                if (!writer.writeBoolean("ack", ack))
                    return false;

                writer.incrementState();

            case 4:
                if (!writer.writeMap("infos", infos, MessageCollectionItemType.INT, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 5:
                if (!writer.writeCollection("last", last, MessageCollectionItemType.INT))
                    return false;

                writer.incrementState();

            case 6:
                if (!writer.writeCollection("missed", missed, MessageCollectionItemType.INT))
                    return false;

                writer.incrementState();

            case 7:
                if (!writer.writeLong("updateSeq", updateSeq))
                    return false;

                writer.incrementState();

            case 8:
                if (!writer.writeInt("workerId", workerId))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        if (!super.readFrom(buf, reader))
            return false;

        // Intentional fall-through between cases: reading resumes from the
        // saved state (field order must mirror writeTo exactly).
        switch (reader.state()) {
            case 3:
                ack = reader.readBoolean("ack");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 4:
                infos = reader.readMap("infos", MessageCollectionItemType.INT, MessageCollectionItemType.MSG, false);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 5:
                last = reader.readCollection("last", MessageCollectionItemType.INT);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 6:
                missed = reader.readCollection("missed", MessageCollectionItemType.INT);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 7:
                updateSeq = reader.readLong("updateSeq");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 8:
                workerId = reader.readInt("workerId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(GridDhtPartitionSupplyMessage.class);
    }

    /** {@inheritDoc} */
    @Override public byte directType() {
        return 45;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 9;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDhtPartitionSupplyMessage.class, this,
            "size", size(),
            "parts", infos.keySet(),
            "super", super.toString());
    }
}
/**
 * Quadrant Feedback System
 *
 * Records raw microphone audio and streams it to a remote analysis server;
 * feedback values reported back through {@link #setValues(String, String)}
 * drive the on-screen quadrant display.
 */
package com.example.psfeedbackprototype;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity {

    private static final int RECORDER_SAMPLERATE = 44100;
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
    private static final int TIME_FRAME_IN_MILIS = 2000; // The value of each chunk

    private AudioRecord recorder;
    private Thread streamingThread;
    private boolean isRecording = false;

    // Total number of elements to buffer: samples per second * chunk length in seconds.
    private int BufferElements2Rec = MainActivity.RECORDER_SAMPLERATE * (TIME_FRAME_IN_MILIS / 1000);
    private int BytesPerElement = 2; // 2 bytes in 16bit format

    private SocketCommTx comm;
    private String serverIP = "192.168.1.103"; // Default; overridden by ipaddress.txt if present.

    static int volumeValue, speedValue;

    double thresholdVol = 56.5;    // CALIBRATE
    double maxThresholdVol = 67;   // CALIBRATE (overridden by calibration file when present)
    double thresholdRate = 2.15;   // CALIBRATE

    /**
     * Receives the analysis results from the server and updates the quadrant display.
     *
     * @param loudness average loudness as a decimal string
     * @param rate average speech rate as a decimal string
     */
    public synchronized void setValues(String loudness, String rate) {
        double avgLoud = Double.parseDouble(loudness);
        double avgRateF = Double.parseDouble(rate);

        Log.i("debugRun", "avgLoud: " + avgLoud + " avgRateF: " + avgRateF);

        checkInputData(avgLoud, avgRateF);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Keep the screen on and go full screen while recording.
        // (FLAG_KEEP_SCREEN_ON only needs to be set once; the original code set it twice.)
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);
        this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);

        setContentView(R.layout.activity_main);
    }

    @Override
    protected void onResume() {
        super.onResume();

        loadCalibData();
        readIPaddress();

        comm = new SocketCommTx(serverIP, 9090, this);

        startRecording();
    }

    /**
     * Loads the maximum loudness threshold from the calibration file on external
     * storage, if present. Falls back to the compiled-in default otherwise.
     */
    private void loadCalibData() {
        File root = android.os.Environment.getExternalStorageDirectory();
        File dir = new File(root.getAbsolutePath() + "/ROCHCIStorage");
        File file = new File(dir, "ROCSpeakGlass_Calibration.info");

        if (!file.exists()) {
            Toast.makeText(getApplicationContext(), "No Calibration Data Found", Toast.LENGTH_LONG).show();
            return;
        }

        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            String maxLoudVal = br.readLine();

            if (maxLoudVal == null) {
                Toast.makeText(getApplicationContext(), "No Calibration Data Found", Toast.LENGTH_LONG).show();
                return;
            }

            maxThresholdVol = Double.parseDouble(maxLoudVal);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            Toast.makeText(getApplicationContext(), "No Calibration Data Found", Toast.LENGTH_LONG).show();
            e.printStackTrace();
        } catch (NumberFormatException e) {
            // A malformed calibration file should not crash the app; keep the default.
            Toast.makeText(getApplicationContext(), "No Calibration Data Found", Toast.LENGTH_LONG).show();
            e.printStackTrace();
        } finally {
            // Fix: the reader was previously never closed (resource leak).
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ignored) {
                    // Best-effort close.
                }
            }
        }
    } // end loadCalibData

    /**
     * Reads the analysis server's IP address from ipaddress.txt on external
     * storage. Keeps the default address when the file is missing or empty.
     */
    private void readIPaddress() {
        File root = android.os.Environment.getExternalStorageDirectory();
        File dir = new File(root.getAbsolutePath() + "/ROCHCIStorage");
        File file = new File(dir, "ipaddress.txt");

        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            String line = br.readLine();
            // Fix: previously a missing line set serverIP to null; now the
            // default is kept, and surrounding whitespace is stripped.
            if (line != null) {
                serverIP = line.trim();
            }
            Log.i("debugRun", "serverIP: " + serverIP);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Fix: close in finally so the reader is released even on error.
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ignored) {
                    // Best-effort close.
                }
            }
        }
    }

    @Override
    protected void onPause() {
        stopRecording();
        comm.stopThread();
        super.onPause();
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Kill the whole process so audio capture and streaming fully stop.
        int pid = android.os.Process.myPid();
        android.os.Process.killProcess(pid);
    }

    /**
     * Highlights exactly one quadrant (state 0,1) and dims the others (state 1,0)
     * based on whether volume and speech rate are above their thresholds.
     */
    public void checkInputData(double avgLoud, double avgRateF) {
        // if volume HIGH && speed HIGH
        if (avgLoud >= thresholdVol && avgRateF >= thresholdRate) {
            MySurfaceView.graphicsThread.changeState(0, 1, GraphicsThread.q1);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q2);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q3);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q4);
        }
        // if volume HIGH && speed LOW
        else if (avgLoud >= thresholdVol && avgRateF < thresholdRate) {
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q1);
            MySurfaceView.graphicsThread.changeState(0, 1, GraphicsThread.q2);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q3);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q4);
        }
        // if volume LOW && speed LOW
        else if (avgLoud < thresholdVol && avgRateF < thresholdRate) {
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q1);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q2);
            MySurfaceView.graphicsThread.changeState(0, 1, GraphicsThread.q3);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q4);
        }
        // if volume LOW && speed HIGH
        else {
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q1);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q2);
            MySurfaceView.graphicsThread.changeState(1, 0, GraphicsThread.q3);
            MySurfaceView.graphicsThread.changeState(0, 1, GraphicsThread.q4);
        }
    } // end check

    //****************************************************************************************************

    /**
     * Streaming loop: repeatedly reads one chunk of 16-bit PCM samples from the
     * microphone and forwards it to the socket as bytes. Runs until
     * {@code isRecording} is cleared.
     */
    private void writeAudioDataToSocket() {
        // Buffer to store the samples; short because encoding format is 16 bit PCM.
        short sData[] = new short[BufferElements2Rec];

        while (isRecording) {
            // gets the voice output from microphone to byte format
            int read = recorder.read(sData, 0, BufferElements2Rec);
            // Fix: skip error chunks — read() returns a negative error code
            // (e.g. ERROR_INVALID_OPERATION) instead of a sample count on failure.
            if (read > 0) {
                byte bData[] = short2byte(sData);
                comm.fillBuffer(bData);
            }
        }
    }

    /**
     * Converts 16-bit samples to little-endian byte pairs, zeroing the source
     * buffer as it goes.
     */
    private byte[] short2byte(short[] sData) {
        int shortArrsize = sData.length;
        byte[] bytes = new byte[shortArrsize * 2];
        for (int i = 0; i < shortArrsize; i++) {
            bytes[i * 2] = (byte) (sData[i] & 0x00FF);
            bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
            sData[i] = 0;
        }
        return bytes;
    }

    /**
     * Configures the AudioRecord and launches the background streaming thread.
     */
    private void startRecording() {
        // Calculate the required buffersize for the desired sample rate and encoding rate
        int bufferSize = determineMinimumBufferSize(RECORDER_SAMPLERATE, RECORDER_AUDIO_ENCODING);

        // Prepare the recorder
        recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                RECORDER_SAMPLERATE, RECORDER_CHANNELS,
                RECORDER_AUDIO_ENCODING, bufferSize);

        recorder.startRecording();
        isRecording = true;
        Log.d("MainActivity_startRecording", "recording started");

        // Running thread for writing audio data
        streamingThread = new Thread(new Runnable() {
            public void run() {
                writeAudioDataToSocket();
            }
        }, "AudioRecorder Thread");
        streamingThread.start();
        // Fix: removed a duplicated "recording started" log line.
    }

    // Stops the recording activity
    private void stopRecording() {
        if (recorder != null) {
            isRecording = false;
            recorder.stop();
            recorder.release();
            recorder = null;
            streamingThread = null;
        }
    }

    /**
     * Returns the larger of the platform's minimum buffer size and the size of
     * one streaming chunk, so a full chunk always fits.
     */
    private int determineMinimumBufferSize(final int sampleRate, int encoding) {
        int minBufferSize = AudioRecord.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_IN_MONO, encoding);
        return Math.max(minBufferSize, BufferElements2Rec * BytesPerElement);
    }
} // end class
/*
       Licensed to the Apache Software Foundation (ASF) under one
       or more contributor license agreements.  See the NOTICE file
       distributed with this work for additional information
       regarding copyright ownership.  The ASF licenses this file
       to you under the Apache License, Version 2.0 (the
       "License"); you may not use this file except in compliance
       with the License.  You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

       Unless required by applicable law or agreed to in writing,
       software distributed under the License is distributed on an
       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
       KIND, either express or implied.  See the License for the
       specific language governing permissions and limitations
       under the License.
*/

package org.apache.cordova.splashscreen;

import android.app.Activity;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.Point;
import android.os.Handler;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.json.JSONArray;
import org.json.JSONException;

import java.io.File;

/**
 * Cordova plugin that shows a customized splash screen (optionally with a
 * cached "organization" image and a sponsor text) over the full activity,
 * plus an optional loading spinner dialog.
 */
public class SplashScreen extends CordovaPlugin {
    private static final String LOG_TAG = "SplashScreen";
    // Cordova 3.x.x has a copy of this plugin bundled with it (SplashScreenInternal.java).
    // Enable functionality only if running on 4.x.x.
    private static final boolean HAS_BUILT_IN_SPLASH_SCREEN = Integer.valueOf(CordovaWebView.CORDOVA_VERSION.split("\\.")[0]) < 4;
    private static Dialog splashDialog;
    private static ProgressDialog spinnerDialog;
    private static boolean firstShow = true;

    /**
     * Displays the splash drawable.
     */
    private ImageView splashImageView;

    /**
     * Displays the sponsored by text.
     */
    private TextView textView;

    /**
     * Remember last device orientation to detect orientation changes.
     */
    private int orientation;

    // Root layout of the splash dialog (logo + optional text/image).
    private LinearLayout splashScreenContent;

    // Helper to be compile-time compatible with both Cordova 3.x and 4.x.
    private View getView() {
        try {
            return (View)webView.getClass().getMethod("getView").invoke(webView);
        } catch (Exception e) {
            return (View)webView;
        }
    }

    @Override
    protected void pluginInitialize() {
        if (HAS_BUILT_IN_SPLASH_SCREEN || !firstShow) {
            return;
        }
        // Make WebView invisible while loading URL
        //getView().setVisibility(View.INVISIBLE);
        int drawableId = preferences.getInteger("SplashDrawableId", 0);
        if (drawableId == 0) {
            String splashResource = preferences.getString("SplashScreen", "screen");
            if (splashResource != null) {
                // Resolve the drawable first by the activity's package, then by the
                // application package, and cache the id in preferences.
                drawableId = cordova.getActivity().getResources().getIdentifier(splashResource, "drawable", cordova.getActivity().getClass().getPackage().getName());
                if (drawableId == 0) {
                    drawableId = cordova.getActivity().getResources().getIdentifier(splashResource, "drawable", cordova.getActivity().getPackageName());
                }
                preferences.set("SplashDrawableId", drawableId);
            }
        }

        // Save initial orientation.
        orientation = cordova.getActivity().getResources().getConfiguration().orientation;

        firstShow = false;
        //loadSpinner();
        showSplashScreen(true);
    }

    /**
     * Shorter way to check value of "SplashMaintainAspectRatio" preference.
     */
    private boolean isMaintainAspectRatio () {
        return preferences.getBoolean("SplashMaintainAspectRatio", false);
    }

    @Override
    public void onPause(boolean multitasking) {
        if (HAS_BUILT_IN_SPLASH_SCREEN) {
            return;
        }
        // hide the splash screen to avoid leaking a window
        this.removeSplashScreen();
    }

    @Override
    public void onDestroy() {
        if (HAS_BUILT_IN_SPLASH_SCREEN) {
            return;
        }
        // hide the splash screen to avoid leaking a window
        this.removeSplashScreen();
        firstShow = true;
    }

    @Override
    public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
        // "hide"/"show" are forwarded through the message bus so onMessage()
        // handles them uniformly with messages from native code.
        if (action.equals("hide")) {
            cordova.getActivity().runOnUiThread(new Runnable() {
                public void run() {
                    webView.postMessage("splashscreen", "hide");
                }
            });
        } else if (action.equals("show")) {
            cordova.getActivity().runOnUiThread(new Runnable() {
                public void run() {
                    webView.postMessage("splashscreen", "show");
                }
            });
        } else if (action.equals("spinnerStart")) {
            if (!HAS_BUILT_IN_SPLASH_SCREEN) {
                final String title = args.getString(0);
                final String message = args.getString(1);
                cordova.getActivity().runOnUiThread(new Runnable() {
                    public void run() {
                        spinnerStart(title, message);
                    }
                });
            }
        } else {
            return false;
        }

        callbackContext.success();
        return true;
    }

    @Override
    public Object onMessage(String id, Object data) {
        if (HAS_BUILT_IN_SPLASH_SCREEN) {
            return null;
        }
        if ("splashscreen".equals(id)) {
            if ("hide".equals(data.toString())) {
                this.removeSplashScreen();
            } else {
                this.showSplashScreen(false);
            }
        } else if ("spinner".equals(id)) {
            if ("stop".equals(data.toString())) {
                this.spinnerStop();
                getView().setVisibility(View.VISIBLE);
            }
        } else if ("onReceivedError".equals(id)) {
            spinnerStop();
        }
        return null;
    }

    // Don't add @Override so that plugin still compiles on 3.x.x for a while
    public void onConfigurationChanged(Configuration newConfig) {
        if (newConfig.orientation != orientation) {
            orientation = newConfig.orientation;

            // Splash drawable may change with orientation, so reload it.
            // if (splashImageView != null) {
            //     int drawableId = preferences.getInteger("SplashDrawableId", 0);
            //     if (drawableId != 0) {
            //         splashImageView.setImageDrawable(cordova.getActivity().getResources().getDrawable(drawableId));
            //     }
            // }

            // Give the sponsor text a different weight in landscape vs portrait.
            if(textView != null) {
                LayoutParams layoutParams = null;
                if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
                    layoutParams = new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, 0, (float) 0.2);
                } else {
                    layoutParams = new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, 0, (float) 0.1);
                }
                textView.setLayoutParams(layoutParams);
            }
        }
    }

    /**
     * Dismisses the splash dialog (on the UI thread), fading out the content
     * first when it is available.
     */
    private void removeSplashScreen() {
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                if (splashDialog != null && splashDialog.isShowing()) {
                    if(splashScreenContent != null) {
                        AlphaAnimation fadeOut = new AlphaAnimation(1, 0);
                        fadeOut.setDuration(800);
                        splashScreenContent.setAnimation(fadeOut);
                        splashScreenContent.startAnimation(fadeOut);

                        fadeOut.setAnimationListener(new Animation.AnimationListener() {
                            @Override
                            public void onAnimationStart(Animation animation) {
                            }

                            @Override
                            public void onAnimationEnd(Animation animation) {
                                // Dismiss only after the fade completes.
                                splashDialog.dismiss();
                                splashDialog = null;
                                splashImageView = null;
                            }

                            @Override
                            public void onAnimationRepeat(Animation animation) {
                            }
                        });
                    } else {
                        splashDialog.dismiss();
                        splashDialog = null;
                        splashImageView = null;
                    }
                }
            }
        });
    }

    /**
     * Shows the splash screen over the full Activity
     */
    @SuppressWarnings("deprecation")
    private void showSplashScreen(final boolean hideAfterDelay) {
        final int splashscreenTime = preferences.getInteger("SplashScreenDelay", 3000);
        final int drawableId = preferences.getInteger("SplashDrawableId", 0);

        // If the splash dialog is showing don't try to show it again
        if (splashDialog != null && splashDialog.isShowing()) {
            return;
        }
        if (drawableId == 0 || (splashscreenTime <= 0 && hideAfterDelay)) {
            return;
        }

        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                Context context = webView.getContext();

                // Optional cached organization image shown below the logo.
                File imgFile = new File(context.getCacheDir().getAbsolutePath() + "/organization_splash.png");
                Bitmap myBitmap = null;
                if(imgFile.exists()) {
                    myBitmap = BitmapFactory.decodeFile(imgFile.getAbsolutePath());
                }

                // Use an ImageView to render the image because of its flexible scaling options.
                splashImageView = new ImageView(context);
                if(myBitmap != null) {
                    splashImageView.setImageBitmap(myBitmap);
                } else {
                    splashImageView.setImageResource(drawableId);
                }
                LayoutParams layoutParams = new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, 0, (float) 0.5);
                splashImageView.setLayoutParams(layoutParams);

                int width = 150;
                int height = 150;

                if (android.os.Build.VERSION.SDK_INT >= 13) {
                    Display display = cordova.getActivity().getWindowManager().getDefaultDisplay();
                    Point size = new Point();
                    display.getSize(size);
                    // calculate padding percentage
                    width = size.x;
                    height = size.y;
                }

                int paddingSide = (int)Math.round(width * 0.1);
                int paddingTop = (int)Math.round(height * 0.1);
                splashImageView.setPadding(paddingSide, paddingTop, paddingSide, paddingTop);
                splashImageView.setScaleType(ImageView.ScaleType.FIT_CENTER);

                // ImageView for the logo - (our splashscreen)
                ImageView logoView = new ImageView(context);
                logoView.setImageResource(drawableId);
                logoView.setScaleType(ImageView.ScaleType.CENTER);
                layoutParams = new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, 0, (float) 0.5);
                logoView.setLayoutParams(layoutParams);
                logoView.setBackgroundColor(0xFFe7ecf0);

                // add a TextView for the Text
                textView = new TextView(context);
                textView.setText("Updates powered by:");
                textView.setTextSize(24);
                textView.setPadding(5, 5, 5, 5);
                textView.setTextColor(Color.BLACK);
                textView.setGravity(Gravity.CENTER_HORIZONTAL);
                layoutParams = new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, 0, (float) 0.1);
                textView.setLayoutParams(layoutParams);

                // add the elements to the splash screen
                splashScreenContent = new LinearLayout(context);
                splashScreenContent.setBackgroundColor(0xFFe7ecf0);
                splashScreenContent.setOrientation(LinearLayout.VERTICAL);
                splashScreenContent.setGravity(Gravity.CENTER);
                splashScreenContent.addView(logoView);
                // Sponsor text and organization image only when the cached bitmap exists.
                if(myBitmap != null) {
                    splashScreenContent.addView(textView);
                    splashScreenContent.addView(splashImageView);
                }

                // Create and show the dialog
                splashDialog = new Dialog(context, android.R.style.Theme_Translucent_NoTitleBar);
                // check to see if the splash screen should be full screen
                if ((cordova.getActivity().getWindow().getAttributes().flags
                        & WindowManager.LayoutParams.FLAG_FULLSCREEN) == WindowManager.LayoutParams.FLAG_FULLSCREEN) {
                    splashDialog.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                            WindowManager.LayoutParams.FLAG_FULLSCREEN);
                }
                splashDialog.setContentView(splashScreenContent);
                splashDialog.setCancelable(false);
                splashDialog.show();

                // Set Runnable to remove splash screen just in case
                if (hideAfterDelay) {
                    final Handler handler = new Handler();
                    handler.postDelayed(new Runnable() {
                        public void run() {
                            removeSplashScreen();
                        }
                    }, splashscreenTime);
                }
            }
        });
    }

    /*
     * Load the spinner
     */
    private void loadSpinner() {
        // If loadingDialog property, then show the App loading dialog for first page of app
        String loading = null;
        if (webView.canGoBack()) {
            loading = preferences.getString("LoadingDialog", null);
        }
        else {
            loading = preferences.getString("LoadingPageDialog", null);
        }
        if (loading != null) {
            // Preference format: "title,message" — a missing comma means message only.
            String title = "";
            String message = "Loading Application...";

            if (loading.length() > 0) {
                int comma = loading.indexOf(',');
                if (comma > 0) {
                    title = loading.substring(0, comma);
                    message = loading.substring(comma + 1);
                }
                else {
                    title = "";
                    message = loading;
                }
            }
            spinnerStart(title, message);
        }
    }

    private void spinnerStart(final String title, final String message) {
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                // Replace any existing spinner before showing a new one.
                spinnerStop();
                spinnerDialog = ProgressDialog.show(webView.getContext(), title, message, true, true,
                        new DialogInterface.OnCancelListener() {
                            public void onCancel(DialogInterface dialog) {
                                spinnerDialog = null;
                            }
                        });
            }
        });
    }

    private void spinnerStop() {
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                if (spinnerDialog != null && spinnerDialog.isShowing()) {
                    spinnerDialog.dismiss();
                    spinnerDialog = null;
                }
            }
        });
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jasper.compiler;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Vector;

import javax.servlet.ServletContext;
import javax.servlet.descriptor.JspConfigDescriptor;
import javax.servlet.descriptor.JspPropertyGroupDescriptor;

import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;

/**
 * Handles the jsp-config element in WEB_INF/web.xml.  This is used
 * for specifying the JSP configuration information on a JSP page
 *
 * @author Kin-man Chung
 * @author Remy Maucherat
 */

public class JspConfig {

    // Logger
    private final Log log = LogFactory.getLog(JspConfig.class);

    // One entry per url-pattern of every jsp-property-group; null until
    // processWebDotXml() has run (and stays null for old servlet versions
    // or when the web app declares no jsp-config).
    private Vector<JspPropertyGroup> jspProperties = null;
    private final ServletContext ctxt;
    // Guarded by double-checked locking in init().
    private volatile boolean initialized = false;

    // Defaults used when no property group matches. "null" means "unspecified".
    // NOTE(review): two of these are instance fields (they are adjusted per
    // servlet version in processWebDotXml()), the rest are static constants.
    private static final String defaultIsXml = null;    // unspecified
    private String defaultIsELIgnored = null;           // unspecified
    private static final String defaultIsScriptingInvalid = null;
    private String defaultDeferedSyntaxAllowedAsLiteral = null;
    private static final String defaultTrimDirectiveWhitespaces = null;
    private static final String defaultDefaultContentType = null;
    private static final String defaultBuffer = null;
    private static final String defaultErrorOnUndeclaredNamespace = "false";

    private JspProperty defaultJspProperty;

    public JspConfig(ServletContext ctxt) {
        this.ctxt = ctxt;
    }

    /**
     * Reads the jsp-config descriptor and builds one JspPropertyGroup per
     * url-pattern. Also adjusts the EL/deferred-syntax defaults for web apps
     * declaring an effective servlet version older than 2.5.
     */
    private void processWebDotXml() {

        // Very, very unlikely but just in case...
        if (ctxt.getEffectiveMajorVersion() < 2) {
            defaultIsELIgnored = "true";
            defaultDeferedSyntaxAllowedAsLiteral = "true";
            return;
        }
        if (ctxt.getEffectiveMajorVersion() == 2) {
            if (ctxt.getEffectiveMinorVersion() < 5) {
                defaultDeferedSyntaxAllowedAsLiteral = "true";
            }
            if (ctxt.getEffectiveMinorVersion() < 4) {
                defaultIsELIgnored = "true";
                return;
            }
        }

        JspConfigDescriptor jspConfig = ctxt.getJspConfigDescriptor();

        if (jspConfig == null) {
            return;
        }

        jspProperties = new Vector<>();
        Collection<JspPropertyGroupDescriptor> jspPropertyGroups =
                jspConfig.getJspPropertyGroups();

        for (JspPropertyGroupDescriptor jspPropertyGroup : jspPropertyGroups) {

            Collection<String> urlPatterns = jspPropertyGroup.getUrlPatterns();

            if (urlPatterns.size() == 0) {
                continue;
            }

            JspProperty property = new JspProperty(jspPropertyGroup.getIsXml(),
                    jspPropertyGroup.getElIgnored(),
                    jspPropertyGroup.getScriptingInvalid(),
                    jspPropertyGroup.getPageEncoding(),
                    jspPropertyGroup.getIncludePreludes(),
                    jspPropertyGroup.getIncludeCodas(),
                    jspPropertyGroup.getDeferredSyntaxAllowedAsLiteral(),
                    jspPropertyGroup.getTrimDirectiveWhitespaces(),
                    jspPropertyGroup.getDefaultContentType(),
                    jspPropertyGroup.getBuffer(),
                    jspPropertyGroup.getErrorOnUndeclaredNamespace());

            // Add one JspPropertyGroup for each URL Pattern.  This makes
            // the matching logic easier.
            for (String urlPattern : urlPatterns) {
                String path = null;
                String extension = null;

                if (urlPattern.indexOf('*') < 0) {
                    // Exact match
                    path = urlPattern;
                } else {
                    int i = urlPattern.lastIndexOf('/');
                    String file;
                    if (i >= 0) {
                        path = urlPattern.substring(0,i+1);
                        file = urlPattern.substring(i+1);
                    } else {
                        file = urlPattern;
                    }

                    // pattern must be "*", or of the form "*.jsp"
                    if (file.equals("*")) {
                        extension = "*";
                    } else if (file.startsWith("*.")) {
                        extension = file.substring(file.indexOf('.')+1);
                    }

                    // The url patterns are reconstructed as the following:
                    // path != null, extension == null:  / or /foo/bar.ext
                    // path == null, extension != null:  *.ext
                    // path != null, extension == "*":   /foo/*
                    boolean isStar = "*".equals(extension);
                    if ((path == null && (extension == null || isStar))
                            || (path != null && !isStar)) {
                        if (log.isWarnEnabled()) {
                            log.warn(Localizer.getMessage(
                                    "jsp.warning.bad.urlpattern.propertygroup",
                                    urlPattern));
                        }
                        continue;
                    }
                }

                JspPropertyGroup propertyGroup =
                        new JspPropertyGroup(path, extension, property);

                jspProperties.addElement(propertyGroup);
            }
        }
    }

    /**
     * Lazily parses web.xml and builds the default property on first use.
     * Uses double-checked locking on the volatile "initialized" flag.
     */
    private void init() {

        if (!initialized) {
            synchronized (this) {
                if (!initialized) {
                    processWebDotXml();
                    defaultJspProperty = new JspProperty(defaultIsXml,
                            defaultIsELIgnored,
                            defaultIsScriptingInvalid,
                            null, null, null,
                            defaultDeferedSyntaxAllowedAsLiteral,
                            defaultTrimDirectiveWhitespaces,
                            defaultDefaultContentType,
                            defaultBuffer,
                            defaultErrorOnUndeclaredNamespace);
                    initialized = true;
                }
            }
        }
    }

    /**
     * Select the property group that has more restrictive url-pattern.
     * In case of tie, select the first.
*/ @SuppressWarnings("null") // NPE not possible private JspPropertyGroup selectProperty(JspPropertyGroup prev, JspPropertyGroup curr) { if (prev == null) { return curr; } if (prev.getExtension() == null) { // exact match return prev; } if (curr.getExtension() == null) { // exact match return curr; } String prevPath = prev.getPath(); String currPath = curr.getPath(); if (prevPath == null && currPath == null) { // Both specifies a *.ext, keep the first one return prev; } if (prevPath == null && currPath != null) { return curr; } if (prevPath != null && currPath == null) { return prev; } if (prevPath.length() >= currPath.length()) { return prev; } return curr; } /** * Find a property that best matches the supplied resource. * @param uri the resource supplied. * @return a JspProperty indicating the best match, or some default. */ public JspProperty findJspProperty(String uri) { init(); // JSP Configuration settings do not apply to tag files if (jspProperties == null || uri.endsWith(".tag") || uri.endsWith(".tagx")) { return defaultJspProperty; } String uriPath = null; int index = uri.lastIndexOf('/'); if (index >=0 ) { uriPath = uri.substring(0, index+1); } String uriExtension = null; index = uri.lastIndexOf('.'); if (index >=0) { uriExtension = uri.substring(index+1); } Collection<String> includePreludes = new ArrayList<>(); Collection<String> includeCodas = new ArrayList<>(); JspPropertyGroup isXmlMatch = null; JspPropertyGroup elIgnoredMatch = null; JspPropertyGroup scriptingInvalidMatch = null; JspPropertyGroup pageEncodingMatch = null; JspPropertyGroup deferedSyntaxAllowedAsLiteralMatch = null; JspPropertyGroup trimDirectiveWhitespacesMatch = null; JspPropertyGroup defaultContentTypeMatch = null; JspPropertyGroup bufferMatch = null; JspPropertyGroup errorOnUndeclaredNamespaceMatch = null; Iterator<JspPropertyGroup> iter = jspProperties.iterator(); while (iter.hasNext()) { JspPropertyGroup jpg = iter.next(); JspProperty jp = jpg.getJspProperty(); // (arrays will 
be the same length) String extension = jpg.getExtension(); String path = jpg.getPath(); if (extension == null) { // exact match pattern: /a/foo.jsp if (!uri.equals(path)) { // not matched; continue; } } else { // Matching patterns *.ext or /p/* if (path != null && uriPath != null && ! uriPath.startsWith(path)) { // not matched continue; } if (!extension.equals("*") && !extension.equals(uriExtension)) { // not matched continue; } } // We have a match // Add include-preludes and include-codas if (jp.getIncludePrelude() != null) { includePreludes.addAll(jp.getIncludePrelude()); } if (jp.getIncludeCoda() != null) { includeCodas.addAll(jp.getIncludeCoda()); } // If there is a previous match for the same property, remember // the one that is more restrictive. if (jp.isXml() != null) { isXmlMatch = selectProperty(isXmlMatch, jpg); } if (jp.isELIgnored() != null) { elIgnoredMatch = selectProperty(elIgnoredMatch, jpg); } if (jp.isScriptingInvalid() != null) { scriptingInvalidMatch = selectProperty(scriptingInvalidMatch, jpg); } if (jp.getPageEncoding() != null) { pageEncodingMatch = selectProperty(pageEncodingMatch, jpg); } if (jp.isDeferedSyntaxAllowedAsLiteral() != null) { deferedSyntaxAllowedAsLiteralMatch = selectProperty(deferedSyntaxAllowedAsLiteralMatch, jpg); } if (jp.isTrimDirectiveWhitespaces() != null) { trimDirectiveWhitespacesMatch = selectProperty(trimDirectiveWhitespacesMatch, jpg); } if (jp.getDefaultContentType() != null) { defaultContentTypeMatch = selectProperty(defaultContentTypeMatch, jpg); } if (jp.getBuffer() != null) { bufferMatch = selectProperty(bufferMatch, jpg); } if (jp.isErrorOnUndeclaredNamespace() != null) { errorOnUndeclaredNamespaceMatch = selectProperty(errorOnUndeclaredNamespaceMatch, jpg); } } String isXml = defaultIsXml; String isELIgnored = defaultIsELIgnored; String isScriptingInvalid = defaultIsScriptingInvalid; String pageEncoding = null; String isDeferedSyntaxAllowedAsLiteral = defaultDeferedSyntaxAllowedAsLiteral; String 
isTrimDirectiveWhitespaces = defaultTrimDirectiveWhitespaces; String defaultContentType = defaultDefaultContentType; String buffer = defaultBuffer; String errorOnUndelcaredNamespace = defaultErrorOnUndeclaredNamespace; if (isXmlMatch != null) { isXml = isXmlMatch.getJspProperty().isXml(); } if (elIgnoredMatch != null) { isELIgnored = elIgnoredMatch.getJspProperty().isELIgnored(); } if (scriptingInvalidMatch != null) { isScriptingInvalid = scriptingInvalidMatch.getJspProperty().isScriptingInvalid(); } if (pageEncodingMatch != null) { pageEncoding = pageEncodingMatch.getJspProperty().getPageEncoding(); } if (deferedSyntaxAllowedAsLiteralMatch != null) { isDeferedSyntaxAllowedAsLiteral = deferedSyntaxAllowedAsLiteralMatch.getJspProperty().isDeferedSyntaxAllowedAsLiteral(); } if (trimDirectiveWhitespacesMatch != null) { isTrimDirectiveWhitespaces = trimDirectiveWhitespacesMatch.getJspProperty().isTrimDirectiveWhitespaces(); } if (defaultContentTypeMatch != null) { defaultContentType = defaultContentTypeMatch.getJspProperty().getDefaultContentType(); } if (bufferMatch != null) { buffer = bufferMatch.getJspProperty().getBuffer(); } if (errorOnUndeclaredNamespaceMatch != null) { errorOnUndelcaredNamespace = errorOnUndeclaredNamespaceMatch.getJspProperty().isErrorOnUndeclaredNamespace(); } return new JspProperty(isXml, isELIgnored, isScriptingInvalid, pageEncoding, includePreludes, includeCodas, isDeferedSyntaxAllowedAsLiteral, isTrimDirectiveWhitespaces, defaultContentType, buffer, errorOnUndelcaredNamespace); } /** * To find out if an uri matches an url pattern in jsp config. If so, * then the uri is a JSP page. This is used primarily for jspc. 
*/ public boolean isJspPage(String uri) { init(); if (jspProperties == null) { return false; } String uriPath = null; int index = uri.lastIndexOf('/'); if (index >=0 ) { uriPath = uri.substring(0, index+1); } String uriExtension = null; index = uri.lastIndexOf('.'); if (index >=0) { uriExtension = uri.substring(index+1); } Iterator<JspPropertyGroup> iter = jspProperties.iterator(); while (iter.hasNext()) { JspPropertyGroup jpg = iter.next(); String extension = jpg.getExtension(); String path = jpg.getPath(); if (extension == null) { if (uri.equals(path)) { // There is an exact match return true; } } else { if ((path == null || path.equals(uriPath)) && (extension.equals("*") || extension.equals(uriExtension))) { // Matches *, *.ext, /p/*, or /p/*.ext return true; } } } return false; } public static class JspPropertyGroup { private final String path; private final String extension; private final JspProperty jspProperty; JspPropertyGroup(String path, String extension, JspProperty jspProperty) { this.path = path; this.extension = extension; this.jspProperty = jspProperty; } public String getPath() { return path; } public String getExtension() { return extension; } public JspProperty getJspProperty() { return jspProperty; } } public static class JspProperty { private final String isXml; private final String elIgnored; private final String scriptingInvalid; private final String pageEncoding; private final Collection<String> includePrelude; private final Collection<String> includeCoda; private final String deferedSyntaxAllowedAsLiteral; private final String trimDirectiveWhitespaces; private final String defaultContentType; private final String buffer; private final String errorOnUndeclaredNamespace; public JspProperty(String isXml, String elIgnored, String scriptingInvalid, String pageEncoding, Collection<String> includePrelude, Collection<String> includeCoda, String deferedSyntaxAllowedAsLiteral, String trimDirectiveWhitespaces, String defaultContentType, String buffer, 
String errorOnUndeclaredNamespace) { this.isXml = isXml; this.elIgnored = elIgnored; this.scriptingInvalid = scriptingInvalid; this.pageEncoding = pageEncoding; this.includePrelude = includePrelude; this.includeCoda = includeCoda; this.deferedSyntaxAllowedAsLiteral = deferedSyntaxAllowedAsLiteral; this.trimDirectiveWhitespaces = trimDirectiveWhitespaces; this.defaultContentType = defaultContentType; this.buffer = buffer; this.errorOnUndeclaredNamespace = errorOnUndeclaredNamespace; } public String isXml() { return isXml; } public String isELIgnored() { return elIgnored; } public String isScriptingInvalid() { return scriptingInvalid; } public String getPageEncoding() { return pageEncoding; } public Collection<String> getIncludePrelude() { return includePrelude; } public Collection<String> getIncludeCoda() { return includeCoda; } public String isDeferedSyntaxAllowedAsLiteral() { return deferedSyntaxAllowedAsLiteral; } public String isTrimDirectiveWhitespaces() { return trimDirectiveWhitespaces; } public String getDefaultContentType() { return defaultContentType; } public String getBuffer() { return buffer; } public String isErrorOnUndeclaredNamespace() { return errorOnUndeclaredNamespace; } } }
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.diff.impl.incrementalMerge;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diff.DiffColors;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.SeparatorPlacement;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.TextRange;
import com.intellij.util.Assertion;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.Convertor;
import junit.framework.Assert;
import junit.framework.AssertionFailedError;
import org.jetbrains.annotations.Nullable;

import java.awt.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Helpers for incremental-merge tests: building documents/editors, and
 * asserting that an editor's markup model contains exactly the expected
 * set of diff highlighters.
 */
public class MergeTestUtils {
  private final Project myProject;
  private final ArrayList<Editor> myEditorsToDispose = new ArrayList<>();
  private static final Assertion CHECK = new Assertion();

  /** A diff-kind id (attribute key name or marker) paired with a text range. */
  public static class Range {
    private final String myId;
    private final TextRange myRange;

    Range(String id, TextRange range) {
      myId = id;
      myRange = range;
    }

    @Override
    public String toString() {
      return myId + " " + myRange;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      Range other = (Range)o;
      return myId.equals(other.myId) && myRange.equals(other.myRange);
    }

    @Override
    public int hashCode() {
      return 31 * myId.hashCode() + myRange.hashCode();
    }
  }

  public MergeTestUtils(Project project) {
    myProject = project;
  }

  public static void checkMarkup(Editor editor, Range[] expected) {
    checkMarkup(editor, expected, null);
  }

  /**
   * Asserts that the editor's non-applied change highlighters match
   * {@code expected} and, when given, that the applied-change highlighters
   * match {@code expectedApplied}. On failure, dumps every highlighter
   * before rethrowing to aid debugging.
   */
  public static void checkMarkup(Editor editor, Range[] expected, @Nullable Range[] expectedApplied) {
    List<RangeHighlighter> highlighters = Arrays.asList(editor.getMarkupModel().getAllHighlighters());
    List<RangeHighlighter> plainChanges = ContainerUtil.findAll(highlighters, CHANGE_HIGHLIGHTERS);
    List<RangeHighlighter> appliedChanges = ContainerUtil.findAll(highlighters, APPLIED_CHANGE_HIGHLIGHTERS);
    try {
      checkMarkup(editor, plainChanges, expected);
      if (expectedApplied != null) {
        checkMarkup(editor, appliedChanges, expectedApplied);
      }
    }
    catch (AssertionFailedError e) {
      CHECK.enumerate(ContainerUtil.map(highlighters, new HighlighterToRangeConvertor(editor)));
      throw e;
    }
  }

  /**
   * Checks that {@code changes} corresponds (unordered) to {@code expected},
   * and that every non-empty change carries gray TOP/BOTTOM line separators.
   */
  private static void checkMarkup(Editor editor, List<RangeHighlighter> changes, Range[] expected) {
    Function<RangeHighlighter, Range> asRange = new HighlighterToRangeConvertor(editor);
    Assertion.compareUnordered(expected, ContainerUtil.map(changes, asRange));
    for (RangeHighlighter change : changes) {
      // Empty changes have no separators to verify.
      if (change.getStartOffset() == change.getEndOffset()) continue;
      Assert.assertEquals(Color.GRAY, change.getLineSeparatorColor());
      Assert.assertEquals(SeparatorPlacement.TOP, change.getLineSeparatorPlacement());
      List<RangeHighlighter> allHighlighters = Arrays.asList(editor.getMarkupModel().getAllHighlighters());
      RangeHighlighter bottomLine = findBottomHighlighter(change, allHighlighters);
      Assert.assertNotNull(String.format("The bottom line of %s is null!", asRange.fun(change)), bottomLine);
      Assert.assertEquals(Color.GRAY, bottomLine.getLineSeparatorColor());
    }
  }

  /**
   * Finds the companion BOTTOM-separator highlighter covering the same range
   * as {@code highlighter} (ignoring a trailing newline), or null.
   */
  @Nullable
  private static RangeHighlighter findBottomHighlighter(RangeHighlighter highlighter,
                                                        List<RangeHighlighter> allHighlighters) {
    int start = highlighter.getStartOffset();
    int end = highlighter.getEndOffset();
    if (highlighter.getDocument().getCharsSequence().charAt(end - 1) == '\n') {
      end--;
    }
    for (RangeHighlighter candidate : allHighlighters) {
      boolean sameRange = candidate.getStartOffset() == start && candidate.getEndOffset() == end;
      if (sameRange && SeparatorPlacement.BOTTOM.equals(candidate.getLineSeparatorPlacement())) {
        return candidate;
      }
    }
    return null;
  }

  public static Range ins(int start, int length) {
    return new Range(DiffColors.DIFF_INSERTED.getExternalName(), createRange(start, length));
  }

  /** Releases every editor created through {@link #createEditor(Document)}. */
  protected void tearDown() throws Exception {
    EditorFactory factory = EditorFactory.getInstance();
    for (Editor editor : myEditorsToDispose) {
      factory.releaseEditor(editor);
    }
  }

  public Editor[] createEditors(Document[] documents) {
    Editor[] result = new Editor[documents.length];
    for (int i = 0; i < documents.length; i++) {
      result[i] = createEditor(documents[i]);
    }
    return result;
  }

  /** Creates an editor and registers it for disposal in {@link #tearDown()}. */
  public Editor createEditor(Document document) {
    Editor editor = EditorFactory.getInstance().createEditor(document);
    myEditorsToDispose.add(editor);
    return editor;
  }

  private static final String[] POSSIBLE_ATTRIBUTES =
    {DiffColors.DIFF_INSERTED.getExternalName(),
     DiffColors.DIFF_DELETED.getExternalName(),
     DiffColors.DIFF_MODIFIED.getExternalName(),
     DiffColors.DIFF_CONFLICT.getExternalName()};

  // Accepts highlighters that represent a change (applied or not): valid,
  // not a BOTTOM-separator companion, and not a pure click-action gutter marker.
  private static final Condition<RangeHighlighter> COMMON_CHANGE_HIGHLIGHTERS = highlighter -> {
    if (!highlighter.isValid() || highlighter.getLineSeparatorPlacement() == SeparatorPlacement.BOTTOM) {
      return false;
    }
    GutterIconRenderer renderer = (GutterIconRenderer)highlighter.getGutterIconRenderer();
    boolean actionOnlyMarker = highlighter.getTextAttributes() == null
                               && highlighter.getLineSeparatorColor() == null
                               && renderer != null
                               && renderer.getClickAction() != null;
    return !actionOnlyMarker;
  };

  private static final Condition<RangeHighlighter> CHANGE_HIGHLIGHTERS =
    highlighter -> COMMON_CHANGE_HIGHLIGHTERS.value(highlighter) && !isAppliedChange(highlighter);

  private static final Condition<RangeHighlighter> APPLIED_CHANGE_HIGHLIGHTERS =
    highlighter -> COMMON_CHANGE_HIGHLIGHTERS.value(highlighter) && isAppliedChange(highlighter);

  // An applied change is recognized by the transparency of its error stripe mark.
  private static boolean isAppliedChange(RangeHighlighter highlighter) {
    Color stripe = highlighter.getErrorStripeMarkColor();
    return stripe != null && stripe.getAlpha() == ChangeHighlighterHolder.APPLIED_CHANGE_TRANSPARENCY;
  }

  public static Document createRODocument(String text) {
    Document document = createDocument(text);
    document.setReadOnly(true);
    return document;
  }

  public static Document createDocument(String text) {
    return EditorFactory.getInstance().createDocument(text);
  }

  public static Range del(int start, int length) {
    return new Range(DiffColors.DIFF_DELETED.getExternalName(), createRange(start, length));
  }

  private static TextRange createRange(int start, int length) {
    return new TextRange(start, start + length);
  }

  public static Range mod(int start, int length) {
    return new Range(DiffColors.DIFF_MODIFIED.getExternalName(), createRange(start, length));
  }

  public static Range conf(int start, int length) {
    return new Range(DiffColors.DIFF_CONFLICT.getExternalName(), createRange(start, length));
  }

  /** Inserts text inside a write action wrapped in a command. */
  public void insertString(final Document document, final int offset, final String text) {
    Runnable edit = () -> document.insertString(offset, text);
    ApplicationManager.getApplication().runWriteAction(
      () -> CommandProcessor.getInstance().executeCommand(myProject, edit, null, null));
  }

  /** Maps a background color back to the diff attribute key it belongs to. */
  private static class ColorToIdConvertor implements Convertor<Color, String> {
    private final Editor myEditor;

    public ColorToIdConvertor(Editor editor) {
      myEditor = editor;
    }

    @Override
    public String convert(Color backgroundColor) {
      for (String key : POSSIBLE_ATTRIBUTES) {
        if (backgroundColor.equals(getAttributes(key).getBackgroundColor())) {
          return key;
        }
      }
      return null;
    }

    private TextAttributes getAttributes(String key) {
      return myEditor.getColorsScheme().getAttributes(TextAttributesKey.find(key));
    }
  }

  /** Converts a highlighter into a {@link Range} keyed by its diff kind. */
  private static class HighlighterToRangeConvertor implements Function<RangeHighlighter, Range> {
    private final Convertor<Color, String> myColorToId;

    public HighlighterToRangeConvertor(Editor editor) {
      myColorToId = new ColorToIdConvertor(editor);
    }

    @Override
    public Range fun(RangeHighlighter highlighter) {
      TextAttributes attributes = highlighter.getTextAttributes();
      Color color;
      if (attributes != null) {
        color = attributes.getBackgroundColor();
      }
      else {
        color = highlighter.getLineSeparatorColor();
      }
      String id;
      if (Color.GRAY.equals(color)) {
        id = "lineSeparator";
      }
      else if (color != null) {
        id = myColorToId.convert(color);
      }
      else {
        id = highlighter.getGutterIconRenderer().getTooltipText();
      }
      return new Range(id, TextRange.create(highlighter));
    }
  }
}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.psi.impl.source.tree.injected; import com.intellij.injected.editor.DocumentWindow; import com.intellij.injected.editor.VirtualFileWindow; import com.intellij.lang.Language; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.model.ModelBranch; import com.intellij.openapi.editor.Caret; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.colors.TextAttributesKey; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.impl.DebugUtil; import com.intellij.psi.impl.PsiManagerEx; import com.intellij.psi.impl.source.DummyHolder; import com.intellij.psi.injection.ReferenceInjector; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.*; import com.intellij.reference.SoftReference; import com.intellij.testFramework.LightVirtualFile; import com.intellij.util.containers.ConcurrentList; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.function.Supplier; /** * @deprecated Use {@link InjectedLanguageManager} instead */ @Deprecated(forRemoval = true) public class InjectedLanguageUtilBase { public static final Key<IElementType> INJECTED_FRAGMENT_TYPE = Key.create("INJECTED_FRAGMENT_TYPE"); @NotNull static PsiElement loadTree(@NotNull PsiElement host, @NotNull PsiFile containingFile) { if (containingFile instanceof DummyHolder) { PsiElement 
context = containingFile.getContext(); if (context != null) { PsiFile topFile = context.getContainingFile(); topFile.getNode(); //load tree TextRange textRange = host.getTextRange().shiftRight(context.getTextRange().getStartOffset()); PsiElement inLoadedTree = PsiTreeUtil.findElementOfClassAtRange(topFile, textRange.getStartOffset(), textRange.getEndOffset(), host.getClass()); if (inLoadedTree != null) { host = inLoadedTree; } } } return host; } private static final Key<List<TokenInfo>> HIGHLIGHT_TOKENS = Key.create("HIGHLIGHT_TOKENS"); public static List<TokenInfo> getHighlightTokens(@NotNull PsiFile file) { return file.getUserData(HIGHLIGHT_TOKENS); } public static String getUnescapedText(@NotNull PsiFile file, @Nullable final PsiElement startElement, @Nullable final PsiElement endElement) { final InjectedLanguageManager manager = InjectedLanguageManager.getInstance(file.getProject()); if (manager.getInjectionHost(file) == null) { return file.getText().substring(startElement == null ? 0 : startElement.getTextRange().getStartOffset(), endElement == null ? file.getTextLength() : endElement.getTextRange().getStartOffset()); } final StringBuilder sb = new StringBuilder(); file.accept(new PsiRecursiveElementWalkingVisitor() { Boolean myState = startElement == null ? 
Boolean.TRUE : null; @Override public void visitElement(@NotNull PsiElement element) { if (element == startElement) myState = Boolean.TRUE; if (element == endElement) myState = Boolean.FALSE; if (Boolean.FALSE == myState) return; if (Boolean.TRUE == myState && element.getFirstChild() == null) { sb.append(getUnescapedLeafText(element, false)); } else { super.visitElement(element); } } }); return sb.toString(); } public static class TokenInfo { @NotNull public final IElementType type; @NotNull public final ProperTextRange rangeInsideInjectionHost; public final int shredIndex; /** * @deprecated Use textAttributesKeys */ @Deprecated(forRemoval = true) public final TextAttributes attributes; public final TextAttributesKey @NotNull [] textAttributesKeys; public TokenInfo(@NotNull IElementType type, @NotNull ProperTextRange rangeInsideInjectionHost, int shredIndex, @NotNull TextAttributes attributes, TextAttributesKey @NotNull [] textAttributesKeys) { this.type = type; this.rangeInsideInjectionHost = rangeInsideInjectionHost; this.shredIndex = shredIndex; this.attributes = attributes; this.textAttributesKeys = textAttributesKeys; } } static void setHighlightTokens(@NotNull PsiFile file, @NotNull List<TokenInfo> tokens) { file.putUserData(HIGHLIGHT_TOKENS, tokens); } public static Place getShreds(@NotNull PsiFile injectedFile) { FileViewProvider viewProvider = injectedFile.getViewProvider(); return getShreds(viewProvider); } public static Place getShreds(@NotNull FileViewProvider viewProvider) { if (!(viewProvider instanceof InjectedFileViewProvider)) return null; InjectedFileViewProvider myFileViewProvider = (InjectedFileViewProvider)viewProvider; return getShreds(myFileViewProvider.getDocument()); } @NotNull private static Place getShreds(@NotNull DocumentWindow document) { return ((DocumentWindowImpl)document).getShreds(); } /** * @deprecated use {@link InjectedLanguageManager#enumerate(PsiElement, PsiLanguageInjectionHost.InjectedPsiVisitor)} instead */ 
@Deprecated(forRemoval = true) public static boolean enumerate(@NotNull PsiElement host, @NotNull PsiLanguageInjectionHost.InjectedPsiVisitor visitor) { PsiFile containingFile = host.getContainingFile(); PsiUtilCore.ensureValid(containingFile); return enumerate(host, containingFile, true, visitor); } /** * @deprecated use {@link InjectedLanguageManager#enumerateEx(PsiElement, PsiFile, boolean, PsiLanguageInjectionHost.InjectedPsiVisitor)} instead */ @Deprecated(forRemoval = true) public static boolean enumerate(@NotNull PsiElement host, @NotNull PsiFile containingFile, boolean probeUp, @NotNull PsiLanguageInjectionHost.InjectedPsiVisitor visitor) { //do not inject into nonphysical files except during completion if (!containingFile.isPhysical() && containingFile.getOriginalFile() == containingFile && ModelBranch.getPsiBranch(containingFile) == null) { final PsiElement context = InjectedLanguageManager.getInstance(containingFile.getProject()).getInjectionHost(containingFile); if (context == null) return false; final PsiFile file = context.getContainingFile(); if (file == null || !file.isPhysical() && file.getOriginalFile() == file) return false; } if (containingFile.getViewProvider() instanceof InjectedFileViewProvider) return false; // no injection inside injection PsiElement inTree = loadTree(host, containingFile); if (inTree != host) { host = inTree; containingFile = host.getContainingFile(); } PsiDocumentManager documentManager = PsiDocumentManager.getInstance(containingFile.getProject()); Document document = documentManager.getDocument(containingFile); if (document == null || documentManager.isCommitted(document)) { probeElementsUp(host, containingFile, probeUp, visitor); } return true; } /** * This is a quick check, that can be performed before committing document and invoking * {@link com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil#getEditorForInjectedLanguageNoCommit(Editor, Caret, PsiFile)} or other methods here, which don't work * for 
uncommitted documents. */ static boolean mightHaveInjectedFragmentAtCaret(@NotNull Project project, @NotNull Document hostDocument, int hostOffset) { PsiFile hostPsiFile = PsiDocumentManager.getInstance(project).getCachedPsiFile(hostDocument); if (hostPsiFile == null || !hostPsiFile.isValid()) return false; List<DocumentWindow> documents = InjectedLanguageManager.getInstance(project).getCachedInjectedDocumentsInRange(hostPsiFile, TextRange.create(hostOffset, hostOffset)); for (DocumentWindow document : documents) { if (document.isValid() && document.getHostRange(hostOffset) != null) return true; } return false; } /** * Invocation of this method on uncommitted {@code host} can lead to unexpected results, including throwing an exception! */ @Nullable public static PsiFile findInjectedPsiNoCommit(@NotNull PsiFile host, int offset) { PsiElement injected = InjectedLanguageManager.getInstance(host.getProject()).findInjectedElementAt(host, offset); return injected == null ? null : PsiUtilCore.getTemplateLanguageFile(injected.getContainingFile()); } /** * Invocation of this method on uncommitted {@code file} can lead to unexpected results, including throwing an exception! 
*/ // consider injected elements public static PsiElement findElementAtNoCommit(@NotNull PsiFile file, int offset) { FileViewProvider viewProvider = file.getViewProvider(); Trinity<PsiElement, PsiElement, Language> result = null; if (!(viewProvider instanceof InjectedFileViewProvider)) { PsiDocumentManager documentManager = PsiDocumentManager.getInstance(file.getProject()); result = tryOffset(file, offset, documentManager); PsiElement injected = result.first; if (injected != null) { return injected; } } Language baseLanguage = viewProvider.getBaseLanguage(); if (result != null && baseLanguage == result.third) { return result.second; // already queried } return viewProvider.findElementAt(offset, baseLanguage); } // list of injected fragments injected into this psi element (can be several if some crazy injector calls startInjecting()/doneInjecting()/startInjecting()/doneInjecting()) private static final Key<Supplier<InjectionResult>> INJECTED_PSI = Key.create("INJECTED_PSI"); private static void probeElementsUp(@NotNull PsiElement element, @NotNull PsiFile hostPsiFile, boolean probeUp, @NotNull PsiLanguageInjectionHost.InjectedPsiVisitor visitor) { element = skipNonInjectablePsi(element, probeUp); if (element == null) return; InjectedLanguageManagerImpl injectedManager = InjectedLanguageManagerImpl.getInstanceImpl(hostPsiFile.getProject()); InjectionResult result = null; PsiElement current; for (current = element; current != null && current != hostPsiFile && !(current instanceof PsiDirectory); ) { ProgressManager.checkCanceled(); if ("EL".equals(current.getLanguage().getID())) break; result = SoftReference.deref(current.getUserData(INJECTED_PSI)); if (result == null || !result.isModCountUpToDate() || !result.isValid()) { result = injectedManager.processInPlaceInjectorsFor(hostPsiFile, current); preventResultFromGCWhileInjectedPsiIsReachable(result); } current = current.getParent(); if (result != null) { if (result.files != null) { for (PsiFile injectedPsiFile : 
result.files) { Place place = getShreds(injectedPsiFile); if (place.isValid()) { // check that injections found intersect with queried element boolean intersects = intersects(element, place); if (intersects) { visitor.visit(injectedPsiFile, place); } } } } if (result.references != null && visitor instanceof InjectedReferenceVisitor) { InjectedReferenceVisitor refVisitor = (InjectedReferenceVisitor)visitor; for (Pair<ReferenceInjector, Place> pair : result.references) { Place place = pair.getSecond(); if (place.isValid()) { // check that injections found intersect with queried element boolean intersects = intersects(element, place); if (intersects) { ReferenceInjector injector = pair.getFirst(); refVisitor.visitInjectedReference(injector, place); } } } } break; // found injection, stop } if (!probeUp) { break; } } if (element != current && (probeUp || result != null)) { cacheResults(element, current, hostPsiFile, result); } } private static void cacheResults(@NotNull PsiElement from, @Nullable PsiElement upUntil, @NotNull PsiFile hostFile, @Nullable InjectionResult result) { Supplier<InjectionResult> cachedRef = result == null || result.isEmpty() ? getEmptyInjectionResult(hostFile) : new SoftReference<>(result); for (PsiElement e = from; e != upUntil && e != null; e = e.getParent()) { ProgressManager.checkCanceled(); e.putUserData(INJECTED_PSI, cachedRef); } } private static final Key<InjectionResult> INJECTION_HOLDER_BACK_REFERENCE = Key.create("INJECTION_HOLDER_BACK_REFERENCE"); /** * Prevents InjectionResult from being GC-ed while there are references to the PSI inside, * to avoid new injected PSI being created when there's one alive already. 
*/ private static void preventResultFromGCWhileInjectedPsiIsReachable(@Nullable InjectionResult result) { if (result != null && result.files != null) { for (PsiFile injectedPsiFile : result.files) { injectedPsiFile.getViewProvider().putUserData(INJECTION_HOLDER_BACK_REFERENCE, result); } } } @NotNull private static InjectionResult getEmptyInjectionResult(@NotNull PsiFile host) { return CachedValuesManager.getCachedValue(host, () -> CachedValueProvider.Result.createSingleDependency(new InjectionResult(host, null, null), PsiModificationTracker.MODIFICATION_COUNT)); } /** * Quick check if we should bother injecting something inside this PSI at all */ public static boolean isInjectable(@NotNull PsiElement element, boolean probeUp) { return stopLookingForInjection(element) || element.getFirstChild() != null || probeUp; } /** * We can only inject into injection hosts or their ancestors, so if we're sure there are no PsiLanguageInjectionHost descendants, * we can skip that PSI safely. */ @Nullable private static PsiElement skipNonInjectablePsi(@NotNull PsiElement element, boolean probeUp) { if (!stopLookingForInjection(element) && element.getFirstChild() == null) { if (!probeUp) return null; element = element.getParent(); while (element != null && !stopLookingForInjection(element) && element.getFirstChild() == element.getLastChild()) { element = element.getParent(); } } return element; } private static boolean stopLookingForInjection(@NotNull PsiElement element) { return element instanceof PsiFileSystemItem || element instanceof PsiLanguageInjectionHost; } private static boolean intersects(@NotNull PsiElement hostElement, @NotNull Place place) { TextRange hostElementRange = hostElement.getTextRange(); boolean intersects = false; for (PsiLanguageInjectionHost.Shred shred : place) { PsiLanguageInjectionHost shredHost = shred.getHost(); if (shredHost != null && shredHost.getTextRange().intersects(hostElementRange)) { intersects = true; break; } } return intersects; } /** * 
Invocation of this method on uncommitted {@code hostFile} can lead to unexpected results, including throwing an exception! */ static PsiElement findInjectedElementNoCommit(@NotNull PsiFile hostFile, final int offset) { if (hostFile instanceof PsiCompiledElement) return null; Project project = hostFile.getProject(); if (InjectedLanguageManager.getInstance(project).isInjectedFragment(hostFile)) return null; final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(project); Trinity<PsiElement, PsiElement, Language> result = tryOffset(hostFile, offset, documentManager); return result.first; } // returns (injected psi, leaf element at the offset, language of the leaf element) // since findElementAt() is expensive, we trying to reuse its result @NotNull private static Trinity<PsiElement, PsiElement, Language> tryOffset(@NotNull PsiFile hostFile, final int offset, @NotNull PsiDocumentManager documentManager) { FileViewProvider provider = hostFile.getViewProvider(); Language leafLanguage = null; PsiElement leafElement = null; for (Language language : provider.getLanguages()) { PsiElement element = provider.findElementAt(offset, language); if (element != null) { if (leafLanguage == null) { leafLanguage = language; leafElement = element; } PsiElement injected = findInside(element, hostFile, offset, documentManager); if (injected != null) return Trinity.create(injected, element, language); } // maybe we are at the border between two psi elements, then try to find injection at the end of the left element if (offset != 0 && (element == null || element.getTextRange().getStartOffset() == offset)) { PsiElement leftElement = provider.findElementAt(offset - 1, language); if (leftElement != null && leftElement.getTextRange().getEndOffset() == offset) { PsiElement injected = findInside(leftElement, hostFile, offset, documentManager); if (injected != null) return Trinity.create(injected, element, language); } } } return Trinity.create(null, leafElement, leafLanguage); 
} private static PsiElement findInside(@NotNull PsiElement element, @NotNull PsiFile hostFile, final int hostOffset, @NotNull final PsiDocumentManager documentManager) { final Ref<PsiElement> out = new Ref<>(); enumerate(element, hostFile, true, (injectedPsi, places) -> { for (PsiLanguageInjectionHost.Shred place : places) { TextRange hostRange = place.getHost().getTextRange(); if (hostRange.cutOut(place.getRangeInsideHost()).grown(1).contains(hostOffset)) { DocumentWindowImpl document = (DocumentWindowImpl)documentManager.getCachedDocument(injectedPsi); if (document == null) return; int injectedOffset = document.hostToInjected(hostOffset); PsiElement injElement = injectedPsi.findElementAt(injectedOffset); out.set(injElement == null ? injectedPsi : injElement); } } }); return out.get(); } private static final Key<List<DocumentWindow>> INJECTED_DOCS_KEY = Key.create("INJECTED_DOCS_KEY"); /** * @deprecated use {@link InjectedLanguageManager#getCachedInjectedDocumentsInRange(PsiFile, TextRange)} instead */ @NotNull @Deprecated(forRemoval = true) public static ConcurrentList<DocumentWindow> getCachedInjectedDocuments(@NotNull PsiFile hostPsiFile) { // modification of cachedInjectedDocuments must be under InjectedLanguageManagerImpl.ourInjectionPsiLock only List<DocumentWindow> injected = hostPsiFile.getUserData(INJECTED_DOCS_KEY); if (injected == null) { injected = ((UserDataHolderEx)hostPsiFile).putUserDataIfAbsent(INJECTED_DOCS_KEY, ContainerUtil.createConcurrentList()); } return (ConcurrentList<DocumentWindow>)injected; } @NotNull static List<DocumentWindow> getCachedInjectedDocumentsInRange(@NotNull PsiFile hostPsiFile, @NotNull TextRange range) { List<DocumentWindow> injected = getCachedInjectedDocuments(hostPsiFile); return ContainerUtil.filter(injected, inj-> Arrays.stream(inj.getHostRanges()).anyMatch(range::intersects)); } static void clearCachedInjectedFragmentsForFile(@NotNull PsiFile file) { file.putUserData(INJECTED_DOCS_KEY, null); } static void 
clearCaches(@NotNull Project project, @NotNull DocumentWindow documentWindow) { if (project.isDisposed()) return; VirtualFileWindowImpl virtualFile = (VirtualFileWindowImpl)Objects.requireNonNull(FileDocumentManager.getInstance().getFile(documentWindow)); PsiManagerEx psiManagerEx = PsiManagerEx.getInstanceEx(project); DebugUtil.performPsiModification("injected clearCaches", () -> psiManagerEx.getFileManager().setViewProvider(virtualFile, null)); VirtualFile delegate = virtualFile.getDelegate(); if (!delegate.isValid()) return; FileViewProvider viewProvider = psiManagerEx.getFileManager().findCachedViewProvider(delegate); if (viewProvider == null) return; for (PsiFile hostFile : ((AbstractFileViewProvider)viewProvider).getCachedPsiFiles()) { // modification of cachedInjectedDocuments must be under InjectedLanguageManagerImpl.ourInjectionPsiLock synchronized (InjectedLanguageManagerImpl.ourInjectionPsiLock) { List<DocumentWindow> cachedInjectedDocuments = getCachedInjectedDocuments(hostFile); for (int i = cachedInjectedDocuments.size() - 1; i >= 0; i--) { DocumentWindow cachedInjectedDocument = cachedInjectedDocuments.get(i); if (cachedInjectedDocument == documentWindow) { cachedInjectedDocuments.remove(i); } } } } } /** * @deprecated use {@link InjectedLanguageManager#getTopLevelFile(PsiElement)} instead */ @Deprecated public static PsiFile getTopLevelFile(@NotNull PsiElement element) { PsiFile containingFile = element.getContainingFile(); if (containingFile == null) return null; if (containingFile.getViewProvider() instanceof InjectedFileViewProvider) { PsiElement host = InjectedLanguageManager.getInstance(containingFile.getProject()).getInjectionHost(containingFile); if (host != null) containingFile = host.getContainingFile(); } return containingFile; } @Nullable public static String getUnescapedLeafText(PsiElement element, boolean strict) { String unescaped = element.getCopyableUserData(LeafPatcher.UNESCAPED_TEXT); if (unescaped != null) { return unescaped; } if 
(!strict && element.getFirstChild() == null) { return element.getText(); } return null; } @Nullable public static PsiLanguageInjectionHost findInjectionHost(@Nullable PsiElement psi) { if (psi == null) return null; PsiFile containingFile = psi.getContainingFile().getOriginalFile(); // * formatting PsiElement fileContext = containingFile.getContext(); // * quick-edit-handler if (fileContext instanceof PsiLanguageInjectionHost) return (PsiLanguageInjectionHost)fileContext; Place shreds = getShreds(containingFile.getViewProvider()); // * injection-registrar if (shreds == null) { VirtualFile virtualFile = PsiUtilCore.getVirtualFile(containingFile); if (virtualFile instanceof LightVirtualFile) { virtualFile = ((LightVirtualFile)virtualFile).getOriginalFile(); // * dynamic files-from-text } if (virtualFile instanceof VirtualFileWindow) { shreds = getShreds(((VirtualFileWindow)virtualFile).getDocumentWindow()); } } return shreds != null ? shreds.getHostPointer().getElement() : null; } @Nullable public static PsiLanguageInjectionHost findInjectionHost(@Nullable VirtualFile virtualFile) { return virtualFile instanceof VirtualFileWindow ? 
getShreds(((VirtualFileWindow)virtualFile).getDocumentWindow()).getHostPointer().getElement() : null; } /** * @deprecated Use {@link InjectedLanguageManager#getInjectedPsiFiles(PsiElement)} != null instead */ @Deprecated(forRemoval = true) public static boolean hasInjections(@NotNull PsiLanguageInjectionHost host) { if (!host.isPhysical()) return false; final Ref<Boolean> result = Ref.create(false); enumerate(host, (injectedPsi, places) -> result.set(true)); return result.get().booleanValue(); } public static boolean isInInjectedLanguagePrefixSuffix(@NotNull final PsiElement element) { PsiFile injectedFile = element.getContainingFile(); if (injectedFile == null) return false; Project project = injectedFile.getProject(); InjectedLanguageManager languageManager = InjectedLanguageManager.getInstance(project); if (!languageManager.isInjectedFragment(injectedFile)) return false; TextRange elementRange = element.getTextRange(); List<TextRange> edibles = languageManager.intersectWithAllEditableFragments(injectedFile, elementRange); int combinedEdiblesLength = edibles.stream().mapToInt(TextRange::getLength).sum(); return combinedEdiblesLength != elementRange.getLength(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.asterix.runtime.evaluators.functions;

import java.io.DataOutput;

import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import org.apache.asterix.om.base.AMutableDouble;
import org.apache.asterix.om.base.AMutableFloat;
import org.apache.asterix.om.base.AMutableInt16;
import org.apache.asterix.om.base.AMutableInt32;
import org.apache.asterix.om.base.AMutableInt64;
import org.apache.asterix.om.base.AMutableInt8;
import org.apache.asterix.om.base.ANull;
import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptor;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.EnumDeserializer;
import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

/**
 * Descriptor for the binary {@code numeric-subtract} function.
 * <p>
 * Both operands are widened to {@code double} for the arithmetic; the result is
 * then narrowed back to the widest of the two input type tags
 * (DOUBLE &gt; FLOAT &gt; INT64 &gt; INT32 &gt; INT16 &gt; INT8). A NULL operand
 * short-circuits to a NULL result.
 */
public class NumericSubtractDescriptor extends AbstractScalarFunctionDynamicDescriptor {

    private static final long serialVersionUID = 1L;

    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
        @Override
        public IFunctionDescriptor createFunctionDescriptor() {
            return new NumericSubtractDescriptor();
        }
    };

    @Override
    public FunctionIdentifier getIdentifier() {
        return AsterixBuiltinFunctions.NUMERIC_SUBTRACT;
    }

    @Override
    public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args)
            throws AlgebricksException {
        return new IScalarEvaluatorFactory() {
            private static final long serialVersionUID = 1L;

            @Override
            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws AlgebricksException {
                return new IScalarEvaluator() {
                    private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                    private final DataOutput out = resultStorage.getDataOutput();
                    // one temp. buffer re-used by both children
                    private final IPointable argPtr = new VoidPointable();
                    private final IScalarEvaluator evalLeft = args[0].createScalarEvaluator(ctx);
                    private final IScalarEvaluator evalRight = args[1].createScalarEvaluator(ctx);
                    // operands[0] = left value, operands[1] = right value, both widened to double
                    private final double[] operands = new double[args.length];
                    // reusable result holders, one per possible output type
                    private final AMutableDouble aDouble = new AMutableDouble(0);
                    private final AMutableFloat aFloat = new AMutableFloat(0);
                    private final AMutableInt64 aInt64 = new AMutableInt64(0);
                    private final AMutableInt32 aInt32 = new AMutableInt32(0);
                    private final AMutableInt16 aInt16 = new AMutableInt16((short) 0);
                    private final AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
                    @SuppressWarnings("rawtypes")
                    private ISerializerDeserializer serde;

                    @SuppressWarnings("unchecked")
                    @Override
                    public void evaluate(IFrameTupleReference tuple, IPointable result) throws AlgebricksException {
                        // BUGFIX: these promotion flags used to be instance fields that were
                        // never reset between calls. Once one tuple contained e.g. a DOUBLE,
                        // every subsequent tuple was emitted as DOUBLE even when both of its
                        // operands were narrower. Keeping them local resets them per tuple.
                        boolean metInt8 = false;
                        boolean metInt16 = false;
                        boolean metInt32 = false;
                        boolean metInt64 = false;
                        boolean metFloat = false;
                        boolean metDouble = false;
                        try {
                            resultStorage.reset();
                            for (int i = 0; i < args.length; i++) {
                                if (i == 0) {
                                    evalLeft.evaluate(tuple, argPtr);
                                } else {
                                    evalRight.evaluate(tuple, argPtr);
                                }
                                byte[] data = argPtr.getByteArray();
                                int offset = argPtr.getStartOffset();
                                // first byte of the serialized value is its type tag
                                ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(data[offset]);
                                switch (typeTag) {
                                    case INT8: {
                                        metInt8 = true;
                                        operands[i] = AInt8SerializerDeserializer.getByte(data, offset + 1);
                                        break;
                                    }
                                    case INT16: {
                                        metInt16 = true;
                                        operands[i] = AInt16SerializerDeserializer.getShort(data, offset + 1);
                                        break;
                                    }
                                    case INT32: {
                                        metInt32 = true;
                                        operands[i] = AInt32SerializerDeserializer.getInt(data, offset + 1);
                                        break;
                                    }
                                    case INT64: {
                                        metInt64 = true;
                                        operands[i] = AInt64SerializerDeserializer.getLong(data, offset + 1);
                                        break;
                                    }
                                    case FLOAT: {
                                        metFloat = true;
                                        operands[i] = AFloatSerializerDeserializer.getFloat(data, offset + 1);
                                        break;
                                    }
                                    case DOUBLE: {
                                        metDouble = true;
                                        operands[i] = ADoubleSerializerDeserializer.getDouble(data, offset + 1);
                                        break;
                                    }
                                    case NULL: {
                                        // NULL operand => NULL result, regardless of the other operand
                                        serde = AqlSerializerDeserializerProvider.INSTANCE
                                                .getSerializerDeserializer(BuiltinType.ANULL);
                                        serde.serialize(ANull.NULL, out);
                                        result.set(resultStorage);
                                        return;
                                    }
                                    default: {
                                        throw new NotImplementedException(AsterixBuiltinFunctions.NUMERIC_SUBTRACT
                                                .getName() + (i == 0 ? ": left" : ": right") + " operand can not be "
                                                + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(data[offset]));
                                    }
                                }
                            }
                            // Narrow the double-domain result back to the widest input type.
                            if (metDouble) {
                                serde = AqlSerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.ADOUBLE);
                                aDouble.setValue(operands[0] - operands[1]);
                                serde.serialize(aDouble, out);
                            } else if (metFloat) {
                                serde = AqlSerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.AFLOAT);
                                aFloat.setValue((float) (operands[0] - operands[1]));
                                serde.serialize(aFloat, out);
                            } else if (metInt64) {
                                serde = AqlSerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.AINT64);
                                aInt64.setValue((long) (operands[0] - operands[1]));
                                serde.serialize(aInt64, out);
                            } else if (metInt32) {
                                serde = AqlSerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.AINT32);
                                aInt32.setValue((int) (operands[0] - operands[1]));
                                serde.serialize(aInt32, out);
                            } else if (metInt16) {
                                serde = AqlSerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.AINT16);
                                aInt16.setValue((short) (operands[0] - operands[1]));
                                serde.serialize(aInt16, out);
                            } else if (metInt8) {
                                serde = AqlSerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.AINT8);
                                aInt8.setValue((byte) (operands[0] - operands[1]));
                                serde.serialize(aInt8, out);
                            }
                            result.set(resultStorage);
                        } catch (HyracksDataException hde) {
                            // preserve the cause for callers at the Algebricks layer
                            throw new AlgebricksException(hde);
                        }
                    }
                };
            }
        };
    }
}
package util.genome.pwm;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.util.*;

import util.DNAUtils;
import util.genome.NoSuchBaseException;

/**
 * This class represents a position weight matrix (PWM): a per-position score
 * for each of the four DNA bases. Scores are stored in a 4 x length matrix,
 * one row per base.
 *
 * @author Tristan Bepler
 */
public class PWM implements Serializable{
	private static final long serialVersionUID = 1L;

	private static final int NUM_BASES = 4;

	// row indexes into the score matrix, one per base
	private static final int A = 0;
	private static final int C = 1;
	private static final int G = 2;
	private static final int T = 3;

	// line prefixes used by the text format read by readPWM()
	private static final String A_LINE = "A:";
	private static final String C_LINE = "C:";
	private static final String G_LINE = "G:";
	private static final String T_LINE = "T:";

	/**
	 * Maps a base character (case-insensitive) to its matrix row index.
	 *
	 * @param base one of a/A, c/C, g/G, t/T
	 * @return the row index for the base
	 * @throws NoSuchBaseException if the character is not a recognized base
	 */
	private static int getBaseIndex(char base){
		if(base == 'a' || base == 'A'){
			return A;
		}
		if(base == 'c' || base == 'C'){
			return C;
		}
		if(base == 'g' || base == 'G'){
			return G;
		}
		if(base == 't' || base == 'T'){
			return T;
		}
		throw new NoSuchBaseException("No encoding for base: "+base);
	}

	/**
	 * Reads a PWM from a stream containing lines of the form
	 * {@code A: s1 s2 ...}, {@code C: ...}, {@code G: ...}, {@code T: ...}.
	 * Reading stops as soon as all four base lines have been seen.
	 * <p>
	 * I/O errors are reported to stderr (best effort, as before) and whatever
	 * was parsed so far is used; if the four rows end up with different
	 * lengths the PWM constructor will reject them.
	 *
	 * @param in stream to read from; it is always closed before returning
	 * @return the parsed PWM
	 */
	public static final PWM readPWM(InputStream in){
		List<Double> a = new ArrayList<Double>();
		List<Double> c = new ArrayList<Double>();
		List<Double> g = new ArrayList<Double>();
		List<Double> t = new ArrayList<Double>();
		BufferedReader reader = new BufferedReader(new InputStreamReader(in));
		String line;
		try {
			while((line = reader.readLine()) != null){
				if(line.startsWith(A_LINE)){
					a = parse(line);
				}
				if(line.startsWith(C_LINE)){
					c = parse(line);
				}
				if(line.startsWith(G_LINE)){
					g = parse(line);
				}
				if(line.startsWith(T_LINE)){
					t = parse(line);
				}
				//check if all bases have been read
				if(!a.isEmpty() && !c.isEmpty() && !g.isEmpty() && !t.isEmpty()){
					break;
				}
			}
		} catch (IOException e) {
			// best effort: report and build from whatever was parsed so far
			e.printStackTrace();
		} finally {
			// BUGFIX: the reader was previously closed only on the success
			// path, leaking the stream when an IOException occurred mid-read
			try {
				reader.close();
			} catch (IOException ignored) {
				// nothing useful can be done if close itself fails
			}
		}
		return new PWM(toArray(a), toArray(c), toArray(g), toArray(t));
	}

	/** Copies a collection of boxed doubles into a primitive array, in iteration order. */
	private static final double[] toArray(Collection<Double> col){
		double[] array = new double[col.size()];
		int i = 0;
		for(double d : col){
			array[i] = d;
			i++;
		}
		return array;
	}

	/** Parses one whitespace-separated score line, skipping the leading "X:" token. */
	private static final List<Double> parse(String line){
		String[] tokens = line.split("\\s+");
		List<Double> scores = new ArrayList<Double>();
		for(int i=1; i<tokens.length; i++){
			scores.add(Double.parseDouble(tokens[i]));
		}
		return scores;
	}

	// m_Scores[base][position]; all four rows have equal length
	private final double[][] m_Scores;

	/**
	 * Builds a PWM from four equal-length per-base score arrays.
	 *
	 * @throws RuntimeException if the arrays differ in length
	 */
	public PWM(double[] aScores, double[] cScores, double[] gScores, double[] tScores){
		if(aScores.length != cScores.length || aScores.length != gScores.length || aScores.length != tScores.length){
			throw new RuntimeException("Cannot create a PWM from score arrays of different lengths: A "+aScores.length+
					", C "+cScores.length+", G "+gScores.length+", T "+tScores.length+".");
		}
		m_Scores = new double[NUM_BASES][aScores.length];
		for(int i=0; i<aScores.length; i++){
			m_Scores[A][i] = aScores[i];
			m_Scores[C][i] = cScores[i];
			m_Scores[G][i] = gScores[i];
			m_Scores[T][i] = tScores[i];
		}
	}

	/** Tab-joined scores of one base row; uses StringBuilder instead of O(n^2) string concat. */
	private String toString(int base){
		StringBuilder s = new StringBuilder();
		for(int i=0; i<this.length(); i++){
			s.append(m_Scores[base][i]).append("\t");
		}
		return s.toString();
	}

	@Override
	public String toString(){
		StringBuilder s = new StringBuilder();
		s.append(A_LINE).append("\t").append(this.toString(A)).append("\n");
		s.append(C_LINE).append("\t").append(this.toString(C)).append("\n");
		s.append(G_LINE).append("\t").append(this.toString(G)).append("\n");
		s.append(T_LINE).append("\t").append(this.toString(T)).append("\n");
		return s.toString();
	}

	/**
	 * Returns true if the sequence is at least as long as this PWM and
	 * contains only characters this PWM can score (a/c/g/t, either case).
	 */
	public boolean canScore(String seq){
		if(seq.length() < this.length()){
			return false;
		}
		for(char c : seq.toCharArray()){
			try{
				getBaseIndex(c);
			} catch (NoSuchBaseException e){
				return false;
			}
		}
		return true;
	}

	/**
	 * Returns the score of the given base at the given index
	 * @param base
	 * @param index
	 * @return
	 */
	public double score(char base, int index){
		return m_Scores[getBaseIndex(base)][index];
	}

	/**
	 * Returns the score of the given word according to this PWM. The word score is the sum of the (char,position) scores.
	 * @param word - sequence to be scored by this PWM; must be exactly PWM length
	 * @return the summed position scores
	 * @throws RuntimeException if the word length differs from this PWM's length
	 */
	public double score(String word){
		if(word.length() == this.length()){
			double score = 0;
			for(int i=0; i<word.length(); i++){
				char base = word.charAt(i);
				score += score(base, i);
			}
			return score;
		}
		throw new RuntimeException("Cannot score word: "+word+". Word length is not the same as PWM length: "+this.length()+".");
	}

	/**
	 * Returns the score of the given word according to this PWM. The score is calculated as the
	 * natural log of the product of the scores of each position in the word
	 * (computed as a sum of logs for numerical stability).
	 * @param word - sequence to be scored; must be exactly PWM length
	 * @return log ratio score
	 * @throws RuntimeException if the word length differs from this PWM's length
	 */
	public double scoreLogRatio(String word){
		if(word.length() == this.length()){
			double score = 0;
			for( int i = 0 ; i < word.length() ; ++i ){
				char base = word.charAt(i);
				score += Math.log(score(base, i));
			}
			return score;
		}
		throw new RuntimeException("Cannot score word: "+word+". Word length is not the same as PWM length: "+this.length()+".");
	}

	/**
	 * Scores the window {@code seq[start, stop)}, which must be exactly PWM length.
	 * @throws RuntimeException if (stop - start) differs from this PWM's length
	 */
	public double score(String seq, int start, int stop){
		int len = stop-start;
		if(len == this.length()){
			double score = 0;
			for(int i=start; i<stop; i++){
				char base = seq.charAt(i);
				score += m_Scores[getBaseIndex(base)][i-start];
			}
			return score;
		}
		throw new RuntimeException("Cannot score seq: "+seq+" between "+start+" and "+stop+". "+len+" is not the same as PWM length: "+this.length()+".");
	}

	/**
	 * Scores the reverse complement of the window {@code seq[start, stop)}:
	 * each base is complemented and matched against the mirrored PWM position.
	 * @throws RuntimeException if (stop - start) differs from this PWM's length
	 */
	public double scoreReverseCompliment(String seq, int start, int stop){
		int len = stop-start;
		if(len == this.length()){
			double score = 0;
			for(int i=start; i<stop; i++){
				char base = DNAUtils.compliment(seq.charAt(i));
				score += m_Scores[getBaseIndex(base)][this.length() - (i-start) - 1];
			}
			return score;
		}
		throw new RuntimeException("Cannot score reverse compliment of seq: "+seq+" between "+start+" and "+stop+". "+len+" is not the same as PWM length: "+this.length()+".");
	}

	/**
	 * Scores a sequence according to this PWM. Returns an array containing the scores of each word of contained
	 * by the given sequence.
	 * @param seq - the sequence to be scored
	 * @return an array of size (seq.length() - PWM.length() + 1) containing the scores of each word of size PWM.length() contained by the sequence
	 * @throws RuntimeException if the sequence is shorter than this PWM
	 */
	public double[] scoreSeq(String seq){
		if(seq.length() < this.length()){
			throw new RuntimeException("Cannot score sequence: "+seq+". Sequence length is less than this PWMs length: "+this.length()+".");
		}
		double[] scores = new double[seq.length() - this.length() + 1];
		for(int i=0; i<scores.length; i++){
			String word = seq.substring(i, i+this.length());
			scores[i] = this.score(word);
		}
		return scores;
	}

	/** Number of positions (columns) in this PWM. */
	public int length(){
		return m_Scores[A].length;
	}
}
// Copyright (c) 2013-present, febit.org. All Rights Reserved.
package org.febit.wit.util;

import lombok.AccessLevel;
import lombok.NoArgsConstructor;

/**
 * Filename/path manipulation utilities (separator-agnostic: both '/' and '\\'
 * are treated as separators on input; output uses '/').
 * <p>
 * refer to the
 * <a href="https://github.com/oblac/jodd">Jodd</a> project.
 */
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class FileNameUtil {

    private static final char UNIX_SEPARATOR = '/';

    private static final char WINDOWS_SEPARATOR = '\\';

    // true if ch is either the Unix or the Windows path separator
    private static boolean isSeparator(char ch) {
        return (ch == UNIX_SEPARATOR) || (ch == WINDOWS_SEPARATOR);
    }

    /**
     * Returns the directory part of the filename, up to and including the
     * last '/'; "" if there is no '/'; null for null input.
     * <p>
     * NOTE(review): only {@code UNIX_SEPARATOR} is considered here —
     * presumably callers pass already-normalized paths; verify against callers
     * if Windows-style input can reach this method.
     */
    public static String getPath(String filename) {
        if (filename == null) {
            return null;
        }
        int index = filename.lastIndexOf(UNIX_SEPARATOR);
        if (index < 0) {
            return "";
        }
        return filename.substring(0, index + 1);
    }

    /**
     * Concatenates a filename to a base path and normalizes the result.
     * <ul>
     *   <li>relative {@code fullFilenameToAdd} (prefix length 0) is appended to a
     *       non-empty {@code basePath}, inserting '/' if needed;</li>
     *   <li>absolute {@code fullFilenameToAdd} (prefix length &gt; 0) replaces the base;</li>
     *   <li>otherwise (null/empty base with relative add, or invalid prefix) returns null.</li>
     * </ul>
     */
    public static String concat(String basePath, String fullFilenameToAdd) {
        if (basePath == null) {
            return null;
        }
        int len = basePath.length();
        int prefix = getPrefixLength(fullFilenameToAdd);
        if (prefix == 0 && len != 0) {
            if (isSeparator(basePath.charAt(len - 1))) {
                return normalize(basePath.concat(fullFilenameToAdd));
            }
            return normalize(basePath + '/' + fullFilenameToAdd);
        }
        if (prefix > 0) {
            return normalize(fullFilenameToAdd);
        }
        return null;
    }

    /**
     * Internal method to perform the normalization.
     * <p>
     * Converts all separators to '/', collapses duplicate slashes, resolves
     * "./" and "x/../" segments in place on a working char array, and drops
     * the trailing separator. Returns null for invalid input (null, bad
     * prefix, or ".." that climbs above the prefix).
     *
     * @param filename file name
     * @return normalized filename
     */
    @SuppressWarnings({
            "squid:S3776", // Cognitive Complexity of methods should not be too high
            "squid:ForLoopCounterChangedCheck",
            "squid:LabelsShouldNotBeUsedCheck"
    })
    public static String normalize(String filename) {
        if (filename == null) {
            return null;
        }
        int size = filename.length();
        if (size == 0) {
            return filename;
        }
        int prefix = getPrefixLength(filename);
        if (prefix < 0) {
            return null;
        }
        char[] array = new char[size + 2]; // +1 for possible extra slash, +2 for arraycopy
        filename.getChars(0, filename.length(), array, 0);
        // fix separators throughout
        for (int i = 0; i < size; i++) {
            if (array[i] == WINDOWS_SEPARATOR) {
                array[i] = UNIX_SEPARATOR;
            }
        }
        // add extra separator on the end to simplify code below
        if (array[size - 1] != UNIX_SEPARATOR) {
            array[size++] = UNIX_SEPARATOR;
        }
        // adjoining slashes: shift the tail left over each duplicate '/'
        for (int i = prefix + 1; i < size; i++) {
            if (array[i] == UNIX_SEPARATOR && array[i - 1] == UNIX_SEPARATOR) {
                System.arraycopy(array, i, array, i - 1, size - i);
                size--;
                i--; // re-check the same position after the shift
            }
        }
        // dot slash: remove each "./" segment
        for (int i = prefix + 1; i < size; i++) {
            if (array[i] == UNIX_SEPARATOR && array[i - 1] == '.'
                    && (i == prefix + 1 || array[i - 2] == UNIX_SEPARATOR)) {
                System.arraycopy(array, i + 1, array, i - 1, size - i);
                size -= 2;
                i--; // re-check the same position after the shift
            }
        }
        // double dot slash: resolve each "x/../" against the preceding segment
        outer:
        for (int i = prefix + 2; i < size; i++) {
            if (array[i] == UNIX_SEPARATOR && array[i - 1] == '.' && array[i - 2] == '.'
                    && (i == prefix + 2 || array[i - 3] == UNIX_SEPARATOR)) {
                if (i == prefix + 2) {
                    // ".." immediately after the prefix would escape it — invalid
                    return null;
                }
                int j;
                for (j = i - 4; j >= prefix; j--) {
                    if (array[j] == UNIX_SEPARATOR) {
                        // remove b/../ from a/b/../c
                        System.arraycopy(array, i + 1, array, j + 1, size - i);
                        size -= (i - j);
                        i = j + 1;
                        continue outer;
                    }
                }
                // remove a/../ from a/../c
                System.arraycopy(array, i + 1, array, prefix, size - i);
                size -= (i + 1 - prefix);
                i = prefix + 1;
            }
        }
        if (size <= 0) { // should never be less than 0
            return "";
        }
        if (size <= prefix) { // should never be less than prefix
            return new String(array, 0, size);
        }
        return new String(array, 0, size - 1); // lose trailing separator
    }

    // ---------------------------------------------------------------- prefix

    /**
     * Returns the length of the filename prefix, such as <code>C:/</code> or <code>~/</code>.
     * <p>
     * This method will handle a file in either Unix or Windows format.
     * <p>
     * The prefix length includes the first slash in the full filename if applicable. Thus, it is possible that the
     * length returned is greater than the length of the input string.
     * <pre>
     * Windows:
     * a\b\c.txt           --&gt; ""          --&gt; relative
     * \a\b\c.txt          --&gt; "\"         --&gt; current drive absolute
     * C:a\b\c.txt         --&gt; "C:"        --&gt; drive relative
     * C:\a\b\c.txt        --&gt; "C:\"       --&gt; absolute
     * \\server\a\b\c.txt  --&gt; "\\server\" --&gt; UNC
     *
     * Unix:
     * a/b/c.txt           --&gt; ""          --&gt; relative
     * /a/b/c.txt          --&gt; "/"         --&gt; absolute
     * ~/a/b/c.txt         --&gt; "~/"        --&gt; current user
     * ~                   --&gt; "~/"        --&gt; current user (slash added)
     * ~user/a/b/c.txt     --&gt; "~user/"    --&gt; named user
     * ~user               --&gt; "~user/"    --&gt; named user (slash added)
     * </pre>
     * <p>
     * The output will be the same irrespective of the machine that the code is running on. ie. both Unix and Windows
     * prefixes are matched regardless.
     *
     * @param filename the filename to find the prefix in, null returns -1
     * @return the length of the prefix, -1 if invalid or null
     */
    @SuppressWarnings({
            "squid:S3776" // Cognitive Complexity of methods should not be too high
    })
    private static int getPrefixLength(String filename) {
        if (filename == null) {
            return -1;
        }
        final int len = filename.length();
        if (len == 0) {
            return 0;
        }
        final char ch0 = filename.charAt(0);
        if (ch0 == '.') {
            return 0;
        }
        if (ch0 == ':') {
            // a leading ':' can never form a valid prefix
            return -1;
        }
        if (len == 1) {
            if (isSeparator(ch0)) {
                return 1;
            }
            // bare "~" means current user's home; the implied '/' makes the prefix 2
            return ch0 == '~' ? 2 : 0;
        } else {
            char ch1 = filename.charAt(1);
            if (ch0 == '~') {
                int posUnix = filename.indexOf(UNIX_SEPARATOR, 1);
                if (posUnix == -1) {
                    return len + 1; // return a length greater than the input
                }
                return posUnix + 1;
            }
            if (ch1 == ':') {
                // Windows drive letter: "C:" (drive relative) or "C:\" (absolute)
                if ((ch0 >= 'A' && ch0 <= 'Z') || (ch0 >= 'a' && ch0 <= 'z')) {
                    if (len == 2 || !isSeparator(filename.charAt(2))) {
                        return 2;
                    }
                    return 3;
                }
                return -1;
            }
            if (isSeparator(ch0)) {
                if (isSeparator(ch1)) {
                    // UNC path "\\server\..." — prefix runs through the server-name separator
                    int posUnix = filename.indexOf(UNIX_SEPARATOR, 2);
                    int posWin = filename.indexOf(WINDOWS_SEPARATOR, 2);
                    if ((posUnix == -1 && posWin == -1) || posUnix == 2 || posWin == 2) {
                        return -1; // no server name, or it is empty
                    }
                    posUnix = posUnix == -1 ? posWin : posUnix;
                    posWin = posWin == -1 ? posUnix : posWin;
                    return Math.min(posUnix, posWin) + 1;
                }
                return 1;
            }
            return 0;
        }
    }
}
package org.apache.maven.project;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.codehaus.plexus.util.FileUtils;

/**
 * Tests project building through {@link ProjectBuilder}, covering parent/child
 * resolution, duplicate plugin merging, stub models for missing POMs, partial
 * results on bad dependencies, and version-range parent references.
 */
public class DefaultMavenProjectBuilderTest
    extends AbstractMavenProjectTestCase
{
    /** Files/directories created during a test, removed again in {@link #tearDown()}. */
    private List<File> filesToDelete = new ArrayList<>();

    /** Throw-away local repository directory, unique per test run. */
    private File localRepoDir;

    @Override
    public void setUp()
        throws Exception
    {
        super.setUp();

        projectBuilder = lookup( ProjectBuilder.class );

        // Unique temp dir so concurrent/repeated runs do not share repository state.
        localRepoDir = new File( System.getProperty( "java.io.tmpdir" ), "local-repo." + System.currentTimeMillis() );
        localRepoDir.mkdirs();

        filesToDelete.add( localRepoDir );
    }

    @Override
    public void tearDown()
        throws Exception
    {
        super.tearDown();

        // Best-effort cleanup of everything registered during the test.
        for ( File file : filesToDelete )
        {
            if ( file.exists() )
            {
                if ( file.isDirectory() )
                {
                    FileUtils.deleteDirectory( file );
                }
                else
                {
                    file.delete();
                }
            }
        }
    }

    /**
     * Builds a project for the given POM artifact.
     *
     * @param pom the POM artifact to build
     * @param allowStub whether a stub model may be returned when the POM cannot be resolved
     * @return the built project
     * @throws Exception if project building fails
     */
    protected MavenProject getProject( Artifact pom, boolean allowStub )
        throws Exception
    {
        ProjectBuildingRequest configuration = new DefaultProjectBuildingRequest();
        configuration.setLocalRepository( getLocalRepository() );
        initRepoSession( configuration );

        return projectBuilder.build( pom, allowStub, configuration ).getProject();
    }

    /**
     * Check that we can build ok from the middle pom of a (parent,child,grandchild) hierarchy.
     *
     * @throws Exception
     */
    public void testBuildFromMiddlePom()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/grandchild-check/child/pom.xml" );
        File f2 = getTestFile( "src/test/resources/projects/grandchild-check/child/grandchild/pom.xml" );

        getProject( f1 );

        // it's the building of the grandchild project, having already cached the child project
        // (but not the parent project), which causes the problem.
        getProject( f2 );
    }

    public void testDuplicatePluginDefinitionsMerged()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/duplicate-plugins-merged-pom.xml" );

        MavenProject project = getProject( f1 );
        assertEquals( 2, project.getBuildPlugins().get( 0 ).getDependencies().size() );
        assertEquals( 2, project.getBuildPlugins().get( 0 ).getExecutions().size() );
        assertEquals( "first", project.getBuildPlugins().get( 0 ).getExecutions().get( 0 ).getId() );
    }

    public void testBuildStubModelForMissingRemotePom()
        throws Exception
    {
        Artifact pom = repositorySystem.createProjectArtifact( "org.apache.maven.its", "missing", "0.1" );
        MavenProject project = getProject( pom, true );

        assertNotNull( project.getArtifactId() );

        assertNotNull( project.getRemoteArtifactRepositories() );
        assertFalse( project.getRemoteArtifactRepositories().isEmpty() );

        assertNotNull( project.getPluginArtifactRepositories() );
        assertFalse( project.getPluginArtifactRepositories().isEmpty() );

        assertNull( project.getParent() );
        assertNull( project.getParentArtifact() );

        assertFalse( project.isExecutionRoot() );
    }

    @Override
    protected ArtifactRepository getLocalRepository()
        throws Exception
    {
        ArtifactRepositoryLayout repoLayout = lookup( ArtifactRepositoryLayout.class, "default" );
        ArtifactRepository r = repositorySystem.createArtifactRepository( "local", "file://" + localRepoDir.getAbsolutePath(), repoLayout, null, null );
        return r;
    }

    // Intentionally disabled (name does not start with "test"): endless build loop
    // kept around for manual leak/performance investigation only.
    public void xtestLoop()
        throws Exception
    {
        while ( true )
        {
            File f1 = getTestFile( "src/test/resources/projects/duplicate-plugins-merged-pom.xml" );
            getProject( f1 );
        }
    }

    public void testPartialResultUponBadDependencyDeclaration()
        throws Exception
    {
        File pomFile = getTestFile( "src/test/resources/projects/bad-dependency.xml" );

        try
        {
            ProjectBuildingRequest request = newBuildingRequest();
            request.setProcessPlugins( false );
            request.setResolveDependencies( true );
            projectBuilder.build( pomFile, request );
            fail( "Project building did not fail despite invalid POM" );
        }
        catch ( ProjectBuildingException e )
        {
            List<ProjectBuildingResult> results = e.getResults();
            assertNotNull( results );
            assertEquals( 1, results.size() );
            ProjectBuildingResult result = results.get( 0 );
            assertNotNull( result );
            assertNotNull( result.getProject() );
            assertEquals( 1, result.getProblems().size() );
            assertEquals( 1, result.getProject().getArtifacts().size() );
            assertNotNull( result.getDependencyResolutionResult() );
        }
    }

    public void testImportScopePomResolvesFromPropertyBasedRepository()
        throws Exception
    {
        File pomFile = getTestFile( "src/test/resources/projects/import-scope-pom-resolves-from-property-based-repository.xml" );
        ProjectBuildingRequest request = newBuildingRequest();
        request.setProcessPlugins( false );
        request.setResolveDependencies( true );
        projectBuilder.build( pomFile, request );
    }

    /**
     * Tests whether local version range parent references are built correctly.
     *
     * @throws Exception
     */
    public void testBuildValidParentVersionRangeLocally()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/parent-version-range-local-valid/child/pom.xml" );

        final MavenProject childProject = getProject( f1 );

        assertNotNull( childProject.getParentArtifact() );
        // NOTE: JUnit's assertEquals takes (expected, actual) -- expected first.
        assertEquals( "1", childProject.getParentArtifact().getVersion() );
        assertNotNull( childProject.getParent() );
        assertEquals( "1", childProject.getParent().getVersion() );
        assertNotNull( childProject.getModel().getParent() );
        assertEquals( "[1,10]", childProject.getModel().getParent().getVersion() );
    }

    /**
     * Tests whether local version range parent references are built correctly.
     *
     * @throws Exception
     */
    public void testBuildParentVersionRangeLocallyWithoutChildVersion()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/parent-version-range-local-child-without-version/child/pom.xml" );

        try
        {
            getProject( f1 );
            fail( "Expected 'ProjectBuildingException' not thrown." );
        }
        catch ( final ProjectBuildingException e )
        {
            assertNotNull( e.getMessage() );
            assertTrue( e.getMessage().contains( "Version must be a constant" ) );
        }
    }

    /**
     * Tests whether local version range parent references are built correctly.
     *
     * @throws Exception
     */
    public void testBuildParentVersionRangeLocallyWithChildVersionExpression()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/parent-version-range-local-child-version-expression/child/pom.xml" );

        try
        {
            getProject( f1 );
            fail( "Expected 'ProjectBuildingException' not thrown." );
        }
        catch ( final ProjectBuildingException e )
        {
            assertNotNull( e.getMessage() );
            assertTrue( e.getMessage().contains( "Version must be a constant" ) );
        }
    }

    /**
     * Tests whether external version range parent references are built correctly.
     *
     * @throws Exception
     */
    public void testBuildParentVersionRangeExternally()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/parent-version-range-external-valid/pom.xml" );

        final MavenProject childProject = this.getProjectFromRemoteRepository( f1 );

        assertNotNull( childProject.getParentArtifact() );
        assertEquals( "1", childProject.getParentArtifact().getVersion() );
        assertNotNull( childProject.getParent() );
        assertEquals( "1", childProject.getParent().getVersion() );
        assertNotNull( childProject.getModel().getParent() );
        assertEquals( "[1,1]", childProject.getModel().getParent().getVersion() );
    }

    /**
     * Tests whether external version range parent references are built correctly.
     *
     * @throws Exception
     */
    public void testBuildParentVersionRangeExternallyWithoutChildVersion()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/parent-version-range-external-child-without-version/pom.xml" );

        try
        {
            this.getProjectFromRemoteRepository( f1 );
            fail( "Expected 'ProjectBuildingException' not thrown." );
        }
        catch ( final ProjectBuildingException e )
        {
            assertNotNull( e.getMessage() );
            assertTrue( e.getMessage().contains( "Version must be a constant" ) );
        }
    }

    /**
     * Tests whether external version range parent references are built correctly.
     *
     * @throws Exception
     */
    public void testBuildParentVersionRangeExternallyWithChildVersionExpression()
        throws Exception
    {
        File f1 = getTestFile( "src/test/resources/projects/parent-version-range-external-child-version-expression/pom.xml" );

        try
        {
            this.getProjectFromRemoteRepository( f1 );
            fail( "Expected 'ProjectBuildingException' not thrown." );
        }
        catch ( final ProjectBuildingException e )
        {
            assertNotNull( e.getMessage() );
            assertTrue( e.getMessage().contains( "Version must be a constant" ) );
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.coordination;

import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolInfo;
import org.elasticsearch.threadpool.ThreadPoolStats;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.Delayed;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;

/**
 * A single-threaded task queue over a simulated clock, for deterministic tests:
 * immediately-runnable tasks are executed in an order chosen by the supplied
 * {@link Random}, and deferred tasks become runnable only when the simulated
 * clock is explicitly advanced via {@link #advanceTime()}.
 */
public class DeterministicTaskQueue {

    private static final Logger logger = LogManager.getLogger(DeterministicTaskQueue.class);

    private final Settings settings;
    // Tasks that may run now; one is picked at random by runRandomTask().
    private final List<Runnable> runnableTasks = new ArrayList<>();
    private final Random random;
    // Tasks scheduled for a future (simulated) time.
    private List<DeferredTask> deferredTasks = new ArrayList<>();
    private long currentTimeMillis;
    // Invariant: Long.MAX_VALUE iff deferredTasks is empty (asserted in advanceTime()).
    private long nextDeferredTaskExecutionTimeMillis = Long.MAX_VALUE;
    // When > 0, scheduleNow()/scheduleAt() may add a random extra delay of up to this many ms.
    private long executionDelayVariabilityMillis;
    private long latestDeferredExecutionTime;

    public DeterministicTaskQueue(Settings settings, Random random) {
        this.settings = settings;
        this.random = random;
    }

    public long getExecutionDelayVariabilityMillis() {
        return executionDelayVariabilityMillis;
    }

    public void setExecutionDelayVariabilityMillis(long executionDelayVariabilityMillis) {
        assert executionDelayVariabilityMillis >= 0 : executionDelayVariabilityMillis;
        this.executionDelayVariabilityMillis = executionDelayVariabilityMillis;
    }

    /**
     * Runs all currently-runnable tasks (in random order) without advancing the clock.
     * Tasks enqueued by the tasks themselves are run too.
     */
    public void runAllRunnableTasks() {
        while (hasRunnableTasks()) {
            runRandomTask();
        }
    }

    /**
     * Runs every task to completion, randomly interleaving task execution with
     * clock advancement, so deferred tasks may run out of submission order.
     */
    public void runAllTasks() {
        while (hasDeferredTasks() || hasRunnableTasks()) {
            if (hasDeferredTasks() && random.nextBoolean()) {
                advanceTime();
            } else if (hasRunnableTasks()) {
                runRandomTask();
            }
        }
    }

    /**
     * Runs every task to completion, only advancing the clock when nothing is
     * currently runnable (i.e. tasks run in (simulated) time order).
     */
    public void runAllTasksInTimeOrder() {
        while (hasDeferredTasks() || hasRunnableTasks()) {
            if (hasRunnableTasks()) {
                runRandomTask();
            } else {
                advanceTime();
            }
        }
    }

    /**
     * @return whether there are any runnable tasks.
     */
    public boolean hasRunnableTasks() {
        return runnableTasks.isEmpty() == false;
    }

    /**
     * @return whether there are any deferred tasks, i.e. tasks that are scheduled for the future.
     */
    public boolean hasDeferredTasks() {
        return deferredTasks.isEmpty() == false;
    }

    /**
     * @return the current (simulated) time, in milliseconds.
     */
    public long getCurrentTimeMillis() {
        return currentTimeMillis;
    }

    /**
     * Runs an arbitrary runnable task.
     */
    public void runRandomTask() {
        assert hasRunnableTasks();
        runTask(RandomNumbers.randomIntBetween(random, 0, runnableTasks.size() - 1));
    }

    // Removes and runs the task at the given index of runnableTasks.
    private void runTask(final int index) {
        final Runnable task = runnableTasks.remove(index);
        logger.trace("running task {} of {}: {}", index, runnableTasks.size() + 1, task);
        task.run();
    }

    /**
     * Schedule a task for immediate execution.
     * NOTE: when executionDelayVariabilityMillis > 0 the task may instead be
     * randomly deferred by up to that many milliseconds.
     */
    public void scheduleNow(final Runnable task) {
        if (executionDelayVariabilityMillis > 0 && random.nextBoolean()) {
            final long executionDelay = RandomNumbers.randomLongBetween(random, 1, executionDelayVariabilityMillis);
            final DeferredTask deferredTask = new DeferredTask(currentTimeMillis + executionDelay, task);
            logger.trace("scheduleNow: delaying [{}ms], scheduling {}", executionDelay, deferredTask);
            scheduleDeferredTask(deferredTask);
        } else {
            logger.trace("scheduleNow: adding runnable {}", task);
            runnableTasks.add(task);
        }
    }

    /**
     * Schedule a task for future execution. A task whose (possibly jittered)
     * execution time is not in the future becomes immediately runnable.
     */
    public void scheduleAt(final long executionTimeMillis, final Runnable task) {
        final long extraDelayMillis = RandomNumbers.randomLongBetween(random, 0, executionDelayVariabilityMillis);
        final long actualExecutionTimeMillis = executionTimeMillis + extraDelayMillis;
        if (actualExecutionTimeMillis <= currentTimeMillis) {
            logger.trace("scheduleAt: [{}ms] is not in the future, adding runnable {}", executionTimeMillis, task);
            runnableTasks.add(task);
        } else {
            final DeferredTask deferredTask = new DeferredTask(actualExecutionTimeMillis, task);
            logger.trace("scheduleAt: adding {} with extra delay of [{}ms]", deferredTask, extraDelayMillis);
            scheduleDeferredTask(deferredTask);
        }
    }

    // Records a deferred task, keeping the next/latest execution-time bookkeeping up to date.
    private void scheduleDeferredTask(DeferredTask deferredTask) {
        nextDeferredTaskExecutionTimeMillis = Math.min(nextDeferredTaskExecutionTimeMillis, deferredTask.getExecutionTimeMillis());
        latestDeferredExecutionTime = Math.max(latestDeferredExecutionTime, deferredTask.getExecutionTimeMillis());
        deferredTasks.add(deferredTask);
    }

    /**
     * Advance the current time to the time of the next deferred task, and update the sets of deferred and runnable tasks accordingly.
     */
    public void advanceTime() {
        assert hasDeferredTasks();
        assert currentTimeMillis < nextDeferredTaskExecutionTimeMillis;

        logger.trace("advanceTime: from [{}ms] to [{}ms]", currentTimeMillis, nextDeferredTaskExecutionTimeMillis);
        currentTimeMillis = nextDeferredTaskExecutionTimeMillis;
        assert currentTimeMillis <= latestDeferredExecutionTime : latestDeferredExecutionTime + " < " + currentTimeMillis;

        nextDeferredTaskExecutionTimeMillis = Long.MAX_VALUE;
        List<DeferredTask> remainingDeferredTasks = new ArrayList<>();
        for (final DeferredTask deferredTask : deferredTasks) {
            assert currentTimeMillis <= deferredTask.getExecutionTimeMillis();
            if (deferredTask.getExecutionTimeMillis() == currentTimeMillis) {
                // Due now: promote to the runnable set.
                logger.trace("advanceTime: no longer deferred: {}", deferredTask);
                runnableTasks.add(deferredTask.getTask());
            } else {
                remainingDeferredTasks.add(deferredTask);
                nextDeferredTaskExecutionTimeMillis = Math.min(nextDeferredTaskExecutionTimeMillis, deferredTask.getExecutionTimeMillis());
            }
        }
        deferredTasks = remainingDeferredTasks;
        assert deferredTasks.isEmpty() == (nextDeferredTaskExecutionTimeMillis == Long.MAX_VALUE);
    }

    /**
     * @return A <code>ExecutorService</code> that uses this task queue.
     */
    public ExecutorService getExecutorService() {
        return getExecutorService(Function.identity());
    }

    /**
     * @return A <code>ExecutorService</code> that uses this task queue and wraps <code>Runnable</code>s in the given wrapper.
     * Only execute() is supported; all lifecycle and submit methods throw UnsupportedOperationException.
     */
    public ExecutorService getExecutorService(Function<Runnable, Runnable> runnableWrapper) {
        return new ExecutorService() {

            @Override
            public void shutdown() {
                throw new UnsupportedOperationException();
            }

            @Override
            public List<Runnable> shutdownNow() {
                throw new UnsupportedOperationException();
            }

            @Override
            public boolean isShutdown() {
                throw new UnsupportedOperationException();
            }

            @Override
            public boolean isTerminated() {
                throw new UnsupportedOperationException();
            }

            @Override
            public boolean awaitTermination(long timeout, TimeUnit unit) {
                throw new UnsupportedOperationException();
            }

            @Override
            public <T> Future<T> submit(Callable<T> task) {
                throw new UnsupportedOperationException();
            }

            @Override
            public <T> Future<T> submit(Runnable task, T result) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Future<?> submit(Runnable task) {
                throw new UnsupportedOperationException();
            }

            @Override
            public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) {
                throw new UnsupportedOperationException();
            }

            @Override
            public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) {
                throw new UnsupportedOperationException();
            }

            @Override
            public <T> T invokeAny(Collection<? extends Callable<T>> tasks) {
                throw new UnsupportedOperationException();
            }

            @Override
            public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) {
                throw new UnsupportedOperationException();
            }

            @Override
            public void execute(Runnable command) {
                scheduleNow(runnableWrapper.apply(command));
            }
        };
    }

    /**
     * @return A <code>ThreadPool</code> that uses this task queue.
     */
    public ThreadPool getThreadPool() {
        return getThreadPool(Function.identity());
    }

    /**
     * @return A <code>ThreadPool</code> that uses this task queue and wraps <code>Runnable</code>s in the given wrapper.
     */
    public ThreadPool getThreadPool(Function<Runnable, Runnable> runnableWrapper) {
        return new ThreadPool(settings) {

            private final Map<String, ThreadPool.Info> infos = new HashMap<>();

            {
                // Stop the superclass's cached-time thread; presumably so that only the
                // simulated clock (relativeTimeInMillis/absoluteTimeInMillis) is observed.
                stopCachedTimeThread();
            }

            @Override
            public long relativeTimeInMillis() {
                return currentTimeMillis;
            }

            @Override
            public long absoluteTimeInMillis() {
                return currentTimeMillis;
            }

            @Override
            public ThreadPoolInfo info() {
                throw new UnsupportedOperationException();
            }

            @Override
            public Info info(String name) {
                // Fabricates a FIXED-pool info with a random (1..10) size per name, cached.
                return infos.computeIfAbsent(name, n -> new Info(n, ThreadPoolType.FIXED, random.nextInt(10) + 1));
            }

            @Override
            public ThreadPoolStats stats() {
                throw new UnsupportedOperationException();
            }

            @Override
            public ExecutorService generic() {
                return getExecutorService(runnableWrapper);
            }

            @Override
            public ExecutorService executor(String name) {
                return getExecutorService(runnableWrapper);
            }

            @Override
            public ScheduledCancellable schedule(Runnable command, TimeValue delay, String executor) {
                // Three-state lifecycle so cancel() only wins if the task has not started.
                final int NOT_STARTED = 0;
                final int STARTED = 1;
                final int CANCELLED = 2;
                final AtomicInteger taskState = new AtomicInteger(NOT_STARTED);

                scheduleAt(currentTimeMillis + delay.millis(), runnableWrapper.apply(new Runnable() {
                    @Override
                    public void run() {
                        if (taskState.compareAndSet(NOT_STARTED, STARTED)) {
                            command.run();
                        }
                    }

                    @Override
                    public String toString() {
                        return command.toString();
                    }
                }));

                return new ScheduledCancellable() {
                    @Override
                    public long getDelay(TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public int compareTo(Delayed o) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public boolean cancel() {
                        return taskState.compareAndSet(NOT_STARTED, CANCELLED);
                    }

                    @Override
                    public boolean isCancelled() {
                        return taskState.get() == CANCELLED;
                    }
                };
            }

            @Override
            public Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, String executor) {
                return super.scheduleWithFixedDelay(command, interval, executor);
            }

            @Override
            public void shutdown() {
                throw new UnsupportedOperationException();
            }

            @Override
            public void shutdownNow() {
                throw new UnsupportedOperationException();
            }

            @Override
            public boolean awaitTermination(long timeout, TimeUnit unit) {
                throw new UnsupportedOperationException();
            }

            @Override
            public ScheduledExecutorService scheduler() {
                // Fully unsupported scheduler: every method throws.
                return new ScheduledExecutorService() {
                    @Override
                    public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public ScheduledFuture<?> scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public void shutdown() {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public List<Runnable> shutdownNow() {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public boolean isShutdown() {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public boolean isTerminated() {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public boolean awaitTermination(long timeout, TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public <T> Future<T> submit(Callable<T> task) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public <T> Future<T> submit(Runnable task, T result) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public Future<?> submit(Runnable task) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public <T> T invokeAny(Collection<? extends Callable<T>> tasks) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public void execute(Runnable command) {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        };
    }

    public long getLatestDeferredExecutionTime() {
        return latestDeferredExecutionTime;
    }

    // A task paired with its (simulated) execution time.
    private static class DeferredTask {
        private final long executionTimeMillis;
        private final Runnable task;

        DeferredTask(long executionTimeMillis, Runnable task) {
            this.executionTimeMillis = executionTimeMillis;
            this.task = task;
            assert executionTimeMillis < Long.MAX_VALUE : "Long.MAX_VALUE is special, cannot be an execution time";
        }

        long getExecutionTimeMillis() {
            return executionTimeMillis;
        }

        Runnable getTask() {
            return task;
        }

        @Override
        public String toString() {
            return "DeferredTask{" + "executionTimeMillis=" + executionTimeMillis + ", task=" + task + '}';
        }
    }
}
/*
 * Copyright (c) 2020 EmeraldPay Inc, All Rights Reserved.
 * Copyright (c) 2016-2017 Infinitape Inc, All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.emeraldpay.etherjar.rpc.json;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.emeraldpay.etherjar.domain.*;

import java.io.Serializable;
import java.util.List;
import java.util.Objects;

/**
 * JSON-mapped view of an Ethereum transaction receipt, as returned by
 * {@code eth_getTransactionReceipt}. Serialization is handled by the dedicated
 * Jackson (de)serializers declared below.
 */
@JsonDeserialize(using = TransactionReceiptJsonDeserializer.class)
@JsonSerialize(using = TransactionReceiptJsonSerializer.class)
public class TransactionReceiptJson implements TransactionRef, Serializable {

    /**
     * hash of the transaction
     */
    private TransactionId transactionHash;

    /**
     * position in the block
     */
    private Long transactionIndex;

    /**
     * hash of the block where this transaction was in.
     */
    private BlockHash blockHash;

    /**
     * block number where this transaction was in
     */
    private Long blockNumber;

    /**
     * total amount of gas used when this transaction was executed in the block.
     */
    private Long cumulativeGasUsed;

    /**
     * Sender
     */
    private Address from;

    /**
     * Target address
     */
    private Address to;

    /**
     * amount of gas used by this specific transaction alone.
     */
    private Long gasUsed;

    /**
     * The contract address created, if the transaction was a contract creation, otherwise null.
     */
    private Address contractAddress;

    /**
     * Array of log objects, which this transaction generated.
     */
    private List<TransactionLogJson> logs;

    private Bloom logsBloom;

    /**
     * Optional tx status. 0 if failed, 1 if successful
     */
    private Integer status;

    public TransactionId getTransactionHash() {
        return transactionHash;
    }

    public void setTransactionHash(TransactionId transactionHash) {
        this.transactionHash = transactionHash;
    }

    public Long getTransactionIndex() {
        return transactionIndex;
    }

    public void setTransactionIndex(Long transactionIndex) {
        this.transactionIndex = transactionIndex;
    }

    public BlockHash getBlockHash() {
        return blockHash;
    }

    public void setBlockHash(BlockHash blockHash) {
        this.blockHash = blockHash;
    }

    public Long getBlockNumber() {
        return blockNumber;
    }

    public void setBlockNumber(Long blockNumber) {
        this.blockNumber = blockNumber;
    }

    public Long getCumulativeGasUsed() {
        return cumulativeGasUsed;
    }

    public void setCumulativeGasUsed(Long cumulativeGasUsed) {
        this.cumulativeGasUsed = cumulativeGasUsed;
    }

    public Address getFrom() {
        return from;
    }

    public void setFrom(Address from) {
        this.from = from;
    }

    public Address getTo() {
        return to;
    }

    public void setTo(Address to) {
        this.to = to;
    }

    public Long getGasUsed() {
        return gasUsed;
    }

    public void setGasUsed(Long gasUsed) {
        this.gasUsed = gasUsed;
    }

    public Address getContractAddress() {
        return contractAddress;
    }

    public void setContractAddress(Address contractAddress) {
        this.contractAddress = contractAddress;
    }

    public List<TransactionLogJson> getLogs() {
        return logs;
    }

    public void setLogs(List<TransactionLogJson> logs) {
        this.logs = logs;
    }

    public Bloom getLogsBloom() {
        return logsBloom;
    }

    public void setLogsBloom(Bloom logsBloom) {
        this.logsBloom = logsBloom;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    @Override
    public TransactionId getHash() {
        return transactionHash;
    }

    /**
     * Field-by-field equality over all receipt data.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof TransactionReceiptJson)) return false;

        TransactionReceiptJson that = (TransactionReceiptJson) o;

        if (!Objects.equals(transactionHash, that.transactionHash)) return false;
        if (!Objects.equals(transactionIndex, that.transactionIndex)) return false;
        if (!Objects.equals(from, that.from)) return false;
        if (!Objects.equals(to, that.to)) return false;
        if (!Objects.equals(blockHash, that.blockHash)) return false;
        if (!Objects.equals(blockNumber, that.blockNumber)) return false;
        if (!Objects.equals(cumulativeGasUsed, that.cumulativeGasUsed)) return false;
        if (!Objects.equals(gasUsed, that.gasUsed)) return false;
        if (!Objects.equals(contractAddress, that.contractAddress)) return false;
        if (!Objects.equals(logsBloom, that.logsBloom)) return false;
        // FIX: status was previously omitted, so receipts differing only in
        // success/failure status compared as equal.
        if (!Objects.equals(status, that.status)) return false;
        return Objects.equals(logs, that.logs);
    }

    /**
     * Hash over transactionHash and blockHash only; consistent with equals()
     * because equal receipts necessarily share both hashes.
     */
    @Override
    public int hashCode() {
        int result = transactionHash != null ? transactionHash.hashCode() : 0;
        result = 31 * result + (blockHash != null ? blockHash.hashCode() : 0);
        return result;
    }
}
package cbedoy.cblibrary.widgets;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;

import cbedoy.cblibrary.R;

/**
 * Created by Carlos Bedoy on 28/12/2014.
 *
 * Mobile App Developer
 * CBLibrary
 *
 * E-mail: carlos.bedoy@gmail.com
 * Facebook: https://www.facebook.com/carlos.bedoy
 * Github: https://github.com/cbedoy
 */
// A circular (ring-shaped) progress View drawn with two arcs: a background ring
// and a progress arc starting at 12 o'clock (270 degrees). Attributes come from
// R.styleable.HoloCircularProgressBar; progress/colors survive config changes
// via onRestoreInstanceState (partially visible below).
public class CircularProgressBar extends View {

    // Keys for the state Bundle used in onSaveInstanceState/onRestoreInstanceState.
    private static final String INSTANCE_STATE_SAVEDSTATE = "saved_state";
    private static final String INSTANCE_STATE_PROGRESS = "progress_loader";
    private static final String INSTANCE_STATE_MARKER_PROGRESS = "marker_progress";
    private static final String INSTANCE_STATE_PROGRESS_BACKGROUND_COLOR = "progress_background_color";
    private static final String INSTANCE_STATE_PROGRESS_COLOR = "progress_color";
    private static final String INSTANCE_STATE_THUMB_VISIBLE = "thumb_visible";
    private static final String INSTANCE_STATE_MARKER_VISIBLE = "marker_visible";

    // Bounds of the ring, centered at the (translated) origin; set in onMeasure.
    private final RectF mCircleBounds = new RectF();
    private Paint mBackgroundColorPaint = new Paint();
    private int mCircleStrokeWidth = 10;
    private int mGravity = Gravity.CENTER;
    private int mHorizontalInset = 0;
    // True while the constructor is still running; cleared once all paints are ready.
    private boolean mIsInitializing = true;
    private boolean mIsThumbEnabled = true;
    private Paint mMarkerColorPaint;
    private Paint mProgressColorPaint;
    private Paint mThumbColorPaint = new Paint();
    private float mMarkerProgress = 0.0f;
    // NOTE(review): "mOverrdraw" is a pre-existing typo for "overdraw"; when true,
    // onDraw paints a full 360-degree progress arc and skips the background.
    private boolean mOverrdraw = false;
    // Fraction of the ring filled, in [0..1] (0.3f default).
    private float mProgress = 0.3f;
    private int mProgressBackgroundColor;
    private int mProgressColor;
    private float mRadius;
    private float mThumbPosX;
    private float mThumbPosY;
    private int mThumbRadius = 20;
    private float mTranslationOffsetX;
    private float mTranslationOffsetY;
    private int mVerticalInset = 0;

    public CircularProgressBar(final Context context) {
        this(context, null);
    }

    public CircularProgressBar(final Context context, final AttributeSet attrs) {
        this(context, attrs, R.attr.circularProgressBarStyle);
    }

    // Main constructor: reads the styled attributes, then derives paints.
    public CircularProgressBar(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);

        final TypedArray attributes = context
                .obtainStyledAttributes(attrs, R.styleable.HoloCircularProgressBar, defStyle, 0);
        if (attributes != null) {
            try {
                setProgressColor(attributes.getColor(R.styleable.HoloCircularProgressBar_progress_color, Color.CYAN));
                setProgressBackgroundColor(attributes.getColor(R.styleable.HoloCircularProgressBar_progress_background_color,Color.GREEN));
                setProgress(attributes.getFloat(R.styleable.HoloCircularProgressBar_progress_loader, 0.0f));
                setWheelSize((int) attributes.getDimension(R.styleable.HoloCircularProgressBar_stroke_width, 10));
                setThumbEnabled(attributes.getBoolean(R.styleable.HoloCircularProgressBar_thumb_visible, true));
                mGravity = attributes.getInt(R.styleable.HoloCircularProgressBar_android_gravity,Gravity.CENTER);
            } finally {
                // TypedArrays are pooled; always recycle.
                attributes.recycle();
            }
        }

        mThumbRadius = mCircleStrokeWidth * 2;

        updateBackgroundColor();
        updateMarkerColor();
        updateProgressColor();

        // the view has now all properties and can be drawn
        mIsInitializing = false;
    }

    @Override
    protected void onDraw(final Canvas canvas) {
        // Move the origin to the ring center so arcs are drawn around (0,0).
        canvas.translate(mTranslationOffsetX, mTranslationOffsetY);

        final float progressRotation = getCurrentRotation();

        // draw the background
        if (!mOverrdraw) {
            canvas.drawArc(mCircleBounds, 270, -(360 - progressRotation), false,mBackgroundColorPaint);
        }

        // draw the progress or a full circle if overdraw is true
        canvas.drawArc(mCircleBounds, 270, mOverrdraw ? 360 : progressRotation, false,mProgressColorPaint);

        // draw the marker at the correct rotated position
    }

    @Override
    protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
        final int height = getDefaultSize(getSuggestedMinimumHeight() + getPaddingTop() + getPaddingBottom(),heightMeasureSpec);
        final int width = getDefaultSize(getSuggestedMinimumWidth() + getPaddingLeft() + getPaddingRight(), widthMeasureSpec);

        // The view is always square: pick the diameter from the constrained axis.
        final int diameter;
        if (heightMeasureSpec == MeasureSpec.UNSPECIFIED) {
            // ScrollView
            diameter = width;
            computeInsets(0, 0);
        } else if (widthMeasureSpec == MeasureSpec.UNSPECIFIED) {
            // HorizontalScrollView
            diameter = height;
            computeInsets(0, 0);
        } else {
            // Default
            diameter = Math.min(width, height);
            computeInsets(width - diameter, height - diameter);
        }

        setMeasuredDimension(diameter, diameter);

        final float halfWidth = diameter * 0.5f;

        // width of the drawed circle (+ the drawedThumb)
        final float drawedWith;
        if (isThumbEnabled()) {
            drawedWith = mThumbRadius * (5f / 6f);
        } else {
            drawedWith = mCircleStrokeWidth / 2f;
        }

        // -0.5f for pixel perfect fit inside the viewbounds
        mRadius = halfWidth - drawedWith - 0.5f;

        mCircleBounds.set(-mRadius, -mRadius, mRadius, mRadius);

        // Thumb starts at angle 0 on the circle (cos(0)=1, sin(0)=0).
        mThumbPosX = (float) (mRadius * Math.cos(0));
        mThumbPosY = (float) (mRadius * Math.sin(0));

        mTranslationOffsetX = halfWidth + mHorizontalInset;
        mTranslationOffsetY = halfWidth + mVerticalInset;

    }

    @Override
    protected void onRestoreInstanceState(final Parcelable state) {
        if (state instanceof Bundle) {
            final Bundle bundle = (Bundle) state;
            setProgress(bundle.getFloat(INSTANCE_STATE_PROGRESS));

            // Only re-derive paints when the restored colors actually differ.
            final int progressColor = bundle.getInt(INSTANCE_STATE_PROGRESS_COLOR);
            if (progressColor != mProgressColor) {
                mProgressColor = progressColor;
                updateProgressColor();
            }

            final int progressBackgroundColor = bundle
                    .getInt(INSTANCE_STATE_PROGRESS_BACKGROUND_COLOR);
            if (progressBackgroundColor != mProgressBackgroundColor) {
                mProgressBackgroundColor = progressBackgroundColor;
updateBackgroundColor(); } mIsThumbEnabled = bundle.getBoolean(INSTANCE_STATE_THUMB_VISIBLE); super.onRestoreInstanceState(bundle.getParcelable(INSTANCE_STATE_SAVEDSTATE)); return; } super.onRestoreInstanceState(state); } @Override protected Parcelable onSaveInstanceState() { final Bundle bundle = new Bundle(); bundle.putParcelable(INSTANCE_STATE_SAVEDSTATE, super.onSaveInstanceState()); bundle.putFloat(INSTANCE_STATE_PROGRESS, mProgress); bundle.putFloat(INSTANCE_STATE_MARKER_PROGRESS, mMarkerProgress); bundle.putInt(INSTANCE_STATE_PROGRESS_COLOR, mProgressColor); bundle.putInt(INSTANCE_STATE_PROGRESS_BACKGROUND_COLOR, mProgressBackgroundColor); bundle.putBoolean(INSTANCE_STATE_THUMB_VISIBLE, mIsThumbEnabled); return bundle; } public boolean isThumbEnabled() { return mIsThumbEnabled; } public void setProgress(final float progress) { if (progress == mProgress) { mProgress = 0.3f; } if (progress == 1) { mOverrdraw = false; mProgress = 1; } else { if (progress >= 1) { mOverrdraw = true; } else { mOverrdraw = false; } mProgress = progress % 1.0f; } if (!mIsInitializing) { invalidate(); } } public void setProgressBackgroundColor(final int color) { mProgressBackgroundColor = color; updateMarkerColor(); updateBackgroundColor(); } public void setProgressColor(final int color) { mProgressColor = color; updateProgressColor(); } public void setThumbEnabled(final boolean enabled) { mIsThumbEnabled = enabled; } public void setWheelSize(final int dimension) { mCircleStrokeWidth = dimension; // update the paints updateBackgroundColor(); updateMarkerColor(); updateProgressColor(); } /** * Compute insets. 
* * <pre> * ______________________ * |_________dx/2_________| * |......| /'''''\|......| * |-dx/2-|| View ||-dx/2-| * |______| \_____/|______| * |________ dx/2_________| * </pre> * * @param dx the dx the horizontal unfilled space * @param dy the dy the horizontal unfilled space */ @SuppressLint("NewApi") private void computeInsets(final int dx, final int dy) { int absoluteGravity = mGravity; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { absoluteGravity = Gravity.getAbsoluteGravity(mGravity, getLayoutDirection()); } switch (absoluteGravity & Gravity.HORIZONTAL_GRAVITY_MASK) { case Gravity.LEFT: mHorizontalInset = 0; break; case Gravity.RIGHT: mHorizontalInset = dx; break; case Gravity.CENTER_HORIZONTAL: default: mHorizontalInset = dx / 2; break; } switch (absoluteGravity & Gravity.VERTICAL_GRAVITY_MASK) { case Gravity.TOP: mVerticalInset = 0; break; case Gravity.BOTTOM: mVerticalInset = dy; break; case Gravity.CENTER_VERTICAL: default: mVerticalInset = dy / 2; break; } } private float getCurrentRotation() { return 360 * mProgress; } private float getMarkerRotation() { return 360 * mMarkerProgress; } private void updateBackgroundColor() { mBackgroundColorPaint = new Paint(Paint.ANTI_ALIAS_FLAG); mBackgroundColorPaint.setColor(mProgressBackgroundColor); mBackgroundColorPaint.setStyle(Paint.Style.STROKE); mBackgroundColorPaint.setStrokeWidth(mCircleStrokeWidth); invalidate(); } private void updateMarkerColor() { mMarkerColorPaint = new Paint(Paint.ANTI_ALIAS_FLAG); mMarkerColorPaint.setColor(mProgressBackgroundColor); mMarkerColorPaint.setStyle(Paint.Style.STROKE); mMarkerColorPaint.setStrokeWidth(mCircleStrokeWidth / 2); invalidate(); } private void updateProgressColor() { mProgressColorPaint = new Paint(Paint.ANTI_ALIAS_FLAG); mProgressColorPaint.setColor(mProgressColor); mProgressColorPaint.setStyle(Paint.Style.STROKE); mProgressColorPaint.setStrokeWidth(mCircleStrokeWidth); mThumbColorPaint = new Paint(Paint.ANTI_ALIAS_FLAG); 
mThumbColorPaint.setColor(mProgressColor); mThumbColorPaint.setStyle(Paint.Style.FILL_AND_STROKE); mThumbColorPaint.setStrokeWidth(mCircleStrokeWidth); invalidate(); } }
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.rules.apple;

import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.ConfigurationEnvironment;
import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.rules.apple.AppleCommandLineOptions.AppleBitcodeMode;
import com.google.devtools.build.lib.rules.apple.Platform.PlatformType;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
import com.google.devtools.build.lib.util.Preconditions;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import javax.annotation.Nullable;

/** A configuration containing flags required for Apple platforms and tools. */
@SkylarkModule(
  name = "apple",
  doc = "A configuration fragment for Apple platforms",
  category = SkylarkModuleCategory.CONFIGURATION_FRAGMENT
)
@Immutable
public class AppleConfiguration extends BuildConfiguration.Fragment {
  /**
   * Environment variable name for the xcode version. The value of this environment variable should
   * be set to the version (for example, "7.2") of xcode to use when invoking part of the apple
   * toolkit in action execution.
   **/
  public static final String XCODE_VERSION_ENV_NAME = "XCODE_VERSION_OVERRIDE";
  /**
   * Environment variable name for the apple SDK version. If unset, uses the system default of the
   * host for the platform in the value of {@link #APPLE_SDK_PLATFORM_ENV_NAME}.
   **/
  public static final String APPLE_SDK_VERSION_ENV_NAME = "APPLE_SDK_VERSION_OVERRIDE";
  /**
   * Environment variable name for the apple SDK platform. This should be set for all actions that
   * require an apple SDK. The valid values consist of {@link Platform} names.
   **/
  public static final String APPLE_SDK_PLATFORM_ENV_NAME = "APPLE_SDK_PLATFORM";

  // Bitcode is only supported from Xcode 7 onward; enforced in Loader.validate below.
  private static final DottedVersion MINIMUM_BITCODE_XCODE_VERSION = DottedVersion.fromString("7");

  // Per-platform SDK and minimum-OS versions, resolved from flags (or xcode_config defaults)
  // by Loader.create before construction.
  private final DottedVersion iosSdkVersion;
  private final DottedVersion iosMinimumOs;
  private final DottedVersion watchosSdkVersion;
  private final DottedVersion watchosMinimumOs;
  private final DottedVersion tvosSdkVersion;
  private final DottedVersion tvosMinimumOs;
  private final DottedVersion macosXSdkVersion;
  private final String iosCpu;
  private final String appleSplitCpu;
  private final PlatformType applePlatformType;
  private final ConfigurationDistinguisher configurationDistinguisher;
  private final Optional<DottedVersion> xcodeVersion;
  private final ImmutableList<String> iosMultiCpus;
  private final ImmutableList<String> watchosCpus;
  private final ImmutableList<String> tvosCpus;
  private final AppleBitcodeMode bitcodeMode;
  private final Label xcodeConfigLabel;
  @Nullable private final String xcodeToolchain;
  @Nullable private final Label defaultProvisioningProfileLabel;
  private final boolean disableNativeSwiftRules;

  /**
   * Package-private constructor; instances are built by {@link Loader} after the SDK and
   * minimum-OS versions have been resolved against the xcode_config defaults. Watch/TV cpu lists
   * fall back to the command-line defaults when unset or empty.
   */
  AppleConfiguration(AppleCommandLineOptions appleOptions,
      Optional<DottedVersion> xcodeVersionOverride,
      DottedVersion iosSdkVersion,
      DottedVersion watchosSdkVersion,
      DottedVersion watchosMinimumOs,
      DottedVersion tvosSdkVersion,
      DottedVersion tvosMinimumOs,
      DottedVersion macosXSdkVersion) {
    this.iosSdkVersion = Preconditions.checkNotNull(iosSdkVersion, "iosSdkVersion");
    this.iosMinimumOs = Preconditions.checkNotNull(appleOptions.iosMinimumOs, "iosMinimumOs");
    this.watchosSdkVersion =
        Preconditions.checkNotNull(watchosSdkVersion, "watchOsSdkVersion");
    this.watchosMinimumOs =
        Preconditions.checkNotNull(watchosMinimumOs, "watchOsMinimumOs");
    this.tvosSdkVersion =
        Preconditions.checkNotNull(tvosSdkVersion, "tvOsSdkVersion");
    this.tvosMinimumOs =
        Preconditions.checkNotNull(tvosMinimumOs, "tvOsMinimumOs");
    this.macosXSdkVersion =
        Preconditions.checkNotNull(macosXSdkVersion, "macOsXSdkVersion");
    this.xcodeVersion = Preconditions.checkNotNull(xcodeVersionOverride);
    this.iosCpu = Preconditions.checkNotNull(appleOptions.iosCpu, "iosCpu");
    this.appleSplitCpu = Preconditions.checkNotNull(appleOptions.appleSplitCpu, "appleSplitCpu");
    this.applePlatformType =
        Preconditions.checkNotNull(appleOptions.applePlatformType, "applePlatformType");
    this.configurationDistinguisher = appleOptions.configurationDistinguisher;
    this.iosMultiCpus = ImmutableList.copyOf(
        Preconditions.checkNotNull(appleOptions.iosMultiCpus, "iosMultiCpus"));
    this.watchosCpus = (appleOptions.watchosCpus == null || appleOptions.watchosCpus.isEmpty())
        ? ImmutableList.of(AppleCommandLineOptions.DEFAULT_WATCHOS_CPU)
        : ImmutableList.copyOf(appleOptions.watchosCpus);
    this.tvosCpus = (appleOptions.tvosCpus == null || appleOptions.tvosCpus.isEmpty())
        ? ImmutableList.of(AppleCommandLineOptions.DEFAULT_TVOS_CPU)
        : ImmutableList.copyOf(appleOptions.tvosCpus);
    this.bitcodeMode = appleOptions.appleBitcodeMode;
    this.xcodeConfigLabel =
        Preconditions.checkNotNull(appleOptions.xcodeVersionConfig, "xcodeConfigLabel");
    this.defaultProvisioningProfileLabel = appleOptions.defaultProvisioningProfile;
    this.xcodeToolchain = appleOptions.xcodeToolchain;
    this.disableNativeSwiftRules = appleOptions.disableNativeSwiftRules;
  }

  /**
   * Returns the minimum iOS version supported by binaries and libraries. Any dependencies on newer
   * iOS version features or libraries will become weak dependencies which are only loaded if the
   * runtime OS supports them.
   */
  @SkylarkCallable(name = "ios_minimum_os", structField = true,
      doc = "The minimum compatible iOS version for target simulators and devices.")
  public DottedVersion getMinimumOs() {
    // TODO(bazel-team): Deprecate in favor of getMinimumOsForPlatformType(IOS).
    return iosMinimumOs;
  }

  @SkylarkCallable(
      name = "minimum_os_for_platform_type",
      doc = "The minimum compatible OS version for target simulator and devices for a particular "
          + "platform type.")
  public DottedVersion getMinimumOsForPlatformType(PlatformType platformType) {
    switch (platformType) {
      case IOS:
        return iosMinimumOs;
      case TVOS:
        return tvosMinimumOs;
      case WATCHOS:
        return watchosMinimumOs;
      default:
        throw new IllegalArgumentException("Unhandled platform: " + platformType);
    }
  }

  /**
   * Returns the SDK version for ios SDKs (whether they be for simulator or device). This is
   * directly derived from --ios_sdk_version.
   *
   * @deprecated - use {@link #getSdkVersionForPlatform()}
   */
  @Deprecated
  public DottedVersion getIosSdkVersion() {
    return getSdkVersionForPlatform(Platform.IOS_DEVICE);
  }

  /**
   * Returns the SDK version for a platform (whether they be for simulator or device). This is
   * directly derived from command line args.
   */
  @SkylarkCallable(name = "sdk_version_for_platform",
      doc = "The SDK version given a platform.")
  public DottedVersion getSdkVersionForPlatform(Platform platform) {
    switch (platform) {
      case IOS_DEVICE:
      case IOS_SIMULATOR:
        return iosSdkVersion;
      case TVOS_DEVICE:
      case TVOS_SIMULATOR:
        return tvosSdkVersion;
      case WATCHOS_DEVICE:
      case WATCHOS_SIMULATOR:
        return watchosSdkVersion;
      case MACOS_X:
        return macosXSdkVersion;
    }
    // Unreachable: the switch above is exhaustive over Platform.
    throw new AssertionError();
  }

  /**
   * Returns the value of the xcode version, if available. This is determined based on a combination
   * of the {@code --xcode_version} build flag and the {@code xcode_config} target defined in the
   * {@code --xcode_version_config} flag.
   */
  public Optional<DottedVersion> getXcodeVersion() {
    return xcodeVersion;
  }

  /**
   * Returns a map of environment variables (derived from configuration) that should be propagated
   * for actions pertaining to the given apple platform. Keys are variable names and values are
   * their corresponding values.
   */
  @SkylarkCallable(name = "target_apple_env")
  public Map<String, String> getTargetAppleEnvironment(Platform platform) {
    ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
    mapBuilder.putAll(appleTargetPlatformEnv(platform));
    return mapBuilder.build();
  }

  /**
   * Returns a map of environment variables that should be propagated for actions that build on an
   * apple host system. These environment variables are needed by the apple toolchain. Keys are
   * variable names and values are their corresponding values.
   */
  @SkylarkCallable(
    name = "apple_host_system_env",
    doc =
        "Returns a map of environment variables that should be propagated for actions that "
        + "build on an apple host system. These environment variables are needed by the apple "
        + "toolchain. Keys are variable names and values are their corresponding values."
  )
  public Map<String, String> getAppleHostSystemEnv() {
    Optional<DottedVersion> xcodeVersion = getXcodeVersion();
    if (xcodeVersion.isPresent()) {
      return getXcodeVersionEnv(xcodeVersion.get());
    } else {
      // No explicit xcode version declared: let the host system defaults apply.
      return ImmutableMap.of();
    }
  }

  /**
   * Returns a map of environment variables that should be propagated for actions that require
   * a version of xcode to be explicitly declared. Keys are variable names and values are their
   * corresponding values.
   */
  public Map<String, String> getXcodeVersionEnv(DottedVersion xcodeVersion) {
    return ImmutableMap.of(AppleConfiguration.XCODE_VERSION_ENV_NAME, xcodeVersion.toString());
  }

  /**
   * Returns a map of environment variables (derived from configuration) that should be propagated
   * for actions pertaining to building applications for apple platforms. These environment
   * variables are needed to use apple toolkits. Keys are variable names and values are their
   * corresponding values.
   */
  public Map<String, String> appleTargetPlatformEnv(Platform platform) {
    ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();

    // TODO(cparsons): Avoid setting SDK version for macosx. Until SDK version is
    // evaluated for the current configuration xcode version, this would break users who build
    // cc_* rules without specifying both xcode_version and macosx_sdk_version build options.
    if (platform != Platform.MACOS_X) {
      String sdkVersion = getSdkVersionForPlatform(platform).toStringWithMinimumComponents(2);
      builder.put(AppleConfiguration.APPLE_SDK_VERSION_ENV_NAME, sdkVersion)
          .put(AppleConfiguration.APPLE_SDK_PLATFORM_ENV_NAME, platform.getNameInPlist());
    }
    return builder.build();
  }

  /**
   * Returns the value of {@code ios_cpu} for this configuration. This is not necessarily the
   * platform or cpu for all actions spawned in this configuration; it is appropriate for
   * identifying the target cpu of iOS compile and link actions within this configuration.
   */
  @SkylarkCallable(name = "ios_cpu", doc = "The value of ios_cpu for this configuration.")
  public String getIosCpu() {
    return iosCpu;
  }

  /**
   * Gets the single "effective" architecture for this configuration's {@link PlatformType} (for
   * example, "i386" or "arm64"). Prefer this over {@link #getMultiArchitectures(PlatformType)} only
   * if in the context of rule logic which is only concerned with a single architecture (such as in
   * {@code objc_library}, which registers single-architecture compile actions).
   *
   * <p>Single effective architecture is determined using the following rules:
   *
   * <ol>
   * <li>If {@code --apple_split_cpu} is set (done via prior configuration transition), then that is
   * the effective architecture.
   * <li>If the multi cpus flag (e.g. {@code --ios_multi_cpus}) is set and non-empty, then the first
   * such architecture is returned.
   * <li>In the case of iOS, use {@code --ios_cpu} for backwards compatibility.
   * <li>Use the default.
   * </ol>
   */
  @SkylarkCallable(
    name = "single_arch_cpu",
    structField = true,
    doc = "The single \"effective\" architecture for this configuration (e.g. i386 or arm64) "
        + "in the context of rule logic which is only concerned with a single architecture "
        + "(such as in objc_library, which registers single-architecture compile actions). "
  )
  public String getSingleArchitecture() {
    if (!Strings.isNullOrEmpty(appleSplitCpu)) {
      return appleSplitCpu;
    }
    switch (applePlatformType) {
      case IOS:
        if (!getIosMultiCpus().isEmpty()) {
          return getIosMultiCpus().get(0);
        } else {
          return getIosCpu();
        }
      case WATCHOS:
        return watchosCpus.get(0);
      case TVOS:
        return tvosCpus.get(0);
      // TODO(cparsons): Handle all platform types.
      default:
        throw new IllegalArgumentException("Unhandled platform type " + applePlatformType);
    }
  }

  /**
   * Gets the "effective" architecture(s) for the given {@link PlatformType}. For example,
   * "i386" or "arm64". At least one architecture is always returned. Prefer this over
   * {@link #getSingleArchitecture} in rule logic which may support multiple architectures, such
   * as bundling rules.
   *
   * <p>Effective architecture(s) is determined using the following rules:
   * <ol>
   * <li>If {@code --apple_split_cpu} is set (done via prior configuration transition), then
   * that is the effective architecture.</li>
   * <li>If the multi-cpu flag (for example, {@code --ios_multi_cpus}) is non-empty, then, return
   * all architectures from that flag.</li>
   * <li>In the case of iOS, use {@code --ios_cpu} for backwards compatibility.</li>
   * <li>Use the default.</li></ol>
   *
   * @throws IllegalArgumentException if {@code --apple_platform_type} is set (via prior
   *     configuration transition) yet does not match {@code platformType}
   */
  public List<String> getMultiArchitectures(PlatformType platformType) {
    if (!Strings.isNullOrEmpty(appleSplitCpu)) {
      if (applePlatformType != platformType) {
        throw new IllegalArgumentException(
            String.format("Expected post-split-transition platform type %s to match input %s ",
                applePlatformType, platformType));
      }
      return ImmutableList.of(appleSplitCpu);
    }
    switch (platformType) {
      case IOS:
        if (getIosMultiCpus().isEmpty()) {
          return ImmutableList.of(getIosCpu());
        } else {
          return getIosMultiCpus();
        }
      case WATCHOS:
        return watchosCpus;
      case TVOS:
        return tvosCpus;
      default:
        throw new IllegalArgumentException("Unhandled platform type " + platformType);
    }
  }

  /**
   * Gets the single "effective" platform for this configuration's {@link PlatformType} and
   * architecture. Prefer this over {@link #getMultiArchPlatform(PlatformType)} only in cases if in
   * the context of rule logic which is only concerned with a single architecture (such as in {@code
   * objc_library}, which registers single-architecture compile actions).
   */
  @SkylarkCallable(
    name = "single_arch_platform",
    doc = "The platform of the current configuration. This should only be invoked in a context where "
        + "only a single architecture may be supported; consider mutli_arch_platform for other "
        + "cases.",
    structField = true
  )
  public Platform getSingleArchPlatform() {
    return Platform.forTarget(applePlatformType, getSingleArchitecture());
  }

  /**
   * Gets the current configuration {@link Platform} for the given {@link PlatformType}. Platform
   * is determined via a combination between the given platform type and the "effective"
   * architectures of this configuration, as returned by {@link #getMultiArchitectures}; if any
   * of the supported architectures are of device type, this will return a device platform.
   * Otherwise, this will return a simulator platform.
   */
  // TODO(bazel-team): This should support returning multiple platforms.
  @SkylarkCallable(name = "multi_arch_platform", doc = "The platform of the current configuration "
      + "for the given platform type. This should only be invoked in a context where multiple "
      + "architectures may be supported; consider single_arch_platform for other cases.")
  public Platform getMultiArchPlatform(PlatformType platformType) {
    List<String> architectures = getMultiArchitectures(platformType);
    switch (platformType) {
      case IOS:
        for (String arch : architectures) {
          if (Platform.forTarget(PlatformType.IOS, arch) == Platform.IOS_DEVICE) {
            return Platform.IOS_DEVICE;
          }
        }
        return Platform.IOS_SIMULATOR;
      case WATCHOS:
        for (String arch : architectures) {
          if (Platform.forTarget(PlatformType.WATCHOS, arch) == Platform.WATCHOS_DEVICE) {
            return Platform.WATCHOS_DEVICE;
          }
        }
        return Platform.WATCHOS_SIMULATOR;
      case TVOS:
        for (String arch : architectures) {
          if (Platform.forTarget(PlatformType.TVOS, arch) == Platform.TVOS_DEVICE) {
            return Platform.TVOS_DEVICE;
          }
        }
        return Platform.TVOS_SIMULATOR;
      default:
        throw new IllegalArgumentException("Unsupported platform type " + platformType);
    }
  }

  /**
   * Returns the {@link Platform} represented by {@code ios_cpu} (see {@link #getIosCpu}.
   * (For example, {@code i386} maps to {@link Platform#IOS_SIMULATOR}.) Note that this is not
   * necessarily the effective platform for all ios actions in the current context: This is
   * typically the correct platform for implicityly-ios compile and link actions in the current
   * context. For effective platform for bundling actions, see
   * {@link #getMultiArchPlatform(PlatformType)}.
   */
  // TODO(b/28754442): Deprecate for more general skylark-exposed platform retrieval.
  @SkylarkCallable(name = "ios_cpu_platform", doc = "The platform given by the ios_cpu flag.")
  public Platform getIosCpuPlatform() {
    return Platform.forTarget(PlatformType.IOS, iosCpu);
  }

  /**
   * Returns the architecture for which we keep dependencies that should be present only once (in a
   * single architecture).
   *
   * <p>When building with multiple architectures there are some dependencies we want to avoid
   * duplicating: they would show up more than once in the same location in the final application
   * bundle which is illegal. Instead we pick one architecture for which to keep all dependencies
   * and discard any others.
   */
  public String getDependencySingleArchitecture() {
    if (!getIosMultiCpus().isEmpty()) {
      return getIosMultiCpus().get(0);
    }
    return getIosCpu();
  }

  /**
   * List of all CPUs that this invocation is being built for. Different from {@link #getIosCpu()}
   * which is the specific CPU <b>this target</b> is being built for.
   */
  public ImmutableList<String> getIosMultiCpus() {
    return iosMultiCpus;
  }

  /**
   * Returns the label of the default provisioning profile to use when bundling/signing an ios
   * application. Returns null if the target platform is not an iOS device (for example, if
   * iOS simulator is being targeted).
   */
  @Nullable public Label getDefaultProvisioningProfileLabel() {
    return defaultProvisioningProfileLabel;
  }

  /**
   * Returns the bitcode mode to use for compilation steps. Users can control bitcode mode using the
   * {@code apple_bitcode} build flag.
   *
   * @see AppleBitcodeMode
   */
  @SkylarkCallable(
    name = "bitcode_mode",
    doc = "Returns the bitcode mode to use for compilation steps.",
    structField = true
  )
  public AppleBitcodeMode getBitcodeMode() {
    return bitcodeMode;
  }

  /**
   * Returns the label of the xcode_config rule to use for resolving the host system xcode version.
   */
  public Label getXcodeConfigLabel() {
    return xcodeConfigLabel;
  }

  /**
   * Returns the unique identifier distinguishing configurations that are otherwise the same.
   *
   * <p>Use this value for situations in which two configurations create two outputs that are the
   * same but are not collapsed due to their different configuration owners.
   */
  public ConfigurationDistinguisher getConfigurationDistinguisher() {
    return configurationDistinguisher;
  }

  // Builds a "<platform>-<cpu>-<distinguisher>" suffix for output paths, or null when no split
  // transition or distinguisher applies (the default output directory is then used).
  @Nullable
  @Override
  public String getOutputDirectoryName() {
    List<String> components = new ArrayList<>();
    if (!appleSplitCpu.isEmpty()) {
      components.add(applePlatformType.toString().toLowerCase());
      components.add(appleSplitCpu);
    }
    if (configurationDistinguisher != ConfigurationDistinguisher.UNKNOWN) {
      components.add(configurationDistinguisher.toString().toLowerCase(Locale.US));
    }

    if (components.isEmpty()) {
      return null;
    }
    return Joiner.on('-').join(components);
  }

  /** Returns the identifier for an Xcode toolchain to use with tools. */
  @SkylarkCallable(
    name = "xcode_toolchain",
    doc = "Identifier for the custom Xcode toolchain to use in build or None if not specified",
    allowReturnNones = true,
    structField = true
  )
  public String getXcodeToolchain() {
    return xcodeToolchain;
  }

  /**
   * Whether the native Swift support should be disabled. Used to deprecate said functionality.
   */
  public boolean disableNativeSwiftRules() {
    return disableNativeSwiftRules;
  }

  /**
   * Loads {@link AppleConfiguration} from build options.
   */
  public static class Loader implements ConfigurationFragmentFactory {
    @Override
    public AppleConfiguration create(ConfigurationEnvironment env, BuildOptions buildOptions)
        throws InvalidConfigurationException, InterruptedException {
      AppleCommandLineOptions appleOptions = buildOptions.get(AppleCommandLineOptions.class);
      XcodeVersionProperties xcodeVersionProperties = getXcodeVersionProperties(env, appleOptions);

      // Each SDK version: explicit flag wins, otherwise fall back to the xcode_config default.
      DottedVersion iosSdkVersion = (appleOptions.iosSdkVersion != null)
          ? appleOptions.iosSdkVersion : xcodeVersionProperties.getDefaultIosSdkVersion();
      // TODO(cparsons): Look into ios_minimum_os matching the defaulting behavior of the other
      // platforms.
      DottedVersion watchosSdkVersion = (appleOptions.watchOsSdkVersion != null)
          ? appleOptions.watchOsSdkVersion : xcodeVersionProperties.getDefaultWatchosSdkVersion();
      // Minimum OS defaults to the (resolved) SDK version when not given explicitly.
      DottedVersion watchosMinimumOsVersion = (appleOptions.watchosMinimumOs != null)
          ? appleOptions.watchosMinimumOs : watchosSdkVersion;
      DottedVersion tvosSdkVersion = (appleOptions.tvOsSdkVersion != null)
          ? appleOptions.tvOsSdkVersion : xcodeVersionProperties.getDefaultTvosSdkVersion();
      DottedVersion tvosMinimumOsVersion = (appleOptions.tvosMinimumOs != null)
          ? appleOptions.tvosMinimumOs : tvosSdkVersion;
      DottedVersion macosxSdkVersion = (appleOptions.macOsXSdkVersion != null)
          ? appleOptions.macOsXSdkVersion : xcodeVersionProperties.getDefaultMacosxSdkVersion();
      AppleConfiguration configuration =
          new AppleConfiguration(appleOptions, xcodeVersionProperties.getXcodeVersion(),
              iosSdkVersion, watchosSdkVersion, watchosMinimumOsVersion, tvosSdkVersion,
              tvosMinimumOsVersion, macosxSdkVersion);

      validate(configuration);
      return configuration;
    }

    // Rejects option combinations that cannot work, e.g. bitcode with a too-old xcode.
    private void validate(AppleConfiguration config)
        throws InvalidConfigurationException {
      Optional<DottedVersion> xcodeVersion = config.getXcodeVersion();
      if (config.getBitcodeMode() != AppleBitcodeMode.NONE
          && xcodeVersion.isPresent()
          && xcodeVersion.get().compareTo(MINIMUM_BITCODE_XCODE_VERSION) < 0) {
        throw new InvalidConfigurationException(
            String.format("apple_bitcode mode '%s' is unsupported for xcode version '%s'",
                config.getBitcodeMode(), xcodeVersion.get()));
      }
    }

    @Override
    public Class<? extends BuildConfiguration.Fragment> creates() {
      return AppleConfiguration.class;
    }

    @Override
    public ImmutableSet<Class<? extends FragmentOptions>> requiredOptions() {
      return ImmutableSet.<Class<? extends FragmentOptions>>of(AppleCommandLineOptions.class);
    }

    /**
     * Uses the {@link AppleCommandLineOptions#xcodeVersion} and {@link
     * AppleCommandLineOptions#xcodeVersionConfig} command line options to determine and return the
     * effective xcode version properties. Returns absent if no explicit xcode version is declared,
     * and host system defaults should be used.
     *
     * @param env the current configuration environment
     * @param appleOptions the command line options
     * @throws InvalidConfigurationException if the options given (or configuration targets) were
     *     malformed and thus the xcode version could not be determined
     */
    private static XcodeVersionProperties getXcodeVersionProperties(
        ConfigurationEnvironment env, AppleCommandLineOptions appleOptions)
        throws InvalidConfigurationException, InterruptedException {
      Optional<DottedVersion> xcodeVersionCommandLineFlag =
          Optional.fromNullable(appleOptions.xcodeVersion);
      Label xcodeVersionConfigLabel = appleOptions.xcodeVersionConfig;

      return XcodeConfig.resolveXcodeVersion(env, xcodeVersionConfigLabel,
          xcodeVersionCommandLineFlag, "xcode_version_config");
    }
  }

  /**
   * Value used to avoid multiple configurations from conflicting. No two instances of this
   * transition may exist with the same value in a single Bazel invocation.
   */
  public enum ConfigurationDistinguisher {
    UNKNOWN,
    /** Split transition distinguisher for {@code ios_extension} rule. */
    IOS_EXTENSION,
    /** Split transition distinguisher for {@code ios_application} rule. */
    IOS_APPLICATION,
    /** Split transition distinguisher for {@code ios_framework} rule. */
    FRAMEWORK,
    /** Split transition distinguisher for {@code apple_watch1_extension} rule. */
    WATCH_OS1_EXTENSION,
    /** Distinguisher for {@code apple_binary} rule with "ios" platform_type. */
    APPLEBIN_IOS,
    /** Distinguisher for {@code apple_binary} rule with "watchos" platform_type. */
    APPLEBIN_WATCHOS,
    /** Distinguisher for {@code apple_binary} rule with "tvos" platform_type. */
    APPLEBIN_TVOS,
  }
}
/* * Copyright (c) 1997, 2008, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.swing.plaf.basic; import javax.swing.*; import javax.swing.event.*; import javax.swing.plaf.*; import javax.swing.plaf.basic.*; import javax.swing.border.*; import java.applet.Applet; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.KeyboardFocusManager; import java.awt.Window; import java.awt.event.*; import java.awt.AWTEvent; import java.awt.Toolkit; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeEvent; import java.util.*; import sun.swing.DefaultLookup; import sun.swing.UIAction; import sun.awt.AppContext; /** * A basic L&F implementation of PopupMenuUI. This implementation * is a "combined" view/controller.
* * @author Georges Saab * @author David Karlton * @author Arnaud Weber */ public class BasicPopupMenuUI extends PopupMenuUI { static final StringBuilder MOUSE_GRABBER_KEY = new StringBuilder( "javax.swing.plaf.basic.BasicPopupMenuUI.MouseGrabber"); static final StringBuilder MENU_KEYBOARD_HELPER_KEY = new StringBuilder( "javax.swing.plaf.basic.BasicPopupMenuUI.MenuKeyboardHelper"); protected JPopupMenu popupMenu = null; private transient PopupMenuListener popupMenuListener = null; private MenuKeyListener menuKeyListener = null; private static boolean checkedUnpostPopup; private static boolean unpostPopup; public static ComponentUI createUI(JComponent x) { return new BasicPopupMenuUI(); } public BasicPopupMenuUI() { BasicLookAndFeel.needsEventHelper = true; LookAndFeel laf = UIManager.getLookAndFeel(); if (laf instanceof BasicLookAndFeel) { ((BasicLookAndFeel)laf).installAWTEventListener(); } } public void installUI(JComponent c) { popupMenu = (JPopupMenu) c; installDefaults(); installListeners(); installKeyboardActions(); } public void installDefaults() { if (popupMenu.getLayout() == null || popupMenu.getLayout() instanceof UIResource) popupMenu.setLayout(new DefaultMenuLayout(popupMenu, BoxLayout.Y_AXIS)); LookAndFeel.installProperty(popupMenu, "opaque", Boolean.TRUE); LookAndFeel.installBorder(popupMenu, "PopupMenu.border"); LookAndFeel.installColorsAndFont(popupMenu, "PopupMenu.background", "PopupMenu.foreground", "PopupMenu.font"); } protected void installListeners() { if (popupMenuListener == null) { popupMenuListener = new BasicPopupMenuListener(); } popupMenu.addPopupMenuListener(popupMenuListener); if (menuKeyListener == null) { menuKeyListener = new BasicMenuKeyListener(); } popupMenu.addMenuKeyListener(menuKeyListener); AppContext context = AppContext.getAppContext(); synchronized (MOUSE_GRABBER_KEY) { MouseGrabber mouseGrabber = (MouseGrabber)context.get( MOUSE_GRABBER_KEY); if (mouseGrabber == null) { mouseGrabber = new MouseGrabber(); 
context.put(MOUSE_GRABBER_KEY, mouseGrabber); } } synchronized (MENU_KEYBOARD_HELPER_KEY) { MenuKeyboardHelper helper = (MenuKeyboardHelper)context.get(MENU_KEYBOARD_HELPER_KEY); if (helper == null) { helper = new MenuKeyboardHelper(); context.put(MENU_KEYBOARD_HELPER_KEY, helper); MenuSelectionManager msm = MenuSelectionManager.defaultManager(); msm.addChangeListener(helper); } } } protected void installKeyboardActions() { } static InputMap getInputMap(JPopupMenu popup, JComponent c) { InputMap windowInputMap = null; Object[] bindings = (Object[])UIManager.get("PopupMenu.selectedWindowInputMapBindings"); if (bindings != null) { windowInputMap = LookAndFeel.makeComponentInputMap(c, bindings); if (!popup.getComponentOrientation().isLeftToRight()) { Object[] km = (Object[])UIManager.get("PopupMenu.selectedWindowInputMapBindings.RightToLeft"); if (km != null) { InputMap rightToLeftInputMap = LookAndFeel.makeComponentInputMap(c, km); rightToLeftInputMap.setParent(windowInputMap); windowInputMap = rightToLeftInputMap; } } } return windowInputMap; } static ActionMap getActionMap() { return LazyActionMap.getActionMap(BasicPopupMenuUI.class, "PopupMenu.actionMap"); } static void loadActionMap(LazyActionMap map) { map.put(new Actions(Actions.CANCEL)); map.put(new Actions(Actions.SELECT_NEXT)); map.put(new Actions(Actions.SELECT_PREVIOUS)); map.put(new Actions(Actions.SELECT_PARENT)); map.put(new Actions(Actions.SELECT_CHILD)); map.put(new Actions(Actions.RETURN)); BasicLookAndFeel.installAudioActionMap(map); } public void uninstallUI(JComponent c) { uninstallDefaults(); uninstallListeners(); uninstallKeyboardActions(); popupMenu = null; } protected void uninstallDefaults() { LookAndFeel.uninstallBorder(popupMenu); } protected void uninstallListeners() { if (popupMenuListener != null) { popupMenu.removePopupMenuListener(popupMenuListener); } if (menuKeyListener != null) { popupMenu.removeMenuKeyListener(menuKeyListener); } } protected void uninstallKeyboardActions() { 
SwingUtilities.replaceUIActionMap(popupMenu, null); SwingUtilities.replaceUIInputMap(popupMenu, JComponent.WHEN_IN_FOCUSED_WINDOW, null); } static MenuElement getFirstPopup() { MenuSelectionManager msm = MenuSelectionManager.defaultManager(); MenuElement[] p = msm.getSelectedPath(); MenuElement me = null; for(int i = 0 ; me == null && i < p.length ; i++) { if (p[i] instanceof JPopupMenu) me = p[i]; } return me; } static JPopupMenu getLastPopup() { MenuSelectionManager msm = MenuSelectionManager.defaultManager(); MenuElement[] p = msm.getSelectedPath(); JPopupMenu popup = null; for(int i = p.length - 1; popup == null && i >= 0; i--) { if (p[i] instanceof JPopupMenu) popup = (JPopupMenu)p[i]; } return popup; } static List<JPopupMenu> getPopups() { MenuSelectionManager msm = MenuSelectionManager.defaultManager(); MenuElement[] p = msm.getSelectedPath(); List<JPopupMenu> list = new ArrayList<JPopupMenu>(p.length); for (MenuElement element : p) { if (element instanceof JPopupMenu) { list.add((JPopupMenu) element); } } return list; } public boolean isPopupTrigger(MouseEvent e) { return ((e.getID()==MouseEvent.MOUSE_RELEASED) && ((e.getModifiers() & MouseEvent.BUTTON3_MASK)!=0)); } private static boolean checkInvokerEqual(MenuElement present, MenuElement last) { Component invokerPresent = present.getComponent(); Component invokerLast = last.getComponent(); if (invokerPresent instanceof JPopupMenu) { invokerPresent = ((JPopupMenu)invokerPresent).getInvoker(); } if (invokerLast instanceof JPopupMenu) { invokerLast = ((JPopupMenu)invokerLast).getInvoker(); } return (invokerPresent == invokerLast); } /** * This Listener fires the Action that provides the correct auditory * feedback. 
* * @since 1.4 */ private class BasicPopupMenuListener implements PopupMenuListener { public void popupMenuCanceled(PopupMenuEvent e) { } public void popupMenuWillBecomeInvisible(PopupMenuEvent e) { } public void popupMenuWillBecomeVisible(PopupMenuEvent e) { BasicLookAndFeel.playSound((JPopupMenu)e.getSource(), "PopupMenu.popupSound"); } } /** * Handles mnemonic for children JMenuItems. * @since 1.5 */ private class BasicMenuKeyListener implements MenuKeyListener { MenuElement menuToOpen = null; public void menuKeyTyped(MenuKeyEvent e) { if (menuToOpen != null) { // we have a submenu to open JPopupMenu subpopup = ((JMenu)menuToOpen).getPopupMenu(); MenuElement subitem = findEnabledChild( subpopup.getSubElements(), -1, true); ArrayList<MenuElement> lst = new ArrayList<MenuElement>(Arrays.asList(e.getPath())); lst.add(menuToOpen); lst.add(subpopup); if (subitem != null) { lst.add(subitem); } MenuElement newPath[] = new MenuElement[0]; newPath = lst.toArray(newPath); MenuSelectionManager.defaultManager().setSelectedPath(newPath); e.consume(); } menuToOpen = null; } public void menuKeyPressed(MenuKeyEvent e) { char keyChar = e.getKeyChar(); // Handle the case for Escape or Enter... if (!Character.isLetterOrDigit(keyChar)) { return; } MenuSelectionManager manager = e.getMenuSelectionManager(); MenuElement path[] = e.getPath(); MenuElement items[] = popupMenu.getSubElements(); int currentIndex = -1; int matches = 0; int firstMatch = -1; int indexes[] = null; for (int j = 0; j < items.length; j++) { if (! 
(items[j] instanceof JMenuItem)) { continue; } JMenuItem item = (JMenuItem)items[j]; int mnemonic = item.getMnemonic(); if (item.isEnabled() && item.isVisible() && lower(keyChar) == lower(mnemonic)) { if (matches == 0) { firstMatch = j; matches++; } else { if (indexes == null) { indexes = new int[items.length]; indexes[0] = firstMatch; } indexes[matches++] = j; } } if (item.isArmed() || item.isSelected()) { currentIndex = matches - 1; } } if (matches == 0) { // no op } else if (matches == 1) { // Invoke the menu action JMenuItem item = (JMenuItem)items[firstMatch]; if (item instanceof JMenu) { // submenus are handled in menuKeyTyped menuToOpen = item; } else if (item.isEnabled()) { // we have a menu item manager.clearSelectedPath(); item.doClick(); } e.consume(); } else { // Select the menu item with the matching mnemonic. If // the same mnemonic has been invoked then select the next // menu item in the cycle. MenuElement newItem; newItem = items[indexes[(currentIndex + 1) % matches]]; MenuElement newPath[] = new MenuElement[path.length+1]; System.arraycopy(path, 0, newPath, 0, path.length); newPath[path.length] = newItem; manager.setSelectedPath(newPath); e.consume(); } } public void menuKeyReleased(MenuKeyEvent e) { } private char lower(char keyChar) { return Character.toLowerCase(keyChar); } private char lower(int mnemonic) { return Character.toLowerCase((char) mnemonic); } } private static class Actions extends UIAction { // Types of actions private static final String CANCEL = "cancel"; private static final String SELECT_NEXT = "selectNext"; private static final String SELECT_PREVIOUS = "selectPrevious"; private static final String SELECT_PARENT = "selectParent"; private static final String SELECT_CHILD = "selectChild"; private static final String RETURN = "return"; // Used for next/previous actions private static final boolean FORWARD = true; private static final boolean BACKWARD = false; // Used for parent/child actions private static final boolean PARENT = 
false; private static final boolean CHILD = true; Actions(String key) { super(key); } public void actionPerformed(ActionEvent e) { String key = getName(); if (key == CANCEL) { cancel(); } else if (key == SELECT_NEXT) { selectItem(FORWARD); } else if (key == SELECT_PREVIOUS) { selectItem(BACKWARD); } else if (key == SELECT_PARENT) { selectParentChild(PARENT); } else if (key == SELECT_CHILD) { selectParentChild(CHILD); } else if (key == RETURN) { doReturn(); } } private void doReturn() { KeyboardFocusManager fmgr = KeyboardFocusManager.getCurrentKeyboardFocusManager(); Component focusOwner = fmgr.getFocusOwner(); if(focusOwner != null && !(focusOwner instanceof JRootPane)) { return; } MenuSelectionManager msm = MenuSelectionManager.defaultManager(); MenuElement path[] = msm.getSelectedPath(); MenuElement lastElement; if(path.length > 0) { lastElement = path[path.length-1]; if(lastElement instanceof JMenu) { MenuElement newPath[] = new MenuElement[path.length+1]; System.arraycopy(path,0,newPath,0,path.length); newPath[path.length] = ((JMenu)lastElement).getPopupMenu(); msm.setSelectedPath(newPath); } else if(lastElement instanceof JMenuItem) { JMenuItem mi = (JMenuItem)lastElement; if (mi.getUI() instanceof BasicMenuItemUI) { ((BasicMenuItemUI)mi.getUI()).doClick(msm); } else { msm.clearSelectedPath(); mi.doClick(0); } } } } private void selectParentChild(boolean direction) { MenuSelectionManager msm = MenuSelectionManager.defaultManager(); MenuElement path[] = msm.getSelectedPath(); int len = path.length; if (direction == PARENT) { // selecting parent int popupIndex = len-1; if (len > 2 && // check if we have an open submenu. A submenu item may or // may not be selected, so submenu popup can be either the // last or next to the last item. 
(path[popupIndex] instanceof JPopupMenu || path[--popupIndex] instanceof JPopupMenu) && !((JMenu)path[popupIndex-1]).isTopLevelMenu()) { // we have a submenu, just close it MenuElement newPath[] = new MenuElement[popupIndex]; System.arraycopy(path, 0, newPath, 0, popupIndex); msm.setSelectedPath(newPath); return; } } else { // selecting child if (len > 0 && path[len-1] instanceof JMenu && !((JMenu)path[len-1]).isTopLevelMenu()) { // we have a submenu, open it JMenu menu = (JMenu)path[len-1]; JPopupMenu popup = menu.getPopupMenu(); MenuElement[] subs = popup.getSubElements(); MenuElement item = findEnabledChild(subs, -1, true); MenuElement[] newPath; if (item == null) { newPath = new MenuElement[len+1]; } else { newPath = new MenuElement[len+2]; newPath[len+1] = item; } System.arraycopy(path, 0, newPath, 0, len); newPath[len] = popup; msm.setSelectedPath(newPath); return; } } // check if we have a toplevel menu selected. // If this is the case, we select another toplevel menu if (len > 1 && path[0] instanceof JMenuBar) { MenuElement currentMenu = path[1]; MenuElement nextMenu = findEnabledChild( path[0].getSubElements(), currentMenu, direction); if (nextMenu != null && nextMenu != currentMenu) { MenuElement newSelection[]; if (len == 2) { // menu is selected but its popup not shown newSelection = new MenuElement[2]; newSelection[0] = path[0]; newSelection[1] = nextMenu; } else { // menu is selected and its popup is shown newSelection = new MenuElement[3]; newSelection[0] = path[0]; newSelection[1] = nextMenu; newSelection[2] = ((JMenu)nextMenu).getPopupMenu(); } msm.setSelectedPath(newSelection); } } } private void selectItem(boolean direction) { MenuSelectionManager msm = MenuSelectionManager.defaultManager(); MenuElement path[] = msm.getSelectedPath(); if (path.length == 0) { return; } int len = path.length; if (len == 1 && path[0] instanceof JPopupMenu) { JPopupMenu popup = (JPopupMenu) path[0]; MenuElement[] newPath = new MenuElement[2]; newPath[0] = popup; 
newPath[1] = findEnabledChild(popup.getSubElements(), -1, direction); msm.setSelectedPath(newPath); } else if (len == 2 && path[0] instanceof JMenuBar && path[1] instanceof JMenu) { // a toplevel menu is selected, but its popup not shown. // Show the popup and select the first item JPopupMenu popup = ((JMenu)path[1]).getPopupMenu(); MenuElement next = findEnabledChild(popup.getSubElements(), -1, FORWARD); MenuElement[] newPath; if (next != null) { // an enabled item found -- include it in newPath newPath = new MenuElement[4]; newPath[3] = next; } else { // menu has no enabled items -- still must show the popup newPath = new MenuElement[3]; } System.arraycopy(path, 0, newPath, 0, 2); newPath[2] = popup; msm.setSelectedPath(newPath); } else if (path[len-1] instanceof JPopupMenu && path[len-2] instanceof JMenu) { // a menu (not necessarily toplevel) is open and its popup // shown. Select the appropriate menu item JMenu menu = (JMenu)path[len-2]; JPopupMenu popup = menu.getPopupMenu(); MenuElement next = findEnabledChild(popup.getSubElements(), -1, direction); if (next != null) { MenuElement[] newPath = new MenuElement[len+1]; System.arraycopy(path, 0, newPath, 0, len); newPath[len] = next; msm.setSelectedPath(newPath); } else { // all items in the popup are disabled. // We're going to find the parent popup menu and select // its next item. If there's no parent popup menu (i.e. 
// current menu is toplevel), do nothing if (len > 2 && path[len-3] instanceof JPopupMenu) { popup = ((JPopupMenu)path[len-3]); next = findEnabledChild(popup.getSubElements(), menu, direction); if (next != null && next != menu) { MenuElement[] newPath = new MenuElement[len-1]; System.arraycopy(path, 0, newPath, 0, len-2); newPath[len-2] = next; msm.setSelectedPath(newPath); } } } } else { // just select the next item, no path expansion needed MenuElement subs[] = path[len-2].getSubElements(); MenuElement nextChild = findEnabledChild(subs, path[len-1], direction); if (nextChild == null) { nextChild = findEnabledChild(subs, -1, direction); } if (nextChild != null) { path[len-1] = nextChild; msm.setSelectedPath(path); } } } private void cancel() { // 4234793: This action should call JPopupMenu.firePopupMenuCanceled but it's // a protected method. The real solution could be to make // firePopupMenuCanceled public and call it directly. JPopupMenu lastPopup = getLastPopup(); if (lastPopup != null) { lastPopup.putClientProperty("JPopupMenu.firePopupMenuCanceled", Boolean.TRUE); } String mode = UIManager.getString("Menu.cancelMode"); if ("hideMenuTree".equals(mode)) { MenuSelectionManager.defaultManager().clearSelectedPath(); } else { shortenSelectedPath(); } } private void shortenSelectedPath() { MenuElement path[] = MenuSelectionManager.defaultManager().getSelectedPath(); if (path.length <= 2) { MenuSelectionManager.defaultManager().clearSelectedPath(); return; } // unselect MenuItem and its Popup by default int value = 2; MenuElement lastElement = path[path.length - 1]; JPopupMenu lastPopup = getLastPopup(); if (lastElement == lastPopup) { MenuElement previousElement = path[path.length - 2]; if (previousElement instanceof JMenu) { JMenu lastMenu = (JMenu) previousElement; if (lastMenu.isEnabled() && lastPopup.getComponentCount() > 0) { // unselect the last visible popup only value = 1; } else { // unselect invisible popup and two visible elements value = 3; } } } if 
(path.length - value <= 2 && !UIManager.getBoolean("Menu.preserveTopLevelSelection")) { // clear selection for the topLevelMenu value = path.length; } MenuElement newPath[] = new MenuElement[path.length - value]; System.arraycopy(path, 0, newPath, 0, path.length - value); MenuSelectionManager.defaultManager().setSelectedPath(newPath); } } private static MenuElement nextEnabledChild(MenuElement e[], int fromIndex, int toIndex) { for (int i=fromIndex; i<=toIndex; i++) { if (e[i] != null) { Component comp = e[i].getComponent(); if ( comp != null && (comp.isEnabled() || UIManager.getBoolean("MenuItem.disabledAreNavigable")) && comp.isVisible()) { return e[i]; } } } return null; } private static MenuElement previousEnabledChild(MenuElement e[], int fromIndex, int toIndex) { for (int i=fromIndex; i>=toIndex; i--) { if (e[i] != null) { Component comp = e[i].getComponent(); if ( comp != null && (comp.isEnabled() || UIManager.getBoolean("MenuItem.disabledAreNavigable")) && comp.isVisible()) { return e[i]; } } } return null; } static MenuElement findEnabledChild(MenuElement e[], int fromIndex, boolean forward) { MenuElement result; if (forward) { result = nextEnabledChild(e, fromIndex+1, e.length-1); if (result == null) result = nextEnabledChild(e, 0, fromIndex-1); } else { result = previousEnabledChild(e, fromIndex-1, 0); if (result == null) result = previousEnabledChild(e, e.length-1, fromIndex+1); } return result; } static MenuElement findEnabledChild(MenuElement e[], MenuElement elem, boolean forward) { for (int i=0; i<e.length; i++) { if (e[i] == elem) { return findEnabledChild(e, i, forward); } } return null; } static class MouseGrabber implements ChangeListener, AWTEventListener, ComponentListener, WindowListener { Window grabbedWindow; MenuElement[] lastPathSelected; public MouseGrabber() { MenuSelectionManager msm = MenuSelectionManager.defaultManager(); msm.addChangeListener(this); this.lastPathSelected = msm.getSelectedPath(); if(this.lastPathSelected.length != 0) 
{ grabWindow(this.lastPathSelected); } } void uninstall() { synchronized (MOUSE_GRABBER_KEY) { MenuSelectionManager.defaultManager().removeChangeListener(this); ungrabWindow(); AppContext.getAppContext().remove(MOUSE_GRABBER_KEY); } } void grabWindow(MenuElement[] newPath) { // A grab needs to be added final Toolkit tk = Toolkit.getDefaultToolkit(); java.security.AccessController.doPrivileged( new java.security.PrivilegedAction<Object>() { public Object run() { tk.addAWTEventListener(MouseGrabber.this, AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_MOTION_EVENT_MASK | AWTEvent.MOUSE_WHEEL_EVENT_MASK | AWTEvent.WINDOW_EVENT_MASK | sun.awt.SunToolkit.GRAB_EVENT_MASK); return null; } } ); Component invoker = newPath[0].getComponent(); if (invoker instanceof JPopupMenu) { invoker = ((JPopupMenu)invoker).getInvoker(); } grabbedWindow = invoker instanceof Window? (Window)invoker : SwingUtilities.getWindowAncestor(invoker); if(grabbedWindow != null) { if(tk instanceof sun.awt.SunToolkit) { ((sun.awt.SunToolkit)tk).grab(grabbedWindow); } else { grabbedWindow.addComponentListener(this); grabbedWindow.addWindowListener(this); } } } void ungrabWindow() { final Toolkit tk = Toolkit.getDefaultToolkit(); // The grab should be removed java.security.AccessController.doPrivileged( new java.security.PrivilegedAction<Object>() { public Object run() { tk.removeAWTEventListener(MouseGrabber.this); return null; } } ); realUngrabWindow(); } void realUngrabWindow() { Toolkit tk = Toolkit.getDefaultToolkit(); if(grabbedWindow != null) { if(tk instanceof sun.awt.SunToolkit) { ((sun.awt.SunToolkit)tk).ungrab(grabbedWindow); } else { grabbedWindow.removeComponentListener(this); grabbedWindow.removeWindowListener(this); } grabbedWindow = null; } } public void stateChanged(ChangeEvent e) { MenuSelectionManager msm = MenuSelectionManager.defaultManager(); MenuElement[] p = msm.getSelectedPath(); if (lastPathSelected.length == 0 && p.length != 0) { grabWindow(p); } if (lastPathSelected.length != 0 && 
p.length == 0) { ungrabWindow(); } lastPathSelected = p; } public void eventDispatched(AWTEvent ev) { if(ev instanceof sun.awt.UngrabEvent) { // Popup should be canceled in case of ungrab event cancelPopupMenu( ); return; } if (!(ev instanceof MouseEvent)) { // We are interested in MouseEvents only return; } MouseEvent me = (MouseEvent) ev; Component src = me.getComponent(); switch (me.getID()) { case MouseEvent.MOUSE_PRESSED: if (isInPopup(src) || (src instanceof JMenu && ((JMenu)src).isSelected())) { return; } if (!(src instanceof JComponent) || ! (((JComponent)src).getClientProperty("doNotCancelPopup") == BasicComboBoxUI.HIDE_POPUP_KEY)) { // Cancel popup only if this property was not set. // If this property is set to TRUE component wants // to deal with this event by himself. cancelPopupMenu(); // Ask UIManager about should we consume event that closes // popup. This made to match native apps behaviour. boolean consumeEvent = UIManager.getBoolean("PopupMenu.consumeEventOnClose"); // Consume the event so that normal processing stops. if(consumeEvent && !(src instanceof MenuElement)) { me.consume(); } } break; case MouseEvent.MOUSE_RELEASED: if(!(src instanceof MenuElement)) { // Do not forward event to MSM, let component handle it if (isInPopup(src)) { break; } } if(src instanceof JMenu || !(src instanceof JMenuItem)) { MenuSelectionManager.defaultManager(). processMouseEvent(me); } break; case MouseEvent.MOUSE_DRAGGED: if(!(src instanceof MenuElement)) { // For the MOUSE_DRAGGED event the src is // the Component in which mouse button was pressed. // If the src is in popupMenu, // do not forward event to MSM, let component handle it. if (isInPopup(src)) { break; } } MenuSelectionManager.defaultManager(). 
processMouseEvent(me); break; case MouseEvent.MOUSE_WHEEL: if (isInPopup(src)) { return; } cancelPopupMenu(); break; } } boolean isInPopup(Component src) { for (Component c=src; c!=null; c=c.getParent()) { if (c instanceof Applet || c instanceof Window) { break; } else if (c instanceof JPopupMenu) { return true; } } return false; } void cancelPopupMenu() { // We should ungrab window if a user code throws // an unexpected runtime exception. See 6495920. try { // 4234793: This action should call firePopupMenuCanceled but it's // a protected method. The real solution could be to make // firePopupMenuCanceled public and call it directly. List<JPopupMenu> popups = getPopups(); for (JPopupMenu popup : popups) { popup.putClientProperty("JPopupMenu.firePopupMenuCanceled", Boolean.TRUE); } MenuSelectionManager.defaultManager().clearSelectedPath(); } catch (RuntimeException ex) { realUngrabWindow(); throw ex; } catch (Error err) { realUngrabWindow(); throw err; } } public void componentResized(ComponentEvent e) { cancelPopupMenu(); } public void componentMoved(ComponentEvent e) { cancelPopupMenu(); } public void componentShown(ComponentEvent e) { cancelPopupMenu(); } public void componentHidden(ComponentEvent e) { cancelPopupMenu(); } public void windowClosing(WindowEvent e) { cancelPopupMenu(); } public void windowClosed(WindowEvent e) { cancelPopupMenu(); } public void windowIconified(WindowEvent e) { cancelPopupMenu(); } public void windowDeactivated(WindowEvent e) { cancelPopupMenu(); } public void windowOpened(WindowEvent e) {} public void windowDeiconified(WindowEvent e) {} public void windowActivated(WindowEvent e) {} } /** * This helper is added to MenuSelectionManager as a ChangeListener to * listen to menu selection changes. When a menu is activated, it passes * focus to its parent JRootPane, and installs an ActionMap/InputMap pair * on that JRootPane. Those maps are necessary in order for menu * navigation to work. 
When menu is being deactivated, it restores focus * to the component that has had it before menu activation, and uninstalls * the maps. * This helper is also installed as a KeyListener on root pane when menu * is active. It forwards key events to MenuSelectionManager for mnemonic * keys handling. */ static class MenuKeyboardHelper implements ChangeListener, KeyListener { private Component lastFocused = null; private MenuElement[] lastPathSelected = new MenuElement[0]; private JPopupMenu lastPopup; private JRootPane invokerRootPane; private ActionMap menuActionMap = getActionMap(); private InputMap menuInputMap; private boolean focusTraversalKeysEnabled; /* * Fix for 4213634 * If this is false, KEY_TYPED and KEY_RELEASED events are NOT * processed. This is needed to avoid activating a menuitem when * the menu and menuitem share the same mnemonic. */ private boolean receivedKeyPressed = false; void removeItems() { if (lastFocused != null) { if(!lastFocused.requestFocusInWindow()) { // Workaround for 4810575. // If lastFocused is not in currently focused window // requestFocusInWindow will fail. In this case we must // request focus by requestFocus() if it was not // transferred from our popup.
Window cfw = KeyboardFocusManager .getCurrentKeyboardFocusManager() .getFocusedWindow(); if(cfw != null && "###focusableSwingPopup###".equals(cfw.getName())) { lastFocused.requestFocus(); } } lastFocused = null; } if (invokerRootPane != null) { invokerRootPane.removeKeyListener(this); invokerRootPane.setFocusTraversalKeysEnabled(focusTraversalKeysEnabled); removeUIInputMap(invokerRootPane, menuInputMap); removeUIActionMap(invokerRootPane, menuActionMap); invokerRootPane = null; } receivedKeyPressed = false; } private FocusListener rootPaneFocusListener = new FocusAdapter() { public void focusGained(FocusEvent ev) { Component opposite = ev.getOppositeComponent(); if (opposite != null) { lastFocused = opposite; } ev.getComponent().removeFocusListener(this); } }; /** * Return the last JPopupMenu in <code>path</code>, * or <code>null</code> if none found */ JPopupMenu getActivePopup(MenuElement[] path) { for (int i=path.length-1; i>=0; i--) { MenuElement elem = path[i]; if (elem instanceof JPopupMenu) { return (JPopupMenu)elem; } } return null; } void addUIInputMap(JComponent c, InputMap map) { InputMap lastNonUI = null; InputMap parent = c.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW); while (parent != null && !(parent instanceof UIResource)) { lastNonUI = parent; parent = parent.getParent(); } if (lastNonUI == null) { c.setInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW, map); } else { lastNonUI.setParent(map); } map.setParent(parent); } void addUIActionMap(JComponent c, ActionMap map) { ActionMap lastNonUI = null; ActionMap parent = c.getActionMap(); while (parent != null && !(parent instanceof UIResource)) { lastNonUI = parent; parent = parent.getParent(); } if (lastNonUI == null) { c.setActionMap(map); } else { lastNonUI.setParent(map); } map.setParent(parent); } void removeUIInputMap(JComponent c, InputMap map) { InputMap im = null; InputMap parent = c.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW); while (parent != null) { if (parent == map) { if (im == null) { 
c.setInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW, map.getParent()); } else { im.setParent(map.getParent()); } break; } im = parent; parent = parent.getParent(); } } void removeUIActionMap(JComponent c, ActionMap map) { ActionMap im = null; ActionMap parent = c.getActionMap(); while (parent != null) { if (parent == map) { if (im == null) { c.setActionMap(map.getParent()); } else { im.setParent(map.getParent()); } break; } im = parent; parent = parent.getParent(); } } public void stateChanged(ChangeEvent ev) { if (!(UIManager.getLookAndFeel() instanceof BasicLookAndFeel)) { uninstall(); return; } MenuSelectionManager msm = (MenuSelectionManager)ev.getSource(); MenuElement[] p = msm.getSelectedPath(); JPopupMenu popup = getActivePopup(p); if (popup != null && !popup.isFocusable()) { // Do nothing for non-focusable popups return; } if (lastPathSelected.length != 0 && p.length != 0 ) { if (!checkInvokerEqual(p[0],lastPathSelected[0])) { removeItems(); lastPathSelected = new MenuElement[0]; } } if (lastPathSelected.length == 0 && p.length > 0) { // menu posted JComponent invoker; if (popup == null) { if (p.length == 2 && p[0] instanceof JMenuBar && p[1] instanceof JMenu) { // a menu has been selected but not open invoker = (JComponent)p[1]; popup = ((JMenu)invoker).getPopupMenu(); } else { return; } } else { Component c = popup.getInvoker(); if(c instanceof JFrame) { invoker = ((JFrame)c).getRootPane(); } else if(c instanceof JDialog) { invoker = ((JDialog)c).getRootPane(); } else if(c instanceof JApplet) { invoker = ((JApplet)c).getRootPane(); } else { while (!(c instanceof JComponent)) { if (c == null) { return; } c = c.getParent(); } invoker = (JComponent)c; } } // remember current focus owner lastFocused = KeyboardFocusManager. 
getCurrentKeyboardFocusManager().getFocusOwner(); // request focus on root pane and install keybindings // used for menu navigation invokerRootPane = SwingUtilities.getRootPane(invoker); if (invokerRootPane != null) { invokerRootPane.addFocusListener(rootPaneFocusListener); invokerRootPane.requestFocus(true); invokerRootPane.addKeyListener(this); focusTraversalKeysEnabled = invokerRootPane. getFocusTraversalKeysEnabled(); invokerRootPane.setFocusTraversalKeysEnabled(false); menuInputMap = getInputMap(popup, invokerRootPane); addUIInputMap(invokerRootPane, menuInputMap); addUIActionMap(invokerRootPane, menuActionMap); } } else if (lastPathSelected.length != 0 && p.length == 0) { // menu hidden -- return focus to where it had been before // and uninstall menu keybindings removeItems(); } else { if (popup != lastPopup) { receivedKeyPressed = false; } } // Remember the last path selected lastPathSelected = p; lastPopup = popup; } public void keyPressed(KeyEvent ev) { receivedKeyPressed = true; MenuSelectionManager.defaultManager().processKeyEvent(ev); } public void keyReleased(KeyEvent ev) { if (receivedKeyPressed) { receivedKeyPressed = false; MenuSelectionManager.defaultManager().processKeyEvent(ev); } } public void keyTyped(KeyEvent ev) { if (receivedKeyPressed) { MenuSelectionManager.defaultManager().processKeyEvent(ev); } } void uninstall() { synchronized (MENU_KEYBOARD_HELPER_KEY) { MenuSelectionManager.defaultManager().removeChangeListener(this); AppContext.getAppContext().remove(MENU_KEYBOARD_HELPER_KEY); } } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
// NOTE(review): generated code — avoid hand-editing logic; regenerate from the
// service specification instead.

package com.azure.resourcemanager.storagecache.implementation;

import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.storagecache.fluent.SkusClient;
import com.azure.resourcemanager.storagecache.fluent.models.ResourceSkuInner;
import com.azure.resourcemanager.storagecache.models.ResourceSkusResult;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in SkusClient. */
public final class SkusClientImpl implements SkusClient {
    // NOTE(review): appears unused within this class; presumably emitted by the
    // code generator for uniformity across operation classes.
    private final ClientLogger logger = new ClientLogger(SkusClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final SkusService service;

    /** The service client containing this operation class. */
    private final StorageCacheManagementClientImpl client;

    /**
     * Initializes an instance of SkusClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    SkusClientImpl(StorageCacheManagementClientImpl client) {
        // RestProxy generates an implementation of SkusService that issues the
        // annotated HTTP requests through the client's pipeline.
        this.service = RestProxy.create(SkusService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for StorageCacheManagementClientSkus to be used by the proxy service to
     * perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "StorageCacheManageme")
    private interface SkusService {
        @Headers({"Content-Type: application/json"})
        @Get("/subscriptions/{subscriptionId}/providers/Microsoft.StorageCache/skus")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<ResourceSkusResult>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @HeaderParam("Accept") String accept,
            Context context);

        // Follows an opaque continuation link returned by the previous page;
        // "encoded = true" prevents double URL-encoding of the link.
        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<ResourceSkusResult>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Get the list of StorageCache.Cache SKUs available to this subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of StorageCache.Cache SKUs available to this subscription.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ResourceSkuInner>> listSinglePageAsync() {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            accept,
                            context))
            // Adapt the raw REST response into a PagedResponse carrying the SKU
            // items plus the continuation link for the next page.
            .<PagedResponse<ResourceSkuInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Merge the management client's context into the Reactor subscriber
            // context so pipeline policies can observe it.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the list of StorageCache.Cache SKUs available to this subscription.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of StorageCache.Cache SKUs available to this subscription.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ResourceSkuInner>> listSinglePageAsync(Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Combine the caller-supplied context with the client's default context.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Get the list of StorageCache.Cache SKUs available to this subscription.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of StorageCache.Cache SKUs available to this subscription.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<ResourceSkuInner> listAsync() {
        // First page via listSinglePageAsync; subsequent pages follow nextLink.
        return new PagedFlux<>(() -> listSinglePageAsync(), nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * Get the list of StorageCache.Cache SKUs available to this subscription.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of StorageCache.Cache SKUs available to this subscription.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<ResourceSkuInner> listAsync(Context context) {
        return new PagedFlux<>(
            () -> listSinglePageAsync(context), nextLink -> listNextSinglePageAsync(nextLink, context));
    }

    /**
     * Get the list of StorageCache.Cache SKUs available to this subscription.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of StorageCache.Cache SKUs available to this subscription.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<ResourceSkuInner> list() {
        // Blocking iterable wrapper over the async paged stream.
        return new PagedIterable<>(listAsync());
    }

    /**
     * Get the list of StorageCache.Cache SKUs available to this subscription.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of StorageCache.Cache SKUs available to this subscription.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<ResourceSkuInner> list(Context context) {
        return new PagedIterable<>(listAsync(context));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response from the List Cache SKUs operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ResourceSkuInner>> listNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<ResourceSkuInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response from the List Cache SKUs operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ResourceSkuInner>> listNextSinglePageAsync(String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
/**
 * Licensed to Jasig under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Jasig licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.portal.portlets.directory;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.Resource;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.Event;
import javax.portlet.EventRequest;
import javax.portlet.EventResponse;
import javax.portlet.PortletPreferences;
import javax.portlet.PortletRequest;
import javax.portlet.RenderRequest;
import javax.portlet.WindowState;
import javax.portlet.WindowStateException;
import javax.servlet.http.HttpServletRequest;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.portlet.container.properties.ThemeNameRequestPropertiesManager;
import org.jasig.portal.portlets.lookup.PersonLookupHelperImpl;
import org.jasig.portal.portlets.search.DirectoryAttributeType;
import org.jasig.portal.search.PortletUrl;
import org.jasig.portal.search.PortletUrlParameter;
import org.jasig.portal.search.PortletUrlType;
import org.jasig.portal.search.SearchConstants;
import org.jasig.portal.search.SearchRequest;
import org.jasig.portal.search.SearchResult;
import org.jasig.portal.search.SearchResults;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.security.IPersonManager;
import org.jasig.portal.url.IPortalRequestUtils;
import org.jasig.services.persondir.IPersonAttributes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.portlet.ModelAndView;
import org.springframework.web.portlet.bind.annotation.ActionMapping;
import org.springframework.web.portlet.bind.annotation.EventMapping;
import org.springframework.web.portlet.bind.annotation.RenderMapping;

/**
 * Portlet controller for the directory-search portlet: renders the search UI,
 * executes directory lookups scoped to the requesting user's permissions, and
 * answers portal-wide search events with directory matches.
 */
@Controller
@RequestMapping("VIEW")
public class DirectoryPortletController {

    /** Preference controlling whether submitting a search maximizes the portlet window. */
    private static final String MAXIMIZE_ON_SEARCH_PREFERENCE =
            "DirectoryPortletController.maximizeOnSearch";

    protected final Log log = LogFactory.getLog(getClass());

    private IPortalRequestUtils portalRequestUtils;

    @Autowired(required = true)
    public void setPortalRequestUtils(IPortalRequestUtils portalRequestUtils) {
        this.portalRequestUtils = portalRequestUtils;
    }

    private IPersonManager personManager;

    @Autowired(required = true)
    public void setPersonManager(IPersonManager personManager) {
        this.personManager = personManager;
    }

    private PersonLookupHelperImpl lookupHelper;

    @Autowired(required = true)
    public void setPersonLookupHelper(PersonLookupHelperImpl lookupHelper) {
        this.lookupHelper = lookupHelper;
    }

    private Map<String, DirectoryAttributeType> displayAttributes;

    @Resource(name = "directoryDisplayAttributes")
    public void setDirectoryDisplayAttributes(Map<String, DirectoryAttributeType> attributes) {
        this.displayAttributes = attributes;
    }

    private List<String> directoryQueryAttributes;

    @Resource(name = "directoryQueryAttributes")
    public void setDirectoryQueryAttributes(List<String> attributes) {
        this.directoryQueryAttributes = attributes;
    }

    /**
     * Handle a portal search event by querying the directory and, when there
     * are matches, firing a search-results event back to the portal. When no
     * people match, no response event is published.
     *
     * @param request the portlet event request carrying a {@link SearchRequest}
     * @param response the event response used to publish results
     */
    @EventMapping(SearchConstants.SEARCH_REQUEST_QNAME_STRING)
    public void search2(EventRequest request, EventResponse response) {
        // The search query travels as the payload of the portal event.
        final Event event = request.getEvent();
        final SearchRequest searchRequest = (SearchRequest) event.getValue();

        // Directory lookup is scoped to the requesting user's permissions.
        final List<IPersonAttributes> people =
                searchDirectory(searchRequest.getSearchTerms(), request);
        if (people.isEmpty()) {
            return;
        }

        // Translate the directory matches into the generic search-results shape.
        final SearchResults results = new SearchResults();
        results.setQueryId(searchRequest.getQueryId());
        results.setWindowId(request.getWindowID());
        for (final IPersonAttributes person : people) {
            results.getSearchResult().add(toSearchResult(person));
        }

        response.setEvent(SearchConstants.SEARCH_RESULTS_QNAME, results);
    }

    /**
     * Convert one directory match into a {@link SearchResult} whose URL deep-links
     * back to this portlet's findByUsername view for that person.
     *
     * @param person the matched directory entry
     * @return a populated search result
     */
    private SearchResult toSearchResult(IPersonAttributes person) {
        final SearchResult result = new SearchResult();
        result.setTitle((String) person.getAttributeValue("displayName"));
        result.getType().add("Directory");

        final PortletUrl url = new PortletUrl();
        url.setType(PortletUrlType.RENDER);
        url.setPortletMode("VIEW");
        url.setWindowState("maximized");

        final PortletUrlParameter actionParam = new PortletUrlParameter();
        actionParam.setName("action");
        actionParam.getValue().add("findByUsername");
        url.getParam().add(actionParam);

        final PortletUrlParameter usernameParam = new PortletUrlParameter();
        usernameParam.setName("username");
        usernameParam.getValue().add(person.getName());
        url.getParam().add(usernameParam);

        result.setPortletUrl(url);
        return result;
    }

    /**
     * Default render mapping: show the search form, and when a query render
     * parameter is present also show its results.
     *
     * @param request the render request
     * @param query optional search string; null renders the empty form
     * @return the directory view (mobile or desktop) with the search model
     */
    @RenderMapping
    public ModelAndView search(RenderRequest request,
            @RequestParam(value = "query", required = false) String query) {
        final Map<String, Object> model = new HashMap<String, Object>();
        if (query != null) {
            model.put("query", query);
            model.put("people", searchDirectory(query, request));
            model.put("attributeNames", this.displayAttributes);
        }
        return new ModelAndView(selectViewName(request), model);
    }

    /**
     * Render the detail view for a single person identified by username
     * (the target of the deep link generated in {@link #toSearchResult}).
     *
     * @param request the render request
     * @param username the username to look up
     * @return the directory view populated with the single match
     */
    @RenderMapping(params = "action=findByUsername")
    public ModelAndView findPersonByUsername(RenderRequest request, @RequestParam String username) {
        // Resolve the requesting user so the lookup respects their permissions.
        final HttpServletRequest servletRequest = portalRequestUtils.getPortletHttpRequest(request);
        final IPerson currentUser = personManager.getPerson(servletRequest);

        final IPersonAttributes person = this.lookupHelper.findPerson(currentUser, username);

        final Map<String, Object> model = new HashMap<String, Object>();
        model.put("query", username);
        model.put("people", Collections.singletonList(person));
        model.put("attributeNames", this.displayAttributes);
        return new ModelAndView(selectViewName(request), model);
    }

    /**
     * Action phase for a search submission: optionally maximize the window
     * (per portlet preference, default true) and carry the query forward as a
     * render parameter.
     *
     * @param request the action request
     * @param response the action response
     * @param query optional search string to forward to the render phase
     */
    @ActionMapping
    public void submitSearch(ActionRequest request, ActionResponse response,
            @RequestParam(value = "query", required = false) String query) {
        final PortletPreferences prefs = request.getPreferences();
        final boolean maximize =
                Boolean.parseBoolean(prefs.getValue(MAXIMIZE_ON_SEARCH_PREFERENCE, "true"));
        if (maximize) {
            try {
                response.setWindowState(WindowState.MAXIMIZED);
            } catch (WindowStateException e) {
                log.warn("Failed to set the window state to MAXIMIZED", e);
            }
        }

        if (query != null) {
            response.setRenderParameter("query", query);
        }
    }

    /**
     * Expose the lookup helper's result cap to the views.
     *
     * @return maximum number of results the lookup helper will return
     */
    @ModelAttribute("maxResults")
    public int getMaxResults() {
        return lookupHelper.getMaxResults();
    }

    /**
     * Search the directory for people matching the search query. Search results
     * will be scoped to the permissions of the user performing the search.
     *
     * @param query the raw search string, applied to every configured query attribute
     * @param request the current portlet request
     * @return people matching the query, filtered by the caller's permissions
     */
    protected List<IPersonAttributes> searchDirectory(String query, PortletRequest request) {
        // Apply the same query string to each configured directory attribute.
        final Map<String, Object> queryAttributes = new HashMap<String, Object>();
        for (final String attributeName : directoryQueryAttributes) {
            queryAttributes.put(attributeName, query);
        }

        final HttpServletRequest servletRequest = portalRequestUtils.getPortletHttpRequest(request);
        final IPerson currentUser = personManager.getPerson(servletRequest);

        return this.lookupHelper.searchForPeople(currentUser, queryAttributes);
    }

    /**
     * Choose the JSP view name appropriate for the requesting theme.
     *
     * @param request the current portlet request
     * @return the mobile or desktop directory view name
     */
    private String selectViewName(PortletRequest request) {
        return isMobile(request) ? "/jsp/Directory/mobileDirectory" : "/jsp/Directory/directory";
    }

    /**
     * Determine if this should be a mobile view.
     *
     * @param request the current portlet request
     * @return true when the active theme is the mobile theme
     */
    protected boolean isMobile(PortletRequest request) {
        final String themeName =
                request.getProperty(ThemeNameRequestPropertiesManager.THEME_NAME_PROPERTY);
        return "UniversalityMobile".equals(themeName);
    }
}
package com.gerken.xaa.mpe.editor;

import java.util.ArrayList;
import java.util.Iterator;

import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.forms.FormColors;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.Section;
import org.eclipse.ui.forms.widgets.TableWrapData;
import org.eclipse.ui.forms.widgets.TableWrapLayout;
import org.w3c.dom.Node;

import com.gerken.xaa.mpe.constraint.Constraint;
import com.gerken.xaa.mpe.constraint.ConstraintFailure;
import com.gerken.xaa.mpe.constraint.ConstraintSet;
import com.gerken.xaa.mpe.constraint.IConstraintListener;
import com.gerken.xaa.mpe.core.AbstractDetailsSection;
import com.gerken.xaa.mpe.core.AbstractFormPage;
import com.gerken.xaa.mpe.core.ModelAccess;
import com.gerken.xaa.mpe.core.SectionMessageAreaComposite;

/**
 * Form-editor details section exposing the four boolean file-generation
 * options (replace, binary, changeableName, purposeAsTokenName) of the
 * currently selected file node, keeping the underlying XML attributes and
 * constraint-violation messages in sync with the checkboxes.
 *
 * Refactored: the four previously copy-pasted label/checkbox/listener blocks
 * are built by the shared {@link #createCheckboxRow} and
 * {@link #installToggleListener} helpers.
 */
public class FileDetailsSection extends AbstractDetailsSection implements IConstraintListener {

    /** Callback letting the shared toggle handler store a widget's new value in the right field. */
    private interface ValueSink {
        void set(String value);
    }

    private Button widget_replace;
    private String value_replace = "";
    private Button widget_binary;
    private String value_binary = "";
    private Button widget_changeableName;
    private String value_changeableName = "";
    private Button widget_purposeAsTokenName;
    private String value_purposeAsTokenName = "";

    // Guards against listener feedback while loadFrom() programmatically
    // updates the widgets.
    private boolean _loading = false;

    // In-section banner area used to surface the first matching constraint failure.
    private SectionMessageAreaComposite mcomp;

    public FileDetailsSection(AbstractFormPage page, Composite parent) {
        super(page, parent);
    }

    /**
     * Build the section UI: a message area followed by one labelled checkbox
     * row per file-generation option, then register for constraint updates.
     *
     * @param section the enclosing form section
     * @param toolkit toolkit used to create the widgets
     */
    protected void createClient(Section section, FormToolkit toolkit) {
        section.setText("File Details");
        section.setLayoutData(new TableWrapData(TableWrapData.FILL_GRAB));
        section.setDescription("Provide file generation options below");

        Composite client = toolkit.createComposite(section);
        TableWrapLayout layout = new TableWrapLayout();
        // No extra margin when the toolkit paints its own borders.
        layout.leftMargin = layout.rightMargin = toolkit.getBorderStyle() != SWT.NULL ? 0 : 2;
        layout.numColumns = 3;
        client.setLayout(layout);
        section.setClient(client);

        mcomp = new SectionMessageAreaComposite();
        mcomp.createControl(client, toolkit, 3);

        widget_replace = createCheckboxRow(client, toolkit, "Replace",
                "Replace file on re-generation", "com.gerken.xaa.mpe.fileoptions_replace");
        installToggleListener(widget_replace, "replace", new ValueSink() {
            public void set(String value) {
                value_replace = value;
            }
        });

        widget_binary = createCheckboxRow(client, toolkit, "Binary",
                "Binary file contents", "com.gerken.xaa.mpe.fileoptions_binary");
        installToggleListener(widget_binary, "binary", new ValueSink() {
            public void set(String value) {
                value_binary = value;
            }
        });

        widget_changeableName = createCheckboxRow(client, toolkit, "ChangeableName",
                "File name is not constant", "com.gerken.xaa.mpe.fileoptions_changeableName");
        installToggleListener(widget_changeableName, "changeableName", new ValueSink() {
            public void set(String value) {
                value_changeableName = value;
            }
        });

        widget_purposeAsTokenName = createCheckboxRow(client, toolkit, "PurposeAsTokenName",
                "Create xform token from purpose", "com.gerken.xaa.mpe.fileoptions_purposeAsTokenName");
        installToggleListener(widget_purposeAsTokenName, "purposeAsTokenName", new ValueSink() {
            public void set(String value) {
                value_purposeAsTokenName = value;
            }
        });

        toolkit.paintBordersFor(client);
        getPage().getMpeEditor().getConstraintManager().addConstraintListener(this);
    }

    /**
     * Create one labelled checkbox row (label in column 1, checkbox spanning
     * columns 2-3) and register its help context.
     *
     * @param client parent composite
     * @param toolkit toolkit used to create the widgets
     * @param labelText text for the leading label
     * @param buttonText text for the checkbox
     * @param helpContextId workbench help-system context id for the checkbox
     * @return the created checkbox
     */
    private Button createCheckboxRow(Composite client, FormToolkit toolkit,
            String labelText, String buttonText, String helpContextId) {
        Label label = toolkit.createLabel(client, labelText);
        label.setForeground(toolkit.getColors().getColor(FormColors.TITLE));
        TableWrapData td = new TableWrapData();
        td.valign = TableWrapData.MIDDLE;
        label.setLayoutData(td);

        final Button button = toolkit.createButton(client, buttonText, SWT.CHECK);
        PlatformUI.getWorkbench().getHelpSystem().setHelp(button, helpContextId);
        button.setText(buttonText);
        td = new TableWrapData(TableWrapData.FILL_GRAB);
        td.colspan = 2;
        td.valign = TableWrapData.MIDDLE;
        button.setLayoutData(td);
        return button;
    }

    /**
     * Attach the shared selection handler: on toggle (unless a programmatic
     * load is in progress) record the value, push it into the model attribute,
     * mark the editor dirty, and notify the editor of the property change.
     *
     * @param button the checkbox to observe
     * @param attributeName name of the XML attribute backing the checkbox
     * @param sink stores the stringified value in the owning field
     */
    private void installToggleListener(final Button button, final String attributeName,
            final ValueSink sink) {
        button.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent arg0) {
                exec();
            }

            public void widgetSelected(SelectionEvent arg0) {
                exec();
            }

            private void exec() {
                if (_loading) {
                    return;
                }
                String value = String.valueOf(button.getSelection());
                sink.set(value);
                setAttribute(getSourceNode(), attributeName, value);
                markDirty();
                getPage().getMpeEditor().propertyChanged(getSourceNode(), attributeName);
            }
        });
    }

    protected void commit() {
        super.commit();
    }

    /** @return the current "replace" option value ("true"/"false", or "" when unset) */
    public String getReplace() {
        return value_replace;
    }

    /** @return the current "binary" option value ("true"/"false", or "" when unset) */
    public String getBinary() {
        return value_binary;
    }

    /** @return the current "changeableName" option value ("true"/"false", or "" when unset) */
    public String getChangeableName() {
        return value_changeableName;
    }

    /** @return the current "purposeAsTokenName" option value ("true"/"false", or "" when unset) */
    public String getPurposeAsTokenName() {
        return value_purposeAsTokenName;
    }

    /**
     * Populate the section from a model node: read the four attributes,
     * refresh the widgets, enable editing, and show any existing constraint
     * problem for this node. Listener side effects are suppressed while
     * loading.
     *
     * @param source the file node to edit
     */
    public void loadFrom(Node source) {
        _loading = true;
        setSourceNode(source);
        value_replace = ModelAccess.getAttribute(getSourceNode(), "@replace");
        value_binary = ModelAccess.getAttribute(getSourceNode(), "@binary");
        value_changeableName = ModelAccess.getAttribute(getSourceNode(), "@changeableName");
        value_purposeAsTokenName = ModelAccess.getAttribute(getSourceNode(), "@purposeAsTokenName");
        updateScreen();
        enableSection(true);
        displayFirst(getPage().getMpeEditor().getConstraintManager().getCurrentProblems());
        _loading = false;
    }

    /** Detach from the current node, blank all values, and disable the widgets. */
    public void clear() {
        clearSourceNode();
        value_replace = "";
        value_binary = "";
        value_changeableName = "";
        value_purposeAsTokenName = "";
        updateScreen();
        enableSection(false);
        mcomp.resetError();
    }

    /** Enable or disable all four option checkboxes together. */
    private void enableSection(boolean enable) {
        widget_replace.setEnabled(enable);
        widget_binary.setEnabled(enable);
        widget_changeableName.setEnabled(enable);
        widget_purposeAsTokenName.setEnabled(enable);
    }

    /** Push the cached string values into the checkboxes ("true" checks, anything else unchecks). */
    private void updateScreen() {
        widget_replace.setSelection("true".equalsIgnoreCase(value_replace));
        widget_binary.setSelection("true".equalsIgnoreCase(value_binary));
        widget_changeableName.setSelection("true".equalsIgnoreCase(value_changeableName));
        widget_purposeAsTokenName.setSelection("true".equalsIgnoreCase(value_purposeAsTokenName));
    }

    /** Mark the editor dirty and refresh the selected entry in the artifact list. */
    public void markDirty() {
        super.markDirty();
        ((ArtifactPage) getPage()).getArtifactListSection().refreshSelected();
    }

    protected boolean isPrimary() {
        return false;
    }

    /**
     * Declare the constraints this section is responsible for: each of the
     * four attributes is required on file nodes.
     *
     * @return the constraint set for the File Details section
     */
    public static ConstraintSet getConstraintSet() {
        ConstraintSet cs = new ConstraintSet(XaaEditor.Section_FileDetails);
        cs.addConstraint(new Constraint("replace", Constraint.CONSTRAINT_REQUIRED_IF_FILE));
        cs.addConstraint(new Constraint("binary", Constraint.CONSTRAINT_REQUIRED_IF_FILE));
        cs.addConstraint(new Constraint("changeableName", Constraint.CONSTRAINT_REQUIRED_IF_FILE));
        cs.addConstraint(new Constraint("purposeAsTokenName", Constraint.CONSTRAINT_REQUIRED_IF_FILE));
        return cs;
    }

    public void constraintsChecked(ArrayList<ConstraintFailure> problems) {
        displayFirst(problems);
    }

    /**
     * Show the first constraint failure reported by this section against the
     * currently edited node, or clear the message area when none applies.
     * (An unused local left over from earlier code was removed here.)
     *
     * @param problems all current constraint failures
     */
    public void displayFirst(ArrayList<ConstraintFailure> problems) {
        if (getSourceNode() == null) {
            return;
        }
        for (ConstraintFailure candidate : problems) {
            // Identity comparisons match the original semantics: same section
            // constant, same node instance.
            if (candidate.getReportingConstraintSet() == XaaEditor.Section_FileDetails
                    && getSourceNode() == candidate.getTarget()) {
                mcomp.setError(candidate.getMessage());
                return;
            }
        }
        mcomp.resetError();
    }
}
package com.getbase.floatingactionbutton;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.LinearGradient;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.graphics.Shader.TileMode;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.StateListDrawable;
import android.os.Build;
import android.os.Build.VERSION_CODES;
import android.support.annotation.ColorRes;
import android.support.annotation.DimenRes;
import android.support.annotation.DrawableRes;
import android.util.AttributeSet;
import android.widget.ImageButton;

/**
 * A circular floating action button rendered entirely in software: the widget builds its own
 * background as a {@link LayerDrawable} of [shadow background, pressed/normal fill circle,
 * highlight/shadow strokes, icon] instead of relying on platform elevation.
 *
 * Color, size (normal/mini) and icon are configurable through the
 * {@code FloatingActionButton_fab_*} styleable attributes.
 */
public class FloatingActionButton extends ImageButton {

    public static final int SIZE_NORMAL = 0;
    public static final int SIZE_MINI = 1;

    // Pre-built ARGB constants for the stroke gradients (alpha 128 of pure white/black).
    private static final int HALF_TRANSPARENT_WHITE = Color.argb(128, 255, 255, 255);
    private static final int HALF_TRANSPARENT_BLACK = Color.argb(128, 0, 0, 0);

    int mColorNormal;              // fill color in the default state
    int mColorPressed;             // fill color while pressed
    @DrawableRes private int mIcon; // icon resource; 0 means "no icon" (transparent layer used)
    private int mSize;             // SIZE_NORMAL or SIZE_MINI
    private float mCircleSize;     // diameter of the visible circle, in px
    private float mShadowRadius;   // shadow blur radius, in px
    private float mShadowOffset;   // vertical shadow offset, in px
    private int mDrawableSize;     // total bitmap/view edge: circle plus shadow on both sides

    public FloatingActionButton(Context context) {
        this(context, null);
    }

    public FloatingActionButton(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    public FloatingActionButton(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs);
    }

    /**
     * Applies defaults, overrides them from XML attributes (if any), resolves the
     * size-dependent dimensions and builds the composite background drawable.
     */
    void init(Context context, AttributeSet attributeSet) {
        mColorNormal = getColor(android.R.color.holo_blue_dark);
        mColorPressed = getColor(android.R.color.holo_blue_light);
        mIcon = 0;
        mSize = SIZE_NORMAL;
        if (attributeSet != null) {
            initAttributes(context, attributeSet);
        }
        mCircleSize = getDimension(mSize == SIZE_NORMAL ? R.dimen.fab_size_normal : R.dimen.fab_size_mini);
        mShadowRadius = getDimension(R.dimen.fab_shadow_radius);
        mShadowOffset = getDimension(R.dimen.fab_shadow_offset);
        // Shadow extends mShadowRadius past the circle on every side.
        mDrawableSize = (int) (mCircleSize + 2 * mShadowRadius);
        updateBackground();
    }

    int getColor(@ColorRes int id) {
        return getResources().getColor(id);
    }

    float getDimension(@DimenRes int id) {
        return getResources().getDimension(id);
    }

    /** Reads fab_colorNormal / fab_colorPressed / fab_size / fab_icon from the attribute set. */
    private void initAttributes(Context context, AttributeSet attributeSet) {
        TypedArray attr = context.obtainStyledAttributes(attributeSet, R.styleable.FloatingActionButton, 0, 0);
        if (attr != null) {
            try {
                mColorNormal = attr.getColor(R.styleable.FloatingActionButton_fab_colorNormal, getColor(android.R.color.holo_blue_dark));
                mColorPressed = attr.getColor(R.styleable.FloatingActionButton_fab_colorPressed, getColor(android.R.color.holo_blue_light));
                mSize = attr.getInt(R.styleable.FloatingActionButton_fab_size, SIZE_NORMAL);
                mIcon = attr.getResourceId(R.styleable.FloatingActionButton_fab_icon, 0);
            } finally {
                attr.recycle(); // always release the TypedArray
            }
        }
    }

    /** The view is always exactly the size of its pre-rendered drawable (circle + shadow). */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        setMeasuredDimension(mDrawableSize, mDrawableSize);
    }

    /**
     * (Re)builds the layered background. Layer order is significant:
     * 0 = shadow background resource, 1 = state-dependent fill, 2 = strokes, 3 = icon.
     * The icon layer (index 3) is inset so it is centered on the circle, which sits
     * mShadowOffset higher than the drawable's vertical center.
     */
    void updateBackground() {
        float circleLeft = mShadowRadius;
        float circleTop = mShadowRadius - mShadowOffset; // circle is shifted up by the shadow offset
        final RectF circleRect = new RectF(circleLeft, circleTop, circleLeft + mCircleSize, circleTop + mCircleSize);
        LayerDrawable layerDrawable = new LayerDrawable(
            new Drawable[] {
                getResources().getDrawable(mSize == SIZE_NORMAL ? R.drawable.fab_bg_normal : R.drawable.fab_bg_mini),
                createFillDrawable(circleRect),
                createStrokesDrawable(circleRect),
                getIconDrawable()
            });
        float iconOffset = (mCircleSize - getDimension(R.dimen.fab_icon_size)) / 2f;
        int iconInsetHorizontal = (int) (mShadowRadius + iconOffset);
        int iconInsetTop = (int) (circleTop + iconOffset);
        int iconInsetBottom = (int) (mShadowRadius + mShadowOffset + iconOffset);
        layerDrawable.setLayerInset(3, iconInsetHorizontal, iconInsetTop, iconInsetHorizontal, iconInsetBottom);
        setBackgroundCompat(layerDrawable);
    }

    /** Icon layer: the configured resource, or a transparent placeholder when mIcon == 0. */
    Drawable getIconDrawable() {
        if (mIcon != 0) {
            return getResources().getDrawable(mIcon);
        } else {
            return new ColorDrawable(Color.TRANSPARENT);
        }
    }

    /** Fill layer: pressed color when state_pressed, normal color otherwise (wildcard state). */
    private StateListDrawable createFillDrawable(RectF circleRect) {
        StateListDrawable drawable = new StateListDrawable();
        drawable.addState(new int[] { android.R.attr.state_pressed }, createCircleDrawable(circleRect, mColorPressed));
        drawable.addState(new int[] { }, createCircleDrawable(circleRect, mColorNormal));
        return drawable;
    }

    /** Renders a solid anti-aliased circle of the given color into a full-size bitmap. */
    private Drawable createCircleDrawable(RectF circleRect, int color) {
        final Bitmap bitmap = Bitmap.createBitmap(mDrawableSize, mDrawableSize, Config.ARGB_8888);
        final Canvas canvas = new Canvas(bitmap);
        final Paint paint = new Paint();
        paint.setAntiAlias(true);
        paint.setColor(color);
        canvas.drawOval(circleRect, paint);
        return new BitmapDrawable(getResources(), bitmap);
    }

    /** Maps an opacity fraction [0..1] to a paint alpha [0..255]. */
    private int opacityToAlpha(float opacity) {
        return (int) (255f * opacity);
    }

    /**
     * Stroke layer: a faint black outer ring plus two gradient inner rings
     * (dark toward the bottom, light toward the top) that fake a beveled edge.
     * Note: setShader colors and setAlpha interact — the alpha set after each shader
     * scales that gradient's opacity.
     */
    private Drawable createStrokesDrawable(RectF circleRect) {
        final Bitmap bitmap = Bitmap.createBitmap(mDrawableSize, mDrawableSize, Config.ARGB_8888);
        final Canvas canvas = new Canvas(bitmap);
        final float strokeWidth = getDimension(R.dimen.fab_stroke_width);
        final float halfStrokeWidth = strokeWidth / 2f;
        RectF outerStrokeRect = new RectF( circleRect.left - halfStrokeWidth, circleRect.top - halfStrokeWidth, circleRect.right + halfStrokeWidth, circleRect.bottom + halfStrokeWidth );
        RectF innerStrokeRect = new RectF( circleRect.left + halfStrokeWidth, circleRect.top + halfStrokeWidth, circleRect.right - halfStrokeWidth, circleRect.bottom - halfStrokeWidth );
        final Paint paint = new Paint();
        paint.setAntiAlias(true);
        paint.setStrokeWidth(strokeWidth);
        paint.setStyle(Style.STROKE);
        // outer
        paint.setColor(Color.BLACK);
        paint.setAlpha(opacityToAlpha(0.02f));
        canvas.drawOval(outerStrokeRect, paint);
        // inner bottom
        paint.setShader(new LinearGradient(innerStrokeRect.centerX(), innerStrokeRect.top, innerStrokeRect.centerX(), innerStrokeRect.bottom, new int[] { Color.TRANSPARENT, HALF_TRANSPARENT_BLACK, Color.BLACK }, new float[] { 0f, 0.8f, 1f }, TileMode.CLAMP ));
        paint.setAlpha(opacityToAlpha(0.04f));
        canvas.drawOval(innerStrokeRect, paint);
        // inner top
        paint.setShader(new LinearGradient(innerStrokeRect.centerX(), innerStrokeRect.top, innerStrokeRect.centerX(), innerStrokeRect.bottom, new int[] { Color.WHITE, HALF_TRANSPARENT_WHITE, Color.TRANSPARENT }, new float[] { 0f, 0.2f, 1f }, TileMode.CLAMP ));
        paint.setAlpha(opacityToAlpha(0.8f));
        canvas.drawOval(innerStrokeRect, paint);
        return new BitmapDrawable(getResources(), bitmap);
    }

    /** setBackground exists only from API 16 (JELLY_BEAN); fall back to the deprecated setter below that. */
    @SuppressWarnings("deprecation")
    @SuppressLint("NewApi")
    private void setBackgroundCompat(Drawable drawable) {
        if (Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN) {
            setBackground(drawable);
        } else {
            setBackgroundDrawable(drawable);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.ra;

import javax.naming.Context;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.jgroups.JChannel;

/**
 * Various utility functions
 */
public final class ActiveMQRaUtils {

   /**
    * Private constructor
    */
   private ActiveMQRaUtils() {
   }

   /**
    * Compare two strings, treating two nulls (or the same reference) as equal.
    *
    * @param me First value
    * @param you Second value
    * @return True if object equals else false.
    */
   public static boolean compare(final String me, final String you) {
      // Objects.equals: true when both null or same reference, false when only
      // one is null, me.equals(you) otherwise — identical to the hand-rolled
      // null checks this method previously performed.
      return Objects.equals(me, you);
   }

   /**
    * Compare two integers, treating two nulls (or the same reference) as equal.
    *
    * @param me First value
    * @param you Second value
    * @return True if object equals else false.
    */
   public static boolean compare(final Integer me, final Integer you) {
      return Objects.equals(me, you);
   }

   /**
    * Compare two longs, treating two nulls (or the same reference) as equal.
    *
    * @param me First value
    * @param you Second value
    * @return True if object equals else false.
    */
   public static boolean compare(final Long me, final Long you) {
      return Objects.equals(me, you);
   }

   /**
    * Compare two doubles, treating two nulls (or the same reference) as equal.
    *
    * @param me First value
    * @param you Second value
    * @return True if object equals else false.
    */
   public static boolean compare(final Double me, final Double you) {
      return Objects.equals(me, you);
   }

   /**
    * Compare two booleans, treating two nulls (or the same reference) as equal.
    *
    * @param me First value
    * @param you Second value
    * @return True if object equals else false.
    */
   public static boolean compare(final Boolean me, final Boolean you) {
      return Objects.equals(me, you);
   }

   /**
    * Lookup an object in the default initial context.
    * Note: {@code clazz} is accepted for API compatibility but no type check is
    * performed; the raw lookup result is returned as-is.
    *
    * @param context The context to use
    * @param name the name to lookup
    * @param clazz the expected type
    * @return the object
    * @throws Exception for any error
    */
   public static Object lookup(final Context context, final String name, final Class<?> clazz) throws Exception {
      return context.lookup(name);
   }

   /**
    * Used on parsing JNDI Configuration. The input is a ';'-separated list of
    * {@code key=value} pairs; keys and values are trimmed.
    *
    * @param config configuration string, e.g. {@code "a=1;b=2"}
    * @return hash-table with configuration option pairs
    * @throws IllegalArgumentException if any element is not a single {@code key=value} pair
    */
   public static Hashtable<String, String> parseHashtableConfig(final String config) {
      Hashtable<String, String> hashtable = new Hashtable<String, String>();
      String[] topElements = config.split(";");
      for (String element : topElements) {
         String[] expression = element.split("=");
         if (expression.length != 2) {
            throw new IllegalArgumentException("Invalid expression " + element + " at " + config);
         }
         hashtable.put(expression[0].trim(), expression[1].trim());
      }
      return hashtable;
   }

   /**
    * Parses a ','-separated list of configurations, each of which is a
    * ';'-separated list of {@code key=value} pairs (same element grammar as
    * {@link #parseHashtableConfig(String)}).
    *
    * @param config configuration string, e.g. {@code "a=1;b=2,a=3;b=4"}
    * @return one map per top-level (comma-separated) element, in input order
    * @throws IllegalArgumentException if any element is not a single {@code key=value} pair
    */
   public static List<Map<String, Object>> parseConfig(final String config) {
      List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
      String[] topElements = config.split(",");
      for (String topElement : topElements) {
         HashMap<String, Object> map = new HashMap<String, Object>();
         result.add(map);
         String[] elements = topElement.split(";");
         for (String element : elements) {
            String[] expression = element.split("=");
            if (expression.length != 2) {
               throw new IllegalArgumentException("Invalid expression " + element + " at " + config);
            }
            map.put(expression[0].trim(), expression[1].trim());
         }
      }
      return result;
   }

   /**
    * Splits a ','-separated connector list into trimmed entries.
    *
    * @param config comma-separated connector names
    * @return trimmed entries, in input order
    */
   public static List<String> parseConnectorConnectorConfig(String config) {
      List<String> res = new ArrayList<String>();
      String[] elements = config.split(",");
      for (String element : elements) {
         res.add(element.trim());
      }
      return res;
   }

   /**
    * Within AS7 the RA is loaded by JCA. properties can only be passed in String form. However if
    * RA is configured using jgroups stack, we need to pass a Channel object. As is impossible with
    * JCA, we use this method to allow a JChannel object to be located.
    * Any failure (class not found, reflection error, locator throwing) is logged
    * at debug level and reported as {@code null}.
    */
   public static JChannel locateJGroupsChannel(final String locatorClass, final String name) {
      return AccessController.doPrivileged(new PrivilegedAction<JChannel>() {
         public JChannel run() {
            try {
               // The locator class is resolved via the thread context class loader and
               // must expose a public no-arg constructor and a locateChannel(String) method.
               ClassLoader loader = Thread.currentThread().getContextClassLoader();
               Class<?> aClass = loader.loadClass(locatorClass);
               Object o = aClass.newInstance();
               Method m = aClass.getMethod("locateChannel", new Class[]{String.class});
               return (JChannel) m.invoke(o, name);
            } catch (Throwable e) {
               ActiveMQRALogger.LOGGER.debug(e.getMessage(), e);
               return null;
            }
         }
      });
   }

   /**
    * This seems duplicate code all over the place, but for security reasons we can't let something like this to be open in a
    * utility class, as it would be a door to load anything you like in a safe VM.
    * For that reason any class trying to do a privileged block should do with the AccessController directly.
    * Tries this class's own loader first, then the thread context class loader.
    */
   private static Object safeInitNewInstance(final String className) {
      return AccessController.doPrivileged(new PrivilegedAction<Object>() {
         public Object run() {
            ClassLoader loader = getClass().getClassLoader();
            try {
               Class<?> clazz = loader.loadClass(className);
               return clazz.newInstance();
            } catch (Throwable t) {
               try {
                  // Fall back to the context class loader before giving up.
                  loader = Thread.currentThread().getContextClassLoader();
                  if (loader != null)
                     return loader.loadClass(className).newInstance();
               } catch (RuntimeException e) {
                  throw e;
               } catch (Exception e) {
                  // intentionally ignored: the IllegalArgumentException below reports the failure
               }
               throw new IllegalArgumentException("Could not find class " + className);
            }
         }
      });
   }
}
package br.com.rooting.roxana.response.processor;

import br.com.rooting.roxana.UnitTest;
import br.com.rooting.roxana.config.RoxanaProperties;
import br.com.rooting.roxana.config.RoxanaProperties.Business.ResponseStrategy;
import br.com.rooting.roxana.config.RoxanaPropertiesMockBuilder;
import br.com.rooting.roxana.exception.mapper.BusinessException;
import br.com.rooting.roxana.exception.mapper.MultiBusinessException;
import br.com.rooting.roxana.message.*;
import br.com.rooting.roxana.parameter.mapper.Param;
import br.com.rooting.roxana.response.Response;
import br.com.rooting.roxana.translator.MockedTranslator;
import br.com.rooting.roxana.translator.Translator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.function.Executable;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;

import java.lang.reflect.Constructor;
import java.util.List;
import java.util.stream.Stream;

import static br.com.rooting.roxana.utils.ReflectionUtils.isPackagePrivate;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;

/**
 * Unit tests for {@code MultiBusinessExceptionResponseProcessor}: constructor argument
 * validation, processing of {@code @MultiBusinessException}-annotated exceptions whose
 * suppressed exceptions become response messages, and the response-code override.
 *
 * NOTE(review): the positional assertions (messages.get(0)..get(3)) pin the processor's
 * message ordering — it does not match the order the suppressed exceptions were added,
 * so the expected order is part of the contract under test.
 */
class MultiBusinessExceptionResponseProcessorTest extends UnitTest<MultiBusinessExceptionResponseProcessor> {

    // Explicit message key used by CustomMessageKeyExceptionTest below.
    private static final String CUSTOM_KEY = "{custom.key}";

    // The processor class itself must be package-private (internal API).
    @Test
    void testClassIsPackagePrivateTest() {
        assertTrue(isPackagePrivate(this.getUnitTestClass().getModifiers()));
    }

    // The processor must be a ResponseProcessor implementation.
    @Test
    void testClassExtendsMessageCreatorTest() {
        assertTrue(ResponseProcessor.class.isAssignableFrom(this.getUnitTestClass()));
    }

    // Exactly one constructor, and it must be package-private.
    @Test
    void testClassWasOnlyOnePackagePrivateConstructorTest() {
        Constructor<?>[] constructors = this.getUnitTestClass().getDeclaredConstructors();
        assertEquals(1, constructors.length);
        assertTrue(isPackagePrivate(constructors[0].getModifiers()));
    }

    // Constructor null-argument validation: each collaborator is mandatory.
    @Test
    void roxanaPropertiesCanNotBeNullTest() {
        Executable executable = () -> new MultiBusinessExceptionResponseProcessor(null, mock(MessageCreatorFactory.class), mock(ResponseProcessorManager.class));
        assertThrows(IllegalArgumentException.class, executable);
    }

    @Test
    void messageCreatorFactoryCanNotBeNullTest() {
        Executable executable = () -> new MultiBusinessExceptionResponseProcessor(mock(RoxanaProperties.class), null, mock(ResponseProcessorManager.class));
        assertThrows(IllegalArgumentException.class, executable);
    }

    @Test
    void responseProcessorManagerCanNotBeNullTest() {
        Executable executable = () -> new MultiBusinessExceptionResponseProcessor(mock(RoxanaProperties.class), mock(MessageCreatorFactory.class), null);
        assertThrows(IllegalArgumentException.class, executable);
    }

    // The suppress-others-exceptions property must be configured (non-null) as well.
    @Test
    void suppressOthersExceptionsCanNotBeNullTest() {
        RoxanaProperties roxanaProperties = new RoxanaPropertiesMockBuilder()
                .withSuppressOthersExceptions(null)
                .build();
        Executable executable = () -> new MultiBusinessExceptionResponseProcessor(roxanaProperties, mock(MessageCreatorFactory.class), mock(ResponseProcessorManager.class));
        assertThrows(IllegalArgumentException.class, executable);
    }

    /**
     * A multi-exception without a custom response code yields 422 UNPROCESSABLE_ENTITY and
     * one message per suppressed @BusinessException, each carrying its key, severity and
     * translation (severity defaults to ERROR unless the exception customizes it).
     */
    @Test
    void processNotCustomMultiBusinessExceptionTest() throws Exception {
        RoxanaProperties roxanaProperties = this.getRoxanaProperties(false);
        MultiBusinessExceptionResponseProcessor processor = this.getResponseProcessorForTest(roxanaProperties, mock(ResponseProcessorManager.class));
        NotCustomBusinessException notCustomBusinessException = new NotCustomBusinessException("test");
        CustomResponseCodeBusinessException customResponseCodeBusinessException = new CustomResponseCodeBusinessException("test");
        CustomSeverityBusinessException customSeverityBusinessException = new CustomSeverityBusinessException("test");
        CustomMessageKeyExceptionTest customMessageKeyExceptionTest = new CustomMessageKeyExceptionTest("test");
        NotCustomMultiBusinessException notCustomMultiBusinessException = new NotCustomMultiBusinessException(notCustomBusinessException, customResponseCodeBusinessException, customSeverityBusinessException, customMessageKeyExceptionTest);
        ResponseEntity<Response> responseEntity = processor.process(notCustomMultiBusinessException);
        assertNotNull(responseEntity);
        assertEquals(HttpStatus.UNPROCESSABLE_ENTITY, responseEntity.getStatusCode());
        Response response = responseEntity.getBody();
        assertNotNull(response);
        List<Message> messages = response.getMessages();
        assertNotNull(messages);
        assertEquals(4, messages.size());
        // message 0: exception with a custom response code (code ignored here, key/severity kept)
        MessageFully messageCustomResponseCode = (MessageFully) messages.get(0);
        assertEquals(CustomResponseCodeBusinessException.KEY, messageCustomResponseCode.getKey());
        assertEquals(MessageSeverity.ERROR, messageCustomResponseCode.getSeverity());
        String translationCustomResponseCode = this.getTranslator().translate(CustomResponseCodeBusinessException.KEY, messageCustomResponseCode.getParameters());
        assertEquals(translationCustomResponseCode, messageCustomResponseCode.getTranslation());
        // message 1: exception that customizes severity to INFO
        MessageFully messageCustomSeverity = (MessageFully) messages.get(1);
        assertEquals(CustomSeverityBusinessException.KEY, messageCustomSeverity.getKey());
        assertEquals(MessageSeverity.INFO, messageCustomSeverity.getSeverity());
        String translationCustomSeverity = this.getTranslator().translate(CustomSeverityBusinessException.KEY, messageCustomSeverity.getParameters());
        assertEquals(translationCustomSeverity, messageCustomSeverity.getTranslation());
        // message 2: fully default @BusinessException
        MessageFully messageNotCustom = (MessageFully) messages.get(2);
        assertEquals(NotCustomBusinessException.KEY, messageNotCustom.getKey());
        assertEquals(MessageSeverity.ERROR, messageNotCustom.getSeverity());
        String translationNotCustom = this.getTranslator().translate(NotCustomBusinessException.KEY, messageNotCustom.getParameters());
        assertEquals(translationNotCustom, messageNotCustom.getTranslation());
        // message 3: exception with an explicit message key
        MessageFully messageCustomMessageKey = (MessageFully) messages.get(3);
        assertEquals(CUSTOM_KEY, messageCustomMessageKey.getKey());
        assertEquals(MessageSeverity.ERROR, messageCustomMessageKey.getSeverity());
        String translationCustomMessageKey = this.getTranslator().translate(CUSTOM_KEY, messageCustomMessageKey.getParameters());
        assertEquals(translationCustomMessageKey, messageCustomMessageKey.getTranslation());
    }

    /**
     * Same scenario as above, but the multi-exception declares
     * {@code responseCode = BAD_REQUEST}; only the HTTP status changes — the
     * per-message expectations are identical.
     */
    @Test
    void processCustomResponseCodeMultiBusinessExceptionTest() throws Exception {
        RoxanaProperties roxanaProperties = this.getRoxanaProperties(false);
        MultiBusinessExceptionResponseProcessor processor = this.getResponseProcessorForTest(roxanaProperties, mock(ResponseProcessorManager.class));
        NotCustomBusinessException notCustomBusinessException = new NotCustomBusinessException("test");
        CustomResponseCodeBusinessException customResponseCodeBusinessException = new CustomResponseCodeBusinessException("test");
        CustomSeverityBusinessException customSeverityBusinessException = new CustomSeverityBusinessException("test");
        CustomMessageKeyExceptionTest customMessageKeyExceptionTest = new CustomMessageKeyExceptionTest("test");
        CustomResponseCodeMultiException customResponseCodeMultiBusinessException = new CustomResponseCodeMultiException(notCustomBusinessException, customResponseCodeBusinessException, customSeverityBusinessException, customMessageKeyExceptionTest);
        ResponseEntity<Response> responseEntity = processor.process(customResponseCodeMultiBusinessException);
        assertNotNull(responseEntity);
        assertEquals(HttpStatus.BAD_REQUEST, responseEntity.getStatusCode());
        Response response = responseEntity.getBody();
        assertNotNull(response);
        List<Message> messages = response.getMessages();
        assertNotNull(messages);
        assertEquals(4, messages.size());
        MessageFully messageCustomResponseCode = (MessageFully) messages.get(0);
        assertEquals(CustomResponseCodeBusinessException.KEY, messageCustomResponseCode.getKey());
        assertEquals(MessageSeverity.ERROR, messageCustomResponseCode.getSeverity());
        String translationCustomResponseCode = this.getTranslator().translate(CustomResponseCodeBusinessException.KEY, messageCustomResponseCode.getParameters());
        assertEquals(translationCustomResponseCode, messageCustomResponseCode.getTranslation());
        MessageFully messageCustomSeverity = (MessageFully) messages.get(1);
        assertEquals(CustomSeverityBusinessException.KEY, messageCustomSeverity.getKey());
        assertEquals(MessageSeverity.INFO, messageCustomSeverity.getSeverity());
        String translationCustomSeverity = this.getTranslator().translate(CustomSeverityBusinessException.KEY, messageCustomSeverity.getParameters());
        assertEquals(translationCustomSeverity, messageCustomSeverity.getTranslation());
        MessageFully messageNotCustom = (MessageFully) messages.get(2);
        assertEquals(NotCustomBusinessException.KEY, messageNotCustom.getKey());
        assertEquals(MessageSeverity.ERROR, messageNotCustom.getSeverity());
        String translationNotCustom = this.getTranslator().translate(NotCustomBusinessException.KEY, messageNotCustom.getParameters());
        assertEquals(translationNotCustom, messageNotCustom.getTranslation());
        MessageFully messageCustomMessageKey = (MessageFully) messages.get(3);
        assertEquals(CUSTOM_KEY, messageCustomMessageKey.getKey());
        assertEquals(MessageSeverity.ERROR, messageCustomMessageKey.getSeverity());
        String translationCustomMessageKey = this.getTranslator().translate(CUSTOM_KEY, messageCustomMessageKey.getParameters());
        assertEquals(translationCustomMessageKey, messageCustomMessageKey.getTranslation());
    }

    /**
     * Suppressed exceptions that are NOT annotated as business exceptions
     * (NullPointerException, IllegalArgumentException) produce no messages.
     */
    @Test
    void onlyBusinessExceptionAreConsideredTest() throws Exception {
        RoxanaProperties roxanaProperties = this.getRoxanaProperties(false);
        MultiBusinessExceptionResponseProcessor processor = this.getResponseProcessorForTest(roxanaProperties, mock(ResponseProcessorManager.class));
        NotCustomMultiBusinessException multiBusinessException = new NotCustomMultiBusinessException(new NullPointerException(), new IllegalArgumentException());
        ResponseEntity<Response> responseEntity = processor.process(multiBusinessException);
        assertNotNull(responseEntity);
        assertEquals(HttpStatus.UNPROCESSABLE_ENTITY, responseEntity.getStatusCode());
        Response response = responseEntity.getBody();
        assertNotNull(response);
        List<Message> messages = response.getMessages();
        assertNotNull(messages);
        assertEquals(0, messages.size());
    }

    // --- test fixture helpers -------------------------------------------------

    // Builds a processor wired with a real (mocked-translator) message creator factory.
    private MultiBusinessExceptionResponseProcessor getResponseProcessorForTest(final RoxanaProperties roxanaProperties, final ResponseProcessorManager responseProcessorManager) {
        return new MultiBusinessExceptionResponseProcessor(roxanaProperties, this.getMessageCreatorFactory(roxanaProperties), responseProcessorManager);
    }

    // Properties with the FULLY response strategy and the given suppression flag.
    private RoxanaProperties getRoxanaProperties(final Boolean suppressOthersExceptions) {
        return new RoxanaPropertiesMockBuilder()
                .withResponseStrategy(ResponseStrategy.FULLY)
                .withSuppressOthersExceptions(suppressOthersExceptions).build();
    }

    private Translator getTranslator() {
        return new MockedTranslator();
    }

    private MessageCreatorFactory getMessageCreatorFactory(final RoxanaProperties roxanaProperties) {
        return new MockedMessageCreatorFactory(roxanaProperties, this.getTranslator());
    }

    // --- fixture exception types ---------------------------------------------
    // Each carries its component exceptions as *suppressed* exceptions, which is
    // how the processor under test discovers them.

    @MultiBusinessException
    private static class NotCustomMultiBusinessException extends RuntimeException {
        private static final long serialVersionUID = 1L;
        private NotCustomMultiBusinessException(final Exception... businessException) {
            Stream.of(businessException)
                    .forEach(this::addSuppressed);
        }
    }

    @MultiBusinessException(responseCode = HttpStatus.BAD_REQUEST)
    private static class CustomResponseCodeMultiException extends RuntimeException {
        private static final long serialVersionUID = 1L;
        private CustomResponseCodeMultiException(final Exception... businessException) {
            Stream.of(businessException)
                    .forEach(this::addSuppressed);
        }
    }

    // Default business exception: KEY derived from the class name, ERROR severity.
    @BusinessException
    private static class NotCustomBusinessException extends RuntimeException {
        private static final long serialVersionUID = 1L;
        private static final String KEY = "{br.com.rooting.roxana.response.processor.MultiBusinessExceptionResponseProcessorTest$NotCustomBusinessException}";
        @Param
        private final String parameterString;
        private NotCustomBusinessException(final String parameterString) {
            this.parameterString = parameterString;
        }
    }

    // Overrides only the HTTP response code.
    @BusinessException(responseCode = HttpStatus.BAD_REQUEST)
    private static class CustomResponseCodeBusinessException extends RuntimeException {
        private static final long serialVersionUID = 1L;
        private static final String KEY = "{br.com.rooting.roxana.response.processor.MultiBusinessExceptionResponseProcessorTest$CustomResponseCodeBusinessException}";
        @Param
        private final String parameterString;
        private CustomResponseCodeBusinessException(final String parameterString) {
            this.parameterString = parameterString;
        }
    }

    // Overrides only the message severity.
    @BusinessException(severity = MessageSeverity.INFO)
    private static class CustomSeverityBusinessException extends RuntimeException {
        private static final long serialVersionUID = 1L;
        private static final String KEY = "{br.com.rooting.roxana.response.processor.MultiBusinessExceptionResponseProcessorTest$CustomSeverityBusinessException}";
        @Param
        private final String parameterString;
        private CustomSeverityBusinessException(final String parameterString) {
            this.parameterString = parameterString;
        }
    }

    // Overrides only the message key (CUSTOM_KEY).
    @BusinessException(message = CUSTOM_KEY)
    private static class CustomMessageKeyExceptionTest extends RuntimeException {
        private static final long serialVersionUID = 1L;
        @Param
        private final String parameterString;
        private CustomMessageKeyExceptionTest(final String parameterString) {
            this.parameterString = parameterString;
        }
    }
}