gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */

package org.apache.ignite.internal.processors.cache;

import java.util.concurrent.Callable;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.affinity.AffinityKeyMapped;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.compute.ComputeJobContext;
import org.apache.ignite.compute.ComputeJobFailoverException;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.CAX;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.lang.IgniteRunnable;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.resources.JobContextResource;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.spi.failover.always.AlwaysFailoverSpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Affinity routing tests: verifies that {@code affinityRun}/{@code affinityCall}
 * execute closures on the node that owns the affinity key, and that failover of
 * such jobs keeps routing to the key's primary node.
 */
public class GridCacheAffinityRoutingSelfTest extends GridCommonAbstractTest {
    /** Number of grids started for the test topology. */
    private static final int GRID_CNT = 4;

    /** Name of the non-default (named) cache used by the tests. */
    private static final String NON_DFLT_CACHE_NAME = "myCache";

    /** Number of keys preloaded into each cache. */
    private static final int KEY_CNT = 50;

    /** Maximum failover attempts configured on the failover SPI. */
    private static final int MAX_FAILOVER_ATTEMPTS = 5;

    /** Shared IP finder so all test grids discover each other. */
    private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /**
     * Constructs test.
     */
    public GridCacheAffinityRoutingSelfTest() {
        super(/* don't start grid */ false);
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        TcpDiscoverySpi spi = new TcpDiscoverySpi();

        spi.setIpFinder(ipFinder);

        cfg.setDiscoverySpi(spi);

        // Bound failover so the restart-fails tests can exhaust attempts deterministically.
        AlwaysFailoverSpi failSpi = new AlwaysFailoverSpi();

        failSpi.setMaximumFailoverAttempts(MAX_FAILOVER_ATTEMPTS);

        cfg.setFailoverSpi(failSpi);

        // Grid with index GRID_CNT (one past the regular grids) is a cache-less "extra" node.
        if (!gridName.equals(getTestGridName(GRID_CNT))) {
            // Default cache configuration.
            CacheConfiguration dfltCacheCfg = defaultCacheConfiguration();

            dfltCacheCfg.setCacheMode(PARTITIONED);
            dfltCacheCfg.setBackups(1);
            dfltCacheCfg.setWriteSynchronizationMode(FULL_SYNC);

            // Non-default cache configuration.
            CacheConfiguration namedCacheCfg = defaultCacheConfiguration();

            namedCacheCfg.setCacheMode(PARTITIONED);
            namedCacheCfg.setBackups(1);
            namedCacheCfg.setWriteSynchronizationMode(FULL_SYNC);
            namedCacheCfg.setName(NON_DFLT_CACHE_NAME);

            cfg.setCacheConfiguration(dfltCacheCfg, namedCacheCfg);
        }
        else {
            // No cache should be configured for extra node.
            cfg.setCacheConfiguration();
        }

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        for (int i = 0; i < GRID_CNT; i++)
            startGrid(i);

        assert G.allGrids().size() == GRID_CNT;

        // Preload the same keys into both the default and the named cache.
        for (int i = 0; i < KEY_CNT; i++) {
            grid(0).cache(null).put(i, i);

            grid(0).cache(NON_DFLT_CACHE_NAME).put(i, i);
        }
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        super.afterTestsStopped();

        for (int i = 0; i < GRID_CNT; i++)
            stopGrid(i);

        assert G.allGrids().isEmpty();
    }

    /**
     * JUnit. Runs a closure per key and checks it lands on the key's primary node.
     *
     * @throws Exception If failed.
     */
    public void testAffinityRun() throws Exception {
        for (int i = 0; i < KEY_CNT; i++)
            grid(0).compute().affinityRun(NON_DFLT_CACHE_NAME, i, new CheckRunnable(i, i));
    }

    /**
     * Checks that a callable which fails more times than the configured maximum
     * failover attempts ultimately surfaces a topology exception.
     *
     * @throws Exception If failed.
     */
    public void testAffinityCallRestartFails() throws Exception {
        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                grid(0).compute().affinityCall(NON_DFLT_CACHE_NAME, "key",
                    new FailedCallable("key", MAX_FAILOVER_ATTEMPTS + 1));

                return null;
            }
        }, ClusterTopologyException.class, "Failed to failover a job to another node");
    }

    /**
     * Checks that a callable failing exactly MAX_FAILOVER_ATTEMPTS - 1 times still
     * completes, returning the attempt count on the final (successful) attempt.
     *
     * @throws Exception If failed.
     */
    public void testAffinityCallRestart() throws Exception {
        assertEquals(MAX_FAILOVER_ATTEMPTS,
            grid(0).compute().affinityCall(NON_DFLT_CACHE_NAME, "key",
                new FailedCallable("key", MAX_FAILOVER_ATTEMPTS)));
    }

    /**
     * Same as {@link #testAffinityCallRestartFails()} but for {@code affinityRun}.
     *
     * @throws Exception If failed.
     */
    public void testAffinityRunRestartFails() throws Exception {
        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                grid(0).compute().affinityRun(NON_DFLT_CACHE_NAME, "key",
                    new FailedRunnable("key", MAX_FAILOVER_ATTEMPTS + 1));

                return null;
            }
        }, ClusterTopologyException.class, "Failed to failover a job to another node");
    }

    /**
     * Same as {@link #testAffinityCallRestart()} but for {@code affinityRun}.
     *
     * @throws Exception If failed.
     */
    public void testAffinityRunRestart() throws Exception {
        grid(0).compute().affinityRun(NON_DFLT_CACHE_NAME, "key",
            new FailedRunnable("key", MAX_FAILOVER_ATTEMPTS));
    }

    /**
     * JUnit. Verifies routing by both the raw int key and a wrapper key whose
     * {@code @AffinityKeyMapped} field maps to the same value.
     *
     * @throws Exception If failed.
     */
    public void testAffinityRunComplexKey() throws Exception {
        for (int i = 0; i < KEY_CNT; i++) {
            AffinityTestKey key = new AffinityTestKey(i);

            grid(0).compute().affinityRun(NON_DFLT_CACHE_NAME, i, new CheckRunnable(i, key));
            grid(0).compute().affinityRun(NON_DFLT_CACHE_NAME, key, new CheckRunnable(i, key));
        }
    }

    /**
     * JUnit. Same as {@link #testAffinityRun()} but for {@code affinityCall}.
     *
     * @throws Exception If failed.
     */
    public void testAffinityCall() throws Exception {
        for (int i = 0; i < KEY_CNT; i++)
            grid(0).compute().affinityCall(NON_DFLT_CACHE_NAME, i, new CheckCallable(i, i));
    }

    /**
     * JUnit. Same as {@link #testAffinityRunComplexKey()} but for {@code affinityCall}.
     *
     * @throws Exception If failed.
     */
    public void testAffinityCallComplexKey() throws Exception {
        for (int i = 0; i < KEY_CNT; i++) {
            final AffinityTestKey key = new AffinityTestKey(i);

            grid(0).compute().affinityCall(NON_DFLT_CACHE_NAME, i, new CheckCallable(i, key));
            grid(0).compute().affinityCall(NON_DFLT_CACHE_NAME, key, new CheckCallable(i, key));
        }
    }

    /**
     * Test key whose affinity is driven by the annotated {@code affKey} field.
     */
    protected static class AffinityTestKey {
        /** Affinity key. */
        @AffinityKeyMapped
        private final int affKey;

        /**
         * @param affKey Affinity key.
         */
        private AffinityTestKey(int affKey) {
            this.affKey = affKey;
        }

        /**
         * @return Affinity key.
         */
        public int affinityKey() {
            return affKey;
        }
    }

    /**
     * Test runnable that asserts it executes on the node owning both the affinity
     * key and the key.
     */
    private static class CheckRunnable extends CAX {
        /** Affinity key. */
        private final Object affKey;

        /** Key. */
        private final Object key;

        /** Injected local Ignite instance. */
        @IgniteInstanceResource
        private Ignite ignite;

        /** Injected job context. */
        @JobContextResource
        private ComputeJobContext jobCtx;

        /**
         * @param affKey Affinity key.
         * @param key Key.
         */
        private CheckRunnable(Object affKey, Object key) {
            this.affKey = affKey;
            this.key = key;
        }

        /** {@inheritDoc} */
        // NOTE(review): checks mapping via the default cache's affinity (affinity(null))
        // even though the job was routed via NON_DFLT_CACHE_NAME — presumably valid only
        // because both caches are configured identically here; confirm before reusing.
        @Override public void applyx() throws IgniteCheckedException {
            assert ignite.cluster().localNode().id().equals(ignite.affinity(null).mapKeyToNode(affKey).id());
            assert ignite.cluster().localNode().id().equals(ignite.affinity(null).mapKeyToNode(key).id());
        }
    }

    /**
     * Test callable that throws {@link ComputeJobFailoverException} until a target
     * attempt count is reached, tracking attempts via a job-context attribute.
     */
    private static class FailedCallable implements IgniteCallable<Object> {
        /** */
        private static final long serialVersionUID = 0L;

        /** Job-context attribute key holding the current attempt number. */
        private static final String ATTR_ATTEMPT = "Attempt";

        /** Injected local Ignite instance. */
        @IgniteInstanceResource
        private Ignite ignite;

        /** Injected job context (survives failover, carrying the attempt counter). */
        @JobContextResource
        private ComputeJobContext jobCtx;

        /** Key. */
        private final Object key;

        /** Call attempts. */
        private final Integer callAttempt;

        /**
         * @param key Key.
         * @param callAttempt Call attempts.
         */
        public FailedCallable(Object key, Integer callAttempt) {
            this.key = key;
            this.callAttempt = callAttempt;
        }

        /** {@inheritDoc} */
        @Override public Object call() throws IgniteCheckedException {
            Integer attempt = jobCtx.getAttribute(ATTR_ATTEMPT);

            if (attempt == null)
                attempt = 1;

            // Failover must still route to the primary node for the key.
            assertEquals(ignite.affinity(NON_DFLT_CACHE_NAME).mapKeyToNode(key), ignite.cluster().localNode());

            jobCtx.setAttribute(ATTR_ATTEMPT, attempt + 1);

            if (attempt < callAttempt)
                throw new ComputeJobFailoverException("Failover exception.");
            else
                return attempt;
        }
    }

    /**
     * Test runnable counterpart of {@link FailedCallable}: fails over until the
     * target attempt count is reached.
     */
    private static class FailedRunnable implements IgniteRunnable {
        /** */
        private static final long serialVersionUID = 0L;

        /** Job-context attribute key holding the current attempt number. */
        private static final String ATTR_ATTEMPT = "Attempt";

        /** Injected local Ignite instance. */
        @IgniteInstanceResource
        private Ignite ignite;

        /** Injected job context (survives failover, carrying the attempt counter). */
        @JobContextResource
        private ComputeJobContext jobCtx;

        /** Key. */
        private final Object key;

        /** Call attempts. */
        private final Integer callAttempt;

        /**
         * @param key Key.
         * @param callAttempt Call attempts.
         */
        public FailedRunnable(Object key, Integer callAttempt) {
            this.key = key;
            this.callAttempt = callAttempt;
        }

        /** {@inheritDoc} */
        @Override public void run() {
            Integer attempt = jobCtx.getAttribute(ATTR_ATTEMPT);

            if (attempt == null)
                attempt = 1;

            // Failover must still route to the primary node for the key.
            assertEquals(ignite.affinity(NON_DFLT_CACHE_NAME).mapKeyToNode(key), ignite.cluster().localNode());

            jobCtx.setAttribute(ATTR_ATTEMPT, attempt + 1);

            if (attempt < callAttempt)
                throw new ComputeJobFailoverException("Failover exception.");
            else
                assertEquals(callAttempt, attempt);
        }
    }

    /**
     * Test callable that asserts it executes on the node owning both the affinity
     * key and the key; always returns {@code null}.
     */
    private static class CheckCallable implements IgniteCallable<Object> {
        /** Affinity key. */
        private final Object affKey;

        /** Key. */
        private final Object key;

        /** Injected local Ignite instance. */
        @IgniteInstanceResource
        private Ignite ignite;

        /** Injected job context. */
        @JobContextResource
        private ComputeJobContext jobCtx;

        /**
         * @param affKey Affinity key.
         * @param key Key.
         */
        private CheckCallable(Object affKey, Object key) {
            this.affKey = affKey;
            this.key = key;
        }

        /** {@inheritDoc} */
        // NOTE(review): like CheckRunnable, validates against the default cache's
        // affinity; relies on both caches sharing identical affinity configuration.
        @Override public Object call() throws IgniteCheckedException {
            assert ignite.cluster().localNode().id().equals(ignite.affinity(null).mapKeyToNode(affKey).id());
            assert ignite.cluster().localNode().id().equals(ignite.affinity(null).mapKeyToNode(key).id());

            return null;
        }
    }
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */

package com.facebook.buck.python;

import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.file.WriteFile;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.model.FlavorDomainException;
import com.facebook.buck.model.HasSourceUnderTest;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.Label;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.macros.LocationMacroExpander;
import com.facebook.buck.rules.macros.MacroExpander;
import com.facebook.buck.rules.macros.MacroHandler;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import java.nio.file.Path;
import java.nio.file.Paths;

/**
 * {@link Description} for {@code python_test} rules: packages the test sources,
 * a generated test-modules list, and a shared test-main into a python binary,
 * then wraps that binary in a {@link PythonTest} rule.
 */
public class PythonTestDescription implements Description<PythonTestDescription.Arg> {

  private static final BuildRuleType TYPE = BuildRuleType.of("python_test");

  /** Flavor appended to the test target to name the internal binary rule. */
  private static final Flavor BINARY_FLAVOR = ImmutableFlavor.of("binary");

  /** Handler used to expand {@code $(location ...)} macros in the test env. */
  private static final MacroHandler MACRO_HANDLER =
      new MacroHandler(
          ImmutableMap.<String, MacroExpander>of(
              "location", new LocationMacroExpander()));

  private final PythonBinaryDescription binaryDescription;
  private final PythonBuckConfig pythonBuckConfig;
  private final FlavorDomain<PythonPlatform> pythonPlatforms;
  private final CxxPlatform defaultCxxPlatform;
  private final FlavorDomain<CxxPlatform> cxxPlatforms;

  public PythonTestDescription(
      PythonBinaryDescription binaryDescription,
      PythonBuckConfig pythonBuckConfig,
      FlavorDomain<PythonPlatform> pythonPlatforms,
      CxxPlatform defaultCxxPlatform,
      FlavorDomain<CxxPlatform> cxxPlatforms) {
    this.binaryDescription = binaryDescription;
    this.pythonBuckConfig = pythonBuckConfig;
    this.pythonPlatforms = pythonPlatforms;
    this.defaultCxxPlatform = defaultCxxPlatform;
    this.cxxPlatforms = cxxPlatforms;
  }

  @Override
  public BuildRuleType getBuildRuleType() {
    return TYPE;
  }

  @Override
  public Arg createUnpopulatedConstructorArg() {
    return new Arg();
  }

  /** @return the module name under which the shared test-main is packaged. */
  @VisibleForTesting
  protected static Path getTestMainName() {
    return Paths.get("__test_main__.py");
  }

  /** @return the module name of the generated test-modules list. */
  @VisibleForTesting
  protected static Path getTestModulesListName() {
    return Paths.get("__test_modules__.py");
  }

  /** @return the gen-path location where the test-modules list file is written. */
  @VisibleForTesting
  protected static Path getTestModulesListPath(BuildTarget buildTarget) {
    return BuildTargets.getGenPath(buildTarget, "%s").resolve(getTestModulesListName());
  }

  /**
   * @return the flavored target used for the internal python binary rule;
   *     requires the incoming target to be unflavored.
   */
  @VisibleForTesting
  protected static BuildTarget getBinaryBuildTarget(BuildTarget target) {
    return BuildTargets.createFlavoredBuildTarget(target.checkUnflavored(), BINARY_FLAVOR);
  }

  /**
   * Create the contents of a python source file that just contains a list of
   * the given test modules.
   */
  private static String getTestModulesListContents(ImmutableSet<String> modules) {
    // Use a StringBuilder: repeated String "+=" in a loop is accidentally O(n^2).
    StringBuilder contents = new StringBuilder("TEST_MODULES = [\n");
    for (String module : modules) {
      contents.append(String.format(" \"%s\",\n", module));
    }
    contents.append("]");
    return contents.toString();
  }

  /**
   * Return a {@link BuildRule} that constructs the source file which contains the list
   * of test modules this python test rule will run. Setting up a separate build rule
   * for this allows us to use the existing python binary rule without changes to account
   * for the build-time creation of this file.
   */
  private static BuildRule createTestModulesSourceBuildRule(
      BuildRuleParams params,
      BuildRuleResolver resolver,
      Path outputPath,
      ImmutableSet<String> testModules) {

    // Modify the build rule params to change the target, type, and remove all deps.
    BuildRuleParams newParams = params.copyWithChanges(
        BuildTargets.createFlavoredBuildTarget(
            params.getBuildTarget().checkUnflavored(),
            ImmutableFlavor.of("test_module")),
        Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of()),
        Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of()));

    String contents = getTestModulesListContents(testModules);

    return new WriteFile(
        newParams,
        new SourcePathResolver(resolver),
        contents,
        outputPath,
        /* executable */ false);
  }

  @Override
  public <A extends Arg> PythonTest createBuildRule(
      TargetGraph targetGraph,
      final BuildRuleParams params,
      final BuildRuleResolver resolver,
      final A args) throws NoSuchBuildTargetException {

    // Extract the platform from the flavor, falling back to the default platform if none are
    // found.
    PythonPlatform pythonPlatform;
    try {
      pythonPlatform = pythonPlatforms
          .getValue(params.getBuildTarget().getFlavors())
          .or(pythonPlatforms.getValue(
              args.platform
                  .transform(Flavor.TO_FLAVOR)
                  .or(pythonPlatforms.getFlavors().iterator().next())));
    } catch (FlavorDomainException e) {
      throw new HumanReadableException("%s: %s", params.getBuildTarget(), e.getMessage());
    }

    // Extract the platform from the flavor, falling back to the default platform if none are
    // found.
    CxxPlatform cxxPlatform;
    try {
      cxxPlatform = cxxPlatforms
          .getValue(ImmutableSet.copyOf(params.getBuildTarget().getFlavors()))
          .or(defaultCxxPlatform);
    } catch (FlavorDomainException e) {
      throw new HumanReadableException("%s: %s", params.getBuildTarget(), e.getMessage());
    }

    SourcePathResolver pathResolver = new SourcePathResolver(resolver);

    // Merge plain and platform-specific sources/resources into module maps.
    Path baseModule = PythonUtil.getBasePath(params.getBuildTarget(), args.baseModule);
    ImmutableMap<Path, SourcePath> srcs = ImmutableMap.<Path, SourcePath>builder()
        .putAll(
            PythonUtil.toModuleMap(
                params.getBuildTarget(),
                pathResolver,
                "srcs",
                baseModule,
                args.srcs.asSet()))
        .putAll(
            PythonUtil.toModuleMap(
                params.getBuildTarget(),
                pathResolver,
                "platformSrcs",
                baseModule,
                args.platformSrcs.get()
                    .getMatchingValues(pythonPlatform.getFlavor().toString())))
        .build();
    ImmutableMap<Path, SourcePath> resources = ImmutableMap.<Path, SourcePath>builder()
        .putAll(
            PythonUtil.toModuleMap(
                params.getBuildTarget(),
                pathResolver,
                "resources",
                baseModule,
                args.resources.asSet()))
        .putAll(
            PythonUtil.toModuleMap(
                params.getBuildTarget(),
                pathResolver,
                "platformResources",
                baseModule,
                args.platformResources.get()
                    .getMatchingValues(pythonPlatform.getFlavor().toString())))
        .build();

    // Convert the passed in module paths into test module names.
    ImmutableSet.Builder<String> testModulesBuilder = ImmutableSet.builder();
    for (Path name : srcs.keySet()) {
      testModulesBuilder.add(
          PythonUtil.toModuleName(params.getBuildTarget(), name.toString()));
    }
    ImmutableSet<String> testModules = testModulesBuilder.build();

    // Construct a build rule to generate the test modules list source file and
    // add it to the build.
    BuildRule testModulesBuildRule = createTestModulesSourceBuildRule(
        params,
        resolver,
        getTestModulesListPath(params.getBuildTarget()),
        testModules);
    resolver.addToIndex(testModulesBuildRule);

    // Build up the list of everything going into the python test.
    PythonPackageComponents testComponents = PythonPackageComponents.of(
        ImmutableMap
            .<Path, SourcePath>builder()
            .put(
                getTestModulesListName(),
                new BuildTargetSourcePath(testModulesBuildRule.getBuildTarget()))
            .put(
                getTestMainName(),
                new PathSourcePath(
                    params.getProjectFilesystem(),
                    pythonBuckConfig.getPathToTestMain()))
            .putAll(srcs)
            .build(),
        resources,
        ImmutableMap.<Path, SourcePath>of(),
        ImmutableSet.<SourcePath>of(),
        args.zipSafe);
    PythonPackageComponents allComponents = PythonUtil.getAllComponents(
        params,
        testComponents,
        pythonPlatform,
        cxxPlatform);

    // Build the PEX using a python binary rule with the minimum dependencies.
    BuildRuleParams binaryParams = params.copyWithChanges(
        getBinaryBuildTarget(params.getBuildTarget()),
        Suppliers.ofInstance(PythonUtil.getDepsFromComponents(pathResolver, allComponents)),
        Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of()));
    PythonBinary binary = binaryDescription.createPackageRule(
        binaryParams,
        resolver,
        pathResolver,
        pythonPlatform,
        cxxPlatform,
        PythonUtil.toModuleName(params.getBuildTarget(), getTestMainName().toString()),
        allComponents,
        args.buildArgs.or(ImmutableList.<String>of()));
    resolver.addToIndex(binary);

    // Supplier which expands macros in the passed in test environment.
    Supplier<ImmutableMap<String, String>> testEnv =
        new Supplier<ImmutableMap<String, String>>() {
          @Override
          public ImmutableMap<String, String> get() {
            return ImmutableMap.copyOf(
                Maps.transformValues(
                    args.env.or(ImmutableMap.<String, String>of()),
                    MACRO_HANDLER.getExpander(
                        params.getBuildTarget(),
                        params.getCellRoots(),
                        resolver,
                        params.getProjectFilesystem())));
          }
        };

    // Generate and return the python test rule, which depends on the python binary rule above.
    return new PythonTest(
        params.copyWithDeps(
            Suppliers.ofInstance(
                ImmutableSortedSet.<BuildRule>naturalOrder()
                    .addAll(params.getDeclaredDeps().get())
                    .add(binary)
                    .build()),
            params.getExtraDeps()),
        pathResolver,
        testEnv,
        binary,
        ImmutableSortedSet.copyOf(Sets.difference(params.getDeps(), binaryParams.getDeps())),
        resolver.getAllRules(args.sourceUnderTest.or(ImmutableSortedSet.<BuildTarget>of())),
        args.labels.or(ImmutableSet.<Label>of()),
        args.contacts.or(ImmutableSet.<String>of()));
  }

  /** Constructor arg for {@code python_test} rules. */
  @SuppressFieldNotInitialized
  public static class Arg extends PythonLibraryDescription.Arg implements HasSourceUnderTest {
    public Optional<ImmutableSet<String>> contacts;
    public Optional<ImmutableSet<Label>> labels;
    public Optional<ImmutableSortedSet<BuildTarget>> sourceUnderTest;
    public Optional<String> platform;
    public Optional<ImmutableList<String>> buildArgs;
    public Optional<ImmutableMap<String, String>> env;

    @Override
    public ImmutableSortedSet<BuildTarget> getSourceUnderTest() {
      return sourceUnderTest.get();
    }
  }
}
package com.chillenious.common.db.sync; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.CacheStats; import com.chillenious.common.ShutdownHooks; import com.chillenious.common.WithShutdown; import com.chillenious.common.util.Duration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; /** * Cache that works with {@link com.chillenious.common.db.sync.PersistentObject persistent objects} (it is known that these * objects are backed by persistent storage), and supports a mechanism for getting fresh data * pushed to it. * <p/> * Typical caching mechanisms expire elements and load from secondary storage when clients * try to retrieve these elements, resulting in a slower lookup. For this case though, fast * reads are important, which is why instead this cache relies on it's contents to be refreshed * periodically with just minimal interference (limited locks) for reads. * <p/> * Guava caches provide a mechanism for asynchronously refreshing contents of the cache * (see {@link CacheLoader#reload(Object, Object)} and friends) that comes close to what we * need here. However, this cache wouldn't load new values, and the refresher mechanism we have * here is more generic, and hence can be used outside of caches (e.g. for ETL-like problems that * also need to update in-memory structures). 
* <p/> * * @param <O> type of the elements in the cache */ public class PersistentObjectCache<O extends PersistentObject> implements WithShutdown, PersistentObjectLookup<O> { private static final Logger log = LoggerFactory.getLogger(PersistentObjectCache.class); /** * Listens in on refreshes and updates the internal cache */ final class RefreshListener extends DataRefreshListener<O> { @Override protected void onEvent(DataRefreshEvent<O> evt) { if (evt instanceof DataDeletedEvent) { Object id = evt.getId(); cache.invalidate(id); if (cacheListener != null) { cacheListener.afterDeleted(id); } } else if (evt instanceof DataCreatedEvent) { O object = ((DataCreatedEvent<O>) evt).getObject(); cache.put(evt.getId(), object); if (cacheListener != null) { cacheListener.afterPut(object); } } else if (evt instanceof DataChangedEvent) { O object = ((DataChangedEvent<O>) evt).getObject(); cache.put(evt.getId(), object); if (cacheListener != null) { cacheListener.afterPut(object); } } // else ignore; it's probably a special purpose event like req/ ack } } final class Refresher implements Runnable { @Override public void run() { try { log.debug(String.format("do refresh run for %s", name)); try { RefreshResults results = refresher.refresh(); if (results.getNumberRecordsFound() > 0) { if (log.isDebugEnabled()) { log.debug(String.format("cache %s refreshed: %s", name, results)); } } } catch (Exception e) { log.error(String.format("problem with cache refresh for %s: %s", name, e.getMessage()), e); } log.debug(String.format("exiting refresh daemon %s", this)); } catch (Exception e) { log.error(String.format( "problem with refresh run: %s; exiting", e.getMessage()), e); } } } protected final String name; // mostly for debugging, monitoring protected final Cache<Object, O> cache; protected final DataRefresher<O> refresher; protected final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1); PersistentObjectCacheListener<O> cacheListener; // listener, for testing 
purposes protected final Map<String, Sorter<O>> sorters = new ConcurrentHashMap<>(); protected final Map<String, Indexer<O, ?>> indexers = new ConcurrentHashMap<>(); protected final ExecutorService seedSortsExecutor = Executors.newSingleThreadExecutor(); /** * Construct. This instance will NOT use a refresher (or rather, it * will use a {@link com.chillenious.common.db.sync.NoopRefresher dummy} with no refreshes scheduled). * * @param name name of the cache, mainly for debugging and monitoring * @param shutdownHooks shutdown hook registry * @param recordStats whether to record record stats (which will result * in a slight overhead of working with the cache, but * might give you interesting usage stats) * @param startWithRefresh whether to do a refresh when the instance is created (and hence * blocking the thread that is creating this instance, but also * guaranteeing that after creation, the cache will have a good * initial filling). If this is true, a refresh will be done * immediately, but the periodic refresh will be scheduled with an * initial delay that is equal to the delay between refreshes. If this is * false, construction will not block, but the refresh daemon will * be run immediately. */ public PersistentObjectCache( String name, ShutdownHooks shutdownHooks, boolean recordStats, boolean startWithRefresh) { this(name, shutdownHooks, null, null, recordStats, startWithRefresh); } /** * Construct. 
Some arguments are optional: * <ul> * <li>refresher - if this is null, a {@link com.chillenious.common.db.sync.NoopRefresher dummy} will * be used and no refreshes will be scheduled</li> * <li>refreshInterval - is this is null, no refreshes will be * scheduled (but an initial refresh will be done)</li> * </ul> * * @param name name of the cache, mainly for debugging and monitoring * @param shutdownHooks shutdown hook registry * @param refresher refresher this cache should use to keep * track of the backend; if null, a dummy will * be used and no refreshes will be scheduled * @param refreshInterval interval for refreshes; if null, no refreshes will be * done aside from the initial refresh * @param recordStats whether to record record stats (which will result * in a slight overhead of working with the cache, but * might give you interesting usage stats) * @param startWithRefresh whether to do a refresh when the instance is created (and hence * blocking the thread that is creating this instance, but also * guaranteeing that after creation, the cache will have a good * initial filling). If this is true, a refresh will be done * immediately, but the periodic refresh will be scheduled with an * initial delay that is equal to the delay between refreshes. If this is * false, construction will not block, but the refresh daemon will * be run immediately. */ public PersistentObjectCache( String name, ShutdownHooks shutdownHooks, DataRefresher<O> refresher, Duration refreshInterval, boolean recordStats, boolean startWithRefresh) { this.name = Strings.isNullOrEmpty(name) ? this.toString() : name; log.info("creating cache " + this.name); if (recordStats) { this.cache = CacheBuilder.<Long, O>newBuilder().recordStats().build(); } else { this.cache = CacheBuilder.<Long, O>newBuilder().build(); } this.refresher = refresher != null ? refresher : new NoopRefresher<O>(); shutdownHooks.add(this); initRefresher(refreshInterval, startWithRefresh); } /* * Initialize the refresher. 
*/ private void initRefresher( Duration refreshInterval, boolean startWithRefresh) { refresher.addListener(new RefreshListener()); // the listener will update the cache Duration refreshDaemonDelay; if (startWithRefresh) { refreshDaemonDelay = refreshInterval; log.info(String.format("start refresher for %s", name)); RefreshResults results = refresher.refresh(); // load initial data log.info(String.format("initialized %s with %,d objects, in %,d milliseconds", name, (results.getNumberCreated() + results.getNumberChanged()), results.getMillisecondsItTook())); } else { refreshDaemonDelay = Duration.milliseconds(0); } if (refreshInterval != null) { scheduler.scheduleAtFixedRate( new Refresher(), refreshDaemonDelay.getMilliseconds(), refreshInterval.getMilliseconds(), TimeUnit.MILLISECONDS); } log.info(String.format("cache %s ready", name)); } /** * Add a index for objects in this cache so that elements in the cache * can be looked up using a particular key. Adding an index will result * in the cache keeping track of the element in a dedicated data structure, * which increases the memory and load during writes. * * @param id sort id * @param factory factory for creating lookup keys * @return future on the task of initial creation of the set; you can decide to * wait for that filling to be done in case you need the results right away, * or go on with execution if you don't. 
It returns the number of rows it sorted */ public <T extends Comparable> Future<Integer> addIndex( final String id, IndexKeyFactory<O, T> factory) { if (id == null) { throw new NullPointerException(); } if (factory == null) { throw new NullPointerException(); } synchronized (indexers) { final Indexer<O, T> indexer = new Indexer<>(factory); Indexer<O, ?> previous = indexers.put(id, indexer); if (previous != null) { log.debug(String.format("replaced sort id %s for cache %s", id, name)); previous.unbind(refresher); } // add listener that updates the sorted set when data changes indexer.bind(refresher); log.info(String.format("added index %s (key factory: %s) for cache %s", id, factory, name)); // and finally, seed sort set in a different thread and return the future of the work return seedSortsExecutor.submit(new Callable<Integer>() { @Override public Integer call() { try { AtomicInteger count = new AtomicInteger(0); for (O object : cache.asMap().values()) { indexer.put(object); count.incrementAndGet(); } return count.get(); } catch (Exception e) { String msg = String.format("problem adding index %s to cache %s: %s", id, name, e.getMessage()); log.error(msg, e); throw new IllegalStateException(e); } } }); } } /** * Gets indexer for the provided sort id. * * @param id id of the indexer * @return indexer or null if not found * @throws IllegalArgumentException when no indexer with the provided id was found */ @SuppressWarnings("unchecked") protected <T> Indexer<O, T> getIndexer(String id) { if (id == null) { throw new NullPointerException(); } Indexer indexer = indexers.get(id); if (indexer == null) { throw new IllegalArgumentException(String.format("no indexer found with id %s (cache %s)", id, name)); } return indexer; } /** * Gets set of matches for indexed field based on the indexer (id) and the index value. 
* * @param indexId id of the index * @param indexVal value of the index to fetch * @param <T> type of the indev value * @return set of matches, possibly empty, never null */ public <T> Set<O> getIndexed(String indexId, T indexVal) { return getIndexer(indexId).get(indexVal); } /** * Gets single matches for indexed field based on the indexer (id) and the index value. * * @param indexId id of the index * @param indexVal value of the index to fetch * @param <T> type of the indev value * @return single match, possibly null */ @Nullable public <T> O getIndexedSingle(String indexId, T indexVal) { Set<O> s = getIndexer(indexId).get(indexVal); Iterator<O> iterator = s.iterator(); if (iterator.hasNext()) { O object = iterator.next(); if (iterator.hasNext()) { throw new IllegalStateException( String.format("multiple matches for index %s, key %s (cache %s)", indexId, indexVal, name) ); } return object; } return null; } /** * Add a sort for objects in this cache so that elements in the cache * can be optimized for particular iteration. Adding a sort will result * in the cache keeping track of the element in a dedicated data structure, * which increases the memory and load during writes. * * @param id sort id * @param factory factory for creating sort keys * @return future on the task of initial creation of the set; you can decide to * wait for that filling to be done in case you need the results right away, * or go on with execution if you don't. 
It returns the number of rows it sorted */ public <T extends Comparable> Future<Integer> addSort( final String id, SortKeyFactory<O, T> factory) { if (id == null) { throw new NullPointerException(); } if (factory == null) { throw new NullPointerException(); } synchronized (sorters) { final Sorter<O> sorter = new Sorter<>(factory); Sorter<O> previous = sorters.put(id, sorter); if (previous != null) { log.info(String.format("replaced sort id %s for cache %s", id, name)); previous.unbind(refresher); } // add listener that updates the sorted set when data changes sorter.bind(refresher); log.info(String.format("added sort %s (key factory: %s) for cache %s", id, factory, name)); // and finally, seed sort set in a different thread and return the future of the work return seedSortsExecutor.submit(new Callable<Integer>() { @Override public Integer call() { try { AtomicInteger count = new AtomicInteger(0); for (O object : cache.asMap().values()) { sorter.put(object); count.incrementAndGet(); } return count.get(); } catch (Exception e) { String msg = String.format("problem adding sort %s to cache %s: %s", id, name, e.getMessage()); log.error(msg, e); throw new IllegalStateException(e); } } }); } } /** * Gets sorter for the provided sort id. * * @param id id of the sort * @return sorter or null if not found * @throws IllegalArgumentException when no sorter with the provided id was found */ protected Sorter<O> getSorter(String id) { if (id == null) { throw new NullPointerException(); } Sorter<O> sorter = sorters.get(id); if (sorter == null) { throw new IllegalArgumentException(String.format( "no sorter found with id %s in cache %s", id, name)); } return sorter; } /** * Gets an iterator for the provided sort id. 
* * @param id id of the sort * @return iterator or null if not found */ @Nullable public Collection<O> values(String id) { Sorter<O> sorter = getSorter(id); if (sorter != null) { return sorter.values(); } else { return null; } } /** * Get unmodifiable map (id -> persistent object) over all values in the cache. * * @return map with ids and objects */ public Map<Object, O> asMap() { return Collections.unmodifiableMap(cache.asMap()); } /** * Get all values in the cache (in unspecified order... if you need order, create * a {@link #addSort(String, com.chillenious.common.db.sync.SortKeyFactory) sort}). * * @return collection of all values */ public Collection<O> values() { return cache.asMap().values(); } /** * Get set of all ids values in the cache (in unspecified order). * * @return set of all ids */ public Set<Object> ids() { return cache.asMap().keySet(); } /** * Returns the value associated with the provided id in this cache, or {@code null} * if there is no cached value for it. */ @Nullable public O get(Object id) { return cache.getIfPresent(id); } /** * Put object in cache directly. * <p/> * WARNING: this method is meant primarily for testing, typical use is to rely * on the updater to work on the cache when new data comes in from the backend * * @param object object to put in the cache */ public void put(O object) { synchronized (cache) { cache.put(object.getId(), object); for (Sorter<O> sorter : sorters.values()) { sorter.put(object); } for (Indexer<O, ?> indexer : indexers.values()) { indexer.put(object); } } } /** * Invalidate object with given key in cache directly. 
* WARNING: this method is meant primarily for testing, typical use is to rely * on the updater to work on the cache when new data comes in from the backend * * @param id id of the object to remove */ public void remove(Object id) { synchronized (cache) { cache.invalidate(id); for (Sorter<O> sorter : sorters.values()) { sorter.remove(id); } for (Indexer<O, ?> indexer : indexers.values()) { indexer.remove(id); } } } /** * Gets {@link CacheStats stats} for this cache. * * @return gets cache stats. * @throws UnsupportedOperationException if the cache is not initialized to gather stats */ public CacheStats stats() { return cache.stats(); } /** * @return the approximate number of entries in this cache. */ public long size() { return cache.size(); } /** * Manually trigger refresh. * * @return refresh results */ public RefreshResults refresh() { return refresher.refresh(); } @SuppressWarnings("unchecked") @Override public O[] lookup(Object... ids) { Preconditions.checkNotNull(ids); O[] objects = (O[]) new Object[ids.length]; for (int i = 0, l = ids.length; i < l; i++) { objects[i] = get(ids[i]); } return objects; } /** * Shut this cache down. */ public void shutdown() { log.debug("shutting down cache " + name); scheduler.shutdownNow(); seedSortsExecutor.shutdownNow(); cache.invalidateAll(); } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.refactoring.typeMigration.ui;

import com.intellij.CommonBundle;
import com.intellij.find.FindSettings;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.ide.util.scopeChooser.ScopeChooserCombo;
import com.intellij.java.JavaBundle;
import com.intellij.java.refactoring.JavaRefactoringBundle;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.LabeledComponent;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.changeSignature.ChangeSignatureUtil;
import com.intellij.refactoring.typeMigration.TypeMigrationLabeler;
import com.intellij.refactoring.typeMigration.TypeMigrationProcessor;
import com.intellij.refactoring.typeMigration.TypeMigrationRules;
import com.intellij.refactoring.ui.RefactoringDialog;
import com.intellij.refactoring.ui.TypeSelectorManagerImpl;
import com.intellij.ui.EditorComboBox;
import com.intellij.util.Function;
import com.intellij.util.Functions;
import com.intellij.util.VisibilityUtil;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;

/**
 * Dialog driving the "Type Migration" refactoring: collects the search scope
 * (and, for the single-element variant, the target type) and launches a
 * {@link TypeMigrationProcessor} over the selected roots.
 *
 * @author anna
 */
public abstract class TypeMigrationDialog extends RefactoringDialog {
  private static final Logger LOG = Logger.getInstance(TypeMigrationDialog.class);

  // Elements whose type is being migrated (fields, methods, parameters, ...).
  protected final PsiElement[] myRoots;
  // Lazily created in doAction() when not supplied by the caller.
  private TypeMigrationRules myRules;
  private final ScopeChooserCombo myScopeChooserCombo;

  public TypeMigrationDialog(@NotNull Project project,
                             PsiElement @NotNull [] roots,
                             @Nullable TypeMigrationRules rules) {
    super(project, false);
    myRoots = roots;
    myRules = rules;
    // Pre-select the scope the user chose last time (shared "find" setting).
    myScopeChooserCombo = new ScopeChooserCombo(project, false, true, FindSettings.getInstance().getDefaultScopeName());
    Disposer.register(myDisposable, myScopeChooserCombo);
    // Re-validate OK button whenever the selected scope changes.
    myScopeChooserCombo.getChildComponent().addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        validateButtons();
      }
    });
    setTitle(JavaRefactoringBundle.message("type.migration.action.name"));
  }

  @Override
  protected void doAction() {
    // A scope is mandatory; bail out with an error dialog rather than running unbounded.
    if (myScopeChooserCombo.getSelectedScope() == null) {
      Messages.showErrorDialog(JavaRefactoringBundle.message("type.migration.no.scope.warning.message"),
                               CommonBundle.getErrorTitle());
      return;
    }
    // Remember the chosen scope for the next invocation.
    FindSettings.getInstance().setDefaultScopeName(myScopeChooserCombo.getSelectedScopeName());
    if (myRules == null) {
      myRules = new TypeMigrationRules(getProject());
      myRules.setBoundScope(myScopeChooserCombo.getSelectedScope());
    }
    invokeRefactoring(new TypeMigrationProcessor(myProject, myRoots, getMigrationTypeFunction(), myRules, true));
  }

  /** Maps each root element to the type it should be migrated to. */
  @NotNull
  protected abstract Function<? super PsiElement, ? extends PsiType> getMigrationTypeFunction();

  // Hook for subclasses to insert extra UI (e.g. the target-type editor) above the scope chooser.
  protected void appendMigrationTypeEditor(JPanel panel, GridBagConstraints cs) {
  }

  @Override
  protected JComponent createCenterPanel() {
    final JPanel panel = new JPanel(new GridBagLayout());
    final GridBagConstraints gc = new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1, 0,
                                                         GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL,
                                                         JBUI.insets(5, 5, 0, 0), 0, 0);
    appendMigrationTypeEditor(panel, gc);
    LabeledComponent<ScopeChooserCombo> scopeChooserComponent = new LabeledComponent<>();
    scopeChooserComponent.setComponent(myScopeChooserCombo);
    scopeChooserComponent.setText(JavaRefactoringBundle.message("type.migration.choose.scope.title"));
    panel.add(scopeChooserComponent, gc);
    return panel;
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    return myScopeChooserCombo;
  }

  @Override
  protected String getHelpId() {
    return "reference.typeMigrationDialog";
  }

  /**
   * Variant used when the caller already knows the target type for every root;
   * the dialog only asks for the scope.
   */
  public static class MultipleElements extends TypeMigrationDialog {
    private final Function<? super PsiElement, ? extends PsiType> myMigrationTypeFunction;

    public MultipleElements(@NotNull Project project,
                            PsiElement @NotNull [] roots,
                            @NotNull Function<? super PsiElement, ? extends PsiType> migrationTypeFunction,
                            @NotNull TypeMigrationRules rules) {
      super(project, roots, rules);
      myMigrationTypeFunction = migrationTypeFunction;
      init();
    }

    @Override
    protected @NotNull Function<? super PsiElement, ? extends PsiType> getMigrationTypeFunction() {
      return myMigrationTypeFunction;
    }
  }

  /**
   * Variant for a single root element: the user types the target type into an
   * editor combo box seeded with plausible candidate types.
   */
  public static class SingleElement extends TypeMigrationDialog {
    // Code fragment holding the user-entered target type (parsed by the PSI).
    private final PsiTypeCodeFragment myTypeCodeFragment;
    private final EditorComboBox myToTypeEditor;

    public SingleElement(@NotNull Project project, PsiElement @NotNull [] roots) {
      super(project, roots, null);
      LOG.assertTrue(roots.length > 0);
      final PsiType rootType = getRootType();
      final String text = rootType != null ? rootType.getCanonicalText(true) : "";
      int flags = 0;
      PsiElement root = roots[0];
      if (root instanceof PsiParameter) {
        final PsiElement scope = ((PsiParameter)root).getDeclarationScope();
        if (scope instanceof PsiMethod) {
          // Method parameters may become varargs.
          flags |= JavaCodeFragmentFactory.ALLOW_ELLIPSIS;
        }
        else if (scope instanceof PsiCatchSection && PsiUtil.getLanguageLevel(root).isAtLeast(LanguageLevel.JDK_1_7)) {
          // Catch parameters may use multi-catch (A | B) from Java 7 on.
          flags |= JavaCodeFragmentFactory.ALLOW_DISJUNCTION;
        }
      }
      flags |= JavaCodeFragmentFactory.ALLOW_VOID;
      myTypeCodeFragment = JavaCodeFragmentFactory.getInstance(project).createTypeCodeFragment(text, root, true, flags);
      final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(project);
      final Document document = documentManager.getDocument(myTypeCodeFragment);
      assert document != null;
      myToTypeEditor = new EditorComboBox(document, project, JavaFileType.INSTANCE);
      // Seed the dropdown with candidate types; fall back to the current text only.
      final String[] types = getValidTypes(project, root);
      myToTypeEditor.setHistory(types != null ? types : new String[]{document.getText()});
      document.addDocumentListener(new DocumentListener() {
        @Override
        public void documentChanged(@NotNull final DocumentEvent e) {
          // Keep the PSI fragment in sync with edits, then re-validate OK.
          documentManager.commitDocument(document);
          validateButtons();
        }
      });
      init();
    }

    @Override
    protected void canRun() throws ConfigurationException {
      super.canRun();
      // Reject syntactically/semantically invalid target types up front.
      if (!checkType(getMigrationType())) throw new ConfigurationException(
        JavaBundle.message("type.migration.dialog.message.invalid.type",
                           StringUtil.escapeXmlEntities(myTypeCodeFragment.getText())));
      // 'void' is only meaningful for method return types, not variables.
      if (isVoidVariableMigration()) throw new ConfigurationException(
        JavaBundle.message("type.migration.dialog.message.void.not.applicable"));
    }

    @Override
    public JComponent getPreferredFocusedComponent() {
      return myToTypeEditor;
    }

    @Override
    protected void appendMigrationTypeEditor(JPanel panel, GridBagConstraints gc) {
      final PsiType type = getRootType();
      final String typeText = type != null ? type.getPresentableText() : "<unknown>";
      panel.add(new JLabel(JavaRefactoringBundle.message("type.migration.label",
                                                         getElementPresentation(myRoots[0]), typeText)), gc);
      panel.add(myToTypeEditor, gc);
    }

    /**
     * Collects candidate target types from usages of {@code root} in its file,
     * or returns null when candidates cannot/should not be computed (e.g. the
     * element is visible beyond private scope).
     */
    private String @Nullable [] getValidTypes(final Project project, final PsiElement root) {
      if (root instanceof PsiField || root instanceof PsiMethod) {
        final PsiModifierList modifierList = ((PsiModifierListOwner)root).getModifierList();
        // Non-private members may have usages outside this file; skip suggestion computation.
        if (VisibilityUtil.compare(VisibilityUtil.getVisibilityModifier(modifierList), PsiModifier.PRIVATE) < 0) return null;
      }
      final List<PsiExpression> expressions = new ArrayList<>();
      for (PsiReference reference : ReferencesSearch.search(root, GlobalSearchScope.fileScope(root.getContainingFile()))) {
        final PsiElement element = reference.getElement();
        final PsiExpression expr = PsiTreeUtil.getParentOfType(element, PsiExpression.class, false);
        if (expr != null) {
          expressions.add(expr);
        }
      }
      try {
        final PsiExpression[] occurrences = expressions.toArray(PsiExpression.EMPTY_ARRAY);
        final PsiType[] psiTypes = new TypeSelectorManagerImpl(project, myTypeCodeFragment.getType(), occurrences).getTypesForAll();
        if (psiTypes.length > 0) {
          final String[] history = new String[psiTypes.length];
          for (int i = 0; i < psiTypes.length; i++) {
            PsiType psiType = psiTypes[i];
            history[i] = psiType.getCanonicalText(true);
          }
          return history;
        }
      }
      catch (PsiTypeCodeFragment.TypeSyntaxException | PsiTypeCodeFragment.NoTypeException e) {
        LOG.info(e);
        return null;
      }
      return null;
    }

    @Override
    protected void doAction() {
      final PsiType rootType = getRootType();
      final PsiType migrationType = getMigrationType();
      // No-op migration (unparsable target or same type): just close the dialog.
      if (migrationType == null || ChangeSignatureUtil.deepTypeEqual(rootType, migrationType)) {
        close(DialogWrapper.OK_EXIT_CODE);
        return;
      }
      super.doAction();
    }

    @Override
    protected @NotNull Function<? super PsiElement, ? extends PsiType> getMigrationTypeFunction() {
      return Functions.constant(getMigrationType());
    }

    /** @return the type the user entered, or null when the fragment is not a valid type. */
    @Nullable
    public PsiType getMigrationType() {
      try {
        return myTypeCodeFragment.getType();
      }
      catch (PsiTypeCodeFragment.TypeSyntaxException | PsiTypeCodeFragment.NoTypeException e) {
        LOG.debug(e);
        return null;
      }
    }

    @Nullable
    private PsiType getRootType() {
      return TypeMigrationLabeler.getElementType(myRoots[0]);
    }

    // NOTE(review): these presentation strings are hard-coded English rather than
    // bundle messages — presumably intentional for this label; confirm localization policy.
    private static String getElementPresentation(PsiElement element) {
      if (element instanceof PsiMethod) {
        return "return type of method " + ((PsiMethod)element).getName();
      }
      if (element instanceof PsiField) {
        return "type of field " + ((PsiField)element).getName();
      }
      if (element instanceof PsiLocalVariable) {
        return "type of variable " + ((PsiLocalVariable)element).getName();
      }
      if (element instanceof PsiReferenceParameterList) {
        return "class type arguments ";
      }
      if (element instanceof PsiParameter) {
        final PsiParameter param = (PsiParameter)element;
        String result = "type of parameter " + param.getName();
        if (param.getParent() instanceof PsiParameterList) {
          final PsiMethod method = PsiTreeUtil.getParentOfType(param, PsiMethod.class);
          assert method != null;
          result += " of method " + method.getName();
        }
        return result;
      }
      return element.toString();
    }

    /** True when the user entered 'void' and at least one root is a variable. */
    private boolean isVoidVariableMigration() {
      if (!PsiType.VOID.equals(getMigrationType())) return false;
      for (PsiElement root : myRoots) {
        if (root instanceof PsiVariable) return true;
      }
      return false;
    }

    /**
     * Recursively validates a candidate type: it must resolve, and generic
     * arguments may not be (or be bounded by) primitive types.
     */
    private static boolean checkType(final PsiType type) {
      if (type == null) return false;
      if (!type.isValid()) return false;
      if (type instanceof PsiClassType){
        final PsiClassType psiClassType = (PsiClassType)type;
        if (psiClassType.resolve() == null) return false;
        final PsiType[] types = psiClassType.getParameters();
        for (PsiType paramType : types) {
          if (paramType instanceof PsiPrimitiveType ||
              (paramType instanceof PsiWildcardType && ((PsiWildcardType)paramType).getBound() instanceof PsiPrimitiveType)) {
            return false;
          }
          if (!checkType(paramType)) return false;
        }
      }
      if (type instanceof PsiArrayType) {
        return checkType(type.getDeepComponentType());
      }
      return true;
    }
  }
}
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium.remote;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.browserlaunchers.DoNotUseProxyPac;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Converts JSON (raw strings or org.json values) into Java beans and
 * well-known WebDriver wire-protocol types (Command, SessionId, Capabilities, ...).
 */
public class JsonToBeanConverter {

  /**
   * Converts the given JSON input into an instance of {@code clazz}.
   *
   * @param clazz target type
   * @param text  JSON source: a String, a JSONObject/JSONArray, or an already-converted value
   * @return the converted value, possibly null
   * @throws JsonException when the underlying JSON parsing fails
   */
  public <T> T convert(Class<T> clazz, Object text) throws JsonException {
    try {
      return convert(clazz, text, 0);
    } catch (JSONException e) {
      throw new JsonException(e);
    }
  }

  /**
   * Recursive worker. {@code depth == 0} marks the outermost call, where a raw
   * JSON string is first parsed into a JSONObject/JSONArray.
   */
  @SuppressWarnings("unchecked")
  private <T> T convert(Class<T> clazz, Object text, int depth) throws JSONException {
    if (text == null) {
      return null;
    }

    if (String.class.equals(clazz)) {
      return (T) text;
    }

    if (isPrimitive(clazz)) {
      return (T) text;
    }

    if (text instanceof Number) {
      // Thank you type erasure.
      if (text instanceof Double || text instanceof Float) {
        return (T) Double.valueOf(String.valueOf(text));
      }
      return (T) Long.valueOf(String.valueOf(text));
    }

    if (isPrimitive(text.getClass())) {
      return (T) text;
    }

    if (isEnum(clazz, text)) {
      return (T) convertEnum(clazz, text);
    }

    if ("".equals(String.valueOf(text))) {
      return (T) text;
    }

    if (Command.class.equals(clazz)) {
      JSONObject rawCommand = new JSONObject((String) text);

      SessionId sessionId = null;
      if (rawCommand.has("sessionId")) {
        sessionId = convert(SessionId.class, rawCommand.getString("sessionId"), depth + 1);
      }

      String name = rawCommand.getString("name");
      if (rawCommand.has("parameters")) {
        Map<String, ?> args =
            (Map<String, ?>) convert(HashMap.class, rawCommand.getJSONObject("parameters"), depth + 1);
        return (T) new Command(sessionId, name, args);
      }

      return (T) new Command(sessionId, name);
    }

    if (SessionId.class.equals(clazz)) {
      JSONObject object = new JSONObject((String) text);
      String value = object.getString("value");
      return (T) new SessionId(value);
    }

    if (Capabilities.class.equals(clazz)) {
      JSONObject object = new JSONObject((String) text);
      DesiredCapabilities caps = new DesiredCapabilities();
      Iterator allKeys = object.keys();
      while (allKeys.hasNext()) {
        String key = (String) allKeys.next();
        caps.setCapability(key, object.get(key));
      }
      return (T) caps;
    }

    if (DoNotUseProxyPac.class.equals(clazz)) {
      JSONObject object = new JSONObject((String) text);
      DoNotUseProxyPac pac = new DoNotUseProxyPac();

      if (object.has("directUrls")) {
        JSONArray allUrls = object.getJSONArray("directUrls");
        for (int i = 0; i < allUrls.length(); i++) {
          pac.map(allUrls.getString(i)).toNoProxy();
        }
      }

      if (object.has("directHosts")) {
        JSONArray allHosts = object.getJSONArray("directHosts");
        for (int i = 0; i < allHosts.length(); i++) {
          pac.mapHost(allHosts.getString(i)).toNoProxy();
        }
      }

      if (object.has("proxiedHosts")) {
        JSONObject proxied = object.getJSONObject("proxiedHosts");
        Iterator allHosts = proxied.keys();
        while (allHosts.hasNext()) {
          String host = (String) allHosts.next();
          pac.mapHost(host).toProxy(proxied.getString(host));
        }
      }

      if (object.has("proxiedUrls")) {
        JSONObject proxied = object.getJSONObject("proxiedUrls");
        Iterator allUrls = proxied.keys();
        while (allUrls.hasNext()) {
          String host = (String) allUrls.next();
          pac.map(host).toProxy(proxied.getString(host));
        }
      }

      // NOTE(review): identical handling to "proxiedUrls" above — confirm whether
      // regex URLs were meant to use a dedicated regex mapping on DoNotUseProxyPac.
      if (object.has("proxiedRegexUrls")) {
        JSONObject proxied = object.getJSONObject("proxiedRegexUrls");
        Iterator allUrls = proxied.keys();
        while (allUrls.hasNext()) {
          String host = (String) allUrls.next();
          pac.map(host).toProxy(proxied.getString(host));
        }
      }

      if (object.has("defaultProxy")) {
        if ("'DIRECT'".equals(object.getString("defaultProxy"))) {
          pac.defaults().toNoProxy();
        } else {
          pac.defaults().toProxy(object.getString("defaultProxy"));
        }
      }

      if (object.has("deriveFrom")) {
        try {
          pac.deriveFrom(new URI(object.getString("deriveFrom")));
        } catch (URISyntaxException e) {
          throw new WebDriverException(e);
        }
      }

      return (T) pac;
    }

    if (Date.class.equals(clazz)) {
      return (T) new Date(Long.valueOf(String.valueOf(text)));
    }

    // (redundant "text != null" checks removed: null was handled at the top)
    if (text instanceof String && !((String) text).startsWith("{") && Object.class.equals(clazz)) {
      return (T) text;
    }

    if (text instanceof JSONArray) {
      return (T) convertList((JSONArray) text, depth);
    }

    if (text == JSONObject.NULL) {
      return null;
    }

    // Only on the outermost call: parse raw JSON text into org.json structures.
    if (depth == 0) {
      if (text instanceof String) {
        if (((String) text).startsWith("[")) {
          text = new JSONArray((String) text);
        } else {
          text = new JSONObject(String.valueOf(text));
        }
      }
    }

    if (text instanceof JSONObject) {
      JSONObject o = (JSONObject) text;

      if (Map.class.isAssignableFrom(clazz)) {
        return (T) convertMap(o, depth);
      }

      if (isPrimitive(o.getClass())) {
        return (T) o;
      }

      if (Object.class.equals(clazz)) {
        return (T) convertMap(o, depth);
      }

      return convertBean(clazz, o, depth);
    } else if (text instanceof JSONArray) {
      return (T) convertList((JSONArray) text, depth + 1);
    } else {
      return (T) text; // Crap shoot here; probably a string.
    }
  }

  /**
   * Converts {@code text} to a constant of {@code clazz} (or of a nested enum
   * of {@code clazz}); returns null when no enum type is found.
   */
  @SuppressWarnings("unchecked")
  private Enum convertEnum(Class clazz, Object text) {
    if (clazz.isEnum()) {
      return Enum.valueOf(clazz, String.valueOf(text));
    }

    Class[] allClasses = clazz.getClasses();
    for (Class current : allClasses) {
      if (current.isEnum()) {
        return Enum.valueOf(current, String.valueOf(text));
      }
    }

    return null;
  }

  private boolean isEnum(Class<?> clazz, Object text) {
    return clazz.isEnum() || text instanceof Enum<?>;
  }

  /**
   * Populates a new instance of {@code clazz} from the JSON object via its
   * setters; properties without a matching key or writer are skipped.
   */
  public <T> T convertBean(Class<T> clazz, JSONObject toConvert, int depth) throws JSONException {
    T t = newInstance(clazz);
    SimplePropertyDescriptor[] allProperties = SimplePropertyDescriptor.getPropertyDescriptors(clazz);
    for (SimplePropertyDescriptor property : allProperties) {
      if (!toConvert.has(property.getName()))
        continue;

      Object value = toConvert.get(property.getName());
      Method write = property.getWriteMethod();
      if (write == null) {
        continue;
      }

      Class<?> type = write.getParameterTypes()[0];

      try {
        write.invoke(t, convert(type, value, depth + 1));
      } catch (IllegalAccessException e) {
        throw propertyWriteException(property, value, type, e);
      } catch (InvocationTargetException e) {
        throw propertyWriteException(property, value, type, e);
      }
    }
    return t;
  }

  private <T> T newInstance(Class<T> clazz) {
    try {
      return clazz.newInstance();
    } catch (InstantiationException e) {
      throw new WebDriverException(e);
    } catch (IllegalAccessException e) {
      throw new WebDriverException(e);
    }
  }

  /**
   * Builds the exception describing a failed property write.
   * Fixed: previously this method *threw* the exception itself even though it is
   * declared to return one and all callers already say {@code throw propertyWriteException(...)},
   * which made the helper's contract misleading and the callers' throws unreachable.
   */
  private WebDriverException propertyWriteException(
      SimplePropertyDescriptor property, Object value, Class<?> type, Throwable cause) {
    return new WebDriverException(
        String.format("Property name: %s -> %s on class %s", property.getName(), value, type),
        cause);
  }

  /** Converts a JSONObject into a HashMap, recursively converting values. */
  @SuppressWarnings("unchecked")
  private Map convertMap(JSONObject toConvert, int depth) throws JSONException {
    Map map = new HashMap();

    Iterator allEntries = toConvert.keys();
    while (allEntries.hasNext()) {
      String key = (String) allEntries.next();
      map.put(key, convert(Object.class, toConvert.get(key), depth + 1));
    }

    return map;
  }

  /** Converts a JSONArray into an ArrayList, recursively converting elements. */
  @SuppressWarnings("unchecked")
  private List convertList(JSONArray toConvert, int depth) throws JSONException {
    ArrayList list = new ArrayList(toConvert.length());
    for (int i = 0; i < toConvert.length(); i++) {
      list.add(convert(Object.class, toConvert.get(i), depth + 1));
    }
    return list;
  }

  /** True for primitives, their wrapper classes, and Void. */
  private boolean isPrimitive(Class<?> clazz) {
    if (clazz.isPrimitive()) {
      return true;
    }
    for (Class<?> wrapper : new Class<?>[] {
        Boolean.class, Byte.class, Character.class, Double.class, Float.class,
        Integer.class, Long.class, Short.class, Void.class}) {
      if (wrapper.isAssignableFrom(clazz)) {
        return true;
      }
    }
    return false;
  }
}
package rxbroadcast; import org.junit.Assert; import org.junit.Test; import rx.observers.TestSubscriber; public final class CausalOrderTest { @Test public final void prepareDoesIncrementTheLocalClock() { final Sender s = new Sender(new byte[]{1}); final CausalOrder<TestValue> causalOrder = new CausalOrder<>(s); final VectorTimestamped<TestValue> value = causalOrder.prepare(new TestValue('a')); Assert.assertEquals( new VectorTimestamped<>(new TestValue('a'), new VectorTimestamp(new Sender[]{s}, 1)), value); } @Test public final void prepareDoesAttachTheFullClockToTheMessage() { final Sender s1 = new Sender(new byte[]{1}); final Sender s2 = new Sender(new byte[]{2}); final CausalOrder<TestValue> causalOrder1 = new CausalOrder<>(s1); final CausalOrder<TestValue> causalOrder2 = new CausalOrder<>(s2); final TestSubscriber<TestValue> subscriber = new TestSubscriber<>(); causalOrder2.prepare(new TestValue('b')); causalOrder2.receive(s1, subscriber::onNext, causalOrder1.prepare(new TestValue('a'))); final VectorTimestamped<TestValue> value = causalOrder2.prepare(new TestValue('c')); Assert.assertEquals( new VectorTimestamped<>(new TestValue('c'), new VectorTimestamp(new Sender[]{s1, s2}, 1, 2)), value); } @SuppressWarnings("checkstyle:MagicNumber") @Test public final void firstMessagePreparedCanBeReceived() { final Sender s = new Sender(new byte[]{1}); final CausalOrder<TestValue> causalOrder = new CausalOrder<>(s); final TestSubscriber<TestValue> subscriber = new TestSubscriber<>(); causalOrder.receive(s, subscriber::onNext, causalOrder.prepare(new TestValue(42))); subscriber.assertNotCompleted(); subscriber.assertNoErrors(); subscriber.assertValues(new TestValue(42)); Assert.assertEquals(0, causalOrder.delayQueueSize()); } @SuppressWarnings("checkstyle:MagicNumber") @Test public final void duplicateMessageFromHostIsDelayedIndefinitely() { final Sender s = new Sender(new byte[]{1}); final CausalOrder<TestValue> causalOrder = new CausalOrder<>(s); final 
// NOTE(review): the statements below are the tail of a test method whose opening
// (signature and the declarations of `s` and `causalOrder`) lies above this chunk;
// they are reproduced unchanged.
TestSubscriber<TestValue> subscriber = new TestSubscriber<>();
final VectorTimestamped<TestValue> value = causalOrder.prepare(new TestValue(42));
// The same timestamped message is delivered twice: the value is emitted exactly once
// and the duplicate remains parked in the delay queue (size 1).
causalOrder.receive(s, subscriber::onNext, value);
causalOrder.receive(s, subscriber::onNext, value);
subscriber.assertNotCompleted();
subscriber.assertNoErrors();
subscriber.assertValues(new TestValue(42));
Assert.assertEquals(1, causalOrder.delayQueueSize());
}

/**
 * Two messages prepared by the same host but delivered in reverse order must be
 * emitted in preparation order, leaving the delay queue empty afterwards.
 */
@SuppressWarnings("checkstyle:MagicNumber")
@Test
public final void receiveTwoOutOfOrderMessagesFromHost() {
    final Sender s = new Sender(new byte[]{1});
    final CausalOrder<TestValue> causalOrder = new CausalOrder<>(s);
    final TestSubscriber<TestValue> subscriber = new TestSubscriber<>();
    final VectorTimestamped<TestValue> value1 = causalOrder.prepare(new TestValue(42));
    final VectorTimestamped<TestValue> value2 = causalOrder.prepare(new TestValue(43));
    causalOrder.receive(s, subscriber::onNext, value2); // arrives before its causal predecessor
    causalOrder.receive(s, subscriber::onNext, value1); // unblocks delivery of both
    subscriber.assertNotCompleted();
    subscriber.assertNoErrors();
    subscriber.assertValues(new TestValue(42), new TestValue(43));
    Assert.assertEquals(0, causalOrder.delayQueueSize());
}

/**
 * Three messages delivered in fully reversed order are still emitted in
 * preparation order; nothing is left queued once the earliest message arrives.
 */
@SuppressWarnings("checkstyle:MagicNumber")
@Test
public final void receiveMessagesInReverseOrderFromHost() {
    final Sender s = new Sender(new byte[]{1});
    final CausalOrder<TestValue> causalOrder = new CausalOrder<>(s);
    final TestSubscriber<TestValue> subscriber = new TestSubscriber<>();
    final VectorTimestamped<TestValue> value1 = causalOrder.prepare(new TestValue(42));
    final VectorTimestamped<TestValue> value2 = causalOrder.prepare(new TestValue(43));
    final VectorTimestamped<TestValue> value3 = causalOrder.prepare(new TestValue(44));
    causalOrder.receive(s, subscriber::onNext, value3);
    causalOrder.receive(s, subscriber::onNext, value2);
    causalOrder.receive(s, subscriber::onNext, value1);
    subscriber.assertNotCompleted();
    subscriber.assertNoErrors();
    subscriber.assertValues(new TestValue(42), new TestValue(43), new TestValue(44));
    Assert.assertEquals(0, causalOrder.delayQueueSize());
}

/**
 * As above, but every message is delivered twice. Each duplicate can never be
 * causally ordered again, so all three duplicates stay in the delay queue
 * (size 3) while each value is emitted exactly once, in order.
 */
@SuppressWarnings("checkstyle:MagicNumber")
@Test
public final void duplicateMessagesReceivedInReverseOrderFromHostAreDelayedIndefinitely() {
    final Sender s = new Sender(new byte[]{1});
    final CausalOrder<TestValue> causalOrder = new CausalOrder<>(s);
    final TestSubscriber<TestValue> subscriber = new TestSubscriber<>();
    final VectorTimestamped<TestValue> value1 = causalOrder.prepare(new TestValue(42));
    final VectorTimestamped<TestValue> value2 = causalOrder.prepare(new TestValue(43));
    final VectorTimestamped<TestValue> value3 = causalOrder.prepare(new TestValue(44));
    causalOrder.receive(s, subscriber::onNext, value3);
    causalOrder.receive(s, subscriber::onNext, value3);
    causalOrder.receive(s, subscriber::onNext, value2);
    causalOrder.receive(s, subscriber::onNext, value2);
    causalOrder.receive(s, subscriber::onNext, value1);
    causalOrder.receive(s, subscriber::onNext, value1);
    subscriber.assertNotCompleted();
    subscriber.assertNoErrors();
    subscriber.assertValues(new TestValue(42), new TestValue(43), new TestValue(44));
    Assert.assertEquals(3, causalOrder.delayQueueSize());
}

/**
 * Messages from a single remote source received in send (FIFO) order are
 * delivered immediately without any queuing.
 */
@SuppressWarnings("checkstyle:MagicNumber")
@Test
public final void receiveMessagesInSingleSourceFifoOrder() {
    final Sender s1 = new Sender(new byte[]{1});
    final Sender s2 = new Sender(new byte[]{2});
    final CausalOrder<TestValue> causalOrder1 = new CausalOrder<>(s1);
    final CausalOrder<TestValue> causalOrder2 = new CausalOrder<>(s2);
    final TestSubscriber<TestValue> subscriber = new TestSubscriber<>();
    final VectorTimestamped<TestValue> v1 = causalOrder2.prepare(new TestValue(42));
    final VectorTimestamped<TestValue> v2 = causalOrder2.prepare(new TestValue(43));
    causalOrder1.receive(s2, subscriber::onNext, v1);
    causalOrder1.receive(s2, subscriber::onNext, v2);
    subscriber.assertNotCompleted();
    subscriber.assertNoErrors();
    subscriber.assertValues(new TestValue(42), new TestValue(43));
    Assert.assertEquals(0, causalOrder1.delayQueueSize());
}

/**
 * Messages from a single remote source received in reverse order are buffered
 * until their predecessors arrive, then emitted in send order.
 */
@SuppressWarnings("checkstyle:MagicNumber")
@Test
public final void receiveMessagesInReverseOrderFromSingleSource() {
    final Sender s1 = new Sender(new byte[]{1});
    final Sender s2 = new Sender(new byte[]{2});
    final CausalOrder<TestValue> causalOrder1 = new CausalOrder<>(s1);
    final CausalOrder<TestValue> causalOrder2 = new CausalOrder<>(s2);
    final TestSubscriber<TestValue> subscriber = new TestSubscriber<>();
    final VectorTimestamped<TestValue> v1 = causalOrder2.prepare(new TestValue(42));
    final VectorTimestamped<TestValue> v2 = causalOrder2.prepare(new TestValue(43));
    final VectorTimestamped<TestValue> v3 = causalOrder2.prepare(new TestValue(44));
    causalOrder1.receive(s2, subscriber::onNext, v3);
    causalOrder1.receive(s2, subscriber::onNext, v2);
    causalOrder1.receive(s2, subscriber::onNext, v1);
    subscriber.assertNotCompleted();
    subscriber.assertNoErrors();
    subscriber.assertValues(new TestValue(42), new TestValue(43), new TestValue(44));
    Assert.assertEquals(0, causalOrder1.delayQueueSize());
}

/**
 * Three-host scenario: host 3 receives 'b' (which causally follows 'a') before
 * 'a' itself; 'b' is held back until 'a' arrives, so host 3 observes 'a', 'b'.
 */
@Test
public final void receiveMessagesInCausalOrder1() {
    final Sender s1 = new Sender(new byte[]{1});
    final Sender s2 = new Sender(new byte[]{2});
    final Sender s3 = new Sender(new byte[]{3});
    final CausalOrder<TestValue> causalOrder1 = new CausalOrder<>(s1);
    final CausalOrder<TestValue> causalOrder2 = new CausalOrder<>(s2);
    final CausalOrder<TestValue> causalOrder3 = new CausalOrder<>(s3);
    final TestSubscriber<TestValue> subscriber2 = new TestSubscriber<>();
    final TestSubscriber<TestValue> subscriber3 = new TestSubscriber<>();
    final VectorTimestamped<TestValue> valueA = causalOrder1.prepare(new TestValue('a'));
    causalOrder2.receive(s1, subscriber2::onNext, valueA);
    // 'b' is prepared after host 2 has seen 'a', so 'b' causally depends on 'a'.
    final VectorTimestamped<TestValue> valueB = causalOrder2.prepare(new TestValue('b'));
    causalOrder3.receive(s2, subscriber3::onNext, valueB); // arrives first, must wait
    causalOrder3.receive(s1, subscriber3::onNext, valueA); // releases 'b'
    subscriber2.assertNotCompleted();
    subscriber2.assertNoErrors();
    subscriber2.assertValue(new TestValue('a'));
    subscriber3.assertNotCompleted();
    subscriber3.assertNoErrors();
    subscriber3.assertValues(new TestValue('a'), new TestValue('b'));
    Assert.assertEquals(0, causalOrder2.delayQueueSize());
    Assert.assertEquals(0, causalOrder3.delayQueueSize());
}

/**
 * Larger three-host exchange ('a', 'l', 'v', 'm'). Each host observes a
 * delivery order compatible with causality (concurrent messages may differ
 * per host, as the per-subscriber assertions show). Finally verifies the exact
 * vector timestamp that host 3 attaches to a new message 'w' after having
 * observed one message from host 1 and two each from hosts 2 and 3.
 */
@Test
public final void receiveMessagesInCausalOrder2() {
    final Sender sender1 = new Sender(new byte[]{1});
    final Sender sender2 = new Sender(new byte[]{2});
    final Sender sender3 = new Sender(new byte[]{3});
    final CausalOrder<TestValue> causalOrder1 = new CausalOrder<>(sender1);
    final CausalOrder<TestValue> causalOrder2 = new CausalOrder<>(sender2);
    final CausalOrder<TestValue> causalOrder3 = new CausalOrder<>(sender3);
    final TestSubscriber<TestValue> subscriber1 = new TestSubscriber<>();
    final TestSubscriber<TestValue> subscriber2 = new TestSubscriber<>();
    final TestSubscriber<TestValue> subscriber3 = new TestSubscriber<>();
    final VectorTimestamped<TestValue> valueA = causalOrder1.prepare(new TestValue('a'));
    causalOrder1.receive(sender1, subscriber1::onNext, valueA);
    final VectorTimestamped<TestValue> valueL = causalOrder2.prepare(new TestValue('l'));
    causalOrder2.receive(sender2, subscriber2::onNext, valueL);
    causalOrder2.receive(sender1, subscriber2::onNext, valueA);
    final VectorTimestamped<TestValue> valueV = causalOrder3.prepare(new TestValue('v'));
    causalOrder3.receive(sender3, subscriber3::onNext, valueV);
    causalOrder3.receive(sender2, subscriber3::onNext, valueL);
    causalOrder3.receive(sender1, subscriber3::onNext, valueA);
    causalOrder2.receive(sender3, subscriber2::onNext, valueV);
    causalOrder1.receive(sender3, subscriber1::onNext, valueV);
    causalOrder1.receive(sender2, subscriber1::onNext, valueL);
    // 'm' is prepared after host 2 has seen 'a', 'l' and 'v' — it follows all of them.
    final VectorTimestamped<TestValue> valueM = causalOrder2.prepare(new TestValue('m'));
    causalOrder2.receive(sender2, subscriber2::onNext, valueM);
    causalOrder1.receive(sender2, subscriber1::onNext, valueM);
    causalOrder3.receive(sender2, subscriber3::onNext, valueM);
    subscriber1.assertNotCompleted();
    subscriber1.assertNoErrors();
    subscriber1.assertValues(new TestValue('a'), new TestValue('v'), new TestValue('l'),
            new TestValue('m'));
    subscriber2.assertNotCompleted();
    subscriber2.assertNoErrors();
    subscriber2.assertValues(new TestValue('l'), new TestValue('a'), new TestValue('v'),
            new TestValue('m'));
    subscriber3.assertNotCompleted();
    subscriber3.assertNoErrors();
    subscriber3.assertValues(new TestValue('v'), new TestValue('l'), new TestValue('a'),
            new TestValue('m'));
    final VectorTimestamped<TestValue> valueW = causalOrder3.prepare(new TestValue('w'));
    Assert.assertEquals(
            new VectorTimestamped<>(new TestValue('w'),
                    new VectorTimestamp(
                            new Sender[]{sender1, sender2, sender3}, 1, 2, 2)),
            valueW);
}

/**
 * Same scenario as {@link #receiveMessagesInCausalOrder1()}, but host 3 also
 * receives 'a' a second time; the duplicate can never be ordered again and
 * stays in host 3's delay queue (size 1) without being re-emitted.
 */
@Test
public final void duplicateMessagesReceivedInCausalOrderAreDelayedIndefinitely1() {
    final Sender s1 = new Sender(new byte[]{1});
    final Sender s2 = new Sender(new byte[]{2});
    final Sender s3 = new Sender(new byte[]{3});
    final CausalOrder<TestValue> causalOrder1 = new CausalOrder<>(s1);
    final CausalOrder<TestValue> causalOrder2 = new CausalOrder<>(s2);
    final CausalOrder<TestValue> causalOrder3 = new CausalOrder<>(s3);
    final TestSubscriber<TestValue> subscriber2 = new TestSubscriber<>();
    final TestSubscriber<TestValue> subscriber3 = new TestSubscriber<>();
    final VectorTimestamped<TestValue> valueA = causalOrder1.prepare(new TestValue('a'));
    causalOrder2.receive(s1, subscriber2::onNext, valueA);
    final VectorTimestamped<TestValue> valueB = causalOrder2.prepare(new TestValue('b'));
    causalOrder3.receive(s2, subscriber3::onNext, valueB);
    causalOrder3.receive(s1, subscriber3::onNext, valueA);
    causalOrder3.receive(s1, subscriber3::onNext, valueA); // duplicate delivery
    subscriber2.assertNotCompleted();
    subscriber2.assertNoErrors();
    subscriber2.assertValue(new TestValue('a'));
    subscriber3.assertNotCompleted();
    subscriber3.assertNoErrors();
    subscriber3.assertValues(new TestValue('a'), new TestValue('b'));
    Assert.assertEquals(0, causalOrder2.delayQueueSize());
    Assert.assertEquals(1, causalOrder3.delayQueueSize());
}
}
package com.sixin.widgets.layout.awesome;

import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.widget.FrameLayout;

import com.sixin.widgets.toolkits.ViewGroupInterceptor;

/**
 * A {@link FrameLayout} that implements {@code IAwesome} by delegating every
 * call to an internal {@code IAwesomeImpl}, and routes its measure/layout/draw
 * and touch-event callbacks through the same delegate cast to
 * {@link ViewGroupInterceptor}. The layout itself holds no awesome-specific
 * state beyond the two delegate references.
 */
public class AwesomeFrameLayout extends FrameLayout implements IAwesome {

    private static final String TAG = "AwesomeFrameLayout";

    // Single delegate object; mGroupInterceptor is the SAME instance viewed
    // through its ViewGroupInterceptor interface (see the cast in the ctor).
    private IAwesome mAwesomeDelegate;
    private ViewGroupInterceptor mGroupInterceptor;

    public AwesomeFrameLayout(Context context) {
        this(context, null, 0);
    }

    public AwesomeFrameLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    /**
     * Full constructor; the other two chain here. Creates the delegate, reads
     * the XML attributes, and disables the always-drawn-with-cache behavior.
     */
    public AwesomeFrameLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        mAwesomeDelegate = new IAwesomeImpl(this);
        loadAttrs(attrs);
        setAlwaysDrawnWithCacheEnabled(false);
        // NOTE(review): assumes IAwesomeImpl also implements ViewGroupInterceptor;
        // a ClassCastException here would mean the delegate type changed.
        mGroupInterceptor = (ViewGroupInterceptor) mAwesomeDelegate;
    }

    @Override
    public void loadAttrs(AttributeSet attrs) {
        mAwesomeDelegate.loadAttrs(attrs);
    }

    // ---- offset getters: straight delegation ----

    @Override
    public int getLeftOffset() {
        return mAwesomeDelegate.getLeftOffset();
    }

    @Override
    public int getRightOffset() {
        return mAwesomeDelegate.getRightOffset();
    }

    @Override
    public int getTopOffset() {
        return mAwesomeDelegate.getTopOffset();
    }

    @Override
    public int getBottomOffset() {
        return mAwesomeDelegate.getBottomOffset();
    }

    // ---- tap-back flags: straight delegation ----

    @Override
    public boolean getLeftTapBack() {
        return mAwesomeDelegate.getLeftTapBack();
    }

    @Override
    public boolean getRightTapBack() {
        return mAwesomeDelegate.getRightTapBack();
    }

    @Override
    public boolean getTopTapBack() {
        return mAwesomeDelegate.getTopTapBack();
    }

    @Override
    public boolean getBottomTapBack() {
        return mAwesomeDelegate.getBottomTapBack();
    }

    @Override
    public void setLeftTapBack(boolean tapBack) {
        mAwesomeDelegate.setLeftTapBack(tapBack);
    }

    @Override
    public void setRightTapBack(boolean tapBack) {
        mAwesomeDelegate.setRightTapBack(tapBack);
    }

    @Override
    public void setTopTapBack(boolean tapBack) {
        mAwesomeDelegate.setTopTapBack(tapBack);
    }

    @Override
    public void setBottomTapBack(boolean tapBack) {
        mAwesomeDelegate.setBottomTapBack(tapBack);
    }

    // NOTE(review): unlike the animate* methods below, these five non-animated
    // positioning methods are intentionally(?) empty rather than delegated —
    // confirm against IAwesomeImpl whether instant (non-animated) moves are
    // supposed to be unsupported by this layout.

    @Override
    public void left() {
    }

    @Override
    public void right() {
    }

    @Override
    public void top() {
    }

    @Override
    public void bottom() {
    }

    @Override
    public void open() {
    }

    // ---- animated transitions: straight delegation ----

    @Override
    public void animateLeft() {
        mAwesomeDelegate.animateLeft();
    }

    @Override
    public void animateRight() {
        mAwesomeDelegate.animateRight();
    }

    @Override
    public void animateTop() {
        mAwesomeDelegate.animateTop();
    }

    @Override
    public void animateBottom() {
        mAwesomeDelegate.animateBottom();
    }

    @Override
    public void animateOpen() {
        mAwesomeDelegate.animateOpen();
    }

    @Override
    public int getState() {
        return mAwesomeDelegate.getState();
    }

    // ---- animation listeners: straight delegation ----

    @Override
    public void setLeftAnimationListener(OnLeftAnimationListener listener) {
        mAwesomeDelegate.setLeftAnimationListener(listener);
    }

    @Override
    public void setRightAnimationListener(OnRightAnimationListener listener) {
        mAwesomeDelegate.setRightAnimationListener(listener);
    }

    @Override
    public void setTopAnimationListener(OnTopAnimationListener listener) {
        mAwesomeDelegate.setTopAnimationListener(listener);
    }

    @Override
    public void setBottomAnimationListener(OnBottomAnimationListener listener) {
        mAwesomeDelegate.setBottomAnimationListener(listener);
    }

    @Override
    public void setOpenAnimationListener(OnOpenAnimationListener listener) {
        mAwesomeDelegate.setOpenAnimationListener(listener);
    }

    // ---- framework callbacks, post/pre-intercepted by the delegate ----

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        mGroupInterceptor.postInterceptMeasure(widthMeasureSpec, heightMeasureSpec);
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        super.onLayout(changed, l, t, r, b);
        mGroupInterceptor.postInterceptLayout(changed, l, t, r, b);
    }

    @Override
    protected void dispatchDraw(Canvas canvas) {
        // The interceptor gets a hook both before and after the children draw.
        mGroupInterceptor.preInterceptDispatchDraw(canvas);
        super.dispatchDraw(canvas);
        mGroupInterceptor.postInterceptDispatchDraw(canvas);
    }

    @Override
    public boolean dispatchTouchEvent(MotionEvent ev) {
        // Short-circuit: if the interceptor consumes the event, children never see it.
        return mGroupInterceptor.preInterceptDispatchTouchEvent(ev)
                || super.dispatchTouchEvent(ev);
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        return mGroupInterceptor.interceptInterceptTouchEvent(ev);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        return mGroupInterceptor.interceptTouch(event);
    }
}
// // FPlayAndroid is distributed under the FreeBSD License // // Copyright (c) 2013-2014, Carlos Rafael Gimenes das Neves // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND // ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // The views and conclusions contained in the software and documentation are those // of the authors and should not be interpreted as representing official policies, // either expressed or implied, of the FreeBSD Project. 
//
// https://github.com/carlosrafaelgn/FPlayAndroid
//
package br.com.carlosrafaelgn.fplay.list;

import android.content.Context;
import android.os.Message;
import android.view.View;
import android.view.ViewGroup;

import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserFactory;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashSet;

import br.com.carlosrafaelgn.fplay.R;
import br.com.carlosrafaelgn.fplay.activity.MainHandler;
import br.com.carlosrafaelgn.fplay.playback.Player;
import br.com.carlosrafaelgn.fplay.ui.RadioStationView;
import br.com.carlosrafaelgn.fplay.ui.UI;
import br.com.carlosrafaelgn.fplay.util.ArraySorter;
import br.com.carlosrafaelgn.fplay.util.Serializer;

/**
 * List of Icecast radio stations scraped from the dir.xiph.org HTML directory,
 * plus a locally persisted set of favorite stations.
 *
 * Threading model: fetch/save work runs on ad-hoc worker threads ({@link #run()});
 * results are marshalled back to the UI thread via {@link MainHandler} messages
 * (MSG_MORE_RESULTS / MSG_FINISHED). A volatile {@code version} counter is used
 * as a cancellation token — every worker snapshots it at start and silently
 * abandons its work when the snapshot no longer matches. The volatile
 * {@code readyToFetch} flag is a handshake ensuring the parameter fields
 * (genreToFetch / searchTermToFetch / context / isSavingFavorites) have been
 * consumed by the worker before the next request overwrites them.
 */
public final class RadioStationList extends BaseList<RadioStation> implements Runnable, ArraySorter.Comparer<RadioStation>, MainHandler.Callback {
	// Callback fired (on the main thread) whenever new stations are appended.
	public interface RadioStationAddedObserver {
		void onRadioStationAdded();
	}

	//after analyzing the results obtained from http://dir.xiph.org/xxx
	//I noticed that there are never more than 5 pages of results,
	//with 20 results each ;)
	private static final int MAX_COUNT = 100;
	private static final int MSG_FINISHED = 0x0300;
	private static final int MSG_MORE_RESULTS = 0x0301;

	//public static final int POPULAR_GENRE_COUNT = 32;
	//I took these genres from http://dir.xiph.org/yp.xml
	//... after grouping, counting, sorting and selecting properly ;)
	public static final String[] GENRES = new String[] {
		"8bit", "Alternative", "Anime", "Christian", "Classic", "Classical", "Dance", "Disco", "Electronic",
		"Hits", "House", "Jazz", "Lounge", "Metal", "Misc", "Music", "News", "Oldies", "Pop", "Radio",
		"Reggae", "Rock", "Salsa", "Ska", "Talk", "Techno", "Top", "Top40", "Top100", "Trance", "Various",
		"Video Game", //last popular genre
		"40s", "50s", "60s", "70s", "80s", "90s", "00s", "Adult", "Alternate", "Ambiance", "Ambient",
		"Argentina", "Baladas", "Bass", "Beatles", "Bible", "Blues", "Broadway", "Catholic", "Celtic",
		"Chill", "Chillout", "Chiptunes", "Club", "Comedy", "Contemporary", "Country", "Downtempo",
		"Dubstep", "Easy", "Eclectic", "Electro", "Electronica", "Elektro", "Eurodance", "Experimental",
		"Folk", "France", "Funk", "German", "Gospel", "Goth", "Hardcore", "Hardstyle", "Hindi", "Hiphop",
		"Hit", "Ibiza", "Indie", "Industrial", "Inspirational", "Instrumental", "International", "Italia",
		"Japan", "Jpop", "Jrock", "Jungle", "Korea", "Kpop", "Latin", "Latina", "Latinpop", "Layback",
		"Libre", "Live", "Lovesongs", "Mariachi", "Mashup", "Merengue", "Minecraft", "Mixed", "Modern",
		"Motown", "Mozart", "Musica", "Nederlands", "New", "Oldschool", "Paris", "Progressive",
		"Psytrance", "Punk", "Punkrock", "Rap", "Recuerdos", "Reggaeton", "Relax", "Remixes",
		"Rockabilly", "Romantica", "Roots", "Russian", "Schlager", "Sertanejo", "Slow", "Smooth",
		"Soul", "Soundtrack", "Southern", "Sports", "Student", "Tech", "Tropical", "Webradio",
		"Western", "World", "Zen", "Zouk" };

	// UI-thread state.
	private boolean loading, favoritesLoaded, favoritesChanged;
	// Guards `favorites`, `favoritesLoaded` and `favoritesChanged` across threads.
	private final Object favoritesSync;
	private final HashSet<RadioStation> favorites;
	// Localized display strings injected by the caller.
	private final String tags, noOnAir, noDescription, noTags;
	// Cross-thread handshake/cancellation state (see class comment).
	private volatile boolean readyToFetch, isSavingFavorites;
	private volatile int version;
	private volatile String genreToFetch, searchTermToFetch;
	private volatile Context context;
	public RadioStationAddedObserver radioStationAddedObserver;

	public RadioStationList(String tags, String noOnAir, String noDescription, String noTags) {
		super(RadioStation.class, MAX_COUNT);
		this.items = new RadioStation[MAX_COUNT];
		this.readyToFetch = true;
		this.favoritesSync = new Object();
		this.favorites = new HashSet<>(32);
		this.tags = tags;
		this.noOnAir = noOnAir;
		this.noDescription = noDescription;
		this.noTags = noTags;
	}

	public boolean isLoading() {
		return loading;
	}

	// Updates the loading flag and notifies the browser activity (if visible).
	private void loadingProcessChanged(boolean started) {
		loading = started;
		if (UI.browserActivity != null)
			UI.browserActivity.loadingProcessChanged(started);
	}

	/** Invalidates all in-flight workers by bumping the version counter. */
	public void cancel() {
		version++;
		if (loading)
			loadingProcessChanged(false);
	}

	/**
	 * Accumulates consecutive text-like pull-parser tokens into {@code sb}.
	 * Returns null if the current event is not text-like at all; otherwise
	 * advances the parser past the text run and returns the collected string.
	 * On return, the parser is positioned at the first non-text token.
	 */
	private static String readStringIfPossible(XmlPullParser parser, StringBuilder sb) throws Throwable {
		sb.delete(0, sb.length());
		switch (parser.getEventType()) {
		case XmlPullParser.COMMENT:
			break;
		case XmlPullParser.ENTITY_REF:
			break;
		case XmlPullParser.IGNORABLE_WHITESPACE:
			sb.append(' ');
			break;
		case XmlPullParser.PROCESSING_INSTRUCTION:
		case XmlPullParser.TEXT:
			if (parser.isWhitespace())
				sb.append(' ');
			else
				sb.append(parser.getText());
			break;
		default:
			return null;
		}
		for (; ; ) {
			switch (parser.nextToken()) {
			case XmlPullParser.COMMENT:
				break;
			case XmlPullParser.ENTITY_REF:
				break;
			case XmlPullParser.IGNORABLE_WHITESPACE:
				sb.append(' ');
				break;
			case XmlPullParser.PROCESSING_INSTRUCTION:
			case XmlPullParser.TEXT:
				if (parser.isWhitespace())
					sb.append(' ');
				else
					sb.append(parser.getText());
				break;
			default:
				return sb.toString();
			}
		}
	}

	/**
	 * Parses the second table column of a dir.xiph.org result row: extracts the
	 * m3u playlist link into fields[7] and the stream type into fields[2],
	 * accepting only "MP3" and "Ogg Vorbis" streams. Returns false to drop the row.
	 */
	private static boolean parseIcecastColumn2(XmlPullParser parser, String[] fields) throws Throwable {
		boolean hasFields = false, linkContainsType = false;
		int ev;
		String v;
		while ((ev = parser.nextToken()) != XmlPullParser.END_DOCUMENT) {
			if (ev == XmlPullParser.END_TAG && parser.getName().equals("td"))
				break;
			if (ev == XmlPullParser.START_TAG) {
				if (parser.getName().equals("p") && hasFields) {
					// A <p> after the playlist link was found: the next <a> holds the type.
					linkContainsType = true;
				} else if (parser.getName().equals("a")) {
					if (linkContainsType) {
						if (parser.nextToken() != XmlPullParser.TEXT) {
							//impossible to determine the type of the stream...
							//just drop it!
							hasFields = false;
						} else {
							v = parser.getText().trim();
							hasFields = (v.equals("MP3") || v.equals("Ogg Vorbis"));
							fields[2] = v;
						}
					} else {
						for (int a = parser.getAttributeCount() - 1; a >= 0; a--) {
							if (parser.getAttributeName(a).equals("href") && (v = parser.getAttributeValue(a)).endsWith("m3u")) {
								// Relative links are made absolute against dir.xiph.org.
								fields[7] = ((v.charAt(0) == '/') ? ("http://dir.xiph.org" + v) : (v)).trim();
								hasFields = true;
								break;
							}
						}
					}
				}
			}
		}
		return hasFields;
	}

	/**
	 * Parses the first table column of a result row: title (fields[0]), station
	 * uri (fields[1]), listener count (fields[3], leading character stripped),
	 * description (fields[4]), on-air info (fields[5]) and the tag list
	 * (fields[6], prefixed with the localized "tags" label). The hasNextToken/ev
	 * pair re-feeds a token already consumed by a lookahead into the next loop
	 * iteration instead of calling nextToken() again.
	 */
	private boolean parseIcecastColumn1(XmlPullParser parser, String[] fields, StringBuilder sb) throws Throwable {
		int ev = 0, pCount = 0;
		boolean hasFields = false, hasNextToken = false, parsingTags = false;
		String str;
		while (hasNextToken || ((ev = parser.nextToken()) != XmlPullParser.END_DOCUMENT)) {
			hasNextToken = false;
			if (ev == XmlPullParser.END_TAG && parser.getName().equals("td"))
				break;
			if (ev == XmlPullParser.START_TAG && parser.getName().equals("p")) {
				// pCount tracks which <p> paragraph we are in: 1 = title/listeners,
				// 2 = description, 3 = on-air.
				pCount++;
			} else if (ev == XmlPullParser.START_TAG && parser.getName().equals("ul")) {
				parsingTags = true;
				sb.delete(0, sb.length());
			} else if (parsingTags) {
				if (ev == XmlPullParser.START_TAG && parser.getName().equals("a")) {
					if (parser.nextToken() == XmlPullParser.TEXT) {
						if (sb.length() > 0) {
							sb.append(' ');
						} else {
							sb.append(tags);
							sb.append(": ");
						}
						sb.append(parser.getText());
					} else {
						hasNextToken = true;
						ev = parser.getEventType();
					}
				} else if (ev == XmlPullParser.END_TAG && parser.getName().equals("ul")) {
					hasFields = true;
					fields[6] = sb.toString().trim();
				}
			} else {
				switch (pCount) {
				case 1:
					if (ev == XmlPullParser.START_TAG) {
						if (parser.getName().equals("a")) {
							for (int a = parser.getAttributeCount() - 1; a >= 0; a--) {
								if (parser.getAttributeName(a).equals("href")) {
									fields[1] = parser.getAttributeValue(a).trim();
									//set hasFields to true, only if the title has been found!
									//hasFields = true;
									break;
								}
							}
							parser.nextToken();
							if ((str = readStringIfPossible(parser, sb)) != null) {
								hasFields = true;
								fields[0] = str.trim();
							}
							hasNextToken = true;
							ev = parser.getEventType();
						} else if (fields[0].length() != 0 && parser.getName().equals("span")) {
							if (parser.nextToken() == XmlPullParser.TEXT) {
								fields[3] = parser.getText().trim();
								// Drops the leading character (presumably a separator
								// such as a dash or bracket — confirm against live HTML).
								if (fields[3].length() > 0)
									fields[3] = fields[3].substring(1).trim();
							} else {
								hasNextToken = true;
								ev = parser.getEventType();
							}
						}
					}
					break;
				case 2:
					if (fields[4].length() == 0 && (str = readStringIfPossible(parser, sb)) != null) {
						hasFields = true;
						fields[4] = str.trim();
						hasNextToken = true;
						ev = parser.getEventType();
					} else {
						hasNextToken = false;
					}
					break;
				case 3:
					if (ev == XmlPullParser.END_TAG && parser.getName().equals("strong")) {
						if (fields[5].length() == 0) {
							parser.nextToken();
							if ((str = readStringIfPossible(parser, sb)) != null) {
								hasFields = true;
								fields[5] = str.trim();
							}
							hasNextToken = true;
							ev = parser.getEventType();
						}
					}
					break;
				}
			}
		}
		return hasFields;
	}

	/**
	 * Parses one {@code <tr>} of the results table into the 8-slot fields array.
	 * Returns false as soon as either column parser rejects the row.
	 */
	private boolean parseIcecastRow(XmlPullParser parser, String[] fields, StringBuilder sb) throws Throwable {
		fields[0] = ""; //title
		fields[1] = ""; //uri
		fields[2] = ""; //type
		fields[3] = ""; //listeners
		fields[4] = ""; //description
		fields[5] = ""; //onAir
		fields[6] = ""; //tags
		fields[7] = ""; //m3uUri
		int ev, colCount = 0;
		while ((ev = parser.nextToken()) != XmlPullParser.END_DOCUMENT && colCount < 2) {
			if (ev == XmlPullParser.END_TAG && parser.getName().equals("tr"))
				break;
			if (ev == XmlPullParser.START_TAG && parser.getName().equals("td")) {
				colCount++;
				if (colCount == 1) {
					if (!parseIcecastColumn1(parser, fields, sb))
						return false;
				} else {
					if (!parseIcecastColumn2(parser, fields))
						return false;
				}
			}
		}
		return true;
	}

	/**
	 * Scans the raw stream past the page's {@code <h2>...</h2>} heading
	 * byte-by-byte, then pull-parses the results table that follows, appending
	 * parsed stations to {@code items} (index carried in currentStationIndex[0]).
	 * Aborts quietly whenever {@code myVersion} no longer matches {@code version}.
	 * Returns true if at least one station was appended.
	 */
	private boolean parseIcecastResults(InputStream is, String[] fields, int myVersion, StringBuilder sb, int[] currentStationIndex) throws Throwable {
		// Skip everything up to and including "<h2>" ... "</h2>" without parsing;
		// the HTML before the heading is not well-formed enough to bother with.
		int b = 0;
		while (b >= 0) {
			if ((b = is.read()) == (int)'<' && (b = is.read()) == (int)'h' && (b = is.read()) == (int)'2' && (b = is.read()) == (int)'>')
				break;
		}
		if (b < 0)
			return false;
		while (b >= 0) {
			if ((b = is.read()) == (int)'<' && (b = is.read()) == (int)'/' && (b = is.read()) == (int)'h' && (b = is.read()) == (int)'2' && (b = is.read()) == (int)'>')
				break;
		}
		if (b < 0)
			return false;
		boolean hasResults = false;
		//According to these docs, kXML parser will accept some XML documents
		//that should actually be rejected (A robust "relaxed" mode for parsing
		//HTML or SGML files):
		//http://developer.android.com/training/basics/network-ops/xml.html
		//http://kxml.org/index.html
		try {
			XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
			XmlPullParser parser = factory.newPullParser();
			parser.setInput(is, "UTF-8");
			//special feature! (check out kXML2 source and you will find it!)
			parser.setFeature("http://xmlpull.org/v1/doc/features.html#relaxed", true);
			int ev;
			while ((ev = parser.nextToken()) != XmlPullParser.END_DOCUMENT && currentStationIndex[0] < MAX_COUNT) {
				if (ev == XmlPullParser.END_TAG && parser.getName().equals("table"))
					break;
				if (ev == XmlPullParser.START_TAG && parser.getName().equals("tr")) {
					if (myVersion != version)
						break;
					if (parseIcecastRow(parser, fields, sb) && myVersion == version) {
						final RadioStation station = new RadioStation(fields[0], fields[1], fields[2], fields[4].length() == 0 ? noDescription : fields[4], fields[5].length() == 0 ? noOnAir : fields[5], fields[6].length() == 0 ? noTags : fields[6], fields[7], false);
						synchronized (favoritesSync) {
							station.isFavorite = favorites.contains(station);
						}
						if (myVersion != version)
							break;
						items[currentStationIndex[0]++] = station;
						hasResults = true;
					}
				}
			}
		} catch (Throwable ex) {
			ex.printStackTrace();
		}
		return hasResults;
	}

	/**
	 * Loads favorites from the private file "_RadioFav" (format: int version
	 * 0x0100, int count, then serialized stations). A missing file is treated as
	 * an empty favorites set; any other I/O error propagates.
	 * Caller must hold favoritesSync.
	 */
	private void loadFavoritesInternal(Context context) throws IOException {
		FileInputStream fs = null;
		BufferedInputStream bs = null;
		try {
			fs = context.openFileInput("_RadioFav");
			bs = new BufferedInputStream(fs, 4096);
			final int version = Serializer.deserializeInt(bs);
			final int count = Math.min(Serializer.deserializeInt(bs), MAX_COUNT);
			if (version == 0x0100 && count > 0) {
				favorites.clear();
				for (int i = 0; i < count; i++)
					favorites.add(RadioStation.deserialize(bs, true));
			}
		} catch (IOException ex) {
			// fs == null means openFileInput itself failed: first run, no file yet.
			if (ex instanceof FileNotFoundException && fs == null) {
				favorites.clear();
			} else {
				throw ex;
			}
		} finally {
			try {
				if (bs != null)
					bs.close();
			} catch (Throwable ex) {
				ex.printStackTrace();
			}
			try {
				if (fs != null)
					fs.close();
			} catch (Throwable ex) {
				ex.printStackTrace();
			}
		}
	}

	/**
	 * Persists at most MAX_COUNT favorites to "_RadioFav" in the same format
	 * read by {@link #loadFavoritesInternal(Context)}.
	 * Caller must hold favoritesSync.
	 */
	private void saveFavoritesInternal(Context context) throws IOException {
		FileOutputStream fs = null;
		BufferedOutputStream bs = null;
		try {
			final int count = Math.min(MAX_COUNT, favorites.size());
			int i = 0;
			fs = context.openFileOutput("_RadioFav", 0);
			bs = new BufferedOutputStream(fs, 4096);
			Serializer.serializeInt(bs, 0x0100);
			Serializer.serializeInt(bs, count);
			for (RadioStation s : favorites) {
				if (i >= count)
					break;
				s.serialize(bs);
				i++;
			}
			bs.flush();
		} finally {
			try {
				if (bs != null)
					bs.close();
			} catch (Throwable ex) {
				ex.printStackTrace();
			}
			try {
				if (fs != null)
					fs.close();
			} catch (Throwable ex) {
				ex.printStackTrace();
			}
		}
	}

	/** Orders stations by title, then on-air text, then m3u uri (for sorting favorites). */
	@Override
	public int compare(RadioStation a, RadioStation b) {
		int r = a.title.compareToIgnoreCase(b.title);
		if (r != 0)
			return r;
		r = a.onAir.compareToIgnoreCase(b.onAir);
		if (r != 0)
			return r;
		return a.m3uUri.compareTo(b.m3uUri);
	}

	/**
	 * Worker-thread entry point, shared by three jobs selected by the volatile
	 * parameter fields consumed at the top: save favorites, list favorites
	 * (genre == null && searchTerm == null), or fetch search/genre results from
	 * dir.xiph.org. Results/completion are posted back via MainHandler; every
	 * step re-checks {@code myVersion == version} so a cancel() makes the worker
	 * silently stop.
	 */
	@Override
	public void run() {
		// Snapshot all request parameters, then release the handshake so the UI
		// thread may schedule the next request.
		final int myVersion = version;
		final Context context = this.context;
		final String genre = genreToFetch, searchTerm = searchTermToFetch;
		final boolean isSavingFavorites = this.isSavingFavorites;
		this.context = null;
		readyToFetch = true;
		int err = 0;
		// Lazily load the favorites file once, so isFavorite flags are correct.
		if (!favoritesLoaded && !isSavingFavorites && context != null) {
			synchronized (favoritesSync) {
				if (!favoritesLoaded) {
					try {
						loadFavoritesInternal(context);
						favoritesLoaded = true;
						favoritesChanged = false;
					} catch (Throwable ex) {
						ex.printStackTrace();
					}
				}
			}
		}
		if (genre == null && searchTerm == null) {
			//favorites
			synchronized (favoritesSync) {
				if (isSavingFavorites) {
					try {
						if (favoritesLoaded && favoritesChanged && context != null) {
							saveFavoritesInternal(context);
							favoritesChanged = false;
						}
					} catch (Throwable ex) {
						MainHandler.toast(R.string.error_gen);
					}
				} else {
					try {
						if (favoritesLoaded && context != null) {
							if (myVersion != version)
								return;
							final RadioStation[] stations = new RadioStation[favorites.size()];
							favorites.toArray(stations);
							ArraySorter.sort(stations, 0, stations.length, this);
							if (myVersion == version) {
								final int count = Math.min(stations.length, MAX_COUNT);
								System.arraycopy(stations, 0, items, 0, count);
								MainHandler.sendMessage(RadioStationList.this, MSG_MORE_RESULTS, myVersion, count);
							}
						}
					} catch (Throwable ex) {
						err = -2;
					} finally {
						if (myVersion == version)
							MainHandler.sendMessage(RadioStationList.this, MSG_FINISHED, myVersion, err);
					}
				}
			}
			return;
		}
		try {
			int pageNumber = 0;
			boolean hasResults;
			String[] fields = new String[8];
			final StringBuilder sb = new StringBuilder(256);
			final int[] currentStationIndex = { 0 };
			//genre MUST be one of the predefined genres (due to the encoding)
			final String uri = ((genre != null) ? ("http://dir.xiph.org/by_genre/" + genre.replace(" ", "%20") + "?page=") : ("http://dir.xiph.org/search?search=" + URLEncoder.encode(searchTerm, "UTF-8") + "&page="));
			// Fetch up to 5 pages (see the MAX_COUNT comment at the top of the class).
			do {
				if (myVersion != version)
					break;
				InputStream is = null;
				HttpURLConnection urlConnection = null;
				try {
					urlConnection = (HttpURLConnection)(new URL(uri + pageNumber)).openConnection();
					if (myVersion != version)
						break;
					err = urlConnection.getResponseCode();
					if (err == 200) {
						is = urlConnection.getInputStream();
						hasResults = parseIcecastResults(is, fields, myVersion, sb, currentStationIndex);
						if (hasResults && myVersion == version)
							MainHandler.sendMessage(RadioStationList.this, MSG_MORE_RESULTS, myVersion, currentStationIndex[0]);
						err = 0;
					} else {
						hasResults = false;
					}
				} catch (Throwable ex) {
					hasResults = false;
					err = -1;
				} finally {
					try {
						if (urlConnection != null)
							urlConnection.disconnect();
					} catch (Throwable ex) {
						ex.printStackTrace();
					}
					try {
						if (is != null)
							is.close();
					} catch (Throwable ex) {
						ex.printStackTrace();
					}
					System.gc();
				}
				pageNumber++;
			} while (hasResults && pageNumber < 5);
		} catch (Throwable ex) {
			err = -1;
		} finally {
			if (myVersion == version)
				MainHandler.sendMessage(RadioStationList.this, MSG_FINISHED, myVersion, err);
		}
	}

	/** Marks the station as favorite; no-op until the favorites file was loaded. */
	public void addFavoriteStation(RadioStation station) {
		synchronized (favoritesSync) {
			if (favoritesLoaded) {
				station.isFavorite = true;
				favoritesChanged |= favorites.add(station);
			}
		}
	}

	/** Unmarks the station; no-op until the favorites file was loaded. */
	public void removeFavoriteStation(RadioStation station) {
		synchronized (favoritesSync) {
			if (favoritesLoaded) {
				station.isFavorite = false;
				favoritesChanged |= favorites.remove(station);
			}
		}
	}

	/**
	 * Starts a background fetch from dir.xiph.org for the given genre or search
	 * term (exactly one should be non-null; genre wins in run()). Busy-waits on
	 * the readyToFetch handshake so a previous worker has consumed its parameters.
	 */
	public void fetchIcecast(Context context, String genre, String searchTerm) {
		while (!readyToFetch)
			Thread.yield();
		cancel();
		clear();
		loadingProcessChanged(true);
		final Thread t = new Thread(this, "Icecast Station Fetcher Thread");
		isSavingFavorites = false;
		genreToFetch = genre;
		searchTermToFetch = searchTerm;
		this.context = context;
		readyToFetch = false;
		try {
			t.start();
		} catch (Throwable ex) {
			// Thread could not start: undo the handshake and the loading state.
			readyToFetch = true;
			loadingProcessChanged(false);
		}
	}

	/** Starts a background job that lists the locally stored favorite stations. */
	public void fetchFavorites(Context context) {
		while (!readyToFetch)
			Thread.yield();
		cancel();
		clear();
		loadingProcessChanged(true);
		final Thread t = new Thread(this, "Icecast Favorite Stations Fetcher Thread");
		isSavingFavorites = false;
		genreToFetch = null;
		searchTermToFetch = null;
		this.context = context;
		readyToFetch = false;
		try {
			t.start();
		} catch (Throwable ex) {
			readyToFetch = true;
			loadingProcessChanged(false);
		}
	}

	/** Persists the favorites in the background, only if they actually changed. */
	public void saveFavorites(Context context) {
		while (!readyToFetch)
			Thread.yield();
		synchronized (favoritesSync) {
			if (!favoritesLoaded || !favoritesChanged)
				return;
		}
		final Thread t = new Thread(this, "Icecast Favorite Stations Storer Thread");
		isSavingFavorites = true;
		genreToFetch = null;
		searchTermToFetch = null;
		this.context = context;
		readyToFetch = false;
		try {
			t.start();
		} catch (Throwable ex) {
			readyToFetch = true;
		}
	}

	/**
	 * Main-thread sink for worker messages. Stale messages (arg1 != version)
	 * are dropped. MSG_MORE_RESULTS carries the new total item count in arg2;
	 * MSG_FINISHED carries an error code in arg2 (0 = success).
	 */
	@Override
	public boolean handleMessage(Message msg) {
		if (msg.arg1 != version)
			return true;
		switch (msg.what) {
		case MSG_FINISHED:
			loadingProcessChanged(false);
			if (msg.arg2 != 0)
				UI.toast(Player.getService(), ((msg.arg2 != -2) && !Player.isConnectedToTheInternet()) ? R.string.error_connection : R.string.error_gen);
			break;
		case MSG_MORE_RESULTS:
			//protection against out of order messages... does this really happen? ;)
			if (msg.arg2 > count) {
				//items are always appended :)
				modificationVersion++;
				final int c = count;
				count = msg.arg2;
				// NOTE(review): (c - count) is negative here since count was just
				// raised above c — if addingItems(position, addedCount) expects the
				// number of added items, this should presumably be (count - c).
				// Confirm against BaseList.addingItems before changing.
				addingItems(c, c - count);
				notifyDataSetChanged(-1, CONTENT_ADDED);
				if (radioStationAddedObserver != null)
					radioStationAddedObserver.onRadioStationAdded();
			}
			break;
		}
		return true;
	}

	/** Adapter view factory: reuses the convert view when possible. */
	@Override
	public View getView(int position, View convertView, ViewGroup parent) {
		final RadioStationView view = ((convertView != null) ? (RadioStationView)convertView : new RadioStationView(Player.getService()));
		view.setItemState(items[position], position, getItemState(position));
		return view;
	}

	@Override
	public int getViewHeight() {
		return RadioStationView.getViewHeight();
	}
}
package io.dropwizard.metrics.jersey2; import static io.dropwizard.metrics.MetricRegistry.name; import org.glassfish.jersey.server.model.ModelProcessor; import org.glassfish.jersey.server.model.Resource; import org.glassfish.jersey.server.model.ResourceMethod; import org.glassfish.jersey.server.model.ResourceModel; import org.glassfish.jersey.server.monitoring.ApplicationEvent; import org.glassfish.jersey.server.monitoring.ApplicationEventListener; import org.glassfish.jersey.server.monitoring.RequestEvent; import org.glassfish.jersey.server.monitoring.RequestEventListener; import io.dropwizard.metrics.annotation.ExceptionMetered; import io.dropwizard.metrics.annotation.Metered; import io.dropwizard.metrics.annotation.Timed; import io.dropwizard.metrics.Meter; import io.dropwizard.metrics.MetricName; import io.dropwizard.metrics.MetricRegistry; import io.dropwizard.metrics.Timer; import javax.ws.rs.ext.Provider; import javax.ws.rs.core.Configuration; import javax.ws.rs.ext.Provider; import java.lang.reflect.Method; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.lang.reflect.Method; /** * An application event listener that listens for Jersey application initialization to * be finished, then creates a map of resource method that have metrics annotations. * <p/> * Finally, it listens for method start events, and returns a {@link RequestEventListener} * that updates the relevant metric for suitably annotated methods when it gets the * request events indicating that the method is about to be invoked, or just got done * being invoked. 
*/ @Provider public class InstrumentedResourceMethodApplicationListener implements ApplicationEventListener, ModelProcessor { private final MetricRegistry metrics; private ConcurrentMap<Method, Timer> timers = new ConcurrentHashMap<>(); private ConcurrentMap<Method, Meter> meters = new ConcurrentHashMap<>(); private ConcurrentMap<Method, ExceptionMeterMetric> exceptionMeters = new ConcurrentHashMap<>(); /** * Construct an application event listener using the given metrics registry. * <p/> * <p/> * When using this constructor, the {@link InstrumentedResourceMethodApplicationListener} * should be added to a Jersey {@code ResourceConfig} as a singleton. * * @param metrics a {@link MetricRegistry} */ public InstrumentedResourceMethodApplicationListener(final MetricRegistry metrics) { this.metrics = metrics; } /** * A private class to maintain the metric for a method annotated with the * {@link ExceptionMetered} annotation, which needs to maintain both a meter * and a cause for which the meter should be updated. */ private static class ExceptionMeterMetric { public final Meter meter; public final Class<? 
extends Throwable> cause; public ExceptionMeterMetric(final MetricRegistry registry, final ResourceMethod method, final ExceptionMetered exceptionMetered) { final MetricName name = chooseName(exceptionMetered.name(), exceptionMetered.absolute(), method, ExceptionMetered.DEFAULT_NAME_SUFFIX); this.meter = registry.meter(name); this.cause = exceptionMetered.cause(); } } private static class TimerRequestEventListener implements RequestEventListener { private final ConcurrentMap<Method, Timer> timers; private Timer.Context context = null; public TimerRequestEventListener(final ConcurrentMap<Method, Timer> timers) { this.timers = timers; } @Override public void onEvent(RequestEvent event) { if (event.getType() == RequestEvent.Type.RESOURCE_METHOD_START) { final Timer timer = this.timers.get(event.getUriInfo() .getMatchedResourceMethod().getInvocable().getDefinitionMethod()); if (timer != null) { this.context = timer.time(); } } else if (event.getType() == RequestEvent.Type.RESOURCE_METHOD_FINISHED) { if (this.context != null) { this.context.close(); } } } } private static class MeterRequestEventListener implements RequestEventListener { private final ConcurrentMap<Method, Meter> meters; public MeterRequestEventListener(final ConcurrentMap<Method, Meter> meters) { this.meters = meters; } @Override public void onEvent(RequestEvent event) { if (event.getType() == RequestEvent.Type.RESOURCE_METHOD_START) { final Meter meter = this.meters.get(event.getUriInfo() .getMatchedResourceMethod().getInvocable().getDefinitionMethod()); if (meter != null) { meter.mark(); } } } } private static class ExceptionMeterRequestEventListener implements RequestEventListener { private final ConcurrentMap<Method, ExceptionMeterMetric> exceptionMeters; public ExceptionMeterRequestEventListener(final ConcurrentMap<Method, ExceptionMeterMetric> exceptionMeters) { this.exceptionMeters = exceptionMeters; } @Override public void onEvent(RequestEvent event) { if (event.getType() == 
RequestEvent.Type.ON_EXCEPTION) { final ResourceMethod method = event.getUriInfo().getMatchedResourceMethod(); final ExceptionMeterMetric metric = (method != null) ? this.exceptionMeters.get(method.getInvocable().getDefinitionMethod()) : null; if (metric != null) { if (metric.cause.isAssignableFrom(event.getException().getClass()) || (event.getException().getCause() != null && metric.cause.isAssignableFrom(event.getException().getCause().getClass()))) { metric.meter.mark(); } } } } } private static class ChainedRequestEventListener implements RequestEventListener { private final RequestEventListener[] listeners; private ChainedRequestEventListener(final RequestEventListener... listeners) { this.listeners = listeners; } @Override public void onEvent(final RequestEvent event) { for (RequestEventListener listener : listeners) { listener.onEvent(event); } } } @Override public void onEvent(ApplicationEvent event) { if (event.getType() == ApplicationEvent.Type.INITIALIZATION_APP_FINISHED) { registerMetricsForModel(event.getResourceModel()); } } @Override public ResourceModel processResourceModel(ResourceModel resourceModel, Configuration configuration) { return resourceModel; } @Override public ResourceModel processSubResource(ResourceModel subResourceModel, Configuration configuration) { registerMetricsForModel(subResourceModel); return subResourceModel; } private void registerMetricsForModel(ResourceModel resourceModel) { for (final Resource resource : resourceModel.getResources()) { for (final ResourceMethod method : resource.getAllMethods()) { registerTimedAnnotations(method); registerMeteredAnnotations(method); registerExceptionMeteredAnnotations(method); } for (final Resource childResource : resource.getChildResources()) { for (final ResourceMethod method : childResource.getAllMethods()) { registerTimedAnnotations(method); registerMeteredAnnotations(method); registerExceptionMeteredAnnotations(method); } } } } @Override public RequestEventListener onRequest(final 
RequestEvent event) { final RequestEventListener listener = new ChainedRequestEventListener( new TimerRequestEventListener(timers), new MeterRequestEventListener(meters), new ExceptionMeterRequestEventListener(exceptionMeters)); return listener; } private void registerTimedAnnotations(final ResourceMethod method) { final Method definitionMethod = method.getInvocable().getDefinitionMethod(); final Timed annotation = definitionMethod.getAnnotation(Timed.class); if (annotation != null) { timers.putIfAbsent(definitionMethod, timerMetric(this.metrics, method, annotation)); } } private void registerMeteredAnnotations(final ResourceMethod method) { final Method definitionMethod = method.getInvocable().getDefinitionMethod(); final Metered annotation = definitionMethod.getAnnotation(Metered.class); if (annotation != null) { meters.putIfAbsent(definitionMethod, meterMetric(metrics, method, annotation)); } } private void registerExceptionMeteredAnnotations(final ResourceMethod method) { final Method definitionMethod = method.getInvocable().getDefinitionMethod(); final ExceptionMetered annotation = definitionMethod.getAnnotation(ExceptionMetered.class); if (annotation != null) { exceptionMeters.putIfAbsent(definitionMethod, new ExceptionMeterMetric(metrics, method, annotation)); } } private static Timer timerMetric(final MetricRegistry registry, final ResourceMethod method, final Timed timed) { final MetricName name = chooseName(timed.name(), timed.absolute(), method); return registry.timer(name); } private static Meter meterMetric(final MetricRegistry registry, final ResourceMethod method, final Metered metered) { final MetricName name = chooseName(metered.name(), metered.absolute(), method); return registry.meter(name); } protected static MetricName chooseName(final String explicitName, final boolean absolute, final ResourceMethod method, final String... 
suffixes) { if (explicitName != null && !explicitName.isEmpty()) { if (absolute) { return MetricName.build(explicitName); } return name(method.getInvocable().getDefinitionMethod().getDeclaringClass(), explicitName); } Method definitionMethod = method.getInvocable().getDefinitionMethod(); return MetricName.join(name(definitionMethod.getDeclaringClass(), definitionMethod.getName()), MetricName.build(suffixes)); } }
/*! * This program is free software; you can redistribute it and/or modify it under the * terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software * Foundation. * * You should have received a copy of the GNU Lesser General Public License along with this * program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html * or from the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * Copyright (c) 2002-2016 Pentaho Corporation.. All rights reserved. */ package org.pentaho.platform.scheduler2.quartz; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.pentaho.platform.api.action.IAction; import org.pentaho.platform.api.action.IPostProcessingAction; import org.pentaho.platform.api.action.IStreamingAction; import org.pentaho.platform.api.action.IVarArgsAction; import org.pentaho.platform.api.engine.IPluginManager; import org.pentaho.platform.api.engine.PluginBeanException; import org.pentaho.platform.api.repository.IContentItem; import org.pentaho.platform.api.repository2.unified.ISourcesStreamEvents; import org.pentaho.platform.api.repository2.unified.IStreamListener; import org.pentaho.platform.api.repository2.unified.IUnifiedRepository; import org.pentaho.platform.api.repository2.unified.RepositoryFile; import org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData; import org.pentaho.platform.api.scheduler2.*; import org.pentaho.platform.engine.core.output.FileContentItem; import org.pentaho.platform.engine.core.system.PentahoSystem; import 
org.pentaho.platform.engine.security.SecurityHelper;
import org.pentaho.platform.engine.services.solution.ActionSequenceCompatibilityFormatter;
import org.pentaho.platform.scheduler2.blockout.BlockoutAction;
import org.pentaho.platform.scheduler2.email.Emailer;
import org.pentaho.platform.scheduler2.messsages.Messages;
import org.pentaho.platform.util.beans.ActionHarness;
import org.pentaho.platform.util.messages.LocaleHelper;
import org.pentaho.platform.util.web.MimeHelper;
import org.quartz.Job;
import org.quartz.*;

import java.io.OutputStream;
import java.io.Serializable;
import java.text.MessageFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.Callable;

/**
 * A Quartz job that is responsible for executing the {@link IAction} referred to in the job context.
 *
 * @author aphillips
 */
public class ActionAdapterQuartzJob implements Job {

  static final Log log = LogFactory.getLog( ActionAdapterQuartzJob.class );
  // Class-resolution retry policy: plugin classes may not be registered yet at startup.
  private static final long RETRY_COUNT = 6;
  private static final long RETRY_SLEEP_AMOUNT = 10000;

  // Set by the stream listener when the output file is created; guarded by lock.
  private String outputFilePath = null;
  // Without "final" here it's kind of scary...
  private final Object lock = new Object();

  /**
   * Resolves the action class from the job data map, retrying for a while because
   * plugin classes can become available only after system startup has finished.
   *
   * @throws JobExecutionException when neither action id nor class is present, or
   *         the class cannot be located after all retries
   */
  protected Class<?> resolveClass( JobDataMap jobDataMap ) throws PluginBeanException, JobExecutionException {
    String actionClass = jobDataMap.getString( QuartzScheduler.RESERVEDMAPKEY_ACTIONCLASS );
    String actionId = jobDataMap.getString( QuartzScheduler.RESERVEDMAPKEY_ACTIONID );
    Class<?> clazz = null;
    if ( StringUtils.isEmpty( actionId ) && StringUtils.isEmpty( actionClass ) ) {
      /*
      StringBuilder sb = new StringBuilder();
      for(Object key : jobDataMap.keySet()) {
        Object value = jobDataMap.get(key);
        sb.append("\t* ").append(key).append('=').append(value).append('/')
          .append(value == null ? "<null>" : value.getClass().getName()).append('\n');
      }
      log.error(sb.toString());
      */
      throw new LoggingJobExecutionException( Messages.getInstance().getErrorString(
          "ActionAdapterQuartzJob.ERROR_0001_REQUIRED_PARAM_MISSING", //$NON-NLS-1$
          QuartzScheduler.RESERVEDMAPKEY_ACTIONCLASS, QuartzScheduler.RESERVEDMAPKEY_ACTIONID ) );
    }
    for ( int i = 0; i < RETRY_COUNT; i++ ) {
      try {
        if ( !StringUtils.isEmpty( actionId ) ) {
          IPluginManager pluginManager = PentahoSystem.get( IPluginManager.class );
          clazz = pluginManager.loadClass( actionId );
          return clazz;
        } else if ( !StringUtils.isEmpty( actionClass ) ) {
          clazz = Class.forName( actionClass );
          return clazz;
        }
      } catch ( Throwable t ) {
        // Class not yet available; sleep and retry.
        try {
          Thread.sleep( RETRY_SLEEP_AMOUNT );
        } catch ( InterruptedException ie ) {
          log.info( ie.getMessage(), ie );
        }
      }
    }
    // we have failed to locate the class for the actionClass
    // and we're giving up waiting for it to become available/registered
    // which can typically happen at system startup
    throw new LoggingJobExecutionException( Messages.getInstance().getErrorString(
        "ActionAdapterQuartzJob.ERROR_0002_FAILED_TO_CREATE_ACTION", //$NON-NLS-1$
        StringUtils.isEmpty( actionId ) ? actionClass : actionId ) );
  }

  /**
   * Quartz entry point: resolves and instantiates the action bean, applies job
   * execution rules, then delegates to {@link #invokeAction}.
   */
  @SuppressWarnings( "unchecked" )
  public void execute( JobExecutionContext context ) throws JobExecutionException {
    Scheduler scheduler = null;
    try {
      IScheduler pentahoScheduler = PentahoSystem.getObjectFactory().get( IScheduler.class, "IScheduler2", null );
      scheduler = pentahoScheduler instanceof QuartzScheduler
          ? ( (QuartzScheduler) pentahoScheduler ).getQuartzScheduler() : null;
    } catch ( Exception e ) {
      // ignore
    }
    QuartzSchedulerHelper.applyJobExecutionRules(
        QuartzSchedulerHelper.Phase.EXECUTION, scheduler, context.getJobDetail(), context );

    JobDataMap jobDataMap = context.getMergedJobDataMap();
    String actionUser = jobDataMap.getString( QuartzScheduler.RESERVEDMAPKEY_ACTIONUSER );

    Object bean;
    Class<?> actionClass = null;
    try {
      actionClass = resolveClass( jobDataMap );
      bean = actionClass.newInstance();
    } catch ( Exception e ) {
      throw new LoggingJobExecutionException( Messages.getInstance().getErrorString(
          "ActionAdapterQuartzJob.ERROR_0002_FAILED_TO_CREATE_ACTION", //$NON-NLS-1$
          String.valueOf( context.getJobDetail() ) ), e ); //$NON-NLS-1$
    }

    if ( !( bean instanceof IAction ) ) {
      throw new LoggingJobExecutionException( Messages.getInstance().getErrorString(
          "ActionAdapterQuartzJob.ERROR_0003_ACTION_WRONG_TYPE", actionClass.getName(), //$NON-NLS-1$
          IAction.class.getName() ) );
    }

    final IAction actionBean = (IAction) bean;

    try {
      invokeAction( actionBean, actionUser, context, jobDataMap.getWrappedMap() );
    } catch ( Throwable t ) {
      // ensure that scheduler thread isn't blocked on lock
      synchronized ( lock ) {
        lock.notifyAll();
      }
      // We should not distinguish between checked and unchecked exceptions here. All job execution failures
      // should result in a rethrow of a quartz exception
      throw new LoggingJobExecutionException( Messages.getInstance().getErrorString(
          "ActionAdapterQuartzJob.ERROR_0004_ACTION_FAILED", actionBean //$NON-NLS-1$
              .getClass().getName() ), t );
    }
  }

  /**
   * Runs the action bean (as the scheduling user, or anonymously for system
   * sessions), wires up output streams / email notification, and recreates the
   * job when its resolved output path has changed.
   *
   * @param actionBean the action to execute
   * @param actionUser the user the job was scheduled by (may be null/system)
   * @param context    the Quartz execution context
   * @param params     the merged job parameters (mutated: infrastructure keys removed)
   */
  protected void invokeAction( final IAction actionBean, final String actionUser, final JobExecutionContext context,
                               final Map<String, Serializable> params ) throws Exception {

    final IScheduler scheduler = PentahoSystem.getObjectFactory().get( IScheduler.class, "IScheduler2", null );
    final Map<String, Serializable> jobParams = new HashMap<String, Serializable>( params ); // shallow copy

    // remove the scheduling infrastructure properties
    params.remove( QuartzScheduler.RESERVEDMAPKEY_ACTIONCLASS );
    params.remove( QuartzScheduler.RESERVEDMAPKEY_ACTIONID );
    params.remove( QuartzScheduler.RESERVEDMAPKEY_ACTIONUSER );
    Object objsp = params.get( QuartzScheduler.RESERVEDMAPKEY_STREAMPROVIDER );
    IBackgroundExecutionStreamProvider sp = null;
    if ( objsp != null && IBackgroundExecutionStreamProvider.class.isAssignableFrom( objsp.getClass() ) ) {
      sp = (IBackgroundExecutionStreamProvider) objsp;
    }
    final IBackgroundExecutionStreamProvider streamProvider = sp;
    params.remove( QuartzScheduler.RESERVEDMAPKEY_STREAMPROVIDER );
    params.remove( QuartzScheduler.RESERVEDMAPKEY_UIPASSPARAM );

    // The scheduled_fire_time is useful only to the blockoutAction see PDI-10171
    if ( actionBean instanceof BlockoutAction ) {
      params.put( IBlockoutManager.SCHEDULED_FIRE_TIME, context.getScheduledFireTime() );
    }

    if ( log.isDebugEnabled() ) {
      log.debug( MessageFormat.format(
          "Scheduling system invoking action {0} as user {1} with params [ {2} ]", actionBean //$NON-NLS-1$
              .getClass().getName(), actionUser, QuartzScheduler.prettyPrintMap( params ) ) );
    }

    Callable<Boolean> actionBeanRunner = new Callable<Boolean>() {

      public Boolean call() throws Exception {
        LocaleHelper.setLocaleOverride( (Locale) params.get( LocaleHelper.USER_LOCALE_PARAM ) );
        // sync job params to the action bean
        ActionHarness actionHarness = new ActionHarness( actionBean );
        boolean updateJob = false;

        final Map<String, Object> actionParams = new HashMap<String, Object>();
        actionParams.putAll( params );
        if ( streamProvider != null ) {
          actionParams.put( "inputStream", streamProvider.getInputStream() );
        }
        actionHarness.setValues( actionParams, new ActionSequenceCompatibilityFormatter() );

        if ( actionBean instanceof IVarArgsAction ) {
          actionParams.remove( "inputStream" );
          actionParams.remove( "outputStream" );
          ( (IVarArgsAction) actionBean ).setVarArgs( actionParams );
        }

        boolean waitForFileCreated = false;
        OutputStream stream = null;

        if ( streamProvider != null ) {
          actionParams.remove( "inputStream" );
          if ( actionBean instanceof IStreamingAction ) {
            streamProvider.setStreamingAction( (IStreamingAction) actionBean );
          }

          // BISERVER-9414 - validate that output path still exist
          SchedulerOutputPathResolver resolver =
              new SchedulerOutputPathResolver( streamProvider.getOutputPath(), actionUser );
          String outputPath = resolver.resolveOutputFilePath();
          actionParams.put( "useJcr", Boolean.TRUE );
          actionParams.put( "jcrOutputPath", outputPath.substring( 0, outputPath.lastIndexOf( "/" ) ) );

          if ( !outputPath.equals( streamProvider.getOutputPath() ) ) {
            streamProvider.setOutputFilePath( outputPath ); // set fallback path
            updateJob = true; // job needs to be deleted and recreated with the new output path
          }

          stream = streamProvider.getOutputStream();
          if ( stream instanceof ISourcesStreamEvents ) {
            ( (ISourcesStreamEvents) stream ).addListener( new IStreamListener() {
              public void fileCreated( final String filePath ) {
                synchronized ( lock ) {
                  outputFilePath = filePath;
                  lock.notifyAll();
                }
              }
            } );
            waitForFileCreated = true;
          }
          actionParams.put( "outputStream", stream );
          // The lineage_id is only useful for the metadata and not needed at this level see PDI-10171
          actionParams.remove( QuartzScheduler.RESERVEDMAPKEY_LINEAGE_ID );
          actionHarness.setValues( actionParams );
        }

        actionBean.execute();

        if ( stream != null ) {
          IOUtils.closeQuietly( stream );
        }

        if ( waitForFileCreated ) {
          // Block until the stream listener reports the output file path,
          // then attach it to the notification email.
          synchronized ( lock ) {
            if ( outputFilePath == null ) {
              lock.wait();
            }
          }
          sendEmail( actionParams, params, outputFilePath );
        }
        if ( actionBean instanceof IPostProcessingAction ) {
          closeContentOutputStreams( (IPostProcessingAction) actionBean );
          markContentAsGenerated( (IPostProcessingAction) actionBean );
        }
        return updateJob;
      }

      // Close every content output stream produced by the action.
      private void closeContentOutputStreams( IPostProcessingAction actionBean ) {
        for ( IContentItem contentItem : actionBean.getActionOutputContents() ) {
          contentItem.closeOutputStream();
        }
      }

      // Tag generated repository files with the lineage id so they can be traced.
      private void markContentAsGenerated( IPostProcessingAction actionBean ) {
        IUnifiedRepository repo = PentahoSystem.get( IUnifiedRepository.class );
        String lineageId = (String) params.get( QuartzScheduler.RESERVEDMAPKEY_LINEAGE_ID );
        for ( IContentItem contentItem : actionBean.getActionOutputContents() ) {
          RepositoryFile sourceFile = getRepositoryFileSafe( repo, contentItem.getPath() );
          // add metadata if we have access and we have file
          if ( sourceFile != null ) {
            Map<String, Serializable> metadata = repo.getFileMetadata( sourceFile.getId() );
            metadata.put( QuartzScheduler.RESERVEDMAPKEY_LINEAGE_ID, lineageId );
            repo.setFileMetadata( sourceFile.getId(), metadata );
          } else {
            String fileName = getFSFileNameSafe( contentItem );
            log.warn( Messages.getInstance().getString(
                "ActionAdapterQuartzJob.WARN_0001_SKIP_REMOVING_OUTPUT_FILE", fileName ) );
          }
        }
      }

      private RepositoryFile getRepositoryFileSafe( IUnifiedRepository repo, String path ) {
        try {
          return repo.getFile( path );
        } catch ( Exception e ) {
          log.debug( MessageFormat.format( "Cannot get repository file \"{0}\": {1}", path, e.getMessage() ), e );
          return null;
        }
      }

      private String getFSFileNameSafe( IContentItem contentItem ) {
        if ( contentItem instanceof FileContentItem ) {
          return ( (FileContentItem) contentItem ).getFile().getName();
        }
        return null;
      }
    };

    boolean requiresUpdate = false;
    if ( ( actionUser == null ) || ( actionUser.equals( "system session" ) ) ) { //$NON-NLS-1$
      // For now, don't try to run quartz jobs as authenticated if the user
      // that created the job is a system user. See PPP-2350
      requiresUpdate = SecurityHelper.getInstance().runAsAnonymous( actionBeanRunner );
    } else {
      try {
        requiresUpdate = SecurityHelper.getInstance().runAsUser( actionUser, actionBeanRunner );
      } catch ( Throwable t ) {
        // First failure: recreate the job once as a RunOnce trigger flagged with
        // RESTART_FLAG so a repeated failure is not retried forever.
        Object restartFlag = jobParams.get( QuartzScheduler.RESERVEDMAPKEY_RESTART_FLAG );
        if ( restartFlag == null ) {
          final SimpleJobTrigger trigger = new SimpleJobTrigger( new Date(), null, 0, 0 );
          final Class<IAction> iaction = (Class<IAction>) actionBean.getClass();
          // recreate the job in the context of the original creator
          SecurityHelper.getInstance().runAsUser( actionUser, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
              if ( streamProvider != null ) {
                streamProvider.setStreamingAction( null ); // remove generated content
              }
              QuartzJobKey jobKey = QuartzJobKey.parse( context.getJobDetail().getName() );
              String jobName = jobKey.getJobName();
              jobParams.put( QuartzScheduler.RESERVEDMAPKEY_RESTART_FLAG, Boolean.TRUE );
              scheduler.createJob( jobName, iaction, jobParams, trigger, streamProvider );
              log.warn( "New RunOnce job created for " + jobName + " -> possible startup synchronization error" );
              return null;
            }
          } );
        } else {
          log.warn( "RunOnce already created, skipping" );
          throw new Exception( t );
        }
      }
    }

    scheduler.fireJobCompleted( actionBean, actionUser, params, streamProvider );

    if ( requiresUpdate ) {
      log.warn( "Output path for job: " + context.getJobDetail().getName() + " has changed. Job requires update" );
      try {
        final IJobTrigger trigger = scheduler.getJob( context.getJobDetail().getName() ).getJobTrigger();
        final Class<IAction> iaction = (Class<IAction>) actionBean.getClass();

        // remove job with outdated/invalid output path
        scheduler.removeJob( context.getJobDetail().getName() );

        // recreate the job in the context of the original creator
        SecurityHelper.getInstance().runAsUser( actionUser, new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            streamProvider.setStreamingAction( null ); // remove generated content
            QuartzJobKey jobKey = QuartzJobKey.parse( context.getJobDetail().getName() );
            String jobName = jobKey.getJobName();
            org.pentaho.platform.api.scheduler2.Job j =
                scheduler.createJob( jobName, iaction, jobParams, trigger, streamProvider );
            log.warn( "New Job: " + j.getJobId() + " created" );
            return null;
          }
        } );
      } catch ( Exception e ) {
        log.error( e.getMessage(), e );
      }
    }

    if ( log.isDebugEnabled() ) {
      log.debug( MessageFormat.format(
          "Scheduling system successfully invoked action {0} as user {1} with params [ {2} ]", actionBean //$NON-NLS-1$
              .getClass().getName(), actionUser, QuartzScheduler.prettyPrintMap( params ) ) );
    }
  }

  /**
   * Tags the generated file with the lineage id and, when email is configured
   * and at least one recipient param is present, mails the file as an attachment.
   * Best-effort: any failure is logged as a warning and swallowed.
   */
  private void sendEmail( Map<String, Object> actionParams, Map<String, Serializable> params, String filePath ) {
    try {
      IUnifiedRepository repo = PentahoSystem.get( IUnifiedRepository.class );
      RepositoryFile sourceFile = repo.getFile( filePath );
      // add metadata
      Map<String, Serializable> metadata = repo.getFileMetadata( sourceFile.getId() );
      String lineageId = (String) params.get( QuartzScheduler.RESERVEDMAPKEY_LINEAGE_ID );
      metadata.put( QuartzScheduler.RESERVEDMAPKEY_LINEAGE_ID, lineageId );
      repo.setFileMetadata( sourceFile.getId(), metadata );
      // send email
      SimpleRepositoryFileData data = repo.getDataForRead( sourceFile.getId(), SimpleRepositoryFileData.class );
      // if email is setup and we have tos, then do it
      Emailer emailer = new Emailer();
      if ( !emailer.setup() ) {
        // email not configured
        return;
      }
      String to = (String) actionParams.get( "_SCH_EMAIL_TO" );
      String cc = (String) actionParams.get( "_SCH_EMAIL_CC" );
      String bcc = (String) actionParams.get( "_SCH_EMAIL_BCC" );
      if ( ( to == null || "".equals( to ) ) && ( cc == null || "".equals( cc ) )
          && ( bcc == null || "".equals( bcc ) ) ) {
        // no destination
        return;
      }
      emailer.setTo( to );
      emailer.setCc( cc );
      emailer.setBcc( bcc );
      emailer.setAttachment( data.getInputStream() );
      emailer.setAttachmentName( "attachment" );
      String attachmentName = (String) actionParams.get( "_SCH_EMAIL_ATTACHMENT_NAME" );
      if ( attachmentName != null && !"".equals( attachmentName ) ) {
        String path = filePath;
        if ( path.endsWith( ".*" ) ) {
          path = path.replace( ".*", "" );
        }
        String extension = MimeHelper.getExtension( data.getMimeType() );
        if ( extension == null ) {
          extension = ".bin";
        }
        if ( !attachmentName.endsWith( extension ) ) {
          emailer.setAttachmentName( attachmentName + extension );
        } else {
          emailer.setAttachmentName( attachmentName );
        }
      } else if ( data != null ) {
        String path = filePath;
        if ( path.endsWith( ".*" ) ) {
          path = path.replace( ".*", "" );
        }
        String extension = MimeHelper.getExtension( data.getMimeType() );
        if ( extension == null ) {
          extension = ".bin";
        }
        path = path.substring( path.lastIndexOf( "/" ) + 1, path.length() );
        if ( !path.endsWith( extension ) ) {
          emailer.setAttachmentName( path + extension );
        } else {
          emailer.setAttachmentName( path );
        }
      }
      if ( data == null || data.getMimeType() == null || "".equals( data.getMimeType() ) ) {
        emailer.setAttachmentMimeType( "binary/octet-stream" );
      } else {
        emailer.setAttachmentMimeType( data.getMimeType() );
      }
      String subject = (String) actionParams.get( "_SCH_EMAIL_SUBJECT" );
      if ( subject != null && !"".equals( subject ) ) {
        emailer.setSubject( subject );
      } else {
        emailer.setSubject( "Pentaho Scheduler: " + emailer.getAttachmentName() );
      }
      String message = (String) actionParams.get( "_SCH_EMAIL_MESSAGE" );
      // BUG FIX: this previously re-tested "subject", so the body was dropped
      // whenever the subject param was empty and setBody(null) could be called
      // when a subject existed but no message did. Test the message itself.
      if ( message != null && !"".equals( message ) ) {
        emailer.setBody( message );
      }
      emailer.send();
    } catch ( Exception e ) {
      log.warn( e.getMessage(), e );
    }
  }

  /** JobExecutionException that also logs its message (and cause) on construction. */
  class LoggingJobExecutionException extends JobExecutionException {
    private static final long serialVersionUID = -4124907454208034326L;

    public LoggingJobExecutionException( String msg ) {
      super( msg );
      log.error( msg );
    }

    public LoggingJobExecutionException( String msg, Throwable t ) {
      super( msg, t );
      log.error( msg, t );
    }
  }
}
/* * Copyright (C) 2014 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.graph; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.common.collect.ImmutableSet; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests for an undirected {@link ConfigurableMutableNetwork} with default graph properties. 
 */
@RunWith(JUnit4.class)
public class ConfigurableUndirectedNetworkTest extends ConfigurableSimpleUndirectedNetworkTest {

  // Builds the network under test: undirected, default (self-loop-allowing,
  // no-parallel-edge) properties.
  @Override
  public MutableNetwork<Integer, String> createGraph() {
    return NetworkBuilder.undirected().build();
  }

  // --- Self-loop behavior of the various view methods -----------------------

  @Test
  public void edges_selfLoop() {
    addEdge(E11, N1, N1);
    assertThat(graph.edges()).containsExactly(E11);
  }

  @Test
  public void incidentEdges_selfLoop() {
    addEdge(E11, N1, N1);
    assertThat(graph.incidentEdges(N1)).containsExactly(E11);
  }

  @Test
  public void incidentNodes_selfLoop() {
    // A self-loop's incident node set contains the node once.
    addEdge(E11, N1, N1);
    assertThat(graph.incidentNodes(E11)).containsExactly(N1);
  }

  @Test
  public void adjacentNodes_selfLoop() {
    // A node with a self-loop is adjacent to itself.
    addEdge(E11, N1, N1);
    addEdge(E12, N1, N2);
    assertThat(graph.adjacentNodes(N1)).containsExactly(N1, N2);
  }

  @Test
  public void adjacentEdges_selfLoop() {
    // An edge is never adjacent to itself
    addEdge(E11, N1, N1);
    assertThat(graph.adjacentEdges(E11)).isEmpty();
    addEdge(E12, N1, N2);
    assertThat(graph.adjacentEdges(E11)).containsExactly(E12);
  }

  @Test
  public void edgesConnecting_selfLoop() {
    addEdge(E11, N1, N1);
    assertThat(graph.edgesConnecting(N1, N1)).containsExactly(E11);
    addEdge(E12, N1, N2);
    // Undirected: connection is symmetric in the endpoints.
    assertThat(graph.edgesConnecting(N1, N2)).containsExactly(E12);
    assertThat(graph.edgesConnecting(N2, N1)).containsExactly(E12);
    assertThat(graph.edgesConnecting(N1, N1)).containsExactly(E11);
  }

  @Test
  public void inEdges_selfLoop() {
    addEdge(E11, N1, N1);
    assertThat(graph.inEdges(N1)).containsExactly(E11);
    addEdge(E12, N1, N2);
    assertThat(graph.inEdges(N1)).containsExactly(E11, E12);
  }

  @Test
  public void outEdges_selfLoop() {
    addEdge(E11, N1, N1);
    assertThat(graph.outEdges(N1)).containsExactly(E11);
    addEdge(E12, N2, N1);
    assertThat(graph.outEdges(N1)).containsExactly(E11, E12);
  }

  @Test
  public void predecessors_selfLoop() {
    addEdge(E11, N1, N1);
    assertThat(graph.predecessors(N1)).containsExactly(N1);
    addEdge(E12, N1, N2);
    assertThat(graph.predecessors(N1)).containsExactly(N1, N2);
  }

  @Test
  public void successors_selfLoop() {
    addEdge(E11, N1, N1);
    assertThat(graph.successors(N1)).containsExactly(N1);
    addEdge(E12, N2, N1);
    assertThat(graph.successors(N1)).containsExactly(N1, N2);
  }

  // --- Degree accounting with self-loops ------------------------------------
  // NOTE(review): these expect a self-loop to contribute 1 to degree/inDegree/
  // outDegree in this implementation — confirm against the Network javadoc if
  // the degree contract changes.

  @Test
  public void degree_selfLoop() {
    addEdge(E11, N1, N1);
    assertEquals(1, graph.degree(N1));
    addEdge(E12, N1, N2);
    assertEquals(2, graph.degree(N1));
  }

  @Test
  public void inDegree_selfLoop() {
    addEdge(E11, N1, N1);
    assertEquals(1, graph.inDegree(N1));
    addEdge(E12, N1, N2);
    assertEquals(2, graph.inDegree(N1));
  }

  @Test
  public void outDegree_selfLoop() {
    addEdge(E11, N1, N1);
    assertEquals(1, graph.outDegree(N1));
    addEdge(E12, N2, N1);
    assertEquals(2, graph.outDegree(N1));
  }

  // --- Mutation with self-loops ---------------------------------------------

  @Override
  @Test
  public void addEdge_selfLoop() {
    assertTrue(addEdge(E11, N1, N1));
    assertThat(graph.edges()).contains(E11);
    assertThat(graph.edgesConnecting(N1, N1)).containsExactly(E11);
  }

  @Test
  public void addEdge_existingSelfLoopEdgeBetweenSameNodes() {
    // Re-adding the identical self-loop is a no-op that returns false.
    addEdge(E11, N1, N1);
    ImmutableSet<String> edges = ImmutableSet.copyOf(graph.edges());
    assertFalse(addEdge(E11, N1, N1));
    assertThat(graph.edges()).containsExactlyElementsIn(edges);
  }

  @Test
  public void addEdge_existingEdgeBetweenDifferentNodes_selfLoops() {
    // Reusing an edge object between different endpoint pairs must be rejected.
    addEdge(E11, N1, N1);
    try {
      addEdge(E11, N1, N2);
      fail("Reusing an existing self-loop edge to connect different nodes succeeded");
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage()).contains(ERROR_REUSE_EDGE);
    }
    try {
      addEdge(E11, N2, N2);
      fail("Reusing an existing self-loop edge to make a different self-loop edge succeeded");
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage()).contains(ERROR_REUSE_EDGE);
    }
    addEdge(E12, N1, N2);
    try {
      addEdge(E12, N1, N1);
      fail("Reusing an existing edge to add a self-loop edge between different nodes succeeded");
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage()).contains(ERROR_REUSE_EDGE);
    }
  }

  @Test
  public void addEdge_parallelSelfLoopEdge() {
    // Default networks forbid parallel edges, including parallel self-loops.
    addEdge(E11, N1, N1);
    try {
      addEdge(EDGE_NOT_IN_GRAPH, N1, N1);
      fail("Adding a parallel self-loop edge succeeded");
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage()).contains(ERROR_PARALLEL_EDGE);
    }
  }

  @Test
  public void removeNode_existingNodeWithSelfLoopEdge() {
    // Removing the node also removes its self-loop edge.
    addNode(N1);
    addEdge(E11, N1, N1);
    assertTrue(graph.removeNode(N1));
    assertThat(graph.nodes()).isEmpty();
    assertThat(graph.edges()).doesNotContain(E11);
  }

  @Test
  public void removeEdge_existingSelfLoopEdge() {
    addEdge(E11, N1, N1);
    assertTrue(graph.removeEdge(E11));
    assertThat(graph.edges()).doesNotContain(E11);
    assertThat(graph.edgesConnecting(N1, N1)).isEmpty();
  }
}
/**
 * Copyright 2016 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */
package io.reactivex.subjects;

import io.reactivex.annotations.CheckReturnValue;
import java.util.concurrent.atomic.*;

import io.reactivex.Observer;
import io.reactivex.disposables.Disposable;
import io.reactivex.plugins.RxJavaPlugins;

/**
 * Subject that, once an {@link Observer} has subscribed, emits all subsequently observed items to the
 * subscriber.
 * <p>
 * <img width="640" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/S.PublishSubject.png" alt="">
 * <p>
 * Example usage:
 * <p>
 * <pre> {@code

  PublishSubject<Object> subject = PublishSubject.create();
  // observer1 will receive all onNext and onComplete events
  subject.subscribe(observer1);
  subject.onNext("one");
  subject.onNext("two");
  // observer2 will only receive "three" and onComplete
  subject.subscribe(observer2);
  subject.onNext("three");
  subject.onComplete();

  } </pre>
 *
 * @param <T>
 *          the type of items observed and emitted by the Subject
 */
public final class PublishSubject<T> extends Subject<T> {
    /** The terminated indicator for the subscribers array. */
    @SuppressWarnings("rawtypes")
    static final PublishDisposable[] TERMINATED = new PublishDisposable[0];

    /** An empty subscribers array to avoid allocating it all the time. */
    @SuppressWarnings("rawtypes")
    static final PublishDisposable[] EMPTY = new PublishDisposable[0];

    /**
     * The array of currently subscribed subscribers.
     * Maintained copy-on-write via CAS in add()/remove(); set to TERMINATED
     * exactly once by onError()/onComplete(), after which no new subscribers join.
     */
    final AtomicReference<PublishDisposable<T>[]> subscribers;

    /** The error, write before terminating and read after checking subscribers. */
    Throwable error;

    /**
     * Constructs a PublishSubject.
     * @param <T> the value type
     * @return the new PublishSubject
     */
    @CheckReturnValue
    public static <T> PublishSubject<T> create() {
        return new PublishSubject<T>();
    }

    /**
     * Constructs a PublishSubject.
     * @since 2.0
     */
    @SuppressWarnings("unchecked")
    PublishSubject() {
        subscribers = new AtomicReference<PublishDisposable<T>[]>(EMPTY);
    }

    @Override
    public void subscribeActual(Observer<? super T> t) {
        PublishDisposable<T> ps = new PublishDisposable<T>(t, this);
        // onSubscribe is delivered before registration so the observer can
        // dispose immediately; the isDisposed() re-check below handles that race.
        t.onSubscribe(ps);
        if (add(ps)) {
            // if cancellation happened while a successful add, the remove() didn't work
            // so we need to do it again
            if (ps.isDisposed()) {
                remove(ps);
            }
        } else {
            // Subject already terminated: replay the terminal event directly.
            Throwable ex = error;
            if (ex != null) {
                t.onError(ex);
            } else {
                t.onComplete();
            }
        }
    }

    /**
     * Tries to add the given subscriber to the subscribers array atomically
     * or returns false if the subject has terminated.
     * @param ps the subscriber to add
     * @return true if successful, false if the subject has terminated
     */
    boolean add(PublishDisposable<T> ps) {
        // Classic copy-on-write CAS loop: retry until the new array is installed
        // or the TERMINATED sentinel is observed.
        for (;;) {
            PublishDisposable<T>[] a = subscribers.get();
            if (a == TERMINATED) {
                return false;
            }

            int n = a.length;
            @SuppressWarnings("unchecked")
            PublishDisposable<T>[] b = new PublishDisposable[n + 1];
            System.arraycopy(a, 0, b, 0, n);
            b[n] = ps;
            if (subscribers.compareAndSet(a, b)) {
                return true;
            }
        }
    }

    /**
     * Atomically removes the given subscriber if it is subscribed to the subject.
     * @param ps the subscriber to remove
     */
    @SuppressWarnings("unchecked")
    void remove(PublishDisposable<T> ps) {
        for (;;) {
            PublishDisposable<T>[] a = subscribers.get();
            if (a == TERMINATED || a == EMPTY) {
                return;
            }

            int n = a.length;
            int j = -1;
            // Locate ps by identity; -1 means it was already removed concurrently.
            for (int i = 0; i < n; i++) {
                if (a[i] == ps) {
                    j = i;
                    break;
                }
            }

            if (j < 0) {
                return;
            }

            PublishDisposable<T>[] b;

            if (n == 1) {
                // Removing the last subscriber reinstates the shared EMPTY sentinel.
                b = EMPTY;
            } else {
                b = new PublishDisposable[n - 1];
                System.arraycopy(a, 0, b, 0, j);
                System.arraycopy(a, j + 1, b, j, n - j - 1);
            }
            if (subscribers.compareAndSet(a, b)) {
                return;
            }
        }
    }

    @Override
    public void onSubscribe(Disposable s) {
        // If already terminated, the upstream connection is of no use.
        if (subscribers.get() == TERMINATED) {
            s.dispose();
        }
    }

    @Override
    public void onNext(T t) {
        if (subscribers.get() == TERMINATED) {
            return;
        }
        if (t == null) {
            // Nulls are rejected per the Reactive Streams / RxJava 2 contract.
            onError(new NullPointerException("onNext called with null. Null values are generally not allowed in 2.x operators and sources."));
            return;
        }
        for (PublishDisposable<T> s : subscribers.get()) {
            s.onNext(t);
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public void onError(Throwable t) {
        if (subscribers.get() == TERMINATED) {
            // A second terminal error cannot be delivered; hand it to the global hook.
            RxJavaPlugins.onError(t);
            return;
        }
        if (t == null) {
            t = new NullPointerException("onError called with null. Null values are generally not allowed in 2.x operators and sources.");
        }
        // Write error BEFORE swapping in TERMINATED: late subscribers read it
        // only after observing the sentinel (see subscribeActual).
        error = t;

        for (PublishDisposable<T> s : subscribers.getAndSet(TERMINATED)) {
            s.onError(t);
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public void onComplete() {
        if (subscribers.get() == TERMINATED) {
            return;
        }
        for (PublishDisposable<T> s : subscribers.getAndSet(TERMINATED)) {
            s.onComplete();
        }
    }

    @Override
    public boolean hasObservers() {
        return subscribers.get().length != 0;
    }

    @Override
    public Throwable getThrowable() {
        if (subscribers.get() == TERMINATED) {
            return error;
        }
        return null;
    }

    @Override
    public boolean hasThrowable() {
        // Terminated with a non-null error means it terminated via onError.
        return subscribers.get() == TERMINATED && error != null;
    }

    @Override
    public boolean hasComplete() {
        // Terminated with a null error means it terminated via onComplete.
        return subscribers.get() == TERMINATED && error == null;
    }

    /**
     * Wraps the actual subscriber, tracks its requests and makes cancellation
     * to remove itself from the current subscribers array.
     *
     * @param <T> the value type
     */
    static final class PublishDisposable<T> extends AtomicBoolean implements Disposable {

        private static final long serialVersionUID = 3562861878281475070L;
        /** The actual subscriber. */
        final Observer<? super T> actual;
        /** The subject state. */
        final PublishSubject<T> parent;

        /**
         * Constructs a PublishSubscriber, wraps the actual subscriber and the state.
         * @param actual the actual subscriber
         * @param parent the parent PublishProcessor
         */
        PublishDisposable(Observer<? super T> actual, PublishSubject<T> parent) {
            this.actual = actual;
            this.parent = parent;
        }

        public void onNext(T t) {
            // The AtomicBoolean value doubles as the disposed flag.
            if (!get()) {
                actual.onNext(t);
            }
        }

        public void onError(Throwable t) {
            if (get()) {
                // Already disposed: route the undeliverable error to the global hook.
                RxJavaPlugins.onError(t);
            } else {
                actual.onError(t);
            }
        }

        public void onComplete() {
            if (!get()) {
                actual.onComplete();
            }
        }

        @Override
        public void dispose() {
            // CAS guarantees remove() runs at most once per disposable.
            if (compareAndSet(false, true)) {
                parent.remove(this);
            }
        }

        @Override
        public boolean isDisposed() {
            return get();
        }
    }
}
//
//               _           _            _
//     _ __ ___ (_)_ __  ___| |_ _ __ ___| |
//    | '_ ` _ \| | '_ \/ __| __| '__/ _ \ |
//    | | | | | | | | | \__ \ |_| | |  __/ |
//    |_| |_| |_|_|_| |_|___/\__|_|  \___|_|
//
// Author:      Alberto Pettarin (www.albertopettarin.it)
// Copyright:   Copyright 2013-2015, ReadBeyond Srl (www.readbeyond.it)
// License:     MIT
// Email:       minstrel@readbeyond.it
// Web:         http://www.readbeyond.it/minstrel/
// Status:      Production
//

package it.readbeyond.minstrel.commander;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;

import org.json.JSONObject;
import org.json.JSONArray;
import org.json.JSONException;

import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.Uri;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.view.WindowManager.LayoutParams;
import android.view.WindowManager;
import android.widget.Toast;

import java.io.File;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.InputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.channels.FileChannel;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Cordova plugin exposing a single "commander" action that dispatches to
 * device/filesystem utility commands (toast, brightness, orientation,
 * file copy/move/delete, directory listing, ...).
 *
 * The command name and its options arrive as a JSON object in the first
 * element of the Cordova args array; results are reported through the
 * {@link CallbackContext} as plain string messages (see MESSAGE_* constants).
 */
public class Commander extends CordovaPlugin {

    // plugin action
    public static final String ACTION_COMMANDER                = "commander";

    // argument names
    public static final String ARGUMENT_COMMAND                = "command";
    public static final String ARGUMENT_OPTION                 = "option";

    // command values
    public static final String COMMAND_TOAST                   = "toast";
    public static final String COMMAND_DIM                     = "dim";
    public static final String COMMAND_SET_BRIGHTNESS          = "setbrightness";
    public static final String COMMAND_GET_BRIGHTNESS          = "getbrightness";
    public static final String COMMAND_ORIENT                  = "orient";
    public static final String COMMAND_OPEN_EXTERNAL_URL       = "openExternalURL";
    public static final String COMMAND_FILESYSTEM_INFO         = "filesystemInfo";
    public static final String COMMAND_CHECK_FILE_EXISTS       = "checkFileExists";
    public static final String COMMAND_DELETE_RELATIVE         = "deleteRelative";
    public static final String COMMAND_DELETE_ABSOLUTE         = "deleteAbsolute";
    public static final String COMMAND_WRITE_TO_FILE           = "writeToFile";
    public static final String COMMAND_COPY                    = "copy";
    public static final String COMMAND_MOVE                    = "move";
    //public static final String COMMAND_LIST_DIRECTORY          = "listDirectory";
    public static final String COMMAND_LIST_SUBDIRECTORIES     = "listSubdirectories";
    public static final String COMMAND_COPY_FROM_ASSETS_WWW    = "copyFromAssetsWWW";
    //public static final String COMMAND_GET_STORAGE_ROOTS       = "getStorageRoots";
    //public static final String COMMAND_CREATE_DIRECTORY        = "createDirectory";

    // orient options
    public static final String ORIENT_OPTION_AUTO              = "auto";
    public static final String ORIENT_OPTION_LANDSCAPE         = "landscape";
    public static final String ORIENT_OPTION_PORTRAIT          = "portrait";

    // return messages
    public static final String MESSAGE_REFRESH                 = "refresh";
    public static final String MESSAGE_NO_REFRESH              = "norefresh";
    public static final String MESSAGE_FILE_EXISTS             = "fileexists";
    public static final String MESSAGE_FILE_DOES_NOT_EXIST     = "filedoesnotexist";
    public static final String MESSAGE_FILE_WRITTEN            = "filewritten";
    public static final String MESSAGE_FILE_NOT_WRITTEN        = "filenotwritten";
    public static final String MESSAGE_FILE_COPIED             = "filecopied";
    public static final String MESSAGE_FILE_MOVED              = "filemoved";
    public static final String MESSAGE_DIRECTORY_CREATED       = "directorycreated";
    public static final String MESSAGE_DIRECTORY_NOT_CREATED   = "directorynotcreated";
    public static final String MESSAGE_ERROR_WHILE_COPYING     = "errorwhilecopying";

    /**
     * Entry point called by the Cordova bridge.
     *
     * Runs the requested command on the Cordova thread pool (not the UI thread);
     * individual commands that must touch the UI hop back via runOnUiThread.
     *
     * @param action          must equal ACTION_COMMANDER, otherwise an error is reported
     * @param args            JSON array whose first element holds ARGUMENT_COMMAND/ARGUMENT_OPTION
     * @param callbackContext receives the result message
     * @return true if the action was recognized, false otherwise
     */
    @Override
    public boolean execute(String action, final JSONArray args, final CallbackContext callbackContext) throws JSONException {
        if (ACTION_COMMANDER.equals(action)) {
            cordova.getThreadPool().execute(new Runnable() {
                public void run() {
                    try {
                        JSONObject argsJSONObject = args.getJSONObject(0);
                        String commandName = argsJSONObject.getString(ARGUMENT_COMMAND);
                        JSONObject parameters = new JSONObject(argsJSONObject.getString(ARGUMENT_OPTION));

                        // show toast
                        if (commandName.equals(COMMAND_TOAST)) {
                            toast(parameters.optString("message", ""), callbackContext);
                        }

                        // dim/undim home/back/settings bar
                        if (commandName.equals(COMMAND_DIM)) {
                            dim(parameters.optBoolean("dim", false), callbackContext);
                        }

                        // set screen brightness
                        if (commandName.equals(COMMAND_SET_BRIGHTNESS)) {
                            setBrightness(parameters.optString("value", "1.0"), callbackContext);
                        }

                        // get screen brightness
                        if (commandName.equals(COMMAND_GET_BRIGHTNESS)) {
                            getBrightness(callbackContext);
                        }

                        // set screen orientation
                        if (commandName.equals(COMMAND_ORIENT)) {
                            orient(parameters.optString("value", ORIENT_OPTION_AUTO), callbackContext);
                        }

                        // open external URL
                        if (commandName.equals(COMMAND_OPEN_EXTERNAL_URL)) {
                            openExternalURL(parameters.optString("url", "http://www.readbeyond.it/"), callbackContext);
                        }

                        // get filesystem info
                        if (commandName.equals(COMMAND_FILESYSTEM_INFO)) {
                            callbackContext.success(getFilesystemInfoJSONString());
                        }

                        // check whether the given file exists
                        // BUG FIX: the original fell through and invoked success()
                        // a second time with MESSAGE_FILE_DOES_NOT_EXIST even when
                        // the file existed; an if/else reports exactly one result.
                        if (commandName.equals(COMMAND_CHECK_FILE_EXISTS)) {
                            if (doesFileExist(parameters.optString("path", null))) {
                                callbackContext.success(MESSAGE_FILE_EXISTS);
                            } else {
                                callbackContext.success(MESSAGE_FILE_DOES_NOT_EXIST);
                            }
                        }

                        // delete file or directory, where option is the path relative to external storage
                        if (commandName.equals(COMMAND_DELETE_RELATIVE)) {
                            delete(new File(Environment.getExternalStorageDirectory(), parameters.optString("path", null)), callbackContext);
                        }

                        // delete file or directory, where option is the absolute path in the file system
                        if (commandName.equals(COMMAND_DELETE_ABSOLUTE)) {
                            delete(new File(parameters.optString("path", null)), callbackContext);
                        }

                        // copy file option into option2
                        if (commandName.equals(COMMAND_COPY)) {
                            copyFile(parameters.optString("source", null), parameters.optString("destination", null), callbackContext);
                        }

                        // copy file option from assets/www/ into option2
                        if (commandName.equals(COMMAND_COPY_FROM_ASSETS_WWW)) {
                            copyFileFromAssetsWWW(parameters.optString("source", null), parameters.optString("destination", null), callbackContext);
                        }

                        // move file option into option2
                        if (commandName.equals(COMMAND_MOVE)) {
                            moveFile(parameters.optString("source", null), parameters.optString("destination", null), callbackContext);
                        }

                        // write string option2 to file option
                        if (commandName.equals(COMMAND_WRITE_TO_FILE)) {
                            callbackContext.success(writeStringToFile(parameters.optString("destination", null), parameters.optString("string", null)));
                        }

                        // list the subdirectories of the directory option, ignoring hidden subdirectories if option2 is true
                        if (commandName.equals(COMMAND_LIST_SUBDIRECTORIES)) {
                            callbackContext.success(getDirectoryListingJSONString(parameters.optString("path", null), parameters.optBoolean("recursive", false), parameters.optBoolean("ignoreHidden", true), true));
                        }

                        // call success
                        // NOTE: commands above may already have invoked the callback;
                        // Cordova ignores callbacks after the first one finishes the context.
                        callbackContext.success("");
                    } catch (Exception e) {
                        // call error
                        callbackContext.error("Exception " + e);
                    }
                }
            });
            return true;
        }
        callbackContext.error("Invalid action");
        return false;
    }

    /**
     * Copies a file bundled under assets/www/ to an arbitrary destination path.
     * Reports MESSAGE_FILE_COPIED on success, MESSAGE_ERROR_WHILE_COPYING on any failure.
     */
    private void copyFileFromAssetsWWW(String sourcePath, String destinationPath, final CallbackContext callbackContext) {
        String source      = "www" + File.separator + sourcePath;
        String destination = this.normalizePath(destinationPath);
        // copy in chunks of 4 KB
        final int BUFFER = 4096;
        BufferedInputStream is = null;
        BufferedOutputStream dest = null;
        try {
            File d = new File(destination);

            // create parent directory, if not existing
            File destinationParent = d.getParentFile();
            destinationParent.mkdirs();
            // TODO check for write permission?

            is = new BufferedInputStream(cordova.getActivity().getApplicationContext().getAssets().open(source));
            dest = new BufferedOutputStream(new FileOutputStream(d), BUFFER);
            int numberOfBytesRead;
            byte data[] = new byte[BUFFER];
            while ((numberOfBytesRead = is.read(data, 0, BUFFER)) > -1) {
                dest.write(data, 0, numberOfBytesRead);
            }
            dest.flush();
            callbackContext.success(MESSAGE_FILE_COPIED);
        } catch (Exception e) {
            callbackContext.success(MESSAGE_ERROR_WHILE_COPYING);
        } finally {
            // BUG FIX: streams were leaked when an exception occurred mid-copy;
            // always close both, ignoring secondary close failures.
            closeQuietly(dest);
            closeQuietly(is);
        }
    }

    /**
     * Copies the file at sourcePath to destinationPath via NIO channel transfer.
     * Reports MESSAGE_FILE_COPIED, MESSAGE_FILE_DOES_NOT_EXIST, or MESSAGE_ERROR_WHILE_COPYING.
     */
    private void copyFile(String sourcePath, String destinationPath, final CallbackContext callbackContext) {
        String source      = this.normalizePath(sourcePath);
        String destination = this.normalizePath(destinationPath);
        FileInputStream inStream = null;
        FileOutputStream outStream = null;
        try {
            File f = new File(source);
            if (f.exists()) {
                File d = new File(destination);

                // create parent directory, if not existing
                File destinationParent = d.getParentFile();
                destinationParent.mkdirs();
                // TODO check for write permission?

                // copy file
                inStream  = new FileInputStream(f);
                outStream = new FileOutputStream(d);
                FileChannel inChannel  = inStream.getChannel();
                FileChannel outChannel = outStream.getChannel();
                inChannel.transferTo(0, inChannel.size(), outChannel);
                callbackContext.success(MESSAGE_FILE_COPIED);
            } else {
                callbackContext.success(MESSAGE_FILE_DOES_NOT_EXIST);
            }
        } catch (Exception e) {
            callbackContext.success(MESSAGE_ERROR_WHILE_COPYING);
        } finally {
            // BUG FIX: streams were leaked when transferTo threw; closing the
            // streams also closes their channels.
            closeQuietly(outStream);
            closeQuietly(inStream);
        }
    }

    /** Closes a resource, swallowing any close-time exception. */
    private static void closeQuietly(java.io.Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException e) {
                // nop
            }
        }
    }

    /**
     * Moves (renames) the file at sourcePath to destinationPath.
     * Reports MESSAGE_FILE_MOVED, MESSAGE_FILE_DOES_NOT_EXIST, or MESSAGE_ERROR_WHILE_COPYING.
     */
    private void moveFile(String sourcePath, String destinationPath, final CallbackContext callbackContext) {
        String source      = this.normalizePath(sourcePath);
        String destination = this.normalizePath(destinationPath);
        try {
            File f = new File(source);
            if (f.exists()) {
                File d = new File(destination);

                // create parent directory, if not existing
                File destinationParent = d.getParentFile();
                destinationParent.mkdirs();
                // TODO check for write permission?

                // move file
                // BUG FIX: renameTo() returns false on failure (e.g. across
                // filesystems); the original ignored it and always claimed success.
                if (f.renameTo(d)) {
                    callbackContext.success(MESSAGE_FILE_MOVED);
                } else {
                    callbackContext.success(MESSAGE_ERROR_WHILE_COPYING);
                }
            } else {
                callbackContext.success(MESSAGE_FILE_DOES_NOT_EXIST);
            }
        } catch (Exception e) {
            callbackContext.success(MESSAGE_ERROR_WHILE_COPYING);
        }
    }

    /**
     * Returns the known storage root paths, falling back to the primary
     * external storage directory when the Storage helper reports none.
     */
    private String[] getStoragePaths() {
        String[] storagePaths = Storage.getStoragePaths();
        if ((storagePaths == null) || (storagePaths.length < 1)) {
            storagePaths = new String[1];
            storagePaths[0] = Environment.getExternalStorageDirectory().getAbsolutePath();
        }
        return storagePaths;
    }

    /** Wraps getStoragePaths() into a JSONArray. */
    private JSONArray getStorageRootsJSONArray() {
        String[] storagePaths = this.getStoragePaths();
        JSONArray arr = new JSONArray();
        for (String s : storagePaths) {
            arr.put(s);
        }
        return arr;
    }

    /**
     * Builds a JSON string describing the filesystem: root, separator,
     * (placeholder) documents/cache dirs, and the storage roots.
     */
    private String getFilesystemInfoJSONString() throws JSONException {
        JSONObject obj = new JSONObject();
        obj.put("root", Environment.getExternalStorageDirectory().getAbsolutePath());
        obj.put("separator", File.separator);
        obj.put("documentsDir", ""); // TODO
        obj.put("cacheDir", "");     // TODO
        obj.put("storageRoots", this.getStorageRootsJSONArray());
        return obj.toString();
    }

    /** Returns true if the (normalized) path exists; false on any error. */
    private boolean doesFileExist(String path) {
        try {
            File f = new File(this.normalizePath(path));
            if (f.exists()) {
                return true;
            }
        } catch (Exception e) {
            // nop
        }
        return false;
    }

    /**
     * Writes the given string to the (normalized) path, overwriting it.
     * @return MESSAGE_FILE_WRITTEN or MESSAGE_FILE_NOT_WRITTEN
     */
    private String writeStringToFile(String path, String contents) {
        FileWriter out = null;
        try {
            // TODO check for write permission?
            File f = new File(this.normalizePath(path));
            out = new FileWriter(f);
            out.write(contents);
            out.flush();
            return MESSAGE_FILE_WRITTEN;
        } catch (Exception e) {
            // nop
        } finally {
            closeQuietly(out);
        }
        return MESSAGE_FILE_NOT_WRITTEN;
    }

    /**
     * Builds a JSON string mapping each listed root to its subdirectory listing.
     * An empty/null path lists all storage roots; otherwise only the given path.
     */
    private String getDirectoryListingJSONString(String path, boolean recursive, boolean ignoreHidden, boolean directoriesOnly) throws JSONException {
        JSONObject obj = new JSONObject();
        if ((path == null) || (path.equals(""))) {
            List<StorageInfo> storagePathsInfo = Storage.getStoragePathsInfo();
            for (int i = 0; i < storagePathsInfo.size(); i++) {
                StorageInfo si = storagePathsInfo.get(i);
                JSONObject sp = new JSONObject();
                sp.put("internal", !si.removable);
                sp.put("subdirectories", this.getDirectoryListingJSONArray(si.path, recursive, ignoreHidden, directoriesOnly));
                obj.put(si.path, sp);
            }
        } else {
            JSONObject sp = new JSONObject();
            sp.put("internal", true); // dummy
            sp.put("subdirectories", this.getDirectoryListingJSONArray(path, recursive, ignoreHidden, directoriesOnly));
            obj.put(path, sp);
        }
        return obj.toString();
    }

    /** Collects the listing of a single path into a JSONArray; errors yield an empty array. */
    private JSONArray getDirectoryListingJSONArray(String path, boolean recursive, boolean ignoreHidden, boolean directoriesOnly) throws JSONException {
        List<String> acc = new ArrayList<String>();
        try {
            listRecursively(new File(this.normalizePath(path)), recursive, ignoreHidden, directoriesOnly, acc);
        } catch (Exception e) {
            // nothing
        }
        JSONArray arr = new JSONArray();
        for (String s : acc) {
            arr.put(s);
        }
        return arr;
    }

    /**
     * Accumulates absolute paths under {@code path} into {@code acc},
     * honoring the recursive / ignoreHidden / directoriesOnly flags.
     */
    private void listRecursively(File path, boolean recursive, boolean ignoreHidden, boolean directoriesOnly, List<String> acc) {
        File[] files = path.listFiles();
        // BUG FIX: listFiles() returns null for non-directories or unreadable
        // directories; the original dereferenced it unconditionally (NPE).
        if (files == null) {
            return;
        }
        for (File f : files) {
            if ((f.canRead()) && (!f.isHidden())) {
                String n = f.getName();
                if (!((n.startsWith(".")) && (ignoreHidden))) {
                    if (f.isDirectory()) {
                        if (directoriesOnly) {
                            acc.add(f.getAbsolutePath());
                        }
                        if (recursive) {
                            listRecursively(f, recursive, ignoreHidden, directoriesOnly, acc);
                        }
                    } else {
                        if (!directoriesOnly) {
                            acc.add(f.getAbsolutePath());
                        }
                    }
                }
            }
        }
    }

    // normalize path against the primary external storage root
    private String normalizePath(String path) {
        return this.normalizePath(path, Environment.getExternalStorageDirectory().getAbsolutePath());
    }

    /**
     * Normalizes a path: absolute paths pass through, "file://" prefixes are
     * stripped, and relative paths are resolved against {@code base}.
     */
    private String normalizePath(String path, String base) {
        if ((path.length() > 0) && (path.startsWith(File.separator))) {
            // path is an absolute path, like /sdcard0/minstrel/foo => return it
            return path;
        } else if ((path.length() > 7) && (path.startsWith("file://"))) {
            // path is a prefixed path, like file:///sdcard0/minstrel/foo => strip file://
            return path.substring(7);
        } else {
            // path is relative => returns base/path
            return base + File.separator + path;
        }
    }

    // escape double quotes
    private String escape(String unescaped) {
        if (unescaped == null) {
            return "";
        }
        return unescaped.replace("\"","\\\"");
    }

    // delete a file or directory tree, reporting through the callback
    private void delete(File f, final CallbackContext callbackContext) {
        try {
            deleteRecursively(f);
            callbackContext.success("");
        } catch (Exception e) {
            // nop
            callbackContext.error("Exception " + e);
        }
    }

    // delete recursively
    private void deleteRecursively(File f) {
        if (f.isDirectory()) {
            File[] children = f.listFiles();
            // Guard against null (unreadable directory) before recursing.
            if (children != null) {
                for (File child : children) {
                    deleteRecursively(child);
                }
            }
        }
        f.delete();
    }

    /** Shows a long Android toast with the given message (on the UI thread). */
    private void toast(final String msg, final CallbackContext callbackContext) {
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                Toast toast = Toast.makeText(cordova.getActivity().getApplicationContext(), msg, Toast.LENGTH_LONG);
                toast.show();
                callbackContext.success("");
            }
        });
    }

    /** Dims or restores the system UI (navigation bar) on ICS+ devices. */
    private void dim(final boolean dim, final CallbackContext callbackContext) {
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            cordova.getActivity().runOnUiThread(new Runnable() {
                public void run() {
                    try {
                        // getRootView is really necessary here!!!
                        View rootView = cordova.getActivity().getWindow().getDecorView().getRootView();
                        if (rootView != null) {
                            if (dim) {
                                rootView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LOW_PROFILE);
                            } else {
                                rootView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_VISIBLE);
                            }
                        }
                        callbackContext.success("");
                    } catch (Exception e) {
                        callbackContext.error("Exception " + e);
                    }
                }
            });
        }
    }

    /**
     * Sets the app window brightness to the given value in [0, 1]
     * (system-wide brightness change is intentionally left commented out).
     */
    private void setBrightness(final String option, final CallbackContext callbackContext) {
        final float brightness  = (float)Double.parseDouble(option);
        final int   ibrightness = (int)(brightness * 255);
        try {
            cordova.getActivity().runOnUiThread(new Runnable() {
                public void run() {
                    //
                    // this changes the system brightness
                    //
                    // android.provider.Settings.System.putInt(cordova.getActivity().getApplicationContext().getContentResolver(), android.provider.Settings.System.SCREEN_BRIGHTNESS_MODE, android.provider.Settings.System.SCREEN_BRIGHTNESS_MODE_MANUAL);
                    // android.provider.Settings.System.putInt(cordova.getActivity().getApplicationContext().getContentResolver(), android.provider.Settings.System.SCREEN_BRIGHTNESS, ibrightness);
                    //
                    //
                    // this changes the brightness only for the app
                    //
                    WindowManager.LayoutParams layoutParams = cordova.getActivity().getWindow().getAttributes();
                    layoutParams.screenBrightness = brightness;
                    cordova.getActivity().getWindow().setAttributes(layoutParams);
                    callbackContext.success("");
                }
            });
        } catch (Exception e) {
            // nop
            callbackContext.error("Exception " + e);
        }
    }

    /** Reports the app window brightness as a string. */
    private void getBrightness(final CallbackContext callbackContext) {
        try {
            //cordova.getActivity().runOnUiThread(new Runnable() {
            //    public void run() {
                    WindowManager.LayoutParams layoutParams = cordova.getActivity().getWindow().getAttributes();
                    callbackContext.success("" + layoutParams.screenBrightness);
            //    }
            //});
        } catch (Exception e) {
            // nop
            callbackContext.error("Exception " + e);
        }
    }

    /** Locks the screen orientation to portrait/landscape, or unlocks it ("auto"). */
    private void orient(final String option, final CallbackContext callbackContext) {
        try {
            Activity activity = cordova.getActivity();
            if (option.equals(ORIENT_OPTION_PORTRAIT)) {
                // portrait or reverse portrait
                activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_PORTRAIT);
            } else if (option.equals(ORIENT_OPTION_LANDSCAPE)) {
                // landscape or reverse portrait
                activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE);
            } else if (option.equals(ORIENT_OPTION_AUTO)) {
                // unlock orientation
                activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
            } else {
                // default: unlock orientation
                activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
            }
            callbackContext.success("");
        } catch (Exception e) {
            // nop
            callbackContext.error("Exception " + e);
        }
    }

    /** Opens the given URL with the system's default handler (ACTION_VIEW intent). */
    private void openExternalURL(final String url, final CallbackContext callbackContext) {
        try {
            Intent intent = null;
            intent = new Intent(Intent.ACTION_VIEW);
            Uri uri = Uri.parse(url);
            /*
            if ("file".equals(uri.getScheme())) {
                intent.setDataAndType(uri, webView.getResourceApi().getMimeType(uri));
            } else {
                intent.setData(uri);
            }
            */
            intent.setData(uri);
            cordova.getActivity().startActivity(intent);
            callbackContext.success("");
        } catch (Exception e) {
            // nop
            callbackContext.error("Exception " + e);
        }
    }
}
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core; import org.drools.core.common.AgendaFactory; import org.drools.core.common.AgendaGroupFactory; import org.drools.core.common.ProjectClassLoader; import org.drools.core.common.PropagationContextFactory; import org.drools.core.common.WorkingMemoryFactory; import org.drools.core.conflict.DepthConflictResolver; import org.drools.core.reteoo.KieComponentFactory; import org.drools.core.reteoo.builder.NodeFactory; import org.drools.core.runtime.rule.impl.DefaultConsequenceExceptionHandler; import org.drools.core.spi.ConflictResolver; import org.drools.core.util.ConfFileUtils; import org.drools.core.util.MVELSafeHelper; import org.drools.core.util.StringUtils; import org.kie.api.KieBaseConfiguration; import org.kie.api.conf.DeclarativeAgendaOption; import org.kie.api.conf.EqualityBehaviorOption; import org.kie.api.conf.EventProcessingOption; import org.kie.api.conf.KieBaseOption; import org.kie.api.conf.MBeansOption; import org.kie.api.conf.MultiValueKieBaseOption; import org.kie.api.conf.RemoveIdentitiesOption; import org.kie.api.conf.SingleValueKieBaseOption; import org.kie.api.runtime.rule.ConsequenceExceptionHandler; import org.kie.internal.builder.conf.ClassLoaderCacheOption; import org.kie.internal.builder.conf.RuleEngineOption; import org.kie.internal.builder.conf.SessionCacheOption; import org.kie.internal.conf.AlphaThresholdOption; import 
org.kie.internal.conf.CompositeKeyDepthOption; import org.kie.internal.conf.ConsequenceExceptionHandlerOption; import org.kie.internal.conf.ConstraintJittingThresholdOption; import org.kie.internal.conf.IndexLeftBetaMemoryOption; import org.kie.internal.conf.IndexPrecedenceOption; import org.kie.internal.conf.IndexRightBetaMemoryOption; import org.kie.internal.conf.MaxThreadsOption; import org.kie.internal.conf.MultithreadEvaluationOption; import org.kie.internal.conf.PermGenThresholdOption; import org.kie.internal.conf.SequentialAgendaOption; import org.kie.internal.conf.SequentialOption; import org.kie.internal.conf.ShareAlphaNodesOption; import org.kie.internal.conf.ShareBetaNodesOption; import org.kie.internal.utils.ChainedProperties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import static org.drools.core.util.MemoryUtil.hasPermGen; /** * RuleBaseConfiguration * * A class to store RuleBase related configuration. It must be used at rule base instantiation time * or not used at all. * This class will automatically load default values from system properties, so if you want to set * a default configuration value for all your new rule bases, you can simply set the property as * a System property. * * After RuleBase is created, it makes the configuration immutable and there is no way to make it * mutable again. This is to avoid inconsistent behavior inside rulebase. * * NOTE: This API is under review and may change in the future. 
*/ /** * Available configuration options: * <pre> * drools.maintainTms = &lt;true|false&gt; * drools.sequential = &lt;true|false&gt; * drools.sequential.agenda = &lt;sequential|dynamic&gt; * drools.removeIdentities = &lt;true|false&gt; * drools.shareAlphaNodes = &lt;true|false&gt; * drools.shareBetaNodes = &lt;true|false&gt; * drools.alphaNodeHashingThreshold = &lt;1...n&gt; * drools.compositeKeyDepth = &lt;1..3&gt; * drools.indexLeftBetaMemory = &lt;true/false&gt; * drools.indexRightBetaMemory = &lt;true/false&gt; * drools.equalityBehavior = &lt;identity|equality&gt; * drools.executorService = &lt;qualified class name&gt; * drools.conflictResolver = &lt;qualified class name&gt; * drools.consequenceExceptionHandler = &lt;qualified class name&gt; * drools.ruleBaseUpdateHandler = &lt;qualified class name&gt; * drools.sessionClock = &lt;qualified class name&gt; * drools.mbeans = &lt;enabled|disabled&gt; * drools.classLoaderCacheEnabled = &lt;true|false&gt; * drools.phreakEnabled = &lt;true|false&gt; * drools.declarativeAgendaEnabled = &lt;true|false&gt; * drools.permgenThreshold = &lt;1...n&gt; * drools.jittingThreshold = &lt;1...n&gt; * </pre> */ public class RuleBaseConfiguration implements KieBaseConfiguration, Externalizable { private static final long serialVersionUID = 510l; public static final boolean DEFAULT_PHREAK = true; public static final boolean DEFAULT_SESSION_CACHE = true; public static final String DEFAULT_SIGN_ON_SERIALIZATION = "false"; protected static final transient Logger logger = LoggerFactory.getLogger(RuleBaseConfiguration.class); private ChainedProperties chainedProperties; private boolean immutable; private boolean sequential; private SequentialAgenda sequentialAgenda; private boolean maintainTms; private boolean removeIdentities; private boolean shareAlphaNodes; private boolean shareBetaNodes; private int permGenThreshold; private int jittingThreshold; private int alphaNodeHashingThreshold; private int compositeKeyDepth; private boolean 
indexLeftBetaMemory; private boolean indexRightBetaMemory; private AssertBehaviour assertBehaviour; private String executorService; private String consequenceExceptionHandler; private String ruleBaseUpdateHandler; private boolean classLoaderCacheEnabled; private boolean phreakEnabled; private boolean declarativeAgenda; private EventProcessingOption eventProcessingMode; private IndexPrecedenceOption indexPrecedenceOption; private SessionCacheOption sessionCacheOption; // if "true", rulebase builder will try to split // the rulebase into multiple partitions that can be evaluated // in parallel by using multiple internal threads private boolean multithread; private int maxThreads; // this property activates MBean monitoring and management private boolean mbeansEnabled; private ConflictResolver conflictResolver; private Map<String, ActivationListenerFactory> activationListeners; private List<Map<String, Object>> workDefinitions; private boolean advancedProcessRuleIntegration; private transient ClassLoader classLoader; private KieComponentFactory componentFactory; private static class DefaultRuleBaseConfigurationHolder { private static final RuleBaseConfiguration defaultConf = new RuleBaseConfiguration(); } public static RuleBaseConfiguration getDefaultInstance() { return DefaultRuleBaseConfigurationHolder.defaultConf; } public void writeExternal(ObjectOutput out) throws IOException { out.writeObject(chainedProperties); out.writeBoolean(immutable); out.writeBoolean(sequential); out.writeObject(sequentialAgenda); out.writeBoolean(maintainTms); out.writeBoolean(removeIdentities); out.writeBoolean(shareAlphaNodes); out.writeBoolean(shareBetaNodes); out.writeInt(permGenThreshold); out.writeInt(jittingThreshold); out.writeInt(alphaNodeHashingThreshold); out.writeInt(compositeKeyDepth); out.writeBoolean(indexLeftBetaMemory); out.writeBoolean(indexRightBetaMemory); out.writeObject(indexPrecedenceOption); out.writeObject(assertBehaviour); out.writeObject(executorService); 
out.writeObject(consequenceExceptionHandler); out.writeObject(ruleBaseUpdateHandler); out.writeObject(conflictResolver); out.writeBoolean(advancedProcessRuleIntegration); out.writeBoolean(multithread); out.writeInt(maxThreads); out.writeObject(eventProcessingMode); out.writeBoolean(classLoaderCacheEnabled); out.writeBoolean(phreakEnabled); out.writeBoolean(declarativeAgenda); out.writeObject(componentFactory); out.writeObject(sessionCacheOption); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { chainedProperties = (ChainedProperties) in.readObject(); immutable = in.readBoolean(); sequential = in.readBoolean(); sequentialAgenda = (SequentialAgenda) in.readObject(); maintainTms = in.readBoolean(); removeIdentities = in.readBoolean(); shareAlphaNodes = in.readBoolean(); shareBetaNodes = in.readBoolean(); permGenThreshold = in.readInt(); jittingThreshold = in.readInt(); alphaNodeHashingThreshold = in.readInt(); compositeKeyDepth = in.readInt(); indexLeftBetaMemory = in.readBoolean(); indexRightBetaMemory = in.readBoolean(); indexPrecedenceOption = (IndexPrecedenceOption) in.readObject(); assertBehaviour = (AssertBehaviour) in.readObject(); executorService = (String) in.readObject(); consequenceExceptionHandler = (String) in.readObject(); ruleBaseUpdateHandler = (String) in.readObject(); conflictResolver = (ConflictResolver) in.readObject(); advancedProcessRuleIntegration = in.readBoolean(); multithread = in.readBoolean(); maxThreads = in.readInt(); eventProcessingMode = (EventProcessingOption) in.readObject(); classLoaderCacheEnabled = in.readBoolean(); phreakEnabled = in.readBoolean(); declarativeAgenda = in.readBoolean(); componentFactory = (KieComponentFactory) in.readObject(); sessionCacheOption = (SessionCacheOption) in.readObject(); } /** * Creates a new rulebase configuration using the provided properties * as configuration options. 
Also, if a Thread.currentThread().getContextClassLoader() * returns a non-null class loader, it will be used as the parent classloader * for this rulebase class loaders, otherwise, the RuleBaseConfiguration.class.getClassLoader() * class loader will be used. * * @param properties */ public RuleBaseConfiguration(Properties properties) { init(properties, null); } /** * Creates a new rulebase with a default parent class loader set according * to the following algorithm: * * If a Thread.currentThread().getContextClassLoader() returns a non-null class loader, * it will be used as the parent class loader for this rulebase class loaders, otherwise, * the RuleBaseConfiguration.class.getClassLoader() class loader will be used. */ public RuleBaseConfiguration() { init(null, null); } /** * A constructor that sets the parent classloader to be used * while dealing with this rule base * * @param classLoaders */ public RuleBaseConfiguration(ClassLoader... classLoaders) { init(null, classLoaders); } public void setProperty(String name, String value) { name = name.trim(); if (StringUtils.isEmpty(name)) { return; } if (name.equals(SequentialAgendaOption.PROPERTY_NAME)) { setSequentialAgenda(SequentialAgenda.determineSequentialAgenda(StringUtils.isEmpty(value) ? "sequential" : value)); } else if (name.equals(SequentialOption.PROPERTY_NAME)) { setSequential(StringUtils.isEmpty(value) ? false : Boolean.valueOf(value)); } else if (name.equals(RemoveIdentitiesOption.PROPERTY_NAME)) { setRemoveIdentities(StringUtils.isEmpty(value) ? false : Boolean.valueOf(value)); } else if (name.equals(ShareAlphaNodesOption.PROPERTY_NAME)) { setShareAlphaNodes(StringUtils.isEmpty(value) ? false : Boolean.valueOf(value)); } else if ( name.equals( ShareBetaNodesOption.PROPERTY_NAME ) ) { setShareBetaNodes(StringUtils.isEmpty(value) ? false : Boolean.valueOf(value)); } else if ( name.equals( PermGenThresholdOption.PROPERTY_NAME ) ) { setPermGenThreshold(StringUtils.isEmpty(value) ? 
PermGenThresholdOption.DEFAULT_VALUE : Integer.parseInt(value)); } else if ( name.equals( ConstraintJittingThresholdOption.PROPERTY_NAME ) ) { setJittingThreshold( StringUtils.isEmpty( value ) ? ConstraintJittingThresholdOption.DEFAULT_VALUE : Integer.parseInt( value ) ); } else if ( name.equals( AlphaThresholdOption.PROPERTY_NAME ) ) { setAlphaNodeHashingThreshold( StringUtils.isEmpty( value ) ? 3 : Integer.parseInt(value)); } else if ( name.equals( CompositeKeyDepthOption.PROPERTY_NAME ) ) { setCompositeKeyDepth( StringUtils.isEmpty( value ) ? 3 : Integer.parseInt(value)); } else if ( name.equals( IndexLeftBetaMemoryOption.PROPERTY_NAME ) ) { setIndexLeftBetaMemory( StringUtils.isEmpty( value ) ? true : Boolean.valueOf(value)); } else if ( name.equals( IndexRightBetaMemoryOption.PROPERTY_NAME ) ) { setIndexRightBetaMemory( StringUtils.isEmpty( value ) ? true : Boolean.valueOf(value)); } else if ( name.equals( IndexPrecedenceOption.PROPERTY_NAME ) ) { setIndexPrecedenceOption( StringUtils.isEmpty( value ) ? IndexPrecedenceOption.EQUALITY_PRIORITY : IndexPrecedenceOption.determineIndexPrecedence(value)); } else if ( name.equals( EqualityBehaviorOption.PROPERTY_NAME ) ) { setAssertBehaviour( AssertBehaviour.determineAssertBehaviour( StringUtils.isEmpty( value ) ? "identity" : value)); } else if ( name.equals( ConsequenceExceptionHandlerOption.PROPERTY_NAME ) ) { setConsequenceExceptionHandler( StringUtils.isEmpty( value ) ? DefaultConsequenceExceptionHandler.class.getName() : value); } else if ( name.equals( "drools.ruleBaseUpdateHandler" ) ) { setRuleBaseUpdateHandler( StringUtils.isEmpty( value ) ? "" : value); } else if ( name.equals( "drools.conflictResolver" ) ) { setConflictResolver( determineConflictResolver( StringUtils.isEmpty( value ) ? DepthConflictResolver.class.getName() : value)); } else if ( name.equals( "drools.advancedProcessRuleIntegration" ) ) { setAdvancedProcessRuleIntegration( StringUtils.isEmpty( value ) ? 
false : Boolean.valueOf(value)); } else if ( name.equals( MultithreadEvaluationOption.PROPERTY_NAME ) ) { setMultithreadEvaluation( StringUtils.isEmpty( value ) ? false : Boolean.valueOf(value)); } else if ( name.equals( MaxThreadsOption.PROPERTY_NAME ) ) { setMaxThreads( StringUtils.isEmpty( value ) ? 3 : Integer.parseInt(value)); } else if ( name.equals( EventProcessingOption.PROPERTY_NAME ) ) { setEventProcessingMode( EventProcessingOption.determineEventProcessingMode( StringUtils.isEmpty( value ) ? "cloud" : value)); } else if ( name.equals( MBeansOption.PROPERTY_NAME ) ) { setMBeansEnabled( MBeansOption.isEnabled(value)); } else if ( name.equals( ClassLoaderCacheOption.PROPERTY_NAME ) ) { setClassLoaderCacheEnabled( StringUtils.isEmpty( value ) ? true : Boolean.valueOf(value)); } else if ( name.equals( RuleEngineOption.PROPERTY_NAME ) ) { setPhreakEnabled( StringUtils.isEmpty( value ) ? DEFAULT_PHREAK : value.equalsIgnoreCase( RuleEngineOption.PHREAK.toString())); } else if ( name.equals( SessionCacheOption.PROPERTY_NAME ) ) { setSessionCacheOption(SessionCacheOption.determineOption(StringUtils.isEmpty(value) ? 
"none" : value)); } } public String getProperty(String name) { name = name.trim(); if ( StringUtils.isEmpty( name ) ) { return null; } if ( name.equals( SequentialAgendaOption.PROPERTY_NAME ) ) { return getSequentialAgenda().toExternalForm(); } else if ( name.equals( SequentialOption.PROPERTY_NAME ) ) { return Boolean.toString( isSequential() ); } else if ( name.equals( RemoveIdentitiesOption.PROPERTY_NAME ) ) { return Boolean.toString( isRemoveIdentities() ); } else if ( name.equals( ShareAlphaNodesOption.PROPERTY_NAME ) ) { return Boolean.toString( isShareAlphaNodes() ); } else if ( name.equals( ShareBetaNodesOption.PROPERTY_NAME ) ) { return Boolean.toString( isShareBetaNodes() ); } else if ( name.equals( PermGenThresholdOption.PROPERTY_NAME ) ) { return Integer.toString( getPermGenThreshold() ); } else if ( name.equals( ConstraintJittingThresholdOption.PROPERTY_NAME ) ) { return Integer.toString( getJittingThreshold() ); } else if ( name.equals( AlphaThresholdOption.PROPERTY_NAME ) ) { return Integer.toString( getAlphaNodeHashingThreshold() ); } else if ( name.equals( CompositeKeyDepthOption.PROPERTY_NAME ) ) { return Integer.toString( getCompositeKeyDepth() ); } else if ( name.equals( IndexLeftBetaMemoryOption.PROPERTY_NAME ) ) { return Boolean.toString( isIndexLeftBetaMemory() ); } else if ( name.equals( IndexRightBetaMemoryOption.PROPERTY_NAME ) ) { return Boolean.toString( isIndexRightBetaMemory()); } else if ( name.equals( IndexPrecedenceOption.PROPERTY_NAME ) ) { return getIndexPrecedenceOption().getValue(); } else if ( name.equals( EqualityBehaviorOption.PROPERTY_NAME ) ) { return getAssertBehaviour().toExternalForm(); } else if ( name.equals( "drools.executorService" ) ) { return getExecutorService(); } else if ( name.equals( ConsequenceExceptionHandlerOption.PROPERTY_NAME ) ) { return getConsequenceExceptionHandler(); } else if ( name.equals( "drools.ruleBaseUpdateHandler" ) ) { return getRuleBaseUpdateHandler(); } else if ( name.equals( 
"drools.conflictResolver" ) ) { return getConflictResolver().getClass().getName(); } else if ( name.equals( "drools.advancedProcessRuleIntegration" ) ) { return Boolean.toString(isAdvancedProcessRuleIntegration()); } else if ( name.equals( MultithreadEvaluationOption.PROPERTY_NAME ) ) { return Boolean.toString( isMultithreadEvaluation() ); } else if ( name.equals( MaxThreadsOption.PROPERTY_NAME ) ) { return Integer.toString( getMaxThreads()); } else if ( name.equals( EventProcessingOption.PROPERTY_NAME ) ) { return getEventProcessingMode().getMode(); } else if ( name.equals( MBeansOption.PROPERTY_NAME ) ) { return isMBeansEnabled() ? "enabled" : "disabled"; } else if ( name.equals( ClassLoaderCacheOption.PROPERTY_NAME ) ) { return Boolean.toString( isClassLoaderCacheEnabled() ); } else if ( name.equals( RuleEngineOption.PROPERTY_NAME ) ) { return Boolean.toString( isPhreakEnabled() ); } return null; } /** * A constructor that sets the classloader to be used as the parent classloader * of this rule base classloaders, and the properties to be used * as base configuration options * * @param classLoaders * @param properties */ public RuleBaseConfiguration(Properties properties, ClassLoader... classLoaders) { init( properties, classLoaders ); } private void init(Properties properties, ClassLoader... classLoaders) { if (classLoaders != null && classLoaders.length > 1) { throw new RuntimeException("Multiple classloaders are no longer supported"); } setClassLoader( classLoaders == null || classLoaders.length == 0 ? 
null : classLoaders[0] ); init(properties); } private void init(Properties properties) { this.immutable = false; this.chainedProperties = new ChainedProperties( "rulebase.conf", this.classLoader, true ); if ( properties != null ) { this.chainedProperties.addProperties( properties ); } setRemoveIdentities(Boolean.valueOf(this.chainedProperties.getProperty("drools.removeIdentities", "false")).booleanValue()); setShareAlphaNodes(Boolean.valueOf(this.chainedProperties.getProperty(ShareAlphaNodesOption.PROPERTY_NAME, "true")).booleanValue()); setShareBetaNodes(Boolean.valueOf(this.chainedProperties.getProperty(ShareBetaNodesOption.PROPERTY_NAME, "true")).booleanValue()); setPermGenThreshold(Integer.parseInt(this.chainedProperties.getProperty(PermGenThresholdOption.PROPERTY_NAME, "" + PermGenThresholdOption.DEFAULT_VALUE))); setJittingThreshold( Integer.parseInt( this.chainedProperties.getProperty( ConstraintJittingThresholdOption.PROPERTY_NAME, "" + ConstraintJittingThresholdOption.DEFAULT_VALUE))); setAlphaNodeHashingThreshold(Integer.parseInt(this.chainedProperties.getProperty(AlphaThresholdOption.PROPERTY_NAME, "3"))); setCompositeKeyDepth(Integer.parseInt(this.chainedProperties.getProperty(CompositeKeyDepthOption.PROPERTY_NAME, "3"))); setIndexLeftBetaMemory(Boolean.valueOf(this.chainedProperties.getProperty(IndexLeftBetaMemoryOption.PROPERTY_NAME, "true")).booleanValue()); setIndexRightBetaMemory(Boolean.valueOf(this.chainedProperties.getProperty(IndexRightBetaMemoryOption.PROPERTY_NAME, "true")).booleanValue()); setIndexPrecedenceOption(IndexPrecedenceOption.determineIndexPrecedence(this.chainedProperties.getProperty(IndexPrecedenceOption.PROPERTY_NAME, "equality"))); setAssertBehaviour(AssertBehaviour.determineAssertBehaviour(this.chainedProperties.getProperty(EqualityBehaviorOption.PROPERTY_NAME, "identity"))); setExecutorService(this.chainedProperties.getProperty("drools.executorService", "org.drools.core.concurrent.DefaultExecutorService")); 
setConsequenceExceptionHandler(this.chainedProperties.getProperty(ConsequenceExceptionHandlerOption.PROPERTY_NAME, "org.drools.core.runtime.rule.impl.DefaultConsequenceExceptionHandler")); setRuleBaseUpdateHandler(this.chainedProperties.getProperty("drools.ruleBaseUpdateHandler", "")); setSequentialAgenda(SequentialAgenda.determineSequentialAgenda(this.chainedProperties.getProperty(SequentialAgendaOption.PROPERTY_NAME, "sequential"))); setSequential(Boolean.valueOf(this.chainedProperties.getProperty(SequentialOption.PROPERTY_NAME, "false")).booleanValue()); setConflictResolver( determineConflictResolver( this.chainedProperties.getProperty( "drools.conflictResolver", "org.drools.core.conflict.DepthConflictResolver" ) ) ); setAdvancedProcessRuleIntegration( Boolean.valueOf( this.chainedProperties.getProperty( "drools.advancedProcessRuleIntegration", "false" ) ).booleanValue() ); setMultithreadEvaluation( Boolean.valueOf( this.chainedProperties.getProperty( MultithreadEvaluationOption.PROPERTY_NAME, "false" ) ).booleanValue() ); setMaxThreads( Integer.parseInt( this.chainedProperties.getProperty( MaxThreadsOption.PROPERTY_NAME, "3" ) ) ); setEventProcessingMode( EventProcessingOption.determineEventProcessingMode( this.chainedProperties.getProperty( EventProcessingOption.PROPERTY_NAME, "cloud" ) ) ); setMBeansEnabled( MBeansOption.isEnabled( this.chainedProperties.getProperty( MBeansOption.PROPERTY_NAME, "disabled" ) ) ); setClassLoaderCacheEnabled( Boolean.valueOf( this.chainedProperties.getProperty( ClassLoaderCacheOption.PROPERTY_NAME, "true" ) ) ); setPhreakEnabled(Boolean.valueOf(this.chainedProperties.getProperty(RuleEngineOption.PROPERTY_NAME, DEFAULT_PHREAK ? 
RuleEngineOption.PHREAK.toString() : RuleEngineOption.RETEOO.toString()) .equalsIgnoreCase(RuleEngineOption.PHREAK.toString()))); setSessionCacheOption(SessionCacheOption.determineOption(this.chainedProperties.getProperty(SessionCacheOption.PROPERTY_NAME, "none"))); setDeclarativeAgendaEnabled( Boolean.valueOf( this.chainedProperties.getProperty( DeclarativeAgendaOption.PROPERTY_NAME, "false" ) ) ); this.componentFactory = new KieComponentFactory(); } /** * Makes the configuration object immutable. Once it becomes immutable, * there is no way to make it mutable again. * This is done to keep consistency. */ public void makeImmutable() { this.immutable = true; } /** * Returns true if this configuration object is immutable or false otherwise. * @return */ public boolean isImmutable() { return this.immutable; } private void checkCanChange() { if ( this.immutable ) { throw new UnsupportedOperationException( "Can't set a property after configuration becomes immutable" ); } } public void setSequential(boolean sequential) { this.sequential = sequential; } public boolean isSequential() { return this.sequential; } public boolean isMaintainTms() { return this.maintainTms; } public void setMaintainTms(final boolean maintainTms) { checkCanChange(); // throws an exception if a change isn't possible; this.maintainTms = maintainTms; } public boolean isRemoveIdentities() { return this.removeIdentities; } public void setRemoveIdentities(final boolean removeIdentities) { checkCanChange(); // throws an exception if a change isn't possible; this.removeIdentities = removeIdentities; } public boolean isShareAlphaNodes() { return this.shareAlphaNodes; } public void setShareAlphaNodes(final boolean shareAlphaNodes) { checkCanChange(); // throws an exception if a change isn't possible; this.shareAlphaNodes = shareAlphaNodes; } public boolean isShareBetaNodes() { return this.shareBetaNodes; } public void setShareBetaNodes(final boolean shareBetaNodes) { checkCanChange(); // throws an 
exception if a change isn't possible; this.shareBetaNodes = shareBetaNodes; } public int getPermGenThreshold() { return this.permGenThreshold; } public void setPermGenThreshold(final int permGenThreshold) { checkCanChange(); // throws an exception if a change isn't possible; if (permGenThreshold < 0 || permGenThreshold > 100) { throw new UnsupportedOperationException( "The PermGen threshold should be a number between 0 and 100" ); } if (!hasPermGen()) { if (permGenThreshold != PermGenThresholdOption.DEFAULT_VALUE) { logger.warn( "JVM version " + System.getProperty("java.version") + " has no PermGen space. " + "Attempt to set the permgenThreshold to " + permGenThreshold + " will be ignored"); } this.permGenThreshold = 100; return; } this.permGenThreshold = permGenThreshold; } public int getJittingThreshold() { return jittingThreshold; } public void setJittingThreshold( int jittingThreshold ) { checkCanChange(); // throws an exception if a change isn't possible; this.jittingThreshold = jittingThreshold; } public int getAlphaNodeHashingThreshold() { return this.alphaNodeHashingThreshold; } public void setAlphaNodeHashingThreshold(final int alphaNodeHashingThreshold) { checkCanChange(); // throws an exception if a change isn't possible; this.alphaNodeHashingThreshold = alphaNodeHashingThreshold; } public AssertBehaviour getAssertBehaviour() { return this.assertBehaviour; } public void setAssertBehaviour(final AssertBehaviour assertBehaviour) { checkCanChange(); // throws an exception if a change isn't possible; this.assertBehaviour = assertBehaviour; } public EventProcessingOption getEventProcessingMode() { return this.eventProcessingMode; } public void setEventProcessingMode(final EventProcessingOption mode) { checkCanChange(); // throws an exception if a change isn't possible; this.eventProcessingMode = mode; } public int getCompositeKeyDepth() { return this.compositeKeyDepth; } public void setCompositeKeyDepth(final int compositeKeyDepth) { if ( !this.immutable ) { 
if ( compositeKeyDepth > 3 ) { throw new UnsupportedOperationException( "compositeKeyDepth cannot be greater than 3" ); } this.compositeKeyDepth = compositeKeyDepth; } else { throw new UnsupportedOperationException( "Can't set a property after configuration becomes immutable" ); } } public boolean isIndexLeftBetaMemory() { return this.indexLeftBetaMemory; } public void setIndexLeftBetaMemory(final boolean indexLeftBetaMemory) { checkCanChange(); // throws an exception if a change isn't possible; this.indexLeftBetaMemory = indexLeftBetaMemory; } public boolean isIndexRightBetaMemory() { return this.indexRightBetaMemory; } public void setIndexRightBetaMemory(final boolean indexRightBetaMemory) { checkCanChange(); // throws an exception if a change isn't possible; this.indexRightBetaMemory = indexRightBetaMemory; } public IndexPrecedenceOption getIndexPrecedenceOption() { return this.indexPrecedenceOption; } public void setIndexPrecedenceOption(final IndexPrecedenceOption precedence) { checkCanChange(); // throws an exception if a change isn't possible; this.indexPrecedenceOption = precedence; } public String getExecutorService() { return executorService; } public void setExecutorService(String executorService) { checkCanChange(); // throws an exception if a change isn't possible; this.executorService = executorService; } public String getConsequenceExceptionHandler() { return consequenceExceptionHandler; } public void setConsequenceExceptionHandler(String consequenceExceptionHandler) { checkCanChange(); // throws an exception if a change isn't possible; this.consequenceExceptionHandler = consequenceExceptionHandler; } public String getRuleBaseUpdateHandler() { return ruleBaseUpdateHandler; } public void setRuleBaseUpdateHandler(String ruleBaseUpdateHandler) { checkCanChange(); // throws an exception if a change isn't possible; this.ruleBaseUpdateHandler = ruleBaseUpdateHandler; } public AgendaGroupFactory getAgendaGroupFactory() { return 
getComponentFactory().getAgendaGroupFactory(); } public SequentialAgenda getSequentialAgenda() { return this.sequentialAgenda; } public void setSequentialAgenda(final SequentialAgenda sequentialAgenda) { checkCanChange(); // throws an exception if a change isn't possible; this.sequentialAgenda = sequentialAgenda; } /** * Defines if the RuleBase should be executed using a pool of * threads for evaluating the rules ("true"), or if the rulebase * should work in classic single thread mode ("false"). * * @param enableMultithread true for multi-thread or * false for single-thread. Default is false. */ public void setMultithreadEvaluation(boolean enableMultithread) { checkCanChange(); if( enableMultithread ) { throw new IllegalArgumentException( "Multithread mode is currently not supported. Please disable it." ); } this.multithread = enableMultithread; if (multithread && isPhreakEnabled()) { throw new IllegalArgumentException( "Multithread evaluation cannot be used when Left & Right Unlinking is enabled." ); } } /** * Returns true if the partitioning of the rulebase is enabled * and false otherwise. Default is false. * * @return */ public boolean isMultithreadEvaluation() { return this.multithread; } /** * If multi-thread evaluation is enabled, this parameter configures the * maximum number of threads each session can use for concurrent Rete * propagation. * * @param maxThreads the maximum number of threads to use. If 0 or a * negative number is set, the engine will use number * of threads equal to the number of partitions in the * rule base. Default number of threads is 0. */ public void setMaxThreads(final int maxThreads) { this.maxThreads = maxThreads; } /** * Returns the configured number of maximum threads to use for concurrent * propagation when multi-thread evaluation is enabled. Default is zero. 
* * @return */ public int getMaxThreads() { return this.maxThreads; } public boolean isClassLoaderCacheEnabled() { return this.classLoaderCacheEnabled; } public void setClassLoaderCacheEnabled(final boolean classLoaderCacheEnabled) { checkCanChange(); // throws an exception if a change isn't possible; this.classLoaderCacheEnabled = classLoaderCacheEnabled; } /** * @return whether or not Unlinking is enabled. */ public boolean isPhreakEnabled() { return this.phreakEnabled; } /** * Enable Unlinking. It will also disable sequential mode * and multithread evaluation as these are incompatible with L&R unlinking. * @param enabled */ public void setPhreakEnabled(boolean enabled) { checkCanChange(); // throws an exception if a change isn't possible; this.phreakEnabled = enabled; if (!isPhreakEnabled()) { configureReteComponentFactory(); } } public SessionCacheOption getSessionCacheOption() { return this.sessionCacheOption; } public void setSessionCacheOption(SessionCacheOption sessionCacheOption) { checkCanChange(); // throws an exception if a change isn't possible; this.sessionCacheOption = sessionCacheOption; } public boolean isDeclarativeAgenda() { return this.declarativeAgenda; } /** * Enable declarative agenda * @param enabled */ public void setDeclarativeAgendaEnabled(boolean enabled) { checkCanChange(); // throws an exception if a change isn't possible; this.declarativeAgenda = enabled; } public List<Map<String, Object>> getWorkDefinitions() { if ( this.workDefinitions == null ) { initWorkDefinitions(); } return this.workDefinitions; } private void initWorkDefinitions() { this.workDefinitions = new ArrayList<Map<String, Object>>(); // split on each space String locations[] = this.chainedProperties.getProperty( "drools.workDefinitions", "" ).split( "\\s" ); // load each SemanticModule for ( String factoryLocation : locations ) { // trim leading/trailing spaces and quotes factoryLocation = factoryLocation.trim(); if ( factoryLocation.startsWith( "\"" ) ) { 
factoryLocation = factoryLocation.substring( 1 ); } if ( factoryLocation.endsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 0, factoryLocation.length() - 1 ); } if ( !factoryLocation.equals( "" ) ) { loadWorkItems( factoryLocation ); } } } private void loadWorkItems(String location) { String content = ConfFileUtils.URLContentsToString( ConfFileUtils.getURL( location, null, RuleBaseConfiguration.class ) ); try { this.workDefinitions.addAll( (List<Map<String, Object>>) MVELSafeHelper.getEvaluator().eval( content, new HashMap() ) ); } catch ( Throwable t ) { logger.error("Error occurred while loading work definitions " + location + "\nContinuing without reading these work definitions", t); throw new RuntimeException( "Could not parse work definitions " + location + ": " + t.getMessage() ); } } public boolean isAdvancedProcessRuleIntegration() { return advancedProcessRuleIntegration; } public void setAdvancedProcessRuleIntegration(boolean advancedProcessRuleIntegration) { this.advancedProcessRuleIntegration = advancedProcessRuleIntegration; } public void addActivationListener(String name, ActivationListenerFactory factory) { if ( this.activationListeners == null ) { this.activationListeners = new HashMap<String, ActivationListenerFactory>(); } this.activationListeners.put( name, factory ); } public ActivationListenerFactory getActivationListenerFactory(String name) { ActivationListenerFactory factory = null; if ( this.activationListeners != null ) { factory = this.activationListeners.get( name ); } if ( factory != null ) { return factory; } else { if ( "query".equals( name )) { return QueryActivationListenerFactory.INSTANCE; } else if ( "agenda".equals( name ) || "direct".equals( name ) ) { return RuleActivationListenerFactory.INSTANCE; } } throw new IllegalArgumentException( "ActivationListenerFactory not found for '" + name + "'" ); } private boolean determineShadowProxy(String userValue) { if ( this.isSequential() ) { // sequential never needs 
shadowing, so always override return false; } if ( userValue != null ) { return Boolean.valueOf( userValue ).booleanValue(); } else { return true; } } private ConflictResolver determineConflictResolver(String className) { Class clazz = null; try { clazz = this.classLoader.loadClass( className ); } catch ( ClassNotFoundException e ) { throw new IllegalArgumentException( "conflict Resolver '" + className + "' not found" ); } try { return (ConflictResolver) clazz.getMethod( "getInstance", null ).invoke( null, null ); } catch ( Exception e ) { throw new IllegalArgumentException( "Unable to set Conflict Resolver '" + className + "'" ); } } public void setConflictResolver(ConflictResolver conflictResolver) { checkCanChange(); // throws an exception if a change isn't possible; this.conflictResolver = conflictResolver; } public ConflictResolver getConflictResolver() { return this.conflictResolver; } public ClassLoader getClassLoader() { return this.classLoader; } public void setClassLoader(ClassLoader classLoader) { this.classLoader = ProjectClassLoader.getClassLoader( classLoader, getClass(), isClassLoaderCacheEnabled()); } public KieComponentFactory getComponentFactory() { return componentFactory; } private void configureReteComponentFactory() { if (!(componentFactory.getWorkingMemoryFactory().getClass().getName().endsWith("ReteWorkingMemory"))) { try { componentFactory.setWorkingMemoryFactory( (WorkingMemoryFactory) getStaticInstance( "org.drools.reteoo.common.ReteWorkingMemoryFactory" ) ); } catch (ClassNotFoundException e) { logger.warn("Cannot find drools-reteoo.jar on the classpath, switching to phreak"); phreakEnabled = true; return; } catch (Exception e) { throw new RuntimeException(e); } } try { if (!(componentFactory.getNodeFactoryService().getClass().getName().endsWith("ReteNodeFactory"))) { componentFactory.setNodeFactoryProvider( (NodeFactory) getStaticInstance( "org.drools.reteoo.builder.ReteNodeFactory" ) ); } if 
(!(componentFactory.getPropagationContextFactory().getClass().getName().endsWith("RetePropagationContextFactory"))) { componentFactory.setPropagationContextFactory( (PropagationContextFactory) getStaticInstance( "org.drools.reteoo.common.RetePropagationContextFactory" ) ); } if (!(componentFactory.getAgendaFactory().getClass().getName().endsWith("ReteAgendaFactory"))) { componentFactory.setAgendaFactory( (AgendaFactory) getStaticInstance("org.drools.reteoo.common.ReteAgendaFactory") ); } if (!(componentFactory.getAgendaGroupFactory().getClass().getName().endsWith( "RetePriorityQueueAgendaGroupFactory"))) { componentFactory.setAgendaGroupFactory((AgendaGroupFactory) getStaticInstance( "org.drools.reteoo.common.RetePriorityQueueAgendaGroupFactory" )); } } catch (Exception e) { throw new RuntimeException(e); } } private Object getStaticInstance(String className) throws Exception { return Class.forName( className ).getMethod( "getInstance" ).invoke( null ); } public void setComponentFactory(KieComponentFactory componentFactory) { this.componentFactory = componentFactory; } /** * Defines if the RuleBase should expose management and monitoring MBeans * * @param mbeansEnabled true for multi-thread or * false for single-thread. Default is false. 
*/
    public void setMBeansEnabled(boolean mbeansEnabled) {
        checkCanChange(); // rejects the change if this configuration is already frozen/in use
        this.mbeansEnabled = mbeansEnabled;
    }

    /**
     * Returns true if the management and monitoring through MBeans is active
     *
     * @return true when MBean support is enabled
     */
    public boolean isMBeansEnabled() {
        return this.mbeansEnabled;
    }

    /**
     * Pre-Java-5 typesafe enum for the assertion behaviour (IDENTITY or EQUALITY).
     * Externalizable for wire compatibility; {@link #readResolve()} canonicalizes
     * deserialized instances back to the two singletons.
     */
    public static class AssertBehaviour implements Externalizable {

        private static final long serialVersionUID = 510L;

        public static final AssertBehaviour IDENTITY = new AssertBehaviour(0);
        public static final AssertBehaviour EQUALITY = new AssertBehaviour(1);

        // 0 == identity, 1 == equality
        private int value;

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            value = in.readInt();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt(value);
        }

        /** Public no-arg constructor required by the Externalizable contract. */
        public AssertBehaviour() {
        }

        private AssertBehaviour(final int value) {
            this.value = value;
        }

        public boolean equals(Object obj) {
            if (obj == this) return true;
            else if (obj instanceof AssertBehaviour) {
                AssertBehaviour that = (AssertBehaviour) obj;
                return value == that.value;
            }
            return false;
        }

        // FIX: equals() was overridden without hashCode(); equal instances must hash equally.
        public int hashCode() {
            return value;
        }

        /**
         * Parses the (case-insensitive) external name into its canonical instance.
         *
         * @throws IllegalArgumentException for any value other than IDENTITY/EQUALITY
         */
        public static AssertBehaviour determineAssertBehaviour(final String value) {
            if ("IDENTITY".equalsIgnoreCase(value)) {
                return IDENTITY;
            } else if ("EQUALITY".equalsIgnoreCase(value)) {
                return EQUALITY;
            } else {
                throw new IllegalArgumentException("Illegal enum value '" + value + "' for AssertBehaviour");
            }
        }

        // Replace the freshly deserialized instance with the canonical singleton.
        private Object readResolve() throws java.io.ObjectStreamException {
            switch (this.value) {
                case 0:
                    return IDENTITY;
                case 1:
                    return EQUALITY;
                default:
                    throw new IllegalArgumentException("Illegal enum value '" + this.value + "' for AssertBehaviour");
            }
        }

        public String toExternalForm() {
            return (this.value == 0) ? "identity" : "equality";
        }

        public String toString() {
            return "AssertBehaviour : " + ((this.value == 0) ? "identity" : "equality");
        }
    }

    /**
     * Pre-Java-5 typesafe enum for the logical-override policy (PRESERVE or DISCARD).
     * Externalizable; deserialized instances are canonicalized via {@link #readResolve()}.
     */
    public static class LogicalOverride implements Externalizable {

        private static final long serialVersionUID = 510L;

        public static final LogicalOverride PRESERVE = new LogicalOverride(0);
        public static final LogicalOverride DISCARD = new LogicalOverride(1);

        // 0 == preserve, 1 == discard
        private int value;

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            value = in.readInt();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt(value);
        }

        /** Public no-arg constructor required by the Externalizable contract. */
        public LogicalOverride() {
        }

        private LogicalOverride(final int value) {
            this.value = value;
        }

        /**
         * Parses the (case-insensitive) external name into its canonical instance.
         *
         * @throws IllegalArgumentException for any value other than PRESERVE/DISCARD
         */
        public static LogicalOverride determineLogicalOverride(final String value) {
            if ("PRESERVE".equalsIgnoreCase(value)) {
                return PRESERVE;
            } else if ("DISCARD".equalsIgnoreCase(value)) {
                return DISCARD;
            } else {
                throw new IllegalArgumentException("Illegal enum value '" + value + "' for LogicalOverride");
            }
        }

        // Replace the freshly deserialized instance with the canonical singleton.
        private Object readResolve() throws java.io.ObjectStreamException {
            switch (this.value) {
                case 0:
                    return PRESERVE;
                case 1:
                    return DISCARD;
                default:
                    throw new IllegalArgumentException("Illegal enum value '" + this.value + "' for LogicalOverride");
            }
        }

        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            } else if (obj instanceof LogicalOverride) {
                return value == ((LogicalOverride) obj).value;
            }
            return false;
        }

        // FIX: equals() was overridden without hashCode(); equal instances must hash equally.
        public int hashCode() {
            return value;
        }

        public String toExternalForm() {
            return (this.value == 0) ? "preserve" : "discard";
        }

        public String toString() {
            return "LogicalOverride : " + ((this.value == 0) ? "preserve" : "discard");
        }
    }

    /**
     * Pre-Java-5 typesafe enum for the agenda mode (SEQUENTIAL or DYNAMIC).
     * Externalizable; deserialized instances are canonicalized via {@link #readResolve()}.
     */
    public static class SequentialAgenda implements Externalizable {

        private static final long serialVersionUID = 510L;

        public static final SequentialAgenda SEQUENTIAL = new SequentialAgenda(0);
        public static final SequentialAgenda DYNAMIC = new SequentialAgenda(1);

        // 0 == sequential, 1 == dynamic
        private int value;

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            value = in.readInt();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt(value);
        }

        /** Public no-arg constructor required by the Externalizable contract. */
        public SequentialAgenda() {
        }

        private SequentialAgenda(final int value) {
            this.value = value;
        }

        /**
         * Parses the (case-insensitive) external name into its canonical instance.
         *
         * @throws IllegalArgumentException for any value other than sequential/dynamic
         */
        public static SequentialAgenda determineSequentialAgenda(final String value) {
            if ("sequential".equalsIgnoreCase(value)) {
                return SEQUENTIAL;
            } else if ("dynamic".equalsIgnoreCase(value)) {
                return DYNAMIC;
            } else {
                throw new IllegalArgumentException("Illegal enum value '" + value + "' for SequentialAgenda");
            }
        }

        // Replace the freshly deserialized instance with the canonical singleton.
        private Object readResolve() throws java.io.ObjectStreamException {
            switch (this.value) {
                case 0:
                    return SEQUENTIAL;
                case 1:
                    return DYNAMIC;
                default:
                    throw new IllegalArgumentException("Illegal enum value '" + this.value + "' for SequentialAgenda");
            }
        }

        // FIX(consistency): siblings AssertBehaviour/LogicalOverride are value-comparable;
        // readResolve() canonicalization makes value equality identical to identity here.
        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            } else if (obj instanceof SequentialAgenda) {
                return value == ((SequentialAgenda) obj).value;
            }
            return false;
        }

        public int hashCode() {
            return value;
        }

        public String toExternalForm() {
            return (this.value == 0) ? "sequential" : "dynamic";
        }

        public String toString() {
            return "SequentialAgenda : " + ((this.value == 0) ? "sequential" : "dynamic");
        }
    }

    /**
     * Maps a single-value option class to the option instance representing this
     * configuration's current state. Returns null for unknown option classes.
     *
     * @param option the option class to look up
     * @return the current option value, or null if the class is not recognized
     */
    @SuppressWarnings("unchecked")
    public <T extends SingleValueKieBaseOption> T getOption(Class<T> option) {
        if (SequentialOption.class.equals(option)) {
            return (T) (this.sequential ? SequentialOption.YES : SequentialOption.NO);
        } else if (RemoveIdentitiesOption.class.equals(option)) {
            return (T) (this.removeIdentities ? RemoveIdentitiesOption.YES : RemoveIdentitiesOption.NO);
        } else if (ShareAlphaNodesOption.class.equals(option)) {
            return (T) (this.shareAlphaNodes ? ShareAlphaNodesOption.YES : ShareAlphaNodesOption.NO);
        } else if (ShareBetaNodesOption.class.equals(option)) {
            return (T) (this.shareBetaNodes ? ShareBetaNodesOption.YES : ShareBetaNodesOption.NO);
        } else if (IndexLeftBetaMemoryOption.class.equals(option)) {
            return (T) (this.indexLeftBetaMemory ? IndexLeftBetaMemoryOption.YES : IndexLeftBetaMemoryOption.NO);
        } else if (IndexRightBetaMemoryOption.class.equals(option)) {
            return (T) (this.indexRightBetaMemory ? IndexRightBetaMemoryOption.YES : IndexRightBetaMemoryOption.NO);
        } else if (IndexPrecedenceOption.class.equals(option)) {
            return (T) getIndexPrecedenceOption();
        } else if (EqualityBehaviorOption.class.equals(option)) {
            return (T) ((this.assertBehaviour == AssertBehaviour.IDENTITY) ? EqualityBehaviorOption.IDENTITY : EqualityBehaviorOption.EQUALITY);
        } else if (SequentialAgendaOption.class.equals(option)) {
            return (T) ((this.sequentialAgenda == SequentialAgenda.SEQUENTIAL) ? SequentialAgendaOption.SEQUENTIAL : SequentialAgendaOption.DYNAMIC);
        } else if (PermGenThresholdOption.class.equals(option)) {
            return (T) PermGenThresholdOption.get(permGenThreshold);
        } else if (ConstraintJittingThresholdOption.class.equals(option)) {
            return (T) ConstraintJittingThresholdOption.get(jittingThreshold);
        } else if (AlphaThresholdOption.class.equals(option)) {
            return (T) AlphaThresholdOption.get(alphaNodeHashingThreshold);
        } else if (CompositeKeyDepthOption.class.equals(option)) {
            return (T) CompositeKeyDepthOption.get(compositeKeyDepth);
        } else if (ConsequenceExceptionHandlerOption.class.equals(option)) {
            // The handler is stored as a class name; resolve it lazily here.
            Class<? extends ConsequenceExceptionHandler> handler;
            try {
                handler = (Class<? extends ConsequenceExceptionHandler>) Class.forName(consequenceExceptionHandler);
            } catch (ClassNotFoundException e) {
                throw new RuntimeException("Unable to resolve ConsequenceExceptionHandler class: " + consequenceExceptionHandler, e);
            }
            return (T) ConsequenceExceptionHandlerOption.get(handler);
        } else if (EventProcessingOption.class.equals(option)) {
            return (T) getEventProcessingMode();
        } else if (MaxThreadsOption.class.equals(option)) {
            return (T) MaxThreadsOption.get(getMaxThreads());
        } else if (MultithreadEvaluationOption.class.equals(option)) {
            return (T) (this.multithread ? MultithreadEvaluationOption.YES : MultithreadEvaluationOption.NO);
        } else if (MBeansOption.class.equals(option)) {
            return (T) (this.isMBeansEnabled() ? MBeansOption.ENABLED : MBeansOption.DISABLED);
        } else if (ClassLoaderCacheOption.class.equals(option)) {
            return (T) (this.isClassLoaderCacheEnabled() ? ClassLoaderCacheOption.ENABLED : ClassLoaderCacheOption.DISABLED);
        } else if (RuleEngineOption.class.equals(option)) {
            return (T) (this.isPhreakEnabled() ? RuleEngineOption.PHREAK : RuleEngineOption.RETEOO);
        } else if (DeclarativeAgendaOption.class.equals(option)) {
            return (T) (this.isDeclarativeAgenda() ? DeclarativeAgendaOption.ENABLED : DeclarativeAgendaOption.DISABLED);
        }
        return null;
    }

    /**
     * Applies a single option value to this configuration by dispatching to the
     * matching typed setter. Unrecognized option types are silently ignored
     * (kept for backward compatibility with existing callers).
     *
     * @param option the option instance to apply
     */
    public <T extends KieBaseOption> void setOption(T option) {
        if (option instanceof SequentialOption) {
            setSequential(((SequentialOption) option).isSequential());
        } else if (option instanceof RemoveIdentitiesOption) {
            setRemoveIdentities(((RemoveIdentitiesOption) option).isRemoveIdentities());
        } else if (option instanceof ShareAlphaNodesOption) {
            setShareAlphaNodes(((ShareAlphaNodesOption) option).isShareAlphaNodes());
        } else if (option instanceof ShareBetaNodesOption) {
            setShareBetaNodes(((ShareBetaNodesOption) option).isShareBetaNodes());
        } else if (option instanceof IndexLeftBetaMemoryOption) {
            setIndexLeftBetaMemory(((IndexLeftBetaMemoryOption) option).isIndexLeftBetaMemory());
        } else if (option instanceof IndexRightBetaMemoryOption) {
            setIndexRightBetaMemory(((IndexRightBetaMemoryOption) option).isIndexRightBetaMemory());
        } else if (option instanceof IndexPrecedenceOption) {
            setIndexPrecedenceOption((IndexPrecedenceOption) option);
        } else if (option instanceof EqualityBehaviorOption) {
            setAssertBehaviour((option == EqualityBehaviorOption.IDENTITY) ? AssertBehaviour.IDENTITY : AssertBehaviour.EQUALITY);
        } else if (option instanceof SequentialAgendaOption) {
            setSequentialAgenda((option == SequentialAgendaOption.SEQUENTIAL) ? SequentialAgenda.SEQUENTIAL : SequentialAgenda.DYNAMIC);
        } else if (option instanceof PermGenThresholdOption) {
            setPermGenThreshold(((PermGenThresholdOption) option).getThreshold());
        } else if (option instanceof ConstraintJittingThresholdOption) {
            setJittingThreshold(((ConstraintJittingThresholdOption) option).getThreshold());
        } else if (option instanceof AlphaThresholdOption) {
            setAlphaNodeHashingThreshold(((AlphaThresholdOption) option).getThreshold());
        } else if (option instanceof CompositeKeyDepthOption) {
            setCompositeKeyDepth(((CompositeKeyDepthOption) option).getDepth());
        } else if (option instanceof ConsequenceExceptionHandlerOption) {
            // Stored by class name so the handler class loads lazily in getOption().
            setConsequenceExceptionHandler(((ConsequenceExceptionHandlerOption) option).getHandler().getName());
        } else if (option instanceof EventProcessingOption) {
            setEventProcessingMode((EventProcessingOption) option);
        } else if (option instanceof MaxThreadsOption) {
            setMaxThreads(((MaxThreadsOption) option).getMaxThreads());
        } else if (option instanceof MultithreadEvaluationOption) {
            setMultithreadEvaluation(((MultithreadEvaluationOption) option).isMultithreadEvaluation());
        } else if (option instanceof MBeansOption) {
            setMBeansEnabled(((MBeansOption) option).isEnabled());
        } else if (option instanceof ClassLoaderCacheOption) {
            setClassLoaderCacheEnabled(((ClassLoaderCacheOption) option).isClassLoaderCacheEnabled());
        } else if (option instanceof RuleEngineOption) {
            setPhreakEnabled(((RuleEngineOption) option).isLRUnlinkingEnabled());
        } else if (option instanceof SessionCacheOption) {
            setSessionCacheOption((SessionCacheOption) option);
        } else if (option instanceof DeclarativeAgendaOption) {
            setDeclarativeAgendaEnabled(((DeclarativeAgendaOption) option).isDeclarativeAgendaEnabled());
        }
    }

    /**
     * Multi-value option lookup; this configuration defines no multi-value
     * options, so the method always returns null.
     */
    public <T extends MultiValueKieBaseOption> T getOption(Class<T> option, String key) {
        return null;
    }
}
/*
 * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

package org.wso2.carbon.esb.websocket.client;

import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.handler.codec.http.websocketx.PingWebSocketFrame;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketClientHandshakerFactory;
import io.netty.handler.codec.http.websocketx.WebSocketVersion;
import io.netty.handler.codec.http.websocketx.extensions.compression.WebSocketClientCompressionHandler;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.ProtocolException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import javax.net.ssl.SSLException;

/**
 * WebSocket client class for test
 */
public class WebSocketTestClient {

    private static final Logger logger = LoggerFactory.getLogger(WebSocketTestClient.class);

    private String url = null;
    private final String subProtocol;
    // Extra headers sent with the upgrade request (may be empty).
    private Map<String, String> customHeaders = new HashMap<>();
    private Channel channel = null;
    private WebSocketClientHandler handler;
    private EventLoopGroup group;
    // Optional latch the handler counts down on received frames.
    private CountDownLatch latch;

    public WebSocketTestClient(String url) {
        this.url = url;
        this.subProtocol = null;
    }

    public WebSocketTestClient(String url, String subProtocol, Map<String, String> customHeaders) {
        this.url = url;
        this.subProtocol = subProtocol;
        this.customHeaders = customHeaders;
    }

    public WebSocketTestClient(String url, CountDownLatch latch) {
        this.url = url;
        this.subProtocol = null;
        this.latch = latch;
    }

    public WebSocketTestClient(String url, String subProtocol, Map<String, String> customHeaders,
                               CountDownLatch latch) {
        this.url = url;
        this.subProtocol = subProtocol;
        this.customHeaders = customHeaders;
        this.latch = latch;
    }

    /**
     * Connects to the server and performs the WebSocket upgrade handshake.
     * <p>
     * NOTE(review): the method name contains a typo ("handhshake") but is kept
     * unchanged for source compatibility with existing callers.
     *
     * @return true if the handshake is done properly.
     * @throws URISyntaxException   throws if there is an error in the URI syntax.
     * @throws InterruptedException throws if the connecting the server is interrupted.
     * @throws SSLException         throws if the client SSL context cannot be built.
     * @throws ProtocolException    declared for API compatibility.
     */
    public boolean handhshake() throws InterruptedException, URISyntaxException, SSLException, ProtocolException {
        boolean isSuccess;
        URI uri = new URI(url);
        String scheme = uri.getScheme() == null ? "ws" : uri.getScheme();
        final String host = uri.getHost() == null ? "127.0.0.1" : uri.getHost();
        final int port;
        if (uri.getPort() == -1) {
            // Fall back to the scheme's default port when the URL omits one.
            if ("ws".equalsIgnoreCase(scheme)) {
                port = 80;
            } else if ("wss".equalsIgnoreCase(scheme)) {
                port = 443;
            } else {
                port = -1;
            }
        } else {
            port = uri.getPort();
        }
        if (!"ws".equalsIgnoreCase(scheme) && !"wss".equalsIgnoreCase(scheme)) {
            logger.error("Only WS(S) is supported.");
            return false;
        }
        final boolean ssl = "wss".equalsIgnoreCase(scheme);
        final SslContext sslCtx;
        if (ssl) {
            // Test client: trust any certificate.
            sslCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build();
        } else {
            sslCtx = null;
        }
        group = new NioEventLoopGroup();
        HttpHeaders headers = new DefaultHttpHeaders();
        for (Map.Entry<String, String> entry : customHeaders.entrySet()) {
            headers.add(entry.getKey(), entry.getValue());
        }
        // Connect with V13 (RFC 6455 aka HyBi-17). You can change it to V08 or V00.
        // If you change it to V00, ping is not supported and remember to change
        // HttpResponseDecoder to WebSocketHttpResponseDecoder in the pipeline.
        handler = new WebSocketClientHandler(
                WebSocketClientHandshakerFactory.newHandshaker(uri, WebSocketVersion.V13, subProtocol, true, headers),
                latch);
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.group(group).channel(NioSocketChannel.class).handler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) {
                ChannelPipeline p = ch.pipeline();
                if (sslCtx != null) {
                    p.addLast(sslCtx.newHandler(ch.alloc(), host, port));
                }
                p.addLast(new HttpClientCodec(), new HttpObjectAggregator(8192),
                        WebSocketClientCompressionHandler.INSTANCE, handler);
            }
        });
        // FIX: was bootstrap.connect(uri.getHost(), port) — uri.getHost() may be null;
        // use the defaulted 'host' local, consistent with the SSL handler above.
        channel = bootstrap.connect(host, port).sync().channel();
        isSuccess = handler.handshakeFuture().sync().isSuccess();
        logger.info("WebSocket Handshake successful : " + isSuccess);
        return isSuccess;
    }

    /**
     * Send text to the server.
     *
     * @param text text need to be sent.
     */
    public void sendText(String text) {
        if (channel == null) {
            logger.error("Channel is null. Cannot send text.");
            throw new IllegalArgumentException("Cannot find the channel to write");
        }
        channel.writeAndFlush(new TextWebSocketFrame(text));
    }

    /**
     * Send binary data to server.
     *
     * @param buf buffer containing the data need to be sent.
     */
    public void sendBinary(ByteBuffer buf) throws IOException {
        if (channel == null) {
            // FIX: message previously said "Cannot send text." in the binary path.
            logger.error("Channel is null. Cannot send binary data.");
            throw new IllegalArgumentException("Cannot find the channel to write");
        }
        channel.writeAndFlush(new BinaryWebSocketFrame(Unpooled.wrappedBuffer(buf)));
    }

    /**
     * Send a ping message to the server.
     *
     * @param buf content of the ping message to be sent.
     */
    public void sendPing(ByteBuffer buf) throws IOException {
        if (channel == null) {
            // FIX: message previously said "Cannot send text." in the ping path.
            logger.error("Channel is null. Cannot send ping.");
            throw new IllegalArgumentException("Cannot find the channel to write");
        }
        channel.writeAndFlush(new PingWebSocketFrame(Unpooled.wrappedBuffer(buf)));
    }

    /**
     * @return the text received from the server.
     */
    public String getTextReceived() {
        return handler.getTextReceived();
    }

    /**
     * @return the binary data received from the server.
     */
    public ByteBuffer getBufferReceived() {
        return handler.getBufferReceived();
    }

    /**
     * Check whether the connection is still open or not.
     *
     * @return true if connection is still open.
     */
    public boolean isOpen() {
        return handler.isOpen();
    }

    /**
     * Shutdown the WebSocket Client.
     */
    public void shutDown() throws InterruptedException {
        logger.info("Shutting Down WebSocket Client....");
        handler.shutDown();
        group.shutdownGracefully();
    }
}
/**
 * Copyright 2012 Comcast Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.comcast.cqs.controller;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executors;

import javax.servlet.AsyncContext;
import javax.servlet.http.HttpServletResponse;

import org.apache.log4j.Logger;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelHandler;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;

import com.comcast.cmb.common.controller.Action;
import com.comcast.cmb.common.controller.CMBControllerServlet;
import com.comcast.cmb.common.persistence.PersistenceFactory;
import com.comcast.cmb.common.util.CMBProperties;
import com.comcast.cmb.common.util.ValueAccumulator.AccumulatorName;
import com.comcast.cqs.io.CQSMessagePopulator;
import com.comcast.cqs.model.CQSMessage;
import com.comcast.cqs.model.CQSQueue;

/**
 * Receives "message available" long-poll notifications over a dedicated netty
 * port and completes pending asynchronous receiveMessage() requests.
 */
public class CQSLongPollReceiver {

    private static Logger logger = Logger.getLogger(CQSLongPollReceiver.class);

    private static boolean initialized = false;
    private static ChannelFactory serverSocketChannelFactory;

    // One in-memory queue of pending async contexts per CQS queue ARN.
    public static volatile ConcurrentHashMap<String, ConcurrentLinkedQueue<AsyncContext>> contextQueues;

    //
    // current long poll design:
    //
    // http request handlers are now asynchronous: they offload action execution to a separate pool of worker threads
    // there is one single long poll receiver thread per api server listening on a dedicated port for "message available" notifications using netty nio library (asynchronous i/o)
    // use request wrapper to allow additional meta-data to travel with the async context, such as a timestamp when request was received etc.
    // upon receive() do everything as usual up until reading messages from redis/cassandra
    // if messages are found immediately, return these messages and complete the async context as usual
    // otherwise put async context on in-memory queue (e.g. ConcurrentLinkedQueue) and do NOT complete context (note: we need one in-mem queue per cqs queue, referenced via a concurrent hash map!)
    // when any of the async events occurs (complete, timeout, error) we mark the async context as outdated
    // when receiving an external send() notification we look up the correct in-mem queue and pull an async context from it
    // if no context there we do nothing (nobody is currently long-polling)
    // if a context is there but it's marked as outdated we simply discard it and check for further elements on the queue
    // if an active async context is found we try to read messages
    // if messages are found we generate a response and complete the context
    // if no messages are found (and there is long poll time left) we put the context back on the queue
    //
    // optimizations:
    //
    // if send and receive happens on the same server, bypass async i/o and place the async context directly on the queue in memory (done)
    // reuse established netty channels instead of creating new tcp connections for every sendmessage() call (done)
    // only send notifications to endpoints that are actually waiting for messages or have recently been waiting for messages
    // reestablishing connections only on failure or after a set period of time (e.g. 1 hr), or send ping over connection every 60 sec
    // only send notifications if queue is empty or near empty (a full queue cannot have pending receivemessage() calls)
    // tune tcp settings - keep-alive etc.
    //
    // old long poll design:
    //
    // each long polling receive() api call does a wait(timeout) on a monitor (one monitor per queue)
    // when the long poll receiver thread receives a notification (which only consists of the queue arn that received a message) it will do a notify() on the monitor associated with the queue
    // this wakes up at most one waiting receive() api call, which will then try to read and return messages from redis/cassandra as usual
    // if no messages are found (race conditions etc.) and there is still long polling time left, receive will call wait() again
    // each send() api call will write the message to redis/cassandra as usual and send the target queue arn to all api servers using the netty nio library
    // each api server will write a heart beat (timestamp, ip, port) to cassandra, maybe once a minute or so
    // each api server will read the heart beat table once a minute or so to be aware of active api servers and their ip:port combinations
    //
    // some known limitations:
    //
    // each long poll request occupies a waiting thread on the worker pool (only applies to old design)
    // no short cut if send and receive happens on same api server (only applies to old design)
    // receive broadcasts to all api servers regardless of whether they are actually listening or not
    // long poll receiver thread is single point of failure on api server
    //

    /**
     * Parses semicolon-terminated queue ARNs from the incoming byte stream and
     * dispatches one notification per complete ARN.
     */
    private static class LongPollServerHandler extends SimpleChannelHandler {

        // Accumulates the ARN currently being read; messages are ';'-delimited.
        StringBuffer queueArn = new StringBuffer("");

        @Override
        public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) {
            ChannelBuffer buf = (ChannelBuffer) e.getMessage();
            while (buf.readable()) {
                char c = ((char) buf.readByte());
                if (c == ';') {
                    processNotification(queueArn.toString(),
                            e.getRemoteAddress() != null ? e.getRemoteAddress().toString() : "");
                    // start reading new message
                    queueArn = new StringBuffer("");
                } else {
                    queueArn.append(c);
                }
            }
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
            logger.error("event=longpoll_receiver_error", e.getCause());
            e.getChannel().close();
        }
    }

    /**
     * Handles a single "message available" notification: pulls a pending async
     * context for the queue, tries to read messages, and either completes the
     * request or re-queues the context if long-poll time remains.
     *
     * @param queueArn      ARN of the queue that received a message
     * @param remoteAddress sender address, for logging only
     * @return number of messages delivered to a pending request (0 if none)
     */
    public static int processNotification(String queueArn, String remoteAddress) {

        int messageCount = 0;
        long ts1 = System.currentTimeMillis();

        // Guard against notifications arriving before listen() initialized state.
        if (contextQueues == null) {
            logger.info("event=receiver_not_initialized queue_arn=" + queueArn + " remote_address=" + remoteAddress);
            return messageCount;
        }

        CMBControllerServlet.valueAccumulator.initializeAllCounters();

        // FIX: counter cleanup previously ran only on the message-read path; early
        // returns leaked the initialized per-thread counters. The finally below
        // now covers every exit path.
        try {

            contextQueues.putIfAbsent(queueArn, new ConcurrentLinkedQueue<AsyncContext>());
            ConcurrentLinkedQueue<AsyncContext> contextQueue = contextQueues.get(queueArn);

            AsyncContext asyncContext = contextQueue.poll();

            if (asyncContext == null) {
                logger.debug("event=no_pending_receive queue_arn=" + queueArn + " remote_address=" + remoteAddress);
                return messageCount;
            }

            if (asyncContext.getRequest() == null) {
                logger.info("event=skipping_invalid_context queue_arn=" + queueArn + " remote_address=" + remoteAddress);
                return messageCount;
            }

            if (!(asyncContext.getRequest() instanceof CQSHttpServletRequest)) {
                logger.info("event=skipping_invalid_request queue_arn=" + queueArn + " remote_address=" + remoteAddress);
                return messageCount;
            }

            CQSHttpServletRequest request = (CQSHttpServletRequest) asyncContext.getRequest();

            // skip if request is already finished or outdated
            if (!request.isActive()
                    || System.currentTimeMillis() - request.getRequestReceivedTimestamp() > request.getWaitTime()) {
                logger.info("event=skipping_outdated_context queue_arn=" + queueArn + " remote_address=" + remoteAddress);
                return messageCount;
            }

            logger.debug("event=notification_received queue_arn=" + queueArn + " remote_address=" + remoteAddress);

            try {

                CQSQueue queue = request.getQueue();
                List<CQSMessage> messageList = PersistenceFactory.getCQSMessagePersistence()
                        .receiveMessage(queue, request.getReceiveAttributes());

                if (!messageList.isEmpty()) {

                    messageCount = messageList.size();

                    List<String> receiptHandles = new ArrayList<String>();
                    for (CQSMessage message : messageList) {
                        receiptHandles.add(message.getReceiptHandle());
                    }
                    request.setReceiptHandles(receiptHandles);
                    request.setAttribute("lp", "yy"); // found lp call with messages

                    CQSMonitor.getInstance().addNumberOfMessagesReturned(queue.getRelativeUrl(), messageList.size());

                    String out = CQSMessagePopulator.getReceiveMessageResponseAfterSerializing(
                            messageList, request.getFilterAttributes(), request.getFilterMessageAttributes());
                    Action.writeResponse(out, (HttpServletResponse) asyncContext.getResponse());

                    // Attach timing/counter diagnostics to the request before completing.
                    long lp_ms = System.currentTimeMillis() - ts1;
                    request.setAttribute("lp_ms", lp_ms);
                    String cass_msString = String.valueOf(
                            CQSControllerServlet.valueAccumulator.getCounter(AccumulatorName.CassandraTime));
                    request.setAttribute("cass_ms", cass_msString);
                    request.setAttribute("cass_num_rd", CQSControllerServlet.valueAccumulator.getCounter(AccumulatorName.CassandraRead));
                    request.setAttribute("cass_num_wr", CQSControllerServlet.valueAccumulator.getCounter(AccumulatorName.CassandraWrite));
                    request.setAttribute("redis_ms", CQSControllerServlet.valueAccumulator.getCounter(AccumulatorName.RedisTime));
                    request.setAttribute("io_ms", CQSControllerServlet.valueAccumulator.getCounter(AccumulatorName.IOTime));

                    asyncContext.complete();

                } else {

                    // if there's longpoll time left, put back on queue
                    if (request.getWaitTime() - System.currentTimeMillis() + request.getRequestReceivedTimestamp() > 0) {
                        logger.info("event=no_messages_found_for_longpoll_receive action=re_queueing time_left_ms="
                                + (request.getWaitTime() - System.currentTimeMillis() + request.getRequestReceivedTimestamp())
                                + " queue_arn=" + queueArn + " remote_address=" + remoteAddress);
                        contextQueue.offer(asyncContext);
                    }
                }

            } catch (Exception ex) {
                logger.error("event=longpoll_queue_error queue_arn=" + queueArn, ex);
            }

        } finally {
            CMBControllerServlet.valueAccumulator.deleteAllCounters();
        }

        return messageCount;
    }

    /**
     * Starts the netty long-poll notification listener (idempotent).
     * FIX: synchronized — the previous unsynchronized check-then-act on the
     * static 'initialized' flag could bind the port twice under concurrency.
     */
    public static synchronized void listen() {

        if (!initialized) {

            contextQueues = new ConcurrentHashMap<String, ConcurrentLinkedQueue<AsyncContext>>();

            serverSocketChannelFactory = new NioServerSocketChannelFactory(
                    Executors.newCachedThreadPool(), Executors.newCachedThreadPool());
            ServerBootstrap serverBootstrap = new ServerBootstrap(serverSocketChannelFactory);

            serverBootstrap.setPipelineFactory(new ChannelPipelineFactory() {
                public ChannelPipeline getPipeline() {
                    return Channels.pipeline(new LongPollServerHandler());
                }
            });

            serverBootstrap.setOption("child.tcpNoDelay", true);
            serverBootstrap.setOption("child.keepAlive", true);

            serverBootstrap.bind(new InetSocketAddress(CMBProperties.getInstance().getCQSLongPollPort()));

            initialized = true;

            logger.info("event=longpoll_receiver_service_listening port="
                    + CMBProperties.getInstance().getCQSLongPollPort());
        }
    }

    /** Releases the netty resources acquired by listen(). */
    public static void shutdown() {
        if (serverSocketChannelFactory != null) {
            serverSocketChannelFactory.releaseExternalResources();
        }
    }
}
package org.mxunit.xant.logparser; /* 4.29.08 * To Do: Normalize Directory paths .. address trailing slashes. * * * */ import java.util.StringTokenizer; import java.io.File; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.types.Commandline; import org.apache.tools.ant.Project; import org.apache.tools.ant.types.Resource; import org.apache.tools.ant.util.FileUtils; import java.text.DateFormat; import java.util.Calendar; public class FusionReactorLogParser extends MSLogParser { public static int ALL_LOGS = 5; public static String CRASH_HEADERS = "crashprotection-headers.txt"; public static int CRASH_LOG = 2; public static String CRASH_LOGS = "crashprotection-*.log"; public static int CSV_TYPE = 2; private static final FileUtils FILE_UTILS = FileUtils.getFileUtils(); public static int HTML_TYPE = 1; public static String JDBC_HEADERS = "jdbc-headers.txt"; public static int JDBC_LOG = 3; public static String JDBC_LOGS = "jdbc-*.log"; public static String REQUEST_HEADERS = "request-headers.txt"; public static int REQUEST_LOG = 1; public static String REQUEST_LOGS = "request-*.log"; public static String RESOURCE_HEADERS = "resource-headers.txt"; public static int RESOURCE_LOG = 4; public static String RESOURCE_LOGS = "resource-*.log"; //what to select from private String columnList = null; //ASC/DESC flag private String direction = "desc"; //file name for labeling generated reports private String fileName = null; //for where clause. 
Format: foo=bar and this=that or bar=fu private String filter = null; //html,csv, private String format = "html"; private String groupBy = null; //alternate location of header files private String headerFileLocation = null; //the unique identifier for the run private String id = null; private String log = null; private String logDir = null; private int maxRows = 1000; //Where to store output private String outputDir = "xant-temp"; private String sort = "date,time"; private String sql = ""; private String tempType = null; private String title = null; //type of report: request,jdbc,crash ... private String type = null; private String xsl = null; private XMLUtil xmlUtil = new XMLUtil(); private String xmlFileName = null; /** * * Configures the commandLine for this class * * @param java.lang.String * * */ public void configure(String type){ setTempType(type); if(type.equals("request")){ setHeaderFile(getLogDir() + REQUEST_HEADERS); setLog(REQUEST_LOGS); } else if(type.equals("crash")){ setHeaderFile(getLogDir() + CRASH_HEADERS); setLog(CRASH_LOGS); } else if(type.equals("jdbc")){ setHeaderFile(getLogDir() + JDBC_HEADERS); setLog(JDBC_LOGS); } else if(type.equals("resource")){ setHeaderFile(getLogDir() + RESOURCE_HEADERS); setLog(RESOURCE_LOGS); } else{ throw new RuntimeException("Unknown log type: " + type); } setFileName(type); if(getFormat().equals("csv")){ setOutput("csv"); } else{ //this will be either xml or html, so we need xml. Someday, chart ... setOutput("xml"); /* *schemaType="0" * */ setRootName("frantic"); setRowName("entry"); setCompact("ON"); setSchemaType("0"); } //Applies to all types setInput("TSV"); setSeparator("space"); setSql(); } /** * * Run by ant when invoked. 
* * */ public void execute() throws BuildException { log("Hello, FusionReactorLogParser."); log("Outputting results to: " + getOutputDir()); if(type == null || type.equals("all")){ setType("request,jdbc,crash,resource"); } StringTokenizer types = new StringTokenizer(getType()); //log("type=" + type); //log("Number of nested <sql> elements: " + sqlCommands.size()); //Maybe won't need this ... but maybe good for quick/dirty queries? //Run this only if nested sql elements are present. if(sqlCommands.size() > 0){ MSLogParser.SQLCommand sqlCmd = new MSLogParser.SQLCommand(); sqlCmd = (MSLogParser.SQLCommand)sqlCommands.elementAt(0); setInput("TSV"); setSeparator("space"); //this.sqlCommands = sqlCommands.elementAt(1). //SQLCommand sqlCmd = (SQLCommand)i.next(); this.sql = sqlCmd.getMessage(); Commandline cmd = new LogParserCommandLine(this); log("Running cmd: " + cmd.toString()); int results = super.run(cmd); return; } /* * 04.25.08 * To Do: If the type is html, then transform the xml * */ while(types.hasMoreTokens()){ String token = types.nextToken(","); configure(token); Commandline cmd = new LogParserCommandLine(this); log("Running cmd: " + cmd.toString()); System.out.println("Running cmd: " + cmd.toString()); int results = super.run(cmd); System.out.println(getOutput()); //assuming LogParser generated XML //how do we know we want html? what's the rule if(getFormat().equals("html")){ try{ transform(); } catch (Exception be){ throw new BuildException(be); } } // } } /***************************************************************** * * * Setters and Getters * * * **************************************************************/ public String getColumnList() { return (columnList != null ? columnList : "*"); } public String getDirection() { return direction; } public String getFileName(){ return this.fileName; } public String getFilter() { return (filter != null ? 
"where " + filter : ""); } public String getFormat() { return this.format; } public String getGroupBy(){ return (this.groupBy != null ? " group by " + groupBy : ""); } public String getHeaderFileLocation(){ return this.headerFileLocation; } public String getId() { return (id != null ? id : java.util.UUID.randomUUID().toString()); } public String getLog(){ return this.log; } public String getLogDir() { return this.logDir; } public int getMaxRows() { return this.maxRows; } public String getOutputDir () { //To Do: reg if(outputDir != null){ //strip trailing slash String slash = "/|\\$"; //slash = "/|\\$"; //char last = outputDir.charAt(outputDir.length()-1); //old: was working on 4/28/08 but smelly //if(outputDir.endsWith(slash)){ /// this.outputDir = outputDir.substring(0, outputDir.length()-2); //} this.outputDir = this.outputDir.replace(slash, ""); } return this.outputDir; } /** * * Specifies which columns are used to sort the query. * Default is <em>date,time</em> * * * */ public String getSort() { //To do: build so no sorting is required. //Case: If someone wants rows w/no date time info. return (sort != null ? " order by " + this.sort : "order by date,time"); } public String getSql(){ return this.sql; } public String getTempType() { return tempType; } public String getTitle() { return (title != null ? title : "FusionReactor Log Report"); } public String getType() { return (this.type == null ? "all" : this.type); } public String getXSL(){ return this.xsl; } /****************************** * * Setters * * ****************************/ public void setColumnList(String columnList) { this.columnList = columnList; } /** * * Adds sort direction to sql query. * * */ public void setDirection(String direction) { this.direction = direction; } public void setFileName(String type){ String fileName = "frantic-"; fileName += getTempType() + "-"; fileName += getId(); fileName += "." 
+ getFormat(); this.fileName = fileName; } /** * * Appends filter expression to where clause in sql query. * Default is an empty string; * * */ public void setFilter(String filter) { this.filter = filter; } /** * One of HTML, CSV, or XML. * * */ public void setFormat(String format) { log(format); if( ! format.equals("csv") && ! format.equals("html") && ! format.equals("xml")){ throw new RuntimeException("Format: " + format + ", is not currently supported."); } this.format = format; } public void setGroupBy(String groupBy){ this.groupBy = groupBy; } /** * * Sets the alternative location from which to read the header information. * * * */ public void setHeaderFileLocation(String headerFileLocation){ this.headerFileLocation = headerFileLocation; } public void setId(String id) { this.id = id; } /** * * Sets the log from which to read. * * * */ public void setLog(String log){ this.log = log; } /** * Used to get the location of log and header files * * */ public void setLogDir(String logDir) { this.logDir = logDir; } /** * * How many rows to fetch from the query. Default is 1000. * * */ public void setMaxRows(int maxRows) { if(maxRows < 1){ throw new RuntimeException("maxRows must be greater than 1. You entered " + maxRows); } this.maxRows = maxRows; } /** * Where the output files should be sent to. * * */ public void setOutputDir (String outputDir) { this.outputDir = outputDir; } /** * Inject project for testing. * * */ public void setProject(Project project){ this.project = project; } public void setSort(String sort) { this.sort = sort; } /** * * Default SQL statement. May be overridden by nested <sql> elements. 
* * */ public void setSql(){ String logDir = getLogDir(); String outDir = getOutputDir(); String sql = ""; sql += "select top " + this.getMaxRows() + " " + getColumnList() + " "; sql += "into " + outDir + System.getProperty("file.separator") + setXmlFileName(getFileName()) + " "; sql += "from " + logDir +getLog() + " "; sql += " " + getFilter() + " "; sql += getSort(); sql += " " + getDirection(); sql += " " + getGroupBy(); this.sql = sql ; } public void setTempType(String tempType) { this.tempType = tempType; } public void setTitle(String title) { this.title = title; } /** * Used to determine what kind of data to fetch: * One of request,jdbc,crashprotection, etc.. * * */ public void setType(String type) { this.type = type; } public void setXSL(String xsl){ this.xsl = (xsl == null ? getClass().getResource("xsl/cf-app.xsl").toString() : xsl); } //Replaces .html w/.xml for output public String setXmlFileName(String fileName){ return fileName.replaceAll("\\.html$", ".xml"); } /** * * Wraps XMUtil.transorm(String, String) * * */ public void transform() throws Exception { System.out.print("transforming ..."); Resource resource = xmlUtil.getStylesheet("frantic.xsl"); File out = new File(getOutputDir() + System.getProperty("file.separator") + getFileName()); File in = new File(getOutputDir() + System.getProperty("file.separator") + setXmlFileName(getFileName())); log(in.toString()); log(out.toString()); try{ //Pass the title string to the Trax processor. 
//Displayed in title and header of result doc Calendar calendar = Calendar.getInstance(); String today = DateFormat.getDateInstance().format(calendar.getTime()); String time = DateFormat.getTimeInstance().format(calendar.getTime()); xmlUtil.setParam("title", getTitle()); xmlUtil.setParam("dateTimeString", today + " " + time); xmlUtil.traxform(in, out, resource); } catch(Exception e){ throw new BuildException("XML Transformation failed for " + in.toString(), e); } } /****************************** End FusionReactorLogParser **********************************/ } //
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.rmapp; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.ipc.CallerContext; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.ApplicationTimeoutType; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.LogAggregationStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.ReservationId; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.server.api.protocolrecords.LogAggregationReport; import 
org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;

/**
 * The interface to an Application in the ResourceManager. Take a
 * look at {@link RMAppImpl} for its implementation. This interface
 * exposes methods to access various updates in application status/report.
 */
public interface RMApp extends EventHandler<RMAppEvent> {

  /**
   * The application id for this {@link RMApp}.
   * @return the {@link ApplicationId} for this {@link RMApp}.
   */
  ApplicationId getApplicationId();

  /**
   * The application submission context for this {@link RMApp}.
   * @return the {@link ApplicationSubmissionContext} for this {@link RMApp}
   */
  ApplicationSubmissionContext getApplicationSubmissionContext();

  /**
   * The current state of the {@link RMApp}.
   * @return the current state {@link RMAppState} for this application.
   */
  RMAppState getState();

  /**
   * The user who submitted this application.
   * @return the user who submitted the application.
   */
  String getUser();

  /**
   * Progress of application.
   * @return the progress of the {@link RMApp}.
   */
  float getProgress();

  /**
   * {@link RMApp} can have multiple application attempts {@link RMAppAttempt}.
   * This method returns the {@link RMAppAttempt} corresponding to
   * {@link ApplicationAttemptId}.
   * @param appAttemptId the application attempt id
   * @return the {@link RMAppAttempt} corresponding to the
   *         {@link ApplicationAttemptId}.
   */
  RMAppAttempt getRMAppAttempt(ApplicationAttemptId appAttemptId);

  /**
   * Each Application is submitted to a queue decided by {@link
   * ApplicationSubmissionContext#setQueue(String)}.
   * This method returns the queue to which an application was submitted.
   * @return the queue to which the application was submitted to.
   */
  String getQueue();

  /**
   * Reflects a change in the application's queue from the one specified in the
   * {@link ApplicationSubmissionContext}.
   * @param name the new queue name
   */
  void setQueue(String name);

  /**
   * The name of the application as set in {@link
   * ApplicationSubmissionContext#setApplicationName(String)}.
   * @return the name of the application.
   */
  String getName();

  /**
   * {@link RMApp} can have multiple application attempts {@link RMAppAttempt}.
   * This method returns the current {@link RMAppAttempt}.
   * @return the current {@link RMAppAttempt}
   */
  RMAppAttempt getCurrentAppAttempt();

  /**
   * {@link RMApp} can have multiple application attempts {@link RMAppAttempt}.
   * This method returns all the {@link RMAppAttempt}s for the RMApp.
   * @return all {@link RMAppAttempt}s for the RMApp.
   */
  Map<ApplicationAttemptId, RMAppAttempt> getAppAttempts();

  /**
   * To get the status of an application in the RM, this method can be used.
   * If full access is not allowed then the following fields in the report
   * will be stubbed:
   * <ul>
   *   <li>host - set to "N/A"</li>
   *   <li>RPC port - set to -1</li>
   *   <li>client token - set to "N/A"</li>
   *   <li>diagnostics - set to "N/A"</li>
   *   <li>tracking URL - set to "N/A"</li>
   *   <li>original tracking URL - set to "N/A"</li>
   *   <li>resource usage report - all values are -1</li>
   * </ul>
   *
   * @param clientUserName the user name of the client requesting the report
   * @param allowAccess whether to allow full access to the report
   * @return the {@link ApplicationReport} detailing the status of the
   *         application.
   */
  ApplicationReport createAndGetApplicationReport(String clientUserName,
      boolean allowAccess);

  /**
   * To receive the collection of all {@link RMNode}s whose updates have been
   * received by the RMApp. Updates can be node becoming lost or becoming
   * healthy etc. The method clears the information from the {@link RMApp}. So
   * each call to this method gives the delta from the previous call.
   * @param updatedNodes Collection into which the updates are transferred
   * @return the number of nodes added to the {@link Collection}
   */
  int pullRMNodeUpdates(Collection<RMNode> updatedNodes);

  /**
   * The finish time of the {@link RMApp}.
   * @return the finish time of the application.
   */
  long getFinishTime();

  /**
   * The start time of the application.
   * @return the start time of the application.
   */
  long getStartTime();

  /**
   * The submit time of the application.
   * @return the submit time of the application.
   */
  long getSubmitTime();

  /**
   * The tracking url for the application master.
   * @return the tracking url for the application master.
   */
  String getTrackingUrl();

  /**
   * The collector address for the application. It should be used only if the
   * timeline service v.2 is enabled.
   *
   * @return the address for the application's collector, or null if the
   *         timeline service v.2 is not enabled.
   */
  String getCollectorAddr();

  /**
   * Set collector address for the application. It should be used only if the
   * timeline service v.2 is enabled.
   *
   * @param collectorAddr the address of collector
   */
  void setCollectorAddr(String collectorAddr);

  /**
   * Remove collector address when application is finished or killed. It should
   * be used only if the timeline service v.2 is enabled.
   */
  void removeCollectorAddr();

  /**
   * The original tracking url for the application master.
   * @return the original tracking url for the application master.
   */
  String getOriginalTrackingUrl();

  /**
   * The diagnostics information for the application master.
   * @return the diagnostics information for the application master.
   */
  StringBuilder getDiagnostics();

  /**
   * The final finish state of the AM when unregistering as in
   * {@link FinishApplicationMasterRequest#setFinalApplicationStatus(FinalApplicationStatus)}.
   * @return the final finish state of the AM as set in
   * {@link FinishApplicationMasterRequest#setFinalApplicationStatus(FinalApplicationStatus)}.
   */
  FinalApplicationStatus getFinalApplicationStatus();

  /**
   * The number of max attempts of the application.
   * @return the number of max attempts of the application.
   */
  int getMaxAppAttempts();

  /**
   * Returns the application type.
   * @return the application type.
   */
  String getApplicationType();

  /**
   * Get tags for the application.
   * @return tags corresponding to the application
   */
  Set<String> getApplicationTags();

  /**
   * Check whether this application's state has been saved to the state store.
   * @return the flag indicating whether the applications's state is stored.
   */
  boolean isAppFinalStateStored();

  /**
   * Nodes on which the containers for this {@link RMApp} ran.
   * @return the set of nodes that ran any containers from this {@link RMApp}
   */
  Set<NodeId> getRanNodes();

  /**
   * Create the external user-facing state of ApplicationMaster from the
   * current state of the {@link RMApp}.
   * @return the external user-facing state of ApplicationMaster.
   */
  YarnApplicationState createApplicationState();

  /**
   * Get RMAppMetrics of the {@link RMApp}.
   *
   * @return metrics
   */
  RMAppMetrics getRMAppMetrics();

  /** @return the reservation id this application runs under, if any. */
  ReservationId getReservationId();

  /** @return the resource requests made for the application master. */
  List<ResourceRequest> getAMResourceRequests();

  /** @return per-node log aggregation reports for this application. */
  Map<NodeId, LogAggregationReport> getLogAggregationReportsForApp();

  /** @return the log aggregation status to include in application reports. */
  LogAggregationStatus getLogAggregationStatusForAppReport();

  /**
   * Return the node label expression of the AM container.
   * @return the node label expression of the AM container.
   */
  String getAmNodeLabelExpression();

  /** @return the node label expression of the application. */
  String getAppNodeLabelExpression();

  /** @return the caller context recorded at submission time. */
  CallerContext getCallerContext();

  /** @return the configured application timeouts, keyed by timeout type. */
  Map<ApplicationTimeoutType, Long> getApplicationTimeouts();

  /**
   * Get priority of the application.
   * @return priority
   */
  Priority getApplicationPriority();

  /**
   * To verify whether app has reached in its completing/completed states.
   *
   * @return True/False to confirm whether app is in final states
   */
  boolean isAppInCompletedStates();
}
package org.jaudiotagger.tag.datatype;

import org.jaudiotagger.tag.InvalidDataTypeException;
import org.jaudiotagger.tag.TagOptionSingleton;
import org.jaudiotagger.tag.id3.AbstractTagFrameBody;
import org.jaudiotagger.tag.id3.valuepair.TextEncoding;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Represents a String which is not delimited by null character.
 * <p>
 * This type of String will usually only be used when it is the last field within a frame, when reading the remainder of
 * the byte array will be read, when writing the frame will be accommodate the required size for the String. The String
 * will be encoded based upon the text encoding of the frame that it belongs to.
 * <p>
 * All TextInformation frames support multiple strings, stored as a null separated list, where null is represented by
 * the termination code for the character encoding. This functionality is only officially support in ID3v24.
 * <p>
 * Most applications will ignore any but the first value, but some such as Foobar2000 will decode them properly
 * <p>
 * iTunes write null terminators characters after the String even though it only writes a single value.
 */
public class TextEncodedStringSizeTerminated extends AbstractString {

    /**
     * Creates a new empty TextEncodedStringSizeTerminated datatype.
     *
     * @param identifier identifies the frame type
     * @param frameBody  the frame body this field belongs to
     */
    public TextEncodedStringSizeTerminated(String identifier, AbstractTagFrameBody frameBody) {
        super(identifier, frameBody);
    }

    /**
     * Copy constructor.
     *
     * @param object the instance to copy
     */
    public TextEncodedStringSizeTerminated(TextEncodedStringSizeTerminated object) {
        super(object);
    }

    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        return obj instanceof TextEncodedStringSizeTerminated && super.equals(obj);
    }

    /**
     * Read a 'n' bytes from buffer into a String where n is the framesize - offset
     * so therefore cannot use this if there are other objects after it because it has no
     * delimiter.
     * <p>
     * Must take into account the text encoding defined in the Encoding Object
     * ID3 Text Frames often allow multiple strings seperated by the null char
     * appropriate for the encoding.
     *
     * @param arr    this is the buffer for the frame
     * @param offset this is where to start reading in the buffer for this field
     * @throws NullPointerException
     * @throws IndexOutOfBoundsException
     */
    public void readByteArray(byte[] arr, int offset) throws InvalidDataTypeException {
        logger.finest("Reading from array from offset:" + offset);

        //Decode sliced inBuffer
        ByteBuffer inBuffer;
        //#302 [dallen] truncating array manually since the decoder.decode() does not honor the offset in the in buffer
        byte[] truncArr = new byte[arr.length - offset];
        System.arraycopy(arr, offset, truncArr, 0, truncArr.length);
        inBuffer = ByteBuffer.wrap(truncArr);

        CharBuffer outBuffer = CharBuffer.allocate(arr.length - offset);
        CharsetDecoder decoder = getCorrectDecoder(inBuffer);
        CoderResult coderResult = decoder.decode(inBuffer, outBuffer, true);
        if (coderResult.isError()) {
            logger.warning("Decoding error:" + coderResult.toString());
        }
        decoder.flush(outBuffer);
        outBuffer.flip();

        //If using UTF16 with BOM we then search through the text removing any BOMs that could exist
        //for multiple values, BOM could be Big Endian or Little Endian
        if (StandardCharsets.UTF_16.equals(getTextEncodingCharSet())) {
            value = outBuffer.toString().replace("\ufeff", "").replace("\ufffe", "");
        } else {
            value = outBuffer.toString();
        }

        //SetSize, important this is correct for finding the next datatype
        setSize(arr.length - offset);
        logger.finest("Read SizeTerminatedString:" + value + " size:" + size);
    }

    /**
     * Write String using specified encoding.
     * <p>
     * When this is called multiple times, all but the last value has a trailing null.
     *
     * @param encoder    the encoder to use
     * @param next       the value to encode
     * @param i          index of this value
     * @param noOfValues total number of values being written
     * @return the encoded bytes, rewound and ready to read
     * @throws CharacterCodingException
     */
    protected ByteBuffer writeString(CharsetEncoder encoder, String next, int i, int noOfValues)
            throws CharacterCodingException {
        ByteBuffer bb;
        if ((i + 1) == noOfValues) {
            bb = encoder.encode(CharBuffer.wrap(next));
        } else {
            bb = encoder.encode(CharBuffer.wrap(next + '\0'));
        }
        bb.rewind();
        return bb;
    }

    /**
     * Write String in UTF-LEBOM format.
     * <p>
     * When this is called multiple times, all but the last value has a trailing null.
     * <p>
     * Remember we are using this charset because the charset that writes BOM does it the wrong way for us
     * so we use this none and then manually add the BOM ourselves.
     *
     * @param next       the value to encode
     * @param i          index of this value
     * @param noOfValues total number of values being written
     * @return the encoded bytes, rewound and ready to read
     * @throws CharacterCodingException
     */
    protected ByteBuffer writeStringUTF16LEBOM(final String next, final int i, final int noOfValues)
            throws CharacterCodingException {
        final CharsetEncoder encoder = StandardCharsets.UTF_16LE.newEncoder();
        encoder.onMalformedInput(CodingErrorAction.IGNORE);
        encoder.onUnmappableCharacter(CodingErrorAction.IGNORE);

        ByteBuffer bb;
        //Note remember LE BOM is ff fe but this is handled by encoder Unicode char is fe ff
        if ((i + 1) == noOfValues) {
            bb = encoder.encode(CharBuffer.wrap('\ufeff' + next));
        } else {
            bb = encoder.encode(CharBuffer.wrap('\ufeff' + next + '\0'));
        }
        bb.rewind();
        return bb;
    }

    /**
     * Write String in UTF-BEBOM format.
     * <p>
     * When this is called multiple times, all but the last value has a trailing null.
     *
     * @param next       the value to encode
     * @param i          index of this value
     * @param noOfValues total number of values being written
     * @return the encoded bytes, rewound and ready to read
     * @throws CharacterCodingException
     */
    protected ByteBuffer writeStringUTF16BEBOM(final String next, final int i, final int noOfValues)
            throws CharacterCodingException {
        final CharsetEncoder encoder = StandardCharsets.UTF_16BE.newEncoder();
        encoder.onMalformedInput(CodingErrorAction.IGNORE);
        encoder.onUnmappableCharacter(CodingErrorAction.IGNORE);

        ByteBuffer bb;
        //Add BOM
        if ((i + 1) == noOfValues) {
            bb = encoder.encode(CharBuffer.wrap('\ufeff' + next));
        } else {
            bb = encoder.encode(CharBuffer.wrap('\ufeff' + next + '\0'));
        }
        bb.rewind();
        return bb;
    }

    /**
     * Removing trailing null from end of String, this should not be there but some applications continue to write
     * this unnecessary null char.
     */
    protected void stripTrailingNull() {
        if (TagOptionSingleton.getInstance().isRemoveTrailingTerminatorOnWrite()) {
            String stringValue = (String) value;
            if (stringValue.length() > 0 && stringValue.charAt(stringValue.length() - 1) == '\0') {
                value = stringValue.substring(0, stringValue.length() - 1);
            }
        }
    }

    /**
     * Because nulls are stripped we need to check if not removing trailing nulls whether the original
     * value ended with a null and if so add it back in.
     *
     * @param values      the split values; the last entry is modified in place
     * @param stringValue the original unsplit value
     */
    protected void checkTrailingNull(List<String> values, String stringValue) {
        if (!TagOptionSingleton.getInstance().isRemoveTrailingTerminatorOnWrite()) {
            if (stringValue.length() > 0 && stringValue.charAt(stringValue.length() - 1) == '\0') {
                String lastVal = values.get(values.size() - 1);
                String newLastVal = lastVal + '\0';
                values.set(values.size() - 1, newLastVal);
            }
        }
    }

    /**
     * Write String into byte array.
     * <p>
     * It will remove a trailing null terminator if exists if the option
     * RemoveTrailingTerminatorOnWrite has been set.
     *
     * @return the data as a byte array in format to write to file
     */
    public byte[] writeByteArray() {
        byte[] data;
        //Try and write to buffer using the CharSet defined by getTextEncodingCharSet()
        final Charset charset = getTextEncodingCharSet();
        try {
            stripTrailingNull();

            //Special Handling because there is no UTF16 BOM LE charset
            String stringValue = (String) value;
            Charset actualCharSet = null;
            if (StandardCharsets.UTF_16.equals(charset)) {
                if (TagOptionSingleton.getInstance().isEncodeUTF16BomAsLittleEndian()) {
                    actualCharSet = StandardCharsets.UTF_16LE;
                } else {
                    actualCharSet = StandardCharsets.UTF_16BE;
                }
            }

            //Ensure large enough for any encoding
            ByteBuffer outputBuffer = ByteBuffer.allocate((stringValue.length() + 3) * 3);

            //Ensure each string (if multiple values) is written with BOM by writing separately
            List<String> values = splitByNullSeperator(stringValue);
            checkTrailingNull(values, stringValue);

            //For each value
            for (int i = 0; i < values.size(); i++) {
                String next = values.get(i);
                if (StandardCharsets.UTF_16LE.equals(actualCharSet)) {
                    outputBuffer.put(writeStringUTF16LEBOM(next, i, values.size()));
                } else if (StandardCharsets.UTF_16BE.equals(actualCharSet)) {
                    outputBuffer.put(writeStringUTF16BEBOM(next, i, values.size()));
                } else {
                    final CharsetEncoder charsetEncoder = charset.newEncoder();
                    charsetEncoder.onMalformedInput(CodingErrorAction.IGNORE);
                    charsetEncoder.onUnmappableCharacter(CodingErrorAction.IGNORE);
                    outputBuffer.put(writeString(charsetEncoder, next, i, values.size()));
                }
            }
            outputBuffer.flip();
            data = new byte[outputBuffer.limit()];
            outputBuffer.rewind();
            outputBuffer.get(data, 0, outputBuffer.limit());
            setSize(data.length);
        }
        //https://bitbucket.org/ijabz/jaudiotagger/issue/1/encoding-metadata-to-utf-16-can-fail-if
        catch (CharacterCodingException ce) {
            logger.severe(ce.getMessage() + ":" + charset + ":" + value);
            throw new RuntimeException(ce);
        }
        return data;
    }

    /**
     * Split the values separated by null character.
     *
     * @param value the raw value
     * @return list of values, guaranteed to be at least one value
     */
    public static List<String> splitByNullSeperator(String value) {
        String[] valuesarray = value.split("\\u0000");
        List<String> values = Arrays.asList(valuesarray);
        //Read only list so if empty have to create new list
        if (values.isEmpty()) {
            values = new ArrayList<String>(1);
            values.add("");
        }
        return values;
    }

    /**
     * Add an additional String to the current String value.
     *
     * @param value the value to append after a null separator
     */
    public void addValue(String value) {
        setValue(this.value + "\u0000" + value);
    }

    /**
     * How many values are held, each value is separated by a null terminator.
     *
     * @return number of values held, usually this will be one.
     */
    public int getNumberOfValues() {
        return splitByNullSeperator((String) value).size();
    }

    /**
     * Get the nth value.
     *
     * @param index the zero-based value index
     * @return the nth value
     * @throws IndexOutOfBoundsException if value does not exist
     */
    public String getValueAtIndex(int index) {
        // FIX: was a raw List requiring an unchecked cast; use List<String>.
        List<String> values = splitByNullSeperator((String) value);
        return values.get(index);
    }

    /**
     * @return list of all values
     */
    public List<String> getValues() {
        return splitByNullSeperator((String) value);
    }

    /**
     * Get value(s) whilst removing any trailing nulls.
     *
     * @return all values rejoined with null separators, minus any trailing null
     */
    public String getValueWithoutTrailingNull() {
        List<String> values = splitByNullSeperator((String) value);
        // FIX: StringBuilder instead of the needlessly synchronized StringBuffer.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < values.size(); i++) {
            if (i != 0) {
                sb.append("\u0000");
            }
            sb.append(values.get(i));
        }
        return sb.toString();
    }
}
/** * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.hystrix.contrib.codahalemetricspublisher; import com.codahale.metrics.Gauge; import com.codahale.metrics.MetricRegistry; import com.netflix.hystrix.HystrixCircuitBreaker; import com.netflix.hystrix.HystrixCommandGroupKey; import com.netflix.hystrix.HystrixCommandKey; import com.netflix.hystrix.HystrixCommandMetrics; import com.netflix.hystrix.HystrixCommandProperties; import com.netflix.hystrix.strategy.metrics.HystrixMetricsPublisherCommand; import com.netflix.hystrix.util.HystrixRollingNumberEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import rx.functions.Func0; /** * Implementation of {@link HystrixMetricsPublisherCommand} using Coda Hale Metrics (https://github.com/codahale/metrics) */ public class HystrixCodaHaleMetricsPublisherCommand implements HystrixMetricsPublisherCommand { private final HystrixCommandKey key; private final HystrixCommandGroupKey commandGroupKey; private final HystrixCommandMetrics metrics; private final HystrixCircuitBreaker circuitBreaker; private final HystrixCommandProperties properties; private final MetricRegistry metricRegistry; private final String metricGroup; private final String metricType; static final Logger logger = LoggerFactory.getLogger(HystrixCodaHaleMetricsPublisherCommand.class); public HystrixCodaHaleMetricsPublisherCommand(HystrixCommandKey commandKey, HystrixCommandGroupKey commandGroupKey, 
HystrixCommandMetrics metrics, HystrixCircuitBreaker circuitBreaker, HystrixCommandProperties properties, MetricRegistry metricRegistry) { this.key = commandKey; this.commandGroupKey = commandGroupKey; this.metrics = metrics; this.circuitBreaker = circuitBreaker; this.properties = properties; this.metricRegistry = metricRegistry; this.metricGroup = commandGroupKey.name(); this.metricType = key.name(); } /** * An implementation note. If there's a version mismatch between hystrix-core and hystrix-codahale-metrics-publisher, * the code below may reference a HystrixRollingNumberEvent that does not exist in hystrix-core. If this happens, * a j.l.NoSuchFieldError occurs. Since this data is not being generated by hystrix-core, it's safe to count it as 0 * and we should log an error to get users to update their dependency set. */ @Override public void initialize() { metricRegistry.register(createMetricName("isCircuitBreakerOpen"), new Gauge<Boolean>() { @Override public Boolean getValue() { return circuitBreaker.isOpen(); } }); // allow monitor to know exactly at what point in time these stats are for so they can be plotted accurately metricRegistry.register(createMetricName("currentTime"), new Gauge<Long>() { @Override public Long getValue() { return System.currentTimeMillis(); } }); // cumulative counts safelyCreateCumulativeCountForEvent("countBadRequests", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.BAD_REQUEST; } }); safelyCreateCumulativeCountForEvent("countCollapsedRequests", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.COLLAPSED; } }); safelyCreateCumulativeCountForEvent("countEmit", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.EMIT; } }); safelyCreateCumulativeCountForEvent("countExceptionsThrown", new 
Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.EXCEPTION_THROWN; } }); safelyCreateCumulativeCountForEvent("countFailure", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FAILURE; } }); safelyCreateCumulativeCountForEvent("countFallbackEmit", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_EMIT; } }); safelyCreateCumulativeCountForEvent("countFallbackFailure", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_FAILURE; } }); safelyCreateCumulativeCountForEvent("countFallbackMissing", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_MISSING; } }); safelyCreateCumulativeCountForEvent("countFallbackRejection", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_REJECTION; } }); safelyCreateCumulativeCountForEvent("countFallbackSuccess", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_SUCCESS; } }); safelyCreateCumulativeCountForEvent("countResponsesFromCache", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.RESPONSE_FROM_CACHE; } }); safelyCreateCumulativeCountForEvent("countSemaphoreRejected", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.SEMAPHORE_REJECTED; } }); safelyCreateCumulativeCountForEvent("countShortCircuited", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return 
HystrixRollingNumberEvent.SHORT_CIRCUITED; } }); safelyCreateCumulativeCountForEvent("countSuccess", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.SUCCESS; } }); safelyCreateCumulativeCountForEvent("countThreadPoolRejected", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.THREAD_POOL_REJECTED; } }); safelyCreateCumulativeCountForEvent("countTimeout", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.TIMEOUT; } }); // rolling counts safelyCreateRollingCountForEvent("rollingCountBadRequests", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.BAD_REQUEST; } }); safelyCreateRollingCountForEvent("rollingCountCollapsedRequests", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.COLLAPSED; } }); safelyCreateRollingCountForEvent("rollingCountEmit", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.EMIT; } }); safelyCreateRollingCountForEvent("rollingCountExceptionsThrown", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.EXCEPTION_THROWN; } }); safelyCreateRollingCountForEvent("rollingCountFailure", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FAILURE; } }); safelyCreateRollingCountForEvent("rollingCountFallbackEmit", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_EMIT; } }); safelyCreateRollingCountForEvent("rollingCountFallbackFailure", new Func0<HystrixRollingNumberEvent>() { 
@Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_FAILURE; } }); safelyCreateRollingCountForEvent("rollingCountFallbackMissing", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_MISSING; } }); safelyCreateRollingCountForEvent("rollingCountFallbackRejection", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_REJECTION; } }); safelyCreateRollingCountForEvent("rollingCountFallbackSuccess", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.FALLBACK_SUCCESS; } }); safelyCreateRollingCountForEvent("rollingCountResponsesFromCache", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.RESPONSE_FROM_CACHE; } }); safelyCreateRollingCountForEvent("rollingCountSemaphoreRejected", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.SEMAPHORE_REJECTED; } }); safelyCreateRollingCountForEvent("rollingCountShortCircuited", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.SHORT_CIRCUITED; } }); safelyCreateRollingCountForEvent("rollingCountSuccess", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.SUCCESS; } }); safelyCreateRollingCountForEvent("rollingCountThreadPoolRejected", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.THREAD_POOL_REJECTED; } }); safelyCreateRollingCountForEvent("rollingCountTimeout", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return 
HystrixRollingNumberEvent.TIMEOUT; } }); // the rolling number of MaxConcurrentExecutionCount. Can be used to determine saturation safelyCreateRollingCountForEvent("rollingMaxConcurrentExecutionCount", new Func0<HystrixRollingNumberEvent>() { @Override public HystrixRollingNumberEvent call() { return HystrixRollingNumberEvent.COMMAND_MAX_ACTIVE; } }); // the number of executionSemaphorePermits in use right now metricRegistry.register(createMetricName("executionSemaphorePermitsInUse"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getCurrentConcurrentExecutionCount(); } }); // error percentage derived from current metrics metricRegistry.register(createMetricName("errorPercentage"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getHealthCounts().getErrorPercentage(); } }); // latency metrics metricRegistry.register(createMetricName("latencyExecute_mean"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimeMean(); } }); metricRegistry.register(createMetricName("latencyExecute_percentile_5"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimePercentile(5); } }); metricRegistry.register(createMetricName("latencyExecute_percentile_25"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimePercentile(25); } }); metricRegistry.register(createMetricName("latencyExecute_percentile_50"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimePercentile(50); } }); metricRegistry.register(createMetricName("latencyExecute_percentile_75"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimePercentile(75); } }); metricRegistry.register(createMetricName("latencyExecute_percentile_90"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimePercentile(90); } }); 
metricRegistry.register(createMetricName("latencyExecute_percentile_99"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimePercentile(99); } }); metricRegistry.register(createMetricName("latencyExecute_percentile_995"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getExecutionTimePercentile(99.5); } }); metricRegistry.register(createMetricName("latencyTotal_mean"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimeMean(); } }); metricRegistry.register(createMetricName("latencyTotal_percentile_5"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimePercentile(5); } }); metricRegistry.register(createMetricName("latencyTotal_percentile_25"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimePercentile(25); } }); metricRegistry.register(createMetricName("latencyTotal_percentile_50"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimePercentile(50); } }); metricRegistry.register(createMetricName("latencyTotal_percentile_75"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimePercentile(75); } }); metricRegistry.register(createMetricName("latencyTotal_percentile_90"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimePercentile(90); } }); metricRegistry.register(createMetricName("latencyTotal_percentile_99"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimePercentile(99); } }); metricRegistry.register(createMetricName("latencyTotal_percentile_995"), new Gauge<Integer>() { @Override public Integer getValue() { return metrics.getTotalTimePercentile(99.5); } }); // group metricRegistry.register(createMetricName("commandGroup"), new Gauge<String>() { @Override public String getValue() { return commandGroupKey != null ? 
commandGroupKey.name() : null; } }); // properties (so the values can be inspected and monitored) metricRegistry.register(createMetricName("propertyValue_rollingStatisticalWindowInMilliseconds"), new Gauge<Number>() { @Override public Number getValue() { return properties.metricsRollingStatisticalWindowInMilliseconds().get(); } }); metricRegistry.register(createMetricName("propertyValue_circuitBreakerRequestVolumeThreshold"), new Gauge<Number>() { @Override public Number getValue() { return properties.circuitBreakerRequestVolumeThreshold().get(); } }); metricRegistry.register(createMetricName("propertyValue_circuitBreakerSleepWindowInMilliseconds"), new Gauge<Number>() { @Override public Number getValue() { return properties.circuitBreakerSleepWindowInMilliseconds().get(); } }); metricRegistry.register(createMetricName("propertyValue_circuitBreakerErrorThresholdPercentage"), new Gauge<Number>() { @Override public Number getValue() { return properties.circuitBreakerErrorThresholdPercentage().get(); } }); metricRegistry.register(createMetricName("propertyValue_circuitBreakerForceOpen"), new Gauge<Boolean>() { @Override public Boolean getValue() { return properties.circuitBreakerForceOpen().get(); } }); metricRegistry.register(createMetricName("propertyValue_circuitBreakerForceClosed"), new Gauge<Boolean>() { @Override public Boolean getValue() { return properties.circuitBreakerForceClosed().get(); } }); metricRegistry.register(createMetricName("propertyValue_executionIsolationThreadTimeoutInMilliseconds"), new Gauge<Number>() { @Override public Number getValue() { return properties.executionTimeoutInMilliseconds().get(); } }); metricRegistry.register(createMetricName("propertyValue_executionTimeoutInMilliseconds"), new Gauge<Number>() { @Override public Number getValue() { return properties.executionTimeoutInMilliseconds().get(); } }); metricRegistry.register(createMetricName("propertyValue_executionIsolationStrategy"), new Gauge<String>() { @Override public String 
getValue() { return properties.executionIsolationStrategy().get().name(); } }); metricRegistry.register(createMetricName("propertyValue_metricsRollingPercentileEnabled"), new Gauge<Boolean>() { @Override public Boolean getValue() { return properties.metricsRollingPercentileEnabled().get(); } }); metricRegistry.register(createMetricName("propertyValue_requestCacheEnabled"), new Gauge<Boolean>() { @Override public Boolean getValue() { return properties.requestCacheEnabled().get(); } }); metricRegistry.register(createMetricName("propertyValue_requestLogEnabled"), new Gauge<Boolean>() { @Override public Boolean getValue() { return properties.requestLogEnabled().get(); } }); metricRegistry.register(createMetricName("propertyValue_executionIsolationSemaphoreMaxConcurrentRequests"), new Gauge<Number>() { @Override public Number getValue() { return properties.executionIsolationSemaphoreMaxConcurrentRequests().get(); } }); metricRegistry.register(createMetricName("propertyValue_fallbackIsolationSemaphoreMaxConcurrentRequests"), new Gauge<Number>() { @Override public Number getValue() { return properties.fallbackIsolationSemaphoreMaxConcurrentRequests().get(); } }); } protected String createMetricName(String name) { return MetricRegistry.name(metricGroup, metricType, name); } protected void createCumulativeCountForEvent(final String name, final HystrixRollingNumberEvent event) { metricRegistry.register(createMetricName(name), new Gauge<Long>() { @Override public Long getValue() { return metrics.getCumulativeCount(event); } }); } protected void safelyCreateCumulativeCountForEvent(final String name, final Func0<HystrixRollingNumberEvent> eventThunk) { metricRegistry.register(createMetricName(name), new Gauge<Long>() { @Override public Long getValue() { try { return metrics.getCumulativeCount(eventThunk.call()); } catch (NoSuchFieldError error) { logger.error("While publishing CodaHale metrics, error looking up eventType for : {}. 
Please check that all Hystrix versions are the same!", name); return 0L; } } }); } protected void createRollingCountForEvent(final String name, final HystrixRollingNumberEvent event) { metricRegistry.register(createMetricName(name), new Gauge<Long>() { @Override public Long getValue() { return metrics.getRollingCount(event); } }); } protected void safelyCreateRollingCountForEvent(final String name, final Func0<HystrixRollingNumberEvent> eventThunk) { metricRegistry.register(createMetricName(name), new Gauge<Long>() { @Override public Long getValue() { try { return metrics.getRollingCount(eventThunk.call()); } catch (NoSuchFieldError error) { logger.error("While publishing CodaHale metrics, error looking up eventType for : {}. Please check that all Hystrix versions are the same!", name); return 0L; } } }); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.replication; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.NavigableMap; import java.util.TreeMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import 
org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;

/**
 * This class is only a base for other integration-level replication tests. Do not add tests here.
 * TestReplicationSmallTests is where tests that don't require bringing machines up/down should go.
 * All other tests should have their own classes and extend this one.
 *
 * <p>The fixture stands up two mini clusters ("source" utility1 and "peer" utility2) that share a
 * single mini ZooKeeper cluster under different znode parents, creates the same test table on
 * both, and (per test) wires a replication peer from cluster 1 to cluster 2.
 */
public class TestReplicationBase {

  private static final Logger LOG = LoggerFactory.getLogger(TestReplicationBase.class);

  // Configurations for the source cluster (conf1), the peer cluster (conf2), and a
  // local-filesystem variant snapshot of conf1 taken after ZK startup.
  protected static Configuration conf1 = HBaseConfiguration.create();
  protected static Configuration conf2;
  protected static Configuration CONF_WITH_LOCALFS;

  // ZK watchers on each cluster's znode tree.
  protected static ZKWatcher zkw1;
  protected static ZKWatcher zkw2;

  // Admin handles against the source cluster. NOTE(review): hbaseAdmin is obtained from a
  // Connection that is never explicitly closed; test-scoped, torn down with the JVM.
  protected static ReplicationAdmin admin;
  protected static Admin hbaseAdmin;

  // Handles to the test table on the source (htable1) and peer (htable2) clusters.
  protected static Table htable1;
  protected static Table htable2;

  // Per-family replication scopes of the test table, keyed by family name.
  protected static NavigableMap<byte[], Integer> scopes;

  protected static HBaseTestingUtility utility1;
  protected static HBaseTestingUtility utility2;

  // Sizing/timing knobs shared by the subclass tests: batch sizes, polling interval (ms),
  // and how many polls to attempt before failing.
  protected static final int NB_ROWS_IN_BATCH = 100;
  protected static final int NB_ROWS_IN_BIG_BATCH = NB_ROWS_IN_BATCH * 10;
  protected static final long SLEEP_TIME = 500;
  protected static final int NB_RETRIES = 50;

  // The replicated test table: family "f" is replicated globally, "norep" is not.
  protected static final TableName tableName = TableName.valueOf("test");
  protected static final byte[] famName = Bytes.toBytes("f");
  protected static final byte[] row = Bytes.toBytes("row");
  protected static final byte[] noRepfamName = Bytes.toBytes("norep");

  // Id of the replication peer created in setUpBase()/removed in tearDownBase().
  protected static final String PEER_ID2 = "2";

  /** Override to make setUpBase() create the peer as a serial replication peer. */
  protected boolean isSerialPeer() {
    return false;
  }

  /** Override to make setUpBase() create the peer as a synchronous replication peer. */
  protected boolean isSyncPeer() {
    return false;
  }

  /**
   * Empties the test table on the source cluster and waits until the resulting Deletes have
   * replicated so that the peer cluster's copy is empty too.
   *
   * @throws IOException if rolling WALs, counting, or scanning fails
   * @throws InterruptedException if interrupted while sleeping between polls
   */
  protected final void cleanUp() throws IOException, InterruptedException {
    // Starting and stopping replication can make us miss new logs,
    // rolling like this makes sure the most recent one gets added to the queue
    for (JVMClusterUtil.RegionServerThread r : utility1.getHBaseCluster()
        .getRegionServerThreads()) {
      utility1.getAdmin().rollWALWriter(r.getRegionServer().getServerName());
    }
    int rowCount = utility1.countRows(tableName);
    utility1.deleteTableData(tableName);
    // truncating the table will send one Delete per row to the slave cluster
    // in an async fashion, which is why we cannot just call deleteTableData on
    // utility2 since late writes could make it to the slave in some way.
    // Instead, we truncate the first table and wait for all the Deletes to
    // make it to the slave.
    Scan scan = new Scan();
    int lastCount = 0;
    for (int i = 0; i < NB_RETRIES; i++) {
      if (i == NB_RETRIES - 1) {
        fail("Waited too much time for truncate");
      }
      ResultScanner scanner = htable2.getScanner(scan);
      Result[] res = scanner.next(rowCount);
      scanner.close();
      if (res.length != 0) {
        if (res.length < lastCount) {
          // Don't increment timeout if we make progress: rewinding i extends the retry
          // budget as long as the row count keeps shrinking.
          i--;
        }
        lastCount = res.length;
        LOG.info("Still got " + res.length + " rows");
        Thread.sleep(SLEEP_TIME);
      } else {
        break;
      }
    }
  }

  /**
   * Polls the peer cluster's table until it holds exactly {@code expectedRows} rows, failing the
   * test after {@code retries} attempts.
   *
   * @param expectedRows number of rows that must appear on the peer
   * @param retries maximum number of scan attempts before failing
   * @throws IOException if scanning the peer table fails
   * @throws InterruptedException if interrupted while sleeping between polls
   */
  protected static void waitForReplication(int expectedRows, int retries)
      throws IOException, InterruptedException {
    Scan scan;
    for (int i = 0; i < retries; i++) {
      scan = new Scan();
      if (i == retries - 1) {
        fail("Waited too much time for normal batch replication");
      }
      ResultScanner scanner = htable2.getScanner(scan);
      Result[] res = scanner.next(expectedRows);
      scanner.close();
      if (res.length != expectedRows) {
        LOG.info("Only got " + res.length + " rows");
        Thread.sleep(SLEEP_TIME);
      } else {
        break;
      }
    }
  }

  /**
   * Loads NB_ROWS_IN_BATCH rows into the source table under the default (replicated) family.
   *
   * @param prefix prepended to each generated row key
   * @param row qualifier and value bytes for each cell
   */
  protected static void loadData(String prefix, byte[] row) throws IOException {
    loadData(prefix, row, famName);
  }

  /**
   * Loads NB_ROWS_IN_BATCH rows (keys {@code prefix + 0 .. prefix + 99}) into the source table.
   *
   * @param prefix prepended to each generated row key
   * @param row qualifier and value bytes for each cell
   * @param familyName column family to write into
   */
  protected static void loadData(String prefix, byte[] row, byte[] familyName) throws IOException {
    List<Put> puts = new ArrayList<>(NB_ROWS_IN_BATCH);
    for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
      Put put = new Put(Bytes.toBytes(prefix + Integer.toString(i)));
      put.addColumn(familyName, row, row);
      puts.add(put);
    }
    htable1.put(puts);
  }

  /**
   * Builds conf1/conf2 and the two testing utilities. conf2 is derived from conf1 so both
   * clusters share the same ZK quorum (distinguished only by their znode parent).
   */
  protected static void configureClusters() {
    conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    // We don't want too many edits per batch sent to the ReplicationEndpoint to trigger
    // sufficient number of events. But we don't want to go too low because
    // HBaseInterClusterReplicationEndpoint partitions entries into batches and we want
    // more than one batch sent to the peer cluster for better testing.
    conf1.setInt("replication.source.size.capacity", 102400);
    conf1.setLong("replication.source.sleepforretries", 100);
    conf1.setInt("hbase.regionserver.maxlogs", 10);
    conf1.setLong("hbase.master.logcleaner.ttl", 10);
    conf1.setInt("zookeeper.recovery.retry", 1);
    conf1.setInt("zookeeper.recovery.retry.intervalmill", 10);
    conf1.setLong(HConstants.THREAD_WAKE_FREQUENCY, 100);
    conf1.setInt("replication.stats.thread.period.seconds", 5);
    conf1.setBoolean("hbase.tests.use.shortcircuit.reads", false);
    conf1.setLong("replication.sleep.before.failover", 2000);
    conf1.setInt("replication.source.maxretriesmultiplier", 10);
    conf1.setFloat("replication.source.ratio", 1.0f);
    conf1.setBoolean("replication.source.eof.autorecovery", true);
    conf1.setLong("hbase.serial.replication.waiting.ms", 100);

    utility1 = new HBaseTestingUtility(conf1);

    // Base conf2 on conf1 so it gets the right zk cluster.
    conf2 = HBaseConfiguration.create(conf1);
    conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
    conf2.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 6);
    conf2.setBoolean("hbase.tests.use.shortcircuit.reads", false);

    utility2 = new HBaseTestingUtility(conf2);
  }

  /**
   * Starts both mini clusters (sharing one mini ZK cluster), creates the test table on each,
   * and opens the table handles. Statement order matters here: ZK must be up before conf1 is
   * re-read, and utility2 must adopt utility1's ZK cluster before its mini cluster starts.
   *
   * @throws Exception if any cluster or table setup step fails
   */
  protected static void startClusters() throws Exception {
    utility1.startMiniZKCluster();
    MiniZooKeeperCluster miniZK = utility1.getZkCluster();
    // Have to reget conf1 in case zk cluster location different
    // than default
    conf1 = utility1.getConfiguration();
    zkw1 = new ZKWatcher(conf1, "cluster1", null, true);
    admin = new ReplicationAdmin(conf1);
    LOG.info("Setup first Zk");

    utility2.setZkCluster(miniZK);
    zkw2 = new ZKWatcher(conf2, "cluster2", null, true);
    LOG.info("Setup second Zk");

    CONF_WITH_LOCALFS = HBaseConfiguration.create(conf1);
    utility1.startMiniCluster(2);
    // Have a bunch of slave servers, because inter-cluster shipping logic uses number of sinks
    // as a component in deciding maximum number of parallel batches to send to the peer cluster.
    utility2.startMiniCluster(4);

    hbaseAdmin = ConnectionFactory.createConnection(conf1).getAdmin();

    // Test table: family "f" replicated (scope GLOBAL, 100 versions), "norep" not replicated.
    TableDescriptor table = TableDescriptorBuilder.newBuilder(tableName)
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(famName).setMaxVersions(100)
            .setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(noRepfamName)).build();
    scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (ColumnFamilyDescriptor f : table.getColumnFamilies()) {
      scopes.put(f.getName(), f.getScope());
    }
    // NOTE(review): connection1/connection2 are intentionally left open — htable1/htable2
    // remain bound to them for the lifetime of the test class.
    Connection connection1 = ConnectionFactory.createConnection(conf1);
    Connection connection2 = ConnectionFactory.createConnection(conf2);
    try (Admin admin1 = connection1.getAdmin()) {
      admin1.createTable(table, HBaseTestingUtility.KEYS_FOR_HBA_CREATE_TABLE);
    }
    try (Admin admin2 = connection2.getAdmin()) {
      admin2.createTable(table, HBaseTestingUtility.KEYS_FOR_HBA_CREATE_TABLE);
    }
    utility1.waitUntilAllRegionsAssigned(tableName);
    utility2.waitUntilAllRegionsAssigned(tableName);
    htable1 = connection1.getTable(tableName);
    htable2 = connection2.getTable(tableName);
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    configureClusters();
    startClusters();
  }

  /** Returns whether a replication peer with the given id is registered on the source cluster. */
  private boolean peerExist(String peerId) throws IOException {
    return hbaseAdmin.listReplicationPeers().stream().anyMatch(p -> peerId.equals(p.getPeerId()));
  }

  /**
   * Per-test setup: (re)creates the replication peer from cluster 1 to cluster 2 if it does not
   * already exist, honoring the subclass's {@link #isSerialPeer()} / {@link #isSyncPeer()} flags.
   */
  @Before
  public void setUpBase() throws Exception {
    if (!peerExist(PEER_ID2)) {
      ReplicationPeerConfigBuilder builder = ReplicationPeerConfig.newBuilder()
          .setClusterKey(utility2.getClusterKey()).setSerial(isSerialPeer());
      if (isSyncPeer()) {
        FileSystem fs2 = utility2.getTestFileSystem();
        // The remote wal dir is not important as we do not use it in DA state, here we only need to
        // confirm that a sync peer in DA state can still replicate data to remote cluster
        // asynchronously.
        builder.setReplicateAllUserTables(false)
            .setTableCFsMap(ImmutableMap.of(tableName, ImmutableList.of()))
            .setRemoteWALDir(new Path("/RemoteWAL")
                .makeQualified(fs2.getUri(), fs2.getWorkingDirectory()).toUri().toString());
      }
      hbaseAdmin.addReplicationPeer(PEER_ID2, builder.build());
    }
  }

  /** Per-test teardown: removes the replication peer created by {@link #setUpBase()}. */
  @After
  public void tearDownBase() throws Exception {
    if (peerExist(PEER_ID2)) {
      hbaseAdmin.removeReplicationPeer(PEER_ID2);
    }
  }

  /**
   * Writes one row on the source, waits for it to replicate, then deletes it and waits for the
   * delete to replicate. Fails the test if either direction times out.
   *
   * @throws IOException if any put/get/delete fails
   * @throws InterruptedException if interrupted while sleeping between polls
   */
  protected static void runSimplePutDeleteTest() throws IOException, InterruptedException {
    Put put = new Put(row);
    put.addColumn(famName, row, row);

    // NOTE(review): deliberately rebinds the static htable1 to a fresh handle from utility1's
    // shared connection before writing.
    htable1 = utility1.getConnection().getTable(tableName);
    htable1.put(put);

    Get get = new Get(row);
    for (int i = 0; i < NB_RETRIES; i++) {
      if (i == NB_RETRIES - 1) {
        fail("Waited too much time for put replication");
      }
      Result res = htable2.get(get);
      if (res.isEmpty()) {
        LOG.info("Row not available");
        Thread.sleep(SLEEP_TIME);
      } else {
        assertArrayEquals(row, res.value());
        break;
      }
    }

    Delete del = new Delete(row);
    htable1.delete(del);

    get = new Get(row);
    for (int i = 0; i < NB_RETRIES; i++) {
      if (i == NB_RETRIES - 1) {
        fail("Waited too much time for del replication");
      }
      Result res = htable2.get(get);
      if (res.size() >= 1) {
        LOG.info("Row not deleted");
        Thread.sleep(SLEEP_TIME);
      } else {
        break;
      }
    }
  }

  /**
   * Loads a standard batch of rows on the source, verifies it locally, then waits for the whole
   * batch to appear on the peer.
   *
   * @throws IOException if loading or scanning fails
   * @throws InterruptedException if interrupted while waiting for replication
   */
  protected static void runSmallBatchTest() throws IOException, InterruptedException {
    // normal Batch tests
    loadData("", row);

    Scan scan = new Scan();

    ResultScanner scanner1 = htable1.getScanner(scan);
    Result[] res1 = scanner1.next(NB_ROWS_IN_BATCH);
    scanner1.close();

    assertEquals(NB_ROWS_IN_BATCH, res1.length);

    waitForReplication(NB_ROWS_IN_BATCH, NB_RETRIES);
  }

  /** Class teardown: closes table/admin handles and shuts the mini clusters down, peer first. */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    htable2.close();
    htable1.close();
    admin.close();
    utility2.shutdownMiniCluster();
    utility1.shutdownMiniCluster();
  }
}
/* Copyright 2015 Samsung Electronics Co., LTD * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gearvrf; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.concurrent.Future; import org.gearvrf.GVRMaterial.GVRShaderType; import org.gearvrf.GVRMaterial.GVRShaderType.Unlit; /** * One of the key GVRF classes: a scene object. * * Every scene object has a {@linkplain #getTransform() location}, and can have * {@linkplain #children() children}. An invisible scene object can be used to * move a set of scene as a unit, preserving their relative geometry. Invisible * scene objects don't need any {@linkplain GVRSceneObject#getRenderData() * render data.} * * <p> * Visible scene objects must have render data * {@linkplain GVRSceneObject#attachRenderData(GVRRenderData) attached.} Each * {@link GVRRenderData} has a {@link GVRMesh GL mesh} that defines its * geometry, and a {@link GVRMaterial} that defines its surface. */ public class GVRSceneObject extends GVRHybridObject { private GVRTransform mTransform; private GVRRenderData mRenderData; private GVRCamera mCamera; private GVRCameraRig mCameraRig; private GVREyePointeeHolder mEyePointeeHolder; private GVRSceneObject mParent; private final List<GVRSceneObject> mChildren = new ArrayList<GVRSceneObject>(); /** * Constructs an empty scene object with a default {@link GVRTransform * transform}. 
* * @param gvrContext * current {@link GVRContext} */ public GVRSceneObject(GVRContext gvrContext) { super(gvrContext, NativeSceneObject.ctor()); attachTransform(new GVRTransform(getGVRContext())); } /** * Constructs a scene object with an arbitrarily complex mesh. * * @param gvrContext * current {@link GVRContext} * @param mesh * a {@link GVRMesh} - usually generated by one of the * {@link GVRContext#loadMesh(GVRAndroidResource)} methods, or * {@link GVRContext#createQuad(float, float)} */ public GVRSceneObject(GVRContext gvrContext, GVRMesh mesh) { this(gvrContext); GVRRenderData renderData = new GVRRenderData(gvrContext); attachRenderData(renderData); renderData.setMesh(mesh); } /** * Constructs a rectangular scene object, whose geometry is completely * specified by the width and height. * * @param gvrContext * current {@link GVRContext} * @param width * the scene object's width * @param height * the scene object's height */ public GVRSceneObject(GVRContext gvrContext, float width, float height) { this(gvrContext, gvrContext.createQuad(width, height)); } /** * The base texture constructor: Constructs a scene object with * {@linkplain GVRMesh an arbitrarily complex geometry} that uses a specific * shader to display a {@linkplain GVRTexture texture.} * * @param gvrContext * current {@link GVRContext} * @param mesh * a {@link GVRMesh} - usually generated by one of the * {@link GVRContext#loadMesh(GVRAndroidResource)} methods, or * {@link GVRContext#createQuad(float, float)} * @param texture * a {@link GVRTexture} * @param shaderId * a specific shader Id - see {@link GVRShaderType} and * {@link GVRMaterialShaderManager} * */ public GVRSceneObject(GVRContext gvrContext, GVRMesh mesh, GVRTexture texture, GVRMaterialShaderId shaderId) { this(gvrContext, mesh); GVRMaterial material = new GVRMaterial(gvrContext, shaderId); material.setMainTexture(texture); getRenderData().setMaterial(material); } private static final GVRMaterialShaderId STANDARD_SHADER = 
GVRShaderType.Unlit.ID; /** * Constructs a scene object with {@linkplain GVRMesh an arbitrarily complex * geometry} that uses the standard {@linkplain Unlit 'unlit shader'} to * display a {@linkplain GVRTexture texture.} * * * @param gvrContext * current {@link GVRContext} * @param mesh * a {@link GVRMesh} - usually generated by one of the * {@link GVRContext#loadMesh(GVRAndroidResource)} methods, or * {@link GVRContext#createQuad(float, float)} * @param texture * a {@link GVRTexture} */ public GVRSceneObject(GVRContext gvrContext, GVRMesh mesh, GVRTexture texture) { this(gvrContext, mesh, texture, STANDARD_SHADER); } /** * Very high-level constructor that asynchronously loads the mesh and * texture. * * Note that because of <a href="package-summary.html#async">asynchronous * request consolidation</a> you generally don't have to do anything special * to create several objects that share the same mesh or texture: if you * create all the objects in {@link GVRScript#onInit(GVRContext) onInit(),} * the meshes and textures will generally <em>not</em> have loaded before * your {@code onInit()} method finishes. Thus, the loading code will see * that, say, {@code sceneObject2} and {@code sceneObject3} are using the * same mesh as {@code sceneObject1}, and will only load the mesh once. * * @param gvrContext * current {@link GVRContext}. * @param futureMesh * mesh of the object. * @param futureTexture * texture of the object. 
* * @since 1.6.8 */ public GVRSceneObject(GVRContext gvrContext, Future<GVRMesh> futureMesh, Future<GVRTexture> futureTexture) { this(gvrContext); // Create the render data GVRRenderData renderData = new GVRRenderData(gvrContext); // Set the mesh renderData.setMesh(futureMesh); // Set the texture GVRMaterial material = new GVRMaterial(gvrContext); material.setMainTexture(futureTexture); renderData.setMaterial(material); // Attach the render data attachRenderData(renderData); } /** * Very high-level constructor that asynchronously loads the mesh and * texture. * * @param gvrContext * current {@link GVRContext}. * @param mesh * Basically, a stream containing a mesh file. * @param texture * Basically, a stream containing a texture file. This can be * either a compressed texture or a regular Android bitmap file. * * @since 1.6.7 */ public GVRSceneObject(GVRContext gvrContext, GVRAndroidResource mesh, GVRAndroidResource texture) { this(gvrContext, gvrContext.loadFutureMesh(mesh), gvrContext .loadFutureTexture(texture)); } /** * Create a standard, rectangular texture object, using a non-default shader * to apply complex visual affects. 
* @param gvrContext
     *            current {@link GVRContext}
     * @param width
     *            the rectangle's width
     * @param height
     *            the rectangle's height
     * @param texture
     *            a {@link GVRTexture}
     * @param shaderId
     *            a specific shader Id
     */
    public GVRSceneObject(GVRContext gvrContext, float width, float height,
            GVRTexture texture, GVRMaterialShaderId shaderId) {
        // Build a quad mesh of the requested size, then delegate to the
        // mesh-based constructor.
        this(gvrContext, gvrContext.createQuad(width, height), texture, shaderId);
    }

    /**
     * Constructs a 2D, rectangular scene object that uses the standard
     * {@linkplain Unlit 'unlit shader'} to display a {@linkplain GVRTexture
     * texture.}
     *
     * @param gvrContext
     *            current {@link GVRContext}
     * @param width
     *            the rectangle's width
     * @param height
     *            the rectangle's height
     * @param texture
     *            a {@link GVRTexture}
     */
    public GVRSceneObject(GVRContext gvrContext, float width, float height,
            GVRTexture texture) {
        // STANDARD_SHADER selects the default unlit shader.
        this(gvrContext, width, height, texture, STANDARD_SHADER);
    }

    /**
     * Get the (optional) name of the object.
     *
     * @return The name of the object. If no name has been assigned, the
     *         returned string will be empty.
     */
    public String getName() {
        // Name is stored on the native side, not in a Java field.
        return NativeSceneObject.getName(getNative());
    }

    /**
     * Set the (optional) name of the object.
     *
     * Scene object names are not needed: they are only for the application's
     * convenience.
     *
     * @param name
     *            Name of the object.
     */
    public void setName(String name) {
        NativeSceneObject.setName(getNative(), name);
    }

    /**
     * Replace the current {@link GVRTransform transform}.
     *
     * Package-private: transforms are managed by the framework, not by
     * application code.
     *
     * @param transform
     *            New transform.
     */
    void attachTransform(GVRTransform transform) {
        // Keep the Java-side cache and the native component in sync.
        mTransform = transform;
        NativeSceneObject.attachTransform(getNative(), transform.getNative());
    }

    /**
     * Remove the object's {@link GVRTransform transform}. After this call, the
     * object will have no transformations associated with it.
     */
    void detachTransform() {
        mTransform = null;
        NativeSceneObject.detachTransform(getNative());
    }

    /**
     * Get the {@link GVRTransform}.
     *
     * A {@link GVRTransform} encapsulates a 4x4 matrix that specifies how to
     * render the {@linkplain GVRMesh GL mesh:} transform methods let you move,
     * rotate, and scale your scene object.
     *
     * @return The current {@link GVRTransform transform}. If no transform is
     *         currently attached to the object, returns {@code null}.
     */
    public GVRTransform getTransform() {
        return mTransform;
    }

    /**
     * Attach {@linkplain GVRRenderData rendering data} to the object.
     *
     * If other rendering data is currently attached, it is replaced with the
     * new data. {@link GVRRenderData} contains the GL mesh, the texture, the
     * shader id, and various shader constants.
     *
     * @param renderData
     *            New rendering data.
     */
    public void attachRenderData(GVRRenderData renderData) {
        mRenderData = renderData;
        // Record the back-pointer so the render data knows its owner.
        renderData.setOwnerObject(this);
        NativeSceneObject.attachRenderData(getNative(), renderData.getNative());
    }

    /**
     * Detach the object's current {@linkplain GVRRenderData rendering data}.
     *
     * An object with no {@link GVRRenderData} is not visible.
     */
    public void detachRenderData() {
        // Clear the old owner back-pointer before dropping our reference.
        if (mRenderData != null) {
            mRenderData.setOwnerObject(null);
        }
        mRenderData = null;
        NativeSceneObject.detachRenderData(getNative());
    }

    /**
     * Get the current {@link GVRRenderData}.
     *
     * @return The current {@link GVRRenderData rendering data}. If no rendering
     *         data is currently attached to the object, returns {@code null}.
     */
    public GVRRenderData getRenderData() {
        return mRenderData;
    }

    /**
     * Attach a new {@link GVRCamera camera} to the object.
     *
     * If another camera is currently attached, it is replaced with the new one.
     *
     * @param camera
     *            New camera.
     */
    public void attachCamera(GVRCamera camera) {
        mCamera = camera;
        camera.setOwnerObject(this);
        NativeSceneObject.attachCamera(getNative(), camera.getNative());
    }

    /**
     * Detach the object's current {@link GVRCamera camera}.
     */
    public void detachCamera() {
        if (mCamera != null) {
            mCamera.setOwnerObject(null);
        }
        mCamera = null;
        NativeSceneObject.detachCamera(getNative());
    }

    /**
     * Get the {@link GVRCamera} attached to the object.
     *
     * @return The {@link GVRCamera camera} attached to the object. If no camera
     *         is currently attached, returns {@code null}.
     */
    public GVRCamera getCamera() {
        return mCamera;
    }

    /**
     * Attach a new {@linkplain GVRCameraRig camera rig.}
     *
     * If another camera rig is currently attached, it is replaced with the new
     * one.
     *
     * @param cameraRig
     *            New camera rig.
     */
    public void attachCameraRig(GVRCameraRig cameraRig) {
        mCameraRig = cameraRig;
        cameraRig.setOwnerObject(this);
        NativeSceneObject.attachCameraRig(getNative(), cameraRig.getNative());
    }

    /**
     * Detach the object's current {@link GVRCameraRig camera rig}.
     */
    public void detachCameraRig() {
        if (mCameraRig != null) {
            mCameraRig.setOwnerObject(null);
        }
        mCameraRig = null;
        NativeSceneObject.detachCameraRig(getNative());
    }

    /**
     * Get the attached {@link GVRCameraRig}.
     *
     * @return The {@link GVRCameraRig camera rig} attached to the object. If no
     *         camera rig is currently attached, returns {@code null}.
     */
    public GVRCameraRig getCameraRig() {
        return mCameraRig;
    }

    /**
     * Attach a new {@link GVREyePointeeHolder} to the object.
     *
     * If another {@link GVREyePointeeHolder} is currently attached, it is
     * replaced with the new one.
     *
     * @param eyePointeeHolder
     *            New {@link GVREyePointeeHolder}.
     */
    public void attachEyePointeeHolder(GVREyePointeeHolder eyePointeeHolder) {
        mEyePointeeHolder = eyePointeeHolder;
        eyePointeeHolder.setOwnerObject(this);
        NativeSceneObject.attachEyePointeeHolder(getNative(),
                eyePointeeHolder.getNative());
    }

    /**
     * Attach a default {@link GVREyePointeeHolder} to the object.
     *
     * The default holder contains a single {@link GVRMeshEyePointee}, which
     * refers to the {@linkplain GVRMesh mesh} in this scene object's
     * {@linkplain GVRRenderData render data}. If you need anything more
     * complicated (such as multiple meshes) use the
     * {@linkplain #attachEyePointeeHolder(GVREyePointeeHolder) explicit
     * overload.} If another {@link GVREyePointeeHolder} is currently attached,
     * it is replaced with the new one.
     *
     * @return {@code true} if and only this scene object has render data
     *         <em>and</em> you have called either
     *         {@link GVRRenderData#setMesh(GVRMesh)} or
     *         {@link GVRRenderData#setMesh(Future)}; {@code false}, otherwise.
     */
    public boolean attachEyePointeeHolder() {
        // Both preconditions (render data present, mesh set) must hold;
        // otherwise report failure without attaching anything.
        GVRRenderData renderData = getRenderData();
        if (renderData == null) {
            return false;
        }
        Future<GVREyePointee> eyePointee = renderData.getMeshEyePointee();
        if (eyePointee == null) {
            return false;
        }
        GVREyePointeeHolder eyePointeeHolder = new GVREyePointeeHolder(
                getGVRContext());
        eyePointeeHolder.addPointee(eyePointee);
        attachEyePointeeHolder(eyePointeeHolder);
        return true;
    }

    /**
     * Detach the object's current {@link GVREyePointeeHolder}.
     */
    public void detachEyePointeeHolder() {
        if (mEyePointeeHolder != null) {
            mEyePointeeHolder.setOwnerObject(null);
        }
        mEyePointeeHolder = null;
        NativeSceneObject.detachEyePointeeHolder(getNative());
    }

    /**
     * Get the attached {@link GVREyePointeeHolder}.
     *
     * @return The {@link GVREyePointeeHolder} attached to the object. If no
     *         {@link GVREyePointeeHolder} is currently attached, returns
     *         {@code null}.
     */
    public GVREyePointeeHolder getEyePointeeHolder() {
        return mEyePointeeHolder;
    }

    /**
     * Get the {@linkplain GVRSceneObject parent object.}
     *
     * If the object has been {@link #addChildObject(GVRSceneObject) added as a
     * child} to another {@link GVRSceneObject}, returns that object. Otherwise,
     * returns {@code null}.
     *
     * @return The parent {@link GVRSceneObject} or {@code null}.
     */
    public GVRSceneObject getParent() {
        return mParent;
    }

    /**
     * Add {@code child} as a child of this object.
     *
     * @param child
     *            {@link GVRSceneObject Object} to add as a child of this
     *            object.
     */
    public void addChildObject(GVRSceneObject child) {
        // Maintain the Java-side tree (list + parent pointer) alongside the
        // native scene graph.
        mChildren.add(child);
        child.mParent = this;
        NativeSceneObject.addChildObject(getNative(), child.getNative());
    }

    /**
     * Remove {@code child} as a child of this object.
     *
     * @param child
     *            {@link GVRSceneObject Object} to remove as a child of this
     *            object.
     */
    public void removeChildObject(GVRSceneObject child) {
        mChildren.remove(child);
        child.mParent = null;
        NativeSceneObject.removeChildObject(getNative(), child.getNative());
    }

    /**
     * Get the number of child objects.
     *
     * @return Number of {@link GVRSceneObject objects} added as children of
     *         this object.
     */
    public int getChildrenCount() {
        return mChildren.size();
    }

    /**
     * Get the child object at {@code index}.
     *
     * @param index
     *            Position of the child to get.
     * @return {@link GVRSceneObject Child object}.
     *
     * @throws IndexOutOfBoundsException
     *             if there is no child at that position.
     */
    public GVRSceneObject getChildByIndex(int index) {
        return mChildren.get(index);
    }

    /**
     * As an alternative to calling {@link #getChildrenCount()} then repeatedly
     * calling {@link #getChildByIndex(int)}, you can
     *
     * <pre>
     * for (GVRSceneObject child : parent.children()) {
     * }
     * </pre>
     *
     * @return An {@link Iterable}, so you can use Java's enhanced for loop.
     *         This {@code Iterable} gives you an {@link Iterator} that does not
     *         support {@link Iterator#remove()}.
     *         <p>
     *         At some point, this might actually return a
     *         {@code List<GVRSceneObject>}, but that would require either
     *         creating an immutable copy or writing a lot of code to support
     *         methods like {@link List#addAll(java.util.Collection)} and
     *         {@link List#clear()} - for now, we just create a very
     *         light-weight class that only supports iteration.
     */
    public Iterable<GVRSceneObject> children() {
        return new Children(this);
    }

    /**
     * Get all the children, in a single list.
     *
     * @return An un-modifiable list of this object's children.
     *
     * @since 2.0.0
     */
    public List<GVRSceneObject> getChildren() {
        return Collections.unmodifiableList(mChildren);
    }

    /** The internal list - do not make any changes! */
    List<GVRSceneObject> rawGetChildren() {
        return mChildren;
    }

    /**
     * Light-weight iteration support for {@link #children()}.
     *
     * Acts as its own {@link Iterator}, so each call to {@code children()}
     * yields a fresh, single-use iterable; {@link #remove()} is unsupported.
     */
    private static class Children implements Iterable<GVRSceneObject>,
            Iterator<GVRSceneObject> {

        private final GVRSceneObject object;
        private int index;

        private Children(GVRSceneObject object) {
            this.object = object;
            this.index = 0;
        }

        @Override
        public Iterator<GVRSceneObject> iterator() {
            // One-shot: the iterable is its own iterator.
            return this;
        }

        @Override
        public boolean hasNext() {
            return index < object.getChildrenCount();
        }

        @Override
        public GVRSceneObject next() {
            return object.getChildByIndex(index++);
        }

        @Override
        public void remove() {
            // Children must be removed via removeChildObject(), which also
            // updates the native scene graph.
            throw new UnsupportedOperationException();
        }
    }
}

/**
 * JNI surface for {@code GVRSceneObject}: thin declarations of the native
 * functions that operate on the underlying C++ scene object. Signatures must
 * stay in sync with the native library.
 */
class NativeSceneObject {
    static native long ctor();

    static native String getName(long sceneObject);

    static native void setName(long sceneObject, String name);

    static native void attachTransform(long sceneObject, long transform);

    static native void detachTransform(long sceneObject);

    static native void attachRenderData(long sceneObject, long renderData);

    static native void detachRenderData(long sceneObject);

    static native void attachCamera(long sceneObject, long camera);

    static native void detachCamera(long sceneObject);

    static native void attachCameraRig(long sceneObject, long cameraRig);

    static native void detachCameraRig(long sceneObject);

    static native void attachEyePointeeHolder(long sceneObject,
            long eyePointeeHolder);

    static native void detachEyePointeeHolder(long sceneObject);

    static native long setParent(long sceneObject, long parent);

    static native void addChildObject(long sceneObject, long child);

    static native void removeChildObject(long sceneObject, long child);
}
/** * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.basics.date; import java.io.Serializable; import java.time.LocalDate; import java.time.Period; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import org.joda.beans.Bean; import org.joda.beans.BeanDefinition; import org.joda.beans.ImmutableBean; import org.joda.beans.ImmutableValidator; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectFieldsBeanBuilder; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import com.opengamma.strata.basics.ReferenceData; import com.opengamma.strata.basics.Resolvable; /** * An adjustment that alters a date by adding a tenor. * <p> * This adjustment adds a {@link Tenor} to the input date using an addition convention, * followed by an adjustment to ensure the result is a valid business day. * <p> * Addition is performed using standard calendar addition. * It is not possible to add a number of business days using this class. * See {@link DaysAdjustment} for an alternative that can handle addition of business days. * <p> * There are two steps in the calculation: * <p> * In step one, the period is added using the specified {@link PeriodAdditionConvention}. * <p> * In step two, the result of step one is optionally adjusted to be a business day * using a {@code BusinessDayAdjustment}. * <p> * For example, a rule represented by this class might be: "the end date is 5 years after * the start date, with end-of-month rule based on the last business day of the month, * adjusted to be a valid London business day using the 'ModifiedFollowing' convention". 
*/
@BeanDefinition
public final class TenorAdjustment
    implements Resolvable<DateAdjuster>, ImmutableBean, Serializable {

  /**
   * The tenor to be added.
   * <p>
   * When the adjustment is performed, this tenor will be added to the input date.
   */
  @PropertyDefinition(validate = "notNull")
  private final Tenor tenor;
  /**
   * The addition convention to apply.
   * <p>
   * When the adjustment is performed, this convention is used to refine the adjusted date.
   * The most common convention is to move the end date to the last business day of the month
   * if the start date is the last business day of the month.
   */
  @PropertyDefinition(validate = "notNull")
  private final PeriodAdditionConvention additionConvention;
  /**
   * The business day adjustment that is performed to the result of the addition.
   * <p>
   * This adjustment is applied to the result of the addition calculation.
   * <p>
   * If no adjustment is required, use the 'None' business day adjustment.
   */
  @PropertyDefinition(validate = "notNull")
  private final BusinessDayAdjustment adjustment;

  //-------------------------------------------------------------------------
  /**
   * Obtains an instance that can adjust a date by the specified tenor.
   * <p>
   * When adjusting a date, the specified tenor is added to the input date.
   * The business day adjustment will then be used to ensure the result is a valid business day.
   *
   * @param tenor  the tenor to add to the input date
   * @param additionConvention  the convention used to perform the addition
   * @param adjustment  the business day adjustment to apply to the result of the addition
   * @return the tenor adjustment
   */
  public static TenorAdjustment of(
      Tenor tenor,
      PeriodAdditionConvention additionConvention,
      BusinessDayAdjustment adjustment) {
    return new TenorAdjustment(tenor, additionConvention, adjustment);
  }

  /**
   * Obtains an instance that can adjust a date by the specified tenor using the
   * last day of month convention.
   * <p>
   * When adjusting a date, the specified tenor is added to the input date.
   * The business day adjustment will then be used to ensure the result is a valid business day.
   * <p>
   * The period must consist only of months and/or years.
   *
   * @param tenor  the tenor to add to the input date
   * @param adjustment  the business day adjustment to apply to the result of the addition
   * @return the tenor adjustment
   */
  public static TenorAdjustment ofLastDay(Tenor tenor, BusinessDayAdjustment adjustment) {
    return new TenorAdjustment(tenor, PeriodAdditionConventions.LAST_DAY, adjustment);
  }

  /**
   * Obtains an instance that can adjust a date by the specified tenor using the
   * last business day of month convention.
   * <p>
   * When adjusting a date, the specified tenor is added to the input date.
   * The business day adjustment will then be used to ensure the result is a valid business day.
   * <p>
   * The period must consist only of months and/or years.
   *
   * @param tenor  the tenor to add to the input date
   * @param adjustment  the business day adjustment to apply to the result of the addition
   * @return the tenor adjustment
   */
  public static TenorAdjustment ofLastBusinessDay(Tenor tenor, BusinessDayAdjustment adjustment) {
    return new TenorAdjustment(tenor, PeriodAdditionConventions.LAST_BUSINESS_DAY, adjustment);
  }

  //-------------------------------------------------------------------------
  // invoked by the Joda-Beans generated constructor after the notNull checks:
  // month-based conventions (last-day / last-business-day) are undefined for
  // tenors containing a day component, so reject that combination up front
  @ImmutableValidator
  private void validate() {
    if (additionConvention.isMonthBased() && tenor.isMonthBased() == false) {
      throw new IllegalArgumentException("Tenor must not contain days when addition convention is month-based");
    }
  }

  //-------------------------------------------------------------------------
  /**
   * Adjusts the date, adding the tenor and then applying the business day adjustment.
   * <p>
   * The calculation is performed in two steps.
   * <p>
   * Step one, use {@link PeriodAdditionConvention#adjust(LocalDate, Period, HolidayCalendar)} to add the period.
   * <p>
   * Step two, use {@link BusinessDayAdjustment#adjust(LocalDate, ReferenceData)} to adjust the result of step one.
   *
   * @param date  the date to adjust
   * @param refData  the reference data, used to find the holiday calendar
   * @return the adjusted date
   */
  public LocalDate adjust(LocalDate date, ReferenceData refData) {
    HolidayCalendar holCal = adjustment.getCalendar().resolve(refData);
    BusinessDayConvention bda = adjustment.getConvention();
    return bda.adjust(additionConvention.adjust(date, tenor.getPeriod(), holCal), holCal);
  }

  /**
   * Resolves this adjustment using the specified reference data, returning an adjuster.
   * <p>
   * This returns a {@link DateAdjuster} that performs the same calculation as this adjustment.
   * It binds the holiday calendar, looked up from the reference data, into the result.
   * As such, there is no need to pass the reference data in again.
   *
   * @param refData  the reference data, used to find the holiday calendar
   * @return the adjuster, bound to a specific holiday calendar
   */
  @Override
  public DateAdjuster resolve(ReferenceData refData) {
    // resolve the calendar once, outside the lambda, so repeated calls to
    // the returned adjuster avoid the reference-data lookup
    HolidayCalendar holCal = adjustment.getCalendar().resolve(refData);
    BusinessDayConvention bda = adjustment.getConvention();
    Period period = tenor.getPeriod();
    return date -> bda.adjust(additionConvention.adjust(date, period, holCal), holCal);
  }

  //-------------------------------------------------------------------------
  /**
   * Returns a string describing the adjustment.
   *
   * @return the descriptive string
   */
  @Override
  public String toString() {
    // only mention the convention/adjustment parts that are non-trivial
    StringBuilder buf = new StringBuilder(64);
    buf.append(tenor);
    if (additionConvention != PeriodAdditionConventions.NONE) {
      buf.append(" with ").append(additionConvention);
    }
    if (adjustment.equals(BusinessDayAdjustment.NONE) == false) {
      buf.append(" then apply ").append(adjustment);
    }
    return buf.toString();
  }

  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  // NOTE(review): everything between the AUTOGENERATED markers is produced by
  // the Joda-Beans code generator and is regenerated from the annotated
  // properties above - do not hand-edit this region.
  /**
   * The meta-bean for {@code TenorAdjustment}.
   * @return the meta-bean, not null
   */
  public static TenorAdjustment.Meta meta() {
    return TenorAdjustment.Meta.INSTANCE;
  }

  static {
    JodaBeanUtils.registerMetaBean(TenorAdjustment.Meta.INSTANCE);
  }

  /**
   * The serialization version id.
   */
  private static final long serialVersionUID = 1L;

  /**
   * Returns a builder used to create an instance of the bean.
   * @return the builder, not null
   */
  public static TenorAdjustment.Builder builder() {
    return new TenorAdjustment.Builder();
  }

  private TenorAdjustment(
      Tenor tenor,
      PeriodAdditionConvention additionConvention,
      BusinessDayAdjustment adjustment) {
    JodaBeanUtils.notNull(tenor, "tenor");
    JodaBeanUtils.notNull(additionConvention, "additionConvention");
    JodaBeanUtils.notNull(adjustment, "adjustment");
    this.tenor = tenor;
    this.additionConvention = additionConvention;
    this.adjustment = adjustment;
    validate();
  }

  @Override
  public TenorAdjustment.Meta metaBean() {
    return TenorAdjustment.Meta.INSTANCE;
  }

  @Override
  public <R> Property<R> property(String propertyName) {
    return metaBean().<R>metaProperty(propertyName).createProperty(this);
  }

  @Override
  public Set<String> propertyNames() {
    return metaBean().metaPropertyMap().keySet();
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the tenor to be added.
   * <p>
   * When the adjustment is performed, this tenor will be added to the input date.
   * @return the value of the property, not null
   */
  public Tenor getTenor() {
    return tenor;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the addition convention to apply.
   * <p>
   * When the adjustment is performed, this convention is used to refine the adjusted date.
   * The most common convention is to move the end date to the last business day of the month
   * if the start date is the last business day of the month.
   * @return the value of the property, not null
   */
  public PeriodAdditionConvention getAdditionConvention() {
    return additionConvention;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the business day adjustment that is performed to the result of the addition.
   * <p>
   * This adjustment is applied to the result of the addition calculation.
   * <p>
   * If no adjustment is required, use the 'None' business day adjustment.
   * @return the value of the property, not null
   */
  public BusinessDayAdjustment getAdjustment() {
    return adjustment;
  }

  //-----------------------------------------------------------------------
  /**
   * Returns a builder that allows this bean to be mutated.
   * @return the mutable builder, not null
   */
  public Builder toBuilder() {
    return new Builder(this);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      TenorAdjustment other = (TenorAdjustment) obj;
      return JodaBeanUtils.equal(tenor, other.tenor) &&
          JodaBeanUtils.equal(additionConvention, other.additionConvention) &&
          JodaBeanUtils.equal(adjustment, other.adjustment);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(tenor);
    hash = hash * 31 + JodaBeanUtils.hashCode(additionConvention);
    hash = hash * 31 + JodaBeanUtils.hashCode(adjustment);
    return hash;
  }

  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code TenorAdjustment}.
   */
  public static final class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();

    /**
     * The meta-property for the {@code tenor} property.
     */
    private final MetaProperty<Tenor> tenor = DirectMetaProperty.ofImmutable(
        this, "tenor", TenorAdjustment.class, Tenor.class);
    /**
     * The meta-property for the {@code additionConvention} property.
     */
    private final MetaProperty<PeriodAdditionConvention> additionConvention = DirectMetaProperty.ofImmutable(
        this, "additionConvention", TenorAdjustment.class, PeriodAdditionConvention.class);
    /**
     * The meta-property for the {@code adjustment} property.
     */
    private final MetaProperty<BusinessDayAdjustment> adjustment = DirectMetaProperty.ofImmutable(
        this, "adjustment", TenorAdjustment.class, BusinessDayAdjustment.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "tenor",
        "additionConvention",
        "adjustment");

    /**
     * Restricted constructor.
     */
    private Meta() {
    }

    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      // case labels are the hashCode() values of the property names,
      // precomputed by the Joda-Beans generator
      switch (propertyName.hashCode()) {
        case 110246592:  // tenor
          return tenor;
        case 1652975501:  // additionConvention
          return additionConvention;
        case 1977085293:  // adjustment
          return adjustment;
      }
      return super.metaPropertyGet(propertyName);
    }

    @Override
    public TenorAdjustment.Builder builder() {
      return new TenorAdjustment.Builder();
    }

    @Override
    public Class<? extends TenorAdjustment> beanType() {
      return TenorAdjustment.class;
    }

    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return metaPropertyMap$;
    }

    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code tenor} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Tenor> tenor() {
      return tenor;
    }

    /**
     * The meta-property for the {@code additionConvention} property.
     * @return the meta-property, not null
     */
    public MetaProperty<PeriodAdditionConvention> additionConvention() {
      return additionConvention;
    }

    /**
     * The meta-property for the {@code adjustment} property.
     * @return the meta-property, not null
     */
    public MetaProperty<BusinessDayAdjustment> adjustment() {
      return adjustment;
    }

    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 110246592:  // tenor
          return ((TenorAdjustment) bean).getTenor();
        case 1652975501:  // additionConvention
          return ((TenorAdjustment) bean).getAdditionConvention();
        case 1977085293:  // adjustment
          return ((TenorAdjustment) bean).getAdjustment();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }

    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      // immutable bean: property writes are rejected (silently when quiet)
      metaProperty(propertyName);
      if (quiet) {
        return;
      }
      throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
    }

  }

  //-----------------------------------------------------------------------
  /**
   * The bean-builder for {@code TenorAdjustment}.
   */
  public static final class Builder extends DirectFieldsBeanBuilder<TenorAdjustment> {

    private Tenor tenor;
    private PeriodAdditionConvention additionConvention;
    private BusinessDayAdjustment adjustment;

    /**
     * Restricted constructor.
     */
    private Builder() {
    }

    /**
     * Restricted copy constructor.
     * @param beanToCopy  the bean to copy from, not null
     */
    private Builder(TenorAdjustment beanToCopy) {
      this.tenor = beanToCopy.getTenor();
      this.additionConvention = beanToCopy.getAdditionConvention();
      this.adjustment = beanToCopy.getAdjustment();
    }

    //-----------------------------------------------------------------------
    @Override
    public Object get(String propertyName) {
      switch (propertyName.hashCode()) {
        case 110246592:  // tenor
          return tenor;
        case 1652975501:  // additionConvention
          return additionConvention;
        case 1977085293:  // adjustment
          return adjustment;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
    }

    @Override
    public Builder set(String propertyName, Object newValue) {
      switch (propertyName.hashCode()) {
        case 110246592:  // tenor
          this.tenor = (Tenor) newValue;
          break;
        case 1652975501:  // additionConvention
          this.additionConvention = (PeriodAdditionConvention) newValue;
          break;
        case 1977085293:  // adjustment
          this.adjustment = (BusinessDayAdjustment) newValue;
          break;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
      return this;
    }

    @Override
    public Builder set(MetaProperty<?> property, Object value) {
      super.set(property, value);
      return this;
    }

    @Override
    public Builder setString(String propertyName, String value) {
      setString(meta().metaProperty(propertyName), value);
      return this;
    }

    @Override
    public Builder setString(MetaProperty<?> property, String value) {
      super.setString(property, value);
      return this;
    }

    @Override
    public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
      super.setAll(propertyValueMap);
      return this;
    }

    @Override
    public TenorAdjustment build() {
      return new TenorAdjustment(
          tenor,
          additionConvention,
          adjustment);
    }

    //-----------------------------------------------------------------------
    /**
     * Sets the tenor to be added.
     * <p>
     * When the adjustment is performed, this tenor will be added to the input date.
     * @param tenor  the new value, not null
     * @return this, for chaining, not null
     */
    public Builder tenor(Tenor tenor) {
      JodaBeanUtils.notNull(tenor, "tenor");
      this.tenor = tenor;
      return this;
    }

    /**
     * Sets the addition convention to apply.
     * <p>
     * When the adjustment is performed, this convention is used to refine the adjusted date.
     * The most common convention is to move the end date to the last business day of the month
     * if the start date is the last business day of the month.
     * @param additionConvention  the new value, not null
     * @return this, for chaining, not null
     */
    public Builder additionConvention(PeriodAdditionConvention additionConvention) {
      JodaBeanUtils.notNull(additionConvention, "additionConvention");
      this.additionConvention = additionConvention;
      return this;
    }

    /**
     * Sets the business day adjustment that is performed to the result of the addition.
     * <p>
     * This adjustment is applied to the result of the addition calculation.
     * <p>
     * If no adjustment is required, use the 'None' business day adjustment.
     * @param adjustment  the new value, not null
     * @return this, for chaining, not null
     */
    public Builder adjustment(BusinessDayAdjustment adjustment) {
      JodaBeanUtils.notNull(adjustment, "adjustment");
      this.adjustment = adjustment;
      return this;
    }

    //-----------------------------------------------------------------------
    @Override
    public String toString() {
      StringBuilder buf = new StringBuilder(128);
      buf.append("TenorAdjustment.Builder{");
      buf.append("tenor").append('=').append(JodaBeanUtils.toString(tenor)).append(',').append(' ');
      buf.append("additionConvention").append('=').append(JodaBeanUtils.toString(additionConvention)).append(',').append(' ');
      buf.append("adjustment").append('=').append(JodaBeanUtils.toString(adjustment));
      buf.append('}');
      return buf.toString();
    }

  }

  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver12; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import org.jboss.netty.buffer.ChannelBuffer; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFFlowModFailedErrorMsgVer12 implements OFFlowModFailedErrorMsg { private static final Logger logger = LoggerFactory.getLogger(OFFlowModFailedErrorMsgVer12.class); // version: 1.2 final static byte WIRE_VERSION = 3; final static int MINIMUM_LENGTH = 12; private final static long DEFAULT_XID = 0x0L; private final static OFErrorCauseData DEFAULT_DATA = OFErrorCauseData.NONE; // OF message fields private final long xid; private final OFFlowModFailedCode code; private final OFErrorCauseData data; // // package private constructor 
- used by readers, builders, and factory OFFlowModFailedErrorMsgVer12(long xid, OFFlowModFailedCode code, OFErrorCauseData data) { if(code == null) { throw new NullPointerException("OFFlowModFailedErrorMsgVer12: property code cannot be null"); } if(data == null) { throw new NullPointerException("OFFlowModFailedErrorMsgVer12: property data cannot be null"); } this.xid = xid; this.code = code; this.data = data; } // Accessors for OF message fields @Override public OFVersion getVersion() { return OFVersion.OF_12; } @Override public OFType getType() { return OFType.ERROR; } @Override public long getXid() { return xid; } @Override public OFErrorType getErrType() { return OFErrorType.FLOW_MOD_FAILED; } @Override public OFFlowModFailedCode getCode() { return code; } @Override public OFErrorCauseData getData() { return data; } public OFFlowModFailedErrorMsg.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFFlowModFailedErrorMsg.Builder { final OFFlowModFailedErrorMsgVer12 parentMessage; // OF message fields private boolean xidSet; private long xid; private boolean codeSet; private OFFlowModFailedCode code; private boolean dataSet; private OFErrorCauseData data; BuilderWithParent(OFFlowModFailedErrorMsgVer12 parentMessage) { this.parentMessage = parentMessage; } @Override public OFVersion getVersion() { return OFVersion.OF_12; } @Override public OFType getType() { return OFType.ERROR; } @Override public long getXid() { return xid; } @Override public OFFlowModFailedErrorMsg.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } @Override public OFErrorType getErrType() { return OFErrorType.FLOW_MOD_FAILED; } @Override public OFFlowModFailedCode getCode() { return code; } @Override public OFFlowModFailedErrorMsg.Builder setCode(OFFlowModFailedCode code) { this.code = code; this.codeSet = true; return this; } @Override public OFErrorCauseData getData() { return data; } @Override public 
OFFlowModFailedErrorMsg.Builder setData(OFErrorCauseData data) { this.data = data; this.dataSet = true; return this; } @Override public OFFlowModFailedErrorMsg build() { long xid = this.xidSet ? this.xid : parentMessage.xid; OFFlowModFailedCode code = this.codeSet ? this.code : parentMessage.code; if(code == null) throw new NullPointerException("Property code must not be null"); OFErrorCauseData data = this.dataSet ? this.data : parentMessage.data; if(data == null) throw new NullPointerException("Property data must not be null"); // return new OFFlowModFailedErrorMsgVer12( xid, code, data ); } } static class Builder implements OFFlowModFailedErrorMsg.Builder { // OF message fields private boolean xidSet; private long xid; private boolean codeSet; private OFFlowModFailedCode code; private boolean dataSet; private OFErrorCauseData data; @Override public OFVersion getVersion() { return OFVersion.OF_12; } @Override public OFType getType() { return OFType.ERROR; } @Override public long getXid() { return xid; } @Override public OFFlowModFailedErrorMsg.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } @Override public OFErrorType getErrType() { return OFErrorType.FLOW_MOD_FAILED; } @Override public OFFlowModFailedCode getCode() { return code; } @Override public OFFlowModFailedErrorMsg.Builder setCode(OFFlowModFailedCode code) { this.code = code; this.codeSet = true; return this; } @Override public OFErrorCauseData getData() { return data; } @Override public OFFlowModFailedErrorMsg.Builder setData(OFErrorCauseData data) { this.data = data; this.dataSet = true; return this; } // @Override public OFFlowModFailedErrorMsg build() { long xid = this.xidSet ? this.xid : DEFAULT_XID; if(!this.codeSet) throw new IllegalStateException("Property code doesn't have default value -- must be set"); if(code == null) throw new NullPointerException("Property code must not be null"); OFErrorCauseData data = this.dataSet ? 
this.data : DEFAULT_DATA; if(data == null) throw new NullPointerException("Property data must not be null"); return new OFFlowModFailedErrorMsgVer12( xid, code, data ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFFlowModFailedErrorMsg> { @Override public OFFlowModFailedErrorMsg readFrom(ChannelBuffer bb) throws OFParseError { int start = bb.readerIndex(); // fixed value property version == 3 byte version = bb.readByte(); if(version != (byte) 0x3) throw new OFParseError("Wrong version: Expected=OFVersion.OF_12(3), got="+version); // fixed value property type == 1 byte type = bb.readByte(); if(type != (byte) 0x1) throw new OFParseError("Wrong type: Expected=OFType.ERROR(1), got="+type); int length = U16.f(bb.readShort()); if(length < MINIMUM_LENGTH) throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); long xid = U32.f(bb.readInt()); // fixed value property errType == 5 short errType = bb.readShort(); if(errType != (short) 0x5) throw new OFParseError("Wrong errType: Expected=OFErrorType.FLOW_MOD_FAILED(5), got="+errType); OFFlowModFailedCode code = OFFlowModFailedCodeSerializerVer12.readFrom(bb); OFErrorCauseData data = OFErrorCauseData.read(bb, length - (bb.readerIndex() - start), OFVersion.OF_12); OFFlowModFailedErrorMsgVer12 flowModFailedErrorMsgVer12 = new OFFlowModFailedErrorMsgVer12( xid, code, data ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", flowModFailedErrorMsgVer12); return flowModFailedErrorMsgVer12; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFFlowModFailedErrorMsgVer12Funnel FUNNEL = new OFFlowModFailedErrorMsgVer12Funnel(); static class OFFlowModFailedErrorMsgVer12Funnel implements 
Funnel<OFFlowModFailedErrorMsgVer12> { private static final long serialVersionUID = 1L; @Override public void funnel(OFFlowModFailedErrorMsgVer12 message, PrimitiveSink sink) { // fixed value property version = 3 sink.putByte((byte) 0x3); // fixed value property type = 1 sink.putByte((byte) 0x1); // FIXME: skip funnel of length sink.putLong(message.xid); // fixed value property errType = 5 sink.putShort((short) 0x5); OFFlowModFailedCodeSerializerVer12.putTo(message.code, sink); message.data.putTo(sink); } } public void writeTo(ChannelBuffer bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFFlowModFailedErrorMsgVer12> { @Override public void write(ChannelBuffer bb, OFFlowModFailedErrorMsgVer12 message) { int startIndex = bb.writerIndex(); // fixed value property version = 3 bb.writeByte((byte) 0x3); // fixed value property type = 1 bb.writeByte((byte) 0x1); // length is length of variable message, will be updated at the end int lengthIndex = bb.writerIndex(); bb.writeShort(U16.t(0)); bb.writeInt(U32.t(message.xid)); // fixed value property errType = 5 bb.writeShort((short) 0x5); OFFlowModFailedCodeSerializerVer12.writeTo(bb, message.code); message.data.writeTo(bb); // update length field int length = bb.writerIndex() - startIndex; bb.setShort(lengthIndex, length); } } @Override public String toString() { StringBuilder b = new StringBuilder("OFFlowModFailedErrorMsgVer12("); b.append("xid=").append(xid); b.append(", "); b.append("code=").append(code); b.append(", "); b.append("data=").append(data); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFFlowModFailedErrorMsgVer12 other = (OFFlowModFailedErrorMsgVer12) obj; if( xid != other.xid) return false; if (code == null) { if (other.code != null) return false; } else if (!code.equals(other.code)) return 
false; if (data == null) { if (other.data != null) return false; } else if (!data.equals(other.data)) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * (int) (xid ^ (xid >>> 32)); result = prime * result + ((code == null) ? 0 : code.hashCode()); result = prime * result + ((data == null) ? 0 : data.hashCode()); return result; } }
package br.com.university.domain; import br.com.university.config.Constants; import com.fasterxml.jackson.annotation.JsonIgnore; import org.hibernate.annotations.BatchSize; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; import org.hibernate.validator.constraints.Email; import org.springframework.data.elasticsearch.annotations.Document; import javax.persistence.*; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; import java.io.Serializable; import java.util.HashSet; import java.util.Locale; import java.util.Set; import java.time.Instant; /** * A user. */ @Entity @Table(name = "jhi_user") @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) @Document(indexName = "user") public class User extends AbstractAuditingEntity implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sequenceGenerator") @SequenceGenerator(name = "sequenceGenerator") private Long id; @NotNull @Pattern(regexp = Constants.LOGIN_REGEX) @Size(min = 1, max = 50) @Column(length = 50, unique = true, nullable = false) private String login; @JsonIgnore @NotNull @Size(min = 60, max = 60) @Column(name = "password_hash",length = 60) private String password; @Size(max = 50) @Column(name = "first_name", length = 50) private String firstName; @Size(max = 50) @Column(name = "last_name", length = 50) private String lastName; @Email @Size(min = 5, max = 100) @Column(length = 100, unique = true) private String email; @NotNull @Column(nullable = false) private boolean activated = false; @Size(min = 2, max = 5) @Column(name = "lang_key", length = 5) private String langKey; @Size(max = 256) @Column(name = "image_url", length = 256) private String imageUrl; @Size(max = 20) @Column(name = "activation_key", length = 20) @JsonIgnore private String activationKey; @Size(max = 20) @Column(name = 
"reset_key", length = 20) @JsonIgnore private String resetKey; @Column(name = "reset_date") private Instant resetDate = null; @JsonIgnore @ManyToMany @JoinTable( name = "jhi_user_authority", joinColumns = {@JoinColumn(name = "user_id", referencedColumnName = "id")}, inverseJoinColumns = {@JoinColumn(name = "authority_name", referencedColumnName = "name")}) @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) @BatchSize(size = 20) private Set<Authority> authorities = new HashSet<>(); public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getLogin() { return login; } //Lowercase the login before saving it in database public void setLogin(String login) { this.login = login.toLowerCase(Locale.ENGLISH); } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getImageUrl() { return imageUrl; } public void setImageUrl(String imageUrl) { this.imageUrl = imageUrl; } public boolean getActivated() { return activated; } public void setActivated(boolean activated) { this.activated = activated; } public String getActivationKey() { return activationKey; } public void setActivationKey(String activationKey) { this.activationKey = activationKey; } public String getResetKey() { return resetKey; } public void setResetKey(String resetKey) { this.resetKey = resetKey; } public Instant getResetDate() { return resetDate; } public void setResetDate(Instant resetDate) { this.resetDate = resetDate; } public String getLangKey() { return langKey; } public void setLangKey(String langKey) { this.langKey = langKey; } public 
Set<Authority> getAuthorities() { return authorities; } public void setAuthorities(Set<Authority> authorities) { this.authorities = authorities; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } User user = (User) o; return login.equals(user.login); } @Override public int hashCode() { return login.hashCode(); } @Override public String toString() { return "User{" + "login='" + login + '\'' + ", firstName='" + firstName + '\'' + ", lastName='" + lastName + '\'' + ", email='" + email + '\'' + ", imageUrl='" + imageUrl + '\'' + ", activated='" + activated + '\'' + ", langKey='" + langKey + '\'' + ", activationKey='" + activationKey + '\'' + "}"; } }
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.admanager.jaxws.v202102; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; /** * * Describes predicted inventory availability for a {@link ProspectiveLineItem}. * * <p>Inventory has three threshold values along a line of possible inventory. From least to most, * these are: * * <ul> * <li>Available units -- How many units can be booked without affecting any other line items. * Booking more than this number can cause lower and same priority line items to underdeliver. * <li>Possible units -- How many units can be booked without affecting any higher priority line * items. Booking more than this number can cause the line item to underdeliver. * <li>Matched (forecast) units -- How many units satisfy all specified criteria. * </ul> * * <p>Underdelivery is caused by overbooking. However, if more impressions are served than are * predicted, the extra available inventory might enable all inventory guarantees to be met without * overbooking. * * * <p>Java class for AvailabilityForecast complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="AvailabilityForecast"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="lineItemId" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="orderId" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="unitType" type="{https://www.google.com/apis/ads/publisher/v202102}UnitType" minOccurs="0"/> * &lt;element name="availableUnits" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="deliveredUnits" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="matchedUnits" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="possibleUnits" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="reservedUnits" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/> * &lt;element name="breakdowns" type="{https://www.google.com/apis/ads/publisher/v202102}ForecastBreakdown" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="targetingCriteriaBreakdowns" type="{https://www.google.com/apis/ads/publisher/v202102}TargetingCriteriaBreakdown" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="contendingLineItems" type="{https://www.google.com/apis/ads/publisher/v202102}ContendingLineItem" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="alternativeUnitTypeForecasts" type="{https://www.google.com/apis/ads/publisher/v202102}AlternativeUnitTypeForecast" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="demographicBreakdowns" type="{https://www.google.com/apis/ads/publisher/v202102}GrpDemographicBreakdown" maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "AvailabilityForecast", propOrder = { "lineItemId", "orderId", "unitType", 
"availableUnits", "deliveredUnits", "matchedUnits", "possibleUnits", "reservedUnits", "breakdowns", "targetingCriteriaBreakdowns", "contendingLineItems", "alternativeUnitTypeForecasts", "demographicBreakdowns" }) public class AvailabilityForecast { protected Long lineItemId; protected Long orderId; @XmlSchemaType(name = "string") protected UnitType unitType; protected Long availableUnits; protected Long deliveredUnits; protected Long matchedUnits; protected Long possibleUnits; protected Long reservedUnits; protected List<ForecastBreakdown> breakdowns; protected List<TargetingCriteriaBreakdown> targetingCriteriaBreakdowns; protected List<ContendingLineItem> contendingLineItems; protected List<AlternativeUnitTypeForecast> alternativeUnitTypeForecasts; protected List<GrpDemographicBreakdown> demographicBreakdowns; /** * Gets the value of the lineItemId property. * * @return * possible object is * {@link Long } * */ public Long getLineItemId() { return lineItemId; } /** * Sets the value of the lineItemId property. * * @param value * allowed object is * {@link Long } * */ public void setLineItemId(Long value) { this.lineItemId = value; } /** * Gets the value of the orderId property. * * @return * possible object is * {@link Long } * */ public Long getOrderId() { return orderId; } /** * Sets the value of the orderId property. * * @param value * allowed object is * {@link Long } * */ public void setOrderId(Long value) { this.orderId = value; } /** * Gets the value of the unitType property. * * @return * possible object is * {@link UnitType } * */ public UnitType getUnitType() { return unitType; } /** * Sets the value of the unitType property. * * @param value * allowed object is * {@link UnitType } * */ public void setUnitType(UnitType value) { this.unitType = value; } /** * Gets the value of the availableUnits property. 
* * @return * possible object is * {@link Long } * */ public Long getAvailableUnits() { return availableUnits; } /** * Sets the value of the availableUnits property. * * @param value * allowed object is * {@link Long } * */ public void setAvailableUnits(Long value) { this.availableUnits = value; } /** * Gets the value of the deliveredUnits property. * * @return * possible object is * {@link Long } * */ public Long getDeliveredUnits() { return deliveredUnits; } /** * Sets the value of the deliveredUnits property. * * @param value * allowed object is * {@link Long } * */ public void setDeliveredUnits(Long value) { this.deliveredUnits = value; } /** * Gets the value of the matchedUnits property. * * @return * possible object is * {@link Long } * */ public Long getMatchedUnits() { return matchedUnits; } /** * Sets the value of the matchedUnits property. * * @param value * allowed object is * {@link Long } * */ public void setMatchedUnits(Long value) { this.matchedUnits = value; } /** * Gets the value of the possibleUnits property. * * @return * possible object is * {@link Long } * */ public Long getPossibleUnits() { return possibleUnits; } /** * Sets the value of the possibleUnits property. * * @param value * allowed object is * {@link Long } * */ public void setPossibleUnits(Long value) { this.possibleUnits = value; } /** * Gets the value of the reservedUnits property. * * @return * possible object is * {@link Long } * */ public Long getReservedUnits() { return reservedUnits; } /** * Sets the value of the reservedUnits property. * * @param value * allowed object is * {@link Long } * */ public void setReservedUnits(Long value) { this.reservedUnits = value; } /** * Gets the value of the breakdowns property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the breakdowns property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getBreakdowns().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link ForecastBreakdown } * * */ public List<ForecastBreakdown> getBreakdowns() { if (breakdowns == null) { breakdowns = new ArrayList<ForecastBreakdown>(); } return this.breakdowns; } /** * Gets the value of the targetingCriteriaBreakdowns property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the targetingCriteriaBreakdowns property. * * <p> * For example, to add a new item, do as follows: * <pre> * getTargetingCriteriaBreakdowns().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link TargetingCriteriaBreakdown } * * */ public List<TargetingCriteriaBreakdown> getTargetingCriteriaBreakdowns() { if (targetingCriteriaBreakdowns == null) { targetingCriteriaBreakdowns = new ArrayList<TargetingCriteriaBreakdown>(); } return this.targetingCriteriaBreakdowns; } /** * Gets the value of the contendingLineItems property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the contendingLineItems property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getContendingLineItems().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link ContendingLineItem } * * */ public List<ContendingLineItem> getContendingLineItems() { if (contendingLineItems == null) { contendingLineItems = new ArrayList<ContendingLineItem>(); } return this.contendingLineItems; } /** * Gets the value of the alternativeUnitTypeForecasts property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the alternativeUnitTypeForecasts property. * * <p> * For example, to add a new item, do as follows: * <pre> * getAlternativeUnitTypeForecasts().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link AlternativeUnitTypeForecast } * * */ public List<AlternativeUnitTypeForecast> getAlternativeUnitTypeForecasts() { if (alternativeUnitTypeForecasts == null) { alternativeUnitTypeForecasts = new ArrayList<AlternativeUnitTypeForecast>(); } return this.alternativeUnitTypeForecasts; } /** * Gets the value of the demographicBreakdowns property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the demographicBreakdowns property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getDemographicBreakdowns().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link GrpDemographicBreakdown } * * */ public List<GrpDemographicBreakdown> getDemographicBreakdowns() { if (demographicBreakdowns == null) { demographicBreakdowns = new ArrayList<GrpDemographicBreakdown>(); } return this.demographicBreakdowns; } }
/*
 * Copyright (C) 2009-2013 The Project Lombok Authors.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
*/
package lombok.eclipse;

import static lombok.eclipse.Eclipse.toQualifiedName;
import static lombok.eclipse.handlers.EclipseHandlerUtil.*;

import java.io.IOException;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.WeakHashMap;

import lombok.Lombok;
import lombok.core.AST.Kind;
import lombok.core.AnnotationValues;
import lombok.core.AnnotationValues.AnnotationValueDecodeFail;
import lombok.core.HandlerPriority;
import lombok.core.SpiLoadUtil;
import lombok.core.TypeLibrary;
import lombok.core.TypeResolver;

import org.eclipse.jdt.internal.compiler.ast.ASTNode;
import org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration;
import org.eclipse.jdt.internal.compiler.ast.TypeDeclaration;
import org.eclipse.jdt.internal.compiler.ast.TypeReference;
import org.eclipse.jdt.internal.compiler.impl.ITypeRequestor;
import org.eclipse.jdt.internal.compiler.lookup.CompilationUnitScope;
import org.eclipse.jdt.internal.compiler.lookup.TypeBinding;

/**
 * This class tracks 'handlers' and knows how to invoke them for any given AST node.
 *
 * This class can find the handlers (via SPI discovery) and will set up the given AST node, such as
 * building an AnnotationValues instance.
 */
public class HandlerLibrary {
	/**
	 * Creates a new HandlerLibrary. Errors will be reported to the Eclipse Error log.
	 * You probably want to use {@link #load()} instead, which also discovers and
	 * registers all handlers.
	 */
	public HandlerLibrary() {}
	
	// Maps annotation type names to fully qualified names for fast lookup in handleAnnotation().
	private TypeLibrary typeLibrary = new TypeLibrary();
	
	/** Pairs an {@link EclipseASTVisitor} with its dispatch metadata (priority, diet-parse deferral). */
	private static class VisitorContainer {
		private final EclipseASTVisitor visitor;
		private final long priority;
		private final boolean deferUntilPostDiet;
		
		VisitorContainer(EclipseASTVisitor visitor) {
			this.visitor = visitor;
			this.deferUntilPostDiet = visitor.getClass().isAnnotationPresent(DeferUntilPostDiet.class);
			HandlerPriority hp = visitor.getClass().getAnnotation(HandlerPriority.class);
			// Pack @HandlerPriority(value, subValue) into one sortable long: value in the
			// high 32 bits, subValue in the low 32. Missing annotation means priority 0.
			this.priority = hp == null ? 0L : (((long)hp.value()) << 32) + hp.subValue();
		}
		
		/** Whether the visitor must wait until method bodies have been parsed (post-diet). */
		public boolean deferUntilPostDiet() {
			return deferUntilPostDiet;
		}
		
		public long getPriority() {
			return priority;
		}
	}
	
	/** Pairs an {@link EclipseAnnotationHandler} with the annotation it handles and its dispatch metadata. */
	private static class AnnotationHandlerContainer<T extends Annotation> {
		private final EclipseAnnotationHandler<T> handler;
		private final Class<T> annotationClass;
		private final long priority;
		private final boolean deferUntilPostDiet;
		
		AnnotationHandlerContainer(EclipseAnnotationHandler<T> handler, Class<T> annotationClass) {
			this.handler = handler;
			this.annotationClass = annotationClass;
			this.deferUntilPostDiet = handler.getClass().isAnnotationPresent(DeferUntilPostDiet.class);
			HandlerPriority hp = handler.getClass().getAnnotation(HandlerPriority.class);
			// Same bit-packing scheme as VisitorContainer: (value << 32) + subValue.
			this.priority = hp == null ? 0L : (((long)hp.value()) << 32) + hp.subValue();
		}
		
		/** Invokes the handler with a freshly decoded {@link AnnotationValues} for the annotation node. */
		public void handle(org.eclipse.jdt.internal.compiler.ast.Annotation annotation,
				final EclipseNode annotationNode) {
			AnnotationValues<T> annValues = createAnnotation(annotationClass, annotationNode);
			handler.handle(annValues, annotation, annotationNode);
		}
		
		/** Invokes the handler's pre-handle pass (used on diet-parsed ASTs for deferred handlers). */
		public void preHandle(org.eclipse.jdt.internal.compiler.ast.Annotation annotation,
				final EclipseNode annotationNode) {
			AnnotationValues<T> annValues = createAnnotation(annotationClass, annotationNode);
			handler.preHandle(annValues, annotation, annotationNode);
		}
		
		public boolean deferUntilPostDiet() {
			return deferUntilPostDiet;
		}
		
		public long getPriority() {
			return priority;
		}
		
		/** Whether handling must be postponed to the buildFieldsAndMethods phase (see handleAnnotationOnBuildFieldsAndMethods). */
		public boolean deferUntilBuildFieldsAndMethods() {
			return handler.getClass().isAnnotationPresent(DeferUntilBuildFieldsAndMethods.class);
		}
	}
	
	// Keyed by the fully qualified annotation type name (nested classes use '.'-separation, not '$').
	private Map<String, AnnotationHandlerContainer<?>> annotationHandlers =
			new HashMap<String, AnnotationHandlerContainer<?>>();
	private Collection<VisitorContainer> visitorHandlers = new ArrayList<VisitorContainer>();
	
	/**
	 * Creates a new HandlerLibrary. Errors will be reported to the Eclipse Error log.
	 * Uses SPI discovery to load all annotation and visitor based handlers so that future calls
	 * to the handle methods will defer to these handlers.
	 */
	public static HandlerLibrary load() {
		HandlerLibrary lib = new HandlerLibrary();
		loadAnnotationHandlers(lib);
		loadVisitorHandlers(lib);
		lib.calculatePriorities();
		return lib;
	}
	
	// Distinct priority levels present among all loaded handlers/visitors; the driver
	// presumably iterates these levels in order — computed once by calculatePriorities().
	private SortedSet<Long> priorities;
	
	/** Returns the (unmodifiable, sorted) set of all distinct handler/visitor priorities. */
	public SortedSet<Long> getPriorities() {
		return priorities;
	}
	
	private void calculatePriorities() {
		SortedSet<Long> set = new TreeSet<Long>();
		for (AnnotationHandlerContainer<?> container : annotationHandlers.values()) set.add(container.getPriority());
		for (VisitorContainer container : visitorHandlers) set.add(container.getPriority());
		this.priorities = Collections.unmodifiableSortedSet(set);
	}
	
	/** Uses SPI Discovery to find implementations of {@link EclipseAnnotationHandler}.
	 * A failure to load any single handler is logged and skipped; only an I/O failure of
	 * discovery itself aborts. */
	@SuppressWarnings({"rawtypes", "unchecked"}) private static void loadAnnotationHandlers(HandlerLibrary lib) {
		try {
			for (EclipseAnnotationHandler<?> handler : SpiLoadUtil.findServices(EclipseAnnotationHandler.class, EclipseAnnotationHandler.class.getClassLoader())) {
				try {
					Class<? extends Annotation> annotationClass = handler.getAnnotationHandledByThisHandler();
					AnnotationHandlerContainer<?> container = new AnnotationHandlerContainer(handler, annotationClass);
					// Normalize nested-class names (Outer$Inner -> Outer.Inner) for the lookup key.
					String annotationClassName = container.annotationClass.getName().replace("$", ".");
					if (lib.annotationHandlers.put(annotationClassName, container) != null) {
						error(null, "Duplicate handlers for annotation type: " + annotationClassName, null);
					}
					lib.typeLibrary.addType(container.annotationClass.getName());
				} catch (Throwable t) {
					error(null, "Can't load Lombok annotation handler for Eclipse: ", t);
				}
			}
		} catch (IOException e) {
			Lombok.sneakyThrow(e);
		}
	}
	
	/** Uses SPI Discovery to find implementations of {@link EclipseASTVisitor}. */
	private static void loadVisitorHandlers(HandlerLibrary lib) {
		try {
			for (EclipseASTVisitor visitor : SpiLoadUtil.findServices(EclipseASTVisitor.class, EclipseASTVisitor.class.getClassLoader())) {
				lib.visitorHandlers.add(new VisitorContainer(visitor));
			}
		} catch (Throwable t) {
			throw Lombok.sneakyThrow(t);
		}
	}
	
	// Tracks annotation AST nodes that have already been handled, so re-parses don't
	// re-run handlers. WeakHashMap lets discarded AST nodes be garbage-collected.
	private static final Map<ASTNode, Object> handledMap = new WeakHashMap<ASTNode, Object>();
	private static final Object MARKER = new Object();
	
	/** Marks the node as handled; returns true only on the first call for a given node. */
	private boolean checkAndSetHandled(ASTNode node) {
		synchronized (handledMap) {
			return handledMap.put(node, MARKER) != MARKER;
		}
	}
	
	/** Returns true if the node has not been marked handled yet (does not mark it). */
	private boolean needsHandling(ASTNode node) {
		synchronized (handledMap) {
			return handledMap.get(node) != MARKER;
		}
	}
	
	/**
	 * Handles the provided annotation node by first finding a qualifying instance of
	 * {@link EclipseAnnotationHandler} and if one exists, calling it with a freshly cooked up
	 * instance of {@link AnnotationValues}.
	 *
	 * Note that depending on the printASTOnly flag, the {@link lombok.core.PrintAST} annotation
	 * will either be silently skipped, or everything that isn't {@code PrintAST} will be skipped.
	 *
	 * The HandlerLibrary will attempt to guess if the given annotation node represents a lombok annotation.
	 * For example, if {@code lombok.*} is in the import list, then this method will guess that
	 * {@code Getter} refers to {@code lombok.Getter}, presuming that {@link lombok.eclipse.handlers.HandleGetter}
	 * has been loaded.
	 *
	 * @param ast The Compilation Unit that contains the Annotation AST Node.
	 * @param annotationNode The Lombok AST Node representing the Annotation AST Node.
	 * @param annotation 'node.get()' - convenience parameter.
	 * @param priority only the handler registered at exactly this priority level is invoked.
	 */
	public void handleAnnotation(CompilationUnitDeclaration ast, EclipseNode annotationNode, org.eclipse.jdt.internal.compiler.ast.Annotation annotation, long priority) {
		TypeResolver resolver = new TypeResolver(annotationNode.getImportList());
		TypeReference rawType = annotation.type;
		if (rawType == null) return;
		// Resolve the (possibly unqualified) annotation type name against the imports.
		String fqn = resolver.typeRefToFullyQualifiedName(annotationNode, typeLibrary, toQualifiedName(annotation.type.getTypeName()));
		if (fqn == null) return;
		AnnotationHandlerContainer<?> container = annotationHandlers.get(fqn);
		if (container == null) return;
		if (priority != container.getPriority()) return;
		// Handlers deferred to buildFieldsAndMethods are invoked elsewhere
		// (handleAnnotationOnBuildFieldsAndMethods), never from here.
		if (container.deferUntilBuildFieldsAndMethods()) return;
		
		if (!annotationNode.isCompleteParse() && container.deferUntilPostDiet()) {
			// Diet parse (no method bodies yet) and the handler wants the full parse:
			// run only its pre-handle pass, once per annotation node.
			if (needsHandling(annotation)) container.preHandle(annotation, annotationNode);
			return;
		}
		
		try {
			if (checkAndSetHandled(annotation)) container.handle(annotation, annotationNode);
		} catch (AnnotationValueDecodeFail fail) {
			fail.owner.setError(fail.getMessage(), fail.idx);
		} catch (Throwable t) {
			error(ast, String.format("Lombok annotation handler %s failed", container.handler.getClass()), t);
		}
	}
	
	/**
	 * Handles an annotation during Eclipse's buildFieldsAndMethods phase, for handlers marked
	 * with {@code @DeferUntilBuildFieldsAndMethods}. Resolves the annotation type through the
	 * compiler's bindings (rather than import guessing) before dispatching.
	 *
	 * @param typeNode the Lombok AST node of the type declaration carrying the annotation.
	 * @param annotation the Eclipse annotation AST node to handle.
	 */
	public void handleAnnotationOnBuildFieldsAndMethods(EclipseNode typeNode, org.eclipse.jdt.internal.compiler.ast.Annotation annotation) {
		TypeDeclaration decl = (TypeDeclaration) typeNode.get();
		TypeBinding tb = resolveAnnotation(decl, annotation);
		if (tb == null) return;
		AnnotationHandlerContainer<?> container = annotationHandlers.get(new String(tb.readableName()));
		if (container == null) return;
		EclipseNode annotationNode = typeNode.getAst().get(annotation);
		if (annotationNode == null) {
			return;
		}
		if (isMethodAnnotation(annotationNode) && !typeNode.isCompleteParse() && (decl.scope != null)) {
			// Method-level annotation on a diet-parsed type: ask the batch compiler to parse
			// the method bodies now, then rebuild the lombok AST so the handler sees them.
			final CompilationUnitScope cus = decl.scope.compilationUnitScope();
			final ITypeRequestor typeRequestor = cus.environment().typeRequestor;
			if (typeRequestor instanceof org.eclipse.jdt.internal.compiler.Compiler) {
				final org.eclipse.jdt.internal.compiler.Compiler c = (org.eclipse.jdt.internal.compiler.Compiler) typeRequestor;
				try {
					c.parser.getMethodBodies(cus.referenceContext);
					typeNode.rebuild();
				} catch (Exception e) {
					// better break here
				}
			}
		}
		try {
			if (checkAndSetHandled(annotation)) container.handle(annotation, annotationNode);
		} catch (AnnotationValueDecodeFail fail) {
			fail.owner.setError(fail.getMessage(), fail.idx);
		}
	}
	
	/** Returns true if the annotation node is attached directly to a method. */
	private boolean isMethodAnnotation(EclipseNode annotationNode) {
		EclipseNode parent = annotationNode.up();
		if (parent == null) return false;
		return parent.getKind() == Kind.METHOD;
	}
	
	/** Resolves the annotation's type binding, falling back to resolving its type reference
	 * in the declaration's initializer scope when the compiler hasn't resolved it yet. */
	private TypeBinding resolveAnnotation(TypeDeclaration decl, org.eclipse.jdt.internal.compiler.ast.Annotation ann) {
		TypeBinding tb = ann.resolvedType;
		if ((tb == null) && (ann.type != null)) {
			try {
				tb = ann.type.resolveType(decl.initializerScope);
			} catch (final Exception ignore) {
				// completion nodes may throw an exception here
			}
		}
		return tb;
	}
	
	/**
	 * Will call all registered {@link EclipseASTVisitor} instances.
	 * Visitors whose priority does not match, or that are deferred past the diet parse
	 * while only a diet parse is available, are skipped. A failing visitor is reported
	 * against the compilation unit but does not stop the remaining visitors.
	 */
	public void callASTVisitors(EclipseAST ast, long priority, boolean isCompleteParse) {
		for (VisitorContainer container : visitorHandlers) {
			if (!isCompleteParse && container.deferUntilPostDiet()) continue;
			if (priority != container.getPriority()) continue;
			try {
				ast.traverse(container.visitor);
			} catch (Throwable t) {
				error((CompilationUnitDeclaration) ast.top().get(),
						String.format("Lombok visitor handler %s failed", container.visitor.getClass()), t);
			}
		}
	}
}
package com.bioxx.tfc.api;

import net.minecraft.block.Block;
import net.minecraft.block.BlockFence;

import com.bioxx.tfc.api.Constant.Global;

/**
 * Static registry of every TerraFirmaCraft block instance plus the custom
 * client-side render IDs used by the mod's block renderers.
 *
 * All fields are plain mutable statics; nothing in this class assigns them —
 * they are populated elsewhere during mod initialisation and should be
 * treated as write-once after startup. The class also provides a handful of
 * identity-comparison helpers for block categories that have two variants.
 */
public class TFCBlocks
{
	// ---- Custom render IDs ----
	// NOTE(review): presumably assigned via the client-side rendering registry
	// during init — confirm in the client proxy; this class never sets them.
	public static int clayGrassRenderId;
	public static int peatGrassRenderId;
	public static int sulfurRenderId;
	public static int woodFruitRenderId;
	public static int leavesFruitRenderId;
	public static int woodThickRenderId;
	public static int woodSupportRenderIdH;
	public static int woodSupportRenderIdV;
	public static int grassRenderId;
	public static int oreRenderId;
	public static int moltenRenderId;
	public static int looseRockRenderId;
	public static int snowRenderId;
	public static int firepitRenderId;
	public static int anvilRenderId;
	public static int barrelRenderId;
	public static int loomRenderId;
	public static int standRenderId;
	public static int fenceRenderId;
	public static int fenceGateRenderId;
	public static int nestBoxRenderId;
	public static int bellowsRenderId;
	public static int forgeRenderId;
	public static int sluiceRenderId;
	public static int toolRackRenderId;
	public static int partialRenderId;
	public static int stairRenderId;
	public static int slabRenderId;
	public static int cropRenderId;
	public static int cookingPitRenderId;
	public static int leavesRenderId;
	public static int detailedRenderId;
	public static int foodPrepRenderId;
	public static int quernRenderId;
	public static int fluidRenderId;
	public static int woodConstructRenderId;
	public static int potteryRenderId;
	public static int tuyereRenderId;
	public static int crucibleRenderId;
	public static int berryRenderId;
	public static int pipeRenderId;
	public static int pipeValveRenderId;
	public static int waterPlantRenderId;
	public static int bloomeryRenderId;
	public static int metalsheetRenderId;
	public static int chestRenderId;
	public static int leatherRackRenderId;
	public static int grillRenderId;
	public static int metalTrapDoorRenderId;
	public static int vesselRenderId;
	public static int torchRenderId;
	public static int smokeRenderId;
	public static int smokeRackRenderId;
	public static int oilLampRenderId;
	public static int wallRenderId;
	public static int hopperRenderId;
	public static int flowerPotRenderId;

	// ---- Block instances ----
	// Stone naming: IgIn / IgEx / Sed / MM — presumably igneous intrusive,
	// igneous extrusive, sedimentary and metamorphic rock categories; confirm
	// against Constant.Global's rock tables.
	public static Block stoneIgIn;
	public static Block stoneIgEx;
	public static Block stoneSed;
	public static Block stoneMM;
	public static Block stoneIgInCobble;
	public static Block stoneIgExCobble;
	public static Block stoneSedCobble;
	public static Block stoneMMCobble;
	public static Block stoneIgInBrick;
	public static Block stoneIgExBrick;
	public static Block stoneSedBrick;
	public static Block stoneMMBrick;
	public static Block stoneIgInSmooth;
	public static Block stoneIgExSmooth;
	public static Block stoneSedSmooth;
	public static Block stoneMMSmooth;
	public static Block ore;
	public static Block ore2;
	public static Block ore3;
	public static Block sulfur;
	public static Block planks;
	public static Block planks2;
	public static Block leaves;
	public static Block sapling;
	public static Block leaves2;
	public static Block sapling2;
	// Wood supports come in two blocks each (V = vertical, H = horizontal);
	// see isBlockVSupport/isBlockHSupport below.
	public static Block woodSupportV;
	public static Block woodSupportH;
	public static Block woodSupportV2;
	public static Block woodSupportH2;
	public static Block grass;
	public static Block grass2;
	public static Block dirt;
	public static Block dirt2;
	public static Block clay;
	public static Block clay2;
	public static Block clayGrass;
	public static Block clayGrass2;
	public static Block peat;
	public static Block peatGrass;
	public static Block worldItem;
	public static Block logPile;
	public static Block tilledSoil;
	public static Block tilledSoil2;
	public static Block firepit;
	public static Block bellows;
	public static Block anvil;
	public static Block anvil2;
	public static Block forge;
	public static Block blastFurnace;
	public static Block molten;
	public static Block sluice;
	public static Block fruitTreeWood;
	public static Block fruitTreeLeaves;
	public static Block fruitTreeLeaves2;
	public static Block stoneStairs;
	public static Block stoneSlabs;
	public static Block stoneStalac;
	public static Block sand;
	public static Block sand2;
	public static Block dryGrass;
	public static Block dryGrass2;
	public static Block tallGrass;
	public static Block charcoal;
	public static Block detailed;
	public static Block woodConstruct;
	public static Block woodVert;
	public static Block woodHoriz;
	public static Block woodHoriz2;
	public static Block toolRack;
	public static Block spawnMeter;
	public static Block foodPrep;
	public static Block quern;
	public static Block wallCobbleIgIn;
	public static Block wallCobbleIgEx;
	public static Block wallCobbleSed;
	public static Block wallCobbleMM;
	public static Block wallRawIgIn;
	public static Block wallRawIgEx;
	public static Block wallRawSed;
	public static Block wallRawMM;
	public static Block wallBrickIgIn;
	public static Block wallBrickIgEx;
	public static Block wallBrickSed;
	public static Block wallBrickMM;
	public static Block wallSmoothIgIn;
	public static Block wallSmoothIgEx;
	public static Block wallSmoothSed;
	public static Block wallSmoothMM;
	// One door block per wood type defined in Global.WOOD_ALL.
	public static Block[] doors = new Block[Global.WOOD_ALL.length];
	public static Block ingotPile;
	public static Block barrel;
	public static Block loom;
	public static Block pottery;
	public static Block thatch;
	public static Block moss;
	public static Block berryBush;
	public static Block crops;
	public static Block lilyPad;
	public static Block flowers;
	public static Block flowers2;
	public static Block fungi;
	public static Block flora;
	public static Block bloomery;
	public static Block bloom;
	public static Block crucible;
	public static Block fireBrick;
	public static Block metalSheet;
	public static Block nestBox;
	// Fences and gates also come in two blocks each; see isBlockAFence and
	// canFenceConnectTo below.
	public static Block fence;
	public static Block fenceGate;
	public static Block fence2;
	public static Block fenceGate2;
	public static Block strawHideBed;
	public static Block armorStand;
	public static Block armorStand2;
	public static Block logNatural;
	public static Block logNatural2;
	public static Block woodHoriz3;
	public static Block woodHoriz4;
	public static Block woodVert2;
	// Fluids: each fluid has a flowing block and a "Stationary" still block.
	public static Block saltWater;
	public static Block saltWaterStationary;
	public static Block freshWater;
	public static Block freshWaterStationary;
	public static Block hotWater;
	public static Block hotWaterStationary;
	public static Block lava;
	public static Block lavaStationary;
	public static Block ice;
	public static Block waterPlant;
	public static Block bookshelf;
	public static Block torch;
	public static Block chest;
	public static Block workbench;
	public static Block cactus;
	public static Block reeds;
	public static Block pumpkin;
	public static Block litPumpkin;
	public static Block buttonWood;
	public static Block vine;
	public static Block leatherRack;
	public static Block gravel;
	public static Block gravel2;
	public static Block grill;
	public static Block metalTrapDoor;
	public static Block vessel;
	public static Block smoke;
	public static Block smokeRack;
	public static Block snow;
	public static Block oilLamp;
	public static Block hopper;
	public static Block flowerPot;

	/** @return true if {@code block} is one of TFC's two vertical wood-support blocks (identity compare). */
	public static boolean isBlockVSupport(Block block)
	{
		return block == woodSupportV || block == woodSupportV2;
	}

	/** @return true if {@code block} is one of TFC's two horizontal wood-support blocks (identity compare). */
	public static boolean isBlockHSupport(Block block)
	{
		return block == woodSupportH || block == woodSupportH2;
	}

	/**
	 * @return true if {@code block} is one of TFC's fence blocks, or passes the
	 *         vanilla fence check.
	 *         NOTE(review): func_149825_a is an obfuscated (MCP) BlockFence
	 *         static — presumably the vanilla "is a fence" test; confirm
	 *         against the 1.7.10 MCP mappings.
	 */
	public static boolean isBlockAFence(Block block)
	{
		return block == fence || block == fence2 || BlockFence.func_149825_a(block);
	}

	/** @return true if a fence may visually connect to {@code block} (any fence, or either TFC fence gate). */
	public static boolean canFenceConnectTo(Block block)
	{
		return isBlockAFence(block) || block == fenceGate || block == fenceGate2;
	}

	/** @return true if {@code block} is either TFC armour-stand block (identity compare). */
	public static boolean isArmourStand(Block block)
	{
		return block == armorStand || block == armorStand2;
	}
}
/*
 * Copyright (C) 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.zxing.client.android;

import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.Result;
import com.google.zxing.ResultMetadataType;
import com.google.zxing.ResultPoint;
import com.google.zxing.client.android.camera.CameraManager;
import com.google.zxing.client.android.clipboard.ClipboardInterface;
import com.google.zxing.client.android.history.HistoryActivity;
import com.google.zxing.client.android.history.HistoryItem;
import com.google.zxing.client.android.history.HistoryManager;
import com.google.zxing.client.android.result.ResultButtonListener;
import com.google.zxing.client.android.result.ResultHandler;
import com.google.zxing.client.android.result.ResultHandlerFactory;
import com.google.zxing.client.android.result.supplement.SupplementalInfoRetriever;
import com.google.zxing.client.android.share.ShareActivity;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.util.Log;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import java.io.IOException;
import java.text.DateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.EnumSet;
import java.util.Map;

/**
 * This activity opens the camera and does the actual scanning on a background thread. It draws a
 * viewfinder to help the user place the barcode correctly, shows feedback as the image processing
 * is happening, and then overlays the results when a scan is successful.
 *
 * @author dswitkin@google.com (Daniel Switkin)
 * @author Sean Owen
 */
public final class CaptureActivity extends Activity implements SurfaceHolder.Callback {

  private static final String TAG = CaptureActivity.class.getSimpleName();

  // How long a scan result stays on screen for external (intent) callers, unless overridden.
  private static final long DEFAULT_INTENT_RESULT_DURATION_MS = 1500L;
  // Pause between scans in bulk mode so the same barcode isn't decoded repeatedly.
  private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;

  // URL prefixes that identify a "scan from web page" launch; see isZXingURL().
  private static final String[] ZXING_URLS = { "http://zxing.appspot.com/scan", "zxing://scan/" };

  private static final int HISTORY_REQUEST_CODE = 0x0000bacc;

  // The only metadata types shown in the internal result UI; everything else is suppressed.
  private static final Collection<ResultMetadataType> DISPLAYABLE_METADATA_TYPES =
      EnumSet.of(ResultMetadataType.ISSUE_NUMBER,
                 ResultMetadataType.SUGGESTED_PRICE,
                 ResultMetadataType.ERROR_CORRECTION_LEVEL,
                 ResultMetadataType.POSSIBLE_COUNTRY);

  private CameraManager cameraManager;
  private CaptureActivityHandler handler;
  // Result decoded from history before the camera handler existed; replayed in decodeOrStoreSavedBitmap().
  private Result savedResultToShow;
  private ViewfinderView viewfinderView;
  private TextView statusView;
  private View resultView;
  private Result lastResult;
  private boolean hasSurface;
  private boolean copyToClipboard;
  private IntentSource source;
  private String sourceUrl;
  private ScanFromWebPageManager scanFromWebPageManager;
  private Collection<BarcodeFormat> decodeFormats;
  private Map<DecodeHintType,?> decodeHints;
  private String characterSet;
  private HistoryManager historyManager;
  private InactivityTimer inactivityTimer;
  // NOTE(review): beep feedback deliberately disabled in this fork — all BeepManager
  // usages below are commented out, not dead by accident.
  //private BeepManager beepManager;
  private AmbientLightManager ambientLightManager;

  ViewfinderView getViewfinderView() {
    return viewfinderView;
  }

  public Handler getHandler() {
    return handler;
  }

  CameraManager getCameraManager() {
    return cameraManager;
  }

  @Override
  public void onCreate(Bundle icicle) {
    super.onCreate(icicle);

    Window window = getWindow();
    window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.capture);

    hasSurface = false;
    inactivityTimer = new InactivityTimer(this);
    //beepManager = new BeepManager(this);
    ambientLightManager = new AmbientLightManager(this);

    PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
  }

  @Override
  protected void onResume() {
    super.onResume();

    // historyManager must be initialized here to update the history preference
    historyManager = new HistoryManager(this);
    historyManager.trimHistory();

    // CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
    // want to open the camera driver and measure the screen size if we're going to show the help on
    // first launch. That led to bugs where the scanning rectangle was the wrong size and partially
    // off screen.
    cameraManager = new CameraManager(getApplication());

    viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
    viewfinderView.setCameraManager(cameraManager);

    resultView = findViewById(R.id.result_view);
    statusView = (TextView) findViewById(R.id.status_view);

    handler = null;
    lastResult = null;

    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);

    // Lock the screen to its current orientation unless the user opted into auto-rotation.
    if (prefs.getBoolean(PreferencesActivity.KEY_DISABLE_AUTO_ORIENTATION, true)) {
      setRequestedOrientation(getCurrentOrientation());
    } else {
      setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE);
    }

    resetStatusView();

    //beepManager.updatePrefs();
    ambientLightManager.start(cameraManager);

    inactivityTimer.onResume();

    Intent intent = getIntent();

    copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
        && (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));

    source = IntentSource.NONE;
    sourceUrl = null;
    scanFromWebPageManager = null;
    decodeFormats = null;
    characterSet = null;

    if (intent != null) {

      String action = intent.getAction();
      String dataString = intent.getDataString();

      if (Intents.Scan.ACTION.equals(action)) {

        // Scan the formats the intent requested, and return the result to the calling activity.
        source = IntentSource.NATIVE_APP_INTENT;
        decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
        decodeHints = DecodeHintManager.parseDecodeHints(intent);

        if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
          int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
          int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
          if (width > 0 && height > 0) {
            cameraManager.setManualFramingRect(width, height);
          }
        }

        if (intent.hasExtra(Intents.Scan.CAMERA_ID)) {
          int cameraId = intent.getIntExtra(Intents.Scan.CAMERA_ID, -1);
          if (cameraId >= 0) {
            cameraManager.setManualCameraId(cameraId);
          }
        }

        String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
        if (customPromptMessage != null) {
          statusView.setText(customPromptMessage);
        }

      } else if (dataString != null &&
                 dataString.contains("http://www.google") &&
                 dataString.contains("/m/products/scan")) {

        // Scan only products and send the result to mobile Product Search.
        source = IntentSource.PRODUCT_SEARCH_LINK;
        sourceUrl = dataString;
        decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;

      } else if (isZXingURL(dataString)) {

        // Scan formats requested in query string (all formats if none specified).
        // If a return URL is specified, send the results there. Otherwise, handle it ourselves.
        source = IntentSource.ZXING_LINK;
        sourceUrl = dataString;
        Uri inputUri = Uri.parse(dataString);
        scanFromWebPageManager = new ScanFromWebPageManager(inputUri);
        decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
        // Allow a sub-set of the hints to be specified by the caller.
        decodeHints = DecodeHintManager.parseDecodeHints(inputUri);

      }

      characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);

    }

    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    if (hasSurface) {
      // The activity was paused but not stopped, so the surface still exists. Therefore
      // surfaceCreated() won't be called, so init the camera here.
      initCamera(surfaceHolder);
    } else {
      // Install the callback and wait for surfaceCreated() to init the camera.
      surfaceHolder.addCallback(this);
    }
  }

  // Maps the current display rotation to the matching fixed orientation constant,
  // used to freeze the screen in its present orientation.
  private int getCurrentOrientation() {
    int rotation = getWindowManager().getDefaultDisplay().getRotation();
    if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
      switch (rotation) {
        case Surface.ROTATION_0:
        case Surface.ROTATION_90:
          return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
        default:
          return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
      }
    } else {
      switch (rotation) {
        case Surface.ROTATION_0:
        case Surface.ROTATION_270:
          return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
        default:
          return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
      }
    }
  }

  // True if the launching data URI is one of the recognized "scan from web page" prefixes.
  private static boolean isZXingURL(String dataString) {
    if (dataString == null) {
      return false;
    }
    for (String url : ZXING_URLS) {
      if (dataString.startsWith(url)) {
        return true;
      }
    }
    return false;
  }

  @Override
  protected void onPause() {
    // Tear down in reverse of onResume(): stop decoding, timers, sensors, then release the camera.
    if (handler != null) {
      handler.quitSynchronously();
      handler = null;
    }
    inactivityTimer.onPause();
    ambientLightManager.stop();
    //beepManager.close();
    cameraManager.closeDriver();
    //historyManager = null; // Keep for onActivityResult
    if (!hasSurface) {
      SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
      SurfaceHolder surfaceHolder = surfaceView.getHolder();
      surfaceHolder.removeCallback(this);
    }
    super.onPause();
  }

  @Override
  protected void onDestroy() {
    inactivityTimer.shutdown();
    super.onDestroy();
  }

  @Override
  public boolean onKeyDown(int keyCode, KeyEvent event) {
    switch (keyCode) {
      case KeyEvent.KEYCODE_BACK:
        // When launched by an external intent, BACK cancels and returns to the caller;
        // when a result is showing, BACK returns to scanning instead of leaving.
        if (source == IntentSource.NATIVE_APP_INTENT) {
          setResult(RESULT_CANCELED);
          finish();
          return true;
        }
        if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
          restartPreviewAfterDelay(0L);
          return true;
        }
        break;
      case KeyEvent.KEYCODE_FOCUS:
      case KeyEvent.KEYCODE_CAMERA:
        // Handle these events so they don't launch the Camera app
        return true;
      // Use volume up/down to turn on light
      case KeyEvent.KEYCODE_VOLUME_DOWN:
        cameraManager.setTorch(false);
        return true;
      case KeyEvent.KEYCODE_VOLUME_UP:
        cameraManager.setTorch(true);
        return true;
    }
    return super.onKeyDown(keyCode, event);
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    MenuInflater menuInflater = getMenuInflater();
    menuInflater.inflate(R.menu.capture, menu);
    return super.onCreateOptionsMenu(menu);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    Intent intent = new Intent(Intent.ACTION_VIEW);
    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
    int i = item.getItemId();
    if (i == R.id.menu_share) {
      intent.setClassName(this, ShareActivity.class.getName());
      startActivity(intent);
    } else if (i == R.id.menu_history) {
      intent.setClassName(this, HistoryActivity.class.getName());
      startActivityForResult(intent, HISTORY_REQUEST_CODE);
    } else if (i == R.id.menu_settings) {
      intent.setClassName(this, PreferencesActivity.class.getName());
      startActivity(intent);
    } else if (i == R.id.menu_help) {
      intent.setClassName(this, HelpActivity.class.getName());
      startActivity(intent);
    } else {
      return super.onOptionsItemSelected(item);
    }
    return true;
  }

  @Override
  public void onActivityResult(int requestCode, int resultCode, Intent intent) {
    // Only handles returns from the history screen: re-display the chosen past result.
    if (resultCode == RESULT_OK && requestCode == HISTORY_REQUEST_CODE && historyManager != null) {
      int itemNumber = intent.getIntExtra(Intents.History.ITEM_NUMBER, -1);
      if (itemNumber >= 0) {
        HistoryItem historyItem = historyManager.buildHistoryItem(itemNumber);
        decodeOrStoreSavedBitmap(null, historyItem.getResult());
      }
    }
  }

  // Shows a result immediately if the camera handler exists; otherwise parks it in
  // savedResultToShow until initCamera() creates the handler and calls back here.
  private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
    // Bitmap isn't used yet -- will be used soon
    if (handler == null) {
      savedResultToShow = result;
    } else {
      if (result != null) {
        savedResultToShow = result;
      }
      if (savedResultToShow != null) {
        Message message = Message.obtain(handler, R.id.decode_succeeded, savedResultToShow);
        handler.sendMessage(message);
      }
      savedResultToShow = null;
    }
  }

  @Override
  public void surfaceCreated(SurfaceHolder holder) {
    if (holder == null) {
      Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
    }
    if (!hasSurface) {
      hasSurface = true;
      initCamera(holder);
    }
  }

  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    hasSurface = false;
  }

  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    // Intentionally empty: preview size is managed by CameraManager, not by surface changes.
  }

  /**
   * A valid barcode has been found, so give an indication of success and show the results.
   *
   * @param rawResult The contents of the barcode.
   * @param scaleFactor amount by which thumbnail was scaled
   * @param barcode   A greyscale bitmap of the camera data which was decoded.
   */
  public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
    inactivityTimer.onActivity();
    lastResult = rawResult;
    ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);

    // A null barcode bitmap means the result was replayed from history, not scanned live.
    boolean fromLiveScan = barcode != null;
    if (fromLiveScan) {
      historyManager.addHistoryItem(rawResult, resultHandler);
      // Then not from history, so beep/vibrate and we have an image to draw on
      //beepManager.playBeepSoundAndVibrate();
      drawResultPoints(barcode, scaleFactor, rawResult);
    }

    switch (source) {
      case NATIVE_APP_INTENT:
      case PRODUCT_SEARCH_LINK:
        handleDecodeExternally(rawResult, resultHandler, barcode);
        break;
      case ZXING_LINK:
        if (scanFromWebPageManager == null || !scanFromWebPageManager.isScanFromWebPage()) {
          handleDecodeInternally(rawResult, resultHandler, barcode);
        } else {
          handleDecodeExternally(rawResult, resultHandler, barcode);
        }
        break;
      case NONE:
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        if (fromLiveScan && prefs.getBoolean(PreferencesActivity.KEY_BULK_MODE, false)) {
          Toast.makeText(getApplicationContext(),
                         getResources().getString(R.string.msg_bulk_mode_scanned) + " (" + rawResult.getText() + ')',
                         Toast.LENGTH_SHORT).show();
          // Wait a moment or else it will scan the same barcode continuously about 3 times
          restartPreviewAfterDelay(BULK_MODE_SCAN_DELAY_MS);
        } else {
          handleDecodeInternally(rawResult, resultHandler, barcode);
        }
        break;
    }
  }

  /**
   * Superimpose a line for 1D or dots for 2D to highlight the key features of the barcode.
   *
   * @param barcode   A bitmap of the captured image.
   * @param scaleFactor amount by which thumbnail was scaled
   * @param rawResult The decoded results which contains the points to draw.
   */
  private void drawResultPoints(Bitmap barcode, float scaleFactor, Result rawResult) {
    ResultPoint[] points = rawResult.getResultPoints();
    if (points != null && points.length > 0) {
      Canvas canvas = new Canvas(barcode);
      Paint paint = new Paint();
      paint.setColor(getResources().getColor(R.color.result_points));
      if (points.length == 2) {
        paint.setStrokeWidth(4.0f);
        drawLine(canvas, paint, points[0], points[1], scaleFactor);
      } else if (points.length == 4 &&
                 (rawResult.getBarcodeFormat() == BarcodeFormat.UPC_A ||
                  rawResult.getBarcodeFormat() == BarcodeFormat.EAN_13)) {
        // Hacky special case -- draw two lines, for the barcode and metadata
        drawLine(canvas, paint, points[0], points[1], scaleFactor);
        drawLine(canvas, paint, points[2], points[3], scaleFactor);
      } else {
        paint.setStrokeWidth(10.0f);
        for (ResultPoint point : points) {
          if (point != null) {
            canvas.drawPoint(scaleFactor * point.getX(), scaleFactor * point.getY(), paint);
          }
        }
      }
    }
  }

  // Draws a line between two result points, scaling thumbnail coordinates to the bitmap.
  private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b, float scaleFactor) {
    if (a != null && b != null) {
      canvas.drawLine(scaleFactor * a.getX(),
                      scaleFactor * a.getY(),
                      scaleFactor * b.getX(),
                      scaleFactor * b.getY(),
                      paint);
    }
  }

  // Put up our own UI for how to handle the decoded contents.
  private void handleDecodeInternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {

    CharSequence displayContents = resultHandler.getDisplayContents();

    if (copyToClipboard && !resultHandler.areContentsSecure()) {
      ClipboardInterface.setText(displayContents, this);
    }

    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);

    // If the handler has a default action and auto-open is enabled, skip the result UI entirely.
    if (resultHandler.getDefaultButtonID() != null && prefs.getBoolean(PreferencesActivity.KEY_AUTO_OPEN_WEB, false)) {
      resultHandler.handleButtonPress(resultHandler.getDefaultButtonID());
      return;
    }

    statusView.setVisibility(View.GONE);
    viewfinderView.setVisibility(View.GONE);
    resultView.setVisibility(View.VISIBLE);

    ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
    if (barcode == null) {
      barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(),
          R.drawable.launcher_icon));
    } else {
      barcodeImageView.setImageBitmap(barcode);
    }

    TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
    formatTextView.setText(rawResult.getBarcodeFormat().toString());

    TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
    typeTextView.setText(resultHandler.getType().toString());

    DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
    TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
    timeTextView.setText(formatter.format(new Date(rawResult.getTimestamp())));

    TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
    View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
    metaTextView.setVisibility(View.GONE);
    metaTextViewLabel.setVisibility(View.GONE);
    Map<ResultMetadataType,Object> metadata = rawResult.getResultMetadata();
    if (metadata != null) {
      StringBuilder metadataText = new StringBuilder(20);
      for (Map.Entry<ResultMetadataType,Object> entry : metadata.entrySet()) {
        if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
          metadataText.append(entry.getValue()).append('\n');
        }
      }
      if (metadataText.length() > 0) {
        // Drop the trailing newline before display.
        metadataText.setLength(metadataText.length() - 1);
        metaTextView.setText(metadataText);
        metaTextView.setVisibility(View.VISIBLE);
        metaTextViewLabel.setVisibility(View.VISIBLE);
      }
    }

    TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
    contentsTextView.setText(displayContents);
    // Crudely scale between 22 and 32 -- bigger font for shorter text
    int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
    contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);

    TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
    supplementTextView.setText("");
    supplementTextView.setOnClickListener(null);
    if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(
        PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
      SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView,
                                                     resultHandler.getResult(),
                                                     historyManager,
                                                     this);
    }

    int buttonCount = resultHandler.getButtonCount();
    ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
    buttonView.requestFocus();
    for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
      TextView button = (TextView) buttonView.getChildAt(x);
      if (x < buttonCount) {
        button.setVisibility(View.VISIBLE);
        button.setText(resultHandler.getButtonText(x));
        button.setOnClickListener(new ResultButtonListener(resultHandler, x));
      } else {
        button.setVisibility(View.GONE);
      }
    }

  }

  // Briefly show the contents of the barcode, then handle the result outside Barcode Scanner.
  private void handleDecodeExternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {

    if (barcode != null) {
      viewfinderView.drawResultBitmap(barcode);
    }

    long resultDurationMS;
    if (getIntent() == null) {
      resultDurationMS = DEFAULT_INTENT_RESULT_DURATION_MS;
    } else {
      resultDurationMS = getIntent().getLongExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS,
                                                  DEFAULT_INTENT_RESULT_DURATION_MS);
    }

    if (resultDurationMS > 0) {
      String rawResultString = String.valueOf(rawResult);
      if (rawResultString.length() > 32) {
        rawResultString = rawResultString.substring(0, 32) + " ...";
      }
      statusView.setText(getString(resultHandler.getDisplayTitle()) + " : " + rawResultString);
    }

    if (copyToClipboard && !resultHandler.areContentsSecure()) {
      CharSequence text = resultHandler.getDisplayContents();
      ClipboardInterface.setText(text, this);
    }

    if (source == IntentSource.NATIVE_APP_INTENT) {

      // Hand back whatever action they requested - this can be changed to Intents.Scan.ACTION when
      // the deprecated intent is retired.
      Intent intent = new Intent(getIntent().getAction());
      intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
      intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
      intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
      byte[] rawBytes = rawResult.getRawBytes();
      if (rawBytes != null && rawBytes.length > 0) {
        intent.putExtra(Intents.Scan.RESULT_BYTES, rawBytes);
      }
      Map<ResultMetadataType,?> metadata = rawResult.getResultMetadata();
      if (metadata != null) {
        if (metadata.containsKey(ResultMetadataType.UPC_EAN_EXTENSION)) {
          intent.putExtra(Intents.Scan.RESULT_UPC_EAN_EXTENSION,
                          metadata.get(ResultMetadataType.UPC_EAN_EXTENSION).toString());
        }
        Number orientation = (Number) metadata.get(ResultMetadataType.ORIENTATION);
        if (orientation != null) {
          intent.putExtra(Intents.Scan.RESULT_ORIENTATION, orientation.intValue());
        }
        String ecLevel = (String) metadata.get(ResultMetadataType.ERROR_CORRECTION_LEVEL);
        if (ecLevel != null) {
          intent.putExtra(Intents.Scan.RESULT_ERROR_CORRECTION_LEVEL, ecLevel);
        }
        @SuppressWarnings("unchecked")
        Iterable<byte[]> byteSegments = (Iterable<byte[]>) metadata.get(ResultMetadataType.BYTE_SEGMENTS);
        if (byteSegments != null) {
          int i = 0;
          for (byte[] byteSegment : byteSegments) {
            intent.putExtra(Intents.Scan.RESULT_BYTE_SEGMENTS_PREFIX + i, byteSegment);
            i++;
          }
        }
      }
      sendReplyMessage(R.id.return_scan_result, intent, resultDurationMS);

    } else if (source == IntentSource.PRODUCT_SEARCH_LINK) {

      // Reformulate the URL which triggered us into a query, so that the request goes to the same
      // TLD as the scan URL.
      int end = sourceUrl.lastIndexOf("/scan");
      String replyURL = sourceUrl.substring(0, end) + "?q=" + resultHandler.getDisplayContents() + "&source=zxing";
      sendReplyMessage(R.id.launch_product_query, replyURL, resultDurationMS);

    } else if (source == IntentSource.ZXING_LINK) {

      if (scanFromWebPageManager != null && scanFromWebPageManager.isScanFromWebPage()) {
        String replyURL = scanFromWebPageManager.buildReplyURL(rawResult, resultHandler);
        scanFromWebPageManager = null;
        sendReplyMessage(R.id.launch_product_query, replyURL, resultDurationMS);
      }

    }
  }

  // Posts a reply to the capture handler, optionally delayed so the user can see the result first.
  private void sendReplyMessage(int id, Object arg, long delayMS) {
    if (handler != null) {
      Message message = Message.obtain(handler, id, arg);
      if (delayMS > 0L) {
        handler.sendMessageDelayed(message, delayMS);
      } else {
        handler.sendMessage(message);
      }
    }
  }

  // Opens the camera driver on the given surface and starts the decode handler.
  // Any failure (framework bug, camera in use) routes to displayFrameworkBugMessageAndExit().
  private void initCamera(SurfaceHolder surfaceHolder) {
    if (surfaceHolder == null) {
      throw new IllegalStateException("No SurfaceHolder provided");
    }
    if (cameraManager.isOpen()) {
      Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
      return;
    }
    try {
      cameraManager.openDriver(surfaceHolder);
      // Creating the handler starts the preview, which can also throw a RuntimeException.
      if (handler == null) {
        handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
      }
      decodeOrStoreSavedBitmap(null, null);
    } catch (IOException ioe) {
      Log.w(TAG, ioe);
      displayFrameworkBugMessageAndExit();
    } catch (RuntimeException e) {
      // Barcode Scanner has seen crashes in the wild of this variety:
      // java.?lang.?RuntimeException: Fail to connect to camera service
      Log.w(TAG, "Unexpected error initializing camera", e);
      displayFrameworkBugMessageAndExit();
    }
  }

  // Tells the user the camera could not be opened and finishes the activity on dismiss.
  private void displayFrameworkBugMessageAndExit() {
    AlertDialog.Builder builder = new AlertDialog.Builder(this);
    builder.setTitle(getString(R.string.app_name));
    builder.setMessage(getString(R.string.msg_camera_framework_bug));
    builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
    builder.setOnCancelListener(new FinishListener(this));
    builder.show();
  }

  // Returns to live scanning after delayMS, resetting the status UI immediately.
  public void restartPreviewAfterDelay(long delayMS) {
    if (handler != null) {
      handler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
    }
    resetStatusView();
  }

  // Restores the scanning UI (viewfinder + status text) and clears any shown result.
  private void resetStatusView() {
    resultView.setVisibility(View.GONE);
    statusView.setText(R.string.msg_default_status);
    statusView.setVisibility(View.VISIBLE);
    viewfinderView.setVisibility(View.VISIBLE);
    lastResult = null;
  }

  public void drawViewfinder() {
    viewfinderView.drawViewfinder();
  }
}
package org.zstack.compute.vm;

import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.db.*;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.allocator.HostCapacityVO;
import org.zstack.header.allocator.HostCapacityVO_;
import org.zstack.header.apimediator.ApiMessageInterceptionException;
import org.zstack.header.apimediator.ApiMessageInterceptor;
import org.zstack.header.apimediator.StopRoutingException;
import org.zstack.header.cluster.ClusterState;
import org.zstack.header.cluster.ClusterVO;
import org.zstack.header.cluster.ClusterVO_;
import org.zstack.header.configuration.*;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.host.HostState;
import org.zstack.header.host.HostStatus;
import org.zstack.header.host.HostVO;
import org.zstack.header.host.HostVO_;
import org.zstack.header.image.ImageConstant.ImageMediaType;
import org.zstack.header.image.ImagePlatform;
import org.zstack.header.image.ImageState;
import org.zstack.header.image.ImageVO;
import org.zstack.header.image.ImageVO_;
import org.zstack.header.message.APIMessage;
import org.zstack.header.network.l3.*;
import org.zstack.header.vm.*;
import org.zstack.header.zone.ZoneState;
import org.zstack.header.zone.ZoneVO;
import org.zstack.header.zone.ZoneVO_;
import org.zstack.utils.SizeUtils;
import org.zstack.utils.data.SizeUnit;
import org.zstack.utils.network.NetworkUtils;

import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import java.util.*;

import static org.zstack.core.Platform.argerr;
import static org.zstack.core.Platform.operr;
import static org.zstack.utils.CollectionDSL.list;

/**
 * API-mediator interceptor for VM-instance messages.
 *
 * <p>Performs cheap pre-flight validation (state checks, existence checks,
 * capacity sanity checks) before a message is routed to the VM service, and
 * stamps the target service id on every {@link VmInstanceMessage}. Validation
 * failures are reported via {@link ApiMessageInterceptionException}.
 */
public class VmInstanceApiInterceptor implements ApiMessageInterceptor {
    @Autowired
    private CloudBus bus;
    @Autowired
    private DatabaseFacade dbf;
    @Autowired
    private ErrorFacade errf;

    /** Routes VM messages to the VM service instance owning the target VM uuid. */
    private void setServiceId(APIMessage msg) {
        if (msg instanceof VmInstanceMessage) {
            VmInstanceMessage vmsg = (VmInstanceMessage) msg;
            bus.makeTargetServiceIdByResourceUuid(msg, VmInstanceConstant.SERVICE_ID, vmsg.getVmInstanceUuid());
        }
    }

    @Override
    public APIMessage intercept(APIMessage msg) throws ApiMessageInterceptionException {
        if (msg instanceof APIDestroyVmInstanceMsg) {
            validate((APIDestroyVmInstanceMsg) msg);
        } else if (msg instanceof APICreateVmInstanceMsg) {
            validate((APICreateVmInstanceMsg) msg);
        } else if (msg instanceof APIGetVmAttachableDataVolumeMsg) {
            validate((APIGetVmAttachableDataVolumeMsg) msg);
        } else if (msg instanceof APIDetachL3NetworkFromVmMsg) {
            validate((APIDetachL3NetworkFromVmMsg) msg);
        } else if (msg instanceof APIAttachL3NetworkToVmMsg) {
            validate((APIAttachL3NetworkToVmMsg) msg);
        } else if (msg instanceof APIAttachIsoToVmInstanceMsg) {
            validate((APIAttachIsoToVmInstanceMsg) msg);
        } else if (msg instanceof APISetVmBootOrderMsg) {
            validate((APISetVmBootOrderMsg) msg);
        } else if (msg instanceof APIDeleteVmStaticIpMsg) {
            validate((APIDeleteVmStaticIpMsg) msg);
        } else if (msg instanceof APISetVmStaticIpMsg) {
            validate((APISetVmStaticIpMsg) msg);
        } else if (msg instanceof APIStartVmInstanceMsg) {
            validate((APIStartVmInstanceMsg) msg);
        } else if (msg instanceof APICreateStartVmInstanceSchedulerMsg) {
            validate((APICreateStartVmInstanceSchedulerMsg) msg);
        } else if (msg instanceof APICreateStopVmInstanceSchedulerMsg) {
            validate((APICreateStopVmInstanceSchedulerMsg) msg);
        } else if (msg instanceof APICreateRebootVmInstanceSchedulerMsg) {
            validate((APICreateRebootVmInstanceSchedulerMsg) msg);
        } else if (msg instanceof APIGetInterdependentL3NetworksImagesMsg) {
            validate((APIGetInterdependentL3NetworksImagesMsg) msg);
        } else if (msg instanceof APIUpdateVmInstanceMsg) {
            validate((APIUpdateVmInstanceMsg) msg);
        } else if (msg instanceof APISetVmConsolePasswordMsg) {
            validate((APISetVmConsolePasswordMsg) msg);
        } else if (msg instanceof APIChangeInstanceOfferingMsg) {
            validate((APIChangeInstanceOfferingMsg) msg);
        } else if (msg instanceof APIMigrateVmMsg) {
            validate((APIMigrateVmMsg) msg);
        }

        setServiceId(msg);
        return msg;
    }

    /** Rejects a migration whose target host is the host the VM is already on. */
    private void validate(APIMigrateVmMsg msg) {
        new SQLBatch() {
            @Override
            protected void scripts() {
                VmInstanceVO vo = findByUuid(msg.getVmInstanceUuid(), VmInstanceVO.class);
                if (vo.getHostUuid().equals(msg.getHostUuid())) {
                    throw new ApiMessageInterceptionException(argerr(
                            "the vm[uuid:%s] is already on host[uuid:%s]", msg.getVmInstanceUuid(), msg.getHostUuid()
                    ));
                }
            }
        }.execute();
    }

    /**
     * Validates an instance-offering change: online resize requires NUMA, the VM
     * must be Running or Stopped, and a running VM may only be scaled up.
     */
    private void validate(APIChangeInstanceOfferingMsg msg) {
        new SQLBatch() {
            @Override
            protected void scripts() {
                VmInstanceVO vo = Q.New(VmInstanceVO.class).eq(VmInstanceVO_.uuid, msg.getVmInstanceUuid()).find();
                InstanceOfferingVO instanceOfferingVO = Q.New(InstanceOfferingVO.class)
                        .eq(InstanceOfferingVO_.uuid, msg.getInstanceOfferingUuid()).find();

                // Online cpu/memory change is only supported on NUMA-enabled deployments.
                if (!VmGlobalConfig.NUMA.value(Boolean.class) && !VmInstanceState.Stopped.equals(vo.getState())) {
                    throw new ApiMessageInterceptionException(argerr(
                            "the VM cannot do online cpu/memory update because it is not of NUMA architecture. Please stop the VM then do the cpu/memory update again"
                    ));
                }

                if (!VmInstanceState.Stopped.equals(vo.getState()) && !VmInstanceState.Running.equals(vo.getState())) {
                    throw new OperationFailureException(operr(
                            "The state of vm[uuid:%s] is %s. Only these state[%s] is allowed to update cpu or memory.",
                            vo.getUuid(), vo.getState(),
                            StringUtils.join(list(VmInstanceState.Running, VmInstanceState.Stopped), ",")));
                }

                if (VmInstanceState.Stopped.equals(vo.getState())) {
                    return;
                }

                // Running VM: shrinking cpu or memory is not allowed.
                // NOTE: original format string was "vm[:uuid]" with no %s, so the uuid
                // argument was silently dropped; fixed to include the placeholder.
                if (instanceOfferingVO.getCpuNum() < vo.getCpuNum()
                        || instanceOfferingVO.getMemorySize() < vo.getMemorySize()) {
                    throw new ApiMessageInterceptionException(argerr(
                            "cannot decrease capacity when vm[uuid:%s] is running", vo.getUuid()
                    ));
                }
            }
        }.execute();
    }

    /**
     * Validates a cpu/memory update: skipped entirely when neither is requested;
     * otherwise applies the same NUMA/state/scale-up rules as an offering change.
     */
    private void validate(APIUpdateVmInstanceMsg msg) {
        new SQLBatch() {
            @Override
            protected void scripts() {
                Integer cpuNum = msg.getCpuNum();
                Long memorySize = msg.getMemorySize();
                if (cpuNum == null && memorySize == null) {
                    // No capacity change requested; nothing to validate here.
                    return;
                }

                // Single load of the VM row; the original issued a second query for the
                // state of the same row, which is redundant.
                VmInstanceVO vo = Q.New(VmInstanceVO.class).eq(VmInstanceVO_.uuid, msg.getVmInstanceUuid()).find();
                VmInstanceState state = vo.getState();

                if (!VmGlobalConfig.NUMA.value(Boolean.class) && !VmInstanceState.Stopped.equals(state)) {
                    throw new ApiMessageInterceptionException(argerr(
                            "the VM cannot do online cpu/memory update because it is not of NUMA architecture. Please stop the VM then do the cpu/memory update again"
                    ));
                }

                if (!VmInstanceState.Stopped.equals(state) && !VmInstanceState.Running.equals(state)) {
                    throw new OperationFailureException(operr(
                            "The state of vm[uuid:%s] is %s. Only these state[%s] is allowed to update cpu or memory.",
                            vo.getUuid(), state,
                            StringUtils.join(list(VmInstanceState.Running, VmInstanceState.Stopped), ",")));
                }

                if (VmInstanceState.Stopped.equals(state)) {
                    return;
                }

                // Running VM: only scale-up is allowed.
                // NOTE: both format strings below originally read "vm[:uuid]" with no %s; fixed.
                if (cpuNum != null && cpuNum < vo.getCpuNum()) {
                    throw new ApiMessageInterceptionException(argerr(
                            "cannot decrease cpu of vm[uuid:%s] when it is running", vo.getUuid()
                    ));
                }

                if (memorySize != null && memorySize < vo.getMemorySize()) {
                    throw new ApiMessageInterceptionException(argerr(
                            "cannot decrease memory size of vm[uuid:%s] when it is running", vo.getUuid()
                    ));
                }
            }
        }.execute();
    }

    /** At least one of l3NetworkUuids / imageUuid must be present. */
    private void validate(APIGetInterdependentL3NetworksImagesMsg msg) {
        if (msg.getL3NetworkUuids() == null && msg.getImageUuid() == null) {
            throw new ApiMessageInterceptionException(argerr(
                    "either l3NetworkUuids or imageUuid must be set"
            ));
        }
    }

    /** An explicit host uuid overrides any cluster uuid. */
    private void validate(APIStartVmInstanceMsg msg) {
        // host uuid overrides cluster uuid
        if (msg.getHostUuid() != null) {
            msg.setClusterUuid(null);
        }
    }

    /** Start-scheduler: host overrides cluster; destroyed VMs cannot be scheduled. */
    private void validate(APICreateStartVmInstanceSchedulerMsg msg) {
        // host uuid overrides cluster uuid
        if (msg.getHostUuid() != null) {
            msg.setClusterUuid(null);
        }
        requireNotDestroyed(msg.getVmInstanceUuid());
    }

    /** Stop-scheduler: destroyed VMs cannot be scheduled. */
    private void validate(APICreateStopVmInstanceSchedulerMsg msg) {
        requireNotDestroyed(msg.getVmInstanceUuid());
    }

    /** Reboot-scheduler: destroyed VMs cannot be scheduled. */
    private void validate(APICreateRebootVmInstanceSchedulerMsg msg) {
        requireNotDestroyed(msg.getVmInstanceUuid());
    }

    /**
     * Shared check for the three scheduler validators: throws when the VM is Destroyed.
     * (The original repeated this query/check verbatim in each validator.)
     */
    private void requireNotDestroyed(String vmInstanceUuid) {
        SimpleQuery<VmInstanceVO> q = dbf.createQuery(VmInstanceVO.class);
        q.select(VmInstanceVO_.state);
        q.add(VmInstanceVO_.uuid, Op.EQ, vmInstanceUuid);
        VmInstanceState state = q.findValue();
        if (state == VmInstanceState.Destroyed) {
            throw new ApiMessageInterceptionException(operr(
                    "vm[uuid:%s] can only create scheduler when state is not Destroyed", vmInstanceUuid));
        }
    }

    /** The static IP must be a valid IPv4 address and the VM must own a nic on the L3. */
    private void validate(APISetVmStaticIpMsg msg) {
        if (!NetworkUtils.isIpv4Address(msg.getIp())) {
            throw new ApiMessageInterceptionException(argerr("%s is not a valid IPv4 address", msg.getIp()));
        }

        SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
        q.add(VmNicVO_.vmInstanceUuid, Op.EQ, msg.getVmInstanceUuid());
        q.add(VmNicVO_.l3NetworkUuid, Op.EQ, msg.getL3NetworkUuid());
        if (!q.isExists()) {
            throw new ApiMessageInterceptionException(argerr(
                    "the VM[uuid:%s] has no nic on the L3 network[uuid:%s]",
                    msg.getVmInstanceUuid(), msg.getL3NetworkUuid()));
        }
    }

    /** The VM must own a nic on the L3 network whose static IP is being deleted. */
    private void validate(APIDeleteVmStaticIpMsg msg) {
        SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
        q.add(VmNicVO_.vmInstanceUuid, Op.EQ, msg.getVmInstanceUuid());
        q.add(VmNicVO_.l3NetworkUuid, Op.EQ, msg.getL3NetworkUuid());
        if (!q.isExists()) {
            throw new ApiMessageInterceptionException(argerr(
                    "the VM[uuid:%s] has no nic on the L3 network[uuid:%s]",
                    msg.getVmInstanceUuid(), msg.getL3NetworkUuid()));
        }
    }

    /** Every entry in the requested boot order must name a known {@link VmBootDevice}. */
    private void validate(APISetVmBootOrderMsg msg) {
        if (msg.getBootOrder() != null) {
            for (String o : msg.getBootOrder()) {
                try {
                    VmBootDevice.valueOf(o);
                } catch (IllegalArgumentException e) {
                    throw new ApiMessageInterceptionException(argerr(
                            "invalid boot device[%s] in boot order%s", o, msg.getBootOrder()));
                }
            }
        }
    }

    /** A VM can have at most one ISO attached at a time. */
    private void validate(APIAttachIsoToVmInstanceMsg msg) {
        String isoUuid = new IsoOperator().getIsoUuidByVmUuid(msg.getVmInstanceUuid());
        if (isoUuid != null) {
            throw new ApiMessageInterceptionException(operr(
                    "VM[uuid:%s] already has an ISO[uuid:%s] attached", msg.getVmInstanceUuid(), isoUuid));
        }
    }

    /**
     * Attach-L3 validation: user VM only, Running/Stopped only, not already attached,
     * L3 enabled and non-system; an optional static IP must fall in an IP range and be free.
     */
    private void validate(APIAttachL3NetworkToVmMsg msg) {
        SimpleQuery<VmInstanceVO> q = dbf.createQuery(VmInstanceVO.class);
        q.select(VmInstanceVO_.type, VmInstanceVO_.state);
        q.add(VmInstanceVO_.uuid, Op.EQ, msg.getVmInstanceUuid());
        Tuple t = q.findTuple();
        String type = t.get(0, String.class);
        VmInstanceState state = t.get(1, VmInstanceState.class);

        if (!VmInstanceConstant.USER_VM_TYPE.equals(type)) {
            // BUG FIX: the original passed `type` as the %s argument, printing the VM type
            // where the uuid belongs.
            throw new ApiMessageInterceptionException(operr(
                    "unable to attach a L3 network. The vm[uuid: %s] is not a user vm", msg.getVmInstanceUuid()));
        }

        if (!VmInstanceState.Running.equals(state) && !VmInstanceState.Stopped.equals(state)) {
            // BUG FIX: the original message said "detach" in this attach path (copy-paste).
            throw new ApiMessageInterceptionException(operr(
                    "unable to attach a L3 network. The vm[uuid: %s] is not Running or Stopped; the current state is %s",
                    msg.getVmInstanceUuid(), state));
        }

        SimpleQuery<VmNicVO> nq = dbf.createQuery(VmNicVO.class);
        nq.add(VmNicVO_.l3NetworkUuid, Op.EQ, msg.getL3NetworkUuid());
        nq.add(VmNicVO_.vmInstanceUuid, Op.EQ, msg.getVmInstanceUuid());
        if (nq.isExists()) {
            throw new ApiMessageInterceptionException(operr(
                    "unable to attach a L3 network. The L3 network[uuid:%s] is already attached to the vm[uuid: %s]",
                    msg.getL3NetworkUuid(), msg.getVmInstanceUuid()));
        }

        SimpleQuery<L3NetworkVO> l3q = dbf.createQuery(L3NetworkVO.class);
        l3q.select(L3NetworkVO_.state, L3NetworkVO_.system);
        l3q.add(L3NetworkVO_.uuid, Op.EQ, msg.getL3NetworkUuid());
        t = l3q.findTuple();
        L3NetworkState l3state = t.get(0, L3NetworkState.class);
        boolean system = t.get(1, Boolean.class);

        if (l3state == L3NetworkState.Disabled) {
            throw new ApiMessageInterceptionException(operr(
                    "unable to attach a L3 network. The L3 network[uuid:%s] is disabled", msg.getL3NetworkUuid()));
        }
        if (system) {
            throw new ApiMessageInterceptionException(operr(
                    "unable to attach a L3 network. The L3 network[uuid:%s] is a system network", msg.getL3NetworkUuid()));
        }

        if (msg.getStaticIp() != null) {
            SimpleQuery<IpRangeVO> iprq = dbf.createQuery(IpRangeVO.class);
            iprq.add(IpRangeVO_.l3NetworkUuid, Op.EQ, msg.getL3NetworkUuid());
            List<IpRangeVO> iprs = iprq.list();

            boolean found = false;
            for (IpRangeVO ipr : iprs) {
                if (NetworkUtils.isIpv4InRange(msg.getStaticIp(), ipr.getStartIp(), ipr.getEndIp())) {
                    found = true;
                    break;
                }
            }

            if (!found) {
                throw new ApiMessageInterceptionException(argerr(
                        "the static IP[%s] is not in any IP range of the L3 network[uuid:%s]",
                        msg.getStaticIp(), msg.getL3NetworkUuid()));
            }

            SimpleQuery<UsedIpVO> uq = dbf.createQuery(UsedIpVO.class);
            uq.add(UsedIpVO_.l3NetworkUuid, Op.EQ, msg.getL3NetworkUuid());
            uq.add(UsedIpVO_.ip, Op.EQ, msg.getStaticIp());
            if (uq.isExists()) {
                throw new ApiMessageInterceptionException(operr(
                        "the static IP[%s] has been occupied on the L3 network[uuid:%s]",
                        msg.getStaticIp(), msg.getL3NetworkUuid()));
            }
        }
    }

    /**
     * Detach-L3 validation: resolves the owning VM from the nic uuid, requires a
     * user VM in Running/Stopped state, and back-fills vmInstanceUuid on the message.
     */
    @Transactional(readOnly = true)
    private void validate(APIDetachL3NetworkFromVmMsg msg) {
        String sql = "select vm.uuid, vm.type, vm.state from VmInstanceVO vm, VmNicVO nic where vm.uuid = nic.vmInstanceUuid and nic.uuid = :uuid";
        TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class);
        q.setParameter("uuid", msg.getVmNicUuid());
        Tuple t = q.getSingleResult();
        String vmUuid = t.get(0, String.class);
        String vmType = t.get(1, String.class);
        VmInstanceState state = t.get(2, VmInstanceState.class);

        // BUG FIX: the original error messages read msg.getVmInstanceUuid(), which is not
        // set until the end of this method; use the uuid just resolved from the nic.
        if (!VmInstanceConstant.USER_VM_TYPE.equals(vmType)) {
            throw new ApiMessageInterceptionException(operr(
                    "unable to detach a L3 network. The vm[uuid: %s] is not a user vm", vmUuid));
        }

        if (!VmInstanceState.Running.equals(state) && !VmInstanceState.Stopped.equals(state)) {
            throw new ApiMessageInterceptionException(operr(
                    "unable to detach a L3 network. The vm[uuid: %s] is not Running or Stopped; the current state is %s",
                    vmUuid, state));
        }

        msg.setVmInstanceUuid(vmUuid);
    }

    /** Returns the elements that appear more than once in {@code list} (one entry per extra occurrence). */
    private static <T> List<T> getDuplicateElements(List<T> list) {
        List<T> result = new ArrayList<T>();
        Set<T> set = new HashSet<T>();
        for (T e : list) {
            if (!set.add(e)) {
                result.add(e);
            }
        }
        return result;
    }

    /** Data volumes can only be attached while the VM is Running or Stopped. */
    private void validate(APIGetVmAttachableDataVolumeMsg msg) {
        SimpleQuery<VmInstanceVO> q = dbf.createQuery(VmInstanceVO.class);
        q.select(VmInstanceVO_.state);
        q.add(VmInstanceVO_.uuid, Op.EQ, msg.getVmInstanceUuid());
        VmInstanceState state = q.findValue();
        if (state != VmInstanceState.Stopped && state != VmInstanceState.Running) {
            throw new ApiMessageInterceptionException(operr(
                    "vm[uuid:%s] can only attach volume when state is Running or Stopped, current state is %s",
                    msg.getVmInstanceUuid(), state));
        }
    }

    /**
     * Create-VM validation: offering/image/disk-offering/L3 must all be enabled and
     * compatible, placement hints are normalized (host > cluster > zone), and a
     * default L3 is required (or inferred) for user VMs.
     */
    private void validate(APICreateVmInstanceMsg msg) {
        SimpleQuery<InstanceOfferingVO> iq = dbf.createQuery(InstanceOfferingVO.class);
        iq.select(InstanceOfferingVO_.state);
        iq.add(InstanceOfferingVO_.uuid, Op.EQ, msg.getInstanceOfferingUuid());
        InstanceOfferingState istate = iq.findValue();
        if (istate == InstanceOfferingState.Disabled) {
            throw new ApiMessageInterceptionException(operr(
                    "instance offering[uuid:%s] is Disabled, can't create vm from it", msg.getInstanceOfferingUuid()));
        }

        SimpleQuery<ImageVO> imgq = dbf.createQuery(ImageVO.class);
        imgq.select(ImageVO_.state, ImageVO_.system, ImageVO_.mediaType);
        imgq.add(ImageVO_.uuid, Op.EQ, msg.getImageUuid());
        Tuple imgt = imgq.findTuple();
        ImageState imgState = imgt.get(0, ImageState.class);
        if (imgState == ImageState.Disabled) {
            throw new ApiMessageInterceptionException(operr(
                    "image[uuid:%s] is Disabled, can't create vm from it", msg.getImageUuid()));
        }

        ImageMediaType imgFormat = imgt.get(2, ImageMediaType.class);
        if (imgFormat != ImageMediaType.RootVolumeTemplate && imgFormat != ImageMediaType.ISO) {
            throw new ApiMessageInterceptionException(argerr(
                    "image[uuid:%s] is of mediaType: %s, only RootVolumeTemplate and ISO can be used to create vm",
                    msg.getImageUuid(), imgFormat));
        }

        // An ISO boot needs an explicit root disk offering to size the root volume.
        if (imgFormat == ImageMediaType.ISO && msg.getRootDiskOfferingUuid() == null) {
            throw new ApiMessageInterceptionException(argerr(
                    "rootDiskOfferingUuid cannot be null when image mediaType is ISO"));
        }

        boolean isSystemImage = imgt.get(1, Boolean.class);
        if (isSystemImage && (msg.getType() == null || VmInstanceConstant.USER_VM_TYPE.equals(msg.getType()))) {
            throw new ApiMessageInterceptionException(argerr(
                    "image[uuid:%s] is system image, can't be used to create user vm", msg.getImageUuid()));
        }

        List<String> allDiskOfferingUuids = new ArrayList<String>();
        if (msg.getRootDiskOfferingUuid() != null) {
            allDiskOfferingUuids.add(msg.getRootDiskOfferingUuid());
        }
        if (msg.getDataDiskOfferingUuids() != null) {
            allDiskOfferingUuids.addAll(msg.getDataDiskOfferingUuids());
        }

        if (!allDiskOfferingUuids.isEmpty()) {
            SimpleQuery<DiskOfferingVO> dq = dbf.createQuery(DiskOfferingVO.class);
            dq.select(DiskOfferingVO_.uuid);
            dq.add(DiskOfferingVO_.state, Op.EQ, DiskOfferingState.Disabled);
            dq.add(DiskOfferingVO_.uuid, Op.IN, allDiskOfferingUuids);
            List<String> diskUuids = dq.listValue();
            if (!diskUuids.isEmpty()) {
                throw new ApiMessageInterceptionException(operr(
                        "disk offerings[uuids:%s] are Disabled, can not create vm from it", diskUuids));
            }
        }

        SimpleQuery<L3NetworkVO> l3q = dbf.createQuery(L3NetworkVO.class);
        l3q.select(L3NetworkVO_.uuid, L3NetworkVO_.system, L3NetworkVO_.state);

        List<String> uuids = new ArrayList<>(msg.getL3NetworkUuids());
        List<String> duplicateElements = getDuplicateElements(uuids);
        if (duplicateElements.size() > 0) {
            throw new ApiMessageInterceptionException(operr(
                    "Can't add same uuid in the l3Network,uuid: %s", duplicateElements.get(0)));
        }

        l3q.add(L3NetworkVO_.uuid, Op.IN, msg.getL3NetworkUuids());
        List<Tuple> l3ts = l3q.listTuple();
        for (Tuple t : l3ts) {
            String l3Uuid = t.get(0, String.class);
            Boolean system = t.get(1, Boolean.class);
            L3NetworkState state = t.get(2, L3NetworkState.class);
            if (state != L3NetworkState.Enabled) {
                throw new ApiMessageInterceptionException(operr(
                        "l3Network[uuid:%s] is Disabled, can not create vm on it", l3Uuid));
            }
            if (system && (msg.getType() == null || VmInstanceConstant.USER_VM_TYPE.equals(msg.getType()))) {
                throw new ApiMessageInterceptionException(operr(
                        "l3Network[uuid:%s] is system network, can not create user vm on it", l3Uuid));
            }
        }

        // smaller taking precedence: a more specific placement hint clears the broader ones
        if (msg.getHostUuid() != null) {
            msg.setClusterUuid(null);
            msg.setZoneUuid(null);
        } else if (msg.getClusterUuid() != null) {
            msg.setZoneUuid(null);
        }

        if (msg.getZoneUuid() != null) {
            SimpleQuery<ZoneVO> zq = dbf.createQuery(ZoneVO.class);
            zq.select(ZoneVO_.state);
            zq.add(ZoneVO_.uuid, Op.EQ, msg.getZoneUuid());
            ZoneState zoneState = zq.findValue();
            if (zoneState == ZoneState.Disabled) {
                throw new ApiMessageInterceptionException(operr(
                        "zone[uuid:%s] is specified but it's Disabled, can not create vm from it", msg.getZoneUuid()));
            }
        }

        if (msg.getClusterUuid() != null) {
            SimpleQuery<ClusterVO> cq = dbf.createQuery(ClusterVO.class);
            cq.select(ClusterVO_.state);
            cq.add(ClusterVO_.uuid, Op.EQ, msg.getClusterUuid());
            ClusterState clusterState = cq.findValue();
            if (clusterState == ClusterState.Disabled) {
                throw new ApiMessageInterceptionException(operr(
                        "cluster[uuid:%s] is specified but it's Disabled, can not create vm from it", msg.getClusterUuid()));
            }
        }

        if (msg.getHostUuid() != null) {
            SimpleQuery<HostVO> hq = dbf.createQuery(HostVO.class);
            hq.select(HostVO_.state, HostVO_.status);
            hq.add(HostVO_.uuid, Op.EQ, msg.getHostUuid());
            Tuple t = hq.findTuple();
            HostState hostState = t.get(0, HostState.class);
            if (hostState == HostState.Disabled) {
                throw new ApiMessageInterceptionException(operr(
                        "host[uuid:%s] is specified but it's Disabled, can not create vm from it", msg.getHostUuid()));
            }
            HostStatus connectionState = t.get(1, HostStatus.class);
            if (connectionState != HostStatus.Connected) {
                throw new ApiMessageInterceptionException(operr(
                        "host[uuid:%s] is specified but it's connection status is %s, can not create vm from it",
                        msg.getHostUuid(), connectionState));
            }
        }

        if (msg.getType() == null) {
            msg.setType(VmInstanceConstant.USER_VM_TYPE);
        }

        if (VmInstanceConstant.USER_VM_TYPE.equals(msg.getType())) {
            if (msg.getDefaultL3NetworkUuid() == null && msg.getL3NetworkUuids().size() != 1) {
                throw new ApiMessageInterceptionException(argerr(
                        "there are more than one L3 network specified in l3NetworkUuids, but defaultL3NetworkUuid is null"));
            } else if (msg.getDefaultL3NetworkUuid() == null && msg.getL3NetworkUuids().size() == 1) {
                msg.setDefaultL3NetworkUuid(msg.getL3NetworkUuids().get(0));
            } else if (msg.getDefaultL3NetworkUuid() != null
                    && !msg.getL3NetworkUuids().contains(msg.getDefaultL3NetworkUuid())) {
                throw new ApiMessageInterceptionException(argerr(
                        "defaultL3NetworkUuid[uuid:%s] is not in l3NetworkUuids%s",
                        msg.getDefaultL3NetworkUuid(), msg.getL3NetworkUuids()));
            }
        }
    }

    /**
     * Destroying a non-existent VM short-circuits: publish the success event and
     * stop routing instead of failing.
     */
    private void validate(APIDestroyVmInstanceMsg msg) {
        if (!dbf.isExist(msg.getUuid(), VmInstanceVO.class)) {
            APIDestroyVmInstanceEvent evt = new APIDestroyVmInstanceEvent(msg.getId());
            bus.publish(evt);
            throw new StopRoutingException();
        }
    }

    /** Rejects console passwords with the known-problematic "password" prefix. */
    private void validate(APISetVmConsolePasswordMsg msg) {
        String pwd = msg.getConsolePassword();
        if (pwd.startsWith("password")) {
            throw new ApiMessageInterceptionException(argerr(
                    "The console password cannot start with 'password' which may trigger a VNC security issue"));
        }
    }
}
package client;

import java.io.*;
import java.net.*;
import java.util.*;

/**
 * Interactive file-transfer client. Connects to a server on port 2004 (by IP or
 * DNS name), then runs a console loop supporting "ls", "send &lt;file&gt;" and
 * arbitrary messages. Files are exchanged as whole byte arrays over Java object
 * streams; protocol phases are coordinated with the sentinel message constants below.
 */
public class Requester {

    /*============================= Variables =============================*/

    Socket requestSocket;
    ObjectOutputStream out;
    ObjectInputStream in;
    String message = "";
    String ipaddress;
    InetAddress inetAddress;
    Scanner scanner;

    // for sending files
    FileInputStream fis;

    // to receive files
    FileOutputStream fos;

    // when client receives server finished message, client can send message to server
    static final String SERVER_FINISHED_MSG = "_server+finished_";
    static final String SERVER_SENDING_FILE_MSG = "_server+sending+file_";
    static final String CLIENT_SENDING_FILE_MSG = "_client+sending+file_";

    StringBuilder fileListSB = new StringBuilder();
    StringBuilder directoryListSB = new StringBuilder();

    String[] clientInput;
    int inputLength = 0;

    /*============================= Methods =============================*/

    /*============================= main() =============================*/
    // the main method
    public static void main(String[] args) {
        Requester client = new Requester();
        client.run();
    } // main()

    /*============================= sendFile() =============================*/

    /**
     * Sends {@code file} to the server as a single byte array on {@code out}.
     * The whole file is buffered in memory, sized via {@code fis.available()},
     * so very large files are not supported (see inline note).
     */
    void sendFile(File file) {
        int bytesRead = 0;
        try {
            // create a file input stream for the file being sent
            fis = new FileInputStream(file);

            // create a byte array to hold the bytes of the file
            byte[] fileBytes = new byte[fis.available()]; // can't handle files bigger then 1GB

            // track the number of bytes read
            bytesRead = fis.read(fileBytes);
            System.out.println("Client > bytes read: " + bytesRead);

            // send the byte array to client
            out.writeObject(fileBytes);
        } catch (FileNotFoundException e) {
            System.out.println("Client > ERROR, File not found!");
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: fis is null when the FileNotFoundException path was taken;
            // the original unconditionally called fis.close() and threw an NPE here.
            if (fis != null) {
                try {
                    fis.close();
                } catch (IOException e) {
                    e.printStackTrace();
                } // try catch
            }
        } // try catch
    } // sendFile()

    /*============================= receiveFile() =============================*/

    /**
     * Receives one file from the server: reads a byte array off {@code in} and
     * writes it to {@code fileName} in the working directory. {@code fileSize}
     * is accepted for protocol symmetry but the transfer is driven entirely by
     * the received array's length.
     */
    void receiveFile(String fileName, int fileSize) {
        try {
            // receive the array of bytes that make up the file
            byte[] fileBytes = (byte[]) in.readObject();

            // create a file output stream
            fos = new FileOutputStream(fileName);

            // write the received file bytes to the file
            fos.write(fileBytes);
        } catch (FileNotFoundException e) {
            System.err.println("ERROR, File not found!");
        } catch (IOException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            System.err.println("data received in unknown format");
        } finally {
            // BUG FIX: fos is null when readObject failed or the file could not be
            // created; guard the close to avoid a secondary NPE.
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                } // try catch
            }
        } // try catch
    } // receiveFile()

    /*============================= sendMessage() =============================*/

    /** Writes a single String to the server and flushes the stream. */
    void sendMessage(String msg) {
        try {
            out.writeObject(msg);
            out.flush();
        } catch (IOException ioException) {
            ioException.printStackTrace();
        } // try catch
    } // sendMessage()

    /*============================= run() =============================*/

    /**
     * Main client loop: prompts for an IP address or domain name, connects,
     * then alternates between reading server messages and sending user commands
     * until the user sends "bye" or the connection drops.
     */
    public void run() {
        scanner = new Scanner(System.in);
        boolean canSendMessage = false;
        boolean clientIsFinished = true;
        int menuChoice = 0;

        System.out.println("For controls on how to use the Server/Client, see README.");

        try {
            // ----- choose how to resolve the server address -----
            do {
                System.out.println("\n1.) Type IP Address.");
                System.out.println("2.) Type Domain Name To Perform DNS Lookup On. (May Crash If TimeOut Occurs!)");
                System.out.print("\nEnter Option: ");

                // make sure user enters a number
                while (!scanner.hasNextInt()) {
                    System.out.print("Enter Option: ");
                    scanner.next(); // to advance Scanner past input
                } // while

                menuChoice = scanner.nextInt();
            } while (menuChoice < 1 || menuChoice > 2); // do while

            switch (menuChoice) {
                case 1: // type IP address in
                    System.out.print("Client > Please Enter your IP Address: ");
                    ipaddress = scanner.next();
                    scanner.nextLine(); // flush buffer
                    break;
                case 2: // type domain name for DNS Lookup
                    String domain = "";
                    scanner.nextLine(); // flush buffer
                    System.out.print("Client > Please Enter Domain Name For DNS Lookup: ");
                    domain = scanner.nextLine();
                    System.out.println("Client > Performing DNS Lookup On '" + domain + "'.");
                    // perform DNS lookup on domain
                    inetAddress = InetAddress.getByName(domain);
                    ipaddress = inetAddress.getHostAddress();
                    break;
            } // switch

            // ----- connect and open object streams (output first, then input) -----
            requestSocket = new Socket(ipaddress, 2004);
            System.out.println("Connected to " + ipaddress + " in port 2004");
            out = new ObjectOutputStream(requestSocket.getOutputStream());
            out.flush();
            in = new ObjectInputStream(requestSocket.getInputStream());

            // ----- command/response loop -----
            do {
                try {
                    // if the client is finished, wait for the next server message
                    if (clientIsFinished) {
                        message = (String) in.readObject();
                    } // if

                    // only show message from server if it's not the server-finished sentinel
                    if (!message.equals(SERVER_FINISHED_MSG)) {
                        System.out.println("Server > " + message);
                    } // if

                    // server finished: it is now the client's turn to talk
                    if (message.equals(SERVER_FINISHED_MSG)) {
                        canSendMessage = true;
                        clientIsFinished = false;
                        message = "";
                    } // if

                    // server announced an incoming file: name, size, then the bytes
                    if (message.equals(SERVER_SENDING_FILE_MSG)) {
                        String fileName = "";
                        int fileSize = 0;
                        fileName = (String) in.readObject();
                        fileSize = Integer.parseInt((String) in.readObject());
                        receiveFile(fileName, fileSize);
                        System.out.println("Client > File recieved!");
                    } // if

                    // it's the client's turn to issue a command
                    if (canSendMessage || !clientIsFinished) {
                        System.out.print("Client > ");
                        message = scanner.nextLine();

                        // client is finished unless stated otherwise later
                        clientIsFinished = true;

                        // split into command + optional single argument
                        clientInput = message.split(" ");
                        inputLength = clientInput.length;

                        // only accept 1 or 2 tokens and a non-empty command
                        if (!(inputLength > 2 || inputLength < 1) && !(clientInput[0].equals(""))) {
                            switch (clientInput[0]) { // get command
                                case "ls":
                                    // list the files on the client's PC, then ask the server to do the same
                                    File currentDirectory = new File(".");
                                    File[] listOfFiles = currentDirectory.listFiles();

                                    fileListSB.append("Files:");
                                    directoryListSB.append("Directories:");

                                    for (int i = 0; i < listOfFiles.length; i++) {
                                        if (listOfFiles[i].isFile()) {
                                            fileListSB.append(" ").append(listOfFiles[i].getName());
                                        } else if (listOfFiles[i].isDirectory()) {
                                            directoryListSB.append(" ").append(listOfFiles[i].getName());
                                        } // if
                                    } // for

                                    System.out.println("Client > Client's Files.");
                                    System.out.println("Client > " + directoryListSB.toString());
                                    System.out.println("Client > " + fileListSB.toString());

                                    // forward the command so the server lists its side too
                                    sendMessage(message);

                                    // clear string builders for the next "ls"
                                    directoryListSB.setLength(0);
                                    fileListSB.setLength(0);

                                    clientIsFinished = true;
                                    canSendMessage = false; // wait for the server to finish
                                    break;
                                case "send":
                                    // send a file to the server; requires exactly one file-name argument
                                    if (clientInput.length == 2) {
                                        File file = new File(clientInput[1]);

                                        if (file.isDirectory()) {
                                            System.out.println("Client > " + clientInput[1] + " is a Directory! Cannot Get!");
                                            clientIsFinished = false; // action failed; keep the turn
                                            break;
                                        } // if

                                        if (!file.exists()) {
                                            System.out.println("Client > Sorry, file does not exist!");
                                            clientIsFinished = false; // action failed; keep the turn
                                            break;
                                        } else {
                                            System.out.println("Client > File Found!");
                                            // announce transfer, then name, size and bytes
                                            sendMessage(CLIENT_SENDING_FILE_MSG);
                                            sendMessage(file.getName());
                                            sendMessage(String.valueOf(file.length()));
                                            sendFile(file);
                                            System.out.println("Client > File sent!");
                                            clientIsFinished = true;
                                            canSendMessage = false; // wait for server to finish
                                        } // if
                                    } else {
                                        System.out.println("Client > ERROR, Command Get must be followed by a file name that is in the current directory!");
                                    } // if
                                    break;
                                default:
                                    // not a local command: forward the raw message to the server
                                    if (clientIsFinished) {
                                        sendMessage(message);
                                        canSendMessage = false; // can't send again until server says so
                                    } // if
                                    break;
                            } // switch
                        } // if
                    } // if
                } catch (ClassNotFoundException classNot) {
                    System.err.println("data received in unknown format");
                } catch (EOFException e) {
                    System.err.println("Connection To Server Was Lost!");
                    // Closing connection
                    try {
                        in.close();
                        out.close();
                        requestSocket.close();
                    } catch (IOException ioException) {
                        ioException.printStackTrace();
                    } // try catch
                    // make message "bye" to exit the while loop
                    message = "bye";
                } // try catch
            } while (!message.equals("bye")); // do while
        } catch (UnknownHostException unknownHost) {
            System.err.println("You are trying to connect to an unknown host!");
        } catch (ConnectException e) {
            System.err.println("Client > Connection Timed Out!");
        } catch (EOFException e) {
            System.err.println("Connection To Server Was Lost!");
        } catch (IOException ioException) {
            ioException.printStackTrace();
        } finally {
            // Closing connection.
            // BUG FIX: in/out/requestSocket are null when the connection was never
            // established (unknown host, timeout); the original NPE'd here and hid
            // the real error. Guard each close individually.
            try {
                if (in != null) {
                    in.close();
                }
                if (out != null) {
                    out.close();
                }
                if (requestSocket != null) {
                    requestSocket.close();
                }
                System.out.println("Client > Program Exiting.");
            } catch (IOException ioException) {
                ioException.printStackTrace();
            } // try catch
        } // try catch
    } // run()
} // class
/* * Copyright (c) 2016. Bottle Rocket LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.bottlerocketstudios.groundcontrol.executor; import android.os.SystemClock; import android.util.Log; import com.bottlerocketstudios.groundcontrol.inactivity.InactivityCleanupListener; import com.bottlerocketstudios.groundcontrol.inactivity.InactivityCleanupRunnable; import com.bottlerocketstudios.groundcontrol.inactivity.StandardInactivityCleanupRunnable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; /** * Standard implementation of the {@link PriorityQueueingPoolExecutorService}. This will execute queued * jobs based on their priority and discard overdue jobs. 
 */
public class StandardPriorityQueueingPoolExecutorService implements PriorityQueueingPoolExecutorService, InactivityCleanupListener {

    // Orders queued jobs by priority, breaking ties by job id (comparator declared elsewhere in the project).
    private static final JobPriorityAndIdComparator JOB_PRIORITY_COMPARATOR = new JobPriorityAndIdComparator();
    // Initial capacity hint for the PriorityBlockingQueue; the queue itself is unbounded.
    private static final int DEFAULT_QUEUE_SIZE = 10;

    // Cap on concurrently running prioritized jobs (IMMEDIATE jobs bypass the check in enqueue()).
    private final int mMaxSimultaneousJobs;
    // Pending jobs, dequeued in priority order by processQueue().
    private final PriorityBlockingQueue<Job> mJobQueue;
    // Jobs currently submitted to the executor; synchronized wrapper, but iteration still requires
    // explicit synchronized blocks (see performCleanup/updateJobPriority).
    private final List<RunningJob> mRunningJobs;
    private final boolean mEnableLogging;
    private final ThreadFactory mThreadFactory;
    private final String mLogTag;
    // Queue size above which cleanup switches to its high-speed polling interval.
    private final int mHighSpeedQueueThreshold;
    // Source of monotonically increasing job ids (see getNextJobId()).
    private final IdSequence mIdSequence;
    // Periodic runnable that drives performCleanup() and idle transitions.
    private final InactivityCleanupRunnable mJobCleanupRunnable;
    // Linux OS thread priority applied to pool threads (android.os.Process values).
    private final int mOsThreadPriority;

    // Lazily created; released when enterIdleState() shuts it down, recreated on next executeJob().
    private ExecutorService mExecutorService;
    private boolean mIdle;

    /**
     * Enforce Builder usage.
     */
    private StandardPriorityQueueingPoolExecutorService(Builder builder) {
        mMaxSimultaneousJobs = builder.maxSimultaneousJobs;
        mJobQueue = new PriorityBlockingQueue<>(DEFAULT_QUEUE_SIZE, JOB_PRIORITY_COMPARATOR);
        mRunningJobs = Collections.synchronizedList(new ArrayList<RunningJob>());
        mJobCleanupRunnable = builder.jobCleanupRunnable;
        mEnableLogging = builder.enableLogging;
        mThreadFactory = builder.threadFactory;
        mLogTag = builder.logTag;
        mHighSpeedQueueThreshold = builder.highSpeedQueueThreshold;
        mOsThreadPriority = builder.osThreadPriority;
        mIdSequence = new IdSequence();
    }

    /**
     * Queues each job for prioritized execution. IMMEDIATE-priority jobs are executed right away,
     * bypassing both the queue and the max-simultaneous-jobs limit; all others are offered to the
     * priority queue and drained by {@link #processQueue()}.
     */
    @Override
    public void enqueue(List<Job> jobList) {
        for (Job job: jobList) {
            if (mEnableLogging) Log.i(mLogTag, "Job entered Queue " + job.toString());
            if (job.getPriority().equals(JobPriority.IMMEDIATE)) {
                if (mEnableLogging) Log.i(mLogTag, "Executing immediate priority work " + job.toString());
                executeJob(job);
            } else {
                if (mEnableLogging) Log.i(mLogTag, "Queueing job " + job.toString());
                mJobQueue.offer(job);
            }
        }
        processQueue();
    }

    /** Varargs convenience overload; delegates to {@link #enqueue(List)}. */
    @Override
    public void enqueue(Job ... jobList) {
        enqueue(Arrays.asList(jobList));
    }

    /**
     * Drains the queue into the executor while capacity remains, then toggles the cleanup
     * runnable's high-speed mode based on the remaining queue depth.
     */
    private void processQueue() {
        synchronized (mJobQueue) {
            while (mJobQueue.size() > 0 && mRunningJobs.size() < mMaxSimultaneousJobs) {
                Job job = mJobQueue.remove();
                executeJob(job);
            }
        }
        // NOTE(review): queue size is re-read outside the synchronized block here; this only
        // affects when high-speed mode toggles, not correctness of job execution.
        if (mJobQueue.size() > mHighSpeedQueueThreshold && !mJobCleanupRunnable.isHighSpeedMode()) {
            if (mEnableLogging) Log.i(mLogTag, "The job queue is very large, entering high speed queue processing mode.");
            mJobCleanupRunnable.enterHighSpeedMode();
        } else if (mJobQueue.size() < mHighSpeedQueueThreshold && mJobCleanupRunnable.isHighSpeedMode()) {
            if (mEnableLogging) Log.i(mLogTag, "Exiting high speed queue processing mode.");
            mJobCleanupRunnable.exitHighSpeedMode();
        }
    }

    /**
     * Start job running on the ExecutorService. Clears the idle flag, restarts the inactivity
     * timer, records the job as running with its start time, and notifies the job it was executed.
     */
    private void executeJob(Job job) {
        if (mEnableLogging) Log.i(mLogTag, "Executing job " + job.toString());
        mIdle = false;
        mJobCleanupRunnable.restartTimer();
        mRunningJobs.add(new RunningJob(job, getExecutorService().submit(job.getRunnable()), getTime()));
        job.notifyJobExecuted();
    }

    // Monotonic clock (not wall time) so overdue checks are immune to clock changes.
    private long getTime() {
        return SystemClock.uptimeMillis();
    }

    @Override
    public boolean isBusy() {
        return hasRunningJobs();
    }

    /**
     * Marks the service idle and shuts down the executor, releasing its threads.
     * The executor is lazily recreated by {@link #getExecutorService()} when new work arrives.
     */
    @Override
    public void enterIdleState() {
        mIdle = true;
        if(mExecutorService != null) {
            if (mEnableLogging) Log.i(mLogTag, "Entering idle state");
            mExecutorService.shutdown();
            mExecutorService = null;
        }
    }

    /**
     * Removes completed jobs from the running list, cancels and removes jobs past their execution
     * time limit, then refills the executor from the queue with any freed capacity.
     */
    @Override
    public void performCleanup() {
        synchronized (mRunningJobs) {
            for (Iterator<RunningJob> runningJobIterator = mRunningJobs.iterator(); runningJobIterator.hasNext(); ) {
                RunningJob runningJob = runningJobIterator.next();
                long now = getTime();
                if (runningJob.isComplete()) {
                    if (mEnableLogging) Log.i(mLogTag, "Cleaning up completed job " + runningJob.toString());
                    runningJobIterator.remove();
                } else if (runningJob.isPastExecutionTimeLimit(now)) {
                    // Always logged (not gated on mEnableLogging): an overdue kill is a warning condition.
                    Log.w(mLogTag, "Killing overdue job " + runningJob.toString());
                    runningJob.cancel(true);
                    runningJobIterator.remove();
                }
            }
        }
        processQueue();
    }

    /**
     * Lazy load ExecutorService and reinitialize if we have been idled.
     */
    private ExecutorService getExecutorService() {
        if (mExecutorService == null) {
            if (mEnableLogging) Log.i(mLogTag, "Creating ExecutorService");
            mExecutorService = ThreadPoolExecutorWithExceptions.newCachedThreadPool(mThreadFactory, mOsThreadPriority);
        }
        return mExecutorService;
    }

    @Override
    public boolean isIdle() {
        return mIdle;
    }

    @Override
    public long getNextJobId() {
        return mIdSequence.getNext();
    }

    // "Running" here means running OR queued: busy until both collections are empty.
    @Override
    public boolean hasRunningJobs() {
        return !(mRunningJobs.size() == 0 && mJobQueue.size() == 0);
    }

    /**
     * Changes the priority of a queued job. No-op if the job is already running or not found.
     * Raising a queued job to IMMEDIATE removes it from the queue and executes it at once.
     */
    @Override
    public void updateJobPriority(long jobId, JobPriority priority) {
        synchronized (mRunningJobs) {
            for (RunningJob runningJob : mRunningJobs) {
                //First determine if job is already running and skip it if it is.
                if (runningJob.getJobId() == jobId) {
                    return;
                }
            }
        }
        synchronized (mJobQueue) {
            for (Iterator<Job> jobIterator = mJobQueue.iterator(); jobIterator.hasNext(); ) {
                Job job = jobIterator.next();
                if (job.getId() == jobId) {
                    //Found it in the queue.
                    if (priority.equals(JobPriority.IMMEDIATE)) {
                        //If it is immediate priority, remove from queue and execute.
                        jobIterator.remove();
                        executeJob(job);
                    } else {
                        //Otherwise, just change the priority. It will dequeue appropriately.
                        job.setPriority(priority);
                    }
                    return;
                }
            }
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Builder for {@link StandardPriorityQueueingPoolExecutorService}; all settings are optional. */
    public static class Builder {
        private static final long DEFAULT_INACTIVITY_IDLE_MS = TimeUnit.MINUTES.toMillis(2);
        private static final long DEFAULT_NORMAL_CLEANUP_INTERVAL_MS = 100;
        private static final long DEFAULT_HIGH_SPEED_CLEANUP_INTERVAL_MS = 5;
        private static final int DEFAULT_MAX_SIMULTANEOUS_JOBS = 10;
        private static final int DEFAULT_HIGH_SPEED_QUEUE_THRESHOLD = 20;
        private static final int DEFAULT_OS_THREAD_PRIORITY = android.os.Process.THREAD_PRIORITY_BACKGROUND;

        private InactivityCleanupRunnable jobCleanupRunnable;
        private int maxSimultaneousJobs;
        // Boxed types so "unset" (null) is distinguishable from an explicit false/0.
        private Boolean enableLogging;
        private ThreadFactory threadFactory;
        private String logTag;
        private int highSpeedQueueThreshold;
        private Integer osThreadPriority;

        /**
         * Applies defaults for any unset field, creates the service, and links it to the
         * cleanup runnable as its listener.
         */
        public PriorityQueueingPoolExecutorService build() {
            //Configure defaults if unspecified
            if (logTag == null) {
                logTag = StandardPriorityQueueingPoolExecutorService.class.getSimpleName();
            }
            if (enableLogging == null) {
                enableLogging = false;
            }
            if (enableLogging) {
                Log.w(logTag, "Logging is enabled. This will reduce performance.");
            }
            if (jobCleanupRunnable == null) {
                setJobCleanupRunnable(new StandardInactivityCleanupRunnable(DEFAULT_INACTIVITY_IDLE_MS, DEFAULT_NORMAL_CLEANUP_INTERVAL_MS, DEFAULT_HIGH_SPEED_CLEANUP_INTERVAL_MS));
            }
            if (maxSimultaneousJobs <= 0) {
                setMaxSimultaneousJobs(DEFAULT_MAX_SIMULTANEOUS_JOBS);
            }
            if (threadFactory == null) {
                setThreadFactory(Executors.defaultThreadFactory());
            }
            if (highSpeedQueueThreshold <= 0) {
                setHighSpeedQueueThreshold(DEFAULT_HIGH_SPEED_QUEUE_THRESHOLD);
            }
            if (osThreadPriority == null) {
                setOsThreadPriority(DEFAULT_OS_THREAD_PRIORITY);
            }
            //Finish creation and linking.
            StandardPriorityQueueingPoolExecutorService priorityQueueingPoolExecutorService = new StandardPriorityQueueingPoolExecutorService(this);
            jobCleanupRunnable.setListener(priorityQueueingPoolExecutorService);
            return priorityQueueingPoolExecutorService;
        }

        /**
         * Runnable that will handle notifying the service to cleanup expired items and enter idle state.
         */
        public Builder setJobCleanupRunnable(InactivityCleanupRunnable jobCleanupRunnable) {
            this.jobCleanupRunnable = jobCleanupRunnable;
            return this;
        }

        /**
         * Maximum number of prioritized jobs to execute simultaneously. Immediate priority jobs will
         * be executed immediately, but will count toward the total when scheduling the next job.
         */
        public Builder setMaxSimultaneousJobs(int maxSimultaneousJobs) {
            this.maxSimultaneousJobs = maxSimultaneousJobs;
            return this;
        }

        /**
         * Enable logging for debugging purposes.
         */
        public Builder setEnableLogging(boolean enableLogging) {
            this.enableLogging = enableLogging;
            return this;
        }

        /**
         * Provide a thread factory to use for threads executed by this service.
         */
        public Builder setThreadFactory(ThreadFactory threadFactory) {
            this.threadFactory = threadFactory;
            return this;
        }

        /**
         * Set the log tag to be used with this service if logging is enabled.
         */
        public Builder setLogTag(String logTag) {
            this.logTag = logTag;
            return this;
        }

        /**
         * Set the threshold size of the queue at which the queue will be processed at the InactivityCleanupRunnable high speed interval.
         */
        public Builder setHighSpeedQueueThreshold(int highSpeedQueueThreshold) {
            this.highSpeedQueueThreshold = highSpeedQueueThreshold;
            return this;
        }

        /**
         * Set the Linux OS Thread priority that thread pool executions will run with.
         * Use priority values from android.os.Process.
         */
        public Builder setOsThreadPriority(int osThreadPriority) {
            this.osThreadPriority = osThreadPriority;
            return this;
        }
    }
}
/* * Copyright 2010 Henry Coles * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package org.pitest.mutationtest.engine.gregor.config; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.pitest.functional.F; import org.pitest.functional.FCollection; import org.pitest.functional.prelude.Prelude; import org.pitest.mutationtest.engine.gregor.MethodMutatorFactory; import org.pitest.mutationtest.engine.gregor.mutators.ConditionalsBoundaryMutator; import org.pitest.mutationtest.engine.gregor.mutators.ConstructorCallMutator; import org.pitest.mutationtest.engine.gregor.mutators.IncrementsMutator; import org.pitest.mutationtest.engine.gregor.mutators.InlineConstantMutator; import org.pitest.mutationtest.engine.gregor.mutators.InvertNegsMutator; import org.pitest.mutationtest.engine.gregor.mutators.MathMutator; import org.pitest.mutationtest.engine.gregor.mutators.NegateConditionalsMutator; import org.pitest.mutationtest.engine.gregor.mutators.NonVoidMethodCallMutator; import org.pitest.mutationtest.engine.gregor.mutators.RemoveConditionalMutator; import org.pitest.mutationtest.engine.gregor.mutators.ReturnValsMutator; import org.pitest.mutationtest.engine.gregor.mutators.VoidMethodCallMutator; import 
org.pitest.mutationtest.engine.gregor.mutators.experimental.RemoveIncrementsMutator;
import org.pitest.mutationtest.engine.gregor.mutators.experimental.RemoveSwitchMutator;
import org.pitest.mutationtest.engine.gregor.mutators.experimental.SwitchMutator;

/**
 * Static registry mapping mutator names (and group names such as DEFAULTS, STRONGER, ALL)
 * to the {@link MethodMutatorFactory} instances they represent. Registration order is
 * preserved by the backing LinkedHashMap.
 */
public final class Mutator {

  // Insertion-ordered registry; populated once by the static initializer below.
  private final static Map<String, Iterable<MethodMutatorFactory>> mutators = new LinkedHashMap<String, Iterable<MethodMutatorFactory>>();

  static {
    /**
     * Default mutator that inverts the negation of integer and floating point
     * numbers.
     */
    add("INVERT_NEGS", InvertNegsMutator.INVERT_NEGS_MUTATOR);

    /**
     * Default mutator that mutates the return values of methods.
     */
    add("RETURN_VALS", ReturnValsMutator.RETURN_VALS_MUTATOR);

    /**
     * Optional mutator that mutates integer and floating point inline
     * constants.
     */
    add("INLINE_CONSTS", new InlineConstantMutator());

    /**
     * Default mutator that mutates binary arithmetic operations.
     */
    add("MATH", MathMutator.MATH_MUTATOR);

    /**
     * Default mutator that removes method calls to void methods.
     *
     */
    add("VOID_METHOD_CALLS", VoidMethodCallMutator.VOID_METHOD_CALL_MUTATOR);

    /**
     * Default mutator that negates conditionals.
     */
    add("NEGATE_CONDITIONALS", NegateConditionalsMutator.NEGATE_CONDITIONALS_MUTATOR);

    /**
     * Default mutator that replaces the relational operators with their
     * boundary counterpart.
     */
    add("CONDITIONALS_BOUNDARY", ConditionalsBoundaryMutator.CONDITIONALS_BOUNDARY_MUTATOR);

    /**
     * Default mutator that mutates increments, decrements and assignment
     * increments and decrements of local variables.
     */
    add("INCREMENTS", IncrementsMutator.INCREMENTS_MUTATOR);

    /**
     * Optional mutator that removes local variable increments.
     */
    add("REMOVE_INCREMENTS", RemoveIncrementsMutator.REMOVE_INCREMENTS_MUTATOR);

    /**
     * Optional mutator that removes method calls to non void methods.
     */
    add("NON_VOID_METHOD_CALLS", NonVoidMethodCallMutator.NON_VOID_METHOD_CALL_MUTATOR);

    /**
     * Optional mutator that replaces constructor calls with null values.
     */
    add("CONSTRUCTOR_CALLS", ConstructorCallMutator.CONSTRUCTOR_CALL_MUTATOR);

    /**
     * Removes conditional statements so that guarded statements always execute
     */
    add("REMOVE_CONDITIONALS", RemoveConditionalMutator.REMOVE_CONDITIONALS_MUTATOR);

    /**
     * Experimental mutator that removed assignments to member variables.
     */
    add("EXPERIMENTAL_MEMBER_VARIABLE",
        new org.pitest.mutationtest.engine.gregor.mutators.experimental.MemberVariableMutator());

    add("EXPERIMENTAL_SWITCH",
        new org.pitest.mutationtest.engine.gregor.mutators.experimental.SwitchMutator());

    addGroup("REMOVE_SWITCH", RemoveSwitchMutator.makeMutators());
    addGroup("DEFAULTS", defaults());
    addGroup("STRONGER", stronger());
    // NOTE: all() snapshots whatever keys are registered at this point, so the
    // ALL group is resolved through the group names added above; registration
    // order here is significant.
    addGroup("ALL", all());
  }

  /** Returns the de-duplicated union of every registered mutator and group. */
  public static Collection<MethodMutatorFactory> all() {
    return fromStrings(mutators.keySet());
  }

  // DEFAULTS plus REMOVE_CONDITIONALS and the experimental switch mutator.
  private static Collection<MethodMutatorFactory> stronger() {
    return combine(defaults(),group(RemoveConditionalMutator.REMOVE_CONDITIONALS_MUTATOR, new SwitchMutator()));
  }

  // Concatenates two mutator collections into a fresh list (duplicates allowed here;
  // fromStrings() de-duplicates by id when groups are resolved).
  private static Collection<MethodMutatorFactory> combine(
      Collection<MethodMutatorFactory> a, Collection<MethodMutatorFactory> b) {
    List<MethodMutatorFactory> l = new ArrayList<MethodMutatorFactory>(a);
    l.addAll(b);
    return l;
  }

  /**
   * Default set of mutators - designed to provide balance between strength and
   * performance
   */
  public static Collection<MethodMutatorFactory> defaults() {
    return group(InvertNegsMutator.INVERT_NEGS_MUTATOR,
        ReturnValsMutator.RETURN_VALS_MUTATOR, MathMutator.MATH_MUTATOR,
        VoidMethodCallMutator.VOID_METHOD_CALL_MUTATOR,
        NegateConditionalsMutator.NEGATE_CONDITIONALS_MUTATOR,
        ConditionalsBoundaryMutator.CONDITIONALS_BOUNDARY_MUTATOR,
        IncrementsMutator.INCREMENTS_MUTATOR);
  }

  private static Collection<MethodMutatorFactory> group(
      final MethodMutatorFactory... ms) {
    return Arrays.asList(ms);
  }

  /** Resolves a single registered name (mutator or group) to its factories. */
  public static Collection<MethodMutatorFactory> byName(final String name) {
    return FCollection.map(mutators.get(name),
        Prelude.id(MethodMutatorFactory.class));
  }

  // Registers a single mutator under the given key.
  private static void add(final String key, final MethodMutatorFactory value) {
    mutators.put(key, Collections.singleton(value));
  }

  // Registers a named group of mutators under the given key.
  private static void addGroup(final String key,
      final Iterable<MethodMutatorFactory> value) {
    mutators.put(key, value);
  }

  /**
   * Resolves a collection of registered names to the union of their factories,
   * de-duplicated and ordered by globally unique id.
   */
  public static Collection<MethodMutatorFactory> fromStrings(
      final Collection<String> names) {
    final Set<MethodMutatorFactory> unique = new TreeSet<MethodMutatorFactory>(
        compareId());
    FCollection.flatMapTo(names, fromString(), unique);
    return unique;
  }

  // Comparator by globally unique id so the TreeSet above de-duplicates factories.
  private static Comparator<? super MethodMutatorFactory> compareId() {
    return new Comparator<MethodMutatorFactory>() {
      public int compare(final MethodMutatorFactory o1,
          final MethodMutatorFactory o2) {
        return o1.getGloballyUniqueId().compareTo(o2.getGloballyUniqueId());
      }
    };
  }

  // Lookup function used by fromStrings(); returns null for unknown names.
  private static F<String, Iterable<MethodMutatorFactory>> fromString() {
    return new F<String, Iterable<MethodMutatorFactory>>() {
      public Iterable<MethodMutatorFactory> apply(final String a) {
        return mutators.get(a);
      }
    };
  }

}
package de.halfbit.tinybus.impl;

import junit.framework.TestCase;

import de.greenrobot.event.EventBus;
import de.halfbit.tinybus.Bus;
import de.halfbit.tinybus.Subscribe;
import de.halfbit.tinybus.TinyBus;
import de.halfbit.tinybus.mocks.Event1;
import de.halfbit.tinybus.mocks.Event2;

/**
 * Copyright notice.
 *
 * Otto by Square, Inc.
 * https://github.com/square/otto
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * EventBus by Markus Junginger, greenrobot (http://greenrobot.de)
 * https://github.com/greenrobot/EventBus
 * http://www.apache.org/licenses/LICENSE-2.0
 */
// Micro-benchmark comparing TinyBus, Otto, and greenrobot EventBus across three
// scenarios: posting with no subscribers, posting with subscribers registered per
// iteration (dynamic) or once (static), and register/unregister churn. Timings are
// taken from the JUnit test-runner output; the tests themselves assert nothing.
public class TinyBusPerformanceTest extends TestCase {

    // Number of iterations per benchmark loop.
    private static final int EVENTS_NUMBER = 10000;

    private Bus mTinyBus;
    private com.squareup.otto.Bus mOttoBus;
    private EventBus mEventBus;

    private Subsriber1 mSubscriber1;
    private Subsriber2 mSubscriber2;
    private Subsriber3 mSubscriber3;

    // Subscribes to both event types on both TinyBus and Otto (EventBus discovers
    // the same methods reflectively by name/signature).
    private class Subsriber1 {
        @Subscribe
        @com.squareup.otto.Subscribe
        public void onEvent(Event1 event) { }

        @Subscribe
        @com.squareup.otto.Subscribe
        public void onEvent(Event2 event) { }
    }

    // Subscribes to Event1 only.
    private class Subsriber2 {
        @Subscribe
        @com.squareup.otto.Subscribe
        public void onEvent(Event1 event) { }
    }

    // Subscribes to Event2 only.
    private class Subsriber3 {
        @Subscribe
        @com.squareup.otto.Subscribe
        public void onEvent(Event2 event) { }
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mTinyBus = new TinyBus();
        // Disable EventBus extras so all three buses do comparable work per post.
        mEventBus = EventBus.builder()
                .eventInheritance(false)
                .sendNoSubscriberEvent(false).build();
        // No-op enforcer: lets Otto be used off the main thread in this test.
        mOttoBus = new com.squareup.otto.Bus(new com.squareup.otto.ThreadEnforcer() {
            public void enforce(com.squareup.otto.Bus bus) {}
        });

        mSubscriber1 = new Subsriber1();
        mSubscriber2 = new Subsriber2();
        mSubscriber3 = new Subsriber3();
    }

    // Empty marker test; presumably meant to bracket the suite as the first test
    // (warm-up / alphabetical ordering) -- TODO confirm intent.
    public void testA() {
    }

    //-- post events to empty bus

    public void testPostNoSubscribersTinyBus() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mTinyBus.post(event1);
            mTinyBus.post(event2);
        }
    }

    public void testPostNoSubscribersOtto() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mOttoBus.post(event1);
            mOttoBus.post(event2);
        }
    }

    public void testPostNoSubscribersEventBus() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mEventBus.post(event1);
            mEventBus.post(event2);
        }
    }

    //-- post event to subscribers

    // "Dynamic" variants register and unregister inside the loop to include
    // subscription overhead in the measurement.
    public void testPostThreeDynamicSubscribersTinyBus() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mTinyBus.register(mSubscriber1);
            mTinyBus.register(mSubscriber2);
            mTinyBus.register(mSubscriber3);
            mTinyBus.post(event1);
            mTinyBus.post(event2);
            mTinyBus.unregister(mSubscriber3);
            mTinyBus.unregister(mSubscriber2);
            mTinyBus.unregister(mSubscriber1);
        }
    }

    public void testPostThreeDynamicSubscribersOtto() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mOttoBus.register(mSubscriber1);
            mOttoBus.register(mSubscriber2);
            mOttoBus.register(mSubscriber3);
            mOttoBus.post(event1);
            mOttoBus.post(event2);
            mOttoBus.unregister(mSubscriber3);
            mOttoBus.unregister(mSubscriber2);
            mOttoBus.unregister(mSubscriber1);
        }
    }

    public void testPostThreeDynamicSubscribersEventBus() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mEventBus.register(mSubscriber1);
            mEventBus.register(mSubscriber2);
            mEventBus.register(mSubscriber3);
            mEventBus.post(event1);
            mEventBus.post(event2);
            mEventBus.unregister(mSubscriber3);
            mEventBus.unregister(mSubscriber2);
            mEventBus.unregister(mSubscriber1);
        }
    }

    // "Static" variants register once outside the loop, measuring dispatch only.
    public void testPostThreeStaticSubscribersTinyBus() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        mTinyBus.register(mSubscriber1);
        mTinyBus.register(mSubscriber2);
        mTinyBus.register(mSubscriber3);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mTinyBus.post(event1);
            mTinyBus.post(event2);
        }
        mTinyBus.unregister(mSubscriber3);
        mTinyBus.unregister(mSubscriber2);
        mTinyBus.unregister(mSubscriber1);
    }

    public void testPostThreeStaticSubscribersOtto() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        mOttoBus.register(mSubscriber1);
        mOttoBus.register(mSubscriber2);
        mOttoBus.register(mSubscriber3);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mOttoBus.post(event1);
            mOttoBus.post(event2);
        }
        mOttoBus.unregister(mSubscriber3);
        mOttoBus.unregister(mSubscriber2);
        mOttoBus.unregister(mSubscriber1);
    }

    public void testPostThreeStaticSubscribersEventBus() {
        Event1 event1 = new Event1("event");
        Event2 event2 = new Event2(2);
        mEventBus.register(mSubscriber1);
        mEventBus.register(mSubscriber2);
        mEventBus.register(mSubscriber3);
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mEventBus.post(event1);
            mEventBus.post(event2);
        }
        mEventBus.unregister(mSubscriber3);
        mEventBus.unregister(mSubscriber2);
        mEventBus.unregister(mSubscriber1);
    }

    //-- register and unregister subscribers

    public void testRegisterThreeSubscribersTinyBus() {
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mTinyBus.register(mSubscriber1);
            mTinyBus.register(mSubscriber2);
            mTinyBus.register(mSubscriber3);
            mTinyBus.unregister(mSubscriber3);
            mTinyBus.unregister(mSubscriber2);
            mTinyBus.unregister(mSubscriber1);
        }
    }

    public void testRegisterThreeSubscribersOtto() {
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mOttoBus.register(mSubscriber1);
            mOttoBus.register(mSubscriber2);
            mOttoBus.register(mSubscriber3);
            mOttoBus.unregister(mSubscriber3);
            mOttoBus.unregister(mSubscriber2);
            mOttoBus.unregister(mSubscriber1);
        }
    }

    public void testRegisterThreeSubscribersEventBus() {
        for (int i=0; i<EVENTS_NUMBER; i++) {
            mEventBus.register(mSubscriber1);
            mEventBus.register(mSubscriber2);
            mEventBus.register(mSubscriber3);
            mEventBus.unregister(mSubscriber3);
            mEventBus.unregister(mSubscriber2);
            mEventBus.unregister(mSubscriber1);
        }
    }

    // Empty marker test; presumably the alphabetical end bracket -- TODO confirm intent.
    public void testZ() {
    }
}
package com.cmput301.cs.project.controllers;

import android.content.Context;
import com.cmput301.cs.project.listeners.TagsChangedListener;
import com.cmput301.cs.project.models.Tag;
import com.cmput301.cs.project.serialization.LocalSaver;
import com.cmput301.cs.project.utils.Utils;

import java.util.*;

/**
 * Controls the tags that are shown in {@link com.cmput301.cs.project.activities.TagManagerActivity TagManagerActivity}. <p>
 * Loads any locally saved claims from the {@link com.cmput301.cs.project.serialization.LocalSaver LocalClaimSaver} <p>
 * <p/>
 * Use {@link TagsManager#get(Context)} to obtain the singleton.
 * There are getter methods, as well as the search methods, for using either the {@link #findTagByName(String) name}
 * or the {@link #findTagById(String) id} to find the tag.
 * You can also delete the tag by either its {@link #deleteTagByName(String) name} or its {@link #deleteTagById(String) id}.
 */
public class TagsManager {
    private static TagsManager sInstance;

    /**
     * Obtains the singleton for {@code TagsManager}.
     * <p>
     * NOTE(review): not synchronized — assumes it is only called from the main thread; confirm callers.
     *
     * @param context non-null instance of {@link Context}
     * @return a non-null instance of {@code TagsManager}
     */
    public static TagsManager get(Context context) {
        if (sInstance == null) {
            sInstance = new TagsManager(context);
        }
        return sInstance;
    }

    /**
     * TEST ONLY. Uses the supplied {@link com.cmput301.cs.project.serialization.LocalSaver} for all operations.
     *
     * @param claimSaves non-null instance of {@code LocalClaimSaver}
     * @return a non-null instance of {@code TagsManager}
     */
    public static TagsManager ofClaimSaves(LocalSaver claimSaves) {
        return new TagsManager(claimSaves);
    }

    // Persistence backend; every mutation is written through saveAllTags().
    private final LocalSaver mClaimSaves;
    // Tags kept sorted by the natural order of Tag.
    private final SortedSet<Tag> mTags = new TreeSet<Tag>();
    // Change listeners; notified after persistence succeeds.
    private final List<TagsChangedListener> mListeners = new ArrayList<TagsChangedListener>();

    private TagsManager(Context context) {
        this(LocalSaver.ofAndroid(context));
    }

    private TagsManager(LocalSaver claimSaves) {
        mClaimSaves = claimSaves;
        mTags.addAll(claimSaves.readAllTags());
    }

    /**
     * Adds a {@link TagsChangedListener} for listening to changes of {@link Tag Tags}. Refer to specific methods for
     * info of callbacks.
     *
     * @param listener non-null instance of {@code TagsChangedListener}
     * @see #removeTagChangedListener(TagsChangedListener)
     * @see #getTagByName(String)
     * @see #renameTag(Tag, String)
     * @see #deleteTagById(String)
     * @see #deleteTagByName(String)
     */
    public void addTagChangedListener(TagsChangedListener listener) {
        Utils.nonNullOrThrow(listener, "listener");
        mListeners.add(listener);
    }

    /**
     * Removes the listener by reference checking ({@code ==} operator).
     *
     * @param removing an instance of {@link TagsChangedListener}
     */
    public void removeTagChangedListener(TagsChangedListener removing) {
        for (Iterator<TagsChangedListener> iterator = mListeners.iterator(); iterator.hasNext(); ) {
            final TagsChangedListener listener = iterator.next();
            if (listener == removing) {  // ref. check
                iterator.remove();
                break;
            }
        }
    }

    /**
     * Finds or <em>creates</em> a {@link Tag} with the supplied {@code name}. If a new {@code Tag} is created,
     * {@link TagsChangedListener#onTagCreated(Tag)} is called after the creation.
     *
     * @param name non-null {@code String} name; leading/trailing whitespace is trimmed
     * @return a non-null instance of {@code Tag}
     */
    public Tag getTagByName(String name) {
        name = name.trim();
        final Tag tag = findTagByName(name);
        final Tag out;
        if (tag == null) {
            out = new Tag(name, this);
            mTags.add(out);
            tagCreatedInternal(out);
        } else {
            out = tag;
        }
        return out;
    }

    /**
     * Finds an <em>existing</em> {@link Tag} with the supplied {@code name}. Returns null if none is found.
     *
     * @param name the {@code String} name; usually obtained by {@link Tag#getName()}; trimmed before comparison
     * @return an instance of {@code Tag} with the same name; null if not found
     */
    public Tag findTagByName(String name) {
        name = name.trim();
        for (Tag tag : mTags) {
            if (tag.getName().equals(name)) {
                return tag;
            }
        }
        return null;
    }

    /**
     * Finds an <em>existing</em> {@link Tag} with the supplied {@code id}. Returns null if none is found.
     *
     * @param id the {@code String} id; usually obtained by {@link Tag#getId()}
     * @return an instance of {@code Tag} with the same id; null if not found
     */
    public Tag findTagById(String id) {
        for (Tag tag : mTags) {
            if (tag.getId().equals(id)) {
                return tag;
            }
        }
        return null;
    }

    /**
     * Renames a {@link Tag}. A new instance of {@code Tag} will be created with the same {@link Tag#getId() id}.
     * The old instance should be discarded. {@link TagsChangedListener#onTagRenamed(Tag, Tag)} is called after a
     * successful rename.
     *
     * @param oldTag  the {@code Tag} to be renamed
     * @param newName non-null {@code String} name
     * @return a non-null instance of {@code Tag} with the same id
     */
    public Tag renameTag(Tag oldTag, String newName) {
        final Tag newTag = new Tag(newName, this, oldTag.getId());
        mTags.remove(oldTag);
        mTags.add(newTag);
        tagRenamedInternal(newTag, oldTag);
        return newTag;
    }

    /**
     * Finds and deletes an existing {@link Tag} by its {@code id}. No-op if the tag is not found.
     * {@link TagsChangedListener#onTagDeleted(Tag)} is called after a successful removal.
     *
     * @param id id of the {@code Tag}; usually obtained by {@link Tag#getId()}
     */
    public void deleteTagById(String id) {
        final Tag tag = findTagById(id);
        if (tag != null) {
            mTags.remove(tag);
            tagDeletedInternal(tag);
        }
    }

    /**
     * Finds and deletes an existing {@link Tag} by its {@code name}. No-op if the tag is not found.
     * {@link TagsChangedListener#onTagDeleted(Tag)} is called after a successful removal.
     *
     * @param name name of the {@code Tag}; usually obtained by {@link Tag#getName()}
     */
    public void deleteTagByName(String name) {
        final Tag tag = findTagByName(name);
        if (tag != null) {
            mTags.remove(tag);
            tagDeletedInternal(tag);
        }
    }

    // Persists the full tag set, then fires the matching callback.
    private void tagCreatedInternal(Tag tag) {
        mClaimSaves.saveAllTags(peekTags());
        notifyListenersCreated(tag);
    }

    private void tagRenamedInternal(Tag tag, Tag oldTag) {
        mClaimSaves.saveAllTags(peekTags());
        notifyListenersRenamed(tag, oldTag);
    }

    private void tagDeletedInternal(Tag tag) {
        mClaimSaves.saveAllTags(peekTags());
        notifyListenersDeleted(tag);
    }

    // BUGFIX: iterate over a snapshot of the listener list so a listener may call
    // addTagChangedListener/removeTagChangedListener from within its callback without
    // triggering a ConcurrentModificationException from the live-list iteration.
    private void notifyListenersCreated(Tag tag) {
        for (TagsChangedListener listener : new ArrayList<TagsChangedListener>(mListeners)) {
            listener.onTagCreated(tag);
        }
    }

    private void notifyListenersRenamed(Tag tag, Tag oldTag) {
        for (TagsChangedListener listener : new ArrayList<TagsChangedListener>(mListeners)) {
            listener.onTagRenamed(tag, oldTag);
        }
    }

    private void notifyListenersDeleted(Tag tag) {
        for (TagsChangedListener listener : new ArrayList<TagsChangedListener>(mListeners)) {
            listener.onTagDeleted(tag);
        }
    }

    /**
     * Peeks at all the existing {@link Tag Tags}. Sorted by the natural order of {@code Tag}.
     *
     * @return unmodifiable {@link SortedSet} of {@code Tags}; sorted by {@link Tag#compareTo(Tag)}
     */
    public SortedSet<Tag> peekTags() {
        return Collections.unmodifiableSortedSet(mTags);
    }
}
package org.apache.lucene.facet.taxonomy; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.lucene.facet.FacetTestCase; import org.apache.lucene.facet.SlowRAMDirectory; import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader; import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.junit.Test; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/

@SuppressCodecs("SimpleText")
public class TestTaxonomyCombined extends FacetTestCase {

  /**  The following categories will be added to the taxonomy by
    fillTaxonomy(), and tested by all tests below:
  */
  private final static String[][] categories = {
    { "Author", "Tom Clancy" },
    { "Author", "Richard Dawkins" },
    { "Author", "Richard Adams" },
    { "Price", "10", "11" },
    { "Price", "10", "12" },
    { "Price", "20", "27" },
    { "Date", "2006", "05" },
    { "Date", "2005" },
    { "Date", "2006" },
    { "Subject", "Nonfiction", "Children", "Animals" },
    { "Author", "Stephen Jay Gould" },
    { "Author", "\u05e0\u05d3\u05d1\u3042\u0628" },
  };

  /**  When adding the above categories with TaxonomyWriter.addCategory(),
    the following paths are expected to be returned:
    (note that currently the full path is not returned, and therefore
    not tested - rather, just the last component, the ordinal, is returned
    and tested).
  */
  private final static int[][] expectedPaths = {
    { 1, 2 },
    { 1, 3 },
    { 1, 4 },
    { 5, 6, 7 },
    { 5, 6, 8 },
    { 5, 9, 10 },
    { 11, 12, 13 },
    { 11, 14 },
    { 11, 12 },
    { 15, 16, 17, 18 },
    { 1, 19 },
    { 1, 20 }
  };

  /**  The taxonomy index is expected to then contain the following
    generated categories, with increasing ordinals (note how parent
    categories are added automatically when subcategories are added).
  */
  private final static String[][] expectedCategories = {
    { }, // the root category
    { "Author" },
    { "Author", "Tom Clancy" },
    { "Author", "Richard Dawkins" },
    { "Author", "Richard Adams" },
    { "Price" },
    { "Price", "10" },
    { "Price", "10", "11" },
    { "Price", "10", "12" },
    { "Price", "20" },
    { "Price", "20", "27" },
    { "Date" },
    { "Date", "2006" },
    { "Date", "2006", "05" },
    { "Date", "2005" },
    { "Subject" },
    { "Subject", "Nonfiction" },
    { "Subject", "Nonfiction", "Children" },
    { "Subject", "Nonfiction", "Children", "Animals" },
    { "Author", "Stephen Jay Gould" },
    { "Author", "\u05e0\u05d3\u05d1\u3042\u0628" },
  };

  /**  fillTaxonomy adds the categories in the categories[] array, and asserts
    that the additions return exactly the ordinals (in the past - paths)
    specified in expectedPaths[].
    Note that this assumes that fillTaxonomy() is called on an empty taxonomy
    index. Calling it after something else was already added to the taxonomy
    index will surely have this method fail.
  */
  public static void fillTaxonomy(TaxonomyWriter tw) throws IOException {
    for (int i = 0; i < categories.length; i++) {
      int ordinal = tw.addCategory(new FacetLabel(categories[i]));
      // only the leaf ordinal is checked here; ancestors are checked by
      // fillTaxonomyCheckPaths() below
      int expectedOrdinal = expectedPaths[i][expectedPaths[i].length-1];
      if (ordinal!=expectedOrdinal) {
        fail("For category "+showcat(categories[i])+" expected ordinal "+
            expectedOrdinal+", but got "+ordinal);
      }
    }
  }

  // Renders a category path as a human-readable "a/b/c" string for failure messages.
  public static String showcat(String[] path) {
    if (path==null) {
      return "<null>";
    }
    if (path.length==0) {
      return "<empty>";
    }
    if (path.length==1 && path[0].length()==0) {
      return "<\"\">";
    }
    StringBuilder sb = new StringBuilder(path[0]);
    for (int i=1; i<path.length; i++) {
      sb.append('/');
      sb.append(path[i]);
    }
    return sb.toString();
  }

  // Same as above, for a FacetLabel.
  private String showcat(FacetLabel path) {
    if (path==null) {
      return "<null>";
    }
    if (path.length==0) {
      return "<empty>";
    }
    return "<"+path.toString()+">";
  }

  /**  Basic tests for TaxonomyWriter. Basically, we test that
    IndexWriter.addCategory works, i.e.
    returns the expected ordinals (this is tested by calling the
    fillTaxonomy() method above).
    We do not test here that after writing the index can be read -
    this will be done in more tests below.
  */
  @Test
  public void testWriter() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
    // is what we expect it to be.
    assertEquals(expectedCategories.length, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /**  testWriterTwice is exactly like testWriter, except that after adding
    all the categories, we add them again, and see that we get the same
    old ids again - not new categories.
  */
  @Test
  public void testWriterTwice() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    // run fillTaxonomy again - this will try to add the same categories
    // again, and check that we see the same ordinal paths again, not
    // different ones.
    fillTaxonomy(tw);
    // Let's check the number of categories again, to see that no
    // extraneous categories were created:
    assertEquals(expectedCategories.length, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /**  testWriterTwice2 is similar to testWriterTwice, except that the index
    is closed and reopened before attempting to write to it the same
    categories again.
    tw.commit();
    // the added "hi" category bumps the size by one
    assertEquals(expectedCategories.length+1, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /**  Another set of tests for the writer, which don't use an array and
   *  try to distill the different cases, and therefore may be more helpful
   *  for debugging a problem than testWriter() which is hard to know why
   *  or where it failed.
   */
  @Test
  public void testWriterSimpler() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    assertEquals(1, tw.getSize()); // the root only
    // Test that adding a new top-level category works
    assertEquals(1, tw.addCategory(new FacetLabel("a")));
    assertEquals(2, tw.getSize());
    // Test that adding the same category again is noticed, and the
    // same ordinal (and not a new one) is returned.
    assertEquals(1, tw.addCategory(new FacetLabel("a")));
    assertEquals(2, tw.getSize());
    // Test that adding another top-level category returns a new ordinal,
    // not the same one
    assertEquals(2, tw.addCategory(new FacetLabel("b")));
    assertEquals(3, tw.getSize());
    // Test that adding a category inside one of the above adds just one
    // new ordinal:
    assertEquals(3, tw.addCategory(new FacetLabel("a","c")));
    assertEquals(4, tw.getSize());
    // Test that adding the same second-level category doesn't do anything:
    assertEquals(3, tw.addCategory(new FacetLabel("a","c")));
    assertEquals(4, tw.getSize());
    // Test that adding a second-level category with two new components
    // indeed adds two categories
    assertEquals(5, tw.addCategory(new FacetLabel("d","e")));
    assertEquals(6, tw.getSize());
    // Verify that the parents were added above in the order we expected
    assertEquals(4, tw.addCategory(new FacetLabel("d")));
    // Similar, but inside a category that already exists:
    assertEquals(7, tw.addCategory(new FacetLabel("b", "d","e")));
    assertEquals(8, tw.getSize());
    // And now inside two levels of categories that already exist:
    assertEquals(8, tw.addCategory(new FacetLabel("b", "d","f")));
    assertEquals(9, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /**  Test writing an empty index, and seeing that a reader finds in it
    the root category, and only it. We check all the methods on that
    root category return the expected results.
  */
  @Test
  public void testRootOnly() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    // right after opening the index, it should already contain the
    // root, so have size 1:
    assertEquals(1, tw.getSize());
    tw.close();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
    assertEquals(1, tr.getSize());
    assertEquals(0, tr.getPath(0).length);
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getParallelTaxonomyArrays().parents()[0]);
    assertEquals(0, tr.getOrdinal(new FacetLabel()));
    tr.close();
    indexDir.close();
  }

  /**  The following test is exactly the same as testRootOnly, except we
   *  do not close the writer before opening the reader. We want to see
   *  that the root is visible to the reader not only after the writer is
   *  closed, but immediately after it is created.
   */
  @Test
  public void testRootOnly2() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    tw.commit();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
    assertEquals(1, tr.getSize());
    assertEquals(0, tr.getPath(0).length);
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getParallelTaxonomyArrays().parents()[0]);
    assertEquals(0, tr.getOrdinal(new FacetLabel()));
    tw.close();
    tr.close();
    indexDir.close();
  }

  /**  Basic tests for TaxonomyReader's category &lt;=&gt; ordinal transformations
    (getSize(), getCategory() and getOrdinal()).
    We test that after writing the index, it can be read and all the
    categories and ordinals are there just as we expected them to be.
  */
  @Test
  public void testReaderBasic() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);

    // test TaxonomyReader.getSize():
    assertEquals(expectedCategories.length, tr.getSize());

    // test round trips of ordinal => category => ordinal
    for (int i=0; i<tr.getSize(); i++) {
      assertEquals(i, tr.getOrdinal(tr.getPath(i)));
    }

    // test TaxonomyReader.getCategory():
    for (int i = 1; i < tr.getSize(); i++) {
      FacetLabel expectedCategory = new FacetLabel(expectedCategories[i]);
      FacetLabel category = tr.getPath(i);
      if (!expectedCategory.equals(category)) {
        fail("For ordinal "+i+" expected category "+
            showcat(expectedCategory)+", but got "+showcat(category));
      }
    }
    // (also test invalid ordinals:)
    assertNull(tr.getPath(-1));
    assertNull(tr.getPath(tr.getSize()));
    assertNull(tr.getPath(TaxonomyReader.INVALID_ORDINAL));

    // test TaxonomyReader.getOrdinal():
    for (int i = 1; i < expectedCategories.length; i++) {
      int expectedOrdinal = i;
      int ordinal = tr.getOrdinal(new FacetLabel(expectedCategories[i]));
      if (expectedOrdinal != ordinal) {
        fail("For category "+showcat(expectedCategories[i])+" expected ordinal "+
            expectedOrdinal+", but got "+ordinal);
      }
    }
    // (also test invalid categories:)
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(new FacetLabel("non-existant")));
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(new FacetLabel("Author", "Jules Verne")));

    tr.close();
    indexDir.close();
  }

  /**  Tests for TaxonomyReader's getParent() method.
    We check it by comparing its results to those we could have gotten by
    looking at the category string paths (where the parentage is obvious).
    Note that after testReaderBasic(), we already know we can trust the
    ordinal &lt;=&gt; category conversions.

    Note: At the moment, the parent methods in the reader are deprecated,
    but this does not mean they should not be tested!
    Until they are removed (*if* they are removed), these tests should
    remain to see that they still work correctly.
  */
  @Test
  public void testReaderParent() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);

    // check that the parent of the root ordinal is the invalid ordinal:
    int[] parents = tr.getParallelTaxonomyArrays().parents();
    assertEquals(TaxonomyReader.INVALID_ORDINAL, parents[0]);

    // check parent of non-root ordinals:
    for (int ordinal=1; ordinal<tr.getSize(); ordinal++) {
      FacetLabel me = tr.getPath(ordinal);
      int parentOrdinal = parents[ordinal];
      FacetLabel parent = tr.getPath(parentOrdinal);
      if (parent==null) {
        fail("Parent of "+ordinal+" is "+parentOrdinal+
            ", but this is not a valid category.");
      }
      // verify that the parent is indeed my parent, according to the strings
      if (!me.subpath(me.length-1).equals(parent)) {
        fail("Got parent "+parentOrdinal+" for ordinal "+ordinal+
            " but categories are "+showcat(parent)+" and "+showcat(me)+
            " respectively.");
      }
    }

    tr.close();
    indexDir.close();
  }

  /**
   * Tests for TaxonomyWriter's getParent() method. We check it by comparing
   * its results to those we could have gotten by looking at the category
   * string paths using a TaxonomyReader (where the parentage is obvious).
   * Note that after testReaderBasic(), we already know we can trust the
   * ordinal &lt;=&gt; category conversions from TaxonomyReader.
   *
   * The difference between testWriterParent1 and testWriterParent2 is that
   * the former closes the taxonomy writer before reopening it, while the
   * latter does not.
   *
   * This test code is virtually identical to that of testReaderParent().
  */
  @Test
  public void testWriterParent1() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();
    // writer is closed and reopened before checking parents
    tw = new DirectoryTaxonomyWriter(indexDir);
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);

    checkWriterParent(tr, tw);

    tw.close();
    tr.close();
    indexDir.close();
  }

  @Test
  public void testWriterParent2() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    // unlike testWriterParent1, the writer stays open - only a commit is done
    tw.commit();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);

    checkWriterParent(tr, tw);

    tw.close();
    tr.close();
    indexDir.close();
  }

  // Shared assertion body for testWriterParent1/testWriterParent2: verifies
  // TaxonomyWriter.getParent() against the reader's path strings, and that
  // out-of-range ordinals throw ArrayIndexOutOfBoundsException.
  private void checkWriterParent(TaxonomyReader tr, TaxonomyWriter tw) throws Exception {
    // check that the parent of the root ordinal is the invalid ordinal:
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tw.getParent(0));

    // check parent of non-root ordinals:
    for (int ordinal = 1; ordinal < tr.getSize(); ordinal++) {
      FacetLabel me = tr.getPath(ordinal);
      int parentOrdinal = tw.getParent(ordinal);
      FacetLabel parent = tr.getPath(parentOrdinal);
      if (parent == null) {
        fail("Parent of " + ordinal + " is " + parentOrdinal
            + ", but this is not a valid category.");
      }
      // verify that the parent is indeed my parent, according to the
      // strings
      if (!me.subpath(me.length - 1).equals(parent)) {
        fail("Got parent " + parentOrdinal + " for ordinal " + ordinal
            + " but categories are " + showcat(parent) + " and " + showcat(me)
            + " respectively.");
      }
    }

    // check parent of invalid ordinals:
    try {
      tw.getParent(-1);
      fail("getParent for -1 should throw exception");
    } catch (ArrayIndexOutOfBoundsException e) {
      // ok
    }
    try {
      tw.getParent(TaxonomyReader.INVALID_ORDINAL);
      fail("getParent for INVALID_ORDINAL should throw exception");
    } catch (ArrayIndexOutOfBoundsException e) {
      // ok
    }
    try {
      int parent = tw.getParent(tr.getSize());
      fail("getParent for getSize() should throw exception, but returned " + parent);
    } catch (ArrayIndexOutOfBoundsException e) {
      // ok
    }
  }

  /**
   * Test TaxonomyReader's child browsing method, getChildrenArrays()
   * This only tests for correctness of the data on one example - we have
   * below further tests on data refresh etc.
   */
  @Test
  public void testChildrenArrays() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
    ParallelTaxonomyArrays ca = tr.getParallelTaxonomyArrays();
    int[] youngestChildArray = ca.children();
    assertEquals(tr.getSize(), youngestChildArray.length);
    int[] olderSiblingArray = ca.siblings();
    assertEquals(tr.getSize(), olderSiblingArray.length);
    for (int i=0; i<expectedCategories.length; i++) {
      // find expected children by looking at all expectedCategories
      // for children
      ArrayList<Integer> expectedChildren = new ArrayList<>();
      for (int j=expectedCategories.length-1; j>=0; j--) {
        if (expectedCategories[j].length != expectedCategories[i].length+1) {
          continue; // not longer by 1, so can't be a child
        }
        boolean ischild=true;
        for (int k=0; k<expectedCategories[i].length; k++) {
          if (!expectedCategories[j][k].equals(expectedCategories[i][k])) {
            ischild=false;
            break;
          }
        }
        if (ischild) {
          expectedChildren.add(j);
        }
      }
      // check that children and expectedChildren are the same, with the
      // correct reverse (youngest to oldest) order:
      if (expectedChildren.size()==0) {
        assertEquals(TaxonomyReader.INVALID_ORDINAL, youngestChildArray[i]);
      } else {
        int child = youngestChildArray[i];
        assertEquals(expectedChildren.get(0).intValue(), child);
        for (int j=1; j<expectedChildren.size(); j++) {
          child = olderSiblingArray[child];
          assertEquals(expectedChildren.get(j).intValue(), child);
          // if child is INVALID_ORDINAL we should stop, but
          // assertEquals would fail in this case anyway.
        }
        // When we're done comparing, olderSiblingArray should now point
        // to INVALID_ORDINAL, saying there are no more children. If it
        // doesn't, we found too many children...
        assertEquals(-1, olderSiblingArray[child]);
      }
    }
    tr.close();
    indexDir.close();
  }

  /**
   * Similar to testChildrenArrays, except rather than look at
   * expected results, we test for several "invariants" that the results
   * should uphold, e.g., that a child of a category indeed has this category
   * as its parent. This sort of test can more easily be extended to larger
   * example taxonomies, because we do not need to build the expected list
   * of categories like we did in the above test.
   */
  @Test
  public void testChildrenArraysInvariants() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
    ParallelTaxonomyArrays ca = tr.getParallelTaxonomyArrays();
    int[] children = ca.children();
    assertEquals(tr.getSize(), children.length);
    int[] olderSiblingArray = ca.siblings();
    assertEquals(tr.getSize(), olderSiblingArray.length);

    // test that the "youngest child" of every category is indeed a child:
    int[] parents = tr.getParallelTaxonomyArrays().parents();
    for (int i=0; i<tr.getSize(); i++) {
      int youngestChild = children[i];
      if (youngestChild != TaxonomyReader.INVALID_ORDINAL) {
        assertEquals(i, parents[youngestChild]);
      }
    }

    // test that the "older sibling" of every category is indeed older (lower)
    // (it can also be INVALID_ORDINAL, which is lower than any ordinal)
    for (int i=0; i<tr.getSize(); i++) {
      assertTrue("olderSiblingArray["+i+"] should be <"+i, olderSiblingArray[i] < i);
    }

    // test that the "older sibling" of every category is indeed a sibling
    // (they share the same parent)
    for (int i=0; i<tr.getSize(); i++) {
      int sibling = olderSiblingArray[i];
      if (sibling == TaxonomyReader.INVALID_ORDINAL) {
        continue;
      }
      assertEquals(parents[i], parents[sibling]);
    }

    // And now for slightly more complex (and less "invariant-like"...)
    // tests:

    // test that the "youngest child" is indeed the youngest (so we don't
    // miss the first children in the chain)
    for (int i=0; i<tr.getSize(); i++) {
      // Find the really youngest child:
      int j;
      for (j=tr.getSize()-1; j>i; j--) {
        if (parents[j]==i) {
          break; // found youngest child
        }
      }
      if (j==i) { // no child found
        j=TaxonomyReader.INVALID_ORDINAL;
      }
      assertEquals(j, children[i]);
    }

    // test that the "older sibling" is indeed the least oldest one - and
    // not a too old one or -1 (so we didn't miss some children in the
    // middle or the end of the chain).
    for (int i=0; i<tr.getSize(); i++) {
      // Find the youngest older sibling:
      int j;
      for (j=i-1; j>=0; j--) {
        if (parents[j]==parents[i]) {
          break; // found youngest older sibling
        }
      }
      if (j<0) { // no sibling found
        j=TaxonomyReader.INVALID_ORDINAL;
      }
      assertEquals(j, olderSiblingArray[i]);
    }

    tr.close();
    indexDir.close();
  }

  /**
   * Test how getChildrenArrays() deals with the taxonomy's growth:
   */
  @Test
  public void testChildrenArraysGrowth() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    tw.addCategory(new FacetLabel("hi", "there"));
    tw.commit();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
    ParallelTaxonomyArrays ca = tr.getParallelTaxonomyArrays();
    // ordinals: 0=root, 1="hi", 2="hi/there"
    assertEquals(3, tr.getSize());
    assertEquals(3, ca.siblings().length);
    assertEquals(3, ca.children().length);
    assertTrue(Arrays.equals(new int[] { 1, 2, -1 }, ca.children()));
    assertTrue(Arrays.equals(new int[] { -1, -1, -1 }, ca.siblings()));
    tw.addCategory(new FacetLabel("hi", "ho"));
    tw.addCategory(new FacetLabel("hello"));
    tw.commit();
    // Before refresh, nothing changed..
    ParallelTaxonomyArrays newca = tr.getParallelTaxonomyArrays();
    assertSame(newca, ca); // we got exactly the same object
    assertEquals(3, tr.getSize());
    assertEquals(3, ca.siblings().length);
    assertEquals(3, ca.children().length);
    // After the refresh, things change:
    TaxonomyReader newtr = TaxonomyReader.openIfChanged(tr);
    assertNotNull(newtr);
    tr.close();
    tr = newtr;
    ca = tr.getParallelTaxonomyArrays();
    assertEquals(5, tr.getSize());
    assertEquals(5, ca.siblings().length);
    assertEquals(5, ca.children().length);
    assertTrue(Arrays.equals(new int[] { 4, 3, -1, -1, -1 }, ca.children()));
    assertTrue(Arrays.equals(new int[] { -1, -1, -1, 2, 1 }, ca.siblings()));
    tw.close();
    tr.close();
    indexDir.close();
  }

  // Test that getParentArrays is valid when retrieved during refresh
  @Test
  public void testTaxonomyReaderRefreshRaces() throws Exception {
    // compute base child arrays - after first chunk, and after the other
    Directory indexDirBase = newDirectory();
    TaxonomyWriter twBase = new DirectoryTaxonomyWriter(indexDirBase);
    twBase.addCategory(new FacetLabel("a", "0"));
    final FacetLabel abPath = new FacetLabel("a", "b");
    twBase.addCategory(abPath);
    twBase.commit();
    TaxonomyReader trBase = new DirectoryTaxonomyReader(indexDirBase);

    final ParallelTaxonomyArrays ca1 = trBase.getParallelTaxonomyArrays();

    final int abOrd = trBase.getOrdinal(abPath);
    final int abYoungChildBase1 = ca1.children()[abOrd];

    final int numCategories = atLeast(800);
    for (int i = 0; i < numCategories; i++) {
      twBase.addCategory(new FacetLabel("a", "b", Integer.toString(i)));
    }
    twBase.close();

    TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(trBase);
    assertNotNull(newTaxoReader);
    trBase.close();
    trBase = newTaxoReader;

    final ParallelTaxonomyArrays ca2 = trBase.getParallelTaxonomyArrays();
    final int abYoungChildBase2 = ca2.children()[abOrd];

    int numRetries = atLeast(50);
    for (int retry = 0; retry < numRetries; retry++) {
      assertConsistentYoungestChild(abPath, abOrd, abYoungChildBase1, abYoungChildBase2,
          retry, numCategories);
    }

    trBase.close();
    indexDirBase.close();
  }

  // Re-runs the refresh race on a fresh slow directory: while a background
  // thread repeatedly reads the parallel taxonomy arrays, the main thread
  // reopens the reader; the youngest child of "a/b" must always be one of
  // the two known-good values captured above.
  private void assertConsistentYoungestChild(final FacetLabel abPath,
      final int abOrd, final int abYoungChildBase1, final int abYoungChildBase2,
      final int retry, int numCategories) throws Exception {
    SlowRAMDirectory indexDir = new SlowRAMDirectory(-1, null); // no slowness for initialization
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    tw.addCategory(new FacetLabel("a", "0"));
    tw.addCategory(abPath);
    tw.commit();

    final DirectoryTaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
    for (int i = 0; i < numCategories; i++) {
      final FacetLabel cp = new FacetLabel("a", "b", Integer.toString(i));
      tw.addCategory(cp);
      assertEquals("Ordinal of "+cp+" must be invalid until Taxonomy Reader was refreshed",
          TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(cp));
    }
    tw.close();

    final AtomicBoolean stop = new AtomicBoolean(false);
    final Throwable[] error = new Throwable[] { null };
    final int retrieval[] = { 0 };

    Thread thread = new Thread("Child Arrays Verifier") {
      @Override
      public void run() {
        setPriority(1 + getPriority());
        try {
          while (!stop.get()) {
            int lastOrd = tr.getParallelTaxonomyArrays().parents().length - 1;
            assertNotNull("path of last-ord " + lastOrd + " is not found!", tr.getPath(lastOrd));
            assertChildrenArrays(tr.getParallelTaxonomyArrays(), retry, retrieval[0]++);
            sleep(10); // don't starve refresh()'s CPU, which sleeps every 50 bytes for 1 ms
          }
        } catch (Throwable e) {
          error[0] = e;
          stop.set(true);
        }
      }

      private void assertChildrenArrays(ParallelTaxonomyArrays ca, int retry, int retrieval) {
        final int abYoungChild = ca.children()[abOrd];
        assertTrue(
            "Retry "+retry+": retrieval: "+retrieval+": wrong youngest child for category "+abPath+" (ord="+abOrd+
            ") - must be either "+abYoungChildBase1+" or "+abYoungChildBase2+" but was: "+abYoungChild,
            abYoungChildBase1==abYoungChild ||
            abYoungChildBase2==ca.children()[abOrd]);
      }
    };
    thread.start();

    indexDir.setSleepMillis(1); // some delay for refresh
    TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(tr);
    if (newTaxoReader != null) {
      newTaxoReader.close();
    }

    stop.set(true);
    thread.join();
    // NOTE: message text (including its typo) is asserted output; kept as-is.
    assertNull("Unexpcted exception at retry "+retry+" retrieval "+retrieval[0]+": \n"+stackTraceStr(error[0]), error[0]);

    tr.close();
  }

  /** Grab the stack trace into a string since the exception was thrown in a thread and we want the assert
   * outside the thread to show the stack trace in case of failure. */
  private String stackTraceStr(final Throwable error) {
    if (error == null) {
      return "";
    }

    StringWriter sw = new StringWriter();
    PrintWriter pw = new PrintWriter(sw);
    error.printStackTrace(pw);
    pw.close();
    return sw.toString();
  }

  /**  Test that if separate reader and writer objects are opened, new
    categories written into the writer are available to a reader only
    after a commit().
    Note that this test obviously doesn't cover all the different
    concurrency scenarios, all different methods, and so on. We may
    want to write more tests of this sort.

    This test simulates what would happen when there are two separate
    processes, one doing indexing, and the other searching, and each opens
    its own object (with obviously no connection between the objects) using
    the same disk files. Note, though, that this test does not test what
    happens when the two processes do their actual work at exactly the same
    time.
    It also doesn't test multi-threading.
  */
  @Test
  public void testSeparateReaderAndWriter() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    tw.commit();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);

    assertEquals(1, tr.getSize()); // the empty taxonomy has size 1 (the root)
    tw.addCategory(new FacetLabel("Author"));
    assertEquals(1, tr.getSize()); // still root only...
    assertNull(TaxonomyReader.openIfChanged(tr)); // this is not enough, because tw.commit() hasn't been done yet
    assertEquals(1, tr.getSize()); // still root only...
    tw.commit();
    assertEquals(1, tr.getSize()); // still root only...
    TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(tr);
    assertNotNull(newTaxoReader);
    tr.close();
    tr = newTaxoReader;

    int author = 1;
    try {
      assertEquals(TaxonomyReader.ROOT_ORDINAL, tr.getParallelTaxonomyArrays().parents()[author]);
      // ok
    } catch (ArrayIndexOutOfBoundsException e) {
      fail("After category addition, commit() and refresh(), getParent for "+author+" should NOT throw exception");
    }
    assertEquals(2, tr.getSize()); // finally, see there are two categories

    // now, add another category, and verify that after commit and refresh
    // the parent of this category is correct (this requires the reader
    // to correctly update its prefetched parent vector), and that the
    // old information also wasn't ruined:
    tw.addCategory(new FacetLabel("Author", "Richard Dawkins"));
    int dawkins = 2;
    tw.commit();
    newTaxoReader = TaxonomyReader.openIfChanged(tr);
    assertNotNull(newTaxoReader);
    tr.close();
    tr = newTaxoReader;
    int[] parents = tr.getParallelTaxonomyArrays().parents();
    assertEquals(author, parents[dawkins]);
    assertEquals(TaxonomyReader.ROOT_ORDINAL, parents[author]);
    assertEquals(TaxonomyReader.INVALID_ORDINAL, parents[TaxonomyReader.ROOT_ORDINAL]);
    assertEquals(3, tr.getSize());
    tw.close();
    tr.close();
    indexDir.close();
  }

  @Test
  public void testSeparateReaderAndWriter2() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    tw.commit();
    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);

    // Test getOrdinal():
    FacetLabel author = new FacetLabel("Author");

    assertEquals(1, tr.getSize()); // the empty taxonomy has size 1 (the root)
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(author));
    tw.addCategory(author);
    // before commit and refresh, no change:
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(author));
    assertEquals(1, tr.getSize()); // still root only...
    assertNull(TaxonomyReader.openIfChanged(tr)); // this is not enough, because tw.commit() hasn't been done yet
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(author));
    assertEquals(1, tr.getSize()); // still root only...
    tw.commit();
    // still not enough before refresh:
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(author));
    assertEquals(1, tr.getSize()); // still root only...
    TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(tr);
    assertNotNull(newTaxoReader);
    tr.close();
    tr = newTaxoReader;
    assertEquals(1, tr.getOrdinal(author));
    assertEquals(2, tr.getSize());
    tw.close();
    tr.close();
    indexDir.close();
  }

  /**
   * fillTaxonomyCheckPaths adds the categories in the categories[] array,
   * and asserts that the additions return exactly paths specified in
   * expectedPaths[]. This is the same as fillTaxonomy() but also checks
   * the correctness of getParent(), not just addCategory().
   * Note that this assumes that fillTaxonomyCheckPaths() is called on an empty
   * taxonomy index. Calling it after something else was already added to the
   * taxonomy index will surely have this method fail.
 */
public static void fillTaxonomyCheckPaths(TaxonomyWriter tw) throws IOException {
  for (int i = 0; i < categories.length; i++) {
    int ordinal = tw.addCategory(new FacetLabel(categories[i]));
    // the last element of expectedPaths[i] is the ordinal expected for the
    // category itself; the preceding elements are its ancestors' ordinals.
    int expectedOrdinal = expectedPaths[i][expectedPaths[i].length-1];
    if (ordinal!=expectedOrdinal) {
      fail("For category "+showcat(categories[i])+" expected ordinal "+
          expectedOrdinal+", but got "+ordinal);
    }
    // walk up the parent chain and compare each ancestor's ordinal:
    for (int j=expectedPaths[i].length-2; j>=0; j--) {
      ordinal = tw.getParent(ordinal);
      expectedOrdinal = expectedPaths[i][j];
      if (ordinal!=expectedOrdinal) {
        fail("For category "+showcat(categories[i])+" expected ancestor level "+
            (expectedPaths[i].length-1-j)+" was "+expectedOrdinal+
            ", but got "+ordinal);
      }
    }
  }
}

// After fillTaxonomy returned successfully, checkPaths() checks that
// the getParent() calls return as expected, from the table
public static void checkPaths(TaxonomyWriter tw) throws IOException {
  for (int i = 0; i < categories.length; i++) {
    int ordinal = expectedPaths[i][expectedPaths[i].length-1];
    for (int j=expectedPaths[i].length-2; j>=0; j--) {
      ordinal = tw.getParent(ordinal);
      int expectedOrdinal = expectedPaths[i][j];
      if (ordinal!=expectedOrdinal) {
        fail("For category "+showcat(categories[i])+" expected ancestor level "+
            (expectedPaths[i].length-1-j)+" was "+expectedOrdinal+
            ", but got "+ordinal);
      }
    }
    // every top-level category hangs directly off the root...
    assertEquals(TaxonomyReader.ROOT_ORDINAL, tw.getParent(expectedPaths[i][0]));
  }
  // ...and the root itself has no parent.
  assertEquals(TaxonomyReader.INVALID_ORDINAL, tw.getParent(TaxonomyReader.ROOT_ORDINAL));
}

/**
 * Basic test for TaxonomyWriter.getParent(). This is similar to testWriter
 * above, except we also check the parents of the added categories, not just
 * the categories themselves.
 */
@Test
public void testWriterCheckPaths() throws Exception {
  Directory indexDir = newDirectory();
  TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
  fillTaxonomyCheckPaths(tw);
  // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
  // is what we expect it to be.
  assertEquals(expectedCategories.length, tw.getSize());
  tw.close();
  indexDir.close();
}

/**
 * testWriterCheckPaths2 is the path-checking variant of testWriterTwice
 * and testWriterTwice2. After adding all the categories, we add them again,
 * and see that we get the same old ids and paths. We repeat the path checking
 * yet again after closing and opening the index for writing again - to see
 * that the reading of existing data from disk works as well.
 */
@Test
public void testWriterCheckPaths2() throws Exception {
  Directory indexDir = newDirectory();
  TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
  fillTaxonomy(tw);
  checkPaths(tw);
  // adding the same categories again must be idempotent:
  fillTaxonomy(tw);
  checkPaths(tw);
  tw.close();
  // reopen for writing: paths must survive a close/open cycle on disk.
  tw = new DirectoryTaxonomyWriter(indexDir);
  checkPaths(tw);
  fillTaxonomy(tw);
  checkPaths(tw);
  tw.close();
  indexDir.close();
}

@Test
public void testNRT() throws Exception {
  Directory dir = newDirectory();
  DirectoryTaxonomyWriter writer = new DirectoryTaxonomyWriter(dir);
  // near-real-time reader opened directly on the writer (no commit needed):
  TaxonomyReader reader = new DirectoryTaxonomyReader(writer);
  FacetLabel cp = new FacetLabel("a");
  writer.addCategory(cp);
  TaxonomyReader newReader = TaxonomyReader.openIfChanged(reader);
  assertNotNull("expected a new instance", newReader);
  assertEquals(2, newReader.getSize());
  // NOTE(review): assertNotSame compares boxed Integer references here;
  // assertNotEquals (or an int comparison) would express the intent better.
  assertNotSame(TaxonomyReader.INVALID_ORDINAL, newReader.getOrdinal(cp));
  reader.close();
  reader = newReader;
  writer.close();
  reader.close();
  dir.close();
}

// TODO (Facet): test multiple readers, one writer. Have the multiple readers
// using the same object (simulating threads) or different objects
// (simulating processes).
}
/*
 * Title: HashTable
 * Author: Matthew Boyette
 * Date: 6/19/2013
 *
 * A specialized hash table data structure designed to allow a comparative analysis between Linear probing and Quadratic probing.
 */

package api.util.datastructures;

import java.lang.reflect.Array;

import api.util.Mathematics;

/**
 * An open-addressing hash table of fixed capacity that can probe either
 * linearly or quadratically, and that accumulates the total probe lengths of
 * successful and failed searches so the two strategies can be compared.
 *
 * The capacity is derived from the requested fill size and load factor, then
 * raised to a prime via Mathematics.makePrimeGreater (keeping the effective
 * load factor below 1 so the probe loops terminate). String and Integer keys
 * use custom hash functions; any other type falls back to hashCode().
 *
 * Note: removal is not supported, so slot occupancy is tracked simply by
 * null/non-null entries (no tombstones are needed).
 */
public class HashTable<T>
{
    private int      currentSize          = 0;     // number of stored elements
    private T[]      hashArray            = null;  // backing slot array
    private double   loadFactor           = 0.0;   // requested elements/capacity ratio
    private int      maximumSize          = 0;     // table capacity (a prime)
    private boolean  probeIsLinear        = false; // true: linear probing; false: quadratic
    private Class<T> storageType          = null;  // runtime element type, needed for generic array creation
    private int      totalProbeLenFailure = 0;     // accumulated probe length of failed searches
    private int      totalProbeLenSuccess = 0;     // accumulated probe length of successful searches

    /**
     * Creates a table sized so that fillSize elements reach the given load factor.
     *
     * @param type           runtime class of the stored elements (for generic array creation)
     * @param loadFactor     desired load factor; assumed to be in (0, 1) — TODO confirm callers guarantee this
     * @param useLinearProbe true for linear probing, false for quadratic probing
     * @param fillSize       expected number of elements to be inserted
     */
    @SuppressWarnings({ "unchecked" })
    public HashTable(final Class<T> type, final double loadFactor, final boolean useLinearProbe, final int fillSize)
    {
        this.currentSize = 0;
        this.loadFactor = loadFactor;
        this.probeIsLinear = useLinearProbe;
        this.storageType = type;
        this.setMaximumSize((int) ( Math.ceil(fillSize / this.getLoadFactor()) ));
        this.hashArray = (T[]) Array.newInstance(this.storageType, this.getMaximumSize());
    }

    /**
     * Searches for data and returns its slot index, or -1 if it is absent
     * (or if data is null). As a side effect, the probe length of this search
     * is added to the success or failure counter.
     */
    public int find(final T data)
    {
        if ( this.isEmpty() )
        {
            return -1;
        }

        int retVal = this.hash(data);
        int quadStep = 0;
        int probeLen = 1;

        if ( retVal < 0 )
        {
            // hash() signals null input with -1.
            return retVal;
        }

        // Probe until we hit a never-used (null) slot or the matching element.
        while ( ( this.wasIndexOccupiedPreviously(retVal) ) && ( data.equals(this.hashArray[retVal]) == false ) )
        {
            if ( this.isProbeLinear() )
            {
                retVal++;
            }
            else
            {
                quadStep++;
                retVal = (int) ( retVal + Math.pow(quadStep, 2) );
            }

            if ( retVal >= this.getMaximumSize() )
            {
                // Wrap around the end of the table.
                retVal %= this.getMaximumSize();
            }

            probeLen++;
        }

        if ( this.hashArray[retVal] == null )
        {
            retVal = -1;
            this.totalProbeLenFailure += probeLen;
        }
        else
        {
            this.totalProbeLenSuccess += probeLen;
        }

        return retVal;
    }

    public int getCurrentSize()
    {
        return this.currentSize;
    }

    public double getLoadFactor()
    {
        return this.loadFactor;
    }

    public int getMaximumSize()
    {
        return this.maximumSize;
    }

    public int getTotalProbeLenFailure()
    {
        return this.totalProbeLenFailure;
    }

    public int getTotalProbeLenSuccess()
    {
        return this.totalProbeLenSuccess;
    }

    /**
     * Hashes data into [0, maximumSize). Strings and Integers use custom
     * character/digit mixing; anything else falls back to Object.hashCode().
     * Returns -1 for null input.
     */
    public int hash(final T data)
    {
        int hashVal = 0;

        if ( data == null )
        {
            hashVal = -1;
        }
        else
        {
            if ( data instanceof String )
            {
                String s = (String) data;

                for ( int i = 0; i < s.length(); i++ )
                {
                    // Take the integer value at the current character index, invert its bits (take one's complement), and store the absolute value as seedVal.
                    int seedVal = Math.abs( ~( (int) ( s.charAt(i) ) ));
                    // Fold the character into the hash, base 256, modulo the table size.
                    hashVal = ( ( hashVal * 256 ) + seedVal ) % this.getMaximumSize();
                }
            }
            else if ( data instanceof Integer )
            {
                // NOTE(review): a negative Integer's toString() contains '-',
                // which makes parseInt below throw NumberFormatException —
                // confirm whether negative keys must be supported.
                String s = data.toString();

                for ( int i = 0; i < s.length(); i++ )
                {
                    // Take the integer value at the current digit index, invert its bits (take one's complement), and store the absolute value as seedVal.
                    int seedVal = Math.abs( ~( Integer.parseInt(s.substring(i, i + 1)) ));
                    // Fold the digit into the hash, base 10, modulo the table size.
                    hashVal = ( ( hashVal * 10 ) + seedVal ) % this.getMaximumSize();
                }
            }
            else
            {
                // BUG FIX: a raw hashCode() may be negative or >= maximumSize,
                // which previously caused ArrayIndexOutOfBoundsException in
                // find()/insert(). Math.floorMod guarantees a non-negative,
                // in-range index.
                hashVal = Math.floorMod(data.hashCode(), this.getMaximumSize());
            }
        }

        return hashVal;
    }

    /**
     * Inserts data, probing past collisions, and returns the slot index used.
     * Returns -1 if the table is full or data is null. Duplicates are not
     * detected; each successful insert consumes a slot.
     */
    public int insert(final T data)
    {
        if ( this.getCurrentSize() >= this.maximumSize )
        {
            return -1;
        }

        int step = 0;
        int hashVal = this.hash(data);

        if ( hashVal < 0 )
        {
            return hashVal;
        }

        while ( this.isIndexOccupied(hashVal) )
        {
            if ( this.isProbeLinear() )
            {
                hashVal++;
            }
            else
            {
                step++;
                hashVal = (int) ( hashVal + Math.pow(step, 2) );
            }

            if ( hashVal >= this.getMaximumSize() )
            {
                // Wrap around the end of the table.
                hashVal %= this.getMaximumSize();
            }
        }

        this.hashArray[hashVal] = data;
        this.currentSize++;
        return hashVal;
    }

    public boolean isEmpty()
    {
        return ( this.getCurrentSize() <= 0 );
    }

    // A slot is considered free if it is null, or holds the sentinel values
    // zero / empty string (used during insertion only).
    private boolean isIndexOccupied(final int index)
    {
        boolean retVal = true;

        if ( ( this.hashArray[index] == null ) || ( this.hashArray[index].equals(0) ) || ( this.hashArray[index].equals("") ) )
        {
            retVal = false;
        }

        return retVal;
    }

    public boolean isProbeLinear()
    {
        return this.probeIsLinear;
    }

    // Capacity is always bumped up to a prime, which improves probe coverage
    // (particularly for quadratic probing).
    private void setMaximumSize(final int maximumSize)
    {
        this.maximumSize = (int) Mathematics.makePrimeGreater(maximumSize);
    }

    // During search, only a null slot terminates probing (no removal support,
    // so a non-null slot was necessarily once — and still is — occupied).
    private boolean wasIndexOccupiedPreviously(final int index)
    {
        boolean retVal = true;

        if ( this.hashArray[index] == null )
        {
            retVal = false;
        }

        return retVal;
    }
}
/*
 * Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.reporting.core.utils;

import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.xml.JRXmlLoader;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.registry.core.*;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.reporting.api.ReportingException;
import org.wso2.carbon.reporting.core.ReportConstants;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.FileUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;

import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Common utility functions for managing JRXML report templates stored in the
 * configuration registry: listing, reading, updating, deleting and seeding
 * them from the file system.
 */
public class CommonUtil {
    private static Log log = LogFactory.getLog(CommonUtil.class);

    /**
     * Deletes the named report template from the registry, if present.
     *
     * @param templateName name of the report
     * @param registry     Registry
     * @throws ReportingException if failed to delete report template
     */
    public static void deleteReportTemplate(String templateName, Registry registry)
            throws ReportingException {
        try {
            String resourcePath = RegistryUtils.getRelativePathToOriginal(ReportConstants.JRXML_PATH,
                    RegistryConstants.CONFIG_REGISTRY_BASE_PATH);
            if (registry.resourceExists(resourcePath)) {
                registry.delete(resourcePath + RegistryConstants.PATH_SEPARATOR +
                        templateName + ".jrxml");
            } else {
                if (log.isDebugEnabled()) {
                    // BUG FIX: was log.info inside an isDebugEnabled() guard,
                    // so the message was emitted at the wrong level.
                    log.debug("no any report templates called " + templateName + " , to delete");
                }
            }
        } catch (RegistryException e) {
            throw new ReportingException("Error occurred deleting the report template : " +
                    templateName, e);
        }
    }

    /**
     * Lists the names (without the ".jrxml" extension) of all report templates
     * in the registry.
     *
     * @param registry Registry
     * @return report name list, or null if the template collection does not exist
     * @throws ReportingException if failed to get report name list
     */
    public static List<String> getAllReports(Registry registry) throws ReportingException {
        Resource resource;
        List<String> reportNames = null;
        try {
            String relativePath = RegistryUtils.getRelativePathToOriginal(ReportConstants.JRXML_PATH,
                    RegistryConstants.CONFIG_REGISTRY_BASE_PATH);
            if (registry.resourceExists(relativePath)) {
                resource = registry.get(relativePath);
                if (resource instanceof Collection) {
                    reportNames = new ArrayList<String>();
                    String[] paths = ((Collection) resource).getChildren();
                    for (String resourcePath : paths) {
                        Resource childResource = registry.get(resourcePath);
                        if (!(childResource instanceof Collection)) {
                            String name = ((ResourceImpl) childResource).getName();
                            // BUG FIX: String.split() takes a regex and "."
                            // matches any character, so names containing
                            // "<any-char>jrxml" were truncated incorrectly.
                            // Strip the literal extension instead.
                            if (name.endsWith(".jrxml")) {
                                name = name.substring(0, name.length() - ".jrxml".length());
                            }
                            reportNames.add(name);
                        }
                    }
                }
            } else {
                if (log.isDebugEnabled()) {
                    // BUG FIX: was log.info inside an isDebugEnabled() guard.
                    log.debug("no any report templates available to generate reports");
                }
            }
        } catch (RegistryException e) {
            throw new ReportingException("Error occurred getting all the reports names", e);
        }
        return reportNames;
    }

    /**
     * Reads the named template from the registry and returns its document
     * element serialized back to a string.
     *
     * @param componentName  name of the report requesting component
     * @param reportTemplate name of the template
     * @param registry       Registry
     * @return report template as string
     * @throws ReportingException if failed to get report template
     */
    public static String getReportResources(String componentName, String reportTemplate,
                                            Registry registry)
            throws ReportingException, XMLStreamException {
        String jrXmlPath;
        if (reportTemplate != null && !"".equals(reportTemplate)) {
            jrXmlPath = ReportConstants.JRXML_PATH + RegistryConstants.PATH_SEPARATOR +
                    reportTemplate + ".jrxml";
        } else {
            throw new ReportingException("Can't generate report without template ");
        }
        Resource resource;
        InputStream reportDefinitionOmStream;
        StAXOMBuilder stAXOMBuilder;
        OMElement reportJrXmlOmElement;
        try {
            resource = registry.get(RegistryUtils.getRelativePathToOriginal(jrXmlPath,
                    RegistryConstants.CONFIG_REGISTRY_BASE_PATH));
            reportDefinitionOmStream = resource.getContentStream();
        } catch (RegistryException e) {
            throw new ReportingException(reportTemplate + " getting failed from " + componentName, e);
        }
        XMLInputFactory xmlInputFactory;
        XMLStreamReader xmlStreamReader = null;
        xmlInputFactory = XMLInputFactory.newInstance();
        try {
            xmlStreamReader = xmlInputFactory.createXMLStreamReader(reportDefinitionOmStream);
            stAXOMBuilder = new StAXOMBuilder(xmlStreamReader);
            reportJrXmlOmElement = stAXOMBuilder.getDocumentElement();
            return reportJrXmlOmElement.toString();
        } catch (XMLStreamException e) {
            throw new ReportingException(reportTemplate + " getting failed from " + componentName, e);
        } finally {
            if (xmlStreamReader != null) {
                xmlStreamReader.close();
            }
            // RESOURCE-LEAK FIX: closing the XMLStreamReader does not close
            // the underlying registry content stream; release it explicitly.
            try {
                reportDefinitionOmStream.close();
            } catch (IOException ignored) {
                // best effort — nothing sensible to do if close fails
            }
        }
    }

    /**
     * Reads the named template from the registry and returns its raw text.
     *
     * @param componentName  name of the report requesting component
     * @param reportTemplate name of the template
     * @param registry       Registry
     * @return template file content as string
     * @throws ReportingException if the template cannot be read
     */
    public static String getJRXMLFileContent(String componentName, String reportTemplate,
                                             Registry registry) throws ReportingException {
        String jrXmlPath;
        if (reportTemplate != null && !"".equals(reportTemplate)) {
            jrXmlPath = ReportConstants.JRXML_PATH + RegistryConstants.PATH_SEPARATOR +
                    reportTemplate + ".jrxml";
        } else {
            throw new ReportingException("Can't generate report without template ");
        }
        try {
            Resource resource = registry.get(RegistryUtils.getRelativePathToOriginal(jrXmlPath,
                    RegistryConstants.CONFIG_REGISTRY_BASE_PATH));
            // RESOURCE-LEAK FIX: the reader (and underlying registry stream)
            // was previously leaked when readLine() threw; use
            // try-with-resources.
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(resource.getContentStream()))) {
                StringBuilder sb = new StringBuilder();
                String line;
                while ((line = br.readLine()) != null) {
                    // BUG FIX: line terminators were dropped, collapsing the
                    // whole template onto one line; preserve them.
                    sb.append(line).append('\n');
                }
                return sb.toString();
            }
        } catch (RegistryException e) {
            throw new ReportingException(reportTemplate + " getting failed from " + componentName, e);
        } catch (IOException e) {
            // BUG FIX: the cause was previously dropped.
            throw new ReportingException(reportTemplate + " failed to read", e);
        }
    }

    /**
     * Validates the given JRXML content and stores it in the registry under
     * the given file name.
     *
     * @param fileName    name of the modifying report name
     * @param fileContent modified content
     * @param registry    Registry
     * @return status of the update process
     * @throws org.wso2.carbon.reporting.api.ReportingException
     *
     */
    public static boolean updateReport(String fileName, String fileContent, Registry registry)
            throws ReportingException, JRException {
        boolean status;
        try {
            try {
                // validate report template before updating
                byte[] fileContentBytes = fileContent.getBytes();
                InputStream inputStream = new ByteArrayInputStream(fileContentBytes);
                JRXmlLoader.load(inputStream);
            } catch (JRException e) {
                throw new JRException("This is not valid report template", e);
            }
            Resource reportFilesResource = registry.newResource();
            reportFilesResource.setContent(fileContent);
            // NOTE(review): other methods join JRXML_PATH and the file name
            // with RegistryConstants.PATH_SEPARATOR — confirm JRXML_PATH ends
            // with a separator, otherwise this writes to a malformed path.
            registry.put(ReportConstants.JRXML_PATH + fileName + ".jrxml", reportFilesResource);
            status = true;
        } catch (RegistryException e) {
            throw new ReportingException("Failed to update report template " + fileName, e);
        }
        return status;
    }

    /**
     * Seeds the registry with the .jrxml templates shipped under
     * CARBON_HOME/repository/resources/reports, skipping any that already
     * exist. Runs inside an explicit tenant flow.
     *
     * @param systemRegistry registry to populate
     * @throws RegistryException if a template cannot be read or stored
     */
    public static void addJrxmlConfigs(Registry systemRegistry) throws RegistryException {
        PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext();
        int tenantId = carbonContext.getTenantId();
        String tenantDomain = carbonContext.getTenantDomain();
        try {
            PrivilegedCarbonContext.startTenantFlow();
            if (tenantDomain == null) {
                // No tenant information: fall back to the super tenant.
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(MultitenantConstants
                        .SUPER_TENANT_DOMAIN_NAME);
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(MultitenantConstants
                        .SUPER_TENANT_ID);
            } else {
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain);
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(tenantId);
            }
            String rxtDir = CarbonUtils.getCarbonHome() + File.separator + "repository" +
                    File.separator + "resources" + File.separator + "reports";
            File file = new File(rxtDir);
            if (!file.exists()) {
                return;
            }
            //create a FilenameFilter
            FilenameFilter filenameFilter = new FilenameFilter() {
                public boolean accept(File dir, String name) {
                    //if the file extension is .jrxml return true, else false
                    return name.endsWith(".jrxml");
                }
            };
            String[] rxtFilePaths = file.list(filenameFilter);
            // ROBUSTNESS FIX: File.list() returns null when the path is not a
            // directory (or on I/O error); guard against NPE.
            if (rxtFilePaths == null || rxtFilePaths.length == 0) {
                return;
            }
            for (String rxtPath : rxtFilePaths) {
                String resourcePath = ReportConstants.JRXML_PATH + "/" + rxtPath;
                try {
                    if (systemRegistry.resourceExists(resourcePath)) {
                        continue;
                    }
                    String rxt = FileUtil.readFileToString(rxtDir + File.separator + rxtPath);
                    Resource resource = systemRegistry.newResource();
                    resource.setContent(rxt.getBytes());
                    resource.setMediaType("application/xml");
                    systemRegistry.put(resourcePath, resource);
                } catch (IOException e) {
                    String msg = "Failed to read rxt files";
                    throw new RegistryException(msg, e);
                } catch (RegistryException e) {
                    String msg = "Failed to add rxt to registry ";
                    throw new RegistryException(msg, e);
                }
            }
        } finally {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.contrib.streaming.state.snapshot;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.contrib.streaming.state.RocksDBKeyedStateBackend;
import org.apache.flink.contrib.streaming.state.RocksIteratorWrapper;
import org.apache.flink.contrib.streaming.state.iterator.RocksQueueIterator;
import org.apache.flink.contrib.streaming.state.iterator.RocksStatesPerKeyGroupMergeIterator;
import org.apache.flink.contrib.streaming.state.iterator.RocksTransformingIteratorWrapper;
import org.apache.flink.contrib.streaming.state.iterator.SingleStateIterator;
import org.apache.flink.core.fs.CloseableRegistry;
import org.apache.flink.runtime.state.FullSnapshotResources;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.KeyValueStateIterator;
import org.apache.flink.runtime.state.RegisteredKeyValueStateBackendMetaInfo;
import org.apache.flink.runtime.state.StateSnapshotTransformer;
import org.apache.flink.runtime.state.StreamCompressionDecorator;
import org.apache.flink.runtime.state.heap.HeapPriorityQueueSnapshotRestoreWrapper;
import org.apache.flink.runtime.state.heap.HeapPriorityQueueStateSnapshot;
import org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshot;
import org.apache.flink.util.IOUtils;
import org.apache.flink.util.ResourceGuard;

import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksIterator;
import org.rocksdb.Snapshot;

import javax.annotation.Nonnegative;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/** A {@link FullSnapshotResources} for the RocksDB backend. */
public class RocksDBFullSnapshotResources<K> implements FullSnapshotResources<K> {

    private final List<StateMetaInfoSnapshot> stateMetaInfoSnapshots;
    // Lease on the backend's ResourceGuard; released in release().
    private final ResourceGuard.Lease lease;
    // RocksDB snapshot these resources read from; released in release().
    private final Snapshot snapshot;
    private final RocksDB db;
    private final List<MetaData> metaData;

    /** Number of bytes in the key-group prefix. */
    @Nonnegative private final int keyGroupPrefixBytes;
    private final KeyGroupRange keyGroupRange;
    private final TypeSerializer<K> keySerializer;
    private final StreamCompressionDecorator streamCompressionDecorator;
    private final List<HeapPriorityQueueStateSnapshot<?>> heapPriorityQueuesSnapshots;

    public RocksDBFullSnapshotResources(
            ResourceGuard.Lease lease,
            Snapshot snapshot,
            List<RocksDBKeyedStateBackend.RocksDbKvStateInfo> metaDataCopy,
            List<HeapPriorityQueueStateSnapshot<?>> heapPriorityQueuesSnapshots,
            List<StateMetaInfoSnapshot> stateMetaInfoSnapshots,
            RocksDB db,
            int keyGroupPrefixBytes,
            KeyGroupRange keyGroupRange,
            TypeSerializer<K> keySerializer,
            StreamCompressionDecorator streamCompressionDecorator) {
        this.lease = lease;
        this.snapshot = snapshot;
        this.stateMetaInfoSnapshots = stateMetaInfoSnapshots;
        this.heapPriorityQueuesSnapshots = heapPriorityQueuesSnapshots;
        this.db = db;
        this.keyGroupPrefixBytes = keyGroupPrefixBytes;
        this.keyGroupRange = keyGroupRange;
        this.keySerializer = keySerializer;
        this.streamCompressionDecorator = streamCompressionDecorator;

        // we need to do this in the constructor, i.e. in the synchronous part of the snapshot
        // TODO: better yet, we can do it outside the constructor
        this.metaData = fillMetaData(metaDataCopy);
    }

    /**
     * Factory: snapshots the meta info of all registered k/v and priority-queue
     * states, acquires a resource-guard lease and a RocksDB snapshot, and bundles
     * them into a new instance. Must run in the synchronous snapshot phase.
     */
    public static <K> RocksDBFullSnapshotResources<K> create(
            final LinkedHashMap<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo>
                    kvStateInformation,
            // TODO: was it important that this is a LinkedHashMap
            final Map<String, HeapPriorityQueueSnapshotRestoreWrapper<?>> registeredPQStates,
            final RocksDB db,
            final ResourceGuard rocksDBResourceGuard,
            final KeyGroupRange keyGroupRange,
            final TypeSerializer<K> keySerializer,
            final int keyGroupPrefixBytes,
            final StreamCompressionDecorator keyGroupCompressionDecorator)
            throws IOException {

        final List<StateMetaInfoSnapshot> stateMetaInfoSnapshots =
                new ArrayList<>(kvStateInformation.size());
        final List<RocksDBKeyedStateBackend.RocksDbKvStateInfo> metaDataCopy =
                new ArrayList<>(kvStateInformation.size());

        for (RocksDBKeyedStateBackend.RocksDbKvStateInfo stateInfo : kvStateInformation.values()) {
            // snapshot meta info
            stateMetaInfoSnapshots.add(stateInfo.metaInfo.snapshot());
            metaDataCopy.add(stateInfo);
        }

        List<HeapPriorityQueueStateSnapshot<?>> heapPriorityQueuesSnapshots =
                new ArrayList<>(registeredPQStates.size());
        for (HeapPriorityQueueSnapshotRestoreWrapper<?> stateInfo : registeredPQStates.values()) {
            stateMetaInfoSnapshots.add(stateInfo.getMetaInfo().snapshot());
            heapPriorityQueuesSnapshots.add(stateInfo.stateSnapshot());
        }

        final ResourceGuard.Lease lease = rocksDBResourceGuard.acquireResource();
        final Snapshot snapshot = db.getSnapshot();

        return new RocksDBFullSnapshotResources<>(
                lease,
                snapshot,
                metaDataCopy,
                heapPriorityQueuesSnapshots,
                stateMetaInfoSnapshots,
                db,
                keyGroupPrefixBytes,
                keyGroupRange,
                keySerializer,
                keyGroupCompressionDecorator);
    }

    // Pairs each k/v state's column-family info with its (optional) serialized-state
    // snapshot transformer, resolved once up front.
    private List<MetaData> fillMetaData(
            List<RocksDBKeyedStateBackend.RocksDbKvStateInfo> metaDataCopy) {
        List<MetaData> metaData = new ArrayList<>(metaDataCopy.size());
        for (RocksDBKeyedStateBackend.RocksDbKvStateInfo rocksDbKvStateInfo : metaDataCopy) {
            StateSnapshotTransformer<byte[]> stateSnapshotTransformer = null;
            if (rocksDbKvStateInfo.metaInfo instanceof RegisteredKeyValueStateBackendMetaInfo) {
                stateSnapshotTransformer =
                        ((RegisteredKeyValueStateBackendMetaInfo<?, ?>) rocksDbKvStateInfo.metaInfo)
                                .getStateSnapshotTransformFactory()
                                .createForSerializedState()
                                .orElse(null);
            }
            metaData.add(new MetaData(rocksDbKvStateInfo, stateSnapshotTransformer));
        }
        return metaData;
    }

    @Override
    public KeyValueStateIterator createKVStateIterator() throws IOException {
        CloseableRegistry closeableRegistry = new CloseableRegistry();

        try {
            ReadOptions readOptions = new ReadOptions();
            closeableRegistry.registerCloseable(readOptions::close);
            // All iterators read from the snapshot taken in create().
            readOptions.setSnapshot(snapshot);

            List<Tuple2<RocksIteratorWrapper, Integer>> kvStateIterators =
                    createKVStateIterators(closeableRegistry, readOptions);

            List<SingleStateIterator> heapPriorityQueueIterators =
                    createHeapPriorityQueueIterators();

            // Here we transfer ownership of the required resources to the
            // RocksStatesPerKeyGroupMergeIterator
            return new RocksStatesPerKeyGroupMergeIterator(
                    closeableRegistry,
                    kvStateIterators,
                    heapPriorityQueueIterators,
                    keyGroupPrefixBytes);
        } catch (Throwable t) {
            // If anything goes wrong, clean up our stuff. If things went smoothly the
            // merging iterator is now responsible for closing the resources
            IOUtils.closeQuietly(closeableRegistry);
            throw new IOException("Error creating merge iterator", t);
        }
    }

    // Priority-queue iterators get state ids following the k/v state ids,
    // hence the counter starts at metaData.size().
    private List<SingleStateIterator> createHeapPriorityQueueIterators() {
        int kvStateId = metaData.size();
        List<SingleStateIterator> queuesIterators =
                new ArrayList<>(heapPriorityQueuesSnapshots.size());
        for (HeapPriorityQueueStateSnapshot<?> queuesSnapshot : heapPriorityQueuesSnapshots) {
            queuesIterators.add(
                    new RocksQueueIterator(
                            queuesSnapshot, keyGroupRange, keyGroupPrefixBytes, kvStateId++));
        }
        return queuesIterators;
    }

    // Creates one RocksDB iterator per k/v state (ids 0..metaData.size()-1) and
    // registers each with the given registry so it is closed on failure.
    private List<Tuple2<RocksIteratorWrapper, Integer>> createKVStateIterators(
            CloseableRegistry closeableRegistry, ReadOptions readOptions) throws IOException {

        final List<Tuple2<RocksIteratorWrapper, Integer>> kvStateIterators =
                new ArrayList<>(metaData.size());

        int kvStateId = 0;

        for (MetaData metaDataEntry : metaData) {
            RocksIteratorWrapper rocksIteratorWrapper =
                    createRocksIteratorWrapper(
                            db,
                            metaDataEntry.rocksDbKvStateInfo.columnFamilyHandle,
                            metaDataEntry.stateSnapshotTransformer,
                            readOptions);
            kvStateIterators.add(Tuple2.of(rocksIteratorWrapper, kvStateId));
            closeableRegistry.registerCloseable(rocksIteratorWrapper);
            ++kvStateId;
        }

        return kvStateIterators;
    }

    // Wraps the raw RocksIterator; if a snapshot transformer is configured the
    // wrapper applies it to each value while iterating.
    private static RocksIteratorWrapper createRocksIteratorWrapper(
            RocksDB db,
            ColumnFamilyHandle columnFamilyHandle,
            StateSnapshotTransformer<byte[]> stateSnapshotTransformer,
            ReadOptions readOptions) {
        RocksIterator rocksIterator = db.newIterator(columnFamilyHandle, readOptions);
        return stateSnapshotTransformer == null
                ? new RocksIteratorWrapper(rocksIterator)
                : new RocksTransformingIteratorWrapper(rocksIterator, stateSnapshotTransformer);
    }

    @Override
    public List<StateMetaInfoSnapshot> getMetaInfoSnapshots() {
        return stateMetaInfoSnapshots;
    }

    @Override
    public KeyGroupRange getKeyGroupRange() {
        return keyGroupRange;
    }

    @Override
    public TypeSerializer<K> getKeySerializer() {
        return keySerializer;
    }

    @Override
    public StreamCompressionDecorator getStreamCompressionDecorator() {
        return streamCompressionDecorator;
    }

    @Override
    public void release() {
        // Tell the DB the snapshot is no longer needed, then close the
        // snapshot handle itself and finally give back the resource lease.
        db.releaseSnapshot(snapshot);
        IOUtils.closeQuietly(snapshot);
        IOUtils.closeQuietly(lease);
    }

    // Column-family info plus its resolved (possibly null) snapshot transformer.
    private static class MetaData {
        final RocksDBKeyedStateBackend.RocksDbKvStateInfo rocksDbKvStateInfo;
        final StateSnapshotTransformer<byte[]> stateSnapshotTransformer;

        private MetaData(
                RocksDBKeyedStateBackend.RocksDbKvStateInfo rocksDbKvStateInfo,
                StateSnapshotTransformer<byte[]> stateSnapshotTransformer) {

            this.rocksDbKvStateInfo = rocksDbKvStateInfo;
            this.stateSnapshotTransformer = stateSnapshotTransformer;
        }
    }
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ads.googleads.v9.services.stub; import com.google.ads.googleads.v9.resources.KeywordPlanCampaign; import com.google.ads.googleads.v9.services.GetKeywordPlanCampaignRequest; import com.google.ads.googleads.v9.services.MutateKeywordPlanCampaignsRequest; import com.google.ads.googleads.v9.services.MutateKeywordPlanCampaignsResponse; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED 
DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link KeywordPlanCampaignServiceStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li> The default service address (googleads.googleapis.com) and default port (443) are used. * <li> Credentials are acquired automatically through Application Default Credentials. * <li> Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the total timeout of getKeywordPlanCampaign to 30 seconds: * * <pre>{@code * KeywordPlanCampaignServiceStubSettings.Builder keywordPlanCampaignServiceSettingsBuilder = * KeywordPlanCampaignServiceStubSettings.newBuilder(); * keywordPlanCampaignServiceSettingsBuilder * .getKeywordPlanCampaignSettings() * .setRetrySettings( * keywordPlanCampaignServiceSettingsBuilder * .getKeywordPlanCampaignSettings() * .getRetrySettings() * .toBuilder() * .setTotalTimeout(Duration.ofSeconds(30)) * .build()); * KeywordPlanCampaignServiceStubSettings keywordPlanCampaignServiceSettings = * keywordPlanCampaignServiceSettingsBuilder.build(); * }</pre> */ @Generated("by gapic-generator-java") public class KeywordPlanCampaignServiceStubSettings extends StubSettings<KeywordPlanCampaignServiceStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder().add("https://www.googleapis.com/auth/adwords").build(); private final UnaryCallSettings<GetKeywordPlanCampaignRequest, KeywordPlanCampaign> getKeywordPlanCampaignSettings; private final UnaryCallSettings< MutateKeywordPlanCampaignsRequest, MutateKeywordPlanCampaignsResponse> mutateKeywordPlanCampaignsSettings; /** Returns the object with the settings used for calls to getKeywordPlanCampaign. */ public UnaryCallSettings<GetKeywordPlanCampaignRequest, KeywordPlanCampaign> getKeywordPlanCampaignSettings() { return getKeywordPlanCampaignSettings; } /** Returns the object with the settings used for calls to mutateKeywordPlanCampaigns. */ public UnaryCallSettings<MutateKeywordPlanCampaignsRequest, MutateKeywordPlanCampaignsResponse> mutateKeywordPlanCampaignsSettings() { return mutateKeywordPlanCampaignsSettings; } @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public KeywordPlanCampaignServiceStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcKeywordPlanCampaignServiceStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ public static String getDefaultEndpoint() { return "googleads.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "googleads.mtls.googleapis.com:443"; } /** Returns the default service scopes. 
*/ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(KeywordPlanCampaignServiceStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected KeywordPlanCampaignServiceStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); getKeywordPlanCampaignSettings = settingsBuilder.getKeywordPlanCampaignSettings().build(); mutateKeywordPlanCampaignsSettings = settingsBuilder.mutateKeywordPlanCampaignsSettings().build(); } /** Builder for KeywordPlanCampaignServiceStubSettings. 
*/ public static class Builder extends StubSettings.Builder<KeywordPlanCampaignServiceStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final UnaryCallSettings.Builder<GetKeywordPlanCampaignRequest, KeywordPlanCampaign> getKeywordPlanCampaignSettings; private final UnaryCallSettings.Builder< MutateKeywordPlanCampaignsRequest, MutateKeywordPlanCampaignsResponse> mutateKeywordPlanCampaignsSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf( Lists.<StatusCode.Code>newArrayList( StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelay(Duration.ofMillis(60000L)) .setInitialRpcTimeout(Duration.ofMillis(3600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeout(Duration.ofMillis(3600000L)) .setTotalTimeout(Duration.ofMillis(3600000L)) .build(); definitions.put("retry_policy_0_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); getKeywordPlanCampaignSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); mutateKeywordPlanCampaignsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( getKeywordPlanCampaignSettings, mutateKeywordPlanCampaignsSettings); 
initDefaults(this); } protected Builder(KeywordPlanCampaignServiceStubSettings settings) { super(settings); getKeywordPlanCampaignSettings = settings.getKeywordPlanCampaignSettings.toBuilder(); mutateKeywordPlanCampaignsSettings = settings.mutateKeywordPlanCampaignsSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( getKeywordPlanCampaignSettings, mutateKeywordPlanCampaignsSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .getKeywordPlanCampaignSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .mutateKeywordPlanCampaignsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to getKeywordPlanCampaign. 
*/ public UnaryCallSettings.Builder<GetKeywordPlanCampaignRequest, KeywordPlanCampaign> getKeywordPlanCampaignSettings() { return getKeywordPlanCampaignSettings; } /** Returns the builder for the settings used for calls to mutateKeywordPlanCampaigns. */ public UnaryCallSettings.Builder< MutateKeywordPlanCampaignsRequest, MutateKeywordPlanCampaignsResponse> mutateKeywordPlanCampaignsSettings() { return mutateKeywordPlanCampaignsSettings; } @Override public KeywordPlanCampaignServiceStubSettings build() throws IOException { return new KeywordPlanCampaignServiceStubSettings(this); } } }
package org.saiku.olap.query2.util;

import org.apache.commons.lang.StringUtils;

import org.saiku.olap.dto.SaikuCube;
import org.saiku.olap.query2.ThinAxis;
import org.saiku.olap.query2.ThinCalculatedMeasure;
import org.saiku.olap.query2.ThinCalculatedMember;
import org.saiku.olap.query2.ThinDetails;
import org.saiku.olap.query2.ThinHierarchy;
import org.saiku.olap.query2.ThinLevel;
import org.saiku.olap.query2.ThinMeasure;
import org.saiku.olap.query2.ThinMeasure.Type;
import org.saiku.olap.query2.ThinMember;
import org.saiku.olap.query2.ThinQuery;
import org.saiku.olap.query2.ThinQueryModel;
import org.saiku.olap.query2.ThinQueryModel.AxisLocation;
import org.saiku.olap.query2.ThinSelection;
import org.saiku.olap.query2.common.ThinQuerySet;
import org.saiku.olap.query2.common.ThinSortableQuerySet;
import org.saiku.olap.query2.common.ThinSortableQuerySet.HierarchizeMode;
import org.saiku.olap.query2.common.ThinSortableQuerySet.SortOrder;
import org.saiku.olap.query2.filter.ThinFilter;
import org.saiku.olap.query2.filter.ThinFilter.FilterFlavour;
import org.saiku.olap.query2.filter.ThinFilter.FilterFunction;
import org.saiku.olap.query2.filter.ThinFilter.FilterOperator;
import org.saiku.query.IQuerySet;
import org.saiku.query.ISortableQuerySet;
import org.saiku.query.Query;
import org.saiku.query.QueryAxis;
import org.saiku.query.QueryDetails;
import org.saiku.query.QueryHierarchy;
import org.saiku.query.QueryLevel;
import org.saiku.query.mdx.GenericFilter;
import org.saiku.query.mdx.IFilterFunction;
import org.saiku.query.mdx.NFilter;
import org.saiku.query.mdx.NameFilter;
import org.saiku.query.mdx.NameLikeFilter;
import org.saiku.query.metadata.CalculatedMeasure;
import org.saiku.query.metadata.CalculatedMember;

import org.olap4j.Axis;
import org.olap4j.impl.NamedListImpl;
import org.olap4j.metadata.Measure;
import org.olap4j.metadata.Member;
import org.olap4j.metadata.NamedList;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

/**
 * One-way converter from the rich {@link Query} object model to its serializable "thin"
 * counterpart ({@link ThinQuery} and friends). All methods are stateless; the class is a
 * static utility and is never instantiated.
 */
public class Thin {

  /**
   * Converts a {@link Query} into a {@link ThinQuery} for the given cube, carrying over the
   * query model, any declared parameters, and the generated MDX.
   *
   * @throws Exception if parameter retrieval or MDX generation fails in the underlying query
   */
  public static ThinQuery convert(Query query, SaikuCube cube) throws Exception {
    ThinQuery tq = new ThinQuery(query.getName(), cube);
    ThinQueryModel tqm = convert(query, tq);
    tq.setQueryModel(tqm);
    if (query.getParameters() != null) {
      // Refresh parameter values before copying them onto the thin query.
      query.retrieveParameters();
      tq.setParameters(query.getParameters());
    }
    tq.setMdx(query.getMdx());
    return tq;
  }

  /** Builds the thin query model: axes, details, calculated measures/members, visual totals. */
  private static ThinQueryModel convert(Query query, ThinQuery tq) {
    ThinQueryModel tqm = new ThinQueryModel();
    tqm.setAxes(convertAxes(query.getAxes(), tq));
    ThinDetails td = convert(query.getDetails());
    tqm.setDetails(td);
    List<ThinCalculatedMeasure> cms = convert(query.getCalculatedMeasures());
    List<ThinCalculatedMember> cmem = convertCM(query.getCalculatedMembers());
    tqm.setCalculatedMeasures(cms);
    tqm.setCalculatedMembers(cmem);
    tqm.setVisualTotals(query.isVisualTotals());
    tqm.setVisualTotalsPattern(query.getVisualTotalsPattern());
    return tqm;
  }

  /** Maps each {@link CalculatedMeasure} to its thin representation; never returns null. */
  private static List<ThinCalculatedMeasure> convert(List<CalculatedMeasure> qcms) {
    List<ThinCalculatedMeasure> tcms = new ArrayList<>();
    if (qcms != null && qcms.size() > 0) {
      for (CalculatedMeasure qcm : qcms) {
        ThinCalculatedMeasure tcm = new ThinCalculatedMeasure(
            qcm.getHierarchy().getUniqueName(),
            qcm.getName(),
            qcm.getUniqueName(),
            qcm.getCaption(),
            qcm.getFormula(),
            qcm.getFormatProperties());
        tcms.add(tcm);
      }
    }
    return tcms;
  }

  /**
   * Maps each {@link CalculatedMember} to its thin representation, capturing the level names of
   * the parent and grandparent members (when present) so the client can re-anchor the member.
   */
  private static List<ThinCalculatedMember> convertCM(List<CalculatedMember> qcms) {
    List<ThinCalculatedMember> tcms = new ArrayList<>();
    if (qcms != null && qcms.size() > 0) {
      for (CalculatedMember qcm : qcms) {
        // Grandparent level name, if a parent-of-parent exists.
        String pplevel = null;
        if (qcm.getParentMember() != null && qcm.getParentMember().getParentMember() != null) {
          pplevel = qcm.getParentMember().getParentMember().getLevel().getName();
        }
        // Parent level name, if a parent exists.
        String plevel = null;
        if (qcm.getParentMember() != null) {
          plevel = qcm.getParentMember().getLevel().getName();
        }
        ThinCalculatedMember tcm = new ThinCalculatedMember(
            qcm.getHierarchy().getDimension().getName(),
            qcm.getHierarchy().getUniqueName(),
            qcm.getName(),
            qcm.getUniqueName(),
            qcm.getCaption(),
            qcm.getFormula(),
            qcm.getFormatProperties(),
            qcm.getParentMember() != null ? qcm.getParentMember().toString() : null,
            plevel,
            pplevel);
        tcms.add(tcm);
      }
    }
    return tcms;
  }

  /** Converts the query details (measures plus their axis/location placement). */
  private static ThinDetails convert(QueryDetails details) {
    ThinDetails.Location location = ThinDetails.Location.valueOf(details.getLocation().toString());
    AxisLocation axis = AxisLocation.valueOf(details.getAxis().toString());
    List<ThinMeasure> measures = new ArrayList<>();
    if (details.getMeasures().size() > 0) {
      for (Measure m : details.getMeasures()) {
        // Calculated measures are flagged so the client can distinguish them.
        ThinMeasure.Type type = Type.EXACT;
        if (m instanceof CalculatedMeasure) {
          type = Type.CALCULATED;
        }
        ThinMeasure tm = new ThinMeasure(m.getName(), m.getUniqueName(), m.getCaption(), type);
        measures.add(tm);
      }
    }
    return new ThinDetails(axis, location, measures);
  }

  /** Converts all populated axes, keyed by location, in standard axis order. */
  private static Map<AxisLocation, ThinAxis> convertAxes(Map<Axis, QueryAxis> axes, ThinQuery tq) {
    Map<ThinQueryModel.AxisLocation, ThinAxis> thinAxes = new TreeMap<>();
    if (axes != null) {
      for (Axis axis : sortAxes(axes.keySet())) {
        if (axis != null) {
          ThinAxis ta = convertAxis(axes.get(axis), tq);
          thinAxes.put(ta.getLocation(), ta);
        }
      }
    }
    return thinAxes;
  }

  /** Orders the given axes by the olap4j standard axis ordering (FILTER, COLUMNS, ROWS, ...). */
  private static List<Axis> sortAxes(Set<Axis> axes) {
    List<Axis> ax = new ArrayList<>();
    for (Axis a : Axis.Standard.values()) {
      if (axes.contains(a)) {
        ax.add(a);
      }
    }
    return ax;
  }

  /** Converts one query axis: hierarchies, non-empty flag, aggregators, and sort settings. */
  private static ThinAxis convertAxis(QueryAxis queryAxis, ThinQuery tq) {
    AxisLocation loc = getLocation(queryAxis);
    List<String> aggs = queryAxis.getQuery().getAggregators(queryAxis.getLocation().toString());
    ThinAxis ta = new ThinAxis(
        loc,
        convertHierarchies(queryAxis.getQueryHierarchies(), tq),
        queryAxis.isNonEmpty(),
        aggs);
    extendSortableQuerySet(ta, queryAxis);
    return ta;
  }

  /** Converts the hierarchies of an axis into a named list; never returns null. */
  private static NamedList<ThinHierarchy> convertHierarchies(
      List<QueryHierarchy> queryHierarchies, ThinQuery tq) {
    NamedListImpl<ThinHierarchy> hs = new NamedListImpl<>();
    if (queryHierarchies != null) {
      for (QueryHierarchy qh : queryHierarchies) {
        ThinHierarchy th = convertHierarchy(qh, tq);
        hs.add(th);
      }
    }
    return hs;
  }

  /** Converts one hierarchy, including its calculated-member names and active levels. */
  private static ThinHierarchy convertHierarchy(QueryHierarchy qh, ThinQuery tq) {
    List<String> s = new ArrayList<>();
    for (CalculatedMember cmember : qh.getCalculatedMembers()) {
      s.add(cmember.getUniqueName());
    }
    ThinHierarchy th = new ThinHierarchy(
        qh.getUniqueName(),
        qh.getCaption(),
        qh.getHierarchy().getDimension().getName(),
        convertLevels(qh.getActiveQueryLevels(), tq),
        s);
    extendSortableQuerySet(th, qh);
    return th;
  }

  /** Converts the active levels of a hierarchy, keyed by level name. */
  private static Map<String, ThinLevel> convertLevels(List<QueryLevel> levels, ThinQuery tq) {
    Map<String, ThinLevel> tl = new HashMap<>();
    if (levels != null) {
      for (QueryLevel ql : levels) {
        ThinLevel l = convertLevel(ql, tq);
        tl.put(ql.getName(), l);
      }
    }
    return tl;
  }

  /**
   * Converts one level. The selection type is decided by precedence: inclusions win over
   * exclusions, which win over a member range; with none of these, an empty INCLUSION
   * selection is produced. A parameterised level also registers its parameter on the query.
   */
  private static ThinLevel convertLevel(QueryLevel ql, ThinQuery tq) {
    List<ThinMember> inclusions = convertMembers(ql.getInclusions(), tq);
    List<ThinMember> exclusions = convertMembers(ql.getExclusions(), tq);
    ThinMember rangeStart = convertMember(ql.getRangeStart(), tq);
    ThinMember rangeEnd = convertMember(ql.getRangeEnd(), tq);
    ThinSelection ts = new ThinSelection(ThinSelection.Type.INCLUSION, null);
    if (inclusions.size() > 0) {
      ts = new ThinSelection(ThinSelection.Type.INCLUSION, inclusions);
    } else if (exclusions.size() > 0) {
      ts = new ThinSelection(ThinSelection.Type.EXCLUSION, exclusions);
    } else if (rangeStart != null && rangeEnd != null) {
      // A range is only meaningful with both endpoints present.
      List<ThinMember> range = new ArrayList<>();
      range.add(rangeStart);
      range.add(rangeEnd);
      ts = new ThinSelection(ThinSelection.Type.RANGE, range);
    }
    if (ql.hasParameter()) {
      ts.setParameterName(ql.getParameterName());
      tq.addParameter(ql.getParameterName());
    }
    List<String> aggs = ql.getQueryHierarchy().getQuery().getAggregators(ql.getUniqueName());
    ThinLevel l = new ThinLevel(ql.getName(), ql.getCaption(), ts, aggs);
    extendQuerySet(l, ql);
    return l;
  }

  /** Converts a member list; null input yields an empty list. */
  private static List<ThinMember> convertMembers(List<Member> members, ThinQuery tq) {
    List<ThinMember> ms = new ArrayList<>();
    if (members != null) {
      for (Member m : members) {
        ms.add(convertMember(m, tq));
      }
    }
    return ms;
  }

  /**
   * Converts a single member, or returns null for null input. Calculated members are tagged
   * with type "calculatedmember". The {@code tq} parameter is currently unused but kept for
   * signature symmetry with the other converters.
   */
  private static ThinMember convertMember(Member m, ThinQuery tq) {
    if (m != null) {
      String type = null;
      if (m instanceof CalculatedMember) {
        type = "calculatedmember";
      }
      ThinMember tm = new ThinMember(m.getName(), m.getUniqueName(), m.getCaption());
      tm.setType(type);
      return tm;
    }
    return null;
  }

  /** Maps an olap4j axis onto the thin-model location enum; null for unknown axes. */
  private static AxisLocation getLocation(QueryAxis axis) {
    Axis ax = axis.getLocation();
    if (Axis.ROWS.equals(ax)) {
      return AxisLocation.ROWS;
    } else if (Axis.COLUMNS.equals(ax)) {
      return AxisLocation.COLUMNS;
    } else if (Axis.FILTER.equals(ax)) {
      return AxisLocation.FILTER;
    } else if (Axis.PAGES.equals(ax)) {
      return AxisLocation.PAGES;
    }
    return null;
  }

  /** Copies the MDX set expression and any filters from the source query set. */
  private static void extendQuerySet(ThinQuerySet ts, IQuerySet qs) {
    if (StringUtils.isNotBlank(qs.getMdxSetExpression())) {
      ts.setMdx(qs.getMdxSetExpression());
    }
    if (qs.getFilters() != null && qs.getFilters().size() > 0) {
      List<ThinFilter> filters = convertFilters(qs.getFilters());
      ts.getFilters().addAll(filters);
    }
  }

  /**
   * Converts MDX filter functions into thin filters.
   *
   * <p>Fix: for name/name-like filters the hierarchy unique name is prepended to a
   * <em>copy</em> of the filter's expression list. The previous code called
   * {@code add(0, ...)} directly on the list returned by {@code getFilterExpression()},
   * mutating the source filter as a side effect of conversion (and growing it on every
   * repeated conversion if that list is the filter's internal state).
   */
  private static List<ThinFilter> convertFilters(List<IFilterFunction> filters) {
    List<ThinFilter> tfs = new ArrayList<>();
    for (IFilterFunction f : filters) {
      if (f instanceof NameFilter) {
        NameFilter nf = (NameFilter) f;
        List<String> expressions = new ArrayList<>(nf.getFilterExpression());
        expressions.add(0, nf.getHierarchy().getUniqueName());
        FilterOperator type = FilterOperator.LIKE;
        if (nf.getOp() != null && nf.getOp().equals("NOTEQUAL")) {
          type = FilterOperator.NOTEQUAL;
        }
        ThinFilter tf = new ThinFilter(FilterFlavour.Name, type, FilterFunction.Filter, expressions);
        tfs.add(tf);
      }
      if (f instanceof NameLikeFilter) {
        NameLikeFilter nf = (NameLikeFilter) f;
        List<String> expressions = new ArrayList<>(nf.getFilterExpression());
        expressions.add(0, nf.getHierarchy().getUniqueName());
        FilterOperator type = FilterOperator.LIKE;
        if (nf.getOp() != null && nf.getOp().equals("NOTEQUAL")) {
          type = FilterOperator.NOTEQUAL;
        }
        ThinFilter tf =
            new ThinFilter(FilterFlavour.NameLike, type, FilterFunction.Filter, expressions);
        tfs.add(tf);
      }
      if (f instanceof GenericFilter) {
        GenericFilter nf = (GenericFilter) f;
        List<String> expressions = new ArrayList<>();
        expressions.add(nf.getFilterExpression());
        ThinFilter tf = new ThinFilter(FilterFlavour.Generic, null, FilterFunction.Filter, expressions);
        tfs.add(tf);
      }
      if (f instanceof NFilter) {
        NFilter nf = (NFilter) f;
        List<String> expressions = new ArrayList<>();
        expressions.add(Integer.toString(nf.getN()));
        if (nf.getFilterExpression() != null) {
          expressions.add(nf.getFilterExpression());
        }
        FilterFunction type = FilterFunction.valueOf(nf.getFunctionType().toString());
        ThinFilter tf = new ThinFilter(FilterFlavour.N, null, type, expressions);
        tfs.add(tf);
      }
    }
    return tfs;
  }

  /** Copies sort order and hierarchize mode on top of the plain query-set attributes. */
  private static void extendSortableQuerySet(ThinSortableQuerySet ts, ISortableQuerySet qs) {
    extendQuerySet(ts, qs);
    if (qs.getHierarchizeMode() != null) {
      ts.setHierarchizeMode(HierarchizeMode.valueOf(qs.getHierarchizeMode().toString()));
    }
    if (qs.getSortOrder() != null) {
      ts.sort(SortOrder.valueOf(qs.getSortOrder().toString()), qs.getSortEvaluationLiteral());
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.protocol.http.proxy.gui; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Cursor; import java.awt.Dimension; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.UnsupportedFlavorException; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.io.IOException; import java.net.BindException; import java.util.Arrays; import java.util.Collection; import java.util.LinkedList; import java.util.List; import javax.swing.BorderFactory; import javax.swing.Box; import javax.swing.DefaultComboBoxModel; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.JScrollBar; import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.JTextField; import org.apache.jmeter.control.Controller; import org.apache.jmeter.control.gui.LogicControllerGui; import org.apache.jmeter.control.gui.TreeNodeWrapper; 
import org.apache.jmeter.engine.util.ValueReplacer; import org.apache.jmeter.functions.InvalidVariableException; import org.apache.jmeter.gui.GuiPackage; import org.apache.jmeter.gui.JMeterGUIComponent; import org.apache.jmeter.gui.UnsharedComponent; import org.apache.jmeter.gui.tree.JMeterTreeNode; import org.apache.jmeter.gui.util.HeaderAsPropertyRenderer; import org.apache.jmeter.gui.util.HorizontalPanel; import org.apache.jmeter.gui.util.MenuFactory; import org.apache.jmeter.gui.util.PowerTableModel; import org.apache.jmeter.gui.util.VerticalPanel; import org.apache.jmeter.protocol.http.control.RecordingController; import org.apache.jmeter.protocol.http.proxy.ProxyControl; import org.apache.jmeter.protocol.http.sampler.HTTPSamplerFactory; import org.apache.jmeter.testelement.TestElement; import org.apache.jmeter.testelement.TestPlan; import org.apache.jmeter.testelement.WorkBench; import org.apache.jmeter.testelement.property.PropertyIterator; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.exec.KeyToolUtils; import org.apache.jorphan.gui.GuiUtils; import org.apache.jorphan.gui.JLabeledTextField; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; public class ProxyControlGui extends LogicControllerGui implements JMeterGUIComponent, ActionListener, ItemListener, KeyListener, UnsharedComponent { private static final Logger log = LoggingManager.getLoggerForClass(); private static final long serialVersionUID = 232L; private static final String NEW_LINE = "\n"; // $NON-NLS-1$ private static final String SPACE = " "; // $NON-NLS-1$ /** * This choice means don't explicitly set Implementation and rely on default, see Bug 54154 */ private static final String USE_DEFAULT_HTTP_IMPL = ""; // $NON-NLS-1$ private static final String SUGGESTED_EXCLUSIONS = JMeterUtils.getPropDefault("proxy.excludes.suggested", "(?i).*\\.(bmp|css|js|gif|ico|jpe?g|png|swf|woff)"); // $NON-NLS-1$ private JTextField portField; private 
JLabeledTextField sslDomains; /** * Used to indicate that HTTP request headers should be captured. The * default is to capture the HTTP request headers, which are specific to * particular browser settings. */ private JCheckBox httpHeaders; /** * Whether to group requests together based on inactivity separation periods -- * and how to handle such grouping afterwards. */ private JComboBox<String> groupingMode; /** * Add an Assertion to the first sample of each set */ private JCheckBox addAssertions; /** * Set/clear the Use Keep-Alive box on the samplers (default is true) */ private JCheckBox useKeepAlive; /* * Use regexes to match the source data */ private JCheckBox regexMatch; /** * The list of sampler type names to choose from */ private JComboBox<String> samplerTypeName; /** * Set/clear the Redirect automatically box on the samplers (default is false) */ private JCheckBox samplerRedirectAutomatically; /** * Set/clear the Follow-redirects box on the samplers (default is true) */ private JCheckBox samplerFollowRedirects; /** * Set/clear the Download images box on the samplers (default is false) */ private JCheckBox samplerDownloadImages; /** * Add a prefix to HTTP sample name recorded */ private JTextField prefixHTTPSampleName; /** * Regular expression to include results based on content type */ private JTextField contentTypeInclude; /** * Regular expression to exclude results based on content type */ private JTextField contentTypeExclude; /** * List of available target controllers */ private JComboBox<Object> targetNodes; /** * Notify child Listener of Filtered Samplers */ private JCheckBox notifyChildSamplerListenerOfFilteredSamplersCB; private DefaultComboBoxModel<Object> targetNodesModel; private ProxyControl model; private JTable excludeTable; private PowerTableModel excludeModel; private JTable includeTable; private PowerTableModel includeModel; private static final String CHANGE_TARGET = "change_target"; // $NON-NLS-1$ private JButton stop, start, restart; 
//+ action names private static final String STOP = "stop"; // $NON-NLS-1$ private static final String START = "start"; // $NON-NLS-1$ private static final String RESTART = "restart"; // $NON-NLS-1$ // This is applied to fields that should cause a restart when changed private static final String ENABLE_RESTART = "enable_restart"; // $NON-NLS-1$ private static final String ADD_INCLUDE = "add_include"; // $NON-NLS-1$ private static final String ADD_EXCLUDE = "add_exclude"; // $NON-NLS-1$ private static final String DELETE_INCLUDE = "delete_include"; // $NON-NLS-1$ private static final String DELETE_EXCLUDE = "delete_exclude"; // $NON-NLS-1$ private static final String ADD_TO_INCLUDE_FROM_CLIPBOARD = "include_clipboard"; // $NON-NLS-1$ private static final String ADD_TO_EXCLUDE_FROM_CLIPBOARD = "exclude_clipboard"; // $NON-NLS-1$ private static final String ADD_SUGGESTED_EXCLUDES = "exclude_suggested"; private static final String PREFIX_HTTP_SAMPLER_NAME = "proxy_prefix_http_sampler_name"; // $NON-NLS-1$ //- action names // Resource names for column headers private static final String INCLUDE_COL = "patterns_to_include"; // $NON-NLS-1$ private static final String EXCLUDE_COL = "patterns_to_exclude"; // $NON-NLS-1$ // Used by itemListener private static final String PORTFIELD = "portField"; // $NON-NLS-1$ public ProxyControlGui() { super(); log.debug("Creating ProxyControlGui"); init(); } /** {@inheritDoc} */ @Override public TestElement createTestElement() { model = makeProxyControl(); log.debug("creating/configuring model = " + model); modifyTestElement(model); return model; } protected ProxyControl makeProxyControl() { ProxyControl local = new ProxyControl(); return local; } /** {@inheritDoc} */ @Override public void modifyTestElement(TestElement el) { GuiUtils.stopTableEditing(excludeTable); GuiUtils.stopTableEditing(includeTable); configureTestElement(el); if (el instanceof ProxyControl) { model = (ProxyControl) el; model.setPort(portField.getText()); 
model.setSslDomains(sslDomains.getText()); setIncludeListInProxyControl(model); setExcludeListInProxyControl(model); model.setCaptureHttpHeaders(httpHeaders.isSelected()); model.setGroupingMode(groupingMode.getSelectedIndex()); model.setAssertions(addAssertions.isSelected()); if(samplerTypeName.getSelectedIndex()< HTTPSamplerFactory.getImplementations().length) { model.setSamplerTypeName(HTTPSamplerFactory.getImplementations()[samplerTypeName.getSelectedIndex()]); } else { model.setSamplerTypeName(USE_DEFAULT_HTTP_IMPL); } model.setSamplerRedirectAutomatically(samplerRedirectAutomatically.isSelected()); model.setSamplerFollowRedirects(samplerFollowRedirects.isSelected()); model.setUseKeepAlive(useKeepAlive.isSelected()); model.setSamplerDownloadImages(samplerDownloadImages.isSelected()); model.setPrefixHTTPSampleName(prefixHTTPSampleName.getText()); model.setNotifyChildSamplerListenerOfFilteredSamplers(notifyChildSamplerListenerOfFilteredSamplersCB.isSelected()); model.setRegexMatch(regexMatch.isSelected()); model.setContentTypeInclude(contentTypeInclude.getText()); model.setContentTypeExclude(contentTypeExclude.getText()); TreeNodeWrapper nw = (TreeNodeWrapper) targetNodes.getSelectedItem(); if (nw == null) { model.setTarget(null); } else { model.setTarget(nw.getTreeNode()); } } } protected void setIncludeListInProxyControl(ProxyControl element) { List<String> includeList = getDataList(includeModel, INCLUDE_COL); element.setIncludeList(includeList); } protected void setExcludeListInProxyControl(ProxyControl element) { List<String> excludeList = getDataList(excludeModel, EXCLUDE_COL); element.setExcludeList(excludeList); } private List<String> getDataList(PowerTableModel pModel, String colName) { String[] dataArray = pModel.getData().getColumn(colName); List<String> list = new LinkedList<>(); for (String data : dataArray) { list.add(data); } return list; } /** {@inheritDoc} */ @Override public String getLabelResource() { return "proxy_title"; // $NON-NLS-1$ } /** 
{@inheritDoc} */ @Override public Collection<String> getMenuCategories() { return Arrays.asList(MenuFactory.NON_TEST_ELEMENTS); } /** {@inheritDoc} */ @Override public void configure(TestElement element) { log.debug("Configuring gui with " + element); super.configure(element); model = (ProxyControl) element; portField.setText(model.getPortString()); sslDomains.setText(model.getSslDomains()); httpHeaders.setSelected(model.getCaptureHttpHeaders()); groupingMode.setSelectedIndex(model.getGroupingMode()); addAssertions.setSelected(model.getAssertions()); samplerTypeName.setSelectedItem(model.getSamplerTypeName()); samplerRedirectAutomatically.setSelected(model.getSamplerRedirectAutomatically()); samplerFollowRedirects.setSelected(model.getSamplerFollowRedirects()); useKeepAlive.setSelected(model.getUseKeepalive()); samplerDownloadImages.setSelected(model.getSamplerDownloadImages()); prefixHTTPSampleName.setText(model.getPrefixHTTPSampleName()); notifyChildSamplerListenerOfFilteredSamplersCB.setSelected(model.getNotifyChildSamplerListenerOfFilteredSamplers()); regexMatch.setSelected(model.getRegexMatch()); contentTypeInclude.setText(model.getContentTypeInclude()); contentTypeExclude.setText(model.getContentTypeExclude()); reinitializeTargetCombo();// Set up list of potential targets and // enable listener populateTable(includeModel, model.getIncludePatterns().iterator()); populateTable(excludeModel, model.getExcludePatterns().iterator()); repaint(); } private void populateTable(PowerTableModel pModel, PropertyIterator iter) { pModel.clearData(); while (iter.hasNext()) { pModel.addRow(new Object[] { iter.next().getStringValue() }); } pModel.fireTableDataChanged(); } /* * Handles groupingMode. 
actionPerformed is not suitable, as that seems to be
 * activated whenever the Proxy is selected in the Test Plan
 * Also handles samplerTypeName
 */
/** {@inheritDoc} */
@Override
public void itemStateChanged(ItemEvent e) {
    // Any combo-box change may invalidate the running proxy configuration.
    // System.err.println(e.paramString());
    enableRestart();
}

/** {@inheritDoc} */
@Override
public void actionPerformed(ActionEvent action) {
    String command = action.getActionCommand();

    // Prevent both redirect types from being selected
    final Object source = action.getSource();
    if (source.equals(samplerFollowRedirects) && samplerFollowRedirects.isSelected()) {
        samplerRedirectAutomatically.setSelected(false);
    } else if (source.equals(samplerRedirectAutomatically) && samplerRedirectAutomatically.isSelected()) {
        samplerFollowRedirects.setSelected(false);
    }

    // System.err.println(action.paramString()+" "+command+ "
    // "+action.getModifiers());

    if (command.equals(STOP)) {
        // Stop the proxy and flip button states: only Start remains usable.
        model.stopProxy();
        stop.setEnabled(false);
        start.setEnabled(true);
        restart.setEnabled(false);
    } else if (command.equals(START)) {
        startProxy();
    } else if (command.equals(RESTART)) {
        model.stopProxy();
        startProxy();
    } else if (command.equals(ENABLE_RESTART)){
        enableRestart();
    } else if (command.equals(ADD_EXCLUDE)) {
        excludeModel.addNewRow();
        excludeModel.fireTableDataChanged();
        enableRestart();
    } else if (command.equals(ADD_INCLUDE)) {
        includeModel.addNewRow();
        includeModel.fireTableDataChanged();
        enableRestart();
    } else if (command.equals(DELETE_EXCLUDE)) {
        excludeModel.removeRow(excludeTable.getSelectedRow());
        excludeModel.fireTableDataChanged();
        enableRestart();
    } else if (command.equals(DELETE_INCLUDE)) {
        includeModel.removeRow(includeTable.getSelectedRow());
        includeModel.fireTableDataChanged();
        enableRestart();
    } else if (command.equals(CHANGE_TARGET)) {
        log.debug("Change target " + targetNodes.getSelectedItem());
        log.debug("In model " + model);
        TreeNodeWrapper nw = (TreeNodeWrapper) targetNodes.getSelectedItem();
        model.setTarget(nw.getTreeNode());
        enableRestart();
    } else if (command.equals(ADD_TO_INCLUDE_FROM_CLIPBOARD)) {
        addFromClipboard(includeTable);
        includeModel.fireTableDataChanged();
        enableRestart();
    } else if (command.equals(ADD_TO_EXCLUDE_FROM_CLIPBOARD)) {
        addFromClipboard(excludeTable);
        excludeModel.fireTableDataChanged();
        enableRestart();
    } else if (command.equals(ADD_SUGGESTED_EXCLUDES)) {
        addSuggestedExcludes(excludeTable);
        excludeModel.fireTableDataChanged();
        enableRestart();
    }
}

/**
 * Add suggested excludes to exclude table.
 * Splits the SUGGESTED_EXCLUSIONS constant on ';' and appends one row per entry,
 * then selects the newly added rows.
 * @param table {@link JTable} the exclude table to append to
 */
protected void addSuggestedExcludes(JTable table) {
    GuiUtils.stopTableEditing(table);
    int rowCount = table.getRowCount();
    PowerTableModel model = null;
    String[] exclusions = SUGGESTED_EXCLUSIONS.split(";"); // $NON-NLS-1$
    if (exclusions.length>0) {
        model = (PowerTableModel) table.getModel();
        if(model != null) {
            for (String clipboardLine : exclusions) {
                model.addRow(new Object[] {clipboardLine});
            }
            if (table.getRowCount() > rowCount) {
                // Highlight (select) the appropriate rows.
                int rowToSelect = model.getRowCount() - 1;
                table.setRowSelectionInterval(rowCount, rowToSelect);
            }
        }
    }
}

/**
 * Add values from the clipboard to table, one row per clipboard line.
 * Shows an error dialog if the clipboard cannot be read or does not hold text.
 * @param table {@link JTable} the include/exclude table to append to
 */
protected void addFromClipboard(JTable table) {
    GuiUtils.stopTableEditing(table);
    int rowCount = table.getRowCount();
    PowerTableModel model = null;
    try {
        String clipboardContent = GuiUtils.getPastedText();
        if (clipboardContent != null) {
            String[] clipboardLines = clipboardContent.split(NEW_LINE);
            for (String clipboardLine : clipboardLines) {
                model = (PowerTableModel) table.getModel();
                model.addRow(new Object[] {clipboardLine});
            }
            if (table.getRowCount() > rowCount) {
                if(model != null) {
                    // Highlight (select) the appropriate rows.
                    int rowToSelect = model.getRowCount() - 1;
                    table.setRowSelectionInterval(rowCount, rowToSelect);
                }
            }
        }
    } catch (IOException ioe) {
        JOptionPane.showMessageDialog(this,
                JMeterUtils.getResString("proxy_daemon_error_read_args") // $NON-NLS-1$
                + "\n" + ioe.getLocalizedMessage(), JMeterUtils.getResString("error_title"), // $NON-NLS-1$ $NON-NLS-2$
                JOptionPane.ERROR_MESSAGE);
    } catch (UnsupportedFlavorException ufe) {
        JOptionPane.showMessageDialog(this,
                JMeterUtils.getResString("proxy_daemon_error_not_retrieve") + SPACE // $NON-NLS-1$
                + DataFlavor.stringFlavor.getHumanPresentableName() + SPACE
                + JMeterUtils.getResString("proxy_daemon_error_from_clipboard") // $NON-NLS-1$
                + ufe.getLocalizedMessage(), JMeterUtils.getResString("error_title"), // $NON-NLS-1$
                JOptionPane.ERROR_MESSAGE);
    }
}

/**
 * Validate the target controller, replace variables in the model, and start
 * the recording proxy. Button states are updated on success; error dialogs
 * are shown for variable, bind, and I/O failures.
 */
private void startProxy() {
    ValueReplacer replacer = GuiPackage.getInstance().getReplacer();
    modifyTestElement(model);
    TreeNodeWrapper treeNodeWrapper = (TreeNodeWrapper)targetNodesModel.getSelectedItem();
    if (JMeterUtils.getResString("use_recording_controller").equals(treeNodeWrapper.getLabel())) {
        JMeterTreeNode targetNode = model.findTargetControllerNode();
        if(targetNode == null || !(targetNode.getTestElement() instanceof RecordingController)) {
            JOptionPane.showMessageDialog(this,
                    JMeterUtils.getResString("proxy_cl_wrong_target_cl"), // $NON-NLS-1$
                    JMeterUtils.getResString("error_title"), // $NON-NLS-1$
                    JOptionPane.ERROR_MESSAGE);
            return;
        }
    }
    // Proxy can take some while to start up; show a waiting cursor
    Cursor cursor = getCursor();
    setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
    // TODO somehow show progress
    try {
        replacer.replaceValues(model);
        model.startProxy();
        start.setEnabled(false);
        stop.setEnabled(true);
        restart.setEnabled(false);
        if (ProxyControl.isDynamicMode()) {
            // Inform the user where the generated root CA certificate lives.
            String[] details = model.getCertificateDetails();
            StringBuilder sb = new StringBuilder();
            sb.append(JMeterUtils.getResString("proxy_daemon_msg_rootca_cert")) // $NON-NLS-1$
                    .append(SPACE).append(KeyToolUtils.ROOT_CACERT_CRT_PFX)
                    .append(SPACE).append(JMeterUtils.getResString("proxy_daemon_msg_created_in_bin"));
            sb.append(NEW_LINE).append(JMeterUtils.getResString("proxy_daemon_msg_install_as_in_doc")); // $NON-NLS-1$
            sb.append(NEW_LINE).append(JMeterUtils.getResString("proxy_daemon_msg_check_details")) // $NON-NLS-1$
                    .append(NEW_LINE).append(NEW_LINE);
            for(String detail : details) {
                sb.append(detail).append(NEW_LINE);
            }
            JOptionPane.showMessageDialog(this,
                    sb.toString(),
                    JMeterUtils.getResString("proxy_daemon_msg_rootca_cert") + SPACE // $NON-NLS-1$
                    + KeyToolUtils.ROOT_CACERT_CRT_PFX + SPACE + JMeterUtils.getResString("proxy_daemon_msg_created_in_bin"), // $NON-NLS-1$
                    JOptionPane.INFORMATION_MESSAGE);
        }
    } catch (InvalidVariableException e) {
        JOptionPane.showMessageDialog(this,
                JMeterUtils.getResString("invalid_variables")+": "+e.getMessage(), // $NON-NLS-1$ $NON-NLS-2$
                JMeterUtils.getResString("error_title"), // $NON-NLS-1$
                JOptionPane.ERROR_MESSAGE);
    } catch (BindException e) {
        JOptionPane.showMessageDialog(this,
                JMeterUtils.getResString("proxy_daemon_bind_error")+": "+e.getMessage(), // $NON-NLS-1$ $NON-NLS-2$
                JMeterUtils.getResString("error_title"), // $NON-NLS-1$
                JOptionPane.ERROR_MESSAGE);
    } catch (IOException e) {
        JOptionPane.showMessageDialog(this,
                JMeterUtils.getResString("proxy_daemon_error")+": "+e.getMessage(), // $NON-NLS-1$ $NON-NLS-2$
                JMeterUtils.getResString("error_title"), // $NON-NLS-1$
                JOptionPane.ERROR_MESSAGE);
    } finally {
        setCursor(cursor);
    }
}

// Enable the Restart button, but only while the proxy is running
// (i.e. while Stop is enabled) — restarting a stopped proxy is meaningless.
private void enableRestart() {
    if (stop.isEnabled()) {
        // System.err.println("Enable Restart");
        restart.setEnabled(true);
    }
}

/** {@inheritDoc} */
@Override
public void keyPressed(KeyEvent e) {
}

/** {@inheritDoc} */
@Override
public void keyTyped(KeyEvent e) {
}

/** {@inheritDoc} */
@Override
public void keyReleased(KeyEvent e) {
    // Dispatch on the component's name, which doubles as the field id.
    String fieldName = e.getComponent().getName();
    if (fieldName.equals(PORTFIELD)) {
        try {
            Integer.parseInt(portField.getText());
        } catch (NumberFormatException nfe) {
            int length = portField.getText().length();
            if (length > 0) {
                JOptionPane.showMessageDialog(this,
                        JMeterUtils.getResString("proxy_settings_port_error_digits"), // $NON-NLS-1$
                        JMeterUtils.getResString("proxy_settings_port_error_invalid_data"), // $NON-NLS-1$
                        JOptionPane.WARNING_MESSAGE);
                // Drop the last character:
                portField.setText(portField.getText().substring(0, length-1));
            }
        }
        enableRestart();
    } else if (fieldName.equals(ENABLE_RESTART)){
        enableRestart();
    } else if(fieldName.equals(PREFIX_HTTP_SAMPLER_NAME)) {
        model.setPrefixHTTPSampleName(prefixHTTPSampleName.getText());
    }
}

// Build the whole GUI: title, settings boxes, include/exclude tables, controls.
private void init() { // WARNING: called from ctor so must not be overridden (i.e. must be private or final)
    setLayout(new BorderLayout(0, 5));
    setBorder(makeBorder());
    add(makeTitlePanel(), BorderLayout.NORTH);
    JPanel mainPanel = new JPanel(new BorderLayout());
    Box myBox = Box.createVerticalBox();
    myBox.add(createPortPanel());
    myBox.add(Box.createVerticalStrut(5));
    myBox.add(createTestPlanContentPanel());
    myBox.add(Box.createVerticalStrut(5));
    myBox.add(createHTTPSamplerPanel());
    myBox.add(Box.createVerticalStrut(5));
    myBox.add(createContentTypePanel());
    myBox.add(Box.createVerticalStrut(5));
    mainPanel.add(myBox, BorderLayout.NORTH);
    Box includeExcludePanel = Box.createVerticalBox();
    includeExcludePanel.add(createIncludePanel());
    includeExcludePanel.add(createExcludePanel());
    includeExcludePanel.add(createNotifyListenersPanel());
    mainPanel.add(includeExcludePanel, BorderLayout.CENTER);
    mainPanel.add(createControls(), BorderLayout.SOUTH);
    add(mainPanel, BorderLayout.CENTER);
}

// Create the Start/Stop/Restart button row; initial state: only Start enabled.
private JPanel createControls() {
    start = new JButton(JMeterUtils.getResString("start")); // $NON-NLS-1$
    start.addActionListener(this);
    start.setActionCommand(START);
    start.setEnabled(true);
    stop = new JButton(JMeterUtils.getResString("stop")); // $NON-NLS-1$
    stop.addActionListener(this);
    stop.setActionCommand(STOP);
    stop.setEnabled(false);
    restart = new JButton(JMeterUtils.getResString("restart")); // $NON-NLS-1$
    restart.addActionListener(this);
    restart.setActionCommand(RESTART);
    restart.setEnabled(false);
    JPanel panel = new JPanel();
    panel.add(start);
    panel.add(stop);
    panel.add(restart);
    return panel;
}

// Create the "general settings" panel: port field plus the HTTPS-domains field
// (the latter is only enabled when dynamic certificate mode is available).
private JPanel createPortPanel() {
    portField = new JTextField(ProxyControl.DEFAULT_PORT_S, 5);
    portField.setName(PORTFIELD);
    portField.addKeyListener(this);
    JLabel label = new JLabel(JMeterUtils.getResString("port")); // $NON-NLS-1$
    label.setLabelFor(portField);
    JPanel gPane = new JPanel(new BorderLayout());
    gPane.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(),
            JMeterUtils.getResString("proxy_general_settings"))); // $NON-NLS-1$
    HorizontalPanel panel = new HorizontalPanel();
    panel.add(label);
    panel.add(portField);
    panel.add(Box.createHorizontalStrut(10));
    gPane.add(panel, BorderLayout.WEST);
    sslDomains = new JLabeledTextField(JMeterUtils.getResString("proxy_domains")); // $NON-NLS-1$
    sslDomains.setEnabled(ProxyControl.isDynamicMode());
    if (ProxyControl.isDynamicMode()) {
        sslDomains.setToolTipText(JMeterUtils.getResString("proxy_domains_dynamic_mode_tooltip"));
    } else {
        sslDomains.setToolTipText(JMeterUtils.getResString("proxy_domains_dynamic_mode_tooltip_java6"));
    }
    gPane.add(sslDomains, BorderLayout.CENTER);
    return gPane;
}

// Create the "test plan content" panel: target selector, grouping mode and
// the header/assertion/regex checkboxes.
private JPanel createTestPlanContentPanel() {
    httpHeaders = new JCheckBox(JMeterUtils.getResString("proxy_headers")); // $NON-NLS-1$
    httpHeaders.setSelected(true); // maintain original default
    httpHeaders.addActionListener(this);
    httpHeaders.setActionCommand(ENABLE_RESTART);
    addAssertions = new JCheckBox(JMeterUtils.getResString("proxy_assertions")); // $NON-NLS-1$
    addAssertions.setSelected(false);
    addAssertions.addActionListener(this);
    addAssertions.setActionCommand(ENABLE_RESTART);
    regexMatch = new JCheckBox(JMeterUtils.getResString("proxy_regex")); // $NON-NLS-1$
    regexMatch.setSelected(false);
    regexMatch.addActionListener(this);
    regexMatch.setActionCommand(ENABLE_RESTART);
    VerticalPanel mainPanel = new VerticalPanel();
    mainPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(),
            JMeterUtils.getResString("proxy_test_plan_content"))); // $NON-NLS-1$
    HorizontalPanel nodeCreationPanel = new HorizontalPanel();
    nodeCreationPanel.add(createGroupingPanel());
    nodeCreationPanel.add(httpHeaders);
    nodeCreationPanel.add(addAssertions);
    nodeCreationPanel.add(regexMatch);
    HorizontalPanel targetPanel = new HorizontalPanel();
    targetPanel.add(createTargetPanel());
    mainPanel.add(targetPanel);
    mainPanel.add(nodeCreationPanel);
    return mainPanel;
}

// Create the "sampler settings" panel: sampler implementation combo, name
// prefix, redirect/keep-alive/download-images checkboxes.
private JPanel createHTTPSamplerPanel() {
    DefaultComboBoxModel<String> m = new DefaultComboBoxModel<>();
    for (String s : HTTPSamplerFactory.getImplementations()){
        m.addElement(s);
    }
    m.addElement(USE_DEFAULT_HTTP_IMPL);
    samplerTypeName = new JComboBox<>(m);
    samplerTypeName.setPreferredSize(new Dimension(150, 20));
    samplerTypeName.setSelectedItem(USE_DEFAULT_HTTP_IMPL);
    samplerTypeName.addItemListener(this);
    JLabel label2 = new JLabel(JMeterUtils.getResString("proxy_sampler_type")); // $NON-NLS-1$
    label2.setLabelFor(samplerTypeName);
    samplerRedirectAutomatically = new JCheckBox(JMeterUtils.getResString("follow_redirects_auto")); // $NON-NLS-1$
    samplerRedirectAutomatically.setSelected(false);
    samplerRedirectAutomatically.addActionListener(this);
    samplerRedirectAutomatically.setActionCommand(ENABLE_RESTART);
    samplerFollowRedirects = new JCheckBox(JMeterUtils.getResString("follow_redirects")); // $NON-NLS-1$
    samplerFollowRedirects.setSelected(true);
    samplerFollowRedirects.addActionListener(this);
    samplerFollowRedirects.setActionCommand(ENABLE_RESTART);
    useKeepAlive = new JCheckBox(JMeterUtils.getResString("use_keepalive")); // $NON-NLS-1$
    useKeepAlive.setSelected(true);
    useKeepAlive.addActionListener(this);
    useKeepAlive.setActionCommand(ENABLE_RESTART);
    samplerDownloadImages = new JCheckBox(JMeterUtils.getResString("web_testing_retrieve_images")); // $NON-NLS-1$
    samplerDownloadImages.setSelected(false);
    samplerDownloadImages.addActionListener(this);
    samplerDownloadImages.setActionCommand(ENABLE_RESTART);
    prefixHTTPSampleName = new JTextField(4);
    prefixHTTPSampleName.addKeyListener(this);
    prefixHTTPSampleName.setName(PREFIX_HTTP_SAMPLER_NAME);
    // TODO Not sure this is needed
    prefixHTTPSampleName.setActionCommand(ENABLE_RESTART);
    JLabel labelPrefix = new JLabel(JMeterUtils.getResString("proxy_prefix_http_sampler_name")); // $NON-NLS-1$
    labelPrefix.setLabelFor(prefixHTTPSampleName);
    HorizontalPanel panel = new HorizontalPanel();
    panel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(),
            JMeterUtils.getResString("proxy_sampler_settings"))); // $NON-NLS-1$
    panel.add(label2);
    panel.add(samplerTypeName);
    panel.add(labelPrefix);
    panel.add(prefixHTTPSampleName);
    panel.add(samplerRedirectAutomatically);
    panel.add(samplerFollowRedirects);
    panel.add(useKeepAlive);
    panel.add(samplerDownloadImages);
    return panel;
}

// Create the target-controller combo. Its action listener is attached later
// by reinitializeTargetCombo() to avoid firing during (re)population.
private JPanel createTargetPanel() {
    targetNodesModel = new DefaultComboBoxModel<>();
    targetNodes = new JComboBox<>(targetNodesModel);
    targetNodes.setPrototypeDisplayValue(""); // $NON-NLS-1$
    // Bug 56303 fixed the width of combo list
    JPopupMenu popup = (JPopupMenu) targetNodes.getUI().getAccessibleChild(targetNodes, 0); // get popup element
    JScrollPane scrollPane = findScrollPane(popup);
    if(scrollPane != null) {
        scrollPane.setHorizontalScrollBar(new JScrollBar(JScrollBar.HORIZONTAL)); // add scroll pane if label element is too long
        scrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
    }
    targetNodes.setActionCommand(CHANGE_TARGET); // Action listener will be added later
    JLabel label = new JLabel(JMeterUtils.getResString("proxy_target")); // $NON-NLS-1$
    label.setLabelFor(targetNodes);
    HorizontalPanel panel = new HorizontalPanel();
    panel.add(label);
    panel.add(targetNodes);
    return panel;
}
// Return the first JScrollPane child of the given popup, or null if none.
private JScrollPane findScrollPane(JPopupMenu popup) {
    Component[] components = popup.getComponents();
    for (Component component : components) {
        if(component instanceof JScrollPane) {
            return (JScrollPane) component;
        }
    }
    return null;
}

// Create the grouping-mode combo box.
private JPanel createGroupingPanel() {
    DefaultComboBoxModel<String> m = new DefaultComboBoxModel<>();
    // Note: position of these elements in the menu *must* match the
    // corresponding ProxyControl.GROUPING_* values.
    m.addElement(JMeterUtils.getResString("grouping_no_groups")); // $NON-NLS-1$
    m.addElement(JMeterUtils.getResString("grouping_add_separators")); // $NON-NLS-1$
    m.addElement(JMeterUtils.getResString("grouping_in_controllers")); // $NON-NLS-1$
    m.addElement(JMeterUtils.getResString("grouping_store_first_only")); // $NON-NLS-1$
    m.addElement(JMeterUtils.getResString("grouping_in_transaction_controllers")); // $NON-NLS-1$
    groupingMode = new JComboBox<>(m);
    groupingMode.setPreferredSize(new Dimension(150, 20));
    groupingMode.setSelectedIndex(0);
    groupingMode.addItemListener(this);
    JLabel label2 = new JLabel(JMeterUtils.getResString("grouping_mode")); // $NON-NLS-1$
    label2.setLabelFor(groupingMode);
    HorizontalPanel panel = new HorizontalPanel();
    panel.add(label2);
    panel.add(groupingMode);
    return panel;
}

// Create the content-type include/exclude filter fields, pre-filled from the
// proxy.content_type_include / proxy.content_type_exclude JMeter properties.
private JPanel createContentTypePanel() {
    contentTypeInclude = new JTextField(35);
    contentTypeInclude.addKeyListener(this);
    contentTypeInclude.setName(ENABLE_RESTART);
    JLabel labelInclude = new JLabel(JMeterUtils.getResString("proxy_content_type_include")); // $NON-NLS-1$
    labelInclude.setLabelFor(contentTypeInclude);
    // Default value
    contentTypeInclude.setText(JMeterUtils.getProperty("proxy.content_type_include")); // $NON-NLS-1$
    contentTypeExclude = new JTextField(35);
    contentTypeExclude.addKeyListener(this);
    contentTypeExclude.setName(ENABLE_RESTART);
    JLabel labelExclude = new JLabel(JMeterUtils.getResString("proxy_content_type_exclude")); // $NON-NLS-1$
    labelExclude.setLabelFor(contentTypeExclude);
    // Default value
    contentTypeExclude.setText(JMeterUtils.getProperty("proxy.content_type_exclude")); // $NON-NLS-1$
    HorizontalPanel panel = new HorizontalPanel();
    panel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(),
            JMeterUtils.getResString("proxy_content_type_filter"))); // $NON-NLS-1$
    panel.add(labelInclude);
    panel.add(contentTypeInclude);
    panel.add(labelExclude);
    panel.add(contentTypeExclude);
    return panel;
}

// Create the URL include-patterns table plus its Add/Delete/Clipboard buttons.
private JPanel createIncludePanel() {
    includeModel = new PowerTableModel(new String[] { INCLUDE_COL }, new Class[] { String.class });
    includeTable = new JTable(includeModel);
    JMeterUtils.applyHiDPI(includeTable);
    includeTable.getTableHeader().setDefaultRenderer(new HeaderAsPropertyRenderer());
    includeTable.setPreferredScrollableViewportSize(new Dimension(100, 30));
    JPanel panel = new JPanel(new BorderLayout());
    panel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), JMeterUtils
            .getResString("patterns_to_include"))); // $NON-NLS-1$
    panel.add(new JScrollPane(includeTable), BorderLayout.CENTER);
    panel.add(createTableButtonPanel(ADD_INCLUDE, DELETE_INCLUDE, ADD_TO_INCLUDE_FROM_CLIPBOARD, null), BorderLayout.SOUTH);
    return panel;
}

// Create the URL exclude-patterns table; unlike the include table it also
// offers the "add suggested excludes" button.
private JPanel createExcludePanel() {
    excludeModel = new PowerTableModel(new String[] { EXCLUDE_COL }, new Class[] { String.class });
    excludeTable = new JTable(excludeModel);
    JMeterUtils.applyHiDPI(excludeTable);
    excludeTable.getTableHeader().setDefaultRenderer(new HeaderAsPropertyRenderer());
    excludeTable.setPreferredScrollableViewportSize(new Dimension(100, 30));
    JPanel panel = new JPanel(new BorderLayout());
    panel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), JMeterUtils
            .getResString("patterns_to_exclude"))); // $NON-NLS-1$
    panel.add(new JScrollPane(excludeTable), BorderLayout.CENTER);
    panel.add(createTableButtonPanel(ADD_EXCLUDE, DELETE_EXCLUDE, ADD_TO_EXCLUDE_FROM_CLIPBOARD, ADD_SUGGESTED_EXCLUDES), BorderLayout.SOUTH);
    return panel;
}

// Create the "notify child listeners of filtered samplers" checkbox panel.
private JPanel createNotifyListenersPanel() {
    JPanel panel = new JPanel();
    panel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), JMeterUtils
            .getResString("notify_child_listeners_fr"))); // $NON-NLS-1$
    notifyChildSamplerListenerOfFilteredSamplersCB = new JCheckBox(JMeterUtils.getResString("notify_child_listeners_fr")); // $NON-NLS-1$
    notifyChildSamplerListenerOfFilteredSamplersCB.setSelected(true);
    notifyChildSamplerListenerOfFilteredSamplersCB.addActionListener(this);
    notifyChildSamplerListenerOfFilteredSamplersCB.setActionCommand(ENABLE_RESTART);
    panel.add(notifyChildSamplerListenerOfFilteredSamplersCB);
    return panel;
}

// Build the shared button strip used under both pattern tables.
// addSuggestedExcludes may be null, in which case that button is omitted.
private JPanel createTableButtonPanel(String addCommand, String deleteCommand, String copyFromClipboard, String addSuggestedExcludes) {
    JPanel buttonPanel = new JPanel();
    JButton addButton = new JButton(JMeterUtils.getResString("add")); // $NON-NLS-1$
    addButton.setActionCommand(addCommand);
    addButton.addActionListener(this);
    buttonPanel.add(addButton);
    JButton deleteButton = new JButton(JMeterUtils.getResString("delete")); // $NON-NLS-1$
    deleteButton.setActionCommand(deleteCommand);
    deleteButton.addActionListener(this);
    buttonPanel.add(deleteButton);
    /** A button for adding new excludes/includes to the table from the clipboard. */
    JButton addFromClipboard = new JButton(JMeterUtils.getResString("add_from_clipboard")); // $NON-NLS-1$
    addFromClipboard.setActionCommand(copyFromClipboard);
    addFromClipboard.addActionListener(this);
    buttonPanel.add(addFromClipboard);
    if(addSuggestedExcludes != null) {
        /** A button for adding suggested excludes. */
        JButton addFromSuggestedExcludes = new JButton(JMeterUtils.getResString("add_from_suggested_excludes")); // $NON-NLS-1$
        addFromSuggestedExcludes.setActionCommand(addSuggestedExcludes);
        addFromSuggestedExcludes.addActionListener(this);
        buttonPanel.add(addFromSuggestedExcludes);
    }
    return buttonPanel;
}

// Rebuild the target-controller combo from the current test plan tree and
// reselect the model's current target. The action listener is detached while
// the model is repopulated so no CHANGE_TARGET events fire mid-rebuild.
private void reinitializeTargetCombo() {
    log.debug("Reinitializing target combo");
    // Stop action notifications while we shuffle this around:
    targetNodes.removeActionListener(this);
    targetNodesModel.removeAllElements();
    GuiPackage gp = GuiPackage.getInstance();
    JMeterTreeNode root;
    if (gp != null) {
        root = (JMeterTreeNode) GuiPackage.getInstance().getTreeModel().getRoot();
        targetNodesModel
                .addElement(new TreeNodeWrapper(null, JMeterUtils.getResString("use_recording_controller"))); // $NON-NLS-1$
        buildNodesModel(root, "", 0);
    }
    // NOTE(review): if no wrapper matches model.getTarget(), the loop leaves
    // 'choice' as the LAST element — presumably intended as a fallback; confirm.
    TreeNodeWrapper choice = null;
    for (int i = 0; i < targetNodesModel.getSize(); i++) {
        choice = (TreeNodeWrapper) targetNodesModel.getElementAt(i);
        log.debug("Selecting item " + choice + " for model " + model + " in " + this);
        if (choice.getTreeNode() == model.getTarget()) // .equals caused NPE
        {
            break;
        }
    }
    // Reinstate action notifications:
    targetNodes.addActionListener(this);
    // Set the current value:
    targetNodesModel.setSelectedItem(choice);
    log.debug("Reinitialization complete");
}

// Recursively populate targetNodesModel with every Controller node, labelled
// with its " > "-separated path; TestPlan/WorkBench nodes are descended into
// (restarting the level counter) but not added themselves.
private void buildNodesModel(JMeterTreeNode node, String parentName, int level) {
    String separator = " > ";
    if (node != null) {
        for (int i = 0; i < node.getChildCount(); i++) {
            StringBuilder name = new StringBuilder();
            JMeterTreeNode cur = (JMeterTreeNode) node.getChildAt(i);
            TestElement te = cur.getTestElement();
            /*
             * Will never be true. Probably intended to use
             * org.apache.jmeter.threads.ThreadGroup rather than
             * java.lang.ThreadGroup However, that does not work correctly;
             * whereas treating it as a Controller does.
             * if (te instanceof ThreadGroup) { name.append(parent_name);
             * name.append(cur.getName()); name.append(seperator);
             * buildNodesModel(cur, name.toString(), level); } else
             */
            if (te instanceof Controller) {
                name.append(parentName);
                name.append(cur.getName());
                TreeNodeWrapper tnw = new TreeNodeWrapper(cur, name.toString());
                targetNodesModel.addElement(tnw);
                name.append(separator);
                buildNodesModel(cur, name.toString(), level + 1);
            } else if (te instanceof TestPlan || te instanceof WorkBench) {
                name.append(cur.getName());
                name.append(separator);
                buildNodesModel(cur, name.toString(), 0);
            }
            // Ignore everything else
        }
    }
}
}
/* * Copyright 2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.tooling.internal.adapter; import com.google.common.base.Optional; import org.gradle.internal.UncheckedException; import org.gradle.internal.reflect.DirectInstantiator; import org.gradle.internal.time.CountdownTimer; import org.gradle.internal.time.Time; import org.gradle.internal.typeconversion.EnumFromCharSequenceNotationParser; import org.gradle.internal.typeconversion.NotationConverterToNotationParserAdapter; import org.gradle.internal.typeconversion.NotationParser; import org.gradle.internal.typeconversion.TypeConversionException; import org.gradle.tooling.model.DomainObjectSet; import org.gradle.tooling.model.internal.Exceptions; import org.gradle.tooling.model.internal.ImmutableDomainObjectSet; import javax.annotation.Nullable; import java.io.IOException; import java.io.Serializable; import java.lang.ref.SoftReference; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Proxy; import java.lang.reflect.Type; import java.lang.reflect.WildcardType; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import 
java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Adapts some source object to some target view type.
 */
public class ProtocolToModelAdapter implements ObjectGraphAdapter {
    private static final ViewDecoration NO_OP_MAPPER = new NoOpDecoration();
    private static final TargetTypeProvider IDENTITY_TYPE_PROVIDER = new TargetTypeProvider() {
        @Override
        public <T> Class<? extends T> getTargetType(Class<T> initialTargetType, Object protocolObject) {
            return initialTargetType;
        }
    };
    private static final ReflectionMethodInvoker REFLECTION_METHOD_INVOKER = new ReflectionMethodInvoker();
    private static final TypeInspector TYPE_INSPECTOR = new TypeInspector();
    private static final CollectionMapper COLLECTION_MAPPER = new CollectionMapper();
    private static final Object[] EMPTY = new Object[0];
    private static final Class[] EMPTY_CLASS_ARRAY = new Class[0];
    // Cached Object.equals/hashCode, resolved once in the static initializer
    // so proxy invocation handlers can intercept them cheaply.
    private static final Method EQUALS_METHOD;
    private static final Method HASHCODE_METHOD;
    private final TargetTypeProvider targetTypeProvider;

    static {
        Method equalsMethod;
        Method hashCodeMethod;
        try {
            equalsMethod = Object.class.getMethod("equals", Object.class);
            hashCodeMethod = Object.class.getMethod("hashCode");
        } catch (NoSuchMethodException e) {
            // Cannot happen for Object's own methods; rethrow unchecked if it does.
            throw UncheckedException.throwAsUncheckedException(e);
        }
        EQUALS_METHOD = equalsMethod;
        HASHCODE_METHOD = hashCodeMethod;
    }

    public ProtocolToModelAdapter() {
        this(IDENTITY_TYPE_PROVIDER);
    }

    public ProtocolToModelAdapter(TargetTypeProvider targetTypeProvider) {
        this.targetTypeProvider = targetTypeProvider;
    }

    /**
     * Creates an adapter for a single object graph. Each object adapted by the returned adapter is treated as part of the same object graph, for the purposes of caching etc.
     */
    public ObjectGraphAdapter newGraph() {
        final ViewGraphDetails graphDetails = new ViewGraphDetails(targetTypeProvider);
        return new ObjectGraphAdapter() {
            @Override
            public <T> T adapt(Class<T> targetType, Object sourceObject) {
                return createView(targetType, sourceObject, NO_OP_MAPPER, graphDetails);
            }

            @Override
            public <T> ViewBuilder<T> builder(Class<T> viewType) {
                return new DefaultViewBuilder<T>(viewType, graphDetails);
            }
        };
    }

    /**
     * Adapts the source object to a view object.
     * Unlike {@link #newGraph()}, each call uses a fresh graph (no view caching
     * across calls). Returns null for a null source.
     */
    @Override
    public <T> T adapt(Class<T> targetType, Object sourceObject) {
        if (sourceObject == null) {
            return null;
        }
        return createView(targetType, sourceObject, NO_OP_MAPPER, new ViewGraphDetails(targetTypeProvider));
    }

    /**
     * Creates a builder for views of the given type.
     */
    @Override
    public <T> ViewBuilder<T> builder(final Class<T> viewType) {
        return new DefaultViewBuilder<T>(viewType);
    }

    // Core view factory: returns the source itself when already assignable,
    // converts enums by name, reuses a cached proxy for the same
    // (type, source, decoration) key, otherwise creates a new dynamic proxy.
    private static <T> T createView(Class<T> targetType, Object sourceObject, ViewDecoration decoration, ViewGraphDetails graphDetails) {
        if (sourceObject == null) {
            return null;
        }
        // Calculate the actual type
        Class<? extends T> viewType = graphDetails.typeProvider.getTargetType(targetType, sourceObject);
        if (viewType.isInstance(sourceObject)) {
            return viewType.cast(sourceObject);
        }
        if (targetType.isEnum()) {
            return adaptToEnum(targetType, sourceObject);
        }
        // Restrict the decorations to those required to decorate all views reachable from this type
        ViewDecoration decorationsForThisType = decoration.isNoOp() ? decoration : decoration.restrictTo(TYPE_INSPECTOR.getReachableTypes(targetType));
        ViewKey viewKey = new ViewKey(viewType, sourceObject, decorationsForThisType);
        Object view = graphDetails.views.get(viewKey);
        if (view != null) {
            return targetType.cast(view);
        }
        // Create a proxy
        InvocationHandlerImpl handler = new InvocationHandlerImpl(targetType, sourceObject, decorationsForThisType, graphDetails);
        Object proxy = Proxy.newProxyInstance(viewType.getClassLoader(), new Class<?>[]{viewType}, handler);
        handler.attachProxy(proxy);
        return viewType.cast(proxy);
    }

    // Converts an Enum (by name), String, or arbitrary object (via toString)
    // to a constant of the target enum type; wraps conversion failures in
    // IllegalArgumentException.
    private static <T, S> T adaptToEnum(Class<T> targetType, S sourceObject) {
        try {
            String literal;
            if (sourceObject instanceof Enum) {
                literal = ((Enum<?>) sourceObject).name();
            } else if (sourceObject instanceof String) {
                literal = (String) sourceObject;
            } else {
                literal = sourceObject.toString();
            }
            @SuppressWarnings({"rawtypes", "unchecked"})
            NotationParser<String, T> parser = new NotationConverterToNotationParserAdapter<String, T>(new EnumFromCharSequenceNotationParser(targetType));
            T parsedLiteral = parser.parseNotation(literal);
            return targetType.cast(parsedLiteral);
        } catch (TypeConversionException e) {
            throw new IllegalArgumentException(String.format("Can't convert '%s' to enum type '%s'", sourceObject, targetType.getSimpleName()), e);
        }
    }

    // Converts a value to the requested (possibly parameterized) type:
    // Iterable and Map element types are converted recursively, primitives are
    // passed through, and any other class goes through createView.
    private static Object convert(Type targetType, Object sourceObject, ViewDecoration decoration, ViewGraphDetails graphDetails) {
        if (targetType instanceof ParameterizedType) {
            ParameterizedType parameterizedTargetType = (ParameterizedType) targetType;
            if (parameterizedTargetType.getRawType() instanceof Class) {
                Class<?> rawClass = (Class<?>) parameterizedTargetType.getRawType();
                if (Iterable.class.isAssignableFrom(rawClass)) {
                    Type targetElementType = getElementType(parameterizedTargetType, 0);
                    return convertCollectionInternal(rawClass, targetElementType, (Iterable<?>) sourceObject, decoration, graphDetails);
                }
                if (Map.class.isAssignableFrom(rawClass)) {
                    Type targetKeyType = getElementType(parameterizedTargetType, 0);
                    Type targetValueType = getElementType(parameterizedTargetType, 1);
                    return convertMap(rawClass, targetKeyType, targetValueType, (Map<?, ?>) sourceObject, decoration, graphDetails);
                }
            }
        }
        if (targetType instanceof Class) {
            if (((Class) targetType).isPrimitive()) {
                return sourceObject;
            }
            return createView((Class) targetType, sourceObject, decoration, graphDetails);
        }
        throw new UnsupportedOperationException(String.format("Cannot convert object of %s to %s.", sourceObject.getClass(), targetType));
    }

    // Converts each key and value of the source map into a new empty map of
    // the requested map class.
    private static Map<Object, Object> convertMap(Class<?> mapClass, Type targetKeyType, Type targetValueType, Map<?, ?> sourceObject, ViewDecoration decoration, ViewGraphDetails graphDetails) {
        Map<Object, Object> convertedElements = COLLECTION_MAPPER.createEmptyMap(mapClass);
        for (Map.Entry<?, ?> entry : sourceObject.entrySet()) {
            convertedElements.put(convert(targetKeyType, entry.getKey(), decoration, graphDetails), convert(targetValueType, entry.getValue(), decoration, graphDetails));
        }
        return convertedElements;
    }

    // Converts the elements into a new collection of the requested class;
    // DomainObjectSet results are wrapped in an ImmutableDomainObjectSet.
    private static Object convertCollectionInternal(Class<?> collectionClass, Type targetElementType, Iterable<?> sourceObject, ViewDecoration decoration, ViewGraphDetails graphDetails) {
        Collection<Object> convertedElements = COLLECTION_MAPPER.createEmptyCollection(collectionClass);
        convertCollectionInternal(convertedElements, targetElementType, sourceObject, decoration, graphDetails);
        if (collectionClass.equals(DomainObjectSet.class)) {
            return new ImmutableDomainObjectSet(convertedElements);
        } else {
            return convertedElements;
        }
    }

    // Converts each source element and adds it to the target collection.
    private static void convertCollectionInternal(Collection<Object> targetCollection, Type targetElementType, Iterable<?> sourceObject, ViewDecoration viewDecoration, ViewGraphDetails graphDetails) {
        for (Object element : sourceObject) {
            targetCollection.add(convert(targetElementType, element, viewDecoration, graphDetails));
        }
    }

    // Extracts the type argument at the given index, resolving a wildcard
    // (e.g. "? extends Foo") to its first upper bound.
    private static Type getElementType(ParameterizedType type, int index) {
        Type elementType = type.getActualTypeArguments()[index];
        if (elementType instanceof WildcardType) {
            WildcardType wildcardType = (WildcardType) elementType;
            return wildcardType.getUpperBounds()[0];
        }
        return elementType;
    }

    /**
     * Unpacks the source object from a given view object.
     *
     * @throws IllegalArgumentException if the object is not a proxy created by this adapter
     */
    public Object unpack(Object viewObject) {
        if (!Proxy.isProxyClass(viewObject.getClass()) || !(Proxy.getInvocationHandler(viewObject) instanceof InvocationHandlerImpl)) {
            throw new IllegalArgumentException("The given object is not a view object");
        }
        InvocationHandlerImpl handler = (InvocationHandlerImpl) Proxy.getInvocationHandler(viewObject);
        return handler.sourceObject;
    }

    // Per-graph state: the type provider plus the cache of views already
    // created for this graph.
    private static class ViewGraphDetails implements Serializable {
        // Transient, don't serialize all the views that happen to have been visited, recreate them when visited via the deserialized view
        private transient Map<ViewKey, Object> views = new HashMap<ViewKey, Object>();
        private final TargetTypeProvider typeProvider;

        ViewGraphDetails(TargetTypeProvider typeProvider) {
            this.typeProvider = typeProvider;
        }

        private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
            in.defaultReadObject();
            views = new HashMap<ViewKey, Object>();
        }
    }

    // Cache key for a view: view type + source identity + decoration.
    private static class ViewKey implements Serializable {
        private final Class<?> type;
        private final Object source;
        private final ViewDecoration viewDecoration;

        ViewKey(Class<?> type, Object source, ViewDecoration viewDecoration) {
            this.type = type;
            this.source = source;
            this.viewDecoration = viewDecoration;
        }

        @Override
        public boolean equals(Object obj) {
            // NOTE(review): casts without a null/type check, so comparing against
            // a non-ViewKey throws rather than returning false — presumably keys
            // are only ever compared inside the views map; confirm before reuse.
            // Source is compared by identity, matching the identityHashCode below.
            ViewKey other = (ViewKey) obj;
            return other.source == source && other.type.equals(type) && other.viewDecoration.equals(viewDecoration);
        }

        @Override
        public int hashCode() {
            return type.hashCode() ^ System.identityHashCode(source) ^ viewDecoration.hashCode();
        }
    }

    // The dynamic-proxy handler backing every view: intercepts equals/hashCode,
    // delegates everything else to a chain of method invokers, and re-registers
    // itself in the graph's view cache on deserialization.
    private static class InvocationHandlerImpl implements InvocationHandler, Serializable {
        private final Class<?> targetType;
        private final Object sourceObject;
        private final ViewDecoration decoration;
        private final ViewGraphDetails graphDetails;
        private Object proxy;
        // Recreate the invoker when deserialized, rather than serialize all its state
        private transient MethodInvoker invoker;

        InvocationHandlerImpl(Class<?> targetType, Object sourceObject, ViewDecoration decoration, ViewGraphDetails graphDetails) {
            this.targetType = targetType;
            this.sourceObject = sourceObject;
            this.decoration = decoration;
            this.graphDetails = graphDetails;
            setup();
        }

        private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
            in.defaultReadObject();
            setup();
            graphDetails.views.put(new ViewKey(targetType, sourceObject, decoration), proxy);
        }

        // Builds the invoker chain: reflection + any decoration-supplied mix-ins,
        // wrapped with adaptation, per-property caching, and safety layers.
        private void setup() {
            List<MethodInvoker> invokers = new ArrayList<MethodInvoker>();
            invokers.add(REFLECTION_METHOD_INVOKER);
            decoration.collectInvokers(sourceObject, targetType, invokers);
            MethodInvoker mixInMethodInvoker = invokers.size() == 1 ? invokers.get(0) : new ChainedMethodInvoker(invokers);
            invoker = new SupportedPropertyInvoker(
                new SafeMethodInvoker(
                    new PropertyCachingMethodInvoker(
                        new AdaptingMethodInvoker(decoration, graphDetails, mixInMethodInvoker))));
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (o == null || o.getClass() != getClass()) {
                return false;
            }
            InvocationHandlerImpl other = (InvocationHandlerImpl) o;
            return sourceObject.equals(other.sourceObject);
        }

        @Override
        public int hashCode() {
            return sourceObject.hashCode();
        }

        @Override
        public Object invoke(Object target, Method method, Object[] params) throws Throwable {
            if (EQUALS_METHOD.equals(method)) {
                // Two views are equal when both are proxies whose handlers wrap
                // equal source objects.
                Object param = params[0];
                if (param == null || !Proxy.isProxyClass(param.getClass())) {
                    return false;
                }
                InvocationHandler other = Proxy.getInvocationHandler(param);
                return equals(other);
            } else if (HASHCODE_METHOD.equals(method)) {
                return hashCode();
            }
            MethodInvocation invocation = new MethodInvocation(method.getName(), method.getReturnType(), method.getGenericReturnType(), method.getParameterTypes(), target, targetType, sourceObject, params);
            invoker.invoke(invocation);
            if (!invocation.found()) {
                String methodName = method.getDeclaringClass().getSimpleName() + "." + method.getName() + "()";
                throw Exceptions.unsupportedMethod(methodName);
            }
            return invocation.getResult();
        }

        void attachProxy(Object proxy) {
            this.proxy = proxy;
            graphDetails.views.put(new ViewKey(targetType, sourceObject, decoration), proxy);
        }
    }

    // Tries each invoker in order until one reports the invocation as found.
    private static class ChainedMethodInvoker implements MethodInvoker {
        private final MethodInvoker[] invokers;

        private ChainedMethodInvoker(List<MethodInvoker> invokers) {
            this.invokers = invokers.toArray(new MethodInvoker[0]);
        }

        @Override
        public void invoke(MethodInvocation method) throws Throwable {
            for (int i = 0; !method.found() && i < invokers.length; i++) {
                MethodInvoker invoker = invokers[i];
                invoker.invoke(method);
            }
        }
    }

    // Delegates to the next invoker, then converts a non-null result to the
    // invocation's generic return type.
    private static class AdaptingMethodInvoker implements MethodInvoker {
        private final ViewDecoration decoration;
        private final ViewGraphDetails graphDetails;
        private final MethodInvoker next;

        private AdaptingMethodInvoker(ViewDecoration decoration, ViewGraphDetails graphDetails, MethodInvoker next) {
            this.decoration = decoration;
            this.graphDetails = graphDetails;
            this.next = next;
        }

        @Override
        public void invoke(MethodInvocation invocation) throws Throwable {
            next.invoke(invocation);
            if (invocation.found() && invocation.getResult() != null) {
                invocation.setResult(convert(invocation.getGenericReturnType(), invocation.getResult(), decoration, graphDetails));
            }
        }
    }

    // Read/write-locked cache of Method lookups, periodically cleaned of
    // entries whose soft references have been collected.
    private static class MethodInvocationCache {
        private final Map<MethodInvocationKey, Optional<Method>> store = new HashMap<MethodInvocationKey, Optional<Method>>();
        private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
        private final static long MINIMAL_CLEANUP_INTERVAL = 30000;
        // For stats we don't really care about thread safety
        private int cacheMiss;
        private int cacheHit;
        private int evict;
        private CountdownTimer cleanupTimer = Time.startCountdownTimer(MINIMAL_CLEANUP_INTERVAL);

        // Key holding the lookup class and parameter types via SoftReference so
        // cached entries do not pin classes (and their loaders) in memory.
        private static class MethodInvocationKey {
            private final SoftReference<Class<?>> lookupClass;
            private final String methodName;
            private final SoftReference<Class<?>[]> parameterTypes;
            private final int hashCode;

            private MethodInvocationKey(Class<?> lookupClass, String methodName, Class<?>[] parameterTypes) {
                this.lookupClass = new SoftReference<Class<?>>(lookupClass);
                this.methodName = methodName;
                this.parameterTypes = new SoftReference<Class<?>[]>(parameterTypes);
                // hashcode will always be used, so we precompute it in order to make sure we
                // won't compute it multiple times during comparisons
                int result = lookupClass != null ? lookupClass.hashCode() : 0;
                result = 31 * result + (methodName != null ? methodName.hashCode() : 0);
                result = 31 * result + Arrays.hashCode(parameterTypes);
                this.hashCode = result;
            }

            // A key is dirty once either soft reference has been collected.
            public boolean isDirty() {
                return lookupClass.get() == null || parameterTypes.get() == null;
            }

            @Override
            public boolean equals(Object o) {
                if (this == o) {
                    return true;
                }
                if (o == null || getClass() != o.getClass()) {
                    return false;
                }
                MethodInvocationKey that = (MethodInvocationKey) o;
                // Two dirty keys compare equal so collected entries can be evicted.
                if (isDirty() && that.isDirty()) {
                    return true;
                }
                if (!eq(lookupClass, that.lookupClass)) {
                    return false;
                }
                if (!methodName.equals(that.methodName)) {
                    return false;
                }
                return eq(parameterTypes, that.parameterTypes);
            }

            private static boolean eq(SoftReference<?> aRef, SoftReference<?> bRef) {
                Object a = aRef.get();
                Object b = bRef.get();
                return eq(a, b);
            }

            private static boolean eq(Object a, Object b) {
                if (a == b) {
                    return true;
                }
                if (a == null) {
                    return false;
                }
                if (a.getClass().isArray()) {
                    return Arrays.equals((Object[]) a, (Object[]) b);
                }
                return a.equals(b);
            }

            @Override
            public int hashCode() {
                return hashCode;
            }
        }

        public Method get(MethodInvocation invocation) {
            Class<?> owner = invocation.getDelegate().getClass();
            String name = invocation.getName();
            Class<?>[] parameterTypes =
invocation.getParameterTypes(); MethodInvocationKey key = new MethodInvocationKey( owner, name, parameterTypes ); lock.readLock().lock(); Optional<Method> cached = store.get(key); if (cached == null) { cacheMiss++; lock.readLock().unlock(); lock.writeLock().lock(); try { cached = store.get(key); if (cached == null) { cached = lookup(owner, name, parameterTypes); if (cacheMiss % 10 == 0) { removeDirtyEntries(); } store.put(key, cached); } lock.readLock().lock(); } finally { lock.writeLock().unlock(); } } else { cacheHit++; } try { return cached.orNull(); } finally { lock.readLock().unlock(); } } /** * Removes dirty entries from the cache. Calling System.currentTimeMillis() is costly so we should try to limit calls to this method. This method will only trigger cleanup at most once per * 30s. */ private void removeDirtyEntries() { if (!cleanupTimer.hasExpired()) { return; } lock.writeLock().lock(); try { for (MethodInvocationKey key : new LinkedList<MethodInvocationKey>(store.keySet())) { if (key.isDirty()) { evict++; store.remove(key); } } } finally { cleanupTimer.reset(); lock.writeLock().unlock(); } } private static Optional<Method> lookup(Class<?> sourceClass, String methodName, Class<?>[] parameterTypes) { Method match; try { match = sourceClass.getMethod(methodName, parameterTypes); } catch (NoSuchMethodException e) { return Optional.absent(); } LinkedList<Class<?>> queue = new LinkedList<Class<?>>(); queue.add(sourceClass); while (!queue.isEmpty()) { Class<?> c = queue.removeFirst(); try { match = c.getMethod(methodName, parameterTypes); } catch (NoSuchMethodException e) { // ignore } for (Class<?> interfaceType : c.getInterfaces()) { queue.addFirst(interfaceType); } if (c.getSuperclass() != null) { queue.addFirst(c.getSuperclass()); } } match.setAccessible(true); return Optional.of(match); } @Override public String toString() { return "Cache size: " + store.size() + " Hits: " + cacheHit + " Miss: " + cacheMiss + " Evicted: " + evict; } } private static class 
ReflectionMethodInvoker implements MethodInvoker { private final MethodInvocationCache lookupCache = new MethodInvocationCache(); @Override public void invoke(MethodInvocation invocation) throws Throwable { Method targetMethod = locateMethod(invocation); if (targetMethod == null) { return; } Object returnValue; try { returnValue = targetMethod.invoke(invocation.getDelegate(), invocation.getParameters()); } catch (InvocationTargetException e) { throw e.getCause(); } invocation.setResult(returnValue); } private Method locateMethod(MethodInvocation invocation) { return lookupCache.get(invocation); } } private static class PropertyCachingMethodInvoker implements MethodInvoker { private Map<String, Object> properties = Collections.emptyMap(); private Set<String> unknown = Collections.emptySet(); private final MethodInvoker next; private PropertyCachingMethodInvoker(MethodInvoker next) { this.next = next; } @Override public void invoke(MethodInvocation method) throws Throwable { if (method.isGetter()) { if (properties.containsKey(method.getName())) { method.setResult(properties.get(method.getName())); return; } if (unknown.contains(method.getName())) { return; } Object value; next.invoke(method); if (!method.found()) { markUnknown(method.getName()); return; } value = method.getResult(); cachePropertyValue(method.getName(), value); return; } next.invoke(method); } private void markUnknown(String methodName) { if (unknown.isEmpty()) { unknown = new HashSet<String>(); } unknown.add(methodName); } private void cachePropertyValue(String methodName, Object value) { if (properties.isEmpty()) { properties = new HashMap<String, Object>(); } properties.put(methodName, value); } } private static class SafeMethodInvoker implements MethodInvoker { private final MethodInvoker next; private SafeMethodInvoker(MethodInvoker next) { this.next = next; } @Override public void invoke(MethodInvocation invocation) throws Throwable { next.invoke(invocation); if (invocation.found() || 
invocation.getParameterTypes().length != 1 || !invocation.isIsOrGet()) { return; } MethodInvocation getterInvocation = new MethodInvocation(invocation.getName(), invocation.getReturnType(), invocation.getGenericReturnType(), EMPTY_CLASS_ARRAY, invocation.getView(), invocation.getViewType(), invocation.getDelegate(), EMPTY); next.invoke(getterInvocation); if (getterInvocation.found() && getterInvocation.getResult() != null) { invocation.setResult(getterInvocation.getResult()); } else { invocation.setResult(invocation.getParameters()[0]); } } } private static class SupportedPropertyInvoker implements MethodInvoker { private final MethodInvoker next; private SupportedPropertyInvoker(MethodInvoker next) { this.next = next; } @Override public void invoke(MethodInvocation invocation) throws Throwable { next.invoke(invocation); if (invocation.found()) { return; } String methodName = invocation.getName(); boolean isSupportMethod = methodName.length() > 11 && methodName.startsWith("is") && methodName.endsWith("Supported"); if (!isSupportMethod) { return; } String getterName = "get" + methodName.substring(2, methodName.length() - 9); MethodInvocation getterInvocation = new MethodInvocation(getterName, invocation.getReturnType(), invocation.getGenericReturnType(), EMPTY_CLASS_ARRAY, invocation.getView(), invocation.getViewType(), invocation.getDelegate(), EMPTY); next.invoke(getterInvocation); invocation.setResult(getterInvocation.found()); } } private static class BeanMixInMethodInvoker implements MethodInvoker { private final Object instance; private final MethodInvoker next; BeanMixInMethodInvoker(Object instance, MethodInvoker next) { this.instance = instance; this.next = next; } @Override public void invoke(MethodInvocation invocation) throws Throwable { MethodInvocation beanInvocation = new MethodInvocation(invocation.getName(), invocation.getReturnType(), invocation.getGenericReturnType(), invocation.getParameterTypes(), invocation.getView(), invocation.getViewType(), 
instance, invocation.getParameters()); next.invoke(beanInvocation); if (beanInvocation.found()) { invocation.setResult(beanInvocation.getResult()); return; } if (!invocation.isGetter()) { return; } beanInvocation = new MethodInvocation(invocation.getName(), invocation.getReturnType(), invocation.getGenericReturnType(), new Class<?>[]{invocation.getViewType()}, invocation.getView(), invocation.getViewType(), instance, new Object[]{invocation.getView()}); next.invoke(beanInvocation); if (beanInvocation.found()) { invocation.setResult(beanInvocation.getResult()); } } } private static class ClassMixInMethodInvoker implements MethodInvoker { private Object instance; private final Class<?> mixInClass; private final MethodInvoker next; private final ThreadLocal<MethodInvocation> current = new ThreadLocal<MethodInvocation>(); ClassMixInMethodInvoker(Class<?> mixInClass, MethodInvoker next) { this.mixInClass = mixInClass; this.next = next; } @Override public void invoke(MethodInvocation invocation) throws Throwable { if (current.get() != null) { // Already invoking a method on the mix-in return; } if (instance == null) { instance = DirectInstantiator.INSTANCE.newInstance(mixInClass, invocation.getView()); } MethodInvocation beanInvocation = new MethodInvocation(invocation.getName(), invocation.getReturnType(), invocation.getGenericReturnType(), invocation.getParameterTypes(), invocation.getView(), invocation.getViewType(), instance, invocation.getParameters()); current.set(beanInvocation); try { next.invoke(beanInvocation); } finally { current.set(null); } if (beanInvocation.found()) { invocation.setResult(beanInvocation.getResult()); } } } private interface ViewDecoration { void collectInvokers(Object sourceObject, Class<?> viewType, List<MethodInvoker> invokers); boolean isNoOp(); /** * Filter this decoration to apply only to the given view types. Return {@link #NO_OP_MAPPER} if this decoration does not apply to any of the types. 
*/ ViewDecoration restrictTo(Set<Class<?>> viewTypes); } private static class NoOpDecoration implements ViewDecoration, Serializable { @Override public void collectInvokers(Object sourceObject, Class<?> viewType, List<MethodInvoker> invokers) { } @Override public boolean equals(Object obj) { return obj instanceof NoOpDecoration; } @Override public int hashCode() { return 0; } @Override public boolean isNoOp() { return true; } @Override public ViewDecoration restrictTo(Set<Class<?>> viewTypes) { return this; } } private static class MixInMappingAction implements ViewDecoration, Serializable { private final List<? extends ViewDecoration> decorations; private MixInMappingAction(List<? extends ViewDecoration> decorations) { assert decorations.size() >= 2; this.decorations = decorations; } static ViewDecoration chain(List<? extends ViewDecoration> decorations) { if (decorations.isEmpty()) { return NO_OP_MAPPER; } if (decorations.size() == 1) { return decorations.get(0); } return new MixInMappingAction(decorations); } @Override public int hashCode() { int v = 0; for (ViewDecoration decoration : decorations) { v = v ^ decoration.hashCode(); } return v; } @Override public boolean equals(Object obj) { if (!obj.getClass().equals(MixInMappingAction.class)) { return false; } MixInMappingAction other = (MixInMappingAction) obj; return decorations.equals(other.decorations); } @Override public boolean isNoOp() { for (ViewDecoration decoration : decorations) { if (!decoration.isNoOp()) { return false; } } return true; } @Override public ViewDecoration restrictTo(Set<Class<?>> viewTypes) { List<ViewDecoration> filtered = new ArrayList<ViewDecoration>(); for (ViewDecoration viewDecoration : decorations) { ViewDecoration filteredDecoration = viewDecoration.restrictTo(viewTypes); if (!filteredDecoration.isNoOp()) { filtered.add(filteredDecoration); } } if (filtered.size() == 0) { return NO_OP_MAPPER; } if (filtered.size() == 1) { return filtered.get(0); } if 
(filtered.equals(decorations)) { return this; } return new MixInMappingAction(filtered); } @Override public void collectInvokers(Object sourceObject, Class<?> viewType, List<MethodInvoker> invokers) { for (ViewDecoration decoration : decorations) { decoration.collectInvokers(sourceObject, viewType, invokers); } } } private static abstract class TypeSpecificMappingAction implements ViewDecoration, Serializable { protected final Class<?> targetType; TypeSpecificMappingAction(Class<?> targetType) { this.targetType = targetType; } @Override public boolean isNoOp() { return false; } @Override public ViewDecoration restrictTo(Set<Class<?>> viewTypes) { if (viewTypes.contains(targetType)) { return this; } return NO_OP_MAPPER; } @Override public void collectInvokers(Object sourceObject, Class<?> viewType, List<MethodInvoker> invokers) { if (targetType.isAssignableFrom(viewType)) { invokers.add(createInvoker()); } } protected abstract MethodInvoker createInvoker(); } private static class MixInBeanMappingAction extends TypeSpecificMappingAction { private final Object mixIn; MixInBeanMappingAction(Class<?> targetType, Object mixIn) { super(targetType); this.mixIn = mixIn; } @Override public int hashCode() { return targetType.hashCode() ^ mixIn.hashCode(); } @Override public boolean equals(Object obj) { if (!obj.getClass().equals(MixInBeanMappingAction.class)) { return false; } MixInBeanMappingAction other = (MixInBeanMappingAction) obj; return targetType.equals(other.targetType) && mixIn.equals(other.mixIn); } @Override protected MethodInvoker createInvoker() { return new BeanMixInMethodInvoker(mixIn, REFLECTION_METHOD_INVOKER); } } private static class MixInTypeMappingAction extends TypeSpecificMappingAction { private final Class<?> mixInType; MixInTypeMappingAction(Class<?> targetType, Class<?> mixInType) { super(targetType); this.mixInType = mixInType; } @Override public int hashCode() { return targetType.hashCode() ^ mixInType.hashCode(); } @Override public boolean 
equals(Object obj) { if (!obj.getClass().equals(MixInTypeMappingAction.class)) { return false; } MixInTypeMappingAction other = (MixInTypeMappingAction) obj; return targetType.equals(other.targetType) && mixInType.equals(other.mixInType); } @Override protected MethodInvoker createInvoker() { return new ClassMixInMethodInvoker(mixInType, REFLECTION_METHOD_INVOKER); } } private class DefaultViewBuilder<T> implements ViewBuilder<T> { private final Class<T> viewType; @Nullable private final ViewGraphDetails graphDetails; List<ViewDecoration> viewDecorations = new ArrayList<ViewDecoration>(); DefaultViewBuilder(Class<T> viewType) { this.viewType = viewType; this.graphDetails = null; } DefaultViewBuilder(Class<T> viewType, @Nullable ViewGraphDetails graphDetails) { this.viewType = viewType; this.graphDetails = graphDetails; } @Override public ViewBuilder<T> mixInTo(final Class<?> targetType, final Object mixIn) { viewDecorations.add(new MixInBeanMappingAction(targetType, mixIn)); return this; } @Override public ViewBuilder<T> mixInTo(final Class<?> targetType, final Class<?> mixInType) { viewDecorations.add(new MixInTypeMappingAction(targetType, mixInType)); return this; } @Override public T build(@Nullable final Object sourceObject) { if (sourceObject == null) { return null; } ViewDecoration viewDecoration = MixInMappingAction.chain(viewDecorations); return createView(viewType, sourceObject, viewDecoration, graphDetails != null ? graphDetails : new ViewGraphDetails(targetTypeProvider)); } } }
/*
 * ***** BEGIN LICENSE BLOCK *****
 * Zimbra Collaboration Suite Server
 * Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Zimbra, Inc.
 *
 * The contents of this file are subject to the Zimbra Public License
 * Version 1.3 ("License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 * http://www.zimbra.com/license.
 *
 * Software distributed under the License is distributed on an "AS IS"
 * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
 * ***** END LICENSE BLOCK *****
 */
package com.zimbra.cs.mime.charset;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.*;

/**
 * UTF-7 charset implementation: ASCII bytes pass through directly, while other
 * characters are carried as base64-encoded UTF-16 inside a shifted section
 * delimited by BEGIN_SHIFT ('+') and END_SHIFT ('-').
 *
 * NOTE(review): subclasses appear to customize BEGIN_SHIFT/END_SHIFT and the
 * base64 alphabet (fields are protected, non-final) — confirm against callers.
 *
 * @author dkarp
 */
public class UTF7 extends Charset {

    // Highest byte/char value that may appear unencoded (7-bit ASCII range).
    private static final int MAX_UTF7_CHAR_VALUE = 0x7f;

    // Characters delimiting a base64-shifted section ('+' ... '-' for standard UTF-7).
    protected char BEGIN_SHIFT;
    protected char END_SHIFT;
    protected final byte[] BASE_64 = {
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
        'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f',
        'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
        'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'
    };
    // Maps an ASCII byte back to its 6-bit base64 value, or NON_BASE_64.
    protected final byte INVERSE_BASE_64[] = new byte[128];
    protected static final byte NON_BASE_64 = -1;
    // ASCII characters that may be emitted without entering a shifted section.
    protected final boolean NO_SHIFT_REQUIRED[] = new boolean[128];

    /**
     * Decoder: accumulates 6-bit base64 groups inside shifted sections and emits a
     * UTF-16 char for every 16 accumulated bits.
     */
    public class UTF7Decoder extends CharsetDecoder {
        // shifted: currently inside a base64 section; first: just saw BEGIN_SHIFT
        // (so "+-" decodes to a literal '+').
        private boolean shifted = false, first = false;
        // decoder: bit accumulator; bits: number of valid bits currently in it.
        private int decoder = 0, bits = 0;

        protected UTF7Decoder(Charset cs) {
            // Average 0.4 chars per byte, at most 1.
            super(cs, (float) 0.4, 1);
        }

        @Override
        protected void implReset() {
            shifted = first = false;
            decoder = bits = 0;
        }

        @Override
        protected CoderResult decodeLoop(ByteBuffer in, CharBuffer out) {
            while (in.hasRemaining()) {
                if (!out.hasRemaining())
                    return CoderResult.OVERFLOW;
                byte c = in.get();
                if (c < 0 || c > MAX_UTF7_CHAR_VALUE) {
                    // Non-ASCII byte is never valid UTF-7; put it back and report.
                    in.position(in.position() - 1);
                    return CoderResult.malformedForLength(1);
                }
                if (shifted) {
                    byte decodedChar = INVERSE_BASE_64[c];
                    if (decodedChar == NON_BASE_64) {
                        // End of the base64 section. Leftover accumulator bits mean
                        // an interrupted triplet -> malformed.
                        boolean malformed = decoder != 0;
                        shifted = false;
                        bits = decoder = 0;
                        // "+-" is the escape for a literal BEGIN_SHIFT character.
                        if (first && c == END_SHIFT)
                            out.put(BEGIN_SHIFT);
                        if (malformed) {
                            in.position(Math.max(0, in.position() - 2));
                            return CoderResult.malformedForLength(1);
                        }
                        // An explicit END_SHIFT is consumed; any other byte falls
                        // through to be handled as an unshifted character below.
                        if (c == END_SHIFT)
                            continue;
                    } else {
                        decoder = (decoder << 6) | decodedChar;
                        first = false;
                        bits += 6;
                        if (bits >= 16) {
                            // A full UTF-16 code unit is available; emit it and keep
                            // the remaining low-order bits.
                            bits -= 16;
                            out.put((char) (decoder >> bits));
                            decoder &= ~(0xFFFF << bits);
                        }
                    }
                }
                if (!shifted) {
                    if (c == BEGIN_SHIFT)
                        shifted = first = true;
                    else
                        out.put((char) c);
                }
            }
            // must force the flush here because the nio charset framework cannot handle
            // anything other than UNDERFLOW or OVERFLOW being returned by implFlush,
            // but an encoded triplet interrupted by EOF is still malformed input...
            if (shifted && decoder != 0) {
                implReset();
                in.position(Math.max(0, in.position() - 1));
                return CoderResult.malformedForLength(1);
            }
            return CoderResult.UNDERFLOW;
        }
    }

    /**
     * Encoder: passes through characters in NO_SHIFT_REQUIRED, base64-encodes
     * everything else inside BEGIN_SHIFT/END_SHIFT sections.
     */
    public class UTF7Encoder extends CharsetEncoder {
        private boolean shifted = false;
        // encoder: bit accumulator of pending UTF-16 bits; bits: count of valid bits.
        private int encoder = 0, bits = 0;

        protected UTF7Encoder(Charset cs) {
            // Average 2.5 bytes per char, at most 5.
            super(cs, (float) 2.5, 5);
        }

        @Override
        protected void implReset() {
            shifted = false;
            encoder = bits = 0;
        }

        @Override
        protected CoderResult implFlush(ByteBuffer out) {
            if (shifted) {
                // Worst case: one padded base64 byte plus the END_SHIFT terminator.
                if (out.remaining() < 2)
                    return CoderResult.OVERFLOW;
                if (bits > 0) {
                    // Left-pad the remaining bits with zeros to a full 6-bit group.
                    encoder <<= (6-bits);
                    out.put(BASE_64[encoder]);
                    encoder = bits = 0;
                }
                out.put((byte) END_SHIFT);
                shifted = false;
            }
            return CoderResult.UNDERFLOW;
        }

        @Override
        protected CoderResult encodeLoop(CharBuffer in, ByteBuffer out) {
            while (in.hasRemaining()) {
                // Conservative worst-case output for one input char.
                if (out.remaining() < 4)
                    return CoderResult.OVERFLOW;
                char c = in.get();
                boolean needsShift = c > MAX_UTF7_CHAR_VALUE || !NO_SHIFT_REQUIRED[c];
                if (needsShift && !shifted) {
                    out.put((byte) BEGIN_SHIFT);
                    // A literal BEGIN_SHIFT is escaped as the two-byte sequence "+-".
                    if (c == BEGIN_SHIFT)
                        out.put((byte) END_SHIFT);
                    else
                        shifted = true;
                }
                if (shifted) {
                    if (needsShift) {
                        // Append the 16-bit code unit and emit full 6-bit groups.
                        encoder = (encoder << 16) | c;
                        bits += 16;
                        do {
                            out.put(BASE_64[0x3F & (encoder >> (bits-6))]);
                            bits -= 6;
                        } while (bits >= 6);
                        encoder &= (0x3F >> (6-bits));
                    } else
                        // Back to plain ASCII: close the shifted section.
                        implFlush(out);
                }
                if (!needsShift)
                    out.put((byte) c);
            }
            // need to force a flush (sigh)
            // return CoderResult.UNDERFLOW;
            return implFlush(out);
        }
    }

    UTF7(String canonicalName, String[] aliases) {
        super(canonicalName, aliases);
        BEGIN_SHIFT = '+';
        END_SHIFT = '-';
        // Build the inverse base64 table: NON_BASE_64 everywhere except the alphabet.
        for (int i = 0; i < INVERSE_BASE_64.length; i++)
            INVERSE_BASE_64[i] = NON_BASE_64;
        for (byte i = 0; i < BASE_64.length; i++)
            INVERSE_BASE_64[BASE_64[i]] = i;
        // Direct characters (RFC 2152 set D plus whitespace) need no shifting.
        final String unshifted = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'(),-./:? \t\r\n";
        for (int i = 0; i < unshifted.length(); i++)
            NO_SHIFT_REQUIRED[unshifted.charAt(i)] = true;
    }

    @Override
    public boolean contains(Charset cs) {
        // UTF-7 can represent all of Unicode, so every charset is "contained".
        return true;
    }

    @Override
    public CharsetDecoder newDecoder() {
        return new UTF7Decoder(this);
    }

    @Override
    public CharsetEncoder newEncoder() {
        return new UTF7Encoder(this);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security.internal; import com.google.common.collect.Iterables; import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters; import org.apache.jackrabbit.oak.spi.security.SecurityConfiguration; import org.apache.jackrabbit.oak.spi.security.authentication.AuthenticationConfiguration; import org.apache.jackrabbit.oak.spi.security.authentication.token.TokenConfiguration; import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration; import org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfiguration; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConfiguration; import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration; import org.apache.jackrabbit.oak.spi.whiteboard.DefaultWhiteboard; import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard; import org.junit.Test; import org.mockito.Mockito; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; public class InternalSecurityProviderTest { private static final ConfigurationParameters 
PARAMS = ConfigurationParameters.of("a", "value"); private InternalSecurityProvider securityProvider = new InternalSecurityProvider(); @Test public void testDefaultWhiteboard() { assertNull(securityProvider.getWhiteboard()); } @Test public void testSetWhiteboard() { Whiteboard wb = new DefaultWhiteboard(); securityProvider.setWhiteboard(wb); assertSame(wb, securityProvider.getWhiteboard()); } @Test public void testDefaultPrincipalConfiguration() { PrincipalConfiguration defaultConfig = securityProvider.getConfiguration(PrincipalConfiguration.class); assertNull(defaultConfig); } @Test public void testSetPrincipalConfiguration() { PrincipalConfiguration pc = Mockito.mock(PrincipalConfiguration.class); when(pc.getParameters()).thenReturn(PARAMS); securityProvider.setPrincipalConfiguration(pc); assertSame(pc, securityProvider.getConfiguration(PrincipalConfiguration.class)); for (SecurityConfiguration sc : securityProvider.getConfigurations()) { if (sc instanceof PrincipalConfiguration) { assertSame(pc, sc); } } assertTrue(Iterables.contains(securityProvider.getConfigurations(), pc)); assertEquals(PARAMS, securityProvider.getParameters(PrincipalConfiguration.NAME)); } @Test public void testDefaultUserConfiguration() { assertNull(securityProvider.getConfiguration(UserConfiguration.class)); } @Test public void testSetUserConfiguration() { UserConfiguration uc = Mockito.mock(UserConfiguration.class); when(uc.getParameters()).thenReturn(PARAMS); securityProvider.setUserConfiguration(uc); assertSame(uc, securityProvider.getConfiguration(UserConfiguration.class)); for (SecurityConfiguration sc : securityProvider.getConfigurations()) { if (sc instanceof UserConfiguration) { assertSame(uc, sc); } } assertEquals(PARAMS, securityProvider.getParameters(UserConfiguration.NAME)); } @Test public void testDefaultAuthenticationConfiguration() { assertNull(securityProvider.getConfiguration(AuthenticationConfiguration.class)); } @Test public void testSetAuthenticationConfiguration() { 
AuthenticationConfiguration ac = Mockito.mock(AuthenticationConfiguration.class); when(ac.getParameters()).thenReturn(PARAMS); securityProvider.setAuthenticationConfiguration(ac); assertSame(ac, securityProvider.getConfiguration(AuthenticationConfiguration.class)); for (SecurityConfiguration sc : securityProvider.getConfigurations()) { if (sc instanceof AuthenticationConfiguration) { assertSame(ac, sc); } } assertEquals(PARAMS, securityProvider.getParameters(AuthenticationConfiguration.NAME)); } @Test public void testDefaultAuthorizationConfiguration() { assertNull(securityProvider.getConfiguration(AuthorizationConfiguration.class)); } @Test public void testSetAuthorizationConfiguration() { AuthorizationConfiguration ac = Mockito.mock(AuthorizationConfiguration.class); when(ac.getParameters()).thenReturn(PARAMS); securityProvider.setAuthorizationConfiguration(ac); assertSame(ac, securityProvider.getConfiguration(AuthorizationConfiguration.class)); for (SecurityConfiguration sc : securityProvider.getConfigurations()) { if (sc instanceof AuthorizationConfiguration) { assertSame(ac, sc); } } assertEquals(PARAMS, securityProvider.getParameters(AuthorizationConfiguration.NAME)); } @Test public void testDefaultPrivilegeConfiguration() { assertNull(securityProvider.getConfiguration(PrivilegeConfiguration.class)); } @Test public void testSetPrivilegeConfiguration() { PrivilegeConfiguration pc = Mockito.mock(PrivilegeConfiguration.class); when(pc.getParameters()).thenReturn(PARAMS); securityProvider.setPrivilegeConfiguration(pc); assertSame(pc, securityProvider.getConfiguration(PrivilegeConfiguration.class)); for (SecurityConfiguration sc : securityProvider.getConfigurations()) { if (sc instanceof PrivilegeConfiguration) { assertSame(pc, sc); } } assertEquals(PARAMS, securityProvider.getParameters(PrivilegeConfiguration.NAME)); } @Test public void testDefaultTokenConfiguration() { assertNull(securityProvider.getConfiguration(TokenConfiguration.class)); } @Test public void 
testSetTokenConfiguration() { TokenConfiguration tc = Mockito.mock(TokenConfiguration.class); when(tc.getParameters()).thenReturn(PARAMS); securityProvider.setTokenConfiguration(tc); assertSame(tc, securityProvider.getConfiguration(TokenConfiguration.class)); for (SecurityConfiguration sc : securityProvider.getConfigurations()) { if (sc instanceof TokenConfiguration) { assertSame(tc, sc); } } assertEquals(PARAMS, securityProvider.getParameters(TokenConfiguration.NAME)); } @Test(expected = IllegalArgumentException.class) public void testGetUnknownConfiguration() { securityProvider.getConfiguration(SecurityConfiguration.class); } @Test public void testGetParametersForNull() { assertSame(ConfigurationParameters.EMPTY, securityProvider.getParameters(null)); } @Test public void testGetParametersForUnknown() { assertSame(ConfigurationParameters.EMPTY, securityProvider.getParameters("unknownName")); } }
/* Copyright 2016 Goldman Sachs. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.gs.fw.common.mithra.util; import com.gs.collections.impl.list.mutable.FastList; import com.gs.collections.impl.map.mutable.UnifiedMap; import com.gs.fw.common.mithra.MithraList; import com.gs.fw.common.mithra.finder.Operation; import com.gs.fw.common.mithra.finder.RelatedFinder; import com.gs.fw.finder.Navigation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * A multi-threaded batch processor. Can be used to process a large number of objects with a fixed memory foot print. * Given a potentially large query over many shards, reads the query via cursors (streaming the objects) in as many * threads as there are shards. * The objects are batched, then each batch is deep fetched and finally passed to the user defined Consumer. * The consumer must be thread safe. 
* @param <T> The type of Reladomo object
* @param <TL> the type of the Reladomo List object
*/
public class MultiThreadedBatchProcessor <T, TL extends MithraList<T>>
{
    private static final Logger LOGGER = LoggerFactory.getLogger(MultiThreadedBatchProcessor.class);

    // Shard ids to read from; a null set means a single, unsharded read.
    private final Set<Object> shards;
    private final RelatedFinder<T> finderInstance;
    private final Operation mainOperation;
    private final Consumer<T, TL> consumer;
    private final List<Navigation<T>> deepFetches;
    // Optional extra filter per shard; and-ed with mainOperation in queueWithOp().
    private Map<Object, Operation> additionalPerShardRetrievalOperations = UnifiedMap.newMap();
    private ErrorHandler<T, TL> errorHandler = new DefaultErrorHandler<T, TL>();
    private int batchSize = 2000;
    private int retrievalThreads = -1; // -1 means equal to the number of shards
    // Progress counters; updated concurrently from reader and deep-fetch threads.
    private AtomicLong totalQueued = new AtomicLong();
    private AtomicLong totalDeepFetchedTime = new AtomicLong();
    private AtomicLong totalDeepFetched = new AtomicLong();

    /**
     * @param finderInstance finder for the Reladomo type being processed
     * @param mainOperation  the query to stream; per shard, and-ed with the source attribute
     * @param deepFetches    relationships deep fetched on each batch before it is consumed
     * @param consumer       thread safe consumer invoked once per batch
     * @param shards         shard ids to read from; null for an unsharded read
     */
    public MultiThreadedBatchProcessor(
            RelatedFinder<T> finderInstance,
            Operation mainOperation,
            List<Navigation<T>> deepFetches,
            Consumer<T, TL> consumer,
            Set<Object> shards)
    {
        this.finderInstance = finderInstance;
        this.mainOperation = mainOperation;
        this.deepFetches = deepFetches;
        this.consumer = consumer;
        this.shards = shards;
    }

    /**
     * Sets the number of objects per batch handed to the consumer. Default is 2000.
     * @param batchSize the batch size
     */
    public void setBatchSize(int batchSize)
    {
        this.batchSize = batchSize;
    }

    /**
     * The default error handler aborts processing (by throwing an exception).
     * A custom error handler could handle some exceptions without re-throwing, thereby allowing
     * for continued processing.
     * @param errorHandler the handler invoked when the consumer throws
     */
    public void setErrorHandler(ErrorHandler<T, TL> errorHandler)
    {
        this.errorHandler = errorHandler;
    }

    /**
     * @param retrievalThreads # of threads to retrieve. Should be less than or equal to the number of shards.
     *                         -1 means the number of retrieval threads will be equal to the number of shards.
     *                         The number of deep fetch (consumer) threads is three times this value.
     */
    public void setRetrievalThreads(int retrievalThreads)
    {
        this.retrievalThreads = retrievalThreads;
    }

    /**
     * Add (boolean and) a per-shard operation to the mainOperation
     * @param additionalPerShardRetrievalOperations a map of shard id to operation. may return null for some shards
     */
    public void setAdditionalPerShardRetrievalOperations(Map<Object, Operation> additionalPerShardRetrievalOperations)
    {
        this.additionalPerShardRetrievalOperations = additionalPerShardRetrievalOperations;
    }

    public long getTotalDeepFetched()
    {
        return totalDeepFetched.get();
    }

    public long getTotalDeepFetchedTime()
    {
        return totalDeepFetchedTime.get();
    }

    public long getTotalQueued()
    {
        return totalQueued.get();
    }

    /**
     * Runs the full lifecycle: startConsumption, load (the blocking work), endConsumption.
     */
    public void process()
    {
        consumer.startConsumption(this);
        load(consumer);
        consumer.endConsumption(this);
    }

    /**
     * Streams the query results into batches and pushes them through the deep-fetch/consume
     * pipeline. Blocks until all reader and consumer threads are done; throws if either
     * executor aborted.
     * @param consumer the thread safe consumer invoked for each deep-fetched batch
     */
    public void load(Consumer<T, TL> consumer)
    {
        int threads = this.retrievalThreads;
        if (threads == -1)
        {
            threads = shards == null ? 1 : shards.size();
        }
        // One executor reads from the DB, a second (3x wider) deep fetches and consumes.
        AutoShutdownThreadExecutor executor = new AutoShutdownThreadExecutor(threads, "MTBP load");
        executor.setTimeoutInMilliseconds(10);
        int deepFetchAndBatchProcessorThreads = threads * 3;
        AutoShutdownThreadExecutor deepFetchAndBatchProcessor = new AutoShutdownThreadExecutor(deepFetchAndBatchProcessorThreads, "MTBP process");
        deepFetchAndBatchProcessor.setTimeoutInMilliseconds(10);
        // Bounded queue provides back pressure so readers can't run far ahead of consumers,
        // keeping the memory footprint fixed.
        final LinkedBlockingQueue<TL> listBeforeDeepFetchesQueue =
                new LinkedBlockingQueue<TL>(deepFetchAndBatchProcessorThreads + deepFetchAndBatchProcessorThreads/10 + 10);
        // loadLatch reaches zero when every shard reader has finished queuing batches.
        final CountDownLatch loadLatch = new CountDownLatch(shards == null ? 1 : shards.size());
        final CountDownLatch deepFetchLatch = new CountDownLatch(deepFetchAndBatchProcessorThreads);
        for (int i=0;i<deepFetchAndBatchProcessorThreads;i++)
        {
            deepFetchAndBatchProcessor.submit(new DeepFetchAndBatchProcessorRunnable(loadLatch, listBeforeDeepFetchesQueue,
                    executor, deepFetchAndBatchProcessor, deepFetchLatch, consumer, errorHandler));
        }
        deepFetchAndBatchProcessor.shutdown();
        if (shards != null)
        {
            // One reader task per shard; each counts the latch down when its cursor is exhausted.
            for(final Object shard: shards)
            {
                executor.submit(new Runnable()
                {
                    @Override
                    public void run()
                    {
                        queueForDeepFetchAndProcessing(shard, listBeforeDeepFetchesQueue);
                        loadLatch.countDown();
                    }
                });
            }
        }
        else
        {
            executor.submit(new Runnable()
            {
                @Override
                public void run()
                {
                    queueForDeepFetchAndProcessing(null, listBeforeDeepFetchesQueue);
                    loadLatch.countDown();
                }
            });
        }
        executor.shutdownAndWaitUntilDone();
        if (executor.isAborted())
        {
            // A reader failed: stop the consumers immediately rather than letting them drain.
            deepFetchAndBatchProcessor.shutdownNow();
            throw new RuntimeException("unrecoverable error while processing. See logs above");
        }
        deepFetchAndBatchProcessor.shutdownAndWaitUntilDone();
        if (deepFetchAndBatchProcessor.isAborted())
        {
            throw new RuntimeException("unrecoverable error while processing. See logs above.");
        }
        LOGGER.info("Total read from DB: " + totalQueued);
    }

    /**
     * Applies the configured deep fetches to the batch and resolves it.
     * @return elapsed wall-clock milliseconds, accumulated into totalDeepFetchedTime by callers
     */
    protected long deepFetchBatch(TL list)
    {
        long start = System.currentTimeMillis();
        addDeepFetches(list);
        list.forceResolve();
        return System.currentTimeMillis() - start;
    }

    /**
     * Reads one shard (or the whole query when shardId is null) and queues its batches.
     * @param shardId shard to read, or null for an unsharded read
     * @param listBeforeDeepFetchQueue destination queue of not-yet-deep-fetched batches
     */
    protected void queueForDeepFetchAndProcessing(final Object shardId, final LinkedBlockingQueue<TL> listBeforeDeepFetchQueue)
    {
        final AtomicLong total = new AtomicLong();
        queueWithOp(shardId, listBeforeDeepFetchQueue, total);
        String msg = "";
        if (shardId != null)
        {
            msg = "Source " + shardId + " ";
        }
        LOGGER.info(msg + "finished reading. " + total.get() + " queued for output.");
    }

    /**
     * Builds the per-shard operation (main op, source attribute, optional additional op),
     * streams the results via a cursor, and queues full batches as they accumulate.
     */
    protected void queueWithOp(final Object shardId, final LinkedBlockingQueue<TL> listQueue, final AtomicLong total)
    {
        Operation op = mainOperation;
        if (shardId != null)
        {
            op = op.and(finderInstance.getSourceAttribute().nonPrimitiveEq(shardId));
        }
        Operation additionalOperation = additionalPerShardRetrievalOperations.get(shardId);
        if (additionalOperation != null)
        {
            op = op.and(additionalOperation);
        }
        final List accumulator = FastList.newList(batchSize);
        MithraList many = ((RelatedFinder)finderInstance).findMany(op);
        // Cursor iteration streams objects without materializing the whole result set.
        many.forEachWithCursor(new DoWhileProcedure()
        {
            @Override
            public boolean execute(Object obj)
            {
                T result = (T) obj;
                accumulator.add(result);
                if (accumulator.size() == batchSize)
                {
                    queueResultsWithoutDeepFetch(accumulator, listQueue, shardId);
                    total.addAndGet(accumulator.size());
                    accumulator.clear();
                }
                return true; // keep cursoring
            }
        });
        // Flush the final, partially-filled batch.
        if (!accumulator.isEmpty())
        {
            queueResultsWithoutDeepFetch(accumulator, listQueue, shardId);
            total.addAndGet(accumulator.size());
        }
    }

    /**
     * Copies the accumulated objects into a fresh list and blocks until it fits on the queue
     * (this is where back pressure is applied).
     */
    protected void queueResultsWithoutDeepFetch(List<T> accumulator, LinkedBlockingQueue<TL> listQueue, Object shardId)
    {
        TL list = (TL) finderInstance.constructEmptyList();
        list.addAll(accumulator);
        try
        {
            listQueue.put(list); // must not touch tradeList after queuing, as another thread may be manipulating it.
            String msg = "";
            if (shardId != null)
            {
                msg = " for source " + shardId;
            }
            LOGGER.info("queued " + accumulator.size() + msg);
            totalQueued.addAndGet(accumulator.size());
        }
        catch (InterruptedException e)
        {
            throw new RuntimeException("Unexpected exception", e);
        }
    }

    // Registers every configured navigation on the batch before it is resolved.
    protected void addDeepFetches(TL list)
    {
        for(int i=0;i<deepFetches.size();i++)
        {
            list.deepFetch(deepFetches.get(i));
        }
    }

    /**
     * Worker that repeatedly polls the queue, deep fetches a batch and hands it to the consumer.
     * Exits when all readers are done and the queue has drained, or when either executor aborts.
     */
    private class DeepFetchAndBatchProcessorRunnable implements Runnable
    {
        private final CountDownLatch loadLatch;
        private final LinkedBlockingQueue<TL> listBeforeDeepFetchesQueue;
        private final AutoShutdownThreadExecutor loadExecutor;
        private final AutoShutdownThreadExecutor deepFetchExecutor;
        private final CountDownLatch deepFetchLatch;
        private final Consumer<T, TL> consumer;
        private final ErrorHandler<T, TL> errorHandler;

        public DeepFetchAndBatchProcessorRunnable(CountDownLatch loadLatch, LinkedBlockingQueue<TL> listBeforeDeepFetchesQueue,
                AutoShutdownThreadExecutor loadExecutor, AutoShutdownThreadExecutor deepFetchExecutor,
                CountDownLatch deepFetchLatch, Consumer<T, TL> consumer, ErrorHandler<T, TL> errorHandler)
        {
            this.loadLatch = loadLatch;
            this.listBeforeDeepFetchesQueue = listBeforeDeepFetchesQueue;
            this.loadExecutor = loadExecutor;
            this.deepFetchExecutor = deepFetchExecutor;
            this.deepFetchLatch = deepFetchLatch;
            this.consumer = consumer;
            this.errorHandler = errorHandler;
        }

        /**
         * Takes at most one batch off the queue (waiting up to 1s), deep fetches it and
         * passes it to the consumer. Consumer failures go to the error handler, which may
         * re-throw to abort processing.
         */
        public void processDeepFetchQueue()
        {
            TL list = null;
            try
            {
                list = listBeforeDeepFetchesQueue.poll(1, TimeUnit.SECONDS);
            }
            catch (InterruptedException e)
            {
                //ignore: the surrounding run() loop re-evaluates the exit conditions
            }
            if (list != null)
            {
                totalDeepFetchedTime.addAndGet(deepFetchBatch(list));
                totalDeepFetched.addAndGet(list.size());
                try
                {
                    consumer.consume(list);
                }
                catch (Throwable e)
                {
                    this.errorHandler.handleError(e, MultiThreadedBatchProcessor.this, list);
                }
            }
        }

        @Override
        public void run()
        {
            while (true)
            {
                if (loadLatch.getCount() == 0)
                {
                    // the end: all readers finished, so drain whatever is left and stop.
                    while (!listBeforeDeepFetchesQueue.isEmpty())
                    {
                        this.processDeepFetchQueue();
                    }
                    break;
                }
                else if (loadExecutor.isAborted() || deepFetchExecutor.isAborted())
                {
                    // Someone failed; tear down the consumer pool without draining.
                    deepFetchExecutor.shutdownNow();
                    break;
                }
                else
                {
                    this.processDeepFetchQueue();
                }
            }
            deepFetchLatch.countDown();
        }
    }

    /**
     * A thread safe consumer.
     * @param <T>
     * @param <TL>
     */
    public interface Consumer<T, TL extends MithraList<T>>
    {
        /**
         * called once at the start of processing
         * @param processor
         */
        public void startConsumption(MultiThreadedBatchProcessor<T, TL> processor);

        /**
         * called for every batch. This method must be thread safe.
         * Multiple threads will call consume() on the same instance simultaneously.
         * @param list
         * @throws Exception An exception thrown here is passed to the ErrorHandler
         */
        public void consume(TL list) throws Exception;

        /**
         * called once at the end of processing
         * @param processor
         */
        public void endConsumption(MultiThreadedBatchProcessor<T, TL> processor);
    }

    public interface ErrorHandler<T, TL extends MithraList<T>>
    {
        /**
         * Handle an exception. Throwing an exception from this method will abort the processing threads.
         * @param t
         * @param processor
         * @param batch
         */
        public void handleError(Throwable t, MultiThreadedBatchProcessor<T, TL> processor, TL batch);
    }

    // Default policy: abort by re-throwing every error.
    private static class DefaultErrorHandler<T, TL extends MithraList<T>> implements ErrorHandler<T, TL>
    {
        @Override
        public void handleError(Throwable t, MultiThreadedBatchProcessor<T, TL> processor, TL batch)
        {
            if (t instanceof RuntimeException)
            {
                throw (RuntimeException)t;
            }
            throw new RuntimeException("Unhandled exception", t);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.state.internals;

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.internals.CacheFlushListener;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
import org.apache.kafka.streams.processor.internals.RecordContext;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StateSerdes;

import java.util.List;

/**
 * A {@link KeyValueStore} that fronts an inner byte store with a {@link ThreadCache}.
 * Writes go to the cache; dirty entries are written through to the underlying store
 * (and optionally forwarded downstream) when the cache flushes.
 */
class CachingKeyValueStore<K, V> implements KeyValueStore<K, V>, CachedStateStore<K, V> {

    private final KeyValueStore<Bytes, byte[]> underlying;
    private final Serde<K> keySerde;
    private final Serde<V> valueSerde;
    private CacheFlushListener<K, V> flushListener;
    // Cache namespace: taskId + "-" + store name, set in initInternal().
    private String name;
    private ThreadCache cache;
    private InternalProcessorContext context;
    private StateSerdes<K, V> serdes;
    // The thread that called init(); used to avoid cache writes from other threads in get().
    private Thread streamThread;

    CachingKeyValueStore(final KeyValueStore<Bytes, byte[]> underlying,
                         final Serde<K> keySerde,
                         final Serde<V> valueSerde) {
        this.underlying = underlying;
        this.keySerde = keySerde;
        this.valueSerde = valueSerde;
    }

    @Override
    public String name() {
        return underlying.name();
    }

    @SuppressWarnings("unchecked")
    @Override
    public void init(final ProcessorContext context, final StateStore root) {
        underlying.init(context, root);
        initInternal(context);
        // save the stream thread as we only ever want to trigger a flush
        // when the stream thread is the current thread.
        streamThread = Thread.currentThread();
    }

    @SuppressWarnings("unchecked")
    void initInternal(final ProcessorContext context) {
        this.context = (InternalProcessorContext) context;
        // Fall back to the context serdes when none were supplied explicitly.
        this.serdes = new StateSerdes<>(underlying.name(),
                                        keySerde == null ? (Serde<K>) context.keySerde() : keySerde,
                                        valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde);

        this.name = context.taskId() + "-" + underlying.name();
        this.cache = this.context.getCache();
        // When the cache evicts/flushes dirty entries, write them through to the
        // underlying store and forward to the flush listener if one is registered.
        cache.addDirtyEntryFlushListener(name, new ThreadCache.DirtyEntryFlushListener() {
            @Override
            public void apply(final List<ThreadCache.DirtyEntry> entries) {
                for (ThreadCache.DirtyEntry entry : entries) {
                    putAndMaybeForward(entry, (InternalProcessorContext) context);
                }
            }
        });
    }

    private void putAndMaybeForward(final ThreadCache.DirtyEntry entry, final InternalProcessorContext context) {
        final RecordContext current = context.recordContext();
        try {
            // Temporarily adopt the record context the entry was written under so the
            // downstream forward carries the right topic/partition/offset/timestamp.
            context.setRecordContext(entry.recordContext());
            if (flushListener != null) {
                // old value is read from the underlying store BEFORE the write-through below.
                flushListener.apply(serdes.keyFrom(entry.key().get()),
                                    serdes.valueFrom(entry.newValue()),
                                    serdes.valueFrom(underlying.get(entry.key())));
            }
            underlying.put(entry.key(), entry.newValue());
        } finally {
            context.setRecordContext(current);
        }
    }

    public void setFlushListener(final CacheFlushListener<K, V> flushListener) {
        this.flushListener = flushListener;
    }

    @Override
    public synchronized void flush() {
        // Flushing the cache triggers the dirty-entry listener above, which writes through
        // to the underlying store before it is flushed.
        cache.flush(name);
        underlying.flush();
    }

    @Override
    public void close() {
        flush();
        underlying.close();
    }

    @Override
    public boolean persistent() {
        return underlying.persistent();
    }

    @Override
    public boolean isOpen() {
        return underlying.isOpen();
    }

    @Override
    public synchronized V get(final K key) {
        final byte[] rawKey = serdes.rawKey(key);
        return get(rawKey);
    }

    private V get(final byte[] rawKey) {
        final LRUCacheEntry entry = cache.get(name, rawKey);
        if (entry == null) {
            final byte[] rawValue = underlying.get(Bytes.wrap(rawKey));
            if (rawValue == null) {
                return null;
            }
            // only update the cache if this call is on the streamThread
            // as we don't want other threads to trigger an eviction/flush
            if (Thread.currentThread().equals(streamThread)) {
                cache.put(name, rawKey, new LRUCacheEntry(rawValue));
            }
            return serdes.valueFrom(rawValue);
        }

        // A cached entry with a null value is a cached delete (tombstone).
        if (entry.value == null) {
            return null;
        }

        return serdes.valueFrom(entry.value);
    }

    @Override
    public KeyValueIterator<K, V> range(final K from, final K to) {
        // Merge the underlying store's range with the cache's range so uncommitted
        // cached writes are visible to the iterator.
        final byte[] origFrom = serdes.rawKey(from);
        final byte[] origTo = serdes.rawKey(to);
        final PeekingKeyValueIterator<Bytes, byte[]> storeIterator = new DelegatingPeekingKeyValueIterator<>(underlying.range(Bytes.wrap(origFrom), Bytes.wrap(origTo)));
        final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, origFrom, origTo);
        return new MergedSortedCacheKeyValueStoreIterator<>(cacheIterator, storeIterator, serdes);
    }

    @Override
    public KeyValueIterator<K, V> all() {
        final PeekingKeyValueIterator<Bytes, byte[]> storeIterator = new DelegatingPeekingKeyValueIterator<>(underlying.all());
        final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(name);
        return new MergedSortedCacheKeyValueStoreIterator<>(cacheIterator, storeIterator, serdes);
    }

    @Override
    public synchronized long approximateNumEntries() {
        // NOTE(review): counts only the underlying store; cached-but-unflushed entries
        // are not included.
        return underlying.approximateNumEntries();
    }

    @Override
    public synchronized void put(final K key, final V value) {
        put(serdes.rawKey(key), value);
    }

    private synchronized void put(final byte[] rawKey, final V value) {
        final byte[] rawValue = serdes.rawValue(value);
        // Mark dirty (true) and capture the current record context for later forwarding.
        cache.put(name, rawKey, new LRUCacheEntry(rawValue, true, context.offset(),
                                                  context.timestamp(), context.partition(), context.topic()));
    }

    @Override
    public synchronized V putIfAbsent(final K key, final V value) {
        final byte[] rawKey = serdes.rawKey(key);
        final V v = get(rawKey);
        if (v == null) {
            put(rawKey, value);
        }
        return v;
    }

    @Override
    public synchronized void putAll(final List<KeyValue<K, V>> entries) {
        for (KeyValue<K, V> entry : entries) {
            put(entry.key, entry.value);
        }
    }

    @Override
    public synchronized V delete(final K key) {
        final byte[] rawKey = serdes.rawKey(key);
        final V v = get(rawKey);
        // Deletes are cached as a null value (tombstone) and written through on flush.
        put(rawKey, null);
        return v;
    }

    KeyValueStore<Bytes, byte[]> underlying() {
        return underlying;
    }
}
package eu.newsreader.eventcoreference.objects;

import eu.newsreader.eventcoreference.coref.ComponentMatch;
import eu.newsreader.eventcoreference.util.Util;

import java.io.Serializable;
import java.util.ArrayList;

/**
 * Created by piek on 4/23/14.
 *
 * A SEM event bundled with its associated times, actors and relations.
 * Supports merging another CompositeEvent's components into this one
 * (used during cross-document event coreference).
 */
public class CompositeEvent implements Serializable {

    private SemObject event;
    private ArrayList<SemTime> mySemTimes;
    private ArrayList<SemActor> mySemActors;
    private ArrayList<SemRelation> mySemRelations;

    public CompositeEvent() {
        this.event = new SemObject();
        this.mySemTimes = new ArrayList<SemTime>();
        this.mySemActors = new ArrayList<SemActor>();
        this.mySemRelations = new ArrayList<SemRelation>();
    }

    public CompositeEvent(SemEvent event,
                          ArrayList<SemActor> mySemActors,
                          ArrayList<SemTime> mySemTimes,
                          ArrayList<SemRelation> mySemRelations) {
        this.event = event;
        this.mySemTimes = mySemTimes;
        this.mySemActors = mySemActors;
        this.mySemRelations = mySemRelations;
    }

    /**
     * A composite event is valid when its relations include at least one participant
     * (predicate ending in "actor" or "place") AND at least one temporal grounding
     * (predicate ending in "time" or "timestamp"), case-insensitively.
     *
     * @return true if both a participant and a time relation are present
     */
    public boolean isValid() {
        boolean hasParticipant = false;
        boolean hasTime = false;
        for (int i = 0; i < mySemRelations.size(); i++) {
            SemRelation semRelation = mySemRelations.get(i);
            for (int j = 0; j < semRelation.getPredicates().size(); j++) {
                // Lowercase once per predicate instead of twice.
                String predicate = semRelation.getPredicates().get(j).toLowerCase();
                if (predicate.endsWith("actor") || predicate.endsWith("place")) {
                    hasParticipant = true;
                }
                if (predicate.endsWith("time") || predicate.endsWith("timestamp")) {
                    hasTime = true;
                }
                if (hasParticipant && hasTime) {
                    return true; // both conditions met; no need to scan further
                }
            }
        }
        return hasParticipant && hasTime;
    }

    public SemObject getEvent() {
        return event;
    }

    public void setEvent(SemObject event) {
        this.event = event;
    }

    public ArrayList<SemTime> getMySemTimes() {
        return mySemTimes;
    }

    public void setMySemTimes(ArrayList<SemTime> mySemTimes) {
        this.mySemTimes = mySemTimes;
    }

    public void addMySemTime(SemTime mySemTime) {
        this.mySemTimes.add(mySemTime);
    }

    public ArrayList<SemActor> getMySemActors() {
        return mySemActors;
    }

    public void setMySemActors(ArrayList<SemActor> mySemActors) {
        this.mySemActors = mySemActors;
    }

    public void addMySemActor(SemActor mySemActor) {
        this.mySemActors.add(mySemActor);
    }

    public ArrayList<SemRelation> getMySemRelations() {
        return mySemRelations;
    }

    public void setMySemRelations(ArrayList<SemRelation> mySemRelations) {
        this.mySemRelations = mySemRelations;
    }

    public void addMySemRelation(SemRelation mySemRelation) {
        this.mySemRelations.add(mySemRelation);
    }

    /*
      @TODO fix true time value matches
     */
    /**
     * Merges the other event's relations into this one. A temporal relation is merged
     * (mentions combined) when its time reference matches; any other relation is merged
     * when it is structurally equal. Unmatched relations are re-pointed at this event's
     * id and appended.
     *
     * @param event the composite event whose relations are folded into this one
     */
    public void mergeRelations(CompositeEvent event) {
        for (int i = 0; i < event.getMySemRelations().size(); i++) {
            SemRelation semRelation = event.getMySemRelations().get(i);
            boolean match = false;
            for (int j = 0; j < this.getMySemRelations().size(); j++) {
                SemRelation relation = this.getMySemRelations().get(j);
                // Both relations are temporal (same flavor of time predicate)?
                if ((relation.containsPredicateIgnoreCase(Sem.hasTime.getLocalName())
                        && semRelation.containsPredicateIgnoreCase(Sem.hasTime.getLocalName()))
                        || (relation.containsPredicateIgnoreCase(Sem.hasBeginTimeStamp.getLocalName())
                        && semRelation.containsPredicateIgnoreCase(Sem.hasBeginTimeStamp.getLocalName()))
                        || (relation.containsPredicateIgnoreCase(Sem.hasEndTimeStamp.getLocalName())
                        && semRelation.containsPredicateIgnoreCase(Sem.hasEndTimeStamp.getLocalName()))
                        || (relation.containsPredicateIgnoreCase(Sem.hasEarliestBeginTimeStamp.getLocalName())
                        && semRelation.containsPredicateIgnoreCase(Sem.hasEarliestBeginTimeStamp.getLocalName()))
                        || (relation.containsPredicateIgnoreCase(Sem.hasEarliestEndTimeStamp.getLocalName())
                        && semRelation.containsPredicateIgnoreCase(Sem.hasEarliestEndTimeStamp.getLocalName()))) {
                    //// make sure the doctime is also considered
                    if (Util.matchTimeReference(this.getMySemTimes(), event.getMySemTimes(),
                            relation.getObject(), semRelation.getObject())) {
                        relation.addMentions(semRelation.getNafMentions());
                        match = true;
                        break;
                    }
                    // time predicates match but the referenced times do not: keep looking
                } else if (ComponentMatch.equalSemRelation(semRelation, relation)) {
                    /// we already have this relation so we add the mentions
                    relation.addMentions(semRelation.getNafMentions());
                    match = true;
                    break;
                }
            }
            if (!match) {
                // New relation: attach it to this event's subject and keep it.
                semRelation.setSubject(this.getEvent().getId());
                this.addMySemRelation(semRelation);
            }
        }
    }

    /**
     * Merges the other event's actors (matched by URI) and times (matched by exact
     * OWL-Time value on the full, begin or end time) into this one. Matched components
     * are merged in place; unmatched ones are appended.
     *
     * @param event the composite event whose actors and times are folded into this one
     */
    public void mergeObjects(CompositeEvent event) {
        for (int i = 0; i < event.getMySemActors().size(); i++) {
            SemActor semActor1 = event.getMySemActors().get(i);
            boolean match = false;
            for (int j = 0; j < this.getMySemActors().size(); j++) {
                SemActor semActor2 = this.getMySemActors().get(j);
                if (semActor1.getURI().equals(semActor2.getURI())) {
                    semActor2.mergeSemObject(semActor1);
                    match = true;
                    break;
                }
            }
            if (!match) {
                this.mySemActors.add(semActor1);
            }
        }
        for (int i = 0; i < event.getMySemTimes().size(); i++) {
            SemTime semTime1 = event.getMySemTimes().get(i);
            boolean match = false;
            for (int j = 0; j < this.getMySemTimes().size(); j++) {
                SemTime semTime2 = this.getMySemTimes().get(j);
                if (semTime1.getOwlTime().matchTimeExact(semTime2.getOwlTime())) {
                    semTime2.mergeSemObject(semTime1);
                    match = true;
                    break;
                } else if (semTime1.getOwlTimeBegin().matchTimeExact(semTime2.getOwlTimeBegin())) {
                    semTime2.mergeSemObject(semTime1);
                    match = true;
                    break;
                } else if (semTime1.getOwlTimeEnd().matchTimeExact(semTime2.getOwlTimeEnd())) {
                    semTime2.mergeSemObject(semTime1);
                    match = true;
                    break;
                }
            }
            if (!match) {
                this.mySemTimes.add(semTime1);
            }
        }
    }

    /**
     * Debug rendering: the event id and phrase followed by one indented line per
     * actor, time and relation.
     */
    public String toString() {
        // StringBuilder instead of repeated String += in loops.
        StringBuilder str = new StringBuilder();
        str.append(this.event.getId());
        str.append(this.event.getPhrase()).append("\n");
        for (int i = 0; i < mySemActors.size(); i++) {
            SemActor semActor = mySemActors.get(i);
            str.append("\t").append(semActor.getId()).append("\n");
        }
        for (int i = 0; i < mySemTimes.size(); i++) {
            SemTime semTime = mySemTimes.get(i);
            str.append("\t").append(semTime.getId()).append("\n");
        }
        for (int i = 0; i < mySemRelations.size(); i++) {
            SemRelation semRelation = mySemRelations.get(i);
            str.append("\t").append(semRelation.getSubject()).append(":")
               .append(semRelation.getPredicates().toString()).append(":")
               .append(semRelation.getObject()).append("\n");
        }
        return str.toString();
    }
}
/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.util.concurrent;

import static com.google.common.base.Objects.firstNonNull;

import com.google.common.annotations.Beta;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.collect.Iterables;
import com.google.common.collect.MapMaker;
import com.google.common.math.IntMath;
import com.google.common.primitives.Ints;

import java.math.RoundingMode;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * A striped {@code Lock/Semaphore/ReadWriteLock}. This offers the underlying lock striping
 * similar to that of {@code ConcurrentHashMap} in a reusable form, and extends it for
 * semaphores and read-write locks. Conceptually, lock striping is the technique of dividing a lock
 * into many <i>stripes</i>, increasing the granularity of a single lock and allowing independent
 * operations to lock different stripes and proceed concurrently, instead of creating contention
 * for a single lock.
 *
 * <p>The guarantee provided by this class is that equal keys lead to the same lock (or semaphore),
 * i.e. {@code if (key1.equals(key2))} then {@code striped.get(key1) == striped.get(key2)}
 * (assuming {@link Object#hashCode()} is correctly implemented for the keys). Note
 * that if {@code key1} is <strong>not</strong> equal to {@code key2}, it is <strong>not</strong>
 * guaranteed that {@code striped.get(key1) != striped.get(key2)}; the elements might nevertheless
 * be mapped to the same lock. The lower the number of stripes, the higher the probability of this
 * happening.
 *
 * <p>There are three flavors of this class: {@code Striped<Lock>}, {@code Striped<Semaphore>},
 * and {@code Striped<ReadWriteLock>}. For each type, two implementations are offered:
 * {@linkplain #lock(int) strong} and {@linkplain #lazyWeakLock(int) weak}
 * {@code Striped<Lock>}, {@linkplain #semaphore(int, int) strong} and {@linkplain
 * #lazyWeakSemaphore(int, int) weak} {@code Striped<Semaphore>}, and {@linkplain
 * #readWriteLock(int) strong} and {@linkplain #lazyWeakReadWriteLock(int) weak}
 * {@code Striped<ReadWriteLock>}. <i>Strong</i> means that all stripes (locks/semaphores) are
 * initialized eagerly, and are not reclaimed unless {@code Striped} itself is reclaimable.
 * <i>Weak</i> means that locks/semaphores are created lazily, and they are allowed to be reclaimed
 * if nobody is holding on to them. This is useful, for example, if one wants to create a {@code
 * Striped<Lock>} of many locks, but worries that in most cases only a small portion of these
 * would be in use.
 *
 * <p>Prior to this class, one might be tempted to use {@code Map<K, Lock>}, where {@code K}
 * represents the task. This maximizes concurrency by having each unique key mapped to a unique
 * lock, but also maximizes memory footprint. On the other extreme, one could use a single lock
 * for all tasks, which minimizes memory footprint but also minimizes concurrency. Instead of
 * choosing either of these extremes, {@code Striped} allows the user to trade between required
 * concurrency and memory footprint. For example, if a set of tasks are CPU-bound, one could easily
 * create a very compact {@code Striped<Lock>} of {@code availableProcessors() * 4} stripes,
 * instead of possibly thousands of locks which could be created in a {@code Map<K, Lock>}
 * structure.
 *
 * @author Dimitris Andreou
 * @since 13.0
 */
@Beta
public abstract class Striped<L> {
  // Instances are created only through the static factories below.
  private Striped() {}

  /**
   * Returns the stripe that corresponds to the passed key. It is always guaranteed that if
   * {@code key1.equals(key2)}, then {@code get(key1) == get(key2)}.
   *
   * @param key an arbitrary, non-null key
   * @return the stripe that the passed key corresponds to
   */
  public abstract L get(Object key);

  /**
   * Returns the stripe at the specified index. Valid indexes are 0, inclusively, to
   * {@code size()}, exclusively.
   *
   * @param index the index of the stripe to return; must be in {@code [0...size())}
   * @return the stripe at the specified index
   */
  public abstract L getAt(int index);

  /**
   * Returns the index to which the given key is mapped, so that getAt(indexFor(key)) == get(key).
   */
  abstract int indexFor(Object key);

  /**
   * Returns the total number of stripes in this instance.
   */
  public abstract int size();

  /**
   * Returns the stripes that correspond to the passed objects, in ascending (as per
   * {@link #getAt(int)}) order. Thus, threads that use the stripes in the order returned
   * by this method are guaranteed to not deadlock each other.
   *
   * <p>It should be noted that using a {@code Striped<L>} with relatively few stripes, and
   * {@code bulkGet(keys)} with a relative large number of keys can cause an excessive number
   * of shared stripes (much like the birthday paradox, where much fewer than anticipated birthdays
   * are needed for a pair of them to match). Please consider carefully the implications of the
   * number of stripes, the intended concurrency level, and the typical number of keys used in a
   * {@code bulkGet(keys)} operation. See <a href="http://www.mathpages.com/home/kmath199.htm">Balls
   * in Bins model</a> for mathematical formulas that can be used to estimate the probability of
   * collisions.
   *
   * @param keys arbitrary non-null keys
   * @return the stripes corresponding to the objects (one per each object, derived by delegating
   *         to {@link #get(Object)}; may contain duplicates), in an increasing index order.
   */
  public Iterable<L> bulkGet(Iterable<?> keys) {
    // Initially using the array to store the keys, then reusing it to store the respective L's
    final Object[] array = Iterables.toArray(keys, Object.class);
    int[] stripes = new int[array.length];
    for (int i = 0; i < array.length; i++) {
      stripes[i] = indexFor(array[i]);
    }
    // Sorting the stripe indices is what yields the deadlock-free ascending order.
    Arrays.sort(stripes);
    for (int i = 0; i < array.length; i++) {
      array[i] = getAt(stripes[i]);
    }
    /*
     * Note that the returned Iterable holds references to the returned stripes, to avoid
     * error-prone code like:
     *
     * Striped<Lock> stripedLock = Striped.lazyWeakXXX(...)'
     * Iterable<Lock> locks = stripedLock.bulkGet(keys);
     * for (Lock lock : locks) {
     *   lock.lock();
     * }
     * operation();
     * for (Lock lock : locks) {
     *   lock.unlock();
     * }
     *
     * If we only held the int[] stripes, translating it on the fly to L's, the original locks
     * might be garbage collected after locking them, ending up in a huge mess.
     */
    @SuppressWarnings("unchecked") // we carefully replaced all keys with their respective L's
    List<L> asList = (List<L>) Arrays.asList(array);
    return Collections.unmodifiableList(asList);
  }

  // Static factories

  /**
   * Creates a {@code Striped<Lock>} with eagerly initialized, strongly referenced locks.
   * Every lock is reentrant.
   *
   * @param stripes the minimum number of stripes (locks) required
   * @return a new {@code Striped<Lock>}
   */
  public static Striped<Lock> lock(int stripes) {
    return new CompactStriped<Lock>(stripes, new Supplier<Lock>() {
      public Lock get() {
        return new PaddedLock();
      }
    });
  }

  /**
   * Creates a {@code Striped<Lock>} with lazily initialized, weakly referenced locks.
   * Every lock is reentrant.
   *
   * @param stripes the minimum number of stripes (locks) required
   * @return a new {@code Striped<Lock>}
   */
  public static Striped<Lock> lazyWeakLock(int stripes) {
    return new LazyStriped<Lock>(stripes, new Supplier<Lock>() {
      public Lock get() {
        return new ReentrantLock(false);
      }
    });
  }

  /**
   * Creates a {@code Striped<Semaphore>} with eagerly initialized, strongly referenced semaphores,
   * with the specified number of permits.
   *
   * @param stripes the minimum number of stripes (semaphores) required
   * @param permits the number of permits in each semaphore
   * @return a new {@code Striped<Semaphore>}
   */
  public static Striped<Semaphore> semaphore(int stripes, final int permits) {
    return new CompactStriped<Semaphore>(stripes, new Supplier<Semaphore>() {
      public Semaphore get() {
        return new PaddedSemaphore(permits);
      }
    });
  }

  /**
   * Creates a {@code Striped<Semaphore>} with lazily initialized, weakly referenced semaphores,
   * with the specified number of permits.
   *
   * @param stripes the minimum number of stripes (semaphores) required
   * @param permits the number of permits in each semaphore
   * @return a new {@code Striped<Semaphore>}
   */
  public static Striped<Semaphore> lazyWeakSemaphore(int stripes, final int permits) {
    return new LazyStriped<Semaphore>(stripes, new Supplier<Semaphore>() {
      public Semaphore get() {
        return new Semaphore(permits, false);
      }
    });
  }

  /**
   * Creates a {@code Striped<ReadWriteLock>} with eagerly initialized, strongly referenced
   * read-write locks. Every lock is reentrant.
   *
   * @param stripes the minimum number of stripes (locks) required
   * @return a new {@code Striped<ReadWriteLock>}
   */
  public static Striped<ReadWriteLock> readWriteLock(int stripes) {
    return new CompactStriped<ReadWriteLock>(stripes, READ_WRITE_LOCK_SUPPLIER);
  }

  /**
   * Creates a {@code Striped<ReadWriteLock>} with lazily initialized, weakly referenced
   * read-write locks. Every lock is reentrant.
   *
   * @param stripes the minimum number of stripes (locks) required
   * @return a new {@code Striped<ReadWriteLock>}
   */
  public static Striped<ReadWriteLock> lazyWeakReadWriteLock(int stripes) {
    return new LazyStriped<ReadWriteLock>(stripes, READ_WRITE_LOCK_SUPPLIER);
  }

  // ReentrantReadWriteLock is large enough to make padding probably unnecessary
  private static final Supplier<ReadWriteLock> READ_WRITE_LOCK_SUPPLIER =
      new Supplier<ReadWriteLock>() {
    public ReadWriteLock get() {
      return new ReentrantReadWriteLock();
    }
  };

  // Common base: rounds the stripe count up to a power of two so indexFor can use a mask
  // instead of a modulo.
  private abstract static class PowerOfTwoStriped<L> extends Striped<L> {
    /** Capacity (power of two) minus one, for fast mod evaluation */
    final int mask;

    PowerOfTwoStriped(int stripes) {
      Preconditions.checkArgument(stripes > 0, "Stripes must be positive");
      // Above 2^30 we can't round up without overflow, so use all bits set.
      this.mask = stripes > Ints.MAX_POWER_OF_TWO ? ALL_SET : ceilToPowerOfTwo(stripes) - 1;
    }

    @Override final int indexFor(Object key) {
      int hash = smear(key.hashCode());
      return hash & mask;
    }

    @Override public final L get(Object key) {
      return getAt(indexFor(key));
    }
  }

  /**
   * Implementation of Striped where 2^k stripes are represented as an array of the same length,
   * eagerly initialized.
   */
  private static class CompactStriped<L> extends PowerOfTwoStriped<L> {
    /** Size is a power of two. */
    private final Object[] array;

    private CompactStriped(int stripes, Supplier<L> supplier) {
      super(stripes);
      Preconditions.checkArgument(stripes <= Ints.MAX_POWER_OF_TWO, "Stripes must be <= 2^30)");

      this.array = new Object[mask + 1];
      for (int i = 0; i < array.length; i++) {
        array[i] = supplier.get();
      }
    }

    @SuppressWarnings("unchecked") // we only put L's in the array
    @Override public L getAt(int index) {
      return (L) array[index];
    }

    @Override public int size() {
      return array.length;
    }
  }

  /**
   * Implementation of Striped where up to 2^k stripes can be represented, using a Cache
   * where the key domain is [0..2^k). To map a user key into a stripe, we take a k-bit slice of the
   * user key's (smeared) hashCode(). The stripes are lazily initialized and are weakly referenced.
   */
  private static class LazyStriped<L> extends PowerOfTwoStriped<L> {
    final ConcurrentMap<Integer, L> locks;
    final Supplier<L> supplier;
    final int size;

    LazyStriped(int stripes, Supplier<L> supplier) {
      super(stripes);
      this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
      this.supplier = supplier;
      // Weak values allow unreferenced stripes to be reclaimed by the GC.
      this.locks = new MapMaker().weakValues().makeMap();
    }

    @Override public L getAt(int index) {
      if (size != Integer.MAX_VALUE) {
        Preconditions.checkElementIndex(index, size());
      } // else no check necessary, all index values are valid
      L existing = locks.get(index);
      if (existing != null) {
        return existing;
      }
      // Racing creators: putIfAbsent decides the winner; losers adopt the existing stripe.
      L created = supplier.get();
      existing = locks.putIfAbsent(index, created);
      return firstNonNull(existing, created);
    }

    @Override public int size() {
      return size;
    }
  }

  /**
   * A bit mask were all bits are set.
   */
  private static final int ALL_SET = ~0;

  private static int ceilToPowerOfTwo(int x) {
    return 1 << IntMath.log2(x, RoundingMode.CEILING);
  }

  /*
   * This method was written by Doug Lea with assistance from members of JCP
   * JSR-166 Expert Group and released to the public domain, as explained at
   * http://creativecommons.org/licenses/publicdomain
   *
   * As of 2010/06/11, this method is identical to the (package private) hash
   * method in OpenJDK 7's java.util.HashMap class.
   */
  // Copied from java/com/google/common/collect/Hashing.java
  private static int smear(int hashCode) {
    hashCode ^= (hashCode >>> 20) ^ (hashCode >>> 12);
    return hashCode ^ (hashCode >>> 7) ^ (hashCode >>> 4);
  }

  private static class PaddedLock extends ReentrantLock {
    /*
     * Padding from 40 into 64 bytes, same size as cache line. Might be beneficial to add
     * a fourth long here, to minimize chance of interference between consecutive locks,
     * but I couldn't observe any benefit from that.
     */
    @SuppressWarnings("unused")
    long q1, q2, q3;

    PaddedLock() {
      super(false);
    }
  }

  private static class PaddedSemaphore extends Semaphore {
    // See PaddedReentrantLock comment
    @SuppressWarnings("unused")
    long q1, q2, q3;

    PaddedSemaphore(int permits) {
      super(permits, false);
    }
  }
}
package test.java.framework.manager.cucumber.runtime;

import gherkin.formatter.Formatter;
import gherkin.formatter.Reporter;
import gherkin.util.FixJava;
import test.java.framework.manager.cucumber.api.SnippetType;
import test.java.framework.manager.cucumber.runtime.formatter.ColorAware;
import test.java.framework.manager.cucumber.runtime.formatter.FormatterFactory;
import test.java.framework.manager.cucumber.runtime.formatter.StrictAware;
import test.java.framework.manager.cucumber.runtime.io.ResourceLoader;
import test.java.framework.manager.cucumber.runtime.model.CucumberFeature;

import java.lang.reflect.Proxy;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
import java.util.regex.Pattern;

import static test.java.framework.manager.cucumber.runtime.model.CucumberFeature.load;

// IMPORTANT! Make sure USAGE.txt is always uptodate if this class changes.

/**
 * Holds the Cucumber runtime configuration (glue paths, feature paths, tag/name
 * filters, formatter names and boolean flags) parsed from a command-line-style
 * argument list. Options supplied through the {@code cucumber.options}
 * environment/system property are parsed last and therefore override the ones
 * passed in explicitly (filters, feature paths and glue replace; flags and
 * formatters accumulate).
 */
public class RuntimeOptions {
    public static final String VERSION = ResourceBundle.getBundle("cucumber.version").getString("cucumber-jvm.version");
    public static final String USAGE = FixJava.readResource("/cucumber/api/cli/USAGE.txt");

    private final List<String> glue = new ArrayList<>();
    private final List<Object> filters = new ArrayList<>();
    private final List<Formatter> formatters = new ArrayList<>();
    private final List<String> featurePaths = new ArrayList<>();
    private final List<String> formatterNames = new ArrayList<>();
    private final FormatterFactory formatterFactory;
    private URL dotCucumber;
    private boolean dryRun;
    private boolean strict = false;
    private boolean monochrome = false;
    private SnippetType snippetType = SnippetType.UNDERSCORE;
    // Formatters are created lazily, exactly once, by getFormatters().
    private boolean formattersCreated = false;

    /**
     * Create a new instance from a string of options, for example:
     * <p>
     * <pre>{@code "--name 'the fox' --format pretty --strict"}</pre>
     *
     * @param argv the arguments
     */
    public RuntimeOptions(String argv) {
        this(new FormatterFactory(), Shellwords.parse(argv));
    }

    /**
     * Create a new instance from a list of options, for example:
     * <p>
     * <pre>{@code Arrays.asList("--name", "the fox", "--format", "pretty", "--strict");}</pre>
     *
     * @param argv the arguments
     */
    public RuntimeOptions(List<String> argv) {
        this(new FormatterFactory(), argv);
    }

    public RuntimeOptions(Env env, List<String> argv) {
        this(env, new FormatterFactory(), argv);
    }

    public RuntimeOptions(FormatterFactory formatterFactory, List<String> argv) {
        this(new Env("cucumber"), formatterFactory, argv);
    }

    public RuntimeOptions(Env env, FormatterFactory formatterFactory, List<String> argv) {
        this.formatterFactory = formatterFactory;

        argv = new ArrayList<>(argv); // in case the one passed in is unmodifiable.
        parse(argv);

        // cucumber.options is parsed second so it can override what was passed in.
        String cucumberOptionsFromEnv = env.get("cucumber.options");
        if (cucumberOptionsFromEnv != null) {
            parse(Shellwords.parse(cucumberOptionsFromEnv));
        }

        if (formatterNames.isEmpty()) {
            formatterNames.add("progress");
        }
    }

    /**
     * Consumes {@code args}, updating this instance. Filters, feature paths and
     * glue are collected locally and only replace the existing values when the
     * parsed list is non-empty, so a later parse (e.g. cucumber.options) does
     * not wipe out earlier settings it does not mention.
     *
     * @throws CucumberException on an unrecognised option
     */
    private void parse(List<String> args) {
        List<Object> parsedFilters = new ArrayList<>();
        List<String> parsedFeaturePaths = new ArrayList<>();
        List<String> parsedGlue = new ArrayList<>();

        while (!args.isEmpty()) {
            String arg = args.remove(0).trim();

            if (arg.equals("--help") || arg.equals("-h")) {
                printUsage();
                System.exit(0);
            } else if (arg.equals("--version") || arg.equals("-v")) {
                System.out.println(VERSION);
                System.exit(0);
            } else if (arg.equals("--glue") || arg.equals("-g")) {
                String gluePath = args.remove(0);
                parsedGlue.add(gluePath);
            } else if (arg.equals("--tags") || arg.equals("-t")) {
                parsedFilters.add(args.remove(0));
            } else if (arg.equals("--format") || arg.equals("-f")) {
                formatterNames.add(args.remove(0));
            } else if (arg.equals("--dotcucumber")) {
                String urlOrPath = args.remove(0);
                dotCucumber = Utils.toURL(urlOrPath);
            } else if (arg.equals("--no-dry-run") || arg.equals("--dry-run") || arg.equals("-d")) {
                dryRun = !arg.startsWith("--no-");
            } else if (arg.equals("--no-strict") || arg.equals("--strict") || arg.equals("-s")) {
                strict = !arg.startsWith("--no-");
            } else if (arg.equals("--no-monochrome") || arg.equals("--monochrome") || arg.equals("-m")) {
                monochrome = !arg.startsWith("--no-");
            } else if (arg.equals("--snippets")) {
                String nextArg = args.remove(0);
                snippetType = SnippetType.fromString(nextArg);
            } else if (arg.equals("--name") || arg.equals("-n")) {
                String nextArg = args.remove(0);
                Pattern patternFilter = Pattern.compile(nextArg);
                parsedFilters.add(patternFilter);
            } else if (arg.startsWith("-")) {
                printUsage();
                throw new CucumberException("Unknown option: " + arg);
            } else {
                parsedFeaturePaths.add(arg);
            }
        }
        if (!parsedFilters.isEmpty()) {
            filters.clear();
            filters.addAll(parsedFilters);
        }
        if (!parsedFeaturePaths.isEmpty()) {
            featurePaths.clear();
            featurePaths.addAll(parsedFeaturePaths);
        }
        if (!parsedGlue.isEmpty()) {
            glue.clear();
            glue.addAll(parsedGlue);
        }
    }

    private void printUsage() {
        System.out.println(USAGE);
    }

    public List<CucumberFeature> cucumberFeatures(ResourceLoader resourceLoader) {
        return load(resourceLoader, featurePaths, filters, System.out);
    }

    /**
     * Lazily instantiates the configured formatters (once) and applies the
     * monochrome/strict settings to the ones that care about them.
     */
    List<Formatter> getFormatters() {
        if (!formattersCreated) {
            for (String formatterName : formatterNames) {
                Formatter formatter = formatterFactory.create(formatterName);
                formatters.add(formatter);
                setMonochromeOnColorAwareFormatters(formatter);
                setStrictOnStrictAwareFormatters(formatter);
            }
            formattersCreated = true;
        }
        return formatters;
    }

    /**
     * Returns a {@link Formatter} proxy that fans every call out to all
     * configured formatters.
     */
    public Formatter formatter(ClassLoader classLoader) {
        return (Formatter) Proxy.newProxyInstance(classLoader, new Class<?>[]{Formatter.class}, (target, method, args) -> {
            for (Formatter formatter : getFormatters()) {
                Utils.invoke(formatter, method, 0, args);
            }
            return null;
        });
    }

    /**
     * Returns a {@link Reporter} proxy that fans every call out to all
     * configured formatters that are also reporters.
     */
    public Reporter reporter(ClassLoader classLoader) {
        return (Reporter) Proxy.newProxyInstance(classLoader, new Class<?>[]{Reporter.class}, (target, method, args) -> {
            // Use getFormatters() (not the raw field) so the formatters are
            // lazily created even when reporter() is invoked before formatter();
            // iterating the field would silently dispatch over an empty list.
            for (Formatter formatter : getFormatters()) {
                if (formatter instanceof Reporter) {
                    Utils.invoke(formatter, method, 0, args);
                }
            }
            return null;
        });
    }

    private void setMonochromeOnColorAwareFormatters(Formatter formatter) {
        if (formatter instanceof ColorAware) {
            ColorAware colorAware = (ColorAware) formatter;
            colorAware.setMonochrome(monochrome);
        }
    }

    private void setStrictOnStrictAwareFormatters(Formatter formatter) {
        if (formatter instanceof StrictAware) {
            StrictAware strictAware = (StrictAware) formatter;
            strictAware.setStrict(strict);
        }
    }

    public List<String> getGlue() {
        return glue;
    }

    public boolean isStrict() {
        return strict;
    }

    public boolean isDryRun() {
        return dryRun;
    }

    public List<String> getFeaturePaths() {
        return featurePaths;
    }

    public URL getDotCucumber() {
        return dotCucumber;
    }

    public void addFormatter(Formatter formatter) {
        formatters.add(formatter);
    }

    public List<Object> getFilters() {
        return filters;
    }

    public boolean isMonochrome() {
        return monochrome;
    }

    public SnippetType getSnippetType() {
        return snippetType;
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.orc.stream;

import io.prestosql.orc.OrcCorruptionException;
import io.prestosql.orc.checkpoint.LongStreamCheckpoint;
import io.prestosql.orc.checkpoint.LongStreamV2Checkpoint;

import java.io.IOException;
import java.io.InputStream;

import static com.google.common.primitives.Ints.min;

/**
 * Decoder for ORC RLE-v2 encoded long streams. Values are decoded a run at a
 * time into the {@code literals} buffer and handed out by the {@code next}
 * methods; {@code used} tracks how many of the {@code numLiterals} buffered
 * values have been consumed.
 *
 * @see {@link org.apache.orc.impl.RunLengthIntegerWriterV2} for description of various lightweight compression techniques.
 */
// This comes from the Apache Hive ORC code
public class LongInputStreamV2 implements LongInputStream {
    // RLE-v2 only stores run lengths after a minimum of 3 repeats.
    private static final int MIN_REPEAT_SIZE = 3;
    // Maximum number of values a single encoded run can hold.
    private static final int MAX_LITERAL_SIZE = 512;

    // The two high bits of a run's first byte select one of these encodings.
    private enum EncodingType {
        SHORT_REPEAT, DIRECT, PATCHED_BASE, DELTA
    }

    private final LongBitPacker packer = new LongBitPacker();
    private final OrcInputStream input;
    // Whether values are zigzag-encoded signed longs.
    private final boolean signed;
    // Decode buffer for the current run.
    private final long[] literals = new long[MAX_LITERAL_SIZE];
    // Number of valid values currently in `literals`.
    private int numLiterals;
    // Number of values from `literals` already returned to the caller.
    private int used;
    // When true, tolerate a known writer corruption instead of failing.
    private final boolean skipCorrupt;
    // Input checkpoint taken just before the current run was read; lets
    // seekToCheckpoint() reuse the buffered run when possible.
    private long lastReadInputCheckpoint;

    public LongInputStreamV2(OrcInputStream input, boolean signed, boolean skipCorrupt) {
        this.input = input;
        this.signed = signed;
        this.skipCorrupt = skipCorrupt;
        lastReadInputCheckpoint = input.getCheckpoint();
    }

    // This comes from the Apache Hive ORC code
    // Reads the next run header and dispatches on the encoding type,
    // appending the decoded values to `literals`.
    private void readValues() throws IOException {
        lastReadInputCheckpoint = input.getCheckpoint();

        // read the first 2 bits and determine the encoding type
        int firstByte = input.read();
        if (firstByte < 0) {
            throw new OrcCorruptionException(input.getOrcDataSourceId(), "Read past end of RLE integer");
        }

        int enc = (firstByte >>> 6) & 0x03;
        if (EncodingType.SHORT_REPEAT.ordinal() == enc) {
            readShortRepeatValues(firstByte);
        }
        else if (EncodingType.DIRECT.ordinal() == enc) {
            readDirectValues(firstByte);
        }
        else if (EncodingType.PATCHED_BASE.ordinal() == enc) {
            readPatchedBaseValues(firstByte);
        }
        else {
            readDeltaValues(firstByte);
        }
    }

    // This comes from the Apache Hive ORC code
    // DELTA encoding: a first value, a (signed) delta base, and optionally a
    // bit-packed blob of further deltas.
    private void readDeltaValues(int firstByte) throws IOException {
        // extract the number of fixed bits
        int fixedBits = (firstByte >>> 1) & 0x1f;
        if (fixedBits != 0) {
            fixedBits = LongDecode.decodeBitWidth(fixedBits);
        }

        // extract the blob run length
        int length = (firstByte & 0x01) << 8;
        length |= input.read();

        // read the first value stored as vint
        long firstVal = LongDecode.readVInt(signed, input);

        // store first value to result buffer
        literals[numLiterals++] = firstVal;

        // if fixed bits is 0 then all values have fixed delta
        long prevVal;
        if (fixedBits == 0) {
            // read the fixed delta value stored as vint (deltas can be negative even
            // if all number are positive)
            long fixedDelta = LongDecode.readSignedVInt(input);

            // add fixed deltas to adjacent values
            // NOTE: the index (numLiterals++) is evaluated before the RHS, so
            // literals[numLiterals - 2] reads the value stored just before it.
            for (int i = 0; i < length; i++) {
                literals[numLiterals++] = literals[numLiterals - 2] + fixedDelta;
            }
        }
        else {
            long deltaBase = LongDecode.readSignedVInt(input);
            // add delta base and first value
            literals[numLiterals++] = firstVal + deltaBase;
            prevVal = literals[numLiterals - 1];
            length -= 1;

            // write the unpacked values, add it to previous value and store final
            // value to result buffer. if the delta base value is negative then it
            // is a decreasing sequence else an increasing sequence
            packer.unpack(literals, numLiterals, length, fixedBits, input);
            while (length > 0) {
                if (deltaBase < 0) {
                    literals[numLiterals] = prevVal - literals[numLiterals];
                }
                else {
                    literals[numLiterals] = prevVal + literals[numLiterals];
                }
                prevVal = literals[numLiterals];
                length--;
                numLiterals++;
            }
        }
    }

    // This comes from the Apache Hive ORC code
    // PATCHED_BASE encoding: bit-packed values relative to a base, plus a
    // patch list that restores the high bits of outlier values.
    private void readPatchedBaseValues(int firstByte) throws IOException {
        // extract the number of fixed bits
        int fb = LongDecode.decodeBitWidth((firstByte >>> 1) & 0b1_1111);

        // extract the run length of data blob
        int length = (firstByte & 0b1) << 8;
        length |= input.read();
        // runs are always one off
        length += 1;

        // extract the number of bytes occupied by base
        int thirdByte = input.read();
        int baseWidth = (thirdByte >>> 5) & 0b0111;
        // base width is one off
        baseWidth += 1;

        // extract patch width
        int patchWidth = LongDecode.decodeBitWidth(thirdByte & 0b1_1111);

        // read fourth byte and extract patch gap width
        int fourthByte = input.read();
        int patchGapWidth = (fourthByte >>> 5) & 0b0111;
        // patch gap width is one off
        patchGapWidth += 1;

        // extract the length of the patch list
        int patchListLength = fourthByte & 0b1_1111;

        // read the next base width number of bytes to extract base value
        long base = bytesToLongBE(input, baseWidth);
        long mask = (1L << ((baseWidth * 8) - 1));
        // if MSB of base value is 1 then base is negative value else positive
        if ((base & mask) != 0) {
            base = base & ~mask;
            base = -base;
        }

        // unpack the data blob
        long[] unpacked = new long[length];
        packer.unpack(unpacked, 0, length, fb, input);

        // unpack the patch blob
        long[] unpackedPatch = new long[patchListLength];

        if ((patchWidth + patchGapWidth) > 64 && !skipCorrupt) {
            throw new OrcCorruptionException(input.getOrcDataSourceId(), "Invalid RLEv2 encoded stream");
        }
        int bitSize = LongDecode.getClosestFixedBits(patchWidth + patchGapWidth);
        packer.unpack(unpackedPatch, 0, patchListLength, bitSize, input);

        // apply the patch directly when decoding the packed data
        int patchIndex = 0;
        long currentGap;
        long currentPatch;
        long patchMask = ((1L << patchWidth) - 1);
        currentGap = unpackedPatch[patchIndex] >>> patchWidth;
        currentPatch = unpackedPatch[patchIndex] & patchMask;
        long actualGap = 0;

        // special case: gap is >255 then patch value will be 0.
        // if gap is <=255 then patch value cannot be 0
        while (currentGap == 255 && currentPatch == 0) {
            actualGap += 255;
            patchIndex++;
            currentGap = unpackedPatch[patchIndex] >>> patchWidth;
            currentPatch = unpackedPatch[patchIndex] & patchMask;
        }
        // add the left over gap
        actualGap += currentGap;

        // unpack data blob, patch it (if required), add base to get final result
        for (int i = 0; i < unpacked.length; i++) {
            if (i == actualGap) {
                // extract the patch value
                long patchedValue = unpacked[i] | (currentPatch << fb);

                // add base to patched value
                literals[numLiterals++] = base + patchedValue;

                // increment the patch to point to next entry in patch list
                patchIndex++;

                if (patchIndex < patchListLength) {
                    // read the next gap and patch
                    currentGap = unpackedPatch[patchIndex] >>> patchWidth;
                    currentPatch = unpackedPatch[patchIndex] & patchMask;
                    actualGap = 0;

                    // special case: gap is >255 then patch will be 0. if gap is
                    // <=255 then patch cannot be 0
                    while (currentGap == 255 && currentPatch == 0) {
                        actualGap += 255;
                        patchIndex++;
                        currentGap = unpackedPatch[patchIndex] >>> patchWidth;
                        currentPatch = unpackedPatch[patchIndex] & patchMask;
                    }
                    // add the left over gap
                    actualGap += currentGap;

                    // next gap is relative to the current gap
                    actualGap += i;
                }
            }
            else {
                // no patching required. add base to unpacked value to get final value
                literals[numLiterals++] = base + unpacked[i];
            }
        }
    }

    // This comes from the Apache Hive ORC code
    // DIRECT encoding: a run of bit-packed values, zigzag-decoded if signed.
    private void readDirectValues(int firstByte) throws IOException {
        // extract the number of fixed bits
        int fixedBits = LongDecode.decodeBitWidth((firstByte >>> 1) & 0b1_1111);

        // extract the run length
        int length = (firstByte & 0b1) << 8;
        length |= input.read();
        // runs are one off
        length += 1;

        // write the unpacked values and zigzag decode to result buffer
        packer.unpack(literals, numLiterals, length, fixedBits, input);
        if (signed) {
            for (int i = 0; i < length; i++) {
                literals[numLiterals] = LongDecode.zigzagDecode(literals[numLiterals]);
                numLiterals++;
            }
        }
        else {
            numLiterals += length;
        }
    }

    // This comes from the Apache Hive ORC code
    // SHORT_REPEAT encoding: a single value repeated length times.
    private void readShortRepeatValues(int firstByte) throws IOException {
        // read the number of bytes occupied by the value
        int size = (firstByte >>> 3) & 0b0111;
        // #bytes are one off
        size += 1;

        // read the run length
        int length = firstByte & 0x07;
        // run lengths values are stored only after MIN_REPEAT value is met
        length += MIN_REPEAT_SIZE;

        // read the repeated value which is store using fixed bytes
        long val = bytesToLongBE(input, size);

        if (signed) {
            val = LongDecode.zigzagDecode(val);
        }

        // repeat the value for length times
        for (int i = 0; i < length; i++) {
            literals[numLiterals++] = val;
        }
    }

    /**
     * Read n bytes in big endian order and convert to long.
     */
    // NOTE(review): input.read() at EOF returns -1, which is not detected here
    // and would corrupt the result — behavior inherited from the upstream code.
    private static long bytesToLongBE(InputStream input, int n) throws IOException {
        long out = 0;
        long val;
        while (n > 0) {
            n--;
            // store it in a long and then shift else integer overflow will occur
            val = input.read();
            out |= (val << (n * 8));
        }
        return out;
    }

    @Override
    public long next() throws IOException {
        // Refill the buffer when all buffered values have been consumed.
        if (used == numLiterals) {
            numLiterals = 0;
            used = 0;
            readValues();
        }
        return literals[used++];
    }

    @Override
    public void next(long[] values, int items) throws IOException {
        int offset = 0;
        while (items > 0) {
            if (used == numLiterals) {
                numLiterals = 0;
                used = 0;
                readValues();
            }

            int chunkSize = min(numLiterals - used, items);
            System.arraycopy(literals, used, values, offset, chunkSize);

            used += chunkSize;
            offset += chunkSize;
            items -= chunkSize;
        }
    }

    @Override
    public void next(int[] values, int items) throws IOException {
        int offset = 0;
        while (items > 0) {
            if (used == numLiterals) {
                numLiterals = 0;
                used = 0;
                readValues();
            }

            int chunkSize = min(numLiterals - used, items);
            for (int i = 0; i < chunkSize; i++) {
                long literal = literals[used + i];
                int value = (int) literal;
                // Narrowing must be lossless; anything else is stream corruption.
                if (literal != value) {
                    throw new OrcCorruptionException(input.getOrcDataSourceId(), "Decoded value out of range for a 32bit number");
                }
                values[offset + i] = value;
            }

            used += chunkSize;
            offset += chunkSize;
            items -= chunkSize;
        }
    }

    @Override
    public void next(short[] values, int items) throws IOException {
        int offset = 0;
        while (items > 0) {
            if (used == numLiterals) {
                numLiterals = 0;
                used = 0;
                readValues();
            }

            int chunkSize = min(numLiterals - used, items);
            for (int i = 0; i < chunkSize; i++) {
                long literal = literals[used + i];
                short value = (short) literal;
                // Narrowing must be lossless; anything else is stream corruption.
                if (literal != value) {
                    throw new OrcCorruptionException(input.getOrcDataSourceId(), "Decoded value out of range for a 16bit number");
                }
                values[offset + i] = value;
            }

            used += chunkSize;
            offset += chunkSize;
            items -= chunkSize;
        }
    }

    @Override
    public Class<LongStreamV2Checkpoint> getCheckpointType() {
        return LongStreamV2Checkpoint.class;
    }

    @Override
    public void seekToCheckpoint(LongStreamCheckpoint checkpoint) throws IOException {
        LongStreamV2Checkpoint v2Checkpoint = (LongStreamV2Checkpoint) checkpoint;

        // if the checkpoint is within the current buffer, just adjust the pointer
        if (lastReadInputCheckpoint == v2Checkpoint.getInputStreamCheckpoint() && v2Checkpoint.getOffset() <= numLiterals) {
            used = v2Checkpoint.getOffset();
        }
        else {
            // otherwise, discard the buffer and start over
            input.seekToCheckpoint(v2Checkpoint.getInputStreamCheckpoint());
            numLiterals = 0;
            used = 0;
            skip(v2Checkpoint.getOffset());
        }
    }

    @Override
    public void skip(long items) throws IOException {
        // Decode runs and discard values until `items` values have been skipped.
        while (items > 0) {
            if (used == numLiterals) {
                numLiterals = 0;
                used = 0;
                readValues();
            }
            long consume = Math.min(items, numLiterals - used);
            used += consume;
            items -= consume;
        }
    }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.cloud.acl.ControlledEntity.ACLType;
import com.cloud.dc.DataCenter;
import com.cloud.dc.Vlan;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.deploy.DataCenterDeployment;
import com.cloud.deploy.DeployDestination;
import com.cloud.deploy.DeploymentPlan;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InsufficientAddressCapacityException;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.InsufficientVirtualNetworkCapcityException;
import com.cloud.exception.ResourceAllocationException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.network.Network.Capability;
import com.cloud.network.Network.Provider;
import com.cloud.network.Network.Service;
import com.cloud.network.Networks.TrafficType;
import com.cloud.network.addr.PublicIp;
import com.cloud.network.element.RemoteAccessVPNServiceProvider;
import com.cloud.network.element.UserDataServiceProvider;
import com.cloud.network.guru.NetworkGuru;
import com.cloud.network.rules.FirewallRule;
import com.cloud.network.rules.StaticNat;
import com.cloud.offering.NetworkOffering;
import com.cloud.offerings.NetworkOfferingVO;
import com.cloud.user.Account;
import com.cloud.utils.Pair;
import com.cloud.vm.Nic;
import com.cloud.vm.NicProfile;
import com.cloud.vm.ReservationContext;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachineProfile;

/**
 * NetworkManager manages the network for the different end users.
 *
 */
public interface NetworkManager extends NetworkService {
    /**
     * Assigns a new public ip address.
     *
     * @param dcId zone to allocate the address in
     * @param podId
     *            TODO
     * @param owner account that will own the address
     * @param type VLAN type to allocate from
     * @param networkId network the address is associated with
     * @param requestedIp
     *            TODO
     * @param isSystem whether the address is allocated for system use
     * @return the allocated public IP
     * @throws InsufficientAddressCapacityException
     */
    PublicIp assignPublicIpAddress(long dcId, Long podId, Account owner, VlanType type, Long networkId, String requestedIp, boolean isSystem) throws InsufficientAddressCapacityException;

    /**
     * assigns a source nat ip address to an account within a network.
     *
     * @param owner account to assign the source NAT address to
     * @param network network the address belongs to
     * @param callerId id of the calling user
     * @return the assigned source NAT public IP
     * @throws ConcurrentOperationException
     * @throws InsufficientAddressCapacityException
     */
    PublicIp assignSourceNatIpAddress(Account owner, Network network, long callerId) throws ConcurrentOperationException, InsufficientAddressCapacityException;

    /**
     * Do all of the work of releasing public ip addresses. Note that if this method fails, there can be side effects.
     *
     * @param id id of the IP address to release
     * @param userId id of the user performing the release
     * @param caller account performing the release
     * @return true if it did; false if it didn't
     */
    public boolean releasePublicIpAddress(long id, long userId, Account caller);

    /**
     * Lists IP addresses that belong to VirtualNetwork VLANs
     *
     * @param accountId
     *            - account that the IP address should belong to
     * @param dcId
     *            - zone that the IP address should belong to
     * @param sourceNat
     *            - (optional) true if the IP address should be a source NAT address
     * @param associatedNetworkId
     *            TODO
     * @return - list of IP addresses
     */
    List<IPAddressVO> listPublicIpAddressesInVirtualNetwork(long accountId, long dcId, Boolean sourceNat, Long associatedNetworkId);

    List<NetworkVO> setupNetwork(Account owner, NetworkOfferingVO offering, DeploymentPlan plan, String name, String displayText, boolean isDefault)
            throws ConcurrentOperationException;

    List<NetworkVO> setupNetwork(Account owner, NetworkOfferingVO offering, Network predefined, DeploymentPlan plan, String name, String displayText, boolean errorIfAlreadySetup, Long domainId,
            ACLType aclType, Boolean subdomainAccess) throws ConcurrentOperationException;

    List<NetworkOfferingVO> getSystemAccountNetworkOfferings(String... offeringNames);

    // Allocates nics for the given VM on the given networks.
    void allocate(VirtualMachineProfile<? extends VMInstanceVO> vm, List<Pair<NetworkVO, NicProfile>> networks) throws InsufficientCapacityException, ConcurrentOperationException;

    // Prepares the VM's networking for deployment to the destination.
    void prepare(VirtualMachineProfile<? extends VMInstanceVO> profile, DeployDestination dest, ReservationContext context) throws InsufficientCapacityException, ConcurrentOperationException,
            ResourceUnavailableException;

    // Releases network resources held by the VM; `forced` releases even on element failure.
    void release(VirtualMachineProfile<? extends VMInstanceVO> vmProfile, boolean forced) throws ConcurrentOperationException, ResourceUnavailableException;

    void cleanupNics(VirtualMachineProfile<? extends VMInstanceVO> vm);

    void expungeNics(VirtualMachineProfile<? extends VMInstanceVO> vm);

    List<? extends Nic> getNics(long vmId);

    List<NicProfile> getNicProfiles(VirtualMachine vm);

    String getNextAvailableMacAddressInNetwork(long networkConfigurationId) throws InsufficientAddressCapacityException;

    // Applies the given firewall rules; `continueOnError` keeps going past individual failures.
    boolean applyRules(List<? extends FirewallRule> rules, boolean continueOnError) throws ResourceUnavailableException;

    public boolean validateRule(FirewallRule rule);

    List<? extends RemoteAccessVPNServiceProvider> getRemoteAccessVpnElements();

    PublicIpAddress getPublicIpAddress(long ipAddressId);

    List<? extends Vlan> listPodVlans(long podId);

    Pair<NetworkGuru, NetworkVO> implementNetwork(long networkId, DeployDestination dest, ReservationContext context) throws ConcurrentOperationException, ResourceUnavailableException,
            InsufficientCapacityException;

    List<NetworkVO> listNetworksUsedByVm(long vmId, boolean isSystem);

    <T extends VMInstanceVO> void prepareNicForMigration(VirtualMachineProfile<T> vm, DeployDestination dest);

    boolean shutdownNetwork(long networkId, ReservationContext context, boolean cleanupElements);

    boolean destroyNetwork(long networkId, ReservationContext context);

    Network createGuestNetwork(long networkOfferingId, String name, String displayText, String gateway, String cidr, String vlanId, String networkDomain, Account owner,
            boolean isSecurityGroupEnabled, Long domainId, PhysicalNetwork physicalNetwork, long zoneId, ACLType aclType, Boolean subdomainAccess)
            throws ConcurrentOperationException, InsufficientCapacityException, ResourceAllocationException;

    /**
     * Associates an ip address list to an account. The list of ip addresses are all addresses associated with the
     * given vlan id.
     *
     * @param userId
     * @param accountId
     * @param zoneId
     * @param vlanId
     * @param guestNetwork
     * @return true on success
     * @throws InsufficientCapacityException
     * @throws ConcurrentOperationException
     * @throws ResourceUnavailableException
     * @throws ResourceAllocationException
     * @throws InsufficientAddressCapacityException
     */
    boolean associateIpAddressListToAccount(long userId, long accountId, long zoneId, Long vlanId, Network guestNetwork) throws InsufficientCapacityException, ConcurrentOperationException,
            ResourceUnavailableException, ResourceAllocationException;

    Nic getNicInNetwork(long vmId, long networkId);

    List<? extends Nic> getNicsForTraffic(long vmId, TrafficType type);

    Network getDefaultNetworkForVm(long vmId);

    Nic getDefaultNic(long vmId);

    List<? extends UserDataServiceProvider> getPasswordResetElements();

    boolean networkIsConfiguredForExternalNetworking(long zoneId, long networkId);

    Map<Capability, String> getNetworkServiceCapabilities(long networkId, Service service);

    boolean applyIpAssociations(Network network, boolean continueOnError) throws ResourceUnavailableException;

    boolean areServicesSupportedByNetworkOffering(long networkOfferingId, Service... services);

    NetworkVO getNetworkWithSecurityGroupEnabled(Long zoneId);

    boolean startNetwork(long networkId, DeployDestination dest, ReservationContext context) throws ConcurrentOperationException, ResourceUnavailableException, InsufficientCapacityException;

    String getIpOfNetworkElementInVirtualNetwork(long accountId, long dataCenterId);

    List<NetworkVO> listNetworksForAccount(long accountId, long zoneId, Network.GuestType type);

    List<NetworkVO> listAllNetworksInAllZonesByType(Network.GuestType type);

    IPAddressVO markIpAsUnavailable(long addrId);

    public String acquireGuestIpAddress(Network network, String requestedIp);

    String getGlobalGuestDomainSuffix();

    String getStartIpAddress(long networkId);

    boolean applyStaticNats(List<? extends StaticNat> staticNats, boolean continueOnError) throws ResourceUnavailableException;

    String getIpInNetwork(long vmId, long networkId);

    String getIpInNetworkIncludingRemoved(long vmId, long networkId);

    Long getPodIdForVlan(long vlanDbId);

    List<Long> listNetworkOfferingsForUpgrade(long networkId);

    PhysicalNetwork translateZoneIdToPhysicalNetwork(long zoneId);

    boolean isSecurityGroupSupportedInNetwork(Network network);

    boolean isProviderSupportServiceInNetwork(long networkId, Service service, Provider provider);

    // NOTE(review): parameter name "physicalNetowrkId" is misspelled in the original
    // declaration; left untouched here since implementers may mirror it.
    boolean isProviderEnabledInPhysicalNetwork(long physicalNetowrkId, String providerName);

    String getNetworkTag(HypervisorType hType, Network network);

    List<Service> getElementServices(Provider provider);

    boolean canElementEnableIndividualServices(Provider provider);

    PhysicalNetworkServiceProvider addDefaultVirtualRouterToPhysicalNetwork(long physicalNetworkId);

    boolean areServicesSupportedInNetwork(long networkId, Service... services);

    boolean isNetworkSystem(Network network);

    boolean reallocate(VirtualMachineProfile<? extends VMInstanceVO> vm, DataCenterDeployment dest) throws InsufficientCapacityException, ConcurrentOperationException;

    Map<Capability, String> getNetworkOfferingServiceCapabilities(NetworkOffering offering, Service service);

    Long getPhysicalNetworkId(Network network);

    boolean getAllowSubdomainAccessGlobal();

    boolean isProviderForNetwork(Provider provider, long networkId);

    boolean isProviderForNetworkOffering(Provider provider, long networkOfferingId);

    void canProviderSupportServices(Map<Provider, Set<Service>> providersMap);

    PhysicalNetworkServiceProvider addDefaultSecurityGroupProviderToPhysicalNetwork(
            long physicalNetworkId);

    List<PhysicalNetworkSetupInfo> getPhysicalNetworkInfo(long dcId, HypervisorType hypervisorType);

    boolean canAddDefaultSecurityGroup();

    List<Service> listNetworkOfferingServices(long networkOfferingId);

    boolean areServicesEnabledInZone(long zoneId, NetworkOffering offering, List<Service> services);

    public Map<PublicIp, Set<Service>> getIpToServices(List<PublicIp> publicIps, boolean rulesRevoked, boolean includingFirewall);

    public Map<Provider, ArrayList<PublicIp>> getProviderToIpList(Network network, Map<PublicIp, Set<Service>> ipToServices);

    public boolean checkIpForService(IPAddressVO ip, Service service);

    void checkVirtualNetworkCidrOverlap(Long zoneId, String cidr);

    void checkCapabilityForProvider(Set<Provider> providers, Service service, Capability cap, String capValue);

    Provider getDefaultUniqueProviderForService(String serviceName);

    IpAddress assignSystemIp(long networkId, Account owner, boolean forElasticLb, boolean forElasticIp) throws InsufficientAddressCapacityException;

    boolean handleSystemIpRelease(IpAddress ip);

    void checkNetworkPermissions(Account owner, Network network);

    void allocateDirectIp(NicProfile nic, DataCenter dc, VirtualMachineProfile<? extends VirtualMachine> vm, Network network, String requestedIp)
            throws InsufficientVirtualNetworkCapcityException, InsufficientAddressCapacityException;

    String getDefaultManagementTrafficLabel(long zoneId, HypervisorType hypervisorType);

    String getDefaultStorageTrafficLabel(long zoneId, HypervisorType hypervisorType);

    String getDefaultPublicTrafficLabel(long dcId, HypervisorType vmware);

    String getDefaultGuestTrafficLabel(long dcId, HypervisorType vmware);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.transforms.windowing;

import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasKey;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFor;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isOneOf;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;

import com.google.common.collect.Iterables;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.beam.sdk.Pipeline.PipelineVisitor;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.Coder.NonDeterministicException;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.io.CountingInput;
import org.apache.beam.sdk.runners.TransformHierarchy;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.ValidatesRunner;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.util.WindowingStrategy.AccumulationMode;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TimestampedValue;
import org.hamcrest.Matchers;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;

/**
 * Tests for {@link Window}: how Window.into/Window.configure build up a
 * {@link WindowingStrategy} (window fn, trigger, accumulation mode, allowed lateness),
 * validation of incomplete configurations, pipeline expansion, and display data.
 */
@RunWith(JUnit4.class)
public class WindowTest implements Serializable {
  // Abandoned-node enforcement is off by default because several tests build a
  // pipeline only to inspect its WindowingStrategy and never run it.
  @Rule
  public final transient TestPipeline pipeline =
      TestPipeline.create().enableAbandonedNodeEnforcement(false);

  @Rule public transient ExpectedException thrown = ExpectedException.none();

  /** Window.into(fn) sets the window fn and leaves trigger/mode at their defaults. */
  @Test
  public void testWindowIntoSetWindowfn() {
    WindowingStrategy<?, ?> strategy =
        pipeline
            .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
            .apply(Window.<String>into(FixedWindows.of(Duration.standardMinutes(10))))
            .getWindowingStrategy();
    assertTrue(strategy.getWindowFn() instanceof FixedWindows);
    assertTrue(strategy.getTrigger() instanceof DefaultTrigger);
    assertEquals(AccumulationMode.DISCARDING_FIRED_PANES, strategy.getMode());
  }

  /** Trigger and accumulation mode supplied alongside into() land on the strategy. */
  @Test
  public void testWindowIntoTriggersAndAccumulating() {
    FixedWindows fixed10 = FixedWindows.of(Duration.standardMinutes(10));
    Repeatedly trigger = Repeatedly.forever(AfterPane.elementCountAtLeast(5));
    WindowingStrategy<?, ?> strategy =
        pipeline
            .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
            .apply(
                Window.<String>into(fixed10)
                    .triggering(trigger)
                    .accumulatingFiredPanes()
                    .withAllowedLateness(Duration.ZERO))
            .getWindowingStrategy();
    assertEquals(fixed10, strategy.getWindowFn());
    assertEquals(trigger, strategy.getTrigger());
    assertEquals(AccumulationMode.ACCUMULATING_FIRED_PANES, strategy.getMode());
  }

  /** Lateness + mode without a trigger is valid; only the specified parts are marked set. */
  @Test
  public void testWindowIntoAccumulatingLatenessNoTrigger() {
    FixedWindows fixed = FixedWindows.of(Duration.standardMinutes(10));
    WindowingStrategy<?, ?> strategy =
        pipeline
            .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
            .apply(
                "Lateness",
                Window.<String>into(fixed)
                    .withAllowedLateness(Duration.standardDays(1))
                    .accumulatingFiredPanes())
            .getWindowingStrategy();
    assertThat(strategy.isTriggerSpecified(), is(false));
    assertThat(strategy.isModeSpecified(), is(true));
    assertThat(strategy.isAllowedLatenessSpecified(), is(true));
    assertThat(strategy.getMode(), equalTo(AccumulationMode.ACCUMULATING_FIRED_PANES));
    assertThat(strategy.getAllowedLateness(), equalTo(Duration.standardDays(1)));
  }

  /** Each strategy component may be set by a separate Window.configure() application. */
  @Test
  public void testWindowPropagatesEachPart() {
    FixedWindows fixed10 = FixedWindows.of(Duration.standardMinutes(10));
    Repeatedly trigger = Repeatedly.forever(AfterPane.elementCountAtLeast(5));
    WindowingStrategy<?, ?> strategy =
        pipeline
            .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
            .apply("Mode", Window.<String>configure().accumulatingFiredPanes())
            .apply(
                "Lateness",
                Window.<String>configure().withAllowedLateness(Duration.standardDays(1)))
            .apply("Trigger", Window.<String>configure().triggering(trigger))
            .apply("Window", Window.<String>into(fixed10))
            .getWindowingStrategy();
    assertEquals(fixed10, strategy.getWindowFn());
    assertEquals(trigger, strategy.getTrigger());
    assertEquals(AccumulationMode.ACCUMULATING_FIRED_PANES, strategy.getMode());
    assertEquals(Duration.standardDays(1), strategy.getAllowedLateness());
  }

  /** A later Window.into replaces the window fn but inherits the earlier allowed lateness. */
  @Test
  public void testWindowIntoPropagatesLateness() {
    FixedWindows fixed10 = FixedWindows.of(Duration.standardMinutes(10));
    FixedWindows fixed25 = FixedWindows.of(Duration.standardMinutes(25));
    WindowingStrategy<?, ?> strategy =
        pipeline
            .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
            .apply(
                "WindowInto10",
                Window.<String>into(fixed10)
                    .withAllowedLateness(Duration.standardDays(1))
                    .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(5)))
                    .accumulatingFiredPanes())
            .apply("WindowInto25", Window.<String>into(fixed25))
            .getWindowingStrategy();
    assertEquals(Duration.standardDays(1), strategy.getAllowedLateness());
    assertEquals(fixed25, strategy.getWindowFn());
  }

  /**
   * With {@link #testWindowIntoNullWindowFnNoAssign()}, demonstrates that the expansions of the
   * {@link Window} transform depends on if it actually assigns elements to windows.
   */
  @Test
  public void testWindowIntoWindowFnAssign() {
    pipeline
        .apply(Create.of(1, 2, 3))
        .apply(
            Window.<Integer>into(
                FixedWindows.of(Duration.standardMinutes(11L).plus(Duration.millis(1L)))));

    // Scan the expanded pipeline for the primitive Window.Assign node.
    final AtomicBoolean foundAssign = new AtomicBoolean(false);
    pipeline.traverseTopologically(
        new PipelineVisitor.Defaults() {
          public void visitPrimitiveTransform(TransformHierarchy.Node node) {
            if (node.getTransform() instanceof Window.Assign) {
              foundAssign.set(true);
            }
          }
        });
    assertThat(foundAssign.get(), is(true));
  }

  /**
   * With {@link #testWindowIntoWindowFnAssign()}, demonstrates that the expansions of the
   * {@link Window} transform depends on if it actually assigns elements to windows.
   */
  @Test
  public void testWindowIntoNullWindowFnNoAssign() {
    pipeline
        .apply(Create.of(1, 2, 3))
        .apply(
            Window.<Integer>configure()
                .triggering(AfterWatermark.pastEndOfWindow())
                .withAllowedLateness(Duration.ZERO)
                .accumulatingFiredPanes());

    // Strategy-only configuration must not expand into a Window.Assign primitive.
    pipeline.traverseTopologically(
        new PipelineVisitor.Defaults() {
          public void visitPrimitiveTransform(TransformHierarchy.Node node) {
            assertThat(node.getTransform(), not(instanceOf(Window.Assign.class)));
          }
        });
  }

  @Test
  public void testWindowGetName() {
    assertEquals(
        "Window.Into()",
        Window.<String>into(FixedWindows.of(Duration.standardMinutes(10))).getName());
  }

  /** Window.into must reject a window fn whose window coder is non-deterministic. */
  @Test
  public void testNonDeterministicWindowCoder() throws NonDeterministicException {
    FixedWindows mockWindowFn = Mockito.mock(FixedWindows.class);
    @SuppressWarnings({"unchecked", "rawtypes"})
    Class<Coder<IntervalWindow>> coderClazz = (Class) Coder.class;
    Coder<IntervalWindow> mockCoder = Mockito.mock(coderClazz);
    when(mockWindowFn.windowCoder()).thenReturn(mockCoder);
    NonDeterministicException toBeThrown =
        new NonDeterministicException(mockCoder, "Its just not deterministic.");
    Mockito.doThrow(toBeThrown).when(mockCoder).verifyDeterministic();

    thrown.expect(IllegalArgumentException.class);
    thrown.expectCause(Matchers.sameInstance(toBeThrown));
    thrown.expectMessage("Window coders must be deterministic");
    Window.into(mockWindowFn);
  }

  /** Setting a trigger without an accumulation mode is rejected. */
  @Test
  public void testMissingMode() {
    FixedWindows fixed10 = FixedWindows.of(Duration.standardMinutes(10));
    Repeatedly trigger = Repeatedly.forever(AfterPane.elementCountAtLeast(5));

    PCollection<String> input =
        pipeline
            .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
            .apply("Window", Window.<String>into(fixed10));

    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("requires that the accumulation mode");
    input.apply(
        "Triggering",
        Window.<String>configure()
            .withAllowedLateness(Duration.standardDays(1))
            .triggering(trigger));
  }

  /** Setting allowed lateness alone also requires the accumulation mode. */
  @Test
  public void testMissingModeViaLateness() {
    FixedWindows fixed = FixedWindows.of(Duration.standardMinutes(10));
    PCollection<String> input =
        pipeline
            .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
            .apply("Window", Window.<String>into(fixed));

    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("allowed lateness");
    thrown.expectMessage("accumulation mode be specified");
    input.apply(
        "Lateness", Window.<String>configure().withAllowedLateness(Duration.standardDays(1)));
  }

  /** Setting a trigger without allowed lateness is rejected. */
  @Test
  public void testMissingLateness() {
    FixedWindows fixed10 = FixedWindows.of(Duration.standardMinutes(10));
    Repeatedly trigger = Repeatedly.forever(AfterPane.elementCountAtLeast(5));

    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("requires that the allowed lateness");
    pipeline
        .apply(Create.of("hello", "world").withCoder(StringUtf8Coder.of()))
        .apply("Mode", Window.<String>configure().accumulatingFiredPanes())
        .apply("Window", Window.<String>into(fixed10))
        .apply("Trigger", Window.<String>configure().triggering(trigger));
  }

  /**
   * A window fn that routes even elements into one window and odd elements into another.
   * Used to check that a strategy-only Window.configure does not re-assign windows.
   */
  private static class WindowOddEvenBuckets extends NonMergingWindowFn<Long, IntervalWindow> {
    private static final IntervalWindow EVEN_WINDOW =
        new IntervalWindow(
            BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp());
    // Odd window ends 1ms earlier so the two windows are distinct.
    private static final IntervalWindow ODD_WINDOW =
        new IntervalWindow(
            BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp().minus(1));

    @Override
    public Collection<IntervalWindow> assignWindows(AssignContext c) throws Exception {
      if (c.element() % 2 == 0) {
        return Collections.singleton(EVEN_WINDOW);
      }
      return Collections.singleton(ODD_WINDOW);
    }

    @Override
    public boolean isCompatible(WindowFn<?, ?> other) {
      return other instanceof WindowOddEvenBuckets;
    }

    @Override
    public Coder<IntervalWindow> windowCoder() {
      return new IntervalWindow.IntervalWindowCoder();
    }

    @Override
    public WindowMappingFn<IntervalWindow> getDefaultWindowMappingFn() {
      throw new UnsupportedOperationException(
          String.format("Can't use %s for side inputs", getClass().getSimpleName()));
    }
  }

  @Test
  @Category(ValidatesRunner.class)
  public void testNoWindowFnDoesNotReassignWindows() {
    pipeline.enableAbandonedNodeEnforcement(true);

    final PCollection<Long> initialWindows =
        pipeline
            .apply(CountingInput.upTo(10L))
            .apply("AssignWindows", Window.into(new WindowOddEvenBuckets()));

    // Sanity check the window assignment to demonstrate the baseline
    PAssert.that(initialWindows)
        .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
        .containsInAnyOrder(0L, 2L, 4L, 6L, 8L);
    PAssert.that(initialWindows)
        .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
        .containsInAnyOrder(1L, 3L, 5L, 7L, 9L);

    PCollection<Boolean> upOne =
        initialWindows.apply(
            "ModifyTypes",
            MapElements.<Long, Boolean>via(
                new SimpleFunction<Long, Boolean>() {
                  @Override
                  public Boolean apply(Long input) {
                    return input % 2 == 0;
                  }
                }));
    PAssert.that(upOne)
        .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
        .containsInAnyOrder(true, true, true, true, true);
    PAssert.that(upOne)
        .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
        .containsInAnyOrder(false, false, false, false, false);

    // The elements should be in the same windows, even though they would not be assigned to the
    // same windows with the updated timestamps. If we try to apply the original WindowFn, the type
    // will not be appropriate and the runner should crash, as a Boolean cannot be converted into
    // a long.
    PCollection<Boolean> updatedTrigger =
        upOne.apply(
            "UpdateWindowingStrategy",
            Window.<Boolean>configure()
                .triggering(Never.ever())
                .withAllowedLateness(Duration.ZERO)
                .accumulatingFiredPanes());
    pipeline.run();
  }

  /**
   * Tests that when two elements are combined via a GroupByKey their output timestamp agrees
   * with the windowing function default, the end of the window.
   */
  @Test
  @Category(ValidatesRunner.class)
  public void testOutputTimeFnDefault() {
    pipeline.enableAbandonedNodeEnforcement(true);

    pipeline
        .apply(
            Create.timestamped(
                TimestampedValue.of(KV.of(0, "hello"), new Instant(0)),
                TimestampedValue.of(KV.of(0, "goodbye"), new Instant(10))))
        .apply(Window.<KV<Integer, String>>into(FixedWindows.of(Duration.standardMinutes(10))))
        .apply(GroupByKey.<Integer, String>create())
        .apply(
            ParDo.of(
                new DoFn<KV<Integer, Iterable<String>>, Void>() {
                  @ProcessElement
                  public void processElement(ProcessContext c) throws Exception {
                    assertThat(
                        c.timestamp(),
                        equalTo(
                            new IntervalWindow(
                                    new Instant(0),
                                    new Instant(0).plus(Duration.standardMinutes(10)))
                                .maxTimestamp()));
                  }
                }));

    pipeline.run();
  }

  /**
   * Tests that when two elements are combined via a GroupByKey their output timestamp agrees
   * with the windowing function customized to use the end of the window.
   */
  @Test
  @Category(ValidatesRunner.class)
  public void testOutputTimeFnEndOfWindow() {
    pipeline.enableAbandonedNodeEnforcement(true);

    pipeline
        .apply(
            Create.timestamped(
                TimestampedValue.of(KV.of(0, "hello"), new Instant(0)),
                TimestampedValue.of(KV.of(0, "goodbye"), new Instant(10))))
        .apply(
            Window.<KV<Integer, String>>into(FixedWindows.of(Duration.standardMinutes(10)))
                .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow()))
        .apply(GroupByKey.<Integer, String>create())
        .apply(
            ParDo.of(
                new DoFn<KV<Integer, Iterable<String>>, Void>() {
                  @ProcessElement
                  public void processElement(ProcessContext c) throws Exception {
                    // 10-minute window starting at epoch: maxTimestamp is end - 1ms.
                    assertThat(c.timestamp(), equalTo(new Instant(10 * 60 * 1000 - 1)));
                  }
                }));
    pipeline.run();
  }

  /** All configured strategy components appear in the transform's display data. */
  @Test
  public void testDisplayData() {
    FixedWindows windowFn = FixedWindows.of(Duration.standardHours(5));
    AfterWatermark.FromEndOfWindow triggerBuilder = AfterWatermark.pastEndOfWindow();
    Duration allowedLateness = Duration.standardMinutes(10);
    Window.ClosingBehavior closingBehavior = Window.ClosingBehavior.FIRE_IF_NON_EMPTY;
    OutputTimeFn<BoundedWindow> outputTimeFn = OutputTimeFns.outputAtEndOfWindow();

    Window<?> window =
        Window.into(windowFn)
            .triggering(triggerBuilder)
            .accumulatingFiredPanes()
            .withAllowedLateness(allowedLateness, closingBehavior)
            .withOutputTimeFn(outputTimeFn);

    DisplayData displayData = DisplayData.from(window);

    assertThat(displayData, hasDisplayItem("windowFn", windowFn.getClass()));
    assertThat(displayData, includesDisplayDataFor("windowFn", windowFn));

    assertThat(displayData, hasDisplayItem("trigger", triggerBuilder.toString()));
    assertThat(
        displayData,
        hasDisplayItem("accumulationMode", AccumulationMode.ACCUMULATING_FIRED_PANES.toString()));
    assertThat(displayData, hasDisplayItem("allowedLateness", allowedLateness));
    assertThat(displayData, hasDisplayItem("closingBehavior", closingBehavior.toString()));
    assertThat(displayData, hasDisplayItem("outputTimeFn", outputTimeFn.getClass()));
  }

  /** Same as {@link #testDisplayData()} but against the expanded primitive transform. */
  @Test
  @Category(ValidatesRunner.class)
  public void testPrimitiveDisplayData() {
    FixedWindows windowFn = FixedWindows.of(Duration.standardHours(5));
    AfterWatermark.FromEndOfWindow triggerBuilder = AfterWatermark.pastEndOfWindow();
    Duration allowedLateness = Duration.standardMinutes(10);
    Window.ClosingBehavior closingBehavior = Window.ClosingBehavior.FIRE_IF_NON_EMPTY;
    OutputTimeFn<BoundedWindow> outputTimeFn = OutputTimeFns.outputAtEndOfWindow();

    Window<?> window =
        Window.into(windowFn)
            .triggering(triggerBuilder)
            .accumulatingFiredPanes()
            .withAllowedLateness(allowedLateness, closingBehavior)
            .withOutputTimeFn(outputTimeFn);

    DisplayData primitiveDisplayData =
        Iterables.getOnlyElement(
            DisplayDataEvaluator.create().displayDataForPrimitiveTransforms(window));

    assertThat(primitiveDisplayData, hasDisplayItem("windowFn", windowFn.getClass()));
    assertThat(primitiveDisplayData, includesDisplayDataFor("windowFn", windowFn));

    assertThat(primitiveDisplayData, hasDisplayItem("trigger", triggerBuilder.toString()));
    assertThat(
        primitiveDisplayData,
        hasDisplayItem("accumulationMode", AccumulationMode.ACCUMULATING_FIRED_PANES.toString()));
    assertThat(primitiveDisplayData, hasDisplayItem("allowedLateness", allowedLateness));
    assertThat(primitiveDisplayData, hasDisplayItem("closingBehavior", closingBehavior.toString()));
    assertThat(primitiveDisplayData, hasDisplayItem("outputTimeFn", outputTimeFn.getClass()));
  }

  /** Window.Assign exposes only the window fn in display data, not the strategy extras. */
  @Test
  public void testAssignDisplayDataUnchanged() {
    FixedWindows windowFn = FixedWindows.of(Duration.standardHours(5));

    Window<Object> original = Window.into(windowFn);
    WindowingStrategy<?, ?> updated = WindowingStrategy.globalDefault().withWindowFn(windowFn);

    DisplayData displayData = DisplayData.from(new Window.Assign<>(original, updated));

    assertThat(displayData, hasDisplayItem("windowFn", windowFn.getClass()));
    assertThat(displayData, includesDisplayDataFor("windowFn", windowFn));

    assertThat(displayData, not(hasDisplayItem("trigger")));
    assertThat(displayData, not(hasDisplayItem("accumulationMode")));
    assertThat(displayData, not(hasDisplayItem("allowedLateness")));
    assertThat(displayData, not(hasDisplayItem("closingBehavior")));
    assertThat(displayData, not(hasDisplayItem("outputTimeFn")));
  }

  @Test
  public void testDisplayDataExcludesUnspecifiedProperties() {
    Window<?> onlyHasAccumulationMode = Window.<Object>configure().discardingFiredPanes();
    assertThat(
        DisplayData.from(onlyHasAccumulationMode),
        not(
            hasDisplayItem(
                hasKey(
                    isOneOf(
                        "windowFn",
                        "trigger",
                        "outputTimeFn",
                        "allowedLateness",
                        "closingBehavior")))));

    Window<?> noAccumulationMode = Window.into(new GlobalWindows());
    assertThat(
        DisplayData.from(noAccumulationMode), not(hasDisplayItem(hasKey("accumulationMode"))));
  }

  @Test
  public void testDisplayDataExcludesDefaults() {
    // Default trigger and maximal lateness should be omitted from display data.
    Window<?> window =
        Window.into(new GlobalWindows())
            .triggering(DefaultTrigger.of())
            .withAllowedLateness(Duration.millis(BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()));

    DisplayData data = DisplayData.from(window);
    assertThat(data, not(hasDisplayItem("trigger")));
    assertThat(data, not(hasDisplayItem("allowedLateness")));
  }
}
package uk.gov.dvsa.motr.config;

import org.openqa.selenium.Platform;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Properties;

/**
 * Central test configuration.
 *
 * <p>On class load, properties are assembled in three layers (later layers win):
 * <ol>
 *   <li>classpath defaults for environment and driver settings;</li>
 *   <li>optional override files named by the {@code SELENIUM_ENV_PROPERTIES} /
 *       {@code SELENIUM_DRIVER_PROPERTIES} environment variables;</li>
 *   <li>JVM system properties, but only for keys that already exist in the loaded set.</li>
 * </ol>
 */
public abstract class Configurator {

    private static final String SELENIUM_DRIVER_PROPERTIES = "SELENIUM_DRIVER_PROPERTIES";
    private static final String SELENIUM_ENV_PROPERTIES = "SELENIUM_ENV_PROPERTIES";
    private static final String DEFAULT_SELENIUM_DRIVER_PROPERTIES_FILE_PATH =
            "/selenium/driver/default.properties";
    private static final String DEFAULT_SELENIUM_ENV_PROPERTIES_FILE_PATH =
            "/selenium/environment/default.properties";

    private static Properties props;

    private static final int defaultDriverTimeout = 20;

    // NOTE(review): SimpleDateFormat is not thread-safe; this is shared mutable state.
    // Safe only if screenshots are taken from a single thread — verify before parallelising.
    public static SimpleDateFormat screenshotDateFormat = new SimpleDateFormat("yyyyMMdd-HHmmss");

    static {
        props = new Properties();
        loadEnvironmentPropertiesFromFile();
        loadBrowserPropertiesFromFile();
        overrideWithSystemProperties();
        // Dump the effective configuration for debugging test runs.
        props.forEach((key, val) -> System.out.println("Key: " + key + ", Val: " + val));
    }

    private static void loadEnvironmentPropertiesFromFile() {
        loadPropertiesFromFile(DEFAULT_SELENIUM_ENV_PROPERTIES_FILE_PATH, SELENIUM_ENV_PROPERTIES);
    }

    private static void loadBrowserPropertiesFromFile() {
        loadPropertiesFromFile(DEFAULT_SELENIUM_DRIVER_PROPERTIES_FILE_PATH, SELENIUM_DRIVER_PROPERTIES);
    }

    /**
     * Overlays system properties on top of file-based ones. Only keys that already exist in
     * {@link #props} are overridden; unknown system properties are ignored.
     */
    private static void overrideWithSystemProperties() {
        System.getProperties().forEach((key, val) -> {
            // String.valueOf avoids a ClassCastException on the (rare) non-String property value.
            boolean valIsNotEmpty = val != null && !String.valueOf(val).trim().isEmpty();
            if (valIsNotEmpty && props.containsKey(key)) {
                props.put(key, val);
            }
        });
    }

    public enum SeleniumGrid {
        NONE, SELENIUM, BROWSERSTACK
    }

    /**
     * Load properties from the properties file specified unless the envVariableOverride parameter is specified, in
     * which case load the properties from the file specified in the environment variable.
     *
     * @param defaultPropertiesFilePath Properties file to be loaded
     * @param envVariableOverride Specifies the environment variable name that contains a property file to be loaded in preference
     * @param ignoreNotFound skips this resource if not found
     */
    private static void loadPropertiesFromFile(String defaultPropertiesFilePath, String envVariableOverride,
            boolean ignoreNotFound) {

        String customPropertiesFilePath = System.getenv(envVariableOverride);
        boolean useCustomProperties =
                customPropertiesFilePath != null && !customPropertiesFilePath.trim().isEmpty();

        // try-with-resources guarantees both streams are closed even when load() fails
        // (the original closed them only on the success path).
        try (InputStream propsStream = Configurator.class.getResourceAsStream(defaultPropertiesFilePath)) {
            if (propsStream == null) {
                // getResourceAsStream signals "missing" with null rather than an exception;
                // surface a descriptive error instead of the opaque NPE from props.load(null).
                throw new FileNotFoundException(defaultPropertiesFilePath + " not found on classpath");
            }
            props.load(propsStream);

            if (useCustomProperties) {
                try (InputStream overridePropsStream = new FileInputStream(customPropertiesFilePath)) {
                    props.load(overridePropsStream);
                }
            }
        } catch (Exception ex) {
            if (!ignoreNotFound) {
                // Cause is preserved on the rethrow; no need to also printStackTrace().
                throw new RuntimeException(
                        "Problem loading test properties file [" + ex.getMessage() + "]. Is "
                                + (useCustomProperties
                                        ? (customPropertiesFilePath + " a valid file?")
                                        : (defaultPropertiesFilePath + " on the classpath?")),
                        ex);
            }
        }
    }

    private static void loadPropertiesFromFile(String defaultPropertiesFilePath, String envVariableOverride) {
        loadPropertiesFromFile(defaultPropertiesFilePath, envVariableOverride, false);
    }

    /**
     * Return a property
     *
     * @param key The name of the property to be returned
     * @return Value of property or null if the key does not exist
     */
    protected static String getProp(String key) {
        return getProp(key, null);
    }

    /**
     * Return a property, using a default value if it is not set
     *
     * @param key The name of the property to be returned
     * @param defaultValue The default value to return if the property is not set
     * @return Trimmed value of property (or the default) or null if neither is set
     */
    protected static String getProp(String key, String defaultValue) {
        String s = props.getProperty(key, defaultValue);
        return (s != null) ? s.trim() : null;
    }

    public static String baseUrl() {
        return getProp("test.baseUrl");
    }

    public static String dynamoDbRegion() {
        return getProp("test.dynamoDb.region");
    }

    public static String dynamoDbTableName() {
        return getProp("test.dynamoDb.table.name");
    }

    public static String dynamoDbTablePendingName() {
        return getProp("test.dynamoDb.pendingTable.name");
    }

    public static String dynamoDbTableSmsConfirmName() {
        return getProp("test.dynamoDb.smsConfirmationTable.name");
    }

    public String getChromeDriverPath() {
        return getProp("test.chromeDriverPath");
    }

    /**
     * @return which grid to run against; defaults to {@link SeleniumGrid#NONE} when
     *     {@code test.gridEnabled} is unset or unrecognised (the original NPE'd on a missing key).
     */
    public SeleniumGrid getGridStatus() {
        switch (String.valueOf(getProp("test.gridEnabled"))) {
            case "selenium":
                return SeleniumGrid.SELENIUM;
            case "browserstack":
                return SeleniumGrid.BROWSERSTACK;
            default:
                return SeleniumGrid.NONE;
        }
    }

    public String getGridUrl() {
        return getProp("test.gridUrl");
    }

    public int getDefaultDriverTimeout() {
        return defaultDriverTimeout;
    }

    public static boolean isErrorScreenshotEnabled() {
        return "yes".equalsIgnoreCase(getProp("test.screenshots.error.enabled"));
    }

    /**
     * @return the screenshot folder; the system property takes precedence even when the key is
     *     absent from the loaded files (unlike the generic override, which only replaces known keys).
     */
    public static String getErrorScreenshotPath() {
        String errorFolder = System.getProperty("test.screenshots.error.folder");
        return errorFolder != null
                ? errorFolder
                : getProp("test.screenshots.error.folder", "/tmp/selenium-screenshots");
    }

    public boolean getJavascriptStatus() {
        return "yes".equalsIgnoreCase(getProp("test.javascript.enabled"));
    }

    public static String getBuildNumber() {
        String buildNumber = System.getenv("BUILD_NUMBER");
        return buildNumber != null ? buildNumber : "";
    }

    /**
     * Get the desired Platform from properties file to use in Grid
     *
     * @return Platform - the matching value from the Platform class, or null when
     *     {@code test.platform} is unset or unrecognised (the original NPE'd on a missing key)
     */
    public Platform getPlatform() {
        switch (String.valueOf(getProp("test.platform"))) {
            case "windows":
                return Platform.WINDOWS;
            case "android":
                return Platform.ANDROID;
            case "linux":
                return Platform.LINUX;
            case "mac":
                return Platform.MAC;
            default:
                return null;
        }
    }

    public String getOs() {
        return getProp("test.os");
    }

    public String getOsVersion() {
        return getProp("test.osVersion");
    }

    /** @return the browser enum for {@code test.browserName}, or null when unset/unrecognised. */
    public Browser getBrowser() {
        switch (String.valueOf(getProp("test.browserName")).toLowerCase()) {
            case "firefox":
                return Browser.FIREFOX;
            case "chrome":
                return Browser.CHROME;
            case "safari":
                return Browser.SAFARI;
            case "ie":
                return Browser.IE;
            case "ipad":
                return Browser.IPAD;
            case "iphone":
                return Browser.IPHONE;
            case "android":
                return Browser.ANDROID;
            default:
                return null;
        }
    }

    public String getBrowserVersion() {
        return getProp("test.browserVersion");
    }

    public String getResolution() {
        return getProp("test.resolution");
    }

    public String getDevice() {
        return getProp("test.device");
    }

    public String getDeviceOrientation() {
        return getProp("test.deviceOrientation");
    }
}
package com.project.pebblevote;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.support.v4.app.FragmentActivity;
import android.os.Bundle;
import android.util.Log;

import com.android.volley.RequestQueue;
import com.android.volley.toolbox.Volley;
import com.getpebble.android.kit.PebbleKit;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.GoogleApiClient.ConnectionCallbacks;
import com.google.android.gms.common.api.GoogleApiClient.OnConnectionFailedListener;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.drive.Drive;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.location.places.PlaceLikelihood;
import com.google.android.gms.location.places.PlaceLikelihoodBuffer;
import com.google.android.gms.location.places.Places;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.ProtocolException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;

/**
 * Map screen: shows a Google Map, connects a GoogleApiClient (Drive API), listens for GPS
 * location updates, and registers Pebble connect/disconnect receivers. Also contains two
 * AsyncTasks for fetching a server health check and a list of voteable locations.
 *
 * NOTE(review): several imports (PackageManager, LocationServices, HashMap, Map, Logger,
 * ConnectionResult is used) appear unused — candidates for cleanup.
 */
public class MapsActivity extends FragmentActivity
        implements ConnectionCallbacks, OnConnectionFailedListener {

    private GoogleMap mMap; // Might be null if Google Play services APK is not available.
    private GoogleApiClient mGoogleApiClient;
    private static final String TAG = "MyActivity";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_maps);
        setUpMapIfNeeded();
        // NOTE(review): the client is built with the Drive API but onConnected() calls the
        // Places PlaceDetectionApi — presumably Places.PLACE_DETECTION_API should be added here.
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addApi(Drive.API)
                .addScope(Drive.SCOPE_FILE)
                .build();
    }

    @Override
    public void onConnected(Bundle connectionHint) {
        // Query the most likely current places and log each with its likelihood.
        PendingResult<PlaceLikelihoodBuffer> result = Places.PlaceDetectionApi
                .getCurrentPlace(mGoogleApiClient, null);
        result.setResultCallback(new ResultCallback<PlaceLikelihoodBuffer>() {
            @Override
            public void onResult(PlaceLikelihoodBuffer likelyPlaces) {
                for (PlaceLikelihood placeLikelihood : likelyPlaces) {
                    Log.i(TAG, String.format("Place '%s' has likelihood: %g",
                            placeLikelihood.getPlace().getName(),
                            placeLikelihood.getLikelihood()));
                }
                // Buffers must be released to avoid leaking the underlying resources.
                likelyPlaces.release();
            }
        });
    }

    @Override
    public void onConnectionSuspended(int cause) {
        // The connection has been interrupted.
        // Disable any UI components that depend on Google APIs
        // until onConnected() is called.
    }

    @Override
    public void onConnectionFailed(ConnectionResult result) {
        // This callback is important for handling errors that
        // may occur while attempting to connect with Google.
        //
        // More about this in the 'Handle Connection Failures' section.
        Log.i(TAG, String.format("Connection Failed :("));
        //new FetchServerHealth().execute();
    }

    @Override
    protected void onResume() {
        super.onResume();
        setUpMapIfNeeded();
    }

    /**
     * Sets up the map if it is possible to do so (i.e., the Google Play services APK is correctly
     * installed) and the map has not already been instantiated.. This will ensure that we only ever
     * call {@link #setUpMap()} once when {@link #mMap} is not null.
     * <p/>
     * If it isn't installed {@link SupportMapFragment} (and
     * {@link com.google.android.gms.maps.MapView MapView}) will show a prompt for the user to
     * install/update the Google Play services APK on their device.
     * <p/>
     * A user can return to this FragmentActivity after following the prompt and correctly
     * installing/updating/enabling the Google Play services. Since the FragmentActivity may not
     * have been completely destroyed during this process (it is likely that it would only be
     * stopped or paused), {@link #onCreate(Bundle)} may not be called again so we should call this
     * method in {@link #onResume()} to guarantee that it will be called.
     */
    private void setUpMapIfNeeded() {
        // Do a null check to confirm that we have not already instantiated the map.
        if (mMap == null) {
            // Try to obtain the map from the SupportMapFragment.
            mMap = ((SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map))
                    .getMap();
            // Check if we were successful in obtaining the map.
            if (mMap != null) {
                setUpMap();
            }
        }
    }

    /**
     * This is where we can add markers or lines, add listeners or move the camera. In this case, we
     * just add a marker near Africa.
     * <p/>
     * This should only be called once and when we are sure that {@link #mMap} is not null.
     */
    private void setUpMap() {
        mMap.addMarker(new MarkerOptions().position(new LatLng(0, 0)).title("Marker"));
    }

    @Override
    protected void onStart() {
        super.onStart();

        // Log whether a Pebble watch is currently paired and connected.
        boolean connected = PebbleKit.isWatchConnected(getApplicationContext());
        Log.i(getLocalClassName(), "Pebble is " + (connected ? "connected" : "not connected"));

        LocationManager locationManager =
                (LocationManager) getSystemService(Context.LOCATION_SERVICE);

        LocationListener locationListener = new LocationListener() {
            @Override
            public void onLocationChanged(Location loc) {
                //HERE YOU CAN UPDATE THE LOCATION WHEN AS YOU MOVE
                String longitude = "Longitude: " + loc.getLongitude();
                Log.v(TAG, longitude);
                String latitude = "Latitude: " + loc.getLatitude();
                Log.v(TAG, latitude);
            }

            @Override
            public void onStatusChanged(String provider, int status, Bundle extras) {}

            @Override
            public void onProviderEnabled(String provider) {}

            @Override
            public void onProviderDisabled(String provider) {}
        };

        try {
            // NOTE(review): isGPSEnabled is computed but never consulted before requesting updates.
            boolean isGPSEnabled = locationManager
                    .isProviderEnabled(LocationManager.GPS_PROVIDER);
            locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
                    5000, 10, locationListener); //CHANGE THIS INTERVAL FOR MORE FREQUENT UPDATES
            // NOTE(review): getLastKnownLocation can return null; the NPE below is only caught by
            // the broad catch. Location permission handling is also absent — TODO confirm targets
            // pre-runtime-permission API levels.
            Location location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
            //COULD REMOVE IN YOU WANT THIS IS ONLY FOR TESTING...
            Log.v(TAG, "=================" + location.getLatitude());
            Log.v(TAG, "=================" + location.getLongitude());
        } catch (Exception e) {
            e.printStackTrace();
        }

        // Log Pebble connect/disconnect events for the lifetime of the app context.
        // NOTE(review): these receivers are registered every onStart() and never unregistered.
        PebbleKit.registerPebbleConnectedReceiver(getApplicationContext(), new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                Log.i(getLocalClassName(), "Pebble connected!");
            }
        });

        PebbleKit.registerPebbleDisconnectedReceiver(getApplicationContext(), new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                Log.i(getLocalClassName(), "Pebble disconnected!");
            }
        });

        // if (!mResolvingError) {
        //     mGoogleApiClient.connect();
        //}
    }

    @Override
    protected void onStop() {
        // NOTE(review): disconnect() is called but connect() is commented out in onStart().
        mGoogleApiClient.disconnect();
        super.onStop();
    }

    /**
     * Fetches the server health-check endpoint and returns the raw response body, or null on
     * failure / empty response.
     *
     * NOTE(review): currently unused except for the commented-out call in onConnectionFailed().
     */
    private class FetchServerHealth extends AsyncTask<Void, Void, String> {

        @Override
        protected String doInBackground(Void... params) {
            Uri.Builder uri = null;
            URL url = null;
            try {
                uri = new Uri.Builder();
                uri.scheme("http");
                // NOTE(review): an authority should not contain a path separator — the trailing
                // "/" here is suspicious; verify the resulting URL.
                uri.authority("pebblevote.herokuapp.com/");
                uri.appendPath("_ah");
                uri.appendPath("health");
                // NOTE(review): build() returns a new Uri that is discarded; the URL below is
                // constructed from the Builder's toString() — confirm this yields the intended URL.
                uri.build();
                url = new URL(uri.toString());
            } catch (Exception e) {
                e.printStackTrace();
            }

            HttpURLConnection urlConnection = null;
            BufferedReader reader = null;
            String result = null;
            try {
                // NOTE(review): if URL construction above failed, url is null and this NPEs
                // (caught only as a stack trace via the IOException/ProtocolException handlers? —
                // actually an NPE here would propagate; verify).
                urlConnection = (HttpURLConnection) url.openConnection();
                urlConnection.setRequestMethod("GET");
                urlConnection.connect();

                InputStream inputStream = urlConnection.getInputStream();
                StringBuffer buffer = new StringBuffer();
                if (inputStream == null) {
                    // Nothing to do.
                    return null;
                }
                reader = new BufferedReader(new InputStreamReader(inputStream));

                String line;
                while ((line = reader.readLine()) != null) {
                    // Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
                    // But it does make debugging a *lot* easier if you print out the completed
                    // buffer for debugging.
                    buffer.append(line + "\n");
                }

                if (buffer.length() == 0) {
                    // Stream was empty. No point in parsing.
                    return null;
                }
                result = buffer.toString();
            } catch (ProtocolException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (urlConnection != null) {
                    //always disconnect and close
                    urlConnection.disconnect();
                }
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (final IOException e) {
                        // Intentionally ignored: nothing useful to do if close() fails.
                    }
                }
            }
            return result;
        }
    }

    /**
     * We will just call the api end point and get a list of data, 10 of them to be precise
     */
    private class FetchLocationList extends AsyncTask<LatLng, Void, List<LocationModel>> {

        protected List<LocationModel> doInBackground(LatLng... location) {
            LatLng firstLocation = location[0];
            Uri.Builder uri = null;
            URL url = null;
            try {
                uri = new Uri.Builder();
                uri.scheme("http");
                // NOTE(review): authority points at openweathermap while the class claims to
                // fetch vote locations, and appendPath("") adds an empty segment — looks like
                // placeholder endpoint wiring; verify before use.
                uri.authority("api.openweathermap.org");
                uri.appendPath("");
                uri.appendQueryParameter("latitude", String.valueOf(firstLocation.latitude));
                uri.appendQueryParameter("longitude", String.valueOf(firstLocation.longitude));
                // NOTE(review): build() result discarded, same pattern as FetchServerHealth.
                uri.build();
                url = new URL(uri.toString());
            } catch (Exception e) {
                e.printStackTrace();
            }

            HttpURLConnection urlConnection = null;
            BufferedReader reader = null;
            String result = null;
            try {
                urlConnection = (HttpURLConnection) url.openConnection();
                urlConnection.setRequestMethod("GET");
                urlConnection.connect();

                InputStream inputStream = urlConnection.getInputStream();
                StringBuffer buffer = new StringBuffer();
                if (inputStream == null) {
                    // Nothing to do.
                    return null;
                }
                reader = new BufferedReader(new InputStreamReader(inputStream));

                String line;
                while ((line = reader.readLine()) != null) {
                    // Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
                    // But it does make debugging a *lot* easier if you print out the completed
                    // buffer for debugging.
                    buffer.append(line + "\n");
                }

                if (buffer.length() == 0) {
                    // Stream was empty. No point in parsing.
                    return null;
                }
                result = buffer.toString();
            } catch (ProtocolException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (urlConnection != null) {
                    //always disconnect and close
                    urlConnection.disconnect();
                }
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (final IOException e) {
                        // Intentionally ignored: nothing useful to do if close() fails.
                    }
                }
            }
            return getLocationModelFromJson(result);
        }

        /**
         * Parses a JSON array of location objects into LocationModel instances.
         * Returns null when the payload is not valid JSON (and prints the stack trace).
         */
        private List<LocationModel> getLocationModelFromJson(String JSON) {
            try {
                JSONArray jsonLocationModelList = new JSONArray(JSON);
                List<LocationModel> toReturn = new ArrayList<>();
                for (int i = 0; i < jsonLocationModelList.length(); i++) {
                    LocationModel model = new LocationModel();
                    JSONObject e = jsonLocationModelList.getJSONObject(i);
                    model.setName(e.getString(LocationModel.LM_NAME));
                    model.setNeightborhood(e.getString(LocationModel.LM_BOROUGH));
                    model.setLatitude(e.getDouble(LocationModel.LM_LATITUDE));
                    model.setLongitude(e.getDouble(LocationModel.LM_LONGITUDE));
                    model.setUpVotes(e.getInt(LocationModel.LM_UPVOTE));
                    model.setDownVotes(e.getInt(LocationModel.LM_DOWNVOTE));
                    toReturn.add(model);
                }
                return toReturn;
            } catch (JSONException e) {
                e.printStackTrace();
                return null;
            }
        }
    }

    // NOTE(review): unimplemented stub — builds a request queue and an empty URL but sends nothing.
    private void UpVoteContent(String location) {
        RequestQueue queue = Volley.newRequestQueue(this);
        String url = "";
    }
}
/*
 * Copyright 2014-2016 Amazon Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *    http://aws.amazon.com/apache2.0
 *
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
 * OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amazonaws.services.dynamodbv2.datamodeling;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

import org.junit.Test;

/**
 * Unit test on reflecting domain classes with getter or field annotations. It
 * also tests the scenario when annotated properties are inherited from the
 * superclass.
 *
 * <p>Each of the three POJO variants below (getter-annotated, field-annotated,
 * and mixed) describes the same logical table model, so they are all validated
 * against the single {@link #validateModel(Class)} helper and the shared
 * {@link #expectedAttributeNames} map.</p>
 */
public class PojoReflectionTest {

    /**
     * Tests reflecting a model class that uses getter annotations.
     */
    @Test
    public void testGetterAnnotations() {
        validateModel(PojoWithGetterAnnotations.class);
    }

    /**
     * Tests reflecting a model class that uses field annotations.
     */
    @Test
    public void testFieldAnnotations() {
        validateModel(PojoWithFieldAnnotations.class);
    }

    /**
     * Tests reflecting a model class that uses both getter and field
     * annotations.
     */
    @Test
    public void testMixedAnnotations() {
        validateModel(PojoWithMixedAnnotations.class);
    }

    /**
     * Validates that the reflected information from the POJO class matches the
     * model defined in both PojoWithGetterAnnotations and
     * PojoWithFieldAnnotations.
     *
     * @param clazz the annotated model class to reflect via the mappings registry
     */
    private void validateModel(Class<?> clazz) {
        final DynamoDBMappingsRegistry.Mappings mappings = DynamoDBMappingsRegistry.instance().mappingsOf(clazz);

        // There should be 7 relevant getters (ignoredAttr is excluded)
        assertEquals(7, mappings.getMappings().size());

        for (final DynamoDBMappingsRegistry.Mapping m : mappings.getMappings()) {
            // Check that getAttributeName returns the expected attribute name
            assertEquals(
                    expectedAttributeNames.get(m.getter().getName()),
                    m.getAttributeName());

            // @DynamoDBVersionAttribute
            if (m.getter().getName().equals("getVersionedAttr")) {
                assertTrue(m.isVersion());
            }
        }

        // Key getters
        assertEquals("getHashKey", mappings.getHashKey().getter().getName());
        assertEquals("hashKey", mappings.getHashKey().getAttributeName());
        assertEquals("getAutogeneratedRangeKey", mappings.getRangeKey().getter().getName());
        assertEquals("autogeneratedRangeKey", mappings.getRangeKey().getAttributeName());
    }

    /**
     * A POJO model that uses getter annotations.
     */
    @DynamoDBTable(tableName="table")
    private static class PojoWithGetterAnnotations {

        private String hashKey;
        private String autogeneratedRangeKey;
        private String indexHashKey;
        private String indexRangeKey;
        private String attrWithAttrAnnotation;
        private String versionedAttr;
        private String customMarshallingAttr;
        private String ignoredAttr;

        @DynamoDBHashKey
        public String getHashKey() { return hashKey; }
        public void setHashKey(String hashKey) { this.hashKey = hashKey; }

        @DynamoDBRangeKey
        @DynamoDBAutoGeneratedKey
        public String getAutogeneratedRangeKey() { return autogeneratedRangeKey; }
        public void setAutogeneratedRangeKey(String autogeneratedRangeKey) { this.autogeneratedRangeKey = autogeneratedRangeKey; }

        @DynamoDBIndexHashKey(globalSecondaryIndexName="index")
        public String getIndexHashKey() { return indexHashKey; }
        public void setIndexHashKey(String indexHashKey) { this.indexHashKey = indexHashKey; }

        @DynamoDBIndexRangeKey(globalSecondaryIndexName="index")
        public String getIndexRangeKey() { return indexRangeKey; }
        public void setIndexRangeKey(String indexRangeKey) { this.indexRangeKey = indexRangeKey; }

        @DynamoDBAttribute(attributeName="real-attribute-name")
        public String getAttrWithAttrAnnotation() { return attrWithAttrAnnotation; }
        public void setAttrWithAttrAnnotation(String attrWithAttrAnnotation) { this.attrWithAttrAnnotation = attrWithAttrAnnotation; }

        @DynamoDBVersionAttribute
        public String getVersionedAttr() { return versionedAttr; }
        public void setVersionedAttr(String versionedAttr) { this.versionedAttr = versionedAttr; }

        @DynamoDBMarshalling(marshallerClass=RandomUUIDMarshaller.class)
        public String getCustomMarshallingAttr() { return customMarshallingAttr; }
        public void setCustomMarshallingAttr(String customMarshallingAttr) { this.customMarshallingAttr = customMarshallingAttr; }

        @DynamoDBIgnore
        public String getIgnoredAttr() { return ignoredAttr; }
        public void setIgnoredAttr(String ignoredAttr) { this.ignoredAttr = ignoredAttr; }
    }

    /**
     * The same model as defined in PojoWithGetterAnnotations, but uses field
     * annotations instead.
     */
    @DynamoDBTable(tableName="table")
    private static class PojoWithFieldAnnotations {

        @DynamoDBHashKey
        private String hashKey;

        @DynamoDBRangeKey
        @DynamoDBAutoGeneratedKey
        private String autogeneratedRangeKey;

        @DynamoDBIndexHashKey(globalSecondaryIndexName="index")
        private String indexHashKey;

        @DynamoDBIndexRangeKey(globalSecondaryIndexName="index")
        private String indexRangeKey;

        @DynamoDBAttribute(attributeName="real-attribute-name")
        private String attrWithAttrAnnotation;

        @DynamoDBVersionAttribute
        private String versionedAttr;

        @DynamoDBMarshalling(marshallerClass=RandomUUIDMarshaller.class)
        private String customMarshallingAttr;

        @DynamoDBIgnore
        private String ignoredAttr;

        public String getHashKey() { return hashKey; }
        public void setHashKey(String hashKey) { this.hashKey = hashKey; }

        public String getAutogeneratedRangeKey() { return autogeneratedRangeKey; }
        public void setAutogeneratedRangeKey(String autogeneratedRangeKey) { this.autogeneratedRangeKey = autogeneratedRangeKey; }

        public String getIndexHashKey() { return indexHashKey; }
        public void setIndexHashKey(String indexHashKey) { this.indexHashKey = indexHashKey; }

        public String getIndexRangeKey() { return indexRangeKey; }
        public void setIndexRangeKey(String indexRangeKey) { this.indexRangeKey = indexRangeKey; }

        public String getAttrWithAttrAnnotation() { return attrWithAttrAnnotation; }
        public void setAttrWithAttrAnnotation(String attrWithAttrAnnotation) { this.attrWithAttrAnnotation = attrWithAttrAnnotation; }

        public String getVersionedAttr() { return versionedAttr; }
        public void setVersionedAttr(String versionedAttr) { this.versionedAttr = versionedAttr; }

        public String getCustomMarshallingAttr() { return customMarshallingAttr; }
        public void setCustomMarshallingAttr(String customMarshallingAttr) { this.customMarshallingAttr = customMarshallingAttr; }

        public String getIgnoredAttr() { return ignoredAttr; }
        public void setIgnoredAttr(String ignoredAttr) { this.ignoredAttr = ignoredAttr; }
    }

    /**
     * The same model as defined in PojoWithGetterAnnotations, but uses both getter and field
     * annotations.
     */
    @DynamoDBTable(tableName="table")
    private static class PojoWithMixedAnnotations {

        @DynamoDBHashKey
        private String hashKey;

        private String autogeneratedRangeKey;

        @DynamoDBIndexHashKey(globalSecondaryIndexName="index")
        private String indexHashKey;

        private String indexRangeKey;

        @DynamoDBAttribute(attributeName="real-attribute-name")
        private String attrWithAttrAnnotation;

        private String versionedAttr;

        @DynamoDBMarshalling(marshallerClass=RandomUUIDMarshaller.class)
        private String customMarshallingAttr;

        private String ignoredAttr;

        public String getHashKey() { return hashKey; }
        public void setHashKey(String hashKey) { this.hashKey = hashKey; }

        @DynamoDBRangeKey
        @DynamoDBAutoGeneratedKey
        public String getAutogeneratedRangeKey() { return autogeneratedRangeKey; }
        public void setAutogeneratedRangeKey(String autogeneratedRangeKey) { this.autogeneratedRangeKey = autogeneratedRangeKey; }

        public String getIndexHashKey() { return indexHashKey; }
        public void setIndexHashKey(String indexHashKey) { this.indexHashKey = indexHashKey; }

        @DynamoDBIndexRangeKey(globalSecondaryIndexName="index")
        public String getIndexRangeKey() { return indexRangeKey; }
        public void setIndexRangeKey(String indexRangeKey) { this.indexRangeKey = indexRangeKey; }

        public String getAttrWithAttrAnnotation() { return attrWithAttrAnnotation; }
        public void setAttrWithAttrAnnotation(String attrWithAttrAnnotation) { this.attrWithAttrAnnotation = attrWithAttrAnnotation; }

        @DynamoDBVersionAttribute
        public String getVersionedAttr() { return versionedAttr; }
        public void setVersionedAttr(String versionedAttr) { this.versionedAttr = versionedAttr; }

        public String getCustomMarshallingAttr() { return customMarshallingAttr; }
        public void setCustomMarshallingAttr(String customMarshallingAttr) { this.customMarshallingAttr = customMarshallingAttr; }

        @DynamoDBIgnore
        public String getIgnoredAttr() { return ignoredAttr; }
        public void setIgnoredAttr(String ignoredAttr) { this.ignoredAttr = ignoredAttr; }
    }

    // Maps getter method name -> expected DynamoDB attribute name, shared by
    // all three annotation variants validated in validateModel().
    @SuppressWarnings("serial")
    private static final Map<String, String> expectedAttributeNames = new HashMap<String, String>(){{
        put("getHashKey", "hashKey");
        put("getAutogeneratedRangeKey", "autogeneratedRangeKey");
        put("getIndexHashKey", "indexHashKey");
        put("getIndexRangeKey", "indexRangeKey");
        put("getAttrWithAttrAnnotation", "real-attribute-name"); // w/ attribute name override
        put("getVersionedAttr", "versionedAttr");
        put("getCustomMarshallingAttr", "customMarshallingAttr");
    }};

    @Test
    public void testInheritedProperties() {
        // Base class
        final DynamoDBMappingsRegistry.Mappings mappings1 = DynamoDBMappingsRegistry.instance().mappingsOf(BaseTablePojo.class);

        assertEquals(3, mappings1.getMappings().size());

        assertEquals("getParentHashKeyWithFieldAnnotation", mappings1.getHashKey().getter().getName());
        assertEquals("parentHashKeyWithFieldAnnotation", mappings1.getHashKey().getAttributeName());

        assertEquals("getParentRangeKeyWithGetterAnnotation", mappings1.getRangeKey().getter().getName());
        assertEquals("parentRangeKeyWithGetterAnnotation", mappings1.getRangeKey().getAttributeName());

        // Subclass pojo inherits the key getters, and defines an attribute that is ignored in the superclass
        final DynamoDBMappingsRegistry.Mappings mappings2 = DynamoDBMappingsRegistry.instance().mappingsOf(TablePojoSubclass.class);

        assertEquals(4, mappings2.getMappings().size());

        assertEquals(mappings1.getHashKey().getter(), mappings2.getHashKey().getter());
        assertEquals("parentHashKeyWithFieldAnnotation", mappings2.getHashKey().getAttributeName());

        assertEquals(mappings1.getRangeKey().getter(), mappings2.getRangeKey().getter());
        assertEquals("parentRangeKeyWithGetterAnnotation", mappings2.getRangeKey().getAttributeName());
    }

    @DynamoDBTable(tableName="table")
    private static class BaseTablePojo {

        @DynamoDBHashKey
        private String parentHashKeyWithFieldAnnotation;
        private String parentRangeKeyWithGetterAnnotation;
        private String parentAttrWithNoAnnotation;
        @DynamoDBIgnore
        private String parentIgnoredAttr;

        public String getParentHashKeyWithFieldAnnotation() {
            return parentHashKeyWithFieldAnnotation;
        }
        public void setParentHashKeyWithFieldAnnotation(
                String parentHashKeyWithFieldAnnotation) {
            this.parentHashKeyWithFieldAnnotation = parentHashKeyWithFieldAnnotation;
        }

        @DynamoDBRangeKey
        public String getParentRangeKeyWithGetterAnnotation() {
            return parentRangeKeyWithGetterAnnotation;
        }
        public void setParentRangeKeyWithGetterAnnotation(
                String parentRangeKeyWithGetterAnnotation) {
            this.parentRangeKeyWithGetterAnnotation = parentRangeKeyWithGetterAnnotation;
        }

        public String getParentAttrWithNoAnnotation() {
            return parentAttrWithNoAnnotation;
        }
        public void setParentAttrWithNoAnnotation(String parentAttrWithNoAnnotation) {
            this.parentAttrWithNoAnnotation = parentAttrWithNoAnnotation;
        }

        public String getParentIgnoredAttr() {
            return parentIgnoredAttr;
        }
        public void setParentIgnoredAttr(String parentIgnoredAttr) {
            this.parentIgnoredAttr = parentIgnoredAttr;
        }
    }

    /**
     * Subclass of BaseTablePojo that inherits all the key attributes, and
     * declares the parentIgnoredAttr which is ignored in the superclass.
     */
    @DynamoDBTable(tableName="table")
    private static class TablePojoSubclass extends BaseTablePojo {

        // Not ignored by the subclass
        private String parentIgnoredAttr;

        @Override
        public String getParentIgnoredAttr() {
            return parentIgnoredAttr;
        }
        @Override
        public void setParentIgnoredAttr(String parentIgnoredAttr) {
            this.parentIgnoredAttr = parentIgnoredAttr;
        }
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kafka.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * BrokerNodeInfo
 * </p>
 *
 * <p>
 * NOTE(review): this class is code-generated (see the {@code @Generated} annotation below); avoid hand edits —
 * changes should come from the SDK code generator so they survive regeneration.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/BrokerNodeInfo" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BrokerNodeInfo implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The attached elastic network interface of the broker.
     * </p>
     */
    private String attachedENIId;
    /**
     * <p>
     * The ID of the broker.
     * </p>
     */
    private Double brokerId;
    /**
     * <p>
     * The client subnet to which this broker node belongs.
     * </p>
     */
    private String clientSubnet;
    /**
     * <p>
     * The virtual private cloud (VPC) of the client.
     * </p>
     */
    private String clientVpcIpAddress;
    /**
     * <p>
     * Information about the version of software currently deployed on the Apache Kafka brokers in the cluster.
     * </p>
     */
    private BrokerSoftwareInfo currentBrokerSoftwareInfo;
    /**
     * <p>
     * Endpoints for accessing the broker.
     * </p>
     */
    private java.util.List<String> endpoints;

    /**
     * <p>
     * The attached elastic network interface of the broker.
     * </p>
     *
     * @param attachedENIId
     *        <p>
     *        The attached elastic network interface of the broker.
     *        </p>
     */
    public void setAttachedENIId(String attachedENIId) {
        this.attachedENIId = attachedENIId;
    }

    /**
     * <p>
     * The attached elastic network interface of the broker.
     * </p>
     *
     * @return <p>
     *         The attached elastic network interface of the broker.
     *         </p>
     */
    public String getAttachedENIId() {
        return this.attachedENIId;
    }

    /**
     * <p>
     * The attached elastic network interface of the broker.
     * </p>
     *
     * @param attachedENIId
     *        <p>
     *        The attached elastic network interface of the broker.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BrokerNodeInfo withAttachedENIId(String attachedENIId) {
        setAttachedENIId(attachedENIId);
        return this;
    }

    /**
     * <p>
     * The ID of the broker.
     * </p>
     *
     * @param brokerId
     *        <p>
     *        The ID of the broker.
     *        </p>
     */
    public void setBrokerId(Double brokerId) {
        this.brokerId = brokerId;
    }

    /**
     * <p>
     * The ID of the broker.
     * </p>
     *
     * @return <p>
     *         The ID of the broker.
     *         </p>
     */
    public Double getBrokerId() {
        return this.brokerId;
    }

    /**
     * <p>
     * The ID of the broker.
     * </p>
     *
     * @param brokerId
     *        <p>
     *        The ID of the broker.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BrokerNodeInfo withBrokerId(Double brokerId) {
        setBrokerId(brokerId);
        return this;
    }

    /**
     * <p>
     * The client subnet to which this broker node belongs.
     * </p>
     *
     * @param clientSubnet
     *        <p>
     *        The client subnet to which this broker node belongs.
     *        </p>
     */
    public void setClientSubnet(String clientSubnet) {
        this.clientSubnet = clientSubnet;
    }

    /**
     * <p>
     * The client subnet to which this broker node belongs.
     * </p>
     *
     * @return <p>
     *         The client subnet to which this broker node belongs.
     *         </p>
     */
    public String getClientSubnet() {
        return this.clientSubnet;
    }

    /**
     * <p>
     * The client subnet to which this broker node belongs.
     * </p>
     *
     * @param clientSubnet
     *        <p>
     *        The client subnet to which this broker node belongs.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BrokerNodeInfo withClientSubnet(String clientSubnet) {
        setClientSubnet(clientSubnet);
        return this;
    }

    /**
     * <p>
     * The virtual private cloud (VPC) of the client.
     * </p>
     *
     * @param clientVpcIpAddress
     *        <p>
     *        The virtual private cloud (VPC) of the client.
     *        </p>
     */
    public void setClientVpcIpAddress(String clientVpcIpAddress) {
        this.clientVpcIpAddress = clientVpcIpAddress;
    }

    /**
     * <p>
     * The virtual private cloud (VPC) of the client.
     * </p>
     *
     * @return <p>
     *         The virtual private cloud (VPC) of the client.
     *         </p>
     */
    public String getClientVpcIpAddress() {
        return this.clientVpcIpAddress;
    }

    /**
     * <p>
     * The virtual private cloud (VPC) of the client.
     * </p>
     *
     * @param clientVpcIpAddress
     *        <p>
     *        The virtual private cloud (VPC) of the client.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BrokerNodeInfo withClientVpcIpAddress(String clientVpcIpAddress) {
        setClientVpcIpAddress(clientVpcIpAddress);
        return this;
    }

    /**
     * <p>
     * Information about the version of software currently deployed on the Apache Kafka brokers in the cluster.
     * </p>
     *
     * @param currentBrokerSoftwareInfo
     *        <p>
     *        Information about the version of software currently deployed on the Apache Kafka brokers in the cluster.
     *        </p>
     */
    public void setCurrentBrokerSoftwareInfo(BrokerSoftwareInfo currentBrokerSoftwareInfo) {
        this.currentBrokerSoftwareInfo = currentBrokerSoftwareInfo;
    }

    /**
     * <p>
     * Information about the version of software currently deployed on the Apache Kafka brokers in the cluster.
     * </p>
     *
     * @return <p>
     *         Information about the version of software currently deployed on the Apache Kafka brokers in the cluster.
     *         </p>
     */
    public BrokerSoftwareInfo getCurrentBrokerSoftwareInfo() {
        return this.currentBrokerSoftwareInfo;
    }

    /**
     * <p>
     * Information about the version of software currently deployed on the Apache Kafka brokers in the cluster.
     * </p>
     *
     * @param currentBrokerSoftwareInfo
     *        <p>
     *        Information about the version of software currently deployed on the Apache Kafka brokers in the cluster.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BrokerNodeInfo withCurrentBrokerSoftwareInfo(BrokerSoftwareInfo currentBrokerSoftwareInfo) {
        setCurrentBrokerSoftwareInfo(currentBrokerSoftwareInfo);
        return this;
    }

    /**
     * <p>
     * Endpoints for accessing the broker.
     * </p>
     *
     * @return <p>
     *         Endpoints for accessing the broker.
     *         </p>
     */
    public java.util.List<String> getEndpoints() {
        return endpoints;
    }

    /**
     * <p>
     * Endpoints for accessing the broker.
     * </p>
     *
     * @param endpoints
     *        <p>
     *        Endpoints for accessing the broker.
     *        </p>
     */
    public void setEndpoints(java.util.Collection<String> endpoints) {
        if (endpoints == null) {
            this.endpoints = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection cannot affect this object.
        this.endpoints = new java.util.ArrayList<String>(endpoints);
    }

    /**
     * <p>
     * Endpoints for accessing the broker.
     * </p>
     *
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEndpoints(java.util.Collection)} or {@link #withEndpoints(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param endpoints
     *        <p>
     *        Endpoints for accessing the broker.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BrokerNodeInfo withEndpoints(String... endpoints) {
        if (this.endpoints == null) {
            setEndpoints(new java.util.ArrayList<String>(endpoints.length));
        }
        for (String ele : endpoints) {
            this.endpoints.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * Endpoints for accessing the broker.
     * </p>
     *
     * @param endpoints
     *        <p>
     *        Endpoints for accessing the broker.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BrokerNodeInfo withEndpoints(java.util.Collection<String> endpoints) {
        setEndpoints(endpoints);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAttachedENIId() != null)
            sb.append("AttachedENIId: ").append(getAttachedENIId()).append(",");
        if (getBrokerId() != null)
            sb.append("BrokerId: ").append(getBrokerId()).append(",");
        if (getClientSubnet() != null)
            sb.append("ClientSubnet: ").append(getClientSubnet()).append(",");
        if (getClientVpcIpAddress() != null)
            sb.append("ClientVpcIpAddress: ").append(getClientVpcIpAddress()).append(",");
        if (getCurrentBrokerSoftwareInfo() != null)
            sb.append("CurrentBrokerSoftwareInfo: ").append(getCurrentBrokerSoftwareInfo()).append(",");
        if (getEndpoints() != null)
            sb.append("Endpoints: ").append(getEndpoints());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof BrokerNodeInfo == false)
            return false;
        BrokerNodeInfo other = (BrokerNodeInfo) obj;
        // For each field: XOR catches "exactly one side null"; the second check compares non-null values.
        if (other.getAttachedENIId() == null ^ this.getAttachedENIId() == null)
            return false;
        if (other.getAttachedENIId() != null && other.getAttachedENIId().equals(this.getAttachedENIId()) == false)
            return false;
        if (other.getBrokerId() == null ^ this.getBrokerId() == null)
            return false;
        if (other.getBrokerId() != null && other.getBrokerId().equals(this.getBrokerId()) == false)
            return false;
        if (other.getClientSubnet() == null ^ this.getClientSubnet() == null)
            return false;
        if (other.getClientSubnet() != null && other.getClientSubnet().equals(this.getClientSubnet()) == false)
            return false;
        if (other.getClientVpcIpAddress() == null ^ this.getClientVpcIpAddress() == null)
            return false;
        if (other.getClientVpcIpAddress() != null && other.getClientVpcIpAddress().equals(this.getClientVpcIpAddress()) == false)
            return false;
        if (other.getCurrentBrokerSoftwareInfo() == null ^ this.getCurrentBrokerSoftwareInfo() == null)
            return false;
        if (other.getCurrentBrokerSoftwareInfo() != null && other.getCurrentBrokerSoftwareInfo().equals(this.getCurrentBrokerSoftwareInfo()) == false)
            return false;
        if (other.getEndpoints() == null ^ this.getEndpoints() == null)
            return false;
        if (other.getEndpoints() != null && other.getEndpoints().equals(this.getEndpoints()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getAttachedENIId() == null) ? 0 : getAttachedENIId().hashCode());
        hashCode = prime * hashCode + ((getBrokerId() == null) ? 0 : getBrokerId().hashCode());
        hashCode = prime * hashCode + ((getClientSubnet() == null) ? 0 : getClientSubnet().hashCode());
        hashCode = prime * hashCode + ((getClientVpcIpAddress() == null) ? 0 : getClientVpcIpAddress().hashCode());
        hashCode = prime * hashCode + ((getCurrentBrokerSoftwareInfo() == null) ? 0 : getCurrentBrokerSoftwareInfo().hashCode());
        hashCode = prime * hashCode + ((getEndpoints() == null) ? 0 : getEndpoints().hashCode());
        return hashCode;
    }

    @Override
    public BrokerNodeInfo clone() {
        try {
            return (BrokerNodeInfo) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.kafka.model.transform.BrokerNodeInfoMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.heliosapm.opentsdb.client.util; import java.io.File; import java.lang.reflect.Method; import java.net.InetAddress; import java.net.NetworkInterface; import java.util.Enumeration; import java.util.Iterator; import java.util.Properties; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; import java.util.regex.Pattern; /** * <p>Title: Util</p> * <p>Description: Misc utility functions</p> * <p>Company: Helios Development Group LLC</p> * @author Whitehead (nwhitehead AT heliosdev DOT org) * <p><code>com.heliosapm.opentsdb.client.util.Util</code></p> */ public class Util { /** IP4 address pattern matcher */ public static final Pattern IP4_ADDRESS = Pattern.compile("((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])"); /** IP6 address pattern matcher */ public static final Pattern IP6_ADDRESS = 
        // NOTE(review): this chunk starts mid-declaration — the field head (presumably
        // "private static final Pattern IP6_ADDRESS =", judging by the IP6_ADDRESS use in
        // getHostNameByNic() below) lies above the visible region. TODO confirm against full file.
        // The pattern matches the common textual IPv6 forms (full, ::-compressed, link-local
        // with zone index, and IPv4-mapped/embedded addresses); trailing ".*?" tolerates suffixes.
        Pattern.compile("(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])).*?");

        /** A set of shutdown hooks */
        private static final Set<Runnable> shutdownHookRunnables = new CopyOnWriteArraySet<Runnable>();

        // Registers a single JVM shutdown hook that drains shutdownHookRunnables.
        // Individual task failures are deliberately swallowed (best-effort cleanup):
        // one broken hook must not prevent the remaining hooks from running.
        static {
                final Thread shutdownHook = new Thread("PendingShutdownHook") {
                        @Override
                        public void run() {
                                // Renamed so thread dumps taken during shutdown show the hook is active.
                                Thread.currentThread().setName("RunningShutdownHook");
                                for(Iterator<Runnable> riter = shutdownHookRunnables.iterator(); riter.hasNext();) {
                                        Runnable t = riter.next();
                                        try {
                                                t.run();
                                        } catch (Throwable tx) {/* No Op */}
                                }
                                shutdownHookRunnables.clear();
                        }
                };
                Runtime.getRuntime().addShutdownHook(shutdownHook);
        }

        /**
         * Adds a task to be run on JVM shutdown
         * @param r The task to run; silently ignored if null
         */
        public static void sdhook(final Runnable r) {
                if(r!=null) shutdownHookRunnables.add(r);
        }

        /**
         * Adds a task to delete the passed files on JVM shutdown
         * Null array and null elements are tolerated; File.delete() return values are ignored
         * (best-effort deletion).
         * @param files The files to delete
         */
        public static void sdhook(final File...files) {
                shutdownHookRunnables.add(new Runnable() {
                        @Override
                        public void run() {
                                if(files!=null) {
                                        for(File f: files) {
                                                if(f==null) continue;
                                                f.delete();
                                        }
                                }
                        }
                });
        }

        /**
         * Returns the JMX domain where csf MBeans are registered
         * @return the JMX domain where csf MBeans are registered
         */
        public static String getJMXDomain() {
                // Hard-coded for now; the config-driven lookup below is intentionally disabled.
                return "com.heliosapm";
                //return ConfigurationReader.conf(Constants.PROP_JMX_DOMAIN, Constants.DEFAULT_JMX_DOMAIN);
        }

        // NOTE(review): large block of commented-out JMX helper code retained verbatim below.
        // Consider deleting it — it is preserved in version control.
//	/**
//	 * Determines if the passed stringy is a valid object name
//	 * @param cs The stringy to test
//	 * @return true if the stringy is a valid object name, false otherwise
//	 */
//	public static boolean isObjectName(final CharSequence cs) {
//		if(cs==null || cs.toString().trim().isEmpty()) return false;
//		try {
//			objectName(cs);
//			return true;
//		} catch (Exception ex) {
//			return false;
//		}
//	}
//	/**
//	 * Creates a JMX ObjectName from the passed metric name and tags
//	 * @param metric The ObjectName's domain
//	 * @param tags The ObjectName's properties in the form of <b>=</b> separated key value pairs
//	 * @return the built ObjectName
//	 */
//	public static ObjectName objectName(final String metric, final String...tags) {
//		if(metric==null || metric.trim().isEmpty()) throw new IllegalArgumentException("The passed metric name was null or empty");
//		if(tags.length==0) throw new IllegalArgumentException("The passed tags array was zero length");
//		StringBuilder b = new StringBuilder(clean(metric)).append(":");
//		int tcount = 0;
//		for(String tag: tags) {
//			String s = clean(tag);
//			if(s==null || s.trim().isEmpty() || s.indexOf('=')==0) continue;
//			b.append(s).append(",");
//			tcount++;
//		}
//		if(tcount==0) if(tags.length==0) throw new IllegalArgumentException("The passed tags array contained no legal tags");
//		b.deleteCharAt(b.length()-1);
//		return objectName(b);
//	}
//	/**
//	 * Registers the passed MBean on all located MBeanServers
//	 * @param bean The bean to register
//	 * @param objectName The ObjectName to register the bean with
//	 * @return the number of MBeanServers registered with
//	 */
//	public static int registerMBeanEverywhere(final Object bean, final ObjectName objectName) {
//		int cnt = 0;
//		for(MBeanServer mbs: MBeanServerFactory.findMBeanServer(null)) {
//			if(!mbs.isRegistered(objectName)) {
//				try {
//					mbs.registerMBean(bean, objectName);
//					cnt++;
//				} catch (Exception ex) {/* No Op */}
//			}
//		}
//		return cnt;
//	}

        /**
         * Returns the agent properties
         * Uses reflection on the internal class sun.misc.VMSupport, so this may fail on
         * non-HotSpot or module-restricted JVMs; any failure is mapped to null.
         * @return the agent properties or null if reflective call failed
         */
        public static Properties getAgentProperties() {
                try {
                        Class<?> clazz = Class.forName("sun.misc.VMSupport");
                        Method m = clazz.getDeclaredMethod("getAgentProperties");
                        m.setAccessible(true);
                        Properties p = (Properties)m.invoke(null);
                        return p;
                } catch (Throwable t) {
                        return null;
                }
        }

        /**
         * Loads a class by name
         * @param className The class name
         * @param loader The optional class loader; when supplied the class is loaded and
         *        initialized ({@code initialize=true}) through that loader
         * @return The class of null if the name could not be resolved
         */
        public static Class<?> loadClassByName(final String className, final ClassLoader loader) {
                try {
                        if(loader!=null) {
                                return Class.forName(className, true, loader);
                        }
                        return Class.forName(className);
                } catch (Exception ex) {
                        return null;
                }
        }

        /**
         * Uses <b><code>InetAddress.getLocalHost().getCanonicalHostName()</code></b> to get the host name.
         * If the value is null, empty or equals <b><code>localhost</code></b>, returns null.
         * @return The host name or null if one was not found.
         */
        public static String getHostNameByInet() {
                try {
                        String inetHost = InetAddress.getLocalHost().getCanonicalHostName();
                        if(inetHost==null || inetHost.trim().isEmpty() || "localhost".equalsIgnoreCase(inetHost.trim())) return null;
                        return inetHost.trim();
                } catch (Exception x) {
                        return null;
                }
        }

        /**
         * Iterates through the found NICs, extracting the host name if the NIC is not the loopback interface.
         * The host name is extracted from the first address bound to the first matching NIC that does not
         * have a canonical name that is an IP address.
         * @return The host name or null if one was not found.
         */
        public static String getHostNameByNic() {
                try {
                        for(Enumeration<NetworkInterface> nicEnum = NetworkInterface.getNetworkInterfaces(); nicEnum.hasMoreElements();) {
                                NetworkInterface nic = nicEnum.nextElement();
                                // Only consider NICs that are up and not loopback.
                                if(nic!=null && nic.isUp() && !nic.isLoopback()) {
                                        for(Enumeration<InetAddress> nicAddr = nic.getInetAddresses(); nicAddr.hasMoreElements();) {
                                                InetAddress addr = nicAddr.nextElement();
                                                String chost = addr.getCanonicalHostName();
                                                if(chost!=null && !chost.trim().isEmpty()) {
                                                        // A canonical name that is itself an IP literal means reverse
                                                        // DNS failed — keep looking for a real host name.
                                                        if(!IP4_ADDRESS.matcher(chost).matches() && !IP6_ADDRESS.matcher(chost).matches()) {
                                                                return chost;
                                                        }
                                                }
                                        }
                                }
                        }
                        return null;
                } catch (Exception x) {
                        return null;
                }
        }

        /**
         * Cleans the passed stringy to make it more likely to not be rejected by OpenTSDB
         * Strips everything up to and including the first '/', removes spaces and replaces
         * '$' (inner-class markers) with 'S'.
         * @param cs The stringy to clean
         * @return the cleaned stringy; empty string for null/blank input
         */
        public static String clean(final CharSequence cs) {
                if(cs==null || cs.toString().trim().isEmpty()) return "";
                String s = cs.toString().trim();
                final int index = s.indexOf('/');
                if(index!=-1) {
                        s = s.substring(index+1);
                }
                return s.replace(" ", "").replace('$', 'S');
        }

        /**
         * Returns the next highest power of 2
         * NOTE(review): the {@code (int)} cast is redundant (value is already an int), and the
         * result is undefined for value &lt;= 0 or value &gt; 2^30 — callers should pass positive
         * values in range. TODO confirm intended domain.
         * @param value The value to get the next power of 2 for
         * @return the next power of 2
         */
        public static int findNextPositivePowerOfTwo(final int value) {
        	return  1 << (32 - Integer.numberOfLeadingZeros((int)value - 1));
        }

        /** Lookup table of the int powers of two: POW2[i] == 2^i for i in [0,30]. */
        private static final int[] POW2 = new int[]{1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536, 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864, 134217728, 268435456, 536870912, 1073741824};
        /** Lookup table of the byte powers of two: BYTEPOW2[i] == 2^i for i in [0,6]. */
        private static final byte[] BYTEPOW2 = new byte[]{1, 2, 4, 8, 16, 32, 64};

        /**
         * Returns 2 to the power of the passed index from the int lookup table
         * @param x the exponent, 0 to 30 inclusive
         * @return 2^x as an int
         */
        public static int pow2Index(final int x) {
                if(x<0 || x > 30) throw new IllegalArgumentException("Unsupported Value [" + x + "]. Only supported for values between 0 and 30 inclusive");
                return POW2[x];
        }

        /**
         * Returns 2 to the power of the passed index from the byte lookup table
         * NOTE(review): the valid range here is 0 to 7 (guard checks x &gt; 7) but the exception
         * message still says "between 0 and 30 inclusive" — copy/paste from pow2Index; the
         * message text should be corrected. Also note BYTEPOW2 has indices 0..6, so x==7 would
         * throw ArrayIndexOutOfBoundsException despite passing the guard — TODO confirm.
         * @param x the exponent
         * @return 2^x as a byte
         */
        public static byte pow2ByteIndex(final int x) {
                if(x<0 || x > 7) throw new IllegalArgumentException("Unsupported Value [" + x + "]. Only supported for values between 0 and 30 inclusive");
                return BYTEPOW2[x];
        }

        /**
         * Calculates a percent
         * Returns 0 when either operand is exactly 0.0 to avoid division by zero.
         * @param part The part
         * @param whole The whole
         * @return The percentage that the part is of the whole, rounded to the nearest int
         */
        public static int percent(double part, double whole) {
                if(part==0d || whole==0d) return 0;
                double p = part/whole*100;
                return (int) Math.round(p);
        }
}
/** * Copyright (C) 2013 * Michael Mosmann <michael@mosmann.de> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.flapdoodle.guava; import static org.junit.Assert.*; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import org.junit.Test; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; public class TransformationsTest { @Test public void nullCollectionShouldGiveEmpty() { assertTrue(Transformations.emptyIfNull(null).isEmpty()); } @Test public void nonNullShouldReturnUnmodified() { ArrayList<String> source = Lists.newArrayList(); assertTrue(Transformations.emptyIfNull(source).isEmpty()); assertTrue(source == Transformations.emptyIfNull(source)); } @Test public void flatmapShouldGiveAllEntries() { ImmutableList<? extends String> result = Transformations.flatmap(Lists.newArrayList("A", "B"), new Function<String, Collection<String>>() { @Override public Collection<String> apply(String input) { return Lists.newArrayList(input, input, input); } }); assertEquals("[A, A, A, B, B, B]", result.toString()); } @Test public void iterableFlatmapShouldGiveAllEntries() { Iterable<? 
extends String> result = Transformations.flatmap(Lists.newArrayList("A", "B"), new Function<String, Iterable<String>>() { @Override public Iterable<String> apply(String input) { return Lists.newArrayList(input, input, input); } }); assertEquals("[A, A, A, B, B, B]", result.toString()); } @Test public void flatmapShouldGiveAllEntriesFromLists() { List<? extends List<String>> lists = ImmutableList.<List<String>> builder().add(Lists.newArrayList("A", "B")).add( Lists.newArrayList("C", "D", "E")).build(); ImmutableList<? extends String> result = Transformations.flatmap(lists); assertEquals("[A, B, C, D, E]", result.toString()); } @Test public void mapListToMap() { Map<String, String> map = Transformations.map(Lists.newArrayList("Achim", "Susi", "Jochen"), new Function<String, String>() { @Override public String apply(String input) { return input.substring(0, 1); } }); assertEquals(3, map.size()); assertEquals("Susi", map.get("S")); } @Test(expected = IllegalArgumentException.class) public void mapListToMapShouldFailIfKeyCollides() { Transformations.map(Lists.newArrayList("Achim", "Susi", "Jochen", "Arnim"), new Function<String, String>() { @Override public String apply(String input) { return input.substring(0, 1); } }); } @Test public void mapListToMapWithFoldShouldGiveFoldedValuesInMap() { Map<String, ? extends List<? 
extends String>> map = Transformations.map( Lists.newArrayList("Achim", "Susi", "Jochen", "Arnim"), new Function<String, String>() { @Override public String apply(String input) { return input.substring(0, 1); } }, Folds.asListFold(Transformations.<String> asCollection())); assertEquals(3, map.size()); assertEquals("[Susi]", map.get("S").toString()); assertEquals("[Achim, Arnim]", map.get("A").toString()); } @Test public void predicateWithTransformationIsPredicateWithDifferentType() { Predicate<Integer> predicate = Transformations.map(Predicates.equalTo("12"),new Function<Integer, String>() { @Override public String apply(Integer input) { return input.toString(); } }); assertTrue(predicate.apply(12)); assertFalse(predicate.apply(11)); } @Test public void firstOfShouldGiveEntriesOrLessIfCollectionIsSmaller() { assertEquals(2,Transformations.firstOf(Lists.newArrayList("A","B"), 17).size()); } @Test public void firstOfShouldGiveOptionalPresentIfCollectionIsNotEmpty() { assertTrue(Transformations.firstOf(Lists.newArrayList("A","B")).isPresent()); assertFalse(Transformations.firstOf(Lists.newArrayList()).isPresent()); assertEquals("C", Transformations.firstOf(Lists.newArrayList("C","B")).get()); } @Test public void partitionShouldSeparateMatchingFromOthers() { Partition<Integer> partition = Transformations.partition(Lists.newArrayList(1, 2, 3, 4, 5, 6, 7), new Predicate<Integer>() { @Override public boolean apply(Integer input) { return input % 2 != 0; } }); assertEquals("[1, 3, 5, 7]",partition.matching().toString()); assertEquals("[2, 4, 6]",partition.notMatching().toString()); } @Test public void splitInBoundsShouldGiveValidAnswers() { Partition<String> partition = Transformations.split(Lists.newArrayList("A", "B", "C"), 1); assertEquals("[A]", partition.matching().toString()); assertEquals("[B, C]", partition.notMatching().toString()); partition = Transformations.split(Lists.newArrayList("A", "B", "C"), 0); assertEquals("[]", partition.matching().toString()); 
assertEquals("[A, B, C]", partition.notMatching().toString()); partition = Transformations.split(Lists.newArrayList("A", "B", "C"), 3); assertEquals("[A, B, C]", partition.matching().toString()); assertEquals("[]", partition.notMatching().toString()); } @Test public void zipShouldReturnPairForEachEntry() { Collection<Pair<String, Integer>> result = Transformations.zip(Lists.newArrayList("A","B","C"), Lists.newArrayList(1,2,3)); assertEquals("[Pair[A, 1], Pair[B, 2], Pair[C, 3]]",result.toString()); } @Test(expected = IndexOutOfBoundsException.class) public void zipShouldFailIfUnequalSizedCollections() { Transformations.zip(Lists.newArrayList("A","B","C"), Lists.newArrayList(1,2)); } @Test public void noopShouldDoNothing() { assertEquals("foo", Transformations.noop().apply("foo")); } @Test public void asCollectionShouldGiveCollectionForValue() { assertEquals("[foo]", Transformations.asCollection().apply("foo").toString()); } }
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.uberfire.backend.server.io.watch;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import javax.naming.InitialContext;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.uberfire.backend.server.util.Filter;
import org.uberfire.commons.async.DescriptiveRunnable;
import org.uberfire.commons.concurrent.Unmanaged;
import org.uberfire.commons.services.cdi.ApplicationStarted;
import org.uberfire.io.IOWatchService;
import org.uberfire.java.nio.file.FileSystem;
import org.uberfire.java.nio.file.WatchEvent;
import org.uberfire.java.nio.file.WatchKey;
import org.uberfire.java.nio.file.WatchService;
import org.uberfire.workbench.events.ResourceAddedEvent;
import org.uberfire.workbench.events.ResourceBatchChangesEvent;
import org.uberfire.workbench.events.ResourceDeletedEvent;
import org.uberfire.workbench.events.ResourceRenamedEvent;
import org.uberfire.workbench.events.ResourceUpdatedEvent;

/**
 * Base class that turns NIO-style {@link WatchService}s into CDI resource events.
 * Each registered file system gets a long-running task that blocks on
 * {@code WatchService.take()} and forwards every {@link WatchKey} to an
 * {@link IOWatchServiceExecutor}. Tasks registered before {@link #start()} are
 * queued in {@code watchThreads}; once started, new watch services are submitted
 * to the executor immediately.
 */
public abstract class AbstractIOWatchService implements IOWatchService,
                                                        Filter<WatchEvent<?>> {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractIOWatchService.class);
    // Seconds to wait (twice) for the executor to drain on dispose; configurable via
    // system property. NOTE(review): the property name "quitetimeout" looks like a typo
    // of "quiettimeout" but is presumably relied upon — do not rename casually.
    private static final Integer AWAIT_TERMINATION_TIMEOUT = Integer.parseInt(System.getProperty("org.uberfire.watcher.quitetimeout", "3"));

    // Names of file systems already being watched (parallel to watchServices).
    private final List<String> fileSystems = new ArrayList<>();
    private final List<WatchService> watchServices = new ArrayList<>();
    // Flag polled by the watch loops; set by dispose() to make them exit.
    protected boolean isDisposed = false;

    private boolean started;
    // Watch tasks accumulated before start(); drained into the executor on start.
    private final Set<AsyncWatchService> watchThreads = new HashSet<>();

    private Event<ResourceBatchChangesEvent> resourceBatchChanges;
    private Event<ResourceUpdatedEvent> resourceUpdatedEvent;
    private Event<ResourceRenamedEvent> resourceRenamedEvent;
    private Event<ResourceDeletedEvent> resourceDeletedEvent;
    private Event<ResourceAddedEvent> resourceAddedEvent;
    private ExecutorService executorService;

    // Lazily resolved executor; see getWatchServiceExecutor().
    private IOWatchServiceExecutor executor = null;

    // Futures of submitted watch tasks, cancelled on dispose().
    private final Set<Future<?>> jobs = new CopyOnWriteArraySet<>();

    /** No-args constructor required for CDI proxying; fields stay null until injected. */
    public AbstractIOWatchService() {
    }

    @Inject
    public AbstractIOWatchService(Event<ResourceBatchChangesEvent> resourceBatchChanges,
                                  Event<ResourceUpdatedEvent> resourceUpdatedEvent,
                                  Event<ResourceRenamedEvent> resourceRenamedEvent,
                                  Event<ResourceDeletedEvent> resourceDeletedEvent,
                                  Event<ResourceAddedEvent> resourceAddedEvent,
                                  @Unmanaged ExecutorService executorService) {
        this.resourceBatchChanges = resourceBatchChanges;
        this.resourceUpdatedEvent = resourceUpdatedEvent;
        this.resourceRenamedEvent = resourceRenamedEvent;
        this.resourceDeletedEvent = resourceDeletedEvent;
        this.resourceAddedEvent = resourceAddedEvent;
        this.executorService = executorService;
    }

    /**
     * Auto-starts the service after construction unless disabled via the
     * {@code org.uberfire.watcher.autostart} system property.
     */
    @PostConstruct
    public void initialize() {
        final boolean autostart = Boolean.parseBoolean(System.getProperty("org.uberfire.watcher.autostart", "true"));
        if (autostart) {
            start();
        }
    }

    /**
     * Submits all queued watch tasks to the executor. Idempotent: only the first
     * call has any effect (guarded by {@code started}); synchronized so concurrent
     * callers cannot double-submit.
     */
    public synchronized void start() {
        if (!started) {
            this.started = true;
            for (final AsyncWatchService watchThread : watchThreads) {
                final IOWatchServiceExecutor watchServiceExecutor = getWatchServiceExecutor();
                jobs.add(executorService.submit(new DescriptiveRunnable() {
                    @Override
                    public String getDescription() {
                        return watchThread.getDescription();
                    }

                    @Override
                    public void run() {
                        watchThread.execute(watchServiceExecutor);
                    }
                }));
            }
            watchThreads.clear();
        }
    }

    /**
     * Shuts the service down: stops the watch loops, closes the watch services,
     * cancels outstanding jobs and drains the executor using the standard two-phase
     * shutdown()/shutdownNow() pattern with a bounded wait.
     */
    @PreDestroy
    protected void dispose() {
        isDisposed = true;
        for (final WatchService watchService : watchServices) {
            watchService.close();
        }
        for (final Future<?> job : jobs) {
            if (!job.isCancelled() && !job.isDone()) {
                job.cancel(true);
            }
        }
        executorService.shutdown(); // Disable new tasks from being submitted
        try {
            // Wait a while for existing tasks to terminate
            if (!executorService.awaitTermination(AWAIT_TERMINATION_TIMEOUT, TimeUnit.SECONDS)) {
                executorService.shutdownNow(); // Cancel currently executing tasks
                // Wait a while for tasks to respond to being cancelled
                if (!executorService.awaitTermination(AWAIT_TERMINATION_TIMEOUT, TimeUnit.SECONDS)) {
                    LOG.error("Thread pool did not terminate");
                }
            }
        } catch (InterruptedException ie) {
            // (Re-)Cancel if current thread also interrupted
            executorService.shutdownNow();
            // Preserve interrupt status
            Thread.currentThread().interrupt();
        }
    }

    @Override
    public boolean hasWatchService(final FileSystem fs) {
        // Membership is tracked by file-system name, not identity.
        return fileSystems.contains(fs.getName());
    }

    /**
     * Registers a watch service for the given file system. If the service is already
     * started the watch loop is submitted immediately; otherwise it is queued until
     * {@link #start()} runs.
     * NOTE(review): unlike start(), this method is not synchronized although it
     * mutates shared lists/sets — confirm all callers are on the same thread.
     */
    @Override
    public void addWatchService(final FileSystem fs,
                                final WatchService ws) {
        fileSystems.add(fs.getName());
        watchServices.add(ws);

        final AsyncWatchService asyncWatchService = new AsyncWatchService() {
            @Override
            public void execute(final IOWatchServiceExecutor wsExecutor) {
                while (!isDisposed) {
                    final WatchKey wk;
                    try {
                        // Blocks until events arrive; close()/interrupt unblocks with an
                        // exception, which ends the loop.
                        wk = ws.take();
                    } catch (final Exception ex) {
                        break;
                    }
                    try {
                        wsExecutor.execute(wk, AbstractIOWatchService.this);
                    } catch (final Exception ex) {
                        LOG.error("Unexpected error during WatchService execution", ex);
                    }
                    // Reset the key -- this step is critical if you want to
                    // receive further watch events. If the key is no longer valid,
                    // the directory is inaccessible so exit the loop.
                    boolean valid = wk.reset();
                    if (!valid) {
                        break;
                    }
                }
            }

            @Override
            public String getDescription() {
                return AbstractIOWatchService.this.getClass().getName() + "(" + ws.toString() + ")";
            }
        };

        if (started) {
            final IOWatchServiceExecutor watchServiceExecutor = getWatchServiceExecutor();
            executorService.execute(new DescriptiveRunnable() {
                @Override
                public String getDescription() {
                    return asyncWatchService.getDescription();
                }

                @Override
                public void run() {
                    asyncWatchService.execute(watchServiceExecutor);
                }
            });
        } else {
            watchThreads.add(asyncWatchService);
        }
    }

    /** CDI observer: starts the service when the application signals it is up. */
    public void configureOnEvent(@Observes ApplicationStarted applicationStartedEvent) {
        start();
    }

    /**
     * Lazily resolves the {@link IOWatchServiceExecutor}: first via JNDI
     * ({@code java:module/IOWatchServiceExecutorImpl}); if the lookup fails, falls
     * back to a directly-instantiated impl wired to this instance's events.
     * NOTE(review): lazy init is not synchronized — a race could build two executors;
     * harmless only if the impl is stateless, TODO confirm.
     */
    protected IOWatchServiceExecutor getWatchServiceExecutor() {
        if (executor == null) {
            IOWatchServiceExecutor _executor = null;
            try {
                _executor = InitialContext.doLookup("java:module/IOWatchServiceExecutorImpl");
            } catch (final Exception ignored) {
            }
            if (_executor == null) {
                _executor = new IOWatchServiceExecutorImpl();
                ((IOWatchServiceExecutorImpl) _executor).setEvents(resourceBatchChanges,
                                                                   resourceUpdatedEvent,
                                                                   resourceRenamedEvent,
                                                                   resourceDeletedEvent,
                                                                   resourceAddedEvent);
            }
            executor = _executor;
        }
        return executor;
    }
}
package io.fabric8.openshift.api.model.installer.baremetal.v1;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.fabric8.kubernetes.api.model.Container;
import io.fabric8.kubernetes.api.model.IntOrString;
import io.fabric8.kubernetes.api.model.KubernetesResource;
import io.fabric8.kubernetes.api.model.LabelSelector;
import io.fabric8.kubernetes.api.model.LocalObjectReference;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.api.model.ObjectReference;
import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
import io.fabric8.kubernetes.api.model.PodTemplateSpec;
import io.fabric8.kubernetes.api.model.ResourceRequirements;
import io.sundr.builder.annotations.Buildable;
import io.sundr.builder.annotations.BuildableReference;
import lombok.EqualsAndHashCode;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;

/**
 * Jackson/Sundrio model for the bare-metal {@code platform} section of the
 * OpenShift installer configuration.
 * NOTE(review): this class follows the shape of generator output (jsonschema2pojo-style
 * accessors, Lombok + @Buildable annotations, redundant @Setter alongside explicit
 * setters) — presumably generated; avoid hand edits that the generator would overwrite.
 */
@JsonDeserialize(using = com.fasterxml.jackson.databind.JsonDeserializer.None.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "apiVersion",
    "kind",
    "metadata",
    "apiVIP",
    "bootstrapOSImage",
    "bootstrapProvisioningIP",
    "clusterOSImage",
    "clusterProvisioningIP",
    "defaultMachinePlatform",
    "externalBridge",
    "externalMACAddress",
    "hosts",
    "ingressVIP",
    "libvirtURI",
    "provisioningBridge",
    "provisioningDHCPExternal",
    "provisioningDHCPRange",
    "provisioningHostIP",
    "provisioningMACAddress",
    "provisioningNetwork",
    "provisioningNetworkCIDR",
    "provisioningNetworkInterface"
})
@ToString
@EqualsAndHashCode
@Setter
@Accessors(prefix = {
    "_",
    ""
})
@Buildable(editableEnabled = false, validationEnabled = false, generateBuilderPackage = false, lazyCollectionInitEnabled = false, builderPackage = "io.fabric8.kubernetes.api.builder", refs = {
    @BuildableReference(ObjectMeta.class),
    @BuildableReference(LabelSelector.class),
    @BuildableReference(Container.class),
    @BuildableReference(PodTemplateSpec.class),
    @BuildableReference(ResourceRequirements.class),
    @BuildableReference(IntOrString.class),
    @BuildableReference(ObjectReference.class),
    @BuildableReference(LocalObjectReference.class),
    @BuildableReference(PersistentVolumeClaim.class)
})
public class Platform implements KubernetesResource
{

    @JsonProperty("apiVIP")
    private String apiVIP;
    @JsonProperty("bootstrapOSImage")
    private String bootstrapOSImage;
    @JsonProperty("bootstrapProvisioningIP")
    private String bootstrapProvisioningIP;
    @JsonProperty("clusterOSImage")
    private String clusterOSImage;
    @JsonProperty("clusterProvisioningIP")
    private String clusterProvisioningIP;
    @JsonProperty("defaultMachinePlatform")
    private MachinePool defaultMachinePlatform;
    @JsonProperty("externalBridge")
    private String externalBridge;
    @JsonProperty("externalMACAddress")
    private String externalMACAddress;
    @JsonProperty("hosts")
    private List<Host> hosts = new ArrayList<Host>();
    @JsonProperty("ingressVIP")
    private String ingressVIP;
    @JsonProperty("libvirtURI")
    private String libvirtURI;
    @JsonProperty("provisioningBridge")
    private String provisioningBridge;
    @JsonProperty("provisioningDHCPExternal")
    private Boolean provisioningDHCPExternal;
    @JsonProperty("provisioningDHCPRange")
    private String provisioningDHCPRange;
    @JsonProperty("provisioningHostIP")
    private String provisioningHostIP;
    @JsonProperty("provisioningMACAddress")
    private String provisioningMACAddress;
    @JsonProperty("provisioningNetwork")
    private String provisioningNetwork;
    @JsonProperty("provisioningNetworkCIDR")
    private String provisioningNetworkCIDR;
    @JsonProperty("provisioningNetworkInterface")
    private String provisioningNetworkInterface;
    // Catch-all for JSON properties not mapped to a field above (see @JsonAnyGetter/@JsonAnySetter).
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    /**
     * No args constructor for use in serialization
     *
     */
    public Platform() {
    }

    /**
     * All-args constructor; parameter javadoc order below is generator output and does
     * not match the signature order.
     * @param bootstrapProvisioningIP
     * @param provisioningNetworkInterface
     * @param apiVIP
     * @param ingressVIP
     * @param provisioningDHCPExternal
     * @param hosts
     * @param provisioningNetwork
     * @param provisioningNetworkCIDR
     * @param clusterProvisioningIP
     * @param bootstrapOSImage
     * @param provisioningDHCPRange
     * @param clusterOSImage
     * @param defaultMachinePlatform
     * @param provisioningBridge
     * @param externalMACAddress
     * @param provisioningHostIP
     * @param provisioningMACAddress
     * @param libvirtURI
     * @param externalBridge
     */
    public Platform(String apiVIP, String bootstrapOSImage, String bootstrapProvisioningIP, String clusterOSImage, String clusterProvisioningIP, MachinePool defaultMachinePlatform, String externalBridge, String externalMACAddress, List<Host> hosts, String ingressVIP, String libvirtURI, String provisioningBridge, Boolean provisioningDHCPExternal, String provisioningDHCPRange, String provisioningHostIP, String provisioningMACAddress, String provisioningNetwork, String provisioningNetworkCIDR, String provisioningNetworkInterface) {
        super();
        this.apiVIP = apiVIP;
        this.bootstrapOSImage = bootstrapOSImage;
        this.bootstrapProvisioningIP = bootstrapProvisioningIP;
        this.clusterOSImage = clusterOSImage;
        this.clusterProvisioningIP = clusterProvisioningIP;
        this.defaultMachinePlatform = defaultMachinePlatform;
        this.externalBridge = externalBridge;
        this.externalMACAddress = externalMACAddress;
        this.hosts = hosts;
        this.ingressVIP = ingressVIP;
        this.libvirtURI = libvirtURI;
        this.provisioningBridge = provisioningBridge;
        this.provisioningDHCPExternal = provisioningDHCPExternal;
        this.provisioningDHCPRange = provisioningDHCPRange;
        this.provisioningHostIP = provisioningHostIP;
        this.provisioningMACAddress = provisioningMACAddress;
        this.provisioningNetwork = provisioningNetwork;
        this.provisioningNetworkCIDR = provisioningNetworkCIDR;
        this.provisioningNetworkInterface = provisioningNetworkInterface;
    }

    // Plain JSON-mapped accessors below; one getter/setter pair per field.

    @JsonProperty("apiVIP")
    public String getApiVIP() {
        return apiVIP;
    }

    @JsonProperty("apiVIP")
    public void setApiVIP(String apiVIP) {
        this.apiVIP = apiVIP;
    }

    @JsonProperty("bootstrapOSImage")
    public String getBootstrapOSImage() {
        return bootstrapOSImage;
    }

    @JsonProperty("bootstrapOSImage")
    public void setBootstrapOSImage(String bootstrapOSImage) {
        this.bootstrapOSImage = bootstrapOSImage;
    }

    @JsonProperty("bootstrapProvisioningIP")
    public String getBootstrapProvisioningIP() {
        return bootstrapProvisioningIP;
    }

    @JsonProperty("bootstrapProvisioningIP")
    public void setBootstrapProvisioningIP(String bootstrapProvisioningIP) {
        this.bootstrapProvisioningIP = bootstrapProvisioningIP;
    }

    @JsonProperty("clusterOSImage")
    public String getClusterOSImage() {
        return clusterOSImage;
    }

    @JsonProperty("clusterOSImage")
    public void setClusterOSImage(String clusterOSImage) {
        this.clusterOSImage = clusterOSImage;
    }

    @JsonProperty("clusterProvisioningIP")
    public String getClusterProvisioningIP() {
        return clusterProvisioningIP;
    }

    @JsonProperty("clusterProvisioningIP")
    public void setClusterProvisioningIP(String clusterProvisioningIP) {
        this.clusterProvisioningIP = clusterProvisioningIP;
    }

    @JsonProperty("defaultMachinePlatform")
    public MachinePool getDefaultMachinePlatform() {
        return defaultMachinePlatform;
    }

    @JsonProperty("defaultMachinePlatform")
    public void setDefaultMachinePlatform(MachinePool defaultMachinePlatform) {
        this.defaultMachinePlatform = defaultMachinePlatform;
    }

    @JsonProperty("externalBridge")
    public String getExternalBridge() {
        return externalBridge;
    }

    @JsonProperty("externalBridge")
    public void setExternalBridge(String externalBridge) {
        this.externalBridge = externalBridge;
    }

    @JsonProperty("externalMACAddress")
    public String getExternalMACAddress() {
        return externalMACAddress;
    }

    @JsonProperty("externalMACAddress")
    public void setExternalMACAddress(String externalMACAddress) {
        this.externalMACAddress = externalMACAddress;
    }

    @JsonProperty("hosts")
    public List<Host> getHosts() {
        return hosts;
    }

    @JsonProperty("hosts")
    public void setHosts(List<Host> hosts) {
        this.hosts = hosts;
    }

    @JsonProperty("ingressVIP")
    public String getIngressVIP() {
        return ingressVIP;
    }

    @JsonProperty("ingressVIP")
    public void setIngressVIP(String ingressVIP) {
        this.ingressVIP = ingressVIP;
    }

    @JsonProperty("libvirtURI")
    public String getLibvirtURI() {
        return libvirtURI;
    }

    @JsonProperty("libvirtURI")
    public void setLibvirtURI(String libvirtURI) {
        this.libvirtURI = libvirtURI;
    }

    @JsonProperty("provisioningBridge")
    public String getProvisioningBridge() {
        return provisioningBridge;
    }

    @JsonProperty("provisioningBridge")
    public void setProvisioningBridge(String provisioningBridge) {
        this.provisioningBridge = provisioningBridge;
    }

    @JsonProperty("provisioningDHCPExternal")
    public Boolean getProvisioningDHCPExternal() {
        return provisioningDHCPExternal;
    }

    @JsonProperty("provisioningDHCPExternal")
    public void setProvisioningDHCPExternal(Boolean provisioningDHCPExternal) {
        this.provisioningDHCPExternal = provisioningDHCPExternal;
    }

    @JsonProperty("provisioningDHCPRange")
    public String getProvisioningDHCPRange() {
        return provisioningDHCPRange;
    }

    @JsonProperty("provisioningDHCPRange")
    public void setProvisioningDHCPRange(String provisioningDHCPRange) {
        this.provisioningDHCPRange = provisioningDHCPRange;
    }

    @JsonProperty("provisioningHostIP")
    public String getProvisioningHostIP() {
        return provisioningHostIP;
    }

    @JsonProperty("provisioningHostIP")
    public void setProvisioningHostIP(String provisioningHostIP) {
        this.provisioningHostIP = provisioningHostIP;
    }

    @JsonProperty("provisioningMACAddress")
    public String getProvisioningMACAddress() {
        return provisioningMACAddress;
    }

    @JsonProperty("provisioningMACAddress")
    public void setProvisioningMACAddress(String provisioningMACAddress) {
        this.provisioningMACAddress = provisioningMACAddress;
    }

    @JsonProperty("provisioningNetwork")
    public String getProvisioningNetwork() {
        return provisioningNetwork;
    }

    @JsonProperty("provisioningNetwork")
    public void setProvisioningNetwork(String provisioningNetwork) {
        this.provisioningNetwork = provisioningNetwork;
    }

    @JsonProperty("provisioningNetworkCIDR")
    public String getProvisioningNetworkCIDR() {
        return provisioningNetworkCIDR;
    }

    @JsonProperty("provisioningNetworkCIDR")
    public void setProvisioningNetworkCIDR(String provisioningNetworkCIDR) {
        this.provisioningNetworkCIDR = provisioningNetworkCIDR;
    }

    @JsonProperty("provisioningNetworkInterface")
    public String getProvisioningNetworkInterface() {
        return provisioningNetworkInterface;
    }

    @JsonProperty("provisioningNetworkInterface")
    public void setProvisioningNetworkInterface(String provisioningNetworkInterface) {
        this.provisioningNetworkInterface = provisioningNetworkInterface;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

}
package org.apache.velocity.test;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.velocity.VelocityContext;

/**
 * Used to check that vararg method calls on references work properly.
 *
 * <p>NOTE(review): the exact overload sets of {@link NiceTool} and
 * {@link NastyTool} below ARE the test fixture — Velocity's method
 * resolution between vararg, array, and scalar overloads is what every
 * assertion exercises.  Do not add, remove, or re-type any overload.</p>
 */
public class VarargMethodsTestCase extends BaseTestCase
{
    public VarargMethodsTestCase(final String name)
    {
        super(name);
    }

    /** Seeds the template context with the two tools plus array/scalar arguments. */
    protected void setUpContext(VelocityContext context)
    {
        context.put("nice", new NiceTool());
        context.put("nasty", new NastyTool());
        context.put("objects", new Object[] { this, VelocityContext.class });
        context.put("strings", new String[] { "one", "two" });
        context.put("doubles", new double[] { 1.5, 2.5 });
        context.put("float", new Float(1f));
        context.put("ints", new int[] { 1, 2 });
    }

    /** String vararg: array reference, expanded args, single arg, and zero args. */
    public void testStrings()
    {
        assertEvalEquals("onetwo", "$nice.var($strings)");
        assertEvalEquals("onetwo", "$nice.var('one','two')");
        assertEvalEquals("one", "$nice.var('one')");
        assertEvalEquals("", "$nice.var()");
    }

    /** double vararg: same four call shapes, numeric results rendered as strings. */
    public void testDoubles()
    {
        assertEvalEquals("4.0", "$nice.add($doubles)");
        assertEvalEquals("3.0", "$nice.add(1,2)");
        assertEvalEquals("1.0", "$nice.add(1)");
        assertEvalEquals("0.0", "$nice.add()");
    }

    /** A boxed Float argument must widen into the double vararg. */
    public void testFloatToDoubleVarArg()
    {
        assertEvalEquals("1.0", "$nice.add($float)");
    }

    /** NastyTool.var(String) must beat the inherited String[] vararg for one arg. */
    public void testStringVsStrings()
    {
        assertEvalEquals("onlyone", "$nasty.var('one')");
        assertEvalEquals("onlynull", "$nasty.var($null)");
        assertEvalEquals("", "$nasty.var()");
    }

    /** Scalar int overload vs the double vararg: literal type decides. */
    public void testIntVsDoubles()
    {
        assertEvalEquals("1", "$nasty.add(1)");
        assertEvalEquals("1.0", "$nasty.add(1.0)");
        assertEvalEquals("3.0", "$nasty.add(1.0,2)");
    }

    /** int vararg overload resolution. */
    public void testInts()
    {
        assertEvalEquals("3", "$nasty.add($ints)");
        assertEvalEquals("3", "$nasty.add(1,2)");
        assertEvalEquals("1", "$nasty.add(1)");
        // add(int[]) wins because it is "more specific"
        assertEvalEquals("0", "$nasty.add()");
    }

    /** String[] vararg beats Object[] vararg only when all args are Strings. */
    public void testStringsVsObjectsAKASubclassVararg()
    {
        assertEvalEquals("objects", "$nice.test($objects)");
        assertEvalEquals("objects", "$nice.test($nice,$nasty,$ints)");
        assertEvalEquals("strings", "$nice.test('foo')");
    }

    /** Fixed-arity (Object,String) beats the Object[] vararg for two args. */
    public void testObjectVarArgVsObjectEtc()
    {
        assertEvalEquals("object,string", "$nasty.test($nice,'foo')");
    }

    /** VELOCITY-605: single-arg calls pick the scalar overloads, not Object[]. */
    public void testObjectVarArgVsObjectVelocity605()
    {
        assertEvalEquals("string", "$nasty.test('joe')");
        assertEvalEquals("object", "$nasty.test($nice)");
    }

    public void testNoArgs()
    {
        assertEvalEquals("noargs", "$nasty.test()");
    }

    /** VELOCITY-642: an array reference is passed through to a vararg as-is. */
    public void testPassingArrayToVarArgVelocity642()
    {
        assertEvalEquals("[one, two]", "$nasty.test642($strings)");
        assertEvalEquals("[1, 2]", "#set( $list = [1..2] )$nasty.test642($list.toArray())");
    }

    /** VELOCITY-649: null can be passed to a primitive-array vararg. */
    public void testNullToPrimitiveVarArg()
    {
        assertEvalEquals("int[]", "$nasty.test649($null)");
    }

    /** VELOCITY-651: vararg tail may bind to zero args after fixed params. */
    public void testArgsBeforeVarargWithNoArgs()
    {
        assertEvalEquals("String,String,Object[]", "$nasty.test651('a','b')");
    }

    /** VELOCITY-651: (String,List) must win over (String,String,Object[]). */
    public void testVelocity651()
    {
        assertEvalEquals("String,List", "$nasty.test651('test',['TEST'])");
    }

    /** Base fixture: vararg-style overloads only. */
    public static class NiceTool
    {
        /** Concatenates all strings. */
        public String var(String[] ss)
        {
            StringBuffer out = new StringBuffer();
            for (int i=0; i < ss.length; i++)
            {
                out.append(ss[i]);
            }
            return out.toString();
        }

        /** Sums all doubles. */
        public double add(double[] dd)
        {
            double total = 0;
            for (int i=0; i < dd.length; i++)
            {
                total += dd[i];
            }
            return total;
        }

        public String test(Object[] oo)
        {
            return "objects";
        }

        public String test(String[] oo)
        {
            return "strings";
        }
    }

    /** Subclass fixture: adds competing scalar/fixed-arity overloads. */
    public static class NastyTool extends NiceTool
    {
        public String var(String s)
        {
            return "only"+s;
        }

        /** Sums all ints; competes with the inherited double[] overload. */
        public int add(int[] ii)
        {
            int total = 0;
            for (int i=0; i < ii.length; i++)
            {
                total += ii[i];
            }
            return total;
        }

        public int add(int i)
        {
            return i;
        }

        public String test()
        {
            return "noargs";
        }

        public Object test(Object arg)
        {
            return "object";
        }

        public Object test(String arg)
        {
            return "string";
        }

        public String test(Object[] array)
        {
            return "object[]";
        }

        public String test(Object object, String property)
        {
            return "object,string";
        }

        /** Renders the array as "[a, b, ...]"; null in, null out. */
        public String test642(Object[] array)
        {
            //JDK5: return Arrays.deepToString(array);
            if (array == null)
            {
                return null;
            }
            StringBuffer o = new StringBuffer("[");
            for (int i=0; i < array.length; i++)
            {
                if (i > 0)
                {
                    o.append(", ");
                }
                o.append(String.valueOf(array[i]));
            }
            o.append("]");
            return o.toString();
        }

        public String test649(int[] array)
        {
            return "int[]";
        }

        public String test651(String s, String s2, Object[] args)
        {
            return "String,String,Object[]";
        }

        public String test651(String s, java.util.List l)
        {
            return "String,List";
        }
    }
}
/*
 * AcceleratorActionFactory.java
 *
 * Created on May 20, 2003, 12:38 PM
 */

package xal.extension.application.smf;

import xal.smf.*;
import xal.smf.data.*;

import javax.swing.*;
import javax.swing.filechooser.FileFilter;
import javax.swing.event.*;
import java.awt.event.*;
import java.io.File;
import java.util.*;
import java.util.logging.*;

/**
 * AcceleratorActionFactory provides factory methods specific to the accelerator based application theme.
 * All factory methods are static; the class itself is never instantiated.
 * @author tap
 */
public class AcceleratorActionFactory {
    /** Creates a new instance of AcceleratorActionFactory */
    protected AcceleratorActionFactory() {}

    /**
     * The handler that dynamically builds the Sequence submenu whenever the menu is selected.
     * Specifically it builds menu items for loading accelerator sequences associated with the selected accelerator.
     * The menu is rebuilt from scratch on every "menuSelected" event so it always reflects
     * the document's current accelerator and selected sequence.
     * @param document The document for which the menu is built
     * @return The menu listener that handles the "menuSelected" event.
     */
    public static MenuListener sequenceHandler( final AcceleratorDocument document ) {
        return new MenuListener() {
            /** MenuListener interface */
            public void menuSelected( final MenuEvent event ) {
                final Accelerator accelerator = document.getAccelerator();
                // copy the sequence list so the accelerator's own list is never mutated
                final List<AcceleratorSeq> sequences = accelerator != null ? new ArrayList<AcceleratorSeq>( accelerator.getSequences() ) : Collections.<AcceleratorSeq>emptyList();
                final JMenu menu = (JMenu)event.getSource();
                menu.removeAll();
                // nothing to show when no accelerator is loaded or it has no sequences
                if ( sequences.isEmpty() )  return;
                final AcceleratorSeq selectedSequence = document.getSelectedSequence();
                final ButtonGroup sequenceGroup = new ButtonGroup();  // radio-button semantics: one sequence selected at a time
                for ( final AcceleratorSeq sequence : sequences ) {
                    final JMenuItem sequenceItem = selectSequenceMenuItem( sequence, document );
                    menu.add( sequenceItem );
                    sequenceGroup.add( sequenceItem );
                    // NOTE(review): identity comparison here, but the combo loop below uses
                    // equals() — presumably intentional (plain sequences are shared instances);
                    // TODO confirm.
                    if ( sequence == selectedSequence ) {
                        sequenceItem.setSelected( true );
                    }
                }
                // add the combo sequences
                menu.addSeparator();
                // fetch the pre-defined combo sequences and make sure we don't overwrite the list
                // (past the isEmpty() guard above, accelerator is non-null, so this is the
                // mutable ArrayList branch and the add() below is safe)
                final List<AcceleratorSeqCombo> comboSequences = (accelerator != null) ? new ArrayList<AcceleratorSeqCombo>( accelerator.getComboSequences() ) : Collections.<AcceleratorSeqCombo>emptyList();
                // If the selected sequence is a combo sequence make sure there is an appropriate menu item
                if ( selectedSequence != null && selectedSequence instanceof AcceleratorSeqCombo && !comboSequences.contains( selectedSequence ) ) {
                    comboSequences.add( (AcceleratorSeqCombo)selectedSequence );
                }
                for ( final AcceleratorSeq sequence : comboSequences ) {
                    final JMenuItem sequenceItem = selectSequenceMenuItem( sequence, document );
                    menu.add(sequenceItem);
                    sequenceGroup.add( sequenceItem );
                    if ( sequence.equals( selectedSequence ) ) {
                        sequenceItem.setSelected( true );
                    }
                }
                menu.addSeparator();
                menu.add( comboSequenceSelectorMenuItem(document) );
            }

            /** MenuListener interface */
            public void menuCanceled( final MenuEvent event ) {}

            /** MenuListener interface */
            public void menuDeselected( final MenuEvent event ) {}
        };
    }

    /**
     * Creates the action that, when activated, will load the default accelerator.
     * Failures are reported three ways: stderr, the global logger, and a document error dialog.
     * @param document The document into which the accelerator will be set.
     * @return The action that will load the accelerator when activated.
     */
    public static Action loadDefaultAcceleratorAction( final AcceleratorDocument document ) {
        final Action action = new AbstractAction() {
            /** serialization ID */
            private static final long serialVersionUID = 1L;
            public void actionPerformed( final ActionEvent event ) {
                try {
                    document.loadDefaultAccelerator();
                }
                catch( Exception exception ) {
                    final String message = "Exception while loading the default accelerator: ";
                    System.err.println( message + '\n' + exception );
                    Logger.getLogger("global").log( Level.SEVERE, message, exception );
                    document.displayError( "Exception", message, exception );
                }
            }
        };
        action.putValue( Action.NAME, "load-default-accelerator" );
        return action;
    }

    /**
     * Creates the action that, when activated, will provide an open dialog box that allows the user to select an accelerator file.
     * Cancel and error dialog outcomes are silently ignored; only an approved selection loads a file.
     * @param document The document into which the accelerator will be set.
     * @return The action that when activated will load the accelerator selected by the user.
     */
    public static Action loadAcceleratorAction( final AcceleratorDocument document ) {
        final Action action = new AbstractAction() {
            /** serialization ID */
            private static final long serialVersionUID = 1L;
            public void actionPerformed( final ActionEvent event ) {
                final JFileChooser fileChooser = ((AcceleratorApplication)AcceleratorApplication.getApp()).getAcceleratorFileChooser();
                int status = fileChooser.showOpenDialog( document.getMainWindow() );
                try {
                    switch(status) {
                        case JFileChooser.CANCEL_OPTION:
                            break;
                        case JFileChooser.APPROVE_OPTION:
                            final File fileSelection = fileChooser.getSelectedFile();
                            final String filePath = fileSelection.getAbsolutePath();
                            final Accelerator accelerator = XMLDataManager.acceleratorWithPath( filePath, document.nextChannelFactory() );
                            document.setAccelerator( accelerator, filePath );
                            break;
                        case JFileChooser.ERROR_OPTION:
                            break;
                    }
                }
                catch(Exception exception) {
                    final String message = "Exception while loading the selected accelerator: ";
                    System.err.println( message + '\n' + exception );
                    Logger.getLogger( "global" ).log( Level.SEVERE, message, exception );
                    document.displayError( "Exception", message, exception );
                }
            }
        };
        action.putValue( Action.NAME, "load-accelerator" );
        return action;
    }

    /**
     * Creates the menu item that selects a particular sequence and sets the document's selected sequence to that sequence.
     * @param sequence The sequence that gets selected when the menu item is selected.
     * @param document The document for which the menu item applies the selected sequence.
     * @return The menu item used to select the specific sequence.
     */
    static private JMenuItem selectSequenceMenuItem( final AcceleratorSeq sequence, final AcceleratorDocument document ) {
        final String label = sequence.getId();
        final JRadioButtonMenuItem menuItem = new JRadioButtonMenuItem( label );
        menuItem.setAction( new AbstractAction() {
            /** serialization ID */
            private static final long serialVersionUID = 1L;
            public void actionPerformed( final ActionEvent event ) {
                document.setSelectedSequence( sequence );
            }
        });
        // setAction() replaces the item text, so restore the sequence ID label
        menuItem.setText( label );
        return menuItem;
    }

    /**
     * Creates the menu item that allows the user to construct and select a combo sequence.
     * A null result from the composer dialog means the user cancelled; the document is left unchanged.
     * @param document The document into which the combo sequence is selected.
     * @return The menu item used to construct the combo sequence.
     */
    static private JMenuItem comboSequenceSelectorMenuItem( final AcceleratorDocument document ) {
        final String label = "New Combo Sequence";
        final JMenuItem menuItem = new JMenuItem( label );
        menuItem.setAction( new AbstractAction() {
            /** serialization ID */
            private static final long serialVersionUID = 1L;
            public void actionPerformed( final ActionEvent event ) {
                final AcceleratorSeqCombo comboSequence = ComboSequenceComposer.composeComboSequence( document.getAccelerator(), document.getMainWindow() );
                if ( comboSequence != null ) {
                    document.setSelectedSequence( comboSequence );
                    document.setSelectedSequenceList( comboSequence.getConstituents() );
                }
            }
        });
        // setAction() replaces the item text, so restore the label
        menuItem.setText( label );
        return menuItem;
    }
}
package org.artifactory.ui.rest.service.builds.buildsinfo.tabs.licenses;

import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import org.apache.commons.lang.StringUtils;
import org.artifactory.addon.AddonsManager;
import org.artifactory.addon.license.LicensesAddon;
import org.artifactory.api.build.BuildService;
import org.artifactory.api.context.ContextHelper;
import org.artifactory.api.license.LicenseInfo;
import org.artifactory.api.license.ModuleLicenseModel;
import org.artifactory.api.repo.RepositoryService;
import org.artifactory.build.BuildRun;
import org.artifactory.descriptor.property.PredefinedValue;
import org.artifactory.descriptor.property.Property;
import org.artifactory.descriptor.property.PropertySet;
import org.artifactory.descriptor.repo.LocalRepoDescriptor;
import org.artifactory.repo.InternalRepoPathFactory;
import org.artifactory.repo.RepoPath;
import org.artifactory.rest.common.service.ArtifactoryRestRequest;
import org.artifactory.rest.common.service.RestResponse;
import org.artifactory.rest.common.service.RestService;
import org.artifactory.sapi.common.RepositoryRuntimeException;
import org.artifactory.ui.rest.model.utils.predefinevalues.PreDefineValues;
import org.artifactory.ui.utils.DateUtils;
import org.jfrog.build.api.Build;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import javax.annotation.Nonnull;
import javax.servlet.http.HttpServletResponse;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Resolves the predefined license values for a build artifact so the UI can
 * offer the user a license-change dialog: values already attached to the
 * artifact (or marked as defaults) are returned as "selected", the rest of
 * the property set's predefined values as available choices.
 *
 * @author Chen Keinan
 */
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class ChangeBuildLicenseService implements RestService {
    private static final Logger log = LoggerFactory.getLogger(ChangeBuildLicenseService.class);

    /** Key of the property (set name + "." + property name) that carries the predefined license values. */
    private static final String LICENSES_PROPERTY_NAME = "artifactory.licenses";

    @Autowired
    BuildService buildService;

    @Autowired
    RepositoryService repositoryService;

    @Override
    public void execute(ArtifactoryRestRequest request, RestResponse response) {
        try {
            String id = request.getQueryParamByKey("id");
            String repoKey = request.getQueryParamByKey("repoKey");
            String path = request.getQueryParamByKey("path");
            String name = request.getPathParamByKey("name");
            String buildNumber = request.getPathParamByKey("number");
            String buildStarted = DateUtils.formatBuildDate(Long.parseLong(request.getPathParamByKey("date")));
            Build build = getBuild(name, buildNumber, buildStarted, response);
            if (build == null) {
                // getBuild already reported the failure on the response; bail out
                // instead of passing null downstream (previously an NPE risk).
                return;
            }
            // get license-repo map
            RepoPath repoPath = InternalRepoPathFactory.create(repoKey, path);
            Multimap<RepoPath, ModuleLicenseModel> repoPathLicenseMultimap = getRepoPathLicenseModuleModelMultimap(build);
            Map<String, LicenseInfo> currentValues = getCurrentValues(id, repoPath, repoPathLicenseMultimap);
            PreDefineValues preDefineValues = getLicenseValues(repoPath, currentValues);
            response.iModel(preDefineValues);
        } catch (ParseException e) {
            // Malformed "date" path parameter; nothing sensible to return.
            log.error(e.toString());
        }
    }

    /**
     * Splits the predefined license values of the repo's property set into
     * "selected" (default values or values already present on the artifact)
     * and "available" lists.
     *
     * @param repoPath      repo path whose repository's property sets are consulted
     * @param currentValues license names currently attached to the artifact
     * @return the populated model; empty when the licenses property is not defined for the repo
     */
    private PreDefineValues getLicenseValues(RepoPath repoPath, Map<String, LicenseInfo> currentValues) {
        PreDefineValues values = new PreDefineValues();
        Map<String, Property> propertyItemMap = createPropertyItemMap(repoPath);
        // Look up the licenses property itself rather than testing map emptiness:
        // a non-empty map lacking "artifactory.licenses" previously caused an NPE.
        Property licensesProperty = propertyItemMap.get(LICENSES_PROPERTY_NAME);
        if (licensesProperty != null) {
            List<String> listOfPredefineValuesAsString = new ArrayList<>();
            List<String> selectedValues = new ArrayList<>();
            licensesProperty.getPredefinedValues().forEach(predefinedValue -> {
                if (predefinedValue.isDefaultValue() || currentValues.get(predefinedValue.getValue()) != null) {
                    selectedValues.add(predefinedValue.getValue());
                } else {
                    listOfPredefineValuesAsString.add(predefinedValue.getValue());
                }
            });
            values.setSelectedValues(selectedValues);
            values.setPredefinedValues(listOfPredefineValuesAsString);
        }
        return values;
    }

    /**
     * Gets the license models of the build, keyed by repo path.
     *
     * @param build the build whose license info is collected
     * @return multimap of repo path to license models
     */
    private Multimap<RepoPath, ModuleLicenseModel> getRepoPathLicenseModuleModelMultimap(Build build) {
        AddonsManager addonsManager = ContextHelper.get().beanForType(AddonsManager.class);
        LicensesAddon licensesAddon = addonsManager.addonByType(LicensesAddon.class);
        return licensesAddon.populateLicenseInfoSynchronously(build, false);
    }

    /**
     * Gets all valid licenses currently on the models for a specific id and repo path.
     *
     * @param id                       the id of the model
     * @param repoPath                 the repo path of the model
     * @param repoPathLicenseMultimap  license models keyed by repo path
     * @return the current values (licenses), keyed by license name
     */
    private Map<String, LicenseInfo> getCurrentValues(String id, RepoPath repoPath,
            Multimap<RepoPath, ModuleLicenseModel> repoPathLicenseMultimap) {
        Map<String, LicenseInfo> licenseMap = new HashMap<>();
        Iterable<ModuleLicenseModel> modelsWithSameId =
                Iterables.filter(repoPathLicenseMultimap.get(repoPath), new SameIdPredicate(id));
        for (ModuleLicenseModel moduleLicenseModel : modelsWithSameId) {
            LicenseInfo licenseInfo = moduleLicenseModel.getLicense();
            if (licenseInfo.isValidLicense()) {
                licenseMap.put(licenseInfo.getName(), licenseInfo);
            }
        }
        return licenseMap;
    }

    /** Guava predicate matching license models whose id equals the requested id. */
    private static class SameIdPredicate implements Predicate<ModuleLicenseModel> {
        private final String id;

        private SameIdPredicate(String id) {
            this.id = id;
        }

        @Override
        public boolean apply(@Nonnull ModuleLicenseModel input) {
            return input.getId().equals(id);
        }
    }

    /**
     * Gets the build info, by exact start date when supplied, otherwise the
     * latest build with the given name and number.
     *
     * @param buildName    build name
     * @param buildNumber  build number
     * @param buildStarted build start date (may be blank)
     * @param response     encapsulates data related to the request; receives the error when the build is missing
     * @return the build, or {@code null} when not found (an error has then been set on the response)
     */
    private Build getBuild(String buildName, String buildNumber, String buildStarted, RestResponse response) {
        boolean buildStartedSupplied = StringUtils.isNotBlank(buildStarted);
        try {
            Build build = null;
            if (buildStartedSupplied) {
                BuildRun buildRun = buildService.getBuildRun(buildName, buildNumber, buildStarted);
                if (buildRun != null) {
                    build = buildService.getBuild(buildRun);
                }
            } else {
                //Take the latest build of the specified number
                build = buildService.getLatestBuildByNameAndNumber(buildName, buildNumber);
            }
            if (build == null) {
                StringBuilder builder = new StringBuilder().append("Could not find build '").append(buildName).
                        append("' #").append(buildNumber);
                if (buildStartedSupplied) {
                    builder.append(" that started at ").append(buildStarted);
                }
                throwNotFoundError(response, builder.toString());
            }
            return build;
        } catch (RepositoryRuntimeException e) {
            String errorMessage = new StringBuilder().append("Error locating latest build for '").append(buildName).
                    append("' #").append(buildNumber).append(": ").append(e.getMessage()).toString();
            throwInternalError(errorMessage, response);
        }
        //Should not happen
        return null;
    }

    /**
     * Logs the given message and records it as an error on the response.
     * (Despite the name, no exception is thrown and no status code is set here.)
     *
     * @param errorMessage message to display in the error
     */
    private void throwNotFoundError(RestResponse response, String errorMessage) {
        log.error(errorMessage);
        response.error(errorMessage);
    }

    /**
     * Records an internal error on the response.
     * NOTE(review): responds with SC_NOT_FOUND (404) even though this is an
     * internal error path — kept as-is to preserve the existing API behavior;
     * consider SC_INTERNAL_SERVER_ERROR if clients can tolerate the change.
     *
     * @param errorMessage message to display in the error
     * @param response     encapsulates data related to the request
     */
    private void throwInternalError(String errorMessage, RestResponse response) {
        response.error(errorMessage);
        response.responseCode(HttpServletResponse.SC_NOT_FOUND);
    }

    /**
     * Creates a map of the repo's properties keyed by "propertySetName.propertyName".
     *
     * @param repoPath repo path whose repository descriptor is consulted
     * @return map of properties; empty when the repo key resolves to no local/cached descriptor
     */
    private Map<String, Property> createPropertyItemMap(RepoPath repoPath) {
        Map<String, Property> propertyItemMap = new HashMap<>();
        LocalRepoDescriptor descriptor = repositoryService.localOrCachedRepoDescriptorByKey(repoPath.getRepoKey());
        if (descriptor == null) {
            // Unknown repo key: nothing to collect (previously an NPE risk).
            return propertyItemMap;
        }
        for (PropertySet propertySet : descriptor.getPropertySets()) {
            for (Property property : propertySet.getProperties()) {
                propertyItemMap.put(propertySet.getName() + "." + property.getName(), property);
            }
        }
        return propertyItemMap;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht.colocated; import java.io.Externalizable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.Map; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.CacheEntryPredicate; import org.apache.ignite.internal.processors.cache.CacheObject; import org.apache.ignite.internal.processors.cache.CacheOperationContext; import org.apache.ignite.internal.processors.cache.EntryGetResult; import org.apache.ignite.internal.processors.cache.GridCacheConcurrentMap; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheEntryEx; import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException; import 
org.apache.ignite.internal.processors.cache.GridCacheLockTimeoutException; import org.apache.ignite.internal.processors.cache.GridCacheMapEntryFactory; import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate; import org.apache.ignite.internal.processors.cache.GridCacheReturn; import org.apache.ignite.internal.processors.cache.IgniteCacheExpiryPolicy; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.database.CacheDataRow; import org.apache.ignite.internal.processors.cache.distributed.GridDistributedCacheEntry; import org.apache.ignite.internal.processors.cache.distributed.GridDistributedLockCancelledException; import org.apache.ignite.internal.processors.cache.distributed.GridDistributedUnlockRequest; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheEntry; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtEmbeddedFuture; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtFinishedFuture; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtInvalidPartitionException; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLockFuture; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTransactionalCacheAdapter; import org.apache.ignite.internal.processors.cache.distributed.dht.GridPartitionedGetFuture; import org.apache.ignite.internal.processors.cache.distributed.dht.GridPartitionedSingleGetFuture; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearGetResponse; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockResponse; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearSingleGetResponse; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTransactionalCache; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal; 
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearUnlockRequest; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxLocalEx; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.future.GridEmbeddedFuture; import org.apache.ignite.internal.util.future.GridFinishedFuture; import org.apache.ignite.internal.util.lang.IgnitePair; import org.apache.ignite.internal.util.typedef.C2; import org.apache.ignite.internal.util.typedef.CI2; import org.apache.ignite.internal.util.typedef.CX1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.plugin.security.SecurityPermission; import org.apache.ignite.transactions.TransactionIsolation; import org.jetbrains.annotations.Nullable; /** * Colocated cache. */ public class GridDhtColocatedCache<K, V> extends GridDhtTransactionalCacheAdapter<K, V> { /** */ private static final long serialVersionUID = 0L; /** * Empty constructor required for {@link Externalizable} */ public GridDhtColocatedCache() { // No-op. } /** * @param ctx Cache context. */ public GridDhtColocatedCache(GridCacheContext<K, V> ctx) { super(ctx); } /** * Creates colocated cache with specified map. * * @param ctx Cache context. * @param map Cache map. 
*/ public GridDhtColocatedCache(GridCacheContext<K, V> ctx, GridCacheConcurrentMap map) { super(ctx, map); } /** {@inheritDoc} */ @Override public boolean isColocated() { return true; } /** {@inheritDoc} */ @Override public void start() throws IgniteCheckedException { super.start(); ctx.io().addCacheHandler(ctx.cacheId(), GridNearGetResponse.class, new CI2<UUID, GridNearGetResponse>() { @Override public void apply(UUID nodeId, GridNearGetResponse res) { processNearGetResponse(nodeId, res); } }); ctx.io().addCacheHandler(ctx.cacheId(), GridNearSingleGetResponse.class, new CI2<UUID, GridNearSingleGetResponse>() { @Override public void apply(UUID nodeId, GridNearSingleGetResponse res) { processNearSingleGetResponse(nodeId, res); } }); ctx.io().addCacheHandler(ctx.cacheId(), GridNearLockResponse.class, new CI2<UUID, GridNearLockResponse>() { @Override public void apply(UUID nodeId, GridNearLockResponse res) { processLockResponse(nodeId, res); } }); } /** * Gets or creates entry for given key and given topology version. * * @param key Key for entry. * @param topVer Topology version. * @param allowDetached Whether to allow detached entries. If {@code true} and node is not primary * for given key, a new detached entry will be created. Otherwise, entry will be obtained from * dht cache map. * @return Cache entry. * @throws GridDhtInvalidPartitionException If {@code allowDetached} is false and node is not primary * for given key. */ public GridDistributedCacheEntry entryExx( KeyCacheObject key, AffinityTopologyVersion topVer, boolean allowDetached ) { return allowDetached && !ctx.affinity().primaryByKey(ctx.localNode(), key, topVer) ? 
createEntry(key) : entryExx(key, topVer); } /** {@inheritDoc} */ @Override public boolean isLocked(K key) { KeyCacheObject cacheKey = ctx.toCacheKeyObject(key); return ctx.mvcc().isLockedByThread(ctx.txKey(cacheKey), -1); } /** {@inheritDoc} */ @Override public boolean isLockedByThread(K key) { KeyCacheObject cacheKey = ctx.toCacheKeyObject(key); return ctx.mvcc().isLockedByThread(ctx.txKey(cacheKey), Thread.currentThread().getId()); } /** {@inheritDoc} */ @Override protected IgniteInternalFuture<V> getAsync(final K key, boolean forcePrimary, boolean skipTx, @Nullable UUID subjId, String taskName, final boolean deserializeBinary, final boolean skipVals, boolean canRemap, final boolean needVer) { ctx.checkSecurity(SecurityPermission.CACHE_READ); if (keyCheck) validateCacheKey(key); GridNearTxLocal tx = ctx.tm().threadLocalTx(ctx); final CacheOperationContext opCtx = ctx.operationContextPerCall(); final boolean recovery = opCtx != null && opCtx.recovery(); if (tx != null && !tx.implicit() && !skipTx) { return asyncOp(tx, new AsyncOp<V>() { @Override public IgniteInternalFuture<V> op(GridNearTxLocal tx, AffinityTopologyVersion readyTopVer) { IgniteInternalFuture<Map<Object, Object>> fut = tx.getAllAsync(ctx, readyTopVer, Collections.singleton(ctx.toCacheKeyObject(key)), deserializeBinary, skipVals, false, opCtx != null && opCtx.skipStore(), recovery, needVer); return fut.chain(new CX1<IgniteInternalFuture<Map<Object, Object>>, V>() { @SuppressWarnings("unchecked") @Override public V applyx(IgniteInternalFuture<Map<Object, Object>> e) throws IgniteCheckedException { Map<Object, Object> map = e.get(); assert map.isEmpty() || map.size() == 1 : map.size(); if (skipVals) { Boolean val = map.isEmpty() ? false : (Boolean)F.firstValue(map); return (V)(val); } return (V)F.firstValue(map); } }); } }, opCtx, /*retry*/false); } AffinityTopologyVersion topVer = tx == null ? (canRemap ? 
ctx.affinity().affinityTopologyVersion() : ctx.shared().exchange().readyAffinityVersion()) : tx.topologyVersion(); subjId = ctx.subjectIdPerCall(subjId, opCtx); GridPartitionedSingleGetFuture fut = new GridPartitionedSingleGetFuture(ctx, ctx.toCacheKeyObject(key), topVer, opCtx == null || !opCtx.skipStore(), forcePrimary, subjId, taskName, deserializeBinary, skipVals ? null : expiryPolicy(opCtx != null ? opCtx.expiry() : null), skipVals, canRemap, needVer, /*keepCacheObjects*/false, opCtx != null && opCtx.recovery()); fut.init(); return (IgniteInternalFuture<V>)fut; } /** {@inheritDoc} */ @Override public IgniteInternalFuture<Map<K, V>> getAllAsync( @Nullable final Collection<? extends K> keys, boolean forcePrimary, boolean skipTx, @Nullable UUID subjId, String taskName, final boolean deserializeBinary, final boolean recovery, final boolean skipVals, boolean canRemap, final boolean needVer ) { ctx.checkSecurity(SecurityPermission.CACHE_READ); if (F.isEmpty(keys)) return new GridFinishedFuture<>(Collections.<K, V>emptyMap()); if (keyCheck) validateCacheKeys(keys); GridNearTxLocal tx = ctx.tm().threadLocalTx(ctx); final CacheOperationContext opCtx = ctx.operationContextPerCall(); if (tx != null && !tx.implicit() && !skipTx) { return asyncOp(tx, new AsyncOp<Map<K, V>>(keys) { @Override public IgniteInternalFuture<Map<K, V>> op(GridNearTxLocal tx, AffinityTopologyVersion readyTopVer) { return tx.getAllAsync(ctx, readyTopVer, ctx.cacheKeysView(keys), deserializeBinary, skipVals, false, opCtx != null && opCtx.skipStore(), recovery, needVer); } }, opCtx, /*retry*/false); } AffinityTopologyVersion topVer = tx == null ? (canRemap ? ctx.affinity().affinityTopologyVersion() : ctx.shared().exchange().readyAffinityVersion()) : tx.topologyVersion(); subjId = ctx.subjectIdPerCall(subjId, opCtx); return loadAsync( ctx.cacheKeysView(keys), opCtx == null || !opCtx.skipStore(), forcePrimary, topVer, subjId, taskName, deserializeBinary, recovery, skipVals ? 
null : expiryPolicy(opCtx != null ? opCtx.expiry() : null), skipVals, canRemap, needVer); } /** * @param keys Keys to load. * @param readThrough Read through flag. * @param forcePrimary Force get from primary node flag. * @param topVer Topology version. * @param subjId Subject ID. * @param taskName Task name. * @param deserializeBinary Deserialize binary flag. * @param expiryPlc Expiry policy. * @param skipVals Skip values flag. * @param canRemap Can remap flag. * @param needVer Need version. * @return Loaded values. */ public IgniteInternalFuture<Map<K, V>> loadAsync( @Nullable Collection<KeyCacheObject> keys, boolean readThrough, boolean forcePrimary, AffinityTopologyVersion topVer, @Nullable UUID subjId, String taskName, boolean deserializeBinary, boolean recovery, @Nullable IgniteCacheExpiryPolicy expiryPlc, boolean skipVals, boolean canRemap, boolean needVer) { return loadAsync(keys, readThrough, forcePrimary, topVer, subjId, taskName, deserializeBinary, recovery, expiryPlc, skipVals, canRemap, needVer, false); } /** * @param key Key to load. * @param readThrough Read through flag. * @param forcePrimary Force get from primary node flag. * @param topVer Topology version. * @param subjId Subject ID. * @param taskName Task name. * @param deserializeBinary Deserialize binary flag. * @param expiryPlc Expiry policy. * @param skipVals Skip values flag. * @param canRemap Flag indicating whether future can be remapped on a newer topology version. * @param needVer If {@code true} returns values as tuples containing value and version. * @param keepCacheObj Keep cache objects flag. * @return Load future. 
*/
    public final IgniteInternalFuture<Object> loadAsync(
        KeyCacheObject key,
        boolean readThrough,
        boolean forcePrimary,
        AffinityTopologyVersion topVer,
        @Nullable UUID subjId,
        String taskName,
        boolean deserializeBinary,
        @Nullable IgniteCacheExpiryPolicy expiryPlc,
        boolean skipVals,
        boolean canRemap,
        boolean needVer,
        boolean keepCacheObj,
        boolean recovery
    ) {
        // Single-key gets are served by a dedicated single-get future rather than
        // the generic multi-key one.
        GridPartitionedSingleGetFuture fut = new GridPartitionedSingleGetFuture(ctx,
            ctx.toCacheKeyObject(key),
            topVer,
            readThrough,
            forcePrimary,
            subjId,
            taskName,
            deserializeBinary,
            expiryPlc,
            skipVals,
            canRemap,
            needVer,
            keepCacheObj,
            recovery);

        fut.init();

        return fut;
    }

    /**
     * @param keys Keys to load.
     * @param readThrough Read through flag.
     * @param forcePrimary Force get from primary node flag.
     * @param topVer Topology version.
     * @param subjId Subject ID.
     * @param taskName Task name.
     * @param deserializeBinary Deserialize binary flag.
     * @param recovery Recovery mode flag.
     * @param expiryPlc Expiry policy.
     * @param skipVals Skip values flag.
     * @param canRemap Flag indicating whether future can be remapped on a newer topology version.
     * @param needVer If {@code true} returns values as tuples containing value and version.
     * @param keepCacheObj Keep cache objects flag.
     * @return Load future.
     */
    public final IgniteInternalFuture<Map<K, V>> loadAsync(
        @Nullable Collection<KeyCacheObject> keys,
        boolean readThrough,
        boolean forcePrimary,
        AffinityTopologyVersion topVer,
        @Nullable UUID subjId,
        String taskName,
        boolean deserializeBinary,
        boolean recovery,
        @Nullable IgniteCacheExpiryPolicy expiryPlc,
        boolean skipVals,
        boolean canRemap,
        boolean needVer,
        boolean keepCacheObj
    ) {
        // Nothing to load: complete immediately with an empty map.
        if (keys == null || keys.isEmpty())
            return new GridFinishedFuture<>(Collections.<K, V>emptyMap());

        if (expiryPlc == null)
            expiryPlc = expiryPolicy(null);

        // Optimisation: try to resolve value locally and escape 'get future' creation.
if (!forcePrimary && ctx.affinityNode()) { try { Map<K, V> locVals = null; boolean success = true; boolean readNoEntry = ctx.readNoEntry(expiryPlc, false); boolean evt = !skipVals; for (KeyCacheObject key : keys) { if (readNoEntry) { CacheDataRow row = ctx.offheap().read(ctx, key); if (row != null) { long expireTime = row.expireTime(); if (expireTime == 0 || expireTime > U.currentTimeMillis()) { if (locVals == null) locVals = U.newHashMap(keys.size()); ctx.addResult(locVals, key, row.value(), skipVals, keepCacheObj, deserializeBinary, true, null, row.version(), 0, 0, needVer); if (evt) { ctx.events().readEvent(key, null, row.value(), subjId, taskName, !deserializeBinary); } } else success = false; } else success = false; } else { GridCacheEntryEx entry = null; while (true) { try { entry = entryEx(key); // If our DHT cache do has value, then we peek it. if (entry != null) { boolean isNew = entry.isNewLocked(); EntryGetResult getRes = null; CacheObject v = null; GridCacheVersion ver = null; if (needVer) { getRes = entry.innerGetVersioned( null, null, /*update-metrics*/false, /*event*/evt, subjId, null, taskName, expiryPlc, !deserializeBinary, null); if (getRes != null) { v = getRes.value(); ver = getRes.version(); } } else { v = entry.innerGet( null, null, /*read-through*/false, /*update-metrics*/false, /*event*/evt, subjId, null, taskName, expiryPlc, !deserializeBinary); } // Entry was not in memory or in swap, so we remove it from cache. if (v == null) { GridCacheVersion obsoleteVer = context().versions().next(); if (isNew && entry.markObsoleteIfEmpty(obsoleteVer)) removeEntry(entry); success = false; } else { if (locVals == null) locVals = U.newHashMap(keys.size()); ctx.addResult(locVals, key, v, skipVals, keepCacheObj, deserializeBinary, true, getRes, ver, 0, 0, needVer); } } else success = false; break; // While. } catch (GridCacheEntryRemovedException ignored) { // No-op, retry. 
} catch (GridDhtInvalidPartitionException ignored) { success = false; break; // While. } finally { if (entry != null) context().evicts().touch(entry, topVer); } } } if (!success) break; else if (!skipVals && ctx.config().isStatisticsEnabled()) ctx.cache().metrics0().onRead(true); } if (success) { sendTtlUpdateRequest(expiryPlc); return new GridFinishedFuture<>(locVals); } } catch (IgniteCheckedException e) { return new GridFinishedFuture<>(e); } } if (expiryPlc != null) expiryPlc.reset(); // Either reload or not all values are available locally. GridPartitionedGetFuture<K, V> fut = new GridPartitionedGetFuture<>( ctx, keys, topVer, readThrough, forcePrimary, subjId, taskName, deserializeBinary, recovery, expiryPlc, skipVals, canRemap, needVer, keepCacheObj); fut.init(); return fut; } /** * This is an entry point to pessimistic locking within transaction. * * {@inheritDoc} */ @Override public IgniteInternalFuture<Boolean> lockAllAsync( Collection<KeyCacheObject> keys, long timeout, @Nullable IgniteTxLocalEx tx, boolean isInvalidate, boolean isRead, boolean retval, @Nullable TransactionIsolation isolation, long createTtl, long accessTtl ) { assert tx == null || tx instanceof GridNearTxLocal : tx; GridNearTxLocal txx = (GridNearTxLocal)tx; CacheOperationContext opCtx = ctx.operationContextPerCall(); GridDhtColocatedLockFuture fut = new GridDhtColocatedLockFuture(ctx, keys, txx, isRead, retval, timeout, createTtl, accessTtl, CU.empty0(), opCtx != null && opCtx.skipStore(), opCtx != null && opCtx.isKeepBinary(), opCtx != null && opCtx.recovery()); // Future will be added to mvcc only if it was mapped to remote nodes. fut.map(); return fut; } /** {@inheritDoc} */ @Override public GridNearTransactionalCache<K, V> near() { assert false : "Near cache is not available in colocated mode."; return null; } /** {@inheritDoc} */ @Override public void unlockAll(Collection<? 
extends K> keys) {
        if (keys.isEmpty())
            return;

        try {
            GridCacheVersion ver = null;

            // Estimated per-node key count, computed lazily on the first removed lock.
            int keyCnt = -1;

            Map<ClusterNode, GridNearUnlockRequest> map = null;

            // Keys whose primary is the local node; unlocked directly via removeLocks below.
            Collection<KeyCacheObject> locKeys = new ArrayList<>();

            for (K key : keys) {
                KeyCacheObject cacheKey = ctx.toCacheKeyObject(key);

                IgniteTxKey txKey = ctx.txKey(cacheKey);

                GridDistributedCacheEntry entry = peekExx(cacheKey);

                // Remove this thread's explicit lock candidate; null means the
                // current thread held no explicit lock for the key.
                GridCacheMvccCandidate lock = ctx.mvcc().removeExplicitLock(Thread.currentThread().getId(), txKey, null);

                if (lock != null) {
                    final AffinityTopologyVersion topVer = lock.topologyVersion();

                    assert topVer.compareTo(AffinityTopologyVersion.ZERO) > 0;

                    // Send request to remove from remote nodes.
                    ClusterNode primary = ctx.affinity().primaryByKey(key, topVer);

                    if (primary == null) {
                        if (log.isDebugEnabled())
                            log.debug("Failed to unlock keys (all partition nodes left the grid).");

                        continue;
                    }

                    if (map == null) {
                        Collection<ClusterNode> affNodes = CU.allNodes(ctx, topVer);

                        // Average number of keys per affinity node, used to presize requests.
                        keyCnt = (int)Math.ceil((double)keys.size() / affNodes.size());

                        map = U.newHashMap(affNodes.size());
                    }

                    if (ver == null)
                        ver = lock.version();

                    if (!lock.reentry()) {
                        // All locks in one unlockAll call must share a single lock version.
                        if (!ver.equals(lock.version()))
                            throw new IgniteCheckedException("Failed to unlock (if keys were locked separately, " +
                                "then they need to be unlocked separately): " + keys);

                        if (!primary.isLocal()) {
                            // Batch remote unlocks into one request per primary node.
                            GridNearUnlockRequest req = map.get(primary);

                            if (req == null) {
                                map.put(primary, req = new GridNearUnlockRequest(ctx.cacheId(), keyCnt,
                                    ctx.deploymentEnabled()));

                                req.version(ver);
                            }

                            KeyCacheObject key0 = entry != null ?
entry.key() : cacheKey; req.addKey(key0, ctx); } else locKeys.add(cacheKey); if (log.isDebugEnabled()) log.debug("Removed lock (will distribute): " + lock); } else if (log.isDebugEnabled()) log.debug("Current thread still owns lock (or there are no other nodes)" + " [lock=" + lock + ", curThreadId=" + Thread.currentThread().getId() + ']'); } } if (ver == null) return; if (!locKeys.isEmpty()) removeLocks(ctx.localNodeId(), ver, locKeys, true); for (Map.Entry<ClusterNode, GridNearUnlockRequest> mapping : map.entrySet()) { ClusterNode n = mapping.getKey(); GridDistributedUnlockRequest req = mapping.getValue(); assert !n.isLocal(); if (!F.isEmpty(req.keys())) { try { // We don't wait for reply to this message. ctx.io().send(n, req, ctx.ioPolicy()); } catch (ClusterTopologyCheckedException e) { if (log.isDebugEnabled()) log.debug("Failed to send unlock request (node has left the grid) [keys=" + req.keys() + ", n=" + n + ", e=" + e + ']'); } catch (IgniteCheckedException e) { U.error(log, "Failed to send unlock request [keys=" + req.keys() + ", n=" + n + ']', e); } } } } catch (IgniteCheckedException ex) { U.error(log, "Failed to unlock the lock for keys: " + keys, ex); } } /** * Removes locks regardless of whether they are owned or not for given * version and keys. * * @param threadId Thread ID. * @param ver Lock version. * @param keys Keys. 
*/ public void removeLocks(long threadId, GridCacheVersion ver, Collection<KeyCacheObject> keys) { if (keys.isEmpty()) return; try { int keyCnt = -1; Map<ClusterNode, GridNearUnlockRequest> map = null; Collection<KeyCacheObject> locKeys = new LinkedList<>(); for (KeyCacheObject key : keys) { IgniteTxKey txKey = ctx.txKey(key); GridCacheMvccCandidate lock = ctx.mvcc().removeExplicitLock(threadId, txKey, ver); if (lock != null) { AffinityTopologyVersion topVer = lock.topologyVersion(); if (map == null) { Collection<ClusterNode> affNodes = CU.allNodes(ctx, topVer); keyCnt = (int)Math.ceil((double)keys.size() / affNodes.size()); map = U.newHashMap(affNodes.size()); } ClusterNode primary = ctx.affinity().primaryByKey(key, topVer); if (primary == null) { if (log.isDebugEnabled()) log.debug("Failed to remove locks (all partition nodes left the grid)."); continue; } if (!primary.isLocal()) { // Send request to remove from remote nodes. GridNearUnlockRequest req = map.get(primary); if (req == null) { map.put(primary, req = new GridNearUnlockRequest(ctx.cacheId(), keyCnt, ctx.deploymentEnabled())); req.version(ver); } GridCacheEntryEx entry = peekEx(key); KeyCacheObject key0 = entry != null ? entry.key() : key; req.addKey(key0, ctx); } else locKeys.add(key); } } if (!locKeys.isEmpty()) removeLocks(ctx.localNodeId(), ver, locKeys, true); if (map == null || map.isEmpty()) return; IgnitePair<Collection<GridCacheVersion>> versPair = ctx.tm().versions(ver); Collection<GridCacheVersion> committed = versPair.get1(); Collection<GridCacheVersion> rolledback = versPair.get2(); for (Map.Entry<ClusterNode, GridNearUnlockRequest> mapping : map.entrySet()) { ClusterNode n = mapping.getKey(); GridDistributedUnlockRequest req = mapping.getValue(); if (!F.isEmpty(req.keys())) { req.completedVersions(committed, rolledback); try { // We don't wait for reply to this message. 
ctx.io().send(n, req, ctx.ioPolicy()); } catch (ClusterTopologyCheckedException e) { if (log.isDebugEnabled()) log.debug("Failed to send unlock request (node has left the grid) [keys=" + req.keys() + ", n=" + n + ", e=" + e + ']'); } catch (IgniteCheckedException e) { U.error(log, "Failed to send unlock request [keys=" + req.keys() + ", n=" + n + ']', e); } } } } catch (IgniteCheckedException ex) { U.error(log, "Failed to unlock the lock for keys: " + keys, ex); } } /** * @param cacheCtx Cache context. * @param tx Started colocated transaction (if any). * @param threadId Thread ID. * @param ver Lock version. * @param topVer Topology version. * @param keys Mapped keys. * @param txRead Tx read. * @param retval Return value flag. * @param timeout Lock timeout. * @param createTtl TTL for create operation. * @param accessTtl TTL for read operation. * @param filter filter Optional filter. * @param skipStore Skip store flag. * @return Lock future. */ IgniteInternalFuture<Exception> lockAllAsync( final GridCacheContext<?, ?> cacheCtx, @Nullable final GridNearTxLocal tx, final long threadId, final GridCacheVersion ver, final AffinityTopologyVersion topVer, final Collection<KeyCacheObject> keys, final boolean txRead, final boolean retval, final long timeout, final long createTtl, final long accessTtl, @Nullable final CacheEntryPredicate[] filter, final boolean skipStore, final boolean keepBinary ) { assert keys != null; IgniteInternalFuture<Object> keyFut = ctx.group().preloader().request(cacheCtx, keys, topVer); // Prevent embedded future creation if possible. if (keyFut == null || keyFut.isDone()) { // Check for exception. 
if (keyFut != null && keyFut.error() != null) return new GridFinishedFuture<>(keyFut.error()); return lockAllAsync0(cacheCtx, tx, threadId, ver, topVer, keys, txRead, retval, timeout, createTtl, accessTtl, filter, skipStore, keepBinary); } else { return new GridEmbeddedFuture<>(keyFut, new C2<Object, Exception, IgniteInternalFuture<Exception>>() { @Override public IgniteInternalFuture<Exception> apply(Object o, Exception exx) { if (exx != null) return new GridDhtFinishedFuture<>(exx); return lockAllAsync0(cacheCtx, tx, threadId, ver, topVer, keys, txRead, retval, timeout, createTtl, accessTtl, filter, skipStore, keepBinary); } } ); } } /** * @param cacheCtx Cache context. * @param tx Started colocated transaction (if any). * @param threadId Thread ID. * @param ver Lock version. * @param topVer Topology version. * @param keys Mapped keys. * @param txRead Tx read. * @param retval Return value flag. * @param timeout Lock timeout. * @param createTtl TTL for create operation. * @param accessTtl TTL for read operation. * @param filter filter Optional filter. * @param skipStore Skip store flag. * @return Lock future. */ private IgniteInternalFuture<Exception> lockAllAsync0( GridCacheContext<?, ?> cacheCtx, @Nullable final GridNearTxLocal tx, long threadId, final GridCacheVersion ver, AffinityTopologyVersion topVer, final Collection<KeyCacheObject> keys, final boolean txRead, boolean retval, final long timeout, final long createTtl, final long accessTtl, @Nullable final CacheEntryPredicate[] filter, boolean skipStore, boolean keepBinary) { int cnt = keys.size(); if (tx == null) { GridDhtLockFuture fut = new GridDhtLockFuture(ctx, ctx.localNodeId(), ver, topVer, cnt, txRead, retval, timeout, tx, threadId, createTtl, accessTtl, filter, skipStore, keepBinary); // Add before mapping. 
if (!ctx.mvcc().addFuture(fut)) throw new IllegalStateException("Duplicate future ID: " + fut); boolean timedout = false; for (KeyCacheObject key : keys) { if (timedout) break; while (true) { GridDhtCacheEntry entry = entryExx(key, topVer); try { fut.addEntry(key == null ? null : entry); if (fut.isDone()) timedout = true; break; } catch (GridCacheEntryRemovedException ignore) { if (log.isDebugEnabled()) log.debug("Got removed entry when adding lock (will retry): " + entry); } catch (GridDistributedLockCancelledException e) { if (log.isDebugEnabled()) log.debug("Failed to add entry [err=" + e + ", entry=" + entry + ']'); fut.onError(e); return new GridDhtFinishedFuture<>(e); } } } // This will send remote messages. fut.map(); return new GridDhtEmbeddedFuture<>( new C2<Boolean, Exception, Exception>() { @Override public Exception apply(Boolean b, Exception e) { if (e != null) e = U.unwrap(e); else if (!b) e = new GridCacheLockTimeoutException(ver); return e; } }, fut); } else { // Handle implicit locks for pessimistic transactions. ctx.tm().txContext(tx); if (log.isDebugEnabled()) log.debug("Performing colocated lock [tx=" + tx + ", keys=" + keys + ']'); IgniteInternalFuture<GridCacheReturn> txFut = tx.lockAllAsync(cacheCtx, keys, retval, txRead, createTtl, accessTtl, skipStore, keepBinary); return new GridDhtEmbeddedFuture<>( new C2<GridCacheReturn, Exception, Exception>() { @Override public Exception apply(GridCacheReturn ret, Exception e) { if (e != null) e = U.unwrap(e); assert !tx.empty(); return e; } }, txFut); } } /** * @param nodeId Node ID. * @param res Response. */ private void processLockResponse(UUID nodeId, GridNearLockResponse res) { if (txLockMsgLog.isDebugEnabled()) txLockMsgLog.debug("Received near lock response [txId=" + res.version() + ", node=" + nodeId + ']'); assert nodeId != null; assert res != null; GridDhtColocatedLockFuture fut = (GridDhtColocatedLockFuture)ctx.mvcc(). 
<Boolean>mvccFuture(res.version(), res.futureId());

        if (fut != null)
            fut.onResult(nodeId, res);
        else {
            // NOTE(review): an unmatched response is only logged — presumably the
            // future already completed or was removed (e.g. timeout); confirm.
            if (txLockMsgLog.isDebugEnabled()) {
                txLockMsgLog.debug("Received near lock response for unknown future [txId=" + res.version() +
                    ", node=" + nodeId +
                    ", res=" + res + ']');
            }
        }
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDhtColocatedCache.class, this, super.toString());
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.ui.table; import com.intellij.ide.IdeBundle; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.JDOMUtil; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.*; import com.intellij.ui.table.JBTable; import com.intellij.ui.table.TableView; import com.intellij.util.Function; import com.intellij.util.FunctionUtil; import com.intellij.util.PlatformIcons; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.CollectionItemEditor; import com.intellij.util.ui.CollectionModelEditor; import com.intellij.util.ui.ColumnInfo; import com.intellij.util.ui.ListTableModel; import com.intellij.util.xmlb.SkipDefaultValuesSerializationFilters; import com.intellij.util.xmlb.XmlSerializer; import gnu.trove.TObjectObjectProcedure; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import javax.swing.*; import javax.swing.event.TableModelEvent; import javax.swing.event.TableModelListener; import javax.swing.table.TableModel; import java.awt.*; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class TableModelEditor<T> extends CollectionModelEditor<T, CollectionItemEditor<T>> { private final TableView<T> table; private final ToolbarDecorator toolbarDecorator; private 
final MyListTableModel model; public TableModelEditor(@NotNull ColumnInfo[] columns, @NotNull CollectionItemEditor<T> itemEditor, @NotNull String emptyText) { this(Collections.<T>emptyList(), columns, itemEditor, emptyText); } /** * source will be copied, passed list will not be used directly * * Implement {@link DialogItemEditor} instead of {@link CollectionItemEditor} if you want provide dialog to edit. */ public TableModelEditor(@NotNull List<T> items, @NotNull ColumnInfo[] columns, @NotNull CollectionItemEditor<T> itemEditor, @NotNull String emptyText) { super(itemEditor); model = new MyListTableModel(columns, new ArrayList<T>(items)); table = new TableView<T>(model); table.setDefaultEditor(Enum.class, ComboBoxTableCellEditor.INSTANCE); table.setStriped(true); table.setEnableAntialiasing(true); preferredScrollableViewportHeightInRows(JBTable.PREFERRED_SCROLLABLE_VIEWPORT_HEIGHT_IN_ROWS); new TableSpeedSearch(table); ColumnInfo firstColumn = columns[0]; if ((firstColumn.getColumnClass() == boolean.class || firstColumn.getColumnClass() == Boolean.class) && firstColumn.getName().isEmpty()) { TableUtil.setupCheckboxColumn(table.getColumnModel().getColumn(0)); } boolean needTableHeader = false; for (ColumnInfo column : columns) { if (!StringUtil.isEmpty(column.getName())) { needTableHeader = true; break; } } if (!needTableHeader) { table.setTableHeader(null); } table.getEmptyText().setText(emptyText); MyRemoveAction removeAction = new MyRemoveAction(); toolbarDecorator = ToolbarDecorator.createDecorator(table, this).setRemoveAction(removeAction).setRemoveActionUpdater(removeAction); if (itemEditor instanceof DialogItemEditor) { addDialogActions(); } } @NotNull public TableModelEditor<T> preferredScrollableViewportHeightInRows(int rows) { table.setPreferredScrollableViewportSize(new Dimension(200, table.getRowHeight() * rows)); return this; } private void addDialogActions() { toolbarDecorator.setEditAction(new AnActionButtonRunnable() { @Override public void 
run(AnActionButton button) { T item = table.getSelectedObject(); if (item != null) { Function<T, T> mutator; if (helper.isMutable(item)) { mutator = FunctionUtil.id(); } else { final int selectedRow = table.getSelectedRow(); mutator = new Function<T, T>() { @Override public T fun(T item) { return helper.getMutable(item, selectedRow); } }; } ((DialogItemEditor<T>)itemEditor).edit(item, mutator, false); table.requestFocus(); } } }).setEditActionUpdater(new AnActionButtonUpdater() { @Override public boolean isEnabled(AnActionEvent e) { T item = table.getSelectedObject(); return item != null && ((DialogItemEditor<T>)itemEditor).isEditable(item); } }); if (((DialogItemEditor)itemEditor).isUseDialogToAdd()) { toolbarDecorator.setAddAction(new AnActionButtonRunnable() { @Override public void run(AnActionButton button) { T item = createElement(); ((DialogItemEditor<T>)itemEditor).edit(item, new Function<T, T>() { @Override public T fun(T item) { model.addRow(item); return item; } }, true); } }); } } @NotNull public TableModelEditor<T> disableUpDownActions() { toolbarDecorator.disableUpDownActions(); return this; } @NotNull public TableModelEditor<T> enabled(boolean value) { table.setEnabled(value); return this; } public static abstract class DataChangedListener<T> implements TableModelListener { public abstract void dataChanged(@NotNull ColumnInfo<T, ?> columnInfo, int rowIndex); @Override public void tableChanged(@NotNull TableModelEvent e) { } } public TableModelEditor<T> modelListener(@NotNull DataChangedListener<T> listener) { model.dataChangedListener = listener; model.addTableModelListener(listener); return this; } @NotNull public ListTableModel<T> getModel() { return model; } public static abstract class DialogItemEditor<T> extends CollectionItemEditor<T> { public abstract void edit(@NotNull T item, @NotNull Function<T, T> mutator, boolean isAdd); public abstract void applyEdited(@NotNull T oldItem, @NotNull T newItem); public boolean isEditable(@NotNull T item) { 
return true;
  }

  public boolean isUseDialogToAdd() {
    return false;
  }
  }

  /**
   * Copies the state of {@code oldItem} into {@code newItem} via XML serialization
   * (properties still at their default values are skipped) and returns {@code newItem}.
   */
  @NotNull
  public static <T> T cloneUsingXmlSerialization(@NotNull T oldItem, @NotNull T newItem) {
    Element serialized = XmlSerializer.serialize(oldItem, new SkipDefaultValuesSerializationFilters());
    if (!JDOMUtil.isEmpty(serialized)) {
      XmlSerializer.deserializeInto(newItem, serialized);
    }
    return newItem;
  }

  private final class MyListTableModel extends ListTableModel<T> {
    // Direct reference to the current items list (same list passed to super).
    private List<T> items;
    // Optional listener notified from setValueAt when a cell value actually changes.
    private DataChangedListener<T> dataChangedListener;

    public MyListTableModel(@NotNull ColumnInfo[] columns, @NotNull List<T> items) {
      super(columns, items);

      this.items = items;
    }

    @Override
    public void setItems(@NotNull List<T> items) {
      this.items = items;
      super.setItems(items);
    }

    @Override
    public void removeRow(int index) {
      // Keep the modified-items helper in sync before the row disappears.
      helper.remove(getItem(index));
      super.removeRow(index);
    }

    @Override
    public void setValueAt(Object newValue, int rowIndex, int columnIndex) {
      if (rowIndex < getRowCount()) {
        @SuppressWarnings("unchecked")
        ColumnInfo<T, Object> column = (ColumnInfo<T, Object>)getColumnInfos()[columnIndex];
        T item = getItem(rowIndex);
        Object oldValue = column.valueOf(item);
        // Strings compared null-tolerantly via strEqual; other types via equal.
        if (column.getColumnClass() == String.class ?
!Comparing.strEqual(((String)oldValue), ((String)newValue)) : !Comparing.equal(oldValue, newValue)) { column.setValue(helper.getMutable(item, rowIndex), newValue); if (dataChangedListener != null) { dataChangedListener.dataChanged(column, rowIndex); } } } } } public abstract static class EditableColumnInfo<Item, Aspect> extends ColumnInfo<Item, Aspect> { public EditableColumnInfo(@NotNull String name) { super(name); } public EditableColumnInfo() { super(""); } @Override public boolean isCellEditable(Item item) { return true; } } @NotNull public JComponent createComponent() { return toolbarDecorator.addExtraAction( new ToolbarDecorator.ElementActionButton(IdeBundle.message("button.copy"), PlatformIcons.COPY_ICON) { @Override public void actionPerformed(@NotNull AnActionEvent e) { TableUtil.stopEditing(table); List<T> selectedItems = table.getSelectedObjects(); if (selectedItems.isEmpty()) { return; } for (T item : selectedItems) { model.addRow(itemEditor.clone(item, false)); } table.requestFocus(); TableUtil.updateScroller(table); } } ).createPanel(); } @NotNull @Override protected List<T> getItems() { return model.items; } public void selectItem(@NotNull final T item) { table.clearSelection(); final Ref<T> ref; if (helper.hasModifiedItems()) { ref = Ref.create(); helper.process(new TObjectObjectProcedure<T, T>() { @Override public boolean execute(T modified, T original) { if (item == original) { ref.set(modified); } return ref.isNull(); } }); } else { ref = null; } table.addSelection(ref == null || ref.isNull() ? 
item : ref.get()); } @NotNull public List<T> apply() { if (helper.hasModifiedItems()) { @SuppressWarnings("unchecked") final ColumnInfo<T, Object>[] columns = model.getColumnInfos(); helper.process(new TObjectObjectProcedure<T, T>() { @Override public boolean execute(T newItem, @NotNull T oldItem) { for (ColumnInfo<T, Object> column : columns) { if (column.isCellEditable(newItem)) { column.setValue(oldItem, column.valueOf(newItem)); } } if (itemEditor instanceof DialogItemEditor) { ((DialogItemEditor<T>)itemEditor).applyEdited(oldItem, newItem); } model.items.set(ContainerUtil.indexOfIdentity(model.items, newItem), oldItem); return true; } }); } helper.reset(model.items); return model.items; } public void reset(@NotNull List<T> items) { super.reset(items); model.setItems(new ArrayList<T>(items)); } private class MyRemoveAction implements AnActionButtonRunnable, AnActionButtonUpdater, TableUtil.ItemChecker { @Override public void run(AnActionButton button) { if (TableUtil.doRemoveSelectedItems(table, model, this)) { table.requestFocus(); TableUtil.updateScroller(table); } } @Override public boolean isOperationApplyable(@NotNull TableModel ignored, int row) { T item = model.getItem(row); return item != null && itemEditor.isRemovable(item); } @Override public boolean isEnabled(AnActionEvent e) { return areSelectedItemsRemovable(table.getSelectionModel()); } } }
package com.intellij.coverage; import com.intellij.CommonBundle; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.coverage.view.CoverageViewManager; import com.intellij.coverage.view.CoverageViewSuiteListener; import com.intellij.execution.configurations.RunConfigurationBase; import com.intellij.execution.configurations.RunnerSettings; import com.intellij.execution.configurations.coverage.CoverageEnabledConfiguration; import com.intellij.execution.process.ProcessAdapter; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.process.ProcessHandler; import com.intellij.ide.projectView.ProjectView; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.State; import com.intellij.openapi.components.Storage; import com.intellij.openapi.components.StoragePathMacros; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.event.EditorFactoryEvent; import com.intellij.openapi.editor.event.EditorFactoryListener; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.fileEditor.TextEditor; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.project.ProjectManagerAdapter; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.InvalidDataException; import com.intellij.openapi.util.JDOMExternalizable; import com.intellij.openapi.util.WriteExternalException; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import 
com.intellij.psi.PsiManager;
import com.intellij.rt.coverage.data.ClassData;
import com.intellij.rt.coverage.data.LineCoverage;
import com.intellij.rt.coverage.data.LineData;
import com.intellij.rt.coverage.data.ProjectData;
import com.intellij.util.Alarm;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import consulo.container.boot.ContainerPathManager;
import consulo.disposer.Disposable;
import consulo.disposer.Disposer;
import consulo.logging.Logger;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.jdom.Element;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.*;

/**
 * Project-level implementation of {@link CoverageDataManager}.
 *
 * Responsibilities visible in this class:
 * <ul>
 *   <li>keeps the set of known {@link CoverageSuite}s and persists them as {@code SUITE}
 *       elements via {@link JDOMExternalizable} (stored in the workspace file, see
 *       the {@code @State} annotation);</li>
 *   <li>tracks the currently chosen {@link CoverageSuitesBundle} and notifies
 *       {@link CoverageSuiteListener}s around changes;</li>
 *   <li>creates and disposes per-editor {@link SrcFileAnnotator}s that show coverage
 *       information in open editors (guarded by {@code ANNOTATORS_LOCK});</li>
 *   <li>supports "sub coverage" (per-test traces) via {@link #selectSubCoverage}.</li>
 * </ul>
 *
 * @author ven
 */
@Singleton
@State(name = "CoverageDataManager", storages = @Storage(StoragePathMacros.WORKSPACE_FILE))
public class CoverageDataManagerImpl extends CoverageDataManager implements JDOMExternalizable {
  // Dialog button labels for coverageGathered(); '&' marks the button mnemonic.
  private static final String REPLACE_ACTIVE_SUITES = "&Replace active suites";
  private static final String ADD_TO_ACTIVE_SUITES = "&Add to active suites";
  private static final String DO_NOT_APPLY_COLLECTED_COVERAGE = "Do not apply &collected coverage";

  // Copy-on-write list: listeners may be added/removed while fire*() iterates.
  private final List<CoverageSuiteListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

  private static final Logger LOG = Logger.getInstance(CoverageDataManagerImpl.class);
  // XML element name under which a single suite is persisted.
  private static final String SUITE = "SUITE";

  private final Project myProject;
  // All registered suites (persisted + gathered at runtime).
  private final Set<CoverageSuite> myCoverageSuites = new HashSet<CoverageSuite>();
  // Guarded by myLock; flipped when the project starts closing so async work can bail out.
  private boolean myIsProjectClosing = false;

  private final Object myLock = new Object();
  // True while per-test ("sub") coverage from selectSubCoverage() is being shown.
  private boolean mySubCoverageIsActive;

  @Override
  public CoverageSuitesBundle getCurrentSuitesBundle() {
    return myCurrentSuitesBundle;
  }

  // The bundle currently applied to the UI; null when no coverage is shown.
  private CoverageSuitesBundle myCurrentSuitesBundle;

  // Guards myAnnotators: editors are created/released and re-annotated on different call paths.
  private final Object ANNOTATORS_LOCK = new Object();
  private final Map<Editor, SrcFileAnnotator> myAnnotators = new HashMap<Editor, SrcFileAnnotator>();

  /**
   * Wires up the manager for a real (non-default) project: re-applies the current bundle
   * on color-scheme changes, listens for editor creation/release, and records when the
   * project starts closing. The default/template project gets none of this.
   *
   * @param project the project this manager serves
   */
  @Inject
  public CoverageDataManagerImpl(final Project project) {
    myProject = project;
    if (project.isDefault()) {
      // Template project never shows coverage; skip all listener wiring.
      return;
    }

    // Re-apply the current bundle when the editor color scheme changes so colors refresh.
    project.getMessageBus().connect().subscribe(EditorColorsManager.TOPIC, scheme -> chooseSuitesBundle(myCurrentSuitesBundle));

    EditorFactory.getInstance().addEditorFactoryListener(new CoverageEditorFactoryListener(), myProject);
    ProjectManagerAdapter projectManagerListener = new ProjectManagerAdapter() {
      @Override
      public void projectClosing(Project project) {
        synchronized (myLock) {
          myIsProjectClosing = true;
        }
      }
    };
    ProjectManager.getInstance().addProjectManagerListener(myProject, projectManagerListener);

    // Project itself acts as the parent disposable for the coverage-view listener.
    addSuiteListener(new CoverageViewSuiteListener(this, myProject), myProject);
  }

  /**
   * Restores persisted suites from the workspace file. SUITE elements whose runner
   * extension is gone are skipped; a suite whose data fails to parse
   * (NumberFormatException) is silently dropped and the loop continues.
   */
  @Override
  public void readExternal(Element element) throws InvalidDataException {
    //noinspection unchecked
    for (Element suiteElement : element.getChildren(SUITE)) {
      final CoverageRunner coverageRunner = BaseCoverageSuite.readRunnerAttribute(suiteElement);
      // skip unknown runners
      if (coverageRunner == null) {
        // collect gc
        final CoverageFileProvider fileProvider = BaseCoverageSuite.readDataFileProviderAttribute(suiteElement);
        if (fileProvider.isValid()) {
          //deleteCachedCoverage(fileProvider.getCoverageDataFilePath());
        }
        continue;
      }

      // Ask the first engine that accepts this runner to create an empty suite to read into.
      CoverageSuite suite = null;
      for (CoverageEngine engine : CoverageEngine.EP_NAME.getExtensionList()) {
        if (coverageRunner.acceptsCoverageEngine(engine)) {
          suite = engine.createEmptyCoverageSuite(coverageRunner);
          if (suite != null) {
            break;
          }
        }
      }

      if (suite != null) {
        try {
          suite.readExternal(suiteElement);
          myCoverageSuites.add(suite);
        }
        catch (NumberFormatException e) {
          //try next suite
        }
      }
    }
  }

  /** Persists every known suite as its own SUITE child element. */
  @Override
  public void writeExternal(final Element element) throws WriteExternalException {
    for (CoverageSuite coverageSuite : myCoverageSuites) {
      final Element suiteElement = new Element(SUITE);
      element.addContent(suiteElement);
      coverageSuite.writeExternal(suiteElement);
    }
  }

  /**
   * Creates and registers a suite from explicit parameters.
   * When not merging into a same-named suite, the previous suite with the same identity
   * is removed first via removeCoverageSuite().
   * NOTE(review): removeCoverageSuite() may show a confirmation dialog and delete
   * cached data files — confirm that is intended on this code path.
   */
  @Override
  public CoverageSuite addCoverageSuite(final String name,
                                        final CoverageFileProvider fileProvider,
                                        final String[] filters,
                                        final long lastCoverageTimeStamp,
                                        @Nullable final String suiteToMergeWith,
                                        final CoverageRunner coverageRunner,
                                        final boolean collectLineInfo,
                                        final boolean tracingEnabled) {
    final CoverageSuite suite = createCoverageSuite(coverageRunner, name, fileProvider, filters, lastCoverageTimeStamp, suiteToMergeWith, collectLineInfo, tracingEnabled);
    if (suiteToMergeWith == null || !name.equals(suiteToMergeWith)) {
      removeCoverageSuite(suite);
    }
    myCoverageSuites.remove(suite); // remove previous instance
    myCoverageSuites.add(suite); // add new instance
    return suite;
  }

  /**
   * Registers a suite created from an externally supplied coverage data file
   * (no filters, no merging, no line info/tracing).
   */
  @Override
  public CoverageSuite addExternalCoverageSuite(String selectedFileName, long timeStamp, CoverageRunner coverageRunner, CoverageFileProvider fileProvider) {
    final CoverageSuite suite = createCoverageSuite(coverageRunner, selectedFileName, fileProvider, ArrayUtil.EMPTY_STRING_ARRAY, timeStamp, null, false, false);
    myCoverageSuites.add(suite);
    return suite;
  }

  /**
   * Creates and registers a suite for a run configuration with coverage enabled.
   * The suite name is derived from the configuration name; the data file path must
   * already be set on the configuration.
   */
  @Override
  public CoverageSuite addCoverageSuite(final CoverageEnabledConfiguration config) {
    final String name = config.getName() + " Coverage Results";
    final String covFilePath = config.getCoverageFilePath();
    assert covFilePath != null; // Shouldn't be null here!

    final CoverageRunner coverageRunner = config.getCoverageRunner();
    LOG.assertTrue(coverageRunner != null, "Coverage runner id = " + config.getRunnerId());

    final DefaultCoverageFileProvider fileProvider = new DefaultCoverageFileProvider(new File(covFilePath));
    final CoverageSuite suite = createCoverageSuite(config, name, coverageRunner, fileProvider);

    // remove previous instance
    removeCoverageSuite(suite);

    // add new instance
    myCoverageSuites.add(suite);
    return suite;
  }

  /**
   * Unregisters a suite and deletes its cached data (and traces directory if tracing
   * was enabled). If the data file lives outside the IDE system directory, the user is
   * asked for confirmation before anything is deleted. If the suite is part of the
   * current bundle, the bundle is rebuilt without it (or cleared entirely).
   */
  @Override
  public void removeCoverageSuite(final CoverageSuite suite) {
    final String fileName = suite.getCoverageDataFileName();

    boolean deleteTraces = suite.isTracingEnabled();
    if (!FileUtil.isAncestor(ContainerPathManager.get().getSystemPath(), fileName, false)) {
      String message = "Would you like to delete file \'" + fileName + "\' ";
      if (deleteTraces) {
        message += "and traces directory \'" + FileUtil.getNameWithoutExtension(new File(fileName)) + "\' ";
      }
      message += "on disk?";
      if (Messages.showYesNoDialog(myProject, message, CommonBundle.getWarningTitle(), Messages.getWarningIcon()) == Messages.YES) {
        deleteCachedCoverage(fileName, deleteTraces);
      }
    }
    else {
      // Files under the IDE system path are owned by us — delete without asking.
      deleteCachedCoverage(fileName, deleteTraces);
    }

    myCoverageSuites.remove(suite);
    if (myCurrentSuitesBundle != null && myCurrentSuitesBundle.contains(suite)) {
      CoverageSuite[] suites = myCurrentSuitesBundle.getSuites();
      suites = ArrayUtil.remove(suites, suite);
      chooseSuitesBundle(suites.length > 0 ? new CoverageSuitesBundle(suites) : null);
    }
  }

  /** Deletes the coverage data file and, optionally, its sibling traces directory. */
  private void deleteCachedCoverage(String coverageDataFileName, boolean deleteTraces) {
    FileUtil.delete(new File(coverageDataFileName));
    if (deleteTraces) {
      FileUtil.delete(getTracesDirectory(coverageDataFileName));
    }
  }

  /** Returns all registered suites as an array snapshot. */
  @Override
  public CoverageSuite[] getSuites() {
    return myCoverageSuites.toArray(new CoverageSuite[myCoverageSuites.size()]);
  }

  /**
   * Makes the given bundle the active one: notifies listeners, resets sub-coverage,
   * lets the previous engine's annotator react, disposes editor annotators, verifies all
   * data files exist (otherwise clears the selection), then renews coverage data.
   *
   * @param suite the bundle to apply, or null to clear coverage display
   */
  @Override
  public void chooseSuitesBundle(final CoverageSuitesBundle suite) {
    // NOTE(review): this early-return fires only when BOTH the current bundle and the
    // argument are null (the && makes the first comparison redundant) — confirm whether
    // "same non-null bundle re-chosen" was meant to short-circuit as well.
    if (myCurrentSuitesBundle == suite && suite == null) {
      return;
    }

    LOG.assertTrue(!myProject.isDefault());

    fireBeforeSuiteChosen();

    mySubCoverageIsActive = false;
    if (myCurrentSuitesBundle != null) {
      myCurrentSuitesBundle.getCoverageEngine().getCoverageAnnotator(myProject).onSuiteChosen(suite);
    }

    myCurrentSuitesBundle = suite;
    disposeAnnotators();

    if (suite == null) {
      triggerPresentationUpdate();
      return;
    }

    // If any suite's data file can no longer be materialized, abandon the whole bundle.
    for (CoverageSuite coverageSuite : myCurrentSuitesBundle.getSuites()) {
      final boolean suiteFileExists = coverageSuite.getCoverageDataFileProvider().ensureFileExists();
      if (!suiteFileExists) {
        chooseSuitesBundle(null);
        return;
      }
    }

    renewCoverageData(suite);

    fireAfterSuiteChosen();
  }

  /**
   * Called when a run finished gathering coverage. On the EDT, either applies the new
   * suite directly (no active bundle) or asks the user whether to replace/add-to the
   * active suites — remembering the choice via CoverageOptionsProvider (option value 3
   * means "ask every time").
   */
  @Override
  public void coverageGathered(@Nonnull final CoverageSuite suite) {
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        if (myProject.isDisposed()) return;
        if (myCurrentSuitesBundle != null) {
          final String message = CodeInsightBundle.message("display.coverage.prompt", suite.getPresentableName());

          final CoverageOptionsProvider coverageOptionsProvider = CoverageOptionsProvider.getInstance(myProject);
          final DialogWrapper.DoNotAskOption doNotAskOption = new DialogWrapper.DoNotAskOption() {
            @Override
            public boolean isToBeShown() {
              // 3 == "ask every time"
              return coverageOptionsProvider.getOptionToReplace() == 3;
            }

            @Override
            public void setToBeShown(boolean value, int exitCode) {
              // Unchecking "do not ask" stores 3 ("ask"); otherwise remember the chosen exit code.
              coverageOptionsProvider.setOptionsToReplace(value ? 3 : exitCode);
            }

            @Override
            public boolean canBeHidden() {
              return true;
            }

            @Override
            public boolean shouldSaveOptionsOnCancel() {
              return true;
            }

            @Nonnull
            @Override
            public String getDoNotShowMessage() {
              return CommonBundle.message("dialog.options.do.not.show");
            }
          };
          // "Add to active suites" is only offered when both suites use the same engine.
          final String[] options = myCurrentSuitesBundle.getCoverageEngine() == suite.getCoverageEngine()
                                   ? new String[]{REPLACE_ACTIVE_SUITES, ADD_TO_ACTIVE_SUITES, DO_NOT_APPLY_COLLECTED_COVERAGE}
                                   : new String[]{REPLACE_ACTIVE_SUITES, DO_NOT_APPLY_COLLECTED_COVERAGE};
          final int answer = doNotAskOption.isToBeShown()
                             ? Messages.showDialog(message, CodeInsightBundle.message("code.coverage"), options, 1, Messages.getQuestionIcon(), doNotAskOption)
                             : coverageOptionsProvider.getOptionToReplace();
          if (answer == DialogWrapper.OK_EXIT_CODE) {
            // Replace the active bundle with the new suite.
            chooseSuitesBundle(new CoverageSuitesBundle(suite));
          }
          else if (answer == 1) {
            // Append the new suite to the active bundle.
            chooseSuitesBundle(new CoverageSuitesBundle(ArrayUtil.append(myCurrentSuitesBundle.getSuites(), suite)));
          }
        }
        else {
          chooseSuitesBundle(new CoverageSuitesBundle(suite));
        }
      }
    });
  }

  /** Refreshes editor annotations, then the project view and coverage view on the EDT. */
  @Override
  public void triggerPresentationUpdate() {
    renewInformationInEditors();
    UIUtil.invokeLaterIfNeeded(new Runnable() {
      @Override
      public void run() {
        if (myProject.isDisposed()) return;
        ProjectView.getInstance(myProject).refresh();
        CoverageViewManager.getInstance(myProject).setReady(true);
      }
    });
  }

  /** Processes gathered coverage once the attached process terminates. */
  @Override
  public void attachToProcess(@Nonnull final ProcessHandler handler, @Nonnull final RunConfigurationBase configuration, final RunnerSettings runnerSettings) {
    handler.addProcessListener(new ProcessAdapter() {
      @Override
      public void processTerminated(final ProcessEvent event) {
        processGatheredCoverage(configuration, runnerSettings);
      }
    });
  }

  /** Delegates to the static overload, but only for coverage-enabled runner settings. */
  @Override
  public void processGatheredCoverage(@Nonnull RunConfigurationBase configuration, RunnerSettings runnerSettings) {
    if (runnerSettings instanceof CoverageRunnerData) {
      processGatheredCoverage(configuration);
    }
  }

  /**
   * Hands the configuration's current suite (if any) to the project's coverage manager,
   * recording the originating configuration on the suite first.
   */
  public static void processGatheredCoverage(RunConfigurationBase configuration) {
    final Project project = configuration.getProject();
    if (project.isDisposed()) return;
    final CoverageDataManager coverageDataManager = CoverageDataManager.getInstance(project);
    final CoverageEnabledConfiguration coverageEnabledConfiguration = CoverageEnabledConfiguration.getOrCreate(configuration);
    //noinspection ConstantConditions
    final CoverageSuite coverageSuite = coverageEnabledConfiguration.getCurrentCoverageSuite();
    if (coverageSuite != null) {
      ((BaseCoverageSuite)coverageSuite).setConfiguration(configuration);
      coverageDataManager.coverageGathered(coverageSuite);
    }
  }

  /** Asks the current engine's annotator to recompute coverage data for the bundle. */
  protected void renewCoverageData(@Nonnull final CoverageSuitesBundle suite) {
    if (myCurrentSuitesBundle != null) {
      myCurrentSuitesBundle.getCoverageEngine().getCoverageAnnotator(myProject).renewCoverageData(suite, this);
    }
  }

  /** Re-applies coverage annotations to every editor of every open file. */
  private void renewInformationInEditors() {
    final FileEditorManager fileEditorManager = FileEditorManager.getInstance(myProject);
    final VirtualFile[] openFiles = fileEditorManager.getOpenFiles();
    for (VirtualFile openFile : openFiles) {
      final FileEditor[] allEditors = fileEditorManager.getAllEditors(openFile);
      applyInformationToEditor(allEditors, openFile);
    }
  }

  /**
   * Disposes the stale annotator for the file's text editor, then (re)creates annotators
   * and shows coverage for editors that pass the current bundle's filters.
   */
  private void applyInformationToEditor(FileEditor[] editors, final VirtualFile file) {
    final PsiFile psiFile = doInReadActionIfProjectOpen(new Computable<PsiFile>() {
      @Nullable
      @Override
      public PsiFile compute() {
        return PsiManager.getInstance(myProject).findFile(file);
      }
    });
    if (psiFile != null && myCurrentSuitesBundle != null && psiFile.isPhysical()) {
      final CoverageEngine engine = myCurrentSuitesBundle.getCoverageEngine();
      if (!engine.coverageEditorHighlightingApplicableTo(psiFile)) {
        return;
      }

      // NOTE(review): the unconditional break means only the FIRST text editor's old
      // annotator is removed/disposed here — confirm this is intentional when a file
      // is open in multiple text editors (e.g. split view).
      for (FileEditor editor : editors) {
        if (editor instanceof TextEditor) {
          final Editor textEditor = ((TextEditor)editor).getEditor();
          SrcFileAnnotator annotator;
          synchronized (ANNOTATORS_LOCK) {
            annotator = myAnnotators.remove(textEditor);
          }
          if (annotator != null) {
            Disposer.dispose(annotator);
          }
          break;
        }
      }

      for (FileEditor editor : editors) {
        if (editor instanceof TextEditor) {
          final Editor textEditor = ((TextEditor)editor).getEditor();
          SrcFileAnnotator annotator = getAnnotator(textEditor);
          if (annotator == null) {
            annotator = new SrcFileAnnotator(psiFile, textEditor);
            synchronized (ANNOTATORS_LOCK) {
              myAnnotators.put(textEditor, annotator);
            }
          }

          if (myCurrentSuitesBundle != null && engine.acceptedByFilters(psiFile, myCurrentSuitesBundle)) {
            annotator.showCoverageInformation(myCurrentSuitesBundle);
          }
        }
      }
    }
  }

  /**
   * Runs the computation in a read action, or returns null if the project has started
   * closing (so background refreshes don't race project disposal).
   */
  @Override
  public <T> T doInReadActionIfProjectOpen(Computable<T> computation) {
    synchronized (myLock) {
      if (myIsProjectClosing) return null;
    }
    return ApplicationManager.getApplication().runReadAction(computation);
  }

  /**
   * Builds and applies "sub coverage" restricted to the given test names: reads the
   * per-test .tr trace files (format: int count, then per class UTF name, int lineCount,
   * int lines...) from each suite's traces directory, then synthesizes a ProjectData
   * where every traced line is marked fully covered.
   */
  @Override
  public void selectSubCoverage(@Nonnull final CoverageSuitesBundle suite, final List<String> testNames) {
    suite.restoreCoverageData();
    final ProjectData data = suite.getCoverageData();
    if (data == null) return;
    mySubCoverageIsActive = true;
    // className -> set of 1-based line numbers touched by the selected tests.
    final Map<String, Set<Integer>> executionTrace = new HashMap<String, Set<Integer>>();
    for (CoverageSuite coverageSuite : suite.getSuites()) {
      final String fileName = coverageSuite.getCoverageDataFileName();
      final File tracesDir = getTracesDirectory(fileName);
      for (String testName : testNames) {
        final File file = new File(tracesDir, FileUtil.sanitizeFileName(testName) + ".tr");
        if (file.exists()) {
          DataInputStream in = null;
          try {
            in = new DataInputStream(new FileInputStream(file));
            int traceSize = in.readInt();
            for (int i = 0; i < traceSize; i++) {
              final String className = in.readUTF();
              final int linesSize = in.readInt();
              Set<Integer> lines = executionTrace.get(className);
              if (lines == null) {
                lines = new HashSet<Integer>();
                executionTrace.put(className, lines);
              }
              for (int l = 0; l < linesSize; l++) {
                lines.add(in.readInt());
              }
            }
          }
          catch (Exception e) {
            LOG.error(e);
          }
          finally {
            // NOTE(review): if the FileInputStream constructor throws, 'in' is still null
            // here and in.close() will NPE — consider a null check / try-with-resources.
            try {
              in.close();
            }
            catch (IOException e) {
              LOG.error(e);
            }
          }
        }
      }
    }
    final ProjectData projectData = new ProjectData();
    for (String className : executionTrace.keySet()) {
      ClassData loadedClassData = projectData.getClassData(className);
      if (loadedClassData == null) {
        loadedClassData = projectData.getOrCreateClassData(className);
      }
      final Set<Integer> lineNumbers = executionTrace.get(className);
      final ClassData oldData = data.getClassData(className);
      LOG.assertTrue(oldData != null, "missed className: \"" + className + "\"");
      final Object[] oldLines = oldData.getLines();
      LOG.assertTrue(oldLines != null);
      // Size the line array to cover both the old data and any traced line beyond it.
      int maxNumber = oldLines.length;
      for (Integer lineNumber : lineNumbers) {
        if (lineNumber >= maxNumber) {
          maxNumber = lineNumber + 1;
        }
      }
      final LineData[] lines = new LineData[maxNumber];
      for (Integer line : lineNumbers) {
        final int lineIdx = line.intValue() - 1;
        // Carry over the method signature from the full data when available.
        String methodSig = null;
        if (lineIdx < oldData.getLines().length) {
          final LineData oldLineData = oldData.getLineData(lineIdx);
          if (oldLineData != null) {
            methodSig = oldLineData.getMethodSignature();
          }
        }
        final LineData lineData = new LineData(lineIdx, methodSig);
        if (methodSig != null) {
          loadedClassData.registerMethodSignature(lineData);
        }
        lineData.setStatus(LineCoverage.FULL);
        lines[lineIdx] = lineData;
      }
      loadedClassData.setLines(lines);
    }
    suite.setCoverageData(projectData);
    renewCoverageData(suite);
  }

  /** Traces live in a directory next to the data file, named after it without extension. */
  private File getTracesDirectory(final String fileName) {
    return new File(new File(fileName).getParentFile(), FileUtil.getNameWithoutExtension(new File(fileName)));
  }

  /** Leaves sub-coverage mode and re-applies the bundle's full (merged) coverage data. */
  @Override
  public void restoreMergedCoverage(@Nonnull final CoverageSuitesBundle suite) {
    mySubCoverageIsActive = false;
    suite.restoreCoverageData();
    renewCoverageData(suite);
  }

  /** Registers a listener that is automatically removed when the parent is disposed. */
  @Override
  public void addSuiteListener(final CoverageSuiteListener listener, Disposable parentDisposable) {
    myListeners.add(listener);
    Disposer.register(parentDisposable, () -> myListeners.remove(listener));
  }

  public void fireBeforeSuiteChosen() {
    for (CoverageSuiteListener listener : myListeners) {
      listener.beforeSuiteChosen();
    }
  }

  public void fireAfterSuiteChosen() {
    for (CoverageSuiteListener listener : myListeners) {
      listener.afterSuiteChosen();
    }
  }

  @Override
  public boolean isSubCoverageActive() {
    return mySubCoverageIsActive;
  }

  /** Returns the annotator registered for the editor, or null. Thread-safe. */
  @Nullable
  public SrcFileAnnotator getAnnotator(Editor editor) {
    synchronized (ANNOTATORS_LOCK) {
      return myAnnotators.get(editor);
    }
  }

  /** Disposes and forgets every registered editor annotator. */
  public void disposeAnnotators() {
    synchronized (ANNOTATORS_LOCK) {
      for (SrcFileAnnotator annotator : myAnnotators.values()) {
        if (annotator != null) {
          Disposer.dispose(annotator);
        }
      }
      myAnnotators.clear();
    }
  }

  /**
   * Creates a suite for a run configuration via the first engine that both accepts the
   * runner and is applicable to the configuration. Asserts (non-fatally) on failure.
   */
  @Nonnull
  private CoverageSuite createCoverageSuite(final CoverageEnabledConfiguration config, final String name, final CoverageRunner coverageRunner, final DefaultCoverageFileProvider fileProvider) {
    CoverageSuite suite = null;
    for (CoverageEngine engine : CoverageEngine.EP_NAME.getExtensionList()) {
      if (coverageRunner.acceptsCoverageEngine(engine) && engine.isApplicableTo(config.getConfiguration())) {
        suite = engine.createCoverageSuite(coverageRunner, name, fileProvider, config);
        if (suite != null) {
          break;
        }
      }
    }

    LOG.assertTrue(suite != null, "Cannot create coverage suite for runner: " + coverageRunner.getPresentableName());
    return suite;
  }

  /**
   * Creates a suite from explicit parameters via the first engine accepting the runner.
   * Asserts (non-fatally) on failure.
   */
  @Nonnull
  private CoverageSuite createCoverageSuite(final CoverageRunner coverageRunner,
                                            final String name,
                                            final CoverageFileProvider fileProvider,
                                            final String[] filters,
                                            final long lastCoverageTimeStamp,
                                            final String suiteToMergeWith,
                                            final boolean collectLineInfo,
                                            final boolean tracingEnabled) {
    CoverageSuite suite = null;
    for (CoverageEngine engine : CoverageEngine.EP_NAME.getExtensionList()) {
      if (coverageRunner.acceptsCoverageEngine(engine)) {
        suite = engine.createCoverageSuite(coverageRunner, name, fileProvider, filters, lastCoverageTimeStamp, suiteToMergeWith, collectLineInfo, tracingEnabled, false, myProject);
        if (suite != null) {
          break;
        }
      }
    }
    LOG.assertTrue(suite != null, "Cannot create coverage suite for runner: " + coverageRunner.getPresentableName());
    return suite;
  }

  /**
   * Attaches a SrcFileAnnotator to each newly created editor of this project (on a
   * pooled-thread alarm, 100 ms delay) and tears it down when the editor is released.
   */
  private class CoverageEditorFactoryListener implements EditorFactoryListener {
    private final Alarm myAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, myProject);
    // Pending show-coverage requests per editor, so they can be cancelled on release.
    private final Map<Editor, Runnable> myCurrentEditors = new HashMap<Editor, Runnable>();

    @Override
    public void editorCreated(@Nonnull EditorFactoryEvent event) {
      synchronized (myLock) {
        if (myIsProjectClosing) return;
      }

      final Editor editor = event.getEditor();
      if (editor.getProject() != myProject) return;
      final PsiFile psiFile = ApplicationManager.getApplication().runReadAction(new Computable<PsiFile>() {
        @Nullable
        @Override
        public PsiFile compute() {
          if (myProject.isDisposed()) return null;
          final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
          final Document document = editor.getDocument();
          return documentManager.getPsiFile(document);
        }
      });
      if (psiFile != null && myCurrentSuitesBundle != null && psiFile.isPhysical()) {
        final CoverageEngine engine = myCurrentSuitesBundle.getCoverageEngine();
        if (!engine.coverageEditorHighlightingApplicableTo(psiFile)) {
          return;
        }

        SrcFileAnnotator annotator = getAnnotator(editor);
        if (annotator == null) {
          annotator = new SrcFileAnnotator(psiFile, editor);
        }

        final SrcFileAnnotator finalAnnotator = annotator;

        synchronized (ANNOTATORS_LOCK) {
          myAnnotators.put(editor, finalAnnotator);
        }

        final Runnable request = new Runnable() {
          @Override
          public void run() {
            if (myProject.isDisposed()) return;
            if (myCurrentSuitesBundle != null) {
              if (engine.acceptedByFilters(psiFile, myCurrentSuitesBundle)) {
                finalAnnotator.showCoverageInformation(myCurrentSuitesBundle);
              }
            }
          }
        };
        myCurrentEditors.put(editor, request);
        // Debounce: show coverage shortly after the editor appears.
        myAlarm.addRequest(request, 100);
      }
    }

    @Override
    public void editorReleased(@Nonnull EditorFactoryEvent event) {
      final Editor editor = event.getEditor();
      if (editor.getProject() != myProject) return;
      try {
        final SrcFileAnnotator fileAnnotator;
        synchronized (ANNOTATORS_LOCK) {
          fileAnnotator = myAnnotators.remove(editor);
        }
        if (fileAnnotator != null) {
          Disposer.dispose(fileAnnotator);
        }
      }
      finally {
        // Always cancel any pending show-coverage request for this editor.
        final Runnable request = myCurrentEditors.remove(editor);
        if (request != null) {
          myAlarm.cancelRequest(request);
        }
      }
    }
  }
}
/* * Copyright 2012 Shared Learning Collaborative, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2012.04.20 at 03:09:04 PM EDT // package org.slc.sli.sample.entities; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for CountryCodeType. * * <p>The following schema fragment specifies the expected content contained within this class. 
* <p> * <pre> * &lt;simpleType name="CountryCodeType"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}token"> * &lt;enumeration value="AF"/> * &lt;enumeration value="AX"/> * &lt;enumeration value="AL"/> * &lt;enumeration value="DZ"/> * &lt;enumeration value="AS"/> * &lt;enumeration value="AD"/> * &lt;enumeration value="AO"/> * &lt;enumeration value="AI"/> * &lt;enumeration value="AQ"/> * &lt;enumeration value="AG"/> * &lt;enumeration value="AR"/> * &lt;enumeration value="AM"/> * &lt;enumeration value="AW"/> * &lt;enumeration value="AU"/> * &lt;enumeration value="AT"/> * &lt;enumeration value="AZ"/> * &lt;enumeration value="BS"/> * &lt;enumeration value="BH"/> * &lt;enumeration value="BD"/> * &lt;enumeration value="BB"/> * &lt;enumeration value="BY"/> * &lt;enumeration value="BE"/> * &lt;enumeration value="BZ"/> * &lt;enumeration value="BJ"/> * &lt;enumeration value="BM"/> * &lt;enumeration value="BT"/> * &lt;enumeration value="BO"/> * &lt;enumeration value="BQ"/> * &lt;enumeration value="BA"/> * &lt;enumeration value="BW"/> * &lt;enumeration value="BV"/> * &lt;enumeration value="BR"/> * &lt;enumeration value="IO"/> * &lt;enumeration value="BN"/> * &lt;enumeration value="BG"/> * &lt;enumeration value="BF"/> * &lt;enumeration value="BI"/> * &lt;enumeration value="KH"/> * &lt;enumeration value="CM"/> * &lt;enumeration value="CA"/> * &lt;enumeration value="CV"/> * &lt;enumeration value="KY"/> * &lt;enumeration value="CF"/> * &lt;enumeration value="TD"/> * &lt;enumeration value="CL"/> * &lt;enumeration value="CN"/> * &lt;enumeration value="CX"/> * &lt;enumeration value="CC"/> * &lt;enumeration value="CO"/> * &lt;enumeration value="KM"/> * &lt;enumeration value="CG"/> * &lt;enumeration value="CD"/> * &lt;enumeration value="CK"/> * &lt;enumeration value="CR"/> * &lt;enumeration value="CI"/> * &lt;enumeration value="HR"/> * &lt;enumeration value="CU"/> * &lt;enumeration value="CW"/> * &lt;enumeration value="CY"/> * &lt;enumeration value="CZ"/> * &lt;enumeration 
value="DK"/> * &lt;enumeration value="DJ"/> * &lt;enumeration value="DM"/> * &lt;enumeration value="DO"/> * &lt;enumeration value="EC"/> * &lt;enumeration value="EG"/> * &lt;enumeration value="SV"/> * &lt;enumeration value="GQ"/> * &lt;enumeration value="ER"/> * &lt;enumeration value="EE"/> * &lt;enumeration value="ET"/> * &lt;enumeration value="FK"/> * &lt;enumeration value="FO"/> * &lt;enumeration value="FJ"/> * &lt;enumeration value="FI"/> * &lt;enumeration value="FR"/> * &lt;enumeration value="GF"/> * &lt;enumeration value="PF"/> * &lt;enumeration value="TF"/> * &lt;enumeration value="GA"/> * &lt;enumeration value="GM"/> * &lt;enumeration value="GE"/> * &lt;enumeration value="DE"/> * &lt;enumeration value="GH"/> * &lt;enumeration value="GI"/> * &lt;enumeration value="GR"/> * &lt;enumeration value="GL"/> * &lt;enumeration value="GD"/> * &lt;enumeration value="GP"/> * &lt;enumeration value="GU"/> * &lt;enumeration value="GT"/> * &lt;enumeration value="GG"/> * &lt;enumeration value="GN"/> * &lt;enumeration value="GW"/> * &lt;enumeration value="GY"/> * &lt;enumeration value="HT"/> * &lt;enumeration value="HM"/> * &lt;enumeration value="VA"/> * &lt;enumeration value="HN"/> * &lt;enumeration value="HK"/> * &lt;enumeration value="HU"/> * &lt;enumeration value="IS"/> * &lt;enumeration value="IN"/> * &lt;enumeration value="ID"/> * &lt;enumeration value="IR"/> * &lt;enumeration value="IQ"/> * &lt;enumeration value="IE"/> * &lt;enumeration value="IM"/> * &lt;enumeration value="IL"/> * &lt;enumeration value="IT"/> * &lt;enumeration value="JM"/> * &lt;enumeration value="JP"/> * &lt;enumeration value="JE"/> * &lt;enumeration value="JO"/> * &lt;enumeration value="KZ"/> * &lt;enumeration value="KE"/> * &lt;enumeration value="KI"/> * &lt;enumeration value="KP"/> * &lt;enumeration value="KR"/> * &lt;enumeration value="KW"/> * &lt;enumeration value="KG"/> * &lt;enumeration value="LA"/> * &lt;enumeration value="LV"/> * &lt;enumeration value="LB"/> * &lt;enumeration value="LS"/> * 
&lt;enumeration value="LR"/> * &lt;enumeration value="LY"/> * &lt;enumeration value="LI"/> * &lt;enumeration value="LT"/> * &lt;enumeration value="LU"/> * &lt;enumeration value="MO"/> * &lt;enumeration value="MK"/> * &lt;enumeration value="MG"/> * &lt;enumeration value="MW"/> * &lt;enumeration value="MY"/> * &lt;enumeration value="MV"/> * &lt;enumeration value="ML"/> * &lt;enumeration value="MT"/> * &lt;enumeration value="MH"/> * &lt;enumeration value="MQ"/> * &lt;enumeration value="MR"/> * &lt;enumeration value="MU"/> * &lt;enumeration value="YT"/> * &lt;enumeration value="MX"/> * &lt;enumeration value="FM"/> * &lt;enumeration value="MD"/> * &lt;enumeration value="MC"/> * &lt;enumeration value="MN"/> * &lt;enumeration value="ME"/> * &lt;enumeration value="MS"/> * &lt;enumeration value="MA"/> * &lt;enumeration value="MZ"/> * &lt;enumeration value="MM"/> * &lt;enumeration value="NA"/> * &lt;enumeration value="NR"/> * &lt;enumeration value="NP"/> * &lt;enumeration value="NL"/> * &lt;enumeration value="NC"/> * &lt;enumeration value="NZ"/> * &lt;enumeration value="NI"/> * &lt;enumeration value="NE"/> * &lt;enumeration value="NG"/> * &lt;enumeration value="NU"/> * &lt;enumeration value="NF"/> * &lt;enumeration value="MP"/> * &lt;enumeration value="NO"/> * &lt;enumeration value="OM"/> * &lt;enumeration value="PK"/> * &lt;enumeration value="PW"/> * &lt;enumeration value="PS"/> * &lt;enumeration value="PA"/> * &lt;enumeration value="PG"/> * &lt;enumeration value="PY"/> * &lt;enumeration value="PE"/> * &lt;enumeration value="PH"/> * &lt;enumeration value="PN"/> * &lt;enumeration value="PL"/> * &lt;enumeration value="PT"/> * &lt;enumeration value="PR"/> * &lt;enumeration value="QA"/> * &lt;enumeration value="RE"/> * &lt;enumeration value="RO"/> * &lt;enumeration value="RU"/> * &lt;enumeration value="RW"/> * &lt;enumeration value="BL"/> * &lt;enumeration value="SH"/> * &lt;enumeration value="KN"/> * &lt;enumeration value="LC"/> * &lt;enumeration value="MF"/> * &lt;enumeration 
value="PM"/> * &lt;enumeration value="VC"/> * &lt;enumeration value="WS"/> * &lt;enumeration value="SM"/> * &lt;enumeration value="ST"/> * &lt;enumeration value="SA"/> * &lt;enumeration value="SN"/> * &lt;enumeration value="RS"/> * &lt;enumeration value="SC"/> * &lt;enumeration value="SL"/> * &lt;enumeration value="SG"/> * &lt;enumeration value="SX"/> * &lt;enumeration value="SK"/> * &lt;enumeration value="SI"/> * &lt;enumeration value="SB"/> * &lt;enumeration value="SO"/> * &lt;enumeration value="ZA"/> * &lt;enumeration value="GS"/> * &lt;enumeration value="ES"/> * &lt;enumeration value="LK"/> * &lt;enumeration value="SD"/> * &lt;enumeration value="SR"/> * &lt;enumeration value="SJ"/> * &lt;enumeration value="SZ"/> * &lt;enumeration value="SE"/> * &lt;enumeration value="CH"/> * &lt;enumeration value="SY"/> * &lt;enumeration value="TW"/> * &lt;enumeration value="TJ"/> * &lt;enumeration value="TZ"/> * &lt;enumeration value="TH"/> * &lt;enumeration value="TL"/> * &lt;enumeration value="TG"/> * &lt;enumeration value="TK"/> * &lt;enumeration value="TO"/> * &lt;enumeration value="TT"/> * &lt;enumeration value="TN"/> * &lt;enumeration value="TR"/> * &lt;enumeration value="TM"/> * &lt;enumeration value="TC"/> * &lt;enumeration value="TV"/> * &lt;enumeration value="UG"/> * &lt;enumeration value="UA"/> * &lt;enumeration value="AE"/> * &lt;enumeration value="GB"/> * &lt;enumeration value="US"/> * &lt;enumeration value="UM"/> * &lt;enumeration value="UY"/> * &lt;enumeration value="UZ"/> * &lt;enumeration value="VU"/> * &lt;enumeration value="VE"/> * &lt;enumeration value="VN"/> * &lt;enumeration value="VG"/> * &lt;enumeration value="VI"/> * &lt;enumeration value="WF"/> * &lt;enumeration value="EH"/> * &lt;enumeration value="YE"/> * &lt;enumeration value="ZM"/> * &lt;enumeration value="ZW"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "CountryCodeType") @XmlEnum public enum CountryCodeType { AF, AX, AL, DZ, AS, AD, AO, AI, AQ, AG, AR, AM, AW, AU, AT, 
AZ, BS, BH, BD, BB, BY, BE, BZ, BJ, BM, BT, BO, BQ, BA, BW, BV, BR, IO, BN, BG, BF, BI, KH, CM, CA, CV, KY, CF, TD, CL, CN, CX, CC, CO, KM, CG, CD, CK, CR, CI, HR, CU, CW, CY, CZ, DK, DJ, DM, DO, EC, EG, SV, GQ, ER, EE, ET, FK, FO, FJ, FI, FR, GF, PF, TF, GA, GM, GE, DE, GH, GI, GR, GL, GD, GP, GU, GT, GG, GN, GW, GY, HT, HM, VA, HN, HK, HU, IS, IN, ID, IR, IQ, IE, IM, IL, IT, JM, JP, JE, JO, KZ, KE, KI, KP, KR, KW, KG, LA, LV, LB, LS, LR, LY, LI, LT, LU, MO, MK, MG, MW, MY, MV, ML, MT, MH, MQ, MR, MU, YT, MX, FM, MD, MC, MN, ME, MS, MA, MZ, MM, NA, NR, NP, NL, NC, NZ, NI, NE, NG, NU, NF, MP, NO, OM, PK, PW, PS, PA, PG, PY, PE, PH, PN, PL, PT, PR, QA, RE, RO, RU, RW, BL, SH, KN, LC, MF, PM, VC, WS, SM, ST, SA, SN, RS, SC, SL, SG, SX, SK, SI, SB, SO, ZA, GS, ES, LK, SD, SR, SJ, SZ, SE, CH, SY, TW, TJ, TZ, TH, TL, TG, TK, TO, TT, TN, TR, TM, TC, TV, UG, UA, AE, GB, US, UM, UY, UZ, VU, VE, VN, VG, VI, WF, EH, YE, ZM, ZW; public String value() { return name(); } public static CountryCodeType fromValue(String v) { return valueOf(v); } }
/* * Generated by the Jasper component of Apache Tomcat * Version: JspC/ApacheTomcat8 * Generated at: 2016-08-23 16:29:30 UTC * Note: The last modified time of this file was set to * the last modified time of the source file after * generation to assist with modification tracking. */ package org.jivesoftware.openfire.admin; import javax.servlet.*; import javax.servlet.http.*; import javax.servlet.jsp.*; import org.jivesoftware.admin.AdminConsole; import org.jivesoftware.util.LocaleUtils; public final class error_002dserverdown_jsp extends org.apache.jasper.runtime.HttpJspBase implements org.apache.jasper.runtime.JspSourceDependent { private static final javax.servlet.jsp.JspFactory _jspxFactory = javax.servlet.jsp.JspFactory.getDefaultFactory(); private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants; private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody; private javax.el.ExpressionFactory _el_expressionfactory; private org.apache.tomcat.InstanceManager _jsp_instancemanager; public java.util.Map<java.lang.String,java.lang.Long> getDependants() { return _jspx_dependants; } public void _jspInit() { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig()); _el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory(); _jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig()); } public void _jspDestroy() { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.release(); } public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response) throws java.io.IOException, javax.servlet.ServletException { final javax.servlet.jsp.PageContext pageContext; javax.servlet.http.HttpSession session = null; final 
javax.servlet.ServletContext application; final javax.servlet.ServletConfig config; javax.servlet.jsp.JspWriter out = null; final java.lang.Object page = this; javax.servlet.jsp.JspWriter _jspx_out = null; javax.servlet.jsp.PageContext _jspx_page_context = null; try { response.setContentType("text/html"); pageContext = _jspxFactory.getPageContext(this, request, response, null, true, 8192, true); _jspx_page_context = pageContext; application = pageContext.getServletContext(); config = pageContext.getServletConfig(); session = pageContext.getSession(); out = pageContext.getOut(); _jspx_out = out; out.write(' '); out.write("\n\n\n\n\n\n"); org.jivesoftware.admin.AdminPageBean pageinfo = null; pageinfo = (org.jivesoftware.admin.AdminPageBean) _jspx_page_context.getAttribute("pageinfo", javax.servlet.jsp.PageContext.REQUEST_SCOPE); if (pageinfo == null){ pageinfo = new org.jivesoftware.admin.AdminPageBean(); _jspx_page_context.setAttribute("pageinfo", pageinfo, javax.servlet.jsp.PageContext.REQUEST_SCOPE); } out.write('\n'); out.write('\n'); out.write('\n'); org.jivesoftware.util.WebManager admin = null; admin = (org.jivesoftware.util.WebManager) _jspx_page_context.getAttribute("admin", javax.servlet.jsp.PageContext.PAGE_SCOPE); if (admin == null){ admin = new org.jivesoftware.util.WebManager(); _jspx_page_context.setAttribute("admin", admin, javax.servlet.jsp.PageContext.PAGE_SCOPE); } out.write('\n'); admin.init(request, response, session, application, out ); out.write('\n'); out.write('\n'); String path = request.getContextPath(); // Title of this page String title = AdminConsole.getAppName() + " " +LocaleUtils.getLocalizedString("error.serverdown.title"); pageinfo.setTitle(title); out.write("\n<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">\n\n<html>\n<head>\n <title>"); out.print( AdminConsole.getAppName() ); out.write(' '); if (_jspx_meth_fmt_005fmessage_005f0(_jspx_page_context)) return; out.print( (pageinfo.getTitle() != null ? 
(": "+pageinfo.getTitle()) : "") ); out.write("</title>\n <meta http-equiv=\"content-type\" content=\"text/html; charset=UTF-8\">\n <link rel=\"stylesheet\" type=\"text/css\" href=\""); out.print( path ); out.write("/style/global.css\">\n</head>\n\n<body>\n\n<div id=\"jive-header\">\n<table cellpadding=\"0\" cellspacing=\"0\" width=\"100%\" border=\"0\">\n<tbody>\n <tr>\n <td>\n <img src=\""); out.print( path ); out.write('/'); out.print( AdminConsole.getLogoImage() ); out.write("\" border=\"0\" alt=\""); out.print( AdminConsole.getAppName() ); out.write(' '); if (_jspx_meth_fmt_005fmessage_005f1(_jspx_page_context)) return; out.write("\">\n </td>\n <td align=\"right\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n <tr>\n <td>&nbsp;</td>\n <td class=\"info\">\n <nobr>"); out.print( AdminConsole.getAppName() ); out.write(' '); out.print( AdminConsole.getVersionString() ); out.write("</nobr>\n </td>\n </tr>\n </table>\n </td>\n </tr>\n</tbody>\n</table>\n</div>\n\n<div id=\"jive-main\">\n<table cellpadding=\"0\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n<tbody>\n <tr valign=\"top\">\n <td width=\"1%\">\n <div id=\"jive-sidebar\">\n <img src=\""); out.print( path ); out.write("/images/blank.gif\" width=\"5\" height=\"1\" border=\"0\" alt=\"\">\n </div>\n </td>\n <td width=\"99%\" id=\"jive-content\">\n\n <div id=\"jive-title\">\n "); out.print( title ); out.write("\n </div>\n\n <p>\n "); out.print( AdminConsole.getAppName() ); out.write(' '); if (_jspx_meth_fmt_005fmessage_005f2(_jspx_page_context)) return; out.write("\n </p>\n\n <ol>\n <li>\n "); if (_jspx_meth_fmt_005fmessage_005f3(_jspx_page_context)) return; out.write("\n </li>\n <li>\n <a href=\"index.jsp\">"); if (_jspx_meth_fmt_005fmessage_005f4(_jspx_page_context)) return; out.write("</a>.\n </li>\n </ol>\n\n </td>\n </tr>\n</tbody>\n</table>\n</div>\n\n</body>\n</html>"); } catch (java.lang.Throwable t) { if (!(t instanceof javax.servlet.jsp.SkipPageException)){ out = _jspx_out; if (out 
!= null && out.getBufferSize() != 0) try { if (response.isCommitted()) { out.flush(); } else { out.clearBuffer(); } } catch (java.io.IOException e) {} if (_jspx_page_context != null) _jspx_page_context.handlePageException(t); else throw new ServletException(t); } } finally { _jspxFactory.releasePageContext(_jspx_page_context); } } private boolean _jspx_meth_fmt_005fmessage_005f0(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f0.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f0.setParent(null); // /error-serverdown.jsp(28,41) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f0.setKey("error.serverdown.admin_console"); int _jspx_eval_fmt_005fmessage_005f0 = _jspx_th_fmt_005fmessage_005f0.doStartTag(); if (_jspx_th_fmt_005fmessage_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f0); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f0); return false; } private boolean _jspx_meth_fmt_005fmessage_005f1(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag 
_jspx_th_fmt_005fmessage_005f1 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f1.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f1.setParent(null); // /error-serverdown.jsp(40,116) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f1.setKey("error.serverdown.admin_console"); int _jspx_eval_fmt_005fmessage_005f1 = _jspx_th_fmt_005fmessage_005f1.doStartTag(); if (_jspx_th_fmt_005fmessage_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f1); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f1); return false; } private boolean _jspx_meth_fmt_005fmessage_005f2(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f2 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f2.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f2.setParent(null); // /error-serverdown.jsp(73,41) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f2.setKey("error.serverdown.is_down"); int _jspx_eval_fmt_005fmessage_005f2 = 
_jspx_th_fmt_005fmessage_005f2.doStartTag(); if (_jspx_th_fmt_005fmessage_005f2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f2); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f2); return false; } private boolean _jspx_meth_fmt_005fmessage_005f3(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f3 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f3.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f3.setParent(null); // /error-serverdown.jsp(78,16) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f3.setKey("error.serverdown.start"); int _jspx_eval_fmt_005fmessage_005f3 = _jspx_th_fmt_005fmessage_005f3.doStartTag(); if (_jspx_th_fmt_005fmessage_005f3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f3); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f3); return false; } private boolean _jspx_meth_fmt_005fmessage_005f4(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message 
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f4 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f4.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f4.setParent(null); // /error-serverdown.jsp(81,36) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f4.setKey("error.serverdown.login"); int _jspx_eval_fmt_005fmessage_005f4 = _jspx_th_fmt_005fmessage_005f4.doStartTag(); if (_jspx_th_fmt_005fmessage_005f4.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f4); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f4); return false; } }
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.easemob.chatuidemo.video.util; import java.io.File; import java.lang.ref.SoftReference; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import com.cpic.rabbitfarm.BuildConfig; import android.annotation.TargetApi; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Bitmap.Config; import android.graphics.BitmapFactory; import android.graphics.drawable.BitmapDrawable; import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Environment; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.util.LruCache; import android.util.Log; /** * This class memory caching of bitmaps in conjunction with the * {@link ImageWorker} class and its subclasses. Use * {@link ImageCache#getInstance(android.support.v4.app.FragmentManager, ImageCacheParams)} * to get an instance of this class, although usually a cache should be added * directly to an {@link ImageWorker} by calling * {@link ImageWorker#addImageCache(android.support.v4.app.FragmentManager, ImageCacheParams)} * . 
*/ public class ImageCache { private static final String TAG = "ImageCache"; // Default memory cache size in kilobytes private static final int DEFAULT_MEM_CACHE_SIZE = 1024 * 5; // 5MB private static final int DEFAULT_COMPRESS_QUALITY = 70; // Constants to easily toggle various caches private static final boolean DEFAULT_MEM_CACHE_ENABLED = true; private static final boolean DEFAULT_INIT_DISK_CACHE_ON_CREATE = false; private LruCache<String, BitmapDrawable> mMemoryCache; private ImageCacheParams mCacheParams; private Set<SoftReference<Bitmap>> mReusableBitmaps; /** * Create a new ImageCache object using the specified parameters. This * should not be called directly by other classes, instead use * {@link ImageCache#getInstance(android.support.v4.app.FragmentManager, ImageCacheParams)} * to fetch an ImageCache instance. * * @param cacheParams * The cache parameters to use to initialize the cache */ private ImageCache(ImageCacheParams cacheParams) { init(cacheParams); } /** * Return an {@link ImageCache} instance. A {@link RetainFragment} is used * to retain the ImageCache object across configuration changes such as a * change in device orientation. * * @param fragmentManager * The fragment manager to use when dealing with the retained * fragment. * @param cacheParams * The cache parameters to use if the ImageCache needs * instantiation. 
* @return An existing retained ImageCache object or a new one if one did * not exist */ public static ImageCache getInstance(FragmentManager fragmentManager, ImageCacheParams cacheParams) { // Search for, or create an instance of the non-UI RetainFragment final RetainFragment mRetainFragment = findOrCreateRetainFragment(fragmentManager); // See if we already have an ImageCache stored in RetainFragment ImageCache imageCache = (ImageCache) mRetainFragment.getObject(); // No existing ImageCache, create one and store it in RetainFragment if (imageCache == null) { imageCache = new ImageCache(cacheParams); mRetainFragment.setObject(imageCache); } return imageCache; } /** * Initialize the cache, providing all parameters. * * @param cacheParams * The cache parameters to initialize the cache */ private void init(ImageCacheParams cacheParams) { mCacheParams = cacheParams; // BEGIN_INCLUDE(init_memory_cache) // Set up memory cache if (mCacheParams.memoryCacheEnabled) { if (BuildConfig.DEBUG) { Log.d(TAG, "Memory cache created (size = " + mCacheParams.memCacheSize + ")"); } // If we're running on Honeycomb or newer, create a set of reusable // bitmaps that can be // populated into the inBitmap field of BitmapFactory.Options. Note // that the set is // of SoftReferences which will actually not be very effective due // to the garbage // collector being aggressive clearing Soft/WeakReferences. A better // approach // would be to use a strongly references bitmaps, however this would // require some // balancing of memory usage between this set and the bitmap // LruCache. It would also // require knowledge of the expected size of the bitmaps. From // Honeycomb to JellyBean // the size would need to be precise, from KitKat onward the size // would just need to // be the upper bound (due to changes in how inBitmap can re-use // bitmaps). 
if (Utils.hasHoneycomb()) { mReusableBitmaps = Collections .synchronizedSet(new HashSet<SoftReference<Bitmap>>()); } mMemoryCache = new LruCache<String, BitmapDrawable>( mCacheParams.memCacheSize) { /** * Notify the removed entry that is no longer being cached */ @Override protected void entryRemoved(boolean evicted, String key, BitmapDrawable oldValue, BitmapDrawable newValue) { if (RecyclingBitmapDrawable.class.isInstance(oldValue)) { // The removed entry is a recycling drawable, so notify // it // that it has been removed from the memory cache ((RecyclingBitmapDrawable) oldValue).setIsCached(false); } else { // The removed entry is a standard BitmapDrawable if (Utils.hasHoneycomb()) { // We're running on Honeycomb or later, so add the // bitmap // to a SoftReference set for possible use with // inBitmap later mReusableBitmaps.add(new SoftReference<Bitmap>( oldValue.getBitmap())); } } } /** * Measure item size in kilobytes rather than units which is * more practical for a bitmap cache */ @Override protected int sizeOf(String key, BitmapDrawable value) { final int bitmapSize = getBitmapSize(value) / 1024; return bitmapSize == 0 ? 1 : bitmapSize; } }; } } /** * Adds a bitmap to both memory and disk cache. * * @param data * Unique identifier for the bitmap to store * @param value * The bitmap drawable to store */ public void addBitmapToCache(String data, BitmapDrawable value) { // BEGIN_INCLUDE(add_bitmap_to_cache) if (data == null || value == null) { return; } // Add to memory cache if (mMemoryCache != null) { if (RecyclingBitmapDrawable.class.isInstance(value)) { // The removed entry is a recycling drawable, so notify it // that it has been added into the memory cache ((RecyclingBitmapDrawable) value).setIsCached(true); } mMemoryCache.put(data, value); } } /** * Get from memory cache. 
* * @param data * Unique identifier for which item to get * @return The bitmap drawable if found in cache, null otherwise */ public BitmapDrawable getBitmapFromMemCache(String data) { // BEGIN_INCLUDE(get_bitmap_from_mem_cache) BitmapDrawable memValue = null; if (mMemoryCache != null) { memValue = mMemoryCache.get(data); } if (BuildConfig.DEBUG && memValue != null) { Log.d(TAG, "Memory cache hit"); } return memValue; // END_INCLUDE(get_bitmap_from_mem_cache) } /** * @param options * - BitmapFactory.Options with out* options populated * @return Bitmap that case be used for inBitmap */ protected Bitmap getBitmapFromReusableSet(BitmapFactory.Options options) { // BEGIN_INCLUDE(get_bitmap_from_reusable_set) Bitmap bitmap = null; if (mReusableBitmaps != null && !mReusableBitmaps.isEmpty()) { synchronized (mReusableBitmaps) { final Iterator<SoftReference<Bitmap>> iterator = mReusableBitmaps .iterator(); Bitmap item; while (iterator.hasNext()) { item = iterator.next().get(); if (null != item && item.isMutable()) { // Check to see it the item can be used for inBitmap if (canUseForInBitmap(item, options)) { bitmap = item; // Remove from reusable set so it can't be used // again iterator.remove(); break; } } else { // Remove from the set if the reference has been // cleared. iterator.remove(); } } } } return bitmap; // END_INCLUDE(get_bitmap_from_reusable_set) } /** * Clears both the memory and disk cache associated with this ImageCache * object. Note that this includes disk access so this should not be * executed on the main/UI thread. */ public void clearCache() { if (mMemoryCache != null) { mMemoryCache.evictAll(); if (BuildConfig.DEBUG) { Log.d(TAG, "Memory cache cleared"); } } } /** * A holder class that contains cache parameters. 
*/ public static class ImageCacheParams { public int memCacheSize = DEFAULT_MEM_CACHE_SIZE; public int compressQuality = DEFAULT_COMPRESS_QUALITY; public boolean memoryCacheEnabled = DEFAULT_MEM_CACHE_ENABLED; public boolean initDiskCacheOnCreate = DEFAULT_INIT_DISK_CACHE_ON_CREATE; /** * Sets the memory cache size based on a percentage of the max available * VM memory. Eg. setting percent to 0.2 would set the memory cache to * one fifth of the available memory. Throws * {@link IllegalArgumentException} if percent is < 0.01 or > .8. * memCacheSize is stored in kilobytes instead of bytes as this will * eventually be passed to construct a LruCache which takes an int in * its constructor. * * This value should be chosen carefully based on a number of factors * Refer to the corresponding Android Training class for more * discussion: http://developer.android.com/training/displaying-bitmaps/ * * @param percent * Percent of available app memory to use to size memory * cache */ public void setMemCacheSizePercent(float percent) { if (percent < 0.01f || percent > 0.8f) { throw new IllegalArgumentException( "setMemCacheSizePercent - percent must be " + "between 0.01 and 0.8 (inclusive)"); } memCacheSize = Math.round(percent * Runtime.getRuntime().maxMemory() / 1024); } } /** * @param candidate * - Bitmap to check * @param targetOptions * - Options that have the out* value populated * @return true if <code>candidate</code> can be used for inBitmap re-use * with <code>targetOptions</code> */ @TargetApi(19) private static boolean canUseForInBitmap(Bitmap candidate, BitmapFactory.Options targetOptions) { // BEGIN_INCLUDE(can_use_for_inbitmap) if (!Utils.hasKitKat()) { // On earlier versions, the dimensions must match exactly and the // inSampleSize must be 1 return candidate.getWidth() == targetOptions.outWidth && candidate.getHeight() == targetOptions.outHeight && targetOptions.inSampleSize == 1; } // From Android 4.4 (KitKat) onward we can re-use if the byte size of // the new 
bitmap // is smaller than the reusable bitmap candidate allocation byte count. int width = targetOptions.outWidth / targetOptions.inSampleSize; int height = targetOptions.outHeight / targetOptions.inSampleSize; int byteCount = width * height * getBytesPerPixel(candidate.getConfig()); return byteCount <= candidate.getByteCount(); // END_INCLUDE(can_use_for_inbitmap) } /** * Return the byte usage per pixel of a bitmap based on its configuration. * * @param config * The bitmap configuration. * @return The byte usage per pixel. */ private static int getBytesPerPixel(Config config) { if (config == Config.ARGB_8888) { return 4; } else if (config == Config.RGB_565) { return 2; } else if (config == Config.ARGB_4444) { return 2; } else if (config == Config.ALPHA_8) { return 1; } return 1; } /** * Get a usable cache directory (external if available, internal otherwise). * * @param context * The context to use * @param uniqueName * A unique directory name to append to the cache dir * @return The cache dir */ public static File getDiskCacheDir(Context context, String uniqueName) { // Check if media is mounted or storage is built-in, if so, try and use // external cache dir // otherwise use internal cache dir final String cachePath = Environment.MEDIA_MOUNTED.equals(Environment .getExternalStorageState()) || !isExternalStorageRemovable() ? getExternalCacheDir( context).getPath() : context.getCacheDir().getPath(); return new File(cachePath + File.separator + uniqueName); } /** * A hashing method that changes a string (like a URL) into a hash suitable * for using as a disk filename. 
*/ public static String hashKeyForDisk(String key) { String cacheKey; try { final MessageDigest mDigest = MessageDigest.getInstance("MD5"); mDigest.update(key.getBytes()); cacheKey = bytesToHexString(mDigest.digest()); } catch (NoSuchAlgorithmException e) { cacheKey = String.valueOf(key.hashCode()); } return cacheKey; } private static String bytesToHexString(byte[] bytes) { // http://stackoverflow.com/questions/332079 StringBuilder sb = new StringBuilder(); for (int i = 0; i < bytes.length; i++) { String hex = Integer.toHexString(0xFF & bytes[i]); if (hex.length() == 1) { sb.append('0'); } sb.append(hex); } return sb.toString(); } /** * Get the size in bytes of a bitmap in a BitmapDrawable. Note that from * Android 4.4 (KitKat) onward this returns the allocated memory size of the * bitmap which can be larger than the actual bitmap data byte count (in the * case it was re-used). * * @param value * @return size in bytes */ @TargetApi(19) public static int getBitmapSize(BitmapDrawable value) { Bitmap bitmap = value.getBitmap(); // From KitKat onward use getAllocationByteCount() as allocated bytes // can potentially be // larger than bitmap byte count. // if (Utils.hasKitKat()) { // return bitmap.getAllocationByteCount(); // } if (Utils.hasHoneycombMR1()) { return bitmap.getByteCount(); } // Pre HC-MR1 return bitmap.getRowBytes() * bitmap.getHeight(); } /** * Check if external storage is built-in or removable. * * @return True if external storage is removable (like an SD card), false * otherwise. */ @TargetApi(VERSION_CODES.GINGERBREAD) public static boolean isExternalStorageRemovable() { if (Utils.hasGingerbread()) { return Environment.isExternalStorageRemovable(); } return true; } /** * Get the external app cache directory. 
* * @param context * The context to use * @return The external cache dir */ @TargetApi(VERSION_CODES.FROYO) public static File getExternalCacheDir(Context context) { if (Utils.hasFroyo()) { return context.getExternalCacheDir(); } // Before Froyo we need to construct the external cache dir ourselves final String cacheDir = "/Android/data/" + context.getPackageName() + "/cache/"; return new File(Environment.getExternalStorageDirectory().getPath() + cacheDir); } /** * Locate an existing instance of this Fragment or if not found, create and * add it using FragmentManager. * * @param fm * The FragmentManager manager to use. * @return The existing instance of the Fragment or the new instance if just * created. */ private static RetainFragment findOrCreateRetainFragment(FragmentManager fm) { // BEGIN_INCLUDE(find_create_retain_fragment) // Check to see if we have retained the worker fragment. RetainFragment mRetainFragment = (RetainFragment) fm .findFragmentByTag(TAG); // If not retained (or first time running), we need to create and add // it. if (mRetainFragment == null) { mRetainFragment = new RetainFragment(); fm.beginTransaction().add(mRetainFragment, TAG) .commitAllowingStateLoss(); } return mRetainFragment; // END_INCLUDE(find_create_retain_fragment) } /** * A simple non-UI Fragment that stores a single Object and is retained over * configuration changes. It will be used to retain the ImageCache object. */ public static class RetainFragment extends Fragment { private Object mObject; /** * Empty constructor as per the Fragment documentation */ public RetainFragment() { } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Make sure this Fragment is retained over a configuration change setRetainInstance(true); } /** * Store a single object in this Fragment. * * @param object * The object to store */ public void setObject(Object object) { mObject = object; } /** * Get the stored object. 
* * @return The stored object */ public Object getObject() { return mObject; } } }
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.feature.vt.api.correlator.program;

import java.util.*;
import java.util.Map.Entry;

import generic.DominantPair;
import generic.lsh.KandL;
import generic.lsh.LSHMemoryModel;
import generic.lsh.vector.LSHCosineVectorAccum;
import generic.lsh.vector.VectorCompare;
import ghidra.feature.vt.api.main.*;
import ghidra.feature.vt.api.util.VTAbstractProgramCorrelator;
import ghidra.framework.options.ToolOptions;
import ghidra.framework.plugintool.ServiceProvider;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSetView;
import ghidra.program.model.listing.*;
import ghidra.program.model.symbol.*;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;

/**
 * Version-tracking correlator that proposes matches between source and
 * destination programs by comparing symbol names. Each qualifying symbol name
 * is decomposed into overlapping character n-grams (3-grams), accumulated into
 * an LSH cosine vector, and looked up in a locality-sensitive hash dictionary;
 * neighbors whose cosine similarity meets {@link #SIMILARITY_THRESHOLD} become
 * match candidates.
 */
public class SimilarSymbolNameProgramCorrelator extends VTAbstractProgramCorrelator {

	// Minimum cosine similarity for a neighbor pair to be reported as a match.
	// NOTE(review): public and non-final, so external code can tune it; left
	// as-is for compatibility, but consider exposing it as a correlator option.
	public static double SIMILARITY_THRESHOLD = 0.5;

	protected SimilarSymbolNameProgramCorrelator(ServiceProvider serviceProvider,
			Program sourceProgram, AddressSetView sourceAddressSet, Program destinationProgram,
			AddressSetView destinationAddressSet, ToolOptions options) {
		super(serviceProvider, sourceProgram, sourceAddressSet, destinationProgram,
			destinationAddressSet, options);
	}

	// Per-symbol n-gram feature vectors, keyed by symbol, for each program.
	HashMap<Symbol, LSHCosineVectorAccum> sourceMap;
	HashMap<Symbol, LSHCosineVectorAccum> destinationMap;

	// Maps each distinct n-gram string to its stable integer feature id.
	HashMap<String, Integer> idMap;

	// Next feature id to hand out; ids start at 1.
	int featureID = 0;

	// Symbol names shorter than this are skipped (option-controlled).
	int minNameLength;

	@Override
	protected void doCorrelate(VTMatchSet matchSet, TaskMonitor monitor)
			throws CancelledException {
		minNameLength =
			getOptions().getInt(SimilarSymbolNameProgramCorrelatorFactory.MIN_NAME_LENGTH,
				SimilarSymbolNameProgramCorrelatorFactory.MIN_NAME_LENGTH_DEFAULT);

		monitor.setMessage("Generating source dictionary");
		LSHMultiHash<Symbol> sourceDictionary = generateDictionary(monitor);

		monitor.setMessage("Finding destination symbols");
		findDestinations(matchSet, sourceDictionary, SIMILARITY_THRESHOLD, monitor);
	}

	/**
	 * Builds the LSH dictionary of source-symbol vectors. As a side effect of
	 * feature extraction this also populates {@link #destinationMap}, which
	 * {@link #findDestinations} consumes afterwards.
	 */
	private LSHMultiHash<Symbol> generateDictionary(TaskMonitor monitor) {
		final LSHMultiHash<Symbol> dictionary = generateLSHMultiHash();
		extractNGramFeatures(monitor, 3);
		dictionary.add(sourceMap, monitor);
		return dictionary;
	}

	/**
	 * Computes n-gram feature vectors for every qualifying symbol in both the
	 * source and destination programs, resetting all feature state first.
	 *
	 * @param monitor task monitor used for cancellation
	 * @param n       n-gram length (number of characters per feature)
	 */
	private void extractNGramFeatures(TaskMonitor monitor, int n) {
		sourceMap = new HashMap<>();
		destinationMap = new HashMap<>();
		idMap = new HashMap<>();

		SymbolIterator sourceSymbols =
			getSourceProgram().getSymbolTable().getAllSymbols(false);
		SymbolIterator destinationSymbols =
			getDestinationProgram().getSymbolTable().getAllSymbols(false);

		addSymbolsToMap(sourceSymbols, true, n, monitor);
		addSymbolsToMap(destinationSymbols, false, n, monitor);
	}

	/**
	 * Accumulates a weighted n-gram vector for each symbol that passes the
	 * filters: name at least {@link #minNameLength} characters, address inside
	 * the configured address set, and not a default- or analysis-generated
	 * name (those carry no matching signal).
	 */
	private void addSymbolsToMap(SymbolIterator symbolIt, boolean isSourceProgram, int n,
			TaskMonitor monitor) {
		// Every n-gram of a name contributes equally to the name's vector.
		double weight = 1.0 / n;

		AddressSetView addressSet =
			isSourceProgram ? getSourceAddressSet() : getDestinationAddressSet();
		HashMap<Symbol, LSHCosineVectorAccum> map =
			isSourceProgram ? sourceMap : destinationMap;

		while (symbolIt.hasNext()) {
			if (monitor.isCancelled()) {
				break;
			}
			Symbol symbol = symbolIt.next();
			String symbolName = symbol.getName();
			if (symbolName.length() < minNameLength) {
				continue;
			}
			if (!addressSet.contains(symbol.getAddress())) {
				continue;
			}
			if (symbol.getSource() == SourceType.DEFAULT ||
				symbol.getSource() == SourceType.ANALYSIS) {
				continue;
			}

			// Slide an n-character window over the name; i <= length - n is
			// equivalent to the original i < length - (n - 1) bound.
			for (int i = 0; i <= symbolName.length() - n; i++) {
				String nGram = symbolName.substring(i, i + n);
				LSHCosineVectorAccum vector = map.get(symbol);
				if (vector == null) {
					vector = new LSHCosineVectorAccum();
					map.put(symbol, vector);
				}
				vector.addHash(getFeatureID(nGram), weight);
			}
		}
	}

	/**
	 * Returns the stable integer id for the given n-gram, assigning the next
	 * sequential id on first sight. (Single map lookup instead of the former
	 * containsKey-then-get pair.)
	 */
	private int getFeatureID(String nGram) {
		Integer id = idMap.get(nGram);
		if (id != null) {
			return id;
		}
		featureID++;
		idMap.put(nGram, featureID);
		return featureID;
	}

	/**
	 * For every destination symbol vector, looks up near neighbors in the
	 * source dictionary and records each qualifying pair as a match.
	 */
	private void findDestinations(VTMatchSet matchSet, LSHMultiHash<Symbol> sourceDictionary,
			double threshold, TaskMonitor monitor) {
		monitor.initialize(destinationMap.size());
		for (Entry<Symbol, LSHCosineVectorAccum> entry : destinationMap.entrySet()) {
			if (monitor.isCancelled()) {
				return;
			}
			monitor.incrementProgress(1);

			Symbol destinationSymbol = entry.getKey();
			LSHCosineVectorAccum vector = entry.getValue();

			Set<DominantPair<Symbol, LSHCosineVectorAccum>> neighbors =
				sourceDictionary.lookup(vector);
			List<VTMatchInfo> members =
				transform(matchSet, destinationSymbol, vector, neighbors, threshold, monitor);

			for (VTMatchInfo member : members) {
				if (monitor.isCancelled()) {
					return;
				}
				if (member != null) {
					matchSet.addMatch(member);
				}
			}
		}
	}

	/**
	 * Converts the neighbor set for one destination symbol into concrete
	 * {@link VTMatchInfo} records, pairing functions with functions and data
	 * with data, and filtering by the similarity threshold. Thunk functions on
	 * either side are skipped.
	 */
	private List<VTMatchInfo> transform(VTMatchSet matchSet, Symbol destinationSymbol,
			LSHCosineVectorAccum destinationVector,
			Set<DominantPair<Symbol, LSHCosineVectorAccum>> neighbors, double threshold,
			TaskMonitor monitor) {
		List<VTMatchInfo> result = new ArrayList<>();

		int sourceLength = 0;
		int destinationLength = 0;

		Address destinationAddress = destinationSymbol.getAddress();
		FunctionManager destinationFunctionManager =
			getDestinationProgram().getFunctionManager();
		Function destinationFunction =
			destinationFunctionManager.getFunctionAt(destinationAddress);
		CodeUnit destinationCodeUnit = null;
		VectorCompare veccompare = new VectorCompare();

		if (destinationFunction == null) {
			// No function here; the symbol must label data (a code unit).
			destinationCodeUnit =
				getDestinationProgram().getListing().getCodeUnitAt(destinationAddress);
			if (destinationCodeUnit == null) {
				return result;
			}
		}
		if (destinationFunction != null && destinationFunction.isThunk()) {
			return result;
		}

		for (DominantPair<Symbol, LSHCosineVectorAccum> neighbor : neighbors) {
			if (monitor.isCancelled()) {
				break;
			}
			Symbol sourceSymbol = neighbor.first;
			Address sourceAddress = sourceSymbol.getAddress();

			VTAssociationType type;
			FunctionManager sourceFunctionManager = getSourceProgram().getFunctionManager();
			Function sourceFunction = sourceFunctionManager.getFunctionAt(sourceAddress);

			if (destinationFunction != null) {
				// Function-to-function match only; skip data and thunks.
				if (sourceFunction == null || sourceFunction.isThunk()) {
					continue;
				}
				type = VTAssociationType.FUNCTION;
				sourceLength = (int) sourceFunction.getBody().getNumAddresses();
				destinationLength = (int) destinationFunction.getBody().getNumAddresses();
			}
			else {
				// Data-to-data match only; skip functions.
				if (sourceFunction != null) {
					continue;
				}
				CodeUnit sourceCodeUnit =
					getSourceProgram().getListing().getCodeUnitAt(sourceAddress);
				if (sourceCodeUnit == null) {
					continue;
				}
				type = VTAssociationType.DATA;
				sourceLength = sourceCodeUnit.getLength();
				destinationLength = destinationCodeUnit.getLength();
			}

			LSHCosineVectorAccum sourceVector = neighbor.second;
			double similarity = sourceVector.compare(destinationVector, veccompare);
			if (similarity < threshold || Double.isNaN(similarity)) {
				continue;
			}

			// Confidence scales similarity by both vector lengths so longer
			// (more informative) names score higher; the 10x factor keeps the
			// scores in the range the VT UI expects.
			double confidence =
				similarity * sourceVector.getLength() * destinationVector.getLength();
			confidence *= 10.0;

			VTMatchInfo match = new VTMatchInfo(matchSet);
			match.setSimilarityScore(new VTScore(similarity));
			match.setConfidenceScore(new VTScore(confidence));
			match.setSourceLength(sourceLength);
			match.setDestinationLength(destinationLength);
			match.setSourceAddress(sourceAddress);
			match.setDestinationAddress(destinationAddress);
			match.setTag(null);
			match.setAssociationType(type);

			result.add(match);
		}
		return result;
	}

	/**
	 * Creates the LSH dictionary sized according to the configured memory
	 * model option.
	 */
	private LSHMultiHash<Symbol> generateLSHMultiHash() {
		LSHMemoryModel model =
			getOptions().getEnum(SimilarSymbolNameProgramCorrelatorFactory.MEMORY_MODEL,
				SimilarSymbolNameProgramCorrelatorFactory.MEMORY_MODEL_DEFAULT);
		int L = KandL.memoryModelToL(model);
		return new LSHMultiHash<>(model.getK(), L);
	}

	@Override
	public String getName() {
		return SimilarSymbolNameProgramCorrelatorFactory.NAME;
	}
}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.javaFX.fxml;

import com.intellij.codeInsight.TargetElementUtil;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.application.PluginPathManager;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethod;
import com.intellij.refactoring.RenameRefactoring;
import com.intellij.refactoring.openapi.impl.JavaRenameRefactoringImpl;
import com.intellij.refactoring.rename.PsiElementRenameHandler;
import com.intellij.refactoring.rename.RenameHandler;
import com.intellij.refactoring.rename.RenameHandlerRegistry;
import com.intellij.refactoring.rename.RenameProcessor;
import com.intellij.refactoring.rename.inplace.MemberInplaceRenameHandler;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.testFramework.MapDataContext;
import com.intellij.testFramework.fixtures.CodeInsightTestUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.javaFX.refactoring.JavaFxPropertyRenameHandler;

import java.util.Arrays;

/**
 * Tests rename refactorings that span FXML files and their Java counterparts
 * (controllers, custom components, static/instance properties). Each test
 * loads fixtures named after the test from {@link #getTestDataPath()} and
 * verifies the FXML result against a {@code *_after.fxml} file.
 */
public class JavaFXRenameTest extends AbstractJavaFXRenameTest {

  // Rename a custom component class referenced from an FXML tag.
  public void testCustomComponent() throws Exception {
    doTest(getTestName(false) + "1");
  }

  public void testInRoot() throws Exception {
    doTest(getTestName(false) + "1");
  }

  // Rename an @FXML-injected controller field.
  public void testControllerField() throws Exception {
    doTest("newFieldName");
  }

  public void testControllerFieldWithRefs() throws Exception {
    doTest("newFieldName");
  }

  // Rename an event-handler method referenced from FXML.
  public void testHandler() throws Exception {
    doTest("newHandlerName");
  }

  // Renaming a handler inherited from a base class must also update the base
  // class method; doRenameWithAutomaticRenamers is presumably provided by
  // AbstractJavaFXRenameTest — TODO confirm against the base class.
  public void testPrivateSuperHandler() throws Exception {
    final String newName = "newHandlerName";
    final String fxmlPath = getTestName(true) + ".fxml";
    final String fxmlPathAfter = getTestName(true) + "_after.fxml";
    final String baseClassName = getTestName(false) + "Base";
    myFixture.configureByFiles(baseClassName + ".java", getTestName(false) + ".java", fxmlPath);
    doRenameWithAutomaticRenamers(newName);
    myFixture.checkResultByFile(fxmlPath, fxmlPathAfter, false);
    final PsiClass psiClass = myFixture.findClass(baseClassName);
    assertNotNull(psiClass);
    assertMethodExists(psiClass, newName);
  }

  // Inline (in-place) rename of custom component tags.
  public void testCustomComponentTag() throws Exception {
    doTest("Foo", true);
  }

  public void testCustomComponentPropertyTag() throws Exception {
    doTest("Foo", true);
  }

  // Rename initiated from the fx:id reference side; the controller field must follow.
  public void testFromReference() throws Exception {
    final String newName = "lbl1";
    doTest(newName);
    final PsiClass controllerClass = myFixture.findClass(getTestName(false));
    assertNotNull(controllerClass);
    assertFieldExists(controllerClass, newName);
  }

  // fx:id rename with only the FXML file present (no controller).
  public void testIdWithRefs() throws Exception {
    myFixture.configureByFiles(getTestName(true) + ".fxml");
    PsiElement element = TargetElementUtil
      .findTargetElement(getEditor(), TargetElementUtil.ELEMENT_NAME_ACCEPTED | TargetElementUtil.REFERENCED_ELEMENT_ACCEPTED);
    assertNotNull(element);
    new RenameProcessor(getProject(), element, "lb1", true, true).run();
    myFixture.checkResultByFile(getTestName(true) + "_after.fxml");
  }

  // Built-in properties such as "controller" must refuse to rename.
  public void testControllerBare() throws Exception {
    doTestErrorHint("Foo", "Cannot rename built-in property");
  }

  public void testControllerInExpr() throws Exception {
    doTestErrorHint("Foo", "Cannot rename built-in property");
  }

  // Expects the rename to abort with the given refactoring error hint.
  private void doTestErrorHint(String newName, String message) throws Exception {
    try {
      doTest(newName, true);
      fail(message);
    } catch (CommonRefactoringUtil.RefactoringErrorHintException expectedException) {
      assertEquals(message, expectedException.getMessage());
    }
  }

  public void testPropertyRenameHandlerPresent() throws Exception {
    doTestPropertyRenameHandler(getTestName(true) + ".fxml", getTestName(false) + ".java");
  }

  public void testPropertyRenameHandlerPresentForStatic() throws Exception {
    doTestPropertyRenameHandler(getTestName(true) + ".fxml", "container/MyCustomContainer.java");
  }

  // Verifies that the registry picks JavaFxPropertyRenameHandler for the caret position.
  public void doTestPropertyRenameHandler(String... files) throws Exception {
    myFixture.configureByFiles(files);
    final MapDataContext dataContext = new MapDataContext();
    dataContext.put(CommonDataKeys.EDITOR, getEditor());
    final RenameHandler renameHandler = RenameHandlerRegistry.getInstance().getRenameHandler(dataContext);
    assertTrue(renameHandler instanceof JavaFxPropertyRenameHandler);
  }

  public void testStaticPropertyImportClass() throws Exception {
    doTestStaticProperty("newPropName2", "container.MyCustomContainer");
  }

  public void testStaticPropertyImportPackage() throws Exception {
    doTestStaticProperty("newPropName2", "container.MyCustomContainer");
  }

  // Renames a static property via the dedicated handler and checks that the
  // declaring class gained the corresponding setter.
  public void doTestStaticProperty(@NonNls String newName, String className) throws Exception {
    myFixture.configureByFiles(getTestName(true) + ".fxml", className.replace('.', '/') + ".java");
    final MapDataContext dataContext = new MapDataContext();
    dataContext.put(CommonDataKeys.EDITOR, getEditor());
    dataContext.put(PsiElementRenameHandler.DEFAULT_NAME, newName);
    final JavaFxPropertyRenameHandler renameHandler = new JavaFxPropertyRenameHandler();
    assertTrue(renameHandler.isAvailableOnDataContext(dataContext));
    renameHandler.invoke(getProject(), getEditor(), null, dataContext);
    myFixture.checkResultByFile(getTestName(true) + "_after.fxml");
    final PsiClass psiClass = myFixture.findClass(className);
    assertNotNull(psiClass);
    final String propName = newName.substring(0, 1).toUpperCase() + newName.substring(1);
    assertMethodExists(psiClass, "set" + propName);
  }

  // Renaming the setter method itself must update static-property usages in FXML.
  public void testStaticPropertyMethod() throws Exception {
    final String className="container.MyCustomContainer";
    final String methodName = "setStaticProp";
    final String newName = "setNewMethodName";
    myFixture.configureByFiles(getTestName(true) + ".fxml", className.replace('.', '/') + ".java");
    final PsiClass psiClass = myFixture.findClass(className);
    assertNotNull(psiClass);
    final PsiMethod[] methods = psiClass.findMethodsByName(methodName, false);
    assertEquals(1, methods.length);
    final PsiMethod method = methods[0];
    final RenameRefactoring rename = new JavaRenameRefactoringImpl(getProject(), method, newName, false, false);
    rename.run();
    myFixture.checkResultByFile(getTestName(true) + "_after.fxml");
    assertMethodExists(psiClass, newName);
  }

  // Properties declared in library code must not be renamable.
  public void testStaticPropertyFromLibrary() throws Exception {
    doTestErrorHint("Foo", "Cannot perform refactoring.\n" +
                           "Selected method is not located inside the project");
  }

  public void testControllerMethod() throws Exception {
    final PsiClass psiClass = doTestHandler("newName", null);
    assertMethodExists(psiClass, "getNewName");
  }

  public void testControllerStringProperty() throws Exception {
    doTestProperty("newName", false);
  }

  public void testControllerBooleanProperty() throws Exception {
    doTestProperty("newName", true);
  }

  public void testModelIdProperty() throws Exception {
    doTestProperty("newName", "model.Data", false);
  }

  public void testModelFieldProperty() throws Exception {
    doTestProperty("newName", "model.Data", false);
  }

  public void doTestProperty(String name, boolean isBoolean) throws Exception {
    doTestProperty(name, null, isBoolean);
  }

  // Renames a bean property and checks that getter/setter/xxxProperty/field were all renamed.
  public void doTestProperty(@NonNls String name, String className, boolean isBoolean) throws Exception {
    final PsiClass psiClass = doTestHandler(name, className);
    final String propName = name.substring(0, 1).toUpperCase() + name.substring(1);
    assertMethodExists(psiClass, (isBoolean ? "is" : "get") + propName);
    assertMethodExists(psiClass, "set" + propName);
    assertMethodExists(psiClass, name + "Property");
    assertFieldExists(psiClass, name);
  }

  /**
   * Drives JavaFxPropertyRenameHandler directly (bypassing the registry),
   * checks the FXML result, and returns the (possibly separate) class the
   * property lives in for further assertions.
   */
  @NotNull
  public PsiClass doTestHandler(String newName, String className) throws Exception {
    if (className == null) {
      className = getTestName(false);
      myFixture.configureByFiles(getTestName(true) + ".fxml", getTestName(false) + ".java");
    }
    else {
      myFixture.configureByFiles(getTestName(true) + ".fxml", getTestName(false) + ".java", className.replace('.', '/') + ".java");
    }
    final MapDataContext dataContext = new MapDataContext();
    dataContext.put(CommonDataKeys.EDITOR, getEditor());
    dataContext.put(PsiElementRenameHandler.DEFAULT_NAME, newName);
    final JavaFxPropertyRenameHandler renameHandler = new JavaFxPropertyRenameHandler();
    assertTrue(renameHandler.isAvailableOnDataContext(dataContext));
    renameHandler.invoke(getProject(), getEditor(), null, dataContext);
    myFixture.checkResultByFile(getTestName(true) + "_after.fxml");
    final PsiClass psiClass = myFixture.findClass(className);
    assertNotNull(psiClass);
    return psiClass;
  }

  private static void assertFieldExists(PsiClass controllerClass, String name) {
    assertNotNull(name, controllerClass.findFieldByName(name, false));
  }

  // Asserts exactly one method with the given name exists in the class itself.
  private static void assertMethodExists(PsiClass controllerClass, String name) {
    final PsiMethod[] methods = controllerClass.findMethodsByName(name, false);
    assertOrderedEquals(Arrays.stream(methods).map(PsiMethod::getName).toArray(), name);
  }

  private void doTest(final String newName) throws Exception {
    doTest(newName, false);
  }

  // Renames the element at caret, either in-place (inline) or via a RenameProcessor.
  private void doTest(final String newName, boolean inline) throws Exception {
    myFixture.configureByFiles(getTestName(true) + ".fxml", getTestName(false) + ".java");
    PsiElement element = TargetElementUtil
      .findTargetElement(getEditor(), TargetElementUtil.ELEMENT_NAME_ACCEPTED | TargetElementUtil.REFERENCED_ELEMENT_ACCEPTED);
    assertNotNull(element);
    if (inline) {
      CodeInsightTestUtil.doInlineRename(new MemberInplaceRenameHandler(), newName, getEditor(), element);
    }
    else {
      new RenameProcessor(getProject(), element, newName, true, true).run();
    }
    myFixture.checkResultByFile(getTestName(true) + "_after.fxml");
  }

  @NotNull
  @Override
  protected String getTestDataPath() {
    return PluginPathManager.getPluginHomePath("javaFX") + "/testData/rename/";
  }
}
package application.services;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.function.Consumer;

import application.DataSet;
import application.MapApp;
import application.MarkerManager;
import application.RouteVisualization;
import application.controllers.RouteController;
import java.util.Iterator;

import geography.GeographicPoint;
import geography.RoadSegment;
import gmapsfx.GoogleMapView;
import gmapsfx.javascript.object.GoogleMap;
import gmapsfx.javascript.object.LatLong;
import gmapsfx.javascript.object.LatLongBounds;
import gmapsfx.javascript.object.MVCArray;
import gmapsfx.shapes.Polyline;
import javafx.scene.control.Button;
import roadgraph.MapRoadNode;

/**
 * Computes routes between two map points (BFS / Dijkstra / A*) and renders
 * them as a polyline on the embedded Google Map, coordinating with the
 * {@link MarkerManager} for marker visibility and search visualization.
 */
public class RouteService {

	private GoogleMap map;
	private MarkerManager markerManager;
	// The polyline currently drawn on the map; null when no route is shown.
	private Polyline routeLine;
	// NOTE(review): never assigned or read in this class — confirm before removing.
	private RouteVisualization rv;

	public RouteService(GoogleMapView mapComponent, MarkerManager manager) {
		this.map = mapComponent.getMap();
		this.markerManager = manager;
	}

	/**
	 * Draws the given path on the map, replacing any previously displayed
	 * route line, fitting the viewport to the route bounds and enabling the
	 * visualization button.
	 *
	 * @return always true (kept for API symmetry with callers)
	 */
	private boolean displayRoute(List<LatLong> route) {
		if (routeLine != null) {
			removeRouteLine();
		}
		routeLine = new Polyline();
		MVCArray path = new MVCArray();
		LatLongBounds bounds = new LatLongBounds();
		for (LatLong point : route) {
			path.push(point);
			bounds = bounds.extend(point);
		}
		routeLine.setPath(path);
		map.addMapShape(routeLine);
		markerManager.hideIntermediateMarkers();
		map.fitBounds(bounds);
		markerManager.disableVisButton(false);
		return true;
	}

	/** Removes the route line (if any), clears any search visualization and restores markers. */
	public void hideRoute() {
		if (routeLine != null) {
			map.removeMapShape(routeLine);
			if (markerManager.getVisualization() != null) {
				markerManager.clearVisualization();
			}
			markerManager.restoreMarkers();
			markerManager.disableVisButton(true);
			routeLine = null;
		}
	}

	public void reset() {
		removeRouteLine();
	}

	public boolean isRouteDisplayed() {
		return routeLine != null;
	}

	/**
	 * Runs the search algorithm selected by {@code toggle} between start and
	 * end, displays the resulting route and (for weighted searches) an
	 * estimated trip time alert.
	 *
	 * @param toggle one of RouteController.BFS / DIJ / A_STAR
	 * @return true if a route was found and displayed, false otherwise
	 */
	public boolean displayRoute(geography.GeographicPoint start, geography.GeographicPoint end, int toggle) {
		if (routeLine == null) {
			if (markerManager.getVisualization() != null) {
				markerManager.clearVisualization();
			}
			if (toggle == RouteController.DIJ || toggle == RouteController.A_STAR || toggle == RouteController.BFS) {
				markerManager.initVisualization();
				Consumer<geography.GeographicPoint> nodeAccepter = markerManager.getVisualization()::acceptPoint;
				List<geography.GeographicPoint> path = null;
				if (toggle == RouteController.BFS) {
					path = markerManager.getDataSet().getGraph().bfs(start, end, nodeAccepter);
				}
				else {
					if (toggle == RouteController.DIJ) {
						path = markerManager.getDataSet().getGraph().dijkstra(start, end, nodeAccepter);
					}
					else if (toggle == RouteController.A_STAR) {
						path = markerManager.getDataSet().getGraph().aStarSearch(start, end, nodeAccepter);
					}
					// Weighted searches carry cumulative time on the final node.
					if (path != null) {
						MapApp.showInfoAlert("Estimated minimum total trip time : ",
								(int) Math.round(((MapRoadNode) path.get(path.size() - 1)).getTimeFromStartNode() * 60) + " mins");
					}
				}
				if (path == null) {
					MapApp.showInfoAlert("Routing Error : ", "No path found");
					return false;
				}
				List<LatLong> mapPath = constructMapPath(path);
				markerManager.setSelectMode(false);
				return displayRoute(mapPath);
			}
			return false;
		}
		return false;
	}

	/**
	 * Expands an intersection-only path into the full list of road-segment
	 * points so the drawn polyline follows the road geometry.
	 *
	 * Fixes two defects in the previous version: {@code minLength} was never
	 * updated (so the comparison was always true and the LAST matching
	 * segment won instead of the shortest), and {@code chosenSegment} was not
	 * reset between hops (a stale segment could be reused when a hop had no
	 * matching segment).
	 *
	 * @param path path containing only intersections
	 * @return LatLongs tracing the route along road segments
	 */
	private List<LatLong> constructMapPath(List<geography.GeographicPoint> path) {
		List<LatLong> retVal = new ArrayList<LatLong>();
		for (int i = 0; i < path.size() - 1; i++) {
			geography.GeographicPoint curr = path.get(i);
			geography.GeographicPoint next = path.get(i + 1);
			// Reset per hop so a previous hop's segment can never leak through.
			geography.RoadSegment chosenSegment = null;
			double minLength = Double.MAX_VALUE;
			if (markerManager.getDataSet().getRoads().containsKey(curr)) {
				HashSet<geography.RoadSegment> segments = markerManager.getDataSet().getRoads().get(curr);
				Iterator<geography.RoadSegment> it = segments.iterator();
				while (it.hasNext()) {
					geography.RoadSegment currSegment = it.next();
					// Keep the shortest segment connecting curr to next.
					if (currSegment.getOtherPoint(curr).equals(next)
							&& currSegment.getLength() < minLength) {
						minLength = currSegment.getLength();
						chosenSegment = currSegment;
					}
				}
				if (chosenSegment != null) {
					List<geography.GeographicPoint> segmentList = chosenSegment.getPoints(curr, next);
					for (geography.GeographicPoint point : segmentList) {
						retVal.add(new LatLong(point.getX(), point.getY()));
					}
				}
				else {
					System.err.println("ERROR in constructMapPath : chosenSegment was null");
				}
			}
		}
		return retVal;
	}

	private void removeRouteLine() {
		if (routeLine != null) {
			map.removeMapShape(routeLine);
		}
	}
}
package org.apache.hadoop.mapreduce.lib.output;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.NetCDFArrayWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import ucar.ma2.*;
import ucar.nc2.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * OutputFormat that writes one NetCDF-3 file per reduced longitude block.
 * Each call to write() materializes a local .nc file containing the lon
 * slice identified by the key and moves it into the job output directory.
 *
 * NOTE: the type parameters deliberately shadow the imported Text/List
 * names (which is why java.util.List is fully qualified in the bodies) —
 * preserved as-is from the original design.
 */
public class NetCDFOutputFormatCompactForLonReduced<Text, List> extends FileOutputFormat<Text, List> {

    public static final String NETCDF_INPUT_PATH = "hadoop.netcdf.outputformat.input";
    public static final String NETCDF_LOCAL_TEMPFILE_PREFIX = "hadoop.netcdfoutputformat.tempfileprefix";

    // Fix: previously initialized with NetCDFOutputFormatCompact2.class, which
    // logged under the wrong category and created a needless compile-time
    // dependency on a sibling class.
    private static final Log LOG = LogFactory.getLog(NetCDFOutputFormatCompactForLonReduced.class);

    public NetCDFOutputFormatCompactForLonReduced(){
        super();
    }

    /**
     * RecordWriter that assembles the NetCDF output for one longitude block.
     * Key format: "cumulativeLon,timeDimSize,latDimSize,lonDimSize";
     * value: a java.util.List of NetCDFArrayWritable rows (one per (lon,time)).
     */
    protected static class NetCDFRecordWriterCompactForLon<Text, List> extends RecordWriter<Text, List> {

        private FileSystem _fs;
        private TaskAttemptContext _job;
        private Path _output_path;
        // Read from configuration but currently unused in this writer.
        private String netCDFLocalTempPrefix;

        public NetCDFRecordWriterCompactForLon( FileSystem _fs, TaskAttemptContext _job ) {
            Configuration conf = _job.getConfiguration();
            this._fs = _fs;
            this._job = _job;
            this._output_path = getOutputPath(_job);
            this.netCDFLocalTempPrefix = conf.get(NETCDF_LOCAL_TEMPFILE_PREFIX);
        }

        /**
         * Writes one longitude block: opens the reference file, mirrors its
         * dimensions/variables/attributes into a new local NetCDF-3 file,
         * fills lon/lat/time coordinate arrays and the rsut payload from
         * {@code value}, then moves the finished file into the output path.
         *
         * @throws IOException wrapping any NetCDF or filesystem failure
         */
        public synchronized void write(Text key, List value) throws IOException {
            System.out.println( "[SAMAN][NetCDFRecordWriter][write] Beginning!" );

            String keyString = key.toString();
            String[] keySplitted = keyString.split(",");
            String currentCumulativeLon = keySplitted[0];
            String timeDimSize = keySplitted[1];
            String latDimSize = keySplitted[2];
            String lonDimSize = keySplitted[3];

            System.out.println( "Lon is: "+keySplitted[0]+",timeDim: "+keySplitted[1]
                    +",latDim: "+keySplitted[2]+",lonDim: "+keySplitted[3] );

            // How many lon chunks fit in one 128 MB block (each chunk is a
            // time x lat float plane).
            int blockSize = 128*1024*1024;
            int chunkSize = Integer.valueOf(timeDimSize)*Integer.valueOf(latDimSize)*4;
            int numChunksPerKey = (blockSize/chunkSize);

            // The last block may hold fewer lon indices than a full block.
            int lonIndexesSize =
                    ((Integer.valueOf(currentCumulativeLon)+1)*numChunksPerKey <= Integer.valueOf(lonDimSize)) ?
                            ( numChunksPerKey ) :
                            ( Integer.valueOf(lonDimSize)-(Integer.valueOf(currentCumulativeLon))*numChunksPerKey );

            System.out.println( "[SAMAN][NetCDFOutputFormatCompact2][Write] latIndexesSize="+lonIndexesSize );

            /* Writing partial NetCDF file into the temporary file */
            // NOTE(review): hard-coded cluster paths; should come from
            // configuration (NETCDF_INPUT_PATH / NETCDF_LOCAL_TEMPFILE_PREFIX).
            String fileName = "hdfs://c3n2:9000/rsut";
            String outputFileName = "/data/saman/lon-" + currentCumulativeLon + ".nc";

            NetcdfFile dataFile = null;
            NetcdfFileWriter outputFile = null;
            try {
                dataFile = NetcdfFile.open(fileName, null);
                outputFile = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, outputFileName);

                Variable vtime = dataFile.findVariable("time");
                Variable vtime_bnds = dataFile.findVariable("time_bnds");
                Variable vlat = dataFile.findVariable("lat");
                Variable vlat_bnds = dataFile.findVariable("lat_bnds");
                Variable vlon = dataFile.findVariable("lon");
                Variable vlon_bnds = dataFile.findVariable("lon_bnds");
                Variable vrsut = dataFile.findVariable("rsut");

                System.out.println("sizes are = vtime.size=" + vtime.getSize() + ", vtime_bnds.size=" + vtime_bnds.getSize()
                        + ", vlat.size=" + vlat.getSize() + ", vlat_bnds.size=" + vlat_bnds.getSize()
                        + ", vlon.size=" + vlon.getSize() + ", vlon_bnds.size=" + vlon_bnds.getSize()
                        + ", vrsut.size=" + vrsut.getSize());
                System.out.println("dimension names are = " + vtime.getDimensionsString() + "," + vlat.getDimensionsString()
                        + "," + vlon.getDimensionsString() + "," + vrsut.getDimensionsString());

                // Mirror dimensions; lon is truncated to this block's size.
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before Dimensions.");
                Dimension latDim = outputFile.addDimension(null, vlat.getDimensionsString(), (int) (vlat.getSize()));
                Dimension timeDim = outputFile.addDimension(null, vtime.getDimensionsString(), (int) (vtime.getSize()));
                Dimension lonDim = outputFile.addDimension(null, vlon.getDimensionsString(), lonIndexesSize);
                Dimension bndDim = outputFile.addDimension(null, "bnds", 2);
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] After Dimensions.");
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] latDim: " + latDim.getLength());

                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before List Dimensions;");
                java.util.List<Dimension> time_bnds_dim = new ArrayList<Dimension>();
                java.util.List<Dimension> lat_bnds_dim = new ArrayList<Dimension>();
                java.util.List<Dimension> lon_bnds_dim = new ArrayList<Dimension>();
                java.util.List<Dimension> rsut_dim = new ArrayList<Dimension>();
                time_bnds_dim.add(timeDim);
                time_bnds_dim.add(bndDim);
                lat_bnds_dim.add(latDim);
                lat_bnds_dim.add(bndDim);
                lon_bnds_dim.add(lonDim);
                lon_bnds_dim.add(bndDim);
                // Output payload is laid out (lon, time, lat).
                rsut_dim.add(lonDim);
                rsut_dim.add(timeDim);
                rsut_dim.add(latDim);

                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before Variables, with vlat Dimension string: " + vlat.getDimensionsString());
                Variable vlonNew = outputFile.addVariable(null, vlon.getShortName(), vlon.getDataType(), vlon.getDimensionsString());
                Variable vlonbndsNew = outputFile.addVariable(null, vlon_bnds.getShortName(), vlon_bnds.getDataType(), lon_bnds_dim);
                Variable vtimeNew = outputFile.addVariable(null, vtime.getShortName(), vtime.getDataType(), vtime.getDimensionsString());
                Variable vtimebndsNew = outputFile.addVariable(null, vtime_bnds.getShortName(), vtime_bnds.getDataType(), time_bnds_dim);
                Variable vlatNew = outputFile.addVariable(null, vlat.getShortName(), vlat.getDataType(), vlat.getDimensionsString());
                Variable vlatbndsNew = outputFile.addVariable(null, vlat_bnds.getShortName(), vlat_bnds.getDataType(), lat_bnds_dim);
                Variable vrsutNew = outputFile.addVariable(null, vrsut.getShortName(), vrsut.getDataType(), rsut_dim);

                // Copy per-variable attributes from the reference file.
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before Attributes;");
                java.util.List<Attribute> attributes = vtime.getAttributes();
                Iterator itr = attributes.iterator();
                while (itr.hasNext()) {
                    Attribute attribute = (Attribute) itr.next();
                    vtimeNew.addAttribute(attribute);
                }
                attributes = vlat.getAttributes();
                itr = attributes.iterator();
                while (itr.hasNext()) {
                    Attribute attribute = (Attribute) itr.next();
                    vlatNew.addAttribute(attribute);
                }
                attributes = vlon.getAttributes();
                itr = attributes.iterator();
                while (itr.hasNext()) {
                    Attribute attribute = (Attribute) itr.next();
                    vlonNew.addAttribute(attribute);
                }
                attributes = vrsut.getAttributes();
                itr = attributes.iterator();
                while (itr.hasNext()) {
                    Attribute attribute = (Attribute) itr.next();
                    vrsutNew.addAttribute(attribute);
                }

                // Fixed CMIP5/ana4MIPs global attributes.
                outputFile.addGroupAttribute(null, new Attribute("institution", "European Centre for Medium-Range Weather Forecasts"));
                outputFile.addGroupAttribute(null, new Attribute("institute_id", "ECMWF"));
                outputFile.addGroupAttribute(null, new Attribute("experiment_id", "ERA-Interim"));
                outputFile.addGroupAttribute(null, new Attribute("source", "ERA Interim, Synoptic Monthly Means, Full Resolution"));
                outputFile.addGroupAttribute(null, new Attribute("model_id", "IFS-Cy31r2"));
                outputFile.addGroupAttribute(null, new Attribute("contact", "ECMWF, Dick Dee (dick.dee@ecmwf.int)"));
                outputFile.addGroupAttribute(null, new Attribute("references", "http://www.ecmwf.int"));
                outputFile.addGroupAttribute(null, new Attribute("tracking_id", "df4494d9-1d4b-4156-8804-ce238542a777"));
                outputFile.addGroupAttribute(null, new Attribute("mip_specs", "CMIP5"));
                outputFile.addGroupAttribute(null, new Attribute("source_id", "ERA-Interim"));
                outputFile.addGroupAttribute(null, new Attribute("product", "reanalysis"));
                outputFile.addGroupAttribute(null, new Attribute("frequency", "mon"));
                outputFile.addGroupAttribute(null, new Attribute("creation_date", "2014-04-28T21:55:14Z"));
                outputFile.addGroupAttribute(null, new Attribute("history", "2014-04-28T21:54:28Z CMOR rewrote data to comply with CF standards and ana4MIPs requirements."));
                outputFile.addGroupAttribute(null, new Attribute("Conventions", "CF-1.4"));
                outputFile.addGroupAttribute(null, new Attribute("project_id", "ana4MIPs"));
                outputFile.addGroupAttribute(null, new Attribute("table_id", "Table Amon_ana (10 March 2011) fb925e593e0cbb86dd6e96fbbcb352e0"));
                outputFile.addGroupAttribute(null, new Attribute("title", "Reanalysis output prepared for ana4MIPs "));
                outputFile.addGroupAttribute(null, new Attribute("modeling_realm", "atmos"));
                outputFile.addGroupAttribute(null, new Attribute("cmor_version", "2.8.3"));

                // lon coordinate values for this block: global lon index.
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before DataLon;");
                Array dataLon = Array.factory(DataType.DOUBLE, new int[]{lonIndexesSize});
                int[] shape;
                for( int i = 0; i < lonIndexesSize; i++ ){
                    dataLon.setDouble(i, Double.valueOf(Integer.valueOf(currentCumulativeLon)*numChunksPerKey+i));
                }

                // lon bounds: index +/- 0.5.
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before DataLonBnds;");
                Array dataLonBnds = Array.factory(DataType.DOUBLE, new int[]{lonIndexesSize, 2});
                shape = dataLonBnds.getShape();
                Index2D idx = new Index2D(new int[]{lonIndexesSize, 2});
                for (int i = 0; i < lonIndexesSize; i++) {
                    for (int j = 0; j < shape[1]; j++) {
                        idx.set(i, j);
                        if( j == 0 ){
                            dataLonBnds.setDouble(idx, (double)(Integer.valueOf(currentCumulativeLon)*numChunksPerKey+i) - 0.5);
                        }else if( j == 1 ){
                            dataLonBnds.setDouble(idx, (double)(Integer.valueOf(currentCumulativeLon)*numChunksPerKey+i) + 0.5);
                        }
                    }
                }

                // time coordinate: plain 0..n-1 indices.
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before DataTime;");
                Array dataTime = Array.factory(DataType.DOUBLE, new int[]{(int) (vtime.getSize())});
                shape = dataTime.getShape();
                for (int i = 0; i < shape[0]; i++) {
                    dataTime.setDouble(i, (double)i);
                }

                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before DataTimeBnds;");
                Array dataTimeBnds = Array.factory(DataType.DOUBLE, new int[]{(int) (vtime.getSize()), 2});
                shape = dataTimeBnds.getShape();
                idx = new Index2D(new int[]{(int) (vtime.getSize()), 2});
                for (int i = 0; i < shape[0]; i++) {
                    for (int j = 0; j < shape[1]; j++) {
                        idx.set(i, j);
                        if( j == 0 ){
                            dataTimeBnds.setDouble(idx, (double)i - 0.5);
                        }else if( j == 1 ){
                            dataTimeBnds.setDouble(idx, (double)i + 0.5);
                        }
                    }
                }

                // lat coordinate: plain 0..n-1 indices.
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before DataLat;");
                Array dataLat = Array.factory(DataType.DOUBLE, new int[]{(int) (vlat.getSize())});
                shape = dataLat.getShape();
                for (int i = 0; i < shape[0]; i++) {
                    dataLat.setDouble(i, (double)i);
                }

                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before DataLatBnds;");
                Array dataLatBnds = Array.factory(DataType.DOUBLE, new int[]{(int) (vlat.getSize()), 2});
                shape = dataLatBnds.getShape();
                idx = new Index2D(new int[]{(int) (vlat.getSize()), 2});
                for (int i = 0; i < shape[0]; i++) {
                    for (int j = 0; j < shape[1]; j++) {
                        idx.set(i, j);
                        if( j == 0 ){
                            dataLatBnds.setDouble(idx, (double)i - 0.5 );
                        }else if( j == 1 ){
                            dataLatBnds.setDouble(idx, (double)i + 0.5 );
                        }
                    }
                }

                // Payload: one NetCDFArrayWritable row per (lon, time); its
                // float entries start at offset 2 (records[2+k]) — presumably
                // the first two floats carry row metadata; TODO confirm against
                // the producer of these rows.
                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before DataRsut;");
                Index3D idx3 = new Index3D(new int[]{lonIndexesSize, (int) (vtime.getSize()), (int) (vlat.getSize())});
                Array dataRsut = Array.factory(DataType.FLOAT, new int[]{lonIndexesSize, (int) (vtime.getSize()), (int) (vlat.getSize())});
                int globalIndex = 0;
                for( int i = 0; i < lonIndexesSize; i++ ) {
                    for (int j = 0; j < vtime.getSize(); j++) {
                        NetCDFArrayWritable netCDFArrayWritable = ((java.util.List<NetCDFArrayWritable>)value).get(globalIndex);
                        FloatWritable[] records = (FloatWritable[])netCDFArrayWritable.toArray();
                        for (int k = 0; k < vlat.getSize(); k++) {
                            try {
                                idx3.set(i, j, k);
                                dataRsut.setFloat(idx3, records[2+k].get());
                            } catch (Exception e) {
                                e.printStackTrace();
                                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Exception in rsut = " + e.getMessage());
                                throw e;
                            }
                        }
                        globalIndex++;
                    }
                }

                System.out.println("[SAMAN][NetCDFOutputFormat][Write] Before Write;");
                outputFile.create();
                outputFile.write(vlonNew, dataLon);
                outputFile.write(vlonbndsNew, dataLonBnds);
                outputFile.write(vtimeNew, dataTime);
                outputFile.write(vtimebndsNew, dataTimeBnds);
                outputFile.write(vlatNew, dataLat);
                outputFile.write(vlatbndsNew, dataLatBnds);
                outputFile.write(vrsutNew, dataRsut);
                outputFile.close();

                // Move (not copy) the finished local file into the job output directory.
                _fs.moveFromLocalFile(new Path(outputFileName), new Path(_output_path + "/rsutlon" + currentCumulativeLon));

            } catch (Exception e) {
                System.out.println("[SAMAN][NetCDFOutputFormat][write] Exception in end = " + e.getMessage());
                throw new IOException(e);
            }

            System.out.println( "[SAMAN][NetCDFRecordWriter][write] End!" );
        }

        public synchronized void close(TaskAttemptContext context) throws IOException {
            // Nothing to release: each write() opens, flushes and closes its own file.
        }
    }

    /** Creates a writer bound to the job's output path and filesystem. */
    public RecordWriter<Text, List> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException {
        Configuration conf = job.getConfiguration();
        Path outputPath = getOutputPath(job);
        FileSystem _fs = outputPath.getFileSystem(job.getConfiguration());
        System.out.println( "[SAMAN][NetCDFOutputFormat][getRecordWriter] output path is: " + outputPath.getName() );
        return new NetCDFRecordWriterCompactForLon<Text, List>( _fs, job );
    }
}
/*
 *
 *  * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
 *  *
 *  * Licensed under the Apache License, Version 2.0 (the "License");
 *  * you may not use this file except in compliance with the License.
 *  * You may obtain a copy of the License at
 *  *
 *  * http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  * Unless required by applicable law or agreed to in writing, software
 *  * distributed under the License is distributed on an "AS IS" BASIS,
 *  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  * See the License for the specific language governing permissions and
 *  * limitations under the License.
 *  *
 *  * For more information: http://www.orientechnologies.com
 *
 */
package com.orientechnologies.orient.core.command.script;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import com.orientechnologies.common.util.OCommonConst;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabase.ATTRIBUTES;
import com.orientechnologies.orient.core.db.ODatabase.OPERATION_MODE;
import com.orientechnologies.orient.core.db.ODatabase.STATUS;
import com.orientechnologies.orient.core.db.ODatabaseInternal;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.dictionary.ODictionary;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.intent.OIntent;
import com.orientechnologies.orient.core.iterator.ORecordIteratorClass;
import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster;
import com.orientechnologies.orient.core.metadata.OMetadata;
import com.orientechnologies.orient.core.metadata.security.OSecurityUser;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLQuery;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.core.storage.ORecordCallback;
import com.orientechnologies.orient.core.tx.OTransaction;

/**
 * Document Database wrapper class to use from scripts. Mostly thin
 * delegations to the wrapped {@link ODatabaseDocumentTx}; query/command
 * results are converted to arrays for script-engine friendliness.
 *
 * NOTE(review): this class continues beyond the visible chunk; the final
 * save(...) overload below is truncated here and completed later in the file.
 *
 * @author Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 */
@SuppressWarnings("unchecked")
public class OScriptDocumentDatabaseWrapper {
  protected ODatabaseDocumentTx database;

  public OScriptDocumentDatabaseWrapper(final ODatabaseDocumentTx database) {
    this.database = database;
  }

  public OScriptDocumentDatabaseWrapper(final String iURL) {
    this.database = new ODatabaseDocumentTx(iURL);
  }

  // Closes any open session before reopening with the new credentials.
  public void switchUser(final String iUserName, final String iUserPassword) {
    if (!database.isClosed())
      database.close();
    database.open(iUserName, iUserPassword);
  }

  public OIdentifiable[] query(final String iText) {
    return query(iText, (Object[]) null);
  }

  public OIdentifiable[] query(final String iText, final Object... iParameters) {
    return query(new OSQLSynchQuery<Object>(iText), iParameters);
  }

  // Runs a synchronous query; never returns null (empty array instead).
  // convertParameters is defined later in this file, beyond this chunk.
  public OIdentifiable[] query(final OSQLQuery iQuery, final Object... iParameters) {
    final List<OIdentifiable> res = database.query(iQuery, convertParameters(iParameters));
    if (res == null)
      return OCommonConst.EMPTY_IDENTIFIABLE_ARRAY;
    return res.toArray(new OIdentifiable[res.size()]);
  }

  /**
   * To maintain the compatibility with JS API.
   */
  public Object executeCommand(final String iText) {
    return command(iText, (Object[]) null);
  }

  /**
   * To maintain the compatibility with JS API.
   */
  public Object executeCommand(final String iText, final Object... iParameters) {
    return command(iText, iParameters);
  }

  public Object command(final String iText) {
    return command(iText, (Object[]) null);
  }

  // List results are flattened to an array for script engines; other result
  // types are passed through unchanged.
  public Object command(final String iText, final Object... iParameters) {
    Object res = database.command(new OCommandSQL(iText)).execute(convertParameters(iParameters));
    if (res instanceof List) {
      final List<OIdentifiable> list = (List<OIdentifiable>) res;
      return list.toArray(new OIdentifiable[list.size()]);
    }
    return res;
  }

  public OIndex<?> getIndex(final String iName) {
    return database.getMetadata().getIndexManager().getIndex(iName);
  }

  public boolean exists() {
    return database.exists();
  }

  public ODocument newInstance() {
    return database.newInstance();
  }

  public void reload() {
    database.reload();
  }

  public ODocument newInstance(String iClassName) {
    return database.newInstance(iClassName);
  }

  public ORecordIteratorClass<ODocument> browseClass(String iClassName) {
    return database.browseClass(iClassName);
  }

  public STATUS getStatus() {
    return database.getStatus();
  }

  public ORecordIteratorClass<ODocument> browseClass(String iClassName, boolean iPolymorphic) {
    return database.browseClass(iClassName, iPolymorphic);
  }

  public <THISDB extends ODatabase> THISDB setStatus(STATUS iStatus) {
    return (THISDB) database.setStatus(iStatus);
  }

  public void drop() {
    database.drop();
  }

  public String getName() {
    return database.getName();
  }

  public String getURL() {
    return database.getURL();
  }

  public ORecordIteratorCluster<ODocument> browseCluster(String iClusterName) {
    return database.browseCluster(iClusterName);
  }

  public boolean isClosed() {
    return database.isClosed();
  }

  public <THISDB extends ODatabase> THISDB open(String iUserName, String iUserPassword) {
    return (THISDB) database.open(iUserName, iUserPassword);
  }

  // Convenience overload: wraps a plain map into a new ODocument.
  public ODocument save(final Map<String, Object> iObject) {
    return database.save(new ODocument().fields(iObject));
  }

  // Parses the JSON eagerly (second argument true = lazy-loading disabled)
  // before saving; the commented line shows the previous eager variant.
  public ODocument save(final String iString) {
    // return database.save((ORecord) new ODocument().fromJSON(iString));
    return database.save((ORecord) new ODocument().fromJSON(iString, true));
  }

  public ODocument save(ORecord iRecord) {
    return database.save(iRecord);
  }

  public boolean dropCluster(String iClusterName, final boolean iTruncate) {
    return database.dropCluster(iClusterName, iTruncate);
  }

  public <THISDB extends ODatabase> THISDB create() {
    return (THISDB) database.create();
  }

  // NOTE(review): the iTruncate parameter is ignored here — a literal `true`
  // is always passed, unlike the String overload above which forwards it.
  // This looks like a copy/paste bug; confirm and forward iTruncate.
  public boolean dropCluster(int iClusterId, final boolean iTruncate) {
    return database.dropCluster(iClusterId, true);
  }

  public void close() {
    database.close();
  }

  public int getClusters() {
    return database.getClusters();
  }

  public Collection<String> getClusterNames() {
    return database.getClusterNames();
  }

  public OTransaction getTransaction() {
    return database.getTransaction();
  }

  public ODatabase<ORecord> begin() {
    return database.begin();
  }

  public int getClusterIdByName(String iClusterName) {
    return database.getClusterIdByName(iClusterName);
  }

  public boolean isMVCC() {
    return database.isMVCC();
  }

  public String getClusterNameById(int iClusterId) {
    return database.getClusterNameById(iClusterId);
  }

  public <RET extends ODatabase<?>> RET setMVCC(boolean iValue) {
    return (RET) database.setMVCC(iValue);
  }

  public long getClusterRecordSizeById(int iClusterId) {
    return database.getClusterRecordSizeById(iClusterId);
  }

  public boolean isValidationEnabled() {
    return database.isValidationEnabled();
  }

  public long getClusterRecordSizeByName(String iClusterName) {
    return database.getClusterRecordSizeByName(iClusterName);
  }

  public <RET extends ODatabaseDocument> RET setValidationEnabled(boolean iValue) {
    return (RET) database.setValidationEnabled(iValue);
  }

  public OSecurityUser getUser() {
    return database.getUser();
  }

  public void setUser(OUser user) {
    database.setUser(user);
  }

  // This declaration is truncated at the chunk boundary and continues in the
  // next part of the file.
  public ODocument save(ORecord iRecord, OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<?
extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { return database.save(iRecord, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } public OMetadata getMetadata() { return database.getMetadata(); } public ODictionary<ORecord> getDictionary() { return database.getDictionary(); } public byte getRecordType() { return database.getRecordType(); } public ODatabase<ORecord> delete(ORID iRid) { return database.delete(iRid); } public <RET extends ORecord> RET load(ORID iRecordId) { return (RET) database.load(iRecordId); } public <RET extends ORecord> RET load(ORID iRecordId, String iFetchPlan) { return (RET) database.load(iRecordId, iFetchPlan); } public <RET extends ORecord> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache) { return (RET) database.load(iRecordId, iFetchPlan, iIgnoreCache); } public <RET extends ORecord> RET getRecord(OIdentifiable iIdentifiable) { return (RET) database.getRecord(iIdentifiable); } public int getDefaultClusterId() { return database.getDefaultClusterId(); } public <RET extends ORecord> RET load(final String iRidAsString) { return (RET) database.load(new ORecordId(iRidAsString)); } public <RET extends ORecord> RET load(ORecord iRecord) { return (RET) database.load(iRecord); } public boolean declareIntent(OIntent iIntent) { return database.declareIntent(iIntent); } public <RET extends ORecord> RET load(ORecord iRecord, String iFetchPlan) { return (RET) database.load(iRecord, iFetchPlan); } public <RET extends ORecord> RET load(ORecord iRecord, String iFetchPlan, boolean iIgnoreCache) { return (RET) database.load(iRecord, iFetchPlan, iIgnoreCache); } public ODatabase<?> setDatabaseOwner(ODatabaseInternal<?> iOwner) { return database.setDatabaseOwner(iOwner); } public void reload(ORecord iRecord) { database.reload(iRecord); } public void reload(ORecord iRecord, String iFetchPlan, boolean iIgnoreCache) { database.reload(iRecord, iFetchPlan, iIgnoreCache); } public Object 
setProperty(String iName, Object iValue) { return database.setProperty(iName, iValue); } public ODocument save(ORecord iRecord, String iClusterName) { return database.save(iRecord, iClusterName); } public Object getProperty(String iName) { return database.getProperty(iName); } public Iterator<Entry<String, Object>> getProperties() { return database.getProperties(); } public Object get(ATTRIBUTES iAttribute) { return database.get(iAttribute); } public <THISDB extends ODatabase> THISDB set(ATTRIBUTES attribute, Object iValue) { return (THISDB) database.set(attribute, iValue); } public void setInternal(ATTRIBUTES attribute, Object iValue) { database.setInternal(attribute, iValue); } public boolean isRetainRecords() { return database.isRetainRecords(); } public ODatabaseDocument setRetainRecords(boolean iValue) { return database.setRetainRecords(iValue); } public long getSize() { return database.getSize(); } public ODocument save(ORecord iRecord, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { return database.save(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } public ODatabaseDocumentTx delete(ODocument iRecord) { return database.delete(iRecord); } public long countClass(String iClassName) { return database.countClass(iClassName); } public ODatabase<ORecord> commit() { return database.commit(); } public ODatabase<ORecord> rollback() { return database.rollback(); } public String getType() { return database.getType(); } protected Object[] convertParameters(final Object[] iParameters) { if (iParameters != null) for (int i = 0; i < iParameters.length; ++i) { final Object p = iParameters[i]; if (p != null) { // if (p instanceof sun.org.mozilla.javascript.internal.IdScriptableObject) { // iParameters[i] = ((sun.org.mozilla.javascript.internal.NativeDate) p).to; // } } } return iParameters; } }
/*
 * ProGuard -- shrinking, optimization, obfuscation, and preverification
 *             of Java bytecode.
 *
 * Copyright (c) 2002-2014 Eric Lafortune (eric@graphics.cornell.edu)
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package proguard.preverify;

import proguard.classfile.*;
import proguard.classfile.attribute.*;
import proguard.classfile.attribute.preverification.*;
import proguard.classfile.attribute.visitor.AttributeVisitor;
import proguard.classfile.editor.*;
import proguard.classfile.instruction.InstructionConstants;
import proguard.classfile.util.SimplifiedVisitor;
import proguard.classfile.visitor.ClassPrinter;
import proguard.evaluation.*;
import proguard.evaluation.value.*;
import proguard.optimize.evaluation.*;

import java.util.*;

/**
 * This class can preverify methods in program class pools, according to a given
 * specification.
 *
 * @author Eric Lafortune
 */
public class CodePreverifier
extends      SimplifiedVisitor
implements   AttributeVisitor
{
    // Comment-toggle trick: deleting the leading '/' of the first line flips
    // which of the two DEBUG declarations is compiled in.
    //*
    private static final boolean DEBUG = false;
    /*/
    private static       boolean DEBUG = true;
    //*/

    private final boolean microEdition;

    private final PartialEvaluator    partialEvaluator    = new PartialEvaluator();
    private final LivenessAnalyzer    livenessAnalyzer    = new LivenessAnalyzer(partialEvaluator);
    private final CodeAttributeEditor codeAttributeEditor = new CodeAttributeEditor();


    /**
     * Creates a new CodePreverifier.
     */
    public CodePreverifier(boolean microEdition)
    {
        this.microEdition = microEdition;
    }


    // Implementations for AttributeVisitor.

    public void visitAnyAttribute(Clazz clazz, Attribute attribute) {}


    public void visitCodeAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute)
    {
        // TODO: Remove this when the preverifier has stabilized.
        // Catch any unexpected exceptions from the actual visiting method.
        try
        {
            // Process the code.
            visitCodeAttribute0(clazz, method, codeAttribute);
        }
        catch (RuntimeException ex)
        {
            System.err.println("Unexpected error while preverifying:");
            System.err.println("  Class       = ["+clazz.getName()+"]");
            System.err.println("  Method      = ["+method.getName(clazz)+method.getDescriptor(clazz)+"]");
            System.err.println("  Exception   = ["+ex.getClass().getName()+"] ("+ex.getMessage()+")");

            throw ex;
        }
    }


    // Computes the stack map frames for the code attribute, removes unreachable
    // code, and adds/replaces/deletes the StackMap(Table) attribute accordingly.
    public void visitCodeAttribute0(Clazz clazz, Method method, CodeAttribute codeAttribute)
    {
//        DEBUG =
//            clazz.getName().equals("abc/Def") &&
//            method.getName(clazz).equals("abc");

        ProgramClass  programClass  = (ProgramClass)clazz;
        ProgramMethod programMethod = (ProgramMethod)method;

        int codeLength = codeAttribute.u4codeLength;

        // Evaluate the method.
        //partialEvaluator.visitCodeAttribute(clazz, method, codeAttribute);
        // (the liveness analyzer drives the partial evaluator internally)
        livenessAnalyzer.visitCodeAttribute(clazz, method, codeAttribute);

        // We may have to remove unreachable code.
        codeAttributeEditor.reset(codeLength);

        // Collect the stack map frames.
        List stackMapFrameList = new ArrayList();

        for (int offset = 0; offset < codeLength; offset++)
        {
            // Only store frames at the beginning of code blocks.
            if (!partialEvaluator.isTraced(offset))
            {
                // Mark the unreachable instruction for deletion.
                codeAttributeEditor.deleteInstruction(offset);
            }
            else if (partialEvaluator.isBranchOrExceptionTarget(offset))
            {
                // Convert the variable values to types.
                VerificationType[] variableTypes =
                    correspondingVerificationTypes(programClass, programMethod, codeAttribute, offset, partialEvaluator.getVariablesBefore(offset));

                // Convert the stack values to types.
                VerificationType[] stackTypes =
                    correspondingVerificationTypes(programClass, programMethod, codeAttribute, offset, partialEvaluator.getStackBefore(offset));

                // Create and store a new frame.
                stackMapFrameList.add(new FullFrame(offset, variableTypes, stackTypes));
            }
        }

        // Compress the stack map frames if the target is not Java Micro Edition.
        if (!microEdition && !stackMapFrameList.isEmpty())
        {
            // Convert the initial variable values to types.
            VerificationType[] initialVariables =
                correspondingVerificationTypes(programClass, programMethod, codeAttribute, PartialEvaluator.AT_METHOD_ENTRY, partialEvaluator.getVariablesBefore(0));

            // Special case: the <init> method. Variable 0 holds the
            // still-uninitialized "this" reference at method entry.
            if (method.getName(programClass).equals(ClassConstants.METHOD_NAME_INIT))
            {
                initialVariables[0] = VerificationTypeFactory.createUninitializedThisType();
            }

            compressStackMapFrames(initialVariables, stackMapFrameList);
        }

        // Get the proper name for the attribute to be added/replaced/deleted.
        String stackMapAttributeName = microEdition ?
            ClassConstants.ATTR_StackMap :
            ClassConstants.ATTR_StackMapTable;

        int frameCount = stackMapFrameList.size();

        if (DEBUG)
        {
            // Compare the computed frames against any pre-existing attribute.
            Attribute originalStackMapAttribute = codeAttribute.getAttribute(clazz, stackMapAttributeName);

            if (originalStackMapAttribute != null)
            {
                int originalFrameCount = microEdition ?
                    ((StackMapAttribute)originalStackMapAttribute).u2stackMapFramesCount :
                    ((StackMapTableAttribute)originalStackMapAttribute).u2stackMapFramesCount;

                StackMapFrame[] originalFrames = microEdition ?
                    ((StackMapAttribute)originalStackMapAttribute).stackMapFrames :
                    ((StackMapTableAttribute)originalStackMapAttribute).stackMapFrames;

                if (frameCount != originalFrameCount ||
                    !Arrays.equals(stackMapFrameList.toArray(), originalFrames))
                {
                    System.out.println("Original preverification ["+clazz.getName()+"]:");
                    new ClassPrinter().visitProgramMethod(programClass, programMethod);
                }
            }
            else if (frameCount != 0)
            {
                System.out.println("Original preverification empty ["+clazz.getName()+"."+method.getName(clazz)+"]");
            }
        }

        if (frameCount == 0)
        {
            // Remove any stack map (table) attribute from the code attribute.
            new AttributesEditor(programClass, programMethod, codeAttribute, true).deleteAttribute(stackMapAttributeName);
        }
        else
        {
            Attribute stackMapAttribute;

            // Create the appropriate attribute.
            if (microEdition)
            {
                // Copy the frames into an array.
                FullFrame[] stackMapFrames = new FullFrame[frameCount];
                stackMapFrameList.toArray(stackMapFrames);

                // Put the frames into a stack map attribute.
                stackMapAttribute = new StackMapAttribute(stackMapFrames);
            }
            else
            {
                // Copy the frames into an array.
                StackMapFrame[] stackMapFrames = new StackMapFrame[frameCount];
                stackMapFrameList.toArray(stackMapFrames);

                // Put the frames into a stack map table attribute.
                stackMapAttribute = new StackMapTableAttribute(stackMapFrames);
            }

            // Fill out the name of the stack map attribute.
            stackMapAttribute.u2attributeNameIndex = new ConstantPoolEditor(programClass).addUtf8Constant(stackMapAttributeName);

            // Add the new stack map (table) attribute to the code attribute.
            new AttributesEditor(programClass, programMethod, codeAttribute, true).addAttribute(stackMapAttribute);

            if (DEBUG)
            {
                System.out.println("Preverifier ["+programClass.getName()+"."+programMethod.getName(programClass)+"]:");
                stackMapAttribute.accept(programClass, programMethod, codeAttribute, new ClassPrinter());
            }
        }

        // Apply code modifications, deleting unreachable code.
        codeAttributeEditor.visitCodeAttribute(clazz, method, codeAttribute);
    }


    // Small utility methods.

    /**
     * Creates and returns the verification types corresponding to the given
     * variables. If necessary, class constants are added to the constant pool
     * of the given class.
     */
    private VerificationType[] correspondingVerificationTypes(ProgramClass programClass, ProgramMethod programMethod, CodeAttribute codeAttribute, int offset, TracedVariables variables)
    {
        int maximumVariablesSize = variables.size();
        int typeCount = 0;
        int typeIndex = 0;

        // Count the number of verification types, ignoring any nulls at
        // the end.
        for (int index = 0; index < maximumVariablesSize; index++)
        {
            Value value = variables.getValue(index);

            typeIndex++;

            // Remember the maximum live type index.
            if (value != null &&
                (offset == PartialEvaluator.AT_METHOD_ENTRY ||
                 livenessAnalyzer.isAliveBefore(offset, index)))
            {
                typeCount = typeIndex;

                // Category 2 types that are alive are stored as single entries.
                if (value.isCategory2())
                {
                    index++;
                }
            }
        }

        // Create and fill out the verification types.
        VerificationType[] types = new VerificationType[typeCount];

        typeIndex = 0;

        // Note the slightly different terminating condition, because the
        // types may have been truncated.
        for (int index = 0; typeIndex < typeCount; index++)
        {
            Value value         = variables.getValue(index);
            Value producerValue = variables.getProducerValue(index);

            // Fill out the type.
            VerificationType type;

            if (value != null &&
                (offset == PartialEvaluator.AT_METHOD_ENTRY ||
                 livenessAnalyzer.isAliveBefore(offset, index)))
            {
                type = correspondingVerificationType(programClass, programMethod, codeAttribute, offset, index == 0, value, producerValue);

                // Category 2 types that are alive are stored as single entries.
                if (value.isCategory2())
                {
                    index++;
                }
            }
            else
            {
                // Dead or absent slot: pad with a Top type.
                type = VerificationTypeFactory.createTopType();
            }

            types[typeIndex++] = type;
        }

        return types;
    }


    /**
     * Creates and returns the verification types corresponding to the given
     * stack. If necessary, class constants are added to the constant pool
     * of the given class.
     */
    private VerificationType[] correspondingVerificationTypes(ProgramClass programClass, ProgramMethod programMethod, CodeAttribute codeAttribute, int offset, TracedStack stack)
    {
        int maximumStackSize = stack.size();
        int typeCount = 0;

        // Count the number of verification types.
        for (int index = 0; index < maximumStackSize; index++)
        {
            // We have to work down from the top of the stack.
            Value value = stack.getTop(index);

            typeCount++;

            // Category 2 types are stored as single entries.
            if (value.isCategory2())
            {
                index++;
            }
        }

        // Create and fill out the verification types.
        VerificationType[] types = new VerificationType[typeCount];

        // Filled from the end, since we walk the stack top-down but the
        // attribute lists types bottom-up.
        int typeIndex = typeCount;

        for (int index = 0; index < maximumStackSize; index++)
        {
            // We have to work down from the top of the stack.
            Value value         = stack.getTop(index);
            Value producerValue = stack.getTopProducerValue(index);

            // Fill out the type.
            types[--typeIndex] = correspondingVerificationType(programClass, programMethod, codeAttribute, offset, false, value, producerValue);

            // Category 2 types are stored as single entries.
            if (value.isCategory2())
            {
                index++;
            }
        }

        return types;
    }


    /**
     * Creates and returns the verification type corresponding to the given
     * value. If necessary, a class constant is added to the constant pool of
     * the given class.
     */
    private VerificationType correspondingVerificationType(ProgramClass programClass, ProgramMethod programMethod, CodeAttribute codeAttribute, int offset, boolean isVariable0, Value value, Value producerValue)
    {
        if (value == null)
        {
            return VerificationTypeFactory.createTopType();
        }

        int type = value.computationalType();

        switch (type)
        {
            case Value.TYPE_INSTRUCTION_OFFSET:
            case Value.TYPE_INTEGER:   return VerificationTypeFactory.createIntegerType();
            case Value.TYPE_LONG:      return VerificationTypeFactory.createLongType();
            case Value.TYPE_FLOAT:     return VerificationTypeFactory.createFloatType();
            case Value.TYPE_DOUBLE:    return VerificationTypeFactory.createDoubleType();
            case Value.TYPE_TOP:       return VerificationTypeFactory.createTopType();
            case Value.TYPE_REFERENCE:
                // Is it a Null type?
                ReferenceValue referenceValue = value.referenceValue();
                if (referenceValue.isNull() == Value.ALWAYS)
                {
                    return VerificationTypeFactory.createNullType();
                }

                // Does the reference type have a single producer?
                if (offset != PartialEvaluator.AT_METHOD_ENTRY)
                {
                    InstructionOffsetValue producers = producerValue.instructionOffsetValue();
                    if (producers.instructionOffsetCount() == 1)
                    {
                        int producerOffset = producers.instructionOffset(0);

                        // Follow any dup or swap instructions.
                        while (producerOffset != PartialEvaluator.AT_METHOD_ENTRY &&
                               isDupOrSwap(codeAttribute.code[producerOffset]))
                        {
                            producers      = partialEvaluator.getStackBefore(producerOffset).getTopProducerValue(0).instructionOffsetValue();
                            producerOffset = producers.minimumValue();
                        }

                        // Are we in an instance initialization method,
                        // before the super initialization, loading "this"?
                        if (partialEvaluator.isInitializer() &&
                            offset <= partialEvaluator.superInitializationOffset() &&
                            (isVariable0 ||
                             producerOffset > PartialEvaluator.AT_METHOD_ENTRY &&
                             codeAttribute.code[producerOffset] == InstructionConstants.OP_ALOAD_0))
                        {
                            // It's an UninitializedThis type.
                            return VerificationTypeFactory.createUninitializedThisType();
                        }

                        // Is the reference type newly created and still
                        // uninitialized?
                        if (producerOffset > PartialEvaluator.AT_METHOD_ENTRY &&
                            offset <= partialEvaluator.initializationOffset(producerOffset))
                        {
                            // It's an Uninitialized type.
                            return VerificationTypeFactory.createUninitializedType(producerOffset);
                        }
                    }
                }

                // It's an ordinary Object type.
                return VerificationTypeFactory.createObjectType(createClassConstant(programClass, referenceValue));
        }

        throw new IllegalArgumentException("Unknown computational type ["+type+"]");
    }


    /**
     * Finds or creates a class constant for the given reference value, and
     * returns its index in the constant pool.
     */
    private int createClassConstant(ProgramClass programClass, ReferenceValue referenceValue)
    {
        return new ConstantPoolEditor(programClass).addClassConstant(referenceValue.getType(), referenceValue.getReferencedClass());
    }


    /**
     * Compresses the given list of full frames, for use in a stack map table.
     */
    private void compressStackMapFrames(VerificationType[] initialVariableTypes, List stackMapFrameList)
    {
        int previousVariablesCount = initialVariableTypes.length;
        VerificationType[] previousVariableTypes = initialVariableTypes;

        int previousOffset = -1;

        for (int index = 0; index < stackMapFrameList.size(); index++)
        {
            FullFrame fullFrame = (FullFrame)stackMapFrameList.get(index);

            int variablesCount = fullFrame.variablesCount;
            VerificationType[] variables = fullFrame.variables;
            int stackCount = fullFrame.stackCount;
            VerificationType[] stack = fullFrame.stack;

            // Start computing the compressed frame.
            // The default is the full frame.
            StackMapFrame compressedFrame = fullFrame;

            // Are all variables equal?
            if (variablesCount == previousVariablesCount &&
                equalVerificationTypes(variables, previousVariableTypes, variablesCount))
            {
                // Are the stacks equal?
                //if (stackCount == previousStackCount &&
                //    equalVerificationTypes(stack, previousStack, stackCount))
                //{
                //    // Remove the identical frame.
                //    stackMapFrameList.remove(index--);
                //
                //    // Move on to the next frame (at the same index).
                //    continue;
                //}
                // Is the new stack empty?
                //else
                if (stackCount == 0)
                {
                    compressedFrame = new SameZeroFrame();
                }
                // Does the new stack contain a single element?
                else if (stackCount == 1)
                {
                    compressedFrame = new SameOneFrame(stack[0]);
                }
            }
            // Is the stack empty?
            else if (stackCount == 0)
            {
                int additionalVariablesCount = variablesCount - previousVariablesCount;

                // Are the variables chopped?
                if (additionalVariablesCount < 0 &&
                    additionalVariablesCount > -4 &&
                    equalVerificationTypes(variables, previousVariableTypes, variablesCount))
                {
                    compressedFrame = new LessZeroFrame((byte)-additionalVariablesCount);
                }
                // Are the variables extended?
                else if (//previousVariablesCount > 0 &&
                         additionalVariablesCount > 0 &&
                         additionalVariablesCount < 4 &&
                         equalVerificationTypes(variables, previousVariableTypes, previousVariablesCount))
                {
                    // Copy the additional variables into an array.
                    VerificationType[] additionalVariables = new VerificationType[additionalVariablesCount];
                    System.arraycopy(variables, variablesCount - additionalVariablesCount,
                                     additionalVariables, 0,
                                     additionalVariablesCount);

                    compressedFrame = new MoreZeroFrame(additionalVariables);
                }
            }

            // Compress the instruction offset.
            int offset = fullFrame.u2offsetDelta;
            compressedFrame.u2offsetDelta = offset - previousOffset - 1;
            previousOffset = offset;

            // Remember this frame.
            previousVariablesCount = fullFrame.variablesCount;
            previousVariableTypes  = fullFrame.variables;

            // Replace the full frame.
            stackMapFrameList.set(index, compressedFrame);
        }
    }


    /**
     * Returns whether the given arrays of verification types are equal, up to
     * the given length.
     */
    private boolean equalVerificationTypes(VerificationType[] types1,
                                           VerificationType[] types2,
                                           int                length)
    {
        if (length > 0 &&
            (types1.length < length ||
             types2.length < length))
        {
            return false;
        }

        for (int index = 0; index < length; index++)
        {
            if (!types1[index].equals(types2[index]))
            {
                return false;
            }
        }

        return true;
    }


    /**
     * Returns whether the given instruction opcode represents a dup or swap
     * instruction (dup, dup_x1, dup_x2, dup2, dup2_x1, dup2_x2, swap).
     */
    private boolean isDupOrSwap(int opcode)
    {
        return opcode >= InstructionConstants.OP_DUP &&
               opcode <= InstructionConstants.OP_SWAP;
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Created by IntelliJ IDEA. * User: yole * Date: 15.11.2006 * Time: 18:05:20 */ package com.intellij.openapi.diff.impl.patch; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.LineTokenizer; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.changes.TransparentlyFailedValue; import com.intellij.openapi.vcs.changes.TransparentlyFailedValueI; import com.intellij.util.SmartList; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.Nullable; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; public class PatchReader { @NonNls public static final String NO_NEWLINE_SIGNATURE = "\\ No newline at end of file"; private final List<String> myLines; private final PatchReader.PatchContentParser myPatchContentParser; private final AdditionalInfoParser myAdditionalInfoParser; private List<TextFilePatch> myPatches; private enum DiffFormat { CONTEXT, UNIFIED } @NonNls private static final String CONTEXT_HUNK_PREFIX = "***************"; @NonNls private static final String CONTEXT_FILE_PREFIX = "*** "; @NonNls private static final Pattern ourUnifiedHunkStartPattern = Pattern.compile("@@ -(\\d+)(,(\\d+))? \\+(\\d+)(,(\\d+))? 
@@.*"); @NonNls private static final Pattern ourContextBeforeHunkStartPattern = Pattern.compile("\\*\\*\\* (\\d+),(\\d+) \\*\\*\\*\\*"); @NonNls private static final Pattern ourContextAfterHunkStartPattern = Pattern.compile("--- (\\d+),(\\d+) ----"); public PatchReader(CharSequence patchContent) { myLines = LineTokenizer.tokenizeIntoList(patchContent, false); myAdditionalInfoParser = new AdditionalInfoParser(); myPatchContentParser = new PatchContentParser(); } public List<TextFilePatch> readAllPatches() throws PatchSyntaxException { parseAllPatches(); return myPatches; } @Nullable public CharSequence getBaseRevision(final Project project, final String relativeFilePath) { final Map<String, Map<String, CharSequence>> map = myAdditionalInfoParser.getResultMap(); if (! map.isEmpty()) { final Map<String, CharSequence> inner = map.get(relativeFilePath); if (inner != null) { final BaseRevisionTextPatchEP baseRevisionTextPatchEP = Extensions.findExtension(PatchEP.EP_NAME, project, BaseRevisionTextPatchEP.class); if (baseRevisionTextPatchEP != null) { return inner.get(baseRevisionTextPatchEP.getName()); } } } return null; } /*private void callAdditionalInfoExtensions() { final Map<String, Map<String, CharSequence>> map = myAdditionalInfoParser.getResultMap(); if (! 
map.isEmpty()) { PatchEP[] extensions = Extensions.getExtensions(PatchEP.EP_NAME, myProject); final Map<String, PatchEP> byName = new HashMap<String, PatchEP>(); for (PatchEP extension : extensions) { byName.put(extension.getName(), extension); } if (extensions == null || extensions.length == 0) return; for (Map.Entry<String, Map<String, CharSequence>> entry : map.entrySet()) { final String path = entry.getKey(); final Map<String, CharSequence> extensionToContents = entry.getValue(); for (Map.Entry<String, CharSequence> innerEntry : extensionToContents.entrySet()) { final PatchEP patchEP = byName.get(innerEntry.getKey()); if (patchEP != null) { patchEP.consumeContentBeforePatchApplied(path, innerEntry.getValue(), myCommitContext); } } } } }*/ public List<TextFilePatch> getPatches() { return myPatches; } public void parseAllPatches() throws PatchSyntaxException { final ListIterator<String> iterator = myLines.listIterator(); if (! iterator.hasNext()) { myPatches = Collections.emptyList(); return; } String next; boolean containsAdditional = false; while (iterator.hasNext()) { next = iterator.next(); final boolean containsAdditionalNow = myAdditionalInfoParser.testIsStart(next); if (containsAdditionalNow && containsAdditional) { myAdditionalInfoParser.acceptError(new PatchSyntaxException(iterator.previousIndex(), "Contains additional information without patch itself")); } if (containsAdditionalNow) { containsAdditional = containsAdditionalNow; myAdditionalInfoParser.parse(next, iterator); if (! 
iterator.hasNext()) { myAdditionalInfoParser.acceptError(new PatchSyntaxException(iterator.previousIndex(), "Contains additional information without patch itself")); break; } next = iterator.next(); } if (myPatchContentParser.testIsStart(next)) { myPatchContentParser.parse(next, iterator); //iterator.previous(); // to correctly initialize next if (containsAdditional) { final String lastName = myPatchContentParser.getLastName(); if (lastName == null) { myAdditionalInfoParser.acceptError(new PatchSyntaxException(iterator.previousIndex(), "Contains additional information without patch itself")); } else { myAdditionalInfoParser.copyToResult(lastName); } } containsAdditional = false; } } myPatches = myPatchContentParser.getResult(); } public TransparentlyFailedValueI<Map<String, Map<String, CharSequence>>, PatchSyntaxException> getAdditionalInfo(final Set<String> filterByPaths) { final TransparentlyFailedValue<Map<String, Map<String, CharSequence>>, PatchSyntaxException> value = new TransparentlyFailedValue<Map<String, Map<String, CharSequence>>, PatchSyntaxException>(); final Map<String, Map<String, CharSequence>> map = myAdditionalInfoParser.getResultMap(); final Map<String, Map<String, CharSequence>>newMap = new HashMap<String, Map<String, CharSequence>>(); for (Map.Entry<String, Map<String, CharSequence>> entry : map.entrySet()) { final Map<String, CharSequence> innerMap = entry.getValue(); if (filterByPaths == null || filterByPaths.contains(entry.getKey())) { newMap.put(entry.getKey(), innerMap); } } value.set(newMap); final PatchSyntaxException e = myAdditionalInfoParser.getSyntaxException(); if (e != null) { value.fail(e); } return value; } private static class AdditionalInfoParser implements Parser { // first is path! 
private final Map<String,Map<String, CharSequence>> myResultMap; private Map<String, CharSequence> myAddMap; private PatchSyntaxException mySyntaxException; private AdditionalInfoParser() { myAddMap = new HashMap<String, CharSequence>(); myResultMap = new HashMap<String, Map<String, CharSequence>>(); } public PatchSyntaxException getSyntaxException() { return mySyntaxException; } public Map<String, Map<String, CharSequence>> getResultMap() { return myResultMap; } public void copyToResult(final String filePath) { if (myAddMap != null && ! myAddMap.isEmpty()) { myResultMap.put(filePath, myAddMap); myAddMap = new HashMap<String, CharSequence>(); } } @Override public boolean testIsStart(String start) { if (mySyntaxException != null) return false; // stop on first error return start != null && start.contains(UnifiedDiffWriter.ADDITIONAL_PREFIX); } @Override public void parse(String start, ListIterator<String> iterator) { if (! iterator.hasNext()) { mySyntaxException = new PatchSyntaxException(iterator.previousIndex(), "Empty additional info header"); return; } while (true) { final String header = iterator.next(); final int idxHead = header.indexOf(UnifiedDiffWriter.ADD_INFO_HEADER); if (idxHead == -1) { if (myAddMap.isEmpty()) { mySyntaxException = new PatchSyntaxException(iterator.previousIndex(), "Empty additional info header"); } iterator.previous(); return; } final String subsystem = header.substring(idxHead + UnifiedDiffWriter.ADD_INFO_HEADER.length()).trim(); if (! iterator.hasNext()) { mySyntaxException = new PatchSyntaxException(iterator.previousIndex(), "Empty '" + subsystem + "' data section"); return; } final StringBuilder sb = new StringBuilder(); myAddMap.put(subsystem, sb); while (iterator.hasNext()) { final String line = iterator.next(); if (! 
line.startsWith(UnifiedDiffWriter.ADD_INFO_LINE_START)) { iterator.previous(); break; } if (sb.length() > 0) { sb.append("\n"); } sb.append(StringUtil.unescapeStringCharacters(line.substring(UnifiedDiffWriter.ADD_INFO_LINE_START.length()))); } } } public void acceptError(PatchSyntaxException e) { mySyntaxException = e; } } private static class PatchContentParser implements Parser { private DiffFormat myDiffFormat = null; private final List<TextFilePatch> myPatches; private boolean myDiffCommandLike; private boolean myIndexLike; private PatchContentParser() { myPatches = new SmartList<TextFilePatch>(); } @Override public boolean testIsStart(String start) { if (start.startsWith("diff")) { myDiffCommandLike = true; return false; } if (start.startsWith("index")) { myIndexLike = true; return false; } if (start.startsWith("--- ") && (myDiffFormat == null || myDiffFormat == DiffFormat.UNIFIED)) { myDiffFormat = DiffFormat.UNIFIED; return true; } else if (start.startsWith(CONTEXT_FILE_PREFIX) && (myDiffFormat == null || myDiffFormat == DiffFormat.CONTEXT)) { myDiffFormat = DiffFormat.CONTEXT; return true; } return false; } @Override public void parse(String start, ListIterator<String> iterator) throws PatchSyntaxException { final TextFilePatch patch = readPatch(start, iterator); if (patch != null) { myPatches.add(patch); } myDiffCommandLike = false; myIndexLike = false; } public List<TextFilePatch> getResult() throws PatchSyntaxException { return myPatches; } private TextFilePatch readPatch(String curLine, ListIterator<String> iterator) throws PatchSyntaxException { final TextFilePatch curPatch = new TextFilePatch(null); extractFileName(curLine, curPatch, true, myDiffCommandLike && myIndexLike); if (! iterator.hasNext()) throw new PatchSyntaxException(iterator.previousIndex(), "Second file name expected"); curLine = iterator.next(); String secondNamePrefix = myDiffFormat == DiffFormat.UNIFIED ? "+++ " : "--- "; if (! 
curLine.startsWith(secondNamePrefix)) { throw new PatchSyntaxException(iterator.previousIndex(), "Second file name expected"); } extractFileName(curLine, curPatch, false, myDiffCommandLike && myIndexLike); while (iterator.hasNext()) { PatchHunk hunk; if (myDiffFormat == DiffFormat.UNIFIED) { hunk = readNextHunkUnified(iterator); } else { hunk = readNextHunkContext(iterator); } if (hunk == null) break; curPatch.addHunk(hunk); } if (curPatch.getBeforeName() == null) { curPatch.setBeforeName(curPatch.getAfterName()); } if (curPatch.getAfterName() == null) { curPatch.setAfterName(curPatch.getBeforeName()); } return curPatch; } @Nullable private PatchHunk readNextHunkUnified(ListIterator<String> iterator) throws PatchSyntaxException { String curLine = null; int numIncrements = 0; while (iterator.hasNext()) { curLine = iterator.next(); ++ numIncrements; if (curLine.startsWith("--- ")) { for (int i = 0; i < numIncrements; i++) { iterator.previous(); } return null; } if (curLine.startsWith("@@ ")) { break; } } if (! iterator.hasNext()) return null; Matcher m = ourUnifiedHunkStartPattern.matcher(curLine); if (!m.matches()) { throw new PatchSyntaxException(iterator.previousIndex(), "Unknown hunk start syntax"); } int startLineBefore = Integer.parseInt(m.group(1)); final String linesBeforeText = m.group(3); int linesBefore = linesBeforeText == null ? 1 : Integer.parseInt(linesBeforeText); int startLineAfter = Integer.parseInt(m.group(4)); final String linesAfterText = m.group(6); int linesAfter = linesAfterText == null ? 
1 : Integer.parseInt(linesAfterText); PatchHunk hunk = new PatchHunk(startLineBefore-1, startLineBefore+linesBefore-1, startLineAfter-1, startLineAfter+linesAfter-1); PatchLine lastLine = null; int numLines = linesBefore + linesAfter; while (iterator.hasNext()) { String hunkCurLine = iterator.next(); -- numLines; if (lastLine != null && hunkCurLine.startsWith(NO_NEWLINE_SIGNATURE)) { lastLine.setSuppressNewLine(true); continue; } if (hunkCurLine.startsWith("--- ") && numLines == 0) { iterator.previous(); break; } lastLine = parsePatchLine(hunkCurLine, 1); if (lastLine == null) { iterator.previous(); break; } hunk.addLine(lastLine); } return hunk; } @Nullable public String getLastName() { if (myPatches.isEmpty()) { return null; } else { final TextFilePatch patch = myPatches.get(myPatches.size() - 1); return patch.getBeforeName() == null ? patch.getAfterName() : patch.getBeforeName(); } } @Nullable private static PatchLine parsePatchLine(final String line, final int prefixLength) { PatchLine.Type type; if (line.startsWith("+")) { type = PatchLine.Type.ADD; } else if (line.startsWith("-")) { type = PatchLine.Type.REMOVE; } else if (line.startsWith(" ") || line.length() == 0) { type = PatchLine.Type.CONTEXT; } else { return null; } String lineText; if (line.length() < prefixLength) { lineText = ""; } else { lineText = line.substring(prefixLength); } return new PatchLine(type, lineText); } @Nullable private PatchHunk readNextHunkContext(ListIterator<String> iterator) throws PatchSyntaxException { while (iterator.hasNext()) { String curLine = iterator.next(); if (curLine.startsWith(CONTEXT_FILE_PREFIX)) { iterator.previous(); return null; } if (curLine.startsWith(CONTEXT_HUNK_PREFIX)) { break; } } if (! iterator.hasNext()) { return null; } Matcher beforeMatcher = ourContextBeforeHunkStartPattern.matcher(iterator.next()); if (! 
beforeMatcher.matches()) { throw new PatchSyntaxException(iterator.previousIndex(), "Unknown before hunk start syntax"); } List<String> beforeLines = readContextDiffLines(iterator); if (! iterator.hasNext()) { throw new PatchSyntaxException(iterator.previousIndex(), "Missing after hunk"); } Matcher afterMatcher = ourContextAfterHunkStartPattern.matcher(iterator.next()); if (! afterMatcher.matches()) { throw new PatchSyntaxException(iterator.previousIndex(), "Unknown after hunk start syntax"); } //if (! iterator.hasNext()) { //throw new PatchSyntaxException(iterator.previousIndex(), "Unexpected patch end"); //} List<String> afterLines = readContextDiffLines(iterator); int startLineBefore = Integer.parseInt(beforeMatcher.group(1)); int endLineBefore = Integer.parseInt(beforeMatcher.group(2)); int startLineAfter = Integer.parseInt(afterMatcher.group(1)); int endLineAfter = Integer.parseInt(afterMatcher.group(2)); PatchHunk hunk = new PatchHunk(startLineBefore-1, endLineBefore-1, startLineAfter-1, endLineAfter-1); int beforeLineIndex = 0; int afterLineIndex = 0; PatchLine lastBeforePatchLine = null; PatchLine lastAfterPatchLine = null; if (beforeLines.size() == 0) { for(String line: afterLines) { hunk.addLine(parsePatchLine(line, 2)); } } else if (afterLines.size() == 0) { for(String line: beforeLines) { hunk.addLine(parsePatchLine(line, 2)); } } else { while(beforeLineIndex < beforeLines.size() || afterLineIndex < afterLines.size()) { String beforeLine = beforeLineIndex >= beforeLines.size() ? null : beforeLines.get(beforeLineIndex); String afterLine = afterLineIndex >= afterLines.size() ? 
null : afterLines.get(afterLineIndex); if (startsWith(beforeLine, NO_NEWLINE_SIGNATURE) && lastBeforePatchLine != null) { lastBeforePatchLine.setSuppressNewLine(true); beforeLineIndex++; } else if (startsWith(afterLine, NO_NEWLINE_SIGNATURE) && lastAfterPatchLine != null) { lastAfterPatchLine.setSuppressNewLine(true); afterLineIndex++; } else if (startsWith(beforeLine, " ") && (startsWith(afterLine, " ") || afterLine == null /* handle some weird cases with line breaks truncated at EOF */ )) { addContextDiffLine(hunk, beforeLine, PatchLine.Type.CONTEXT); beforeLineIndex++; afterLineIndex++; } else if (startsWith(beforeLine, "-")) { lastBeforePatchLine = addContextDiffLine(hunk, beforeLine, PatchLine.Type.REMOVE); beforeLineIndex++; } else if (startsWith(afterLine, "+")) { lastAfterPatchLine = addContextDiffLine(hunk, afterLine, PatchLine.Type.ADD); afterLineIndex++; } else if (startsWith(beforeLine, "!") && startsWith(afterLine, "!")) { while(beforeLineIndex < beforeLines.size() && beforeLines.get(beforeLineIndex).startsWith("! ")) { lastBeforePatchLine = addContextDiffLine(hunk, beforeLines.get(beforeLineIndex), PatchLine.Type.REMOVE); beforeLineIndex++; } while(afterLineIndex < afterLines.size() && afterLines.get(afterLineIndex).startsWith("! ")) { lastAfterPatchLine = addContextDiffLine(hunk, afterLines.get(afterLineIndex), PatchLine.Type.ADD); afterLineIndex++; } } else { throw new PatchSyntaxException(-1, "Unknown line prefix"); } } } return hunk; } private static boolean startsWith(@Nullable final String line, final String prefix) { return line != null && line.startsWith(prefix); } private static PatchLine addContextDiffLine(final PatchHunk hunk, final String line, final PatchLine.Type type) { final PatchLine patchLine = new PatchLine(type, line.length() < 2 ? 
"" : line.substring(2)); hunk.addLine(patchLine); return patchLine; } private List<String> readContextDiffLines(ListIterator<String> iterator) { ArrayList<String> result = new ArrayList<String>(); while (iterator.hasNext()) { final String line = iterator.next(); if (!line.startsWith(" ") && !line.startsWith("+ ") && !line.startsWith("- ") && !line.startsWith("! ") && !line.startsWith(NO_NEWLINE_SIGNATURE)) { iterator.previous(); break; } result.add(line); } return result; } private static void extractFileName(final String curLine, final FilePatch patch, final boolean before, final boolean gitPatch) { String fileName = curLine.substring(4); int pos = fileName.indexOf('\t'); if (pos < 0) { pos = fileName.indexOf(' '); } if (pos >= 0) { String versionId = fileName.substring(pos).trim(); fileName = fileName.substring(0, pos); if (versionId.length() > 0) { if (before) { patch.setBeforeVersionId(versionId); } else { patch.setAfterVersionId(versionId); } } } if ("/dev/null".equals(fileName)) return; if (before) { if (gitPatch && fileName.startsWith("a/")) { fileName = fileName.substring(2); } patch.setBeforeName(fileName); } else { if (gitPatch && fileName.startsWith("b/")) { fileName = fileName.substring(2); } patch.setAfterName(fileName); } } } private interface Parser { boolean testIsStart(final String start); void parse(final String start, final ListIterator<String> iterator) throws PatchSyntaxException; } }
/*
 * Abora-Gold
 * Part of the Abora hypertext project: http://www.abora.org
 * Copyright 2003, 2005 David G Jones
 *
 * Translated from Udanax-Gold source code: http://www.udanax.com
 * Copyright 1979-1999 Udanax.com. All rights reserved
 */
package info.dgjones.abora.gold.be.ents;

import info.dgjones.abora.gold.arrange.Arrangement;
import info.dgjones.abora.gold.backrec.ResultRecorder;
import info.dgjones.abora.gold.be.basic.BeCarrier;
import info.dgjones.abora.gold.be.basic.BeEdition;
import info.dgjones.abora.gold.be.basic.BeLabel;
import info.dgjones.abora.gold.be.basic.BeRangeElement;
import info.dgjones.abora.gold.be.basic.ID;
import info.dgjones.abora.gold.be.canopy.BertCrum;
import info.dgjones.abora.gold.be.canopy.PropFinder;
import info.dgjones.abora.gold.be.canopy.SensorCrum;
import info.dgjones.abora.gold.be.ents.ActualOrglRoot;
import info.dgjones.abora.gold.be.ents.HUpperCrum;
import info.dgjones.abora.gold.be.ents.HistoryCrum;
import info.dgjones.abora.gold.be.ents.InnerLoaf;
import info.dgjones.abora.gold.be.ents.Loaf;
import info.dgjones.abora.gold.be.ents.OExpandingLoaf;
import info.dgjones.abora.gold.be.ents.OPart;
import info.dgjones.abora.gold.be.ents.OPartialLoaf;
import info.dgjones.abora.gold.be.ents.OVirtualLoaf;
import info.dgjones.abora.gold.be.ents.OrglRoot;
import info.dgjones.abora.gold.be.ents.RegionLoaf;
import info.dgjones.abora.gold.be.ents.SharedData;
import info.dgjones.abora.gold.collection.basic.PrimArray;
import info.dgjones.abora.gold.collection.basic.PrimDataArray;
import info.dgjones.abora.gold.collection.cache.HashSetCache;
import info.dgjones.abora.gold.collection.steppers.Stepper;
import info.dgjones.abora.gold.collection.tables.MuTable;
import info.dgjones.abora.gold.detect.FeFillRangeDetector;
import info.dgjones.abora.gold.fossil.RecorderFossil;
import info.dgjones.abora.gold.java.AboraBlockSupport;
import info.dgjones.abora.gold.java.AboraSupport;
import info.dgjones.abora.gold.java.exception.PasseException;
import info.dgjones.abora.gold.java.exception.SubclassResponsibilityException;
import info.dgjones.abora.gold.java.missing.HRoot;
import info.dgjones.abora.gold.java.missing.XnSensor;
import info.dgjones.abora.gold.java.missing.smalltalk.Set;
import info.dgjones.abora.gold.nkernel.FeEdition;
import info.dgjones.abora.gold.nkernel.FeRangeElement;
import info.dgjones.abora.gold.spaces.basic.Dsp;
import info.dgjones.abora.gold.spaces.basic.Mapping;
import info.dgjones.abora.gold.spaces.basic.OrderSpec;
import info.dgjones.abora.gold.spaces.basic.Position;
import info.dgjones.abora.gold.spaces.basic.XnRegion;
import info.dgjones.abora.gold.tclude.TrailBlazer;
import info.dgjones.abora.gold.traces.TracePosition;
import info.dgjones.abora.gold.turtle.Agenda;
import info.dgjones.abora.gold.xcvr.Rcvr;
import info.dgjones.abora.gold.xcvr.Xmtr;

/**
 * Abstract O-tree node ("loaf"). Mechanically translated from the Smalltalk class in
 * udanax-top.st:7375 (the interleaved comments preserve the original source); most accessors
 * and operations here are deferred to the concrete subclasses visible in this package
 * (RegionLoaf, OPartialLoaf, OVirtualLoaf, InnerLoaf, ...). The one piece of shared state is
 * {@code myHCrum}, the history-crum holding this node's upward (o-parent) pointers, which
 * this class maintains and persists via {@code diskUpdate()}.
 */
public class Loaf extends OPart {

	// upward pointers into the history canopy; serialized by sendSelfTo()/Loaf(Rcvr)
	protected HUpperCrum myHCrum;
/*
udanax-top.st:7375:
OPart subclass: #Loaf
	instanceVariableNames: 'myHCrum {HUpperCrum}'
	classVariableNames: ''
	poolDictionaries: ''
	category: 'Xanadu-Be-Ents'!
*/
/*
udanax-top.st:7379:
(Loaf getOrMakeCxxClassDescription) attributes: ((Set new) add: #SHEPHERD.PATRIARCH; add: #COPY; add: #DEFERRED; add: #DEFERRED.LOCKED; yourself)!
*/
/*
udanax-top.st:7649:
Loaf class
	instanceVariableNames: ''!
*/
/*
udanax-top.st:7652:
(Loaf getOrMakeCxxClassDescription) attributes: ((Set new) add: #SHEPHERD.PATRIARCH; add: #COPY; add: #DEFERRED; add: #DEFERRED.LOCKED; yourself)!
*/

	// Registers the translated Smalltalk class attributes with the Abora runtime.
	public static void initializeClassAttributes() {
		AboraSupport.findAboraClass(Loaf.class).setAttributes( new Set().add("SHEPHERDPATRIARCH").add("COPY").add("DEFERRED").add("DEFERREDLOCKED"));
/*

Generated during transformation: AddMethod
*/
	}

	/**
	 * return a mapping from my data to corresponding stuff in the given trace
	 */
	public Mapping compare(TracePosition trace, XnRegion region) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7384:Loaf methodsFor: 'accessing'!
{Mapping} compare: trace {TracePosition} with: region {XnRegion}
	"return a mapping from my data to corresponding stuff in the given trace"
	self subclassResponsibility!
*/
	}

	// Deferred: number of range elements held below this node.
	public int count() {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7388:Loaf methodsFor: 'accessing'!
{IntegerVar} count
	self subclassResponsibility!
*/
	}

	// Deferred: the region of positions this node covers.
	public XnRegion domain() {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7391:Loaf methodsFor: 'accessing'!
{XnRegion} domain
	self subclassResponsibility!
*/
	}

	/**
	 * Look up the range element for the key. If it is embedded within a virtual
	 * structure, then make a virtual range element using the edition and globalKey.
	 */
	public FeRangeElement fetch(Position key, BeEdition edition, Position globalKey) {
		Someone.thingToDo();
		/* This should softSplay the position up. */
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7394:Loaf methodsFor: 'accessing'!
{FeRangeElement | NULL} fetch: key {Position} with: edition {BeEdition} with: globalKey {Position}
	"Look up the range element for the key.  If it is embedded within a virtual
	 structure, then make a virtual range element using the edition and globalKey."
	self thingToDo.  "This should softSplay the position up."
	self subclassResponsibility!
*/
	}

	/**
	 * Return the bottom-most Loaf. Used to get the owner and such of a position.
	 */
	public OExpandingLoaf fetchBottomAt(Position key) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7401:Loaf methodsFor: 'accessing'!
{OExpandingLoaf} fetchBottomAt: key {Position}
	"Return the bottom-most Loaf.  Used to get the owner and such of a position."
	self subclassResponsibility!
*/
	}

	/**
	 * Fill an array with my contents
	 */
	public void fill(XnRegion keys, Arrangement toArrange, PrimArray toArray, Dsp globalDsp, BeEdition edition) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7406:Loaf methodsFor: 'accessing'!
{void} fill: keys {XnRegion} with: toArrange {Arrangement} with: toArray {PrimArray} with: globalDsp {Dsp} with: edition {BeEdition}
	"Fill an array with my contents"
	self subclassResponsibility!
*/
	}

	/**
	 * Get or Make the BeRangeElement at the location.
	 */
	public BeRangeElement getBe(Position key) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7415:Loaf methodsFor: 'accessing'!
{BeRangeElement} getBe: key {Position}
	"Get or Make the BeRangeElement at the location."
	self subclassResponsibility!
*/
	}

	// Deferred: owners of the range elements at the given positions.
	public XnRegion rangeOwners(XnRegion positions) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7420:Loaf methodsFor: 'accessing'!
{XnRegion} rangeOwners: positions {XnRegion | NULL}
	self subclassResponsibility!
*/
	}

	/**
	 * Recur assigning owners. Return the portion of the o-tree that
	 * couldn't be assigned, or NULL if it was all assigned.
	 */
	public OrglRoot setAllOwners(ID owner) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7424:Loaf methodsFor: 'accessing'!
{OrglRoot} setAllOwners: owner {ID}
	"Recur assigning owners.  Return the portion of the o-tree that
	 couldn't be assigned, or NULL if it was all assigned."
	self subclassResponsibility!
*/
	}

	// Deferred: the sub-region of domain() that is actually occupied.
	public XnRegion usedDomain() {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7430:Loaf methodsFor: 'accessing'!
{XnRegion} usedDomain
	self subclassResponsibility!
*/
	}

	/**
	 * Return a stepper of bundles according to the order.
	 */
	public Stepper bundleStepper(XnRegion region, OrderSpec order, Dsp globalDsp) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7435:Loaf methodsFor: 'operations'!
{Stepper} bundleStepper: region {XnRegion} with: order {OrderSpec} with: globalDsp {Dsp}
	"Return a stepper of bundles according to the order."
	self subclassResponsibility!
*/
	}

	// Deferred: merge this node with another orgl-root within limitRegion.
	public OrglRoot combine(ActualOrglRoot another, XnRegion limitRegion, Dsp globalDsp) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7440:Loaf methodsFor: 'operations'!
{OrglRoot} combine: another {ActualOrglRoot} with: limitRegion {XnRegion} with: globalDsp {Dsp}
	self subclassResponsibility!
*/
	}

	/**
	 * Just search for now.
	 */
	public XnRegion keysLabelled(BeLabel label) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7443:Loaf methodsFor: 'operations'!
{XnRegion} keysLabelled: label {BeLabel}
	"Just search for now."
	self subclassResponsibility!
*/
	}

	/**
	 * Return a region describing the stuff that can backfollow to trace.
	 */
	public XnRegion sharedRegion(TracePosition trace, XnRegion limitRegion) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7448:Loaf methodsFor: 'operations'!
{XnRegion} sharedRegion: trace {TracePosition} with: limitRegion {XnRegion}
	"Return a region describing the stuff that can backfollow to trace."
	self subclassResponsibility!
*/
	}

	/**
	 * Return a copy with externalDsp added to the receiver's dsp.
	 */
	public Loaf transformedBy(Dsp externalDsp) {
		// identity transform: no wrapper node needed
		if (externalDsp.isIdentity()) {
			return this;
		}
		else {
			return InnerLoaf.make(this, externalDsp);
		}
/*
udanax-top.st:7452:Loaf methodsFor: 'operations'!
{Loaf} transformedBy: externalDsp {Dsp}
	"Return a copy with externalDsp added to the receiver's dsp."
	externalDsp isIdentity
		ifTrue: [^self]
		ifFalse: [^InnerLoaf make: self with: externalDsp]!
*/
	}

	/**
	 * Return a copy with globalDsp removed from the receiver's dsp.
	 */
	public Loaf unTransformedBy(Dsp globalDsp) {
		if (globalDsp.isIdentity()) {
			return this;
		}
		else {
			// removing a dsp == applying its inverse
			return InnerLoaf.make(this, ((Dsp) globalDsp.inverse()));
		}
/*
udanax-top.st:7459:Loaf methodsFor: 'operations'!
{Loaf} unTransformedBy: globalDsp {Dsp}
	"Return a copy with globalDsp removed from the receiver's dsp."
	globalDsp isIdentity
		ifTrue: [^self]
		ifFalse: [^InnerLoaf make: self with: (globalDsp inverse cast: Dsp)]!
*/
	}

	/**
	 * Make each child completely contained or completely outside
	 * the region. Return the number of children completely in the region.
	 * Full containment cases can be handled generically.
	 */
	public int splay(XnRegion region, XnRegion limitRegion) {
		if (limitRegion.isSubsetOf(region)) {
			return 2;
		}
		else {
			if (limitRegion.intersects(region)) {
				// partial overlap: subclasses do the actual rotations/splitting
				return actualSplay(region, limitRegion);
			}
			else {
				return 0;
			}
		}
/*
udanax-top.st:7468:Loaf methodsFor: 'splay'!
{UInt8} splay: region {XnRegion} with: limitRegion {XnRegion}
	"Make each child completely contained or completely outside
	 the region.  Return the number of children completely in the region.
	 Full containment cases can be handled generically."
	(limitRegion isSubsetOf: region)
		ifTrue: [^2]
		ifFalse: [(limitRegion intersects: region)
				ifTrue: [^self actualSplay: region with: limitRegion]
				ifFalse: [^Int0]]!
*/
	}

	/**
	 * Speciall handle the splay cases in which the region partially intersects
	 * with limitedRegion. These require rotations and splitting.
	 */
	public int actualSplay(XnRegion region, XnRegion limitRegion) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7481:Loaf methodsFor: 'protected: splay'!
{Int8} actualSplay: region {XnRegion} with: limitRegion {XnRegion}
	"Speciall handle the splay cases in which the region partially intersects
	 with limitedRegion.  These require rotations and splitting."
	self subclassResponsibility!
*/
	}

	/**
	 * This should probably take a bertCanopyCrum argument, as well.
	 */
	public void addOParent(OPart oParent) {
		/* add oParent to the set of upward pointers. */
		myHCrum.addOParent(oParent);
		remember();
		diskUpdate();
/*
udanax-top.st:7489:Loaf methodsFor: 'backfollow'!
{void} addOParent: oParent {OPart}
	"This should probably take a bertCanopyCrum argument, as well."
	"add oParent to the set of upward pointers."
	myHCrum addOParent: oParent.
	self remember.
	self diskUpdate!
*/
	}

	// Deferred: hook this subtree up to the given trail blazer.
	public XnRegion attachTrailBlazer(TrailBlazer blazer) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7497:Loaf methodsFor: 'backfollow'!
{XnRegion} attachTrailBlazer: blazer {TrailBlazer}
	self subclassResponsibility!
*/
	}

	/**
	 * send checkRecorders to all children
	 */
	public void checkChildRecorders(PropFinder finder) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7501:Loaf methodsFor: 'backfollow'!
{void} checkChildRecorders: finder {PropFinder}
	"send checkRecorders to all children"
	self subclassResponsibility!
*/
	}

	/**
	 * check any recorders that might be triggered by a change in the edition.
	 * Walk leafward on O-plane, filtered by sensor canopy, ringing recorders.
	 * Not in a consistent block: It spawns unbounded work.
	 */
	public void checkRecorders(PropFinder finder, SensorCrum scrum) {
		PropFinder newFinder;
		/* Shrink finder to just what may be on this branch of O-tree.
		   If there might be something on this branch
		   Check the children using the simplified finder. */
		newFinder = sensorCrum().checkRecorders(finder, scrum);
		if ( ! (newFinder.isEmpty())) {
			checkChildRecorders(newFinder);
		}
/*
udanax-top.st:7505:Loaf methodsFor: 'backfollow'!
{void} checkRecorders: finder {PropFinder} with: scrum {SensorCrum | NULL}
	"check any recorders that might be triggered by a change in the edition.
	 Walk leafward on O-plane, filtered by sensor canopy, ringing recorders.
	 Not in a consistent block: It spawns unbounded work. "
	| newFinder {PropFinder} |
	"Shrink finder to just what may be on this branch of O-tree.
	 If there might be something on this branch
		Check the children using the simplified finder."
	newFinder _ self sensorCrum checkRecorders: finder with: scrum.
	newFinder isEmpty
		ifFalse: [self checkChildRecorders: newFinder]!
*/
	}

	// Deferred: validate/propagate the trail blazer through this subtree.
	public void checkTrailBlazer(TrailBlazer blazer) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7522:Loaf methodsFor: 'backfollow'!
{void} checkTrailBlazer: blazer {TrailBlazer}
	self subclassResponsibility!
*/
	}

	/**
	 * One step of walk south on the O-tree during the 'now' part of a backfollow.
	 */
	public void delayedStoreMatching(PropFinder finder, RecorderFossil fossil, ResultRecorder recorder, HashSetCache hCrumCache) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7526:Loaf methodsFor: 'backfollow'!
{void} delayedStoreMatching: finder {PropFinder}
	with: fossil {RecorderFossil}
	with: recorder {ResultRecorder}
	with: hCrumCache {HashSetCache of: HistoryCrum}
	"One step of walk south on the O-tree during the 'now' part of a backfollow."
	self subclassResponsibility!
*/
	}

	// Deferred: the trail blazer for this subtree, or null if none attached.
	public TrailBlazer fetchTrailBlazer() {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7535:Loaf methodsFor: 'backfollow'!
{TrailBlazer | NULL} fetchTrailBlazer
	self subclassResponsibility!
*/
	}

	// Accessor for the history crum holding this node's upward pointers.
	public HistoryCrum hCrum() {
		return myHCrum;
/*
udanax-top.st:7539:Loaf methodsFor: 'backfollow'!
{HistoryCrum} hCrum
	^myHCrum!
*/
	}

	/**
	 * remove oparent from the set of upward pointers.
	 */
	public void removeOParent(OPart oparent) {
		myHCrum.removeOParent(oparent);
		if (myHCrum.isEmpty()) {
			/* Now we get into the risky part of deletion.
			   There are no more upward pointers, so destroy the receiver. */
			destroy();
		}
		else {
			diskUpdate();
		}
/*
udanax-top.st:7542:Loaf methodsFor: 'backfollow'!
{void} removeOParent: oparent {OPart}
	"remove oparent from the set of upward pointers."
	myHCrum removeOParent: oparent.
	myHCrum isEmpty
		ifTrue:
			["Now we get into the risky part of deletion.
			  There are no more upward pointers, so destroy the receiver."
			self destroy]
		ifFalse: [self diskUpdate]!
*/
	}

	/**
	 * Go ahead and actually store the recorder in the sensor canopy. However, instead of
	 * propogating the props immediately, accumulate all those agenda items into the 'agenda'
	 * parameter. This is done instead of scheduling them directly because our client needs to
	 * schedule something else following all the prop propogation.
	 */
	public void storeRecordingAgents(RecorderFossil recorder, Agenda agenda) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7553:Loaf methodsFor: 'backfollow'!
{void} storeRecordingAgents: recorder {RecorderFossil} with: agenda {Agenda}
	"Go ahead and actually store the recorder in the sensor canopy.  However, instead of
	 propogating the props immediately, accumulate all those agenda items into the 'agenda'
	 parameter.  This is done instead of scheduling them directly because our client needs to
	 schedule something else following all the prop propogation."
	self subclassResponsibility!
*/
	}

	/**
	 * A Detector has been added to my parent. Walk down and trigger it on all non-partial stuff
	 */
	public void triggerDetector(FeFillRangeDetector detect) {
		throw new SubclassResponsibilityException();
/*
udanax-top.st:7559:Loaf methodsFor: 'backfollow'!
{void} triggerDetector: detect {FeFillRangeDetector}
	"A Detector has been added to my parent.  Walk down and trigger it on all non-partial stuff"
	self subclassResponsibility!
*/
	}

	/**
	 * Ensure the my bertCrum is not be leafward of newBCrum.
	 */
	public boolean updateBCrumTo(BertCrum newBCrum) {
		// only persist when the propagation actually changed something
		if (myHCrum.propagateBCrum(newBCrum)) {
			diskUpdate();
			return true;
		}
		return false;
/*
udanax-top.st:7564:Loaf methodsFor: 'backfollow'!
{BooleanVar} updateBCrumTo: newBCrum {BertCrum}
	"Ensure the my bertCrum is not be leafward of newBCrum."
	(myHCrum propagateBCrum: newBCrum)
		ifTrue:
			[self diskUpdate.
			^true].
	^false!
*/
	}

	/**
	 * Make a FeEdition out of myself. Used for triggering Detectors
	 */
	public FeEdition asFeEdition() {
		// fluid-bind CurrentTrace and CurrentBertCrum around the construction,
		// restoring the previous bindings in reverse order on the way out
		Object currentTraceOldValue = AboraBlockSupport.enterFluidBindDuring(CurrentTrace, hCrum().hCut());
		try {
			Object currentBertCrumOldValue = AboraBlockSupport.enterFluidBindDuring(CurrentBertCrum, hCrum().bertCrum());
			try {
				return FeEdition.on((BeEdition.make((ActualOrglRoot.make(this, domain())))));
			}
			finally {
				AboraBlockSupport.exitFluidBindDuring(CurrentBertCrum, currentBertCrumOldValue);
			}
		}
		finally {
			AboraBlockSupport.exitFluidBindDuring(CurrentTrace, currentTraceOldValue);
		}
/*
udanax-top.st:7575:Loaf methodsFor: 'protected:'!
{FeEdition} asFeEdition
	"Make a FeEdition out of myself.  Used for triggering Detectors"
	CurrentTrace fluidBind: self hCrum hCut during:
		[CurrentBertCrum fluidBind: self hCrum bertCrum during:
			[^FeEdition on: (BeEdition make: (ActualOrglRoot make: self with: self domain))]]!
*/
	}

	// Tears down this shepherd inside an insistent disk-manager block.
	public void dismantle() {
		AboraBlockSupport.enterInsistent(2);
		try {
			super.dismantle();
			myHCrum = null;
		}
		finally {
			AboraBlockSupport.exitInsistent();
		}
/*
udanax-top.st:7582:Loaf methodsFor: 'protected:'!
{void} dismantle
	DiskManager insistent: 2 with:
		[super dismantle.
		myHCrum _ NULL]!
*/
	}

	// A null hcrum means "make me a fresh one".
	public Loaf(HUpperCrum hcrum, SensorCrum scrum) {
		super(scrum);
		if (hcrum == null) {
			myHCrum = HUpperCrum.make();
		}
		else {
			myHCrum = hcrum;
		}
/*
udanax-top.st:7589:Loaf methodsFor: 'create'!
create: hcrum {HUpperCrum | NULL} with: scrum {SensorCrum | NULL}
	super create: scrum.
	hcrum == NULL
		ifTrue: [myHCrum _ HUpperCrum make]
		ifFalse: [myHCrum _ hcrum]!
*/
	}

	// Variant taking an explicit hash; same null-hcrum handling as above.
	public Loaf(int hash, HUpperCrum hcrum, SensorCrum scrum) {
		super(hash, scrum);
		if (hcrum == null) {
			myHCrum = HUpperCrum.make();
		}
		else {
			myHCrum = hcrum;
		}
/*
udanax-top.st:7596:Loaf methodsFor: 'create'!
create: hash {UInt32} with: hcrum {HUpperCrum | NULL} with: scrum {SensorCrum | NULL}
	super create: hash with: scrum.
	hcrum == NULL
		ifTrue: [myHCrum _ HUpperCrum make]
		ifFalse: [myHCrum _ hcrum]!
*/
	}

	// Folds myHCrum's hash into the superclass's content hash.
	public int contentsHash() {
		return super.contentsHash() ^ myHCrum.hashForEqual();
/*
udanax-top.st:7604:Loaf methodsFor: 'testing'!
{UInt32} contentsHash
	^super contentsHash bitXor: myHCrum hashForEqual!
*/
	}

	/**
	 * @deprecated
	 */
	public void checkChildRecorders(BeEdition stamp, PropFinder finder) {
		throw new PasseException();
/*
udanax-top.st:7611:Loaf methodsFor: 'smalltalk: passe'!
{void} checkChildRecorders: stamp {BeEdition} with: finder {PropFinder}
	self passe "fewer args"!
*/
	}

	/**
	 * @deprecated
	 */
	public void checkRecorders(BeEdition edition, PropFinder finder, SensorCrum scrum) {
		throw new PasseException();
/*
udanax-top.st:7614:Loaf methodsFor: 'smalltalk: passe'!
{void} checkRecorders: edition {BeEdition} with: finder {PropFinder} with: scrum {SensorCrum | NULL}
	self passe "fewer args"!
*/
	}

	/**
	 * @deprecated
	 */
	public void delayedStoreMatching(PropFinder finder, RecorderFossil recorder, HashSetCache hCrumCache) {
		throw new PasseException();
/*
udanax-top.st:7619:Loaf methodsFor: 'smalltalk: passe'!
{void} delayedStoreMatching: finder {PropFinder}
	with: recorder {RecorderFossil}
	with: hCrumCache {HashSetCache of: HistoryCrum}
	self passe "extra argument"!
*/
	}

	/**
	 * inform a piece of partiality
	 * @deprecated
	 */
	public void inform(Position key, HRoot value, TracePosition trace) {
		throw new PasseException();
/*
udanax-top.st:7624:Loaf methodsFor: 'smalltalk: passe'!
{void} inform: key {Position} with: value {HRoot} with: trace {TracePosition}
	"inform a piece of partiality"
	self passe!
*/
	}

	/**
	 * @deprecated
	 */
	public void storeMatching(PropFinder finder, MuTable table, HashSetCache hCrumCache) {
		throw new PasseException();
/*
udanax-top.st:7629:Loaf methodsFor: 'smalltalk: passe'!
{void} storeMatching: finder {PropFinder}
	with: table {MuTable of: ID and: BeEdition}
	with: hCrumCache {HashSetCache of: HistoryCrum}
	self passe!
*/
	}

	/**
	 * @deprecated
	 */
	public void wait(XnSensor sensor) {
		throw new PasseException();
/*
udanax-top.st:7634:Loaf methodsFor: 'smalltalk: passe'!
{void} wait: sensor {XnSensor}
	self passe!
*/
	}

	// Deserialization: fields must be read in the same order sendSelfTo() writes them.
	public Loaf(Rcvr receiver) {
		super(receiver);
		myHCrum = (HUpperCrum) receiver.receiveHeaper();
/*
udanax-top.st:7640:Loaf methodsFor: 'generated:'!
create.Rcvr: receiver {Rcvr}
	super create.Rcvr: receiver.
	myHCrum _ receiver receiveHeaper.!
*/
	}

	// Serialization counterpart of Loaf(Rcvr).
	public void sendSelfTo(Xmtr xmtr) {
		super.sendSelfTo(xmtr);
		xmtr.sendHeaper(myHCrum);
/*
udanax-top.st:7644:Loaf methodsFor: 'generated:'!
{void} sendSelfTo: xmtr {Xmtr}
	super sendSelfTo: xmtr.
	xmtr sendHeaper: myHCrum.!
*/
	}

	// Factory: RegionLoaf for a carrier's element, inside a consistent block of depth 7.
	public static Loaf makeRegion(XnRegion region, BeCarrier element) {
		AboraBlockSupport.enterConsistent(7);
		try {
			return new RegionLoaf(region, element.fetchLabel(), element.rangeElement(), null);
		}
		finally {
			AboraBlockSupport.exitConsistent();
		}
/*
udanax-top.st:7657:Loaf class methodsFor: 'create'!
{Loaf} make.Region: region {XnRegion} with: element {BeCarrier}
	DiskManager consistent: 7 with:
		[^RegionLoaf create: region with: element fetchLabel with: element rangeElement with: NULL]!
*/
	}

	// Factory: partial loaf over a bare region, inside a consistent block of depth 3.
	public static Loaf makeXnRegion(XnRegion region) {
		AboraBlockSupport.enterConsistent(3);
		try {
			return new OPartialLoaf(region, null, SensorCrum.partial());
		}
		finally {
			AboraBlockSupport.exitConsistent();
		}
/*
udanax-top.st:7662:Loaf class methodsFor: 'create'!
make.XnRegion: region {XnRegion}
	DiskManager consistent: 3 with:
		[^OPartialLoaf create: region with: NULL with: SensorCrum partial]!
*/
	}

	// Factory: virtual loaf sharing the given primitive data, consistent block depth 4.
	public static Loaf make(PrimDataArray values, Arrangement arrangement) {
		AboraBlockSupport.enterConsistent(4);
		try {
			SharedData tmp;
			tmp = new SharedData(values, arrangement);
			return new OVirtualLoaf(arrangement.region(), tmp);
		}
		finally {
			AboraBlockSupport.exitConsistent();
		}
/*
udanax-top.st:7669:Loaf class methodsFor: 'create'!
make: values {PrimDataArray} with: arrangement {Arrangement}
	DiskManager consistent: 4 with:
		[| tmp {SharedData} |
		tmp _ SharedData create: values with: arrangement.
		^OVirtualLoaf create: arrangement region with: tmp]!
*/
	}

	public Loaf() {
/*

Generated during transformation
*/
	}
}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.gradle.service.project;

import com.intellij.execution.configurations.CommandLineTokenizer;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.model.ExternalSystemException;
import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskId;
import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskNotificationEvent;
import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskNotificationListener;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.Function;
import com.intellij.util.PathUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.ContainerUtilRt;
import org.gradle.StartParameter;
import org.gradle.process.internal.JvmOptions;
import org.gradle.tooling.*;
import org.gradle.tooling.internal.consumer.DefaultGradleConnector;
import org.gradle.tooling.internal.consumer.Distribution;
import org.gradle.tooling.model.build.BuildEnvironment;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.gradle.settings.DistributionType;
import org.jetbrains.plugins.gradle.settings.GradleExecutionSettings;
import org.jetbrains.plugins.gradle.util.GradleConstants;
import org.jetbrains.plugins.gradle.util.GradleUtil;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Helper around the Gradle Tooling API: creates pre-configured model builders and
 * build launchers, manages {@link ProjectConnection} lifecycle and forwards
 * progress/output to an {@link ExternalSystemTaskNotificationListener}.
 *
 * @author Denis Zhdanov
 * @since 3/14/13 5:11 PM
 */
public class GradleExecutionHelper {

  private static final Logger LOG = Logger.getInstance(GradleExecutionHelper.class);

  /**
   * Creates a model builder for {@code modelType}, pre-configured with the given
   * execution settings, listener wiring and extra JVM arguments.
   */
  @SuppressWarnings("MethodMayBeStatic")
  @NotNull
  public <T> ModelBuilder<T> getModelBuilder(@NotNull Class<T> modelType,
                                             @NotNull final ExternalSystemTaskId id,
                                             @Nullable GradleExecutionSettings settings,
                                             @NotNull ProjectConnection connection,
                                             @NotNull ExternalSystemTaskNotificationListener listener,
                                             @NotNull List<String> extraJvmArgs)
  {
    ModelBuilder<T> result = connection.model(modelType);
    prepare(result, id, settings, listener, extraJvmArgs, connection);
    return result;
  }

  /**
   * Creates a build launcher whose output is routed back to {@code listener}.
   *
   * @param vmOptions optional VM options string forwarded to the Gradle daemon
   */
  @SuppressWarnings("MethodMayBeStatic")
  @NotNull
  public BuildLauncher getBuildLauncher(@NotNull final ExternalSystemTaskId id,
                                        @NotNull ProjectConnection connection,
                                        @Nullable GradleExecutionSettings settings,
                                        @NotNull ExternalSystemTaskNotificationListener listener,
                                        @Nullable final String vmOptions)
  {
    BuildLauncher result = connection.newBuild();
    List<String> extraJvmArgs = vmOptions == null ? ContainerUtil.<String>emptyList() : ContainerUtil.newArrayList(vmOptions.trim());
    prepare(result, id, settings, listener, extraJvmArgs, connection);
    return result;
  }

  /**
   * Same as {@link #getBuildLauncher(ExternalSystemTaskId, ProjectConnection, GradleExecutionSettings,
   * ExternalSystemTaskNotificationListener, String)} but with explicit stdout/stderr streams.
   */
  @SuppressWarnings({"MethodMayBeStatic", "UnusedDeclaration"})
  @NotNull
  public BuildLauncher getBuildLauncher(@NotNull final ExternalSystemTaskId id,
                                        @NotNull ProjectConnection connection,
                                        @Nullable GradleExecutionSettings settings,
                                        @NotNull ExternalSystemTaskNotificationListener listener,
                                        @Nullable final String vmOptions,
                                        @NotNull final OutputStream standardOutput,
                                        @NotNull final OutputStream standardError)
  {
    BuildLauncher result = connection.newBuild();
    List<String> extraJvmArgs = vmOptions == null ? ContainerUtil.<String>emptyList() : ContainerUtil.newArrayList(vmOptions.trim());
    prepare(result, id, settings, listener, extraJvmArgs, connection, standardOutput, standardError);
    return result;
  }

  /**
   * Configures the operation, wrapping the listener into {@link OutputWrapper} streams
   * for stdout/stderr.
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void prepare(@NotNull LongRunningOperation operation,
                             @NotNull final ExternalSystemTaskId id,
                             @Nullable GradleExecutionSettings settings,
                             @NotNull final ExternalSystemTaskNotificationListener listener,
                             @NotNull List<String> extraJvmArgs,
                             @NotNull ProjectConnection connection)
  {
    prepare(operation, id, settings, listener, extraJvmArgs, connection,
            new OutputWrapper(listener, id, true), new OutputWrapper(listener, id, false));
  }

  /**
   * Applies execution settings to a tooling-api operation: daemon VM options (merged with the
   * build environment's own JVM args), java home, progress listener and output streams.
   * <p>
   * Does nothing (and does NOT call {@code listener.onStart()}) when {@code settings} is null —
   * callers relying on the start notification must pass non-null settings.
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void prepare(@NotNull LongRunningOperation operation,
                             @NotNull final ExternalSystemTaskId id,
                             @Nullable GradleExecutionSettings settings,
                             @NotNull final ExternalSystemTaskNotificationListener listener,
                             @NotNull List<String> extraJvmArgs,
                             @NotNull ProjectConnection connection,
                             @NotNull final OutputStream standardOutput,
                             @NotNull final OutputStream standardError)
  {
    if (settings == null) {
      return;
    }

    // Tokenize the free-form daemon VM options string into individual arguments.
    List<String> jvmArgs = ContainerUtilRt.newArrayList();
    String vmOptions = settings.getDaemonVmOptions();
    if (!StringUtil.isEmpty(vmOptions)) {
      CommandLineTokenizer tokenizer = new CommandLineTokenizer(vmOptions);
      while (tokenizer.hasMoreTokens()) {
        String vmOption = tokenizer.nextToken();
        if (!StringUtil.isEmpty(vmOption)) {
          jvmArgs.add(vmOption);
        }
      }
    }
    jvmArgs.addAll(extraJvmArgs);

    if (!jvmArgs.isEmpty()) {
      // Merge with the JVM args already configured for the build, so ours don't clobber them.
      BuildEnvironment buildEnvironment = getBuildEnvironment(connection);
      List<String> merged = buildEnvironment != null
                            ? mergeJvmArgs(buildEnvironment.getJava().getJvmArguments(), jvmArgs)
                            : jvmArgs;
      operation.setJvmArguments(ArrayUtilRt.toStringArray(merged));
    }

    listener.onStart(id);

    final String javaHome = settings.getJavaHome();
    if (javaHome != null && new File(javaHome).isDirectory()) {
      operation.setJavaHome(new File(javaHome));
    }
    operation.addProgressListener(new ProgressListener() {
      @Override
      public void statusChanged(ProgressEvent event) {
        listener.onStatusChange(new ExternalSystemTaskNotificationEvent(id, event.getDescription()));
      }
    });
    operation.setStandardOutput(standardOutput);
    operation.setStandardError(standardError);
  }

  /**
   * Opens a connection for the project at {@code projectPath}, runs {@code f} against it and
   * closes the connection afterwards.
   * <p>
   * For wrapped distributions this temporarily redirects the {@code user.dir} system property
   * to the project dir (workaround for {@code org.gradle.wrapper.PathAssembler#getBaseDir}
   * resolving 'PROJECT'-relative wrapper locations against the current directory).
   *
   * @throws ExternalSystemException wrapping whatever {@code f} throws
   */
  public <T> T execute(@NotNull String projectPath, @Nullable GradleExecutionSettings settings, @NotNull Function<ProjectConnection, T> f) {
    final String projectDir;
    final File projectPathFile = new File(projectPath);
    if (projectPathFile.isFile() && projectPath.endsWith(GradleConstants.EXTENSION) && projectPathFile.getParent() != null) {
      projectDir = projectPathFile.getParent();
    }
    else {
      projectDir = projectPath;
    }

    // This is a workaround to get right base dir in case of 'PROJECT' setting used in case custom wrapper property file location
    // see org.gradle.wrapper.PathAssembler#getBaseDir for details
    String userDir = null;
    if (settings != null && settings.getDistributionType() == DistributionType.WRAPPED) {
      try {
        userDir = System.getProperty("user.dir");
        System.setProperty("user.dir", projectDir);
      }
      catch (Exception e) {
        // ignore
      }
    }
    ProjectConnection connection = getConnection(projectDir, settings);
    try {
      return f.fun(connection);
    }
    catch (ExternalSystemException e) {
      // already the exception type callers expect — don't double-wrap
      throw e;
    }
    catch (Throwable e) {
      throw new ExternalSystemException(e);
    }
    finally {
      try {
        connection.close();
      }
      catch (Throwable e) {
        // ignore
      }
      // Restore 'user.dir' unconditionally — previously this was skipped when close() threw.
      if (userDir != null) {
        try {
          System.setProperty("user.dir", userDir);
        }
        catch (Throwable e) {
          // ignore
        }
      }
    }
  }

  /**
   * Ensures a gradle wrapper is installed for a wrapped-distribution project: runs the
   * 'wrapper' task with an init script that records the generated wrapper properties file
   * location, then stores that location in {@code settings}.
   */
  public void ensureInstalledWrapper(@NotNull ExternalSystemTaskId id,
                                     @NotNull String projectPath,
                                     @NotNull GradleExecutionSettings settings,
                                     @NotNull ExternalSystemTaskNotificationListener listener) {

    if (!settings.getDistributionType().isWrapped()) return;

    if (settings.getDistributionType() == DistributionType.DEFAULT_WRAPPED &&
        GradleUtil.findDefaultWrapperPropertiesFile(projectPath) != null) {
      return;
    }

    ProjectConnection connection = getConnection(projectPath, settings);
    try {
      BuildLauncher launcher = getBuildLauncher(id, connection, settings, listener, null);
      try {
        final File tempFile = FileUtil.createTempFile("wrap", ".gradle");
        tempFile.deleteOnExit();
        final File wrapperPropertyFileLocation = FileUtil.createTempFile("wrap", "loc");
        wrapperPropertyFileLocation.deleteOnExit();
        // Init script: after the Wrapper task runs, write the wrapper .properties path
        // into our temp "location" file so we can read it back below.
        final String[] lines = {
          "gradle.taskGraph.afterTask { Task task ->",
          " if (task instanceof Wrapper) {",
          " def wrapperPropertyFileLocation = task.jarFile.getCanonicalPath() - '.jar' + '.properties'",
          " new File('" + StringUtil.escapeBackSlashes(wrapperPropertyFileLocation.getCanonicalPath()) + "').write wrapperPropertyFileLocation",
          "}}",
        };
        FileUtil.writeToFile(tempFile, StringUtil.join(lines, SystemProperties.getLineSeparator()));
        launcher.withArguments("--init-script", tempFile.getAbsolutePath());
        launcher.forTasks("wrapper");
        launcher.run();
        String wrapperPropertyFile = FileUtil.loadFile(wrapperPropertyFileLocation);
        settings.setWrapperPropertyFile(wrapperPropertyFile);
      }
      catch (IOException e) {
        throw new ExternalSystemException(e);
      }
    }
    catch (ExternalSystemException e) {
      // rethrow as-is instead of wrapping an ExternalSystemException into another one
      throw e;
    }
    catch (Throwable e) {
      throw new ExternalSystemException(e);
    }
    finally {
      try {
        connection.close();
      }
      catch (Throwable e) {
        // ignore
      }
    }
  }

  /** Merges two JVM argument lists, letting Gradle's JvmOptions normalize duplicates. */
  private static List<String> mergeJvmArgs(List<String> jvmArgs1, List<String> jvmArgs2) {
    JvmOptions jvmOptions = new JvmOptions(null);
    jvmOptions.setAllJvmArgs(ContainerUtil.concat(jvmArgs1, jvmArgs2));
    return jvmOptions.getAllJvmArgs();
  }

  /**
   * Allows to retrieve gradle api connection to use for the given project.
   *
   * @param projectPath target project path
   * @param settings    execution settings to use
   * @return connection to use
   * @throws IllegalStateException if it's not possible to create the connection
   */
  @NotNull
  private static ProjectConnection getConnection(@NotNull String projectPath, @Nullable GradleExecutionSettings settings)
    throws IllegalStateException
  {
    File projectDir = new File(projectPath);
    GradleConnector connector = GradleConnector.newConnector();
    int ttl = -1;

    if (settings != null) {
      //noinspection EnumSwitchStatementWhichMissesCases
      switch (settings.getDistributionType()) {
        case LOCAL:
          String gradleHome = settings.getGradleHome();
          if (gradleHome != null) {
            try {
              // There were problems with symbolic links processing at the gradle side.
              connector.useInstallation(new File(gradleHome).getCanonicalFile());
            }
            catch (IOException e) {
              connector.useInstallation(new File(settings.getGradleHome()));
            }
          }
          break;
        case WRAPPED:
          if (settings.getWrapperPropertyFile() != null) {
            File propertiesFile = new File(settings.getWrapperPropertyFile());
            if (propertiesFile.exists()) {
              Distribution distribution =
                new DistributionFactoryExt(StartParameter.DEFAULT_GRADLE_USER_HOME).getWrappedDistribution(propertiesFile);
              try {
                setField(connector, "distribution", distribution);
              }
              catch (Exception e) {
                throw new ExternalSystemException(e);
              }
            }
          }
          break;
      }

      // Setup service directory if necessary.
      String serviceDirectory = settings.getServiceDirectory();
      if (serviceDirectory != null) {
        connector.useGradleUserHomeDir(new File(serviceDirectory));
      }

      // Setup logging if necessary.
      if (settings.isVerboseProcessing() && connector instanceof DefaultGradleConnector) {
        ((DefaultGradleConnector)connector).setVerboseLogging(true);
      }
      ttl = (int)settings.getRemoteProcessIdleTtlInMs();
    }

    if (ttl > 0 && connector instanceof DefaultGradleConnector) {
      ((DefaultGradleConnector)connector).daemonMaxIdleTime(ttl, TimeUnit.MILLISECONDS);
    }
    connector.forProjectDirectory(projectDir);
    ProjectConnection connection = connector.connect();
    if (connection == null) {
      throw new IllegalStateException(String.format(
        "Can't create connection to the target project via gradle tooling api. Project path: '%s'", projectPath
      ));
    }
    return connection;
  }

  /**
   * Utility to set field in object if there is no public setter for it.
   * It's not recommended to use this method.
   * FIXME: remove this workaround after gradle API changed
   *
   * @param obj        Object to be modified
   * @param fieldName  name of object's field
   * @param fieldValue value to be set for field
   * @throws SecurityException
   * @throws NoSuchFieldException
   * @throws IllegalArgumentException
   * @throws IllegalAccessException
   */
  public static void setField(Object obj, String fieldName, Object fieldValue)
    throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException
  {
    final Field field = obj.getClass().getDeclaredField(fieldName);
    final boolean isAccessible = field.isAccessible();
    field.setAccessible(true);
    try {
      field.set(obj, fieldValue);
    }
    finally {
      // Restore accessibility even when set() throws — previously a failing set()
      // left the field permanently accessible.
      field.setAccessible(isAccessible);
    }
  }

  /**
   * Attaches the bundled model-building init script ({@code init.gradle}) to the operation,
   * substituting the plugin jar path into the script template. Failures are logged, not thrown.
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void setInitScript(LongRunningOperation longRunningOperation) {
    try {
      InputStream stream = GradleProjectResolver.class.getResourceAsStream("/org/jetbrains/plugins/gradle/model/internal/init.gradle");
      if (stream == null) return;

      String jarPath = PathUtil.getCanonicalPath(PathUtil.getJarPathForClass(GradleProjectResolver.class));
      // loadTextAndClose closes the stream for us.
      String s = FileUtil.loadTextAndClose(stream).replace("${JAR_PATH}", jarPath);

      final File tempFile = FileUtil.createTempFile("ijinit", '.' + GradleConstants.EXTENSION, true);
      FileUtil.writeToFile(tempFile, s);

      String[] buildExecutorArgs = new String[]{"--init-script", tempFile.getAbsolutePath()};
      longRunningOperation.withArguments(buildExecutorArgs);
    }
    catch (Exception e) {
      LOG.warn("Can't use IJ gradle init script", e);
    }
  }

  /** Fetches the {@link BuildEnvironment} model, returning null (and logging) on any failure. */
  @Nullable
  private static BuildEnvironment getBuildEnvironment(@NotNull ProjectConnection connection) {
    try {
      return connection.getModel(BuildEnvironment.class);
    }
    catch (Exception e) {
      LOG.warn("can not get BuildEnvironment model", e);
      return null;
    }
  }
}
/*
 * Copyright (c) 2010-2013 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.evolveum.midpoint.wf.impl.jobs;

import com.evolveum.midpoint.audit.api.AuditEventRecord;
import com.evolveum.midpoint.audit.api.AuditEventStage;
import com.evolveum.midpoint.audit.api.AuditService;
import com.evolveum.midpoint.model.impl.controller.ModelOperationTaskHandler;
import com.evolveum.midpoint.prism.Item;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.repo.api.RepositoryService;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskCategory;
import com.evolveum.midpoint.task.api.TaskExecutionStatus;
import com.evolveum.midpoint.task.api.TaskManager;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.exception.SystemException;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.wf.impl.WfConfiguration;
import com.evolveum.midpoint.wf.impl.activiti.ActivitiInterface;
import com.evolveum.midpoint.wf.impl.activiti.dao.WorkItemProvider;
import com.evolveum.midpoint.wf.api.ProcessListener;
import com.evolveum.midpoint.wf.api.WorkItemListener;
import com.evolveum.midpoint.wf.api.WorkflowException;
import com.evolveum.midpoint.wf.impl.messages.ActivitiToMidPointMessage;
import com.evolveum.midpoint.wf.impl.messages.ProcessEvent;
import com.evolveum.midpoint.wf.impl.messages.ProcessFinishedEvent;
import com.evolveum.midpoint.wf.impl.messages.ProcessStartedEvent;
import com.evolveum.midpoint.wf.impl.messages.StartProcessCommand;
import com.evolveum.midpoint.wf.impl.messages.TaskCompletedEvent;
import com.evolveum.midpoint.wf.impl.messages.TaskCreatedEvent;
import com.evolveum.midpoint.wf.impl.messages.TaskEvent;
import com.evolveum.midpoint.wf.impl.processes.common.CommonProcessVariableNames;
import com.evolveum.midpoint.wf.impl.processors.ChangeProcessor;
import com.evolveum.midpoint.wf.impl.util.MiscDataUtil;
import com.evolveum.midpoint.xml.ns._public.common.common_3.WorkItemType;
import com.evolveum.midpoint.xml.ns.model.workflow.process_instance_state_3.ProcessInstanceState;
import org.apache.commons.lang.Validate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import javax.xml.bind.JAXBException;
import java.text.DateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

/**
 * Manages everything related to an Activiti process instance, including the task that monitors that process instance.
 *
 * This class provides a facade over the ugly mess of code managing the activiti + task pair describing a workflow process instance.
 *
 * For working with tasks only (e.g. not touching Job structure) it uses wfTaskUtil.
 *
 * @author mederly
 */
@Component
public class JobController {

    private static final Trace LOGGER = TraceManager.getTrace(JobController.class);

    // Delay (ms) before the process-shadowing task handler first fires.
    private static final long TASK_START_DELAY = 5000L;
    // When true, each state change is also appended to the task's wfStatus property.
    private static final boolean USE_WFSTATUS = true;

    private static final Object DOT_CLASS = JobController.class.getName() + ".";

    // Listeners notified about process instance start/end and work item events.
    private Set<ProcessListener> processListeners = new HashSet<>();
    private Set<WorkItemListener> workItemListeners = new HashSet<>();

    //region Spring beans
    @Autowired
    private WfTaskUtil wfTaskUtil;

    @Autowired
    private TaskManager taskManager;

    @Autowired
    private ActivitiInterface activitiInterface;

    @Autowired
    @Qualifier("cacheRepositoryService")
    private RepositoryService repositoryService;

    @Autowired
    private AuditService auditService;

    @Autowired
    private MiscDataUtil miscDataUtil;

    @Autowired
    private WfConfiguration wfConfiguration;

    @Autowired
    private WorkItemProvider workItemProvider;
    //endregion

    //region Job creation & re-creation
    /**
     * Creates a job, just as prescribed by the job creation instruction.
     *
     * @param instruction the job creation instruction
     * @param parentJob the job that will be the parent of newly created one; it may be null
     */
    public Job createJob(JobCreationInstruction instruction, Job parentJob, OperationResult result) throws SchemaException, ObjectNotFoundException {
        // NOTE(review): despite the @param note, a null parentJob would NPE on getTask() here —
        // confirm that callers of this overload never pass null (they should use the Task overload).
        return createJob(instruction, parentJob.getTask(), result);
    }

    /**
     * As before, but this time we know only the parent task (not a job).
     *
     * @param instruction the job creation instruction
     * @param parentTask the task that will be the parent of the task of newly created job; it may be null
     */
    public Job createJob(JobCreationInstruction instruction, Task parentTask, OperationResult result) throws SchemaException, ObjectNotFoundException {

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Processing start instruction: " + instruction.debugDump());
        }

        // Create the monitoring/execution task first, then (unless the instruction is a
        // no-process one) kick off the corresponding Activiti process instance.
        Task task = createTask(instruction, parentTask, result);
        Job job = new Job(this, task, instruction.getChangeProcessor());
        if (!instruction.isNoProcess()) {
            startWorkflowProcessInstance(job, instruction, result);
        }
        return job;
    }

    /**
     * Re-creates a job, based on existing task information.
     *
     * @param task a task from task-processInstance pair
     * @return recreated job
     */
    public Job recreateJob(Task task) throws SchemaException, ObjectNotFoundException {
        return new Job(this, task, wfTaskUtil.getProcessId(task), wfTaskUtil.getChangeProcessor(task));
    }

    /**
     * Re-creates a child job, knowing the task and the parent job.
     *
     * @param subtask a task from task-processInstance pair
     * @param parentJob the parent job
     * @return recreated job
     */
    public Job recreateChildJob(Task subtask, Job parentJob) {
        return new Job(this, subtask, wfTaskUtil.getProcessId(subtask), parentJob.getChangeProcessor());
    }

    /**
     * Re-creates a root job, based on existing task information. Does not try to find the wf process instance.
 */
    public Job recreateRootJob(Task task) {
        return new Job(this, task, wfTaskUtil.getChangeProcessor(task));
    }
    //endregion

    //region Working with midPoint tasks
    /**
     * Creates the midPoint task that will monitor/execute the workflow process: sets owner,
     * initial execution state, object reference, handler stack and task variables, then
     * switches it to background execution.
     */
    private Task createTask(JobCreationInstruction instruction, Task parentTask, OperationResult result) throws SchemaException, ObjectNotFoundException {

        ChangeProcessor changeProcessor = instruction.getChangeProcessor();

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("createTask starting; parent task = " + parentTask);
        }

        Task task;
        if (parentTask != null) {
            task = parentTask.createSubtask();
        } else {
            task = taskManager.createTaskInstance();
            wfTaskUtil.setTaskOwner(task, instruction.getTaskOwner());
        }

        // initial execution state
        if (instruction.isCreateTaskAsSuspended() && instruction.isCreateTaskAsWaiting()) {
            throw new IllegalStateException("Both createSuspended and createWaiting attributes are set to TRUE.");
        }
        if (instruction.isCreateTaskAsSuspended()) {
            task.setInitialExecutionStatus(TaskExecutionStatus.SUSPENDED);
        } else if (instruction.isCreateTaskAsWaiting()) {
            task.setInitialExecutionStatus(TaskExecutionStatus.WAITING);
        }

        // Object reference: explicit from the instruction, otherwise inherited from the parent.
        if (instruction.getTaskObject() != null) {
            task.setObjectRef(instruction.getTaskObject().getOid(), instruction.getTaskObject().getDefinition().getTypeName());
        } else if (parentTask != null && parentTask.getObjectRef() != null) {
            task.setObjectRef(parentTask.getObjectRef());
        }
        wfTaskUtil.setChangeProcessor(task, changeProcessor);
        wfTaskUtil.setTaskNameIfEmpty(task, instruction.getTaskName());
        wfTaskUtil.setDefaultTaskOwnerIfEmpty(task, result, this);
        task.setCategory(TaskCategory.WORKFLOW);

        // push the handlers, beginning with these that should execute last
        wfTaskUtil.pushHandlers(task, instruction.getHandlersAfterModelOperation());
        if (instruction.isExecuteModelOperationHandler()) {
            task.pushHandlerUri(ModelOperationTaskHandler.MODEL_OPERATION_TASK_URI, null, null);
        }
        wfTaskUtil.pushHandlers(task, instruction.getHandlersBeforeModelOperation());
        wfTaskUtil.pushHandlers(task, instruction.getHandlersAfterWfProcess());
        if (instruction.startsWorkflowProcess()) {
            // the process-shadowing handler ends up on top of the stack, i.e. runs first
            wfTaskUtil.pushProcessShadowHandler(instruction.isSimple(), task, TASK_START_DELAY, result);
        }

        // put task variables
        for (Item item : instruction.getTaskVariables().values()) {
            task.setExtensionItem(item);
        }

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Saving workflow monitoring/execution task: " + task.debugDump());
        }

        taskManager.switchToBackground(task, result);

        return task;
    }

    /**
     * Beware, in order to make the change permanent, it is necessary to call commitChanges on
     * "executesFirst". It is advisable not to modify underlying tasks between 'addDependency'
     * and 'commitChanges' because of the savePendingModifications() mechanism that is used here.
     *
     * @param executesFirst
     * @param executesSecond
     */
    public void addDependency(Job executesFirst, Job executesSecond) {
        Validate.notNull(executesFirst.getTask());
        Validate.notNull(executesSecond.getTask());
        LOGGER.trace("Setting dependency of {} on 'task0' {}", executesSecond, executesFirst);
        executesFirst.getTask().addDependent(executesSecond.getTask().getTaskIdentifier());
    }

    /** Resumes the (suspended) task belonging to the given job. */
    public void resumeTask(Job job, OperationResult result) throws SchemaException, ObjectNotFoundException {
        taskManager.resumeTask(job.getTask(), result);
    }

    /** Unpauses the (waiting) task belonging to the given job. */
    public void unpauseTask(Job job, OperationResult result) throws SchemaException, ObjectNotFoundException {
        taskManager.unpauseTask(job.getTask(), result);
    }
    //endregion

    //region Working with Activiti process instances
    /**
     * Builds and sends the start-process command to Activiti, recording the request in the
     * task state and firing audit/notification hooks. Throws SystemException when the request
     * cannot be sent.
     */
    private void startWorkflowProcessInstance(Job job, JobCreationInstruction instruction, OperationResult result) {

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("startWorkflowProcessInstance starting; instruction = " + instruction);
        }

        Task task = job.getTask();

        // perhaps more useful would be state 'workflow process instance creation HAS BEEN requested';
        // however, if we record process state AFTER the request is sent, it is possible
        // that the response would come even before we log the request
        recordProcessInstanceState(job, "Workflow process instance creation is being requested.", null, result);

        // prepare and send the start process instance command
        StartProcessCommand spc = new StartProcessCommand();
        spc.setTaskOid(task.getOid());
        spc.setProcessName(instruction.getProcessDefinitionKey());
        spc.setSendStartConfirmation(instruction.isSendStartConfirmation());
        spc.setVariablesFrom(instruction.getProcessVariables());
        spc.setProcessOwner(task.getOwner().getOid());

        try {
            activitiInterface.midpoint2activiti(spc, task, result);
            auditProcessStart(spc, job, result);
            notifyProcessStart(spc, job, result);
        } catch (JAXBException|SchemaException|RuntimeException e) {
            LoggingUtils.logException(LOGGER,
                    "Couldn't send a request to start a process instance to workflow management system", e);
            recordProcessInstanceState(job, "Workflow process instance creation could not be requested: " + e, null, result);
            result.recordPartialError("Couldn't send a request to start a process instance to workflow management system: " + e.getMessage(), e);
            throw new SystemException("Workflow process instance creation could not be requested", e);
        }

        // final
        result.recordSuccessIfUnknown();
        LOGGER.trace("startWorkflowProcessInstance finished");
    }

    /**
     * Processes a message got from workflow engine - either synchronously (while waiting for
     * replies after sending - i.e. in a thread that requested the operation), or asynchronously
     * (directly from activiti2midpoint, in a separate thread).
     *
     * @param message an event got from workflow engine
     * @param task
     * @param asynchronous
     * @param result
     * @throws Exception
     */
    // TODO fix exceptions list
    public void processWorkflowMessage(ActivitiToMidPointMessage message, Task task, boolean asynchronous, OperationResult result)
            throws SchemaException, ObjectNotFoundException, WorkflowException, ObjectAlreadyExistsException, JAXBException {

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Received ActivitiToMidPointMessage: " + message);
        }
        if (message instanceof ProcessEvent) {
            Task task1 = getTaskFromEvent((ProcessEvent) message, task, result);
            // Asynchronous events are only applied to tasks parked in WAITING state;
            // anything else means the task has already moved on.
            if (asynchronous && task1.getExecutionStatus() != TaskExecutionStatus.WAITING) {
                LOGGER.trace("Asynchronous message received in a state different from WAITING (actual state: {}), ignoring it. Task = {}", task1.getExecutionStatus(), task1);
            } else {
                processProcessEvent((ProcessEvent) message, task1, result);
            }
        } else if (message instanceof TaskEvent) {
            processTaskEvent((TaskEvent) message, result);
        } else {
            throw new IllegalStateException("Unknown kind of event from Activiti: " + message.getClass());
        }
    }

    /**
     * Resolves the midPoint task an event refers to: validates the event's taskOid against the
     * supplied task, or loads the task by OID when none was supplied.
     */
    private Task getTaskFromEvent(ProcessEvent event, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException {
        String taskOid = event.getTaskOid();
        if (taskOid == null) {
            throw new IllegalStateException("Got a workflow message without taskOid: " + event.toString());
        }
        if (task != null) {
            if (!taskOid.equals(task.getOid())) {
                throw new IllegalStateException("TaskOid received from the workflow (" + taskOid + ") is different from current task OID (" + task + "): " + event.toString());
            }
        } else {
            task = taskManager.getTask(taskOid, result);
        }
        return task;
    }

    /**
     * Applies a process event to its job: records state, stores the process id when it becomes
     * known, and finishes the job when the process has ended.
     */
    private void processProcessEvent(ProcessEvent event, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, ObjectAlreadyExistsException, JAXBException {

        Job job = recreateJob(task);

        recordProcessInstanceState(job, getStateDescription(event), event, result);

        // let us record process id (if unknown or when getting "process started" event)
        if (job.getActivitiId() == null || event instanceof ProcessStartedEvent) {
            job.setWfProcessIdImmediate(event.getPid(), result);
        }

        // should we finish this task?
        if (event instanceof ProcessFinishedEvent || !event.isRunning()) {
            processFinishedEvent(event, job, result);
        }
    }

    /**
     * Finishes a job whose process instance has ended: lets the change processor react,
     * marks the process finished, audits/notifies, and releases a passive (WAITING) task.
     */
    private void processFinishedEvent(ProcessEvent event, Job job, OperationResult result) throws SchemaException, ObjectAlreadyExistsException, ObjectNotFoundException, JAXBException {
        LOGGER.trace("processFinishedEvent starting");
        LOGGER.trace("Calling onProcessEnd on {}", job.getChangeProcessor());
        job.getChangeProcessor().onProcessEnd(event, job, result);

        job.setProcessInstanceFinishedImmediate(true, result);

        auditProcessEnd(event, job, result);
        notifyProcessEnd(event, job, result);

        // passive tasks can be 'let go' at this point
        if (job.getTaskExecutionStatus() == TaskExecutionStatus.WAITING) {
            job.computeTaskResultIfUnknown(result);
            job.removeCurrentTaskHandlerAndUnpause(result);            // removes WfProcessInstanceShadowTaskHandler
        }
        LOGGER.trace("processFinishedEvent done");
    }

    /**
     * Produces a human-readable description for an event, falling back to generic
     * created/ended/proceeded wording when the event carries no state text.
     */
    private String getStateDescription(ProcessEvent event) {
        String pid = event.getPid();
        String stateDescription = event.getState();
        if (stateDescription == null || stateDescription.isEmpty()) {
            if (event instanceof ProcessStartedEvent) {
                stateDescription = "Workflow process instance has been created (process id " + pid + ")";
            } else if (event instanceof ProcessFinishedEvent) {
                stateDescription = "Workflow process instance has ended (process id " + pid + ")";
            } else {
                stateDescription = "Workflow process instance has proceeded further (process id " + pid + ")";
            }
        }
        return stateDescription;
    }

    /**
     * Persists the state description (and, optionally, dumped process variables and a wfStatus
     * entry) into the job's task. Failures are recorded in the result, not thrown.
     */
    private void recordProcessInstanceState(Job job, String stateDescription, ProcessEvent event, OperationResult parentResult) {
        LOGGER.trace("recordProcessInstanceState starting.");
        Task task = job.getTask();
        try {
            task.setDescription(stateDescription);
            if (event != null) {
                wfTaskUtil.setWfLastVariables(task, dumpVariables(event));
            }
            if (USE_WFSTATUS) {
                wfTaskUtil.addWfStatus(task, prepareValueForWfStatusProperty(stateDescription));
            }
            wfTaskUtil.setLastDetails(task, stateDescription);
            task.savePendingModifications(parentResult);
        } catch (Exception ex) {            // todo
            LoggingUtils.logException(LOGGER, "Couldn't record information from WfMS into task {}", ex, task);
            parentResult.recordFatalError("Couldn't record information from WfMS into task " + task, ex);
        }
        LOGGER.trace("recordProcessInstanceState ending.");
    }

    private String prepareValueForWfStatusProperty(String stateDescription) {
        // statusTsDt (for wfStatus): [<timestamp>: <formatted datetime>] <status description>
        // (timestamp is to enable easy sorting, [] to easy parsing)

        Date d = new Date();
        DateFormat df = DateFormat.getDateTimeInstance();
        return "[" + d.getTime() + ": " + df.format(d) + "] " + stateDescription;
    }

    // variables should be sorted in order for dumpVariables produce nice output
    private Map<String,Object> getVariablesSorted(ProcessEvent event) {
        TreeMap<String,Object> variables = new TreeMap<String,Object>();
        if (event.getVariables() != null) {
            variables.putAll(event.getVariables());
        }
        return variables;
    }

    // Returns the content of process variables in more-or-less human readable format.
    // Sorts variables according to their names, in order to be able to decide whether
    // anything has changed since last event coming from the process.
private String dumpVariables(ProcessEvent event) { StringBuffer sb = new StringBuffer(); boolean first = true; Map<String,Object> variables = getVariablesSorted(event); for (Map.Entry<String,Object> entry: variables.entrySet()) { if (!first) sb.append("; "); sb.append(entry.getKey() + "=" + entry.getValue()); first = false; } return sb.toString(); } private ChangeProcessor getChangeProcessor(Map<String,Object> variables) { String cpName = (String) variables.get(CommonProcessVariableNames.VARIABLE_MIDPOINT_CHANGE_PROCESSOR); Validate.notNull(cpName, "Change processor is not defined among process instance variables"); return wfConfiguration.findChangeProcessor(cpName); } private ChangeProcessor getChangeProcessor(WorkItemType workItemType) { String cpName = workItemType.getChangeProcessor(); Validate.notNull(cpName, "Change processor is not defined among process instance variables"); return wfConfiguration.findChangeProcessor(cpName); } private ChangeProcessor getChangeProcessor(TaskEvent taskEvent) { return getChangeProcessor(taskEvent.getVariables()); } //endregion //region Processing work item (task) events private void processTaskEvent(TaskEvent taskEvent, OperationResult result) throws WorkflowException { // auditing & notifications if (taskEvent instanceof TaskCreatedEvent) { auditWorkItemEvent(taskEvent, AuditEventStage.REQUEST, result); try { notifyWorkItemCreated( taskEvent.getTaskName(), taskEvent.getAssigneeOid(), taskEvent.getProcessInstanceName(), taskEvent.getVariables()); } catch (JAXBException|SchemaException e) { LoggingUtils.logException(LOGGER, "Couldn't send notification about work item create event", e); } } else if (taskEvent instanceof TaskCompletedEvent) { auditWorkItemEvent(taskEvent, AuditEventStage.EXECUTION, result); try { notifyWorkItemCompleted( taskEvent.getTaskName(), taskEvent.getAssigneeOid(), taskEvent.getProcessInstanceName(), taskEvent.getVariables(), (String) 
taskEvent.getVariables().get(CommonProcessVariableNames.FORM_FIELD_DECISION)); } catch (JAXBException|SchemaException e) { LoggingUtils.logException(LOGGER, "Couldn't audit work item complete event", e); } } } //endregion //region Auditing and notifications private void auditProcessStart(StartProcessCommand spc, Job job, OperationResult result) { auditProcessStartEnd(spc.getVariables(), job, AuditEventStage.REQUEST, result); } private void auditProcessEnd(ProcessEvent event, Job job, OperationResult result) { auditProcessStartEnd(event.getVariables(), job, AuditEventStage.EXECUTION, result); } private void auditProcessStartEnd(Map<String,Object> variables, Job job, AuditEventStage stage, OperationResult result) { AuditEventRecord auditEventRecord = getChangeProcessor(variables).prepareProcessInstanceAuditRecord(variables, job, stage, result); auditService.audit(auditEventRecord, job.getTask()); } private void notifyProcessStart(StartProcessCommand spc, Job job, OperationResult result) throws JAXBException, SchemaException { PrismObject<? extends ProcessInstanceState> state = job.getChangeProcessor().externalizeProcessInstanceState(spc.getVariables()); for (ProcessListener processListener : processListeners) { processListener.onProcessInstanceStart(state, result); } } private void notifyProcessEnd(ProcessEvent event, Job job, OperationResult result) throws JAXBException, SchemaException { PrismObject<? extends ProcessInstanceState> state = job.getChangeProcessor().externalizeProcessInstanceState(event.getVariables()); for (ProcessListener processListener : processListeners) { processListener.onProcessInstanceEnd(state, result); } } private void notifyWorkItemCreated(String workItemName, String assigneeOid, String processInstanceName, Map<String,Object> processVariables) throws JAXBException, SchemaException { ChangeProcessor cp = getChangeProcessor(processVariables); PrismObject<? 
extends ProcessInstanceState> state = cp.externalizeProcessInstanceState(processVariables); for (WorkItemListener workItemListener : workItemListeners) { workItemListener.onWorkItemCreation(workItemName, assigneeOid, state); } } private void notifyWorkItemCompleted(String workItemName, String assigneeOid, String processInstanceName, Map<String,Object> processVariables, String decision) throws JAXBException, SchemaException { ChangeProcessor cp = getChangeProcessor(processVariables); PrismObject<? extends ProcessInstanceState> state = cp.externalizeProcessInstanceState(processVariables); for (WorkItemListener workItemListener : workItemListeners) { workItemListener.onWorkItemCompletion(workItemName, assigneeOid, state, decision); } } private void auditWorkItemEvent(TaskEvent taskEvent, AuditEventStage stage, OperationResult result) throws WorkflowException { Task shadowTask; try { String taskOid = (String) taskEvent.getVariables().get(CommonProcessVariableNames.VARIABLE_MIDPOINT_TASK_OID); if (taskOid == null) { LOGGER.error("Shadow task OID is unknown for work item " + taskEvent.getDebugName() + ", no audit record will be produced."); return; } shadowTask = taskManager.getTask(taskOid, result); } catch (SchemaException e) { LoggingUtils.logException(LOGGER, "Couldn't retrieve workflow-related task", e); return; } catch (ObjectNotFoundException e) { LoggingUtils.logException(LOGGER, "Couldn't retrieve workflow-related task", e); return; } AuditEventRecord auditEventRecord = getChangeProcessor(taskEvent).prepareWorkItemAuditRecord(taskEvent, stage, result); auditService.audit(auditEventRecord, shadowTask); } private String getDebugName(WorkItemType workItemType) { return workItemType.getName() + " (id " + workItemType.getWorkItemId() + ")"; } public void registerProcessListener(ProcessListener processListener) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("Registering process listener " + processListener); } processListeners.add(processListener); } public void 
registerWorkItemListener(WorkItemListener workItemListener) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("Registering work item listener " + workItemListener); } workItemListeners.add(workItemListener); } //endregion //region Getters and setters public WfTaskUtil getWfTaskUtil() { return wfTaskUtil; } //endregion }
package org.daisy.pipeline.nlp.lexing.ruled.impl; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Locale; import java.util.Map; import java.util.Set; import org.daisy.pipeline.nlp.RuleBasedTextCategorizer; import org.daisy.pipeline.nlp.TextCategorizer; import org.daisy.pipeline.nlp.TextCategorizer.CategorizedWord; import org.daisy.pipeline.nlp.TextCategorizer.Category; import org.daisy.pipeline.nlp.TextCategorizer.MatchMode; import org.daisy.pipeline.nlp.lexing.LexService; import org.daisy.pipeline.nlp.ruledcategorizers.RuledFrenchCategorizer; import org.daisy.pipeline.nlp.ruledcategorizers.RuledMultilangCategorizer; import org.osgi.service.component.annotations.Component; /** * RuleLexerToken uses RuledBasedTextCategorizer in PREFIX_MODE for splitting * the input stream into words. Then it calls a SentenceDectector to group them * together in sentences. It can share categorizers and sentence detectors with * other pipeline jobs when they are thread-safe, since they may load big * dictionaries. 
*/ @Component( name = "ruled-lex-service", service = { LexService.class } ) public class RuleBasedLexer implements LexService { class RuleLexerToken extends LexService.LexerToken { private Map<Locale, ISentenceDetector> mSentDetectors = new HashMap<Locale, ISentenceDetector>(); private Map<Locale, RuleBasedTextCategorizer> mTextCategorizers = new HashMap<Locale, RuleBasedTextCategorizer>(); private RuleBasedTextCategorizer mGenericCategorizer; private ISentenceDetector mGenericSentDetector; public RuleLexerToken(RuleBasedLexer l) { super(l); } @Override public List<Sentence> split(String input, Locale lang, List<String> parsingErrors) { if (input.length() == 0) return Collections.EMPTY_LIST; RuleBasedTextCategorizer categorizer = mTextCategorizers.get(lang); ISentenceDetector splitter = mSentDetectors.get(lang); // call the categorizer and the sentence detector List<CategorizedWord> words = splitIntoWords(input, lang, categorizer); for (int k = 0; k < words.size(); ++k) { if (words.get(k).category == Category.UNKNOWN) { StringBuilder error = new StringBuilder(getName() + ": the lexeme between square brackets could not be recognized: "); for (int i = Math.max(0, k - 10); i < k; ++i) error.append(words.get(i).word); error.append("[" + words.get(k).word + "]"); for (int i = k + 1; i < Math.min(words.size(), k + 10); ++i) error.append(words.get(i).word); parsingErrors.add(error.toString()); } } if (words.size() == 0 || (words.size() == 1 && !TextCategorizer.isSpeakable(words.iterator() .next().category))) return Collections.EMPTY_LIST; List<List<CategorizedWord>> sentences = splitter.split(words); // build the sentences in the expected format int currentPos = 0; List<Sentence> res = new ArrayList<Sentence>(); for (List<CategorizedWord> sentence : sentences) { //discard empty sentences int emptySize = 0; for (CategorizedWord word : sentence) { if (TextCategorizer.isSpeakable(word.category)) { emptySize = -1; break; } emptySize += word.word.length(); } if (emptySize != 
-1) { currentPos += emptySize; continue; } Sentence s = new Sentence(); s.boundaries = new TextBoundaries(); res.add(s); //NOTE: Now, the StringComposer should already trim the sentences //find the beginning of the sentence ListIterator<CategorizedWord> it = sentence.listIterator(); while (it.hasNext()) { CategorizedWord word = it.next(); if (word.category != Category.SPACE) { s.boundaries.left = currentPos; it.previous(); break; } currentPos += word.word.length(); } //content s.words = new ArrayList<LexService.TextBoundaries>(); while (it.hasNext()) { CategorizedWord word = it.next(); if (TextCategorizer.isSpeakable(word.category)) { TextBoundaries bounds = new TextBoundaries(); bounds.left = currentPos; bounds.right = bounds.left + word.word.length(); s.words.add(bounds); } currentPos += word.word.length(); } //go backward to find the end of the sentence int end = currentPos; while (it.hasPrevious()) { CategorizedWord word = it.previous(); if (word.category != Category.SPACE) { s.boundaries.right = end; break; } end -= word.word.length(); } } return res; } private List<CategorizedWord> splitIntoWords(String input, Locale locale, RuleBasedTextCategorizer categorizer) { String lowerCase = input.toLowerCase(locale); LinkedList<CategorizedWord> result = new LinkedList<CategorizedWord>(); int shift = 0; while (shift < input.length()) { String right = input.substring(shift); String lowerCaseRight = lowerCase.substring(shift); CategorizedWord w = categorizer.categorize(right, lowerCaseRight); result.add(w); shift += w.word.length(); } return result; } @Override public void shareResourcesWith(LexerToken other, Locale lang) { RuleLexerToken rother = (RuleLexerToken) other; if (mGenericCategorizer == null) { RuleBasedTextCategorizer genericCategorizer = rother.mGenericCategorizer; if (genericCategorizer != null && genericCategorizer.threadsafe()) { mGenericCategorizer = genericCategorizer; } } if (mGenericSentDetector == null) { ISentenceDetector genericSentDetector = 
rother.mGenericSentDetector; if (genericSentDetector != null && genericSentDetector.threadsafe()) { mGenericSentDetector = genericSentDetector; } } if (mTextCategorizers.get(lang) == null) { RuleBasedTextCategorizer categorizer = rother.mTextCategorizers.get(lang); if (categorizer != null && categorizer.threadsafe()) { mTextCategorizers.put(lang, categorizer); } } if (mSentDetectors.get(lang) == null) { ISentenceDetector sentDetector = rother.mSentDetectors.get(lang); if (sentDetector != null && sentDetector.threadsafe()) { mSentDetectors.put(lang, sentDetector); } } } @Override public void addLang(Locale lang) throws LexerInitException { if (mGenericSentDetector == null) { mGenericSentDetector = new EuroSentenceDetector(); } if (mSentDetectors.get(lang) == null) { mSentDetectors.put(lang, mGenericSentDetector); } try { if (mGenericCategorizer == null) { mGenericCategorizer = new RuledMultilangCategorizer(); mGenericCategorizer.init(MatchMode.PREFIX_MATCH); mGenericCategorizer.compile(); } if (mTextCategorizers.get(lang) == null) { String iso639_2lang = lang.getISO3Language(); RuleBasedTextCategorizer rtc = mGenericCategorizer; if ("fre".equals(iso639_2lang) || "fra".equals(iso639_2lang) || "frm".equals(iso639_2lang) || "fro".equals(iso639_2lang)) { rtc = new RuledFrenchCategorizer(); rtc.init(MatchMode.PREFIX_MATCH); rtc.compile(); } mTextCategorizers.put(lang, rtc); } } catch (IOException e) { throw new LexerInitException(e.getCause()); } } } @Override public int getLexQuality(Locale lang) { String language = lang.getLanguage(); if (language.equals(new Locale("fr").getLanguage())) return 3 * LexService.MinSpecializedLexQuality; if (language.equals(new Locale("en").getLanguage())) return 3 * LexService.MinSpecializedLexQuality; if (okLanguages.contains(language)) { return 2 * LexService.MinSpecializedLexQuality; } return 0; } @Override public String getName() { return "rule-based-lexer"; } @Override public LexerToken newToken() { return new RuleLexerToken(this); } 
@Override public void globalInit() throws LexerInitException { } @Override public void globalRelease() { } @Override public int getOverallQuality() { return -1; //cannot handle all the languages } private static Set<String> okLanguages; static { okLanguages = new HashSet<String>(); for (String code : new String[]{ "it", "pt" }) { okLanguages.add(new Locale(code).getLanguage()); } } }
package com.itracker.android.ui.fragment;

import android.app.Activity;
import android.content.Loader;
import android.database.Cursor;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;

import com.itracker.android.Application;
import com.itracker.android.R;
import com.itracker.android.data.model.Motion;
import com.itracker.android.data.model.Track;
import com.itracker.android.provider.TrackerContract;
import com.itracker.android.ui.activity.FootprintActivity;
import com.itracker.android.ui.adapter.TrackItemAdapter;
import com.itracker.android.ui.listener.OnTrackItemSelectedListener;
import com.itracker.android.ui.listener.OnSelectedTrackChangedListener;
import com.itracker.android.ui.widget.ActionFrameLayout;
import com.itracker.android.ui.widget.MotionsView;
import com.itracker.android.ui.widget.TimeRangeController;
import com.itracker.android.ui.widget.TimelineItem;
import com.itracker.android.ui.widget.TimelinesView;
import com.itracker.android.utils.ThrottledContentObserver;

import static com.itracker.android.utils.LogUtils.LOGD;
import static com.itracker.android.utils.LogUtils.makeLogTag;

/**
 * Fragment showing tracks, their motions and activity timelines. Reloads its
 * loaders when the tracks/motions/activities content providers change (via
 * throttled content observers registered in onAttach and removed in onDetach).
 */
public class ActionFragment extends TrackerFragment implements
        OnTrackItemSelectedListener {

    private static final String TAG = makeLogTag(ActionFragment.class);

    private RecyclerView mTracksView;
    private TextView mShowTimelinesView;
    private TimelinesView mTimelinesView;
    private MotionsView mMotionsView;
    private ProgressBar mLoadingView;

    private TimeRangeController mTimeRangeController;
    private TrackItemAdapter mTrackItemAdapter;
    private Track mSelectedTrack;          // currently selected track; null when none

    private ThrottledContentObserver mTracksObserver;
    private ThrottledContentObserver mMotionsObserver;
    private ThrottledContentObserver mActivitiesObserver;

    private boolean mSearchEnabled;        // whether the time-range "search" bar is shown in the toolbar

    public ActionFragment() {
        // Required empty public constructor
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);

        mTimeRangeController = new TimeRangeController(this);

        // Reload tracks for the current time range whenever Tracks content changes.
        mTracksObserver = new ThrottledContentObserver(() -> {
            LOGD(TAG, "ThrottledContentObserver fired (tracks). Content changed.");
            if (isAdded()) {
                // NOTE(review): message says "motions cursor" but this reloads tracks — likely copy-paste
                LOGD(TAG, "Requesting motions cursor reload as a result of ContentObserver firing.");
                long beginTime = mTimeRangeController.getBeginDate().getMillis();
                long endtime = mTimeRangeController.getEndDate().getMillis();
                reloadTracks(getLoaderManager(), beginTime, endtime, ActionFragment.this);
            }
        });
        activity.getContentResolver().registerContentObserver(TrackerContract.Tracks.CONTENT_URI, true, mTracksObserver);

        // Reload motions of the selected track whenever Motions content changes.
        mMotionsObserver = new ThrottledContentObserver(() -> {
            LOGD(TAG, "ThrottledContentObserver fired (motions). Content changed.");
            if (isAdded()) {
                if (mSelectedTrack != null) {
                    LOGD(TAG, "Requesting motions cursor reload as a result of ContentObserver firing.");
                    reloadMotions(getLoaderManager(), mSelectedTrack, ActionFragment.this);
                }
            }
        });
        activity.getContentResolver().registerContentObserver(TrackerContract.Motions.CONTENT_URI, true, mMotionsObserver);

        // Reload activities of the selected track whenever Activities content changes.
        mActivitiesObserver = new ThrottledContentObserver(() -> {
            LOGD(TAG, "ThrottledContentObserver fired (activities). Content changed.");
            if (isAdded()) {
                if (mSelectedTrack != null) {
                    LOGD(TAG, "Requesting motions cursor reload as a result of ContentObserver firing.");
                    reloadActivities(getLoaderManager(), mSelectedTrack, ActionFragment.this);
                }
            }
        });
        activity.getContentResolver().registerContentObserver(TrackerContract.Activities.CONTENT_URI, true, mActivitiesObserver);
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Clear the selection (notifies OnSelectedTrackChangedListener listeners) and
        // unregister all content observers registered in onAttach.
        updateSelectedTrack(null);
        getActivity().getContentResolver().unregisterContentObserver(mTracksObserver);
        getActivity().getContentResolver().unregisterContentObserver(mMotionsObserver);
        getActivity().getContentResolver().unregisterContentObserver(mActivitiesObserver);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mTimeRangeController.create();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        ActionFrameLayout layout = (ActionFrameLayout) inflater.inflate(R.layout.fragment_action, container, false);
        mTracksView = (RecyclerView) layout.findViewById(R.id.tracks_view);
        mTimelinesView = (TimelinesView) layout.findViewById(R.id.activities_view);
        mMotionsView = (MotionsView) layout.findViewById(R.id.motions_view);
        mLoadingView = (ProgressBar) layout.findViewById(R.id.motions_loading_progress);
        mShowTimelinesView = (TextView) layout.findViewById(R.id.show_timeline);

        layout.setOnFootprintFabClickedListener(() -> {
            startActivity(FootprintActivity.createIntent(getActivity(), mSelectedTrack));
        });

        mTrackItemAdapter = new TrackItemAdapter(getActivity());
        mTracksView.setLayoutManager(new GridLayoutManager(getActivity(), 2));
        mTracksView.setItemAnimator(new DefaultItemAnimator());
        mTracksView.setAdapter(mTrackItemAdapter);

        ListView listView = mTimelinesView.getListView();
        listView.setAdapter(new TimelinesView.TimelineItemAdapter(getActivity()));
        // On timeline item click: select it, redraw item drawables, and move the
        // motions viewport to the clicked timeline's time span.
        listView.setOnItemClickListener((AdapterView<?> parent, View view, int position, long id) -> {
            TimelinesView.TimelineItemAdapter adapter = (TimelinesView.TimelineItemAdapter) parent.getAdapter();
            adapter.selectItem(position);
            for (int i = 0; i < parent.getChildCount(); ++i) {
                TimelineItem item = (TimelineItem) parent.getChildAt(i);
                item.setTimelineDrawable(item == view);
            }
            TimelineItem.Timeline timeline = adapter.getItem(position);
            mMotionsView.moveViewport(timeline.getStartTime(), timeline.getStopTime());
        });
        listView.setOnItemLongClickListener((AdapterView<?> parent, View view, int position, long id) -> false);

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            mShowTimelinesView.setBackground(ContextCompat.getDrawable(getActivity(), R.drawable.ripple_effect));
        }

        return layout;
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.menu_action, menu);
        // Restore the time-range bar state when search was previously enabled.
        if (mSearchEnabled) {
            Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar);
            toolbar.addView(mTimeRangeController.getTimeRange(),
                    new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
            menu.findItem(R.id.action_clear).setVisible(true);
            menu.findItem(R.id.action_search).setVisible(false);
        }
        // mMenu is presumably declared in TrackerFragment — TODO confirm
        mMenu = menu;
    }

    @Override
    public void onDestroyOptionsMenu() {
        if (mSearchEnabled) {
            Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar);
            toolbar.removeView(mTimeRangeController.getTimeRange());
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar);
        switch (item.getItemId()) {
            case R.id.action_search: {
                // Show the time-range bar, swap search -> clear action.
                mSearchEnabled = true;
                item.setVisible(false);
                toolbar.addView(mTimeRangeController.getTimeRange(),
                        new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
                mMenu.findItem(R.id.action_clear).setVisible(true);
                return true;
            }
            case R.id.action_clear: {
                // Hide the time-range bar, swap clear -> search action.
                // NOTE(review): falls through to super (no return) unlike action_search — confirm intended
                mSearchEnabled = false;
                item.setVisible(false);
                toolbar.removeView(mTimeRangeController.getTimeRange());
                mMenu.findItem(R.id.action_search).setVisible(true);
            }
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onStart() {
        super.onStart();
        mTimeRangeController.updateTimeRange();
        Application.getInstance().addUIListener(OnTrackItemSelectedListener.class, this);
    }

    @Override
    public void onResume() {
        super.onResume();
        LOGD(TAG, "Reloading data as a result of onResume()");
        long beginTime = mTimeRangeController.getBeginDate().getMillis();
        long endtime = mTimeRangeController.getEndDate().getMillis();
        reloadTracks(getLoaderManager(), beginTime, endtime, this);
    }

    @Override
    public void onPause() {
        super.onPause();
        // Cancel pending observer callbacks so they don't fire after pause.
        mTracksObserver.cancelPendingCallback();
        mMotionsObserver.cancelPendingCallback();
        mActivitiesObserver.cancelPendingCallback();
        mTimeRangeController.saveState();
    }

    @Override
    public void onStop() {
        super.onStop();
        Application.getInstance().removeUIListener(OnTrackItemSelectedListener.class, this);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    @Override
    public void onTrackItemSelected(View view, int position) {
        // Selecting a track kicks off motion + activity reloads for it.
        showLoadingProgress();
        updateSelectedTrack(mTrackItemAdapter.getItem(position));
        reloadMotions(getLoaderManager(), mSelectedTrack, ActionFragment.this);
        reloadActivities(getLoaderManager(), mSelectedTrack, ActionFragment.this);
    }

    // Updates the selection and broadcasts it to all registered listeners.
    private void updateSelectedTrack(Track track) {
        mSelectedTrack = track;
        for (OnSelectedTrackChangedListener listener :
                Application.getInstance().getUIListeners(OnSelectedTrackChangedListener.class)) {
            listener.onSelectedTrackChanged(mSelectedTrack);
        }
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        if (!isAdded()) {
            return;
        }
        switch (loader.getId()) {
            case TracksQuery.TOKEN_NORMAL: {
                mTrackItemAdapter.changeCursor(data);
                break;
            }
            case MotionsQuery.TOKEN_NORMAL: {
                Motion[] motions = Motion.motionsFromCursor(data);
                mMotionsView.updateMotions(motions, () -> hideLoadingProgress());
                break;
            }
            case ActivitiesQuery.TOKEN_NORMAL: {
                TimelinesView.TimelineItemAdapter adapter =
                        (TimelinesView.TimelineItemAdapter) mTimelinesView.getListView().getAdapter();
                adapter.setNotifyOnChange(false);
                adapter.clear();
                adapter.addAll(TimelineItem.Timeline.fromActivities(data));
                adapter.notifyDataSetChanged();
                break;
            }
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        switch (loader.getId()) {
            case TracksQuery.TOKEN_NORMAL: {
                mTrackItemAdapter.changeCursor(null);
                break;
            }
            case MotionsQuery.TOKEN_NORMAL: {
                break;
            }
        }
    }

    // Delayed 50 ms so very fast loads don't flash the spinner.
    // mHandler is presumably declared in TrackerFragment — TODO confirm
    private void showLoadingProgress() {
        mHandler.postDelayed(() -> mLoadingView.setVisibility(View.VISIBLE), 50);
    }

    private void hideLoadingProgress() {
        mHandler.removeCallbacksAndMessages(null);
        mLoadingView.setVisibility(View.INVISIBLE);
    }

    @Override
    public void onSelected() {
    }

    @Override
    public void onUnselected() {
        if (isAdded()) {
            hideLoadingProgress();
        }
    }
}
package org.gwtbootstrap3.client.ui.base.form;

import java.util.ArrayList;
import java.util.List;

import org.gwtbootstrap3.client.ui.constants.Attributes;
import org.gwtbootstrap3.client.ui.form.validator.HasValidators;

import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.FormElement;
import com.google.gwt.event.shared.EventHandler;
import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.safehtml.client.SafeHtmlTemplates;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeUri;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.FormPanel;
import com.google.gwt.user.client.ui.HasOneWidget;
import com.google.gwt.user.client.ui.HasWidgets;
import com.google.gwt.user.client.ui.NamedFrame;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.user.client.ui.impl.FormPanelImpl;
import com.google.gwt.user.client.ui.impl.FormPanelImplHost;

/*
 * #%L
 * GwtBootstrap3
 * %%
 * Copyright (C) 2013 GwtBootstrap3
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * Base form widget that wraps a &lt;form&gt; element and (optionally) submits
 * to a hidden iframe so a {@link SubmitCompleteEvent} can be fired with the
 * submission result.
 *
 * @author Sven Jacobs
 * @author Steven Jardine
 */
public abstract class AbstractForm extends FormElementContainer implements FormPanelImplHost {

    /**
     * Fired when a form has been submitted successfully.
     */
    public static class SubmitCompleteEvent extends GwtEvent<SubmitCompleteHandler> {

        /**
         * The event type.
         */
        private static Type<SubmitCompleteHandler> TYPE;

        /**
         * Handler hook.
         *
         * @return the handler hook
         */
        public static Type<SubmitCompleteHandler> getType() {
            if (TYPE == null) {
                TYPE = new Type<SubmitCompleteHandler>();
            }
            return TYPE;
        }

        private final String resultHtml;

        /**
         * Create a submit complete event.
         *
         * @param resultsHtml
         *            the results from submitting the form
         */
        protected SubmitCompleteEvent(String resultsHtml) {
            this.resultHtml = resultsHtml;
        }

        @Override
        public final Type<SubmitCompleteHandler> getAssociatedType() {
            return getType();
        }

        /**
         * Gets the result text of the form submission.
         *
         * @return the result html, or <code>null</code> if there was an error
         *         reading it
         * @tip The result html can be <code>null</code> as a result of
         *      submitting a form to a different domain.
         */
        public String getResults() {
            return resultHtml;
        }

        @Override
        protected void dispatch(SubmitCompleteHandler handler) {
            handler.onSubmitComplete(this);
        }
    }

    /**
     * Handler for {@link SubmitCompleteEvent} events.
     */
    public interface SubmitCompleteHandler extends EventHandler {

        /**
         * Fired when a form has been submitted successfully.
         *
         * @param event
         *            the event
         */
        void onSubmitComplete(SubmitCompleteEvent event);
    }

    // SafeHtml template for the invisible target iframe.
    interface IFrameTemplate extends SafeHtmlTemplates {

        static final IFrameTemplate INSTANCE = GWT.create(IFrameTemplate.class);

        @Template("<iframe src=\"javascript:''\" name='{0}' tabindex='-1' "
                + "style='position:absolute;width:0;height:0;border:0'>")
        SafeHtml get(String name);
    }

    private static final String FORM = "form";

    // Used to make each synthesized iframe name unique within the page.
    private static int formId = 0;

    private static final FormPanelImpl impl = GWT.create(FormPanelImpl.class);

    // Name of the hidden target iframe; null when createIFrame was false.
    private String frameName;

    private Element synthesizedFrame;

    public AbstractForm() {
        this(true);
    }

    public AbstractForm(boolean createIFrame) {
        this(Document.get().createFormElement(), createIFrame);
        getElement().setAttribute(Attributes.ROLE, FORM);
    }

    /**
     * This constructor may be used by subclasses to explicitly use an existing
     * element. This element must be a &lt;form&gt; element.
     * <p>
     * If the createIFrame parameter is set to <code>true</code>, then the
     * wrapped form's target attribute will be set to a hidden iframe. If not,
     * the form's target will be left alone, and the FormSubmitComplete event
     * will not be fired.
     * </p>
     *
     * @param element
     *            the element to be used
     * @param createIFrame
     *            <code>true</code> to create an &lt;iframe&gt; element that
     *            will be targeted by this form
     */
    protected AbstractForm(Element element, boolean createIFrame) {
        setElement(element);
        FormElement.as(element);

        if (createIFrame) {
            assert getTarget() == null || getTarget().trim().length() == 0 :
                    "Cannot create target iframe if the form's target is already set.";

            // We use the module name as part of the unique ID to ensure that
            // ids are
            // unique across modules.
            frameName = "FormPanel_" + GWT.getModuleName() + "_" + (++formId);
            setTarget(frameName);

            sinkEvents(Event.ONLOAD);
        }
    }

    @Override
    protected void onAttach() {
        super.onAttach();

        if (frameName != null) {
            // Create and attach a hidden iframe to the body element.
            createFrame();
            Document.get().getBody().appendChild(synthesizedFrame);
        }
        // Hook up the underlying iframe's onLoad event when attached to the
        // DOM.
        // Making this connection only when attached avoids memory-leak issues.
        // The FormPanel cannot use the built-in GWT event-handling mechanism
        // because there is no standard onLoad event on iframes that works
        // across
        // browsers.
        impl.hookEvents(synthesizedFrame, getElement(), this);
    }

    @Override
    protected void onDetach() {
        super.onDetach();

        // Unhook the iframe's onLoad when detached.
        impl.unhookEvents(synthesizedFrame, getElement());

        if (synthesizedFrame != null) {
            // And remove it from the document.
            Document.get().getBody().removeChild(synthesizedFrame);
            synthesizedFrame = null;
        }
    }

    @Override
    public boolean onFormSubmit() {
        return onFormSubmitImpl();
    }

    @Override
    public void onFrameLoad() {
        onFrameLoadImpl();
    }

    /**
     * Adds a {@link SubmitCompleteEvent} handler.
     *
     * @param handler
     *            the handler
     * @return the handler registration used to remove the handler
     */
    public HandlerRegistration addSubmitCompleteHandler(
            SubmitCompleteHandler handler) {
        return addHandler(handler, SubmitCompleteEvent.getType());
    }

    /**
     * Adds a {@link FormPanel.SubmitEvent} handler.
     *
     * @param handler
     *            the handler
     * @return the handler registration used to remove the handler
     */
    public HandlerRegistration addSubmitHandler(FormPanel.SubmitHandler handler) {
        return addHandler(handler, FormPanel.SubmitEvent.getType());
    }

    /**
     * Gets the 'action' associated with this form. This is the URL to which it
     * will be submitted.
     *
     * @return the form's action
     */
    public String getAction() {
        return getFormElement().getAction();
    }

    /**
     * Sets the 'action' associated with this form. This is the URL to which it
     * will be submitted.
     *
     * @param action
     *            the form's action
     */
    public void setAction(final String action) {
        getFormElement().setAction(action);
    }

    /**
     * Sets the 'action' associated with this form. This is the URL to which it
     * will be submitted.
     *
     * @param url
     *            the form's action
     */
    public void setAction(SafeUri url) {
        getFormElement().setAction(url);
    }

    /**
     * Gets the HTTP method used for submitting this form. This should be either
     * {@link #METHOD_GET} or {@link #METHOD_POST}.
     *
     * @return the form's method
     */
    public String getMethod() {
        return getFormElement().getMethod();
    }

    /**
     * Sets the HTTP method used for submitting this form. This should be either
     * {@link #METHOD_GET} or {@link #METHOD_POST}.
     *
     * @param method
     *            the form's method
     */
    public void setMethod(final String method) {
        getFormElement().setMethod(method);
    }

    /**
     * Gets the form's 'target'. This is the name of the {@link NamedFrame} that
     * will receive the results of submission, or <code>null</code> if none has
     * been specified.
     *
     * @return the form's target.
     */
    public String getTarget() {
        return getFormElement().getTarget();
    }

    /**
     * Gets the encoding used for submitting this form. This should be either
     * {@link #ENCODING_MULTIPART} or {@link #ENCODING_URLENCODED}.
     *
     * @return the form's encoding
     */
    public String getEncoding() {
        return impl.getEncoding(getElement());
    }

    /**
     * Sets the encoding used for submitting this form. This should be either
     * {@link #ENCODING_MULTIPART} or {@link #ENCODING_URLENCODED}.
     *
     * @param encodingType
     *            the form's encoding
     */
    public void setEncoding(String encodingType) {
        impl.setEncoding(getElement(), encodingType);
    }

    /**
     * Submits form
     */
    public void submit() {
        // Fire the onSubmit event, because javascript's form.submit() does not
        // fire the built-in onsubmit event.
        if (!fireSubmitEvent()) {
            return;
        }
        impl.submit(getElement(), synthesizedFrame);
    }

    /**
     * Resets form
     */
    public void reset() {
        impl.reset(getElement());
        // Also reset every child input that carries validators.
        for (HasValidators<?> child : getChildrenWithValidators(this)) {
            child.reset();
        }
    }

    private void createFrame() {
        // Attach a hidden IFrame to the form. This is the target iframe to
        // which the form will be submitted. We have to create the iframe using
        // innerHTML, because setting an iframe's 'name' property dynamically
        // doesn't work on most browsers.
        Element dummy = Document.get().createDivElement();
        dummy.setInnerSafeHtml(IFrameTemplate.INSTANCE.get(frameName));

        synthesizedFrame = dummy.getFirstChildElement();
    }

    /**
     * Fire a {@link AbstractForm.SubmitEvent}.
     *
     * @return true to continue, false if canceled
     */
    private boolean fireSubmitEvent() {
        FormPanel.SubmitEvent event = new FormPanel.SubmitEvent();
        fireEvent(event);
        return !event.isCanceled();
    }

    FormElement getFormElement() {
        return FormElement.as(getElement());
    }

    /**
     * Returns true if the form is submitted, false if canceled.
     */
    private boolean onFormSubmitImpl() {
        return fireSubmitEvent();
    }

    private void onFrameLoadImpl() {
        // Fire onComplete events in a deferred command. This is necessary
        // because clients that detach the form panel when submission is
        // complete can cause some browsers (i.e. Mozilla) to go into an
        // 'infinite loading' state. See issue 916.
        Scheduler.get().scheduleDeferred(new ScheduledCommand() {
            @Override
            public void execute() {
                fireEvent(new SubmitCompleteEvent(impl
                        .getContents(synthesizedFrame)));
            }
        });
    }

    private void setTarget(String target) {
        getFormElement().setTarget(target);
    }

    /**
     * @return true if the child input elements are all valid.
     */
    public boolean validate() {
        return validate(true);
    }

    /**
     * @return true if the child input elements are all valid.
     */
    public boolean validate(boolean show) {
        boolean result = true;
        // Note: all children are validated; &= does not short-circuit.
        for (HasValidators<?> child : getChildrenWithValidators(this)) {
            result &= child.validate(show);
        }
        return result;
    }

    /**
     * Get this forms child input elements with validators.
     *
     * @param widget the widget
     * @return the children with validators
     */
    protected List<HasValidators<?>> getChildrenWithValidators(Widget widget) {
        List<HasValidators<?>> result = new ArrayList<HasValidators<?>>();
        if (widget != null) {
            if (widget instanceof HasValidators<?>) {
                result.add((HasValidators<?>) widget);
            }
            // Recurse into single-child and multi-child containers.
            if (widget instanceof HasOneWidget) {
                result.addAll(getChildrenWithValidators(((HasOneWidget) widget).getWidget()));
            }
            if (widget instanceof HasWidgets) {
                for (Widget child : (HasWidgets) widget) {
                    result.addAll(getChildrenWithValidators(child));
                }
            }
        }
        return result;
    }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.forms.editor.client.editor;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Optional;

import com.google.gwtmockito.GwtMock;
import org.guvnor.common.services.project.client.context.WorkspaceProjectContext;
import org.guvnor.common.services.project.client.security.ProjectController;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.guvnor.messageconsole.client.console.widget.button.AlertsButtonMenuItemBuilder;
import org.jboss.errai.common.client.dom.HTMLElement;
import org.jboss.errai.ioc.client.api.ManagedInstance;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.junit.Before;
import org.kie.workbench.common.forms.editor.client.editor.changes.ChangesNotificationDisplayer;
import org.kie.workbench.common.forms.editor.client.editor.errorMessage.ErrorMessageDisplayer;
import org.kie.workbench.common.forms.editor.client.editor.rendering.EditorFieldLayoutComponent;
import org.kie.workbench.common.forms.editor.client.editor.test.TestFormEditorHelper;
import org.kie.workbench.common.forms.editor.client.resources.images.FormEditorImageResources;
import org.kie.workbench.common.forms.editor.client.type.FormDefinitionResourceType;
import org.kie.workbench.common.forms.editor.model.FormModelerContent;
import org.kie.workbench.common.forms.editor.model.impl.FormModelSynchronizationResultImpl;
import org.kie.workbench.common.forms.editor.service.shared.FormEditorService;
import org.kie.workbench.common.forms.fields.test.TestFieldManager;
import org.kie.workbench.common.forms.model.FieldDefinition;
import org.kie.workbench.common.forms.model.FormDefinition;
import org.kie.workbench.common.forms.model.ModelProperty;
import org.kie.workbench.common.forms.model.impl.PortableJavaModel;
import org.kie.workbench.common.services.refactoring.client.usages.ShowAssetUsagesDisplayer;
import org.kie.workbench.common.services.refactoring.client.usages.ShowAssetUsagesDisplayerView;
import org.kie.workbench.common.services.refactoring.service.AssetsUsageService;
import org.kie.workbench.common.widgets.client.menu.FileMenuBuilder;
import org.kie.workbench.common.widgets.metadata.client.KieEditorWrapperView;
import org.kie.workbench.common.widgets.metadata.client.widget.OverviewWidgetPresenter;
import org.kie.workbench.common.workbench.client.events.LayoutEditorFocusEvent;
import org.mockito.Mock;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.ext.editor.commons.client.file.popups.CopyPopUpPresenter;
import org.uberfire.ext.editor.commons.client.file.popups.DeletePopUpPresenter;
import org.uberfire.ext.editor.commons.client.file.popups.DeletePopUpView;
import org.uberfire.ext.editor.commons.client.file.popups.RenamePopUpPresenter;
import org.uberfire.ext.editor.commons.client.file.popups.commons.ToggleCommentPresenter;
import org.uberfire.ext.editor.commons.client.history.VersionRecordManager;
import org.uberfire.ext.layout.editor.api.editor.LayoutTemplate;
import org.uberfire.ext.layout.editor.client.api.LayoutDragComponentPalette;
import org.uberfire.ext.layout.editor.client.api.LayoutEditor;
import org.uberfire.ext.plugin.client.perspective.editor.layout.editor.HTMLLayoutDragComponent;
import org.uberfire.ext.widgets.common.client.common.BusyIndicatorView;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mocks.EventSourceMock;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.workbench.events.NotificationEvent;
import org.uberfire.workbench.model.menu.MenuItem;
import org.uberfire.workbench.model.menu.Menus;

import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

/**
 * Shared fixture for FormEditorPresenter tests: builds a sample
 * "com.test.Employee" model/form, wires every presenter collaborator as a
 * Mockito/GwtMockito mock, and exposes helpers to load content and move
 * fields between the "available" pool and the form definition.
 */
public class FormEditorPresenterAbstractTest {

    public static final String LAST_NAME = "lastName";

    // Snapshots of the sample form's fields/properties taken in setUp();
    // subclass tests assert against these.
    protected List<FieldDefinition> employeeFields;
    protected List<ModelProperty> modelProperties;

    protected FormEditorHelper editorHelper;

    @Mock
    protected ChangesNotificationDisplayer modelChangesDisplayer;

    @GwtMock
    protected FormEditorImageResources formEditorImageResources;

    @Mock
    protected VersionRecordManager versionRecordManager;

    @Mock
    protected FormEditorPresenter.FormEditorView view;

    @Mock
    protected TranslationService translationService;

    @GwtMock
    protected KieEditorWrapperView kieView;

    @GwtMock
    protected ObservablePath path;

    @GwtMock
    protected FormDefinitionResourceType formDefinitionResourceType;

    @Mock
    protected LayoutEditor layoutEditorMock;

    @Mock
    protected LayoutDragComponentPalette layoutDragComponentPaletteMock;

    @Mock
    protected EventSourceMock<LayoutEditorFocusEvent> layoutFocusEventMock;

    @Mock
    protected HTMLLayoutDragComponent htmlLayoutDragComponent;

    @Mock
    protected ManagedInstance<EditorFieldLayoutComponent> editorFieldLayoutComponents;

    @Mock
    protected FormEditorService formEditorService;

    @Mock
    protected EventSourceMock<NotificationEvent> notificationEvent;

    @Mock
    protected FileMenuBuilder menuBuilderMock;

    @Mock
    protected ProjectController projectController;

    @Mock
    protected WorkspaceProjectContext workbenchContext;

    @Mock
    protected DeletePopUpView deletePopUpView;

    @Mock
    protected ToggleCommentPresenter toggleCommentPresenter;

    @Mock
    protected RenamePopUpPresenter renamePopUpPresenter;

    // Built as a spy in loadContent() (needs real constructor args), not a @Mock.
    protected DeletePopUpPresenter deletePopUpPresenter;

    @Mock
    protected ShowAssetUsagesDisplayerView assetUsagesDisplayerView;

    @Mock
    protected AssetsUsageService assetsUsagService;

    @Mock
    protected AlertsButtonMenuItemBuilder alertsButtonMenuItemBuilder;

    @Mock
    protected CopyPopUpPresenter copyPopUpPresenter;

    @Mock
    protected MenuItem alertsButtonMenuItem;

    @Mock
    protected ErrorMessageDisplayer errorMessageDisplayer;

    protected TestFieldManager fieldManager;

    protected List<Path> assetUsages = new ArrayList<>();

    protected ShowAssetUsagesDisplayer showAssetUsagesDisplayer;

    protected CallerMock<FormEditorService> editorServiceCallerMock;

    protected FormEditorPresenter presenter;
    protected FormModelerContent content;
    protected FormModelSynchronizationResultImpl synchronizationResult = new FormModelSynchronizationResultImpl();

    protected PortableJavaModel model;

    protected FormDefinition form;

    /**
     * Builds the sample Employee model and form and stubs the workbench
     * context to report no active project/module/package.
     */
    @Before
    public void setUp() throws Exception {
        fieldManager = new TestFieldManager();

        model = new PortableJavaModel("com.test.Employee");

        model.addProperty("name", String.class.getName());
        model.addProperty("lastName", String.class.getName());
        model.addProperty("birthday", Date.class.getName());
        model.addProperty("married", Boolean.class.getName());

        form = new FormDefinition(model);
        form.setName("EmployeeTestForm");
        form.setId("_random_id");

        //model.getProperties().stream().map(fieldManager::getDefinitionByModelProperty).forEach(fieldDefinition -> form.getFields().add(fieldDefinition));

        modelProperties = new ArrayList<>(model.getProperties());

        employeeFields = new ArrayList<>(form.getFields());

        when(workbenchContext.getActiveOrganizationalUnit()).thenReturn(Optional.empty());
        when(workbenchContext.getActiveWorkspaceProject()).thenReturn(Optional.empty());
        when(workbenchContext.getActiveModule()).thenReturn(Optional.empty());
        when(workbenchContext.getActiveRepositoryRoot()).thenReturn(Optional.empty());
        when(workbenchContext.getActivePackage()).thenReturn(Optional.empty());
        when(alertsButtonMenuItemBuilder.build()).thenReturn(alertsButtonMenuItem);
    }

    /**
     * Stubs every collaborator, builds the presenter under test as an
     * anonymous subclass whose protected fields are replaced with the mocks
     * above, and starts it up against the mocked path.
     */
    protected void loadContent() {
        when(formDefinitionResourceType.getSuffix()).thenReturn("form.frm");
        when(formDefinitionResourceType.accept(path)).thenReturn(true);

        when(editorFieldLayoutComponents.get()).thenAnswer(invocationOnMock -> mock(EditorFieldLayoutComponent.class));

        // The editor service always serves the fixture content built by serviceLoad().
        when(formEditorService.loadContent(any())).then(invocation -> serviceLoad());

        editorServiceCallerMock = new CallerMock<>(formEditorService);

        editorHelper = spy(new TestFormEditorHelper(fieldManager,
                                                    editorFieldLayoutComponents));

        when(layoutEditorMock.getLayout()).thenReturn(new LayoutTemplate());

        when(menuBuilderMock.addSave(any(MenuItem.class))).thenReturn(menuBuilderMock);
        when(menuBuilderMock.addCopy(any(Command.class))).thenReturn(menuBuilderMock);
        when(menuBuilderMock.addRename(any(Command.class))).thenReturn(menuBuilderMock);
        when(menuBuilderMock.addDelete(any(ObservablePath.class))).thenReturn(menuBuilderMock);
        when(menuBuilderMock.addNewTopLevelMenu(any(MenuItem.class))).thenReturn(menuBuilderMock);
        when(menuBuilderMock.build()).thenReturn(mock(Menus.class));

        when(versionRecordManager.getCurrentPath()).thenReturn(path);

        when(translationService.format(anyString(),
                                       anyString())).thenReturn("");

        showAssetUsagesDisplayer = spy(new ShowAssetUsagesDisplayer(assetUsagesDisplayerView,
                                                                    translationService,
                                                                    new CallerMock<>(assetsUsagService)));

        when(assetUsagesDisplayerView.getDefaultMessageContainer()).thenReturn(mock(HTMLElement.class));
        when(assetsUsagService.getAssetUsages(anyString(),
                                              any(),
                                              any())).thenReturn(assetUsages);

        deletePopUpPresenter = spy(new DeletePopUpPresenter(deletePopUpView,
                                                            toggleCommentPresenter));

        presenter = new FormEditorPresenter(view,
                                            modelChangesDisplayer,
                                            formDefinitionResourceType,
                                            editorServiceCallerMock,
                                            translationService,
                                            editorFieldLayoutComponents,
                                            showAssetUsagesDisplayer,
                                            errorMessageDisplayer) {
            {
                // Instance-initializer: override the superclass' injected
                // collaborators with the test doubles created above.
                kieView = mock(KieEditorWrapperView.class);
                versionRecordManager = FormEditorPresenterAbstractTest.this.versionRecordManager;
                editorHelper = FormEditorPresenterAbstractTest.this.editorHelper;
                busyIndicatorView = mock(BusyIndicatorView.class);
                overviewWidget = mock(OverviewWidgetPresenter.class);
                layoutEditor = layoutEditorMock;
                layoutDragComponentPalette = layoutDragComponentPaletteMock;
                layoutFocusEvent = layoutFocusEventMock;
                htmlLayoutDragComponent = FormEditorPresenterAbstractTest.this.htmlLayoutDragComponent;
                notification = notificationEvent;
                fileMenuBuilder = menuBuilderMock;
                workbenchContext = FormEditorPresenterAbstractTest.this.workbenchContext;
                projectController = FormEditorPresenterAbstractTest.this.projectController;
                deletePopUpPresenter = FormEditorPresenterAbstractTest.this.deletePopUpPresenter;
                renamePopUpPresenter = FormEditorPresenterAbstractTest.this.renamePopUpPresenter;
                alertsButtonMenuItemBuilder = FormEditorPresenterAbstractTest.this.alertsButtonMenuItemBuilder;
                formEditorContext = mock(FormEditorContext.class);
                copyPopUpPresenter = FormEditorPresenterAbstractTest.this.copyPopUpPresenter;
            }

            @Override
            public void doLoadContent(FormModelerContent content) {
                super.doLoadContent(content);
                // Record the fields the helper exposed so tests can inspect them.
                employeeFields.addAll(editorHelper.getAvailableFields().values());
            }

            @Override
            protected void addSourcePage() {
                // Intentionally empty: the source page is not under test.
            }
        };

        presenter.onStartup(path,
                            mock(PlaceRequest.class));

        assertTrue("There should exist base field draggables",
                   editorHelper.getBaseFieldsDraggables().size() > 0);
    }

    /**
     * Builds the {@link FormModelerContent} served by the mocked editor
     * service (a spy, so interactions can be verified).
     */
    public FormModelerContent serviceLoad() {
        content = spy(new FormModelerContent());

        content.setDefinition(form);
        content.setOverview(new Overview());
        content.setPath(path);
        content.setSynchronizationResult(synchronizationResult);
        return content;
    }

    // Registers every sample Employee field with the editor helper's pool.
    protected void loadAvailableFields() {
        employeeFields.forEach(editorHelper::addAvailableField);
    }

    // Moves a single field from the helper's "available" pool onto the form.
    protected void addField(FieldDefinition field) {
        if (editorHelper.getAvailableFields().containsKey(field.getId())) {
            editorHelper.getFormDefinition().getFields().add(field);
            editorHelper.getAvailableFields().remove(field.getId());
        }
    }

    // Adds every currently-available field to the form definition.
    // NOTE(review): unlike addField(), this does not remove them from the
    // available pool — confirm that asymmetry is intentional.
    protected void addAllFields() {
        FormDefinition form = editorHelper.getFormDefinition();
        editorHelper.getAvailableFields().values().forEach(field -> form.getFields().add(field));
    }
}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.openxr;

import javax.annotation.*;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * Debug utils object name info.
 *
 * <h5>Valid Usage</h5>
 *
 * <ul>
 * <li>If {@code objectType} is {@link XR10#XR_OBJECT_TYPE_UNKNOWN OBJECT_TYPE_UNKNOWN}, {@code objectHandle} <b>must</b> not be {@link XR10#XR_NULL_HANDLE NULL_HANDLE}</li>
 * <li>If {@code objectType} is not {@link XR10#XR_OBJECT_TYPE_UNKNOWN OBJECT_TYPE_UNKNOWN}, {@code objectHandle} <b>must</b> be {@link XR10#XR_NULL_HANDLE NULL_HANDLE} or an OpenXR handle of the type associated with {@code objectType}</li>
 * </ul>
 *
 * <h5>Valid Usage (Implicit)</h5>
 *
 * <ul>
 * <li>The {@link EXTDebugUtils XR_EXT_debug_utils} extension <b>must</b> be enabled prior to using {@link XrDebugUtilsObjectNameInfoEXT}</li>
 * <li>{@code type} <b>must</b> be {@link EXTDebugUtils#XR_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT}</li>
 * <li>{@code next} <b>must</b> be {@code NULL} or a valid pointer to the <a target="_blank" href="https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#valid-usage-for-structure-pointer-chains">next structure in a structure chain</a></li>
 * <li>{@code objectType} <b>must</b> be a valid {@code XrObjectType} value</li>
 * <li>If {@code objectName} is not {@code NULL}, {@code objectName} <b>must</b> be a null-terminated UTF-8 string</li>
 * </ul>
 *
 * <h5>See Also</h5>
 *
 * <p>{@link XrDebugUtilsMessengerCallbackDataEXT}, {@link EXTDebugUtils#xrSetDebugUtilsObjectNameEXT SetDebugUtilsObjectNameEXT}</p>
 *
 * <h3>Layout</h3>
 *
 * <pre><code>
 * struct XrDebugUtilsObjectNameInfoEXT {
 *     XrStructureType {@link #type};
 *     void const * {@link #next};
 *     XrObjectType {@link #objectType};
 *     uint64_t {@link #objectHandle};
 *     char const * {@link #objectName};
 * }</code></pre>
 */
public class XrDebugUtilsObjectNameInfoEXT extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        TYPE,
        NEXT,
        OBJECTTYPE,
        OBJECTHANDLE,
        OBJECTNAME;

    static {
        // Member sizes mirror the native struct layout documented above:
        // type (4), next (pointer), objectType (4), objectHandle (8), objectName (pointer).
        Layout layout = __struct(
            __member(4),
            __member(POINTER_SIZE),
            __member(4),
            __member(8),
            __member(POINTER_SIZE)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        TYPE = layout.offsetof(0);
        NEXT = layout.offsetof(1);
        OBJECTTYPE = layout.offsetof(2);
        OBJECTHANDLE = layout.offsetof(3);
        OBJECTNAME = layout.offsetof(4);
    }

    /**
     * Creates a {@code XrDebugUtilsObjectNameInfoEXT} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public XrDebugUtilsObjectNameInfoEXT(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** the {@code XrStructureType} of this structure. */
    @NativeType("XrStructureType")
    public int type() { return ntype(address()); }
    /** {@code NULL} or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension. */
    @NativeType("void const *")
    public long next() { return nnext(address()); }
    /** an {@code XrObjectType} specifying the type of the object to be named. */
    @NativeType("XrObjectType")
    public int objectType() { return nobjectType(address()); }
    /** the object to be named. */
    @NativeType("uint64_t")
    public long objectHandle() { return nobjectHandle(address()); }
    /** a {@code NULL} terminated UTF-8 string specifying the name to apply to objectHandle. */
    @Nullable
    @NativeType("char const *")
    public ByteBuffer objectName() { return nobjectName(address()); }
    /** a {@code NULL} terminated UTF-8 string specifying the name to apply to objectHandle. */
    @Nullable
    @NativeType("char const *")
    public String objectNameString() { return nobjectNameString(address()); }

    /** Sets the specified value to the {@link #type} field. */
    public XrDebugUtilsObjectNameInfoEXT type(@NativeType("XrStructureType") int value) { ntype(address(), value); return this; }
    /** Sets the {@link EXTDebugUtils#XR_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT} value to the {@link #type} field. */
    public XrDebugUtilsObjectNameInfoEXT type$Default() { return type(EXTDebugUtils.XR_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT); }
    /** Sets the specified value to the {@link #next} field. */
    public XrDebugUtilsObjectNameInfoEXT next(@NativeType("void const *") long value) { nnext(address(), value); return this; }
    /** Sets the specified value to the {@link #objectType} field. */
    public XrDebugUtilsObjectNameInfoEXT objectType(@NativeType("XrObjectType") int value) { nobjectType(address(), value); return this; }
    /** Sets the specified value to the {@link #objectHandle} field. */
    public XrDebugUtilsObjectNameInfoEXT objectHandle(@NativeType("uint64_t") long value) { nobjectHandle(address(), value); return this; }
    /** Sets the address of the specified encoded string to the {@link #objectName} field. */
    public XrDebugUtilsObjectNameInfoEXT objectName(@Nullable @NativeType("char const *") ByteBuffer value) { nobjectName(address(), value); return this; }

    /** Initializes this struct with the specified values. */
    public XrDebugUtilsObjectNameInfoEXT set(
        int type,
        long next,
        int objectType,
        long objectHandle,
        @Nullable ByteBuffer objectName
    ) {
        type(type);
        next(next);
        objectType(objectType);
        objectHandle(objectHandle);
        objectName(objectName);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public XrDebugUtilsObjectNameInfoEXT set(XrDebugUtilsObjectNameInfoEXT src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code XrDebugUtilsObjectNameInfoEXT} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static XrDebugUtilsObjectNameInfoEXT malloc() {
        return wrap(XrDebugUtilsObjectNameInfoEXT.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code XrDebugUtilsObjectNameInfoEXT} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static XrDebugUtilsObjectNameInfoEXT calloc() {
        return wrap(XrDebugUtilsObjectNameInfoEXT.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code XrDebugUtilsObjectNameInfoEXT} instance allocated with {@link BufferUtils}. */
    public static XrDebugUtilsObjectNameInfoEXT create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(XrDebugUtilsObjectNameInfoEXT.class, memAddress(container), container);
    }

    /** Returns a new {@code XrDebugUtilsObjectNameInfoEXT} instance for the specified memory address. */
    public static XrDebugUtilsObjectNameInfoEXT create(long address) {
        return wrap(XrDebugUtilsObjectNameInfoEXT.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static XrDebugUtilsObjectNameInfoEXT createSafe(long address) {
        return address == NULL ? null : wrap(XrDebugUtilsObjectNameInfoEXT.class, address);
    }

    /**
     * Returns a new {@link XrDebugUtilsObjectNameInfoEXT.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static XrDebugUtilsObjectNameInfoEXT.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link XrDebugUtilsObjectNameInfoEXT.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static XrDebugUtilsObjectNameInfoEXT.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link XrDebugUtilsObjectNameInfoEXT.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static XrDebugUtilsObjectNameInfoEXT.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link XrDebugUtilsObjectNameInfoEXT.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static XrDebugUtilsObjectNameInfoEXT.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static XrDebugUtilsObjectNameInfoEXT.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    /**
     * Returns a new {@code XrDebugUtilsObjectNameInfoEXT} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static XrDebugUtilsObjectNameInfoEXT malloc(MemoryStack stack) {
        return wrap(XrDebugUtilsObjectNameInfoEXT.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code XrDebugUtilsObjectNameInfoEXT} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static XrDebugUtilsObjectNameInfoEXT calloc(MemoryStack stack) {
        return wrap(XrDebugUtilsObjectNameInfoEXT.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link XrDebugUtilsObjectNameInfoEXT.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static XrDebugUtilsObjectNameInfoEXT.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link XrDebugUtilsObjectNameInfoEXT.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static XrDebugUtilsObjectNameInfoEXT.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #type}. */
    public static int ntype(long struct) { return UNSAFE.getInt(null, struct + XrDebugUtilsObjectNameInfoEXT.TYPE); }
    /** Unsafe version of {@link #next}. */
    public static long nnext(long struct) { return memGetAddress(struct + XrDebugUtilsObjectNameInfoEXT.NEXT); }
    /** Unsafe version of {@link #objectType}. */
    public static int nobjectType(long struct) { return UNSAFE.getInt(null, struct + XrDebugUtilsObjectNameInfoEXT.OBJECTTYPE); }
    /** Unsafe version of {@link #objectHandle}. */
    public static long nobjectHandle(long struct) { return UNSAFE.getLong(null, struct + XrDebugUtilsObjectNameInfoEXT.OBJECTHANDLE); }
    /** Unsafe version of {@link #objectName}. */
    @Nullable public static ByteBuffer nobjectName(long struct) { return memByteBufferNT1Safe(memGetAddress(struct + XrDebugUtilsObjectNameInfoEXT.OBJECTNAME)); }
    /** Unsafe version of {@link #objectNameString}. */
    @Nullable public static String nobjectNameString(long struct) { return memUTF8Safe(memGetAddress(struct + XrDebugUtilsObjectNameInfoEXT.OBJECTNAME)); }

    /** Unsafe version of {@link #type(int) type}. */
    public static void ntype(long struct, int value) { UNSAFE.putInt(null, struct + XrDebugUtilsObjectNameInfoEXT.TYPE, value); }
    /** Unsafe version of {@link #next(long) next}. */
    public static void nnext(long struct, long value) { memPutAddress(struct + XrDebugUtilsObjectNameInfoEXT.NEXT, value); }
    /** Unsafe version of {@link #objectType(int) objectType}. */
    public static void nobjectType(long struct, int value) { UNSAFE.putInt(null, struct + XrDebugUtilsObjectNameInfoEXT.OBJECTTYPE, value); }
    /** Unsafe version of {@link #objectHandle(long) objectHandle}. */
    public static void nobjectHandle(long struct, long value) { UNSAFE.putLong(null, struct + XrDebugUtilsObjectNameInfoEXT.OBJECTHANDLE, value); }
    /** Unsafe version of {@link #objectName(ByteBuffer) objectName}. */
    public static void nobjectName(long struct, @Nullable ByteBuffer value) { if (CHECKS) { checkNT1Safe(value); } memPutAddress(struct + XrDebugUtilsObjectNameInfoEXT.OBJECTNAME, memAddressSafe(value)); }

    // -----------------------------------

    /** An array of {@link XrDebugUtilsObjectNameInfoEXT} structs. */
    public static class Buffer extends StructBuffer<XrDebugUtilsObjectNameInfoEXT, Buffer> implements NativeResource {

        private static final XrDebugUtilsObjectNameInfoEXT ELEMENT_FACTORY = XrDebugUtilsObjectNameInfoEXT.create(-1L);

        /**
         * Creates a new {@code XrDebugUtilsObjectNameInfoEXT.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link XrDebugUtilsObjectNameInfoEXT#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected XrDebugUtilsObjectNameInfoEXT getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return the value of the {@link XrDebugUtilsObjectNameInfoEXT#type} field. */
        @NativeType("XrStructureType")
        public int type() { return XrDebugUtilsObjectNameInfoEXT.ntype(address()); }
        /** @return the value of the {@link XrDebugUtilsObjectNameInfoEXT#next} field. */
        @NativeType("void const *")
        public long next() { return XrDebugUtilsObjectNameInfoEXT.nnext(address()); }
        /** @return the value of the {@link XrDebugUtilsObjectNameInfoEXT#objectType} field. */
        @NativeType("XrObjectType")
        public int objectType() { return XrDebugUtilsObjectNameInfoEXT.nobjectType(address()); }
        /** @return the value of the {@link XrDebugUtilsObjectNameInfoEXT#objectHandle} field. */
        @NativeType("uint64_t")
        public long objectHandle() { return XrDebugUtilsObjectNameInfoEXT.nobjectHandle(address()); }
        /** @return a {@link ByteBuffer} view of the null-terminated string pointed to by the {@link XrDebugUtilsObjectNameInfoEXT#objectName} field. */
        @Nullable
        @NativeType("char const *")
        public ByteBuffer objectName() { return XrDebugUtilsObjectNameInfoEXT.nobjectName(address()); }
        /** @return the null-terminated string pointed to by the {@link XrDebugUtilsObjectNameInfoEXT#objectName} field. */
        @Nullable
        @NativeType("char const *")
        public String objectNameString() { return XrDebugUtilsObjectNameInfoEXT.nobjectNameString(address()); }

        /** Sets the specified value to the {@link XrDebugUtilsObjectNameInfoEXT#type} field. */
        public XrDebugUtilsObjectNameInfoEXT.Buffer type(@NativeType("XrStructureType") int value) { XrDebugUtilsObjectNameInfoEXT.ntype(address(), value); return this; }
        /** Sets the {@link EXTDebugUtils#XR_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT} value to the {@link XrDebugUtilsObjectNameInfoEXT#type} field. */
        public XrDebugUtilsObjectNameInfoEXT.Buffer type$Default() { return type(EXTDebugUtils.XR_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT); }
        /** Sets the specified value to the {@link XrDebugUtilsObjectNameInfoEXT#next} field. */
        public XrDebugUtilsObjectNameInfoEXT.Buffer next(@NativeType("void const *") long value) { XrDebugUtilsObjectNameInfoEXT.nnext(address(), value); return this; }
        /** Sets the specified value to the {@link XrDebugUtilsObjectNameInfoEXT#objectType} field. */
        public XrDebugUtilsObjectNameInfoEXT.Buffer objectType(@NativeType("XrObjectType") int value) { XrDebugUtilsObjectNameInfoEXT.nobjectType(address(), value); return this; }
        /** Sets the specified value to the {@link XrDebugUtilsObjectNameInfoEXT#objectHandle} field. */
        public XrDebugUtilsObjectNameInfoEXT.Buffer objectHandle(@NativeType("uint64_t") long value) { XrDebugUtilsObjectNameInfoEXT.nobjectHandle(address(), value); return this; }
        /** Sets the address of the specified encoded string to the {@link XrDebugUtilsObjectNameInfoEXT#objectName} field. */
        public XrDebugUtilsObjectNameInfoEXT.Buffer objectName(@Nullable @NativeType("char const *") ByteBuffer value) { XrDebugUtilsObjectNameInfoEXT.nobjectName(address(), value); return this; }
    }
}
package io.jenkins.blueocean.rest.impl.pipeline;

import hudson.model.Action;
import hudson.model.Queue;
import hudson.model.queue.CauseOfBlockage;
import io.jenkins.blueocean.rest.model.BlueRun;
import org.jenkinsci.plugins.pipeline.StageStatus;
import org.jenkinsci.plugins.pipeline.SyntheticStage;
import org.jenkinsci.plugins.workflow.actions.ErrorAction;
import org.jenkinsci.plugins.workflow.actions.LabelAction;
import org.jenkinsci.plugins.workflow.actions.LogAction;
import org.jenkinsci.plugins.workflow.actions.QueueItemAction;
import org.jenkinsci.plugins.workflow.actions.StageAction;
import org.jenkinsci.plugins.workflow.actions.TagsAction;
import org.jenkinsci.plugins.workflow.actions.ThreadNameAction;
import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode;
import org.jenkinsci.plugins.workflow.cps.nodes.StepStartNode;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException;
import org.jenkinsci.plugins.workflow.steps.StepDescriptor;
import org.jenkinsci.plugins.workflow.support.actions.PauseAction;
import org.jenkinsci.plugins.workflow.support.steps.ExecutorStep;
import org.jenkinsci.plugins.workflow.support.steps.input.InputAction;

import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.function.Predicate;

/**
 * Helpers for classifying pipeline {@link FlowNode}s (stages, parallel branches, synthetic
 * stages) and for deriving a Blue Ocean run result from workflow actions attached to nodes.
 *
 * @author Vivek Pandey
 */
public class PipelineNodeUtil {

    /**
     * Maps an optional {@link ErrorAction} to a run result.
     *
     * @param errorAction error action attached to a node, or null when the node succeeded
     * @return {@link BlueRun.BlueRunResult#SUCCESS} when there is no error, otherwise the
     *         result derived from the underlying {@link Throwable}
     */
    @Nonnull
    public static BlueRun.BlueRunResult getStatus(@Nullable ErrorAction errorAction) {
        if (errorAction == null) {
            return BlueRun.BlueRunResult.SUCCESS;
        } else {
            return getStatus(errorAction.getError());
        }
    }

    /**
     * Maps a failure cause to a run result.
     *
     * @param error the error captured for a node; never null
     * @return ABORTED for {@link FlowInterruptedException} (user/system interruption),
     *         FAILURE for any other error
     */
    @Nonnull
    public static BlueRun.BlueRunResult getStatus(@Nonnull Throwable error) {
        if (error instanceof FlowInterruptedException) {
            return BlueRun.BlueRunResult.ABORTED;
        } else {
            return BlueRun.BlueRunResult.FAILURE;
        }
    }

    /**
     * Returns the node's display name, preferring the parallel-branch name from a
     * {@link ThreadNameAction} when one is present.
     */
    @Nonnull
    public static String getDisplayName(@Nonnull FlowNode node) {
        ThreadNameAction threadNameAction = node.getAction(ThreadNameAction.class);
        return threadNameAction != null
            ? threadNameAction.getThreadName()
            : node.getDisplayName();
    }

    /**
     * Determines whether the node represents a (non-synthetic) stage. A stage carries either a
     * {@link StageAction}, or a {@link LabelAction} without a {@link ThreadNameAction}
     * (the latter combination distinguishes a labelled stage from a parallel branch).
     */
    public static boolean isStage(FlowNode node) {
        return node != null
            && ((node.getAction(StageAction.class) != null && !isSyntheticStage(node))
                || (node.getAction(LabelAction.class) != null && node.getAction(ThreadNameAction.class) == null));
    }

    /** @return true if the node carries a {@link SyntheticStage} tag (declarative pre/post stage). */
    public static boolean isSyntheticStage(@Nullable FlowNode node) {
        return node != null && getSyntheticStage(node) != null;
    }

    /**
     * Finds the {@link TagsAction} that marks this node as a synthetic stage.
     *
     * @param node a possibly null {@link FlowNode}
     * @return the tag action carrying {@link SyntheticStage#TAG_NAME}, or null if absent
     */
    @CheckForNull
    public static TagsAction getSyntheticStage(@Nullable FlowNode node) {
        if (node != null) {
            for (Action action : node.getActions()) {
                if (action instanceof TagsAction
                    && ((TagsAction) action).getTagValue(SyntheticStage.TAG_NAME) != null) {
                    return (TagsAction) action;
                }
            }
        }
        return null;
    }

    /** @return true if the node is a synthetic stage that runs after the user-defined stages. */
    public static boolean isPostSyntheticStage(@Nullable FlowNode node) {
        return hasSyntheticTagValue(node, SyntheticStage.getPost());
    }

    /** @return true if the node is a synthetic stage that runs before the user-defined stages. */
    public static boolean isPreSyntheticStage(@Nullable FlowNode node) {
        return hasSyntheticTagValue(node, SyntheticStage.getPre());
    }

    /** Shared check for pre/post synthetic stage tags. */
    private static boolean hasSyntheticTagValue(@Nullable FlowNode node, String expected) {
        if (node == null) {
            return false;
        }
        TagsAction tagsAction = getSyntheticStage(node);
        if (tagsAction == null) {
            return false;
        }
        String value = tagsAction.getTagValue(SyntheticStage.TAG_NAME);
        return value != null && value.equals(expected);
    }

    /**
     * Determines whether a declarative stage was skipped — because its `when` condition was
     * false, an earlier stage failed, or the build became unstable.
     */
    public static boolean isSkippedStage(@Nullable FlowNode node) {
        if (node == null) {
            return false;
        }
        for (Action action : node.getActions()) {
            if (action instanceof TagsAction
                && ((TagsAction) action).getTagValue(StageStatus.TAG_NAME) != null) {
                // Only the first StageStatus tag is consulted, matching historical behavior.
                TagsAction tagsAction = (TagsAction) action;
                String value = tagsAction.getTagValue(StageStatus.TAG_NAME);
                return value != null
                    && (value.equals(StageStatus.getSkippedForConditional())
                        || value.equals(StageStatus.getSkippedForFailure())
                        || value.equals(StageStatus.getSkippedForUnstable()));
            }
        }
        return false;
    }

    /** @return true if the node is a parallel branch start (label plus a branch/thread name). */
    public static boolean isParallelBranch(@Nullable FlowNode node) {
        return node != null
            && node.getAction(LabelAction.class) != null
            && node.getAction(ThreadNameAction.class) != null;
    }

    /**
     * Gives cause of block for declarative style plugin where agent (node block) is declared inside a stage.
     * <pre>
     * pipeline {
     *     agent none
     *     stages {
     *         stage ('first') {
     *             agent {
     *                 label 'first'
     *             }
     *             steps{
     *                 sh 'echo "from first"'
     *             }
     *         }
     *     }
     * }
     * </pre>
     *
     * @param stage stage's {@link FlowNode}
     * @param nodeBlock agent or node block's {@link FlowNode}
     * @return cause of block if present, null otherwise
     */
    public static @CheckForNull String getCauseOfBlockage(@Nonnull FlowNode stage, @Nullable FlowNode nodeBlock) {
        if (nodeBlock != null) {
            // Check and see if this node block is inside this stage
            for (FlowNode p : nodeBlock.getParents()) {
                if (p.equals(stage)) {
                    Queue.Item item = QueueItemAction.getQueueItem(nodeBlock);
                    if (item != null) {
                        CauseOfBlockage causeOfBlockage = item.getCauseOfBlockage();
                        String cause = null;
                        if (causeOfBlockage != null) {
                            cause = causeOfBlockage.getShortDescription();
                            if (cause == null) {
                                // Fall back to the task-level cause when the item-level
                                // cause has no description.
                                causeOfBlockage = item.task.getCauseOfBlockage();
                                if (causeOfBlockage != null) {
                                    return causeOfBlockage.getShortDescription();
                                }
                            }
                        }
                        return cause;
                    }
                }
            }
        }
        return null;
    }

    /** True when the node has log output attached (a {@link LogAction}). */
    public static final Predicate<FlowNode> isLoggable = input -> {
        if (input == null) {
            return false;
        }
        return input.getAction(LogAction.class) != null;
    };

    /**
     * Determines whether the given step is currently paused waiting on an {@code input} step.
     *
     * @param step the step node to inspect
     * @param inputAction the run's {@link InputAction}, or null when no input is pending
     * @return true only when the step carries an active {@link PauseAction} whose cause is "Input"
     */
    public static boolean isPausedForInputStep(@Nonnull StepAtomNode step, @Nullable InputAction inputAction) {
        if (inputAction == null) {
            return false;
        }
        PauseAction pauseAction = step.getAction(PauseAction.class);
        // Constant-first equals: PauseAction.getCause() may be null, which previously threw NPE.
        return pauseAction != null
            && pauseAction.isPaused()
            && "Input".equals(pauseAction.getCause());
    }

    /**
     * Determine if the given {@link FlowNode} is the initial {@link StepStartNode} for an {@link ExecutorStep}.
     *
     * @param node a possibly null {@link FlowNode}
     * @return true if {@code node} is the non-body start of the agent execution.
     */
    public static boolean isAgentStart(@Nullable FlowNode node) {
        if (node instanceof StepStartNode) {
            StepStartNode stepStartNode = (StepStartNode) node;
            StepDescriptor sd = stepStartNode.getDescriptor();
            // Previously the descriptor was fetched and null-checked twice; once is enough.
            return sd != null
                && ExecutorStep.DescriptorImpl.class.equals(sd.getClass())
                && !stepStartNode.isBody();
        }
        return false;
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.tests.tpch; import com.google.common.collect.ImmutableMap; import io.trino.Session; import io.trino.plugin.tpch.ColumnNaming; import io.trino.plugin.tpch.TpchConnectorFactory; import io.trino.testing.LocalQueryRunner; import io.trino.testing.statistics.StatisticsAssertion; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import static io.trino.SystemSessionProperties.COLLECT_PLAN_STATISTICS_FOR_ALL_QUERIES; import static io.trino.SystemSessionProperties.PREFER_PARTIAL_AGGREGATION; import static io.trino.plugin.tpch.TpchConnectorFactory.TPCH_COLUMN_NAMING_PROPERTY; import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME; import static io.trino.testing.TestingSession.testSessionBuilder; import static io.trino.testing.statistics.MetricComparisonStrategies.absoluteError; import static io.trino.testing.statistics.MetricComparisonStrategies.defaultTolerance; import static io.trino.testing.statistics.MetricComparisonStrategies.noError; import static io.trino.testing.statistics.MetricComparisonStrategies.relativeError; import static io.trino.testing.statistics.Metrics.OUTPUT_ROW_COUNT; import static io.trino.testing.statistics.Metrics.distinctValuesCount; import static io.trino.testing.statistics.Metrics.highValue; import static io.trino.testing.statistics.Metrics.lowValue; import static io.trino.testing.statistics.Metrics.nullsFraction; 
/**
 * Checks cost-based-optimizer statistics estimates against actual values for TPCH
 * queries run on a single-node {@link LocalQueryRunner} over the {@code tiny} schema.
 * Each test asserts that estimated row counts / column statistics fall within the
 * given tolerance of the measured values.
 */
public class TestTpchLocalStats
{
    private StatisticsAssertion statisticsAssertion;

    @BeforeClass
    public void setUp()
    {
        Session defaultSession = testSessionBuilder()
                .setCatalog("tpch")
                .setSchema(TINY_SCHEMA_NAME)
                // We are not able to calculate stats for PARTIAL aggregations
                .setSystemProperty(PREFER_PARTIAL_AGGREGATION, "false")
                // Stats for non-EXPLAIN queries are not collected by default
                .setSystemProperty(COLLECT_PLAN_STATISTICS_FOR_ALL_QUERIES, "true")
                .build();
        LocalQueryRunner queryRunner = LocalQueryRunner.create(defaultSession);
        queryRunner.createCatalog(
                "tpch",
                new TpchConnectorFactory(1),
                ImmutableMap.of(TPCH_COLUMN_NAMING_PROPERTY, ColumnNaming.STANDARD.name()));
        statisticsAssertion = new StatisticsAssertion(queryRunner);
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
    {
        statisticsAssertion.close();
        // Drop the reference so the closed runner cannot be reused by a later test.
        statisticsAssertion = null;
    }

    // Plain table scan: estimates should match the connector-provided statistics exactly.
    @Test
    public void testTableScanStats()
    {
        statisticsAssertion.check("SELECT * FROM nation",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("n_regionkey")
                        .verifyExactColumnStatistics("n_name"));
    }

    // Range predicates on a DATE column, including AND / OR / NOT combinations.
    @Test
    public void testDateComparisons()
    {
        statisticsAssertion.check("SELECT * FROM orders WHERE o_orderdate >= DATE '1993-10-01'",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, defaultTolerance()));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_orderdate < DATE '1993-10-01' + INTERVAL '3' MONTH",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, defaultTolerance()));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_orderdate >= DATE '1993-10-01' AND o_orderdate < DATE '1993-10-01' + INTERVAL '3' MONTH",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, defaultTolerance()));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_orderdate >= DATE '1993-10-01' OR o_orderdate < DATE '1993-10-01' + INTERVAL '3' MONTH",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, defaultTolerance()));
        statisticsAssertion.check("SELECT * FROM orders WHERE NOT (o_orderdate >= DATE '1993-10-01' AND o_orderdate < DATE '1993-10-01' + INTERVAL '3' MONTH)",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, defaultTolerance()));
    }

    @Test
    public void testLimit()
    {
        // TODO merge with TestTpchDistributedStats.testLimit once that class tests new calculator
        statisticsAssertion.check("SELECT * FROM nation LIMIT 10",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, noError()));
    }

    @Test
    public void testEnforceSingleRow()
    {
        // TODO merge with TestTpchDistributedStats.testEnforceSingleRow once that class tests new calculator
        statisticsAssertion.check("SELECT (SELECT n_regionkey FROM nation WHERE n_name = 'GERMANY') AS sub",
                checks -> checks
                        // TODO .estimate(distinctValuesCount("sub"), defaultTolerance())
                        .estimate(OUTPUT_ROW_COUNT, noError()));
    }

    // Equality predicates on varchar columns, including a constant-vs-constant comparison.
    @Test
    public void testVarcharComparisons()
    {
        statisticsAssertion.check("SELECT * FROM orders WHERE o_comment = 'requests above the furiously even instructions use alw'",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, defaultTolerance()));
        statisticsAssertion.check("SELECT * FROM orders WHERE 'this is always ...' = '... false'",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, noError()));
    }

    @Test
    public void testInnerJoinStats()
    {
        // cross join
        statisticsAssertion.check("SELECT * FROM supplier, nation",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("s_suppkey"));
        statisticsAssertion.check("SELECT * FROM supplier, nation WHERE n_nationkey <= 12",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyColumnStatistics("n_nationkey", relativeError(0.10))
                        .verifyExactColumnStatistics("s_suppkey"));

        // simple equi joins
        statisticsAssertion.check("SELECT * FROM supplier, nation WHERE s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("s_suppkey"));
        statisticsAssertion.check("SELECT * FROM supplier, nation WHERE s_nationkey = n_nationkey AND n_nationkey <= 12",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.15))
                        .verifyColumnStatistics("s_nationkey", relativeError(0.15))
                        .verifyColumnStatistics("n_nationkey", relativeError(0.15)));

        // simple equi join, different ranges
        statisticsAssertion.check("SELECT n1.n_nationkey FROM nation n1, nation n2 WHERE n1.n_nationkey + 1 = n2.n_nationkey - 1 AND n1.n_nationkey > 5 AND n2.n_nationkey < 20",
                // Join is over expressions so that predicate push down doesn't unify ranges of n_nationkey coming from n1 and n2. This, however, makes symbols
                // stats inaccurate (rules can't update them), so we don't verify them.
                checks -> checks.estimate(OUTPUT_ROW_COUNT, absoluteError(8)));

        // two joins on different keys
        statisticsAssertion.check("SELECT * FROM nation, supplier, partsupp WHERE n_nationkey = s_nationkey AND s_suppkey = ps_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("ps_partkey")
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("n_name"));
        statisticsAssertion.check("SELECT * FROM nation, supplier, partsupp WHERE n_nationkey = s_nationkey AND s_suppkey = ps_suppkey AND n_nationkey <= 12",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.15))
                        .verifyColumnStatistics("ps_partkey", relativeError(0.15))
                        .verifyColumnStatistics("n_nationkey", relativeError(0.15))
                        .verifyColumnStatistics("s_nationkey", relativeError(0.15)));

        // join with two keys
        statisticsAssertion.check("SELECT * FROM partsupp, lineitem WHERE ps_partkey = l_partkey AND ps_suppkey = l_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(4.0))
                        .verifyExactColumnStatistics("ps_partkey")
                        .verifyExactColumnStatistics("l_partkey")
                        .verifyExactColumnStatistics("ps_suppkey")
                        .verifyExactColumnStatistics("l_suppkey")
                        .verifyExactColumnStatistics("l_orderkey"));
    }

    @Test
    public void testLeftJoinStats()
    {
        // non equi predicates
        statisticsAssertion.check("SELECT * FROM supplier LEFT JOIN nation ON true",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("s_suppkey"));
        statisticsAssertion.check("SELECT * FROM supplier LEFT JOIN nation ON false",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("s_suppkey"));

        // simple equi join
        statisticsAssertion.check("SELECT * FROM supplier LEFT JOIN nation ON s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("n_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("s_suppkey", absoluteError(0.40)));
        statisticsAssertion.check("SELECT * FROM supplier LEFT JOIN nation ON s_nationkey = n_nationkey AND n_nationkey <= 12",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("n_nationkey", relativeError(0.40))
                        .verifyColumnStatistics("s_suppkey", absoluteError(0.40)));
        statisticsAssertion.check("SELECT * FROM (SELECT * FROM supplier WHERE s_nationkey <= 12) LEFT JOIN nation ON s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(2.0))
                        .verifyColumnStatistics("n_nationkey", absoluteError(2.0)));

        // join with two keys
        statisticsAssertion.check("SELECT * FROM partsupp LEFT JOIN lineitem ON ps_partkey = l_partkey AND ps_suppkey = l_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(4.0))
                        .verifyExactColumnStatistics("ps_partkey")
                        .verifyColumnStatistics("l_partkey", absoluteError(6.0))
                        .verifyExactColumnStatistics("ps_suppkey")
                        .verifyColumnStatistics("l_suppkey", absoluteError(6.0))
                        .verifyColumnStatistics("l_orderkey", absoluteError(6.0)));

        // simple non-equi join
        statisticsAssertion.check("SELECT * FROM partsupp LEFT JOIN lineitem ON ps_partkey = l_partkey AND ps_suppkey < l_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(4.0))
                        .verifyExactColumnStatistics("ps_partkey")
                        .verifyColumnStatistics("l_partkey", relativeError(0.10))
                        .verifyExactColumnStatistics("ps_suppkey")
                        .verifyColumnStatistics("l_suppkey", relativeError(1.0))
                        .verifyColumnStatistics("l_orderkey", relativeError(0.10)));
    }

    @Test
    public void testRightJoinStats()
    {
        // non equi predicates
        statisticsAssertion.check("SELECT * FROM nation RIGHT JOIN supplier ON true",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("s_suppkey"));
        statisticsAssertion.check("SELECT * FROM nation RIGHT JOIN supplier ON false",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("s_suppkey"));

        // simple equi join
        statisticsAssertion.check("SELECT * FROM nation RIGHT JOIN supplier ON s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("n_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("s_suppkey", absoluteError(0.40)));
        statisticsAssertion.check("SELECT * FROM nation RIGHT JOIN supplier ON s_nationkey = n_nationkey AND n_nationkey <= 12",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("n_nationkey", relativeError(0.40))
                        .verifyColumnStatistics("s_suppkey", absoluteError(0.40)));
        statisticsAssertion.check("SELECT * FROM nation RIGHT JOIN (SELECT * FROM supplier WHERE s_nationkey <= 12) ON s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(2.0))
                        .verifyColumnStatistics("n_nationkey", absoluteError(2.0)));

        // join with two keys
        statisticsAssertion.check("SELECT * FROM lineitem RIGHT JOIN partsupp ON ps_partkey = l_partkey AND ps_suppkey = l_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(4.0))
                        .verifyExactColumnStatistics("ps_partkey")
                        .verifyColumnStatistics("l_partkey", absoluteError(6.0))
                        .verifyExactColumnStatistics("ps_suppkey")
                        .verifyColumnStatistics("l_suppkey", absoluteError(6.0))
                        .verifyColumnStatistics("l_orderkey", absoluteError(6.0)));

        // simple non-equi join
        statisticsAssertion.check("SELECT * FROM lineitem RIGHT JOIN partsupp ON ps_partkey = l_partkey AND ps_suppkey < l_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(4.0))
                        .verifyExactColumnStatistics("ps_partkey")
                        .verifyColumnStatistics("l_partkey", relativeError(0.10))
                        .verifyExactColumnStatistics("ps_suppkey")
                        .verifyColumnStatistics("l_suppkey", relativeError(1.0))
                        .verifyColumnStatistics("l_orderkey", relativeError(0.10)));
    }

    @Test
    public void testFullJoinStats()
    {
        // non equi predicates
        statisticsAssertion.check("SELECT * FROM supplier FULL JOIN nation ON true",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyExactColumnStatistics("s_nationkey")
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("s_suppkey"));

        // simple equi join
        statisticsAssertion.check("SELECT * FROM nation FULL JOIN supplier ON s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("n_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("s_suppkey", absoluteError(0.40)));
        statisticsAssertion.check("SELECT * FROM (SELECT * FROM nation WHERE n_nationkey <= 12) FULL JOIN supplier ON s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", absoluteError(0.40))
                        .verifyColumnStatistics("n_nationkey", relativeError(0.40))
                        .verifyColumnStatistics("s_suppkey", absoluteError(0.40)));
        statisticsAssertion.check("SELECT * FROM nation FULL JOIN (SELECT * FROM supplier WHERE s_nationkey <= 12) ON s_nationkey = n_nationkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.70))
                        .verifyColumnStatistics("s_nationkey", relativeError(0.40))
                        .verifyColumnStatistics("n_nationkey", relativeError(0.40)));

        // join with two keys
        statisticsAssertion.check("SELECT * FROM lineitem FULL JOIN partsupp ON ps_partkey = l_partkey AND ps_suppkey = l_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(4.0))
                        .verifyColumnStatistics("ps_partkey", absoluteError(6.0))
                        .verifyColumnStatistics("l_partkey", absoluteError(6.0))
                        .verifyColumnStatistics("ps_suppkey", absoluteError(6.0))
                        .verifyColumnStatistics("l_suppkey", absoluteError(6.0))
                        .verifyColumnStatistics("l_orderkey", absoluteError(6.0)));

        // simple non-equi join
        statisticsAssertion.check("SELECT * FROM lineitem FULL JOIN partsupp ON ps_partkey = l_partkey AND ps_suppkey < l_suppkey",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(4.0))
                        .verifyColumnStatistics("ps_partkey", relativeError(0.10))
                        .verifyColumnStatistics("l_partkey", relativeError(0.10))
                        .verifyColumnStatistics("ps_suppkey", relativeError(0.10))
                        .verifyColumnStatistics("l_suppkey", relativeError(1.0))
                        .verifyColumnStatistics("l_orderkey", relativeError(0.10)));
    }

    // Global and grouped aggregations; no statistics are expected for the aggregate output column.
    @Test
    public void testAggregation()
    {
        statisticsAssertion.check("SELECT count() AS count FROM nation",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyNoColumnStatistics("count"));
        statisticsAssertion.check("SELECT n_name, count() AS count FROM nation GROUP BY n_name",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyNoColumnStatistics("count")
                        .verifyExactColumnStatistics("n_name"));
        statisticsAssertion.check("SELECT n_name, count() AS count FROM nation, region GROUP BY n_name",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .verifyNoColumnStatistics("count")
                        .verifyExactColumnStatistics("n_name"));
    }

    @Test
    public void testUnion()
    {
        statisticsAssertion.check("SELECT * FROM nation UNION SELECT * FROM nation",
                // real count is 25, estimation cannot know all rows are duplicate.
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(1, 1))
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("n_regionkey"));
        statisticsAssertion.check("SELECT * FROM nation UNION ALL SELECT * FROM nation",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, noError())
                        .verifyExactColumnStatistics("n_nationkey")
                        .verifyExactColumnStatistics("n_regionkey"));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_custkey < 755 OR o_orderstatus = '0' UNION SELECT * FROM orders WHERE o_custkey > 755 OR o_orderstatus = 'F'",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(.3, .35))
                        .estimate(distinctValuesCount("o_orderkey"), relativeError(-.4, -.3))
                        .estimate(nullsFraction("o_orderkey"), relativeError(.3, .35))
                        .estimate(lowValue("o_orderkey"), noError())
                        .estimate(highValue("o_orderkey"), noError())
                        .estimate(distinctValuesCount("o_custkey"), relativeError(0.5))
                        .estimate(nullsFraction("o_custkey"), relativeError(.45, .55))
                        .estimate(lowValue("o_custkey"), noError())
                        .estimate(highValue("o_custkey"), noError())
                        .estimate(distinctValuesCount("o_orderstatus"), relativeError(0.5))
                        .estimate(nullsFraction("o_orderstatus"), noError())
                        .estimate(lowValue("o_orderstatus"), noError())
                        .estimate(highValue("o_orderstatus"), noError()));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_custkey < 755 OR o_orderstatus = '0' UNION ALL SELECT * FROM orders WHERE o_custkey > 755 OR o_orderstatus = 'F'",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .estimate(distinctValuesCount("o_orderkey"), relativeError(-.4, -.3))
                        .estimate(nullsFraction("o_orderkey"), relativeError(.3, .35))
                        .estimate(lowValue("o_orderkey"), noError())
                        .estimate(highValue("o_orderkey"), noError())
                        .estimate(distinctValuesCount("o_custkey"), relativeError(0.5))
                        .estimate(nullsFraction("o_custkey"), relativeError(.45, .55))
                        .estimate(lowValue("o_custkey"), noError())
                        .estimate(highValue("o_custkey"), noError())
                        .estimate(distinctValuesCount("o_orderstatus"), relativeError(0.5))
                        .estimate(nullsFraction("o_orderstatus"), noError())
                        .estimate(lowValue("o_orderstatus"), noError())
                        .estimate(highValue("o_orderstatus"), noError()));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_custkey < 900 UNION SELECT * FROM orders WHERE o_custkey > 600",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(.15, .25))
                        .estimate(distinctValuesCount("o_orderkey"), relativeError(-.4, -.3))
                        .estimate(nullsFraction("o_orderkey"), relativeError(.15, .25))
                        .estimate(lowValue("o_orderkey"), noError())
                        .estimate(highValue("o_orderkey"), noError())
                        .estimate(distinctValuesCount("o_custkey"), relativeError(-.4, -.3))
                        .estimate(nullsFraction("o_custkey"), relativeError(.15, .25))
                        .estimate(lowValue("o_custkey"), noError())
                        .estimate(highValue("o_custkey"), noError()));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_custkey < 900 UNION ALL SELECT * FROM orders WHERE o_custkey > 600",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, defaultTolerance())
                        .estimate(distinctValuesCount("o_orderkey"), relativeError(-.4, -.3))
                        .estimate(nullsFraction("o_orderkey"), relativeError(-.4, -.3))
                        .estimate(lowValue("o_orderkey"), noError())
                        .estimate(highValue("o_orderkey"), noError())
                        .estimate(distinctValuesCount("o_custkey"), relativeError(-.4, -.3))
                        .estimate(nullsFraction("o_custkey"), relativeError(.15, .25))
                        .estimate(lowValue("o_custkey"), noError())
                        .estimate(highValue("o_custkey"), noError()));
    }

    // INTERSECT is not estimated by the calculator, so no estimate is expected.
    @Test
    public void testIntersect()
    {
        statisticsAssertion.check("SELECT * FROM nation INTERSECT SELECT * FROM nation",
                checks -> checks.noEstimate(OUTPUT_ROW_COUNT));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_custkey < 900 INTERSECT SELECT * FROM orders WHERE o_custkey > 600",
                checks -> checks.noEstimate(OUTPUT_ROW_COUNT));
    }

    // EXCEPT is not estimated by the calculator, so no estimate is expected.
    @Test
    public void testExcept()
    {
        statisticsAssertion.check("SELECT * FROM nation EXCEPT SELECT * FROM nation",
                checks -> checks.noEstimate(OUTPUT_ROW_COUNT));
        statisticsAssertion.check("SELECT * FROM orders WHERE o_custkey < 900 EXCEPT SELECT * FROM orders WHERE o_custkey > 600",
                checks -> checks.noEstimate(OUTPUT_ROW_COUNT));
    }

    @Test
    public void testInSubquery()
    {
        statisticsAssertion.check("select * from lineitem where l_orderkey in (select o_orderkey from orders where o_orderdate >= DATE '1993-10-01')",
                checks -> checks.estimate(OUTPUT_ROW_COUNT, defaultTolerance()));
    }

    @Test
    public void testNotInSubquery()
    {
        statisticsAssertion.check("select * from lineitem where l_orderkey not in (select o_orderkey from orders where o_orderdate >= DATE '1993-10-01')",
                // we allow overestimating here. That is because safety heuristic for antijoin which enforces that not more that 50%
                // of values are filtered out.
                checks -> checks.estimate(OUTPUT_ROW_COUNT, relativeError(0.0, 1.0)));
    }

    @Test
    public void testCorrelatedSubquery()
    {
        statisticsAssertion.check("SELECT (SELECT count(*) FROM nation n1 WHERE n1.n_nationkey = n2.n_nationkey AND n1.n_regionkey > n2.n_regionkey) FROM nation n2",
                checks -> checks
                        .estimate(OUTPUT_ROW_COUNT, relativeError(0.5)));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.distributed.impl; import java.io.File; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.Collections; import java.util.EnumSet; import java.util.Map; import java.util.TreeMap; import java.util.UUID; import java.util.function.Function; import com.vdurmont.semver4j.Semver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.distributed.api.Feature; import org.apache.cassandra.distributed.api.IInstanceConfig; import org.apache.cassandra.distributed.shared.NetworkTopology; import org.apache.cassandra.distributed.shared.Shared; import org.apache.cassandra.distributed.upgrade.UpgradeTestBase; import org.apache.cassandra.locator.InetAddressAndPort; import org.apache.cassandra.locator.SimpleSeedProvider; @Shared public class InstanceConfig implements IInstanceConfig { private static final Object NULL = new Object(); private static final Logger logger = LoggerFactory.getLogger(InstanceConfig.class); public final int num; public int num() { return num; } private final NetworkTopology networkTopology; public NetworkTopology networkTopology() { return networkTopology; } public final UUID hostId; public UUID 
hostId() { return hostId; } private final Map<String, Object> params = new TreeMap<>(); private final Map<String, Object> dtestParams = new TreeMap<>(); private final EnumSet featureFlags; private volatile InetAddressAndPort broadcastAddressAndPort; private InstanceConfig(int num, NetworkTopology networkTopology, String broadcast_address, String listen_address, String broadcast_rpc_address, String rpc_address, String seedIp, int seedPort, String saved_caches_directory, String[] data_file_directories, String commitlog_directory, String hints_directory, String cdc_raw_directory, String initial_token, int storage_port, int native_transport_port) { this.num = num; this.networkTopology = networkTopology; this.hostId = java.util.UUID.randomUUID(); this .set("num_tokens", 1) .set("broadcast_address", broadcast_address) .set("listen_address", listen_address) .set("broadcast_rpc_address", broadcast_rpc_address) .set("rpc_address", rpc_address) .set("saved_caches_directory", saved_caches_directory) .set("data_file_directories", data_file_directories) .set("commitlog_directory", commitlog_directory) .set("hints_directory", hints_directory) .set("cdc_raw_directory", cdc_raw_directory) .set("initial_token", initial_token) .set("partitioner", "org.apache.cassandra.dht.Murmur3Partitioner") .set("start_native_transport", true) .set("concurrent_writes", 2) .set("concurrent_counter_writes", 2) .set("concurrent_materialized_view_writes", 2) .set("concurrent_reads", 2) .set("memtable_flush_writers", 1) .set("concurrent_compactors", 1) .set("memtable_heap_space_in_mb", 10) .set("commitlog_sync", "batch") .set("storage_port", storage_port) .set("native_transport_port", native_transport_port) .set("endpoint_snitch", DistributedTestSnitch.class.getName()) .set("seed_provider", new ParameterizedClass(SimpleSeedProvider.class.getName(), Collections.singletonMap("seeds", seedIp + ":" + seedPort))) // required settings for dtest functionality .set("diagnostic_events_enabled", true) 
.set("auto_bootstrap", false) // capacities that are based on `totalMemory` that should be fixed size .set("index_summary_capacity_in_mb", 50l) .set("counter_cache_size_in_mb", 50l) .set("key_cache_size_in_mb", 50l) // legacy parameters .forceSet("commitlog_sync_batch_window_in_ms", 1.0); this.featureFlags = EnumSet.noneOf(Feature.class); } private InstanceConfig(InstanceConfig copy) { this.num = copy.num; this.networkTopology = new NetworkTopology(copy.networkTopology); this.params.putAll(copy.params); this.dtestParams.putAll(copy.dtestParams); this.hostId = copy.hostId; this.featureFlags = copy.featureFlags; this.broadcastAddressAndPort = copy.broadcastAddressAndPort; } @Override public InetSocketAddress broadcastAddress() { return DistributedTestSnitch.fromCassandraInetAddressAndPort(getBroadcastAddressAndPort()); } public void unsetBroadcastAddressAndPort() { broadcastAddressAndPort = null; } protected InetAddressAndPort getBroadcastAddressAndPort() { if (broadcastAddressAndPort == null) { broadcastAddressAndPort = getAddressAndPortFromConfig("broadcast_address", "storage_port"); } return broadcastAddressAndPort; } private InetAddressAndPort getAddressAndPortFromConfig(String addressProp, String portProp) { try { return InetAddressAndPort.getByNameOverrideDefaults(getString(addressProp), getInt(portProp)); } catch (UnknownHostException e) { throw new IllegalStateException(e); } } public String localRack() { return networkTopology().localRack(broadcastAddress()); } public String localDatacenter() { return networkTopology().localDC(broadcastAddress()); } public InstanceConfig with(Feature featureFlag) { featureFlags.add(featureFlag); return this; } public InstanceConfig with(Feature... 
flags) { for (Feature flag : flags) featureFlags.add(flag); return this; } public boolean has(Feature featureFlag) { return featureFlags.contains(featureFlag); } public InstanceConfig set(String fieldName, Object value) { if (value == null) value = NULL; getParams(fieldName).put(fieldName, value); return this; } private InstanceConfig forceSet(String fieldName, Object value) { if (value == null) value = NULL; // test value getParams(fieldName).put(fieldName, value); return this; } private Map<String, Object> getParams(String fieldName) { Map<String, Object> map = params; if (fieldName.startsWith("dtest")) map = dtestParams; return map; } public void propagate(Object writeToConfig, Map<Class<?>, Function<Object, Object>> mapping) { throw new IllegalStateException("In-JVM dtests no longer support propagate"); } public void validate() { if (((int) get("num_tokens")) > 1) throw new IllegalArgumentException("In-JVM dtests do not support vnodes as of now."); } public Object get(String name) { return getParams(name).get(name); } public int getInt(String name) { return (Integer) get(name); } public String getString(String name) { return (String) get(name); } public Map<String, Object> getParams() { return params; } public static InstanceConfig generate(int nodeNum, INodeProvisionStrategy provisionStrategy, NetworkTopology networkTopology, File root, String token, int datadirCount) { return new InstanceConfig(nodeNum, networkTopology, provisionStrategy.ipAddress(nodeNum), provisionStrategy.ipAddress(nodeNum), provisionStrategy.ipAddress(nodeNum), provisionStrategy.ipAddress(nodeNum), provisionStrategy.seedIp(), provisionStrategy.seedPort(), String.format("%s/node%d/saved_caches", root, nodeNum), datadirs(datadirCount, root, nodeNum), String.format("%s/node%d/commitlog", root, nodeNum), String.format("%s/node%d/hints", root, nodeNum), String.format("%s/node%d/cdc", root, nodeNum), token, provisionStrategy.storagePort(nodeNum), provisionStrategy.nativeTransportPort(nodeNum)); 
} private static String[] datadirs(int datadirCount, File root, int nodeNum) { String datadirFormat = String.format("%s/node%d/data%%d", root.getPath(), nodeNum); String [] datadirs = new String[datadirCount]; for (int i = 0; i < datadirs.length; i++) datadirs[i] = String.format(datadirFormat, i); return datadirs; } public InstanceConfig forVersion(Semver version) { // Versions before 4.0 need to set 'seed_provider' without specifying the port if (UpgradeTestBase.v40.compareTo(version) < 0) return this; else return new InstanceConfig(this) .set("seed_provider", new ParameterizedClass(SimpleSeedProvider.class.getName(), Collections.singletonMap("seeds", "127.0.0.1"))); } public String toString() { return params.toString(); } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto

package com.google.container.v1;

/**
 * Represents the Maintenance exclusion option.
 *
 * Protobuf type {@code google.container.v1.MaintenanceExclusionOptions}
 */
public final class MaintenanceExclusionOptions extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.container.v1.MaintenanceExclusionOptions)
    MaintenanceExclusionOptionsOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use MaintenanceExclusionOptions.newBuilder() to construct.
  private MaintenanceExclusionOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: scope starts at 0 (NO_UPGRADES).
  private MaintenanceExclusionOptions() {
    scope_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MaintenanceExclusionOptions();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor used by PARSER below: reads field 1 (scope)
  // as a raw varint and preserves any unrecognized fields in unknownFields.
  private MaintenanceExclusionOptions(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 8: // field 1, wire type 0 (varint): the scope enum
            {
              int rawValue = input.readEnum();
              scope_ = rawValue;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always publish whatever was parsed, even on failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_MaintenanceExclusionOptions_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_MaintenanceExclusionOptions_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1.MaintenanceExclusionOptions.class,
            com.google.container.v1.MaintenanceExclusionOptions.Builder.class);
  }

  /**
   * Scope of exclusion.
   *
   * Protobuf enum {@code google.container.v1.MaintenanceExclusionOptions.Scope}
   */
  public enum Scope implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * NO_UPGRADES excludes all upgrades, including patch upgrades and minor
     * upgrades across control planes and nodes. This is the default exclusion
     * behavior.
     *
     * <code>NO_UPGRADES = 0;</code>
     */
    NO_UPGRADES(0),
    /**
     * NO_MINOR_UPGRADES excludes all minor upgrades for the cluster, only
     * patches are allowed.
     *
     * <code>NO_MINOR_UPGRADES = 1;</code>
     */
    NO_MINOR_UPGRADES(1),
    /**
     * NO_MINOR_OR_NODE_UPGRADES excludes all minor upgrades for the cluster,
     * and also exclude all node pool upgrades. Only control
     * plane patches are allowed.
     *
     * <code>NO_MINOR_OR_NODE_UPGRADES = 2;</code>
     */
    NO_MINOR_OR_NODE_UPGRADES(2),
    // Sentinel for wire values not known to this generated code version.
    UNRECOGNIZED(-1),
    ;

    /** Numeric wire value of {@code NO_UPGRADES = 0}. */
    public static final int NO_UPGRADES_VALUE = 0;
    /** Numeric wire value of {@code NO_MINOR_UPGRADES = 1}. */
    public static final int NO_MINOR_UPGRADES_VALUE = 1;
    /** Numeric wire value of {@code NO_MINOR_OR_NODE_UPGRADES = 2}. */
    public static final int NO_MINOR_OR_NODE_UPGRADES_VALUE = 2;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Scope valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or null if unknown.
     */
    public static Scope forNumber(int value) {
      switch (value) {
        case 0:
          return NO_UPGRADES;
        case 1:
          return NO_MINOR_UPGRADES;
        case 2:
          return NO_MINOR_OR_NODE_UPGRADES;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<Scope> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<Scope> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Scope>() {
          public Scope findValueByNumber(int number) {
            return Scope.forNumber(number);
          }
        };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.container.v1.MaintenanceExclusionOptions.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    private static final Scope[] VALUES = values();

    public static Scope valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private Scope(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.container.v1.MaintenanceExclusionOptions.Scope)
  }

  public static final int SCOPE_FIELD_NUMBER = 1;
  // Stored as the raw wire value so unknown enum numbers round-trip unchanged.
  private int scope_;

  /**
   * Scope specifies the upgrade scope which upgrades are blocked by the exclusion.
   *
   * <code>.google.container.v1.MaintenanceExclusionOptions.Scope scope = 1;</code>
   *
   * @return The enum numeric value on the wire for scope.
   */
  @java.lang.Override
  public int getScopeValue() {
    return scope_;
  }

  /**
   * Scope specifies the upgrade scope which upgrades are blocked by the exclusion.
   *
   * <code>.google.container.v1.MaintenanceExclusionOptions.Scope scope = 1;</code>
   *
   * @return The scope, or UNRECOGNIZED if the stored wire value is unknown.
   */
  @java.lang.Override
  public com.google.container.v1.MaintenanceExclusionOptions.Scope getScope() {
    @SuppressWarnings("deprecation")
    com.google.container.v1.MaintenanceExclusionOptions.Scope result =
        com.google.container.v1.MaintenanceExclusionOptions.Scope.valueOf(scope_);
    return result == null
        ? com.google.container.v1.MaintenanceExclusionOptions.Scope.UNRECOGNIZED
        : result;
  }

  private byte memoizedIsInitialized = -1;

  // No required fields exist, so this always returns true; result is memoized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Field 1 is emitted only when it differs from the default (NO_UPGRADES).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (scope_
        != com.google.container.v1.MaintenanceExclusionOptions.Scope.NO_UPGRADES.getNumber()) {
      output.writeEnum(1, scope_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (scope_
        != com.google.container.v1.MaintenanceExclusionOptions.Scope.NO_UPGRADES.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, scope_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1.MaintenanceExclusionOptions)) {
      return super.equals(obj);
    }
    com.google.container.v1.MaintenanceExclusionOptions other =
        (com.google.container.v1.MaintenanceExclusionOptions) obj;

    if (scope_ != other.scope_) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SCOPE_FIELD_NUMBER;
    hash = (53 * hash) + scope_;
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard generated parseFrom overloads; all delegate to PARSER. ----

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.MaintenanceExclusionOptions parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.container.v1.MaintenanceExclusionOptions prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Represents the Maintenance exclusion option.
   *
   * Protobuf type {@code google.container.v1.MaintenanceExclusionOptions}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1.MaintenanceExclusionOptions)
      com.google.container.v1.MaintenanceExclusionOptionsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_MaintenanceExclusionOptions_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_MaintenanceExclusionOptions_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1.MaintenanceExclusionOptions.class,
              com.google.container.v1.MaintenanceExclusionOptions.Builder.class);
    }

    // Construct using com.google.container.v1.MaintenanceExclusionOptions.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      scope_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_MaintenanceExclusionOptions_descriptor;
    }

    @java.lang.Override
    public com.google.container.v1.MaintenanceExclusionOptions getDefaultInstanceForType() {
      return com.google.container.v1.MaintenanceExclusionOptions.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.container.v1.MaintenanceExclusionOptions build() {
      com.google.container.v1.MaintenanceExclusionOptions result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.container.v1.MaintenanceExclusionOptions buildPartial() {
      com.google.container.v1.MaintenanceExclusionOptions result =
          new com.google.container.v1.MaintenanceExclusionOptions(this);
      result.scope_ = scope_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1.MaintenanceExclusionOptions) {
        return mergeFrom((com.google.container.v1.MaintenanceExclusionOptions) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only non-default fields, matching proto3 merge semantics.
    public Builder mergeFrom(com.google.container.v1.MaintenanceExclusionOptions other) {
      if (other == com.google.container.v1.MaintenanceExclusionOptions.getDefaultInstance())
        return this;
      if (other.scope_ != 0) {
        setScopeValue(other.getScopeValue());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.container.v1.MaintenanceExclusionOptions parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.container.v1.MaintenanceExclusionOptions) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Even on failure, keep whatever was parsed before the error.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int scope_ = 0;

    /**
     * Scope specifies the upgrade scope which upgrades are blocked by the exclusion.
     *
     * <code>.google.container.v1.MaintenanceExclusionOptions.Scope scope = 1;</code>
     *
     * @return The enum numeric value on the wire for scope.
     */
    @java.lang.Override
    public int getScopeValue() {
      return scope_;
    }

    /**
     * Scope specifies the upgrade scope which upgrades are blocked by the exclusion.
     *
     * <code>.google.container.v1.MaintenanceExclusionOptions.Scope scope = 1;</code>
     *
     * @param value The enum numeric value on the wire for scope to set.
     * @return This builder for chaining.
     */
    public Builder setScopeValue(int value) {
      scope_ = value;
      onChanged();
      return this;
    }

    /**
     * Scope specifies the upgrade scope which upgrades are blocked by the exclusion.
     *
     * <code>.google.container.v1.MaintenanceExclusionOptions.Scope scope = 1;</code>
     *
     * @return The scope, or UNRECOGNIZED if the stored wire value is unknown.
     */
    @java.lang.Override
    public com.google.container.v1.MaintenanceExclusionOptions.Scope getScope() {
      @SuppressWarnings("deprecation")
      com.google.container.v1.MaintenanceExclusionOptions.Scope result =
          com.google.container.v1.MaintenanceExclusionOptions.Scope.valueOf(scope_);
      return result == null
          ? com.google.container.v1.MaintenanceExclusionOptions.Scope.UNRECOGNIZED
          : result;
    }

    /**
     * Scope specifies the upgrade scope which upgrades are blocked by the exclusion.
     *
     * <code>.google.container.v1.MaintenanceExclusionOptions.Scope scope = 1;</code>
     *
     * @param value The scope to set.
     * @return This builder for chaining.
     */
    public Builder setScope(com.google.container.v1.MaintenanceExclusionOptions.Scope value) {
      if (value == null) {
        throw new NullPointerException();
      }
      scope_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     * Resets scope to its default (NO_UPGRADES).
     *
     * <code>.google.container.v1.MaintenanceExclusionOptions.Scope scope = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearScope() {
      scope_ = 0;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.container.v1.MaintenanceExclusionOptions)
  }

  // @@protoc_insertion_point(class_scope:google.container.v1.MaintenanceExclusionOptions)
  private static final com.google.container.v1.MaintenanceExclusionOptions DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.container.v1.MaintenanceExclusionOptions();
  }

  public static com.google.container.v1.MaintenanceExclusionOptions getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser; delegates to the wire-format parsing constructor above.
  private static final com.google.protobuf.Parser<MaintenanceExclusionOptions> PARSER =
      new com.google.protobuf.AbstractParser<MaintenanceExclusionOptions>() {
        @java.lang.Override
        public MaintenanceExclusionOptions parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new MaintenanceExclusionOptions(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<MaintenanceExclusionOptions> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MaintenanceExclusionOptions> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.container.v1.MaintenanceExclusionOptions getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
package org.fcrepo.server.types.mtom.gen;

import javax.xml.bind.annotation.XmlRegistry;

/**
 * This object contains factory methods for each Java content interface and
 * Java element interface generated in the org.fcrepo.server.types.mtom.gen
 * package.
 *
 * <p>An ObjectFactory allows you to programmatically construct new instances
 * of the Java representation for XML content. The Java representation of XML
 * content can consist of schema derived interfaces and classes representing
 * the binding of schema type definitions, element declarations and model
 * groups. Factory methods for each of these are provided in this class.
 *
 * <p>NOTE(review): JAXB-generated registry — every method simply invokes the
 * corresponding no-arg constructor; keep in sync with the generated types.
 */
@XmlRegistry
public class ObjectFactory {

    /**
     * Create a new ObjectFactory that can be used to create new instances of
     * schema derived classes for package: org.fcrepo.server.types.mtom.gen
     */
    public ObjectFactory() {
    }

    /** Create an instance of {@link AddRelationship }. */
    public AddRelationship createAddRelationship() {
        return new AddRelationship();
    }

    /** Create an instance of {@link Validate }. */
    public Validate createValidate() {
        return new Validate();
    }

    /** Create an instance of {@link IngestResponse }. */
    public IngestResponse createIngestResponse() {
        return new IngestResponse();
    }

    /** Create an instance of {@link DescribeRepositoryResponse }. */
    public DescribeRepositoryResponse createDescribeRepositoryResponse() {
        return new DescribeRepositoryResponse();
    }

    /** Create an instance of {@link PurgeObject }. */
    public PurgeObject createPurgeObject() {
        return new PurgeObject();
    }

    /** Create an instance of {@link ModifyDatastreamByReferenceResponse }. */
    public ModifyDatastreamByReferenceResponse createModifyDatastreamByReferenceResponse() {
        return new ModifyDatastreamByReferenceResponse();
    }

    /** Create an instance of {@link GetObjectProfileResponse }. */
    public GetObjectProfileResponse createGetObjectProfileResponse() {
        return new GetObjectProfileResponse();
    }

    /** Create an instance of {@link GetObjectProfile }. */
    public GetObjectProfile createGetObjectProfile() {
        return new GetObjectProfile();
    }

    /** Create an instance of {@link ListDatastreamsResponse }. */
    public ListDatastreamsResponse createListDatastreamsResponse() {
        return new ListDatastreamsResponse();
    }

    /** Create an instance of {@link ModifyObject }. */
    public ModifyObject createModifyObject() {
        return new ModifyObject();
    }

    /** Create an instance of {@link GetNextPID }. */
    public GetNextPID createGetNextPID() {
        return new GetNextPID();
    }

    /** Create an instance of {@link CompareDatastreamChecksumResponse }. */
    public CompareDatastreamChecksumResponse createCompareDatastreamChecksumResponse() {
        return new CompareDatastreamChecksumResponse();
    }

    /** Create an instance of {@link GetDatastreamDisseminationResponse }. */
    public GetDatastreamDisseminationResponse createGetDatastreamDisseminationResponse() {
        return new GetDatastreamDisseminationResponse();
    }

    /** Create an instance of {@link GetDatastreamHistory }. */
    public GetDatastreamHistory createGetDatastreamHistory() {
        return new GetDatastreamHistory();
    }

    /** Create an instance of {@link ResumeFindObjectsResponse }. */
    public ResumeFindObjectsResponse createResumeFindObjectsResponse() {
        return new ResumeFindObjectsResponse();
    }

    /** Create an instance of {@link GetDissemination.Parameters }. */
    public GetDissemination.Parameters createGetDisseminationParameters() {
        return new GetDissemination.Parameters();
    }

    /** Create an instance of {@link GetNextPIDResponse }. */
    public GetNextPIDResponse createGetNextPIDResponse() {
        return new GetNextPIDResponse();
    }

    /** Create an instance of {@link CompareDatastreamChecksum }. */
    public CompareDatastreamChecksum createCompareDatastreamChecksum() {
        return new CompareDatastreamChecksum();
    }

    /** Create an instance of {@link Export }. */
    public Export createExport() {
        return new Export();
    }

    /** Create an instance of {@link GetDatastreamsResponse }. */
    public GetDatastreamsResponse createGetDatastreamsResponse() {
        return new GetDatastreamsResponse();
    }

    /** Create an instance of {@link GetDatastreamDissemination }. */
    public GetDatastreamDissemination createGetDatastreamDissemination() {
        return new GetDatastreamDissemination();
    }

    /** Create an instance of {@link ListMethodsResponse }. */
    public ListMethodsResponse createListMethodsResponse() {
        return new ListMethodsResponse();
    }

    /** Create an instance of {@link AddRelationshipResponse }. */
    public AddRelationshipResponse createAddRelationshipResponse() {
        return new AddRelationshipResponse();
    }

    /** Create an instance of {@link GetRelationships }. */
    public GetRelationships createGetRelationships() {
        return new GetRelationships();
    }

    /** Create an instance of {@link GetObjectXMLResponse }. */
    public GetObjectXMLResponse createGetObjectXMLResponse() {
        return new GetObjectXMLResponse();
    }

    /** Create an instance of {@link AddDatastream }. */
    public AddDatastream createAddDatastream() {
        return new AddDatastream();
    }

    /** Create an instance of {@link SetDatastreamVersionable }. */
    public SetDatastreamVersionable createSetDatastreamVersionable() {
        return new SetDatastreamVersionable();
    }

    /** Create an instance of {@link GetObjectHistoryResponse }. */
    public GetObjectHistoryResponse createGetObjectHistoryResponse() {
        return new GetObjectHistoryResponse();
    }

    /** Create an instance of {@link SetDatastreamStateResponse }. */
    public SetDatastreamStateResponse createSetDatastreamStateResponse() {
        return new SetDatastreamStateResponse();
    }

    /** Create an instance of {@link ModifyObjectResponse }. */
    public ModifyObjectResponse createModifyObjectResponse() {
        return new ModifyObjectResponse();
    }

    /** Create an instance of {@link ResumeFindObjects }. */
    public ResumeFindObjects createResumeFindObjects() {
        return new ResumeFindObjects();
    }

    /** Create an instance of {@link SetDatastreamVersionableResponse }. */
    public SetDatastreamVersionableResponse createSetDatastreamVersionableResponse() {
        return new SetDatastreamVersionableResponse();
    }

    /** Create an instance of {@link PurgeRelationshipResponse }. */
    public PurgeRelationshipResponse createPurgeRelationshipResponse() {
        return new PurgeRelationshipResponse();
    }

    /** Create an instance of {@link GetDatastreamResponse }. */
    public GetDatastreamResponse createGetDatastreamResponse() {
        return new GetDatastreamResponse();
    }

    /** Create an instance of {@link GetDatastreams }. */
    public GetDatastreams createGetDatastreams() {
        return new GetDatastreams();
    }

    /** Create an instance of {@link PurgeDatastream }. */
    public PurgeDatastream createPurgeDatastream() {
        return new PurgeDatastream();
    }

    /** Create an instance of {@link ExportResponse }. */
    public ExportResponse createExportResponse() {
        return new ExportResponse();
    }

    /** Create an instance of {@link GetDissemination }. */
    public GetDissemination createGetDissemination() {
        return new GetDissemination();
    }

    /** Create an instance of {@link AddDatastreamResponse }. */
    public AddDatastreamResponse createAddDatastreamResponse() {
        return new AddDatastreamResponse();
    }

    /** Create an instance of {@link SetDatastreamState }. */
    public SetDatastreamState createSetDatastreamState() {
        return new SetDatastreamState();
    }

    /** Create an instance of {@link PurgeRelationship }. */
    public PurgeRelationship createPurgeRelationship() {
        return new PurgeRelationship();
    }

    /** Create an instance of {@link PurgeDatastreamResponse }. */
    public PurgeDatastreamResponse createPurgeDatastreamResponse() {
        return new PurgeDatastreamResponse();
    }

    /** Create an instance of {@link GetDatastreamHistoryResponse }. */
    public GetDatastreamHistoryResponse createGetDatastreamHistoryResponse() {
        return new GetDatastreamHistoryResponse();
    }

    /** Create an instance of {@link ListDatastreams }. */
    public ListDatastreams createListDatastreams() {
        return new ListDatastreams();
    }

    /** Create an instance of {@link PurgeObjectResponse }. */
    public PurgeObjectResponse createPurgeObjectResponse() {
        return new PurgeObjectResponse();
    }

    /** Create an instance of {@link Ingest }. */
    public Ingest createIngest() {
        return new Ingest();
    }

    /** Create an instance of {@link ModifyDatastreamByValue }. */
    public ModifyDatastreamByValue createModifyDatastreamByValue() {
        return new ModifyDatastreamByValue();
    }

    /** Create an instance of {@link GetDisseminationResponse }. */
    public GetDisseminationResponse createGetDisseminationResponse() {
        return new GetDisseminationResponse();
    }

    /** Create an instance of {@link FindObjects }. */
    public FindObjects createFindObjects() {
        return new FindObjects();
    }

    /** Create an instance of {@link GetObjectXML }. */
    public GetObjectXML createGetObjectXML() {
        return new GetObjectXML();
    }

    /** Create an instance of {@link GetRelationshipsResponse }. */
    public GetRelationshipsResponse createGetRelationshipsResponse() {
        return new GetRelationshipsResponse();
    }

    /** Create an instance of {@link GetObjectHistory }. */
    public GetObjectHistory createGetObjectHistory() {
        return new GetObjectHistory();
    }

    /** Create an instance of {@link GetDatastream }. */
    public GetDatastream createGetDatastream() {
        return new GetDatastream();
    }

    /** Create an instance of {@link DescribeRepository }. */
    public DescribeRepository createDescribeRepository() {
        return new DescribeRepository();
    }

    /** Create an instance of {@link FindObjectsResponse }. */
    public FindObjectsResponse createFindObjectsResponse() {
        return new FindObjectsResponse();
    }

    /** Create an instance of {@link MIMETypedStream }. */
    public MIMETypedStream createMIMETypedStream() {
        return new MIMETypedStream();
    }

    /** Create an instance of {@link MIMETypedStream.Header }. */
    public MIMETypedStream.Header createMIMETypedStreamHeader() {
        return new MIMETypedStream.Header();
    }

    /** Create an instance of {@link ModifyDatastreamByReference }. */
    public ModifyDatastreamByReference createModifyDatastreamByReference() {
        return new ModifyDatastreamByReference();
    }

    /** Create an instance of {@link ListMethods }. */
    public ListMethods createListMethods() {
        return new ListMethods();
    }

    /** Create an instance of {@link ValidateResponse }. */
    public ValidateResponse createValidateResponse() {
        return new ValidateResponse();
    }

    /** Create an instance of {@link ModifyDatastreamByValueResponse }. */
    public ModifyDatastreamByValueResponse createModifyDatastreamByValueResponse() {
        return new ModifyDatastreamByValueResponse();
    }

}
/*
 * Copyright (C) 2012 United States Government as represented by the Administrator of the
 * National Aeronautics and Space Administration.
 * All Rights Reserved.
 */
package gov.nasa.worldwind.symbology.milstd2525.graphics;

import gov.nasa.worldwind.*;
import gov.nasa.worldwind.avlist.*;
import gov.nasa.worldwind.geom.*;
import gov.nasa.worldwind.render.*;
import gov.nasa.worldwind.symbology.*;
import gov.nasa.worldwind.symbology.milstd2525.*;
import gov.nasa.worldwind.util.*;

import java.awt.*;
import java.awt.geom.*;
import java.util.*;
import java.util.List;

/**
 * Implementation of TacticalSymbol to render point graphics defined by MIL-STD-2525C Appendix B (Tactical Graphics).
 * This class implements the logic for rendering tactical point graphics, but actually implements the TacticalSymbol
 * interface.
 * <p/>
 * This class is not meant to be used directly by applications. Instead, apps should use {@link MilStd2525PointGraphic},
 * which implements the {@link TacticalGraphic} interface. (MilStd2525PointGraphic uses TacticalGraphicSymbol internally
 * to render the point graphic.)
 *
 * @author pabercrombie
 * @version $Id: TacticalGraphicSymbol.java 710 2012-08-13 16:10:58Z pabercrombie $
 * @see MilStd2525PointGraphic
 */
public class TacticalGraphicSymbol extends AbstractTacticalSymbol
{
    /**
     * Object that provides the default offset for each point graphic. Most graphics are centered on their position, but
     * some require a different offset.
     */
    protected static DefaultOffsets defaultOffsets = new DefaultOffsets();

    /** Object that provides the default label layouts for each point graphic. */
    protected static DefaultLabelLayouts defaultLayouts = new DefaultLabelLayouts();

    // Offset used to anchor the direction-of-movement line just below the symbol frame (fractional coordinates).
    protected static final Offset BELOW_BOTTOM_CENTER_OFFSET = Offset.fromFraction(0.5, -0.1);

    /** The default number of label lines to expect when computing the minimum size of the text layout rectangle. */
    protected static final int DEFAULT_LABEL_LINES = 2;

    /**
     * Associates one modifier key (see {@link SymbologyConstants}) with the list of offset/hotspot pairs that place
     * each instance of that modifier's label around the symbol.
     */
    public static class LabelLayout
    {
        // Modifier key this layout applies to (e.g. a SymbologyConstants field).
        protected String modifier;
        // One OffsetPair per label instance; some modifiers may be drawn at several positions.
        protected List<OffsetPair> offsets = new ArrayList<OffsetPair>();

        public LabelLayout(String modifier)
        {
            this.modifier = modifier;
        }

        /** Appends an offset/hotspot pair for one label instance of this modifier. */
        public void add(Offset offset, Offset hotspot)
        {
            this.offsets.add(new OffsetPair(offset, hotspot));
        }

        public String getModifier()
        {
            return modifier;
        }

        public List<OffsetPair> getOffsets()
        {
            return this.offsets;
        }
    }

    /** Simple pair of a label position offset and the label's hotspot (alignment point). */
    public static class OffsetPair
    {
        public Offset offset;
        public Offset hotSpot;

        public OffsetPair(Offset offset, Offset hotSpot)
        {
            this.offset = offset;
            this.hotSpot = hotSpot;
        }
    }

    /**
     * Indicates a string identifier for this symbol. The format of the identifier depends on the symbol set to which
     * this graphic belongs. For symbols belonging to the MIL-STD-2525 symbol set, this returns a 15-character
     * alphanumeric symbol identification code (SIDC). Calculated from the current modifiers at construction and during
     * each call to {@link #setModifier(String, Object)}. Initially <code>null</code>.
     */
    protected SymbolCode symbolCode;
    /**
     * Symbol identifier with fields that do not influence the type of graphic replaced with hyphens. See {@link
     * SymbolCode#toMaskedString}.
     */
    protected String maskedSymbolCode;

    /**
     * Constructs a new symbol with no position.
     *
     * @param sidc Code that identifies the graphic.
     */
    public TacticalGraphicSymbol(String sidc)
    {
        super();
        init(sidc);
    }

    /**
     * Constructs a new symbol with the specified position. The position specifies the latitude, longitude, and altitude
     * where this symbol is drawn on the globe. The position's altitude component is interpreted according to the
     * altitudeMode.
     *
     * @param sidc     Code that identifies the graphic.
     * @param position The latitude, longitude, and altitude where the symbol is drawn.
     *
     * @throws IllegalArgumentException if the position is <code>null</code>.
     */
    public TacticalGraphicSymbol(String sidc, Position position)
    {
        super(position);
        init(sidc);
    }

    /**
     * Indicates the current value of graphic's Status/Operational Condition field.
     *
     * @return this graphic's Status/Operational Condition field.
     *
     * @see #setStatus(String)
     */
    public String getStatus()
    {
        return this.symbolCode.getStatus();
    }

    /**
     * Specifies this graphic's Status/Operational Condition field. A graphic's Status defines whether the represented
     * object exists at the time the symbol was generated, or is anticipated to exist in the future. Additionally, a
     * graphic's Status can define its operational condition. The recognized values depend on the graphic's scheme:
     * <p/>
     * <strong>Tactical graphics</strong>
     * <p/>
     * <ul> <li>STATUS_ANTICIPATED</li> <li>STATUS_SUSPECTED</li> <li>STATUS_PRESENT</li> <li>STATUS_KNOWN</li> </ul>
     * <p/>
     * <strong>Meteorological and Oceanographic</strong>
     * <p/>
     * <ul> <li>Not supported</li> </ul>
     * <p/>
     * <strong>Emergency Management</strong>
     * <p/>
     * <ul> <li>STATUS_ANTICIPATED</li> <li>STATUS_PRESENT</li> </ul>
     *
     * @param value the new value for the Status/Operational Condition field.
     *
     * @throws IllegalArgumentException if the specified value is <code>null</code> or is not one of the accepted status
     *                                  values.
     */
    public void setStatus(String value)
    {
        if (value == null)
        {
            String msg = Logging.getMessage("nullValue.StringIsNull");
            Logging.logger().severe(msg);
            throw new IllegalArgumentException(msg);
        }

        // Validation is case-insensitive: the value is upper-cased before checking against the accepted set.
        if (!SymbologyConstants.STATUS_ALL.contains(value.toUpperCase()))
        {
            String msg = Logging.getMessage("Symbology.InvalidStatus", value);
            Logging.logger().severe(msg);
            throw new IllegalArgumentException(msg);
        }

        this.symbolCode.setStatus(value);
    }

    /**
     * Initialize the new symbol.
     *
     * @param sidc Code that identifies the graphic.
     */
    protected void init(String sidc)
    {
        this.symbolCode = new SymbolCode(sidc);
        this.maskedSymbolCode = this.symbolCode.toMaskedString();

        this.setAltitudeMode(WorldWind.CLAMP_TO_GROUND);

        // Configure this tactical point graphic's icon retriever and modifier retriever with either the
        // configuration value or the default value (in that order of precedence).
        String iconRetrieverPath = Configuration.getStringValue(AVKey.MIL_STD_2525_ICON_RETRIEVER_PATH,
            MilStd2525Constants.DEFAULT_ICON_RETRIEVER_PATH);
        this.setIconRetriever(new MilStd2525PointGraphicRetriever(iconRetrieverPath));

        // Look up the default screen offset for this graphic type using the masked SIDC.
        Offset offset = defaultOffsets.get(this.symbolCode.toMaskedString());
        this.setOffset(offset);

        // By default, show the hostile indicator (the letters "ENY"). Note that this default is different from
        // MilStd2525TacticalSymbol, which does not display the hostile indicator by default. Section 5.5.1.1 (pg. 37)
        // of MIL-STD-2525C states that the indicator is not required if color is used in the display. We choose to
        // display the indicator by default following the principle that by default hostile entities should look as
        // hostile as possible (to avoid being mistaken for friendly entities). In the case of tactical symbols, however
        // the indicator is redundant to both the symbol frame and fill, so it is not displayed by default.
        this.setShowHostileIndicator(true);

        // Use the same default unit format as 2525 tactical symbols.
        this.setUnitsFormat(MilStd2525TacticalSymbol.DEFAULT_UNITS_FORMAT);
    }

    /** {@inheritDoc} */
    public String getIdentifier()
    {
        return this.symbolCode.toString();
    }

    /** {@inheritDoc} Point graphics reserve room for {@link #DEFAULT_LABEL_LINES} lines of modifier text. */
    @Override
    protected int getMaxLabelLines(AVList modifiers)
    {
        return DEFAULT_LABEL_LINES;
    }

    /**
     * Populates the modifier list with values implied by the symbol code and the show-indicator settings, without
     * overwriting any modifier the application set explicitly.
     */
    @Override
    protected void applyImplicitModifiers(AVList modifiers)
    {
        String si = this.symbolCode.getStandardIdentity();

        // If this symbol represents a hostile entity, and the "hostile/enemy" indicator is enabled, then set the
        // hostile modifier to "ENY".
        boolean isHostile = SymbologyConstants.STANDARD_IDENTITY_HOSTILE.equalsIgnoreCase(si)
            || SymbologyConstants.STANDARD_IDENTITY_SUSPECT.equalsIgnoreCase(si)
            || SymbologyConstants.STANDARD_IDENTITY_JOKER.equalsIgnoreCase(si)
            || SymbologyConstants.STANDARD_IDENTITY_FAKER.equalsIgnoreCase(si);
        if (!modifiers.hasKey(SymbologyConstants.HOSTILE_ENEMY) && this.isShowHostileIndicator() && isHostile)
        {
            modifiers.setValue(SymbologyConstants.HOSTILE_ENEMY, SymbologyConstants.HOSTILE_ENEMY);
        }

        // Determine location, if location modifier is enabled.
        if (!modifiers.hasKey(SymbologyConstants.LOCATION) && this.isShowLocation())
        {
            modifiers.setValue(SymbologyConstants.LOCATION, this.getFormattedPosition());
        }

        // Determine altitude, if location modifier is enabled.
        if (!modifiers.hasKey(SymbologyConstants.ALTITUDE_DEPTH) && this.isShowLocation())
        {
            Position position = this.getPosition();
            UnitsFormat format = this.getUnitsFormat();

            // If the symbol is clamped to the ground, return "GL" (Ground Level) for the altitude. Otherwise format
            // the altitude using the active units format, and append the datum. See MIL-STD-2525C section 5.5.2.5.2 (pg. 41).
            String altitude;
            int altitudeMode = this.getAltitudeMode();
            if (altitudeMode == WorldWind.CLAMP_TO_GROUND)
                altitude = "GL";
            else if (altitudeMode == WorldWind.RELATIVE_TO_GROUND)
                altitude = format.eyeAltitude(position.getElevation()) + " AGL";
            else
                altitude = format.eyeAltitude(position.getElevation()) + " AMSL";

            modifiers.setValue(SymbologyConstants.ALTITUDE_DEPTH, altitude);
        }

        // CBRN release-event graphics carry an implicit Type modifier ("BIO" or "CML").
        if (!modifiers.hasKey(SymbologyConstants.TYPE))
        {
            if (TacGrpSidc.MOBSU_CBRN_REEVNT_BIO.equalsIgnoreCase(this.maskedSymbolCode))
                modifiers.setValue(SymbologyConstants.TYPE, "BIO");
            else if (TacGrpSidc.MOBSU_CBRN_REEVNT_CML.equalsIgnoreCase(this.maskedSymbolCode))
                modifiers.setValue(SymbologyConstants.TYPE, "CML");
        }
    }

    /**
     * Layout text and graphic modifiers around the symbol.
     *
     * @param dc        Current draw context.
     * @param modifiers Modifiers applied to this graphic.
     */
    @Override
    protected void layoutTextModifiers(DrawContext dc, AVList modifiers)
    {
        this.currentLabels.clear();

        Font font = this.getActiveAttributes().getTextModifierFont();
        List<LabelLayout> allLayouts = this.getLayouts(this.symbolCode.toMaskedString());

        for (LabelLayout layout : allLayouts)
        {
            java.util.List<OffsetPair> offsets = layout.offsets;

            if (WWUtil.isEmpty(offsets))
                continue;

            Object value = modifiers.getValue(layout.modifier);
            if (WWUtil.isEmpty(value))
                continue;

            // If we're retrieving the date modifier, maybe add a hyphen to the first value to indicate a date range.
            if (SymbologyConstants.DATE_TIME_GROUP.equals(layout.modifier) && (value instanceof Iterable))
            {
                value = this.addHyphenToDateRange((Iterable) value, offsets);
            }

            // Only the Location label participates in the layout rectangle; all other labels are free-floating.
            String mode = SymbologyConstants.LOCATION.equals(layout.modifier) ? LAYOUT_RELATIVE : LAYOUT_NONE;

            // Some graphics support multiple instances of the same modifier. Handle this case differently than the
            // single instance case.
            if (value instanceof Iterable)
            {
                this.layoutMultiLabel(dc, font, offsets, (Iterable) value, mode);
            }
            else if (value != null)
            {
                this.layoutLabel(dc, font, layout.offsets.get(0), value.toString(), mode);
            }
        }
    }

    /**
     * Indicates the label layouts applied to a particular graphic.
     *
     * @param sidc Symbol ID to for which to determine layout.
     *
     * @return List of label layouts for the specified symbol.
     */
    protected List<LabelLayout> getLayouts(String sidc)
    {
        return defaultLayouts.get(sidc);
    }

    /** {@inheritDoc} Lays out the direction-of-movement line, the only dynamic graphic modifier for point graphics. */
    @Override
    protected void layoutDynamicModifiers(DrawContext dc, AVList modifiers)
    {
        this.currentLines.clear();

        if (!this.isShowGraphicModifiers())
            return;

        // Direction of Movement indicator. Placed at the bottom of the symbol layout. Direction of Movement applies
        // only to CBRN graphics (see MIL-STD-2525C table XI, pg. 38).
        Object o = modifiers.getValue(SymbologyConstants.DIRECTION_OF_MOVEMENT);
        if (this.isShowDirectionOfMovement() && o instanceof Angle)
        {
            // The length of the direction of movement line is equal to the height of the symbol frame. See
            // MIL-STD-2525C section 5.3.4.1.c, page 33.
            double length = this.iconRect.getHeight();

            java.util.List<? extends Point2D> points = MilStd2525Util.computeGroundHeadingIndicatorPoints(dc,
                this.placePoint, (Angle) o, length, this.iconRect.getHeight());
            this.addLine(dc, BELOW_BOTTOM_CENTER_OFFSET, points, LAYOUT_RELATIVE, points.size() - 1);
        }
    }

    //////////////////////////////////////////////
    // Modifier layout
    //////////////////////////////////////////////

    /**
     * Add a hyphen to the first element in a list of dates to indicate a date range. This method only modifiers the
     * date list if exactly two dates are displayed in the graphic.
     *
     * @param value   Iterable of date modifiers.
     * @param offsets Layouts for the date modifiers.
     *
     * @return Iterable of modified dates. This may be a new, modified list, or the same list as {@code value} if no
     *         modification was required.
     */
    protected Iterable addHyphenToDateRange(Iterable value, java.util.List<OffsetPair> offsets)
    {
        // Only add a hyphen if exactly two dates are displayed in the graphic.
        if (offsets.size() != 2)
            return value;

        // Make sure that two date values are provided.
        Iterator iterator = value.iterator();
        Object date1 = iterator.hasNext() ? iterator.next() : null;
        Object date2 = iterator.hasNext() ? iterator.next() : null;

        // If only two dates were provided, add a hyphen to indicate a date range. If more or less
        // date were provided it's not a date range, so don't change anything.
        if (date1 != null && date2 != null)
        {
            return Arrays.asList(date1 + "-", date2);
        }

        return value;
    }

    /** Adds a single modifier label at the given offset pair, skipping empty values. */
    protected void layoutLabel(DrawContext dc, Font font, OffsetPair layout, String value, String mode)
    {
        if (!WWUtil.isEmpty(value))
        {
            this.addLabel(dc, layout.offset, layout.hotSpot, value, font, null, mode);
        }
    }

    /**
     * Lays out several instances of the same modifier, pairing values with offsets in order; extra values or extra
     * offsets (whichever runs out last) are ignored.
     */
    protected void layoutMultiLabel(DrawContext dc, Font font, java.util.List<OffsetPair> layouts, Iterable values,
        String mode)
    {
        Iterator valueIterator = values.iterator();
        Iterator<OffsetPair> layoutIterator = layouts.iterator();

        while (layoutIterator.hasNext() && valueIterator.hasNext())
        {
            OffsetPair layout = layoutIterator.next();
            Object value = valueIterator.next();
            if (value != null)
            {
                this.layoutLabel(dc, font, layout, value.toString(), mode);
            }
        }
    }

    /**
     * Indicates whether or not this graphic supports the direction of movement indicator. Only chemical, biological,
     * radiological, and nuclear point graphics support this modifier (see MIL-STD-2525C, table XI, pg. 38).
     *
     * @return True if the graphic is chemical, biological, radiological, or nuclear.
     */
    protected boolean isShowDirectionOfMovement()
    {
        String code = this.maskedSymbolCode;

        return TacGrpSidc.MOBSU_CBRN_NDGZ.equalsIgnoreCase(code)
            || TacGrpSidc.MOBSU_CBRN_FAOTP.equalsIgnoreCase(code)
            || TacGrpSidc.MOBSU_CBRN_REEVNT_BIO.equalsIgnoreCase(code)
            || TacGrpSidc.MOBSU_CBRN_REEVNT_CML.equalsIgnoreCase(code);
    }

    /** {@inheritDoc} */
    @Override
    protected void computeTransform(DrawContext dc)
    {
        super.computeTransform(dc);

        // Compute an appropriate offset if the application has not specified an offset and this symbol supports the
        // direction of movement indicator. Only the CBRN graphics in MIL-STD-2525C support this indicator. (Using the
        // graphic's default offset would cause the direction of movement line and location label to be cut off by the
        // surface when the globe is tilted.)
        if (this.iconRect != null && this.layoutRect != null && this.isShowDirectionOfMovement())
        {
            this.dx = -this.iconRect.getCenterX();
            this.dy = -this.layoutRect.getMinY();
        }
    }
}
package net.meisen.dissertation.performance.implementations.similarity.tida;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.text.ParseException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.meisen.dissertation.config.TidaConfig;
import net.meisen.dissertation.impl.datasets.SingleStaticDataSet;
import net.meisen.dissertation.impl.parser.query.QueryFactory;
import net.meisen.dissertation.impl.parser.query.select.SelectQuery;
import net.meisen.dissertation.model.data.TidaModel;
import net.meisen.dissertation.model.handler.TidaModelHandler;
import net.meisen.general.genmisc.types.Dates;
import net.meisen.general.genmisc.types.Objects;
import net.meisen.general.sbconfigurator.runners.JUnitConfigurationRunner;
import net.meisen.general.sbconfigurator.runners.annotations.ContextClass;
import net.meisen.general.sbconfigurator.runners.annotations.ContextFile;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Tests the implementation of the {@code TimeSeriesSimilarityEvaluator}.
 * 
 * @author pmeisen
 * 
 */
@ContextClass(TidaConfig.class)
@ContextFile("sbconfigurator-core.xml")
@RunWith(JUnitConfigurationRunner.class)
public class TestTimeSeriesSimilarityEvaluator {

	@Autowired
	private TidaModelHandler loader;

	@Autowired
	private QueryFactory queryFactory;

	// model and evaluator are re-created for every test by loadModel()
	private TidaModel model;
	private TimeSeriesSimilarityEvaluator evaluator;

	/**
	 * Helper method to load the default model for the test. Unloads all other
	 * models prior to loading it.
	 */
	@Before
	public void loadModel() {
		loader.unloadAll();
		model = loader
				.loadViaXslt("/net/meisen/dissertation/performance/implementations/similarity/tida/tida-model-timeSeriesSimilarityEvaluator.xml");

		evaluator = new TimeSeriesSimilarityEvaluator(model);
	}

	/**
	 * Helper method to load data into the model. The dates are parsed using
	 * the pattern {@code dd.MM.yyyy HH:mm:ss}; a parse failure fails the test.
	 * 
	 * @param name
	 *            the name
	 * @param ideas
	 *            the ideas
	 * @param start
	 *            the start
	 * @param end
	 *            the end
	 */
	protected void loadData(final String name, final int ideas,
			final String start, final String end) {
		try {
			loadData(name, ideas, Dates.parseDate(start, "dd.MM.yyyy HH:mm:ss"),
					Dates.parseDate(end, "dd.MM.yyyy HH:mm:ss"));
		} catch (final ParseException e) {
			// a malformed test fixture is a test error, not a model error
			fail(e.getMessage());
		}
	}

	/**
	 * Helper method to load data into the model.
	 * 
	 * @param name
	 *            the name
	 * @param ideas
	 *            the ideas
	 * @param start
	 *            the start
	 * @param end
	 *            the end
	 */
	protected void loadData(final String name, final int ideas,
			final Date start, final Date end) {
		final Map<String, Object> values = new HashMap<String, Object>();

		// add the values; the keys match the model's descriptor/interval names
		values.put("NAME", name);
		values.put("IDEAS", ideas);
		values.put("START", start);
		values.put("END", end);

		loadData(values);
	}

	/**
	 * Helper method to load data into the model.
	 * 
	 * @param values
	 *            the values to be loaded
	 */
	protected void loadData(final Map<String, Object> values) {
		model.loadRecord(new SingleStaticDataSet(values));
	}

	/**
	 * Helper method to create a query with the specified measure and filter
	 * (can be {@code null})
	 * 
	 * @param measure
	 *            the measure to be used
	 * @param filter
	 *            the filter, can be {@code null}
	 * 
	 * @return the parsed query
	 */
	protected SelectQuery getQuery(final String measure, final String filter) {
		return getQuery("SELECT TIMESERIES OF " + measure
				+ " FROM testTimeSeriesSimilarityEvaluator IN [01.01.2015, 02.01.2015)"
				+ (Objects.empty(filter) ? "" : " WHERE " + filter));
	}

	/**
	 * Helper method to get the passed query parsed.
	 * 
	 * @param query
	 *            the query to be parsed
	 * 
	 * @return the parsed query
	 */
	protected SelectQuery getQuery(final String query) {
		return queryFactory.parseQuery(query);
	}

	/**
	 * This test is just use to initialize everything and ensure a better
	 * performance measure for all the other tests.
	 */
	@Test
	public void testInit() {
		/*
		 * This test is just use to initialize everything and ensure a better
		 * performance measure for all the other tests.
		 */
	}

	/**
	 * Tests the similarity calculated with measure-similarity only.
	 */
	@Test
	public void testMeasureSimilarity() {
		List<TimeSeriesSimilarityCollection> res;
		TimeSeriesSimilarityCollection r;

		// we are interested in measure similarity only
		evaluator.setSimilarity(true, false, false);

		// add some data
		// @formatter:off
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (5)
		// 31.12.15: (00:00) ++++++++++++ (02:00) Tobias  (5)
		// @formatter:on
		loadData("Philipp", 5, "01.01.2015 00:00:00", "01.01.2015 02:00:00");
		loadData("Tobias", 5, "31.12.2015 00:00:00", "31.12.2015 02:00:00");

		// get the similar once on a global measure level
		res = evaluator.evaluateSimilarity(
				getQuery("SUM(IDEAS) AS IDEAS", null), 1);
		assertEquals(1, res.size());
		r = res.get(0);
		assertEquals(0.0, r.getMeasureDistance(), 0.0);
		assertEquals(0.0, r.getTotalDistance(), 0.0);
		// 1451520000000L == 31 Dec 2015 00:00:00 UTC
		assertEquals(1451520000000L, ((Date) r.getLabelValue(0)).getTime());

		// add another data package
		// @formatter:off
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (5)
		// 30.11.15: (00:10) +++++ (01:00)        Philipp (5)
		// 31.12.15: (00:00) ++++++++++++ (02:00) Tobias  (5)
		// @formatter:on
		loadData("Philipp", 5, "30.11.2015 00:10:00", "30.11.2015 01:00:00");

		// get the similar once on a filtered measure level
		res = evaluator.evaluateSimilarity(
				getQuery("SUM(IDEAS) AS IDEAS", "NAME='Philipp'"), 1);
		r = res.get(0);
		// 70 minutes (00:00 - 00:09 and 01:01 - 02:00), each 5 => 350
		assertEquals(350.0, r.getMeasureDistance(), 0.0);
		assertEquals(350.0, r.getTotalDistance(), 0.0);
		// 1448841600000L == 30 Nov 2015 00:00:00 UTC
		assertEquals(1448841600000L, ((Date) r.getLabelValue(0)).getTime());

		// add another data package
		// @formatter:off
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (5)
		//           (00:10) ++++ (00:50)         Edison  (5)
		// 30.11.15: (00:00) +++++ (00:50)        Edison  (5)
		//           (00:10) +++++ (01:00)        Philipp (5)
		//           (01:00) ++++++ (02:00)       Philipp (5)
		// 31.12.15: (00:00) ++++++++++++ (02:00) Tobias  (5)
		// @formatter:on
		loadData("Edison", 5, "30.11.2015 00:00:00", "30.11.2015 00:50:00");
		loadData("Philipp", 5, "30.11.2015 01:00:00", "30.11.2015 02:00:00");
		loadData("Edison", 5, "01.01.2015 00:10:00", "01.01.2015 00:50:00");

		// get the similar once on a filtered measure level
		res = evaluator.evaluateSimilarity(
				getQuery("SUM(IDEAS) AS IDEAS", null), 1);
		r = res.get(0);
		// 1 minute (01:00:00), each 5 as value => 5
		assertEquals(5.0, r.getMeasureDistance(), 0.0);
		assertEquals(5.0, r.getTotalDistance(), 0.0);
		// 1448841600000L == 30 Nov 2015 00:00:00 UTC
		assertEquals(1448841600000L, ((Date) r.getLabelValue(0)).getTime());

		// fire one with dimensions
		// @formatter:off
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (5)
		//           (00:10) ++++ (00:50)         Edison  (5)
		// 30.11.15: (00:00) +++++ (00:50)        Edison  (5)
		//           (00:10) +++++ (01:00)        Philipp (5)
		//           (01:00) ++++++ (02:00)       Philipp (5)
		// 31.12.15: (00:00) ++++++++++++ (02:00) Tobias  (5)
		// @formatter:on
		res = evaluator.evaluateSimilarity(
				getQuery("MAX(SUM(IDEAS)) AS IDEAS ON TIME.DEF.HOUR", null), 1);
		assertEquals(1, res.size());
		r = res.get(0);
		// 1 minute (01:00:00), each 5 for the whole hour (* 60) as value => 300
		assertEquals(300.0, r.getMeasureDistance(), 0.0);
		assertEquals(300.0, r.getTotalDistance(), 0.0);
		assertEquals("R20151130_0000_0059", r.getLabelValue(0));
	}

	/**
	 * Tests the similarity calculated with count-similarity only.
	 */
	@Test
	public void testCountSimilarity() {
		List<TimeSeriesSimilarityCollection> res;
		TimeSeriesSimilarityCollection r;

		// we are interested in count similarity only
		evaluator.setSimilarity(false, true, false);

		// add some data
		// @formatter:off
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (15)
		// 31.12.15: (00:00) ++++++++++++ (02:00) Tobias  (5)
		// @formatter:on
		loadData("Philipp", 15, "01.01.2015 00:00:00", "01.01.2015 02:00:00");
		loadData("Tobias", 5, "31.12.2015 00:00:00", "31.12.2015 02:00:00");

		// get the similar once on a global measure level
		res = evaluator.evaluateSimilarity(
				getQuery("MAX(IDEAS) AS IDEAS ON TIME.DEF.HOUR", null), 1);
		assertEquals(1, res.size());
		r = res.get(0);
		assertEquals(0.0, r.getCountDistance(), 0.0);
		assertEquals(0.0, r.getTotalDistance(), 0.0);
		// 1451520000000L == 31 Dec 2015 00:00:00 UTC
		assertEquals(1451520000000L, ((Date) r.getLabelValue(0)).getTime());

		// add another data package
		// @formatter:off
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (15)
		// 30.11.15: (00:10) +++++ (01:00)        Philipp (5)
		// 31.12.15: (00:00) ++++++++++++ (02:00) Tobias  (5)
		// @formatter:on
		loadData("Philipp", 5, "30.11.2015 00:10:00", "30.11.2015 01:00:00");

		// get the similar once on a filtered measure level
		res = evaluator.evaluateSimilarity(
				getQuery("MIN(IDEAS) AS IDEAS ON TIME.DEF.HOUR",
						"NAME='Philipp'"), 1);
		r = res.get(0);
		// 70 minutes (00:00 - 00:09 and 01:01 - 02:00)
		assertEquals(70.0, r.getCountDistance(), 0.0);
		assertEquals(70.0, r.getTotalDistance(), 0.0);
		// 1448841600000L == 30 Nov 2015 00:00:00 UTC
		assertEquals(1448841600000L, ((Date) r.getLabelValue(0)).getTime());

		// add another data package
		// @formatter:off
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (15)
		//           (00:10) ++++ (00:50)         Edison  (5)
		// 30.11.15: (00:00) +++++ (00:50)        Edison  (5)
		//           (00:10) +++++ (01:00)        Philipp (5)
		//           (01:00) ++++++ (02:00)       Philipp (5)
		// 31.12.15: (00:00) ++++++++++++ (02:00) Tobias  (5)
		// @formatter:on
		loadData("Edison", 5, "30.11.2015 00:00:00", "30.11.2015 00:50:00");
		loadData("Philipp", 5, "30.11.2015 01:00:00", "30.11.2015 02:00:00");
		loadData("Edison", 5, "01.01.2015 00:10:00", "01.01.2015 00:50:00");

		// get the similar once on a filtered measure level
		res = evaluator.evaluateSimilarity(
				getQuery("SUM(IDEAS) AS IDEAS ON TIME.DEF.HOUR", null), 1);
		assertEquals(1, res.size());
		r = res.get(0);
		// 1 minute (01:00:00)
		assertEquals(1.0, r.getCountDistance(), 0.0);
		assertEquals(1.0, r.getTotalDistance(), 0.0);
		// 1448841600000L == 30 Nov 2015 00:00:00 UTC
		assertEquals(1448841600000L, ((Date) r.getLabelValue(0)).getTime());
	}

	/**
	 * Tests some scenarios with structural similarity.
	 */
	@Test
	public void testStructureSimilarity1() {
		List<TimeSeriesSimilarityCollection> res;
		TimeSeriesSimilarityCollection r;

		// we are interested in structural similarity only
		evaluator.setSimilarity(false, false, true);

		// add some data
		// @formatter:off
		// 01.01.15: (00:00) ++++++ (01:00) Philipp (3)
		// 01.01.15: (00:00) ++++++ (01:00) Philipp (7)
		// 31.12.15: (00:00) ++++++ (01:00) Tobias  (13)
		// 31.12.15: (00:00) ++++++ (01:00) Tobias  (17)
		// @formatter:on
		loadData("Philipp", 3, "01.01.2015 00:00:00", "01.01.2015 01:00:00");
		loadData("Philipp", 7, "01.01.2015 00:00:00", "01.01.2015 01:00:00");
		loadData("Tobias", 13, "31.12.2015 00:00:00", "31.12.2015 01:00:00");
		loadData("Tobias", 17, "31.12.2015 00:00:00", "31.12.2015 01:00:00");

		// get the similar once on a filtered measure level
		res = evaluator.evaluateSimilarity(
				getQuery("SUM(IDEAS) AS IDEAS ON TIME.DEF.HOUR", null), 1);
		assertEquals(1, res.size());
		r = res.get(0);
		assertEquals(0.0, r.getStructureDistance(), 0.0);
		assertEquals(0.0, r.getTotalDistance(), 0.0);
		// 1451520000000L == 31 Dec 2015 00:00:00 UTC
		assertEquals(1451520000000L, ((Date) r.getLabelValue(0)).getTime());

		// add some data
		// @formatter:off
		// 01.01.15: (00:00) ++++++ (01:00)       Philipp (3)
		// 01.01.15: (00:00) ++++++ (01:00)       Philipp (7)
		// 01.01.15: (00:00) ++++++++++++ (02:00) Philipp (10)
		// 01.01.15: (00:00) + (00:10)            Philipp (10)
		// 31.12.15: (00:00) ++++++ (01:00)       Tobias  (13)
		// 31.12.15: (00:00) ++++++ (01:00)       Tobias  (17)
		// 31.12.15: (00:00) ++++++ (01:00)       Tobias  (1)
		// 31.12.15: (01:01) ++++++ (02:00)       Tobias  (2)
		// 31.12.15: (00:00) + (00:10)            Philipp (10)
		// @formatter:on
		loadData("Philipp", 10, "01.01.2015 00:00:00", "01.01.2015 02:00:00");
		loadData("Philipp", 10, "01.01.2015 00:00:00", "01.01.2015 00:10:00");
		loadData("Tobias", 1, "31.12.2015 00:00:00", "31.12.2015 02:00:00");
		loadData("Tobias", 2, "31.12.2015 01:01:00", "31.12.2015 02:00:00");
		loadData("Philipp", 10, "31.12.2015 00:00:00", "31.12.2015 00:10:00");

		// get the similar once on a filtered measure level
		res = evaluator.evaluateSimilarity(
				getQuery("MAX(SUM(IDEAS)) AS IDEAS ON TIME.DEF.HOUR", null), 1);
		// NOTE(review): the original contained a commented-out size assertion
		// here (assertEquals(1, res.size())); confirm the expected result size
		// before re-enabling it.
		r = res.get(0);
		assertEquals(3.2, r.getStructureDistance(), 0.0);
		assertEquals(3.2, r.getTotalDistance(), 0.0);
		// 1451520000000L == 31 Dec 2015 00:00:00 UTC
		assertEquals(1451520000000L, ((Date) r.getLabelValue(0)).getTime());
	}

	/**
	 * Tests some scenarios with structural similarity.
	 */
	@Test
	public void testStructureSimilarity2() {
		List<TimeSeriesSimilarityCollection> res;
		TimeSeriesSimilarityCollection r;

		// we are interested in structural similarity only
		evaluator.setSimilarity(false, false, true);

		// add some data
		// @formatter:off
		// 01.01.15: (00:00) ++++++ (01:00) Philipp (3)
		// 01.01.15: (01:01) +++ (01:30)    Philipp (7)
		// 01.01.15: (01:20) ++++ (02:00)   Philipp (7)
		// 31.12.15: (00:00) ++++++ (01:00) Tobias  (13)
		// 31.12.15: (01:01) +++ (01:30)    Tobias  (17)
		// 31.12.15: (01:10) +++++ (02:00)  Tobias  (17)
		// @formatter:on
		loadData("Philipp", 3, "01.01.2015 00:00:00", "01.01.2015 01:00:00");
		loadData("Philipp", 7, "01.01.2015 01:01:00", "01.01.2015 01:30:00");
		loadData("Philipp", 7, "01.01.2015 01:20:00", "01.01.2015 02:00:00");
		loadData("Tobias", 13, "31.12.2015 00:00:00", "31.12.2015 01:00:00");
		loadData("Tobias", 17, "31.12.2015 01:01:00", "31.12.2015 01:30:00");
		loadData("Tobias", 17, "31.12.2015 01:10:00", "31.12.2015 02:00:00");

		// get the similar once on a filtered measure level
		res = evaluator.evaluateSimilarity(
				getQuery("SUM(IDEAS) AS IDEAS ON TIME.DEF.HOUR", null), 1);
		assertEquals(1, res.size());
		r = res.get(0);
		assertEquals(0.0, r.getStructureDistance(), 0.0);
		assertEquals(0.0, r.getTotalDistance(), 0.0);
		// 1451520000000L == 31 Dec 2015 00:00:00 UTC
		assertEquals(1451520000000L, ((Date) r.getLabelValue(0)).getTime());

		// add some data
		// @formatter:off
		// 01.01.15: (00:00) ++++++ (01:00) Philipp (3)
		// 01.01.15: (01:01) +++ (01:30)    Philipp (7)
		// 01.01.15: (01:20) ++++ (02:00)   Philipp (7)
		// 31.12.15: (00:00) ++++++ (01:00) Tobias  (13)
		// 31.12.15: (01:01) +++ (01:30)    Tobias  (17)
		// 31.12.15: (01:10) +++++ (02:00)  Tobias  (17)
		// 31.12.15: (01:30) +++ (02:00)    Tobias  (17)
		// @formatter:on
		loadData("Tobias", 17, "31.12.2015 01:30:00", "31.12.2015 02:00:00");

		// get the similar once on a filtered measure level; request all 364
		// other days so the whole ranking can be checked
		res = evaluator.evaluateSimilarity(
				getQuery("SUM(IDEAS) AS IDEAS ON TIME.DEF.HOUR", null), 364);
		assertEquals(364, res.size());
		for (int i = 0; i < 363; i++) {
			r = res.get(i);
			assertEquals(3.0, r.getStructureDistance(), 0.0);
			assertEquals(3.0, r.getTotalDistance(), 0.0);
		}
		r = res.get(363);
		assertEquals(3.5, r.getStructureDistance(), 0.0);
		assertEquals(3.5, r.getTotalDistance(), 0.0);
		// 1451520000000L == 31 Dec 2015 00:00:00 UTC
		assertEquals(1451520000000L, ((Date) r.getLabelValue(0)).getTime());
	}

	/**
	 * Cleans-up after every test.
	 */
	@After
	public void cleanUp() {
		if (model != null) {
			model.release(true);
			loader.unloadAll();
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.compaction; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Random; import java.util.UUID; import junit.framework.Assert; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.OrderedJUnit4ClassRunner; import org.apache.cassandra.SchemaLoader; import org.apache.cassandra.Util; import org.apache.cassandra.UpdateBuilder; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.Keyspace; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.io.sstable.ISSTableScanner; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.notifications.SSTableAddedNotification; import org.apache.cassandra.notifications.SSTableRepairStatusChanged; import org.apache.cassandra.repair.RepairJobDesc; import 
org.apache.cassandra.repair.Validator;
import org.apache.cassandra.schema.CompactionParams;
import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.utils.FBUtilities;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@link LeveledCompactionStrategy}: level grouping for anti-compaction,
 * validation with multiple sstables per level, scanner progress accounting,
 * level mutation and repaired/unrepaired manifest handling.
 */
@RunWith(OrderedJUnit4ClassRunner.class)
public class LeveledCompactionStrategyTest
{
    private static final Logger logger = LoggerFactory.getLogger(LeveledCompactionStrategyTest.class);

    private static final String KEYSPACE1 = "LeveledCompactionStrategyTest";
    private static final String CF_STANDARDDLEVELED = "StandardLeveled";
    private Keyspace keyspace;
    private ColumnFamilyStore cfs;

    /** Creates the test keyspace with an LCS table capped at 1 MB per sstable. */
    @BeforeClass
    public static void defineSchema() throws ConfigurationException
    {
        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE1,
                                    KeyspaceParams.simple(1),
                                    SchemaLoader.standardCFMD(KEYSPACE1, CF_STANDARDDLEVELED)
                                                .compaction(CompactionParams.lcs(Collections.singletonMap("sstable_size_in_mb", "1"))));
    }

    @Before
    public void enableCompaction()
    {
        keyspace = Keyspace.open(KEYSPACE1);
        cfs = keyspace.getColumnFamilyStore(CF_STANDARDDLEVELED);
        cfs.enableAutoCompaction();
    }

    /**
     * Since we use StandardLeveled CF for every test, we want to clean up after the test.
     */
    @After
    public void truncateStandardLeveled() // renamed: fixed "truncateSTandardLeveled" typo
    {
        cfs.truncateBlocking();
    }

    /**
     * Ensure that the grouping operation preserves the levels of grouped tables
     */
    @Test
    public void testGrouperLevels() throws Exception
    {
        ByteBuffer value = ByteBuffer.wrap(new byte[100 * 1024]); // 100 KB value, make it easy to have multiple files

        // Need entropy to prevent compression so size is predictable with compression enabled/disabled
        new Random().nextBytes(value.array());

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata, String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }

        waitForLeveling(cfs);
        CompactionStrategyManager strategy = cfs.getCompactionStrategyManager();
        // Checking we're not completely bad at math
        int l1Count = strategy.getSSTableCountPerLevel()[1];
        int l2Count = strategy.getSSTableCountPerLevel()[2];
        if (l1Count == 0 || l2Count == 0)
        {
            logger.error("L1 or L2 has 0 sstables. Expected > 0 on both.");
            logger.error("L1: " + l1Count);
            logger.error("L2: " + l2Count);
            Assert.fail();
        }

        Collection<Collection<SSTableReader>> groupedSSTables = cfs.getCompactionStrategyManager().groupSSTablesForAntiCompaction(cfs.getLiveSSTables());
        for (Collection<SSTableReader> sstableGroup : groupedSSTables)
        {
            int groupLevel = -1;
            Iterator<SSTableReader> it = sstableGroup.iterator();
            while (it.hasNext())
            {
                SSTableReader sstable = it.next();
                int tableLevel = sstable.getSSTableLevel();
                if (groupLevel == -1)
                    groupLevel = tableLevel;
                // JUnit assertion instead of a bare JVM 'assert', which is silently
                // skipped unless the JVM runs with -ea
                assertEquals(groupLevel, tableLevel);
            }
        }
    }

    /*
     * This exercises in particular the code of #4142
     */
    @Test
    public void testValidationMultipleSSTablePerLevel() throws Exception
    {
        byte [] b = new byte[100 * 1024];
        new Random().nextBytes(b);
        ByteBuffer value = ByteBuffer.wrap(b); // 100 KB value, make it easy to have multiple files

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata, String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }

        waitForLeveling(cfs);
        CompactionStrategyManager strategy = cfs.getCompactionStrategyManager();
        // Checking we're not completely bad at math
        assertTrue(strategy.getSSTableCountPerLevel()[1] > 0);
        assertTrue(strategy.getSSTableCountPerLevel()[2] > 0);

        // Full-ring range so validation touches every sstable
        Range<Token> range = new Range<>(Util.token(""), Util.token(""));
        int gcBefore = keyspace.getColumnFamilyStore(CF_STANDARDDLEVELED).gcBefore(FBUtilities.nowInSeconds());
        UUID parentRepSession = UUID.randomUUID();
        ActiveRepairService.instance.registerParentRepairSession(parentRepSession, Arrays.asList(cfs), Arrays.asList(range), false, System.currentTimeMillis(), true);
        RepairJobDesc desc = new RepairJobDesc(parentRepSession, UUID.randomUUID(), KEYSPACE1, CF_STANDARDDLEVELED, Arrays.asList(range));
        Validator validator = new Validator(desc, FBUtilities.getBroadcastAddress(), gcBefore);
        CompactionManager.instance.submitValidation(cfs, validator).get();
    }

    /**
     * wait for leveled compaction to quiesce on the given columnfamily
     */
    private void waitForLeveling(ColumnFamilyStore cfs) throws InterruptedException
    {
        CompactionStrategyManager strategy = cfs.getCompactionStrategyManager();
        // L0 is the lowest priority, so when that's done, we know everything is done
        while (strategy.getSSTableCountPerLevel()[0] > 1)
            Thread.sleep(100);

        // in AbstractCompactionStrategy.replaceSSTables() first we remove and then we add sstables so wait a little bit longer
        Thread.sleep(10);
    }

    @Test
    public void testCompactionProgress() throws Exception
    {
        // make sure we have SSTables in L1
        byte [] b = new byte[100 * 1024];
        new Random().nextBytes(b);
        ByteBuffer value = ByteBuffer.wrap(b);
        int rows = 2;
        int columns = 10;
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata, String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }

        waitForLeveling(cfs);
        LeveledCompactionStrategy strategy = (LeveledCompactionStrategy) (cfs.getCompactionStrategyManager()).getStrategies().get(1);
        // JUnit assertion instead of a bare JVM 'assert' (inert without -ea)
        assertTrue(strategy.getLevelSize(1) > 0);

        // get LeveledScanner for level 1 sstables
        Collection<SSTableReader> sstables = strategy.manifest.getLevel(1);
        List<ISSTableScanner> scanners = strategy.getScanners(sstables).scanners;
        assertEquals(1, scanners.size()); // should be one per level
        ISSTableScanner scanner = scanners.get(0);
        // scan through to the end
        while (scanner.hasNext())
            scanner.next();

        // scanner.getCurrentPosition should be equal to total bytes of L1 sstables
        assertEquals(scanner.getCurrentPosition(), SSTableReader.getTotalUncompressedBytes(sstables));
    }

    @Test
    public void testMutateLevel() throws Exception
    {
        cfs.disableAutoCompaction();
        ByteBuffer value = ByteBuffer.wrap(new byte[100 * 1024]); // 100 KB value, make it easy to have multiple files

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata, String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }
        cfs.forceBlockingFlush();
        LeveledCompactionStrategy strategy = (LeveledCompactionStrategy) ( cfs.getCompactionStrategyManager()).getStrategies().get(1);
        cfs.forceMajorCompaction();

        for (SSTableReader s : cfs.getLiveSSTables())
        {
            assertTrue(s.getSSTableLevel() != 6 && s.getSSTableLevel() > 0);
            strategy.manifest.remove(s);
            s.descriptor.getMetadataSerializer().mutateLevel(s.descriptor, 6);
            s.reloadSSTableMetadata();
            strategy.manifest.add(s);
        }
        // verify that all sstables in the changed set is level 6
        for (SSTableReader s : cfs.getLiveSSTables())
            assertEquals(6, s.getSSTableLevel());

        int[] levels = strategy.manifest.getAllLevelSize();
        // verify that the manifest has correct amount of sstables
        assertEquals(cfs.getLiveSSTables().size(), levels[6]);
    }

    @Test
    public void testNewRepairedSSTable() throws Exception
    {
        byte [] b = new byte[100 * 1024];
        new Random().nextBytes(b);
        ByteBuffer value = ByteBuffer.wrap(b); // 100 KB value, make it easy to have multiple files

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata, String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }
        waitForLeveling(cfs);
        cfs.disableAutoCompaction();

        while(CompactionManager.instance.isCompacting(Arrays.asList(cfs)))
            Thread.sleep(100);

        CompactionStrategyManager strategy = cfs.getCompactionStrategyManager();
        // strategies: index 0 = repaired, index 1 = unrepaired
        List<AbstractCompactionStrategy> strategies = strategy.getStrategies();
        LeveledCompactionStrategy repaired = (LeveledCompactionStrategy) strategies.get(0);
        LeveledCompactionStrategy unrepaired = (LeveledCompactionStrategy) strategies.get(1);
        assertEquals(0, repaired.manifest.getLevelCount() );
        assertEquals(2, unrepaired.manifest.getLevelCount());
        assertTrue(strategy.getSSTableCountPerLevel()[1] > 0);
        assertTrue(strategy.getSSTableCountPerLevel()[2] > 0);

        for (SSTableReader sstable : cfs.getLiveSSTables())
            assertFalse(sstable.isRepaired());

        int sstableCount = 0;
        for (List<SSTableReader> level : unrepaired.manifest.generations)
            sstableCount += level.size();
        // we only have unrepaired sstables:
        assertEquals(sstableCount, cfs.getLiveSSTables().size());

        SSTableReader sstable1 = unrepaired.manifest.generations[2].get(0);
        SSTableReader sstable2 = unrepaired.manifest.generations[1].get(0);

        // mark sstable1 repaired on disk and reload its metadata
        sstable1.descriptor.getMetadataSerializer().mutateRepairedAt(sstable1.descriptor, System.currentTimeMillis());
        sstable1.reloadSSTableMetadata();
        assertTrue(sstable1.isRepaired());

        strategy.handleNotification(new SSTableRepairStatusChanged(Arrays.asList(sstable1)), this);

        int repairedSSTableCount = 0;
        for (List<SSTableReader> level : repaired.manifest.generations)
            repairedSSTableCount += level.size();
        assertEquals(1, repairedSSTableCount);
        // make sure the repaired sstable ends up in the same level in the repaired manifest:
        assertTrue(repaired.manifest.generations[2].contains(sstable1));
        // and that it is gone from unrepaired
        assertFalse(unrepaired.manifest.generations[2].contains(sstable1));

        unrepaired.removeSSTable(sstable2);
        strategy.handleNotification(new SSTableAddedNotification(Collections.singleton(sstable2)), this);
        assertTrue(unrepaired.manifest.getLevel(1).contains(sstable2));
        assertFalse(repaired.manifest.getLevel(1).contains(sstable2));
    }
}
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import com.facebook.buck.android.StringResources.Gender; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.step.ExecutionContext; import com.facebook.buck.step.TestExecutionContext; import com.facebook.buck.testutil.FakeProjectFilesystem; import com.facebook.buck.testutil.integration.TestDataHelper; import com.facebook.buck.util.xml.XmlDomParser; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.common.io.Files; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.attribute.FileAttribute; import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.regex.Matcher; import org.easymock.EasyMockSupport; import org.junit.Before; import org.junit.Test; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; 
public class CompileStringsStepTest extends EasyMockSupport { private static final String XML_HEADER = "<?xml version='1.0' encoding='utf-8'?>"; private Path testdataDir; private Path firstFile; private Path secondFile; private Path thirdFile; private Path fourthFile; private Path fifthFile; @Before public void findTestData() { testdataDir = TestDataHelper.getTestDataDirectory(this).resolve("compile_strings"); firstFile = testdataDir.resolve("first/res/values-es/strings.xml"); secondFile = testdataDir.resolve("second/res/values-es/strings.xml"); thirdFile = testdataDir.resolve("third/res/values-pt/strings.xml"); fourthFile = testdataDir.resolve("third/res/values-pt-rBR/strings.xml"); fifthFile = testdataDir.resolve("third/res/values/strings.xml"); } @Test public void testStringFilePattern() { testStringPathRegex("res/values-es/strings.xml", true, "es", null); testStringPathRegex("/one/res/values-es/strings.xml", true, "es", null); testStringPathRegex("/two/res/values-es-rUS/strings.xml", true, "es", "US"); // Not matching strings. 
testStringPathRegex("/one/res/values/strings.xml", false, null, null); testStringPathRegex("/one/res/values-/strings.xml", false, null, null); testStringPathRegex("/one/res/values-e/strings.xml", false, null, null); testStringPathRegex("/one/res/values-esc/strings.xml", false, null, null); testStringPathRegex("/one/res/values-es-rU/strings.xml", false, null, null); testStringPathRegex("/one/res/values-es-rUSA/strings.xml", false, null, null); testStringPathRegex("/one/res/values-es-RUS/strings.xml", false, null, null); testStringPathRegex("/one/res/values-rUS/strings.xml", false, null, null); } private void testStringPathRegex(String input, boolean matches, String locale, String country) { Matcher matcher = CompileStringsStep.NON_ENGLISH_STRING_FILE_PATTERN.matcher(input); assertEquals(matches, matcher.matches()); if (!matches) { return; } assertEquals(locale, matcher.group(1)); assertEquals(country, matcher.group(2)); } @Test public void testRDotTxtContentsPattern() { testContentRegex(" int string r_name 0xdeadbeef", false, null, null, null); testContentRegex("int string r_name 0xdeadbeef ", false, null, null, null); testContentRegex("int string r_name 0xdeadbeef", true, "string", "r_name", "deadbeef"); testContentRegex("int string r_name 0x", false, null, null, null); testContentRegex("int array r_name 0xdead", true, "array", "r_name", "dead"); testContentRegex("int plurals r_name 0xdead", true, "plurals", "r_name", "dead"); testContentRegex("int plural r_name 0xdead", false, null, null, null); testContentRegex("int plurals r name 0xdead", false, null, null, null); testContentRegex("int[] string r_name 0xdead", false, null, null, null); } private void testContentRegex( String input, boolean matches, String resourceType, String resourceName, String resourceId) { Matcher matcher = CompileStringsStep.R_DOT_TXT_STRING_RESOURCE_PATTERN.matcher(input); assertEquals(matches, matcher.matches()); if (!matches) { return; } assertEquals("Resource type does not match.", 
resourceType, matcher.group(1)); assertEquals("Resource name does not match.", resourceName, matcher.group(2)); assertEquals("Resource id does not match.", resourceId, matcher.group(3)); } @Test public void testGroupFilesByLocale() { Path path0 = Paths.get("project/dir/res/values-da/strings.xml"); Path path1 = Paths.get("project/dir/res/values-da-rAB/strings.xml"); Path path2 = Paths.get("project/dir/res/values/strings.xml"); Path path3 = Paths.get("project/groupme/res/values-da/strings.xml"); Path path4 = Paths.get("project/groupmetoo/res/values-da-rAB/strings.xml"); Path path5 = Paths.get("project/foreveralone/res/values-es/strings.xml"); ImmutableList<Path> files = ImmutableList.of(path0, path1, path2, path3, path4, path5); ImmutableMultimap<String, Path> groupedByLocale = createNonExecutingStep().groupFilesByLocale(ImmutableList.copyOf(files)); ImmutableMultimap<String, Path> expectedMap = ImmutableMultimap.<String, Path>builder() .putAll("da", ImmutableSet.of(path0, path3)) .putAll("da_AB", ImmutableSet.of(path1, path4)) .putAll("es", ImmutableSet.of(path5)) .putAll("en", ImmutableSet.of(path2)) .build(); assertEquals( "Result of CompileStringsStep.groupFilesByLocale() should match the expected value.", expectedMap, groupedByLocale); } @Test public void testScrapeStringNodes() throws IOException, SAXException { String xmlInput = "<string name='name1' gender='unknown'>Value1</string>" + "<string name='name1_f1gender' gender='female'>Value1_f1</string>" + "<string name='name2' gender='unknown'>Value with space</string>" + "<string name='name2_m2gender' gender='male'>Value with space m2</string>" + "<string name='name3' gender='unknown'>Value with \"quotes\"</string>" + "<string name='name4' gender='unknown'></string>" + // ignored because "name3" already found "<string name='name3' gender='unknown'>IGNORE</string>" + "<string name='name5' gender='unknown'>Value with %1$s</string>"; NodeList stringNodes = 
XmlDomParser.parse(createResourcesXml(xmlInput)).getElementsByTagName("string"); EnumMap<Gender, String> map1 = Maps.newEnumMap(Gender.class); map1.put(Gender.unknown, "Value1"); map1.put(Gender.female, "Value1_f1"); EnumMap<Gender, String> map2 = Maps.newEnumMap(Gender.class); map2.put(Gender.unknown, "Value with space"); map2.put(Gender.male, "Value with space m2"); EnumMap<Gender, String> map3 = Maps.newEnumMap(Gender.class); map3.put(Gender.unknown, "Value with \"quotes\""); EnumMap<Gender, String> map4 = Maps.newEnumMap(Gender.class); map4.put(Gender.unknown, ""); EnumMap<Gender, String> map5 = Maps.newEnumMap(Gender.class); map5.put(Gender.unknown, "Value with %1$s"); Map<Integer, EnumMap<Gender, String>> stringsMap = new HashMap<>(); CompileStringsStep step = createNonExecutingStep(); step.addStringResourceNameToIdMap( ImmutableMap.of( "name1", 1, "name2", 2, "name3", 3, "name4", 4, "name5", 5)); step.scrapeStringNodes(stringNodes, stringsMap); assertEquals( "Incorrect map of resource id to string values.", ImmutableMap.of( 1, map1, 2, map2, 3, map3, 4, map4, 5, map5), stringsMap); } @Test public void testScrapePluralsNodes() throws IOException, SAXException { String xmlInput = "<plurals name='name1' gender='unknown'>" + "<item quantity='zero'>%d people saw this</item>" + "<item quantity='one'>%d person saw this</item>" + "<item quantity='many'>%d people saw this</item>" + "</plurals>" + "<plurals name='name1_f1gender' gender='female'>" + "<item quantity='zero'>%d people saw this f1</item>" + "<item quantity='one'>%d person saw this f1</item>" + "<item quantity='many'>%d people saw this f1</item>" + "</plurals>" + "<plurals name='name2' gender='unknown'>" + "<item quantity='zero'>%d people ate this</item>" + "<item quantity='many'>%d people ate this</item>" + "</plurals>" + "<plurals name='name2_m2gender' gender='male'>" + "<item quantity='zero'>%d people ate this m2</item>" + "<item quantity='many'>%d people ate this m2</item>" + "</plurals>" + "<plurals 
name='name3' gender='unknown'></plurals>" + // Test empty array. // Ignored since "name2" already found. "<plurals name='name2' gender='unknown'></plurals>"; NodeList pluralsNodes = XmlDomParser.parse(createResourcesXml(xmlInput)).getElementsByTagName("plurals"); EnumMap<Gender, ImmutableMap<String, String>> map1 = Maps.newEnumMap(Gender.class); map1.put( Gender.unknown, ImmutableMap.of( "zero", "%d people saw this", "one", "%d person saw this", "many", "%d people saw this")); map1.put( Gender.female, ImmutableMap.of( "zero", "%d people saw this f1", "one", "%d person saw this f1", "many", "%d people saw this f1")); EnumMap<Gender, ImmutableMap<String, String>> map2 = Maps.newEnumMap(Gender.class); map2.put( Gender.unknown, ImmutableMap.of( "zero", "%d people ate this", "many", "%d people ate this")); map2.put( Gender.male, ImmutableMap.of( "zero", "%d people ate this m2", "many", "%d people ate this m2")); EnumMap<Gender, ImmutableMap<String, String>> map3 = Maps.newEnumMap(Gender.class); map3.put(Gender.unknown, ImmutableMap.of()); Map<Integer, EnumMap<Gender, ImmutableMap<String, String>>> pluralsMap = new HashMap<>(); CompileStringsStep step = createNonExecutingStep(); step.addPluralsResourceNameToIdMap( ImmutableMap.of( "name1", 1, "name2", 2, "name3", 3)); step.scrapePluralsNodes(pluralsNodes, pluralsMap); assertEquals( "Incorrect map of resource id to plural values.", ImmutableMap.of( 1, map1, 2, map2, 3, map3), pluralsMap); } @Test public void testScrapeStringArrayNodes() throws IOException, SAXException { String xmlInput = "<string-array name='name1' gender='unknown'>" + "<item>Value12</item>" + "<item>Value11</item>" + "</string-array>" + "<string-array name='name1_f1gender' gender='female'>" + "<item>Value12 f1</item>" + "<item>Value11 f1</item>" + "</string-array>" + "<string-array name='name2' gender='unknown'>" + "<item>Value21</item>" + "</string-array>" + "<string-array name='name2_m2gender' gender='male'>" + "<item>Value21 m2</item>" + 
"</string-array>" + "<string-array name='name3' gender='unknown'></string-array>" + "<string-array name='name2' gender='unknown'>" + "<item>ignored</item>" + // Ignored because "name2" already found above. "</string-array>"; EnumMap<Gender, List<String>> map1 = Maps.newEnumMap(Gender.class); map1.put(Gender.unknown, ImmutableList.of("Value12", "Value11")); map1.put(Gender.female, ImmutableList.of("Value12 f1", "Value11 f1")); EnumMap<Gender, List<String>> map2 = Maps.newEnumMap(Gender.class); map2.put(Gender.unknown, ImmutableList.of("Value21")); map2.put(Gender.male, ImmutableList.of("Value21 m2")); NodeList arrayNodes = XmlDomParser.parse(createResourcesXml(xmlInput)).getElementsByTagName("string-array"); Map<Integer, EnumMap<Gender, ImmutableList<String>>> arraysMap = new TreeMap<>(); CompileStringsStep step = createNonExecutingStep(); step.addArrayResourceNameToIdMap( ImmutableMap.of( "name1", 1, "name2", 2, "name3", 3)); step.scrapeStringArrayNodes(arrayNodes, arraysMap); assertEquals( "Incorrect map of resource id to string arrays.", ImmutableMap.of(1, map1, 2, map2), arraysMap); } @Test public void testScrapeNodesWithSameName() throws IOException, SAXException { String xmlInput = "<string name='name1' gender='unknown'>1</string>" + "<string name='name1_f1gender' gender='female'>1 f1</string>" + "<plurals name='name1' gender='unknown'>" + "<item quantity='one'>2</item>" + "<item quantity='other'>3</item>" + "</plurals>" + "<plurals name='name1_f1gender' gender='female'>" + "<item quantity='one'>2 f1</item>" + "<item quantity='other'>3 f1</item>" + "</plurals>" + "<string-array name='name1' gender='unknown'>" + "<item>4</item>" + "<item>5</item>" + "</string-array>" + "<string-array name='name1_f1gender' gender='female'>" + "<item>4 f1</item>" + "<item>5 f1</item>" + "</string-array>"; NodeList stringNodes = XmlDomParser.parse(createResourcesXml(xmlInput)).getElementsByTagName("string"); NodeList pluralsNodes = 
XmlDomParser.parse(createResourcesXml(xmlInput)).getElementsByTagName("plurals"); NodeList arrayNodes = XmlDomParser.parse(createResourcesXml(xmlInput)).getElementsByTagName("string-array"); Map<Integer, EnumMap<Gender, String>> stringMap = new TreeMap<>(); Map<Integer, EnumMap<Gender, ImmutableMap<String, String>>> pluralsMap = new TreeMap<>(); Map<Integer, EnumMap<Gender, ImmutableList<String>>> arraysMap = new TreeMap<>(); EnumMap<Gender, String> map1 = Maps.newEnumMap(Gender.class); map1.put(Gender.unknown, "1"); map1.put(Gender.female, "1 f1"); EnumMap<Gender, Map<String, String>> map2 = Maps.newEnumMap(Gender.class); map2.put(Gender.unknown, ImmutableMap.of("one", "2", "other", "3")); map2.put(Gender.female, ImmutableMap.of("one", "2 f1", "other", "3 f1")); EnumMap<Gender, ImmutableList<String>> map3 = Maps.newEnumMap(Gender.class); map3.put(Gender.unknown, ImmutableList.of("4", "5")); map3.put(Gender.female, ImmutableList.of("4 f1", "5 f1")); CompileStringsStep step = createNonExecutingStep(); step.addStringResourceNameToIdMap(ImmutableMap.of("name1", 1)); step.addPluralsResourceNameToIdMap(ImmutableMap.of("name1", 2)); step.addArrayResourceNameToIdMap(ImmutableMap.of("name1", 3)); step.scrapeStringNodes(stringNodes, stringMap); step.scrapePluralsNodes(pluralsNodes, pluralsMap); step.scrapeStringArrayNodes(arrayNodes, arraysMap); assertEquals("Incorrect map of resource id to string.", ImmutableMap.of(1, map1), stringMap); assertEquals("Incorrect map of resource id to plurals.", ImmutableMap.of(2, map2), pluralsMap); assertEquals( "Incorrect map of resource id to string arrays.", ImmutableMap.of(3, map3), arraysMap); } private CompileStringsStep createNonExecutingStep() { return new CompileStringsStep( new FakeProjectFilesystem(), ImmutableList.of(), createMock(Path.class), locale -> { throw new UnsupportedOperationException(); }); } private String createResourcesXml(String contents) { return XML_HEADER + "<resources>" + contents + "</resources>"; } @Test 
// End-to-end test: run the step against five fixture string files and compare
// every emitted binary asset byte-for-byte against the expected .fbstr fixtures.
public void testSuccessfulStepExecution() throws InterruptedException, IOException {
  final Path destinationDir = Paths.get("");
  Path rDotJavaSrcDir = Paths.get("");
  ExecutionContext context = TestExecutionContext.newInstance();

  FakeProjectFileSystem fileSystem = new FakeProjectFileSystem();
  ImmutableList<Path> stringFiles =
      ImmutableList.of(firstFile, secondFile, thirdFile, fourthFile, fifthFile);

  replayAll();
  CompileStringsStep step =
      new CompileStringsStep(
          fileSystem,
          stringFiles,
          rDotJavaSrcDir.resolve("R.txt"),
          input -> destinationDir.resolve(input + PackageStringAssets.STRING_ASSET_FILE_EXTENSION));
  assertEquals(0, step.execute(context).getExitCode());
  Map<String, byte[]> fileContentsMap = fileSystem.getFileContents();
  // Five input files but only 4 outputs are expected: presumably two locales
  // collapse into one asset — TODO(review): confirm against the fixtures.
  assertEquals("Incorrect number of string files written.", 4, fileContentsMap.size());
  for (Map.Entry<String, byte[]> entry : fileContentsMap.entrySet()) {
    File expectedFile = testdataDir.resolve(entry.getKey()).toFile();
    assertArrayEquals(createBinaryStream(expectedFile), fileContentsMap.get(entry.getKey()));
  }
  verifyAll();
}

/**
 * Builds the expected binary contents from a textual fixture file.
 *
 * <p>Each fixture line is a '|'-separated list of tokens of the form {@code <type>:<value>},
 * where type 'i' writes an int, 's' a short, 'b' a byte, and 't' raw UTF-8 text. The value
 * starts at index 2, i.e. the character after the one-char type and its separator.
 */
private byte[] createBinaryStream(File expectedFile) throws IOException {
  try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
      DataOutputStream stream = new DataOutputStream(bos)) {

    for (String line : Files.readLines(expectedFile, Charset.defaultCharset())) {
      for (String token : Splitter.on('|').split(line)) {
        char dataType = token.charAt(0);
        String value = token.substring(2);
        switch (dataType) {
          case 'i':
            stream.writeInt(Integer.parseInt(value));
            break;
          case 's':
            stream.writeShort(Integer.parseInt(value));
            break;
          case 'b':
            stream.writeByte(Integer.parseInt(value));
            break;
          case 't':
            stream.write(value.getBytes(StandardCharsets.UTF_8));
            break;
          default:
            throw new RuntimeException("Unexpected data type in .fbstr file: " + dataType);
        }
      }
    }
    return bos.toByteArray();
  }
}

/**
 * In-memory ProjectFilesystem: reads fixture files from {@code testdataDir} and captures
 * written bytes in a map keyed by output file name instead of touching disk.
 *
 * <p>NOTE(review): distinct from Buck's own {@code FakeProjectFilesystem} (lowercase 's')
 * used elsewhere in this class — the near-identical names are easy to confuse.
 */
private class FakeProjectFileSystem extends ProjectFilesystem {
  private ImmutableMap.Builder<String, byte[]> fileContentsMapBuilder = ImmutableMap.builder();

  public FakeProjectFileSystem() throws InterruptedException {
    super(Paths.get(".").toAbsolutePath());
  }

  @Override
  public List<String> readLines(Path path) throws IOException {
    // Redirect all reads into the test-data directory.
    Path fullPath = testdataDir.resolve(path);
    return Files.readLines(fullPath.toFile(), Charset.defaultCharset());
  }

  @Override
  public void writeBytesToPath(byte[] content, Path path, FileAttribute<?>... attrs) {
    // Capture output instead of writing; keyed by file name only.
    fileContentsMapBuilder.put(path.getFileName().toString(), content);
  }

  public Map<String, byte[]> getFileContents() {
    return fileContentsMapBuilder.build();
  }
}
}
/*
 * Copyright 2010 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import com.google.common.base.Predicate;
import com.google.javascript.jscomp.MinimizedCondition.MeasuredNode;
import com.google.javascript.jscomp.MinimizedCondition.MinimizationStyle;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.TernaryValue;

/**
 * A peephole optimization that minimizes conditional expressions
 * according to De Morgan's laws.
 * Also rewrites conditional statements as expressions by replacing them
 * with HOOKs and short-circuit binary operators.
 *
 * Based on PeepholeSubstituteAlternateSyntax
 */
class PeepholeMinimizeConditions
  extends AbstractPeepholeOptimization {

  private static final int AND_PRECEDENCE = NodeUtil.precedence(Token.AND);

  private final boolean late;

  /**
   * @param late When late is false, this mean we are currently running before most of the other
   *     optimizations. In this case we would avoid optimizations that would make the code harder to
   *     analyze (such as using string splitting, merging statements with commas, etc). When late is
   *     true, we would do anything to minimize for size.
   */
  PeepholeMinimizeConditions(boolean late) {
    this.late = late;
  }

  /**
   * Tries to apply our various peephole minimizations on the passed in node.
   */
  @Override
  @SuppressWarnings("fallthrough")
  public Node optimizeSubtree(Node node) {
    switch (node.getToken()) {
      case THROW:
      case RETURN: {
        Node result = tryRemoveRedundantExit(node);
        if (result != node) {
          return result;
        }
        return tryReplaceExitWithBreak(node);
      }

      // TODO(johnlenz): Maybe remove redundant BREAK and CONTINUE. Overlaps
      // with MinimizeExitPoints.

      case NOT:
        tryMinimizeCondition(node.getFirstChild());
        return tryMinimizeNot(node);

      case IF:
        performConditionSubstitutions(node.getFirstChild());
        return tryMinimizeIf(node);

      case EXPR_RESULT:
        performConditionSubstitutions(node.getFirstChild());
        return tryMinimizeExprResult(node);

      case HOOK:
        performConditionSubstitutions(node.getFirstChild());
        return tryMinimizeHook(node);

      case WHILE:
      case DO:
        tryMinimizeCondition(NodeUtil.getConditionExpression(node));
        return node;

      case FOR:
        tryJoinForCondition(node);
        tryMinimizeCondition(NodeUtil.getConditionExpression(node));
        return node;

      case BLOCK:
        return tryReplaceIf(node);

      default:
        return node; // Nothing changed
    }
  }

  // In late mode, folds "for(...;cond;...){ if (c) break; body }" into
  // "for(...;cond && !c;...){ body }" (or just "!c" when the FOR condition is empty).
  private void tryJoinForCondition(Node n) {
    if (!late) {
      return;
    }

    Node block = n.getLastChild();
    Node maybeIf = block.getFirstChild();
    if (maybeIf != null && maybeIf.isIf()) {
      Node thenBlock = maybeIf.getSecondChild();
      Node maybeBreak = thenBlock.getFirstChild();
      if (maybeBreak != null && maybeBreak.isBreak()
          && !maybeBreak.hasChildren()) {

        // Preserve the IF ELSE expression if there is one.
        if (maybeIf.hasXChildren(3)) {
          block.replaceChild(maybeIf,
              maybeIf.getLastChild().detach());
        } else {
          NodeUtil.redeclareVarsInsideBranch(thenBlock);
          block.removeFirstChild();
        }

        Node ifCondition = maybeIf.removeFirstChild();
        Node fixedIfCondition = IR.not(ifCondition)
            .srcref(ifCondition);

        // OK, join the IF expression with the FOR expression
        Node forCondition = NodeUtil.getConditionExpression(n);
        if (forCondition.isEmpty()) {
          n.replaceChild(forCondition, fixedIfCondition);
          reportChangeToEnclosingScope(fixedIfCondition);
        } else {
          Node replacement = new Node(Token.AND);
          n.replaceChild(forCondition, replacement);
          replacement.addChildToBack(forCondition);
          replacement.addChildToBack(fixedIfCondition);
          reportChangeToEnclosingScope(replacement);
        }
      }
    }
  }

  /**
   * Use "return x?1:2;" in place of "if(x)return 1;return 2;"
   */
  private Node tryReplaceIf(Node n) {
    Node next = null;
    for (Node child = n.getFirstChild(); child != null; child = next) {
      next = child.getNext();
      if (child.isIf()) {
        Node cond = child.getFirstChild();
        Node thenBranch = cond.getNext();
        Node elseBranch = thenBranch.getNext();
        Node nextNode = child.getNext();

        if (nextNode != null && elseBranch == null
            && isReturnBlock(thenBranch)
            && nextNode.isIf()) {
          Node nextCond = nextNode.getFirstChild();
          Node nextThen = nextCond.getNext();
          Node nextElse = nextThen.getNext();
          if (thenBranch.isEquivalentToTyped(nextThen)) {
            // Transform
            //   if (x) return 1; if (y) return 1;
            // to
            //   if (x||y) return 1;
            child.detach();
            child.detachChildren();
            Node newCond = new Node(Token.OR, cond);
            nextNode.replaceChild(nextCond, newCond);
            newCond.addChildToBack(nextCond);
            reportChangeToEnclosingScope(newCond);
          } else if (nextElse != null
              && thenBranch.isEquivalentToTyped(nextElse)) {
            // Transform
            //   if (x) return 1; if (y) foo() else return 1;
            // to
            //   if (!x&&y) foo() else return 1;
            child.detach();
            child.detachChildren();
            Node newCond = new Node(Token.AND,
                IR.not(cond).srcref(cond));
            nextNode.replaceChild(nextCond, newCond);
            newCond.addChildToBack(nextCond);
            reportChangeToEnclosingScope(newCond);
          }
        } else if (nextNode != null && elseBranch == null
            && isReturnBlock(thenBranch)
            && isReturnExpression(nextNode)) {
          Node thenExpr = null;
          // if(x)return; return 1 -> return x?void 0:1
          if (isReturnExpressBlock(thenBranch)) {
            thenExpr = getBlockReturnExpression(thenBranch);
            thenExpr.detach();
          } else {
            thenExpr = NodeUtil.newUndefinedNode(child);
          }

          Node elseExpr = nextNode.getFirstChild();
          cond.detach();
          elseExpr.detach();

          Node returnNode = IR.returnNode(
                                IR.hook(cond, thenExpr, elseExpr)
                                    .srcref(child));
          n.replaceChild(child, returnNode);
          n.removeChild(nextNode);
          reportChangeToEnclosingScope(n);
          // everything else in the block is dead code.
          break;
        } else if (elseBranch != null && statementMustExitParent(thenBranch)) {
          // "if (c) return; else s;" -> hoist s out of the else branch, since the
          // then branch always exits.
          child.removeChild(elseBranch);
          n.addChildAfter(elseBranch, child);
          reportChangeToEnclosingScope(n);
        }
      }
    }
    return n;
  }

  // True when executing n always leaves the enclosing function (THROW/RETURN,
  // or a BLOCK whose last statement does).
  private static boolean statementMustExitParent(Node n) {
    switch (n.getToken()) {
      case THROW:
      case RETURN:
        return true;
      case BLOCK:
        if (n.hasChildren()) {
          Node child = n.getLastChild();
          return statementMustExitParent(child);
        }
        return false;
      // TODO(johnlenz): handle TRY/FINALLY
      case FUNCTION:
      default:
        return false;
    }
  }

  /**
   * Replace duplicate exits in control structures.  If the node following
   * the exit node expression has the same effect as exit node, the node can
   * be replaced or removed.
   * For example:
   *   "while (a) {return f()} return f();" ==> "while (a) {break} return f();"
   *   "while (a) {throw 'ow'} throw 'ow';" ==> "while (a) {break} throw 'ow';"
   *
   * @param n A follow control exit expression (a THROW or RETURN node)
   * @return The replacement for n, or the original if no change was made.
   */
  private Node tryReplaceExitWithBreak(Node n) {
    Node result = n.getFirstChild();

    // Find the enclosing control structure, if any, that a "break" would exit
    // from.
    Node breakTarget = n;
    for (; !ControlFlowAnalysis.isBreakTarget(breakTarget, null /* no label */);
        breakTarget = breakTarget.getParent()) {
      if (breakTarget.isFunction() || breakTarget.isScript()) {
        // No break target.
        return n;
      }
    }

    Node follow = ControlFlowAnalysis.computeFollowNode(breakTarget);

    // Skip past all the finally blocks because both the break and return will
    // also trigger all the finally blocks. However, the order of execution is
    // slightly changed. Consider:
    //
    // return a() -> finally { b() } -> return a()
    //
    // which would call a() first. However, changing the first return to a
    // break will result in calling b().

    Node prefinallyFollows = follow;
    follow = skipFinallyNodes(follow);

    if (prefinallyFollows != follow) {
      // There were finally clauses
      if (!isPure(result)) {
        // Can't defer the exit
        return n;
      }
    }

    if (follow == null && (n.isThrow() || result != null)) {
      // Can't completely remove a throw here or a return with a result.
      return n;
    }

    // When follow is null, this mean the follow of a break target is the
    // end of a function. This means a break is same as return.
    if (follow == null || areMatchingExits(n, follow)) {
      Node replacement = IR.breakNode();
      n.replaceWith(replacement);
      reportChangeToEnclosingScope(replacement);
      return replacement;
    }

    return n;
  }

  /**
   * Remove duplicate exits.  If the node following the exit node expression
   * has the same effect as exit node, the node can be removed.
   * For example:
   *   "if (a) {return f()} return f();" ==> "if (a) {} return f();"
   *   "if (a) {throw 'ow'} throw 'ow';" ==> "if (a) {} throw 'ow';"
   *
   * @param n A follow control exit expression (a THROW or RETURN node)
   * @return The replacement for n, or the original if no change was made.
   */
  private Node tryRemoveRedundantExit(Node n) {
    Node exitExpr = n.getFirstChild();

    Node follow = ControlFlowAnalysis.computeFollowNode(n);

    // Skip past all the finally blocks because both the fall through and return
    // will also trigger all the finally blocks.
    Node prefinallyFollows = follow;
    follow = skipFinallyNodes(follow);
    if (prefinallyFollows != follow) {
      // There were finally clauses
      if (!isPure(exitExpr)) {
        // Can't replace the return
        return n;
      }
    }

    if (follow == null && (n.isThrow() || exitExpr != null)) {
      // Can't completely remove a throw here or a return with a result.
      return n;
    }

    // When follow is null, this mean the follow of a break target is the
    // end of a function. This means a break is same as return.
    if (follow == null || areMatchingExits(n, follow)) {
      reportChangeToEnclosingScope(n);
      n.detach();
      return null;
    }

    return n;
  }

  /**
   * @return Whether the expression does not produce and can not be affected
   *     by side-effects.
   */
  boolean isPure(Node n) {
    return n == null
        || (!NodeUtil.canBeSideEffected(n)
            && !mayHaveSideEffects(n));
  }

  /**
   * @return n or the node following any following finally nodes.
   */
  static Node skipFinallyNodes(Node n) {
    while (n != null && NodeUtil.isTryFinallyNode(n.getParent(), n)) {
      n = ControlFlowAnalysis.computeFollowNode(n);
    }
    return n;
  }

  /**
   * Check whether one exit can be replaced with another. Verify:
   * 1) They are identical expressions
   * 2) If an exception is possible, that the statements (the original
   *    and the potential replacement) are in the same exception handler.
   */
  boolean areMatchingExits(Node nodeThis, Node nodeThat) {
    if (!isASTNormalized()
        && (nodeThis.isThrow() || nodeThis.isReturn())
        && nodeThis.hasChildren()) {
      // if the ast isn't normalized "return a" or "throw a" may not mean the same thing in
      // different blocks.
      return false;
    }
    return nodeThis.isEquivalentTo(nodeThat)
        && (!isExceptionPossible(nodeThis)
            || getExceptionHandler(nodeThis) == getExceptionHandler(nodeThat));
  }

  static boolean isExceptionPossible(Node n) {
    // TODO(johnlenz): maybe use ControlFlowAnalysis.mayThrowException?
    checkState(n.isReturn() || n.isThrow(), n);
    return n.isThrow()
        || (n.hasChildren()
            && !NodeUtil.isLiteralValue(n.getLastChild(), true));
  }

  static Node getExceptionHandler(Node n) {
    return ControlFlowAnalysis.getExceptionHandler(n);
  }

  /**
   * Try to minimize NOT nodes such as !(x==y).
   *
   * Returns the replacement for n or the original if no change was made
   */
  private Node tryMinimizeNot(Node n) {
    checkArgument(n.isNot());
    Node parent = n.getParent();
    Node notChild = n.getFirstChild();
    // negative operator of the current one : == -> != for instance.
    Token complementOperator;
    switch (notChild.getToken()) {
      case EQ:
        complementOperator = Token.NE;
        break;
      case NE:
        complementOperator = Token.EQ;
        break;
      case SHEQ:
        complementOperator = Token.SHNE;
        break;
      case SHNE:
        complementOperator = Token.SHEQ;
        break;
      // GT, GE, LT, LE are not handled in this because !(x<NaN) != x>=NaN.
      default:
        return n;
    }
    Node newOperator = n.removeFirstChild();
    newOperator.setToken(complementOperator);
    parent.replaceChild(n, newOperator);
    reportChangeToEnclosingScope(parent);
    return newOperator;
  }

  /**
   * Try to remove leading NOTs from EXPR_RESULTS.
   *
   * Returns the replacement for n or the original if no replacement was
   * necessary.
   */
  private Node tryMinimizeExprResult(Node n) {
    Node originalExpr = n.getFirstChild();
    MinimizedCondition minCond = MinimizedCondition.fromConditionNode(originalExpr);
    MeasuredNode mNode = minCond.getMinimized(MinimizationStyle.ALLOW_LEADING_NOT);
    if (mNode.isNot()) {
      // Remove the leading NOT in the EXPR_RESULT.
      replaceNode(originalExpr, mNode.withoutNot());
    } else {
      replaceNode(originalExpr, mNode);
    }
    return n;
  }

  /**
   * Try flipping HOOKs that have negated conditions.
   *
   * Returns the replacement for n or the original if no replacement was
   * necessary.
   */
  private Node tryMinimizeHook(Node n) {
    Node originalCond = n.getFirstChild();
    MinimizedCondition minCond = MinimizedCondition.fromConditionNode(originalCond);
    MeasuredNode mNode = minCond.getMinimized(MinimizationStyle.ALLOW_LEADING_NOT);
    if (mNode.isNot()) {
      // Swap the HOOK
      Node thenBranch = n.getSecondChild();
      replaceNode(originalCond, mNode.withoutNot());
      n.removeChild(thenBranch);
      n.addChildToBack(thenBranch);
      reportChangeToEnclosingScope(n);
    } else {
      replaceNode(originalCond, mNode);
    }
    return n;
  }

  /**
   * Try turning IF nodes into smaller HOOKs
   *
   * Returns the replacement for n or the original if no replacement was
   * necessary.
   */
  private Node tryMinimizeIf(Node n) {
    Node parent = n.getParent();
    Node originalCond = n.getFirstChild();

    /* If the condition is a literal, we'll let other
     * optimizations try to remove useless code.
     */
    if (NodeUtil.isLiteralValue(originalCond, true)) {
      return n;
    }

    Node thenBranch = originalCond.getNext();
    Node elseBranch = thenBranch.getNext();

    MinimizedCondition minCond = MinimizedCondition.fromConditionNode(originalCond);

    // Compute two minimized representations. The first representation counts
    // a leading NOT node, and the second ignores a leading NOT node.
    // If we can fold the if statement into a HOOK or boolean operation,
    // then the NOT node does not matter, and we prefer the second condition.
    // If we cannot fold the if statement, then we prefer the first condition.
    MeasuredNode unnegatedCond = minCond.getMinimized(MinimizationStyle.PREFER_UNNEGATED);
    MeasuredNode shortCond = minCond.getMinimized(MinimizationStyle.ALLOW_LEADING_NOT);

    if (elseBranch == null) {
      if (isFoldableExpressBlock(thenBranch)) {
        Node expr = getBlockExpression(thenBranch);
        if (!late && isPropertyAssignmentInExpression(expr)) {
          // Keep opportunities for CollapseProperties such as
          // a.longIdentifier || a.longIdentifier = ... -> var a = ...;
          // until CollapseProperties has been run.
          replaceNode(originalCond, unnegatedCond);
          return n;
        }

        if (shortCond.isNot()) {
          // if(!x)bar(); -> x||bar();
          Node replacementCond = replaceNode(originalCond, shortCond.withoutNot()).detach();
          Node or = IR.or(
              replacementCond,
              expr.removeFirstChild()).srcref(n);
          Node newExpr = NodeUtil.newExpr(or);
          parent.replaceChild(n, newExpr);
          reportChangeToEnclosingScope(parent);

          return newExpr;
        }

        // True, but removed for performance reasons.
        // Preconditions.checkState(shortCond.isEquivalentTo(unnegatedCond));

        // if(x)foo(); -> x&&foo();
        if (shortCond.isLowerPrecedenceThan(AND_PRECEDENCE)
            && isLowerPrecedence(expr.getFirstChild(), AND_PRECEDENCE)) {
          // One additional set of parentheses is worth the change even if
          // there is no immediate code size win. However, two extra pair of
          // {}, we would have to think twice. (unless we know for sure the
          // we can further optimize its parent.
          replaceNode(originalCond, shortCond);
          return n;
        }

        Node replacementCond = replaceNode(originalCond, shortCond).detach();
        Node and = IR.and(replacementCond, expr.removeFirstChild()).srcref(n);
        Node newExpr = NodeUtil.newExpr(and);
        parent.replaceChild(n, newExpr);
        reportChangeToEnclosingScope(parent);
        return newExpr;
      } else {

        // Try to combine two IF-ELSE
        if (NodeUtil.isStatementBlock(thenBranch) && thenBranch.hasOneChild()) {
          Node innerIf = thenBranch.getFirstChild();

          if (innerIf.isIf()) {
            Node innerCond = innerIf.getFirstChild();
            Node innerThenBranch = innerCond.getNext();
            Node innerElseBranch = innerThenBranch.getNext();

            if (innerElseBranch == null
                && !(unnegatedCond.isLowerPrecedenceThan(AND_PRECEDENCE)
                    && isLowerPrecedence(innerCond, AND_PRECEDENCE))) {
              Node replacementCond = replaceNode(originalCond, unnegatedCond).detach();
              n.detachChildren();
              n.addChildToBack(
                  IR.and(
                      replacementCond,
                      innerCond.detach())
                      .srcref(originalCond));
              n.addChildToBack(innerThenBranch.detach());
              reportChangeToEnclosingScope(n);
              // Not worth trying to fold the current IF-ELSE into && because
              // the inner IF-ELSE wasn't able to be folded into && anyways.
              return n;
            }
          }
        }
      }

      replaceNode(originalCond, unnegatedCond);
      return n;
    }

    /* TODO(dcc) This modifies the siblings of n, which is undesirable for a
     * peephole optimization. This should probably get moved to another pass.
     */
    tryRemoveRepeatedStatements(n);

    // if(!x)foo();else bar(); -> if(x)bar();else foo();
    // An additional set of curly braces isn't worth it.
    if (shortCond.isNot() && !consumesDanglingElse(elseBranch)) {
      replaceNode(originalCond, shortCond.withoutNot());
      n.removeChild(thenBranch);
      n.addChildToBack(thenBranch);
      reportChangeToEnclosingScope(n);
      return n;
    }

    // if(x)return 1;else return 2; -> return x?1:2;
    if (isReturnExpressBlock(thenBranch) && isReturnExpressBlock(elseBranch)) {
      Node thenExpr = getBlockReturnExpression(thenBranch);
      Node elseExpr = getBlockReturnExpression(elseBranch);
      Node replacementCond = replaceNode(originalCond, shortCond).detach();
      thenExpr.detach();
      elseExpr.detach();

      // note - we ignore any cases with "return;", technically this
      // can be converted to "return undefined;" or some variant, but
      // that does not help code size.
      Node returnNode = IR.returnNode(
                            IR.hook(replacementCond, thenExpr, elseExpr)
                                .srcref(n));
      parent.replaceChild(n, returnNode);
      reportChangeToEnclosingScope(returnNode);
      return returnNode;
    }

    boolean thenBranchIsExpressionBlock = isFoldableExpressBlock(thenBranch);
    boolean elseBranchIsExpressionBlock = isFoldableExpressBlock(elseBranch);

    if (thenBranchIsExpressionBlock && elseBranchIsExpressionBlock) {
      Node thenOp = getBlockExpression(thenBranch).getFirstChild();
      Node elseOp = getBlockExpression(elseBranch).getFirstChild();
      if (thenOp.getToken() == elseOp.getToken()) {
        // if(x)a=1;else a=2; -> a=x?1:2;
        if (NodeUtil.isAssignmentOp(thenOp)) {
          Node lhs = thenOp.getFirstChild();
          if (areNodesEqualForInlining(lhs, elseOp.getFirstChild())
              // if LHS has side effects, don't proceed [since the optimization
              // evaluates LHS before cond]
              // NOTE - there are some circumstances where we can
              // proceed even if there are side effects...
              && !mayEffectMutableState(lhs)
              && (!mayHaveSideEffects(originalCond)
                  || (thenOp.isAssign() && thenOp.getFirstChild().isName()))) {

            Node replacementCond = replaceNode(originalCond, shortCond).detach();
            Node assignName = thenOp.removeFirstChild();
            Node thenExpr = thenOp.removeFirstChild();
            Node elseExpr = elseOp.getLastChild();
            elseOp.removeChild(elseExpr);

            Node hookNode = IR.hook(replacementCond, thenExpr, elseExpr)
                                .srcref(n);
            Node assign = new Node(thenOp.getToken(), assignName, hookNode).srcref(thenOp);
            Node expr = NodeUtil.newExpr(assign);
            parent.replaceChild(n, expr);
            reportChangeToEnclosingScope(parent);

            return expr;
          }
        }
      }
      // if(x)foo();else bar(); -> x?foo():bar()
      Node replacementCond = replaceNode(originalCond, shortCond).detach();
      thenOp.detach();
      elseOp.detach();
      Node expr = IR.exprResult(
          IR.hook(replacementCond, thenOp, elseOp).srcref(n));
      parent.replaceChild(n, expr);
      reportChangeToEnclosingScope(parent);
      return expr;
    }

    boolean thenBranchIsVar = isVarBlock(thenBranch);
    boolean elseBranchIsVar = isVarBlock(elseBranch);

    // if(x)var y=1;else y=2  ->  var y=x?1:2
    if (thenBranchIsVar && elseBranchIsExpressionBlock
        && getBlockExpression(elseBranch).getFirstChild().isAssign()) {

      Node var = getBlockVar(thenBranch);
      Node elseAssign = getBlockExpression(elseBranch).getFirstChild();

      Node name1 = var.getFirstChild();
      Node maybeName2 = elseAssign.getFirstChild();

      if (name1.hasChildren()
          && maybeName2.isName()
          && name1.getString().equals(maybeName2.getString())) {
        checkState(name1.hasOneChild());
        Node thenExpr = name1.removeFirstChild();
        Node elseExpr = elseAssign.getLastChild().detach();
        Node replacementCond = replaceNode(originalCond, shortCond).detach();
        Node hookNode = IR.hook(replacementCond, thenExpr, elseExpr).srcref(n);
        var.detach();
        name1.addChildToBack(hookNode);
        parent.replaceChild(n, var);
        reportChangeToEnclosingScope(parent);
        return var;
      }

    // if(x)y=1;else var y=2  ->  var y=x?1:2
    } else if (elseBranchIsVar && thenBranchIsExpressionBlock
        && getBlockExpression(thenBranch).getFirstChild().isAssign()) {

      Node var = getBlockVar(elseBranch);
      Node thenAssign = getBlockExpression(thenBranch).getFirstChild();

      Node maybeName1 = thenAssign.getFirstChild();
      Node name2 = var.getFirstChild();

      if (name2.hasChildren()
          && maybeName1.isName()
          && maybeName1.getString().equals(name2.getString())) {
        Node thenExpr = thenAssign.getLastChild().detach();
        checkState(name2.hasOneChild());
        Node elseExpr = name2.removeFirstChild();
        Node replacementCond = replaceNode(originalCond, shortCond).detach();
        Node hookNode = IR.hook(replacementCond, thenExpr, elseExpr).srcref(n);
        var.detach();
        name2.addChildToBack(hookNode);
        parent.replaceChild(n, var);
        reportChangeToEnclosingScope(parent);
        return var;
      }
    }

    replaceNode(originalCond, unnegatedCond);
    return n;
  }

  /**
   * Try to remove duplicate statements from IF blocks. For example:
   *
   * if (a) {
   *   x = 1;
   *   return true;
   * } else {
   *   x = 2;
   *   return true;
   * }
   *
   * becomes:
   *
   * if (a) {
   *   x = 1;
   * } else {
   *   x = 2;
   * }
   * return true;
   *
   * @param n The IF node to examine.
   */
  private void tryRemoveRepeatedStatements(Node n) {
    // Only run this if variable names are guaranteed to be unique. Otherwise bad things can happen:
    // see PeepholeMinimizeConditionsTest#testDontRemoveDuplicateStatementsWithoutNormalization
    if (!isASTNormalized()) {
      return;
    }
    checkState(n.isIf(), n);

    Node parent = n.getParent();
    if (!NodeUtil.isStatementBlock(parent)) {
      // If the immediate parent is something like a label, we
      // can't move the statement, so bail.
      return;
    }

    Node cond = n.getFirstChild();
    Node trueBranch = cond.getNext();
    Node falseBranch = trueBranch.getNext();
    checkNotNull(trueBranch);
    checkNotNull(falseBranch);

    while (true) {
      Node lastTrue = trueBranch.getLastChild();
      Node lastFalse = falseBranch.getLastChild();
      if (lastTrue == null || lastFalse == null
          || !areNodesEqualForInlining(lastTrue, lastFalse)) {
        break;
      }
      lastTrue.detach();
      lastFalse.detach();
      parent.addChildAfter(lastTrue, n);
      reportChangeToEnclosingScope(parent);
    }
  }

  /**
   * @return Whether the node is a block with a single statement that is
   *     an expression.
   */
  private static boolean isFoldableExpressBlock(Node n) {
    if (n.isBlock()) {
      if (n.hasOneChild()) {
        Node maybeExpr = n.getFirstChild();
        if (maybeExpr.isExprResult()) {
          // IE has a bug where event handlers behave differently when
          // their return value is used vs. when their return value is in
          // an EXPR_RESULT. It's pretty freaking weird. See:
          // http://blickly.github.io/closure-compiler-issues/#291
          // We try to detect this case, and not fold EXPR_RESULTs
          // into other expressions.
          if (maybeExpr.getFirstChild().isCall()
              || maybeExpr.getFirstChild().isOptChainCall()) {
            Node calledFn = maybeExpr.getFirstFirstChild();

            // We only have to worry about methods with an implicit 'this'
            // param, or this doesn't happen.
            if (calledFn.isGetElem() || calledFn.isOptChainGetElem()) {
              return false;
            } else if ((calledFn.isGetProp() || calledFn.isOptChainGetProp())
                && calledFn.getLastChild().getString().startsWith("on")) {
              return false;
            }
          }

          return true;
        }
        return false;
      }
    }

    return false;
  }

  /**
   * @return The expression node.
   */
  private static Node getBlockExpression(Node n) {
    checkState(isFoldableExpressBlock(n));
    return n.getFirstChild();
  }

  /**
   * @return Whether the node is a block with a single statement that is
   *     a return with or without an expression.
   */
  private static boolean isReturnBlock(Node n) {
    if (n.isBlock()) {
      if (n.hasOneChild()) {
        Node first = n.getFirstChild();
        return first.isReturn();
      }
    }

    return false;
  }

  /**
   * @return Whether the node is a block with a single statement that is
   *     a return (with an expression).
   */
  private static boolean isReturnExpressBlock(Node n) {
    if (n.isBlock()) {
      if (n.hasOneChild()) {
        Node first = n.getFirstChild();
        if (first.isReturn()) {
          return first.hasOneChild();
        }
      }
    }

    return false;
  }

  /**
   * @return Whether the node is a single return statement.
   */
  private static boolean isReturnExpression(Node n) {
    if (n.isReturn()) {
      return n.hasOneChild();
    }
    return false;
  }

  /**
   * @return The expression that is part of the return.
   */
  private static Node getBlockReturnExpression(Node n) {
    checkState(isReturnExpressBlock(n));
    return n.getFirstFirstChild();
  }

  /**
   * @return Whether the node is a block with a single statement that is
   *     a VAR declaration of a single variable.
   */
  private static boolean isVarBlock(Node n) {
    if (n.isBlock()) {
      if (n.hasOneChild()) {
        Node first = n.getFirstChild();
        if (first.isVar()) {
          return first.hasOneChild();
        }
      }
    }

    return false;
  }

  /**
   * @return The var node.
   */
  private static Node getBlockVar(Node n) {
    checkState(isVarBlock(n));
    return n.getFirstChild();
  }

  /**
   * Does a statement consume a 'dangling else'? A statement consumes
   * a 'dangling else' if an 'else' token following the statement
   * would be considered by the parser to be part of the statement.
   */
  private static boolean consumesDanglingElse(Node n) {
    while (true) {
      switch (n.getToken()) {
        case IF:
          if (n.getChildCount() < 3) {
            return true;
          }
          // This IF node has no else clause.
          n = n.getLastChild();
          continue;
        case BLOCK:
          if (!n.hasOneChild()) {
            return false;
          }
          // This BLOCK has no curly braces.
          n = n.getLastChild();
          continue;
        case WITH:
        case WHILE:
        case FOR:
        case FOR_IN:
          n = n.getLastChild();
          continue;
        default:
          return false;
      }
    }
  }

  /**
   * Whether the node type has lower precedence than "precedence"
   */
  static boolean isLowerPrecedence(Node n, int precedence) {
    return NodeUtil.precedence(n.getToken()) < precedence;
  }

  /**
   * Does the expression contain a property assignment?
   */
  private static boolean isPropertyAssignmentInExpression(Node n) {
    Predicate<Node> isPropertyAssignmentInExpressionPredicate =
        (Node input) -> (input.isGetProp() && input.getParent().isAssign());
    return NodeUtil.has(n, isPropertyAssignmentInExpressionPredicate,
        NodeUtil.MATCH_NOT_FUNCTION);
  }

  /**
   * Try to minimize condition expression, as there are additional
   * assumptions that can be made when it is known that the final result
   * is a boolean.
   *
   * @return The replacement for n, or the original if no change was made.
   */
  private Node tryMinimizeCondition(Node n) {
    n = performConditionSubstitutions(n);
    MinimizedCondition minCond = MinimizedCondition.fromConditionNode(n);
    return replaceNode(
        n,
        minCond.getMinimized(MinimizationStyle.PREFER_UNNEGATED));
  }

  // Applies the MeasuredNode to the tree only if it would actually change it,
  // reporting the change; otherwise leaves the original in place.
  private Node replaceNode(Node original, MeasuredNode measuredNodeReplacement) {
    if (measuredNodeReplacement.willChange(original)) {
      Node replacement = measuredNodeReplacement.applyTo(original);
      reportChangeToEnclosingScope(replacement);
      return replacement;
    }
    return original;
  }

  /**
   * Try to minimize the given condition by applying local substitutions.
   *
   * The following types of transformations are performed:
   *   x || true        --> true
   *   x && true        --> x
   *   x ? false : true --> !x
   *   x ? true : y     --> x || y
   *   x ? x : y        --> x || y
   *
   * Returns the replacement for n, or the original if no change was made
   */
  private Node performConditionSubstitutions(Node n) {
    Node parent = n.getParent();

    switch (n.getToken()) {
      case OR:
      case AND: {
        Node left = n.getFirstChild();
        Node right = n.getLastChild();

        // Because the expression is in a boolean context minimize
        // the children, this can't be done in the general case.
        left = performConditionSubstitutions(left);
        right = performConditionSubstitutions(right);

        // Remove useless conditionals
        // Handle the following cases:
        //   x || false --> x
        //   x && true --> x
        // This works regardless of whether x has side effects.
        //
        // If x does not have side effects:
        //   x || true --> true
        //   x && false --> false
        //
        // If x may have side effects:
        //   x || true --> x,true
        //   x && false --> x,false
        //
        // In the last two cases, code size may increase slightly (adding
        // some parens because the comma operator has a low precedence) but
        // the new AST is easier for other passes to handle.
        TernaryValue rightVal = getSideEffectFreeBooleanValue(right);
        // NOTE(review): recomputes the value already held in rightVal; the two
        // calls are equivalent, so this is redundant work but not a behavior change.
        if (getSideEffectFreeBooleanValue(right) != TernaryValue.UNKNOWN) {
          Token type = n.getToken();
          Node replacement = null;
          boolean rval = rightVal.toBoolean(true);

          // (x || FALSE) => x
          // (x && TRUE) => x
          if ((type == Token.OR && !rval) ||
              (type == Token.AND && rval)) {
            replacement = left;
          } else if (!mayHaveSideEffects(left)) {
            replacement = right;
          } else {
            // expr_with_sideeffects || true  =>  expr_with_sideeffects, true
            // expr_with_sideeffects && false  =>  expr_with_sideeffects, false
            n.detachChildren();
            replacement = IR.comma(left, right);
          }

          if (replacement != null) {
            n.detachChildren();
            parent.replaceChild(n, replacement);
            reportChangeToEnclosingScope(parent);
            return replacement;
          }
        }
        return n;
      }

      case HOOK: {
        Node condition = n.getFirstChild();
        Node trueNode = n.getSecondChild();
        Node falseNode = n.getLastChild();

        // Because the expression is in a boolean context minimize
        // the result children, this can't be done in the general case.
        // The condition is handled in the general case in #optimizeSubtree
        trueNode = performConditionSubstitutions(trueNode);
        falseNode = performConditionSubstitutions(falseNode);

        // Handle five cases:
        //   x ? true : false --> x
        //   x ? false : true --> !x
        //   x ? true : y --> x || y
        //   x ? y : false --> x && y
        //   (Only when x is NAME, hence x does not have side effects)
        //   x ? x : y --> x || y
        Node replacement = null;
        TernaryValue trueNodeVal = getSideEffectFreeBooleanValue(trueNode);
        TernaryValue falseNodeVal = getSideEffectFreeBooleanValue(falseNode);
        if (trueNodeVal == TernaryValue.TRUE && falseNodeVal == TernaryValue.FALSE) {
          // Remove useless conditionals, keep the condition
          condition.detach();
          replacement = condition;
        } else if (trueNodeVal == TernaryValue.FALSE && falseNodeVal == TernaryValue.TRUE) {
          // Remove useless conditionals, keep the condition
          condition.detach();
          replacement = IR.not(condition);
        } else if (trueNodeVal == TernaryValue.TRUE) {
          // Remove useless true case.
          n.detachChildren();
          replacement = IR.or(condition, falseNode);
        } else if (falseNodeVal == TernaryValue.FALSE) {
          // Remove useless false case
          n.detachChildren();
          replacement = IR.and(condition, trueNode);
        } else if (!mayHaveSideEffects(condition)
            && !mayHaveSideEffects(trueNode)
            && condition.isEquivalentTo(trueNode)) {
          // Remove redundant condition
          n.detachChildren();
          replacement = IR.or(trueNode, falseNode);
        }

        if (replacement != null) {
          parent.replaceChild(n, replacement);
          reportChangeToEnclosingScope(replacement);
          n = replacement;
        }
        return n;
      }

      default:
        // while(true) --> while(1)
        TernaryValue nVal = getSideEffectFreeBooleanValue(n);
        if (nVal != TernaryValue.UNKNOWN) {
          boolean result = nVal.toBoolean(true);
          int equivalentResult = result ? 1 : 0;
          return maybeReplaceChildWithNumber(n, parent, equivalentResult);
        }
        // We can't do anything else currently.
        return n;
    }
  }

  /**
   * Replaces a node with a number node if the new number node is not equivalent
   * to the current node.
   *
   * Returns the replacement for n if it was replaced, otherwise returns n.
   */
  private Node maybeReplaceChildWithNumber(Node n, Node parent, int num) {
    Node newNode = IR.number(num);
    if (!newNode.isEquivalentTo(n)) {
      parent.replaceChild(n, newNode);
      reportChangeToEnclosingScope(newNode);
      markFunctionsDeleted(n);
      return newNode;
    }

    return n;
  }
}
/**
 * Copyright 2013 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.xpydev.paycoinj.store;

import io.xpydev.paycoinj.core.*;
import io.xpydev.paycoinj.utils.Threading;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

/**
 * An SPVBlockStore holds a limited number of block headers in a memory mapped ring buffer. With such a store, you
 * may not be able to process very deep re-orgs and could be disconnected from the chain (requiring a replay),
 * but as they are virtually unheard of this is not a significant risk.
 */
public class SPVBlockStore implements BlockStore {
    private static final Logger log = LoggerFactory.getLogger(SPVBlockStore.class);

    /** The default number of headers that will be stored in the ring buffer. */
    public static final int DEFAULT_NUM_HEADERS = 5000;
    /** Magic bytes written at offset 0 of the file so we don't open some unrelated file by mistake. */
    public static final String HEADER_MAGIC = "SPVB";

    // Set to null by close(); every public method checks for null to detect use-after-close.
    protected volatile MappedByteBuffer buffer;
    protected int numHeaders;
    protected NetworkParameters params;

    // Guards all buffer position/cursor manipulation: position() followed by get/put is not atomic.
    protected ReentrantLock lock = Threading.lock("SPVBlockStore");

    // The entire ring-buffer is mmapped and accessing it should be as fast as accessing regular memory once it's
    // faulted in. Unfortunately, in theory practice and theory are the same. In practice they aren't.
    //
    // MMapping a file in Java does not give us a byte[] as you may expect but rather a ByteBuffer, and whilst on
    // the OpenJDK/Oracle JVM calls into the get() methods are compiled down to inlined native code on Android each
    // get() call is actually a full-blown JNI method under the hood, meaning it's unbelievably slow. The caches
    // below let us stay in the JIT-compiled Java world without expensive JNI transitions and make a 10x difference!
    protected LinkedHashMap<Sha256Hash, StoredBlock> blockCache = new LinkedHashMap<Sha256Hash, StoredBlock>() {
        @Override
        protected boolean removeEldestEntry(Map.Entry<Sha256Hash, StoredBlock> entry) {
            return size() > 2050;  // Slightly more than the difficulty transition period.
        }
    };
    // Use a separate cache to track get() misses. This is to efficiently handle the case of an unconnected block
    // during chain download. Each new block will do a get() on the unconnected block so if we haven't seen it yet we
    // must efficiently respond.
    //
    // We don't care about the value in this cache. It is always notFoundMarker. Unfortunately LinkedHashSet does not
    // provide the removeEldestEntry control.
    protected static final Object notFoundMarker = new Object();
    protected LinkedHashMap<Sha256Hash, Object> notFoundCache = new LinkedHashMap<Sha256Hash, Object>() {
        @Override
        protected boolean removeEldestEntry(Map.Entry<Sha256Hash, Object> entry) {
            return size() > 100;  // This was chosen arbitrarily.
        }
    };
    // Used to stop other applications/processes from opening the store.
    protected FileLock fileLock = null;
    protected RandomAccessFile randomAccessFile = null;

    /**
     * Creates and initializes an SPV block store. Will create the given file if it's missing. This operation
     * will block on disk.
     *
     * @param params network parameters whose genesis block seeds a brand new store
     * @param file   backing file; created at the fixed ring-buffer size if absent
     * @throws BlockStoreException if the file has the wrong size, wrong magic header, is locked by
     *                             another process, or any I/O error occurs
     */
    public SPVBlockStore(NetworkParameters params, File file) throws BlockStoreException {
        checkNotNull(file);
        this.params = checkNotNull(params);
        try {
            this.numHeaders = DEFAULT_NUM_HEADERS;
            boolean exists = file.exists();
            // Set up the backing file.
            randomAccessFile = new RandomAccessFile(file, "rw");
            long fileSize = getFileSize();
            if (!exists) {
                log.info("Creating new SPV block chain file " + file);
                randomAccessFile.setLength(fileSize);
            } else if (randomAccessFile.length() != fileSize) {
                // The ring buffer has a fixed layout; a mismatched size means a corrupt or foreign file.
                throw new BlockStoreException("File size on disk does not match expected size: " +
                        randomAccessFile.length() + " vs " + fileSize);
            }

            FileChannel channel = randomAccessFile.getChannel();
            fileLock = channel.tryLock();
            if (fileLock == null)
                throw new BlockStoreException("Store file is already locked by another process");

            // Map it into memory read/write. The kernel will take care of flushing writes to disk at the most
            // efficient times, which may mean that until the map is deallocated the data on disk is randomly
            // inconsistent. However the only process accessing it is us, via this mapping, so our own view will
            // always be correct. Once we establish the mmap the underlying file and channel can go away. Note that
            // the details of mmapping vary between platforms.
            buffer = channel.map(FileChannel.MapMode.READ_WRITE, 0, fileSize);

            // Check or initialize the header bytes to ensure we don't try to open some random file.
            byte[] header;
            if (exists) {
                header = new byte[4];
                buffer.get(header);
                if (!new String(header, "US-ASCII").equals(HEADER_MAGIC))
                    throw new BlockStoreException("Header bytes do not equal " + HEADER_MAGIC);
            } else {
                initNewStore(params);
            }
        } catch (Exception e) {
            // Best-effort cleanup of the file handle before rethrowing; closing it also drops the file lock.
            try {
                if (randomAccessFile != null) randomAccessFile.close();
            } catch (IOException e2) {
                throw new BlockStoreException(e2);
            }
            throw new BlockStoreException(e);
        }
    }

    /**
     * Writes the magic header, resets the ring cursor to just past the prologue, and stores the genesis
     * block as both first entry and chain head. Only called for a freshly created file.
     */
    private void initNewStore(NetworkParameters params) throws Exception {
        byte[] header;
        header = HEADER_MAGIC.getBytes("US-ASCII");
        buffer.put(header);
        // Insert the genesis block.
        lock.lock();
        try {
            setRingCursor(buffer, FILE_PROLOGUE_BYTES);
        } finally {
            lock.unlock();
        }
        Block genesis = params.getGenesisBlock().cloneAsHeader();
        StoredBlock storedGenesis = new StoredBlock(genesis, genesis.getWork(), 0);
        put(storedGenesis);
        setChainHead(storedGenesis);
    }

    /** Returns the size in bytes of the file that is used to store the chain with the current parameters. */
    public int getFileSize() {
        return RECORD_SIZE * numHeaders + FILE_PROLOGUE_BYTES /* extra kilobyte for stuff */;
    }

    /**
     * Appends the block at the ring cursor (wrapping to the start of the data area when the end of the
     * file is reached, overwriting the oldest record) and advances the cursor past it.
     */
    @Override
    public void put(StoredBlock block) throws BlockStoreException {
        final MappedByteBuffer buffer = this.buffer;
        if (buffer == null) throw new BlockStoreException("Store closed");

        lock.lock();
        try {
            int cursor = getRingCursor(buffer);
            if (cursor == getFileSize()) {
                // Wrapped around.
                cursor = FILE_PROLOGUE_BYTES;
            }
            buffer.position(cursor);
            Sha256Hash hash = block.getHeader().getHash();
            // The block is now connected, so it must no longer be reported as a known miss.
            notFoundCache.remove(hash);
            buffer.put(hash.getBytes());
            block.serializeCompact(buffer);
            setRingCursor(buffer, buffer.position());
            blockCache.put(hash, block);
        } finally { lock.unlock(); }
    }

    /**
     * Looks up a block by header hash: first the hit cache, then the miss cache, then a backwards linear
     * scan of the ring buffer starting from the newest record. Returns null when the hash is not stored.
     */
    @Override
    @Nullable
    public StoredBlock get(Sha256Hash hash) throws BlockStoreException {
        final MappedByteBuffer buffer = this.buffer;
        if (buffer == null) throw new BlockStoreException("Store closed");

        lock.lock();
        try {
            StoredBlock cacheHit = blockCache.get(hash);
            if (cacheHit != null)
                return cacheHit;
            if (notFoundCache.get(hash) != null)
                return null;

            // Starting from the current tip of the ring work backwards until we have either found the block or
            // wrapped around.
            int cursor = getRingCursor(buffer);
            final int startingPoint = cursor;
            final int fileSize = getFileSize();
            final byte[] targetHashBytes = hash.getBytes();
            byte[] scratch = new byte[32];
            do {
                cursor -= RECORD_SIZE;
                if (cursor < FILE_PROLOGUE_BYTES) {
                    // We hit the start, so wrap around.
                    cursor = fileSize - RECORD_SIZE;
                }
                // Cursor is now at the start of the next record to check, so read the hash and compare it.
                buffer.position(cursor);
                buffer.get(scratch);
                if (Arrays.equals(scratch, targetHashBytes)) {
                    // Found the target.
                    StoredBlock storedBlock = StoredBlock.deserializeCompact(params, buffer);
                    blockCache.put(hash, storedBlock);
                    return storedBlock;
                }
            } while (cursor != startingPoint);
            // Not found.
            notFoundCache.put(hash, notFoundMarker);
            return null;
        } catch (ProtocolException e) {
            throw new RuntimeException(e);  // Cannot happen.
        } finally { lock.unlock(); }
    }

    // Lazily-loaded cache of the chain head; kept in sync by setChainHead().
    protected StoredBlock lastChainHead = null;

    /**
     * Returns the current chain head. The head's hash lives at a fixed offset (8) in the file prologue;
     * the full record is then resolved via {@link #get(Sha256Hash)} on first access.
     */
    @Override
    public StoredBlock getChainHead() throws BlockStoreException {
        final MappedByteBuffer buffer = this.buffer;
        if (buffer == null) throw new BlockStoreException("Store closed");

        lock.lock();
        try {
            if (lastChainHead == null) {
                byte[] headHash = new byte[32];
                buffer.position(8);
                buffer.get(headHash);
                Sha256Hash hash = new Sha256Hash(headHash);
                StoredBlock block = get(hash);
                if (block == null)
                    throw new BlockStoreException("Corrupted block store: could not find chain head: " + hash);
                lastChainHead = block;
            }
            return lastChainHead;
        } finally { lock.unlock(); }
    }

    /** Records the new chain head both in memory and at the fixed prologue offset in the file. */
    @Override
    public void setChainHead(StoredBlock chainHead) throws BlockStoreException {
        final MappedByteBuffer buffer = this.buffer;
        if (buffer == null) throw new BlockStoreException("Store closed");

        lock.lock();
        try {
            lastChainHead = chainHead;
            byte[] headHash = chainHead.getHeader().getHash().getBytes();
            buffer.position(8);
            buffer.put(headHash);
        } finally { lock.unlock(); }
    }

    /**
     * Flushes the mapped buffer to disk and closes the backing file.
     * NOTE(review): not idempotent — a second call NPEs on the nulled buffer; callers must close once.
     */
    @Override
    public void close() throws BlockStoreException {
        try {
            buffer.force();
            buffer = null;  // Allow it to be GCd and the underlying file mapping to go away.
            randomAccessFile.close();
        } catch (IOException e) {
            throw new BlockStoreException(e);
        }
    }

    protected static final int RECORD_SIZE = 32 /* hash */ + StoredBlock.COMPACT_SERIALIZED_SIZE;

    // File format:
    //   4 header bytes = "SPVB"
    //   4 cursor bytes, which indicate the offset from the first kb where the next block header should be written.
    //   32 bytes for the hash of the chain head
    //
    // For each header (128 bytes)
    //   32 bytes hash of the header
    //   12 bytes of chain work
    //    4 bytes of height
    //   80 bytes of block header data
    protected static final int FILE_PROLOGUE_BYTES = 1024;

    /** Returns the offset from the file start where the latest block should be written (end of prev block). */
    private int getRingCursor(ByteBuffer buffer) {
        int c = buffer.getInt(4);
        // A cursor inside the prologue can only result from corruption (or int wrap-around), hence the message.
        checkState(c >= FILE_PROLOGUE_BYTES, "Integer overflow");
        return c;
    }

    /** Stores the ring cursor at its fixed offset (4) in the file prologue. */
    private void setRingCursor(ByteBuffer buffer, int newCursor) {
        checkArgument(newCursor >= 0);  // Defensive programming.
        buffer.putInt(4, newCursor);
    }
}
package ru.stqa.pft.addressbook.model;

import com.google.gson.annotations.Expose;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamOmitField;

/**
 * Data object describing a single address-book contact. All setters follow the fluent
 * "with" convention and return {@code this} so calls can be chained. Identity (equals /
 * hashCode) is based on id, username and familyName only; the remaining fields are payload.
 */
@XStreamAlias("contact")
public class ContactData {

    // Synthetic database id; MAX_VALUE marks a contact not yet persisted. Excluded from XML export.
    @XStreamOmitField
    private int id = Integer.MAX_VALUE;
    @Expose
    private String username;
    @Expose
    private String familyName;
    @Expose
    private String email;
    @Expose
    private String group;
    @Expose
    private String address;
    @Expose
    private String home;
    @Expose
    private String mobile;
    @Expose
    private String work;
    @Expose
    private String allPhones;
    @Expose
    private String mail1;
    @Expose
    private String mail2;
    @Expose
    private String mail3;
    @Expose
    private String emails;
    @Expose
    private String photoname;
    @Expose
    Phones phones;

    // ---- accessors -------------------------------------------------------

    public Phones getPhones() {
        return phones;
    }

    public String getPhotoname() {
        return photoname;
    }

    public String getEmails() {
        return emails;
    }

    public String getMail1() {
        return mail1;
    }

    public String getMail2() {
        return mail2;
    }

    public String getMail3() {
        return mail3;
    }

    public String getAllPhones() {
        return allPhones;
    }

    public int getId() {
        return id;
    }

    public String getUsername() {
        return username;
    }

    public String getFamilyName() {
        return familyName;
    }

    public String getHomePhone() {
        return home;
    }

    public String getMobilePhone() {
        return mobile;
    }

    public String getWorkPhone() {
        return work;
    }

    public String getAddress() {
        return address;
    }

    public String getEmail() {
        return email;
    }

    public String getGroup() {
        return group;
    }

    // ---- fluent setters --------------------------------------------------

    public ContactData withPhones(Phones phones) {
        this.phones = phones;
        return this;
    }

    public ContactData withPhotoname(String photo) {
        this.photoname = photo;
        return this;
    }

    public ContactData withEmails(String emails) {
        this.emails = emails;
        return this;
    }

    public ContactData withMail1(String mail1) {
        this.mail1 = mail1;
        return this;
    }

    public ContactData withMail2(String mail2) {
        this.mail2 = mail2;
        return this;
    }

    public ContactData withMail3(String mail3) {
        this.mail3 = mail3;
        return this;
    }

    public ContactData withAllPhones(String allPhones) {
        this.allPhones = allPhones;
        return this;
    }

    public ContactData withGroup(String group) {
        this.group = group;
        return this;
    }

    public ContactData withId(int id) {
        this.id = id;
        return this;
    }

    public ContactData withUsername(String username) {
        this.username = username;
        return this;
    }

    public ContactData withFamilyName(String familyName) {
        this.familyName = familyName;
        return this;
    }

    public ContactData withEmail(String email) {
        this.email = email;
        return this;
    }

    public ContactData withAddress(String address) {
        this.address = address;
        return this;
    }

    public ContactData withHomePhone(String home) {
        this.home = home;
        return this;
    }

    public ContactData withMobilePhone(String mobile) {
        this.mobile = mobile;
        return this;
    }

    public ContactData withWorkPhone(String work) {
        this.work = work;
        return this;
    }

    // ---- object contract -------------------------------------------------

    /** Shows only the identity fields; payload fields are deliberately omitted. */
    @Override
    public String toString() {
        return new StringBuilder("ContactData{")
                .append("id='").append(id).append('\'')
                .append(", username='").append(username).append('\'')
                .append(", familyName='").append(familyName).append('\'')
                .append('}')
                .toString();
    }

    /** Two contacts are equal iff id, username and familyName all match (exact-class comparison). */
    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ContactData other = (ContactData) o;
        if (id != other.id) {
            return false;
        }
        if (username == null) {
            if (other.username != null) {
                return false;
            }
        } else if (!username.equals(other.username)) {
            return false;
        }
        if (familyName == null) {
            return other.familyName == null;
        }
        return familyName.equals(other.familyName);
    }

    /** Consistent with equals: combines id, username and familyName with the usual 31 multiplier. */
    @Override
    public int hashCode() {
        int hash = id;
        hash = 31 * hash + (username == null ? 0 : username.hashCode());
        hash = 31 * hash + (familyName == null ? 0 : familyName.hashCode());
        return hash;
    }
}
package interdroid.vdb.avro.view;

import interdroid.vdb.avro.R;
import interdroid.vdb.avro.view.factory.AvroViewFactory;

import java.util.ArrayList;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CursorAdapter;
import android.widget.LinearLayout;
import android.widget.TableLayout;

/**
 * This class provides a list adapter for data stored by an Avro VDB.
 *
 * Fields annotated with the "ui.list" schema property are shown in each row;
 * if no field carries the property, the synthetic "_id" column is shown instead.
 *
 * @author nick <palmer@cs.vu.nl>
 *
 */
public class AvroListAdapter extends CursorAdapter {
    /** Our logging interface. */
    private static final Logger LOG =
            LoggerFactory.getLogger(AvroListAdapter.class);

    /**
     * The schema for the data.
     */
    private Schema mSchema;

    /**
     * The title fields.
     */
    private Field[] mTitleFields;

    /**
     * The string to use to mean "This".
     */
    private String mThis;

    /**
     * A field for when there is nothing in the list view.
     * Backed by the synthetic "_id" column every row is guaranteed to have.
     */
    private static final Field sIdField =
            new Field("_id", Schema.create(Type.INT), null, null);

    /**
     * Build a list adapter to work for the given AvroBaseList.
     *
     * @param context the base list this will work for
     * @param schema the schema for the data being listed
     * @param uri the uri for the data to show
     */
    public AvroListAdapter(final AvroBaseList context, final Schema schema,
            final Uri uri) {
        super(context, getCursor(context, schema, uri));
        mThis = context.getString(R.string.title_this) + " " + schema.getName();
        mSchema = schema;
        mTitleFields = getTitleFields(schema);
    }

    /**
     * Returns the title fields for a given schema.
     * Fields flagged with "ui.title"; falls back to the _id field when none are flagged.
     * @param schema the schema to get the title fields from
     * @return the title field names
     */
    private Field[] getTitleFields(final Schema schema) {
        ArrayList<Field> title = new ArrayList<Field>();
        boolean foundOne = false;
        for (Field field : schema.getFields()) {
            if (propertyIsSet(field, "ui.title")) {
                title.add(field);
                foundOne = true;
            }
        }
        // Add the _id field if we found none.
        if (!foundOne) {
            title.add(sIdField);
        }
        return title.toArray(new Field[title.size()]);
    }

    /**
     *
     * @param context the AvroBaseList this will run in
     * @param schema the schema for the data
     * @param uri the uri for the data
     * @return a cursor for the given uri
     */
    private static Cursor getCursor(final AvroBaseList context,
            final Schema schema, final Uri uri) {
        // NOTE(review): managedQuery ties the cursor's lifecycle to the activity.
        return context.managedQuery(uri, getProjection(schema),
                null, null, getSortOrder(schema));
    }

    /**
     * Returns the default sort order for the given schema.
     * @param schema the schema for which the sort order is desired
     * @return the sort order portion of the query.
     */
    private static String getSortOrder(final Schema schema) {
        // TODO: This should come from the sort order on the fields
        return schema.getProp("ui.default_sort");
    }

    /**
     * Returns an array of field names which are required to list data
     * for the given schema.
     * @param schema the schema to list data for
     * @return an array of field names
     */
    private static String[] getProjection(final Schema schema) {
        ArrayList<String> listFields = new ArrayList<String>();
        // Add the _id field be in the PROJECTION.
        // Since this is synthetic we add it here.
        listFields.add("_id");
        for (Field field : schema.getFields()) {
            if (isListField(field)) {
                LOG.debug("List field includes: {}", field.name());
                List<String> fields = AvroViewFactory.getProjectionFields(field);
                if (fields != null) {
                    listFields.addAll(fields);
                }
            } else {
                LOG.debug("Skipping field {} from list view.", field.name());
            }
        }
        if (listFields.isEmpty()) {
            // NOTE(review): unreachable in practice since "_id" is always added above.
            LOG.debug("No list fields. Adding _id.");
            // No list fields. Add the _id field.
            listFields.add("_id");
        }
        return listFields.toArray(new String[listFields.size()]);
    }

    /**
     *
     * @param schema the schema to check
     * @return true if the type can be listed
     */
    private static boolean isValidListType(final Schema schema) {
        return isBasicType(schema)
                || isValidUnionType(schema)
                || isValidRecordType(schema);
    }

    /**
     * A union is listable only when every branch is itself listable.
     * @param schema the schema to check
     * @return true if the type is a union type that can be listed
     */
    private static boolean isValidUnionType(final Schema schema) {
        if (!schema.getType().equals(Type.UNION)) {
            return false;
        }
        for (Schema branch : schema.getTypes()) {
            if (!isBasicType(branch)
                    && !isValidUnionType(branch)
                    && !isValidRecordType(branch)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Records are currently never listable; see the TODO below.
     * @param schema the schema
     * @return true if this record can be in a list
     */
    private static boolean isValidRecordType(final Schema schema) {
        // TODO: We would like to support location here but does it make sense?
        // The content provider would need a subquery and that would
        // totally kill performance. We want to join the image...
        // So maybe how we store locations needs to change...
        return false;
    }

    /**
     *
     * @param schema the schema for the field
     * @return true if the field is a basic type
     */
    private static boolean isBasicType(final Schema schema) {
        switch (schema.getType()) {
        case BOOLEAN:
        case DOUBLE:
        case ENUM:
        case FLOAT:
        case INT:
        case LONG:
        case STRING:
            return true;
        case BYTES:
            // BYTES is only listable when rendered as a photo or video widget.
            if (schema.getProp("ui.widget") != null
                    && (schema.getProp("ui.widget").equals("photo")
                            || schema.getProp("ui.widget").equals("video"))) {
                return true;
            }
            // Deliberate fall through to default for other BYTES fields.
        default:
            return false;
        }
    }

    /**
     *
     * @param field the field to check
     * @return true if the field is in the list
     */
    private static boolean isListField(final Field field) {
        return isListedField(field) && isValidListType(field.schema());
    }

    /**
     *
     * @param field the field to check
     * @return true if the field is marked with ui.list
     */
    private static boolean isListedField(final Field field) {
        return propertyIsSet(field, "ui.list");
    }

    /**
     *
     * @param field the field to check
     * @param property the property to check
     * @return true if the property in the field is set to true
     */
    private static boolean propertyIsSet(final Field field,
            final String property) {
        // Property must both be present and parse to the literal "true".
        boolean isSet = field.getProp(property) != null
                && Boolean.TRUE.equals(
                        Boolean.parseBoolean(field.getProp(property)));
        return isSet;
    }

    @Override
    public final View newView(final Context context, final Cursor cursor,
            final ViewGroup parent) {
        TableLayout layout = new TableLayout(context);
        layout.setOrientation(LinearLayout.VERTICAL);

        boolean builtOne = false;
        for (Field field : mSchema.getFields()) {
            if (isListField(field)) {
                buildView(context, layout, field);
                builtOne = true;
            }
        }
        // There were no list fields so bind the _id field
        if (!builtOne) {
            buildView(context, layout, sIdField);
        }

        // Bind the data in the cursor.
        bindView(layout, context, cursor);

        return layout;
    }

    /**
     * Build a view for a given field.
     * Silently skips fields the factory cannot build a view for (null return).
     * @param context the activity we are working in
     * @param layout the layout to add the view to
     * @param field the field we are working on
     */
    private void buildView(final Context context, final LinearLayout layout,
            final Field field) {
        View view = AvroViewFactory.buildListView(context, field);
        if (view != null) {
            layout.addView(view);
        }
    }

    @Override
    public final void bindView(final View recycleView, final Context context,
            final Cursor cursor) {
        View view = recycleView;
        if (view == null) {
            view = newView(context, cursor, null);
        }
        boolean boundOne = false;
        for (Field field : mSchema.getFields()) {
            if (isListField(field)) {
                bindView(view, cursor, field);
                boundOne = true;
            }
        }
        // We didn't bind one, so bind the _id field
        if (!boundOne) {
            bindView(view, cursor, sIdField);
        }
    }

    /**
     * Binds a view to the cursor for the given field.
     * @param view the view to fill
     * @param cursor the cursor with the data
     * @param field the field we ware binding
     */
    private void bindView(final View view, final Cursor cursor,
            final Field field) {
        AvroViewFactory.bindListView(view, cursor, field);
    }

    /**
     * Returns a title for the given cursor.
     * Concatenates all title fields; falls back to "This &lt;SchemaName&gt;" when empty.
     * @param cursor the cursor with the data
     * @return the title
     */
    public final CharSequence getTitle(final Cursor cursor) {
        if (mTitleFields.length > 0) {
            StringBuffer ret = new StringBuffer();
            for (Field field : mTitleFields) {
                AvroViewFactory.appendTitleField(ret, cursor, field);
            }
            if (ret.length() > 0) {
                return ret.toString();
            }
        }
        return mThis;
    }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cognitoidp.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p>
 * Represents the request to update the user's attributes as an administrator.
 * </p>
 * <p>
 * NOTE: This is generated code (see the {@code @Generated} annotation); hand edits will be
 * overwritten by the next code-generation run.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminUpdateUserAttributes"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AdminUpdateUserAttributesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The user pool ID for the user pool where you want to update user attributes.
     * </p>
     */
    private String userPoolId;
    /**
     * <p>
     * The user name of the user for whom you want to update user attributes.
     * </p>
     */
    private String username;
    /**
     * <p>
     * An array of name-value pairs representing user attributes.
     * </p>
     * <p>
     * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     * </p>
     */
    private java.util.List<AttributeType> userAttributes;

    /**
     * <p>
     * The user pool ID for the user pool where you want to update user attributes.
     * </p>
     *
     * @param userPoolId
     *        The user pool ID for the user pool where you want to update user attributes.
     */
    public void setUserPoolId(String userPoolId) {
        this.userPoolId = userPoolId;
    }

    /**
     * <p>
     * The user pool ID for the user pool where you want to update user attributes.
     * </p>
     *
     * @return The user pool ID for the user pool where you want to update user attributes.
     */
    public String getUserPoolId() {
        return this.userPoolId;
    }

    /**
     * <p>
     * The user pool ID for the user pool where you want to update user attributes.
     * </p>
     *
     * @param userPoolId
     *        The user pool ID for the user pool where you want to update user attributes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AdminUpdateUserAttributesRequest withUserPoolId(String userPoolId) {
        setUserPoolId(userPoolId);
        return this;
    }

    /**
     * <p>
     * The user name of the user for whom you want to update user attributes.
     * </p>
     *
     * @param username
     *        The user name of the user for whom you want to update user attributes.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    /**
     * <p>
     * The user name of the user for whom you want to update user attributes.
     * </p>
     *
     * @return The user name of the user for whom you want to update user attributes.
     */
    public String getUsername() {
        return this.username;
    }

    /**
     * <p>
     * The user name of the user for whom you want to update user attributes.
     * </p>
     *
     * @param username
     *        The user name of the user for whom you want to update user attributes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AdminUpdateUserAttributesRequest withUsername(String username) {
        setUsername(username);
        return this;
    }

    /**
     * <p>
     * An array of name-value pairs representing user attributes.
     * </p>
     * <p>
     * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     * </p>
     *
     * @return An array of name-value pairs representing user attributes.</p>
     *         <p>
     *         For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     */
    public java.util.List<AttributeType> getUserAttributes() {
        return userAttributes;
    }

    /**
     * <p>
     * An array of name-value pairs representing user attributes.
     * </p>
     * <p>
     * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     * </p>
     *
     * @param userAttributes
     *        An array of name-value pairs representing user attributes.</p>
     *        <p>
     *        For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     */
    public void setUserAttributes(java.util.Collection<AttributeType> userAttributes) {
        if (userAttributes == null) {
            this.userAttributes = null;
            return;
        }

        // Defensive copy so later mutation of the caller's collection cannot affect this request.
        this.userAttributes = new java.util.ArrayList<AttributeType>(userAttributes);
    }

    /**
     * <p>
     * An array of name-value pairs representing user attributes.
     * </p>
     * <p>
     * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setUserAttributes(java.util.Collection)} or {@link #withUserAttributes(java.util.Collection)} if you want
     * to override the existing values.
     * </p>
     *
     * @param userAttributes
     *        An array of name-value pairs representing user attributes.</p>
     *        <p>
     *        For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AdminUpdateUserAttributesRequest withUserAttributes(AttributeType... userAttributes) {
        // Lazily create the backing list sized for the incoming values, then append.
        if (this.userAttributes == null) {
            setUserAttributes(new java.util.ArrayList<AttributeType>(userAttributes.length));
        }
        for (AttributeType ele : userAttributes) {
            this.userAttributes.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * An array of name-value pairs representing user attributes.
     * </p>
     * <p>
     * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     * </p>
     *
     * @param userAttributes
     *        An array of name-value pairs representing user attributes.</p>
     *        <p>
     *        For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AdminUpdateUserAttributesRequest withUserAttributes(java.util.Collection<AttributeType> userAttributes) {
        setUserAttributes(userAttributes);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getUserPoolId() != null)
            sb.append("UserPoolId: ").append(getUserPoolId()).append(",");
        if (getUsername() != null)
            // Username is considered sensitive and is never printed in the clear.
            sb.append("Username: ").append("***Sensitive Data Redacted***").append(",");
        if (getUserAttributes() != null)
            sb.append("UserAttributes: ").append(getUserAttributes());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof AdminUpdateUserAttributesRequest == false)
            return false;
        AdminUpdateUserAttributesRequest other = (AdminUpdateUserAttributesRequest) obj;
        // For each property: XOR detects one-side-null mismatch, then values are compared.
        if (other.getUserPoolId() == null ^ this.getUserPoolId() == null)
            return false;
        if (other.getUserPoolId() != null && other.getUserPoolId().equals(this.getUserPoolId()) == false)
            return false;
        if (other.getUsername() == null ^ this.getUsername() == null)
            return false;
        if (other.getUsername() != null && other.getUsername().equals(this.getUsername()) == false)
            return false;
        if (other.getUserAttributes() == null ^ this.getUserAttributes() == null)
            return false;
        if (other.getUserAttributes() != null && other.getUserAttributes().equals(this.getUserAttributes()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getUserPoolId() == null) ? 0 : getUserPoolId().hashCode());
        hashCode = prime * hashCode + ((getUsername() == null) ? 0 : getUsername().hashCode());
        hashCode = prime * hashCode + ((getUserAttributes() == null) ? 0 : getUserAttributes().hashCode());
        return hashCode;
    }

    @Override
    public AdminUpdateUserAttributesRequest clone() {
        return (AdminUpdateUserAttributesRequest) super.clone();
    }

}
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.Request; import com.amazonaws.services.ec2.model.transform.CreateVpcEndpointRequestMarshaller; /** * */ public class CreateVpcEndpointRequest extends AmazonWebServiceRequest implements Serializable, Cloneable, DryRunSupportedRequest<CreateVpcEndpointRequest> { /** * <p> * The ID of the VPC in which the endpoint will be used. * </p> */ private String vpcId; /** * <p> * The AWS service name, in the form * <code>com.amazonaws.<i>region</i>.<i>service</i></code>. To get a list of * available services, use the <a>DescribeVpcEndpointServices</a> request. * </p> */ private String serviceName; /** * <p> * A policy to attach to the endpoint that controls access to the service. * The policy must be in valid JSON format. If this parameter is not * specified, we attach a default policy that allows full access to the * service. * </p> */ private String policyDocument; /** * <p> * One or more route table IDs. * </p> */ private com.amazonaws.internal.SdkInternalList<String> routeTableIds; /** * <p> * Unique, case-sensitive identifier you provide to ensure the idempotency * of the request. For more information, see <a href= * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html" * >How to Ensure Idempotency</a>. 
* </p> */ private String clientToken; /** * <p> * The ID of the VPC in which the endpoint will be used. * </p> * * @param vpcId * The ID of the VPC in which the endpoint will be used. */ public void setVpcId(String vpcId) { this.vpcId = vpcId; } /** * <p> * The ID of the VPC in which the endpoint will be used. * </p> * * @return The ID of the VPC in which the endpoint will be used. */ public String getVpcId() { return this.vpcId; } /** * <p> * The ID of the VPC in which the endpoint will be used. * </p> * * @param vpcId * The ID of the VPC in which the endpoint will be used. * @return Returns a reference to this object so that method calls can be * chained together. */ public CreateVpcEndpointRequest withVpcId(String vpcId) { setVpcId(vpcId); return this; } /** * <p> * The AWS service name, in the form * <code>com.amazonaws.<i>region</i>.<i>service</i></code>. To get a list of * available services, use the <a>DescribeVpcEndpointServices</a> request. * </p> * * @param serviceName * The AWS service name, in the form * <code>com.amazonaws.<i>region</i>.<i>service</i></code>. To get a * list of available services, use the * <a>DescribeVpcEndpointServices</a> request. */ public void setServiceName(String serviceName) { this.serviceName = serviceName; } /** * <p> * The AWS service name, in the form * <code>com.amazonaws.<i>region</i>.<i>service</i></code>. To get a list of * available services, use the <a>DescribeVpcEndpointServices</a> request. * </p> * * @return The AWS service name, in the form * <code>com.amazonaws.<i>region</i>.<i>service</i></code>. To get a * list of available services, use the * <a>DescribeVpcEndpointServices</a> request. */ public String getServiceName() { return this.serviceName; } /** * <p> * The AWS service name, in the form * <code>com.amazonaws.<i>region</i>.<i>service</i></code>. To get a list of * available services, use the <a>DescribeVpcEndpointServices</a> request. 
* </p> * * @param serviceName * The AWS service name, in the form * <code>com.amazonaws.<i>region</i>.<i>service</i></code>. To get a * list of available services, use the * <a>DescribeVpcEndpointServices</a> request. * @return Returns a reference to this object so that method calls can be * chained together. */ public CreateVpcEndpointRequest withServiceName(String serviceName) { setServiceName(serviceName); return this; } /** * <p> * A policy to attach to the endpoint that controls access to the service. * The policy must be in valid JSON format. If this parameter is not * specified, we attach a default policy that allows full access to the * service. * </p> * * @param policyDocument * A policy to attach to the endpoint that controls access to the * service. The policy must be in valid JSON format. If this * parameter is not specified, we attach a default policy that allows * full access to the service. */ public void setPolicyDocument(String policyDocument) { this.policyDocument = policyDocument; } /** * <p> * A policy to attach to the endpoint that controls access to the service. * The policy must be in valid JSON format. If this parameter is not * specified, we attach a default policy that allows full access to the * service. * </p> * * @return A policy to attach to the endpoint that controls access to the * service. The policy must be in valid JSON format. If this * parameter is not specified, we attach a default policy that * allows full access to the service. */ public String getPolicyDocument() { return this.policyDocument; } /** * <p> * A policy to attach to the endpoint that controls access to the service. * The policy must be in valid JSON format. If this parameter is not * specified, we attach a default policy that allows full access to the * service. * </p> * * @param policyDocument * A policy to attach to the endpoint that controls access to the * service. The policy must be in valid JSON format. 
If this * parameter is not specified, we attach a default policy that allows * full access to the service. * @return Returns a reference to this object so that method calls can be * chained together. */ public CreateVpcEndpointRequest withPolicyDocument(String policyDocument) { setPolicyDocument(policyDocument); return this; } /** * <p> * One or more route table IDs. * </p> * * @return One or more route table IDs. */ public java.util.List<String> getRouteTableIds() { if (routeTableIds == null) { routeTableIds = new com.amazonaws.internal.SdkInternalList<String>(); } return routeTableIds; } /** * <p> * One or more route table IDs. * </p> * * @param routeTableIds * One or more route table IDs. */ public void setRouteTableIds(java.util.Collection<String> routeTableIds) { if (routeTableIds == null) { this.routeTableIds = null; return; } this.routeTableIds = new com.amazonaws.internal.SdkInternalList<String>( routeTableIds); } /** * <p> * One or more route table IDs. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setRouteTableIds(java.util.Collection)} or * {@link #withRouteTableIds(java.util.Collection)} if you want to override * the existing values. * </p> * * @param routeTableIds * One or more route table IDs. * @return Returns a reference to this object so that method calls can be * chained together. */ public CreateVpcEndpointRequest withRouteTableIds(String... routeTableIds) { if (this.routeTableIds == null) { setRouteTableIds(new com.amazonaws.internal.SdkInternalList<String>( routeTableIds.length)); } for (String ele : routeTableIds) { this.routeTableIds.add(ele); } return this; } /** * <p> * One or more route table IDs. * </p> * * @param routeTableIds * One or more route table IDs. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public CreateVpcEndpointRequest withRouteTableIds( java.util.Collection<String> routeTableIds) { setRouteTableIds(routeTableIds); return this; } /** * <p> * Unique, case-sensitive identifier you provide to ensure the idempotency * of the request. For more information, see <a href= * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html" * >How to Ensure Idempotency</a>. * </p> * * @param clientToken * Unique, case-sensitive identifier you provide to ensure the * idempotency of the request. For more information, see <a href= * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html" * >How to Ensure Idempotency</a>. */ public void setClientToken(String clientToken) { this.clientToken = clientToken; } /** * <p> * Unique, case-sensitive identifier you provide to ensure the idempotency * of the request. For more information, see <a href= * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html" * >How to Ensure Idempotency</a>. * </p> * * @return Unique, case-sensitive identifier you provide to ensure the * idempotency of the request. For more information, see <a href= * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html" * >How to Ensure Idempotency</a>. */ public String getClientToken() { return this.clientToken; } /** * <p> * Unique, case-sensitive identifier you provide to ensure the idempotency * of the request. For more information, see <a href= * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html" * >How to Ensure Idempotency</a>. * </p> * * @param clientToken * Unique, case-sensitive identifier you provide to ensure the * idempotency of the request. For more information, see <a href= * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html" * >How to Ensure Idempotency</a>. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public CreateVpcEndpointRequest withClientToken(String clientToken) { setClientToken(clientToken); return this; } /** * This method is intended for internal use only. Returns the marshaled * request configured with additional parameters to enable operation * dry-run. */ @Override public Request<CreateVpcEndpointRequest> getDryRunRequest() { Request<CreateVpcEndpointRequest> request = new CreateVpcEndpointRequestMarshaller() .marshall(this); request.addParameter("DryRun", Boolean.toString(true)); return request; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getVpcId() != null) sb.append("VpcId: " + getVpcId() + ","); if (getServiceName() != null) sb.append("ServiceName: " + getServiceName() + ","); if (getPolicyDocument() != null) sb.append("PolicyDocument: " + getPolicyDocument() + ","); if (getRouteTableIds() != null) sb.append("RouteTableIds: " + getRouteTableIds() + ","); if (getClientToken() != null) sb.append("ClientToken: " + getClientToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateVpcEndpointRequest == false) return false; CreateVpcEndpointRequest other = (CreateVpcEndpointRequest) obj; if (other.getVpcId() == null ^ this.getVpcId() == null) return false; if (other.getVpcId() != null && other.getVpcId().equals(this.getVpcId()) == false) return false; if (other.getServiceName() == null ^ this.getServiceName() == null) return false; if (other.getServiceName() != null && other.getServiceName().equals(this.getServiceName()) == false) return false; if (other.getPolicyDocument() == null ^ this.getPolicyDocument() == null) return false; if (other.getPolicyDocument() != null && 
other.getPolicyDocument().equals(this.getPolicyDocument()) == false) return false; if (other.getRouteTableIds() == null ^ this.getRouteTableIds() == null) return false; if (other.getRouteTableIds() != null && other.getRouteTableIds().equals(this.getRouteTableIds()) == false) return false; if (other.getClientToken() == null ^ this.getClientToken() == null) return false; if (other.getClientToken() != null && other.getClientToken().equals(this.getClientToken()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getVpcId() == null) ? 0 : getVpcId().hashCode()); hashCode = prime * hashCode + ((getServiceName() == null) ? 0 : getServiceName().hashCode()); hashCode = prime * hashCode + ((getPolicyDocument() == null) ? 0 : getPolicyDocument() .hashCode()); hashCode = prime * hashCode + ((getRouteTableIds() == null) ? 0 : getRouteTableIds() .hashCode()); hashCode = prime * hashCode + ((getClientToken() == null) ? 0 : getClientToken().hashCode()); return hashCode; } @Override public CreateVpcEndpointRequest clone() { return (CreateVpcEndpointRequest) super.clone(); } }