gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.transactions.Transaction;

import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;

/**
 * Tests that partition-release and key-release futures ({@code partitionReleaseFuture} /
 * {@code finishKeys}) do not complete while transactions or explicit locks still hold
 * entries, and complete once those locks are released.
 */
public class GridCacheFinishPartitionsSelfTest extends GridCacheAbstractSelfTest {
    /** Number of grids to start; all tests run against a single node. */
    private static final int GRID_CNT = 1;

    /** Grid kernal. */
    private IgniteKernal grid;

    /** {@inheritDoc} */
    @Override protected int gridCount() {
        return GRID_CNT;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        // Cache the internal kernal view of node 0 for direct access to cache contexts.
        grid = (IgniteKernal)grid(0);
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        grid = null;
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(gridName);

        // Transactional partitioned cache with one backup and a near cache,
        // so both near and DHT lock futures are exercised.
        CacheConfiguration cc = defaultCacheConfiguration();

        cc.setCacheMode(PARTITIONED);
        cc.setBackups(1);
        cc.setAtomicityMode(TRANSACTIONAL);
        cc.setNearConfiguration(new NearCacheConfiguration());

        c.setCacheConfiguration(cc);

        return c;
    }

    /**
     * Checks that the partition release future waits for a pessimistic transaction
     * to commit, regardless of which partitions are being waited on.
     *
     * @throws Exception If failed.
     */
    public void testTxFinishPartitions() throws Exception {
        String key = "key";
        String val = "value";

        IgniteCache<String, String> cache = grid.cache(null);

        int keyPart = grid.<String, String>internalCache().context().affinity().partition(key);

        cache.put(key, val);

        // Wait for tx-enlisted partition.
        long waitTime = runTransactions(key, keyPart, F.asList(keyPart));

        info("Wait time, ms: " + waitTime);

        // Wait for not enlisted partition.
        waitTime = runTransactions(key, keyPart, F.asList(keyPart + 1));

        info("Wait time, ms: " + waitTime);

        // Wait for both partitions.
        waitTime = runTransactions(key, keyPart, F.asList(keyPart, keyPart + 1));

        info("Wait time, ms: " + waitTime);
    }

    /**
     * Starts a pessimistic REPEATABLE_READ transaction that reads {@code key}, requests
     * a partition release future while the transaction is open, asserts the future is
     * not yet done, then commits and waits for the future's listener to fire.
     *
     * @param key Key.
     * @param keyPart Key partition.
     * @param waitParts Partitions to wait (used only in the assertion message here).
     * @return Wait time in milliseconds from transaction start until the future completed.
     * @throws Exception If failed.
     */
    private long runTransactions(final String key, final int keyPart, final Collection<Integer> waitParts) throws Exception {
        int threadNum = 1;

        final CyclicBarrier barrier = new CyclicBarrier(threadNum);
        final CountDownLatch latch = new CountDownLatch(threadNum);
        final AtomicLong start = new AtomicLong();

        GridTestUtils.runMultiThreaded(new Callable() {
            @Override public Object call() throws Exception {
                // Last thread through the barrier (index 0) records the start timestamp.
                if (barrier.await() == 0)
                    start.set(System.currentTimeMillis());

                IgniteCache<String, String> cache = grid(0).cache(null);

                Transaction tx = grid(0).transactions().txStart(PESSIMISTIC, REPEATABLE_READ);

                // Pessimistic read acquires and holds the entry lock until commit.
                cache.get(key);

                // Topology version GRID_CNT + 1 is ahead of the current one, so the
                // future must wait for all current transactions to finish.
                IgniteInternalFuture<?> fut = grid.context().cache().context().partitionReleaseFuture(
                    new AffinityTopologyVersion(GRID_CNT + 1));

                fut.listen(new CI1<IgniteInternalFuture<?>>() {
                    @Override public void apply(IgniteInternalFuture<?> e) {
                        latch.countDown();
                    }
                });

                // The transaction is still open, so the release future must not be done yet.
                assert !fut.isDone() : "Failed waiting for locks " + "[keyPart=" + keyPart + ", waitParts=" + waitParts +
                    ", done=" + fut.isDone() + ']';

                tx.commit();

                return null;
            }
        }, threadNum, "test-finish-partitions-thread");

        latch.await();

        return System.currentTimeMillis() - start.get();
    }

    /**
     * Tests method {@link GridCacheMvccManager#finishLocks(org.apache.ignite.lang.IgnitePredicate,
     * AffinityTopologyVersion)} via explicit cache locks instead of transactions.
     *
     * @throws Exception If failed.
     */
    public void testMvccFinishPartitions() throws Exception {
        String key = "key";

        int keyPart = grid.internalCache().context().affinity().partition(key);

        // Wait for tx-enlisted partition.
        long waitTime = runLock(key, keyPart, F.asList(keyPart));

        info("Wait time, ms: " + waitTime);

        // Wait for not enlisted partition.
        waitTime = runLock(key, keyPart, F.asList(keyPart + 1));

        info("Wait time, ms: " + waitTime);

        // Wait for both partitions.
        waitTime = runLock(key, keyPart, F.asList(keyPart, keyPart + 1));

        info("Wait time, ms: " + waitTime);
    }

    /**
     * Tests finish future for a particular set of keys: both the near and DHT
     * {@code finishKeys} futures must stay incomplete while a pessimistic
     * transaction holds the key lock.
     *
     * @throws Exception If failed.
     */
    public void testMvccFinishKeys() throws Exception {
        IgniteCache<String, Integer> cache = grid(0).cache(null);

        try (Transaction tx = grid(0).transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) {
            final String key = "key";

            cache.get(key);

            GridCacheAdapter<String, Integer> internal = grid.internalCache();

            KeyCacheObject cacheKey = internal.context().toCacheKeyObject(key);

            IgniteInternalFuture<?> nearFut = internal.context().mvcc().finishKeys(Collections.singletonList(cacheKey),
                new AffinityTopologyVersion(2));

            IgniteInternalFuture<?> dhtFut = internal.context().near().dht().context().mvcc().finishKeys(
                Collections.singletonList(cacheKey), new AffinityTopologyVersion(2));

            // Key is locked by the open transaction, so neither future can be done.
            assert !nearFut.isDone();
            assert !dhtFut.isDone();

            tx.commit();
        }
    }

    /**
     * Tests chained locks and partitions release future: while at least one of a chain
     * of overlapping explicit locks is held, the release future must not complete; it
     * completes only after the last lock is released.
     *
     * @throws Exception If failed.
     */
    public void testMvccFinishPartitionsContinuousLockAcquireRelease() throws Exception {
        int key = 1;

        GridCacheSharedContext<Object, Object> ctx = grid.context().cache().context();

        final AtomicLong end = new AtomicLong(0);

        final CountDownLatch latch = new CountDownLatch(1);

        IgniteCache<Integer, String> cache = grid.cache(null);

        Lock lock = cache.lock(key);

        lock.lock();

        long start = System.currentTimeMillis();

        info("Start time: " + start);

        IgniteInternalFuture<?> fut = ctx.partitionReleaseFuture(new AffinityTopologyVersion(GRID_CNT + 1));

        assert fut != null;

        fut.listen(new CI1<IgniteInternalFuture<?>>() {
            @Override public void apply(IgniteInternalFuture<?> e) {
                end.set(System.currentTimeMillis());

                latch.countDown();

                info("End time: " + end.get());
            }
        });

        // Overlap the locks: always acquire the next lock before releasing the
        // previous one, so there is never a moment with zero locks held.
        Lock lock1 = cache.lock(key + 1);

        lock1.lock();

        lock.unlock();

        Lock lock2 = cache.lock(key + 2);

        lock2.lock();

        lock1.unlock();

        // lock2 is still held, so the release future must not have completed.
        assert !fut.isDone() : "Failed waiting for locks";

        lock2.unlock();

        latch.await();
    }

    /**
     * Acquires an explicit lock on {@code key}, requests a partition release future
     * while the lock is held, asserts it is not done, then unlocks and measures how
     * long the future took to complete.
     *
     * @param key Key.
     * @param keyPart Key partition.
     * @param waitParts Partitions to wait (used only in the assertion message here).
     * @return Wait time in milliseconds from lock acquisition until future completion.
     * @throws Exception If failed.
     */
    private long runLock(String key, int keyPart, Collection<Integer> waitParts) throws Exception {
        GridCacheSharedContext<Object, Object> ctx = grid.context().cache().context();

        final AtomicLong end = new AtomicLong(0);

        final CountDownLatch latch = new CountDownLatch(1);

        IgniteCache<String, String> cache = grid.cache(null);

        Lock lock = cache.lock(key);

        lock.lock();

        long start;

        try {
            start = System.currentTimeMillis();

            info("Start time: " + start);

            IgniteInternalFuture<?> fut = ctx.partitionReleaseFuture(new AffinityTopologyVersion(GRID_CNT + 1));

            assert fut != null;

            fut.listen(new CI1<IgniteInternalFuture<?>>() {
                @Override public void apply(IgniteInternalFuture<?> e) {
                    end.set(System.currentTimeMillis());

                    latch.countDown();

                    info("End time: " + end.get());
                }
            });

            // The explicit lock is still held, so the future must not be done yet.
            assert !fut.isDone() : "Failed waiting for locks [keyPart=" + keyPart + ", waitParts=" + waitParts +
                ", done=" + fut.isDone() + ']';
        }
        finally {
            lock.unlock();
        }

        latch.await();

        return end.get() - start;
    }
}
/*
 * Copyright 2014 Realm Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.realm.internal;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * This class is used to serialize tables to either disk or memory. It consists of a collection of tables.
 * <p>
 * The instance wraps a native {@code realm::Group} pointer ({@code nativePtr}); once {@link #close()}
 * has been called the pointer is zeroed and every accessor throws {@link IllegalStateException}.
 */
public class Group implements Closeable {

    // Below values must match the values in realm::group::OpenMode in C++
    public static final int MODE_READONLY = 0; // Open in read-only mode. Fail if the file does not already exist.
    public static final int MODE_READWRITE = 1; // Open in read/write mode. Create the file if it doesn't exist.
    public static final int MODE_READWRITE_NOCREATE = 2; // Open in read/write mode. Fail if the file does not already exist.

    // Native realm::Group pointer; 0 means the group has been closed.
    protected long nativePtr;
    protected boolean immutable;
    private final Context context;

    private void checkNativePtrNotZero() {
        if (this.nativePtr == 0)
            // FIXME: It is wrong to assume that a null pointer means 'out
            // of memory'. An out of memory condition in
            // createNative() must be handled by having createNative()
            // throw OutOfMemoryError.
            throw new OutOfMemoryError("Out of native memory.");
    }

    /** Creates an empty in-memory group. */
    public Group() {
        this.immutable = false;
        this.context = new Context();
        this.nativePtr = createNative();
        checkNativePtrNotZero();
    }

    /**
     * Opens a group from a file.
     *
     * @param filepath path of the Realm file.
     * @param mode one of {@link #MODE_READONLY}, {@link #MODE_READWRITE}, {@link #MODE_READWRITE_NOCREATE}.
     */
    public Group(String filepath, int mode) {
        this.immutable = (mode == MODE_READONLY);
        this.context = new Context();
        this.nativePtr = createNative(filepath, mode);
        checkNativePtrNotZero();
    }

    public Group(String filepath) {
        this(filepath, MODE_READONLY);
    }

    public Group(File file) {
        this(file.getAbsolutePath(), file.canWrite() ? MODE_READWRITE : MODE_READONLY);
    }

    /**
     * Deserializes a group from an in-memory buffer.
     *
     * @param data serialized group bytes; must not be {@code null}.
     */
    public Group(byte[] data) {
        this.immutable = false;
        this.context = new Context();
        if (data != null) {
            this.nativePtr = createNative(data);
            checkNativePtrNotZero();
        } else {
            throw new IllegalArgumentException();
        }
    }

    /**
     * Deserializes a group from a {@link ByteBuffer}.
     *
     * @param buffer serialized group bytes; must not be {@code null}.
     */
    public Group(ByteBuffer buffer) {
        this.immutable = false;
        this.context = new Context();
        if (buffer != null) {
            this.nativePtr = createNative(buffer);
            checkNativePtrNotZero();
        } else {
            throw new IllegalArgumentException();
        }
    }

    Group(Context context, long nativePointer, boolean immutable) {
        this.context = context;
        this.nativePtr = nativePointer;
        this.immutable = immutable;
    }

    // If close() is called, no penalty is paid for delayed disposal
    // via the context
    @Override
    public void close() {
        synchronized (context) {
            if (nativePtr != 0) {
                nativeClose(nativePtr);
                nativePtr = 0;
            }
        }
    }

    /**
     * Checks if a group has been closed and can no longer be used.
     *
     * @return {@code true} if closed, {@code false} otherwise.
     */
    boolean isClosed() {
        return nativePtr == 0;
    }

    @Override
    protected void finalize() {
        synchronized (context) {
            if (nativePtr != 0) {
                context.asyncDisposeGroup(nativePtr);
                nativePtr = 0; // Set to 0 if finalize is called before close() for some reason
            }
        }
    }

    private void verifyGroupIsValid() {
        if (nativePtr == 0) {
            throw new IllegalStateException("Illegal to call methods on a closed Group.");
        }
    }

    public long size() {
        verifyGroupIsValid();
        return nativeSize(nativePtr);
    }

    public boolean isEmpty() {
        return size() == 0;
    }

    /**
     * Checks whether {@link Table} exists in the Group.
     *
     * @param name the name of the {@link Table}.
     * @return {@code true} if the table exists, otherwise {@code false}.
     */
    public boolean hasTable(String name) {
        verifyGroupIsValid();
        return name != null && nativeHasTable(nativePtr, name);
    }

    public String getTableName(int index) {
        verifyGroupIsValid();
        long cnt = size();
        if (index < 0 || index >= cnt) {
            throw new IndexOutOfBoundsException(
                    "Table index argument is out of range. possible range is [0, " + (cnt - 1) + "]");
        }
        return nativeGetTableName(nativePtr, index);
    }

    /**
     * Removes a table from the group and delete all data.
     */
    public void removeTable(String name) {
        // Guard against calling into native code with a null pointer (closed group),
        // consistent with every other public accessor on this class.
        verifyGroupIsValid();
        nativeRemoveTable(nativePtr, name);
    }

    native void nativeRemoveTable(long nativeGroupPtr, String tableName);

    /**
     * Renames a table
     */
    public void renameTable(String oldName, String newName) {
        // Same closed-group guard as removeTable().
        verifyGroupIsValid();
        nativeRenameTable(nativePtr, oldName, newName);
    }

    native void nativeRenameTable(long nativeGroupPtr, String oldName, String newName);

    /**
     * Returns a table with the specified name.
     *
     * @param name the name of the {@link Table}.
     * @return the {@link Table} if it exists, otherwise create it.
     */
    public Table getTable(String name) {
        verifyGroupIsValid();
        if (name == null || name.isEmpty()) {
            throw new IllegalArgumentException("Invalid name. Name must be a non-empty String.");
        }
        if (immutable && !hasTable(name)) {
            throw new IllegalStateException("Requested table is not in this Realm. "
                    + "Creating it requires a transaction: " + name);
        }

        // Execute the disposal of abandoned realm objects each time a new realm object is created
        context.executeDelayedDisposal();
        long nativeTablePointer = nativeGetTableNativePtr(nativePtr, name);
        try {
            // Copy context reference from parent
            return new Table(context, this, nativeTablePointer);
        } catch (RuntimeException e) {
            Table.nativeClose(nativeTablePointer);
            throw e;
        }
    }

    /**
     * Serializes the group to the specific file on the disk using encryption.
     *
     * @param file a File object representing the file.
     * @param key A 64 bytes long byte array containing the key to the encrypted Realm file. Can be null if encryption
     *            is not required.
     * @throws IOException if writing the file fails.
     */
    public void writeToFile(File file, byte[] key) throws IOException {
        verifyGroupIsValid();
        // Reject any existing path, including a directory. The previous check
        // (file.isFile() && file.exists()) let an existing directory through.
        if (file.exists()) {
            throw new IllegalArgumentException("The destination file must not exist");
        }
        if (key != null && key.length != 64) {
            throw new IllegalArgumentException("Realm AES keys must be 64 bytes long");
        }

        nativeWriteToFile(nativePtr, file.getAbsolutePath(), key);
    }

    /**
     * Serializes the group to a memory buffer. The byte[] is owned by the JVM.
     *
     * @return the binary array of the serialized group.
     */
    public byte[] writeToMem() {
        verifyGroupIsValid();
        return nativeWriteToMem(nativePtr);
    }

    /*
     * Checks if the Group contains any objects. It only checks for "class_" tables or non-metadata tables, e.g. this
     * return true if the "pk" table contained information.
     *
     * @return {@code true} if empty, @{code false} otherwise.
     */
    public boolean isObjectTablesEmpty() {
        return nativeIsEmpty(nativePtr);
    }

    /*
     * TODO: Find a way to release the malloc'ed native memory automatically
     * public ByteBuffer writeToByteBuffer() {
     *     verifyGroupIsValid();
     *     return nativeWriteToByteBuffer(nativePtr);
     * }
     * protected native ByteBuffer nativeWriteToByteBuffer(long nativeGroupPtr);
     */

    public void commit() {
        verifyGroupIsValid();
        nativeCommit(nativePtr);
    }

    public String toJson() {
        return nativeToJson(nativePtr);
    }

    @Override
    public String toString() {
        return nativeToString(nativePtr);
    }

    protected native long createNative();

    protected native long createNative(String filepath, int value);

    protected native long createNative(byte[] data);

    protected native long createNative(ByteBuffer buffer);

    protected static native void nativeClose(long nativeGroupPtr);

    protected native long nativeSize(long nativeGroupPtr);

    protected native String nativeGetTableName(long nativeGroupPtr, int index);

    protected native boolean nativeHasTable(long nativeGroupPtr, String name);

    protected native void nativeWriteToFile(long nativeGroupPtr, String fileName, byte[] keyArray) throws IOException;

    protected native long nativeGetTableNativePtr(long nativeGroupPtr, String name);

    protected native long nativeLoadFromMem(byte[] buffer);

    protected native byte[] nativeWriteToMem(long nativeGroupPtr);

    protected native String nativeToJson(long nativeGroupPtr);

    protected native void nativeCommit(long nativeGroupPtr);

    protected native String nativeToString(long nativeGroupPtr);

    protected native boolean nativeIsEmpty(long nativeGroupPtr);
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.core.xml; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlTransient; import org.apache.camel.CamelContext; import org.apache.camel.CamelException; import org.apache.camel.RoutesBuilder; import org.apache.camel.ShutdownRoute; import org.apache.camel.ShutdownRunningTask; import org.apache.camel.builder.ErrorHandlerBuilderRef; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.properties.PropertiesComponent; import org.apache.camel.component.properties.PropertiesParser; import org.apache.camel.component.properties.PropertiesResolver; import org.apache.camel.core.xml.scan.PatternBasedPackageScanFilter; import org.apache.camel.management.DefaultManagementAgent; import org.apache.camel.management.DefaultManagementLifecycleStrategy; import org.apache.camel.management.DefaultManagementStrategy; import org.apache.camel.management.ManagedManagementStrategy; import org.apache.camel.model.ContextScanDefinition; import 
org.apache.camel.model.IdentifiedType; import org.apache.camel.model.InterceptDefinition; import org.apache.camel.model.InterceptFromDefinition; import org.apache.camel.model.InterceptSendToEndpointDefinition; import org.apache.camel.model.ModelCamelContext; import org.apache.camel.model.OnCompletionDefinition; import org.apache.camel.model.OnExceptionDefinition; import org.apache.camel.model.PackageScanDefinition; import org.apache.camel.model.RouteBuilderDefinition; import org.apache.camel.model.RouteContainer; import org.apache.camel.model.RouteContextRefDefinition; import org.apache.camel.model.RouteDefinition; import org.apache.camel.model.RouteDefinitionHelper; import org.apache.camel.model.ThreadPoolProfileDefinition; import org.apache.camel.model.config.PropertiesDefinition; import org.apache.camel.model.dataformat.DataFormatsDefinition; import org.apache.camel.processor.interceptor.Delayer; import org.apache.camel.processor.interceptor.HandleFault; import org.apache.camel.processor.interceptor.TraceFormatter; import org.apache.camel.processor.interceptor.Tracer; import org.apache.camel.spi.ClassResolver; import org.apache.camel.spi.Debugger; import org.apache.camel.spi.EndpointStrategy; import org.apache.camel.spi.EventFactory; import org.apache.camel.spi.EventNotifier; import org.apache.camel.spi.ExecutorServiceManager; import org.apache.camel.spi.FactoryFinderResolver; import org.apache.camel.spi.InflightRepository; import org.apache.camel.spi.InterceptStrategy; import org.apache.camel.spi.LifecycleStrategy; import org.apache.camel.spi.ManagementNamingStrategy; import org.apache.camel.spi.ManagementStrategy; import org.apache.camel.spi.PackageScanClassResolver; import org.apache.camel.spi.PackageScanFilter; import org.apache.camel.spi.ProcessorFactory; import org.apache.camel.spi.ShutdownStrategy; import org.apache.camel.spi.ThreadPoolFactory; import org.apache.camel.spi.ThreadPoolProfile; import org.apache.camel.spi.UuidGenerator; import 
org.apache.camel.util.CamelContextHelper; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A factory to create and initialize a * {@link CamelContext} and install routes either explicitly configured * or found by searching the classpath for Java classes which extend * {@link org.apache.camel.builder.RouteBuilder}. * * @version */ @XmlAccessorType(XmlAccessType.FIELD) public abstract class AbstractCamelContextFactoryBean<T extends ModelCamelContext> extends IdentifiedType implements RouteContainer { /** * JVM system property to control lazy loading of type converters. */ public static final String LAZY_LOAD_TYPE_CONVERTERS = "CamelLazyLoadTypeConverters"; private static final Logger LOG = LoggerFactory.getLogger(AbstractCamelContextFactoryBean.class); @XmlTransient private List<RoutesBuilder> builders = new ArrayList<RoutesBuilder>(); @XmlTransient private ClassLoader contextClassLoaderOnStart; public AbstractCamelContextFactoryBean() { // Lets keep track of the class loader for when we actually do start things up contextClassLoaderOnStart = Thread.currentThread().getContextClassLoader(); } public Object getObject() throws Exception { return getContext(); } public Class getObjectType() { return CamelContext.class; } public boolean isSingleton() { return true; } public ClassLoader getContextClassLoaderOnStart() { return contextClassLoaderOnStart; } public void afterPropertiesSet() throws Exception { if (ObjectHelper.isEmpty(getId())) { throw new IllegalArgumentException("Id must be set"); } // set the package scan resolver as soon as possible PackageScanClassResolver packageResolver = getBeanForType(PackageScanClassResolver.class); if (packageResolver != null) { LOG.info("Using custom PackageScanClassResolver: " + packageResolver); getContext().setPackageScanClassResolver(packageResolver); } // then set custom properties if (getProperties() != null) { getContext().setProperties(getProperties().asMap()); } // 
and enable lazy loading of type converters if applicable initLazyLoadTypeConverteres(); ClassResolver classResolver = getBeanForType(ClassResolver.class); if (classResolver != null) { LOG.info("Using custom ClassResolver: " + classResolver); getContext().setClassResolver(classResolver); } FactoryFinderResolver factoryFinderResolver = getBeanForType(FactoryFinderResolver.class); if (factoryFinderResolver != null) { LOG.info("Using custom FactoryFinderResolver: " + factoryFinderResolver); getContext().setFactoryFinderResolver(factoryFinderResolver); } ExecutorServiceManager executorServiceStrategy = getBeanForType(ExecutorServiceManager.class); if (executorServiceStrategy != null) { LOG.info("Using custom ExecutorServiceStrategy: " + executorServiceStrategy); getContext().setExecutorServiceManager(executorServiceStrategy); } ThreadPoolFactory threadPoolFactory = getBeanForType(ThreadPoolFactory.class); if (threadPoolFactory != null) { LOG.info("Using custom ThreadPoolFactory: " + threadPoolFactory); getContext().getExecutorServiceManager().setThreadPoolFactory(threadPoolFactory); } ProcessorFactory processorFactory = getBeanForType(ProcessorFactory.class); if (processorFactory != null) { LOG.info("Using custom ProcessorFactory: " + processorFactory); getContext().setProcessorFactory(processorFactory); } Debugger debugger = getBeanForType(Debugger.class); if (debugger != null) { LOG.info("Using custom Debugger: " + debugger); getContext().setDebugger(debugger); } UuidGenerator uuidGenerator = getBeanForType(UuidGenerator.class); if (uuidGenerator != null) { LOG.info("Using custom UuidGenerator: " + uuidGenerator); getContext().setUuidGenerator(uuidGenerator); } // set the custom registry if defined initCustomRegistry(getContext()); // setup property placeholder so we got it as early as possible initPropertyPlaceholder(); // setup JMX agent at first initJMXAgent(); Tracer tracer = getBeanForType(Tracer.class); if (tracer != null) { // use formatter if there is a 
TraceFormatter bean defined TraceFormatter formatter = getBeanForType(TraceFormatter.class); if (formatter != null) { tracer.setFormatter(formatter); } LOG.info("Using custom Tracer: " + tracer); getContext().addInterceptStrategy(tracer); } HandleFault handleFault = getBeanForType(HandleFault.class); if (handleFault != null) { LOG.info("Using custom HandleFault: " + handleFault); getContext().addInterceptStrategy(handleFault); } Delayer delayer = getBeanForType(Delayer.class); if (delayer != null) { LOG.info("Using custom Delayer: " + delayer); getContext().addInterceptStrategy(delayer); } InflightRepository inflightRepository = getBeanForType(InflightRepository.class); if (delayer != null) { LOG.info("Using custom InflightRepository: " + inflightRepository); getContext().setInflightRepository(inflightRepository); } ManagementStrategy managementStrategy = getBeanForType(ManagementStrategy.class); if (managementStrategy != null) { LOG.info("Using custom ManagementStrategy: " + managementStrategy); getContext().setManagementStrategy(managementStrategy); } ManagementNamingStrategy managementNamingStrategy = getBeanForType(ManagementNamingStrategy.class); if (managementNamingStrategy != null) { LOG.info("Using custom ManagementNamingStrategy: " + managementNamingStrategy); getContext().getManagementStrategy().setManagementNamingStrategy(managementNamingStrategy); } EventFactory eventFactory = getBeanForType(EventFactory.class); if (eventFactory != null) { LOG.info("Using custom EventFactory: " + eventFactory); getContext().getManagementStrategy().setEventFactory(eventFactory); } // set the event notifier strategies if defined Map<String, EventNotifier> eventNotifiers = getContext().getRegistry().lookupByType(EventNotifier.class); if (eventNotifiers != null && !eventNotifiers.isEmpty()) { for (Entry<String, EventNotifier> entry : eventNotifiers.entrySet()) { EventNotifier notifier = entry.getValue(); // do not add if already added, for instance a tracer that is also an 
InterceptStrategy class if (!getContext().getManagementStrategy().getEventNotifiers().contains(notifier)) { LOG.info("Using custom EventNotifier with id: " + entry.getKey() + " and implementation: " + notifier); getContext().getManagementStrategy().addEventNotifier(notifier); } } } // set endpoint strategies if defined Map<String, EndpointStrategy> endpointStrategies = getContext().getRegistry().lookupByType(EndpointStrategy.class); if (endpointStrategies != null && !endpointStrategies.isEmpty()) { for (Entry<String, EndpointStrategy> entry : endpointStrategies.entrySet()) { EndpointStrategy strategy = entry.getValue(); LOG.info("Using custom EndpointStrategy with id: " + entry.getKey() + " and implementation: " + strategy); getContext().addRegisterEndpointCallback(strategy); } } // shutdown ShutdownStrategy shutdownStrategy = getBeanForType(ShutdownStrategy.class); if (shutdownStrategy != null) { LOG.info("Using custom ShutdownStrategy: " + shutdownStrategy); getContext().setShutdownStrategy(shutdownStrategy); } // add global interceptors Map<String, InterceptStrategy> interceptStrategies = getContext().getRegistry().lookupByType(InterceptStrategy.class); if (interceptStrategies != null && !interceptStrategies.isEmpty()) { for (Entry<String, InterceptStrategy> entry : interceptStrategies.entrySet()) { InterceptStrategy strategy = entry.getValue(); // do not add if already added, for instance a tracer that is also an InterceptStrategy class if (!getContext().getInterceptStrategies().contains(strategy)) { LOG.info("Using custom InterceptStrategy with id: " + entry.getKey() + " and implementation: " + strategy); getContext().addInterceptStrategy(strategy); } } } // set the lifecycle strategy if defined Map<String, LifecycleStrategy> lifecycleStrategies = getContext().getRegistry().lookupByType(LifecycleStrategy.class); if (lifecycleStrategies != null && !lifecycleStrategies.isEmpty()) { for (Entry<String, LifecycleStrategy> entry : lifecycleStrategies.entrySet()) { 
LifecycleStrategy strategy = entry.getValue(); // do not add if already added, for instance a tracer that is also an InterceptStrategy class if (!getContext().getLifecycleStrategies().contains(strategy)) { LOG.info("Using custom LifecycleStrategy with id: " + entry.getKey() + " and implementation: " + strategy); getContext().addLifecycleStrategy(strategy); } } } // set the default thread pool profile if defined initThreadPoolProfiles(getContext()); // Set the application context and camelContext for the beanPostProcessor initBeanPostProcessor(getContext()); // init camel context initCamelContext(getContext()); // must init route refs before we prepare the routes below initRouteRefs(); // do special preparation for some concepts such as interceptors and policies // this is needed as JAXB does not build exactly the same model definition as Spring DSL would do // using route builders. So we have here a little custom code to fix the JAXB gaps prepareRoutes(); // and add the routes getContext().addRouteDefinitions(getRoutes()); LOG.debug("Found JAXB created routes: {}", getRoutes()); findRouteBuilders(); installRoutes(); } /** * Do special preparation for some concepts such as interceptors and policies * this is needed as JAXB does not build exactly the same model definition as Spring DSL would do * using route builders. 
So we have here a little custom code to fix the JAXB gaps.
     */
    private void prepareRoutes() {
        for (RouteDefinition route : getRoutes()) {
            // sanity check first as the route is created using XML
            RouteDefinitionHelper.sanityCheckRoute(route);
            // leverage logic from route definition helper to prepare the route
            // (weaves in context-level interceptors, on-exception and on-completion definitions)
            RouteDefinitionHelper.prepareRoute(getContext(), route, getOnExceptions(), getIntercepts(), getInterceptFroms(),
                    getInterceptSendToEndpoints(), getOnCompletions());
            // mark the route as prepared now so it is not prepared twice
            route.markPrepared();
        }
    }

    /** Hook for subclasses to register custom beans/components in the context's registry. */
    protected abstract void initCustomRegistry(T context);

    /**
     * Configures lazy loading of type converters, either from the configured
     * {@code lazyLoadTypeConverters} property or from the {@code LAZY_LOAD_TYPE_CONVERTERS}
     * JVM system property (the explicit property wins if both are set).
     *
     * @throws IllegalArgumentException if the system property has a value other than "true"/"false"
     */
    protected void initLazyLoadTypeConverteres() {
        if (getLazyLoadTypeConverters() != null) {
            getContext().setLazyLoadTypeConverters(getLazyLoadTypeConverters());
        } else if (System.getProperty(LAZY_LOAD_TYPE_CONVERTERS) != null) {
            // suppose a JVM property to control it so we can use that for example for unit testing
            // to speedup testing by enabling lazy loading of type converters
            String lazy = System.getProperty(LAZY_LOAD_TYPE_CONVERTERS);
            if ("true".equalsIgnoreCase(lazy)) {
                getContext().setLazyLoadTypeConverters(true);
            } else if ("false".equalsIgnoreCase(lazy)) {
                getContext().setLazyLoadTypeConverters(false);
            } else {
                throw new IllegalArgumentException("System property with key " + LAZY_LOAD_TYPE_CONVERTERS + " has unknown value: " + lazy);
            }
        }
    }

    /**
     * Configures the JMX agent / management strategy for the context.
     * When the agent definition marks JMX as disabled a plain {@link DefaultManagementStrategy}
     * is installed; otherwise a {@link DefaultManagementAgent} is configured from the definition.
     * In both cases the lifecycle strategies installed by the DefaultCamelContext constructor
     * are cleared first, so this method must run before any strategy-dependent initialization.
     *
     * @throws Exception if resolving any of the agent's configuration values fails
     */
    protected void initJMXAgent() throws Exception {
        CamelJMXAgentDefinition camelJMXAgent = getCamelJMXAgent();

        boolean disabled = false;
        if (camelJMXAgent != null) {
            disabled = CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getDisabled());
        }

        if (disabled) {
            LOG.info("JMXAgent disabled");
            // clear the existing lifecycle strategies define by the DefaultCamelContext constructor
            getContext().getLifecycleStrategies().clear();
            // no need to add a lifecycle strategy as we do not need one as JMX is disabled
            getContext().setManagementStrategy(new DefaultManagementStrategy());
        } else if (camelJMXAgent != null) {
            LOG.info("JMXAgent enabled: " + camelJMXAgent);

            // each value is resolved via CamelContextHelper so property placeholders work
            DefaultManagementAgent agent = new DefaultManagementAgent(getContext());
            agent.setConnectorPort(CamelContextHelper.parseInteger(getContext(), camelJMXAgent.getConnectorPort()));
            agent.setCreateConnector(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getCreateConnector()));
            agent.setMBeanObjectDomainName(CamelContextHelper.parseText(getContext(), camelJMXAgent.getMbeanObjectDomainName()));
            agent.setMBeanServerDefaultDomain(CamelContextHelper.parseText(getContext(), camelJMXAgent.getMbeanServerDefaultDomain()));
            agent.setRegistryPort(CamelContextHelper.parseInteger(getContext(), camelJMXAgent.getRegistryPort()));
            agent.setServiceUrlPath(CamelContextHelper.parseText(getContext(), camelJMXAgent.getServiceUrlPath()));
            agent.setUsePlatformMBeanServer(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getUsePlatformMBeanServer()));
            agent.setOnlyRegisterProcessorWithCustomId(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getOnlyRegisterProcessorWithCustomId()));
            agent.setRegisterAlways(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getRegisterAlways()));
            agent.setRegisterNewRoutes(CamelContextHelper.parseBoolean(getContext(), camelJMXAgent.getRegisterNewRoutes()));

            ManagementStrategy managementStrategy = new ManagedManagementStrategy(agent);
            getContext().setManagementStrategy(managementStrategy);

            // clear the existing lifecycle strategies define by the DefaultCamelContext constructor
            getContext().getLifecycleStrategies().clear();
            getContext().addLifecycleStrategy(new DefaultManagementLifecycleStrategy(getContext()));

            // set additional configuration from camelJMXAgent
            boolean onlyId = agent.getOnlyRegisterProcessorWithCustomId() != null && agent.getOnlyRegisterProcessorWithCustomId();
            getContext().getManagementStrategy().onlyManageProcessorWithCustomId(onlyId);
            getContext().getManagementStrategy().setStatisticsLevel(camelJMXAgent.getStatisticsLevel());
        }
    }

    /**
     * Registers a {@link PropertiesComponent} under the name "properties" when a
     * property placeholder definition is configured, wiring in any custom resolver
     * or parser looked up by reference.
     *
     * @throws Exception if a referenced resolver/parser cannot be looked up
     */
    protected void initPropertyPlaceholder() throws
            Exception {
        if (getCamelPropertyPlaceholder() != null) {
            CamelPropertyPlaceholderDefinition def = getCamelPropertyPlaceholder();

            PropertiesComponent pc = new PropertiesComponent();
            pc.setLocation(def.getLocation());

            // if using a custom resolver
            if (ObjectHelper.isNotEmpty(def.getPropertiesResolverRef())) {
                PropertiesResolver resolver = CamelContextHelper.mandatoryLookup(getContext(), def.getPropertiesResolverRef(), PropertiesResolver.class);
                pc.setPropertiesResolver(resolver);
            }
            // if using a custom parser
            if (ObjectHelper.isNotEmpty(def.getPropertiesParserRef())) {
                PropertiesParser parser = CamelContextHelper.mandatoryLookup(getContext(), def.getPropertiesParserRef(), PropertiesParser.class);
                pc.setPropertiesParser(parser);
            }
            pc.setPropertyPrefix(def.getPropertyPrefix());
            pc.setPropertySuffix(def.getPropertySuffix());
            if (def.isFallbackToUnaugmentedProperty() != null) {
                pc.setFallbackToUnaugmentedProperty(def.isFallbackToUnaugmentedProperty());
            }
            pc.setPrefixToken(def.getPrefixToken());
            pc.setSuffixToken(def.getSuffixToken());

            // register the properties component
            getContext().addComponent("properties", pc);
        }
    }

    /**
     * Adds routes referenced by {@code routeContextRef} elements to the front of the
     * route list, so shared/common routes are started before the locally defined ones.
     *
     * @throws Exception if a referenced route cannot be looked up
     */
    protected void initRouteRefs() throws Exception {
        // add route refs to existing routes
        if (getRouteRefs() != null) {
            for (RouteContextRefDefinition ref : getRouteRefs()) {
                List<RouteDefinition> defs = ref.lookupRoutes(getContext());
                for (RouteDefinition def : defs) {
                    LOG.debug("Adding route from {} -> {}", ref, def);
                    // add in top as they are most likely to be common/shared
                    // which you may want to start first
                    getRoutes().add(0, def);
                }
            }
        }
    }

    /** Hook for subclasses to look up a bean of the given type from the underlying registry. */
    protected abstract <S> S getBeanForType(Class<S> clazz);

    /** Stops the managed context. */
    public void destroy() throws Exception {
        getContext().stop();
    }

    // Properties
    // -------------------------------------------------------------------------

    /** Returns the context, creating it on demand. */
    public T getContext() {
        return getContext(true);
    }

    public abstract T getContext(boolean create);

    public abstract List<RouteDefinition> getRoutes();

    public abstract List<?
extends AbstractCamelEndpointFactoryBean> getEndpoints();

    public abstract List<? extends AbstractCamelRedeliveryPolicyFactoryBean> getRedeliveryPolicies();

    public abstract List<InterceptDefinition> getIntercepts();

    public abstract List<InterceptFromDefinition> getInterceptFroms();

    public abstract List<InterceptSendToEndpointDefinition> getInterceptSendToEndpoints();

    public abstract PropertiesDefinition getProperties();

    public abstract String[] getPackages();

    public abstract PackageScanDefinition getPackageScan();

    public abstract void setPackageScan(PackageScanDefinition packageScan);

    public abstract ContextScanDefinition getContextScan();

    public abstract void setContextScan(ContextScanDefinition contextScan);

    public abstract CamelPropertyPlaceholderDefinition getCamelPropertyPlaceholder();

    // the String-typed options below may contain property placeholders and are
    // resolved to their real type via CamelContextHelper.parseXxx at init time
    public abstract String getTrace();

    public abstract String getStreamCache();

    public abstract String getDelayer();

    public abstract String getHandleFault();

    public abstract String getAutoStartup();

    public abstract String getUseMDCLogging();

    public abstract String getUseBreadcrumb();

    public abstract Boolean getLazyLoadTypeConverters();

    public abstract CamelJMXAgentDefinition getCamelJMXAgent();

    public abstract List<RouteBuilderDefinition> getBuilderRefs();

    public abstract List<RouteContextRefDefinition> getRouteRefs();

    public abstract String getErrorHandlerRef();

    public abstract DataFormatsDefinition getDataFormats();

    public abstract List<OnExceptionDefinition> getOnExceptions();

    public abstract List<OnCompletionDefinition> getOnCompletions();

    public abstract ShutdownRoute getShutdownRoute();

    public abstract ShutdownRunningTask getShutdownRunningTask();

    public abstract List<ThreadPoolProfileDefinition> getThreadPoolProfiles();

    public abstract String getDependsOn();

    // Implementation methods
    // -------------------------------------------------------------------------

    /**
     * Initializes the context from the configured properties. Each option is only
     * applied when it is non-null, so unset options keep the context's defaults.
     * String-valued options are resolved (placeholders included) via CamelContextHelper.
     *
     * @param ctx the context
     * @throws Exception is thrown if error occurred
     */
    protected
    void initCamelContext(T ctx) throws Exception {
        if (getStreamCache() != null) {
            ctx.setStreamCaching(CamelContextHelper.parseBoolean(getContext(), getStreamCache()));
        }
        if (getTrace() != null) {
            ctx.setTracing(CamelContextHelper.parseBoolean(getContext(), getTrace()));
        }
        if (getDelayer() != null) {
            ctx.setDelayer(CamelContextHelper.parseLong(getContext(), getDelayer()));
        }
        if (getHandleFault() != null) {
            ctx.setHandleFault(CamelContextHelper.parseBoolean(getContext(), getHandleFault()));
        }
        if (getErrorHandlerRef() != null) {
            // error handler is looked up lazily by reference when routes are built
            ctx.setErrorHandlerBuilder(new ErrorHandlerBuilderRef(getErrorHandlerRef()));
        }
        if (getAutoStartup() != null) {
            ctx.setAutoStartup(CamelContextHelper.parseBoolean(getContext(), getAutoStartup()));
        }
        if (getUseMDCLogging() != null) {
            ctx.setUseMDCLogging(CamelContextHelper.parseBoolean(getContext(), getUseMDCLogging()));
        }
        if (getUseBreadcrumb() != null) {
            ctx.setUseBreadcrumb(CamelContextHelper.parseBoolean(getContext(), getUseBreadcrumb()));
        }
        if (getShutdownRoute() != null) {
            ctx.setShutdownRoute(getShutdownRoute());
        }
        if (getShutdownRunningTask() != null) {
            ctx.setShutdownRunningTask(getShutdownRunningTask());
        }
        if (getDataFormats() != null) {
            ctx.setDataFormats(getDataFormats().asMap());
        }
    }

    /**
     * Registers thread pool profiles found in the registry and declared on the context.
     * A profile flagged as the default profile becomes the executor service manager's
     * default; all others are registered by id. At most one default profile may be
     * defined across both sources.
     *
     * @param context the context to configure
     * @throws IllegalArgumentException if more than one default profile is defined
     */
    protected void initThreadPoolProfiles(T context) throws Exception {
        // collects the ids of profiles claiming to be the default, for validation below
        Set<String> defaultIds = new HashSet<String>();

        // lookup and use custom profiles from the registry
        Map<String, ThreadPoolProfile> profiles = context.getRegistry().lookupByType(ThreadPoolProfile.class);
        if (profiles != null && !profiles.isEmpty()) {
            for (Entry<String, ThreadPoolProfile> entry : profiles.entrySet()) {
                ThreadPoolProfile profile = entry.getValue();
                // a profile marked as default replaces the manager's default profile
                if (profile.isDefaultProfile()) {
                    LOG.info("Using custom default ThreadPoolProfile with id: " + entry.getKey() + " and implementation: " + profile);
                    context.getExecutorServiceManager().setDefaultThreadPoolProfile(profile);
                    defaultIds.add(entry.getKey());
                } else {
                    context.getExecutorServiceManager().registerThreadPoolProfile(profile);
                }
            }
        }

        // use custom profiles defined in the CamelContext
        if (getThreadPoolProfiles() != null && !getThreadPoolProfiles().isEmpty()) {
            for (ThreadPoolProfileDefinition definition : getThreadPoolProfiles()) {
                if (definition.isDefaultProfile()) {
                    LOG.info("Using custom default ThreadPoolProfile with id: " + definition.getId() + " and implementation: " + definition);
                    context.getExecutorServiceManager().setDefaultThreadPoolProfile(asThreadPoolProfile(context, definition));
                    defaultIds.add(definition.getId());
                } else {
                    context.getExecutorServiceManager().registerThreadPoolProfile(asThreadPoolProfile(context, definition));
                }
            }
        }

        // validate at most one is defined
        if (defaultIds.size() > 1) {
            throw new IllegalArgumentException("Only exactly one default ThreadPoolProfile is allowed, was " + defaultIds.size() + " ids: " + defaultIds);
        }
    }

    /**
     * Creates a {@link ThreadPoolProfile} instance based on the definition.
* * @param context the camel context * @return the profile * @throws Exception is thrown if error creating the profile */ private ThreadPoolProfile asThreadPoolProfile(CamelContext context, ThreadPoolProfileDefinition definition) throws Exception { ThreadPoolProfile answer = new ThreadPoolProfile(); answer.setId(definition.getId()); answer.setDefaultProfile(definition.getDefaultProfile()); answer.setPoolSize(CamelContextHelper.parseInteger(context, definition.getPoolSize())); answer.setMaxPoolSize(CamelContextHelper.parseInteger(context, definition.getMaxPoolSize())); answer.setKeepAliveTime(CamelContextHelper.parseLong(context, definition.getKeepAliveTime())); answer.setMaxQueueSize(CamelContextHelper.parseInteger(context, definition.getMaxQueueSize())); answer.setRejectedPolicy(definition.getRejectedPolicy()); answer.setTimeUnit(definition.getTimeUnit()); return answer; } protected abstract void initBeanPostProcessor(T context); /** * Strategy to install all available routes into the context */ protected void installRoutes() throws Exception { List<RouteBuilder> builders = new ArrayList<RouteBuilder>(); // lets add route builders added from references if (getBuilderRefs() != null) { for (RouteBuilderDefinition builderRef : getBuilderRefs()) { RouteBuilder builder = builderRef.createRouteBuilder(getContext()); if (builder != null) { builders.add(builder); } else { // support to get the route here RoutesBuilder routes = builderRef.createRoutes(getContext()); if (routes != null) { this.builders.add(routes); } else { // Throw the exception that we can't find any build here throw new CamelException("Cannot find any routes with this RouteBuilder reference: " + builderRef); } } } } // install already configured routes for (RoutesBuilder routeBuilder : this.builders) { getContext().addRoutes(routeBuilder); } // install builders for (RouteBuilder builder : builders) { // Inject the annotated resource postProcessBeforeInit(builder); getContext().addRoutes(builder); } } 
    /** Hook for subclasses to inject annotated resources into a builder before routes are added. */
    protected abstract void postProcessBeforeInit(RouteBuilder builder);

    /**
     * Strategy method to try find {@link org.apache.camel.builder.RouteBuilder} instances on the classpath.
     * Runs a package scan (if configured) and then a context scan (if configured); discovered
     * builders are accumulated into the {@code builders} field. Include/exclude patterns
     * support property placeholders.
     */
    protected void findRouteBuilders() throws Exception {
        // package scan
        addPackageElementContentsToScanDefinition();
        PackageScanDefinition packageScanDef = getPackageScan();
        if (packageScanDef != null && packageScanDef.getPackages().size() > 0) {
            // use package scan filter
            PatternBasedPackageScanFilter filter = new PatternBasedPackageScanFilter();
            // support property placeholders in include and exclude
            for (String include : packageScanDef.getIncludes()) {
                include = getContext().resolvePropertyPlaceholders(include);
                filter.addIncludePattern(include);
            }
            for (String exclude : packageScanDef.getExcludes()) {
                exclude = getContext().resolvePropertyPlaceholders(exclude);
                filter.addExcludePattern(exclude);
            }
            String[] normalized = normalizePackages(getContext(), packageScanDef.getPackages());
            findRouteBuildersByPackageScan(normalized, filter, builders);
        }

        // context scan
        ContextScanDefinition contextScanDef = getContextScan();
        if (contextScanDef != null) {
            // use package scan filter
            PatternBasedPackageScanFilter filter = new PatternBasedPackageScanFilter();
            // support property placeholders in include and exclude
            for (String include : contextScanDef.getIncludes()) {
                include = getContext().resolvePropertyPlaceholders(include);
                filter.addIncludePattern(include);
            }
            for (String exclude : contextScanDef.getExcludes()) {
                exclude = getContext().resolvePropertyPlaceholders(exclude);
                filter.addExcludePattern(exclude);
            }
            findRouteBuildersByContextScan(filter, builders);
        }
    }

    protected abstract void findRouteBuildersByPackageScan(String[] packages, PackageScanFilter filter, List<RoutesBuilder> builders) throws Exception;

    protected abstract void findRouteBuildersByContextScan(PackageScanFilter filter, List<RoutesBuilder> builders) throws Exception;

    /**
     * Merges the legacy {@code packages} attribute into the package scan definition,
     * creating the definition on demand so both configuration styles end up in one place.
     */
    private void addPackageElementContentsToScanDefinition() {
        PackageScanDefinition packageScanDef = getPackageScan();

        if (getPackages() != null && getPackages().length > 0) {
            if (packageScanDef == null) {
                packageScanDef = new PackageScanDefinition();
                setPackageScan(packageScanDef);
            }
            for (String pkg : getPackages()) {
                packageScanDef.getPackages().add(pkg);
            }
        }
    }

    /**
     * Resolves placeholders and normalizes the given package names, dropping entries
     * that become empty after normalization.
     *
     * @param context the context used to resolve property placeholders
     * @param unnormalized the raw package names
     * @return the normalized, non-empty package names
     */
    private String[] normalizePackages(T context, List<String> unnormalized) throws Exception {
        List<String> packages = new ArrayList<String>();
        for (String name : unnormalized) {
            // it may use property placeholders
            name = context.resolvePropertyPlaceholders(name);
            name = ObjectHelper.normalizeClassName(name);
            if (ObjectHelper.isNotEmpty(name)) {
                LOG.trace("Using package: {} to scan for RouteBuilder classes", name);
                packages.add(name);
            }
        }
        return packages.toArray(new String[packages.size()]);
    }

}
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.document;

import com.yahoo.document.datatypes.FieldValue;
import com.yahoo.document.fieldpathupdate.FieldPathUpdate;
import com.yahoo.document.serialization.DocumentSerializerFactory;
import com.yahoo.document.serialization.DocumentUpdateReader;
import com.yahoo.document.serialization.DocumentUpdateWriter;
import com.yahoo.document.update.AssignValueUpdate;
import com.yahoo.document.update.ClearValueUpdate;
import com.yahoo.document.update.FieldUpdate;
import com.yahoo.document.update.ValueUpdate;
import com.yahoo.io.GrowableByteBuffer;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

/**
 * <p>Specifies one or more field updates to a document.</p> <p>A document update contains a list of {@link
 * com.yahoo.document.update.FieldUpdate field updates} for fields to be updated by this update. Each field update is
 * applied atomically, but the entire document update is not. A document update can only contain one field update per
 * field. To make multiple updates to the same field in the same document update, add multiple {@link
 * com.yahoo.document.update.ValueUpdate value updates} to the same field update.</p> <p>To update a document and
 * set a string field to a new value:</p>
 * <pre>
 * DocumentType musicType = DocumentTypeManager.getInstance().getDocumentType("music", 0);
 * DocumentUpdate docUpdate = new DocumentUpdate(musicType,
 *         new DocumentId("id:test:music::http://music.yahoo.com/"));
 * FieldUpdate update = FieldUpdate.createAssign(musicType.getField("artist"), "lillbabs");
 * docUpdate.addFieldUpdate(update);
 * </pre>
 *
 * @author Einar M R Rosenvinge
 * @see com.yahoo.document.update.FieldUpdate
 * @see com.yahoo.document.update.ValueUpdate
 */
public class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {

    //see src/vespa/document/util/identifiableid.h
    public static final int CLASSID = 0x1000 + 6;

    // Id of the document to update; mutable only to support deserialization (see setId).
    private DocumentId docId;
    // Field updates keyed by numeric field id; at most one FieldUpdate per field.
    private final Map<Integer, FieldUpdate> id2FieldUpdates;
    private final List<FieldPathUpdate> fieldPathUpdates;
    // Document type this update is valid for; mutable only to support deserialization.
    private DocumentType documentType;
    // Tri-state: null means "unspecified"; getCreateIfNonExistent() treats null as false.
    private Boolean createIfNonExistent;

    /**
     * Creates a DocumentUpdate.
     *
     * @param docType the document type that this update is valid for
     * @param docId   the ID of the document to update
     */
    public DocumentUpdate(DocumentType docType, DocumentId docId) {
        this.docId = docId;
        this.documentType = docType;
        this.id2FieldUpdates = new HashMap<>();
        this.fieldPathUpdates = new ArrayList<>();
    }

    /** Creates a new document update by deserializing from the given reader. */
    public DocumentUpdate(DocumentUpdateReader reader) {
        docId = null;
        documentType = null;
        id2FieldUpdates = new HashMap<>();
        fieldPathUpdates = new ArrayList<>();
        // populates docId, documentType and the update collections above
        reader.read(this);
    }

    /** Creates a new document update which is a copy of the argument.
 */
    public DocumentUpdate(DocumentUpdate update) {
        super(update);
        docId = update.docId;
        documentType = update.documentType;
        // NOTE: the collections are copied, but the contained FieldUpdate/FieldPathUpdate
        // instances themselves are shared with the source update (shallow copy).
        id2FieldUpdates = new HashMap<>(update.id2FieldUpdates);
        fieldPathUpdates = new ArrayList<>(update.fieldPathUpdates);
        createIfNonExistent = update.createIfNonExistent;
    }

    /**
     * Creates a DocumentUpdate.
     *
     * @param docType the document type that this update is valid for
     * @param docId   the ID (in string form) of the document to update
     */
    public DocumentUpdate(DocumentType docType, String docId) {
        this(docType, new DocumentId(docId));
    }

    public DocumentId getId() {
        return docId;
    }

    /**
     * Sets the document id of the document to update.
     * Use only while deserializing - changing the document id after creation has undefined behaviour.
     */
    public void setId(DocumentId id) {
        docId = id;
    }

    // Throws unless the given document has exactly this update's document type.
    private void verifyType(Document doc) {
        if (!documentType.equals(doc.getDataType())) {
            throw new IllegalArgumentException(
                    "Document " + doc.getId() + " with type " + doc.getDataType() +
                    " must have same type as update, which is type " + documentType);
        }
    }

    /**
     * Applies this document update.
     *
     * @param doc the document to apply the update to
     * @return a reference to itself
     * @throws IllegalArgumentException if the document does not have the same document type as this update
     */
    public DocumentUpdate applyTo(Document doc) {
        verifyType(doc);

        for (FieldUpdate fieldUpdate : id2FieldUpdates.values()) {
            fieldUpdate.applyTo(doc);
        }
        for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
            fieldPathUpdate.applyTo(doc);
        }
        return this;
    }

    /**
     * Prune away any field update that will not modify any field in the document.
     * Only the last value update of each field update is inspected: an assign of the
     * value the document already holds, or a clear of an already-unset/cleared field,
     * makes the whole field update a no-op and it is removed.
     *
     * @param doc document to check against
     * @return a reference to itself
     * @throws IllegalArgumentException if the document does not have the same document type as this update
     */
    public DocumentUpdate prune(Document doc) {
        verifyType(doc);

        for (Iterator<Map.Entry<Integer, FieldUpdate>> iter = id2FieldUpdates.entrySet().iterator(); iter.hasNext();) {
            Map.Entry<Integer, FieldUpdate> entry = iter.next();
            FieldUpdate update = entry.getValue();
            if (!update.isEmpty()) {
                ValueUpdate last = update.getValueUpdate(update.size() - 1);
                if (last instanceof AssignValueUpdate) {
                    // assigning the current value changes nothing -> prune
                    FieldValue currentValue = doc.getFieldValue(update.getField());
                    if ((currentValue != null) && currentValue.equals(last.getValue())) {
                        iter.remove();
                    }
                } else if (last instanceof ClearValueUpdate) {
                    FieldValue currentValue = doc.getFieldValue(update.getField());
                    if (currentValue == null) {
                        // field is already unset -> clearing is a no-op
                        iter.remove();
                    } else {
                        // compare against a cleared copy of the current value
                        FieldValue copy = currentValue.clone();
                        copy.clear();
                        if (currentValue.equals(copy)) {
                            iter.remove();
                        }
                    }
                }
            }
        }
        return this;
    }

    /**
     * Get an unmodifiable collection of all field updates that this document update specifies.
     *
     * @return a collection of all FieldUpdates in this DocumentUpdate
     */
    public Collection<FieldUpdate> fieldUpdates() {
        return Collections.unmodifiableCollection(id2FieldUpdates.values());
    }

    /**
     * Get an unmodifiable collection of all field path updates that this document update specifies.
     *
     * @return a collection of all FieldPathUpdates in this DocumentUpdate
     */
    public Collection<FieldPathUpdate> fieldPathUpdates() {
        return Collections.unmodifiableCollection(fieldPathUpdates);
    }

    /**
     * Returns the type of the document this updates.
     *
     * @return the document type of the document
     */
    public DocumentType getDocumentType() {
        return documentType;
    }

    /**
     * Sets the document type. Use only while deserializing - changing the document type after creation
     * has undefined behaviour.
     */
    public void setDocumentType(DocumentType type) {
        documentType = type;
    }

    /**
     * Returns the update for a field.
     *
     * @param field the field to return the update of
     * @return the update for the field, or null if that field has no update in this
     */
    public FieldUpdate getFieldUpdate(Field field) {
        return getFieldUpdateById(field.getId());
    }

    /** Removes all field updates from the list for field updates. */
    public void clearFieldUpdates() {
        id2FieldUpdates.clear();
    }

    /**
     * Returns the update for a field name.
     *
     * @param fieldName the field name to return the update of
     * @return the update for the field, or null if that field has no update in this
     */
    public FieldUpdate getFieldUpdate(String fieldName) {
        Field field = documentType.getField(fieldName);
        return field != null ? getFieldUpdate(field) : null;
    }

    // Lookup by the numeric field id used as key in id2FieldUpdates.
    private FieldUpdate getFieldUpdateById(Integer fieldId) {
        return id2FieldUpdates.get(fieldId);
    }

    /**
     * Assigns the field updates of this document update.
     * Also note that no assumptions can be made on the order of items after this call.
     * They might have been joined if for the same field or reordered.
     *
     * @param fieldUpdates the new list of updates of this
     * @throws NullPointerException if the argument passed is null
     */
    public void setFieldUpdates(Collection<FieldUpdate> fieldUpdates) {
        if (fieldUpdates == null) {
            throw new NullPointerException("The field updates of a document update can not be null");
        }
        clearFieldUpdates();
        addFieldUpdates(fieldUpdates);
    }

    /** The same as setFieldUpdates(Collection&lt;FieldUpdate&gt;) */
    public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
        setFieldUpdates((Collection<FieldUpdate>) fieldUpdates);
    }

    /** Adds each of the given field updates, merging with any existing update for the same field. */
    public void addFieldUpdates(Collection<FieldUpdate> fieldUpdates) {
        for (FieldUpdate fieldUpdate : fieldUpdates) {
            addFieldUpdate(fieldUpdate);
        }
    }

    /**
     * Get the number of field updates in this document update.
     *
     * @return the size of the List of FieldUpdates
     */
    public int size() {
        return id2FieldUpdates.size();
    }

    /**
     * Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
     * for the named field, the content of the given FieldUpdate is added to the existing one.
     *
     * @param update The FieldUpdate to add to this DocumentUpdate.
     * @return This, to allow chaining.
     * @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
     *                                  field.
     */
    public DocumentUpdate addFieldUpdate(FieldUpdate update) {
        int fieldId = update.getField().getId();
        if (documentType.getField(fieldId) == null) {
            throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" + update.getField().getName() + "'.");
        }
        FieldUpdate prevUpdate = getFieldUpdateById(fieldId);
        if (prevUpdate != update) {
            if (prevUpdate != null) {
                // merge into the existing update for this field rather than replacing it
                prevUpdate.addAll(update);
            } else {
                id2FieldUpdates.put(fieldId, update);
            }
        }
        return this;
    }

    /**
     * Adds a field path update to perform on the document.
     *
     * @return a reference to itself.
     */
    public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
        fieldPathUpdates.add(fieldPathUpdate);
        return this;
    }

    /**
     * Adds all the field- and field path updates of the given document update to this. If the given update refers to a
     * different document or document type than this, this method throws an exception.
     *
     * @param update The update whose content to add to this.
     * @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
     *                                  does not match the content of this.
*/ public void addAll(DocumentUpdate update) { if (update == null) { return; } if (!docId.equals(update.docId)) { throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + "."); } if (!documentType.equals(update.documentType)) { throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + "."); } addFieldUpdates(update.fieldUpdates()); for (FieldPathUpdate pathUpd : update.fieldPathUpdates) { addFieldPathUpdate(pathUpd); } } public FieldUpdate removeFieldUpdate(Field field) { return id2FieldUpdates.remove(field.getId()); } public FieldUpdate removeFieldUpdate(String fieldName) { Field field = documentType.getField(fieldName); return field != null ? removeFieldUpdate(field) : null; } /** * Returns the document type of this document update. * * @return the document type of this document update */ public DocumentType getType() { return documentType; } public final void serialize(GrowableByteBuffer buf) { serialize(DocumentSerializerFactory.create6(buf)); } public void serialize(DocumentUpdateWriter data) { data.write(this); } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof DocumentUpdate)) return false; DocumentUpdate that = (DocumentUpdate) o; if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false; if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false; if ( ! fieldPathUpdates.equals(that.fieldPathUpdates)) return false; if ( ! id2FieldUpdates.equals(that.id2FieldUpdates)) return false; if (this.getCreateIfNonExistent() != that.getCreateIfNonExistent()) return false; if ( ! Objects.equals(getCondition(), that.getCondition())) return false; return true; } @Override public int hashCode() { int result = docId != null ? docId.hashCode() : 0; result = 31 * result + id2FieldUpdates.hashCode(); result = 31 * result + fieldPathUpdates.hashCode(); result = 31 * result + (documentType != null ? 
documentType.hashCode() : 0); return result; } @Override public String toString() { StringBuilder string = new StringBuilder(); string.append("update of document '"); string.append(docId); string.append("': "); string.append("create-if-non-existent="); string.append(getCreateIfNonExistent()); string.append(": "); string.append("["); for (FieldUpdate fieldUpdate : id2FieldUpdates.values()) { string.append(fieldUpdate).append(" "); } string.append("]"); if (fieldPathUpdates.size() > 0) { string.append(" [ "); for (FieldPathUpdate up : fieldPathUpdates) { string.append(up.toString()).append(" "); } string.append(" ]"); } return string.toString(); } @Override public Iterator<FieldPathUpdate> iterator() { return fieldPathUpdates.iterator(); } /** * Returns whether or not this field update contains any field- or field path updates. * * @return True if this update is empty. */ public boolean isEmpty() { return id2FieldUpdates.isEmpty() && fieldPathUpdates.isEmpty(); } /** * Sets whether this update should create the document it updates if that document does not exist. * In this case an empty document is created before the update is applied. * * @param value Whether the document it updates should be created. */ public void setCreateIfNonExistent(boolean value) { createIfNonExistent = value; } /** * Gets whether this update should create the document it updates if that document does not exist. * * @return whether the document it updates should be created. */ public boolean getCreateIfNonExistent() { return createIfNonExistent != null && createIfNonExistent; } public Optional<Boolean> getOptionalCreateIfNonExistent() { return Optional.ofNullable(createIfNonExistent); } }
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.source.hls; import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.extractor.DefaultExtractorInput; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.id3.Id3Decoder; import com.google.android.exoplayer2.metadata.id3.PrivFrame; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; import com.google.android.exoplayer2.util.UriUtil; import com.google.android.exoplayer2.util.Util; import java.io.EOFException; import java.io.IOException; import java.math.BigInteger; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import 
org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** * An HLS {@link MediaChunk}. */ /* package */ final class HlsMediaChunk extends MediaChunk { /** * Creates a new instance. * * @param extractorFactory A {@link HlsExtractorFactory} from which the HLS media chunk extractor * is obtained. * @param dataSource The source from which the data should be loaded. * @param format The chunk format. * @param startOfPlaylistInPeriodUs The position of the playlist in the period in microseconds. * @param mediaPlaylist The media playlist from which this chunk was obtained. * @param playlistUrl The url of the playlist from which this chunk was obtained. * @param muxedCaptionFormats List of muxed caption {@link Format}s. Null if no closed caption * information is available in the master playlist. * @param trackSelectionReason See {@link #trackSelectionReason}. * @param trackSelectionData See {@link #trackSelectionData}. * @param isMasterTimestampSource True if the chunk can initialize the timestamp adjuster. * @param timestampAdjusterProvider The provider from which to obtain the {@link * TimestampAdjuster}. * @param previousChunk The {@link HlsMediaChunk} that preceded this one. May be null. * @param mediaSegmentKey The media segment decryption key, if fully encrypted. Null otherwise. * @param initSegmentKey The initialization segment decryption key, if fully encrypted. Null * otherwise. 
   */
  public static HlsMediaChunk createInstance(
      HlsExtractorFactory extractorFactory,
      DataSource dataSource,
      Format format,
      long startOfPlaylistInPeriodUs,
      HlsMediaPlaylist mediaPlaylist,
      int segmentIndexInPlaylist,
      Uri playlistUrl,
      @Nullable List<Format> muxedCaptionFormats,
      int trackSelectionReason,
      @Nullable Object trackSelectionData,
      boolean isMasterTimestampSource,
      TimestampAdjusterProvider timestampAdjusterProvider,
      @Nullable HlsMediaChunk previousChunk,
      @Nullable byte[] mediaSegmentKey,
      @Nullable byte[] initSegmentKey) {
    // Media segment.
    HlsMediaPlaylist.Segment mediaSegment = mediaPlaylist.segments.get(segmentIndexInPlaylist);
    DataSpec dataSpec =
        new DataSpec(
            UriUtil.resolveToUri(mediaPlaylist.baseUri, mediaSegment.url),
            mediaSegment.byterangeOffset,
            mediaSegment.byterangeLength,
            /* key= */ null);
    // A non-null segment key means the segment body is fully encrypted.
    boolean mediaSegmentEncrypted = mediaSegmentKey != null;
    byte[] mediaSegmentIv =
        mediaSegmentEncrypted
            ? getEncryptionIvArray(Assertions.checkNotNull(mediaSegment.encryptionIV))
            : null;
    // Wraps the data source with a decrypting one when a key/IV pair is present.
    DataSource mediaDataSource = buildDataSource(dataSource, mediaSegmentKey, mediaSegmentIv);

    // Init segment (may be absent; all init-related locals stay null/false in that case).
    HlsMediaPlaylist.Segment initSegment = mediaSegment.initializationSegment;
    DataSpec initDataSpec = null;
    boolean initSegmentEncrypted = false;
    DataSource initDataSource = null;
    if (initSegment != null) {
      initSegmentEncrypted = initSegmentKey != null;
      byte[] initSegmentIv =
          initSegmentEncrypted
              ? getEncryptionIvArray(Assertions.checkNotNull(initSegment.encryptionIV))
              : null;
      Uri initSegmentUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, initSegment.url);
      initDataSpec =
          new DataSpec(
              initSegmentUri, initSegment.byterangeOffset, initSegment.byterangeLength, /* key= */ null);
      initDataSource = buildDataSource(dataSource, initSegmentKey, initSegmentIv);
    }

    // Segment timing is expressed relative to the period via the playlist offset.
    long segmentStartTimeInPeriodUs = startOfPlaylistInPeriodUs + mediaSegment.relativeStartTimeUs;
    long segmentEndTimeInPeriodUs = segmentStartTimeInPeriodUs + mediaSegment.durationUs;
    int discontinuitySequenceNumber =
        mediaPlaylist.discontinuitySequence + mediaSegment.relativeDiscontinuitySequence;

    Extractor previousExtractor = null;
    Id3Decoder id3Decoder;
    ParsableByteArray scratchId3Data;
    boolean shouldSpliceIn;
    if (previousChunk != null) {
      // Reuse the previous chunk's ID3 machinery to avoid reallocating per chunk.
      id3Decoder = previousChunk.id3Decoder;
      scratchId3Data = previousChunk.scratchId3Data;
      // Splice in when switching playlists or when the previous chunk did not complete its load.
      shouldSpliceIn =
          !playlistUrl.equals(previousChunk.playlistUrl) || !previousChunk.loadCompleted;
      // The extractor may only be carried over within the same discontinuity and without splicing.
      previousExtractor =
          previousChunk.isExtractorReusable
                  && previousChunk.discontinuitySequenceNumber == discontinuitySequenceNumber
                  && !shouldSpliceIn
              ? previousChunk.extractor
              : null;
    } else {
      id3Decoder = new Id3Decoder();
      scratchId3Data = new ParsableByteArray(Id3Decoder.ID3_HEADER_LENGTH);
      shouldSpliceIn = false;
    }

    return new HlsMediaChunk(
        extractorFactory,
        mediaDataSource,
        dataSpec,
        format,
        mediaSegmentEncrypted,
        initDataSource,
        initDataSpec,
        initSegmentEncrypted,
        playlistUrl,
        muxedCaptionFormats,
        trackSelectionReason,
        trackSelectionData,
        segmentStartTimeInPeriodUs,
        segmentEndTimeInPeriodUs,
        /* chunkMediaSequence= */ mediaPlaylist.mediaSequence + segmentIndexInPlaylist,
        discontinuitySequenceNumber,
        mediaSegment.hasGapTag,
        isMasterTimestampSource,
        /* timestampAdjuster= */ timestampAdjusterProvider.getAdjuster(discontinuitySequenceNumber),
        mediaSegment.drmInitData,
        previousExtractor,
        id3Decoder,
        scratchId3Data,
        shouldSpliceIn);
  }

  public static final String PRIV_TIMESTAMP_FRAME_OWNER =
      "com.apple.streaming.transportStreamTimestamp";

  private static final PositionHolder DUMMY_POSITION_HOLDER = new PositionHolder();

  // Source of process-wide unique chunk ids.
  private static final AtomicInteger uidSource = new AtomicInteger();

  /** A unique identifier for the chunk. */
  public final int uid;

  /** The discontinuity sequence number of the chunk. */
  public final int discontinuitySequenceNumber;

  /** The url of the playlist from which this chunk was obtained.
*/ public final Uri playlistUrl; @Nullable private final DataSource initDataSource; @Nullable private final DataSpec initDataSpec; @Nullable private final Extractor previousExtractor; private final boolean isMasterTimestampSource; private final boolean hasGapTag; private final TimestampAdjuster timestampAdjuster; private final boolean shouldSpliceIn; private final HlsExtractorFactory extractorFactory; @Nullable private final List<Format> muxedCaptionFormats; @Nullable private final DrmInitData drmInitData; private final Id3Decoder id3Decoder; private final ParsableByteArray scratchId3Data; private final boolean mediaSegmentEncrypted; private final boolean initSegmentEncrypted; @MonotonicNonNull private Extractor extractor; private boolean isExtractorReusable; @MonotonicNonNull private HlsSampleStreamWrapper output; // nextLoadPosition refers to the init segment if initDataLoadRequired is true. // Otherwise, nextLoadPosition refers to the media segment. private int nextLoadPosition; private boolean initDataLoadRequired; private volatile boolean loadCanceled; private boolean loadCompleted; private HlsMediaChunk( HlsExtractorFactory extractorFactory, DataSource mediaDataSource, DataSpec dataSpec, Format format, boolean mediaSegmentEncrypted, @Nullable DataSource initDataSource, @Nullable DataSpec initDataSpec, boolean initSegmentEncrypted, Uri playlistUrl, @Nullable List<Format> muxedCaptionFormats, int trackSelectionReason, @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs, long chunkMediaSequence, int discontinuitySequenceNumber, boolean hasGapTag, boolean isMasterTimestampSource, TimestampAdjuster timestampAdjuster, @Nullable DrmInitData drmInitData, @Nullable Extractor previousExtractor, Id3Decoder id3Decoder, ParsableByteArray scratchId3Data, boolean shouldSpliceIn) { super( mediaDataSource, dataSpec, format, trackSelectionReason, trackSelectionData, startTimeUs, endTimeUs, chunkMediaSequence); this.mediaSegmentEncrypted = 
mediaSegmentEncrypted; this.discontinuitySequenceNumber = discontinuitySequenceNumber; this.initDataSpec = initDataSpec; this.initDataSource = initDataSource; this.initDataLoadRequired = initDataSpec != null; this.initSegmentEncrypted = initSegmentEncrypted; this.playlistUrl = playlistUrl; this.isMasterTimestampSource = isMasterTimestampSource; this.timestampAdjuster = timestampAdjuster; this.hasGapTag = hasGapTag; this.extractorFactory = extractorFactory; this.muxedCaptionFormats = muxedCaptionFormats; this.drmInitData = drmInitData; this.previousExtractor = previousExtractor; this.id3Decoder = id3Decoder; this.scratchId3Data = scratchId3Data; this.shouldSpliceIn = shouldSpliceIn; uid = uidSource.getAndIncrement(); } /** * Initializes the chunk for loading, setting the {@link HlsSampleStreamWrapper} that will receive * samples as they are loaded. * * @param output The output that will receive the loaded samples. */ public void init(HlsSampleStreamWrapper output) { this.output = output; } @Override public boolean isLoadCompleted() { return loadCompleted; } // Loadable implementation @Override public void cancelLoad() { loadCanceled = true; } @Override public void load() throws IOException, InterruptedException { // output == null means init() hasn't been called. Assertions.checkNotNull(output); if (extractor == null && previousExtractor != null) { extractor = previousExtractor; isExtractorReusable = true; initDataLoadRequired = false; output.init(uid, shouldSpliceIn, /* reusingExtractor= */ true); } maybeLoadInitData(); if (!loadCanceled) { if (!hasGapTag) { loadMedia(); } loadCompleted = true; } } // Internal methods. 
@RequiresNonNull("output") private void maybeLoadInitData() throws IOException, InterruptedException { if (!initDataLoadRequired) { return; } // initDataLoadRequired => initDataSource != null && initDataSpec != null Assertions.checkNotNull(initDataSource); Assertions.checkNotNull(initDataSpec); feedDataToExtractor(initDataSource, initDataSpec, initSegmentEncrypted); nextLoadPosition = 0; initDataLoadRequired = false; } @RequiresNonNull("output") private void loadMedia() throws IOException, InterruptedException { if (!isMasterTimestampSource) { timestampAdjuster.waitUntilInitialized(); } else if (timestampAdjuster.getFirstSampleTimestampUs() == TimestampAdjuster.DO_NOT_OFFSET) { // We're the master and we haven't set the desired first sample timestamp yet. timestampAdjuster.setFirstSampleTimestampUs(startTimeUs); } feedDataToExtractor(dataSource, dataSpec, mediaSegmentEncrypted); } /** * Attempts to feed the given {@code dataSpec} to {@code this.extractor}. Whenever the operation * concludes (because of a thrown exception or because the operation finishes), the number of fed * bytes is written to {@code nextLoadPosition}. */ @RequiresNonNull("output") private void feedDataToExtractor( DataSource dataSource, DataSpec dataSpec, boolean dataIsEncrypted) throws IOException, InterruptedException { // If we previously fed part of this chunk to the extractor, we need to skip it this time. For // encrypted content we need to skip the data by reading it through the source, so as to ensure // correct decryption of the remainder of the chunk. For clear content, we can request the // remainder of the chunk directly. 
DataSpec loadDataSpec; boolean skipLoadedBytes; if (dataIsEncrypted) { loadDataSpec = dataSpec; skipLoadedBytes = nextLoadPosition != 0; } else { loadDataSpec = dataSpec.subrange(nextLoadPosition); skipLoadedBytes = false; } try { ExtractorInput input = prepareExtraction(dataSource, loadDataSpec); if (skipLoadedBytes) { input.skipFully(nextLoadPosition); } try { int result = Extractor.RESULT_CONTINUE; while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { result = extractor.read(input, DUMMY_POSITION_HOLDER); } } finally { nextLoadPosition = (int) (input.getPosition() - dataSpec.absoluteStreamPosition); } } finally { Util.closeQuietly(dataSource); } } @RequiresNonNull("output") @EnsuresNonNull("extractor") private DefaultExtractorInput prepareExtraction(DataSource dataSource, DataSpec dataSpec) throws IOException, InterruptedException { long bytesToRead = dataSource.open(dataSpec); DefaultExtractorInput extractorInput = new DefaultExtractorInput(dataSource, dataSpec.absoluteStreamPosition, bytesToRead); if (extractor == null) { long id3Timestamp = peekId3PrivTimestamp(extractorInput); extractorInput.resetPeekPosition(); HlsExtractorFactory.Result result = extractorFactory.createExtractor( previousExtractor, dataSpec.uri, trackFormat, muxedCaptionFormats, drmInitData, timestampAdjuster, dataSource.getResponseHeaders(), extractorInput); extractor = result.extractor; isExtractorReusable = result.isReusable; if (result.isPackedAudioExtractor) { output.setSampleOffsetUs( id3Timestamp != C.TIME_UNSET ? timestampAdjuster.adjustTsTimestamp(id3Timestamp) : startTimeUs); } else { // In case the container format changes mid-stream to non-packed-audio, we need to reset // the timestamp offset. 
output.setSampleOffsetUs(/* sampleOffsetUs= */ 0L); } output.init(uid, shouldSpliceIn, /* reusingExtractor= */ false); extractor.init(output); } return extractorInput; } /** * Peek the presentation timestamp of the first sample in the chunk from an ID3 PRIV as defined * in the HLS spec, version 20, Section 3.4. Returns {@link C#TIME_UNSET} if the frame is not * found. This method only modifies the peek position. * * @param input The {@link ExtractorInput} to obtain the PRIV frame from. * @return The parsed, adjusted timestamp in microseconds * @throws IOException If an error occurred peeking from the input. * @throws InterruptedException If the thread was interrupted. */ private long peekId3PrivTimestamp(ExtractorInput input) throws IOException, InterruptedException { input.resetPeekPosition(); try { input.peekFully(scratchId3Data.data, 0, Id3Decoder.ID3_HEADER_LENGTH); } catch (EOFException e) { // The input isn't long enough for there to be any ID3 data. return C.TIME_UNSET; } scratchId3Data.reset(Id3Decoder.ID3_HEADER_LENGTH); int id = scratchId3Data.readUnsignedInt24(); if (id != Id3Decoder.ID3_TAG) { return C.TIME_UNSET; } scratchId3Data.skipBytes(3); // version(2), flags(1). 
int id3Size = scratchId3Data.readSynchSafeInt(); int requiredCapacity = id3Size + Id3Decoder.ID3_HEADER_LENGTH; if (requiredCapacity > scratchId3Data.capacity()) { byte[] data = scratchId3Data.data; scratchId3Data.reset(requiredCapacity); System.arraycopy(data, 0, scratchId3Data.data, 0, Id3Decoder.ID3_HEADER_LENGTH); } input.peekFully(scratchId3Data.data, Id3Decoder.ID3_HEADER_LENGTH, id3Size); Metadata metadata = id3Decoder.decode(scratchId3Data.data, id3Size); if (metadata == null) { return C.TIME_UNSET; } int metadataLength = metadata.length(); for (int i = 0; i < metadataLength; i++) { Metadata.Entry frame = metadata.get(i); if (frame instanceof PrivFrame) { PrivFrame privFrame = (PrivFrame) frame; if (PRIV_TIMESTAMP_FRAME_OWNER.equals(privFrame.owner)) { System.arraycopy( privFrame.privateData, 0, scratchId3Data.data, 0, 8 /* timestamp size */); scratchId3Data.reset(8); // The top 31 bits should be zeros, but explicitly zero them to wrap in the case that the // streaming provider forgot. See: https://github.com/google/ExoPlayer/pull/3495. return scratchId3Data.readLong() & 0x1FFFFFFFFL; } } } return C.TIME_UNSET; } // Internal methods. private static byte[] getEncryptionIvArray(String ivString) { String trimmedIv; if (Util.toLowerInvariant(ivString).startsWith("0x")) { trimmedIv = ivString.substring(2); } else { trimmedIv = ivString; } byte[] ivData = new BigInteger(trimmedIv, /* radix= */ 16).toByteArray(); byte[] ivDataWithPadding = new byte[16]; int offset = ivData.length > 16 ? ivData.length - 16 : 0; System.arraycopy( ivData, offset, ivDataWithPadding, ivDataWithPadding.length - ivData.length + offset, ivData.length - offset); return ivDataWithPadding; } /** * If the segment is fully encrypted, returns an {@link Aes128DataSource} that wraps the original * in order to decrypt the loaded data. Else returns the original. * * <p>{@code fullSegmentEncryptionKey} & {@code encryptionIv} can either both be null, or neither. 
*/ private static DataSource buildDataSource( DataSource dataSource, @Nullable byte[] fullSegmentEncryptionKey, @Nullable byte[] encryptionIv) { if (fullSegmentEncryptionKey != null) { Assertions.checkNotNull(encryptionIv); return new Aes128DataSource(dataSource, fullSegmentEncryptionKey, encryptionIv); } return dataSource; } }
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.security.config.annotation.web.socket;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.SmartInitializingSingleton;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.messaging.Message;
import org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolver;
import org.springframework.messaging.simp.annotation.support.SimpAnnotationMethodMessageHandler;
import org.springframework.messaging.simp.config.ChannelRegistration;
import org.springframework.security.access.AccessDecisionVoter;
import org.springframework.security.access.expression.SecurityExpressionHandler;
import org.springframework.security.access.vote.AffirmativeBased;
import org.springframework.security.config.annotation.web.messaging.MessageSecurityMetadataSourceRegistry;
import org.springframework.security.messaging.access.expression.MessageExpressionVoter;
import org.springframework.security.messaging.access.intercept.ChannelSecurityInterceptor;
import org.springframework.security.messaging.access.intercept.MessageSecurityMetadataSource;
import org.springframework.security.messaging.context.AuthenticationPrincipalArgumentResolver;
import org.springframework.security.messaging.context.SecurityContextChannelInterceptor;
import org.springframework.security.messaging.web.csrf.CsrfChannelInterceptor;
import org.springframework.security.messaging.web.socket.server.CsrfTokenHandshakeInterceptor;
import org.springframework.util.AntPathMatcher;
import org.springframework.util.PathMatcher;
import org.springframework.web.servlet.handler.SimpleUrlHandlerMapping;
import org.springframework.web.socket.config.annotation.AbstractWebSocketMessageBrokerConfigurer;
import org.springframework.web.socket.config.annotation.StompEndpointRegistry;
import org.springframework.web.socket.server.HandshakeInterceptor;
import org.springframework.web.socket.server.support.WebSocketHttpRequestHandler;
import org.springframework.web.socket.sockjs.SockJsService;
import org.springframework.web.socket.sockjs.support.SockJsHttpRequestHandler;
import org.springframework.web.socket.sockjs.transport.TransportHandlingSockJsService;

/**
 * Allows configuring WebSocket Authorization.
 *
 * <p>
 * For example:
 * </p>
 *
 * <pre>
 * &#064;Configuration
 * public class WebSocketSecurityConfig extends
 * AbstractSecurityWebSocketMessageBrokerConfigurer {
 *
 * 	&#064;Override
 * 	protected void configureInbound(MessageSecurityMetadataSourceRegistry messages) {
 * 		messages.simpDestMatchers(&quot;/user/queue/errors&quot;).permitAll()
 * 				.simpDestMatchers(&quot;/admin/**&quot;).hasRole(&quot;ADMIN&quot;).anyMessage()
 * 				.authenticated();
 * 	}
 * }
 * </pre>
 *
 *
 * @since 4.0
 * @author Rob Winch
 */
@Order(Ordered.HIGHEST_PRECEDENCE + 100)
public abstract class AbstractSecurityWebSocketMessageBrokerConfigurer extends
		AbstractWebSocketMessageBrokerConfigurer implements SmartInitializingSingleton {
	// Collects the security mappings declared by configureInbound(...).
	private final WebSocketMessageSecurityMetadataSourceRegistry inboundRegistry = new WebSocketMessageSecurityMetadataSourceRegistry();

	// Optional custom expression handler, injected via setMessageExpessionHandler.
	private SecurityExpressionHandler<Message<Object>> expressionHandler;

	private ApplicationContext context;

	// Intentionally empty: subclasses register their own STOMP endpoints.
	public void registerStompEndpoints(StompEndpointRegistry registry) {
	}

	@Override
	public void addArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
		// Enables @AuthenticationPrincipal resolution in @MessageMapping methods.
		argumentResolvers.add(new AuthenticationPrincipalArgumentResolver());
	}

	@Override
	public final void configureClientInboundChannel(ChannelRegistration registration) {
		ChannelSecurityInterceptor inboundChannelSecurity = inboundChannelSecurity();
		// Order matters: security context first, then CSRF (unless disabled), then authorization.
		registration.setInterceptors(securityContextChannelInterceptor());
		if (!sameOriginDisabled()) {
			registration.setInterceptors(csrfChannelInterceptor());
		}
		if (inboundRegistry.containsMapping()) {
			registration.setInterceptors(inboundChannelSecurity);
		}
		customizeClientInboundChannel(registration);
	}

	// Uses the broker's configured PathMatcher when available, falling back to Ant-style matching.
	private PathMatcher getDefaultPathMatcher() {
		try {
			return context.getBean(SimpAnnotationMethodMessageHandler.class).getPathMatcher();
		}
		catch(NoSuchBeanDefinitionException e) {
			return new AntPathMatcher();
		}
	}

	/**
	 * <p>
	 * Determines if a CSRF token is required for connecting. This protects against remote
	 * sites from connecting to the application and being able to read/write data over the
	 * connection. The default is false (the token is required).
	 * </p>
	 * <p>
	 * Subclasses can override this method to disable CSRF protection
	 * </p>
	 *
	 * @return false if a CSRF token is required for connecting, else true
	 */
	protected boolean sameOriginDisabled() {
		return false;
	}

	/**
	 * Allows subclasses to customize the configuration of the {@link ChannelRegistration}
	 * .
	 *
	 * @param registration the {@link ChannelRegistration} to customize
	 */
	protected void customizeClientInboundChannel(ChannelRegistration registration) {
	}

	@Bean
	public CsrfChannelInterceptor csrfChannelInterceptor() {
		return new CsrfChannelInterceptor();
	}

	/**
	 * Creates the authorization interceptor for inbound messages, backed by an
	 * {@link AffirmativeBased} manager with a single {@link MessageExpressionVoter}.
	 */
	@Bean
	public ChannelSecurityInterceptor inboundChannelSecurity() {
		ChannelSecurityInterceptor channelSecurityInterceptor = new ChannelSecurityInterceptor(
				inboundMessageSecurityMetadataSource());
		MessageExpressionVoter<Object> voter = new MessageExpressionVoter<Object>();
		if(expressionHandler != null) {
			voter.setExpressionHandler(expressionHandler);
		}
		List<AccessDecisionVoter<? extends Object>> voters = new ArrayList<AccessDecisionVoter<? extends Object>>();
		voters.add(voter);

		AffirmativeBased manager = new AffirmativeBased(voters);
		channelSecurityInterceptor.setAccessDecisionManager(manager);
		return channelSecurityInterceptor;
	}

	@Bean
	public SecurityContextChannelInterceptor securityContextChannelInterceptor() {
		return new SecurityContextChannelInterceptor();
	}

	/** Builds the metadata source from the subclass's configureInbound(...) mappings. */
	@Bean
	public MessageSecurityMetadataSource inboundMessageSecurityMetadataSource() {
		if(expressionHandler != null) {
			inboundRegistry.expressionHandler(expressionHandler);
		}
		configureInbound(inboundRegistry);
		return inboundRegistry.createMetadataSource();
	}

	/**
	 * Hook for subclasses to declare message authorization rules.
	 *
	 * @param messages the registry to add mappings to
	 */
	protected void configureInbound(MessageSecurityMetadataSourceRegistry messages) {
	}

	// Widens the protected superclass methods so this configurer can query the registry.
	private static class WebSocketMessageSecurityMetadataSourceRegistry extends
			MessageSecurityMetadataSourceRegistry {
		@Override
		public MessageSecurityMetadataSource createMetadataSource() {
			return super.createMetadataSource();
		}

		@Override
		protected boolean containsMapping() {
			return super.containsMapping();
		}

		@Override
		protected boolean isSimpDestPathMatcherConfigured() {
			return super.isSimpDestPathMatcherConfigured();
		}
	}

	@Autowired
	public void setApplicationContext(ApplicationContext context) {
		this.context = context;
	}

	// NOTE(review): method name misspells "Expression"; it is public API wired by Spring via
	// @Autowired, so renaming would be a breaking change — confirm before touching.
	// Only applies a handler when exactly one candidate bean exists; ambiguity is ignored.
	@Autowired(required = false)
	public void setMessageExpessionHandler(List<SecurityExpressionHandler<Message<Object>>> expressionHandlers) {
		if(expressionHandlers.size() == 1) {
			this.expressionHandler = expressionHandlers.get(0);
		}
	}

	/**
	 * After all singletons exist, prepends a {@link CsrfTokenHandshakeInterceptor} to every
	 * registered SockJS/WebSocket handshake handler (unless same-origin protection is disabled),
	 * and applies the default path matcher to the registry if none was configured.
	 */
	public void afterSingletonsInstantiated() {
		if (sameOriginDisabled()) {
			return;
		}

		String beanName = "stompWebSocketHandlerMapping";
		SimpleUrlHandlerMapping mapping = context.getBean(beanName,
				SimpleUrlHandlerMapping.class);
		Map<String, Object> mappings = mapping.getHandlerMap();
		for (Object object : mappings.values()) {
			if (object instanceof SockJsHttpRequestHandler) {
				SockJsHttpRequestHandler sockjsHandler = (SockJsHttpRequestHandler) object;
				SockJsService sockJsService = sockjsHandler.getSockJsService();
				if (!(sockJsService instanceof TransportHandlingSockJsService)) {
					throw new IllegalStateException(
							"sockJsService must be instance of TransportHandlingSockJsService got "
									+ sockJsService);
				}

				TransportHandlingSockJsService transportHandlingSockJsService = (TransportHandlingSockJsService) sockJsService;
				List<HandshakeInterceptor> handshakeInterceptors = transportHandlingSockJsService
						.getHandshakeInterceptors();
				List<HandshakeInterceptor> interceptorsToSet = new ArrayList<HandshakeInterceptor>(
						handshakeInterceptors.size() + 1);
				// CSRF interceptor must run before any existing interceptors.
				interceptorsToSet.add(new CsrfTokenHandshakeInterceptor());
				interceptorsToSet.addAll(handshakeInterceptors);

				transportHandlingSockJsService
						.setHandshakeInterceptors(interceptorsToSet);
			}
			else if (object instanceof WebSocketHttpRequestHandler) {
				WebSocketHttpRequestHandler handler = (WebSocketHttpRequestHandler) object;
				List<HandshakeInterceptor> handshakeInterceptors = handler
						.getHandshakeInterceptors();
				List<HandshakeInterceptor> interceptorsToSet = new ArrayList<HandshakeInterceptor>(
						handshakeInterceptors.size() + 1);
				interceptorsToSet.add(new CsrfTokenHandshakeInterceptor());
				interceptorsToSet.addAll(handshakeInterceptors);

				handler.setHandshakeInterceptors(interceptorsToSet);
			}
			else {
				throw new IllegalStateException(
						"Bean "
								+ beanName
								+ " is expected to contain mappings to either a SockJsHttpRequestHandler or a WebSocketHttpRequestHandler but got "
								+ object);
			}
		}

		if (inboundRegistry.containsMapping()
				&& !inboundRegistry.isSimpDestPathMatcherConfigured()) {
			PathMatcher pathMatcher = getDefaultPathMatcher();
			inboundRegistry.simpDestPathMatcher(pathMatcher);
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.log4j.chainsaw.receivers;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyDescriptor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.swing.AbstractCellEditor;
import javax.swing.DefaultCellEditor;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableModel;

import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.chainsaw.ChainsawConstants;
import org.apache.log4j.chainsaw.Generator;
import org.apache.log4j.chainsaw.helper.TableCellEditorFactory;
import org.apache.log4j.net.SocketHubReceiver;
import org.apache.log4j.plugins.Plugin;

/**
 * A panel that allows the user to edit a particular Plugin, by using introspection
 * this class discovers the modifiable properties of the Plugin
 * @author Paul Smith &lt;psmith@apache.org&gt;
 */
public class PluginPropertyEditorPanel extends JPanel {

    private final JScrollPane scrollPane = new JScrollPane();
    private final JTable propertyTable = new JTable();

    // The plugin currently being edited; changes are announced via the "plugin" bound property.
    private Plugin plugin;
    // Placeholder model shown when no plugin is selected.
    private TableModel defaultModel = new DefaultTableModel(
            new String[] { "Property", "Value" }, 1);

    private static final Logger logger = LogManager.getLogger(PluginPropertyEditorPanel.class);

    /**
     * Creates the panel and wires the property-change listener that rebuilds the table.
     */
    public PluginPropertyEditorPanel() {
        super();
        initComponents();
        setupListeners();
    }

    /**
     * Lays out the table inside a scroll pane and installs the placeholder model.
     */
    private void initComponents() {
        propertyTable.setRowHeight(ChainsawConstants.DEFAULT_ROW_HEIGHT);
        setLayout(new BorderLayout());
        scrollPane.setViewportView(propertyTable);

        add(scrollPane, BorderLayout.CENTER);

        propertyTable.setModel(
            defaultModel = new DefaultTableModel(
                    new String[] { "Property", "Value" }, 1));
    }

    /**
     * Listens to the bound "plugin" property: a non-null plugin is introspected into a
     * table model; null reverts to the disabled placeholder model.
     */
    private void setupListeners() {
        addPropertyChangeListener("plugin",
            new PropertyChangeListener() {
                public void propertyChange(PropertyChangeEvent evt) {
                    final Plugin p = (Plugin) evt.getNewValue();

                    if (p != null) {
                        try {
                            PluginPropertyTableModel model =
                                new PluginPropertyTableModel(p);
                            propertyTable.setModel(model);
                            propertyTable.getColumnModel().getColumn(1)
                                         .setCellEditor(new PluginTableCellEditor());
                            propertyTable.setEnabled(true);
                        } catch (Throwable e) {
                            logger.error("Failed to introspect the Plugin", e);
                        }
                    } else {
                        propertyTable.setModel(defaultModel);
                        propertyTable.setEnabled(false);
                    }
                }
            });
    }

    /**
     * Manual test bed: shows the panel with a SocketHubReceiver, then swaps to a Generator.
     */
    public static void main(String[] args) {
        JFrame frame = new JFrame("Property Editor Test bed");
        frame.addWindowListener(new WindowAdapter() {
                public void windowClosed(WindowEvent e) {
                    System.exit(1);
                }
            });

        PluginPropertyEditorPanel panel = new PluginPropertyEditorPanel();
        frame.getContentPane().add(panel);

        frame.pack();
        frame.setVisible(true);

        SocketHubReceiver r = new SocketHubReceiver();

        panel.setPlugin(r);
        try {
            Thread.sleep(3000);
        panel.setPlugin(new Generator("MyPlugin"));
        } catch (Exception e) {
            // TODO: handle exception
        }
    }

    /**
     * @return Returns the plugin.
     */
    public final Plugin getPlugin() {
        return plugin;
    }

    /**
     * @param plugin The plugin to set.
     */
    public final void setPlugin(Plugin plugin) {
        Plugin oldValue = this.plugin;
        this.plugin = plugin;
        firePropertyChange("plugin", oldValue, this.plugin);
    }

    /**
     * Editor that picks a type-appropriate cell editor (Boolean, Level, primitive boolean)
     * per row, falling back to a plain text field for unknown property types.
     * @author psmith
     *
     */
    private class PluginTableCellEditor extends AbstractCellEditor
        implements TableCellEditor {
        // Maps a property Class to the specialised editor for that type (raw Map: pre-generics code).
        private Map editorMap = new HashMap();
        private DefaultCellEditor defaultEditor = new DefaultCellEditor(
                new JTextField());
        // Tracks whichever editor was last handed out, so getCellEditorValue() reads from it.
        private DefaultCellEditor currentEditor = defaultEditor;

        private PluginTableCellEditor() {

            editorMap.put(Boolean.class,
                TableCellEditorFactory.createBooleanTableCellEditor());
            editorMap.put(Level.class,
                TableCellEditorFactory.createLevelTableCellEditor());

            //support primitive boolean parameters with the appropriate editor
            editorMap.put(boolean.class, TableCellEditorFactory.createBooleanTableCellEditor());
        }

        /* (non-Javadoc)
         * @see javax.swing.table.TableCellEditor#getTableCellEditorComponent(javax.swing.JTable, java.lang.Object, boolean, int, int)
         */
        public Component getTableCellEditorComponent(JTable table,
            Object value, boolean isSelected, int row, int column) {
            PluginPropertyTableModel model = (PluginPropertyTableModel) table.getModel();
            PropertyDescriptor descriptor = model.getDescriptors()[row];
            Class valueClass = descriptor.getPropertyType();

            if (editorMap.containsKey(valueClass)) {
                DefaultCellEditor editor = (DefaultCellEditor) editorMap.get(valueClass);
                logger.debug("Located CellEditor for " + valueClass);
                currentEditor = editor;

                return currentEditor.getTableCellEditorComponent(table, value,
                    isSelected, row, column);
            }

            currentEditor = defaultEditor;
            logger.debug("Cell value class " + valueClass +
                " not know, using default editor");

            return defaultEditor.getTableCellEditorComponent(table, value,
                isSelected, row, column);
        }

        /* (non-Javadoc)
         * @see javax.swing.CellEditor#getCellEditorValue()
         */
        public Object getCellEditorValue() {
            return currentEditor.getCellEditorValue();
        }
    }

    /**
     * Two-column (Property, Value) model built by introspecting a Plugin's JavaBean
     * properties, sorted case-insensitively by display name. Values are read and written
     * via the reflective accessor methods.
     */
    private static class PluginPropertyTableModel extends AbstractTableModel {
        private final PropertyDescriptor[] descriptors;
        private final Plugin plugin;

        private PluginPropertyTableModel(Plugin p)
            throws IntrospectionException {
            super();

            BeanInfo beanInfo = Introspector.getBeanInfo(p.getClass());

            List list = new ArrayList(Arrays.asList(
                        beanInfo.getPropertyDescriptors()));

            Collections.sort(list,
                new Comparator() {
                    public int compare(Object o1, Object o2) {
                        PropertyDescriptor d1 = (PropertyDescriptor) o1;
                        PropertyDescriptor d2 = (PropertyDescriptor) o2;

                        return d1.getDisplayName().compareToIgnoreCase(
                            d2.getDisplayName());
                    }
                });
            this.plugin = p;
            this.descriptors = (PropertyDescriptor[]) list.toArray(
                    new PropertyDescriptor[0]);
        }

        /* (non-Javadoc)
         * @see javax.swing.table.AbstractTableModel#getValueAt(int, int)
         */
        public Object getValueAt(int row, int col) {
            PropertyDescriptor d = descriptors[row];

            switch (col) {
            case 1:

                try {
                    // Reflectively read the property; unreadable values display as "".
                    Object object = d.getReadMethod().invoke(plugin,
                            new Object[0]);

                    if (object != null) {
                        return object;
                    }
                } catch (Exception e) {
                    logger.error(
                        "Error reading value for PropertyDescriptor " + d);
                }

                return "";

            case 0:
                return d.getName();
            }

            return null;
        }

        /* (non-Javadoc)
         * @see javax.swing.table.AbstractTableModel#getColumnCount()
         */
        public int getColumnCount() {
            return 2;
        }

        /* (non-Javadoc)
         * @see javax.swing.table.AbstractTableModel#getRowCount()
         */
        public int getRowCount() {
            return descriptors.length;
        }

        /* (non-Javadoc)
         * @see javax.swing.table.TableModel#isCellEditable(int, int)
         */
        public boolean isCellEditable(int rowIndex, int columnIndex) {
            // TODO Determine if the property is one of the ones a User could edit
            if (columnIndex == 1) {
                return descriptors[rowIndex].getWriteMethod() != null;
            }

            return false;
        }

        /* (non-Javadoc)
         * @see javax.swing.table.TableModel#getColumnName(int)
         */
        public String getColumnName(int column) {
            return (column == 0) ? "Property" : "Value";
        }

        /* (non-Javadoc)
         * @see javax.swing.table.TableModel#setValueAt(java.lang.Object, int, int)
         */
        public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
            if (columnIndex == 1) {
                //ensure name is set
                if (descriptors[rowIndex].getName().equalsIgnoreCase("name") &&
                        (aValue == null ||
                        aValue.toString().trim().equals(""))) {
                    logger.error("Name required");

                    return;
                }

                aValue = translateValueIfNeeded(rowIndex, aValue);
                // NOTE(review): aValue.getClass() would NPE here if aValue is null for a
                // non-"name" property — confirm callers never pass null before relying on this.
                logger.debug(
                    "setValueAt, " + rowIndex + ", " + columnIndex +
                    ", value=" + aValue + ", valueClass" + aValue.getClass());

                try {
                    descriptors[rowIndex].getWriteMethod().invoke(plugin,
                        new Object[] { aValue });
                    fireTableCellUpdated(rowIndex, columnIndex);
                } catch (IllegalArgumentException e) {
                    // ignore
                } catch (Exception e) {
                    logger.error(
                        "Failed to modify the Plugin because of Exception", e);
                }
            } else {
                super.setValueAt(aValue, rowIndex, columnIndex);
            }
        }

        /**
         * Converts the edited value to Integer when the target property is int/Integer;
         * otherwise returns the value unchanged.
         * @param row
         * @param value
         * @return
         */
        private Object translateValueIfNeeded(int row, Object value) {
            if ((descriptors[row].getPropertyType() == int.class) ||
                    (descriptors[row].getPropertyType() == Integer.class)) {
                try {
                    return Integer.valueOf(value.toString());
                } catch (Exception e) {
                    logger.error("Failed to convert to Integer type");
                }
            }

            return value;
        }

        /**
         * @return Returns the descriptors.
         */
        public final PropertyDescriptor[] getDescriptors() {
            return descriptors;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.messagecounter; import java.text.DateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.List; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.utils.JsonLoader; import static org.apache.activemq.artemis.api.core.JsonUtil.nullSafe; /** * This class stores message count informations for a given queue * * At intervals this class samples the queue for message count data * * Note that the underlying queue *does not* update statistics every time a message * is added since that would reall slow things down, instead we *sample* the queues at * regular intervals - this means we are less intrusive on the queue */ public class MessageCounter { // Constants ----------------------------------------------------- // Attributes ---------------------------------------------------- // destination related information private final String destName; private final String destSubscription; private final boolean destTopic; private final boolean destDurable; private final Queue serverQueue; // counter private long countTotal; private long countTotalLast; 
private long depthLast; private long timeLastUpdate; private long timeLastAdd; private long timeLastAck; // per hour day counter history private int dayCounterMax; private final List<DayCounter> dayCounters; private long lastMessagesAdded; private long lastMessagesAcked; // Static -------------------------------------------------------- // Constructors -------------------------------------------------- /** * Constructor * * @param name destination name * @param subscription subscription name * @param serverQueue internal queue object * @param topic topic destination flag * @param durable durable subscription flag * @param daycountmax max message history day count */ public MessageCounter(final String name, final String subscription, final Queue serverQueue, final boolean topic, final boolean durable, final int daycountmax) { // store destination related information destName = name; destSubscription = subscription; destTopic = topic; destDurable = durable; this.serverQueue = serverQueue; // initialize counter resetCounter(); // initialize message history dayCounters = new ArrayList<>(); setHistoryLimit(daycountmax); } private final Runnable onTimeExecutor = new Runnable() { @Override public void run() { long latestMessagesAdded = serverQueue.getMessagesAdded(); long latestMessagesAcked = serverQueue.getMessagesAcknowledged(); long newMessagesAdded = latestMessagesAdded - lastMessagesAdded; long newMessagesAcked = latestMessagesAcked - lastMessagesAcked; countTotal += newMessagesAdded; lastMessagesAdded = latestMessagesAdded; lastMessagesAcked = latestMessagesAcked; long timestamp = System.currentTimeMillis(); if (newMessagesAdded > 0) { timeLastAdd = timestamp; } if (newMessagesAcked > 0) { timeLastAck = timestamp; } // update timestamp timeLastUpdate = timestamp; // update message history updateHistory(newMessagesAdded); } }; // Public -------------------------------------------------------- /* * This method is called periodically to update statistics from the 
queue */ public synchronized void onTimer() { // Actor approach here: Instead of having the Counter locking the queue, we will use the Queue's executor // instead of possibly making a lock on the queue. // This way the scheduled Threads will be free to keep doing their pings in case the server is busy with paging or // any other deliveries serverQueue.getExecutor().execute(onTimeExecutor); } public String getDestinationName() { return destName; } public String getDestinationSubscription() { return destSubscription; } public boolean isDestinationTopic() { return destTopic; } public boolean isDestinationDurable() { return destDurable; } /** * Gets the total message count since startup or * last counter reset */ public long getCount() { return countTotal; } /** * Gets the message count delta since last method call */ public long getCountDelta() { long delta = countTotal - countTotalLast; countTotalLast = countTotal; return delta; } /** * Gets the current message count of pending messages * within the destination waiting for dispatch */ public long getMessageCount() { return serverQueue.getMessageCount(); } /** * Gets the message count delta of pending messages * since last method call. 
*/ public long getMessageCountDelta() { long current = serverQueue.getMessageCount(); int delta = (int) (current - depthLast); depthLast = current; return delta; } public long getLastUpdate() { return timeLastUpdate; } public long getLastAddedMessageTime() { return timeLastAdd; } public long getLastAckedMessageTime() { return timeLastAck; } public void resetCounter() { countTotal = 0; countTotalLast = 0; depthLast = 0; timeLastUpdate = 0; timeLastAdd = 0; timeLastAck = 0; } private void setHistoryLimit(final int daycountmax) { boolean bInitialize = false; // store new maximum day count dayCounterMax = daycountmax; // update day counter array synchronized (dayCounters) { if (dayCounterMax > 0) { // limit day history to specified day count int delta = dayCounters.size() - dayCounterMax; for (int i = 0; i < delta; i++) { // reduce array size to requested size by dropping // oldest day counters dayCounters.remove(0); } // create initial day counter when empty bInitialize = dayCounters.isEmpty(); } else if (dayCounterMax == 0) { // disable history dayCounters.clear(); } else { // unlimited day history // create initial day counter when empty bInitialize = dayCounters.isEmpty(); } // optionally initialize first day counter entry if (bInitialize) { dayCounters.add(new DayCounter(new GregorianCalendar(), true)); } } } public void resetHistory() { int max = dayCounterMax; setHistoryLimit(0); setHistoryLimit(max); } public List<DayCounter> getHistory() { updateHistory(0); return new ArrayList<>(dayCounters); } /** * Get message counter history data as string in format * * "day count\n * Date 1, hour counter 0, hour counter 1, ..., hour counter 23\n * Date 2, hour counter 0, hour counter 1, ..., hour counter 23\n * ..... * ..... 
* Date n, hour counter 0, hour counter 1, ..., hour counter 23\n" * * @return String message history data string */ public String getHistoryAsString() { StringBuilder ret = new StringBuilder(); // ensure history counters are up to date updateHistory(0); // compile string synchronized (dayCounters) { // first line: history day count ret.append(dayCounters.size() + "\n"); // following lines: day counter data for (DayCounter counter : dayCounters) { ret.append(counter.getDayCounterAsString() + "\n"); } } return ret.toString(); } @Override public String toString() { return "MessageCounter[destName" + destName + ", destSubscription=" + destSubscription + ", destTopic=" + destTopic + ", destDurable=" + destDurable + ", serverQueue =" + serverQueue + "]"; } /** * Returns a JSON String serialization of a {@link MessageCounter} object. * * @return */ public String toJSon() { DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM); String lastAddTimestamp = dateFormat.format(new Date(this.getLastAddedMessageTime())); String lastAckTimestamp = dateFormat.format(new Date(this.getLastAckedMessageTime())); String updateTimestamp = dateFormat.format(new Date(this.getLastUpdate())); return JsonLoader .createObjectBuilder() .add("destinationName", nullSafe(this.getDestinationName())) .add("destinationSubscription", nullSafe(this.getDestinationSubscription())) .add("destinationDurable", this.isDestinationDurable()) .add("count", this.getCount()) .add("countDelta", this.getCountDelta()) .add("messageCount", this.getMessageCount()) .add("messageCountDelta", this.getMessageCountDelta()) .add("lastAddTimestamp", lastAddTimestamp) .add("lastAckTimestamp", lastAckTimestamp) .add("updateTimestamp", updateTimestamp) .build() .toString(); } // Package protected --------------------------------------------- // Protected ----------------------------------------------------- // Private ------------------------------------------------------- /** * Update 
message counter history * * @param newMessages number of new messages to add to the latest day counter */ private void updateHistory(final long newMessages) { // check history activation if (dayCounters.isEmpty()) { return; } // calculate day difference between current date and date of last day counter entry synchronized (dayCounters) { DayCounter counterLast = dayCounters.get(dayCounters.size() - 1); GregorianCalendar calNow = new GregorianCalendar(); GregorianCalendar calLast = counterLast.getDate(); // clip day time part for day delta calulation calNow.clear(Calendar.AM_PM); calNow.clear(Calendar.HOUR); calNow.clear(Calendar.HOUR_OF_DAY); calNow.clear(Calendar.MINUTE); calNow.clear(Calendar.SECOND); calNow.clear(Calendar.MILLISECOND); calLast.clear(Calendar.AM_PM); calLast.clear(Calendar.HOUR); calLast.clear(Calendar.HOUR_OF_DAY); calLast.clear(Calendar.MINUTE); calLast.clear(Calendar.SECOND); calLast.clear(Calendar.MILLISECOND); long millisPerDay = 86400000; // 24 * 60 * 60 * 1000 long millisDelta = calNow.getTime().getTime() - calLast.getTime().getTime(); int dayDelta = (int) (millisDelta / millisPerDay); if (dayDelta > 0) { // finalize last day counter counterLast.finalizeDayCounter(); // add new intermediate empty day counter entries DayCounter counterNew; for (int i = 1; i < dayDelta; i++) { // increment date calLast.add(Calendar.DAY_OF_YEAR, 1); counterNew = new DayCounter(calLast, false); counterNew.finalizeDayCounter(); dayCounters.add(counterNew); } // add new day counter entry for current day counterNew = new DayCounter(calNow, false); dayCounters.add(counterNew); // ensure history day count limit setHistoryLimit(dayCounterMax); } // update last day counter entry counterLast = dayCounters.get(dayCounters.size() - 1); counterLast.updateDayCounter(newMessages); } } // Inner classes ------------------------------------------------- /** * Internal day counter class for one day hour based counter history */ public static final class DayCounter { static 
final int HOURS = 24; GregorianCalendar date = null; long[] counters = new long[DayCounter.HOURS]; /** * Constructor * * @param date day counter date * @param isStartDay true first day counter * false follow up day counter */ DayCounter(final GregorianCalendar date, final boolean isStartDay) { // store internal copy of creation date this.date = (GregorianCalendar) date.clone(); // initialize the array with '0'- values to current hour (if it is not the // first monitored day) and the rest with default values ('-1') int hour = date.get(Calendar.HOUR_OF_DAY); for (int i = 0; i < DayCounter.HOURS; i++) { if (i < hour) { if (isStartDay) { counters[i] = -1L; } else { counters[i] = 0L; } } else { counters[i] = -1L; } } // set the array element of the current hour to '0' counters[hour] = 0L; } /** * Gets copy of day counter date * * @return GregorianCalendar day counter date */ public GregorianCalendar getDate() { return (GregorianCalendar) date.clone(); } public long[] getCounters() { return counters; } /** * Update day counter hour array elements * * @param newMessages number of new messages since the counter was last updated. 
*/ void updateDayCounter(final long newMessages) { // get the current hour of the day GregorianCalendar cal = new GregorianCalendar(); int currentIndex = cal.get(Calendar.HOUR_OF_DAY); // check if the last array update is more than 1 hour ago, if so fill all // array elements between the last index and the current index with '0' values boolean bUpdate = false; for (int i = 0; i <= currentIndex; i++) { if (counters[i] > -1) { // found first initialized hour counter // -> set all following uninitialized // counter values to 0 bUpdate = true; } if (bUpdate == true) { if (counters[i] == -1) { counters[i] = 0; } } } // increment current counter with the new messages counters[currentIndex] += newMessages; } /** * Finalize day counter hour array elements */ private void finalizeDayCounter() { // a new day has began, so fill all array elements from index to end with // '0' values boolean bFinalize = false; for (int i = 0; i < DayCounter.HOURS; i++) { if (counters[i] > -1) { // found first initialized hour counter // -> finalize all following uninitialized // counter values bFinalize = true; } if (bFinalize) { if (counters[i] == -1) { counters[i] = 0; } } } } /** * Return day counter data as string with format<br> * "Date, hour counter 0, hour counter 1, ..., hour counter 23". * * @return String day counter data */ private String getDayCounterAsString() { // first element day counter date DateFormat dateFormat = DateFormat.getDateInstance(DateFormat.SHORT); StringBuilder strData = new StringBuilder(dateFormat.format(date.getTime())); // append 24 comma separated hour counter values for (int i = 0; i < DayCounter.HOURS; i++) { strData.append("," + counters[i]); } return strData.toString(); } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.storagegateway.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Describes a gateway's network interface. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/storagegateway-2013-06-30/NetworkInterface" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class NetworkInterface implements Serializable, Cloneable, StructuredPojo { /** * <p> * The Internet Protocol version 4 (IPv4) address of the interface. * </p> */ private String ipv4Address; /** * <p> * The Media Access Control (MAC) address of the interface. * </p> * <note> * <p> * This is currently unsupported and will not be returned in output. * </p> * </note> */ private String macAddress; /** * <p> * The Internet Protocol version 6 (IPv6) address of the interface. <i>Currently not supported</i>. * </p> */ private String ipv6Address; /** * <p> * The Internet Protocol version 4 (IPv4) address of the interface. * </p> * * @param ipv4Address * The Internet Protocol version 4 (IPv4) address of the interface. */ public void setIpv4Address(String ipv4Address) { this.ipv4Address = ipv4Address; } /** * <p> * The Internet Protocol version 4 (IPv4) address of the interface. 
* </p> * * @return The Internet Protocol version 4 (IPv4) address of the interface. */ public String getIpv4Address() { return this.ipv4Address; } /** * <p> * The Internet Protocol version 4 (IPv4) address of the interface. * </p> * * @param ipv4Address * The Internet Protocol version 4 (IPv4) address of the interface. * @return Returns a reference to this object so that method calls can be chained together. */ public NetworkInterface withIpv4Address(String ipv4Address) { setIpv4Address(ipv4Address); return this; } /** * <p> * The Media Access Control (MAC) address of the interface. * </p> * <note> * <p> * This is currently unsupported and will not be returned in output. * </p> * </note> * * @param macAddress * The Media Access Control (MAC) address of the interface.</p> <note> * <p> * This is currently unsupported and will not be returned in output. * </p> */ public void setMacAddress(String macAddress) { this.macAddress = macAddress; } /** * <p> * The Media Access Control (MAC) address of the interface. * </p> * <note> * <p> * This is currently unsupported and will not be returned in output. * </p> * </note> * * @return The Media Access Control (MAC) address of the interface.</p> <note> * <p> * This is currently unsupported and will not be returned in output. * </p> */ public String getMacAddress() { return this.macAddress; } /** * <p> * The Media Access Control (MAC) address of the interface. * </p> * <note> * <p> * This is currently unsupported and will not be returned in output. * </p> * </note> * * @param macAddress * The Media Access Control (MAC) address of the interface.</p> <note> * <p> * This is currently unsupported and will not be returned in output. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public NetworkInterface withMacAddress(String macAddress) { setMacAddress(macAddress); return this; } /** * <p> * The Internet Protocol version 6 (IPv6) address of the interface. 
<i>Currently not supported</i>. * </p> * * @param ipv6Address * The Internet Protocol version 6 (IPv6) address of the interface. <i>Currently not supported</i>. */ public void setIpv6Address(String ipv6Address) { this.ipv6Address = ipv6Address; } /** * <p> * The Internet Protocol version 6 (IPv6) address of the interface. <i>Currently not supported</i>. * </p> * * @return The Internet Protocol version 6 (IPv6) address of the interface. <i>Currently not supported</i>. */ public String getIpv6Address() { return this.ipv6Address; } /** * <p> * The Internet Protocol version 6 (IPv6) address of the interface. <i>Currently not supported</i>. * </p> * * @param ipv6Address * The Internet Protocol version 6 (IPv6) address of the interface. <i>Currently not supported</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public NetworkInterface withIpv6Address(String ipv6Address) { setIpv6Address(ipv6Address); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getIpv4Address() != null) sb.append("Ipv4Address: ").append(getIpv4Address()).append(","); if (getMacAddress() != null) sb.append("MacAddress: ").append(getMacAddress()).append(","); if (getIpv6Address() != null) sb.append("Ipv6Address: ").append(getIpv6Address()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof NetworkInterface == false) return false; NetworkInterface other = (NetworkInterface) obj; if (other.getIpv4Address() == null ^ this.getIpv4Address() == null) return false; if (other.getIpv4Address() != null && other.getIpv4Address().equals(this.getIpv4Address()) == false) return false; if (other.getMacAddress() == null ^ this.getMacAddress() == null) return false; if (other.getMacAddress() != null && other.getMacAddress().equals(this.getMacAddress()) == false) return false; if (other.getIpv6Address() == null ^ this.getIpv6Address() == null) return false; if (other.getIpv6Address() != null && other.getIpv6Address().equals(this.getIpv6Address()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getIpv4Address() == null) ? 0 : getIpv4Address().hashCode()); hashCode = prime * hashCode + ((getMacAddress() == null) ? 0 : getMacAddress().hashCode()); hashCode = prime * hashCode + ((getIpv6Address() == null) ? 
0 : getIpv6Address().hashCode()); return hashCode; } @Override public NetworkInterface clone() { try { return (NetworkInterface) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.storagegateway.model.transform.NetworkInterfaceMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * Licensed to Crate.io GmbH ("Crate") under one or more contributor
 * license agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership. Crate licenses
 * this file to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * However, if you have executed another commercial license agreement
 * with Crate these terms will supersede the license and you may use the
 * software solely pursuant to the terms of the relevant commercial agreement.
 */
package io.crate.geo;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.util.List;
import java.util.Map;

import org.junit.Test;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiPolygon;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.geom.impl.CoordinateArraySequenceFactory;
import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
import org.locationtech.spatial4j.io.WKTWriter;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import org.locationtech.spatial4j.shape.jts.JtsPoint;

/**
 * Tests for GeoJSONUtils: round-tripping between spatial4j {@link Shape}s,
 * GeoJSON-style Maps, and WKT, plus validation of malformed GeoJSON input.
 */
public class GeoJSONUtilsTest {

    static {
        // Make sure `assert` statements inside production code fire during tests.
        ClassLoader.getSystemClassLoader().setDefaultAssertionStatus(true);
    }

    private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory(CoordinateArraySequenceFactory.instance());

    // Fixture shapes covering polygon, multi-polygon, multi-point and multi-linestring geometries.
    public final static List<Shape> SHAPES = List.<Shape>of(
        new JtsGeometry(new Polygon(GEOMETRY_FACTORY.createLinearRing(new Coordinate[]{
            new Coordinate(0.0, 1.0),
            new Coordinate(100.0, 0.1),
            new Coordinate(20.0, 23.567),
            new Coordinate(0.0, 1.0)
        }), new LinearRing[0], GEOMETRY_FACTORY), JtsSpatialContext.GEO, true, true),
        new JtsGeometry(new MultiPolygon(
            new Polygon[]{
                new Polygon(GEOMETRY_FACTORY.createLinearRing(new Coordinate[]{
                    new Coordinate(0.0, 1.0),
                    new Coordinate(0.1, 1.1),
                    new Coordinate(1.1, 60.0),
                    new Coordinate(0.0, 1.0)
                }), new LinearRing[0], GEOMETRY_FACTORY),
                new Polygon(GEOMETRY_FACTORY.createLinearRing(new Coordinate[]{
                    new Coordinate(2.0, 1.0),
                    new Coordinate(2.1, 1.1),
                    new Coordinate(2.1, 70.0),
                    new Coordinate(2.0, 1.0)
                }), new LinearRing[0], GEOMETRY_FACTORY)
            },
            GEOMETRY_FACTORY
        ), JtsSpatialContext.GEO, true, true),
        new JtsGeometry(GEOMETRY_FACTORY.createMultiPointFromCoords(new Coordinate[]{
            new Coordinate(0.0, 0.0),
            new Coordinate(1.0, 1.0)
        }), JtsSpatialContext.GEO, true, true),
        new JtsGeometry(GEOMETRY_FACTORY.createMultiLineString(new LineString[]{
            GEOMETRY_FACTORY.createLineString(new Coordinate[]{
                new Coordinate(0.0, 1.0),
                new Coordinate(0.1, 1.1),
                new Coordinate(1.1, 80.0),
                new Coordinate(0.0, 1.0)
            }),
            GEOMETRY_FACTORY.createLineString(new Coordinate[]{
                new Coordinate(2.0, 1.0),
                new Coordinate(2.1, 1.1),
                new Coordinate(2.1, 60.0),
                new Coordinate(2.0, 1.0)
            })
        }), JtsSpatialContext.GEO, true, true)
    );

    // Every fixture shape must convert to a map carrying a "type" key that passes GeoJSON validation.
    @Test
    public void testShape2Map() throws Exception {
        for (Shape shape : SHAPES) {
            Map<String, Object> map = GeoJSONUtils.shape2Map(shape);
            assertThat(map, hasKey("type"));
            GeoJSONUtils.validateGeoJson(map);
        }
    }

    // A point converts to {type: "Point", coordinates: double[2]}.
    @Test
    public void testPoint2Map() throws Exception {
        Point point = GEOMETRY_FACTORY.createPoint(new Coordinate(0.0, 0.0));
        Shape shape = new JtsPoint(point, JtsSpatialContext.GEO);
        Map<String, Object> map = GeoJSONUtils.shape2Map(shape);
        assertThat(map, hasEntry("type", (Object) "Point"));
        assertThat(map.get("coordinates").getClass().isArray(), is(true));
        assertThat(((double[]) map.get("coordinates")).length, is(2));
    }

    // WKT -> Shape -> Map must agree with the direct WKT -> Map conversion,
    // and the map must convert back to a shape that serializes to the original WKT.
    @Test
    public void testMapFromWktRoundTrip() throws Exception {
        String wkt = "MULTILINESTRING ((10.05 10.28, 20.95 20.89), (20.95 20.89, 31.92 21.45))";
        Shape shape = GeoJSONUtils.wkt2Shape(wkt);
        Map<String, Object> map = GeoJSONUtils.shape2Map(shape);

        Map<String, Object> wktMap = GeoJSONUtils.wkt2Map(wkt);
        assertThat(map.get("type"), is(wktMap.get("type")));
        assertThat(map.get("coordinates"), is(wktMap.get("coordinates")));

        Shape mappedShape = GeoJSONUtils.map2Shape(map);
        String wktFromMap = new WKTWriter().toString(mappedShape);
        assertThat(wktFromMap, is(wkt));
    }

    // Malformed WKT (4-dimensional multilinestring with extra parens) is rejected.
    @Test
    public void testInvalidWKT() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.wkt2Map(
                         "multilinestring (((10.05 10.28 3.4 8.4, 20.95 20.89 4.5 9.5),\n" +
                         " \n" +
                         "( 20.95 20.89 4.5 9.5, 31.92 21.45 3.6 8.6)))"),
                     "Cannot convert WKT \"multilinestring (((10.05 10.28 3.4 8.4, 20.95 20.89 4.5 9.5)");
    }

    // A well-formed LineString map converts to a JtsGeometry wrapping a JTS LineString.
    @Test
    public void testMap2Shape() throws Exception {
        Shape shape = GeoJSONUtils.map2Shape(Map.<String, Object>of(
            GeoJSONUtils.TYPE_FIELD, GeoJSONUtils.LINE_STRING,
            GeoJSONUtils.COORDINATES_FIELD, new Double[][]{{0.0, 0.1}, {1.0, 1.1}}
        ));
        assertThat(shape, instanceOf(JtsGeometry.class));
        assertThat(((JtsGeometry) shape).getGeom(), instanceOf(LineString.class));
    }

    // An empty map is not a valid shape.
    @Test
    public void testInvalidMap() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.map2Shape(Map.<String, Object>of()),
                     "Cannot convert Map \"{}\" to shape");
    }

    // Validation: missing "type" field is rejected.
    @Test
    public void testValidateMissingType() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(Map.of()),
                     "Invalid GeoJSON: type field missing");
    }

    // Validation: unknown geometry type is rejected.
    @Test
    public void testValidateWrongType() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(Map.of(GeoJSONUtils.TYPE_FIELD, "Foo")),
                     "Invalid GeoJSON: invalid type");
    }

    // Validation: a geometry type without coordinates is rejected.
    @Test
    public void testValidateMissingCoordinates() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(Map.of(GeoJSONUtils.TYPE_FIELD, GeoJSONUtils.LINE_STRING)),
                     "Invalid GeoJSON: coordinates field missing");
    }

    // Validation: a GeometryCollection without its geometries is rejected.
    @Test
    public void testValidateGeometriesMissing() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(Map.of(GeoJSONUtils.TYPE_FIELD, GeoJSONUtils.GEOMETRY_COLLECTION)),
                     "Invalid GeoJSON: coordinates field missing");
    }

    // Validation: a GeometryCollection whose members are not geometries is rejected.
    @Test
    public void testInvalidGeometryCollection() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(
                         Map.of(
                             GeoJSONUtils.TYPE_FIELD, GeoJSONUtils.GEOMETRY_COLLECTION,
                             GeoJSONUtils.GEOMETRIES_FIELD, List.<Object>of("ABC")
                         )
                     ),
                     "Invalid GeoJSON: invalid GeometryCollection");
    }

    // Validation: coordinates must be numeric arrays, not strings.
    @Test
    public void testValidateInvalidCoordinates() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(
                         Map.of(
                             GeoJSONUtils.TYPE_FIELD, GeoJSONUtils.POINT,
                             GeoJSONUtils.COORDINATES_FIELD, "ABC"
                         )
                     ),
                     "Invalid GeoJSON: invalid GeometryCollection");
    }

    // Validation: a Point must not carry nested (2-D array) coordinates.
    @Test
    public void testInvalidNestedCoordinates() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(
                         Map.of(
                             GeoJSONUtils.TYPE_FIELD, GeoJSONUtils.POINT,
                             GeoJSONUtils.COORDINATES_FIELD, new double[][]{
                                 {0.0, 1.0},
                                 {1.0, 0.0}
                             }
                         )
                     ),
                     "Invalid GeoJSON: invalid coordinate");
    }

    // Validation: a Polygon needs one more nesting level than a plain coordinate list.
    @Test
    public void testInvalidDepthNestedCoordinates() throws Exception {
        assertThrows(IllegalArgumentException.class,
                     () -> GeoJSONUtils.validateGeoJson(
                         Map.of(
                             GeoJSONUtils.TYPE_FIELD, GeoJSONUtils.POLYGON,
                             GeoJSONUtils.COORDINATES_FIELD, new double[][]{
                                 {0.0, 1.0},
                                 {1.0, 0.0}
                             }
                         )
                     ),
                     "Invalid GeoJSON: invalid coordinate");
    }
}
package zendesk.belvedere;

import android.content.Intent;
import android.os.Bundle;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;

import zendesk.belvedere.ui.R;

/**
 * APIs for interacting with the ImageStream.
 *
 * A retained, headless fragment that mediates between the attachment picker UI
 * ({@code ImageStreamUi} popup), runtime permissions, and a set of weakly-referenced
 * listeners. Listeners are held via {@link WeakReference} so registering does not
 * leak activities or views.
 */
public class ImageStream extends Fragment {

    // Weak references throughout: this fragment outlives configuration changes
    // (retained instance), so strong refs to UI-bound objects would leak them.
    private WeakReference<KeyboardHelper> keyboardHelper = new WeakReference<>(null);

    private List<WeakReference<Listener>> imageStreamListener = new ArrayList<>();
    private List<WeakReference<SendListener>> imageStreamSendListener = new ArrayList<>();
    private List<WeakReference<ScrollListener>> imageStreamScrollListener = new ArrayList<>();

    // Currently shown picker popup, or null when not visible.
    private ImageStreamUi imageStreamPopup = null;
    private BelvedereUi.UiConfig uiConfig = null;
    // Whether the popup was showing when the fragment was last paused.
    private boolean wasOpen = false;

    private PermissionManager permissionManager;
    private Callback<List<MediaResult>> callback;

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Retain across configuration changes so selection state survives rotation.
        setRetainInstance(true);
        permissionManager = new PermissionManager(getContext());
    }

    @Override
    public void onPause() {
        super.onPause();
        // Dismiss the popup on pause; remember that it was open so callers can
        // restore it after a configuration change (see wasOpen()).
        if(imageStreamPopup != null) {
            imageStreamPopup.dismiss();
            wasOpen = true;
        } else {
            wasOpen = false;
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        // Delegate to the PermissionManager first; fall back to the framework if it
        // did not recognize the request code.
        boolean handled = permissionManager.onRequestPermissionsResult(this, requestCode, permissions, grantResults);
        if(!handled) {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }

    @Override
    public void onActivityResult(int requestCode, final int resultCode, final Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Filter out results that exceed the configured maximum file size
        // (-1 means unlimited) and warn the user when anything was dropped.
        // NOTE(review): assumes uiConfig is non-null by the time a result arrives
        // (set via setImageStreamUi before any intent is launched) — verify against callers.
        callback = new Callback<List<MediaResult>>() {
            @Override
            public void success(List<MediaResult> result) {
                List<MediaResult> filteredMediaResult = new ArrayList<>(result.size());
                for(MediaResult m : result) {
                    if(m.getSize() <= uiConfig.getMaxFileSize() || uiConfig.getMaxFileSize() == -1L) {
                        filteredMediaResult.add(m);
                    }
                }

                if(filteredMediaResult.size() != result.size()) {
                    Toast.makeText(getContext(), R.string.belvedere_image_stream_file_too_large, Toast.LENGTH_SHORT).show();
                }

                notifyImageSelected(filteredMediaResult);
            }
        };
        Belvedere.from(this.requireContext()).getFilesFromActivityOnResult(requestCode, resultCode, data, callback, false);
    }

    void setKeyboardHelper(KeyboardHelper keyboardHelper) {
        this.keyboardHelper = new WeakReference<>(keyboardHelper);
    }

    void setImageStreamUi(ImageStreamUi imageStreamPopup, BelvedereUi.UiConfig uiConfig) {
        this.imageStreamPopup = imageStreamPopup;
        if(uiConfig != null) {
            this.uiConfig = uiConfig;
        }
    }

    // Fan-out helpers below: iterate the weak listener lists and notify every
    // listener that is still alive. Cleared references are silently skipped.

    void notifyScrollListener(int height, int scrollArea, float scrollPosition) {
        for(WeakReference<ScrollListener> ref : imageStreamScrollListener) {
            final ScrollListener scrollListener = ref.get();
            if(scrollListener != null) {
                scrollListener.onScroll(height, scrollArea, scrollPosition);
            }
        }
    }

    void notifyImageSelected(List<MediaResult> mediaResults) {
        for(WeakReference<Listener> ref : imageStreamListener) {
            final Listener listener = ref.get();
            if(listener != null) {
                listener.onMediaSelected(mediaResults);
            }
        }
    }

    void notifyImageDeselected(List<MediaResult> mediaResults) {
        for(WeakReference<Listener> ref : imageStreamListener) {
            final Listener listener = ref.get();
            if(listener != null) {
                listener.onMediaDeselected(mediaResults);
            }
        }
    }

    void notifyImagesSent(List<MediaResult> mediaResults) {
        for(WeakReference<SendListener> ref : imageStreamSendListener) {
            final SendListener listener = ref.get();
            if(listener != null) {
                listener.onSend(mediaResults);
            }
        }
    }

    void notifyDismissed() {
        callback = null; // Prevent memory leak of Callback
        for(WeakReference<Listener> ref : imageStreamListener) {
            final Listener listener = ref.get();
            if(listener != null) {
                listener.onDismissed();
            }
        }
    }

    void notifyVisible() {
        for(WeakReference<Listener> ref : imageStreamListener) {
            final Listener listener = ref.get();
            if(listener != null) {
                listener.onVisible();
            }
        }
    }

    void handlePermissions(final List<MediaIntent> mediaIntents, final PermissionManager.PermissionCallback permissionCallback) {
        permissionManager.handlePermissions(this, mediaIntents, permissionCallback);
    }

    /**
     * Gets the currently installed {@link KeyboardHelper}, or {@code null} if the
     * weak reference has been cleared.
     */
    public KeyboardHelper getKeyboardHelper() {
        return keyboardHelper.get();
    }

    /**
     * Add a {@link Listener} for getting notified when the users selects/deselects an attachment
     * or the ImageStream gets visible or dismissed.
     */
    public void addListener(Listener listener) {
        imageStreamListener.add(new WeakReference<>(listener));
    }

    /**
     * Add a {@link ScrollListener} to get informed when the ImageStream gets dragged by the user.
     */
    public void addScrollListener(ScrollListener listener) {
        imageStreamScrollListener.add(new WeakReference<>(listener));
    }

    /**
     * Add a {@link SendListener} to get informed when the selected attachments are sent.
     */
    public void addSendListener(SendListener listener) {
        imageStreamSendListener.add(new WeakReference<>(listener));
    }

    /**
     * Hide the ImageStream if visible.
     */
    public void dismiss() {
        if(isAttachmentsPopupVisible()) {
            imageStreamPopup.dismiss();
        }
    }

    /**
     * Check if the ImageStream was visible before the last configuration change.
     *
     * @return {@code true} if it was visible, {@code false} if not
     */
    public boolean wasOpen() {
        return wasOpen;
    }

    /**
     * Check if the ImageStream is currently visible.
     */
    public boolean isAttachmentsPopupVisible() {
        return imageStreamPopup != null;
    }

    /**
     * Listener for observing the user interaction with the ImageStream.
     */
    public interface Listener {
        /**
         * The user dismissed the ImageStream.
         */
        void onDismissed();

        /**
         * The ImageStream became visible to the user.
         */
        void onVisible();

        /**
         * The user selected one or multiple attachments.
         */
        void onMediaSelected(List<MediaResult> mediaResults);

        /**
         * The user deselected one or multiple attachments.
         */
        void onMediaDeselected(List<MediaResult> mediaResults);
    }

    /**
     * Informs about the scroll position of the ImageStream BottomSheet.
     */
    public interface ScrollListener {
        /**
         * Called if the ImageStream gets dragged by the user.
         *
         * @param height         height of the BottomSheet
         * @param scrollArea     the scrollable area
         * @param scrollPosition current scroll position
         */
        void onScroll(int height, int scrollArea, float scrollPosition);
    }

    /**
     * Informs about the selected attachments to be sent.
     */
    public interface SendListener {
        /**
         * Called with the selected attachments when the send button is clicked.
         *
         * @param mediaResults the selected attachments to be sent
         */
        void onSend(List<MediaResult> mediaResults);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.axis2.description; import java.util.HashMap; import javax.xml.namespace.QName; import org.apache.axiom.soap.SOAPBody; import org.apache.axiom.soap.SOAPEnvelope; import org.apache.axiom.util.UIDGenerator; import org.apache.axis2.AxisFault; import org.apache.axis2.Constants; import org.apache.axis2.addressing.AddressingConstants; import org.apache.axis2.addressing.EndpointReference; import org.apache.axis2.client.OperationClient; import org.apache.axis2.client.Options; import org.apache.axis2.client.async.AxisCallback; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.context.MessageContext; import org.apache.axis2.context.OperationContext; import org.apache.axis2.context.ServiceContext; import org.apache.axis2.engine.AxisEngine; import org.apache.axis2.i18n.Messages; import org.apache.axis2.transport.TransportUtils; import org.apache.axis2.transport.http.HTTPConstants; import org.apache.axis2.util.CallbackReceiver; import org.apache.axis2.util.Utils; import org.apache.axis2.wsdl.WSDLConstants; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; public class OutInAxisOperation extends TwoChannelAxisOperation { private 
static final Log log = LogFactory.getLog(OutInAxisOperation.class); public OutInAxisOperation() { super(); //setup a temporary name QName tmpName = new QName(this.getClass().getName() + "_" + UIDGenerator.generateUID()); this.setName(tmpName); setMessageExchangePattern(WSDL2Constants.MEP_URI_OUT_IN); } public OutInAxisOperation(QName name) { super(name); setMessageExchangePattern(WSDL2Constants.MEP_URI_OUT_IN); } public void addMessageContext(MessageContext msgContext, OperationContext opContext) throws AxisFault { HashMap<String, MessageContext> mep = opContext.getMessageContexts(); MessageContext immsgContext = (MessageContext) mep .get(MESSAGE_LABEL_IN_VALUE); MessageContext outmsgContext = (MessageContext) mep .get(MESSAGE_LABEL_OUT_VALUE); if ((immsgContext != null) && (outmsgContext != null)) { throw new AxisFault(Messages.getMessage("mepcompleted")); } if (outmsgContext == null) { mep.put(MESSAGE_LABEL_OUT_VALUE, msgContext); } else { mep.put(MESSAGE_LABEL_IN_VALUE, msgContext); opContext.setComplete(true); opContext.cleanup(); } } /** * Returns a MEP client for an Out-IN operation. This client can be used to * interact with a server which is offering an In-Out operation. To use the * client, you must call addMessageContext() with a message context and then * call execute() to execute the client. * * @param sc The service context for this client to live within. Cannot be * null. * @param options Options to use as defaults for this client. If any options are * set specifically on the client then those override options * here. */ public OperationClient createClient(ServiceContext sc, Options options) { return new OutInAxisOperationClient(this, sc, options); } } /** * MEP client for moi. 
 */
class OutInAxisOperationClient extends OperationClient {

    private static Log log = LogFactory.getLog(OutInAxisOperationClient.class);

    OutInAxisOperationClient(OutInAxisOperation axisOp, ServiceContext sc,
                             Options options) {
        super(axisOp, sc, options);
    }

    /**
     * Adds message context to operation context, so that it will handle the
     * logic correctly if the OperationContext is null then new one will be
     * created, and Operation Context will become null when some one calls reset().
     *
     * @param msgContext the MessageContext to add
     * @throws AxisFault
     */
    public void addMessageContext(MessageContext msgContext) throws AxisFault {
        msgContext.setServiceContext(sc);
        if (msgContext.getMessageID() == null) {
            setMessageID(msgContext);
        }
        axisOp.registerOperationContext(msgContext, oc);
    }

    /**
     * Returns the message context for a given message label.
     *
     * @param messageLabel :
     *                     label of the message and that can be either "Out" or "In" and
     *                     nothing else
     * @return Returns MessageContext.
     * @throws AxisFault
     */
    public MessageContext getMessageContext(String messageLabel)
            throws AxisFault {
        return oc.getMessageContext(messageLabel);
    }

    /**
     * Executes the MEP. What this does depends on the specific MEP client. The
     * basic idea is to have the MEP client execute and do something with the
     * messages that have been added to it so far. For example, if its an Out-In
     * MEP, then if the Out message has been set, then executing the client asks
     * it to send the message and get the In message, possibly using a different
     * thread.
     *
     * @param block Indicates whether execution should block or return ASAP. What
     *              block means is of course a function of the specific MEP
     *              client. IGNORED BY THIS MEP CLIENT.
     * @throws AxisFault if something goes wrong during the execution of the MEP.
     */
    public void executeImpl(boolean block) throws AxisFault {
        if (log.isDebugEnabled()) {
            log.debug("Entry: OutInAxisOperationClient::execute, " + block);
        }
        if (completed) {
            throw new AxisFault(Messages.getMessage("mepiscomplted"));
        }
        ConfigurationContext cc = sc.getConfigurationContext();

        // copy interesting info from options to message context.
        MessageContext mc = oc.getMessageContext(WSDLConstants.MESSAGE_LABEL_OUT_VALUE);
        if (mc == null) {
            throw new AxisFault(Messages.getMessage("outmsgctxnull"));
        }
        prepareMessageContext(cc, mc);

        // Infer an inbound transport only when neither the options nor the
        // message context already supply one.
        if (options.getTransportIn() == null && mc.getTransportIn() == null) {
            mc.setTransportIn(ClientUtils.inferInTransport(cc
                    .getAxisConfiguration(), options, mc));
        } else if (mc.getTransportIn() == null) {
            mc.setTransportIn(options.getTransportIn());
        }

        /**
         * If a module has set the USE_ASYNC_OPERATIONS option then we override the behaviour
         * for sync calls, and effectively USE_CUSTOM_LISTENER too. However we leave real
         * async calls alone.
         */
        boolean useAsync = false;
        if (!mc.getOptions().isUseSeparateListener()) {
            Boolean useAsyncOption =
                    (Boolean) mc.getProperty(Constants.Configuration.USE_ASYNC_OPERATIONS);
            if (log.isDebugEnabled())
                log.debug("OutInAxisOperationClient: useAsyncOption " + useAsyncOption);
            if (useAsyncOption != null) {
                useAsync = useAsyncOption.booleanValue();
            }
        }

        EndpointReference replyTo = mc.getReplyTo();
        if (replyTo != null) {
            // A "none" replyTo means no response can ever arrive, which is
            // incompatible with an Out-In exchange.
            if (replyTo.hasNoneAddress()) {
                throw new AxisFault(replyTo.getAddress() + ""
                        + " can not be used with OutInAxisOperationClient , user either "
                        + "fireAndForget or sendRobust)");
            } else if (replyTo.isWSAddressingAnonymous() &&
                    replyTo.getAllReferenceParameters() != null) {
                mc.setProperty(AddressingConstants.INCLUDE_OPTIONAL_HEADERS,
                        Boolean.TRUE);
            }

            // A non-anonymous replyTo forces the async path, unless the user
            // has explicitly claimed responsibility via CUSTOM_REPLYTO_ADDRESS.
            String customReplyTo = (String) options.getProperty(Options.CUSTOM_REPLYTO_ADDRESS);
            if (!(Options.CUSTOM_REPLYTO_ADDRESS_TRUE.equals(customReplyTo))) {
                if (!replyTo.hasAnonymousAddress()) {
                    useAsync = true;
                }
            }
        }

        if (useAsync || mc.getOptions().isUseSeparateListener()) {
            sendAsync(useAsync, mc);
        } else {
            if (block) {
                // Send the SOAP Message and receive a response
                send(mc);
                completed = true;
            } else {
                // Non-blocking over a single channel: do the blocking send on
                // a pool thread and report results through the AxisCallback.
                sc.getConfigurationContext().getThreadPool().execute(
                        new NonBlockingInvocationWorker(mc, axisCallback));
            }
        }
    }

    // Sends the request expecting the response to arrive on a separate
    // channel, dispatched to a CallbackReceiver. When the caller supplied no
    // AxisCallback, an internal SyncCallBack is used and this method blocks
    // until the response arrives or the timeout elapses.
    private void sendAsync(boolean useAsync, MessageContext mc)
            throws AxisFault {
        if (log.isDebugEnabled()) {
            log.debug("useAsync=" + useAsync + ", seperateListener=" +
                    mc.getOptions().isUseSeparateListener());
        }
        /**
         * We are following the async path. If the user hasn't set a callback object then we must
         * block until the whole MEP is complete, as they have no other way to get their reply message.
         */
        // THREADSAFE issue: Multiple threads could be trying to initialize the callback receiver
        // so it is synchronized.  It is not done within the else clause to avoid the
        // double-checked lock antipattern.
        CallbackReceiver callbackReceiver;
        synchronized (axisOp) {
            if (axisOp.getMessageReceiver() != null &&
                    axisOp.getMessageReceiver() instanceof CallbackReceiver) {
                callbackReceiver = (CallbackReceiver) axisOp.getMessageReceiver();
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("Creating new callback receiver");
                }
                callbackReceiver = new CallbackReceiver();
                axisOp.setMessageReceiver(callbackReceiver);
                if (log.isDebugEnabled())
                    log.debug("OutInAxisOperation: callbackReceiver " + callbackReceiver +
                            " : " + axisOp);
            }
        }

        // Register whichever callback will receive the response, keyed by the
        // outgoing message ID.
        SyncCallBack internalCallback = null;
        if (axisCallback != null) {
            callbackReceiver.addCallback(mc.getMessageID(), axisCallback);
            if (log.isDebugEnabled())
                log.debug("OutInAxisOperationClient: Creating axis callback");
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Creating internal callback");
            }
            internalCallback = new SyncCallBack();
            callbackReceiver.addCallback(mc.getMessageID(), internalCallback);
            if (log.isDebugEnabled())
                log.debug("OutInAxisOperationClient: Creating internal callback");
        }

        /**
         * If USE_CUSTOM_LISTENER is set to 'true' the replyTo value will not be replaced and Axis2 will not
         * start its internal listener. Some other entity (e.g. a module) should take care of obtaining the
         * response message.
         */
        Boolean useCustomListener =
                (Boolean) options.getProperty(Constants.Configuration.USE_CUSTOM_LISTENER);
        if (useAsync) {
            useCustomListener = Boolean.TRUE;
        }
        if (useCustomListener == null || !useCustomListener.booleanValue()) {
            // Point replyTo at the transport listener's EPR for this service
            // so the response can find its way back.
            EndpointReference replyTo = mc.getReplyTo();
            if (replyTo == null || replyTo.hasAnonymousAddress()) {
                EndpointReference replyToFromTransport =
                        mc.getConfigurationContext().getListenerManager().
                                getEPRforService(sc.getAxisService().getName(),
                                        axisOp.getName().getLocalPart(), mc
                                        .getTransportIn().getName());

                if (replyTo == null) {
                    mc.setReplyTo(replyToFromTransport);
                } else {
                    replyTo.setAddress(replyToFromTransport.getAddress());
                }
            }
        }

        //if we don't do this , this guy will wait till it gets HTTP 202 in the HTTP case
        mc.setProperty(MessageContext.CLIENT_API_NON_BLOCKING, Boolean.TRUE);
        // Register the operation context so the receiver can correlate the
        // inbound response (by relatesTo message ID) back to this MEP.
        mc.getConfigurationContext().registerOperationContext(mc.getMessageID(), oc);
        AxisEngine.send(mc);
        if (internalCallback != null) {
            // Blocking variant: wait here until SyncCallBack is notified.
            internalCallback.waitForCompletion(options.getTimeOutInMilliSeconds());

            // process the result of the invocation
            if (internalCallback.envelope == null) {
                if (internalCallback.error == null) {
                    log.error("Callback had neither error nor response");
                }
                // NOTE(review): if error is null here, makeFault(null) is
                // still invoked when isExceptionToBeThrownOnSOAPFault() is
                // set — confirm makeFault tolerates a null cause.
                if (options.isExceptionToBeThrownOnSOAPFault()) {
                    throw AxisFault.makeFault(internalCallback.error);
                }
            }
        }
    }

    /**
     * When synchronous send() gets back a response MessageContext, this is the workhorse
     * method which processes it.
     *
     * @param responseMessageContext the active response MessageContext
     * @throws AxisFault if something went wrong
     */
    protected void handleResponse(MessageContext responseMessageContext)
            throws AxisFault {
        // Options object reused above so soapAction needs to be removed so
        // that soapAction+wsa:Action on response don't conflict
        responseMessageContext.setSoapAction(null);

        if (responseMessageContext.getEnvelope() == null) {
            // If request is REST we assume the responseMessageContext is REST, so
            // set the variable
            /*
             * old code here was using the outbound message context to set the inbound SOAP namespace,
             * as such and passing it to TransportUtils.createSOAPMessage
             *
             *   msgctx.getEnvelope().getNamespace().getNamespaceURI()
             *
             * However, the SOAP1.2 spec, appendix A indicates that if a SOAP1.2 message is sent to a SOAP1.1
             * endpoint, we will get a SOAP1.1 (fault) message response.  We need another way to set
             * the inbound SOAP version.  Best way to do this is to trust the content type and let
             * createSOAPMessage take care of figuring out what the SOAP namespace is.
             */
            SOAPEnvelope resenvelope = TransportUtils.createSOAPMessage(responseMessageContext);
            if (resenvelope != null) {
                responseMessageContext.setEnvelope(resenvelope);
            } else {
                throw new AxisFault(Messages
                        .getMessage("blockingInvocationExpectsResponse"));
            }
        }

        SOAPEnvelope resenvelope = responseMessageContext.getEnvelope();
        if (resenvelope != null) {
            AxisEngine.receive(responseMessageContext);
            if (responseMessageContext.getReplyTo() != null) {
                sc.setTargetEPR(responseMessageContext.getReplyTo());
            }

            // rampart handlers change the envelope and set the decrypted envelope
            // so need to check the new one else resenvelope.hasFault() become false.
            resenvelope = responseMessageContext.getEnvelope();
            if (resenvelope.hasFault() || responseMessageContext.isProcessingFault()) {
                if (options.isExceptionToBeThrownOnSOAPFault()) {
                    // does the SOAPFault has a detail element for Excpetion
                    throw Utils.getInboundFaultFromMessageContext(responseMessageContext);
                }
            }
        }
    }

    /**
     * Synchronously send the request and receive a response. This relies on the transport
     * correctly connecting the response InputStream!
     *
     * @param msgContext the request MessageContext to send.
     * @return Returns MessageContext.
     * @throws AxisFault Sends the message using a two way transport and waits for a response
     */
    protected MessageContext send(MessageContext msgContext) throws AxisFault {

        // create the responseMessageContext
        MessageContext responseMessageContext =
                msgContext.getConfigurationContext().createMessageContext();

        responseMessageContext.setServerSide(false);
        responseMessageContext.setOperationContext(msgContext.getOperationContext());
        responseMessageContext.setOptions(new Options(options));
        responseMessageContext.setMessageID(msgContext.getMessageID());
        addMessageContext(responseMessageContext);
        responseMessageContext.setServiceContext(msgContext.getServiceContext());
        responseMessageContext.setAxisMessage(
                axisOp.getMessage(WSDLConstants.MESSAGE_LABEL_IN_VALUE));

        //sending the message
        AxisEngine.send(msgContext);

        responseMessageContext.setDoingREST(msgContext.isDoingREST());

        // Copy RESPONSE properties which the transport set onto the request message context when it processed
        // the incoming response recieved in reply to an outgoing request.
        responseMessageContext.setProperty(MessageContext.TRANSPORT_HEADERS,
                msgContext.getProperty(MessageContext.TRANSPORT_HEADERS));
        responseMessageContext.setProperty(HTTPConstants.MC_HTTP_STATUS_CODE,
                msgContext.getProperty(HTTPConstants.MC_HTTP_STATUS_CODE));

        responseMessageContext.setProperty(MessageContext.TRANSPORT_IN, msgContext
                .getProperty(MessageContext.TRANSPORT_IN));
        responseMessageContext.setTransportIn(msgContext.getTransportIn());
        responseMessageContext.setTransportOut(msgContext.getTransportOut());
        handleResponse(responseMessageContext);
        return responseMessageContext;
    }

    /**
     * This class is the workhorse for a non-blocking invocation that uses a two
     * way transport.
     */
    private class NonBlockingInvocationWorker implements Runnable {
        private MessageContext msgctx;
        private AxisCallback axisCallback;

        public NonBlockingInvocationWorker(MessageContext msgctx,
                                           AxisCallback axisCallback) {
            this.msgctx = msgctx;
            this.axisCallback = axisCallback;
        }

        public void run() {
            try {
                // send the request and wait for response
                MessageContext response = send(msgctx);
                // call the callback
                if (response != null) {
                    SOAPEnvelope resenvelope = response.getEnvelope();
                    if (resenvelope.hasFault()) {
                        SOAPBody body = resenvelope.getBody();
                        // If a fault was found, create an AxisFault with a MessageContext so that
                        // other programming models can deserialize the fault to an alternative form.
                        AxisFault fault = new AxisFault(body.getFault(), response);
                        if (axisCallback != null) {
                            if (options.isExceptionToBeThrownOnSOAPFault()) {
                                axisCallback.onError(fault);
                            } else {
                                axisCallback.onFault(response);
                            }
                        }
                    } else {
                        if (axisCallback != null) {
                            axisCallback.onMessage(response);
                        }
                    }
                }
            } catch (Exception e) {
                if (axisCallback != null) {
                    axisCallback.onError(e);
                }
            } finally {
                // onComplete is always delivered, success or failure,
                // much like a finally block for the callback.
                if (axisCallback != null) {
                    axisCallback.onComplete();
                }
            }
        }
    }

    /**
     * This class acts as a callback that allows users to wait on the result.
     */
    private class SyncCallBack implements AxisCallback {
        boolean complete;
        // NOTE(review): receivedFault is never assigned true anywhere in this
        // class — waitForCompletion's "!receivedFault" therefore always
        // reports success unless error is set. Confirm this is intentional.
        boolean receivedFault;

        public boolean waitForCompletion(long timeout) throws AxisFault {
            synchronized (this) {
                try {
                    if (complete) return !receivedFault;
                    wait(timeout);
                    if (!complete) {
                        // We timed out!
                        throw new AxisFault(Messages.getMessage("responseTimeOut"));
                    }
                } catch (InterruptedException e) {
                    // Something interrupted our wait!
                    error = e;
                }
            }

            if (error != null) throw AxisFault.makeFault(error);

            return !receivedFault;
        }

        /**
         * This is called when we receive a message.
         *
         * @param msgContext the (response) MessageContext
         */
        public void onMessage(MessageContext msgContext) {
            // Transport input stream gets closed after calling setComplete
            // method.  Have to build the whole envelope including the
            // attachments at this stage. Data might get lost if the input
            // stream gets closed before building the whole envelope.
            // TODO: Shouldn't need to do this - need to hook up stream closure to Axiom completion
            this.envelope = msgContext.getEnvelope();
            this.envelope.buildWithAttachments();
        }

        /**
         * This gets called when a fault message is received.
         *
         * @param msgContext the MessageContext containing the fault.
         */
        public void onFault(MessageContext msgContext) {
            error = Utils.getInboundFaultFromMessageContext(msgContext);
        }

        /**
         * This is called at the end of the MEP no matter what happens, quite like a
         * finally block.
         */
        public synchronized void onComplete() {
            // Wakes the thread blocked in waitForCompletion().
            complete = true;
            notify();
        }

        // Response envelope captured by onMessage(); null if none arrived.
        private SOAPEnvelope envelope;

        // Fault or transport error captured by onFault()/onError().
        private Exception error;

        public void onError(Exception e) {
            if (log.isDebugEnabled()) {
                log.debug("Entry: OutInAxisOperationClient$SyncCallBack::onError, " + e);
            }
            error = e;
            if (log.isDebugEnabled()) {
                log.debug("Exit: OutInAxisOperationClient$SyncCallBack::onError");
            }
        }
    }
}
/* * Copyright 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.css.compiler.ast; import com.google.common.collect.ImmutableList; import com.google.common.css.SourceCode; import com.google.common.css.SourceCodeLocation; import com.google.common.css.compiler.passes.CompactPrinter; import com.google.common.css.compiler.passes.testing.AstPrinter; import junit.framework.TestCase; import java.util.List; /** * Unit tests for the {@link GssParser}. * * @author fbenz@google.com (Florian Benz) */ public class GssParserTest extends TestCase { private CssTree testValid(String gss) throws GssParserException { CssTree tree = parse(gss); assertNotNull(tree); return tree; } private void testTree(String gss, String output) throws GssParserException { CssTree tree = parse(gss); assertNotNull(tree); CssRootNode root = tree.getRoot(); assertNotNull(root); assertEquals(output, AstPrinter.print(tree)); } public void testManySources() throws Exception { CssTree tree = parse(ImmutableList.of( new SourceCode("test1", "a {}"), new SourceCode("test2", "@component c { x {y: z} }"), new SourceCode("test3", "b {}"))); CssRootNode root = tree.getRoot(); assertNotNull(root); assertEquals("[[a]{[]}@component [c]{[x]{[y:[[z]];]}}[b]{[]}]", AstPrinter.print(tree)); } public void testAst1() throws Exception { testTree("a {}", "[[a]{[]}]"); } public void testAst2() throws Exception { testTree("a.b c#d > e.f + g {}", "[[a.b c#d>e.f+g]{[]}]"); } public void testAst3() 
throws Exception { testTree("a {x: y}", "[[a]{[x:[[y]];]}]"); } public void testAst4() throws Exception { testTree("a {w: x; y: z}", "[[a]{[w:[[x]];y:[[z]];]}]"); } public void testAst5() throws Exception { testTree("a {b: 1em}", "[[a]{[b:[[1em]];]}]"); } public void testAst6() throws Exception { testTree("a {b: 1.5em}", "[[a]{[b:[[1.5em]];]}]"); } public void testAst7() throws Exception { testTree("a {b: 'x'}", "[[a]{[b:[['x']];]}]"); } public void testAst8() throws Exception { testTree("a {b: url(#x)}", "[[a]{[b:[url(#x)];]}]"); } public void testAst9() throws Exception { testTree("a {b: url('#x')}", "[[a]{[b:[url('#x')];]}]"); } public void testAst10() throws Exception { testTree("a {b: x y z}", "[[a]{[b:[[x][y][z]];]}]"); } public void testAst11() throws Exception { testTree("a {b: c,d,e/f g,h i j,k}", "[[a]{[b:[[[c],[d],[[e]/[f]]][[g],[h]][i][[j],[k]]];]}]"); } public void testAst12() throws Exception { testTree("a {b: rgb(0,0,0)}", "[[a]{[b:[rgb(0,0,0)];]}]"); } public void testAst13() throws Exception { testTree("a {b: custom(0,0)}", "[[a]{[b:[custom(0,0)];]}]"); } public void testAst14() throws Exception { testTree("@def a b;", "[@def [a] [b];]"); } public void testAst15() throws Exception { testTree("@component a { x {y: z} }", "[@component [a]{[x]{[y:[[z]];]}}]"); } public void testAst16() throws Exception { testTree("a:foo {\n bla : d ; }", "[[a:foo]{[bla:[[d]];]}]"); } public void testAst17() throws Exception { testTree("foo {f: rgb(o=0);}", "[[foo]{[f:[rgb([[o]=[0]])];]}]"); } public void testAst18() throws Exception { testTree("a:lang(c) { d: e }", "[[a:lang(c)]{[d:[[e]];]}]"); } public void testAst19() throws Exception { testTree("a~b { d: e }", "[[a~b]{[d:[[e]];]}]"); } public void testAst20() throws Exception { testTree("a:b(-2n+3) { d: e }", "[[a:b(-2n+3)]{[d:[[e]];]}]"); } public void testAst21() throws Exception { testTree("a:not(#id) { d: e }", "[[a:not(#id)]{[d:[[e]];]}]"); } public void testAst22() throws Exception { testTree(".a { d:e,f }", 
"[[.a]{[d:[[[e],[f]]];]}]"); } public void testAst23() throws Exception { testTree(".a { d:e f,g h }", "[[.a]{[d:[[e][[f],[g]][h]];]}]"); } public void testAst24() throws Exception { testTree("a~b/deep/c { d: e }", "[[a~b/deep/c]{[d:[[e]];]}]"); } public void testParsingRules1() throws Exception { testValid("css_rule33 {\n" + "border: black ; /* comment */\n" + "height : 1em\n" + " }" ); } // We don't test for comments between '!' and 'important'. See the comment on // the IMPORTANT_SYM in the grammar for the reason. public void testParsingRules2() throws Exception { testValid("ul.navbar {\n" + " position: absolute;\n" + " top: top;\n" + " left: down;\n" + " width: nice }\n" + "\n" + ".foo {\n" + " position: absolute ! important ;\n" + "}\n" + ".bar {\n" + " position: absolute ! important;\n\n\n" + "}" ); } public void testParsingRules3() throws Exception { testValid("css_rule33 test2 {\n" + "border: black ; /* comment */\n" + "height : 1em\n" + " }" ); } public void testParsingRules4() throws Exception { testValid("p:before {content: counter(par-num, upper-roman) \". 
\"}"); } public void testParsingSelector1() throws Exception { testValid("a b { x: y}"); } public void testParsingSelector2() throws Exception { testValid("a > b { x: y}"); } public void testParsingSelector3() throws Exception { testValid("a + b { x: y}"); } public void testParsingSelector4() throws Exception { testValid("a + b > c d e.f + g { x: y}"); } public void testParsingSelector5() throws Exception { testValid("a + b > c d e.f#d + g {}"); } public void testParsingSelector6() throws Exception { testValid("a ~ b { x: y}"); } public void testParsingSelector7() throws Exception { testValid("a /deep/ b { x: y}"); } public void testParsingExpr1() throws Exception { testValid("aab {x:s r t}"); } public void testParsingExpr2() throws Exception { testValid("aab {x:s 1em t}"); } public void testParsingExpr3() throws Exception { testValid("aab {x:-1px +1px -1px 1.7px}"); } public void testParsingURL() throws Exception { testValid("a { x: url('http://test.com') }"); } public void testParsingHexcolor() throws Exception { testValid("a { x: #fff }"); } public void testParsingFunction1Arg() throws Exception { testValid("a { x: f(1) }"); } public void testParsingFunctionManyArgs() throws Exception { testValid("a { x: f(1, 2, 3) }"); } public void testParsingFilterFunctions() throws Exception { testValid("a { filter: drop-shadow(1 2 3) custom(1 2 3);" + "filter: drop-shadow(1, 2, 3) custom(1, 2, 3);}"); } public void testParsingWebkitFilterFunctions() throws Exception { testValid("a { filter: -webkit-drop-shadow(1 2) -webkit-custom(1 2);" + "filter: -webkit-drop-shadow(1, 2) -webkit-custom(1, 2);}"); } public void testParsingLocalFunctions() throws Exception { testValid("@font-face { src: local(Gentium), url(Gentium.woff);" + "src: local(Gentium Bold), local(Gentium-Bold), url(GentiumBold.woff);}"); } public void testParsingAt1() throws Exception { testValid("@import url('http://test.com/test.css');"); } public void testParsingAt2() throws Exception { testValid("@import 
url(http://test.com/test.css);"); } public void testParsingAt3() throws Exception { testValid("@component a extends b {\n" + "@def z 1;\n" + "x {y: z}\n" + "}"); } public void testParsingDef1() throws Exception { testValid("@def RC_TOP_LEFT tl;\n" + "@def RC_TOP_RIGHT tr;\n" + "@def BASE_WARNING_LINK_COLOR #c3d9ff; /* light blue */" ); } public void testParsingDef3() throws Exception { testValid("@def A_B /* @default */ inherit;"); } public void testParsingAttribute1() throws Exception { testValid("a[href=\"http://www.w3.org/\"]{\n" + "bla:d\n" + "}"); } public void testParsingAttribute2() throws Exception { testValid("*[lang|=\"en\"] { color : red }"); } public void testParsingPseudo1() throws Exception { testValid("a:foo {\n bla : d ; }"); } public void testParsingPseudo2() throws Exception { testValid("a:lang(en) {\n bla : d ; }"); } public void testParsingIf1() throws Exception { testValid("@if (RTL_LANG) {\n" + " @def RTL_FLAG 1; \n" + " @def LEFT right;\n" + "} @else {\n" + " @def IMGURL url('images/image.gif');\n" + "}"); } public void testParsingIf2() throws Exception { testValid("@if BROWSER_IE6 {\n" + " @def FUNBOX_MARGIN 0;\n" + "} @elseif BROWSER_IE {\n" + " @def FUNBOX_MARGIN 1 0 -1px 0;\n" + "} @elseif BROWSER_FF3_OR_HIGHER {\n" + " @def FUNBOX_MARGIN -2px 0 0 0;\n" + "} @else {\n" + " @if(A) { @def BB 23; }\n" + " @def FUNBOX_MARGIN -2px 0 -1px 0;\n" + "}"); } public void testParsingIf3() throws Exception { testValid("@if (RTL_LANG) {\n" + " CSS_RULE2.CLASS#id{ d:34em; }\n" + "} @else {\n" + "}"); } public void testParsingParenthesizedTerm() throws Exception { testValid("@if (FOO) { x { y: z } }"); } public void testParsingBooleanTerm1() throws Exception { testValid("@if ( A && (!B || C )) { @def RTL_FLAG 1;}"); } public void testParsingBooleanTerm2() throws Exception { testValid("@if (!A && !B || C || !(F && G ) ) { @def RTL_FLAG 1;}"); } public void testParsingComplexDef1() throws Exception { testValid("@def A a, b, c;"); } public void 
testParsingComplexDef2() throws Exception { testValid("@def FONT a, b, c 14px/2em #fff;"); } public void testParsingEqualsOperator() throws Exception { testValid(".CSS_ {\n" + " filter: alpha(opacity = 85) ;\n" + "}"); } public void testParsingColonFunctionName() throws Exception { testValid("x {y: a.b:c(d)}"); } public void testParsingColonFunctionName2() throws Exception { testValid(".CSS_ {\n" + "-ms-filter: \"progid:DXImageTr.Microsoft.Alpha(Opacity=80)\" ;\n" + "filter: progid:DXImageTr.Microsoft.AlphaImageLoader" + "(src='images/muc_bubble_left.png', sizingMethod='scale' );\n" + "}"); } public void testParsingEmptyPseudo() throws Exception { testValid("::a, :a[b]::c { x: y}"); } public void testParsingArbitraryDim() throws Exception { testValid("a {x: 2emelet 3x 5t}"); } public void testSelectorWithSpace() throws Exception { testValid("a /* x */ , b {x: y}"); } public void testIeRect() throws Exception { // Non-standard IE workaround. testValid(".a { clip: rect(0 0 0 0);}"); } public void testEllipse() throws Exception { testValid(".a { clip-path: ellipse(150px 300px at 50% 50%);}"); } public void testInset() throws Exception { testValid(".a { clip-path: inset(100px 100px 100px 100px);}"); } public void testCircle() throws Exception { testValid(".a { clip-path: circle(50% at right 5px bottom 10px);}"); } public void testPolygon() throws Exception { testValid(".a { clip-path: polygon(0 0, 0 300px, 300px 600px);}"); } public void testEqualAttribute() throws Exception { testValid("h1[foo=\"bar\"] {x : y}"); } public void testCaretEqualAttribute() throws Exception { testValid("h1[foo^=\"bar\"] {x : y}"); } public void testDollarEqualAttribute() throws Exception { testValid("h1[foo$=\"bar\"] {x : y}"); } public void testAsteriskEqualAttribute() throws Exception { testValid("h1[foo*=\"bar\"] {x : y}"); } public void testPipeEqualAttribute() throws Exception { testValid("h1[foo|=\"bar\"] {x : y}"); } public void testImageSet() throws Exception { 
testValid("div:before {" + "content: -webkit-image-set(url(a.png) 1x, url(b.png) 2x);" + "content: -moz-image-set(url(a.png) 1x, url(b.png) 2x);" + "content: -o-image-set(url(a.png) 1x, url(b.png) 2x);" + "content: image-set(url(a.png) 1x, url(b.png) 2x);" + "}"); } public void testWebkitGradient() throws Exception { CssTree tree = testValid(".CSS { background: " + "-webkit-gradient(linear, 0 0, 0 100%, from(#fff), to(#ddd)) }"); CssRootNode root = tree.getRoot(); assertNotNull(root); assertEquals("[[.CSS]{[background:[" + "-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd))];]}]", AstPrinter.print(tree)); CssRulesetNode ruleset = (CssRulesetNode) tree.getRoot().getBody().getChildAt(0); CssDeclarationNode decl = (CssDeclarationNode) ruleset.getDeclarations().getChildAt(0); CssFunctionNode function = (CssFunctionNode) decl.getPropertyValue().getChildAt(0); CssFunctionArgumentsNode args = function.getArguments(); assertEquals("The argument list should be flattened, and contain " + "7 arguments + 6 separators (4 commas and 2 meaningful spaces).", 13, args.numChildren()); } public void testGradients() throws Exception { testValid("div {" + "a:radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:radial-gradient(30% 30%, closest-corner, white, black);" + "c:radial-gradient(center, 5em 40px, white, black);" + "d:linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:repeating-linear-gradient(left, red 10%, blue 30%);" + "f:repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } /* http://www.webkit.org/blog/1424/css3-gradients/ */ public void testWebkitGradients() throws Exception { testValid("div {" + "a:-webkit-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-webkit-radial-gradient(30% 30%, closest-corner, white, black);" + "c:-webkit-radial-gradient(center, 5em 40px, white, black);" + "d:-webkit-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + 
"e:-webkit-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-webkit-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } public void testMozillaGradients() throws Exception { testValid("div {" + "a:-moz-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-moz-radial-gradient(30% 30%, closest-corner, white, black);" + "c:-moz-radial-gradient(center, 5em 40px, white, black);" + "d:-moz-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:-moz-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-moz-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } public void testOperaGradients() throws Exception { testValid("div {" + "a:-o-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-o-radial-gradient(30% 30%, closest-corner, white, black);" + "c:-o-radial-gradient(center, 5em 40px, white, black);" + "d:-o-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:-o-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-o-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } public void testInternetExplorerGradients() throws Exception { testValid("div {" + "a:-ms-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-ms-radial-gradient(30% 30%, closest-corner, white, black);" + "c:-ms-radial-gradient(center, 5em 40px, white, black);" + "d:-ms-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:-ms-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-ms-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } public void testKonquererGradients() throws Exception { // Taken from http://twitter.github.com/bootstrap/1.4.0/bootstrap.css testValid("div {" + "background-image: -khtml-gradient(linear, left top, left bottom, " + " from(#333333), to(#222222));" + "}"); } public void testWebkitMinDevicePixelRatio() throws Exception { testValid("@media screen and 
(-webkit-min-device-pixel-ratio:0) {}"); } public void testMediaQuery() throws Exception { testValid("@media screen and (max-height: 300px) and (min-width: 20px) {}"); } public void testMediaQueryRatioNoSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3/4) {}"); } public void testMediaQueryRatioWithSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3 / 4) {}"); } public void testMediaQueryRatioWithManyLeadingSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3 / 4) {}"); } public void testMediaQueryRatioWithTrailingSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3/ 4) {}"); } public void testMediaQueryRatioWithNoTrailingSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3 /4) {}"); } public void testMozLinearGradient() throws Exception { testValid(".CSS { background-image: " + "-moz-linear-gradient(bottom, #c0c0c0 0%, #dddddd 90%) }"); } public void testParsingWebkitKeyframes1() throws Exception { testValid("@-webkit-keyframes bounce {\n" + " from {\n" + " left: 0px;\n" + " }\n" + " to {\n" + " left: 200px;\n" + " }\n" + "}\n"); } public void testParsingMozKeyframes1() throws Exception { testValid("@-moz-keyframes bounce {\n" + " from {\n" + " left: 0px;\n" + " }\n" + " to {\n" + " left: 200px;\n" + " }\n" + "}\n"); } public void testParsingWebkitKeyframes2() throws Exception { testValid("@-webkit-keyframes pulse {\n" + " 0% {\n" + " background-color: red;\n" + " opacity: 1.0;\n" + " -webkit-transform: scale(1.0) rotate(0deg);\n" + " }\n" + " 33.33% {\n" + " background-color: blue;\n" + " opacity: 0.75;\n" + " -webkit-transform: scale(1.1) rotate(-5deg);\n" + " }\n" + " 66.66% {\n" + " background-color: green;\n" + " opacity: 0.5;\n" + " -webkit-transform: scale(1.1) rotate(5deg);\n" + " }\n" + " 100% {\n" + " background-color: red;\n" + " opacity: 1.0;\n" + " -webkit-transform: scale(1.0) rotate(0deg);\n" + " }\n" + "}"); } public void 
testParsingWebkitKeyframes3() throws Exception {
    // Grouped percentage selectors ("0%, 51.2%") and a repeated 25% block
    // must all parse inside a single @-webkit-keyframes rule.
    testValid("@-webkit-keyframes bounce {\n" +
        " 0%, 51.2% {\n" +
        " left: 0px;\n" +
        " background: red;\n" +
        " }\n" +
        " 25%, 90.5% {\n" +
        " left: 200px;\n" +
        " background: green;\n" +
        " }\n" +
        " 25% {\n" +
        " background: blue;\n" +
        " }\n" +
        "}");
}

/** The identifiers 'from' and 'to' are valid both as keyframe names and as selectors. */
public void testParsingWebkitKeyframes4() throws Exception {
    testValid("@-webkit-keyframes from {}");
    testValid("@-webkit-keyframes to {}");
    testValid("from {}");
    testValid("to {}");
}

/** Backslash escapes (hex runs, single chars, quotes) inside double-quoted strings. */
public void testEscapingInDoubleQuoteString() throws Exception {
    testValid("body {content: \"\\0af9bcHH\"}");
    testValid("body {content: \"\\0HH\"}");
    testValid("body {content: \"\\aHH\"}");
    testValid("body {content: \"\\gHH\"}");
    testValid("body {content: \"\\\"'HH\"}");
}

/** Same escape sequences inside single-quoted strings. */
public void testEscapingInSingleQuoteString() throws Exception {
    testValid("body {content: '\\0af9bcHH'}");
    testValid("body {content: '\\0HH'}");
    testValid("body {content: '\\aHH'}");
    testValid("body {content: '\\gHH'}");
    testValid("body {content: '\"\\'HH'}");
}

/** Functional pseudo-classes such as :lang(). */
public void testPseudoFunction() throws Exception {
    testValid("div :lang(en) { color: #FFF; }");
    testValid(":lang(fr) { color: #FFF; }");
}

/** :nth-child() accepts the full an+b micro-syntax plus 'odd'/'even'. */
public void testPseudoNth() throws Exception {
    testValid("div :nth-child(1n+1) { color: #FFF; }");
    testValid("div :nth-child(n+1) { color: #FFF; }");
    testValid("div :nth-child(+n+2) { color: #FFF; }");
    testValid("div :nth-child(n-1) { color: #FFF; }");
    testValid("div :nth-child(-n-1) { color: #FFF; }");
    testValid("div :nth-child(+2n+3) { color: #FFF; }");
    testValid("div :nth-child(-5n+1) { color: #FFF; }");
    // just 'n' is not supported by WebKit yet
    testValid("div :nth-child(n) { color: #FFF; }");
    testValid("div :nth-child(-n) { color: #FFF; }");
    testValid("div :nth-child(+n) { color: #FFF; }");
    testValid("div :nth-child(n-0) { color: #FFF; }");
    testValid("div :nth-child(0n+0) { color: #FFF; }");
    testValid("div :nth-child(1) { color: #FFF; }");
    testValid("div :nth-child(+7) { color: #FFF; }");
    testValid("div :nth-child(-9) { color: #FFF; }");
    testValid("div :nth-child(odd) { color: #FFF; }");
    testValid("div :nth-child(even) { color: #FFF; }");
}

/** The negation pseudo-class with class, type, and id arguments, chained or spaced. */
public void testPseudoNot() throws Exception {
    testValid("p :not(.classy) { color: #123; }");
    testValid("p :not(div) { color: #123; }");
    testValid("p:not(div) { color: #123; }");
    testValid("p :not( div ) { color: #123; }");
    testValid("p :not(#id) { color: #123; }");
    testValid("*:not(:link):not(:visited) {}");
}

/** CSS3 double-colon pseudo-element syntax. */
public void testPseudoElements() throws Exception {
    testValid("p::first-line { text-transform: uppercase }");
    testValid("p::first-letter { color: green; font-size: 200% }");
    testValid("div::after { color: #123; }");
    testValid("div::before { color: #123; }");
}

/** Legacy CSS2 single-colon pseudo-element syntax must still be accepted. */
public void testOldPseudoElements() throws Exception {
    testValid("p:first-line { text-transform: uppercase }");
    testValid("p:first-letter { color: green; font-size: 200% }");
    testValid("div:after { color: #123; }");
    testValid("div:before { color: #123; }");
}

/** GSS @defmixin declarations with various whitespace and constant usage. */
public void testMixinDefinitions() throws Exception {
    testValid("@defmixin name(PAR1, PAR2) { prop1: PAR1; prop2: PAR2 }");
    testValid("@defmixin name( PAR1 , PAR2 )" + "{ prop1: PAR1; prop2: PAR2 }");
    testValid("@defmixin name(PAR1, PAR2) { prop1: PAR1; prop2: CONST; }");
}

/** GSS @mixin applications, including empty and multi-value argument lists. */
public void testMixins() throws Exception {
    testValid("div { @mixin name(); }");
    testValid("div { @mixin name( ) ; }");
    testValid("div { prop1: val; @mixin defname(2px, #fff, 23%); }");
    testValid("div { prop1: val; @mixin defname(); p:v;}");
    testValid("div { @mixin foo(1px/1em); }");
    testValid("div { @mixin foo(1px 1px); }");
}

public void testUnquotedUrl() throws Exception {
    testValid("div { background-image: url(http://google.com/logo.png) }");
}

public void testFunctionApplicationUrl() throws Exception {
    testValid("div { background-image: url(dataUrl('s')) }");
}

public void testUrlOfFunctionOfId() throws Exception {
    // Bare URLs in function arguments are deprecated, but
    // we have some dependent code to cleanup before removing
    // the feature.
    testValid("div { background-image: url(dataUrl(x)); }");
}

public void testFn() throws Exception {
    testValid("div { background-image: url(http://foo) }");
}

public void testUrlPrefix() throws Exception {
    testTree("div { background-image: url-prefix(http://fo); }",
        "[[div]{[background-image:[url-prefix(http://fo)];]}]");
}

public void testUrlPrefix2() throws Exception {
    testTree("div { background-image: url-prefix(fn(0)); }",
        "[[div]{[background-image:[url-prefix(fn(0))];]}]");
}

public void testEmptyUrl() throws Exception {
    testValid("div { background-image: url() }");
}

/** Whitespace after the url( token is tolerated for quoted URLs. */
public void testUrlWithWhitespace() throws Exception {
    testTree("div { background-image: url( 'http://google.com/logo.png'); }",
        "[[div]{[background-image:" + "[url('http://google.com/logo.png')];]}]");
}

/** Whitespace after the url( token is tolerated for unquoted URLs too. */
public void testUnquotedUrlWithWhitespace() throws Exception {
    testTree("div { background-image: url( http://google.com/logo.png); }",
        "[[div]{[background-image:" + "[url(http://google.com/logo.png)];]}]");
}

/** CDO/CDC (HTML comment) tokens are ignored at the top level of a stylesheet. */
public void testCdoCdc() throws Exception {
    testTree( "<!--\ndiv { color: red; }\n-->", "[[div]{[color:[[red]];]}]");
}

/** A CDO token inside a property value is a parse error, reported at the CDO's offset. */
public void testIntraPropertyCdoCdc() throws Exception {
    String css = ".foo{border:1px<!--solid-->blue;}";
    try {
        parse(css);
        fail("CDO should not be accepted in property values.");
    } catch (GssParserException e) {
        assertEquals(
            "The error should reflect that CDO is not accepted in property " + "values.",
            css.indexOf("<!--"),
            e.getGssError().getLocation().getBeginCharacterIndex());
    }
}

public void testMicrosoftListAtRule() throws Exception {
    // This is syntactically valid according to CSS3, so we should
    // be able to ignore the proprietary @list rule and not fail
    // the whole parse.
    String[] samples = new String[] {
        "@list l0\n" + "{mso-list-id:792754432;}\n" + "div { border: solid thin black }",
        "@list l0:level1\n" + "{mso-list-id:792754432;}\n" + "div { border: solid thin black }"};
    for (String css : samples) {
        // no exceptions the first time
        CssTree t1 = parse(css);
        String output1 = CompactPrinter.printCompactly(t1);
        // also no exceptions the second time
        CssTree t2 = parse(output1);
        // and then we've reached a fixed point
        assertEquals( AstPrinter.print(t1), AstPrinter.print(t2));
    }
}

/** Malformed/unterminated @list rules must be rejected outright, not half-parsed. */
public void testRunawayMicrosoftListAtRule() throws Exception {
    String[] samples = new String[] {
        // unterminated block
        "@list l0 {mso-list-id:792754432;",
        // unterminated nested paren
        "@list l0 {mso-list-id:792754432;(}",
        // improper nesting with parens
        "@list l0 {mso-list-id:792754432;(})",
        // unterminated block, unmatched open bracket
        "@list l0 {mso-list-id:792754432;[",
        // unterminated block, close bracket without matching open bracket
        "@list l0 {mso-list-id:792754432;]"};
    for (String css : samples) {
        try {
            parse(css);
            fail("The compiler should only accept complete @list rules, not " + css);
        } catch (GssParserException e) {
            // expected
        }
    }
}

/** Non-standard border-*-height properties parse like any other declaration. */
public void testCustomBorderProperty() throws Exception {
    testTree( "a { border-height: 1em; }", "[[a]{[border-height:[[1em]];]}]");
    testTree( "a { border-left-height: 1em; }", "[[a]{[border-left-height:[[1em]];]}]");
    testTree( "a { border-right-height: 1em; }", "[[a]{[border-right-height:[[1em]];]}]");
    testTree( "a { border-top-height: 1em; }", "[[a]{[border-top-height:[[1em]];]}]");
    testTree( "a { border-bottom-height: 1em; }", "[[a]{[border-bottom-height:[[1em]];]}]");
}

/** GSS @for loop with literal bounds. */
public void testForLoop() throws Exception {
    testTree( "@for $i from 1 to 6 {}", "[@for [$i] [from] [1] [to] [6]{}]");
}

public void testForLoopWithStep() throws Exception {
    testTree( "@for $i from 1 to 6 step 2 {}", "[@for [$i] [from] [1] [to] [6] [step] [2]{}]");
}

/** @for bounds and step may themselves be variables. */
public void testForLoopWithVariables() throws Exception {
    testTree( "@for $i from $x to $y step $z {}", "[@for [$i] [from] [$x] [to] [$y] [step] [$z]{}]");
}

/** The loop variable may be interpolated into selectors and values in the body. */
public void testForLoopWithVariablesInBlock() throws Exception {
    testTree( "@for $i from 1 to 2 { .foo-$i { padding: $i } }",
        "[@for [$i] [from] [1] [to] [2]{[.foo-$i]{[padding:[[$i]];]}}]");
}

/** Comments anywhere between rules are dropped and never appear in the tree. */
public void testComments() throws GssParserException {
    testTree("div {}/*comment*/", "[[div]{[]}]");
    testTree("div {}/*comment*/p {}", "[[div]{[]}[p]{[]}]");
    testTree("div {}/***comment**/p {}", "[[div]{[]}[p]{[]}]");
    testTree("div {}/***c/o**m//m***e////nt**/p {}", "[[div]{[]}[p]{[]}]");
    testTree("div {}/***c/o**m//m/***e////nt/***/p {}", "[[div]{[]}[p]{[]}]");
    testTree("div {}/****************/p {}", "[[div]{[]}[p]{[]}]");
    testTree("div {}/**/p {}", "[[div]{[]}[p]{[]}]");
    testTree("div {}/**/p {}/**/", "[[div]{[]}[p]{[]}]");
    testTree("div {}/**/p {}/**/div {}", "[[div]{[]}[p]{[]}[div]{[]}]");
}

/** unicode-range values: single point, ranges, and '?' wildcards. */
public void testUnicodeRange() throws Exception {
    testValid("@font-face { unicode-range: U+26;}");
    testValid("@font-face { unicode-range: U+0015-00FF;}");
    testValid("@font-face { unicode-range: U+A015-C0FF;}");
    testValid("@font-face { unicode-range: U+26??;}");
}

public void testNumericNodeLocation() throws GssParserException {
    CssTree tree = new GssParser(new SourceCode(null, "div{width:99px;}")).parse();
    final CssNumericNode[] resultHolder = new CssNumericNode[1];
    // Walk the tree and capture the single numeric node ("99px").
    tree.getVisitController().startVisit(new DefaultTreeVisitor() {
        @Override
        public boolean enterValueNode(CssValueNode value) {
            if (value instanceof CssNumericNode) {
                // There must be exactly one numeric node in this stylesheet.
                assertNull(resultHolder[0]);
                resultHolder[0] = (CssNumericNode) value;
            }
            return true;
        }
    });
    assertNotNull(resultHolder[0]);
    SourceCodeLocation location = resultHolder[0].getSourceCodeLocation();
    // The recorded location of the numeric value is expected to span 3 characters.
    assertEquals(3, location.getEndCharacterIndex() - location.getBeginCharacterIndex());
}

/** Parses the given sources into a CssTree, propagating any parse error. */
private CssTree parse(List<SourceCode> sources) throws GssParserException {
    GssParser parser = new GssParser(sources);
    return parser.parse();
}

/** Convenience overload: parses a single GSS string under the source name "test". */
private CssTree parse(String gss) throws GssParserException {
    return parse(ImmutableList.of(new SourceCode("test", gss)));
}
}
/* * Copyright (C) 2011 Patrik Akerfeldt * Copyright (C) 2011 Jake Wharton * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.way.indicator; import static android.graphics.Paint.ANTI_ALIAS_FLAG; import static android.widget.LinearLayout.HORIZONTAL; import static android.widget.LinearLayout.VERTICAL; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.Paint.Style; import android.graphics.drawable.Drawable; import android.os.Parcel; import android.os.Parcelable; import android.support.v4.view.MotionEventCompat; import android.support.v4.view.ViewConfigurationCompat; import android.support.v4.view.ViewPager; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; import android.view.ViewConfiguration; import com.way.weather.R; /** * Draws circles (one for each view). The current view position is filled and * others are only stroked. 
 */
public class CirclePageIndicator extends View implements PageIndicator {
    private static final int INVALID_POINTER = -1;

    // Circle radius in pixels (from attrs or the default dimension resource).
    private float mRadius;
    // Paint for unselected page circles (fill), the circle outline, and the
    // selected-page circle (fill), respectively.
    private final Paint mPaintPageFill = new Paint(ANTI_ALIAS_FLAG);
    private final Paint mPaintStroke = new Paint(ANTI_ALIAS_FLAG);
    private final Paint mPaintFill = new Paint(ANTI_ALIAS_FLAG);
    private ViewPager mViewPager;
    // Downstream listener; this view installs itself as the pager's listener
    // and forwards every callback to mListener.
    private ViewPager.OnPageChangeListener mListener;
    private int mCurrentPage;
    private int mSnapPage;
    // Scroll offset within the current page, in [0, 1).
    private float mPageOffset;
    private int mScrollState;
    private int mOrientation;
    private boolean mCentered;
    private boolean mSnap;

    // Touch state used to fake-drag the pager from the indicator itself.
    private int mTouchSlop;
    private float mLastMotionX = -1;
    private int mActivePointerId = INVALID_POINTER;
    private boolean mIsDragging;

    public CirclePageIndicator(Context context) {
        this(context, null);
    }

    public CirclePageIndicator(Context context, AttributeSet attrs) {
        this(context, attrs, R.attr.vpiCirclePageIndicatorStyle);
    }

    public CirclePageIndicator(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // The layout editor cannot resolve our resources; bail out early.
        if (isInEditMode())
            return;

        // Load defaults from resources
        final Resources res = getResources();
        final int defaultPageColor = res.getColor(R.color.default_circle_indicator_page_color);
        final int defaultFillColor = res.getColor(R.color.default_circle_indicator_fill_color);
        final int defaultOrientation = res.getInteger(R.integer.default_circle_indicator_orientation);
        final int defaultStrokeColor = res.getColor(R.color.default_circle_indicator_stroke_color);
        final float defaultStrokeWidth = res.getDimension(R.dimen.default_circle_indicator_stroke_width);
        final float defaultRadius = res.getDimension(R.dimen.default_circle_indicator_radius);
        final boolean defaultCentered = res.getBoolean(R.bool.default_circle_indicator_centered);
        final boolean defaultSnap = res.getBoolean(R.bool.default_circle_indicator_snap);

        // Retrieve styles attributes
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CirclePageIndicator, defStyle, 0);

        mCentered = a.getBoolean(R.styleable.CirclePageIndicator_centered, defaultCentered);
        mOrientation = a.getInt(R.styleable.CirclePageIndicator_android_orientation, defaultOrientation);
        mPaintPageFill.setStyle(Style.FILL);
        mPaintPageFill.setColor(a.getColor(R.styleable.CirclePageIndicator_pageColor, defaultPageColor));
        mPaintStroke.setStyle(Style.STROKE);
        mPaintStroke.setColor(a.getColor(R.styleable.CirclePageIndicator_strokeColor, defaultStrokeColor));
        mPaintStroke.setStrokeWidth(a.getDimension(R.styleable.CirclePageIndicator_strokeWidth, defaultStrokeWidth));
        mPaintFill.setStyle(Style.FILL);
        mPaintFill.setColor(a.getColor(R.styleable.CirclePageIndicator_fillColor, defaultFillColor));
        mRadius = a.getDimension(R.styleable.CirclePageIndicator_radius, defaultRadius);
        mSnap = a.getBoolean(R.styleable.CirclePageIndicator_snap, defaultSnap);

        Drawable background = a.getDrawable(R.styleable.CirclePageIndicator_android_background);
        if (background != null) {
            setBackgroundDrawable(background);
        }

        a.recycle();

        final ViewConfiguration configuration = ViewConfiguration.get(context);
        mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration);
    }

    public void setCentered(boolean centered) {
        mCentered = centered;
        invalidate();
    }

    public boolean isCentered() {
        return mCentered;
    }

    public void setPageColor(int pageColor) {
        mPaintPageFill.setColor(pageColor);
        invalidate();
    }

    public int getPageColor() {
        return mPaintPageFill.getColor();
    }

    public void setFillColor(int fillColor) {
        mPaintFill.setColor(fillColor);
        invalidate();
    }

    public int getFillColor() {
        return mPaintFill.getColor();
    }

    /** Sets layout direction; only HORIZONTAL or VERTICAL are accepted. */
    public void setOrientation(int orientation) {
        switch (orientation) {
        case HORIZONTAL:
        case VERTICAL:
            mOrientation = orientation;
            requestLayout();
            break;

        default:
            throw new IllegalArgumentException("Orientation must be either HORIZONTAL or VERTICAL.");
        }
    }

    public int getOrientation() {
        return mOrientation;
    }

    public void setStrokeColor(int strokeColor) {
        mPaintStroke.setColor(strokeColor);
        invalidate();
    }

    public int getStrokeColor() {
        return mPaintStroke.getColor();
    }

    public void setStrokeWidth(float strokeWidth) {
        mPaintStroke.setStrokeWidth(strokeWidth);
        invalidate();
    }

    public float getStrokeWidth() {
        return mPaintStroke.getStrokeWidth();
    }

    public void setRadius(float radius) {
        mRadius = radius;
        invalidate();
    }

    public float getRadius() {
        return mRadius;
    }

    public void setSnap(boolean snap) {
        mSnap = snap;
        invalidate();
    }

    public boolean isSnap() {
        return mSnap;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        if (mViewPager == null) {
            return;
        }
        final int count = mViewPager.getAdapter().getCount();
        if (count == 0) {
            return;
        }

        // Clamp the current page if the adapter shrank underneath us.
        if (mCurrentPage >= count) {
            setCurrentItem(count - 1);
            return;
        }

        // "Long" axis runs along the row of circles; "short" axis is across it.
        int longSize;
        int longPaddingBefore;
        int longPaddingAfter;
        int shortPaddingBefore;
        if (mOrientation == HORIZONTAL) {
            longSize = getWidth();
            longPaddingBefore = getPaddingLeft();
            longPaddingAfter = getPaddingRight();
            shortPaddingBefore = getPaddingTop();
        } else {
            longSize = getHeight();
            longPaddingBefore = getPaddingTop();
            longPaddingAfter = getPaddingBottom();
            shortPaddingBefore = getPaddingLeft();
        }

        // Circles are spaced at 3 radii (one diameter plus one radius gap).
        final float threeRadius = mRadius * 3;
        final float shortOffset = shortPaddingBefore + mRadius;
        float longOffset = longPaddingBefore + mRadius;
        if (mCentered) {
            longOffset += ((longSize - longPaddingBefore - longPaddingAfter) / 2.0f) - ((count * threeRadius) / 2.0f);
        }

        float dX;
        float dY;

        // Shrink the fill so the stroke straddles the nominal radius.
        float pageFillRadius = mRadius;
        if (mPaintStroke.getStrokeWidth() > 0) {
            pageFillRadius -= mPaintStroke.getStrokeWidth() / 2.0f;
        }

        // Draw stroked circles
        for (int iLoop = 0; iLoop < count; iLoop++) {
            float drawLong = longOffset + (iLoop * threeRadius);
            if (mOrientation == HORIZONTAL) {
                dX = drawLong;
                dY = shortOffset;
            } else {
                dX = shortOffset;
                dY = drawLong;
            }
            // Only paint fill if not completely transparent
            if (mPaintPageFill.getAlpha() > 0) {
                canvas.drawCircle(dX, dY, pageFillRadius, mPaintPageFill);
            }

            // Only paint stroke if a stroke width was non-zero
            if (pageFillRadius != mRadius) {
                canvas.drawCircle(dX, dY, mRadius, mPaintStroke);
            }
        }

        // Draw the filled circle according to the current scroll
        float cx = (mSnap ? mSnapPage : mCurrentPage) * threeRadius;
        if (!mSnap) {
            // In non-snap mode the filled circle slides with the page offset.
            cx += mPageOffset * threeRadius;
        }
        if (mOrientation == HORIZONTAL) {
            dX = longOffset + cx;
            dY = shortOffset;
        } else {
            dX = shortOffset;
            dY = longOffset + cx;
        }
        canvas.drawCircle(dX, dY, mRadius, mPaintFill);
    }

    /**
     * Translates touches on the indicator into pager navigation: a drag past
     * the touch slop fake-drags the pager; a tap on the outer thirds of the
     * view moves one page backward/forward.
     */
    public boolean onTouchEvent(android.view.MotionEvent ev) {
        if (super.onTouchEvent(ev)) {
            return true;
        }
        if ((mViewPager == null) || (mViewPager.getAdapter().getCount() == 0)) {
            return false;
        }

        final int action = ev.getAction() & MotionEventCompat.ACTION_MASK;
        switch (action) {
        case MotionEvent.ACTION_DOWN:
            mActivePointerId = MotionEventCompat.getPointerId(ev, 0);
            mLastMotionX = ev.getX();
            break;

        case MotionEvent.ACTION_MOVE: {
            final int activePointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId);
            final float x = MotionEventCompat.getX(ev, activePointerIndex);
            final float deltaX = x - mLastMotionX;

            if (!mIsDragging) {
                if (Math.abs(deltaX) > mTouchSlop) {
                    mIsDragging = true;
                }
            }

            if (mIsDragging) {
                mLastMotionX = x;
                if (mViewPager.isFakeDragging() || mViewPager.beginFakeDrag()) {
                    mViewPager.fakeDragBy(deltaX);
                }
            }

            break;
        }

        case MotionEvent.ACTION_CANCEL:
        case MotionEvent.ACTION_UP:
            if (!mIsDragging) {
                final int count = mViewPager.getAdapter().getCount();
                final int width = getWidth();
                final float halfWidth = width / 2f;
                final float sixthWidth = width / 6f;

                // Taps in the left/right third page backward/forward; the
                // middle third is a dead zone.
                if ((mCurrentPage > 0) && (ev.getX() < halfWidth - sixthWidth)) {
                    if (action != MotionEvent.ACTION_CANCEL) {
                        mViewPager.setCurrentItem(mCurrentPage - 1);
                    }
                    return true;
                } else if ((mCurrentPage < count - 1) && (ev.getX() > halfWidth + sixthWidth)) {
                    if (action != MotionEvent.ACTION_CANCEL) {
                        mViewPager.setCurrentItem(mCurrentPage + 1);
                    }
                    return true;
                }
            }

            mIsDragging = false;
            mActivePointerId = INVALID_POINTER;
            if (mViewPager.isFakeDragging())
                mViewPager.endFakeDrag();
            break;

        case MotionEventCompat.ACTION_POINTER_DOWN: {
            final int index = MotionEventCompat.getActionIndex(ev);
            mLastMotionX = MotionEventCompat.getX(ev, index);
            mActivePointerId = MotionEventCompat.getPointerId(ev, index);
            break;
        }

        case MotionEventCompat.ACTION_POINTER_UP:
            final int pointerIndex = MotionEventCompat.getActionIndex(ev);
            final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex);
            if (pointerId == mActivePointerId) {
                // The tracked finger went up; promote another pointer.
                final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
                mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex);
            }
            mLastMotionX = MotionEventCompat.getX(ev, MotionEventCompat.findPointerIndex(ev, mActivePointerId));
            break;
        }

        return true;
    }

    @Override
    public void setViewPager(ViewPager view) {
        if (mViewPager == view) {
            return;
        }
        if (mViewPager != null) {
            // Detach from the previously bound pager.
            mViewPager.setOnPageChangeListener(null);
        }
        if (view.getAdapter() == null) {
            throw new IllegalStateException("ViewPager does not have adapter instance.");
        }
        mViewPager = view;
        mViewPager.setOnPageChangeListener(this);
        invalidate();
    }

    @Override
    public void setViewPager(ViewPager view, int initialPosition) {
        setViewPager(view);
        setCurrentItem(initialPosition);
    }

    @Override
    public void setCurrentItem(int item) {
        if (mViewPager == null) {
            throw new IllegalStateException("ViewPager has not been bound.");
        }
        mViewPager.setCurrentItem(item);
        mCurrentPage = item;
        invalidate();
    }

    @Override
    public void notifyDataSetChanged() {
        invalidate();
    }

    @Override
    public void onPageScrollStateChanged(int state) {
        mScrollState = state;

        if (mListener != null) {
            mListener.onPageScrollStateChanged(state);
        }
    }

    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        mCurrentPage = position;
        mPageOffset = positionOffset;
        invalidate();

        if (mListener != null) {
            mListener.onPageScrolled(position, positionOffset, positionOffsetPixels);
        }
    }

    @Override
    public void onPageSelected(int position) {
        // In snap mode the filled circle only moves once the page is settled
        // (or selection happens while idle, e.g. setCurrentItem).
        if (mSnap || mScrollState == ViewPager.SCROLL_STATE_IDLE) {
            mCurrentPage = position;
            mSnapPage = position;
            invalidate();
        }

        if (mListener != null) {
            mListener.onPageSelected(position);
        }
    }

    @Override
    public void setOnPageChangeListener(ViewPager.OnPageChangeListener listener) {
        mListener = listener;
    }

    /*
     * (non-Javadoc)
     *
     * @see android.view.View#onMeasure(int, int)
     */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (mOrientation == HORIZONTAL) {
            setMeasuredDimension(measureLong(widthMeasureSpec), measureShort(heightMeasureSpec));
        } else {
            setMeasuredDimension(measureShort(widthMeasureSpec), measureLong(heightMeasureSpec));
        }
    }

    /**
     * Determines the width of this view
     *
     * @param measureSpec
     *            A measureSpec packed into an int
     * @return The width of the view, honoring constraints from measureSpec
     */
    private int measureLong(int measureSpec) {
        int result;
        int specMode = MeasureSpec.getMode(measureSpec);
        int specSize = MeasureSpec.getSize(measureSpec);

        if ((specMode == MeasureSpec.EXACTLY) || (mViewPager == null)) {
            // We were told how big to be
            result = specSize;
        } else {
            // Calculate the width according the views count
            final int count = mViewPager.getAdapter().getCount();
            result = (int) (getPaddingLeft() + getPaddingRight() + (count * 2 * mRadius) + (count - 1) * mRadius + 1);
            // Respect AT_MOST value if that was what is called for by
            // measureSpec
            if (specMode == MeasureSpec.AT_MOST) {
                result = Math.min(result, specSize);
            }
        }
        return result;
    }

    /**
     * Determines the height of this view
     *
     * @param measureSpec
     *            A measureSpec packed into an int
     * @return The height of the view, honoring constraints from measureSpec
     */
    private int measureShort(int measureSpec) {
        int result;
        int specMode = MeasureSpec.getMode(measureSpec);
        int specSize = MeasureSpec.getSize(measureSpec);

        if (specMode == MeasureSpec.EXACTLY) {
            // We were told how big to be
            result = specSize;
        } else {
            // Measure the height
            result = (int) (2 * mRadius + getPaddingTop() + getPaddingBottom() + 1);
            // Respect AT_MOST value if that was what is called for by
            // measureSpec
            if (specMode == MeasureSpec.AT_MOST) {
                result = Math.min(result, specSize);
            }
        }
        return result;
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        SavedState savedState = (SavedState) state;
        super.onRestoreInstanceState(savedState.getSuperState());
        mCurrentPage = savedState.currentPage;
        mSnapPage = savedState.currentPage;
        requestLayout();
    }

    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        SavedState savedState = new SavedState(superState);
        savedState.currentPage = mCurrentPage;
        return savedState;
    }

    /** Parcelable state: only the current page index survives recreation. */
    static class SavedState extends BaseSavedState {
        int currentPage;

        public SavedState(Parcelable superState) {
            super(superState);
        }

        private SavedState(Parcel in) {
            super(in);
            currentPage = in.readInt();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeInt(currentPage);
        }

        public static final Parcelable.Creator<SavedState> CREATOR = new Parcelable.Creator<SavedState>() {
            @Override
            public SavedState createFromParcel(Parcel in) {
                return new SavedState(in);
            }

            @Override
            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
    }
}
package ij.plugin;
import java.awt.*;
import java.awt.event.*;
import java.util.Vector;
import ij.*;
import ij.process.*;
import ij.gui.*;
import ij.util.Tools;
import ij.plugin.frame.Recorder;
import ij.measure.Calibration;

/** This plugin implements the Image/Duplicate command.
<pre>
   // test script
   img1 = IJ.getImage();
   img2 = new Duplicator().run(img1);
   //img2 = new Duplicator().run(img1,1,10);
   img2.show();
</pre>
*/
public class Duplicator implements PlugIn, TextListener {
	// Persisted across invocations: whether "Duplicate stack" was checked.
	private static boolean duplicateStack;
	// True when a stack range ("substack") can be offered in the dialog.
	private boolean duplicateSubstack;
	// Slice range chosen in the plain-stack dialog.
	private int first, last;
	private Checkbox checkbox;
	private TextField rangeField;
	private TextField[] rangeFields;
	// Channel/slice/frame ranges chosen in the hyperstack dialog.
	private int firstC, lastC, firstZ, lastZ, firstT, lastT;
	// True when invoked as a menu command (enables macro recording).
	private boolean isCommand;

	/** Runs the Image/Duplicate command on the current image, showing the
	    appropriate dialog and displaying the duplicate. */
	public void run(String arg) {
		isCommand = true;
		ImagePlus imp = IJ.getImage();
		int stackSize = imp.getStackSize();
		String title = imp.getTitle();
		String newTitle = WindowManager.getUniqueName(title);
		// Alt-click on a single image skips the dialog entirely.
		if (!IJ.altKeyDown()||stackSize>1) {
			if (imp.isHyperStack() || imp.isComposite()) {
				duplicateHyperstack(imp, newTitle);
				return;
			} else
				newTitle = showDialog(imp, "Duplicate...", "Title: ", newTitle);
		}
		if (newTitle==null)
			return;
		ImagePlus imp2;
		Roi roi = imp.getRoi();
		if (duplicateSubstack && (first>1||last<stackSize))
			imp2 = run(imp, first, last);
		else if (duplicateStack || imp.getStackSize()==1)
			imp2 = run(imp);
		else
			imp2 = duplicateImage(imp);
		// Shift the spatial origin so it stays anchored to the cropped area.
		Calibration cal = imp2.getCalibration();
		if (roi!=null && (cal.xOrigin!=0.0||cal.yOrigin!=0.0)) {
			cal.xOrigin -= roi.getBounds().x;
			cal.yOrigin -= roi.getBounds().y;
		}
		imp2.setTitle(newTitle);
		// Carry a non-rectangular selection over to the duplicate at (0,0).
		if (roi!=null && roi.isArea() && roi.getType()!=Roi.RECTANGLE) {
			Roi roi2 = (Roi)cropRoi(imp, roi).clone();
			roi2.setLocation(0, 0);
			imp2.setRoi(roi2);
		}
		imp2.show();
		if (stackSize>1 && imp2.getStackSize()==stackSize)
			imp2.setSlice(imp.getCurrentSlice());
	}

	/** Returns a copy of the image, stack or hyperstack contained in the specified ImagePlus.
	*/
	public ImagePlus run(ImagePlus imp) {
		if (Recorder.record&&isCommand)
			Recorder.recordCall("imp = new Duplicator().run(imp);");
		if (imp.getStackSize()==1)
			return duplicateImage(imp);
		Rectangle rect = null;
		Roi roi = imp.getRoi();
		Roi roi2 = cropRoi(imp, roi);
		if (roi2!=null && roi2.isArea())
			rect = roi2.getBounds();
		ImageStack stack = imp.getStack();
		ImageStack stack2 = null;
		int n = stack.getSize();
		// Crop every slice to the ROI bounds (rect==null copies the full slice).
		for (int i=1; i<=n; i++) {
			if (stack.isVirtual())
				IJ.showStatus("Duplicating: "+i+"/"+n);
			ImageProcessor ip2 = stack.getProcessor(i);
			ip2.setRoi(rect);
			ip2 = ip2.crop();
			if (stack2==null)
				stack2 = new ImageStack(ip2.getWidth(), ip2.getHeight(), imp.getProcessor().getColorModel());
			stack2.addSlice(stack.getSliceLabel(i), ip2);
		}
		ImagePlus imp2 = imp.createImagePlus();
		imp2.setStack("DUP_"+imp.getTitle(), stack2);
		String info = (String)imp.getProperty("Info");
		if (info!=null)
			imp2.setProperty("Info", info);
		int[] dim = imp.getDimensions();
		imp2.setDimensions(dim[2], dim[3], dim[4]);
		if (imp.isComposite()) {
			imp2 = new CompositeImage(imp2, 0);
			((CompositeImage)imp2).copyLuts(imp);
		}
		if (imp.isHyperStack())
			imp2.setOpenAsHyperStack(true);
		Overlay overlay = imp.getOverlay();
		if (overlay!=null && !imp.getHideOverlay())
			imp2.setOverlay(overlay.crop(rect));
		return imp2;
	}

	/** Duplicates only the current slice/processor (cropped to the ROI). */
	ImagePlus duplicateImage(ImagePlus imp) {
		ImageProcessor ip = imp.getProcessor();
		ImageProcessor ip2 = ip.crop();
		ImagePlus imp2 = imp.createImagePlus();
		imp2.setProcessor("DUP_"+imp.getTitle(), ip2);
		String info = (String)imp.getProperty("Info");
		if (info!=null)
			imp2.setProperty("Info", info);
		if (imp.getStackSize()>1) {
			ImageStack stack = imp.getStack();
			// A multi-line slice label is treated as per-slice metadata.
			String label = stack.getSliceLabel(imp.getCurrentSlice());
			if (label!=null && label.indexOf('\n')>0)
				imp2.setProperty("Info", label);
			if (imp.isComposite()) {
				LUT lut = ((CompositeImage)imp).getChannelLut();
				imp2.getProcessor().setColorModel(lut);
			}
		}
		Overlay overlay = imp.getOverlay();
		if (overlay!=null && !imp.getHideOverlay()) {
			Overlay overlay2 = overlay.crop(ip.getRoi());
			if (imp.getStackSize()>1)
				overlay2.crop(imp.getCurrentSlice(), imp.getCurrentSlice());
			imp2.setOverlay(overlay2);
		}
		return imp2;
	}

	/** Returns a new stack containing a subrange of the specified stack. */
	public ImagePlus run(ImagePlus imp, int firstSlice, int lastSlice) {
		Rectangle rect = null;
		Roi roi = imp.getRoi();
		if (roi!=null && roi.isArea())
			rect = roi.getBounds();
		ImageStack stack = imp.getStack();
		ImageStack stack2 = null;
		for (int i=firstSlice; i<=lastSlice; i++) {
			if (stack.isVirtual())
				IJ.showStatus("Duplicating: "+i+"/"+lastSlice);
			ImageProcessor ip2 = stack.getProcessor(i);
			ip2.setRoi(rect);
			ip2 = ip2.crop();
			if (stack2==null)
				stack2 = new ImageStack(ip2.getWidth(), ip2.getHeight(), imp.getProcessor().getColorModel());
			stack2.addSlice(stack.getSliceLabel(i), ip2);
		}
		ImagePlus imp2 = imp.createImagePlus();
		imp2.setStack("DUP_"+imp.getTitle(), stack2);
		String info = (String)imp.getProperty("Info");
		if (info!=null)
			imp2.setProperty("Info", info);
		int size = stack2.getSize();
		// Preserve the time-series vs z-stack interpretation of the subrange.
		boolean tseries = imp.getNFrames()==imp.getStackSize();
		if (tseries)
			imp2.setDimensions(1, 1, size);
		else
			imp2.setDimensions(1, size, 1);
		Overlay overlay = imp.getOverlay();
		if (overlay!=null && !imp.getHideOverlay()) {
			Overlay overlay2 = overlay.crop(rect);
			overlay2.crop(firstSlice, lastSlice);
			imp2.setOverlay(overlay2);
		}
		if (Recorder.record&&isCommand)
			Recorder.recordCall("imp = new Duplicator().run(imp, "+firstSlice+", "+lastSlice+");");
		return imp2;
	}

	/** Returns a new hyperstack containing a possibly reduced version of the input image.
	*/
	public ImagePlus run(ImagePlus imp, int firstC, int lastC, int firstZ, int lastZ, int firstT, int lastT) {
		Rectangle rect = null;
		Roi roi = imp.getRoi();
		Roi roi2 = cropRoi(imp, roi);
		if (roi2!=null && roi2.isArea())
			rect = roi2.getBounds();
		ImageStack stack = imp.getStack();
		ImageStack stack2 = null;
		// Copy the requested c/z/t box, slice by slice, cropped to the ROI.
		for (int t=firstT; t<=lastT; t++) {
			for (int z=firstZ; z<=lastZ; z++) {
				for (int c=firstC; c<=lastC; c++) {
					int n1 = imp.getStackIndex(c, z, t);
					ImageProcessor ip = stack.getProcessor(n1);
					String label = stack.getSliceLabel(n1);
					ip.setRoi(rect);
					ip = ip.crop();
					if (stack2==null)
						stack2 = new ImageStack(ip.getWidth(), ip.getHeight(), null);
					stack2.addSlice(label, ip);
				}
			}
		}
		ImagePlus imp2 = imp.createImagePlus();
		imp2.setStack("DUP_"+imp.getTitle(), stack2);
		imp2.setDimensions(lastC-firstC+1, lastZ-firstZ+1, lastT-firstT+1);
		if (imp.isComposite()) {
			int mode = ((CompositeImage)imp).getMode();
			if (lastC>firstC) {
				// Multiple channels kept: rebuild a composite with copied LUTs.
				imp2 = new CompositeImage(imp2, mode);
				int i2 = 1;
				for (int i=firstC; i<=lastC; i++) {
					LUT lut = ((CompositeImage)imp).getChannelLut(i);
					((CompositeImage)imp2).setChannelLut(lut, i2++);
				}
			} else if (firstC==lastC) {
				// Single channel kept: apply that channel's LUT directly.
				LUT lut = ((CompositeImage)imp).getChannelLut(firstC);
				imp2.getProcessor().setColorModel(lut);
				imp2.setDisplayRange(lut.min, lut.max);
			}
		}
		imp2.setOpenAsHyperStack(true);
		Calibration cal = imp2.getCalibration();
		if (roi!=null && (cal.xOrigin!=0.0||cal.yOrigin!=0.0)) {
			cal.xOrigin -= roi.getBounds().x;
			cal.yOrigin -= roi.getBounds().y;
		}
		Overlay overlay = imp.getOverlay();
		if (overlay!=null && !imp.getHideOverlay()) {
			Overlay overlay2 = overlay.crop(roi2!=null?roi2.getBounds():null);
			overlay2.crop(firstC, lastC, firstZ, lastZ, firstT, lastT);
			imp2.setOverlay(overlay2);
		}
		if (Recorder.record&&isCommand)
			Recorder.recordCall("imp = new Duplicator().run(imp, "+firstC+", "+lastC+", "+firstZ+", "+lastZ+", "+firstT+", "+lastT+");");
		return imp2;
	}

	/** Shows the plain-stack duplicate dialog; returns the chosen title, or
	    null if canceled. Also fills in duplicateStack/first/last. */
	String showDialog(ImagePlus imp, String title, String prompt, String defaultString) {
		int stackSize = imp.getStackSize();
		// A range can only be offered for a pure z-stack or pure time series.
		duplicateSubstack = stackSize>1 && (stackSize==imp.getNSlices()||stackSize==imp.getNFrames());
		GenericDialog gd = new GenericDialog(title);
		gd.addStringField(prompt, defaultString, duplicateSubstack?15:20);
		if (stackSize>1) {
			String msg = duplicateSubstack?"Duplicate stack":"Duplicate entire stack";
			gd.addCheckbox(msg, duplicateStack||imp.isComposite());
			if (duplicateSubstack) {
				gd.setInsets(2, 30, 3);
				gd.addStringField("Range:", "1-"+stackSize);
				Vector v = gd.getStringFields();
				rangeField = (TextField)v.elementAt(1);
				// Typing in the range field auto-checks "Duplicate stack".
				rangeField.addTextListener(this);
				checkbox = (Checkbox)(gd.getCheckboxes().elementAt(0));
			}
		} else
			duplicateStack = false;
		gd.showDialog();
		if (gd.wasCanceled())
			return null;
		title = gd.getNextString();
		if (stackSize>1) {
			duplicateStack = gd.getNextBoolean();
			if (duplicateStack && duplicateSubstack) {
				// Parse "a-b"; clamp to [1, stackSize] and repair inverted ranges.
				String[] range = Tools.split(gd.getNextString(), " -");
				double d1 = gd.parseDouble(range[0]);
				double d2 = range.length==2?gd.parseDouble(range[1]):Double.NaN;
				first = Double.isNaN(d1)?1:(int)d1;
				last = Double.isNaN(d2)?stackSize:(int)d2;
				if (first<1) first = 1;
				if (last>stackSize) last = stackSize;
				if (first>last) {first=1; last=stackSize;}
			} else {
				first = 1;
				last = stackSize;
			}
		}
		return title;
	}

	/** Duplicates a hyperstack (or composite) after showing the c/z/t dialog. */
	void duplicateHyperstack(ImagePlus imp, String newTitle) {
		newTitle = showHSDialog(imp, newTitle);
		if (newTitle==null)
			return;
		ImagePlus imp2 = null;
		Roi roi = imp.getRoi();
		if (!duplicateStack) {
			// Duplicate only the currently displayed position; in COMPOSITE
			// mode all displayed channels are kept.
			int nChannels = imp.getNChannels();
			boolean singleComposite = imp.isComposite() && nChannels==imp.getStackSize();
			if (!singleComposite && nChannels>1 && imp.isComposite() && ((CompositeImage)imp).getMode()==IJ.COMPOSITE) {
				firstC = 1;
				lastC = nChannels;
			} else
				firstC = lastC = imp.getChannel();
			firstZ = lastZ = imp.getSlice();
			firstT = lastT = imp.getFrame();
		}
		imp2 = run(imp, firstC, lastC, firstZ, lastZ, firstT, lastT);
		if (imp2==null) return;
		imp2.setTitle(newTitle);
		if (imp2.getWidth()==0 || imp2.getHeight()==0) {
			IJ.error("Duplicator", "Selection is outside the image");
			return;
		}
		if (roi!=null && roi.isArea() && roi.getType()!=Roi.RECTANGLE) {
			Roi roi2 = (Roi)cropRoi(imp, roi).clone();
			roi2.setLocation(0, 0);
			imp2.setRoi(roi2);
		}
		imp2.show();
		imp2.setPosition(imp.getC(), imp.getZ(), imp.getT());
		if (IJ.isMacro()&&imp2.getWindow()!=null)
			IJ.wait(50);
	}

	/** Shows the hyperstack duplicate dialog; returns the chosen title, or
	    null if canceled. Fills in the firstC..lastT ranges (clamped/repaired). */
	String showHSDialog(ImagePlus imp, String newTitle) {
		int nChannels = imp.getNChannels();
		int nSlices = imp.getNSlices();
		int nFrames = imp.getNFrames();
		boolean composite = imp.isComposite() && nChannels==imp.getStackSize();
		GenericDialog gd = new GenericDialog("Duplicate");
		gd.addStringField("Title:", newTitle, 15);
		gd.setInsets(12, 20, 8);
		gd.addCheckbox("Duplicate hyperstack", duplicateStack||composite);
		int nRangeFields = 0;
		if (nChannels>1) {
			gd.setInsets(2, 30, 3);
			gd.addStringField("Channels (c):", "1-"+nChannels);
			nRangeFields++;
		}
		if (nSlices>1) {
			gd.setInsets(2, 30, 3);
			gd.addStringField("Slices (z):", "1-"+nSlices);
			nRangeFields++;
		}
		if (nFrames>1) {
			gd.setInsets(2, 30, 3);
			gd.addStringField("Frames (t):", "1-"+nFrames);
			nRangeFields++;
		}
		// Editing any range field auto-checks "Duplicate hyperstack".
		Vector v = gd.getStringFields();
		rangeFields = new TextField[3];
		for (int i=0; i<nRangeFields; i++) {
			rangeFields[i] = (TextField)v.elementAt(i+1);
			rangeFields[i].addTextListener(this);
		}
		checkbox = (Checkbox)(gd.getCheckboxes().elementAt(0));
		gd.showDialog();
		if (gd.wasCanceled())
			return null;
		newTitle = gd.getNextString();
		duplicateStack = gd.getNextBoolean();
		if (nChannels>1) {
			String[] range = Tools.split(gd.getNextString(), " -");
			double c1 = gd.parseDouble(range[0]);
			double c2 = range.length==2?gd.parseDouble(range[1]):Double.NaN;
			firstC = Double.isNaN(c1)?1:(int)c1;
			lastC = Double.isNaN(c2)?firstC:(int)c2;
			if (firstC<1) firstC = 1;
			if (lastC>nChannels) lastC = nChannels;
			if (firstC>lastC) {firstC=1; lastC=nChannels;}
		} else
			firstC = lastC = 1;
		if (nSlices>1) {
			String[] range = Tools.split(gd.getNextString(), " -");
			double z1 = gd.parseDouble(range[0]);
			double z2 = range.length==2?gd.parseDouble(range[1]):Double.NaN;
			firstZ = Double.isNaN(z1)?1:(int)z1;
			lastZ = Double.isNaN(z2)?firstZ:(int)z2;
			if (firstZ<1) firstZ = 1;
			if (lastZ>nSlices) lastZ = nSlices;
			if (firstZ>lastZ) {firstZ=1; lastZ=nSlices;}
		} else
			firstZ = lastZ = 1;
		if (nFrames>1) {
			String[] range = Tools.split(gd.getNextString(), " -");
			double t1 = gd.parseDouble(range[0]);
			double t2 = range.length==2?gd.parseDouble(range[1]):Double.NaN;
			firstT= Double.isNaN(t1)?1:(int)t1;
			lastT = Double.isNaN(t2)?firstT:(int)t2;
			if (firstT<1) firstT = 1;
			if (lastT>nFrames) lastT = nFrames;
			if (firstT>lastT) {firstT=1; lastT=nFrames;}
		} else
			firstT = lastT = 1;
		return newTitle;
	}

	/*
	 * Returns the part of 'roi' overlapping 'imp'
	 * Author Marcel Boeglin 2013.12.15
	 */
	Roi cropRoi(ImagePlus imp, Roi roi) {
		if (roi==null)
			return null;
		if (imp==null)
			return roi;
		Rectangle b = roi.getBounds();
		int w = imp.getWidth();
		int h = imp.getHeight();
		// Intersect with the image bounds only when the ROI sticks out.
		if (b.x<0 || b.y<0 || b.x+b.width>w || b.y+b.height>h) {
			ShapeRoi shape1 = new ShapeRoi(roi);
			ShapeRoi shape2 = new ShapeRoi(new Roi(0, 0, w, h));
			roi = shape2.and(shape1);
		}
		if (roi.getBounds().width==0 || roi.getBounds().height==0)
			throw new IllegalArgumentException("Selection is outside the image");
		return roi;
	}

	public static Overlay cropOverlay(Overlay overlay, Rectangle bounds) {
		return overlay.crop(bounds);
	}

	/** Typing in any range field implies the user wants the stack duplicated. */
	public void textValueChanged(TextEvent e) {
		checkbox.setState(true);
	}
}
/*
 * Copyright 2022 Apollo Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.ctrip.framework.apollo.portal.util;

import com.ctrip.framework.apollo.common.dto.ClusterDTO;
import com.ctrip.framework.apollo.common.entity.App;
import com.ctrip.framework.apollo.common.entity.AppNamespace;
import com.ctrip.framework.apollo.common.exception.BadRequestException;
import com.ctrip.framework.apollo.core.enums.ConfigFileFormat;
import com.ctrip.framework.apollo.core.utils.StringUtils;
import com.ctrip.framework.apollo.portal.controller.ConfigsImportController;
import com.ctrip.framework.apollo.portal.environment.Env;
import com.google.common.base.Splitter;
import java.io.File;
import java.util.List;
import org.springframework.web.multipart.MultipartFile;

/**
 * Utilities for validating and parsing the "{appId}+{clusterName}+{namespace}.{format}"
 * file names used by the config import/export feature, and for generating the paths
 * inside an exported archive.
 *
 * First version: move from {@link ConfigsImportController#importConfigFile(java.lang.String, java.lang.String, java.lang.String, java.lang.String, org.springframework.web.multipart.MultipartFile)}
 * @author wxq
 */
public class ConfigFileUtils {

  public static final String APP_METADATA_FILENAME = "app.metadata";
  public static final String CLUSTER_METADATA_FILE_SUFFIX = ".cluster.metadata";
  public static final String APP_NAMESPACE_METADATA_FILE_SUFFIX = ".appnamespace.metadata";

  /** Reused splitter for breaking a file name into its dot-separated segments. */
  private static final Splitter DOT_SPLITTER = Splitter.on(".");

  /**
   * Validates an uploaded file: it must be non-empty and its name must be well formed.
   *
   * @throws BadRequestException if the file is empty or its name's format is invalid
   */
  public static void check(MultipartFile file) {
    checkEmpty(file);
    checkFormat(file.getOriginalFilename());
  }

  /**
   * @throws BadRequestException if file is empty
   */
  static void checkEmpty(MultipartFile file) {
    if (file.isEmpty()) {
      throw new BadRequestException("The file is empty. " + file.getOriginalFilename());
    }
  }

  /**
   * @throws BadRequestException if file's format is invalid
   */
  static void checkFormat(final String originalFilename) {
    final List<String> segments = DOT_SPLITTER.splitToList(originalFilename);
    // a valid name has at least a base name and an extension
    if (segments.size() <= 1) {
      throw new BadRequestException("The file format is invalid.");
    }
    // no segment may be blank, which rules out names such as "a..b" or ".properties"
    if (segments.stream().anyMatch(StringUtils::isEmpty)) {
      throw new BadRequestException("The file format is invalid.");
    }
  }

  /** Splits the file name on the '+' separator into its (up to) three parts. */
  static String[] getThreePart(final String originalFilename) {
    return originalFilename.split("[+]");
  }

  /**
   * @throws BadRequestException if file's name cannot divide to 3 parts by "+" symbol
   */
  static void checkThreePart(final String originalFilename) {
    if (getThreePart(originalFilename).length != 3) {
      throw new BadRequestException("file name [" + originalFilename + "] not valid");
    }
  }

  /**
   * Returns the extension after the last dot.
   * <pre>
   * "application+default+application.properties" -&gt; "properties"
   * "application+default+application.yml"        -&gt; "yml"
   * </pre>
   * @throws BadRequestException if file's format is invalid
   */
  public static String getFormat(final String originalFilename) {
    final List<String> segments = DOT_SPLITTER.splitToList(originalFilename);
    if (segments.size() <= 1) {
      throw new BadRequestException("The file format is invalid.");
    }
    return segments.get(segments.size() - 1);
  }

  /**
   * Extracts the appId (first '+'-separated part).
   * <pre>
   * "123+default+application.properties" -&gt; "123"
   * "abc+default+application.yml"        -&gt; "abc"
   * </pre>
   * @throws BadRequestException if file's name is invalid
   */
  public static String getAppId(final String originalFilename) {
    checkThreePart(originalFilename);
    final String[] parts = getThreePart(originalFilename);
    return parts[0];
  }

  /**
   * Extracts the cluster name (second '+'-separated part).
   * @throws BadRequestException if file's name is invalid
   */
  public static String getClusterName(final String originalFilename) {
    checkThreePart(originalFilename);
    final String[] parts = getThreePart(originalFilename);
    return parts[1];
  }

  /**
   * Extracts the namespace from the third '+'-separated part.
   * <pre>
   * "application+default+application.properties"  -&gt; "application"
   * "application+default+application.yml"         -&gt; "application.yml"
   * "application+default+application.333.yml"     -&gt; "application.333.yml"
   * </pre>
   * @throws BadRequestException if file's name is invalid
   */
  public static String getNamespace(final String originalFilename) {
    checkThreePart(originalFilename);
    final String lastPart = getThreePart(originalFilename)[2];
    final int dotIndex = lastPart.lastIndexOf('.');
    if (dotIndex < 0) {
      throw new BadRequestException(originalFilename + " namespace and format is invalid!");
    }
    final String namespace = lastPart.substring(0, dotIndex);
    // format is everything after the last '.'
    final String format = lastPart.substring(dotIndex + 1);
    if (!ConfigFileFormat.isValidFormat(format)) {
      throw new BadRequestException(originalFilename + " format is invalid!");
    }
    // ".properties" is the implicit default format, so it is not part of the namespace name;
    // every other format is kept as a suffix for compatibility
    if (ConfigFileFormat.fromString(format).equals(ConfigFileFormat.Properties)) {
      return namespace;
    }
    return namespace + "." + format;
  }

  /**
   * Builds the canonical file name for a namespace.
   * <pre>
   * appId  cluster  namespace    return
   * 666    default  application  666+default+application.properties
   * 123    none     action.yml   123+none+action.yml
   * </pre>
   */
  public static String toFilename(
      final String appId,
      final String clusterName,
      final String namespace,
      final ConfigFileFormat configFileFormat
  ) {
    final String suffix = ConfigFileFormat.Properties.equals(configFileFormat)
        ? "." + ConfigFileFormat.Properties.getValue()
        : "";
    return String.join("+", appId, clusterName, namespace) + suffix;
  }

  /**
   * file path = ownerName/appId/env/configFilename
   * @return file path in compressed file
   */
  public static String genNamespacePath(
      final String ownerName,
      final String appId,
      final Env env,
      final String configFilename
  ) {
    return String.join(File.separator, ownerName, appId, env.getName(), configFilename);
  }

  /**
   * path = ownerName/appId/app.metadata
   */
  public static String genAppInfoPath(App app) {
    return String.join(File.separator, app.getOwnerName(), app.getAppId(), APP_METADATA_FILENAME);
  }

  /**
   * path = {appId}+{appNamespace}.appnamespace.metadata (a single path element)
   */
  public static String genAppNamespaceInfoPath(AppNamespace appNamespace) {
    return appNamespace.getAppId() + "+" + appNamespace.getName() + APP_NAMESPACE_METADATA_FILE_SUFFIX;
  }

  /**
   * path = ownerName/appId/env/${clusterName}.cluster.metadata
   */
  public static String genClusterInfoPath(App app, Env env, ClusterDTO cluster) {
    return String.join(File.separator, app.getOwnerName(), app.getAppId(), env.getName(),
        cluster.getName() + CLUSTER_METADATA_FILE_SUFFIX);
  }
}
/*
 * Copyright 2015-2016 USEF Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package energy.usef.brp.workflow.settlement.send;

import static energy.usef.brp.service.business.BrpDefaultSettlementMessageContent.*;
import static energy.usef.core.data.xml.bean.message.MessagePrecedence.TRANSACTIONAL;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import javax.ejb.Asynchronous;
import javax.ejb.Lock;
import javax.ejb.LockType;
import javax.ejb.Singleton;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.enterprise.event.TransactionPhase;
import javax.inject.Inject;

import org.joda.time.LocalDate;
import org.joda.time.Months;
import org.joda.time.Period;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import energy.usef.brp.config.ConfigBrp;
import energy.usef.brp.config.ConfigBrpParam;
import energy.usef.core.config.Config;
import energy.usef.core.config.ConfigParam;
import energy.usef.core.constant.USEFConstants;
import energy.usef.core.data.xml.bean.message.FlexOrderSettlement;
import energy.usef.core.data.xml.bean.message.MessageMetadata;
import energy.usef.core.data.xml.bean.message.PTUSettlement;
import energy.usef.core.data.xml.bean.message.SettlementMessage;
import energy.usef.core.data.xml.bean.message.USEFRole;
import energy.usef.core.model.AgrConnectionGroup;
import energy.usef.core.model.DocumentStatus;
import energy.usef.core.model.DocumentType;
import energy.usef.core.service.business.CorePlanboardBusinessService;
import energy.usef.core.service.business.SequenceGeneratorService;
import energy.usef.core.service.helper.JMSHelperService;
import energy.usef.core.service.helper.MessageMetadataBuilder;
import energy.usef.core.util.XMLUtil;
import energy.usef.core.workflow.settlement.CoreSettlementBusinessService;
import energy.usef.core.workflow.transformer.SettlementTransformer;

/**
 * This coordinator class is in charge of the workflow sending Settlement messages to aggregators.
 */
@Singleton
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public class BrpSendSettlementMessagesCoordinator {

    private static final Logger LOGGER = LoggerFactory.getLogger(BrpSendSettlementMessagesCoordinator.class);

    @Inject
    private Config config;

    @Inject
    private ConfigBrp configBrp;

    @Inject
    private JMSHelperService jmsHelperService;

    @Inject
    private CorePlanboardBusinessService corePlanboardBusinessService;

    @Inject
    private CoreSettlementBusinessService coreSettlementBusinessService;

    @Inject
    private SequenceGeneratorService sequenceGeneratorService;

    @Inject
    private Event<SendSettlementMessageEvent> sendSettlementMessageEventManager;

    /**
     * Fires a {@link SendSettlementMessageEvent} for the month of the given event once settlement
     * initiation is done, provided the month has not already been processed and every flex order
     * is ready for settlement.
     *
     * @param event {@link CheckInitiateSettlementDoneEvent} observed after transaction completion.
     */
    @Asynchronous
    @Lock(LockType.WRITE)
    public void isReadyToSendSettlementMessage(
            @Observes(during = TransactionPhase.AFTER_COMPLETION) CheckInitiateSettlementDoneEvent event) {
        LOGGER.debug(USEFConstants.LOG_COORDINATOR_START_HANDLING_EVENT, event);
        LocalDate period = new LocalDate(event.getYear(), event.getMonth(), 1);
        // The workflow must run at most once per month: skip if FLEX_ORDER_SETTLEMENT planboard
        // messages already exist anywhere in [first day, last day] of the month.
        boolean isNotProcessed = corePlanboardBusinessService
                .findPlanboardMessages(DocumentType.FLEX_ORDER_SETTLEMENT, period, period.plusMonths(1).minusDays(1), null)
                .isEmpty();
        if (isNotProcessed && coreSettlementBusinessService.isEachFlexOrderReadyForSettlement(event.getYear(), event.getMonth())) {
            sendSettlementMessageEventManager.fire(new SendSettlementMessageEvent(event.getYear(), event.getMonth()));
        }
        LOGGER.debug(USEFConstants.LOG_COORDINATOR_FINISHED_HANDLING_EVENT, event);
    }

    /**
     * This method starts the workflow when triggered by an event.
     *
     * @param event {@link SendSettlementMessageEvent} event which starts the workflow.
     */
    public void invokeWorkflow(@Observes SendSettlementMessageEvent event) {
        LOGGER.debug(USEFConstants.LOG_COORDINATOR_START_HANDLING_EVENT, event);
        LocalDate dateFrom = new LocalDate(event.getYear(), event.getMonth(), 1);
        LocalDate dateUntil = dateFrom.plus(Months.ONE).minusDays(1);
        LOGGER.debug("SendSettlementMessageEvent for {} until {}.", dateFrom, dateUntil);

        // Fetch all aggregators having active connections in the period defined by [dateFrom, dateUntil].
        List<String> aggregators = corePlanboardBusinessService
                .findConnectionGroupWithConnectionsWithOverlappingValidity(dateFrom, dateUntil)
                .values().stream().flatMap(map -> map.keySet().stream())
                .map(connectionGroup -> ((AgrConnectionGroup) connectionGroup).getAggregatorDomain())
                .distinct()
                .collect(Collectors.toList());

        // Fetch all FlexOrderSettlement for the period, keyed by aggregator (participant) domain.
        Map<String, List<energy.usef.core.model.FlexOrderSettlement>> flexOrderSettlementPerAggregator = coreSettlementBusinessService
                .findFlexOrderSettlementsForPeriod(dateFrom, dateUntil, Optional.empty(), Optional.empty()).stream()
                .collect(Collectors.groupingBy(flexOrderSettlement -> flexOrderSettlement.getFlexOrder().getParticipantDomain()));

        if (aggregators.isEmpty()) {
            LOGGER.error("SendSettlementMessageEvent triggered while there are no aggregators eligible for settlement.");
            return;
        }
        for (String aggregator : aggregators) {
            // May be null when the aggregator has no settlements this period; a default
            // settlement message is built in that case (see buildSettlementMessage).
            List<energy.usef.core.model.FlexOrderSettlement> flexOrderSettlements =
                    flexOrderSettlementPerAggregator.get(aggregator);
            SettlementMessage settlementMessage = buildSettlementMessage(flexOrderSettlements, dateFrom);
            populateSettlementMessageData(settlementMessage, aggregator, dateFrom, dateUntil);
            storeSettlementMessage(aggregator, flexOrderSettlements);
            jmsHelperService.sendMessageToOutQueue(XMLUtil.messageObjectToXml(settlementMessage));
        }
        LOGGER.debug(USEFConstants.LOG_COORDINATOR_FINISHED_HANDLING_EVENT, event);
    }

    /**
     * Builds the settlement message for one aggregator; falls back to a default message when the
     * aggregator has no flex order settlements for the period.
     */
    private SettlementMessage buildSettlementMessage(List<energy.usef.core.model.FlexOrderSettlement> flexOrderSettlements,
            LocalDate dateFrom) {
        if (flexOrderSettlements == null || flexOrderSettlements.isEmpty()) {
            return buildDefaultSettlementMessage(dateFrom);
        }
        SettlementMessage settlementMessage = new SettlementMessage();
        for (energy.usef.core.model.FlexOrderSettlement flexOrderSettlement : flexOrderSettlements) {
            settlementMessage.getFlexOrderSettlement().add(SettlementTransformer.transformToXml(flexOrderSettlement));
        }
        return settlementMessage;
    }

    /**
     * Fills in message metadata (BRP -> AGR, transactional precedence) and the period/currency/PTU
     * properties taken from configuration.
     */
    private void populateSettlementMessageData(SettlementMessage settlementMessage, String aggregatorDomain, LocalDate dateFrom,
            LocalDate dateUntil) {
        MessageMetadata messageMetadata = new MessageMetadataBuilder().conversationID().messageID().timeStamp()
                .senderDomain(config.getProperty(ConfigParam.HOST_DOMAIN)).senderRole(USEFRole.BRP)
                .recipientDomain(aggregatorDomain).recipientRole(USEFRole.AGR)
                .precedence(TRANSACTIONAL).build();
        settlementMessage.setMessageMetadata(messageMetadata);
        settlementMessage.setCurrency(config.getProperty(ConfigParam.CURRENCY));
        settlementMessage.setPeriodStart(dateFrom);
        settlementMessage.setPeriodEnd(dateUntil);
        settlementMessage.setPTUDuration(Period.minutes(config.getIntegerProperty(ConfigParam.PTU_DURATION)));
        settlementMessage.setTimeZone(config.getProperty(ConfigParam.TIME_ZONE));
        settlementMessage.setReference(config.getProperty(ConfigParam.HOST_DOMAIN) + sequenceGeneratorService.next());
    }

    /**
     * Stores a SENT planboard message per flex order settlement, with the configured response
     * waiting duration.
     */
    private void storeSettlementMessage(String participantDomain,
            List<energy.usef.core.model.FlexOrderSettlement> flexOrderSettlements) {
        corePlanboardBusinessService.storeFlexOrderSettlementsPlanboardMessage(flexOrderSettlements,
                configBrp.getIntegerProperty(ConfigBrpParam.BRP_SETTLEMENT_RESPONSE_WAITING_DURATION), DocumentStatus.SENT,
                participantDomain, null);
    }

    /**
     * Builds the default (empty-period) settlement message from the configured default content
     * constants, so an aggregator without settlements still receives a well-formed message.
     */
    private SettlementMessage buildDefaultSettlementMessage(LocalDate dateFrom) {
        SettlementMessage settlementMessage = new SettlementMessage();
        FlexOrderSettlement defaultFlexOrderSettlement = new FlexOrderSettlement();
        defaultFlexOrderSettlement.setPeriod(dateFrom);
        defaultFlexOrderSettlement.setOrderReference(ORDER_SETTLEMENT_ORDER_REFERENCE.getValue());
        settlementMessage.getFlexOrderSettlement().add(defaultFlexOrderSettlement);
        PTUSettlement defaultPtuSettlement = new PTUSettlement();
        defaultPtuSettlement.setActualPower(new BigInteger(PTU_SETTLEMENT_ACTUAL_POWER.getValue()));
        defaultPtuSettlement.setDeliveredFlexPower(new BigInteger(PTU_SETTLEMENT_DELIVERED_FLEX_POWER.getValue()));
        defaultPtuSettlement.setNetSettlement(new BigDecimal(PTU_SETTLEMENT_NET_SETTLEMENT.getValue()));
        defaultPtuSettlement.setOrderedFlexPower(new BigInteger(PTU_SETTLEMENT_ORDERED_FLEX_POWER.getValue()));
        defaultPtuSettlement.setPrognosisPower(new BigInteger(PTU_SETTLEMENT_PROGNOSIS_POWER.getValue()));
        defaultPtuSettlement.setPrice(new BigDecimal(PTU_SETTLEMENT_PRICE.getValue()));
        defaultPtuSettlement.setStart(new BigInteger(PTU_SETTLEMENT_START.getValue()));
        defaultFlexOrderSettlement.getPTUSettlement().add(defaultPtuSettlement);
        return settlementMessage;
    }
}
package ca.uhn.fhir.jpa.dao;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneSearchBuilder;
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneIndexExtractor;
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneClauseBuilder;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.ExtendedLuceneIndexData;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Fulltext search service backed by Hibernate Search (Lucene).
 *
 * Handles the _content and _text search parameters and, when advanced Lucene
 * indexing is enabled in {@link DaoConfig}, additional supported search
 * parameters via {@link ExtendedLuceneSearchBuilder}.
 */
public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FulltextSearchSvcImpl.class);

	@Autowired
	protected IForcedIdDao myForcedIdDao;

	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	private EntityManager myEntityManager;

	@Autowired
	private PlatformTransactionManager myTxManager;

	@Autowired
	private FhirContext myFhirContext;

	@Autowired
	private ISearchParamRegistry mySearchParamRegistry;

	@Autowired
	private DaoConfig myDaoConfig;

	// Builds query clauses for the "advanced" indexed search parameters.
	private ExtendedLuceneSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedLuceneSearchBuilder();

	// Lazily-computed probe result: TRUE when Hibernate Search is disabled on this
	// server, null until the first isDisabled() call. NOTE(review): despite the
	// "our" prefix this is an instance field, not static.
	private Boolean ourDisabled;

	/**
	 * Constructor
	 */
	public FulltextSearchSvcImpl() {
		super();
	}

	/**
	 * Extracts the Lucene index data for a resource from its already-extracted
	 * search parameter values, using the active search parameters for the
	 * resource's type.
	 */
	public ExtendedLuceneIndexData extractLuceneIndexData(IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
		String resourceType = myFhirContext.getResourceType(theResource);
		Map<String, RuntimeSearchParam> activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType);
		ExtendedLuceneIndexExtractor extractor = new ExtendedLuceneIndexExtractor(myFhirContext, activeSearchParams);
		return extractor.extract(theNewParams);
	}

	/**
	 * Returns true when at least part of the given search can be satisfied by
	 * this fulltext service (_content, _text, _lastn, or — with advanced Lucene
	 * indexing enabled — other supported parameters).
	 */
	@Override
	public boolean supportsSomeOf(SearchParameterMap myParams) {
		// keep this in sync with the guts of doSearch
		boolean requiresHibernateSearchAccess = myParams.containsKey(Constants.PARAM_CONTENT) || myParams.containsKey(Constants.PARAM_TEXT) || myParams.isLastN();
		requiresHibernateSearchAccess |= myDaoConfig.isAdvancedLuceneIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams);
		return requiresHibernateSearchAccess;
	}

	/**
	 * Runs the Lucene search and returns the matching resource PIDs.
	 *
	 * Side effect: consumes (removes) the _content and _text entries from
	 * theParams so the downstream JPA search does not process them again.
	 *
	 * @param theResourceType restricts matches to one resource type when not blank
	 * @param theParams the (mutated) search parameter map
	 * @param theReferencingPid when non-null, restricts matches to resources linking to this PID
	 */
	private List<ResourcePersistentId> doSearch(String theResourceType, SearchParameterMap theParams, ResourcePersistentId theReferencingPid) {
		// keep this in sync with supportsSomeOf();
		SearchSession session = Search.session(myEntityManager);
		List<Long> longPids = session.search(ResourceTable.class)
			// Selects are replacements for projection and convert more cleanly than the old implementation.
			.select(
				f -> f.field("myId", Long.class)
			)
			.where(
				f -> f.bool(b -> {
					ExtendedLuceneClauseBuilder builder = new ExtendedLuceneClauseBuilder(myFhirContext, b, f);

					/*
					 * Handle _content parameter (resource body content)
					 *
					 * Posterity:
					 * We do not want the HAPI-FHIR dao's to process the
					 * _content parameter, so we remove it from the map here
					 */
					List<List<IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
					builder.addStringTextSearch(Constants.PARAM_CONTENT, contentAndTerms);
					/*
					 * Handle _text parameter (resource narrative content)
					 *
					 * Posterity:
					 * We do not want the HAPI-FHIR dao's to process the
					 * _text parameter, so we remove it from the map here
					 */
					List<List<IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
					builder.addStringTextSearch(Constants.PARAM_TEXT, textAndTerms);

					if (theReferencingPid != null) {
						b.must(f.match().field("myResourceLinksField").matching(theReferencingPid.toString()));
					}

					if (isNotBlank(theResourceType)) {
						b.must(f.match().field("myResourceType").matching(theResourceType));
					}

					/*
					 * Handle other supported parameters
					 */
					if (myDaoConfig.isAdvancedLuceneIndexing()) {
						myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(builder, theResourceType, theParams, mySearchParamRegistry);
					}
					// TODO(review): DROP EARLY HERE IF BOOL IS EMPTY?
				})
			).fetchAllHits();
		return convertLongsToResourcePersistentIds(longPids);
	}

	// Wraps raw Long PIDs from the index into ResourcePersistentId objects.
	private List<ResourcePersistentId> convertLongsToResourcePersistentIds(List<Long> theLongPids) {
		return theLongPids.stream()
			.map(ResourcePersistentId::new)
			.collect(Collectors.toList());
	}

	/**
	 * $everything-style search: returns all matching PIDs, plus the referencing
	 * PID itself when one is resolved. NOTE(review): the forced-id translation is
	 * commented out below, so pid currently always stays null here.
	 */
	@Override
	public List<ResourcePersistentId> everything(String theResourceName, SearchParameterMap theParams, RequestDetails theRequest) {
		ResourcePersistentId pid = null;
		if (theParams.get(IAnyResource.SP_RES_ID) != null) {
			String idParamValue;
			IQueryParameterType idParam = theParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
			// _id may arrive as either a token or a string parameter
			if (idParam instanceof TokenParam) {
				TokenParam idParm = (TokenParam) idParam;
				idParamValue = idParm.getValue();
			} else {
				StringParam idParm = (StringParam) idParam;
				idParamValue = idParm.getValue();
			}
			// pid = myIdHelperService.translateForcedIdToPid_(theResourceName, idParamValue, theRequest);
		}

		ResourcePersistentId referencingPid = pid;
		List<ResourcePersistentId> retVal = doSearch(null, theParams, referencingPid);
		if (referencingPid != null) {
			retVal.add(referencingPid);
		}
		return retVal;
	}

	/**
	 * Probes Hibernate Search once and caches the result: returns true when
	 * fulltext indexing (Lucene) is unavailable on this server.
	 */
	@Override
	public boolean isDisabled() {
		Boolean retVal = ourDisabled;

		if (retVal == null) {
			// Probe inside a transaction; any failure is interpreted as "disabled".
			retVal = new TransactionTemplate(myTxManager).execute(t -> {
				try {
					SearchSession searchSession = Search.session(myEntityManager);
					searchSession.search(ResourceTable.class);
					return Boolean.FALSE;
				} catch (Exception e) {
					ourLog.trace("FullText test failed", e);
					ourLog.debug("Hibernate Search (Lucene) appears to be disabled on this server, fulltext will be disabled");
					return Boolean.TRUE;
				}
			});
			ourDisabled = retVal;
		}

		assert retVal != null;
		return retVal;
	}

	/**
	 * Transactional entry point for a plain fulltext search (no referencing PID).
	 */
	@Transactional()
	@Override
	public List<ResourcePersistentId> search(String theResourceName, SearchParameterMap theParams) {
		return doSearch(theResourceName, theParams, null);
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.security.authorize;

import java.io.IOException;
import java.net.InetAddress;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.MachineList;

import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An authorization manager which handles service-level authorization
 * for incoming service requests.
 */
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public class ServiceAuthorizationManager {
  static final String BLOCKED = ".blocked";
  static final String HOSTS = ".hosts";

  private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml";

  // For each class, first ACL in the array specifies the allowed entries
  // and second ACL specifies blocked entries.
  private volatile Map<Class<?>, AccessControlList[]> protocolToAcls =
      new IdentityHashMap<Class<?>, AccessControlList[]>();
  // For each class, first MachineList in the array specifies the allowed entries
  // and second MachineList specifies blocked entries.
  private volatile Map<Class<?>, MachineList[]> protocolToMachineLists =
      new IdentityHashMap<Class<?>, MachineList[]>();

  /**
   * Configuration key for controlling service-level authorization for Hadoop.
   *
   * @deprecated Use
   *             {@link CommonConfigurationKeys#HADOOP_SECURITY_AUTHORIZATION}
   *             instead.
   */
  @Deprecated
  public static final String SERVICE_AUTHORIZATION_CONFIG =
    "hadoop.security.authorization";

  public static final Logger AUDITLOG = LoggerFactory.getLogger(
      "SecurityLogger." + ServiceAuthorizationManager.class.getName());

  private static final String AUTHZ_SUCCESSFUL_FOR = "Authorization successful for ";
  private static final String AUTHZ_FAILED_FOR = "Authorization failed for ";

  /**
   * Authorize the user to access the protocol being used.
   *
   * @param user user accessing the service
   * @param protocol service being accessed
   * @param conf configuration to use
   * @param addr InetAddress of the client
   * @throws AuthorizationException on authorization failure
   */
  public void authorize(UserGroupInformation user,
                        Class<?> protocol,
                        Configuration conf,
                        InetAddress addr
                        ) throws AuthorizationException {
    AccessControlList[] acls = protocolToAcls.get(protocol);
    MachineList[] hosts = protocolToMachineLists.get(protocol);
    if (acls == null || hosts == null) {
      throw new AuthorizationException("Protocol " + protocol +
                                       " is not known.");
    }

    // get client principal key to verify (if available)
    String clientPrincipal = null;
    if (UserGroupInformation.isSecurityEnabled()) {
      KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
      if (krbInfo != null) {
        String clientKey = krbInfo.clientPrincipal();
        if (clientKey != null && !clientKey.isEmpty()) {
          try {
            clientPrincipal = SecurityUtil.getServerPrincipal(
                conf.get(clientKey), addr);
          } catch (IOException e) {
            // wrap with the authorization context; the original IOException is
            // preserved as the cause
            throw (AuthorizationException) new AuthorizationException(
                "Can't figure out Kerberos principal name for connection from "
                    + addr + " for user=" + user + " protocol=" + protocol)
                .initCause(e);
          }
        }
      }
    }
    // Deny when the Kerberos principal does not match the user, or the ACL pair
    // is malformed, or the user is not in the allow ACL, or is in the block ACL.
    if((clientPrincipal != null && !clientPrincipal.equals(user.getUserName())) ||
        acls.length != 2 || !acls[0].isUserAllowed(user) ||
        acls[1].isUserAllowed(user)) {
      String cause = clientPrincipal != null ?
          ": this service is only accessible by " + clientPrincipal :
          ": denied by configured ACL";
      AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol=" + protocol
          + cause);
      throw new AuthorizationException("User " + user +
          " is not authorized for protocol " + protocol + cause);
    }
    if (addr != null) {
      String hostAddress = addr.getHostAddress();
      if (hosts.length != 2 || !hosts[0].includes(hostAddress) ||
          hosts[1].includes(hostAddress)) {
        // FIX: include the user in the audit message; previously it rendered
        // as "Authorization failed for  for protocol=..." with no subject.
        AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol=" + protocol
            + " from host = " + hostAddress);
        throw new AuthorizationException("Host " + hostAddress +
            " is not authorized for protocol " + protocol);
      }
    }
    AUDITLOG.info(AUTHZ_SUCCESSFUL_FOR + user + " for protocol="+protocol);
  }

  /**
   * Reloads the authorization policy by layering the policy file (from the
   * 'hadoop.policy.file' system property, default hadoop-policy.xml) on top of
   * the given configuration.
   */
  public void refresh(Configuration conf, PolicyProvider provider) {
    // Get the system property 'hadoop.policy.file'
    String policyFile =
        System.getProperty("hadoop.policy.file", HADOOP_POLICY_FILE);

    // Make a copy of the original config, and load the policy file
    Configuration policyConf = new Configuration(conf);
    policyConf.addResource(policyFile);
    refreshWithLoadedConfiguration(policyConf, provider);
  }

  /**
   * Rebuilds the per-protocol allow/block ACLs and host lists from an
   * already-loaded configuration, then atomically swaps them in (the maps are
   * volatile, so in-flight authorize() calls see either the old or new state).
   */
  @Private
  public void refreshWithLoadedConfiguration(Configuration conf,
      PolicyProvider provider) {
    final Map<Class<?>, AccessControlList[]> newAcls =
        new IdentityHashMap<Class<?>, AccessControlList[]>();
    final Map<Class<?>, MachineList[]> newMachineLists =
        new IdentityHashMap<Class<?>, MachineList[]>();

    String defaultAcl = conf.get(
        CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_ACL,
        AccessControlList.WILDCARD_ACL_VALUE);

    String defaultBlockedAcl = conf.get(
        CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_BLOCKED_ACL,
        "");

    String defaultServiceHostsKey = getHostKey(
        CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_ACL);
    String defaultMachineList = conf.get(defaultServiceHostsKey,
        MachineList.WILDCARD_VALUE);
    String defaultBlockedMachineList = conf.get(
        defaultServiceHostsKey + BLOCKED, "");

    // Parse the config file
    Service[] services = provider.getServices();
    if (services != null) {
      for (Service service : services) {
        AccessControlList acl =
            new AccessControlList(
                conf.get(service.getServiceKey(),
                    defaultAcl)
            );
        AccessControlList blockedAcl =
            new AccessControlList(
                conf.get(service.getServiceKey() + BLOCKED,
                    defaultBlockedAcl));
        newAcls.put(service.getProtocol(),
            new AccessControlList[] {acl, blockedAcl});
        String serviceHostsKey = getHostKey(service.getServiceKey());
        MachineList machineList = new MachineList(
            conf.get(serviceHostsKey, defaultMachineList));
        MachineList blockedMachineList = new MachineList(
            conf.get(serviceHostsKey + BLOCKED, defaultBlockedMachineList));
        newMachineLists.put(service.getProtocol(),
            new MachineList[] {machineList, blockedMachineList});
      }
    }

    // Flip to the newly parsed permissions
    protocolToAcls = newAcls;
    protocolToMachineLists = newMachineLists;
  }

  /**
   * Derives the ".hosts" configuration key from a service ACL key by replacing
   * the last dotted segment; returns the key unchanged when it has no dot.
   */
  private String getHostKey(String serviceKey) {
    int endIndex = serviceKey.lastIndexOf(".");
    if (endIndex != -1) {
      return serviceKey.substring(0, endIndex) + HOSTS;
    }
    return serviceKey;
  }

  @VisibleForTesting
  public Set<Class<?>> getProtocolsWithAcls() {
    return protocolToAcls.keySet();
  }

  @VisibleForTesting
  public AccessControlList getProtocolsAcls(Class<?> className) {
    return protocolToAcls.get(className)[0];
  }

  @VisibleForTesting
  public AccessControlList getProtocolsBlockedAcls(Class<?> className) {
    return protocolToAcls.get(className)[1];
  }

  @VisibleForTesting
  public Set<Class<?>> getProtocolsWithMachineLists() {
    return protocolToMachineLists.keySet();
  }

  @VisibleForTesting
  public MachineList getProtocolsMachineList(Class<?> className) {
    return protocolToMachineLists.get(className)[0];
  }

  @VisibleForTesting
  public MachineList getProtocolsBlockedMachineList(Class<?> className) {
    return protocolToMachineLists.get(className)[1];
  }
}
package org.hisp.dhis.analytics.data; /* * Copyright (c) 2004-2017, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hisp.dhis.analytics.AggregationType; import org.hisp.dhis.analytics.DataQueryGroups; import org.hisp.dhis.analytics.DataQueryParams; import org.hisp.dhis.analytics.DataType; import org.hisp.dhis.analytics.OutputFormat; import org.hisp.dhis.analytics.Partitions; import org.hisp.dhis.analytics.QueryPlanner; import org.hisp.dhis.analytics.QueryPlannerParams; import org.hisp.dhis.analytics.partition.PartitionManager; import org.hisp.dhis.analytics.table.PartitionUtils; import org.hisp.dhis.common.BaseDimensionalObject; import org.hisp.dhis.common.DimensionType; import org.hisp.dhis.common.DimensionalItemObject; import org.hisp.dhis.common.DimensionalObject; import org.hisp.dhis.common.IllegalQueryException; import org.hisp.dhis.common.ListMap; import org.hisp.dhis.common.MaintenanceModeException; import org.hisp.dhis.commons.collection.PaginatedList; import org.hisp.dhis.commons.filter.FilterUtils; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementGroup; import org.hisp.dhis.period.Period; import org.hisp.dhis.period.PeriodType; import org.hisp.dhis.program.ProgramDataElementDimensionItem; import org.hisp.dhis.setting.SettingKey; import org.hisp.dhis.setting.SystemSettingManager; import org.hisp.dhis.system.filter.AggregatableDataElementFilter; import org.hisp.dhis.system.util.MathUtils; import org.hisp.dhis.util.ObjectUtils; import org.springframework.beans.factory.annotation.Autowired; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.function.Function; import static org.hisp.dhis.analytics.AggregationType.SUM; import static org.hisp.dhis.analytics.DataQueryParams.LEVEL_PREFIX; import static org.hisp.dhis.analytics.DataQueryParams.COMPLETENESS_DIMENSION_TYPES; import static 
org.hisp.dhis.common.DimensionalObject.*;
import static org.hisp.dhis.common.IdentifiableObjectUtils.getUids;
import static org.hisp.dhis.common.DimensionalObjectUtils.asTypedList;

/**
 * Default {@link QueryPlanner} implementation. Validates analytics data
 * queries and plans them into groups of sub-queries which can be executed
 * together against the analytics tables, splitting on partition, period type,
 * organisation unit level, data type, aggregation type and more.
 *
 * @author Lars Helge Overland
 */
public class DefaultQueryPlanner
    implements QueryPlanner
{
    private static final Log log = LogFactory.getLog( DefaultQueryPlanner.class );

    @Autowired
    private PartitionManager partitionManager;

    @Autowired
    private SystemSettingManager systemSettingManager;

    // -------------------------------------------------------------------------
    // DefaultQueryPlanner implementation
    // -------------------------------------------------------------------------

    /**
     * Validates the given query.
     * <p>
     * Note that the checks below overwrite {@code violation} in sequence, so
     * only the last failed check is reported to the caller.
     *
     * @param params the data query parameters.
     * @throws IllegalQueryException if the query is invalid.
     */
    @Override
    public void validate( DataQueryParams params )
        throws IllegalQueryException
    {
        String violation = null;

        if ( params == null )
        {
            throw new IllegalQueryException( "Params cannot be null" );
        }

        // Collect data elements, including those referenced by program data elements

        final List<DimensionalItemObject> dataElements = Lists.newArrayList( params.getDataElements() );
        params.getProgramDataElements().forEach( pde -> dataElements.add( ((ProgramDataElementDimensionItem) pde).getDataElement() ) );

        // Data elements whose value/aggregation type does not allow aggregation

        final List<DataElement> nonAggDataElements = FilterUtils.inverseFilter(
            asTypedList( dataElements ), AggregatableDataElementFilter.INSTANCE );

        if ( params.getDimensions().isEmpty() )
        {
            violation = "At least one dimension must be specified";
        }

        if ( !params.getDimensionsAsFilters().isEmpty() )
        {
            violation = "Dimensions cannot be specified as dimension and filter simultaneously: " + params.getDimensionsAsFilters();
        }

        if ( !params.hasPeriods() && !params.isSkipPartitioning() && !params.hasStartEndDate() )
        {
            violation = "At least one period must be specified as dimension or filter";
        }

        if ( params.hasPeriods() && params.hasStartEndDate() )
        {
            violation = "Periods and start and end dates cannot be specified simultaneously";
        }

        if ( !params.getFilterIndicators().isEmpty() && params.getFilterOptions( DATA_X_DIM_ID ).size() > 1 )
        {
            violation = "Only a single indicator can be specified as filter";
        }

        if ( !params.getFilterReportingRates().isEmpty() && params.getFilterOptions( DATA_X_DIM_ID ).size() > 1 )
        {
            violation = "Only a single reporting rate can be specified as filter";
        }

        if ( params.getFilters().contains( new BaseDimensionalObject( CATEGORYOPTIONCOMBO_DIM_ID ) ) )
        {
            violation = "Category option combos cannot be specified as filter";
        }

        if ( !params.getDuplicateDimensions().isEmpty() )
        {
            violation = "Dimensions cannot be specified more than once: " + params.getDuplicateDimensions();
        }

        if ( !params.getAllReportingRates().isEmpty() && !params.containsOnlyDimensionsAndFilters( COMPLETENESS_DIMENSION_TYPES ) )
        {
            violation = "Reporting rates can only be specified together with dimensions of type: " + COMPLETENESS_DIMENSION_TYPES;
        }

        if ( params.hasDimensionOrFilter( CATEGORYOPTIONCOMBO_DIM_ID ) && params.getAllDataElements().isEmpty() )
        {
            violation = "Assigned categories cannot be specified when data elements are not specified";
        }

        if ( params.hasDimensionOrFilter( CATEGORYOPTIONCOMBO_DIM_ID ) &&
            ( params.getAllDataElements().size() != params.getAllDataDimensionItems().size() ) )
        {
            violation = "Assigned categories can only be specified together with data elements, not indicators or reporting rates";
        }

        if ( !nonAggDataElements.isEmpty() )
        {
            violation = "Data elements must be of a value and aggregation type that allow aggregation: " + getUids( nonAggDataElements );
        }

        if ( params.isOutputFormat( OutputFormat.DATA_VALUE_SET ) )
        {
            // Data value set output requires explicit dx, pe and ou dimensions

            if ( !params.hasDimension( DATA_X_DIM_ID ) )
            {
                violation = "A data dimension 'dx' must be specified when output format is DATA_VALUE_SET";
            }

            if ( !params.hasDimension( PERIOD_DIM_ID ) )
            {
                violation = "A period dimension 'pe' must be specified when output format is DATA_VALUE_SET";
            }

            if ( !params.hasDimension( ORGUNIT_DIM_ID ) )
            {
                violation = "An organisation unit dimension 'ou' must be specified when output format is DATA_VALUE_SET";
            }
        }

        if ( violation != null )
        {
            log.warn( String.format( "Analytics validation failed: %s", violation ) );

            throw new IllegalQueryException( violation );
        }
    }

    /**
     * Validates that all requested table layout columns and rows are present
     * as dimensions in the query.
     *
     * @param params  the data query parameters.
     * @param columns dimension identifiers to use as columns, or null.
     * @param rows    dimension identifiers to use as rows, or null.
     * @throws IllegalQueryException if a column or row is not a query dimension.
     */
    @Override
    public void validateTableLayout( DataQueryParams params, List<String> columns, List<String> rows )
    {
        String violation = null;

        if ( columns != null )
        {
            for ( String column : columns )
            {
                if ( !params.hasDimension( column ) )
                {
                    violation = "Column must be present as dimension in query: " + column;
                }
            }
        }

        if ( rows != null )
        {
            for ( String row : rows )
            {
                if ( !params.hasDimension( row ) )
                {
                    violation = "Row must be present as dimension in query: " + row;
                }
            }
        }

        if ( violation != null )
        {
            log.warn( String.format( "Validation failed: %s", violation ) );

            throw new IllegalQueryException( violation );
        }
    }

    /**
     * Throws an exception if the analytics engine is in maintenance mode.
     * <p>
     * Uses a null-safe comparison: the previous implementation cast the
     * setting to {@code Boolean} and unboxed it, which threw a
     * NullPointerException when the setting was absent. An absent setting is
     * now treated as "not in maintenance mode".
     *
     * @throws MaintenanceModeException if maintenance mode is enabled.
     */
    @Override
    public void validateMaintenanceMode()
        throws MaintenanceModeException
    {
        boolean maintenance = Boolean.TRUE.equals(
            systemSettingManager.getSystemSetting( SettingKey.ANALYTICS_MAINTENANCE_MODE ) );

        if ( maintenance )
        {
            throw new MaintenanceModeException( "Analytics engine is in maintenance mode, try again later" );
        }
    }

    /**
     * Plans the given query: validates it, groups it into sub-queries which
     * can be executed together, then splits on the data and organisation unit
     * dimensions until the optimal number of queries is reached.
     *
     * @param params        the data query parameters.
     * @param plannerParams the query planner parameters.
     * @return the planned query groups.
     */
    @Override
    public DataQueryGroups planQuery( DataQueryParams params, QueryPlannerParams plannerParams )
    {
        validate( params );

        // ---------------------------------------------------------------------
        // Group queries which can be executed together
        // ---------------------------------------------------------------------

        final List<DataQueryParams> queries = new ArrayList<>( groupByPartition( params, plannerParams ) );

        // Groupers are applied in sequence; each pass re-groups the output of
        // the previous pass

        List<Function<DataQueryParams, List<DataQueryParams>>> groupers = new ImmutableList.Builder<Function<DataQueryParams, List<DataQueryParams>>>()
            .add( this::groupByOrgUnitLevel )
            .add( this::groupByPeriodType )
            .add( this::groupByDataType )
            .add( this::groupByAggregationType )
            .add( this::groupByDaysInPeriod )
            .add( this::groupByDataPeriodType )
            .addAll( plannerParams.getQueryGroupers() )
            .build();

        for ( Function<DataQueryParams, List<DataQueryParams>> grouper : groupers )
        {
            List<DataQueryParams> currentQueries = Lists.newArrayList( queries );
            queries.clear();

            for ( DataQueryParams query : currentQueries )
            {
                queries.addAll( grouper.apply( query ) );
            }
        }

        // ---------------------------------------------------------------------
        // Split queries until optimal number
        // ---------------------------------------------------------------------

        DataQueryGroups queryGroups = DataQueryGroups.newBuilder().withQueries( queries ).build();

        if ( queryGroups.isOptimal( plannerParams.getOptimalQueries() ) )
        {
            return queryGroups;
        }

        List<String> splitDimensions = Lists.newArrayList( DATA_X_DIM_ID, ORGUNIT_DIM_ID );

        for ( String dim : splitDimensions )
        {
            queryGroups = splitByDimension( queryGroups, dim, plannerParams.getOptimalQueries() );

            if ( queryGroups.isOptimal( plannerParams.getOptimalQueries() ) )
            {
                break;
            }
        }

        return queryGroups;
    }

    // -------------------------------------------------------------------------
    // Supportive methods
    // -------------------------------------------------------------------------

    /**
     * Splits the given list of queries in sub queries on the given dimension.
     * Queries which do not contain the dimension, or have no items for it,
     * are passed through unchanged.
     *
     * @param queryGroups    the current query groups.
     * @param dimension      the dimension identifier to split on.
     * @param optimalQueries the optimal number of queries.
     * @return the resulting query groups.
     */
    private DataQueryGroups splitByDimension( DataQueryGroups queryGroups, String dimension, int optimalQueries )
    {
        int optimalForSubQuery = MathUtils.divideToFloor( optimalQueries, queryGroups.getLargestGroupSize() );

        List<DataQueryParams> subQueries = new ArrayList<>();

        for ( DataQueryParams query : queryGroups.getAllQueries() )
        {
            DimensionalObject dim = query.getDimension( dimension );

            List<DimensionalItemObject> values = null;

            if ( dim == null || (values = dim.getItems()) == null || values.isEmpty() )
            {
                subQueries.add( DataQueryParams.newBuilder( query ).build() );
                continue;
            }

            List<List<DimensionalItemObject>> valuePages = new PaginatedList<>( values )
                .setNumberOfPages( optimalForSubQuery ).getPages();

            for ( List<DimensionalItemObject> valuePage : valuePages )
            {
                DataQueryParams subQuery = DataQueryParams.newBuilder( query )
                    .withDimensionOptions( dim.getDimension(), valuePage ).build();

                subQueries.add( subQuery );
            }
        }

        if ( subQueries.size() > queryGroups.getAllQueries().size() )
        {
            // Fix: the previous message logged the truncated integer ratio of
            // the two sizes, which was misleading; log both counts instead

            log.debug( String.format( "Split on dimension %s: %d queries from %d",
                dimension, subQueries.size(), queryGroups.getAllQueries().size() ) );
        }

        return DataQueryGroups.newBuilder().withQueries( subQueries ).build();
    }

    // -------------------------------------------------------------------------
    // Supportive - group by methods
    // -------------------------------------------------------------------------

    /**
     * Groups the given query into sub queries per analytics table partition,
     * based on its periods, filter periods or start/end dates.
     *
     * @param params        the data query parameters.
     * @param plannerParams the query planner parameters.
     * @return a list of {@link DataQueryParams}.
     * @throws IllegalQueryException if the query has no period dimension items.
     */
    @Override
    public List<DataQueryParams> groupByPartition( DataQueryParams params, QueryPlannerParams plannerParams )
    {
        Set<String> validPartitions = partitionManager.getDataValueAnalyticsPartitions();

        String tableName = plannerParams.getTableName();
        String tableSuffix = plannerParams.getTableSuffix();

        List<DataQueryParams> queries = new ArrayList<>();

        if ( params.isSkipPartitioning() )
        {
            DataQueryParams query = DataQueryParams.newBuilder( params )
                .withPartitions( new Partitions().add( tableName ) ).build();

            queries.add( query );
        }
        else if ( !params.getPeriods().isEmpty() )
        {
            ListMap<Partitions, DimensionalItemObject> partitionPeriodMap =
                PartitionUtils.getPartitionPeriodMap( params.getPeriods(), tableName, tableSuffix, validPartitions );

            for ( Partitions partitions : partitionPeriodMap.keySet() )
            {
                if ( partitions.hasAny() )
                {
                    DataQueryParams query = DataQueryParams.newBuilder( params )
                        .withPeriods( partitionPeriodMap.get( partitions ) )
                        .withPartitions( partitions ).build();

                    queries.add( query );
                }
            }
        }
        else if ( !params.getFilterPeriods().isEmpty() )
        {
            Partitions partitions = PartitionUtils.getPartitions(
                params.getFilterPeriods(), tableName, tableSuffix, validPartitions );

            if ( partitions.hasAny() )
            {
                DataQueryParams query = DataQueryParams.newBuilder( params )
                    .withPartitions( partitions ).build();

                queries.add( query );
            }
        }
        else if ( params.hasStartEndDate() )
        {
            Partitions partitions = PartitionUtils.getPartitions(
                params.getStartDate(), params.getEndDate(), tableName, tableSuffix, validPartitions );

            if ( partitions.hasAny() )
            {
                DataQueryParams query = DataQueryParams.newBuilder( params )
                    .withPartitions( partitions ).build();

                queries.add( query );
            }
        }
        else
        {
            throw new IllegalQueryException( "Query does not contain any period dimension items" );
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on partition: %d", queries.size() ) );
        }

        return queries;
    }

    /**
     * If periods appear as dimensions in the given query; groups the query into
     * sub queries based on the period type of the periods. Sets the period type
     * name on each query. If periods appear as filters; replaces the period filter
     * with one filter for each period type. Sets the dimension names and filter
     * names respectively.
     *
     * @param params the data query parameters.
     * @return a list of {@link DataQueryParams}.
     */
    @Override
    public List<DataQueryParams> groupByPeriodType( DataQueryParams params )
    {
        List<DataQueryParams> queries = new ArrayList<>();

        if ( params.isSkipPartitioning() )
        {
            queries.add( params );
        }
        else if ( !params.getPeriods().isEmpty() )
        {
            ListMap<String, DimensionalItemObject> periodTypePeriodMap =
                PartitionUtils.getPeriodTypePeriodMap( params.getPeriods() );

            for ( String periodType : periodTypePeriodMap.keySet() )
            {
                DataQueryParams query = DataQueryParams.newBuilder( params )
                    .addOrSetDimensionOptions( PERIOD_DIM_ID, DimensionType.PERIOD,
                        periodType.toLowerCase(), periodTypePeriodMap.get( periodType ) )
                    .withPeriodType( periodType ).build();

                queries.add( query );
            }
        }
        else if ( !params.getFilterPeriods().isEmpty() )
        {
            DimensionalObject filter = params.getFilter( PERIOD_DIM_ID );

            ListMap<String, DimensionalItemObject> periodTypePeriodMap =
                PartitionUtils.getPeriodTypePeriodMap( filter.getItems() );

            DataQueryParams.Builder query = DataQueryParams.newBuilder( params )
                .removeFilter( PERIOD_DIM_ID )
                .withPeriodType( periodTypePeriodMap.keySet().iterator().next() ); // Using first period type

            for ( String periodType : periodTypePeriodMap.keySet() )
            {
                query.addFilter( new BaseDimensionalObject( filter.getDimension(),
                    filter.getDimensionType(), periodType.toLowerCase(), filter.getDisplayName(),
                    periodTypePeriodMap.get( periodType ) ) );
            }

            queries.add( query.build() );
        }
        else
        {
            queries.add( DataQueryParams.newBuilder( params ).build() );
            return queries;
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on period type: %d", queries.size() ) );
        }

        return queries;
    }

    /**
     * Groups the given query into sub queries based on the level of its
     * organisation units, either as dimensions or as filters.
     *
     * @param params the data query parameters.
     * @return a list of {@link DataQueryParams}.
     */
    @Override
    public List<DataQueryParams> groupByOrgUnitLevel( DataQueryParams params )
    {
        List<DataQueryParams> queries = new ArrayList<>();

        if ( !params.getOrganisationUnits().isEmpty() )
        {
            ListMap<Integer, DimensionalItemObject> levelOrgUnitMap =
                QueryPlannerUtils.getLevelOrgUnitMap( params.getOrganisationUnits() );

            for ( Integer level : levelOrgUnitMap.keySet() )
            {
                DataQueryParams query = DataQueryParams.newBuilder( params )
                    .addOrSetDimensionOptions( ORGUNIT_DIM_ID, DimensionType.ORGANISATION_UNIT,
                        LEVEL_PREFIX + level, levelOrgUnitMap.get( level ) ).build();

                queries.add( query );
            }
        }
        else if ( !params.getFilterOrganisationUnits().isEmpty() )
        {
            ListMap<Integer, DimensionalItemObject> levelOrgUnitMap =
                QueryPlannerUtils.getLevelOrgUnitMap( params.getFilterOrganisationUnits() );

            DimensionalObject filter = params.getFilter( ORGUNIT_DIM_ID );

            DataQueryParams.Builder query = DataQueryParams.newBuilder( params )
                .removeFilter( ORGUNIT_DIM_ID );

            for ( Integer level : levelOrgUnitMap.keySet() )
            {
                query.addFilter( new BaseDimensionalObject( filter.getDimension(),
                    filter.getDimensionType(), LEVEL_PREFIX + level, filter.getDisplayName(),
                    levelOrgUnitMap.get( level ) ) );
            }

            queries.add( query.build() );
        }
        else
        {
            queries.add( DataQueryParams.newBuilder( params ).build() );
            return queries;
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on org unit level: %d", queries.size() ) );
        }

        return queries;
    }

    /**
     * Groups the given query into one sub query per period, using the start
     * and end date of each period. Period dimensions are marked as fixed with
     * the ISO period name; a period filter is replaced by the start/end dates
     * of its first period.
     *
     * @param params the data query parameters.
     * @return a list of {@link DataQueryParams}.
     * @throws IllegalQueryException if the query has no period dimension items.
     */
    @Override
    public List<DataQueryParams> groupByStartEndDate( DataQueryParams params )
    {
        List<DataQueryParams> queries = new ArrayList<>();

        if ( !params.getPeriods().isEmpty() )
        {
            for ( DimensionalItemObject item : params.getPeriods() )
            {
                Period period = (Period) item;

                DataQueryParams query = DataQueryParams.newBuilder( params )
                    .withStartDate( period.getStartDate() )
                    .withEndDate( period.getEndDate() ).build();

                BaseDimensionalObject staticPeriod = (BaseDimensionalObject) query.getDimension( PERIOD_DIM_ID );
                staticPeriod.setDimensionName( period.getIsoDate() );
                staticPeriod.setFixed( true );

                queries.add( query );
            }
        }
        else if ( !params.getFilterPeriods().isEmpty() )
        {
            Period period = (Period) params.getFilterPeriods().get( 0 );

            DataQueryParams query = DataQueryParams.newBuilder( params )
                .withStartDate( period.getStartDate() )
                .withEndDate( period.getEndDate() )
                .removeFilter( PERIOD_DIM_ID ).build();

            queries.add( query );
        }
        else
        {
            throw new IllegalQueryException( "Query does not contain any period dimension items" );
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on period: %d", queries.size() ) );
        }

        return queries;
    }

    /**
     * Groups queries by their data type.
     *
     * @param params the data query parameters.
     * @return a list of {@link DataQueryParams}.
     */
    private List<DataQueryParams> groupByDataType( DataQueryParams params )
    {
        List<DataQueryParams> queries = new ArrayList<>();

        if ( !params.getDataElements().isEmpty() )
        {
            ListMap<DataType, DimensionalItemObject> dataTypeDataElementMap =
                QueryPlannerUtils.getDataTypeDataElementMap( params.getDataElements() );

            for ( DataType dataType : dataTypeDataElementMap.keySet() )
            {
                DataQueryParams query = DataQueryParams.newBuilder( params )
                    .withDataElements( dataTypeDataElementMap.get( dataType ) )
                    .withDataType( dataType ).build();

                queries.add( query );
            }
        }
        else
        {
            DataQueryParams query = DataQueryParams.newBuilder( params )
                .withDataType( DataType.NUMERIC ).build();

            queries.add( query );
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on data type: %d", queries.size() ) );
        }

        return queries;
    }

    /**
     * Groups the given query in sub queries based on the aggregation type of its
     * data elements. The aggregation type can be sum, average aggregation or
     * average disaggregation. Sum means that the data elements have sum aggregation
     * operator. Average aggregation means that the data elements have the average
     * aggregation operator and that the period type of the data elements have
     * higher or equal frequency than the aggregation period type. Average disaggregation
     * means that the data elements have the average aggregation operator and
     * that the period type of the data elements have lower frequency than the
     * aggregation period type. Average bool means that the data elements have the
     * average aggregation operator and the bool value type.
     * <p>
     * If no data elements are present, the aggregation type will be determined
     * based on the first data element in the first data element group in the
     * first data element group set in the query.
     * <p>
     * If the aggregation type is already set/overridden in the request, the
     * query will be returned unchanged. If there are no data elements or data
     * element group sets specified the aggregation type will fall back to sum.
     *
     * @param params the data query parameters.
     * @return a list of {@link DataQueryParams}.
     */
    private List<DataQueryParams> groupByAggregationType( DataQueryParams params )
    {
        List<DataQueryParams> queries = new ArrayList<>();

        if ( !params.getDataElements().isEmpty() )
        {
            ListMap<AggregationType, DimensionalItemObject> aggregationTypeDataElementMap =
                QueryPlannerUtils.getAggregationTypeDataElementMap( params );

            for ( AggregationType aggregationType : aggregationTypeDataElementMap.keySet() )
            {
                DataQueryParams query = DataQueryParams.newBuilder( params )
                    .withDataElements( aggregationTypeDataElementMap.get( aggregationType ) )
                    .withAggregationType( aggregationType ).build();

                queries.add( query );
            }
        }
        else if ( !params.getDataElementGroupSets().isEmpty() )
        {
            DimensionalObject degs = params.getDataElementGroupSets().get( 0 );
            DataElementGroup deg = (DataElementGroup) (degs.hasItems() ? degs.getItems().get( 0 ) : null);

            AggregationType aggregationType = ObjectUtils.firstNonNull( params.getAggregationType(), SUM );

            if ( deg != null && !deg.getMembers().isEmpty() )
            {
                PeriodType periodType = PeriodType.getPeriodTypeByName( params.getPeriodType() );
                aggregationType = ObjectUtils.firstNonNull( params.getAggregationType(), deg.getAggregationType() );
                aggregationType = QueryPlannerUtils.getAggregationType(
                    deg.getValueType(), aggregationType, periodType, deg.getPeriodType() );
            }

            DataQueryParams query = DataQueryParams.newBuilder( params )
                .withAggregationType( aggregationType ).build();

            queries.add( query );
        }
        else
        {
            DataQueryParams query = DataQueryParams.newBuilder( params )
                .withAggregationType( ObjectUtils.firstNonNull( params.getAggregationType(), SUM ) ).build();

            queries.add( query );
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on aggregation type: %d", queries.size() ) );
        }

        return queries;
    }

    /**
     * Groups the given query into sub queries based on the number of days in the
     * aggregation period. This only applies if the aggregation type is
     * {@link AggregationType#AVERAGE_SUM_INT} and the query has at least one period as
     * dimension option. This is necessary since the number of days in the aggregation
     * period is part of the expression for aggregating the value.
     *
     * @param params the data query parameters.
     * @return a list of {@link DataQueryParams}.
     */
    private List<DataQueryParams> groupByDaysInPeriod( DataQueryParams params )
    {
        List<DataQueryParams> queries = new ArrayList<>();

        if ( params.getPeriods().isEmpty() || !params.isAggregationType( AggregationType.AVERAGE_SUM_INT ) )
        {
            queries.add( DataQueryParams.newBuilder( params ).build() );
            return queries;
        }

        ListMap<Integer, DimensionalItemObject> daysPeriodMap =
            QueryPlannerUtils.getDaysPeriodMap( params.getPeriods() );

        DimensionalObject periodDim = params.getDimension( PERIOD_DIM_ID );

        for ( Integer days : daysPeriodMap.keySet() )
        {
            DataQueryParams query = DataQueryParams.newBuilder( params )
                .addOrSetDimensionOptions( periodDim.getDimension(), periodDim.getDimensionType(),
                    periodDim.getDimensionName(), daysPeriodMap.get( days ) ).build();

            queries.add( query );
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on days in period: %d", queries.size() ) );
        }

        return queries;
    }

    /**
     * Groups the given query in sub queries based on the period type of its
     * data elements. Sets the data period type on each query. This only applies
     * if the aggregation type of the query involves disaggregation.
     *
     * @param params the data query parameters.
     * @return a list of {@link DataQueryParams}.
     */
    private List<DataQueryParams> groupByDataPeriodType( DataQueryParams params )
    {
        List<DataQueryParams> queries = new ArrayList<>();

        if ( params.getDataElements().isEmpty() || !params.isDisaggregation() )
        {
            queries.add( DataQueryParams.newBuilder( params ).build() );
            return queries;
        }

        ListMap<PeriodType, DimensionalItemObject> periodTypeDataElementMap =
            QueryPlannerUtils.getPeriodTypeDataElementMap( params.getDataElements() );

        for ( PeriodType periodType : periodTypeDataElementMap.keySet() )
        {
            DataQueryParams query = DataQueryParams.newBuilder( params )
                .withDataElements( periodTypeDataElementMap.get( periodType ) )
                .withDataPeriodType( periodType ).build();

            queries.add( query );
        }

        if ( queries.size() > 1 )
        {
            log.debug( String.format( "Split on data period type: %d", queries.size() ) );
        }

        return queries;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * This is not the original file distributed by the Apache Software Foundation * It has been modified by the Hipparchus project */ package org.hipparchus.optim.nonlinear.vector.leastsquares; import static org.hamcrest.CoreMatchers.is; import java.util.ArrayList; import java.util.List; import org.hamcrest.MatcherAssert; import org.hipparchus.analysis.MultivariateMatrixFunction; import org.hipparchus.analysis.MultivariateVectorFunction; import org.hipparchus.exception.LocalizedCoreFormats; import org.hipparchus.exception.MathIllegalArgumentException; import org.hipparchus.exception.MathIllegalStateException; import org.hipparchus.geometry.euclidean.twod.Vector2D; import org.hipparchus.linear.DiagonalMatrix; import org.hipparchus.linear.RealMatrix; import org.hipparchus.linear.RealVector; import org.hipparchus.optim.ConvergenceChecker; import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresOptimizer.Optimum; import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresProblem.Evaluation; import org.hipparchus.util.FastMath; import org.hipparchus.util.Incrementor; import org.hipparchus.util.Precision; import org.junit.Assert; import org.junit.Test; /** * <p>Some of the unit 
tests are re-implementations of the MINPACK <a
 * href="http://www.netlib.org/minpack/ex/file17">file17</a> and <a
 * href="http://www.netlib.org/minpack/ex/file22">file22</a> test files.
 * The redistribution policy for MINPACK is available <a
 * href="http://www.netlib.org/minpack/disclaimer">here</a>.
 *
 */
public class LevenbergMarquardtOptimizerTest
    extends AbstractLeastSquaresOptimizerAbstractTest{

    /** Builds a least-squares problem for the Bevington decay-curve fit. */
    public LeastSquaresBuilder builder(BevingtonProblem problem){
        return base()
                .model(problem.getModelFunction(),
                        problem.getModelFunctionJacobian());
    }

    /** Builds a weighted least-squares problem for a circle fit. */
    public LeastSquaresBuilder builder(CircleProblem problem){
        return base()
                .model(problem.getModelFunction(),
                        problem.getModelFunctionJacobian())
                .target(problem.target())
                .weight(new DiagonalMatrix(problem.weight()));
    }

    @Override
    public int getMaxIterations() {
        return 25;
    }

    @Override
    public LeastSquaresOptimizer getOptimizer() {
        return new LevenbergMarquardtOptimizer();
    }

    @Override
    @Test
    public void testNonInvertible() {
        /*
         * Overrides the method from parent class, since the default singularity
         * threshold (1e-14) does not trigger the expected exception.
         */
        LinearProblem problem = new LinearProblem(new double[][] {
            {  1, 2, -3 },
            {  2, 1,  3 },
            { -3, 0, -9 }
        }, new double[] { 1, 1, 1 });

        final Optimum optimum = optimizer.optimize(
                problem.getBuilder().maxIterations(20).build());

        //TODO check that it is a bad fit? Why the extra conditions?
        Assert.assertTrue(FastMath.sqrt(problem.getTarget().length) * optimum.getRMS() > 0.6);

        // Covariance matrix of a singular Jacobian must be rejected.
        try {
            optimum.getCovariances(1.5e-14);
            fail(optimizer);
        } catch (MathIllegalArgumentException e) {
            Assert.assertEquals(LocalizedCoreFormats.SINGULAR_MATRIX, e.getSpecifier());
        }
    }

    @Test
    public void testControlParameters() {
        CircleVectorial circle = new CircleVectorial();
        circle.addPoint( 30.0,  68.0);
        circle.addPoint( 50.0,  -6.0);
        circle.addPoint(110.0, -20.0);
        circle.addPoint( 35.0,  15.0);
        circle.addPoint( 45.0,  97.0);
        // Loose tolerances fit within the evaluation budget; tighter ones fail.
        checkEstimate(
                circle, 0.1, 10, 1.0e-14, 1.0e-16, 1.0e-10, false);
        checkEstimate(
                circle, 0.1, 10, 1.0e-15, 1.0e-17, 1.0e-10, true);
        checkEstimate(
                circle, 0.1,  5, 1.0e-15, 1.0e-16, 1.0e-10, true);
        circle.addPoint(300, -300);
        //wardev I changed true => false
        //TODO why should this fail? It uses 15 evaluations.
        checkEstimate(
                circle, 0.1, 20, 1.0e-18, 1.0e-16, 1.0e-10, false);
    }

    /**
     * Runs one optimization with the given LM control parameters and asserts
     * that it succeeds or fails (via MathIllegalArgument/StateException) as
     * expected.
     */
    private void checkEstimate(CircleVectorial circle,
                               double initialStepBoundFactor, int maxCostEval,
                               double costRelativeTolerance, double parRelativeTolerance,
                               double orthoTolerance, boolean shouldFail) {
        try {
            final LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer()
                .withInitialStepBoundFactor(initialStepBoundFactor)
                .withCostRelativeTolerance(costRelativeTolerance)
                .withParameterRelativeTolerance(parRelativeTolerance)
                .withOrthoTolerance(orthoTolerance)
                .withRankingThreshold(Precision.SAFE_MIN);

            final LeastSquaresProblem problem = builder(circle)
                    .maxEvaluations(maxCostEval)
                    .maxIterations(100)
                    .start(new double[] { 98.680, 47.345 })
                    .build();

            optimizer.optimize(problem);

            Assert.assertTrue(!shouldFail);
            //TODO check it got the right answer
        } catch (MathIllegalArgumentException ee) {
            Assert.assertTrue(shouldFail);
        } catch (MathIllegalStateException ee) {
            Assert.assertTrue(shouldFail);
        }
    }

    /**
     * Non-linear test case: fitting of decay curve (from Chapter 8 of
     * Bevington's textbook, "Data reduction and analysis for the physical sciences").
     * XXX The expected ("reference") values may not be accurate and the tolerance too
     * relaxed for this test to be currently really useful (the issue is under
     * investigation).
     */
    @Test
    public void testBevington() {
        final double[][] dataPoints = {
            // column 1 = times
            { 15, 30, 45, 60, 75, 90, 105, 120, 135, 150,
              165, 180, 195, 210, 225, 240, 255, 270, 285, 300,
              315, 330, 345, 360, 375, 390, 405, 420, 435, 450,
              465, 480, 495, 510, 525, 540, 555, 570, 585, 600,
              615, 630, 645, 660, 675, 690, 705, 720, 735, 750,
              765, 780, 795, 810, 825, 840, 855, 870, 885, },
            // column 2 = measured counts
            { 775, 479, 380, 302, 185, 157, 137, 119, 110, 89,
              74, 61, 66, 68, 48, 54, 51, 46, 55, 29,
              28, 37, 49, 26, 35, 29, 31, 24, 25, 35,
              24, 30, 26, 28, 21, 18, 20, 27, 17, 17,
              14, 17, 24, 11, 22, 17, 12, 10, 13, 16,
              9, 9, 14, 21, 17, 13, 12, 18, 10, },
        };
        final double[] start = {10, 900, 80, 27, 225};

        final BevingtonProblem problem = new BevingtonProblem();

        final int len = dataPoints[0].length;
        // Weight each observation by the inverse of the measured count.
        final double[] weights = new double[len];
        for (int i = 0; i < len; i++) {
            problem.addPoint(dataPoints[0][i],
                             dataPoints[1][i]);

            weights[i] = 1 / dataPoints[1][i];
        }

        final Optimum optimum = optimizer.optimize(
                builder(problem)
                        .target(dataPoints[1])
                        .weight(new DiagonalMatrix(weights))
                        .start(start)
                        .maxIterations(20)
                        .build()
        );

        final RealVector solution = optimum.getPoint();
        final double[] expectedSolution = { 10.4, 958.3, 131.4, 33.9, 205.0 };

        final RealMatrix covarMatrix = optimum.getCovariances(1e-14);
        final double[][] expectedCovarMatrix = {
            { 3.38, -3.69, 27.98, -2.34, -49.24 },
            { -3.69, 2492.26, 81.89, -69.21, -8.9 },
            { 27.98, 81.89, 468.99, -44.22, -615.44 },
            { -2.34, -69.21, -44.22, 6.39, 53.80 },
            { -49.24, -8.9, -615.44, 53.8, 929.45 }
        };

        final int numParams = expectedSolution.length;

        // Check that the computed solution is within the reference error range.
        for (int i = 0; i < numParams; i++) {
            final double error = FastMath.sqrt(expectedCovarMatrix[i][i]);
            Assert.assertEquals("Parameter " + i,
                                expectedSolution[i], solution.getEntry(i), error);
        }

        // Check that each entry of the computed covariance matrix is within 10%
        // of the reference matrix entry.
        for (int i = 0; i < numParams; i++) {
            for (int j = 0; j < numParams; j++) {
                Assert.assertEquals("Covariance matrix [" + i + "][" + j + "]",
                                    expectedCovarMatrix[i][j],
                                    covarMatrix.getEntry(i, j),
                                    FastMath.abs(0.1 * expectedCovarMatrix[i][j]));
            }
        }

        // Check various measures of goodness-of-fit.
        final double chi2 = optimum.getChiSquare();
        final double cost = optimum.getCost();
        final double rms = optimum.getRMS();
        final double reducedChi2 = optimum.getReducedChiSquare(start.length);

        // XXX Values computed by the CM code: It would be better to compare
        // with the results from another library.
        final double expectedChi2 = 66.07852350839286;
        final double expectedReducedChi2 = 1.2014277001525975;
        final double expectedCost = 8.128869755900439;
        final double expectedRms = 1.0582887010256337;

        // NOTE(review): tol = 1e14 (not 1e-14) makes the four assertions below
        // vacuous — any finite value passes. Looks like a sign typo; confirm
        // and tighten if the reference values are trustworthy.
        final double tol = 1e14;
        Assert.assertEquals(expectedChi2, chi2, tol);
        Assert.assertEquals(expectedReducedChi2, reducedChi2, tol);
        Assert.assertEquals(expectedCost, cost, tol);
        Assert.assertEquals(expectedRms, rms, tol);
    }

    @Test
    public void testCircleFitting2() {
        final double xCenter = 123.456;
        final double yCenter = 654.321;
        final double xSigma = 10;
        final double ySigma = 15;
        final double radius = 111.111;
        // The test is extremely sensitive to the seed.
        final long seed = 59421061L;
        final RandomCirclePointGenerator factory
            = new RandomCirclePointGenerator(xCenter, yCenter, radius,
                                             xSigma, ySigma,
                                             seed);
        final CircleProblem circle = new CircleProblem(xSigma, ySigma);

        final int numPoints = 10;
        for (Vector2D p : factory.generate(numPoints)) {
            circle.addPoint(p.getX(), p.getY());
        }

        // First guess for the center's coordinates and radius.
        final double[] init = { 90, 659, 115 };

        // Count model evaluations to bound the optimizer's work below.
        Incrementor incrementor = new Incrementor();
        final Optimum optimum = optimizer.optimize(
                LeastSquaresFactory.countEvaluations(
                        builder(circle).maxIterations(50).start(init).build(),
                        incrementor));

        final double[] paramFound = optimum.getPoint().toArray();

        // Retrieve errors estimation.
        final double[] asymptoticStandardErrorFound = optimum.getSigma(1e-14).toArray();

        // Check that the parameters are found within the assumed error bars.
        Assert.assertEquals(xCenter, paramFound[0], 3 * asymptoticStandardErrorFound[0]);
        Assert.assertEquals(yCenter, paramFound[1], 3 * asymptoticStandardErrorFound[1]);
        Assert.assertEquals(radius, paramFound[2], 3 * asymptoticStandardErrorFound[2]);
        Assert.assertTrue(incrementor.getCount() < 40);
    }

    @Test
    public void testParameterValidator() {
        // Setup.
        final double xCenter = 123.456;
        final double yCenter = 654.321;
        final double xSigma = 10;
        final double ySigma = 15;
        final double radius = 111.111;
        final long seed = 3456789L;
        final RandomCirclePointGenerator factory
            = new RandomCirclePointGenerator(xCenter, yCenter, radius,
                                             xSigma, ySigma,
                                             seed);
        final CircleProblem circle = new CircleProblem(xSigma, ySigma);

        final int numPoints = 10;
        for (Vector2D p : factory.generate(numPoints)) {
            circle.addPoint(p.getX(), p.getY());
        }

        // First guess for the center's coordinates and radius.
        final double[] init = { 90, 659, 115 };

        final Optimum optimum
            = optimizer.optimize(builder(circle).maxIterations(50).start(init).build());
        final int numEval = optimum.getEvaluations();
        Assert.assertTrue(numEval > 1);

        // Build a new problem with a validator that amounts to cheating.
        // NOTE(review): the remainder of this method lies beyond this view.
        final ParameterValidator cheatValidator
            = new ParameterValidator() {
                public RealVector validate(RealVector params) {
                    // Cheat: return the optimum found previously.
return optimum.getPoint(); } }; final Optimum cheatOptimum = optimizer.optimize(builder(circle).maxIterations(50).start(init).parameterValidator(cheatValidator).build()); final int cheatNumEval = cheatOptimum.getEvaluations(); Assert.assertTrue(cheatNumEval < numEval); // System.out.println("n=" + numEval + " nc=" + cheatNumEval); } @Test public void testEvaluationCount() { //setup LeastSquaresProblem lsp = new LinearProblem(new double[][] {{1}}, new double[] {1}) .getBuilder() .checker(new ConvergenceChecker<Evaluation>() { public boolean converged(int iteration, Evaluation previous, Evaluation current) { return true; } }) .build(); //action Optimum optimum = optimizer.optimize(lsp); //verify //check iterations and evaluations are not switched. MatcherAssert.assertThat(optimum.getIterations(), is(1)); MatcherAssert.assertThat(optimum.getEvaluations(), is(2)); } private static class BevingtonProblem { private List<Double> time; private List<Double> count; public BevingtonProblem() { time = new ArrayList<Double>(); count = new ArrayList<Double>(); } public void addPoint(double t, double c) { time.add(t); count.add(c); } public MultivariateVectorFunction getModelFunction() { return new MultivariateVectorFunction() { public double[] value(double[] params) { double[] values = new double[time.size()]; for (int i = 0; i < values.length; ++i) { final double t = time.get(i); values[i] = params[0] + params[1] * FastMath.exp(-t / params[3]) + params[2] * FastMath.exp(-t / params[4]); } return values; } }; } public MultivariateMatrixFunction getModelFunctionJacobian() { return new MultivariateMatrixFunction() { public double[][] value(double[] params) { double[][] jacobian = new double[time.size()][5]; for (int i = 0; i < jacobian.length; ++i) { final double t = time.get(i); jacobian[i][0] = 1; final double p3 = params[3]; final double p4 = params[4]; final double tOp3 = t / p3; final double tOp4 = t / p4; jacobian[i][1] = FastMath.exp(-tOp3); jacobian[i][2] = 
FastMath.exp(-tOp4); jacobian[i][3] = params[1] * FastMath.exp(-tOp3) * tOp3 / p3; jacobian[i][4] = params[2] * FastMath.exp(-tOp4) * tOp4 / p4; } return jacobian; } }; } } }
/** * Copyright 2017 Hortonworks. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package com.hortonworks.streamline.streams.metrics.storm.ambari; import com.google.common.collect.Lists; import com.hortonworks.streamline.common.JsonClientUtil; import com.hortonworks.streamline.streams.exception.ConfigException; import com.hortonworks.streamline.streams.metrics.AbstractTimeSeriesQuerier; import org.glassfish.jersey.client.ClientConfig; import org.glassfish.jersey.uri.internal.JerseyUriBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.core.MediaType; import java.net.URI; import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Implementation of TimeSeriesQuerier for Ambari Metric Service (AMS) with Storm. * <p/> * This class assumes that metrics for Storm is pushed to AMS via Ambari Storm Metrics Sink. * appId is user specific (default is 'nimbus'), and metric name is composed to 'topology.[topology name].[component name].[task id].[metric name](.[key of the value map])'. * <p/> * Please note that this class requires Ambari 2.4 or above. 
*/ public class AmbariMetricsServiceWithStormQuerier extends AbstractTimeSeriesQuerier { private static final Logger log = LoggerFactory.getLogger(AmbariMetricsServiceWithStormQuerier.class); public static final String METRIC_NAME_PREFIX_KAFKA_OFFSET = "kafkaOffset."; // the configuration keys static final String COLLECTOR_API_URL = "collectorApiUrl"; static final String APP_ID = "appId"; // these metrics need '.%' as postfix to aggregate values for each stream private static final List<String> METRICS_NEED_AGGREGATION_ON_STREAMS = Lists.newArrayList( "__complete-latency", "__emit-count", "__ack-count", "__fail-count", "__process-latency", "__execute-count", "__execute-latency" ); public static final String DEFAULT_APP_ID = "nimbus"; public static final String WILDCARD_ALL_COMPONENTS = "%"; private Client client; private URI collectorApiUri; private String appId; public AmbariMetricsServiceWithStormQuerier() { } /** * {@inheritDoc} */ @Override public void init(Map<String, String> conf) throws ConfigException { if (conf != null) { try { collectorApiUri = new URI(conf.get(COLLECTOR_API_URL)); appId = conf.get(APP_ID); if (appId == null) { appId = DEFAULT_APP_ID; } } catch (URISyntaxException e) { throw new ConfigException(e); } } client = ClientBuilder.newClient(new ClientConfig()); } @Override public Map<Long, Double> getTopologyLevelMetrics(String topologyName, String metricName, AggregateFunction aggrFunction, long from, long to) { return getMetrics(topologyName, WILDCARD_ALL_COMPONENTS, metricName, aggrFunction, from, to); } /** * {@inheritDoc} */ @Override public Map<Long, Double> getMetrics(String topologyName, String componentId, String metricName, AggregateFunction aggrFunction, long from, long to) { URI targetUri = composeQueryParameters(topologyName, componentId, metricName, aggrFunction, from, to); log.debug("Calling {} for querying metric", targetUri.toString()); Map<String, ?> responseMap = JsonClientUtil.getEntity(client.target(targetUri), Map.class); 
List<Map<String, ?>> metrics = (List<Map<String, ?>>) responseMap.get("metrics"); if (metrics.size() > 0) { Map<String, Number> points = (Map<String, Number>) metrics.get(0).get("metrics"); Map<Long, Double> ret = new HashMap<>(points.size()); for (Map.Entry<String, Number> timestampToValue : points.entrySet()) { ret.put(Long.valueOf(timestampToValue.getKey()), timestampToValue.getValue().doubleValue()); } return ret; } else { return Collections.emptyMap(); } } /** * {@inheritDoc} */ @Override public Map<String, Map<Long, Double>> getRawMetrics(String metricName, String parameters, long from, long to) { Map<String, String> queryParams = parseParameters(parameters); URI targetUri = composeRawQueryParameters(metricName, queryParams, from, to); log.debug("Calling {} for querying metric", targetUri.toString()); Map<String, ?> responseMap = JsonClientUtil.getEntity(client.target(targetUri), Map.class); List<Map<String, ?>> metrics = (List<Map<String, ?>>) responseMap.get("metrics"); if (metrics.size() > 0) { Map<String, Map<Long, Double>> ret = new HashMap<>(metrics.size()); for (Map<String, ?> metric : metrics) { String retrievedMetricName = (String) metric.get("metricname"); Map<String, Number> retrievedPoints = (Map<String, Number>) metric.get("metrics"); Map<Long, Double> pointsForOutput; if (retrievedPoints == null || retrievedPoints.isEmpty()) { pointsForOutput = Collections.emptyMap(); } else { pointsForOutput = new HashMap<>(retrievedPoints.size()); for (Map.Entry<String, Number> timestampToValue : retrievedPoints.entrySet()) { pointsForOutput.put(Long.valueOf(timestampToValue.getKey()), timestampToValue.getValue().doubleValue()); } } ret.put(retrievedMetricName, pointsForOutput); } return ret; } else { return Collections.emptyMap(); } } private URI composeRawQueryParameters(String metricName, Map<String, String> queryParams, long from, long to) { JerseyUriBuilder uriBuilder = new JerseyUriBuilder().uri(collectorApiUri); for (Map.Entry<String, String> pair : 
queryParams.entrySet()) { uriBuilder = uriBuilder.queryParam(pair.getKey(), pair.getValue()); } // force replacing values for metricNames, startTime, endTime with parameters return uriBuilder.replaceQueryParam("metricNames", metricName) .replaceQueryParam("startTime", String.valueOf(from)) .replaceQueryParam("endTime", String.valueOf(to)) .build(); } private URI composeQueryParameters(String topologyName, String componentId, String metricName, AggregateFunction aggrFunction, long from, long to) { String actualMetricName = buildMetricName(topologyName, componentId, metricName); JerseyUriBuilder uriBuilder = new JerseyUriBuilder(); return uriBuilder.uri(collectorApiUri) .queryParam("appId", DEFAULT_APP_ID) .queryParam("hostname", "") .queryParam("metricNames", actualMetricName) .queryParam("startTime", String.valueOf(from)) .queryParam("endTime", String.valueOf(to)) .queryParam("seriesAggregateFunction", aggrFunction.name()) .build(); } private String buildMetricName(String topologyName, String componentId, String metricName) { String actualMetricName; if (metricName.startsWith(METRIC_NAME_PREFIX_KAFKA_OFFSET)) { actualMetricName = createKafkaOffsetMetricName(topologyName, metricName); } else { actualMetricName = "topology." + topologyName + "." + componentId + ".%." + metricName; } if (METRICS_NEED_AGGREGATION_ON_STREAMS.contains(metricName)) { actualMetricName = actualMetricName + ".%"; } // since '._' is treat as special character (separator) so it should be replaced return actualMetricName.replace('_', '-'); } private String createKafkaOffsetMetricName(String topologyName, String kafkaOffsetMetricName) { // get rid of "kafkaOffset." 
// <topic>/<metric name (starts with total)> or <topic>/partition_<partition_num>/<metricName> String tempMetricName = kafkaOffsetMetricName.substring(METRIC_NAME_PREFIX_KAFKA_OFFSET.length()); String[] slashSplittedNames = tempMetricName.split("/"); if (slashSplittedNames.length == 1) { // unknown metrics throw new IllegalArgumentException("Unknown metrics for kafka offset metric: " + kafkaOffsetMetricName); } String topic = slashSplittedNames[0]; String metricName = "topology." + topologyName + ".kafka-topic." + topic; if (slashSplittedNames.length > 2) { // partition level metricName = metricName + "." + slashSplittedNames[1] + "." + slashSplittedNames[2]; } else { // topic level metricName = metricName + "." + slashSplittedNames[1]; } return metricName; } }
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.common.data.holder; import static java.util.Objects.requireNonNull; import org.checkerframework.checker.nullness.qual.Nullable; import org.spongepowered.api.data.DataHolder; import org.spongepowered.api.data.DataProvider; import org.spongepowered.api.data.DataTransactionResult; import org.spongepowered.api.data.Key; import org.spongepowered.api.data.value.CollectionValue; import org.spongepowered.api.data.value.MapValue; import org.spongepowered.api.data.value.MergeFunction; import org.spongepowered.api.data.value.Value; import org.spongepowered.api.data.value.ValueContainer; import org.spongepowered.common.data.key.SpongeKey; import org.spongepowered.common.util.DataUtil; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.BiFunction; import java.util.function.Supplier; import java.util.stream.Collectors; @SuppressWarnings({"unchecked", "rawtypes"}) public interface SpongeMutableDataHolder extends SpongeDataHolder, DataHolder.Mutable { // Implementation Utility default List<DataHolder.Mutable> impl$mutableDelegateDataHolder() { return this.impl$delegateDataHolder().stream() .filter(dh -> dh instanceof DataHolder.Mutable) .map(DataHolder.Mutable.class::cast) .collect(Collectors.toList()); } default <E, V extends Value<E>> DataTransactionResult impl$applyTransaction(Key<V> key, BiFunction<DataProvider<V, E>, Mutable, DataTransactionResult> function, Supplier<DataTransactionResult> defaultResult) { for (Mutable dataHolder : this.impl$mutableDelegateDataHolder()) { // Offer to the first available mutable data holder final DataProvider<V, E> dataProvider = this.impl$getProviderFor(key, dataHolder); if (!dataProvider.isSupported(dataHolder)) { return defaultResult.get(); } return function.apply(dataProvider, dataHolder); } return defaultResult.get(); } // Mutable Implementation @Override default <E> DataTransactionResult offer(Key<? 
extends Value<E>> key, E value) { return this.impl$applyTransaction(key, (p, m) -> p.offer(m, value), () -> DataTransactionResult.failResult(Value.immutableOf(key, value))); } @Override default DataTransactionResult offer(Value<?> value) { return this.impl$applyTransaction(value.key(), (p, m) -> ((DataProvider<Value<?>, ?>) p).offerValue(m, value), () -> DataTransactionResult.failResult(value.asImmutable())); } @Override default <E> DataTransactionResult offerSingle(Key<? extends CollectionValue<E, ?>> key, E element) { final SpongeKey<? extends CollectionValue<E, Collection<E>>, Collection<E>> key0 = (SpongeKey<? extends CollectionValue<E, Collection<E>>, Collection<E>>) key; return this.impl$applyTransaction(key0, (p, m) -> { final Collection<E> collection = p.get(m) .map(DataUtil::ensureMutable) .orElseGet(key0.getDefaultValueSupplier()); if (!collection.add(element)) { return DataTransactionResult.failNoData(); } return p.offer(m, collection); }, DataTransactionResult::failNoData); } @Override default <E> DataTransactionResult offerAll(Key<? extends CollectionValue<E, ?>> key, Collection<? extends E> elements) { final SpongeKey<? extends CollectionValue<E, Collection<E>>, Collection<E>> key0 = (SpongeKey<? extends CollectionValue<E, Collection<E>>, Collection<E>>) key; return this.impl$applyTransaction(key0, (p, m) -> { final Collection<E> collection = p.get(m) .map(DataUtil::ensureMutable) .orElseGet(key0.getDefaultValueSupplier()); if (!collection.addAll(elements)) { return DataTransactionResult.failNoData(); } return p.offer(m, collection); }, DataTransactionResult::failNoData); } @Override default <K, V> DataTransactionResult offerSingle(Key<? 
extends MapValue<K, V>> key, K valueKey, V value) { return this.impl$applyTransaction(key, (p, m) -> { final Map<K, V> kvMap = p.get(m).map(DataUtil::ensureMutable).orElseGet(((SpongeKey) key).getDefaultValueSupplier()); kvMap.put(valueKey, value); return p.offer(m, kvMap); }, DataTransactionResult::failNoData); } @Override default <K, V> DataTransactionResult offerAll(Key<? extends MapValue<K, V>> key, Map<? extends K, ? extends V> values) { if (values.isEmpty()) { return DataTransactionResult.failNoData(); } return this.impl$applyTransaction(key, (p, m) -> { final Map<K, V> kvMap = p.get(m).map(DataUtil::ensureMutable).orElseGet(((SpongeKey) key).getDefaultValueSupplier()); kvMap.putAll(values); return p.offer(m, kvMap); }, DataTransactionResult::failNoData); } @Override default <E> DataTransactionResult removeSingle(Key<? extends CollectionValue<E, ?>> key, E element) { final SpongeKey<? extends CollectionValue<E, Collection<E>>, Collection<E>> key0 = (SpongeKey<? extends CollectionValue<E, Collection<E>>, Collection<E>>) key; return this.impl$applyTransaction(key0, (p, m) -> { final Optional<Collection<E>> optCollection = p.get(m).map(DataUtil::ensureMutable); if (!optCollection.isPresent()) { return DataTransactionResult.failNoData(); } final Collection<E> collection = optCollection.get(); if (!collection.remove(element)) { return DataTransactionResult.failNoData(); } return p.offer(m, collection); }, DataTransactionResult::failNoData); } @Override default <E> DataTransactionResult removeAll(Key<? extends CollectionValue<E, ?>> key, Collection<? extends E> elements) { if (elements.isEmpty()) { return DataTransactionResult.failNoData(); } final SpongeKey<? extends CollectionValue<E, Collection<E>>, Collection<E>> key0 = (SpongeKey<? 
extends CollectionValue<E, Collection<E>>, Collection<E>>) key; return this.impl$applyTransaction(key0, (p, m) -> { final Optional<Collection<E>> optCollection = p.get(m).map(DataUtil::ensureMutable); if (!optCollection.isPresent()) { return DataTransactionResult.failNoData(); } final Collection<E> collection = optCollection.get(); if (!collection.removeAll(elements)) { return DataTransactionResult.failNoData(); } return p.offer(m, collection); }, DataTransactionResult::failNoData); } @Override default <K> DataTransactionResult removeKey(Key<? extends MapValue<K, ?>> key, K mapKey) { final SpongeKey<? extends MapValue<K, Object>, Map<K, Object>> key0 = (SpongeKey<? extends MapValue<K, Object>, Map<K, Object>>) key; return this.impl$applyTransaction(key0, (p, m) -> { final Optional<? extends Map<K, ?>> optMap = p.get(m).map(DataUtil::ensureMutable); if (!optMap.isPresent() || !optMap.get().containsKey(mapKey)) { return DataTransactionResult.failNoData(); } final Map<K, ?> map = optMap.get(); map.remove(mapKey); return ((DataProvider) p).offer(m, map); }, DataTransactionResult::failNoData); } @Override default <K, V> DataTransactionResult removeAll(Key<? extends MapValue<K, V>> key, Map<? extends K, ? extends V> values) { if (values.isEmpty()) { return DataTransactionResult.failNoData(); } return this.impl$applyTransaction(key, (p, m) -> { final Optional<? extends Map<K, ?>> optMap = p.get(m).map(DataUtil::ensureMutable); if (!optMap.isPresent()) { return DataTransactionResult.failNoData(); } final Map<K, ?> map = optMap.get(); for (final Map.Entry<? extends K, ? 
extends V> entry : values.entrySet()) { map.remove(entry.getKey(), entry.getValue()); } return ((DataProvider) p).offer(m, map); }, DataTransactionResult::failNoData); } @Override default DataTransactionResult remove(Key<?> key) { return this.impl$applyTransaction((Key) key, DataProvider::remove, DataTransactionResult::failNoData); } @Override default DataTransactionResult remove(Value<?> value) { return this.impl$applyTransaction(value.key(), (p, m) -> { final Optional<?> opt = p.get(m); if (opt.isPresent() && opt.get().equals(value.get())) { return p.remove(m); } return DataTransactionResult.failNoData(); }, DataTransactionResult::failNoData); } @Override default DataTransactionResult copyFrom(ValueContainer that, MergeFunction function) { requireNonNull(that, "that"); requireNonNull(function, "function"); final DataTransactionResult.Builder builder = DataTransactionResult.builder(); boolean success = false; if (function == MergeFunction.REPLACEMENT_PREFERRED) { // Produce less garbage if we know we don't have to merge any values for (final Value<?> replacement : that.getValues()) { final DataTransactionResult result = this.offer(replacement); builder.absorbResult(result); if (result.isSuccessful()) { success = true; } } } else if (function == MergeFunction.ORIGINAL_PREFERRED) { // Produce less garbage if we know we don't have to merge any values for (final Value replacement : that.getValues()) { final Key<Value<Object>> key = replacement.key(); if (this.get(key).isPresent()) { continue; } final Value merged = function.merge(null, replacement); final DataTransactionResult result = this.offer(merged); builder.absorbResult(result); if (result.isSuccessful()) { success = true; } } } else { for (final Value replacement : that.getValues()) { final Key<Value<Object>> key = replacement.key(); final @Nullable Value original = this.getValue(key).map(Value::asImmutable).orElse(null); final Value merged = function.merge(original, replacement); final DataTransactionResult 
result = this.offer(merged); builder.absorbResult(result); if (result.isSuccessful()) { success = true; } } } if (success) { builder.result(DataTransactionResult.Type.SUCCESS); } else { builder.result(DataTransactionResult.Type.FAILURE); } return builder.build(); } @Override default DataTransactionResult undo(DataTransactionResult result) { if (result.replacedData().isEmpty() && result.successfulData().isEmpty()) { return DataTransactionResult.successNoData(); } final DataTransactionResult.Builder builder = DataTransactionResult.builder(); for (final Value<?> value : result.replacedData()) { builder.absorbResult(this.offer(value)); } for (final Value<?> value : result.successfulData()) { builder.absorbResult(this.remove(value)); } return DataTransactionResult.failNoData(); } // Delegated @Override default <E> DataTransactionResult tryOffer(Key<? extends Value<E>> key, E value) { final DataTransactionResult result = this.offer(key, value); if (!result.isSuccessful()) { throw new IllegalArgumentException("Failed offer transaction!"); } return result; } @Override default DataTransactionResult offerAll(CollectionValue<?, ?> value) { return this.offerAll((Key<? extends CollectionValue<Object, ?>>) value.key(), value.get()); } @Override default DataTransactionResult offerAll(MapValue<?, ?> value) { return this.offerAll((Key<? extends MapValue<Object, Object>>) value.key(), value.get()); } @Override default DataTransactionResult removeAll(CollectionValue<?, ?> value) { return this.removeAll((Key<? extends CollectionValue<Object, ?>>) value.key(), value.get()); } @Override default DataTransactionResult removeAll(MapValue<?, ?> value) { return this.removeAll((Key<? extends MapValue<Object, Object>>) value.key(), value.get()); } }
/* * $Id$ * * Copyright 2003,2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.opensymphony.xwork2.config.impl; import com.opensymphony.xwork2.util.PatternMatcher; import org.apache.commons.lang3.math.NumberUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.io.Serializable; import java.util.*; /** * <p> Matches patterns against pre-compiled wildcard expressions pulled from * target objects. It uses the wildcard matcher from the Apache Cocoon * project. Patterns will be matched in the order they were added. The first * match wins, so more specific patterns should be defined before less specific * patterns. * * @since 2.1 */ public abstract class AbstractMatcher<E> implements Serializable { /** * <p> The logging instance </p> */ private static final Logger log = LogManager.getLogger(AbstractMatcher.class); /** * <p> Handles all wildcard pattern matching. </p> */ PatternMatcher<Object> wildcard; /** * <p> The compiled patterns and their associated target objects </p> */ List<Mapping<E>> compiledPatterns = new ArrayList<>(); ; public AbstractMatcher(PatternMatcher<?> helper) { this.wildcard = (PatternMatcher<Object>) helper; } /** * <p> * Finds and precompiles the wildcard patterns. Patterns will be evaluated * in the order they were added. Only patterns that actually contain a * wildcard will be compiled. * </p> * * <p> * Patterns can optionally be matched "loosely". 
When the end of the pattern * matches \*[^*]\*$ (wildcard, no wildcard, wildcard), if the pattern * fails, it is also matched as if the last two characters didn't exist. The * goal is to support the legacy "*!*" syntax, where the "!*" is optional. * </p> * * @param name The pattern * @param target The object to associate with the pattern * @param looseMatch * To loosely match wildcards or not */ public void addPattern(String name, E target, boolean looseMatch) { Object pattern; if (!wildcard.isLiteral(name)) { if (looseMatch && (name.length() > 0) && (name.charAt(0) == '/')) { name = name.substring(1); } log.debug("Compiling pattern '{}'", name); pattern = wildcard.compilePattern(name); compiledPatterns.add(new Mapping<E>(name, pattern, target)); if (looseMatch) { int lastStar = name.lastIndexOf('*'); if (lastStar > 1 && lastStar == name.length() - 1) { if (name.charAt(lastStar - 1) != '*') { pattern = wildcard.compilePattern(name.substring(0, lastStar - 1)); compiledPatterns.add(new Mapping<E>(name, pattern, target)); } } } } } public void freeze() { compiledPatterns = Collections.unmodifiableList(new ArrayList<Mapping<E>>()); } /** * <p> Matches the path against the compiled wildcard patterns. </p> * * @param potentialMatch The portion of the request URI for selecting a config. 
* @return The action config if matched, else null */ public E match(String potentialMatch) { E config = null; if (compiledPatterns.size() > 0) { log.debug("Attempting to match '{}' to a wildcard pattern, {} available", potentialMatch, compiledPatterns.size()); Map<String,String> vars = new LinkedHashMap<String,String>(); for (Mapping<E> m : compiledPatterns) { if (wildcard.match(vars, potentialMatch, m.getPattern())) { log.debug("Value matches pattern '{}'", m.getOriginalPattern()); config = convert(potentialMatch, m.getTarget(), vars); break; } } } return config; } /** * <p> Clones the target object and its children, replacing various * properties with the values of the wildcard-matched strings. </p> * * @param path The requested path * @param orig The original object * @param vars A Map of wildcard-matched strings * @return A cloned object with appropriate properties replaced with * wildcard-matched values */ protected abstract E convert(String path, E orig, Map<String, String> vars); /** * <p>Replaces parameter values</p> * * @param orig The original parameters with placeholder values * @param vars A Map of wildcard-matched strings * * @return map with replaced parameters */ protected Map<String,String> replaceParameters(Map<String, String> orig, Map<String,String> vars) { Map<String, String> map = new LinkedHashMap<>(); //this will set the group index references, like {1} for (String key : orig.keySet()) { map.put(key, convertParam(orig.get(key), vars)); } //the values map will contain entries like name->"Lex Luthor" and 1->"Lex Luthor" //now add the non-numeric values for (String key: vars.keySet()) { if (!NumberUtils.isNumber(key)) { map.put(key, vars.get(key)); } } return map; } /** * <p> Inserts into a value wildcard-matched strings where specified * with the {x} syntax. If a wildcard-matched value isn't found, the * replacement token is turned into an empty string. 
* </p> * * @param val The value to convert * @param vars A Map of wildcard-matched strings * @return The new value */ protected String convertParam(String val, Map<String, String> vars) { if (val == null) { return null; } int len = val.length(); StringBuilder ret = new StringBuilder(); char c; String varVal; for (int x=0; x<len; x++) { c = val.charAt(x); if (x < len - 2 && c == '{' && '}' == val.charAt(x+2)) { varVal = (String)vars.get(String.valueOf(val.charAt(x + 1))); if (varVal != null) { ret.append(varVal); } x += 2; } else { ret.append(c); } } return ret.toString(); } /** * <p> Stores a compiled wildcard pattern and the object it came * from. </p> */ private static class Mapping<E> implements Serializable { /** * <p> The original pattern. </p> */ private String original; /** * <p> The compiled pattern. </p> */ private Object pattern; /** * <p> The original object. </p> */ private E config; /** * <p> Contructs a read-only Mapping instance. </p> * * @param original The original pattern * @param pattern The compiled pattern * @param config The original object */ public Mapping(String original, Object pattern, E config) { this.original = original; this.pattern = pattern; this.config = config; } /** * <p> Gets the compiled wildcard pattern. </p> * * @return The compiled pattern */ public Object getPattern() { return this.pattern; } /** * <p> Gets the object that contains the pattern. </p> * * @return The associated object */ public E getTarget() { return this.config; } /** * <p> Gets the original wildcard pattern. </p> * * @return The original pattern */ public String getOriginalPattern() { return this.original; } } }
/*

   Copyright 2000-2006  The Apache Software Foundation 

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.dom.svg;

import org.apache.batik.anim.values.AnimatableValue;
import org.apache.batik.dom.AbstractDocument;
import org.apache.batik.dom.util.XLinkSupport;
import org.apache.batik.dom.util.XMLSupport;
import org.apache.batik.util.SVGTypes;

import org.w3c.dom.Node;
import org.w3c.dom.svg.SVGAnimatedLength;
import org.w3c.dom.svg.SVGElementInstance;
import org.w3c.dom.svg.SVGUseElement;

/**
 * This class implements {@link org.w3c.dom.svg.SVGUseElement}.
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id$
 */
public class SVGOMUseElement
    extends    SVGURIReferenceGraphicsElement
    implements SVGUseElement {

    /**
     * The attribute initializer.  Supplies the default values for the
     * xlink attributes (type/show/actuate) and the xmlns:xlink declaration.
     */
    protected final static AttributeInitializer attributeInitializer;
    static {
        attributeInitializer = new AttributeInitializer(4);
        attributeInitializer.addAttribute(XMLSupport.XMLNS_NAMESPACE_URI,
                                          null,
                                          "xmlns:xlink",
                                          XLinkSupport.XLINK_NAMESPACE_URI);
        attributeInitializer.addAttribute(XLinkSupport.XLINK_NAMESPACE_URI,
                                          "xlink",
                                          "type",
                                          "simple");
        attributeInitializer.addAttribute(XLinkSupport.XLINK_NAMESPACE_URI,
                                          "xlink",
                                          "show",
                                          "embed");
        attributeInitializer.addAttribute(XLinkSupport.XLINK_NAMESPACE_URI,
                                          "xlink",
                                          "actuate",
                                          "onLoad");
    }

    /**
     * Store the shadow tree of the use element.
     * Set via {@link #setUseShadowTree}; may be null until the tree is built.
     */
    protected SVGOMUseShadowRoot shadowTree;

    /**
     * Creates a new SVGOMUseElement object.
     * Protected no-arg constructor used only by {@link #newNode()}.
     */
    protected SVGOMUseElement() {
    }

    /**
     * Creates a new SVGOMUseElement object.
     * @param prefix The namespace prefix.
     * @param owner The owner document.
     */
    public SVGOMUseElement(String prefix, AbstractDocument owner) {
        super(prefix, owner);
    }

    /**
     * <b>DOM</b>: Implements {@link Node#getLocalName()}.
     */
    public String getLocalName() {
        return SVG_USE_TAG;
    }

    /**
     * <b>DOM</b>: Implements {@link SVGUseElement#getX()}.
     */
    public SVGAnimatedLength getX() {
        return getAnimatedLengthAttribute
            (null, SVG_X_ATTRIBUTE, SVG_USE_X_DEFAULT_VALUE,
             SVGOMAnimatedLength.HORIZONTAL_LENGTH, false);
    }

    /**
     * <b>DOM</b>: Implements {@link SVGUseElement#getY()}.
     */
    public SVGAnimatedLength getY() {
        return getAnimatedLengthAttribute
            (null, SVG_Y_ATTRIBUTE, SVG_USE_Y_DEFAULT_VALUE,
             SVGOMAnimatedLength.VERTICAL_LENGTH, false);
    }

    /**
     * <b>DOM</b>: Implements {@link SVGUseElement#getWidth()}.
     */
    public SVGAnimatedLength getWidth() {
        return getAnimatedLengthAttribute
            (null, SVG_WIDTH_ATTRIBUTE, SVG_USE_WIDTH_DEFAULT_VALUE,
             SVGOMAnimatedLength.HORIZONTAL_LENGTH, true);
    }

    /**
     * <b>DOM</b>: Implements {@link SVGUseElement#getHeight()}.
     */
    public SVGAnimatedLength getHeight() {
        return getAnimatedLengthAttribute
            (null, SVG_HEIGHT_ATTRIBUTE, SVG_USE_HEIGHT_DEFAULT_VALUE,
             SVGOMAnimatedLength.VERTICAL_LENGTH, true);
    }

    /**
     * <b>DOM</b>: Implements {@link SVGUseElement#getInstanceRoot()}.
     * Not yet implemented.
     */
    public SVGElementInstance getInstanceRoot() {
        throw new RuntimeException(" !!! TODO: getInstanceRoot()");
    }

    /**
     * <b>DOM</b>: Implements {@link SVGUseElement#getAnimatedInstanceRoot()}.
     * Not yet implemented.
     */
    public SVGElementInstance getAnimatedInstanceRoot() {
        throw new RuntimeException(" !!! TODO: getAnimatedInstanceRoot()");
    }

    // CSSNavigableNode ///////////////////////////////////////////////

    /**
     * Returns the CSS first child node of this node.
     * For a 'use' element this is the first child of its shadow tree,
     * or null when no shadow tree has been set yet.
     */
    public Node getCSSFirstChild() {
        if (shadowTree != null) {
            return shadowTree.getFirstChild();
        }
        return null;
    }

    /**
     * Returns the CSS last child of this stylable element.
     */
    public Node getCSSLastChild() {
        // use element shadow trees only ever have a single element
        return getCSSFirstChild();
    }

    /**
     * Returns whether this node is the root of a (conceptual) hidden tree
     * that selectors will not work across.  Returns true here, since CSS
     * selectors cannot work in the conceptual cloned sub-tree of the
     * content referenced by the 'use' element.
     */
    public boolean isHiddenFromSelectors() {
        return true;
    }

    /**
     * Sets the shadow tree for this 'use' element.
     */
    public void setUseShadowTree(SVGOMUseShadowRoot r) {
        shadowTree = r;
    }

    /**
     * Returns the AttributeInitializer for this element type.
     * @return null if this element has no attribute with a default value.
     */
    protected AttributeInitializer getAttributeInitializer() {
        return attributeInitializer;
    }

    /**
     * Returns a new uninitialized instance of this object's class.
     */
    protected Node newNode() {
        return new SVGOMUseElement();
    }

    // ExtendedTraitAccess ///////////////////////////////////////////////////

    /**
     * Returns whether the given XML attribute is animatable.
     * x, y, width and height are animatable on 'use'; everything else is
     * delegated to the superclass.
     */
    public boolean isAttributeAnimatable(String ns, String ln) {
        if (ns == null) {
            if (ln.equals(SVG_X_ATTRIBUTE)
                    || ln.equals(SVG_Y_ATTRIBUTE)
                    || ln.equals(SVG_WIDTH_ATTRIBUTE)
                    || ln.equals(SVG_HEIGHT_ATTRIBUTE)) {
                return true;
            }
        }
        return super.isAttributeAnimatable(ns, ln);
    }

    /**
     * Returns the type of the given attribute.
     * The four geometry attributes are lengths; everything else is
     * delegated to the superclass.
     */
    public int getAttributeType(String ns, String ln) {
        if (ns == null) {
            if (ln.equals(SVG_X_ATTRIBUTE)
                    || ln.equals(SVG_Y_ATTRIBUTE)
                    || ln.equals(SVG_WIDTH_ATTRIBUTE)
                    || ln.equals(SVG_HEIGHT_ATTRIBUTE)) {
                return SVGTypes.TYPE_LENGTH;
            }
        }
        return super.getAttributeType(ns, ln);
    }

    // AnimationTarget ///////////////////////////////////////////////////////

    /**
     * Gets how percentage values are interpreted by the given attribute.
     * x is relative to viewport width, y to viewport height.
     */
    protected short getAttributePercentageInterpretation(String ns, String ln) {
        if (ns == null) {
            if (ln.equals(SVG_X_ATTRIBUTE)) {
                return PERCENTAGE_VIEWPORT_WIDTH;
            }
            if (ln.equals(SVG_Y_ATTRIBUTE)) {
                return PERCENTAGE_VIEWPORT_HEIGHT;
            }
        }
        return super.getAttributePercentageInterpretation(ns, ln);
    }

    /**
     * Updates an attribute value in this target.
     */
    public void updateAttributeValue(String ns, String ln,
                                     AnimatableValue val) {
        if (ns == null) {
            if (ln.equals(SVG_X_ATTRIBUTE)) {
                updateLengthAttributeValue(getX(), val);
                return;
            } else if (ln.equals(SVG_Y_ATTRIBUTE)) {
                updateLengthAttributeValue(getY(), val);
                return;
            } else if (ln.equals(SVG_WIDTH_ATTRIBUTE)) {
                updateLengthAttributeValue(getWidth(), val);
                return;
            } else if (ln.equals(SVG_HEIGHT_ATTRIBUTE)) {
                updateLengthAttributeValue(getHeight(), val);
                return;
            }
        }
        super.updateAttributeValue(ns, ln, val);
    }

    /**
     * Returns the underlying value of an animatable XML attribute.
     */
    public AnimatableValue getUnderlyingValue(String ns, String ln) {
        if (ns == null) {
            if (ln.equals(SVG_X_ATTRIBUTE)) {
                return getBaseValue
                    (getX(), PERCENTAGE_VIEWPORT_WIDTH);
            } else if (ln.equals(SVG_Y_ATTRIBUTE)) {
                return getBaseValue
                    (getY(), PERCENTAGE_VIEWPORT_HEIGHT);
            } else if (ln.equals(SVG_WIDTH_ATTRIBUTE)) {
                return getBaseValue
                    (getWidth(), PERCENTAGE_VIEWPORT_WIDTH);
            } else if (ln.equals(SVG_HEIGHT_ATTRIBUTE)) {
                return getBaseValue
                    (getHeight(), PERCENTAGE_VIEWPORT_HEIGHT);
            }
        }
        return super.getUnderlyingValue(ns, ln);
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2019_06_01.implementation;

import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.network.v2019_06_01.AddressSpace;
import com.microsoft.azure.management.network.v2019_06_01.VirtualNetworkPeeringState;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;

/**
 * Peerings in a virtual network resource.
 *
 * NOTE(review): this file is generated by AutoRest; prefer regenerating from
 * the API specification over hand-editing.
 */
@JsonFlatten
public class VirtualNetworkPeeringInner extends SubResource {
    /**
     * Whether the VMs in the local virtual network space would be able to
     * access the VMs in remote virtual network space.
     */
    @JsonProperty(value = "properties.allowVirtualNetworkAccess")
    private Boolean allowVirtualNetworkAccess;

    /**
     * Whether the forwarded traffic from the VMs in the local virtual network
     * will be allowed/disallowed in remote virtual network.
     */
    @JsonProperty(value = "properties.allowForwardedTraffic")
    private Boolean allowForwardedTraffic;

    /**
     * If gateway links can be used in remote virtual networking to link to
     * this virtual network.
     */
    @JsonProperty(value = "properties.allowGatewayTransit")
    private Boolean allowGatewayTransit;

    /**
     * If remote gateways can be used on this virtual network. If the flag is
     * set to true, and allowGatewayTransit on remote peering is also true,
     * virtual network will use gateways of remote virtual network for transit.
     * Only one peering can have this flag set to true. This flag cannot be set
     * if virtual network already has a gateway.
     */
    @JsonProperty(value = "properties.useRemoteGateways")
    private Boolean useRemoteGateways;

    /**
     * The reference of the remote virtual network. The remote virtual network
     * can be in the same or different region (preview). See here to register
     * for the preview and learn more
     * (https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
     */
    @JsonProperty(value = "properties.remoteVirtualNetwork")
    private SubResource remoteVirtualNetwork;

    /**
     * The reference of the remote virtual network address space.
     */
    @JsonProperty(value = "properties.remoteAddressSpace")
    private AddressSpace remoteAddressSpace;

    /**
     * The status of the virtual network peering. Possible values include:
     * 'Initiated', 'Connected', 'Disconnected'.
     */
    @JsonProperty(value = "properties.peeringState")
    private VirtualNetworkPeeringState peeringState;

    /**
     * The provisioning state of the resource.
     */
    @JsonProperty(value = "properties.provisioningState")
    private String provisioningState;

    /**
     * The name of the resource that is unique within a resource group. This
     * name can be used to access the resource.
     */
    @JsonProperty(value = "name")
    private String name;

    /**
     * A unique read-only string that changes whenever the resource is updated.
     */
    @JsonProperty(value = "etag")
    private String etag;

    /**
     * Get whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space.
     *
     * @return the allowVirtualNetworkAccess value
     */
    public Boolean allowVirtualNetworkAccess() {
        return this.allowVirtualNetworkAccess;
    }

    /**
     * Set whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space.
     *
     * @param allowVirtualNetworkAccess the allowVirtualNetworkAccess value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withAllowVirtualNetworkAccess(Boolean allowVirtualNetworkAccess) {
        this.allowVirtualNetworkAccess = allowVirtualNetworkAccess;
        return this;
    }

    /**
     * Get whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network.
     *
     * @return the allowForwardedTraffic value
     */
    public Boolean allowForwardedTraffic() {
        return this.allowForwardedTraffic;
    }

    /**
     * Set whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network.
     *
     * @param allowForwardedTraffic the allowForwardedTraffic value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withAllowForwardedTraffic(Boolean allowForwardedTraffic) {
        this.allowForwardedTraffic = allowForwardedTraffic;
        return this;
    }

    /**
     * Get if gateway links can be used in remote virtual networking to link to this virtual network.
     *
     * @return the allowGatewayTransit value
     */
    public Boolean allowGatewayTransit() {
        return this.allowGatewayTransit;
    }

    /**
     * Set if gateway links can be used in remote virtual networking to link to this virtual network.
     *
     * @param allowGatewayTransit the allowGatewayTransit value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withAllowGatewayTransit(Boolean allowGatewayTransit) {
        this.allowGatewayTransit = allowGatewayTransit;
        return this;
    }

    /**
     * Get if remote gateways can be used on this virtual network. If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network already has a gateway.
     *
     * @return the useRemoteGateways value
     */
    public Boolean useRemoteGateways() {
        return this.useRemoteGateways;
    }

    /**
     * Set if remote gateways can be used on this virtual network. If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network already has a gateway.
     *
     * @param useRemoteGateways the useRemoteGateways value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withUseRemoteGateways(Boolean useRemoteGateways) {
        this.useRemoteGateways = useRemoteGateways;
        return this;
    }

    /**
     * Get the reference of the remote virtual network. The remote virtual network can be in the same or different region (preview). See here to register for the preview and learn more (https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
     *
     * @return the remoteVirtualNetwork value
     */
    public SubResource remoteVirtualNetwork() {
        return this.remoteVirtualNetwork;
    }

    /**
     * Set the reference of the remote virtual network. The remote virtual network can be in the same or different region (preview). See here to register for the preview and learn more (https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
     *
     * @param remoteVirtualNetwork the remoteVirtualNetwork value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withRemoteVirtualNetwork(SubResource remoteVirtualNetwork) {
        this.remoteVirtualNetwork = remoteVirtualNetwork;
        return this;
    }

    /**
     * Get the reference of the remote virtual network address space.
     *
     * @return the remoteAddressSpace value
     */
    public AddressSpace remoteAddressSpace() {
        return this.remoteAddressSpace;
    }

    /**
     * Set the reference of the remote virtual network address space.
     *
     * @param remoteAddressSpace the remoteAddressSpace value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withRemoteAddressSpace(AddressSpace remoteAddressSpace) {
        this.remoteAddressSpace = remoteAddressSpace;
        return this;
    }

    /**
     * Get the status of the virtual network peering. Possible values include: 'Initiated', 'Connected', 'Disconnected'.
     *
     * @return the peeringState value
     */
    public VirtualNetworkPeeringState peeringState() {
        return this.peeringState;
    }

    /**
     * Set the status of the virtual network peering. Possible values include: 'Initiated', 'Connected', 'Disconnected'.
     *
     * @param peeringState the peeringState value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withPeeringState(VirtualNetworkPeeringState peeringState) {
        this.peeringState = peeringState;
        return this;
    }

    /**
     * Get the provisioning state of the resource.
     *
     * @return the provisioningState value
     */
    public String provisioningState() {
        return this.provisioningState;
    }

    /**
     * Set the provisioning state of the resource.
     *
     * @param provisioningState the provisioningState value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withProvisioningState(String provisioningState) {
        this.provisioningState = provisioningState;
        return this;
    }

    /**
     * Get the name of the resource that is unique within a resource group. This name can be used to access the resource.
     *
     * @return the name value
     */
    public String name() {
        return this.name;
    }

    /**
     * Set the name of the resource that is unique within a resource group. This name can be used to access the resource.
     *
     * @param name the name value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Get a unique read-only string that changes whenever the resource is updated.
     *
     * @return the etag value
     */
    public String etag() {
        return this.etag;
    }

    /**
     * Set a unique read-only string that changes whenever the resource is updated.
     *
     * @param etag the etag value to set
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withEtag(String etag) {
        this.etag = etag;
        return this;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.configuration;

import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.stream.Collectors.joining;
import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_CLUSTER_CONFIGURATION;
import static org.apache.geode.distributed.ConfigurationProperties.LOAD_CLUSTER_CONFIGURATION_FROM_DIR;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.NAME;
import static org.apache.geode.distributed.ConfigurationProperties.USE_CLUSTER_CONFIGURATION;
import static org.apache.geode.internal.AvailablePortHelper.getRandomAvailableTCPPorts;
import static org.apache.geode.test.dunit.Host.getHost;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.Properties;

import org.awaitility.Awaitility;
import org.awaitility.core.ConditionFactory;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.Region;
import org.apache.geode.distributed.Locator;
import org.apache.geode.distributed.internal.ClusterConfigurationService;
import org.apache.geode.distributed.internal.InternalLocator;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.junit.categories.DistributedTest;
import org.apache.geode.test.junit.categories.FlakyTest;
import org.apache.geode.util.test.TestUtil;

/**
 * Distributed tests for loading cluster configuration from a directory.
 *
 * <p>Convention used throughout: VMs 0..1 host locators, VMs 2..3 host cache
 * servers.
 */
@Category({DistributedTest.class, FlakyTest.class}) // GEODE-1165
public class ClusterConfigurationServiceUsingDirDUnitTest extends JUnit4CacheTestCase {

  /**
   * Destroys the shared configuration on each locator VM so state does not
   * leak between test methods.
   */
  @Override
  public final void preTearDownCacheTestCase() throws Exception {
    for (int i = 0; i < 2; i++) {
      VM vm = getHost(0).getVM(i);
      vm.invoke("Removing shared configuration", () -> {
        InternalLocator locator = InternalLocator.getLocator();
        if (locator == null) {
          return;
        }

        ClusterConfigurationService sharedConfig = locator.getSharedConfiguration();
        if (sharedConfig != null) {
          sharedConfig.destroySharedConfiguration();
        }
      });
    }
  }

  /**
   * One locator loads cluster.xml from disk; servers must see the region it
   * defines.
   */
  @Test
  public void basicClusterConfigDirWithOneLocator() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(1);
    final int locatorCount = ports.length;

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Same as {@link #basicClusterConfigDirWithOneLocator()} but with two
   * locators sharing the configuration.
   */
  @Test
  public void basicClusterConfigDirWithTwoLocators() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Starts with an empty configuration, restarts all locators with a new
   * cluster.xml, then restarts all servers at once and expects the new region.
   */
  @Test
  public void updateClusterConfigDirWithTwoLocatorsNoRollingServerRestart() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-empty.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region absence", () -> {
        Region r = getRootRegion("newReplicatedRegion");
        assertNull("Region does exist", r);
      });
    }

    // Shut down the locators in reverse order to how we will start them up in the next step.
    // Unless we start them asynchronously, the older one will want to wait for a new diskstore
    // to become available and will time out.
    for (int i = locatorCount - 1; i >= 0; i--) {
      VM vm = getHost(0).getVM(i);
      stopLocator(vm);
    }

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    // Disconnect every server first so they all rejoin against the new config.
    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      vm.invoke(() -> disconnectFromDS());
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Starts with an empty configuration, restarts all locators with a new
   * cluster.xml, then restarts the servers one at a time (rolling).
   */
  @Test
  public void updateClusterConfigDirWithTwoLocatorsAndRollingServerRestart() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-empty.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region absence", () -> {
        Region r = getRootRegion("newReplicatedRegion");
        assertNull("Region does exist", r);
      });
    }

    // Shut down the locators in reverse order to how we will start them up in the next step.
    // Unless we start them asynchronously, the older one will want to wait for a new diskstore
    // to become available and will time out.
    for (int i = locatorCount - 1; i >= 0; i--) {
      VM vm = getHost(0).getVM(i);
      stopLocator(vm);
    }

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Rolls the locators one at a time onto the new configuration and then rolls
   * the servers.
   */
  @Test
  public void updateClusterConfigDirWithTwoLocatorsRollingRestartAndRollingServerRestart()
      throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-empty.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region absence", () -> {
        Region r = getRootRegion("newReplicatedRegion");
        assertNull("Region does exist", r);
      });
    }

    // Roll the locators
    for (int i = locatorCount - 1; i >= 0; i--) {
      VM vm = getHost(0).getVM(i);
      stopLocator(vm);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    // Roll the servers
    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Copies the named classpath resource into the locator's cluster_config
   * directory as cluster.xml on the given VM.
   */
  private void copyClusterXml(final VM vm, final String clusterXml) {
    vm.invoke("Copying new cluster.xml from " + clusterXml, () -> {
      String clusterXmlPath = TestUtil
          .getResourcePath(ClusterConfigurationServiceUsingDirDUnitTest.class, clusterXml);
      // try-with-resources: the original version leaked this stream. A failed
      // open throws FileNotFoundException, so no null check is needed.
      try (InputStream cacheXml = new FileInputStream(clusterXmlPath)) {
        Files.createDirectories(Paths.get("cluster_config", "cluster"));
        Files.copy(cacheXml, Paths.get("cluster_config", "cluster", "cluster.xml"),
            StandardCopyOption.REPLACE_EXISTING);
      }
    });
  }

  /**
   * Starts a locator on the given VM, loading cluster configuration from the
   * cluster_config directory. With multiple locators, each points at the
   * other's port so they form one distributed system.
   */
  private void startLocator(final VM vm, final int i, final int[] locatorPorts) {
    vm.invoke("Creating locator on " + vm, () -> {
      final String locatorName = "locator" + i;
      final File logFile = new File("locator-" + i + ".log");
      final Properties props = new Properties();

      props.setProperty(NAME, locatorName);
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(ENABLE_CLUSTER_CONFIGURATION, "true");
      props.setProperty(LOAD_CLUSTER_CONFIGURATION_FROM_DIR, "true");

      if (locatorPorts.length > 1) {
        int otherLocatorPort = locatorPorts[(i + 1) % locatorPorts.length];
        props.setProperty(LOCATORS, "localhost[" + otherLocatorPort + "]");
      }

      Locator.startLocatorAndDS(locatorPorts[i], logFile, props);
    });
  }

  /** Blocks until the locator on the given VM reports its shared configuration is running. */
  private void waitForSharedConfiguration(final VM vm) {
    vm.invoke("Waiting for shared configuration", () -> {
      final InternalLocator locator = InternalLocator.getLocator();
      await().until(() -> {
        return locator.isSharedConfigurationRunning();
      });
    });
  }

  /** Stops the locator running on the given VM and disconnects the VM from the DS. */
  private void stopLocator(final VM vm) {
    vm.invoke("Stopping locator on " + vm, () -> {
      InternalLocator locator = InternalLocator.getLocator();
      assertNotNull("No locator found", locator);
      locator.stop();
      disconnectAllFromDS();
    });
  }

  /**
   * (Re)creates a cache server on the given VM that pulls its configuration
   * from the cluster configuration service.
   */
  private void restartCache(final VM vm, final int i, final int[] locatorPorts) {
    vm.invoke("Creating cache on VM " + i, () -> {
      disconnectFromDS();

      final Properties props = new Properties();
      props.setProperty(NAME, "member" + i);
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(LOCATORS, getLocatorStr(locatorPorts));
      props.setProperty(LOG_FILE, "server-" + i + ".log");
      props.setProperty(USE_CLUSTER_CONFIGURATION, "true");
      props.setProperty(ENABLE_CLUSTER_CONFIGURATION, "true");

      getSystem(props);
      getCache();
    });
  }

  /** Builds a locators connect string like {@code localhost[p1],localhost[p2]}. */
  private String getLocatorStr(final int[] locatorPorts) {
    return Arrays.stream(locatorPorts).mapToObj(p -> "localhost[" + p + "]").collect(joining(","));
  }

  /** Awaitility factory with the common 2 minute timeout used by these tests. */
  private ConditionFactory await() {
    return Awaitility.await().atMost(2, MINUTES);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metrics2.sink;

import java.io.IOException;
import java.net.URI;
import java.util.Calendar;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.sink.RollingFileSystemSinkTestBase.MyMetrics1;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Test the {@link RollingFileSystemSink} class in the context of HDFS.
 */
public class TestRollingFileSystemSinkWithHdfs
    extends RollingFileSystemSinkTestBase {
  private static final int NUM_DATANODES = 4;
  private MiniDFSCluster cluster;

  /**
   * Create a {@link MiniDFSCluster} instance with four nodes.  The
   * node count is required to allow append to function. Also clear the
   * sink's test flags.
   *
   * @throws IOException thrown if cluster creation fails
   */
  @Before
  public void setupHdfs() throws IOException {
    Configuration conf = new Configuration();

    // It appears that since HDFS-265, append is always enabled.
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATANODES).build();

    // Also clear sink flags
    RollingFileSystemSink.hasFlushed = false;
  }

  /**
   * Stop the {@link MiniDFSCluster}.  Safe to call more than once: several
   * tests shut HDFS down mid-test, and the {@code @After} hook then calls
   * this again.
   */
  @After
  public void shutdownHdfs() {
    if (cluster != null) {
      cluster.shutdown();
      // Null the reference so a mid-test shutdown doesn't cause a second
      // shutdown() from the @After hook.
      cluster = null;
    }
  }

  /**
   * Test writing logs to HDFS.
   *
   * @throws Exception thrown when things break
   */
  @Test
  public void testWrite() throws Exception {
    String path = "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";
    MetricsSystem ms = initMetricsSystem(path, false, true);

    assertMetricsContents(doWriteTest(ms, path, 1));
  }

  /**
   * Test writing logs to HDFS if append is enabled and the log file already
   * exists.
   *
   * @throws Exception thrown when things break
   */
  @Test
  public void testAppend() throws Exception {
    String path = "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";

    assertExtraContents(doAppendTest(path, false, true, 1));
  }

  /**
   * Test writing logs to HDFS if append is enabled, the log file already
   * exists, and the sink is set to ignore errors.
   *
   * @throws Exception thrown when things break
   */
  @Test
  public void testSilentAppend() throws Exception {
    String path = "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";

    assertExtraContents(doAppendTest(path, true, true, 1));
  }

  /**
   * Test writing logs to HDFS without append enabled, when the log file
   * already exists.
   *
   * @throws Exception thrown when things break
   */
  @Test
  public void testNoAppend() throws Exception {
    String path = "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";

    assertMetricsContents(doAppendTest(path, false, false, 2));
  }

  /**
   * Test writing logs to HDFS without append enabled, with ignore errors
   * enabled, and when the log file already exists.
   *
   * @throws Exception thrown when things break
   */
  @Test
  public void testSilentOverwrite() throws Exception {
    String path = "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";

    assertMetricsContents(doAppendTest(path, true, false, 2));
  }

  /**
   * Test that writing to HDFS fails when HDFS is unavailable.
   *
   * @throws IOException thrown when reading or writing log files
   */
  @Test
  public void testFailedWrite() throws IOException {
    final String path =
        "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";
    MetricsSystem ms = initMetricsSystem(path, false, false);

    new MyMetrics1().registerWith(ms);

    shutdownHdfs();
    MockSink.errored = false;

    ms.publishMetricsNow(); // publish the metrics

    assertTrue("No exception was generated while writing metrics "
        + "even though HDFS was unavailable", MockSink.errored);

    try {
      ms.stop();
    } finally {
      ms.shutdown();
    }
  }

  /**
   * Test that closing a file in HDFS fails when HDFS is unavailable.
   *
   * @throws IOException thrown when reading or writing log files
   */
  @Test
  public void testFailedClose() throws IOException {
    final String path =
        "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";
    MetricsSystem ms = initMetricsSystem(path, false, false);

    new MyMetrics1().registerWith(ms);

    ms.publishMetricsNow(); // publish the metrics

    shutdownHdfs();
    MockSink.errored = false;

    try {
      ms.stop();

      assertTrue("No exception was generated while stopping sink "
          + "even though HDFS was unavailable", MockSink.errored);
    } catch (MetricsException ex) {
      // Expected
    } finally {
      ms.shutdown();
    }
  }

  /**
   * Test that writing to HDFS fails silently when HDFS is unavailable.
   *
   * @throws IOException thrown when reading or writing log files
   * @throws java.lang.InterruptedException thrown if interrupted
   */
  @Test
  public void testSilentFailedWrite() throws IOException, InterruptedException {
    final String path =
        "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";
    MetricsSystem ms = initMetricsSystem(path, true, false);

    new MyMetrics1().registerWith(ms);

    shutdownHdfs();
    MockSink.errored = false;

    ms.publishMetricsNow(); // publish the metrics

    assertFalse("An exception was generated writing metrics "
        + "while HDFS was unavailable, even though the sink is set to "
        + "ignore errors", MockSink.errored);

    try {
      ms.stop();
    } finally {
      ms.shutdown();
    }
  }

  /**
   * Test that closing a file in HDFS silently fails when HDFS is unavailable.
   *
   * @throws IOException thrown when reading or writing log files
   */
  @Test
  public void testSilentFailedClose() throws IOException {
    final String path =
        "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";
    MetricsSystem ms = initMetricsSystem(path, true, false);

    new MyMetrics1().registerWith(ms);

    ms.publishMetricsNow(); // publish the metrics

    shutdownHdfs();
    MockSink.errored = false;

    try {
      ms.stop();

      assertFalse("An exception was generated stopping sink "
          + "while HDFS was unavailable, even though the sink is set to "
          + "ignore errors", MockSink.errored);
    } finally {
      ms.shutdown();
    }
  }

  /**
   * This test specifically checks whether the flusher thread is automatically
   * flushing the files.  It unfortunately can only test with the alternative
   * flushing schedule (because of test timing), but it's better than nothing.
   *
   * @throws Exception thrown if something breaks
   */
  @Test
  public void testFlushThread() throws Exception {
    // Cause the sink's flush thread to be run immediately after the second
    // metrics log is written
    RollingFileSystemSink.forceFlush = true;

    String path = "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";
    MetricsSystem ms = initMetricsSystem(path, true, false, false);

    new MyMetrics1().registerWith(ms);

    // Publish the metrics
    ms.publishMetricsNow();

    // Pubish again because the first write seems to get properly flushed
    // regardless.
    ms.publishMetricsNow();

    int count = 0;

    try {
      // Sleep until the flusher has run.  This should never actually need to
      // sleep, but the sleep is here to make sure this test isn't flakey.
      while (!RollingFileSystemSink.hasFlushed) {
        Thread.sleep(10L);

        if (++count > 1000) {
          fail("Flush thread did not run within 10 seconds");
        }
      }

      Calendar now = Calendar.getInstance();
      Path currentDir = new Path(path, DATE_FORMAT.format(now.getTime()) + "00");
      FileSystem fs = FileSystem.newInstance(new URI(path), new Configuration());
      Path currentFile =
          findMostRecentLogFile(fs, new Path(currentDir, getLogFilename()));
      FileStatus status = fs.getFileStatus(currentFile);

      // Each metrics record is 118+ bytes, depending on hostname
      assertTrue("The flusher thread didn't flush the log contents. Expected "
          + "at least 236 bytes in the log file, but got " + status.getLen(),
          status.getLen() >= 236);
    } finally {
      RollingFileSystemSink.forceFlush = false;

      try {
        ms.stop();
      } finally {
        ms.shutdown();
      }
    }
  }

  /**
   * Test that a failure to connect to HDFS does not cause the init() method
   * to fail.
   */
  @Test
  public void testInitWithNoHDFS() {
    String path = "hdfs://" + cluster.getNameNode().getHostAndPort() + "/tmp";

    shutdownHdfs();
    MockSink.errored = false;
    initMetricsSystem(path, true, false);

    assertTrue("The sink was not initialized as expected",
        MockSink.initialized);
    assertFalse("The sink threw an unexpected error on initialization",
        MockSink.errored);
  }
}
/**
 * Copyright 2013 OpenSocial Foundation
 * Copyright 2013 International Business Machines Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Utility library for working with Activity Streams Actions
 * Requires underscorejs.
 *
 * @author James M Snell (jasnell@us.ibm.com)
 */
package com.ibm.common.activitystreams.internal;

import static com.google.gson.internal.bind.TypeAdapters.NUMBER;
import static com.ibm.common.activitystreams.internal.Adapters.DATE;
import static com.ibm.common.activitystreams.internal.Adapters.DATETIME;
import static com.ibm.common.activitystreams.internal.Adapters.NLV;
import static com.ibm.common.activitystreams.internal.Adapters.TABLE;
import static com.ibm.common.activitystreams.internal.Adapters.OPTIONAL;
import static com.ibm.common.activitystreams.internal.Adapters.ACTIONS;
import static com.ibm.common.activitystreams.internal.Adapters.DURATION;
import static com.ibm.common.activitystreams.internal.Adapters.INTERVAL;
import static com.ibm.common.activitystreams.internal.Adapters.ITERABLE;
import static com.ibm.common.activitystreams.internal.Adapters.MIMETYPE;
import static com.ibm.common.activitystreams.internal.Adapters.MULTIMAP;
import static com.ibm.common.activitystreams.internal.Adapters.RANGE;
import static com.ibm.common.activitystreams.internal.Adapters.PERIOD;
import static com.ibm.common.activitystreams.internal.Adapters.forEnum;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Date;

import org.joda.time.DateTime;
import org.joda.time.ReadableDuration;
import org.joda.time.ReadableInterval;
import org.joda.time.ReadablePeriod;

import com.google.common.base.Optional;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.common.collect.Range;
import com.google.common.collect.Table;
import com.google.common.net.MediaType;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.internal.LazilyParsedNumber;
import com.ibm.common.activitystreams.ASObject;
import com.ibm.common.activitystreams.ActionsValue;
import com.ibm.common.activitystreams.Activity;
import com.ibm.common.activitystreams.Collection;
import com.ibm.common.activitystreams.LinkValue;
import com.ibm.common.activitystreams.NLV;
import com.ibm.common.activitystreams.TypeValue;
import com.ibm.common.activitystreams.Writable;

/**
 * Thin facade over a pre-configured {@link Gson} instance that knows how to
 * serialize and deserialize Activity Streams objects. Instances are created
 * through the {@link Builder} obtained from {@link #make()}, and are
 * immutable once built.
 *
 * @author james
 * @version $Revision: 1.0 $
 */
public final class GsonWrapper {

  /**
   * Creates a new {@link Builder} for configuring a GsonWrapper.
   *
   * @return Builder
   */
  public static final Builder make() {
    return new Builder();
  }

  /**
   * Fluent builder for {@link GsonWrapper}. Collects the charset,
   * pretty-print flag, schema and any custom adapters before {@link #get()}
   * constructs the wrapper.
   *
   * @author james
   * @version $Revision: 1.0 $
   */
  public static final class Builder
    implements Supplier<GsonWrapper> {

    // Charset used when bridging byte streams to character streams.
    private String charset = "UTF-8";
    private boolean pretty;
    private Schema schema = null; // default
    private ImmutableList.Builder<AdapterEntry<?>> adapters =
      ImmutableList.builder();

    /**
     * Sets the charset used for reading and writing byte streams.
     *
     * @param charset String
     * @return Builder
     */
    public Builder charset(String charset) {
      this.charset = charset;
      return this;
    }

    /**
     * Sets the schema; when absent, {@code Schema.make().get()} is used.
     *
     * @param schema Schema
     * @return Builder
     */
    public Builder schema(Schema schema) {
      this.schema = schema;
      return this;
    }

    /**
     * Registers a non-hierarchical custom adapter for the given type.
     *
     * @param type Class&lt;? extends T&gt;
     * @param adapter Adapter&lt;T&gt;
     * @return Builder
     */
    public <T>Builder adapter(
      Class<? extends T> type,
      Adapter<T> adapter) {
      return adapter(type,adapter,false);
    }

    /**
     * Registers a custom adapter for the given type.
     *
     * @param type Class&lt;? extends T&gt;
     * @param adapter Adapter&lt;T&gt;
     * @param hier boolean true to register as a type-hierarchy adapter
     * @return Builder
     */
    public <T>Builder adapter(
      Class<? extends T> type,
      Adapter<T> adapter,
      boolean hier) {
      adapters.add(new AdapterEntry<T>(type,adapter,hier));
      return this;
    }

    /**
     * Enables or disables pretty-printed JSON output.
     *
     * @param on boolean
     * @return Builder
     */
    public Builder prettyPrint(boolean on) {
      this.pretty = on;
      return this;
    }

    /**
     * Enables pretty-printed JSON output.
     *
     * @return Builder
     */
    public Builder prettyPrint() {
      return prettyPrint(true);
    }

    /**
     * Builds the configured GsonWrapper.
     *
     * @return GsonWrapper
     * @see com.google.common.base.Supplier#get()
     */
    public GsonWrapper get() {
      return new GsonWrapper(this);
    }

  }

  /**
   * Immutable record pairing a type with its custom adapter and the flag
   * saying whether it should be registered for the whole type hierarchy.
   *
   * @author james
   * @version $Revision: 1.0 $
   */
  private final static class AdapterEntry<T> {
    private final Class<? extends T> type;
    private final Adapter<T> adapter;
    private final boolean hier;

    /**
     * Constructor for AdapterEntry.
     *
     * @param type Class&lt;? extends T&gt;
     * @param adapter Adapter&lt;T&gt;
     * @param hier boolean
     */
    AdapterEntry(
      Class<? extends T> type,
      Adapter<T> adapter,
      boolean hier) {
      this.type = type;
      this.adapter = adapter;
      this.hier = hier;
    }
  }

  private final Gson gson;
  private final String charset;

  /**
   * Constructor for GsonWrapper. Builds the underlying {@link Gson} from the
   * builder's settings; HTML escaping is always disabled.
   *
   * @param builder Builder
   */
  protected GsonWrapper(Builder builder) {
    Schema schema =
      builder.schema != null ?
        builder.schema :
        Schema.make().get();
    ASObjectAdapter base =
      new ASObjectAdapter(schema);
    GsonBuilder b =
      initGsonBuilder(
        builder,
        schema,
        base,
        builder.adapters.build());
    if (builder.pretty)
      b.setPrettyPrinting();
    b.disableHtmlEscaping();
    this.gson = b.create();
    this.charset = builder.charset;
  }

  /**
   * Method initGsonBuilder. Registers the standard Activity Streams adapters
   * and then any caller-supplied ones.
   *
   * NOTE(review): Gson resolves hierarchy adapters by registration order, so
   * the ordering of the calls below is load-bearing — do not reorder.
   *
   * @param builder Builder
   * @param schema Schema used by the type/link value adapters
   * @param base fallback adapter for caller entries with a null adapter
   * @param adapters caller-supplied adapter entries
   * @return GsonBuilder
   */
  private static GsonBuilder initGsonBuilder(
    Builder builder,
    Schema schema,
    ASObjectAdapter base,
    Iterable<AdapterEntry<?>> adapters) {

    GsonBuilder gson = new GsonBuilder()
      .registerTypeHierarchyAdapter(TypeValue.class, new TypeValueAdapter(schema))
      .registerTypeHierarchyAdapter(LinkValue.class, new LinkValueAdapter(schema))
      .registerTypeHierarchyAdapter(Iterable.class, ITERABLE)
      .registerTypeHierarchyAdapter(ASObject.class, base)
      .registerTypeHierarchyAdapter(Collection.class, base)
      .registerTypeHierarchyAdapter(Activity.class, base)
      .registerTypeHierarchyAdapter(NLV.class, NLV)
      .registerTypeHierarchyAdapter(ActionsValue.class, ACTIONS)
      .registerTypeHierarchyAdapter(Optional.class, OPTIONAL)
      .registerTypeHierarchyAdapter(Range.class, RANGE)
      .registerTypeHierarchyAdapter(Table.class, TABLE)
      .registerTypeHierarchyAdapter(LazilyParsedNumber.class, NUMBER)
      .registerTypeHierarchyAdapter(LazilyParsedNumberComparable.class, NUMBER)
      .registerTypeHierarchyAdapter(ReadableDuration.class, DURATION)
      .registerTypeHierarchyAdapter(ReadablePeriod.class, PERIOD)
      .registerTypeHierarchyAdapter(ReadableInterval.class, INTERVAL)
      .registerTypeAdapter(
        Activity.Status.class,
        forEnum(
          Activity.Status.class,
          Activity.Status.OTHER))
      .registerTypeAdapter(Date.class, DATE)
      .registerTypeAdapter(DateTime.class, DATETIME)
      .registerTypeAdapter(MediaType.class, MIMETYPE)
      .registerTypeHierarchyAdapter(Multimap.class, MULTIMAP);

    // Custom adapters; a null adapter falls back to the base ASObject adapter.
    for (AdapterEntry<?> entry : adapters) {
      if (entry.hier)
        gson.registerTypeHierarchyAdapter(
          entry.type,
          entry.adapter!=null ?
            entry.adapter : base);
      else
        gson.registerTypeAdapter(
          entry.type,
          entry.adapter!=null ?
            entry.adapter:base);
    }

    return gson;
  }

  /**
   * Method write. Serializes to the stream using the configured charset;
   * any failure is rethrown unchecked via Throwables.propagate.
   *
   * @param w Writable
   * @param out OutputStream
   */
  public void write(Writable w, OutputStream out) {
    try {
      OutputStreamWriter wout =
        new OutputStreamWriter(out, charset);
      gson.toJson(w,wout);
      wout.flush();
    } catch (Throwable t) {
      throw Throwables.propagate(t);
    }
  }

  /**
   * Method write.
   *
   * @param w Writable
   * @param out Writer
   */
  public void write(Writable w, Writer out) {
    gson.toJson(w,out);
  }

  /**
   * Method write.
   *
   * @param w Writable
   * @return String the serialized JSON
   */
  public String write(Writable w) {
    StringWriter sw =
      new StringWriter();
    write(w,sw);
    return sw.toString();
  }

  /**
   * Method readAs. Deserializes from the stream using the configured charset.
   *
   * @param in InputStream
   * @param type Class&lt;? extends A&gt;
   * @return A
   */
  public <A extends ASObject>A readAs(InputStream in, Class<? extends A> type) {
    try {
      return readAs(new InputStreamReader(in, charset), type);
    } catch (Throwable t) {
      throw Throwables.propagate(t);
    }
  }

  /**
   * Method readAs.
   *
   * @param in Reader
   * @param type Class&lt;? extends A&gt;
   * @return A
   */
  public <A extends ASObject>A readAs(Reader in, Class<? extends A> type) {
    return (A)gson.fromJson(in, type);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.client.thin;

import java.io.IOException;
import java.lang.reflect.Array;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.cache.expiry.ExpiryPolicy;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.binary.BinaryRawWriter;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheKeyConfiguration;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CacheRebalanceMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.PartitionLossPolicy;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.QueryIndex;
import org.apache.ignite.cache.QueryIndexType;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.client.ClientCacheConfiguration;
import org.apache.ignite.internal.binary.BinaryContext;
import org.apache.ignite.internal.binary.BinaryFieldMetadata;
import org.apache.ignite.internal.binary.BinaryMetadata;
import org.apache.ignite.internal.binary.BinaryObjectImpl;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.binary.BinaryReaderExImpl;
import org.apache.ignite.internal.binary.BinaryReaderHandles;
import org.apache.ignite.internal.binary.BinarySchema;
import org.apache.ignite.internal.binary.BinaryThreadLocalContext;
import org.apache.ignite.internal.binary.BinaryUtils;
import org.apache.ignite.internal.binary.BinaryWriterExImpl;
import org.apache.ignite.internal.binary.streams.BinaryHeapInputStream;
import org.apache.ignite.internal.binary.streams.BinaryInputStream;
import org.apache.ignite.internal.binary.streams.BinaryOutputStream;
import org.apache.ignite.internal.processors.platform.cache.expiry.PlatformExpiryPolicy;
import org.apache.ignite.internal.util.MutableSingletonList;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;

import static org.apache.ignite.internal.client.thin.ProtocolVersionFeature.EXPIRY_POLICY;
import static org.apache.ignite.internal.client.thin.ProtocolVersionFeature.QUERY_ENTITY_PRECISION_AND_SCALE;
import static org.apache.ignite.internal.processors.platform.cache.expiry.PlatformExpiryPolicy.convertDuration;

/**
 * Shared serialization/deserialization utils.
 *
 * NOTE(review): every method in this class encodes or decodes part of the
 * thin-client wire format. The exact order of read/write calls IS the
 * protocol — do not reorder statements.
 */
final class ClientUtils {
    /** Marshaller. */
    private final ClientBinaryMarshaller marsh;

    /**
     * Constructor.
     *
     * @param marsh Thin client binary marshaller used for all object
     *      (de)serialization in this helper.
     */
    ClientUtils(ClientBinaryMarshaller marsh) {
        this.marsh = marsh;
    }

    /**
     * Get cache ID by cache name.
     *
     * @param name Cache name (must not be null).
     * @return Cache ID (the name's {@code hashCode}, matching the server-side
     *      cache ID derivation).
     */
    static int cacheId(String name) {
        Objects.requireNonNull(name, "name");

        return name.hashCode();
    }

    /**
     * Writes a collection as a count-prefixed sequence. A null or empty
     * collection is written as a single zero count.
     *
     * @param col Collection to serialize.
     * @param out Output stream.
     * @param elemWriter Collection element serializer
     */
    static <E> void collection(
        Collection<E> col,
        BinaryOutputStream out,
        BiConsumer<BinaryOutputStream, E> elemWriter
    ) {
        if (col == null || col.isEmpty())
            out.writeInt(0);
        else {
            out.writeInt(col.size());

            for (E e : col)
                elemWriter.accept(out, e);
        }
    }

    /**
     * Array overload of {@link #collection(Collection, BinaryOutputStream, BiConsumer)}.
     *
     * @param col Collection to serialize.
     * @param out Output stream.
     * @param elemWriter Collection element serializer
     */
    static <E> void collection(E[] col, BinaryOutputStream out, BiConsumer<BinaryOutputStream, E> elemWriter) {
        if (col == null || col.length == 0)
            out.writeInt(0);
        else {
            out.writeInt(col.length);

            for (E e : col)
                elemWriter.accept(out, e);
        }
    }

    /**
     * Reads a count-prefixed sequence written by the serializers above.
     *
     * @param in Input stream.
     * @param elemReader Collection element deserializer.
     * @return Deserialized collection.
     */
    static <E> Collection<E> collection(BinaryInputStream in, Function<BinaryInputStream, E> elemReader) {
        Collection<E> col = new LinkedList<>(); // needs to be ordered for some use cases

        int cnt = in.readInt();

        for (int i = 0; i < cnt; i++)
            col.add(elemReader.apply(in));

        return col;
    }

    /**
     * Reads a count-prefixed sequence of key/value pairs.
     *
     * @param in Input stream.
     * @param keyReader Entry key deserializer.
     * @param valReader Entry value deserializer.
     * @return Deserialized map
     */
    private static <K, V> Map<K, V> map(
        BinaryInputStream in,
        Function<BinaryInputStream, K> keyReader,
        Function<BinaryInputStream, V> valReader
    ) {
        int cnt = in.readInt();

        Map<K, V> map = new HashMap<>(cnt);

        for (int i = 0; i < cnt; i++)
            map.put(keyReader.apply(in), valReader.apply(in));

        return map;
    }

    /**
     * Deserialize binary type metadata from stream.
     *
     * Reads, in wire order: type id, type name, affinity key field name,
     * field map, enum flag, optional enum value map, schemas.
     */
    BinaryMetadata binaryMetadata(BinaryInputStream in) throws IOException {
        try (BinaryReaderExImpl reader = createBinaryReader(in)) {
            int typeId = reader.readInt();
            String typeName = reader.readString();
            String affKeyFieldName = reader.readString();

            Map<String, BinaryFieldMetadata> fields = ClientUtils.map(
                in,
                unused -> reader.readString(),
                unused2 -> new BinaryFieldMetadata(reader.readInt(), reader.readInt())
            );

            boolean isEnum = reader.readBoolean();

            // Enum value map is only present on the wire when the type is an enum.
            Map<String, Integer> enumValues = isEnum ?
                ClientUtils.map(in, unsed -> reader.readString(), unsed2 -> reader.readInt()) :
                null;

            Collection<BinarySchema> schemas = ClientUtils.collection(
                in,
                unused -> new BinarySchema(
                    reader.readInt(),
                    new ArrayList<>(ClientUtils.collection(in, unused2 -> reader.readInt()))
                )
            );

            return new BinaryMetadata(
                typeId,
                typeName,
                fields,
                affKeyFieldName,
                schemas,
                isEnum,
                enumValues
            );
        }
    }

    /**
     * Serialize binary type metadata to stream.
     *
     * Mirror of {@link #binaryMetadata(BinaryInputStream)} — write order must
     * match the read order there exactly.
     */
    void binaryMetadata(BinaryMetadata meta, BinaryOutputStream out) {
        try (BinaryRawWriterEx w = new BinaryWriterExImpl(marsh.context(), out, null, null)) {
            w.writeInt(meta.typeId());
            w.writeString(meta.typeName());
            w.writeString(meta.affinityKeyFieldName());

            collection(
                meta.fieldsMap().entrySet(),
                out,
                (unused, e) -> {
                    w.writeString(e.getKey());
                    w.writeInt(e.getValue().typeId());
                    w.writeInt(e.getValue().fieldId());
                }
            );

            w.writeBoolean(meta.isEnum());

            if (meta.isEnum())
                collection(
                    meta.enumMap().entrySet(),
                    out,
                    (unused, e) -> {
                        w.writeString(e.getKey());
                        w.writeInt(e.getValue());
                    }
                );

            collection(
                meta.schemas(),
                out,
                (unused, s) -> {
                    w.writeInt(s.schemaId());

                    collection(
                        Arrays.stream(s.fieldIds()).boxed().collect(Collectors.toList()),
                        out,
                        (unused2, i) -> w.writeInt(i)
                    );
                }
            );
        }
    }

    /**
     * Serialize configuration to stream.
     *
     * Layout: [total length:int][property count:short][{code:short, value}...].
     * Length and count are back-patched at the end once all properties are
     * written.
     */
    void cacheConfiguration(ClientCacheConfiguration cfg, BinaryOutputStream out, ProtocolContext protocolCtx) {
        try (BinaryRawWriterEx writer = new BinaryWriterExImpl(marsh.context(), out, null, null)) {
            int origPos = out.position();

            writer.writeInt(0); // configuration length is to be assigned in the end

            writer.writeShort((short)0); // properties count is to be assigned in the end

            AtomicInteger propCnt = new AtomicInteger(0);

            // Writes one tagged property: [code:short][value] and bumps the count.
            BiConsumer<CfgItem, Consumer<BinaryRawWriter>> itemWriter = (cfgItem, cfgWriter) -> {
                writer.writeShort(cfgItem.code());

                cfgWriter.accept(writer);

                propCnt.incrementAndGet();
            };

            itemWriter.accept(CfgItem.NAME, w -> w.writeString(cfg.getName()));

            itemWriter.accept(CfgItem.CACHE_MODE, w -> w.writeInt(cfg.getCacheMode().ordinal()));

            itemWriter.accept(CfgItem.ATOMICITY_MODE, w -> w.writeInt(cfg.getAtomicityMode().ordinal()));

            itemWriter.accept(CfgItem.BACKUPS, w -> w.writeInt(cfg.getBackups()));

            itemWriter.accept(CfgItem.WRITE_SYNC_MODE, w -> w.writeInt(cfg.getWriteSynchronizationMode().ordinal()));

            itemWriter.accept(CfgItem.READ_FROM_BACKUP, w -> w.writeBoolean(cfg.isReadFromBackup()));

            itemWriter.accept(CfgItem.EAGER_TTL, w -> w.writeBoolean(cfg.isEagerTtl()));

            itemWriter.accept(CfgItem.GROUP_NAME, w -> w.writeString(cfg.getGroupName()));

            itemWriter.accept(CfgItem.DEFAULT_LOCK_TIMEOUT, w -> w.writeLong(cfg.getDefaultLockTimeout()));

            itemWriter.accept(CfgItem.PART_LOSS_POLICY, w -> w.writeInt(cfg.getPartitionLossPolicy().ordinal()));

            itemWriter.accept(CfgItem.REBALANCE_BATCH_SIZE, w -> w.writeInt(cfg.getRebalanceBatchSize()));

            itemWriter.accept(CfgItem.REBALANCE_BATCHES_PREFETCH_COUNT,
                w -> w.writeLong(cfg.getRebalanceBatchesPrefetchCount()));

            itemWriter.accept(CfgItem.REBALANCE_DELAY, w -> w.writeLong(cfg.getRebalanceDelay()));

            itemWriter.accept(CfgItem.REBALANCE_MODE, w -> w.writeInt(cfg.getRebalanceMode().ordinal()));

            itemWriter.accept(CfgItem.REBALANCE_ORDER, w -> w.writeInt(cfg.getRebalanceOrder()));

            itemWriter.accept(CfgItem.REBALANCE_THROTTLE, w -> w.writeLong(cfg.getRebalanceThrottle()));

            itemWriter.accept(CfgItem.REBALANCE_TIMEOUT, w -> w.writeLong(cfg.getRebalanceTimeout()));

            itemWriter.accept(CfgItem.COPY_ON_READ, w -> w.writeBoolean(cfg.isCopyOnRead()));

            itemWriter.accept(CfgItem.DATA_REGION_NAME, w -> w.writeString(cfg.getDataRegionName()));

            itemWriter.accept(CfgItem.STATS_ENABLED, w -> w.writeBoolean(cfg.isStatisticsEnabled()));

            itemWriter.accept(CfgItem.MAX_ASYNC_OPS, w -> w.writeInt(cfg.getMaxConcurrentAsyncOperations()));

            itemWriter.accept(CfgItem.MAX_QUERY_ITERATORS, w -> w.writeInt(cfg.getMaxQueryIteratorsCount()));

            itemWriter.accept(CfgItem.ONHEAP_CACHE_ENABLED, w -> w.writeBoolean(cfg.isOnheapCacheEnabled()));

            itemWriter.accept(CfgItem.QUERY_METRIC_SIZE, w -> w.writeInt(cfg.getQueryDetailMetricsSize()));

            itemWriter.accept(CfgItem.QUERY_PARALLELISM, w -> w.writeInt(cfg.getQueryParallelism()));

            itemWriter.accept(CfgItem.SQL_ESCAPE_ALL, w -> w.writeBoolean(cfg.isSqlEscapeAll()));

            itemWriter.accept(CfgItem.SQL_IDX_MAX_INLINE_SIZE, w -> w.writeInt(cfg.getSqlIndexMaxInlineSize()));

            itemWriter.accept(CfgItem.SQL_SCHEMA, w -> w.writeString(cfg.getSqlSchema()));

            itemWriter.accept(
                CfgItem.KEY_CONFIGS,
                w -> ClientUtils.collection(
                    cfg.getKeyConfiguration(),
                    out,
                    (unused, i) -> {
                        w.writeString(i.getTypeName());
                        w.writeString(i.getAffinityKeyFieldName());
                    }
                )
            );

            itemWriter.accept(
                CfgItem.QUERY_ENTITIES,
                w -> ClientUtils.collection(
                    cfg.getQueryEntities(),
                    out,
                    (unused, e) -> {
                        w.writeString(e.getKeyType());
                        w.writeString(e.getValueType());
                        w.writeString(e.getTableName());
                        w.writeString(e.getKeyFieldName());
                        w.writeString(e.getValueFieldName());

                        ClientUtils.collection(
                            e.getFields().entrySet(),
                            out,
                            (unused2, f) -> {
                                // QueryField flattens the entity's per-field metadata.
                                QueryField qf = new QueryField(e, f);

                                w.writeString(qf.getName());
                                w.writeString(qf.getTypeName());
                                w.writeBoolean(qf.isKey());
                                w.writeBoolean(qf.isNotNull());
                                w.writeObject(qf.getDefaultValue());

                                // Precision/scale only exist for newer protocol versions.
                                if (protocolCtx.isFeatureSupported(QUERY_ENTITY_PRECISION_AND_SCALE)) {
                                    w.writeInt(qf.getPrecision());
                                    w.writeInt(qf.getScale());
                                }
                            }
                        );

                        ClientUtils.collection(
                            e.getAliases().entrySet(),
                            out,
                            (unused3, a) -> {
                                w.writeString(a.getKey());
                                w.writeString(a.getValue());
                            }
                        );

                        ClientUtils.collection(
                            e.getIndexes(),
                            out,
                            (unused4, i) -> {
                                w.writeString(i.getName());
                                w.writeByte((byte)i.getIndexType().ordinal());
                                w.writeInt(i.getInlineSize());

                                ClientUtils.collection(i.getFields().entrySet(),
                                    out,
                                    (unused5, f) -> {
                                        w.writeString(f.getKey());
                                        w.writeBoolean(f.getValue());
                                    }
                                );
                            });
                    }
                )
            );

            if (protocolCtx.isFeatureSupported(EXPIRY_POLICY)) {
                itemWriter.accept(CfgItem.EXPIRE_POLICY, w -> {
                    ExpiryPolicy expiryPlc = cfg.getExpiryPolicy();

                    if (expiryPlc == null)
                        w.writeBoolean(false);
                    else {
                        w.writeBoolean(true);
                        w.writeLong(convertDuration(expiryPlc.getExpiryForCreation()));
                        w.writeLong(convertDuration(expiryPlc.getExpiryForUpdate()));
                        w.writeLong(convertDuration(expiryPlc.getExpiryForAccess()));
                    }
                });
            }
            else if (cfg.getExpiryPolicy() != null) {
                throw new ClientProtocolError(String.format("Expire policies are not supported by the server " +
                    "version %s, required version %s", protocolCtx.version(), EXPIRY_POLICY.verIntroduced()));
            }

            // Back-patch the length (excluding the 4-byte length field itself)
            // and the property count written as placeholders above.
            writer.writeInt(origPos, out.position() - origPos - 4); // configuration length
            writer.writeInt(origPos + 4, propCnt.get()); // properties count
        }
    }

    /**
     * Deserialize configuration from stream.
     *
     * NOTE(review): unlike the serializer, reads are positional (fixed
     * layout), so the getter order below must match the server's write order,
     * not the tagged-property order above.
     */
    ClientCacheConfiguration cacheConfiguration(BinaryInputStream in, ProtocolContext protocolCtx)
        throws IOException {
        try (BinaryReaderExImpl reader = createBinaryReader(in)) {
            reader.readInt(); // Do not need length to read data. The protocol defines fixed configuration layout.

            return new ClientCacheConfiguration().setName("TBD") // cache name is to be assigned later
                .setAtomicityMode(CacheAtomicityMode.fromOrdinal(reader.readInt()))
                .setBackups(reader.readInt())
                .setCacheMode(CacheMode.fromOrdinal(reader.readInt()))
                .setCopyOnRead(reader.readBoolean())
                .setDataRegionName(reader.readString())
                .setEagerTtl(reader.readBoolean())
                .setStatisticsEnabled(reader.readBoolean())
                .setGroupName(reader.readString())
                .setDefaultLockTimeout(reader.readLong())
                .setMaxConcurrentAsyncOperations(reader.readInt())
                .setMaxQueryIteratorsCount(reader.readInt())
                .setName(reader.readString())
                .setOnheapCacheEnabled(reader.readBoolean())
                .setPartitionLossPolicy(PartitionLossPolicy.fromOrdinal((byte)reader.readInt()))
                .setQueryDetailMetricsSize(reader.readInt())
                .setQueryParallelism(reader.readInt())
                .setReadFromBackup(reader.readBoolean())
                .setRebalanceBatchSize(reader.readInt())
                .setRebalanceBatchesPrefetchCount(reader.readLong())
                .setRebalanceDelay(reader.readLong())
                .setRebalanceMode(CacheRebalanceMode.fromOrdinal(reader.readInt()))
                .setRebalanceOrder(reader.readInt())
                .setRebalanceThrottle(reader.readLong())
                .setRebalanceTimeout(reader.readLong())
                .setSqlEscapeAll(reader.readBoolean())
                .setSqlIndexMaxInlineSize(reader.readInt())
                .setSqlSchema(reader.readString())
                .setWriteSynchronizationMode(CacheWriteSynchronizationMode.fromOrdinal(reader.readInt()))
                .setKeyConfiguration(
                    ClientUtils.collection(in,
                        unused -> new CacheKeyConfiguration(reader.readString(), reader.readString()))
                        .toArray(new CacheKeyConfiguration[0])
                ).setQueryEntities(ClientUtils.collection(
                    in,
                    unused -> {
                        QueryEntity qryEntity = new QueryEntity(reader.readString(), reader.readString())
                            .setTableName(reader.readString())
                            .setKeyFieldName(reader.readString())
                            .setValueFieldName(reader.readString());

                        boolean isPrecisionAndScaleSupported =
                            protocolCtx.isFeatureSupported(QUERY_ENTITY_PRECISION_AND_SCALE);

                        Collection<QueryField> qryFields = ClientUtils.collection(
                            in,
                            unused2 -> {
                                String name = reader.readString();
                                String typeName = reader.readString();
                                boolean isKey = reader.readBoolean();
                                boolean isNotNull = reader.readBoolean();
                                Object dfltVal = reader.readObject();
                                // -1 marks "not present" for older protocol versions.
                                int precision = isPrecisionAndScaleSupported ? reader.readInt() : -1;
                                int scale = isPrecisionAndScaleSupported ? reader.readInt() : -1;

                                return new QueryField(name, typeName, isKey, isNotNull, dfltVal, precision, scale);
                            }
                        );

                        return qryEntity
                            .setFields(qryFields.stream().collect(Collectors.toMap(
                                QueryField::getName, QueryField::getTypeName, (a, b) -> a, LinkedHashMap::new
                            )))
                            .setKeyFields(qryFields.stream()
                                .filter(QueryField::isKey)
                                .map(QueryField::getName)
                                .collect(Collectors.toCollection(LinkedHashSet::new))
                            )
                            .setNotNullFields(qryFields.stream()
                                .filter(QueryField::isNotNull)
                                .map(QueryField::getName)
                                .collect(Collectors.toSet())
                            )
                            .setDefaultFieldValues(qryFields.stream()
                                .filter(f -> f.getDefaultValue() != null)
                                .collect(Collectors.toMap(QueryField::getName, QueryField::getDefaultValue))
                            )
                            .setFieldsPrecision(qryFields.stream()
                                .filter(f -> f.getPrecision() != -1)
                                .collect(Collectors.toMap(QueryField::getName, QueryField::getPrecision))
                            )
                            .setFieldsScale(qryFields.stream()
                                .filter(f -> f.getScale() != -1)
                                .collect(Collectors.toMap(QueryField::getName, QueryField::getScale))
                            )
                            .setAliases(ClientUtils.collection(
                                in,
                                unused3 -> new SimpleEntry<>(reader.readString(), reader.readString())
                                ).stream().collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue))
                            )
                            .setIndexes(ClientUtils.collection(
                                in,
                                unused4 -> {
                                    String name = reader.readString();
                                    QueryIndexType type = QueryIndexType.fromOrdinal(reader.readByte());
                                    int inlineSize = reader.readInt();

                                    LinkedHashMap<String, Boolean> fields = ClientUtils.collection(
                                        in,
                                        unused5 -> new SimpleEntry<>(reader.readString(), reader.readBoolean())
                                    ).stream().collect(Collectors.toMap(
                                        SimpleEntry::getKey, SimpleEntry::getValue, (a, b) -> a, LinkedHashMap::new
                                    ));

                                    return new QueryIndex(fields, type).setName(name).setInlineSize(inlineSize);
                                }
                            ));
                    }
                ).toArray(new QueryEntity[0]))
                .setExpiryPolicy(!protocolCtx.isFeatureSupported(EXPIRY_POLICY) ? null :
                    reader.readBoolean() ?
                        new PlatformExpiryPolicy(reader.readLong(), reader.readLong(), reader.readLong()) : null
                );
        }
    }

    /**
     * Serialize SQL field query to stream.
     */
    void write(SqlFieldsQuery qry, BinaryOutputStream out) {
        writeObject(out, qry.getSchema());
        out.writeInt(qry.getPageSize());
        out.writeInt(-1); // do not limit
        writeObject(out, qry.getSql());
        ClientUtils.collection(qry.getArgs() == null ? null : Arrays.asList(qry.getArgs()), out, this::writeObject);
        out.writeByte((byte)0); // statement type ANY
        out.writeBoolean(qry.isDistributedJoins());
        out.writeBoolean(qry.isLocal());
        out.writeBoolean(qry.isReplicatedOnly());
        out.writeBoolean(qry.isEnforceJoinOrder());
        out.writeBoolean(qry.isCollocated());
        out.writeBoolean(qry.isLazy());
        out.writeLong(qry.getTimeout());
        out.writeBoolean(true); // include column names
    }

    /**
     * Write Ignite binary object to output stream.
     */
    void writeObject(BinaryOutputStream out, Object obj) {
        out.writeByteArray(marsh.marshal(obj));
    }

    /**
     * Creates a binary writer backed by the thread-local schema holder.
     *
     * @param out Output stream.
     */
    BinaryRawWriterEx createBinaryWriter(BinaryOutputStream out) {
        return new BinaryWriterExImpl(marsh.context(), out, BinaryThreadLocalContext.get().schemaHolder(), null);
    }

    /**
     * Creates a binary reader over the stream using this marshaller's context.
     *
     * @param in Input stream.
     */
    BinaryReaderExImpl createBinaryReader(BinaryInputStream in) {
        return createBinaryReader(marsh.context(), in);
    }

    /**
     * Creates a binary reader over the stream for the given context.
     *
     * @param binaryCtx Binary context.
     * @param in Input stream.
     */
    static BinaryReaderExImpl createBinaryReader(@Nullable BinaryContext binaryCtx, BinaryInputStream in) {
        return new BinaryReaderExImpl(binaryCtx, in, null, null, true, true);
    }

    /**
     * Read Ignite binary object from input stream.
     */
    <T> T readObject(BinaryInputStream in, boolean keepBinary) {
        return readObject(in, keepBinary, null);
    }

    /**
     * Read Ignite binary object from input stream.
     *
     * @param keepBinary When true, the raw binary form is returned;
     *      otherwise nested binary objects are recursively deserialized.
     * @param clazz Expected array component type hint, may be null.
     */
    <T> T readObject(BinaryInputStream in, boolean keepBinary, Class<T> clazz) {
        if (keepBinary)
            return (T)marsh.unmarshal(in);
        else {
            BinaryReaderHandles hnds = new BinaryReaderHandles();

            return (T)unwrapBinary(marsh.deserialize(in, hnds), hnds, clazz);
        }
    }

    /**
     * Unwrap binary object. Recursively deserializes binary objects nested
     * inside known collections, maps and object arrays; anything else is
     * returned as-is.
     */
    private Object unwrapBinary(Object obj, BinaryReaderHandles hnds, Class<?> clazz) {
        if (obj instanceof BinaryObjectImpl) {
            BinaryObjectImpl obj0 = (BinaryObjectImpl)obj;

            return marsh.deserialize(BinaryHeapInputStream.create(obj0.array(), obj0.start()), hnds);
        }
        else if (obj instanceof BinaryObject)
            return ((BinaryObject)obj).deserialize();
        else if (BinaryUtils.knownCollection(obj))
            return unwrapCollection((Collection<Object>)obj, hnds);
        else if (BinaryUtils.knownMap(obj))
            return unwrapMap((Map<Object, Object>)obj, hnds);
        else if (obj instanceof Object[])
            return unwrapArray((Object[])obj, hnds, clazz);
        else
            return obj;
    }

    /**
     * Unwrap collection with binary objects.
     */
    private Collection<Object> unwrapCollection(Collection<Object> col, BinaryReaderHandles hnds) {
        Collection<Object> col0 = BinaryUtils.newKnownCollection(col);

        for (Object obj0 : col)
            col0.add(unwrapBinary(obj0, hnds, null));

        return (col0 instanceof MutableSingletonList) ? U.convertToSingletonList(col0) : col0;
    }

    /**
     * Unwrap map with binary objects.
     */
    private Map<Object, Object> unwrapMap(Map<Object, Object> map, BinaryReaderHandles hnds) {
        Map<Object, Object> map0 = BinaryUtils.newMap(map);

        for (Map.Entry<Object, Object> e : map.entrySet())
            map0.put(unwrapBinary(e.getKey(), hnds, null), unwrapBinary(e.getValue(), hnds, null));

        return map0;
    }

    /**
     * Unwrap array with binary objects. The caller-provided array class (when
     * given) wins over the runtime component type of the source array.
     */
    private Object[] unwrapArray(Object[] arr, BinaryReaderHandles hnds, Class<?> arrayClass) {
        if (BinaryUtils.knownArray(arr))
            return arr;

        Class<?> componentType = arrayClass != null && arrayClass.isArray() ?
            arrayClass.getComponentType() : arr.getClass().getComponentType();

        Object[] res = (Object[])Array.newInstance(componentType, arr.length);

        for (int i = 0; i < arr.length; i++)
            res[i] = unwrapBinary(arr[i], hnds, null);

        return res;
    }

    /**
     * A helper class to translate query fields. Flattens a {@link QueryEntity}'s
     * per-field metadata (key/not-null/default/precision/scale) into one value
     * object; {@code -1} precision/scale means "unspecified".
     */
    private static final class QueryField {
        /** Name. */
        private final String name;

        /** Type name. */
        private final String typeName;

        /** Is key. */
        private final boolean isKey;

        /** Is not null. */
        private final boolean isNotNull;

        /** Default value. */
        private final Object dfltVal;

        /** Precision. */
        private final int precision;

        /** Scale. */
        private final int scale;

        /** Serialization constructor. */
        QueryField(QueryEntity e, Map.Entry<String, String> nameAndTypeName) {
            name = nameAndTypeName.getKey();
            typeName = nameAndTypeName.getValue();

            Set<String> keys = e.getKeyFields();
            Set<String> notNulls = e.getNotNullFields();
            Map<String, Object> dflts = e.getDefaultFieldValues();
            Map<String, Integer> fldsPrecision = e.getFieldsPrecision();
            Map<String, Integer> fldsScale = e.getFieldsScale();

            isKey = keys != null && keys.contains(name);
            isNotNull = notNulls != null && notNulls.contains(name);
            dfltVal = dflts == null ? null : dflts.get(name);
            precision = fldsPrecision == null ? -1 : fldsPrecision.getOrDefault(name, -1);
            scale = fldsScale == null ? -1 : fldsScale.getOrDefault(name, -1);
        }

        /** Deserialization constructor. */
        public QueryField(String name, String typeName, boolean isKey, boolean isNotNull,
            Object dfltVal, int precision, int scale) {
            this.name = name;
            this.typeName = typeName;
            this.isKey = isKey;
            this.isNotNull = isNotNull;
            this.dfltVal = dfltVal;
            this.precision = precision;
            this.scale = scale;
        }

        /**
         * @return Name.
         */
        String getName() {
            return name;
        }

        /**
         * @return Type name.
         */
        String getTypeName() {
            return typeName;
        }

        /**
         * @return Is Key.
         */
        boolean isKey() {
            return isKey;
        }

        /**
         * @return Is Not Null.
         */
        boolean isNotNull() {
            return isNotNull;
        }

        /**
         * @return Default value.
         */
        Object getDefaultValue() {
            return dfltVal;
        }

        /**
         * @return Precision.
         */
        public int getPrecision() {
            return precision;
        }

        /**
         * @return Scale.
         */
        public int getScale() {
            return scale;
        }
    }

    /**
     * Thin client protocol cache configuration item codes. The numeric codes
     * are part of the wire protocol and must never change.
     */
    private enum CfgItem {
        /** Name. */NAME(0),

        /** Cache mode. */CACHE_MODE(1),

        /** Atomicity mode. */ATOMICITY_MODE(2),

        /** Backups. */BACKUPS(3),

        /** Write synchronization mode. */WRITE_SYNC_MODE(4),

        /** Read from backup. */READ_FROM_BACKUP(6),

        /** Eager ttl. */EAGER_TTL(405),

        /** Group name. */GROUP_NAME(400),

        /** Default lock timeout. */DEFAULT_LOCK_TIMEOUT(402),

        /** Partition loss policy. */PART_LOSS_POLICY(404),

        /** Rebalance batch size. */REBALANCE_BATCH_SIZE(303),

        /** Rebalance batches prefetch count. */REBALANCE_BATCHES_PREFETCH_COUNT(304),

        /** Rebalance delay. */REBALANCE_DELAY(301),

        /** Rebalance mode. */REBALANCE_MODE(300),

        /** Rebalance order. */REBALANCE_ORDER(305),

        /** Rebalance throttle. */REBALANCE_THROTTLE(306),

        /** Rebalance timeout. */REBALANCE_TIMEOUT(302),

        /** Copy on read. */COPY_ON_READ(5),

        /** Data region name. */DATA_REGION_NAME(100),

        /** Stats enabled. */STATS_ENABLED(406),

        /** Max async ops. */MAX_ASYNC_OPS(403),

        /** Max query iterators. */MAX_QUERY_ITERATORS(206),

        /** Onheap cache enabled. */ONHEAP_CACHE_ENABLED(101),

        /** Query metric size. */QUERY_METRIC_SIZE(202),

        /** Query parallelism. */QUERY_PARALLELISM(201),

        /** Sql escape all. */SQL_ESCAPE_ALL(205),

        /** Sql index max inline size. */SQL_IDX_MAX_INLINE_SIZE(204),

        /** Sql schema. */SQL_SCHEMA(203),

        /** Key configs. */KEY_CONFIGS(401),

        /** Key entities. */QUERY_ENTITIES(200),

        /** Expire policy. */EXPIRE_POLICY(407);

        /** Code. */
        private final short code;

        /** */
        CfgItem(int code) {
            this.code = (short)code;
        }

        /** @return Code. */
        short code() {
            return code;
        }
    }
}
package com.marshalchen.common.commonUtils.urlUtils;

import android.os.Build;
import android.util.Base64;

import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

/**
 * Triple-DES (DESede/CBC/PKCS5Padding) encrypt/decrypt helpers plus a few
 * byte/hex/Base64 utilities, with Base64 handling that falls back to a local
 * implementation on old Android SDK levels.
 *
 * <p>SECURITY WARNING (flagged, behavior intentionally preserved):
 * <ul>
 *   <li>The key is derived from the hard-coded empty {@code token}, so every
 *       installation uses the same key — this provides obfuscation, not security.</li>
 *   <li>The IV is the static string {@code "12345678"}; CBC with a fixed IV leaks
 *       equal-plaintext prefixes.</li>
 *   <li>3DES itself is deprecated; new code should use AES/GCM.</li>
 * </ul>
 */
public class TripleDES {

    /** Passphrase the DESede key is derived from. Empty — see class-level security warning. */
    private final static String token = "";

    /** Static CBC IV — see class-level security warning. */
    private static final String IV_STRING = "12345678";

    /**
     * Copy of {@code Arrays.copyOf} kept for very old Android levels:
     * truncates or zero-pads {@code original} to {@code newLength}.
     *
     * @throws NegativeArraySizeException if {@code newLength} is negative
     */
    public static byte[] copyOf(byte[] original, int newLength) {
        if (newLength < 0) {
            throw new NegativeArraySizeException(Integer.toString(newLength));
        }
        return copyOfRange(original, 0, newLength);
    }

    /** @return the device SDK level ({@code Build.VERSION.SDK_INT}). */
    public static int judgeVerionOfSdk() {
        return Build.VERSION.SDK_INT;
    }

    /**
     * Base64-encodes to a String, using {@code android.util.Base64} on API &gt; 7
     * and the bundled fallback implementation otherwise.
     * NOTE(review): this method uses threshold {@code > 7} while the sibling
     * methods below use {@code > 17}; android.util.Base64 exists since API 8,
     * so the thresholds are inconsistent — confirm intent before unifying.
     */
    public static String Base64encoding(byte[] context, int type) {
        if (judgeVerionOfSdk() > 7) {
            return Base64.encodeToString(context, type);
        }
        return com.marshalchen.common.commonUtils.urlUtils.Base64.encodeBytes(context);
    }

    /** Base64-encodes to bytes, with the legacy fallback on API &lt;= 17. */
    public static byte[] Base64encodingByte(byte[] context, int type) {
        if (judgeVerionOfSdk() > 17) {
            return Base64.encode(context, type);
        }
        return com.marshalchen.common.commonUtils.urlUtils.Base64.encodeBytesToBytes(context);
    }

    /** Base64-decodes a String, with the legacy fallback on API &lt;= 17. */
    public static byte[] Base64decoding(String context, int type) throws Exception {
        if (judgeVerionOfSdk() > 17) {
            return Base64.decode(context, type);
        }
        return com.marshalchen.common.commonUtils.urlUtils.Base64.decode(context);
    }

    /** Base64-decodes bytes, with the legacy fallback on API &lt;= 17. */
    public static byte[] Base64decodingByte(byte[] context, int type) throws Exception {
        if (judgeVerionOfSdk() > 17) {
            return Base64.decode(context, type);
        }
        return com.marshalchen.common.commonUtils.urlUtils.Base64.decode(context);
    }

    /**
     * Copy of {@code Arrays.copyOfRange}: returns {@code original[start, end)},
     * zero-padded if {@code end} exceeds the source length.
     *
     * @throws IllegalArgumentException       if {@code start > end}
     * @throws ArrayIndexOutOfBoundsException if {@code start} is outside the array
     */
    public static byte[] copyOfRange(byte[] original, int start, int end) {
        if (start > end) {
            throw new IllegalArgumentException();
        }
        int originalLength = original.length;
        if (start < 0 || start > originalLength) {
            throw new ArrayIndexOutOfBoundsException();
        }
        int resultLength = end - start;
        int copyLength = Math.min(resultLength, originalLength - start);
        byte[] result = new byte[resultLength];
        System.arraycopy(original, start, result, 0, copyLength);
        return result;
    }

    /**
     * Derives the 24-byte DESede key: SHA-1 of {@code token} (20 bytes),
     * zero-padded to 24, then bytes 0..7 copied over bytes 16..23 (K3 == K1,
     * i.e. effectively two-key 3DES). Kept exactly as the legacy scheme so
     * existing ciphertext stays decryptable.
     */
    private static SecretKey deriveKey() throws Exception {
        final MessageDigest md = MessageDigest.getInstance("SHA-1");
        final byte[] digestOfPassword = md.digest(token.getBytes(StandardCharsets.UTF_8));
        final byte[] keyBytes = copyOf(digestOfPassword, 24);
        for (int j = 0, k = 16; j < 8; ) {
            keyBytes[k++] = keyBytes[j++];
        }
        return new SecretKeySpec(keyBytes, "DESede");
    }

    /** Builds a DESede/CBC/PKCS5Padding cipher initialized for {@code mode} with the static IV. */
    private static Cipher newCipher(int mode) throws Exception {
        final IvParameterSpec iv = new IvParameterSpec(IV_STRING.getBytes(StandardCharsets.US_ASCII));
        final Cipher cipher = Cipher.getInstance("DESede/CBC/PKCS5Padding");
        cipher.init(mode, deriveKey(), iv);
        return cipher;
    }

    /**
     * Encrypts {@code message} (UTF-8) and returns the Base64-encoded ciphertext bytes.
     * (Removed the dead {@code String ss = Base64encoding(...)} local of the original.)
     */
    public static byte[] encrypt(String message) throws Exception {
        final byte[] cipherText =
            newCipher(Cipher.ENCRYPT_MODE).doFinal(message.getBytes(StandardCharsets.UTF_8));
        return Base64encodingByte(cipherText, 0);
    }

    /** Encrypts {@code message} (UTF-8) and returns the Base64 ciphertext as a String. */
    public static String encrypts(String message) throws Exception {
        final byte[] cipherText =
            newCipher(Cipher.ENCRYPT_MODE).doFinal(message.getBytes(StandardCharsets.UTF_8));
        final byte[] result = Base64encodingByte(cipherText, 0);
        return new String(result, StandardCharsets.UTF_8);
    }

    /**
     * Decrypts Base64-encoded ciphertext bytes back to a UTF-8 String.
     * Returns "" for null/empty input (guard added for consistency with {@link #decrypts}).
     */
    public static String decrypt(byte[] message) throws Exception {
        if (message == null || message.length == 0) {
            return "";
        }
        final byte[] values = Base64decodingByte(message, 0);
        final byte[] plainText = newCipher(Cipher.DECRYPT_MODE).doFinal(values);
        return new String(plainText, StandardCharsets.UTF_8);
    }

    /**
     * Decrypts a Base64-encoded ciphertext String back to a UTF-8 String.
     * BUG FIX: the original guard used {@code message == ""} (reference comparison,
     * never true for runtime strings); replaced with {@code isEmpty()}.
     */
    public static String decrypts(String message) throws Exception {
        if (message == null || message.isEmpty()) {
            return "";
        }
        final byte[] values = Base64decoding(message, 0);
        final byte[] plainText = newCipher(Cipher.DECRYPT_MODE).doFinal(values);
        return new String(plainText, StandardCharsets.UTF_8);
    }

    /**
     * Removes all line breaks from {@code strText}.
     * BUG FIX: the original used {@code substring(intStart, intLoc - 1)}, which also
     * deleted the character immediately preceding every '\n' — correct only for CRLF
     * input and data-destroying for bare-LF input. Now strips CR and LF explicitly.
     */
    public static String replaceNewLine(String strText) {
        return strText.replace("\r", "").replace("\n", "");
    }

    /** Converts bytes to an upper-case hex string (two digits per byte). */
    public static String byte2hex(byte[] b) {
        // StringBuilder instead of the original O(n^2) String concatenation loop.
        final StringBuilder hex = new StringBuilder(b.length * 2);
        for (int n = 0; n < b.length; n++) {
            final String stmp = Integer.toHexString(b[n] & 0xFF);
            if (stmp.length() == 1) {
                hex.append('0');
            }
            hex.append(stmp);
        }
        return hex.toString().toUpperCase();
    }

    /**
     * Parses ASCII hex digits (two per output byte) back into bytes.
     *
     * @throws IllegalArgumentException if the input length is odd
     */
    public static byte[] hex2byte(byte[] b) {
        if ((b.length % 2) != 0) {
            throw new IllegalArgumentException("wrong index");
        }
        final byte[] b2 = new byte[b.length / 2];
        for (int n = 0; n < b.length; n += 2) {
            final String item = new String(b, n, 2);
            b2[n / 2] = (byte) Integer.parseInt(item, 16);
        }
        return b2;
    }
}
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.cas.support.spnego.web.flow.client; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.builder.ToStringBuilder; import org.jasig.cas.support.spnego.util.ReverseDNSRunnable; import org.jasig.cas.web.support.WebUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import org.springframework.webflow.action.AbstractAction; import org.springframework.webflow.execution.Event; import org.springframework.webflow.execution.RequestContext; import javax.servlet.http.HttpServletRequest; import javax.validation.constraints.NotNull; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Abstract class for defining a simple binary filter to determine whether a * given client system should be prompted for SPNEGO / KRB / NTLM credentials. * * Envisioned implementations would include LDAP and DNS based determinations, * but of course others may have value as well for local architectures. 
* * @author Sean Baker sean.baker@usuhs.edu * @author Misagh Moayyed * @since 4.1 */ @Component("baseSpnegoClientAction") public class BaseSpnegoKnownClientSystemsFilterAction extends AbstractAction { private static final int DEFAULT_TIMEOUT = 2000; /** Logger instance. **/ protected final Logger logger = LoggerFactory.getLogger(this.getClass()); /** Pattern of ip addresses to check. **/ @Value("${cas.spnego.ip.pattern:127.+}") private Pattern ipsToCheckPattern; /** Alternative remote host attribute. **/ @Value("${cas.spnego.alt.remote.host.attribute:alternateRemoteHeader}") private String alternativeRemoteHostAttribute; /** Timeout for DNS Requests. **/ private long timeout = DEFAULT_TIMEOUT; /** * Instantiates a new Base. */ public BaseSpnegoKnownClientSystemsFilterAction() {} /** * Instantiates a new Base. * * @param ipsToCheckPattern the ips to check pattern */ public BaseSpnegoKnownClientSystemsFilterAction(final String ipsToCheckPattern) { setIpsToCheckPattern(ipsToCheckPattern); } /** * Instantiates a new Base. * * @param ipsToCheckPattern the ips to check pattern * @param alternativeRemoteHostAttribute the alternative remote host attribute */ public BaseSpnegoKnownClientSystemsFilterAction(final String ipsToCheckPattern, final String alternativeRemoteHostAttribute) { setIpsToCheckPattern(ipsToCheckPattern); this.alternativeRemoteHostAttribute = alternativeRemoteHostAttribute; } /** * Instantiates a new Base. * * @param ipsToCheckPattern the ips to check pattern * @param alternativeRemoteHostAttribute the alternative remote host attribute */ public BaseSpnegoKnownClientSystemsFilterAction(final Pattern ipsToCheckPattern, final String alternativeRemoteHostAttribute) { this.ipsToCheckPattern = ipsToCheckPattern; this.alternativeRemoteHostAttribute = alternativeRemoteHostAttribute; } /** * {@inheritDoc} * Gets the remote ip from the request, and invokes spnego if it isn't filtered. 
* * @param context the request context * @return {@link #yes()} if spnego should be invoked and ip isn't filtered, * {@link #no()} otherwise. */ @Override protected final Event doExecute(final RequestContext context) { final String remoteIp = getRemoteIp(context); logger.debug("Current user IP {}", remoteIp); return shouldDoSpnego(remoteIp) ? yes() : no(); } /** * Default implementation -- simply check the IP filter. * @param remoteIp the remote ip * @return true boolean */ protected boolean shouldDoSpnego(final String remoteIp) { return ipPatternCanBeChecked(remoteIp) && ipPatternMatches(remoteIp); } /** * Base class definition for whether the IP should be checked or not; overridable. * @param remoteIp the remote ip * @return whether or not the IP can / should be matched against the pattern */ protected boolean ipPatternCanBeChecked(final String remoteIp) { return (this.ipsToCheckPattern != null && StringUtils.isNotBlank(remoteIp)); } /** * Simple pattern match to determine whether an IP should be checked. * Could stand to be extended to support "real" IP addresses and patterns, but * for the local / first implementation regex made more sense. * @param remoteIp the remote ip * @return whether the remote ip received should be queried */ protected boolean ipPatternMatches(final String remoteIp) { final Matcher matcher = this.ipsToCheckPattern.matcher(remoteIp); if (matcher.find()) { logger.debug("Remote IP address {} should be checked based on the defined pattern {}", remoteIp, this.ipsToCheckPattern.pattern()); return true; } logger.debug("No pattern or remote IP defined, or pattern does not match remote IP [{}]", remoteIp); return false; } /** * Pulls the remote IP from the current HttpServletRequest, or grabs the value * for the specified alternative attribute (say, for proxied requests). Falls * back to providing the "normal" remote address if no value can be retrieved * from the specified alternative header value. 
* @param context the context * @return the remote ip */ private String getRemoteIp(@NotNull final RequestContext context) { final HttpServletRequest request = WebUtils.getHttpServletRequest(context); String userAddress = request.getRemoteAddr(); logger.debug("Remote Address = {}", userAddress); if (StringUtils.isNotBlank(this.alternativeRemoteHostAttribute)) { userAddress = request.getHeader(this.alternativeRemoteHostAttribute); logger.debug("Header Attribute [{}] = [{}]", this.alternativeRemoteHostAttribute, userAddress); if (StringUtils.isBlank(userAddress)) { userAddress = request.getRemoteAddr(); logger.warn("No value could be retrieved from the header [{}]. Falling back to [{}].", this.alternativeRemoteHostAttribute, userAddress); } } return userAddress; } /** * Alternative header to be used for retrieving the remote system IP address. * @param alternativeRemoteHostAttribute the alternative remote host attribute */ public final void setAlternativeRemoteHostAttribute(@NotNull final String alternativeRemoteHostAttribute) { this.alternativeRemoteHostAttribute = alternativeRemoteHostAttribute; } /** * Regular expression string to define IPs which should be considered. * @param ipsToCheckPattern the ips to check as a regex pattern */ public final void setIpsToCheckPattern(@NotNull final String ipsToCheckPattern) { this.ipsToCheckPattern = Pattern.compile(ipsToCheckPattern); } @Override public final String toString() { return new ToStringBuilder(this) .append("ipsToCheckPattern", this.ipsToCheckPattern) .append("alternativeRemoteHostAttribute", this.alternativeRemoteHostAttribute) .append("timeout", this.timeout) .toString(); } /** * Set timeout (ms) for DNS requests; valuable for heterogeneous environments employing * fall-through authentication mechanisms. * @param timeout # of milliseconds to wait for a DNS request to return */ public final void setTimeout(final long timeout) { this.timeout = timeout; } /** * Convenience method to perform a reverse DNS lookup. 
Threads the request * through a custom Runnable class in order to prevent inordinately long * user waits while performing reverse lookup. * @param remoteIp the remote ip * @return the remote host name */ protected String getRemoteHostName(final String remoteIp) { final ReverseDNSRunnable revDNS = new ReverseDNSRunnable(remoteIp); final Thread t = new Thread(revDNS); t.start(); try { t.join(this.timeout); } catch (final InterruptedException e) { logger.debug("Threaded lookup failed. Defaulting to IP {}.", remoteIp, e); } final String remoteHostName = revDNS.get(); logger.debug("Found remote host name {}.", remoteHostName); return StringUtils.isNotEmpty(remoteHostName) ? remoteHostName : remoteIp; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.jfoenix.controls; import com.jfoenix.transitions.JFXAnimationTimer; import com.jfoenix.transitions.JFXKeyFrame; import com.jfoenix.transitions.JFXKeyValue; import com.sun.javafx.event.EventHandlerManager; import com.sun.javafx.scene.control.skin.TooltipSkin; import javafx.animation.KeyFrame; import javafx.animation.Timeline; import javafx.event.EventDispatchChain; import javafx.event.EventHandler; import javafx.event.WeakEventHandler; import javafx.geometry.Bounds; import javafx.geometry.NodeOrientation; import javafx.geometry.Pos; import javafx.scene.Node; import javafx.scene.Scene; import javafx.scene.control.Skin; import javafx.scene.control.Tooltip; import javafx.scene.input.MouseEvent; import javafx.stage.Window; import javafx.stage.WindowEvent; import javafx.util.Duration; /** * JFXTooltip is the material design implementation of the tooltip. 
 *
 * @author Shadi Shaheen
 * @version 1.0
 * @since 3/29/2019
 */
public class JFXTooltip extends Tooltip {

    // Single shared behavior instance: hover delay 650ms, visible duration 1500ms,
    // left (mouse-exit) delay 200ms. Shared by every node JFXTooltip is installed on.
    private static TooltipBehavior BEHAVIOR = new TooltipBehavior(
        Duration.millis(650),
        Duration.millis(1500),
        Duration.millis(200));

    /**
     * updates the hover duration for {@link JFXTooltip} behavior
     * (null resets to the 650ms default)
     *
     * @param duration
     */
    public static void setHoverDelay(Duration duration) {
        BEHAVIOR.setHoverDelay(duration == null ? Duration.millis(650) : duration);
    }

    /**
     * updates the visible duration for {@link JFXTooltip} behavior
     * (null resets to the 1500ms default)
     *
     * @param duration
     */
    public static void setVisibleDuration(Duration duration) {
        BEHAVIOR.setVisibleDuration(duration == null ? Duration.millis(1500) : duration);
    }

    /**
     * updates the left duration for {@link JFXTooltip} behavior
     * (null resets to the 200ms default)
     *
     * @param duration
     */
    public static void setLeftDelay(Duration duration) {
        BEHAVIOR.setLeftDelay(duration == null ? Duration.millis(200) : duration);
    }

    /**
     * Associates the given {@link JFXTooltip} tooltip to the given node.
     *
     * @param node
     * @param tooltip
     */
    public static void install(Node node, JFXTooltip tooltip) {
        BEHAVIOR.install(node, tooltip);
    }

    /**
     * Associates the given {@link JFXTooltip} tooltip to the given node.
     * The tooltip will be shown according to the given {@link Pos} pos
     *
     * @param node
     * @param tooltip
     */
    public static void install(Node node, JFXTooltip tooltip, Pos pos) {
        tooltip.setPos(pos);
        BEHAVIOR.install(node, tooltip);
    }

    /**
     * Removes {@link JFXTooltip} tooltip from the given node
     *
     * @param node
     */
    public static void uninstall(Node node) {
        BEHAVIOR.uninstall(node);
    }

    // Root node of the tooltip's scene; captured on WINDOW_SHOWING so the
    // show/hide animations below can target its opacity/scale properties.
    private Node root = null;

    // True while the hide animation is playing; used as the animate-condition
    // that selects which of the two key frames below runs.
    private boolean hiding = false;

    // Two-phase animation: fade/scale in over 150ms when showing (!hiding),
    // fade out over 75ms when hiding.
    private JFXAnimationTimer animation = new JFXAnimationTimer(
        JFXKeyFrame.builder().setDuration(Duration.millis(150))
            .setAnimateCondition(() -> !hiding)
            .setKeyValues(JFXKeyValue.builder()
                    .setTargetSupplier(() -> root.opacityProperty())
                    .setEndValue(1).build(),
                JFXKeyValue.builder()
                    .setTargetSupplier(() -> root.scaleXProperty())
                    .setEndValue(1).build(),
                JFXKeyValue.builder()
                    .setTargetSupplier(() -> root.scaleYProperty())
                    .setEndValue(1).build())
            .build(),
        JFXKeyFrame.builder().setDuration(Duration.millis(75))
            .setAnimateCondition(() -> hiding)
            .setKeyValues(JFXKeyValue.builder()
                .setTargetSupplier(() -> root.opacityProperty())
                .setEndValue(0).build()).build()
    );

    /**
     * {@inheritDoc}
     */
    public JFXTooltip() {
        init();
    }

    /**
     * {@inheritDoc}
     */
    public JFXTooltip(String text) {
        super(text);
        init();
    }

    // Adds the style class and hooks the window events that reset the
    // animation start state (showing) and position/play it (shown).
    private void init() {
        getStyleClass().add(DEFAULT_STYLE_CLASS);
        eventHandlerManager.addEventHandler(WindowEvent.WINDOW_SHOWING, event -> {
            // reset to the animation's start state before the window appears
            root = getScene().getRoot();
            root.setOpacity(0);
            root.setScaleY(0.8);
            root.setScaleX(0.8);
            animation.setOnFinished(null);
        });
        eventHandlerManager.addEventHandler(WindowEvent.WINDOW_SHOWN, event -> {
            // width/height are only valid once shown, so anchors are adjusted here
            setAnchorX(getUpdatedAnchorX());
            setAnchorY(getUpdatedAnchorY());
            animation.reverseAndContinue();
        });
    }

    /***************************************************************************
     *                                                                         *
     * Properties                                                              *
     *                                                                         *
     **************************************************************************/

    // Tooltip position relative to its node.
    private Pos pos = Pos.BOTTOM_CENTER;
    // Gap (px) between the tooltip and its node.
    private double margin = 8;

    /**
     * @return the tooltip position
     */
    public Pos getPos() {
        return pos;
    }

    /**
     * sets the tooltip position with respect to its node
     * (null resets to {@link Pos#BOTTOM_CENTER})
     *
     * @param pos
     */
    public void setPos(Pos pos) {
        this.pos = pos == null ? Pos.BOTTOM_CENTER : pos;
    }

    /**
     * @return the gap between tooltip and the associated node
     */
    public double getMargin() {
        return margin;
    }

    /**
     * sets the gap between tooltip and the associated node.
     * the default value is 8
     *
     * @param margin
     */
    public void setMargin(double margin) {
        this.margin = margin;
    }

    // Shifts the anchor so the tooltip window is vertically aligned per pos
    // (anchors are set from the node's bounds before the size is known).
    private double getUpdatedAnchorY() {
        switch (pos.getVpos()) {
            case CENTER:
                return getAnchorY() - getHeight() / 2;
            case TOP:
            case BASELINE:
                return getAnchorY() - getHeight();
            default:
                return getAnchorY();
        }
    }

    // Horizontal counterpart of getUpdatedAnchorY().
    private double getUpdatedAnchorX() {
        switch (pos.getHpos()) {
            case CENTER:
                return getAnchorX() - getWidth() / 2;
            case LEFT:
                return getAnchorX() - getWidth();
            default:
                return getAnchorX();
        }
    }

    /***************************************************************************
     *                                                                         *
     * Stylesheet Handling                                                     *
     *                                                                         *
     **************************************************************************/

    /**
     * Initialize the style class to 'jfx-tooltip'.
     * <p>
     * This is the selector class from which CSS can be used to style
     * this control.
     */
    private static final String DEFAULT_STYLE_CLASS = "jfx-tooltip";

    /**
     * {@inheritDoc}
     * Plays the fade-out animation and only calls {@code super.hide()} when it finishes.
     */
    @Override
    public void hide() {
        hiding = true;
        animation.setOnFinished(super::hide);
        animation.reverseAndContinue();
    }

    /**
     * {@inheritDoc}
     * Anchor parameters are recomputed from the owner node's bounds and {@link #getPos()}.
     */
    @Override
    public void show(Node ownerNode, double anchorX, double anchorY) {
        // if tooltip hide animation still running, then hide method is not called yet
        // thus only reverse the animation to show the tooltip again
        hiding = false;
        if (isShowing()) {
            animation.setOnFinished(null);
            animation.reverseAndContinue();
        } else {
            // tooltip was not showing compute its anchors and show it
            Window parent = ownerNode.getScene().getWindow();
            final Bounds origin = ownerNode.localToScene(ownerNode.getBoundsInLocal());
            anchorX = parent.getX() + origin.getMinX() + getHPosForNode(ownerNode);
            anchorY = parent.getY() + origin.getMinY() + getVPosForNode(ownerNode);
            super.show(ownerNode, anchorX, anchorY);
        }
    }

    // Horizontal offset from the node's origin for the configured pos
    // (LEFT falls through to the -margin default).
    private double getHPosForNode(Node node) {
        double hx = -margin;
        switch (pos.getHpos()) {
            case CENTER:
                hx = (node.getBoundsInParent().getWidth() / 2);
                break;
            case RIGHT:
                hx = node.getBoundsInParent().getWidth() + margin;
                break;
        }
        return hx;
    }

    // Vertical offset from the node's origin for the configured pos
    // (TOP/BASELINE fall through to the -margin default).
    private double getVPosForNode(Node node) {
        double vy = -margin;
        switch (pos.getVpos()) {
            case CENTER:
                vy = (node.getBoundsInParent().getHeight() / 2);
                break;
            case BOTTOM:
                vy = node.getBoundsInParent().getHeight() + margin;
                break;
        }
        return vy;
    }

    /**
     * {@inheritDoc}
     * Uses the default Tooltip skin with its drop-shadow effect removed.
     */
    @Override
    protected Skin<?> createDefaultSkin() {
        return new TooltipSkin(this) {
            {
                Node node = getNode();
                node.setEffect(null);
            }
        };
    }

    // Dispatches the WINDOW_SHOWING/WINDOW_SHOWN handlers registered in init().
    private final EventHandlerManager eventHandlerManager = new EventHandlerManager(this);

    /**
     * {@inheritDoc}
     */
    @Override
    public EventDispatchChain buildEventDispatchChain(EventDispatchChain tail) {
        return super.buildEventDispatchChain(tail).prepend(eventHandlerManager);
    }

    /**
     * Shared mouse-driven show/hide state machine: three timers gate hovering
     * (delay before show), visibility (auto-hide), and mouse-exit (grace period
     * during which moving to another node shows its tooltip immediately).
     */
    private static class TooltipBehavior {

        // Node-properties key under which the installed tooltip is stored.
        private static String TOOLTIP_PROP = "jfoenix-tooltip";

        private Timeline hoverTimer = new Timeline();
        private Timeline visibleTimer = new Timeline();
        private Timeline leftTimer = new Timeline();

        /**
         * the currently hovered node
         */
        private Node hoveredNode;

        /**
         * the next tooltip to be shown
         */
        private JFXTooltip nextTooltip;

        /**
         * the current showing tooltip
         */
        private JFXTooltip currentTooltip;

        private TooltipBehavior(Duration hoverDelay, Duration visibleDuration, Duration leftDelay) {
            setHoverDelay(hoverDelay);
            hoverTimer.setOnFinished(event -> {
                ensureHoveredNodeIsVisible(() -> {
                    // set tooltip orientation
                    NodeOrientation nodeOrientation = hoveredNode.getEffectiveNodeOrientation();
                    nextTooltip.getScene().setNodeOrientation(nodeOrientation);
                    //show tooltip
                    showTooltip(nextTooltip);
                    currentTooltip = nextTooltip;
                    hoveredNode = null;
                    // start visible timer
                    visibleTimer.playFromStart();
                });
                // clear next tooltip
                nextTooltip = null;
            });
            setVisibleDuration(visibleDuration);
            visibleTimer.setOnFinished(event -> hideCurrentTooltip());
            setLeftDelay(leftDelay);
            leftTimer.setOnFinished(event -> hideCurrentTooltip());
        }

        private void setHoverDelay(Duration duration) {
            hoverTimer.getKeyFrames().setAll(new KeyFrame(duration));
        }

        private void setVisibleDuration(Duration duration) {
            visibleTimer.getKeyFrames().setAll(new KeyFrame(duration));
        }

        private void setLeftDelay(Duration duration) {
            leftTimer.getKeyFrames().setAll(new KeyFrame(duration));
        }

        private void hideCurrentTooltip() {
            currentTooltip.hide();
            currentTooltip = null;
            hoveredNode = null;
        }

        private void showTooltip(JFXTooltip tooltip) {
            // anchors are computed differently for each tooltip
            tooltip.show(hoveredNode, -1, -1);
        }

        private EventHandler<MouseEvent> moveHandler = (MouseEvent event) -> {
            // if tool tip is already showing, do nothing
            if (visibleTimer.getStatus() == Timeline.Status.RUNNING) {
                return;
            }
            hoveredNode = (Node) event.getSource();
            Object property = hoveredNode.getProperties().get(TOOLTIP_PROP);
            if (property instanceof JFXTooltip) {
                JFXTooltip tooltip = (JFXTooltip) property;
                ensureHoveredNodeIsVisible(() -> {
                    // if a tooltip is already showing then show this tooltip immediately
                    if (leftTimer.getStatus() == Timeline.Status.RUNNING) {
                        if (currentTooltip != null) {
                            currentTooltip.hide();
                        }
                        currentTooltip = tooltip;
                        // show the tooltip
                        showTooltip(tooltip);
                        // stop left timer and start the visible timer to hide the tooltip
                        // once finished
                        leftTimer.stop();
                        visibleTimer.playFromStart();
                    } else {
                        // else mark the tooltip as the next tooltip to be shown once the hover
                        // timer is finished (restart the timer)
                        // t.setActivated(true);
                        nextTooltip = tooltip;
                        hoverTimer.stop();
                        hoverTimer.playFromStart();
                    }
                });
            } else {
                // stale property value: tear down the handlers installed on this node
                uninstall(hoveredNode);
            }
        };

        private WeakEventHandler<MouseEvent> weakMoveHandler = new WeakEventHandler<>(moveHandler);

        private EventHandler<MouseEvent> exitHandler = (MouseEvent event) -> {
            // stop running hover timer as the mouse exited the node
            if (hoverTimer.getStatus() == Timeline.Status.RUNNING) {
                hoverTimer.stop();
            } else if (visibleTimer.getStatus() == Timeline.Status.RUNNING) {
                // if tool tip was already showing, stop the visible timer
                // and start the left timer to hide the current tooltip
                visibleTimer.stop();
                leftTimer.playFromStart();
            }
            hoveredNode = null;
            nextTooltip = null;
        };

        private WeakEventHandler<MouseEvent> weakExitHandler = new WeakEventHandler<>(exitHandler);

        // if mouse is pressed then stop all timers / clear all fields
        private EventHandler<MouseEvent> pressedHandler = (MouseEvent event) -> {
            // stop timers
            hoverTimer.stop();
            visibleTimer.stop();
            leftTimer.stop();
            // hide current tooltip
            if (currentTooltip != null) {
                currentTooltip.hide();
            }
            // clear fields
            hoveredNode = null;
            currentTooltip = null;
            nextTooltip = null;
        };

        private WeakEventHandler<MouseEvent> weakPressedHandler = new WeakEventHandler<>(pressedHandler);

        // Registers the weak mouse handlers on the node (remove-then-add keeps the
        // registration idempotent) and stores the tooltip in the node's properties.
        private void install(Node node, JFXTooltip tooltip) {
            if (node == null) {
                return;
            }
            if (tooltip == null) {
                uninstall(node);
                return;
            }
            node.removeEventHandler(MouseEvent.MOUSE_MOVED, weakMoveHandler);
            node.removeEventHandler(MouseEvent.MOUSE_EXITED, weakExitHandler);
            node.removeEventHandler(MouseEvent.MOUSE_PRESSED, weakPressedHandler);
            node.addEventHandler(MouseEvent.MOUSE_MOVED, weakMoveHandler);
            node.addEventHandler(MouseEvent.MOUSE_EXITED, weakExitHandler);
            node.addEventHandler(MouseEvent.MOUSE_PRESSED, weakPressedHandler);
            node.getProperties().put(TOOLTIP_PROP, tooltip);
        }

        // Removes the handlers/property; if the node's tooltip is the one currently
        // showing (or pending), resets all behavior state via the pressed handler.
        private void uninstall(Node node) {
            if (node == null) {
                return;
            }
            node.removeEventHandler(MouseEvent.MOUSE_MOVED, weakMoveHandler);
            node.removeEventHandler(MouseEvent.MOUSE_EXITED, weakExitHandler);
            node.removeEventHandler(MouseEvent.MOUSE_PRESSED, weakPressedHandler);
            Object tooltip = node.getProperties().get(TOOLTIP_PROP);
            if (tooltip != null) {
                node.getProperties().remove(TOOLTIP_PROP);
                if (tooltip.equals(currentTooltip) || tooltip.equals(nextTooltip)) {
                    weakPressedHandler.handle(null);
                }
            }
        }

        // Runs the action only when the hovered node's window is showing and the
        // node itself is visible in the scene graph (impl_isTreeVisible is a
        // private JavaFX 8 API; removed in JavaFX 9+).
        private void ensureHoveredNodeIsVisible(Runnable visibleRunnable) {
            final Window owner = getWindow(hoveredNode);
            if (owner != null && owner.isShowing()) {
                final boolean treeVisible = hoveredNode.impl_isTreeVisible();
                if (treeVisible) {
                    visibleRunnable.run();
                }
            }
        }

        // Null-safe lookup of the window owning the given node.
        private Window getWindow(final Node node) {
            final Scene scene = node == null ? null : node.getScene();
            return scene == null ? null : scene.getWindow();
        }
    }
}
/* * @(#)AbstractPolicy.java * * Copyright 2003-2005 Sun Microsystems, Inc. All Rights Reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistribution of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistribution in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Sun Microsystems, Inc. or the names of contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * This software is provided "AS IS," without a warranty of any kind. ALL * EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING * ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") * AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE * AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST * REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, * INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY * OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, * EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. * * You acknowledge that this software is not designed or intended for use in * the design, construction, operation or maintenance of any nuclear facility. 
*/
package org.wso2.balana;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.balana.combine.*;
import org.wso2.balana.ctx.AbstractResult;
import org.wso2.balana.ctx.EvaluationCtx;
import org.wso2.balana.ctx.xacml2.Result;
import org.wso2.balana.xacml2.Obligation;
import org.wso2.balana.xacml3.Advice;
import org.wso2.balana.xacml3.AdviceExpression;

import java.io.OutputStream;
import java.io.PrintStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.wso2.balana.xacml3.ObligationExpression;

/**
 * Represents an instance of an XACML policy (either a Policy or a PolicySet).
 * Holds the data shared by both kinds of element: id, version, combining
 * algorithm, target, obligations, advice expressions and combiner parameters.
 *
 * @since 1.0
 * @author Seth Proctor
 * @author Marco Barreno
 */
public abstract class AbstractPolicy implements PolicyTreeElement {

    // attributes associated with this policy
    private URI idAttr;
    private String version;
    private CombiningAlgorithm combiningAlg;

    // the elements in the policy
    private String description;
    private AbstractTarget target;

    // the value in defaults, or null if there was no default value
    private String defaultVersion;

    // the meta-data associated with this policy
    protected PolicyMetaData metaData;

    // the child elements under this policy represented simply as the
    // PolicyTreeElements...
    private List<PolicyTreeElement> children;

    // ...or the CombinerElements that are passed to combining algorithms
    private List<CombinerElement> childElements;

    // any obligations held by this policy
    private Set<AbstractObligation> obligationExpressions;

    // any advice expressions held by this policy
    private Set<AdviceExpression> adviceExpressions;

    // the list of combiner parameters
    private List<CombinerParameter> parameters;

    private String subjectPolicyValue;
    private String resourcePolicyValue;
    private String actionPolicyValue;
    private String envPolicyValue;

    // the logger we'll use for all messages
    private static final Log logger = LogFactory.getLog(AbstractPolicy.class);

    /**
     * Constructor used by <code>PolicyReference</code>, which supplies its own values for the
     * methods in this class.
     */
    protected AbstractPolicy() {

    }

    /**
     * Constructor used to create a policy from concrete components.
     *
     * @param id the policy id
     * @param version the policy version or null for the default (this is always null for pre-2.0
     *            policies)
     * @param combiningAlg the combining algorithm to use
     * @param description describes the policy or null if there is none
     * @param target the policy's target
     */
    protected AbstractPolicy(URI id, String version, CombiningAlgorithm combiningAlg,
            String description, AbstractTarget target) {
        this(id, version, combiningAlg, description, target, null);
    }

    /**
     * Constructor used to create a policy from concrete components.
     *
     * @param id the policy id
     * @param version the policy version or null for the default (this is always null for pre-2.0
     *            policies)
     * @param combiningAlg the combining algorithm to use
     * @param description describes the policy or null if there is none
     * @param target the policy's target
     * @param defaultVersion the XPath version to use for selectors
     */
    protected AbstractPolicy(URI id, String version, CombiningAlgorithm combiningAlg,
            String description, AbstractTarget target, String defaultVersion) {
        this(id, version, combiningAlg, description, target, defaultVersion, null, null, null);
    }

    /**
     * Constructor used to create a policy from concrete components.
     *
     * @param id the policy id
     * @param version the policy version or null for the default (this is always null for pre-2.0
     *            policies)
     * @param combiningAlg the combining algorithm to use
     * @param description describes the policy or null if there is none
     * @param target the policy's target
     * @param defaultVersion the XPath version to use for selectors
     * @param obligationExpressions the policy's ObligationExpressions
     * @param adviceExpressions the policy's advice expressions
     * @param parameters the policy's parameters
     */
    protected AbstractPolicy(URI id, String version, CombiningAlgorithm combiningAlg,
            String description, AbstractTarget target, String defaultVersion,
            Set<AbstractObligation> obligationExpressions, Set<AdviceExpression> adviceExpressions,
            List<CombinerParameter> parameters) {
        idAttr = id;
        this.combiningAlg = combiningAlg;
        this.description = description;
        this.target = target;
        this.defaultVersion = defaultVersion;

        if (version == null) {
            this.version = "1.0";
        } else {
            this.version = version;
        }

        // derive the XACML version from the concrete target type; default to 3.0
        String namespaceUri = XACMLConstants.XACML_3_0_IDENTIFIER;
        if (target != null) {
            if (target instanceof org.wso2.balana.xacml2.Target) {
                namespaceUri = XACMLConstants.XACML_2_0_IDENTIFIER;
            }
        }

        metaData = new PolicyMetaData(namespaceUri, defaultVersion);

        // wrap the supplied collections so the policy's state is immutable
        if (obligationExpressions == null) {
            this.obligationExpressions = new HashSet<AbstractObligation>();
        } else {
            this.obligationExpressions = Collections
                    .unmodifiableSet(new HashSet<AbstractObligation>(obligationExpressions));
        }

        if (adviceExpressions == null) {
            this.adviceExpressions = new HashSet<AdviceExpression>();
        } else {
            this.adviceExpressions = Collections
                    .unmodifiableSet(new HashSet<AdviceExpression>(adviceExpressions));
        }

        if (parameters == null) {
            this.parameters = new ArrayList<CombinerParameter>();
        } else {
            this.parameters = Collections
                    .unmodifiableList(new ArrayList<CombinerParameter>(parameters));
        }
    }

    /**
     * Constructor used by child classes to initialize the shared data from a DOM root node.
     *
     * @param root the DOM root of the policy
     * @param policyPrefix either "Policy" or "PolicySet"
     * @param combiningName name of the attribute naming the combining algorithm
     * @throws ParsingException if the policy is invalid
     */
    protected AbstractPolicy(Node root, String policyPrefix, String combiningName)
            throws ParsingException {
        // get the attributes, all of which are common to Policies
        NamedNodeMap attrs = root.getAttributes();

        try {
            // get the attribute Id
            idAttr = new URI(attrs.getNamedItem(policyPrefix + "Id").getNodeValue());
        } catch (Exception e) {
            throw new ParsingException("Error parsing required attribute " + policyPrefix + "Id", e);
        }

        // see if there's a version
        Node versionNode = attrs.getNamedItem("Version");
        if (versionNode != null) {
            version = versionNode.getNodeValue();
        } else {
            // assign the default version
            version = "1.0";
        }

        // now get the combining algorithm...
        try {
            URI algId = new URI(attrs.getNamedItem(combiningName).getNodeValue());
            CombiningAlgFactory factory = Balana.getInstance().getCombiningAlgFactory();
            combiningAlg = factory.createAlgorithm(algId);
        } catch (Exception e) {
            throw new ParsingException("Error parsing combining algorithm" + " in " + policyPrefix, e);
        }

        // ...and make sure it's the right kind
        if (policyPrefix.equals("Policy")) {
            if (!(combiningAlg instanceof RuleCombiningAlgorithm)) {
                throw new ParsingException("Policy must use a Rule " + "Combining Algorithm");
            }
        } else {
            if (!(combiningAlg instanceof PolicyCombiningAlgorithm)) {
                throw new ParsingException("PolicySet must use a Policy " + "Combining Algorithm");
            }
        }

        // do an initial pass through the elements to pull out the
        // defaults, if any, so we can setup the meta-data
        NodeList children = root.getChildNodes();

        for (int i = 0; i < children.getLength(); i++) {
            Node child = children.item(i);
            if (DOMHelper.getLocalName(child).equals(policyPrefix + "Defaults")) {
                handleDefaults(child);
            }
        }

        // with the defaults read, create the meta-data
        metaData = new PolicyMetaData(root.getNamespaceURI(), defaultVersion);

        // now read the remaining policy elements
        obligationExpressions = new HashSet<AbstractObligation>();
        adviceExpressions = new HashSet<AdviceExpression>();
        parameters = new ArrayList<CombinerParameter>();
        children = root.getChildNodes();

        for (int i = 0; i < children.getLength(); i++) {
            Node child = children.item(i);
            String cname = DOMHelper.getLocalName(child);

            if (cname.equals("Description")) {
                if (child.hasChildNodes()) {
                    description = child.getFirstChild().getNodeValue();
                }
            } else if (cname.equals("Target")) {
                target = TargetFactory.getFactory().getTarget(child, metaData);
            } else if (cname.equals("ObligationExpressions") || cname.equals("Obligations")) {
                parseObligationExpressions(child);
            } else if (cname.equals("AdviceExpressions")) {
                parseAdviceExpressions(child);
            } else if (cname.equals("CombinerParameters")) {
                handleParameters(child);
            }
        }

        // finally, make sure the obligations and parameters are immutable
        obligationExpressions = Collections.unmodifiableSet(obligationExpressions);
        adviceExpressions = Collections.unmodifiableSet(adviceExpressions);
        parameters = Collections.unmodifiableList(parameters);
    }

    public String getSubjectPolicyValue() {
        return subjectPolicyValue;
    }

    public void setSubjectPolicyValue(String subjectPolicyValue) {
        this.subjectPolicyValue = subjectPolicyValue;
    }

    public String getResourcePolicyValue() {
        return resourcePolicyValue;
    }

    public void setResourcePolicyValue(String resourcePolicyValue) {
        this.resourcePolicyValue = resourcePolicyValue;
    }

    public String getActionPolicyValue() {
        return actionPolicyValue;
    }

    public void setActionPolicyValue(String actionPolicyValue) {
        this.actionPolicyValue = actionPolicyValue;
    }

    public String getEnvPolicyValue() {
        return envPolicyValue;
    }

    public void setEnvPolicyValue(String envPolicyValue) {
        this.envPolicyValue = envPolicyValue;
    }

    /**
     * Helper routine to parse the obligation data.
     *
     * @param root root node of ObligationExpressions (or Obligations in pre-3.0)
     * @throws ParsingException if error while parsing node
     */
    private void parseObligationExpressions(Node root) throws ParsingException {
        NodeList nodes = root.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            if (DOMHelper.getLocalName(node).equals("ObligationExpression")
                    || DOMHelper.getLocalName(node).equals("Obligation")) {
                AbstractObligation obligation = ObligationFactory.getFactory()
                        .getObligation(node, metaData);
                obligationExpressions.add(obligation);
            }
        }
    }

    /**
     * Helper routine to parse the Advice Expression data.
     *
     * @param root root node of AdviceExpressions
     * @throws ParsingException if error while parsing node
     */
    private void parseAdviceExpressions(Node root) throws ParsingException {
        NodeList nodes = root.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            if (DOMHelper.getLocalName(node).equals("AdviceExpression")) {
                adviceExpressions.add(AdviceExpression.getInstance(node, metaData));
            }
        }
    }

    /**
     * There used to be multiple things in the defaults type, but now there's just the one string
     * that must be a certain value, so it doesn't seem all that useful to have a class for
     * this...we could always bring it back, however, if it started to do more.
     *
     * @param root the Defaults DOM node
     * @throws ParsingException if error while parsing node
     */
    private void handleDefaults(Node root) throws ParsingException {
        defaultVersion = null;
        NodeList nodes = root.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            if (DOMHelper.getLocalName(node).equals("XPathVersion")) {
                defaultVersion = node.getFirstChild().getNodeValue();
            }
        }
    }

    /**
     * Handles all the CombinerParameters in the policy or policy set.
     *
     * @param root the CombinerParameters DOM node
     * @throws ParsingException if error while parsing node
     */
    private void handleParameters(Node root) throws ParsingException {
        NodeList nodes = root.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            if (DOMHelper.getLocalName(node).equals("CombinerParameter")) {
                parameters.add(CombinerParameter.getInstance(node));
            }
        }
    }

    /**
     * Returns the id of this policy.
     *
     * @return the policy id
     */
    public URI getId() {
        return idAttr;
    }

    /**
     * Returns the version of this policy. If this is an XACML 1.x policy then this will always
     * return <code>"1.0"</code>.
     *
     * @return the policy version
     */
    public String getVersion() {
        return version;
    }

    /**
     * Returns the combining algorithm used by this policy.
     *
     * @return the combining algorithm
     */
    public CombiningAlgorithm getCombiningAlg() {
        return combiningAlg;
    }

    /**
     * Returns the list of input parameters for the combining algorithm. If this is an XACML 1.x
     * policy then the list will always be empty.
     *
     * @return a <code>List</code> of <code>CombinerParameter</code>s
     */
    public List getCombiningParameters() {
        return parameters;
    }

    /**
     * Returns the given description of this policy or null if there is no description.
     *
     * @return the description or null
     */
    public String getDescription() {
        return description;
    }

    /**
     * Returns the target for this policy.
     *
     * @return the policy's target
     */
    public AbstractTarget getTarget() {
        return target;
    }

    /**
     * Returns the XPath version to use or null if none was specified.
     *
     * @return XPath version or null
     */
    public String getDefaultVersion() {
        return defaultVersion;
    }

    /**
     * Returns the <code>List</code> of children under this node in the policy tree. Depending on
     * what kind of policy this node represents the children will either be
     * <code>AbstractPolicy</code> objects or <code>Rule</code>s.
     *
     * @return a <code>List</code> of child nodes
     */
    public List<PolicyTreeElement> getChildren() {
        return children;
    }

    /**
     * Returns the <code>List</code> of <code>CombinerElement</code>s that is provided to the
     * combining algorithm. This returns the same set of children that <code>getChildren</code>
     * provides along with any associated combiner parameters.
     *
     * @return a <code>List</code> of <code>CombinerElement</code>s
     */
    public List<CombinerElement> getChildElements() {
        return childElements;
    }

    /**
     * Returns the Set of obligations for this policy, which may be empty.
     *
     * @return the policy's obligations
     */
    public Set getObligationExpressions() {
        return obligationExpressions;
    }

    /**
     * Returns the Set of advice expressions for this policy, which may be empty.
     *
     * @return the policy's advice expressions
     */
    public Set getAdviceExpressions() {
        return adviceExpressions;
    }

    /**
     * Returns the meta-data associated with this policy.
     *
     * @return the policy meta-data
     */
    public PolicyMetaData getMetaData() {
        return metaData;
    }

    /**
     * Given the input context sees whether or not the request matches this policy. This must be
     * called by combining algorithms before they evaluate a policy. This is also used in the
     * initial policy finding operation to determine which top-level policies might apply to the
     * request.
     *
     * @param context the representation of the request
     *
     * @return the result of trying to match the policy and the request
     */
    public MatchResult match(EvaluationCtx context) {
        return target.match(context);
    }

    /**
     * Sets the child policy tree elements for this node, which are passed to the combining
     * algorithm on evaluation. The <code>List</code> must contain <code>CombinerElement</code>s,
     * which in turn will contain <code>Rule</code>s or <code>AbstractPolicy</code>s, but may not
     * contain both types of elements.
     *
     * @param children a <code>List</code> of <code>CombinerElement</code>s representing the child
     *            elements used by the combining algorithm
     */
    protected void setChildren(List<CombinerElement> children) {
        // we always want a concrete list, since we're going to pass it to
        // a combiner that expects a non-null input
        if (children == null) {
            this.children = new ArrayList<PolicyTreeElement>();
        } else {
            // NOTE: since this is only getting called by known child
            // classes we don't check that the types are all the same
            List<PolicyTreeElement> list = new ArrayList<PolicyTreeElement>();
            for (CombinerElement aChildren : children) {
                list.add(aChildren.getElement());
            }
            this.children = Collections.unmodifiableList(list);
            childElements = Collections.unmodifiableList(children);
        }
    }

    /**
     * Tries to evaluate the policy by calling the combining algorithm on the given policies or
     * rules. The <code>match</code> method must always be called first, and must always return
     * MATCH, before this method is called.
     *
     * @param context the representation of the request
     *
     * @return the result of evaluation
     */
    public AbstractResult evaluate(EvaluationCtx context) {
        // evaluate
        AbstractResult result = combiningAlg.combine(context, parameters, childElements);

        // if we have no obligation expressions or advice expressions, we're done
        if (obligationExpressions.size() < 1 && adviceExpressions.size() < 1) {
            return result;
        }

        // now, see if we should add any obligations to the set
        int effect = result.getDecision();

        if ((effect == Result.DECISION_INDETERMINATE) || (effect == Result.DECISION_NOT_APPLICABLE)) {
            // we didn't permit/deny, so we never return obligations
            return result;
        }

        // if any obligations or advices are defined, evaluate them and attach
        processObligationAndAdvices(context, effect, result);
        return result;
    }

    /**
     * Helper method to evaluate the obligations and advice expressions whose
     * fulfill-on / applies-to effect matches the combining result, and attach
     * the evaluated values to the given result.
     *
     * @param evaluationCtx context of a single policy evaluation
     * @param effect policy effect
     * @param result result of combining algorithm
     */
    private void processObligationAndAdvices(EvaluationCtx evaluationCtx, int effect,
            AbstractResult result) {

        if (obligationExpressions != null && obligationExpressions.size() > 0) {
            Set<ObligationResult> results = new HashSet<ObligationResult>();
            for (AbstractObligation obligationExpression : obligationExpressions) {
                if (obligationExpression.getFulfillOn() == effect) {
                    results.add(obligationExpression.evaluate(evaluationCtx));
                }
            }
            result.getObligations().addAll(results);
        }

        if (adviceExpressions != null && adviceExpressions.size() > 0) {
            Set<Advice> advices = new HashSet<Advice>();
            for (AdviceExpression adviceExpression : adviceExpressions) {
                if (adviceExpression.getAppliesTo() == effect) {
                    advices.add(adviceExpression.evaluate(evaluationCtx));
                }
            }
            result.getAdvices().addAll(advices);
        }
    }

    /**
     * Encodes the elements that Policies and PolicySets share (child elements,
     * obligations and advice expressions) into their XML form and writes them
     * to the provided <code>StringBuilder</code>.
     *
     * @param builder string stream into which the XML-encoded data is written
     */
    protected void encodeCommonElements(StringBuilder builder) {

        for (CombinerElement childElement : childElements) {
            childElement.encode(builder);
        }

        if (obligationExpressions != null && obligationExpressions.size() != 0) {
            // XACML 3.0 wraps obligations in <ObligationExpressions>; earlier
            // versions use <Obligations>. (These two branches were previously
            // inverted, producing schema-invalid output for both versions.)
            if (metaData.getXACMLVersion() == XACMLConstants.XACML_VERSION_3_0) {
                builder.append("<ObligationExpressions>\n");
            } else {
                builder.append("<Obligations>\n");
            }

            for (AbstractObligation obligationExpression : obligationExpressions) {
                obligationExpression.encode(builder);
            }

            if (metaData.getXACMLVersion() == XACMLConstants.XACML_VERSION_3_0) {
                builder.append("</ObligationExpressions>\n");
            } else {
                builder.append("</Obligations>\n");
            }
        }

        if (adviceExpressions != null && adviceExpressions.size() != 0) {
            builder.append("<AdviceExpressions>\n");
            for (AdviceExpression adviceExpression : adviceExpressions) {
                adviceExpression.encode(builder);
            }
            builder.append("</AdviceExpressions>\n");
        }
    }
}
/*
 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package javax.imageio.spi;

import java.security.PrivilegedAction;
import java.security.AccessController;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.Vector;
import com.sun.imageio.spi.FileImageInputStreamSpi;
import com.sun.imageio.spi.FileImageOutputStreamSpi;
import com.sun.imageio.spi.InputStreamImageInputStreamSpi;
import com.sun.imageio.spi.OutputStreamImageOutputStreamSpi;
import com.sun.imageio.spi.RAFImageInputStreamSpi;
import com.sun.imageio.spi.RAFImageOutputStreamSpi;
import com.sun.imageio.plugins.gif.GIFImageReaderSpi;
import com.sun.imageio.plugins.gif.GIFImageWriterSpi;
import com.sun.imageio.plugins.jpeg.JPEGImageReaderSpi;
import com.sun.imageio.plugins.jpeg.JPEGImageWriterSpi;
import com.sun.imageio.plugins.png.PNGImageReaderSpi;
import com.sun.imageio.plugins.png.PNGImageWriterSpi;
import com.sun.imageio.plugins.bmp.BMPImageReaderSpi;
import com.sun.imageio.plugins.bmp.BMPImageWriterSpi;
import com.sun.imageio.plugins.wbmp.WBMPImageReaderSpi;
import com.sun.imageio.plugins.wbmp.WBMPImageWriterSpi;
import sun.awt.AppContext;
import java.util.ServiceLoader;
import java.util.ServiceConfigurationError;

/**
 * A registry for service provider instances.  Service provider
 * classes may be detected at run time by means of meta-information in
 * the JAR files containing them.  The intent is that it be relatively
 * inexpensive to load and inspect all available service provider
 * classes.  These classes may then be used to locate and instantiate
 * more heavyweight classes that will perform actual work, in this
 * case instances of <code>ImageReader</code>,
 * <code>ImageWriter</code>, <code>ImageTranscoder</code>,
 * <code>ImageInputStream</code>, and <code>ImageOutputStream</code>.
 *
 * <p> Service providers found on the system classpath (typically
 * the <code>lib/ext</code> directory in the Java
 * installation directory) are automatically loaded as soon as this class is
 * instantiated.
 *
 * <p> When the <code>registerApplicationClasspathSpis</code> method
 * is called, service provider instances declared in the
 * meta-information section of JAR files on the application class path
 * are loaded.  To declare a service provider, a <code>services</code>
 * subdirectory is placed within the <code>META-INF</code> directory
 * that is present in every JAR file.  This directory contains a file
 * for each service provider interface that has one or more
 * implementation classes present in the JAR file.  For example, if
 * the JAR file contained a class named
 * <code>com.mycompany.imageio.MyFormatReaderSpi</code> which
 * implements the <code>ImageReaderSpi</code> interface, the JAR file
 * would contain a file named:
 *
 * <pre>
 * META-INF/services/javax.imageio.spi.ImageReaderSpi
 * </pre>
 *
 * containing the line:
 *
 * <pre>
 * com.mycompany.imageio.MyFormatReaderSpi
 * </pre>
 *
 * <p> The service provider classes are intended to be lightweight
 * and quick to load.  Implementations of these interfaces
 * should avoid complex dependencies on other classes and on
 * native code.
 *
 * <p> It is also possible to manually add service providers not found
 * automatically, as well as to remove those that are using the
 * interfaces of the <code>ServiceRegistry</code> class.  Thus
 * the application may customize the contents of the registry as it
 * sees fit.
 *
 * <p> For more details on declaring service providers, and the JAR
 * format in general, see the <a
 * href="{@docRoot}/../technotes/guides/jar/jar.html">
 * JAR File Specification</a>.
 *
 */
public final class IIORegistry extends ServiceRegistry {

    /**
     * A <code>Vector</code> containing the valid IIO registry
     * categories (superinterfaces) to be used in the constructor.
     */
    private static final Vector<Class<?>> initialCategories =
        new Vector<Class<?>>(5);

    static {
        initialCategories.add(ImageReaderSpi.class);
        initialCategories.add(ImageWriterSpi.class);
        initialCategories.add(ImageTranscoderSpi.class);
        initialCategories.add(ImageInputStreamSpi.class);
        initialCategories.add(ImageOutputStreamSpi.class);
    }

    /**
     * Set up the valid service provider categories and automatically
     * register all available service providers.
     *
     * <p> The constructor is private in order to prevent creation of
     * additional instances.
     */
    private IIORegistry() {
        super(initialCategories.iterator());
        registerStandardSpis();
        registerApplicationClasspathSpis();
    }

    /**
     * Returns the default <code>IIORegistry</code> instance used by
     * the Image I/O API.  This instance should be used for all
     * registry functions.
     *
     * <p> Each <code>ThreadGroup</code> will receive its own
     * instance; this allows different <code>Applet</code>s in the
     * same browser (for example) to each have their own registry.
     *
     * @return the default registry for the current
     * <code>ThreadGroup</code>.
     */
    public static IIORegistry getDefaultInstance() {
        AppContext context = AppContext.getAppContext();
        IIORegistry registry =
            (IIORegistry)context.get(IIORegistry.class);
        if (registry == null) {
            // Create an instance for this AppContext
            registry = new IIORegistry();
            context.put(IIORegistry.class, registry);
        }
        return registry;
    }

    /**
     * Registers the built-in reader, writer and stream SPIs that ship
     * with the JDK, then any providers installed in the extension
     * directories.
     */
    private void registerStandardSpis() {
        // Hardwire standard SPIs
        registerServiceProvider(new GIFImageReaderSpi());
        registerServiceProvider(new GIFImageWriterSpi());
        registerServiceProvider(new BMPImageReaderSpi());
        registerServiceProvider(new BMPImageWriterSpi());
        registerServiceProvider(new WBMPImageReaderSpi());
        registerServiceProvider(new WBMPImageWriterSpi());
        registerServiceProvider(new PNGImageReaderSpi());
        registerServiceProvider(new PNGImageWriterSpi());
        registerServiceProvider(new JPEGImageReaderSpi());
        registerServiceProvider(new JPEGImageWriterSpi());
        registerServiceProvider(new FileImageInputStreamSpi());
        registerServiceProvider(new FileImageOutputStreamSpi());
        registerServiceProvider(new InputStreamImageInputStreamSpi());
        registerServiceProvider(new OutputStreamImageOutputStreamSpi());
        registerServiceProvider(new RAFImageInputStreamSpi());
        registerServiceProvider(new RAFImageOutputStreamSpi());

        registerInstalledProviders();
    }

    /**
     * Registers all available service providers found on the
     * application class path, using the default
     * <code>ClassLoader</code>.  This method is typically invoked by
     * the <code>ImageIO.scanForPlugins</code> method.
     *
     * @see javax.imageio.ImageIO#scanForPlugins
     * @see ClassLoader#getResources
     */
    public void registerApplicationClasspathSpis() {
        // FIX: load only from application classpath
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        Iterator<Class<?>> categories = getCategories();
        while (categories.hasNext()) {
            Class<?> c = categories.next();
            Iterator<?> riter = ServiceLoader.load(c, loader).iterator();
            while (riter.hasNext()) {
                try {
                    // Note that the next() call is required to be inside
                    // the try/catch block; see 6342404.
                    Object r = riter.next();
                    registerServiceProvider(r);
                } catch (ServiceConfigurationError err) {
                    if (System.getSecurityManager() != null) {
                        // In the applet case, we will catch the error so
                        // registration of other plugins can proceed
                        err.printStackTrace();
                    } else {
                        // In the application case, we will throw the
                        // error to indicate app/system misconfiguration
                        throw err;
                    }
                }
            }
        }
    }

    private void registerInstalledProviders() {
        /*
          We need to load installed providers from the
          system classpath (typically the <code>lib/ext</code>
          directory in in the Java installation directory)
          in the privileged mode in order to
          be able read corresponding jar files even if
          file read capability is restricted (like the
          applet context case).
         */
        PrivilegedAction<Object> doRegistration =
            new PrivilegedAction<Object>() {
                public Object run() {
                    Iterator<Class<?>> categories = getCategories();
                    while (categories.hasNext()) {
                        Class<?> c = categories.next();
                        for (Object p : ServiceLoader.loadInstalled(c)) {
                            registerServiceProvider(p);
                        }
                    }
                    return this;
                }
            };

        AccessController.doPrivileged(doRegistration);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.rest.handler.util;

import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.runtime.rest.messages.ErrorResponseBody;
import org.apache.flink.runtime.rest.messages.ResponseBody;
import org.apache.flink.runtime.rest.util.RestConstants;
import org.apache.flink.runtime.rest.util.RestMapperUtils;
import org.apache.flink.util.FlinkException;

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.netty4.io.netty.buffer.ByteBuf;
import org.apache.flink.shaded.netty4.io.netty.buffer.Unpooled;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFuture;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFutureListener;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandlerContext;
import org.apache.flink.shaded.netty4.io.netty.channel.DefaultFileRegion;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.DefaultHttpResponse;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpChunkedInput;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpRequest;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponse;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.LastHttpContent;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslHandler;
import org.apache.flink.shaded.netty4.io.netty.handler.stream.ChunkedFile;
import org.apache.flink.shaded.netty4.io.netty.util.concurrent.Future;
import org.apache.flink.shaded.netty4.io.netty.util.concurrent.GenericFutureListener;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.StringWriter;
import java.nio.channels.FileChannel;
import java.util.Map;
import java.util.concurrent.CompletableFuture;

import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus.OK;
import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

/**
 * Utilities for the REST handlers.
 */
public class HandlerUtils {

	private static final Logger LOG = LoggerFactory.getLogger(HandlerUtils.class);

	private static final ObjectMapper mapper = RestMapperUtils.getStrictObjectMapper();

	/**
	 * Serializes the given response object to JSON and sends it to the given channel.
	 * Falls back to an {@code INTERNAL_SERVER_ERROR} error response when serialization fails.
	 *
	 * @param channelHandlerContext identifying the open channel
	 * @param httpRequest originating http request
	 * @param response which should be sent
	 * @param statusCode of the message to send
	 * @param headers additional header values
	 * @param <P> type of the response
	 */
	public static <P extends ResponseBody> CompletableFuture<Void> sendResponse(
			ChannelHandlerContext channelHandlerContext,
			HttpRequest httpRequest,
			P response,
			HttpResponseStatus statusCode,
			Map<String, String> headers) {
		StringWriter jsonWriter = new StringWriter();

		try {
			mapper.writeValue(jsonWriter, response);
		} catch (IOException ioe) {
			LOG.error("Internal server error. Could not map response to JSON.", ioe);
			return sendErrorResponse(
				channelHandlerContext,
				httpRequest,
				new ErrorResponseBody("Internal server error. Could not map response to JSON."),
				HttpResponseStatus.INTERNAL_SERVER_ERROR,
				headers);
		}

		return sendResponse(
			channelHandlerContext,
			httpRequest,
			jsonWriter.toString(),
			statusCode,
			headers);
	}

	/**
	 * Sends the given error response and status code to the given channel, deriving the
	 * keep-alive behavior from the originating request.
	 *
	 * @param channelHandlerContext identifying the open channel
	 * @param httpRequest originating http request
	 * @param errorMessage which should be sent
	 * @param statusCode of the message to send
	 * @param headers additional header values
	 */
	public static CompletableFuture<Void> sendErrorResponse(
			ChannelHandlerContext channelHandlerContext,
			HttpRequest httpRequest,
			ErrorResponseBody errorMessage,
			HttpResponseStatus statusCode,
			Map<String, String> headers) {
		return sendErrorResponse(
			channelHandlerContext,
			HttpHeaders.isKeepAlive(httpRequest),
			errorMessage,
			statusCode,
			headers);
	}

	/**
	 * Serializes the given error body to JSON and sends it to the given channel.
	 *
	 * @param channelHandlerContext identifying the open channel
	 * @param keepAlive If the connection should be kept alive.
	 * @param errorMessage which should be sent
	 * @param statusCode of the message to send
	 * @param headers additional header values
	 */
	public static CompletableFuture<Void> sendErrorResponse(
			ChannelHandlerContext channelHandlerContext,
			boolean keepAlive,
			ErrorResponseBody errorMessage,
			HttpResponseStatus statusCode,
			Map<String, String> headers) {
		StringWriter jsonWriter = new StringWriter();

		try {
			mapper.writeValue(jsonWriter, errorMessage);
		} catch (IOException e) {
			// this should never happen
			LOG.error("Internal server error. Could not map error response to JSON.", e);
			return sendResponse(
				channelHandlerContext,
				keepAlive,
				"Internal server error. Could not map error response to JSON.",
				HttpResponseStatus.INTERNAL_SERVER_ERROR,
				headers);
		}

		return sendResponse(
			channelHandlerContext,
			keepAlive,
			jsonWriter.toString(),
			statusCode,
			headers);
	}

	/**
	 * Sends the given message and status code to the given channel, deriving the keep-alive
	 * behavior from the originating request.
	 *
	 * @param channelHandlerContext identifying the open channel
	 * @param httpRequest originating http request
	 * @param message which should be sent
	 * @param statusCode of the message to send
	 * @param headers additional header values
	 */
	public static CompletableFuture<Void> sendResponse(
			@Nonnull ChannelHandlerContext channelHandlerContext,
			@Nonnull HttpRequest httpRequest,
			@Nonnull String message,
			@Nonnull HttpResponseStatus statusCode,
			@Nonnull Map<String, String> headers) {
		return sendResponse(
			channelHandlerContext,
			HttpHeaders.isKeepAlive(httpRequest),
			message,
			statusCode,
			headers);
	}

	/**
	 * Writes the given message body with the given status code to the channel and flushes it.
	 * Closes the connection afterwards unless keep-alive was requested.
	 *
	 * @param channelHandlerContext identifying the open channel
	 * @param keepAlive If the connection should be kept alive.
	 * @param message which should be sent
	 * @param statusCode of the message to send
	 * @param headers additional header values
	 */
	public static CompletableFuture<Void> sendResponse(
			@Nonnull ChannelHandlerContext channelHandlerContext,
			boolean keepAlive,
			@Nonnull String message,
			@Nonnull HttpResponseStatus statusCode,
			@Nonnull Map<String, String> headers) {
		HttpResponse httpResponse = new DefaultHttpResponse(HTTP_1_1, statusCode);

		// default content type first so callers may override it via the headers map
		httpResponse.headers().set(CONTENT_TYPE, RestConstants.REST_CONTENT_TYPE);
		for (Map.Entry<String, String> headerEntry : headers.entrySet()) {
			httpResponse.headers().set(headerEntry.getKey(), headerEntry.getValue());
		}

		if (keepAlive) {
			httpResponse.headers().set(CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
		}

		byte[] payload = message.getBytes(ConfigConstants.DEFAULT_CHARSET);
		ByteBuf bodyBuffer = Unpooled.copiedBuffer(payload);
		HttpHeaders.setContentLength(httpResponse, payload.length);

		// write the initial line and the header.
		channelHandlerContext.write(httpResponse);

		channelHandlerContext.write(bodyBuffer);

		ChannelFuture lastContentFuture =
			channelHandlerContext.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);

		// close the connection, if no keep-alive is needed
		if (!keepAlive) {
			lastContentFuture.addListener(ChannelFutureListener.CLOSE);
		}

		return toCompletableFuture(lastContentFuture);
	}

	/**
	 * Streams the contents of the given file to the client as a {@code text/plain} response.
	 * Uses zero-copy {@link DefaultFileRegion} transfer on plain connections and chunked
	 * transfer when an SSL handler is installed.
	 *
	 * @param ctx identifying the open channel
	 * @param file the file whose contents should be transferred
	 * @param httpRequest originating http request
	 * @throws FlinkException if the file cannot be found or transferred
	 */
	public static void transferFile(ChannelHandlerContext ctx, File file, HttpRequest httpRequest) throws FlinkException {
		final RandomAccessFile raf;

		try {
			raf = new RandomAccessFile(file, "r");
		} catch (FileNotFoundException e) {
			throw new FlinkException("Can not find file " + file + ".", e);
		}

		try {
			final long fileLength = raf.length();
			final FileChannel fileChannel = raf.getChannel();

			try {
				HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
				response.headers().set(CONTENT_TYPE, "text/plain");

				if (HttpHeaders.isKeepAlive(httpRequest)) {
					response.headers().set(CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
				}
				HttpHeaders.setContentLength(response, fileLength);

				// write the initial line and the header.
				ctx.write(response);

				// write the content.
				final ChannelFuture lastContentFuture;
				// releases both the channel and the underlying file once the transfer completes
				final GenericFutureListener<Future<? super Void>> completionListener =
					future -> {
						fileChannel.close();
						raf.close();
					};

				if (ctx.pipeline().get(SslHandler.class) == null) {
					ctx.write(
						new DefaultFileRegion(fileChannel, 0, fileLength),
						ctx.newProgressivePromise())
						.addListener(completionListener);
					lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
				} else {
					lastContentFuture = ctx
						.writeAndFlush(
							new HttpChunkedInput(new ChunkedFile(raf, 0, fileLength, 8192)),
							ctx.newProgressivePromise())
						.addListener(completionListener);

					// HttpChunkedInput will write the end marker (LastHttpContent) for us.
				}

				// close the connection, if no keep-alive is needed
				if (!HttpHeaders.isKeepAlive(httpRequest)) {
					lastContentFuture.addListener(ChannelFutureListener.CLOSE);
				}
			} catch (IOException ex) {
				fileChannel.close();
				throw ex;
			}
		} catch (IOException ioe) {
			try {
				raf.close();
			} catch (IOException e) {
				throw new FlinkException("Close file or channel error.", e);
			}

			throw new FlinkException("Could not transfer file " + file + " to the client.", ioe);
		}
	}

	/**
	 * Adapts a Netty {@link ChannelFuture} into a {@link CompletableFuture} that completes
	 * with {@code null} on success and exceptionally with the channel failure cause otherwise.
	 */
	private static CompletableFuture<Void> toCompletableFuture(final ChannelFuture channelFuture) {
		final CompletableFuture<Void> completableFuture = new CompletableFuture<>();
		channelFuture.addListener(future -> {
			if (future.isSuccess()) {
				completableFuture.complete(null);
			} else {
				completableFuture.completeExceptionally(future.cause());
			}
		});
		return completableFuture;
	}
}
/*
 * Copyright (c) 2021, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.wso2.carbon.apimgt.common.gateway.dto;

import org.apache.commons.lang3.SerializationUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * Holds information related to jwt token.
 */
public class JWTInfoDto {

    private String applicationTier;
    private String keyType;
    private String version;
    private String applicationName;
    private String endUser;
    private int endUserTenantId;
    private String applicationUUId;
    private String subscriber;
    private String subscriptionTier;
    private String applicationId;
    private String apiContext;
    private String apiName;
    private JWTValidationInfo jwtValidationInfo;
    private Map<String, String> appAttributes = new HashMap<>();
    private String sub;
    private String[] organizations;

    public JWTInfoDto() {

    }

    /**
     * Copy constructor.
     *
     * <p>Mutable state (validation info, attribute map, organizations array) is copied so
     * that the new instance is independent of the source; scalar fields are assigned directly.
     *
     * @param jwtInfoDto instance to copy; must not be {@code null}
     */
    public JWTInfoDto(JWTInfoDto jwtInfoDto) {

        this.applicationId = jwtInfoDto.getApplicationId();
        this.keyType = jwtInfoDto.getKeyType();
        this.version = jwtInfoDto.getVersion();
        this.applicationName = jwtInfoDto.getApplicationName();
        this.endUser = jwtInfoDto.getEndUser();
        this.endUserTenantId = jwtInfoDto.getEndUserTenantId();
        this.applicationUUId = jwtInfoDto.getApplicationUUId();
        this.subscriber = jwtInfoDto.getSubscriber();
        this.subscriptionTier = jwtInfoDto.getSubscriptionTier();
        this.applicationTier = jwtInfoDto.getApplicationTier();
        this.apiContext = jwtInfoDto.getApiContext();
        this.apiName = jwtInfoDto.getApiName();
        // Guard against a source without validation info; the previous code threw an NPE here.
        this.jwtValidationInfo = jwtInfoDto.getJwtValidationInfo() == null
                ? null : new JWTValidationInfo(jwtInfoDto.getJwtValidationInfo());
        // Defensive copy so the attribute map is not shared between the copy and the source.
        this.appAttributes = jwtInfoDto.getAppAttributes() == null
                ? new HashMap<>() : new HashMap<>(jwtInfoDto.getAppAttributes());
        this.sub = jwtInfoDto.getSub();
        this.organizations = copyOf(jwtInfoDto.getOrganizations());
    }

    /**
     * Null-safe copy of a String array. Since String is immutable, a shallow
     * {@code clone()} is equivalent to (and far cheaper than) a serialization-based deep clone.
     */
    private static String[] copyOf(String[] values) {

        return values == null ? null : values.clone();
    }

    public String getApplicationTier() {

        return applicationTier;
    }

    public void setApplicationTier(String applicationTier) {

        this.applicationTier = applicationTier;
    }

    public String getKeyType() {

        return keyType;
    }

    public void setKeyType(String keyType) {

        this.keyType = keyType;
    }

    public String getVersion() {

        return version;
    }

    public void setVersion(String version) {

        this.version = version;
    }

    public String getApplicationName() {

        return applicationName;
    }

    public void setApplicationName(String applicationName) {

        this.applicationName = applicationName;
    }

    public String getEndUser() {

        return endUser;
    }

    public void setEndUser(String endUser) {

        this.endUser = endUser;
    }

    public int getEndUserTenantId() {

        return endUserTenantId;
    }

    public void setEndUserTenantId(int endUserTenantId) {

        this.endUserTenantId = endUserTenantId;
    }

    public String getApplicationUUId() {

        return applicationUUId;
    }

    public void setApplicationUUId(String applicationUUId) {

        this.applicationUUId = applicationUUId;
    }

    public String getSubscriber() {

        return subscriber;
    }

    public void setSubscriber(String subscriber) {

        this.subscriber = subscriber;
    }

    public String getSubscriptionTier() {

        return subscriptionTier;
    }

    public void setSubscriptionTier(String subscriptionTier) {

        this.subscriptionTier = subscriptionTier;
    }

    public String getApplicationId() {

        return applicationId;
    }

    public void setApplicationId(String applicationId) {

        this.applicationId = applicationId;
    }

    public String getApiContext() {

        return apiContext;
    }

    public void setApiContext(String apiContext) {

        this.apiContext = apiContext;
    }

    public JWTValidationInfo getJwtValidationInfo() {

        return jwtValidationInfo;
    }

    public void setJwtValidationInfo(JWTValidationInfo jwtValidationInfo) {

        this.jwtValidationInfo = jwtValidationInfo;
    }

    public String getApiName() {

        return apiName;
    }

    public void setApiName(String apiName) {

        this.apiName = apiName;
    }

    public Map<String, String> getAppAttributes() {

        return appAttributes;
    }

    public void setAppAttributes(Map<String, String> appAttributes) {

        this.appAttributes = appAttributes;
    }

    public String getSub() {

        return sub;
    }

    public void setSub(String sub) {

        this.sub = sub;
    }

    /**
     * Returns a copy of the organizations array so callers cannot mutate internal state.
     */
    public String[] getOrganizations() {

        return copyOf(organizations);
    }

    /**
     * Stores a copy of the given organizations array so later external mutation has no effect.
     */
    public void setOrganizations(String[] organizations) {

        this.organizations = copyOf(organizations);
    }
}
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.enterprise.sessionmanager;

import com.google.common.annotations.VisibleForTesting;
import java.security.SecureRandom;
import java.util.Formatter;
import java.util.List;
import java.util.Random;
import java.util.logging.Logger;

/**
 * This class provides an implementation of the SessionManagerInterface.
 *
 * This class is actually a reasonably thin layer between the
 * SessionManagerInterface and a BackendInterface implementation.  This layer
 * contains settings to determine what type of Backend to instantiate,
 * session name creation, and basic garbage collection logic.
 */
public class SessionManagerBase implements SessionManagerInterfaceBase {

  // -------------------------------------------------
  // INTERNAL STATE (set up at constuction)

  /* these are protected to facilitate testing - test classes may wish to
   * extend and override these for better local operation */

  protected Settings settings;          // package wide settings

  private static final Logger logger =
      Logger.getLogger(SessionManagerBase.class.getName());

  // For secure session name creation.  SecureRandom (rather than the
  // predictable java.util.Random) is required here: session IDs generated
  // from a guessable PRNG could be predicted by an attacker.
  private static final Random random = new SecureRandom();

  protected BackendInterfaceBase backend;

  // Temp var that will be filled with random bytes.
  // Declared static just to avoid GC overhead.
  private static final byte[] randomBytes = new byte[16];

  // Another tmp var that declare here so as to only
  // create once.
  private static final StringBuilder sidBuilder =
      new StringBuilder(randomBytes.length * 2);

  //-------------------------------------------------
  // INTERNAL HELPER ROUTINES

  /**
   * Generate a 128-bit (16 byte) cryptographically secure random session name.
   *
   * @return random printable string small enough for a cookie but large
   *         enough to be unguessable and secure enough to be unpredictable
   */
  protected String genSessionId() {
    // We generate some random bytes and then convert to ASCII.
    //
    // We synchronize on randomBytes so as to singlethread access
    // to 'randomBytes', and 'sidBuilder'.
    synchronized (randomBytes) {
      random.nextBytes(randomBytes);
      sidBuilder.setLength(0);
      Formatter f = new Formatter(sidBuilder);
      for (byte b : randomBytes) {
        f.format("%02x", b);
      }
      return f.toString();
    }
  }

  /**
   * Run session garbage stochastically (i.e. ~1% chance upon call to this func).
   */
  protected void garbageCollectIfNeeded() {
    if (random.nextDouble() < 0.01) {
      garbageCollectNow();
    }
  }

  /**
   * Allows for dependency injection for the backend.  If the parameter is
   * null, it selects a default one based on command-line parameters.
   *
   * @param backend desired backend, or null for the default
   */
  protected void setupBackendBase(BackendInterfaceBase backend) {
    if (backend != null) {
      logger.fine("Using <" + backend.getClass() + "> as Session Manager backend.");
      this.backend = backend;
    } else {
      setupDefaultBackend();
    }
  }

  /** Instantiates the default (file-based) backend. */
  protected void setupDefaultBackend() {
    this.backend = new BackendFilesBase(settings);
  }

  @VisibleForTesting
  public BackendInterfaceBase getBackend() {
    return backend;
  }

  //-------------------------------------------------
  // CONSTRUCTOR

  public SessionManagerBase() {
    // Will use the default backend.
    this(null);
  }

  /**
   * Allows for dependency injection for the backend (used mainly for tests).
   *
   * @param backend Desired backend - overrides the default.
   */
  public SessionManagerBase(BackendInterfaceBase backend) {
    settings = new Settings();
    setupBackendBase(backend);
  }

  // -------------------------------------------------
  // INTERFACE SPECIFIED METHODS

  /** @see com.google.enterprise.sessionmanager.SessionManagerInterface#sessionExists */
  @Override
  public boolean sessionExists(String sessionId) {
    // A null or empty id can never name an existing session.
    if (sessionId == null || sessionId.equals("")) {
      return false;
    }
    return backend.sessionExists(sessionId);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#keyExists
   */
  public boolean keyExists(String sessionId, String key) {
    return backend.keyExists(sessionId, key);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#sessionAge
   */
  public long sessionAge(String sessionId) {
    return backend.sessionAge(sessionId);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#createSession
   */
  public String createSession() {
    garbageCollectIfNeeded();
    String sessionId = genSessionId();
    backend.createSession(sessionId);
    return sessionId;
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#setValue
   */
  public void setValue(String sessionId, String key, String newValue)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    // Store empty string rather than null so readers never see a null value.
    if (newValue == null) {
      newValue = "";
    }
    logger.finer("session:" + sessionId + " key:" + key + " newValue:" + newValue);
    backend.writeData(sessionId, key, newValue);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#getValue
   */
  public String getValue(String sessionId, String key)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    if (key == null)
      return null;
    byte[] data = backend.readKey(sessionId, key);
    if (data == null) {
      logger.finer("session:" + sessionId + " key:" + key + ". Not Present.");
      return null;
    }
    String value = Utils.toStringUtf8(data);
    logger.finer("session:" + sessionId + " key:" + key + " value:" + value);
    return value;
  }

  /** Binary variant of setValue; a null value is stored as an empty byte array. */
  public void setValueBin(String sessionId, String key, byte[] newValue)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    if (newValue == null) {
      newValue = new byte[0];
    }
    logger.finer("session:" + sessionId + " key:" + key
        + " newValue:" + Utils.toStringUtf8(newValue));
    backend.writeData(sessionId, key, newValue);
  }

  /** Like setValueBin but the backend stores the data compressed. */
  public void setValueCompressed(String sessionId, String key, byte[] value)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    if (value == null) {
      value = new byte[0];
    }
    logger.finer("session:" + sessionId + " key:" + key
        + " newValue:" + Utils.toStringUtf8(value) + " size:" + value.length);
    backend.writeCompressedData(sessionId, key, value);
  }

  /** Binary variant of getValue; returns null for a null key or missing entry. */
  public byte[] getValueBin(String sessionId, String key)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    if (key == null)
      return null;
    byte[] result = backend.readKey(sessionId, key);
    logger.finer("session:" + sessionId + " key:" + key
        + " result: " + Utils.toStringUtf8(result));
    return result;
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#storeKrb5Identity
   */
  public KerberosId storeKrb5Identity(String sessionId, String spnegoBlob)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    logger.finer("session:" + sessionId + " blob length:" + spnegoBlob.length());
    return backend.storeKrb5Identity(sessionId, spnegoBlob);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#getKrb5TokenForServer
   */
  public KeyMaterial getKrb5TokenForServer(String sessionId, String server)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    logger.finer("session:" + sessionId + " server" + server);
    return backend.getKrb5TokenForServer(sessionId, server);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#getKrb5Identity
   */
  public String getKrb5Identity(String sessionId)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    logger.finer("session:" + sessionId + " user credentials are initialized?");
    return backend.getKrb5Identity(sessionId);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#deleteSession
   */
  public void deleteSession(String sessionId)
      throws IndexOutOfBoundsException {
    backend.deleteSession(sessionId);
    garbageCollectIfNeeded();
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#getKrb5CcacheFilename
   */
  public String getKrb5CcacheFilename(String sessionId)
      throws IndexOutOfBoundsException {
    garbageCollectIfNeeded();
    return backend.getKrb5CcacheFilename(sessionId);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#parseKrb5Keytab
   */
  public String parseKrb5Keytab(String filepath) {
    garbageCollectIfNeeded();
    return backend.parseKrb5Keytab(filepath);
  }

  /**
   * @see com.google.enterprise.sessionmanager.SessionManagerInterface#getKrb5ServerNameIfEnabled
   */
  public String getKrb5ServerNameIfEnabled() {
    garbageCollectIfNeeded();
    return backend.getKrb5ServerNameIfEnabled();
  }

  //-------------------------------------------------
  // ADDED PUBLIC METHODS SPECIFIC TO THIS IMPLEMENTATION
  // (these generally support testing)

  /**
   * wipe all data from the session back-end
   * (generally used for post-testing cleanup)
   */
  public void wipeAllSessionData()
      throws IndexOutOfBoundsException {
    List<String> sessions = backend.listSessions();
    for (String session : sessions) {
      backend.deleteSession(session);
    }
  }

  /** Deletes every session whose age exceeds the configured timeout. */
  protected void garbageCollectNow() {
    List<String> sessions = null;
    try {
      sessions = backend.listSessions();
    } catch (Exception e) {
      // RPC server thows exception for this method, we don't need to do
      // garbage collection as the server code itself does it.
      return;
    }
    if (sessions == null)
      return;
    for (String session : sessions) {
      // I've received reports that sometimes the call below to sessionAge
      // throws the session not found exception.  I'm not sure how listSessions
      // would return a sessionId that doesn't actually exist, perhaps some
      // funky race condition.  regardless, it's obviously best to wrap the
      // call to sessionAge in a try..catch ... - ken2
      try {
        if (backend.sessionAge(session) > settings.getSessionTimeout()) {
          backend.deleteSession(session);
        }
      } catch (IndexOutOfBoundsException e) {
        // no need to take any action - if the session has ceased to exist,
        // we don't need to garbage collect it.
      }
    }
  }
}
package com.mindoo.domino.jna;

import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.Vector;

import com.mindoo.domino.jna.internal.NotesConstants;

import lotus.domino.Base;
import lotus.domino.Document;
import lotus.domino.NotesException;
import lotus.domino.View;
import lotus.domino.ViewEntry;

/**
 * Bridge between a {@link ViewEntry} and data we retrieve from C API calls
 * 
 * @author Karsten Lehmann
 */
public class ViewEntryImpl implements ViewEntry {
	private View m_parent;
	private NotesViewEntryData m_data;
	private Vector<Object> m_colValues;

	/**
	 * Creates a new instance.
	 * 
	 * @param parent parent view, returned by {@link #getParent()} and used to resolve documents
	 * @param data view entry data read via the C API
	 */
	public ViewEntryImpl(View parent, NotesViewEntryData data) {
		// Bugfix: the parent was previously never stored, so getParent() returned
		// null and getDocument() threw a NullPointerException.
		m_parent = parent;
		m_data = data;
	}

	@Override
	public void recycle() throws NotesException {
		//noop, because we don't have a handle to release
	}

	@Override
	public void recycle(Vector paramVector) throws NotesException {
		if (paramVector!=null) {
			for (int i=0; i<paramVector.size(); i++) {
				Object obj = paramVector.get(i);
				if (obj instanceof Base) {
					try {
						((Base)obj).recycle();
					}
					catch (NotesException e) {
						// best-effort recycle; ignore failures for individual elements
					}
				}
			}
		}
	}

	@Override
	public boolean isDocument() throws NotesException {
		return !isCategory() && !isTotal();
	}

	@Override
	public boolean isCategory() throws NotesException {
		return (m_data.getNoteId() & NotesConstants.NOTEID_CATEGORY) == NotesConstants.NOTEID_CATEGORY;
	}

	@Override
	public boolean isTotal() throws NotesException {
		return (m_data.getNoteId() & NotesConstants.NOTEID_CATEGORY_TOTAL) == NotesConstants.NOTEID_CATEGORY_TOTAL;
	}

	@Override
	public Object getParent() throws NotesException {
		return m_parent;
	}

	@Override
	public Document getDocument() throws NotesException {
		try {
			return m_parent.getParent().getDocumentByID(Integer.toString(m_data.getNoteId(), 16));
		}
		catch (NotesException e) {
			// 4091 = "invalid note id"; treat a missing document as null
			if (e.id == 4091)
				return null;
			else
				throw e;
		}
	}

	@Override
	public int getSiblingCount() throws NotesException {
		return m_data.getSiblingCount();
	}

	@Override
	public int getChildCount() throws NotesException {
		return m_data.getChildCount();
	}

	@Override
	public int getDescendantCount() throws NotesException {
		return m_data.getDescendantCount();
	}

	@Override
	public int getIndentLevel() throws NotesException {
		int[] pos = m_data.getPosition();
		if (pos==null)
			return 0;
		return pos.length - 1;
	}

	@Override
	public int getColumnIndentLevel() throws NotesException {
		return m_data.getIndentLevels();
	}

	@Override
	public String getPosition(char paramChar) throws NotesException {
		int[] pos = m_data.getPosition();
		if (pos==null)
			return "";
		StringBuilder sb = new StringBuilder();
		for (int i=0; i<pos.length; i++) {
			if (sb.length()>0)
				sb.append(paramChar);
			sb.append(Integer.toString(pos[i]));
		}
		return sb.toString();
	}

	@Override
	public String getNoteID() throws NotesException {
		return Integer.toString(m_data.getNoteId(), 16);
	}

	@Override
	public int getNoteIDAsInt() throws NotesException {
		return m_data.getNoteId();
	}

	@Override
	public String getUniversalID() throws NotesException {
		return m_data.getUNID();
	}

	@Override
	public boolean isConflict() throws NotesException {
		return m_data.isConflict();
	}

	public boolean isResponse() {
		return m_data.isResponse();
	}

	@Override
	public Vector getColumnValues() throws NotesException {
		if (m_colValues==null) {
			m_colValues = new Vector<Object>();
			
			// Collect the column values ordered by their index in the collection,
			// converting JNA types to the classic lotus.domino value types.
			TreeMap<Integer,Object> columnValuesByColValuesIndex = new TreeMap<Integer,Object>();
			
			NotesCollection parentCol = m_data.getParent();
			Iterator<String> colNames = parentCol.getColumnNames();
			
			while (colNames.hasNext()) {
				String currColName = colNames.next();
				int colValuesIndex = m_data.getParent().getColumnValuesIndex(currColName);
				// 65535 marks a column without a summary-buffer value
				if (colValuesIndex<65535) {
					Object currColValue = m_data.get(currColName);
					if (currColValue instanceof Calendar) {
						currColValue = ((Calendar)currColValue).getTime();
					}
					else if (currColValue instanceof List) {
						List<?> currColValueAsList = (List<?>) currColValue;
						//convert column value to a Vector
						Vector<Object> currColValueAsVector = new Vector<Object>(currColValueAsList.size());
						for (int i=0; i<currColValueAsList.size(); i++) {
							Object currListVal = currColValueAsList.get(i);
							if (currListVal instanceof Calendar) {
								currListVal = ((Calendar)currListVal).getTime();
							}
							else if (currListVal instanceof NotesTimeDate) {
								currListVal = ((NotesTimeDate)currListVal).toDate();
							}
							else if (currListVal instanceof NotesDateRange) {
								NotesDateRange range = (NotesDateRange) currListVal;
								NotesTimeDate startDateTime = range.getStartDateTime();
								NotesTimeDate endDateTime = range.getEndDateTime();
								//replace NotesDateRange with a Vector of Date
								Vector<Object> currDateRangeValuesAsVector = new Vector<Object>(2);
								currDateRangeValuesAsVector.add(startDateTime.toDate());
								currDateRangeValuesAsVector.add(endDateTime.toDate());
								currListVal = currDateRangeValuesAsVector;
							}
							else if (currListVal instanceof Calendar[]) {
								//replace Calendar[] with date range info with a Vector of Date
								Calendar[] calArr = (Calendar[]) currListVal;
								Vector<Object> currDateRangeValuesAsVector = new Vector<Object>(calArr.length);
								for (int j=0; j<calArr.length; j++) {
									if (calArr[j]!=null) {
										currDateRangeValuesAsVector.add(calArr[j].getTime());
									}
								}
								currListVal = currDateRangeValuesAsVector;
							}
							currColValueAsVector.add(currListVal);
						}
						currColValue = currColValueAsVector;
					}
					columnValuesByColValuesIndex.put(colValuesIndex, currColValue);
				}
			}
			
			// Expand the sparse index->value map into a dense Vector, filling gaps with "".
			Object[] colValuesArr = new Object[columnValuesByColValuesIndex.isEmpty() ? 0 : (columnValuesByColValuesIndex.lastKey()+1)];
			for (Entry<Integer,Object> currEntry : columnValuesByColValuesIndex.entrySet()) {
				colValuesArr[currEntry.getKey()] = currEntry.getValue();
			}
			
			m_colValues = new Vector<Object>(colValuesArr.length);
			for (int i=0; i<colValuesArr.length; i++) {
				if (colValuesArr[i]==null)
					m_colValues.add("");
				else
					m_colValues.add(colValuesArr[i]);
			}
		}
		return m_colValues;
	}

	public Iterator<String> getColumnNamesWithValues() {
		return m_data.getColumnNames();
	}

	public Object getColumnValue(String columnName) {
		return m_data.get(columnName);
	}

	public int getNumberOfColumnsWithValues() {
		return m_data.getNumberOfColumnsWithValues();
	}

	@Override
	public int getFTSearchScore() throws NotesException {
		return m_data.getFTScore();
	}

	@Override
	public boolean isValid() throws NotesException {
		return m_data.getNoteId()!=0;
	}

	@Override
	public boolean getRead() throws NotesException {
		return !m_data.isUnread();
	}

	@Override
	public boolean getRead(String paramString) throws NotesException {
		return false;
	}

	@Override
	public boolean isPreferJavaDates() throws NotesException {
		return true;
	}

	@Override
	public void setPreferJavaDates(boolean paramBoolean) throws NotesException {
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.cql3.validation.operations;

import org.junit.Assert;
import org.junit.Test;

import org.apache.cassandra.schema.SchemaConstants;
import org.apache.cassandra.cql3.CQLTester;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.SyntaxException;
import org.apache.cassandra.schema.SchemaKeyspace;

import static java.lang.String.format;
import static org.junit.Assert.assertEquals;

/**
 * CQL tests for ALTER TABLE / ALTER KEYSPACE behavior: adding and dropping
 * collection columns, dropping columns with explicit timestamps, and altering
 * keyspace replication/durability options.
 */
public class AlterTest extends CQLTester
{
    @Test
    public void testAddList() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY, content text);");
        execute("ALTER TABLE %s ADD myCollection list<text>;");
        execute("INSERT INTO %s (id, content , myCollection) VALUES ('test', 'first test', ['first element']);");

        assertRows(execute("SELECT * FROM %s;"), row("test", "first test", list("first element")));
    }

    @Test
    public void testDropList() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY, content text, myCollection list<text>);");
        execute("INSERT INTO %s (id, content , myCollection) VALUES ('test', 'first test', ['first element']);");
        execute("ALTER TABLE %s DROP myCollection;");

        assertRows(execute("SELECT * FROM %s;"), row("test", "first test"));
    }

    @Test
    public void testAddMap() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY, content text);");
        execute("ALTER TABLE %s ADD myCollection map<text, text>;");
        execute("INSERT INTO %s (id, content , myCollection) VALUES ('test', 'first test', { '1' : 'first element'});");

        assertRows(execute("SELECT * FROM %s;"), row("test", "first test", map("1", "first element")));
    }

    @Test
    public void testDropMap() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY, content text, myCollection map<text, text>);");
        execute("INSERT INTO %s (id, content , myCollection) VALUES ('test', 'first test', { '1' : 'first element'});");
        execute("ALTER TABLE %s DROP myCollection;");

        assertRows(execute("SELECT * FROM %s;"), row("test", "first test"));
    }

    @Test
    public void testDropListAndAddListWithSameName() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY, content text, myCollection list<text>);");
        execute("INSERT INTO %s (id, content , myCollection) VALUES ('test', 'first test', ['first element']);");
        execute("ALTER TABLE %s DROP myCollection;");
        execute("ALTER TABLE %s ADD myCollection list<text>;");

        // Re-added column starts out null; old data stays dropped.
        assertRows(execute("SELECT * FROM %s;"), row("test", "first test", null));
        execute("UPDATE %s set myCollection = ['second element'] WHERE id = 'test';");
        assertRows(execute("SELECT * FROM %s;"), row("test", "first test", list("second element")));
    }

    @Test
    public void testDropListAndAddMapWithSameName() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY, content text, myCollection list<text>);");
        execute("INSERT INTO %s (id, content , myCollection) VALUES ('test', 'first test', ['first element']);");
        execute("ALTER TABLE %s DROP myCollection;");

        // Re-adding under the same name with an incompatible type must be rejected.
        assertInvalid("ALTER TABLE %s ADD myCollection map<int, int>;");
    }

    @Test
    public void testDropWithTimestamp() throws Throwable
    {
        createTable("CREATE TABLE %s (id int, c1 int, v1 int, todrop int, PRIMARY KEY (id, c1));");
        for (int i = 0; i < 5; i++)
            execute("INSERT INTO %s (id, c1, v1, todrop) VALUES (?, ?, ?, ?) USING TIMESTAMP ?", 1, i, i, i, 10000L * i);

        // flush is necessary since otherwise the values of `todrop` will get discarded during
        // alter statement
        flush(true);
        execute("ALTER TABLE %s DROP todrop USING TIMESTAMP 20000;");
        execute("ALTER TABLE %s ADD todrop int;");
        execute("INSERT INTO %s (id, c1, v1, todrop) VALUES (?, ?, ?, ?) USING TIMESTAMP ?", 1, 100, 100, 100, 30000L);
        // Rows written at or before the drop timestamp (20000) lose their `todrop` value;
        // later writes survive.
        assertRows(execute("SELECT id, c1, v1, todrop FROM %s"),
                   row(1, 0, 0, null),
                   row(1, 1, 1, null),
                   row(1, 2, 2, null),
                   row(1, 3, 3, 3),
                   row(1, 4, 4, 4),
                   row(1, 100, 100, 100));
    }

    @Test
    public void testDropStaticWithTimestamp() throws Throwable
    {
        createTable("CREATE TABLE %s (id int, c1 int, v1 int, todrop int static, PRIMARY KEY (id, c1));");
        for (int i = 0; i < 5; i++)
            execute("INSERT INTO %s (id, c1, v1, todrop) VALUES (?, ?, ?, ?) USING TIMESTAMP ?", 1, i, i, i, 10000L * i);

        // flush is necessary since otherwise the values of `todrop` will get discarded during
        // alter statement
        flush(true);
        execute("ALTER TABLE %s DROP todrop USING TIMESTAMP 20000;");
        execute("ALTER TABLE %s ADD todrop int static;");
        execute("INSERT INTO %s (id, c1, v1, todrop) VALUES (?, ?, ?, ?) USING TIMESTAMP ?", 1, 100, 100, 100, 30000L);
        // static column value with largest timestmap will be available again
        assertRows(execute("SELECT id, c1, v1, todrop FROM %s"),
                   row(1, 0, 0, 4),
                   row(1, 1, 1, 4),
                   row(1, 2, 2, 4),
                   row(1, 3, 3, 4),
                   row(1, 4, 4, 4),
                   row(1, 100, 100, 4));
    }

    @Test
    public void testDropMultipleWithTimestamp() throws Throwable
    {
        createTable("CREATE TABLE %s (id int, c1 int, v1 int, todrop1 int, todrop2 int, PRIMARY KEY (id, c1));");
        for (int i = 0; i < 5; i++)
            execute("INSERT INTO %s (id, c1, v1, todrop1, todrop2) VALUES (?, ?, ?, ?, ?) USING TIMESTAMP ?", 1, i, i, i, i, 10000L * i);

        // flush is necessary since otherwise the values of `todrop1` and `todrop2` will get discarded during
        // alter statement
        flush(true);
        execute("ALTER TABLE %s DROP (todrop1, todrop2) USING TIMESTAMP 20000;");
        execute("ALTER TABLE %s ADD todrop1 int;");
        execute("ALTER TABLE %s ADD todrop2 int;");
        execute("INSERT INTO %s (id, c1, v1, todrop1, todrop2) VALUES (?, ?, ?, ?, ?) USING TIMESTAMP ?", 1, 100, 100, 100, 100, 40000L);
        assertRows(execute("SELECT id, c1, v1, todrop1, todrop2 FROM %s"),
                   row(1, 0, 0, null, null),
                   row(1, 1, 1, null, null),
                   row(1, 2, 2, null, null),
                   row(1, 3, 3, 3, 3),
                   row(1, 4, 4, 4, 4),
                   row(1, 100, 100, 100, 100));
    }

    @Test
    public void testChangeStrategyWithUnquotedAgrument() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY);");

        // caching option values must be quoted strings; the bare ALL must fail to parse
        assertInvalidSyntaxMessage("no viable alternative at input '}'",
                                   "ALTER TABLE %s WITH caching = {'keys' : 'all', 'rows_per_partition' : ALL};");
    }

    @Test
    // tests CASSANDRA-7976
    public void testAlterIndexInterval() throws Throwable
    {
        String tableName = createTable("CREATE TABLE IF NOT EXISTS %s (id uuid, album text, artist text, data blob, PRIMARY KEY (id))");
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(tableName);

        alterTable("ALTER TABLE %s WITH min_index_interval=256 AND max_index_interval=512");
        assertEquals(256, cfs.metadata().params.minIndexInterval);
        assertEquals(512, cfs.metadata().params.maxIndexInterval);

        // Altering an unrelated option must not reset the index intervals.
        alterTable("ALTER TABLE %s WITH caching = {}");
        assertEquals(256, cfs.metadata().params.minIndexInterval);
        assertEquals(512, cfs.metadata().params.maxIndexInterval);
    }

    /**
     * Migrated from cql_tests.py:TestCQL.create_alter_options_test()
     */
    @Test
    public void testCreateAlterKeyspaces() throws Throwable
    {
        assertInvalidThrow(SyntaxException.class, "CREATE KEYSPACE ks1");
        assertInvalidThrow(ConfigurationException.class, "CREATE KEYSPACE ks1 WITH replication= { 'replication_factor' : 1 }");

        String ks1 = createKeyspace("CREATE KEYSPACE %s WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 1 }");
        String ks2 = createKeyspace("CREATE KEYSPACE %s WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 1 } AND durable_writes=false");

        assertRowsIgnoringOrderAndExtra(execute("SELECT keyspace_name, durable_writes FROM system_schema.keyspaces"),
                                        row(KEYSPACE, true),
                                        row(KEYSPACE_PER_TEST, true),
                                        row(ks1, true),
                                        row(ks2, false));

        schemaChange("ALTER KEYSPACE " + ks1 + " WITH replication = { 'class' : 'NetworkTopologyStrategy', '" + DATA_CENTER + "' : 1 } AND durable_writes=False");
        schemaChange("ALTER KEYSPACE " + ks2 + " WITH durable_writes=true");

        assertRowsIgnoringOrderAndExtra(execute("SELECT keyspace_name, durable_writes, replication FROM system_schema.keyspaces"),
                                        row(KEYSPACE, true, map("class", "org.apache.cassandra.locator.SimpleStrategy", "replication_factor", "1")),
                                        row(KEYSPACE_PER_TEST, true, map("class", "org.apache.cassandra.locator.SimpleStrategy", "replication_factor", "1")),
                                        row(ks1, false, map("class", "org.apache.cassandra.locator.NetworkTopologyStrategy", DATA_CENTER, "1")),
                                        row(ks2, true, map("class", "org.apache.cassandra.locator.SimpleStrategy", "replication_factor", "1")));

        execute("USE " + ks1);

        assertInvalidThrow(ConfigurationException.class, "CREATE TABLE cf1 (a int PRIMARY KEY, b int) WITH compaction = { 'min_threshold' : 4 }");

        execute("CREATE TABLE cf1 (a int PRIMARY KEY, b int) WITH compaction = { 'class' : 'SizeTieredCompactionStrategy', 'min_threshold' : 7 }");
        assertRows(execute("SELECT table_name, compaction FROM system_schema.tables WHERE keyspace_name='" + ks1 + "'"),
                   row("cf1", map("class", "org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy",
                                  "min_threshold", "7",
                                  "max_threshold", "32")));
    }

    /**
     * Test {@link ConfigurationException} thrown on alter keyspace to no DC option in replication configuration.
     */
    @Test
    public void testAlterKeyspaceWithNoOptionThrowsConfigurationException() throws Throwable
    {
        // Create keyspaces
        execute("CREATE KEYSPACE testABC WITH replication={ 'class' : 'NetworkTopologyStrategy', '" + DATA_CENTER + "' : 3 }");
        execute("CREATE KEYSPACE testXYZ WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 3 }");

        // Try to alter the created keyspace without any option
        assertInvalidThrow(ConfigurationException.class, "ALTER KEYSPACE testABC WITH replication={ 'class' : 'NetworkTopologyStrategy' }");
        assertInvalidThrow(ConfigurationException.class, "ALTER KEYSPACE testXYZ WITH replication={ 'class' : 'SimpleStrategy' }");

        // Make sure that the alter works as expected
        execute("ALTER KEYSPACE testABC WITH replication={ 'class' : 'NetworkTopologyStrategy', '" + DATA_CENTER + "' : 2 }");
        execute("ALTER KEYSPACE testXYZ WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 2 }");

        // clean up
        execute("DROP KEYSPACE IF EXISTS testABC");
        execute("DROP KEYSPACE IF EXISTS testXYZ");
    }

    /**
     * Test {@link ConfigurationException} thrown when altering a keyspace to invalid DC option in replication configuration.
     */
    @Test
    public void testAlterKeyspaceWithNTSOnlyAcceptsConfiguredDataCenterNames() throws Throwable
    {
        // Create a keyspace with expected DC name.
execute("CREATE KEYSPACE testABC WITH replication = {'class' : 'NetworkTopologyStrategy', '" + DATA_CENTER + "' : 2 }"); // try modifying the keyspace assertInvalidThrow(ConfigurationException.class, "ALTER KEYSPACE testABC WITH replication = { 'class' : 'NetworkTopologyStrategy', 'INVALID_DC' : 2 }"); execute("ALTER KEYSPACE testABC WITH replication = {'class' : 'NetworkTopologyStrategy', '" + DATA_CENTER + "' : 3 }"); // Mix valid and invalid, should throw an exception assertInvalidThrow(ConfigurationException.class, "ALTER KEYSPACE testABC WITH replication={ 'class' : 'NetworkTopologyStrategy', '" + DATA_CENTER + "' : 2 , 'INVALID_DC': 1}"); // clean-up execute("DROP KEYSPACE IF EXISTS testABC"); } /** * Test for bug of 5232, * migrated from cql_tests.py:TestCQL.alter_bug_test() */ @Test public void testAlterStatementWithAdd() throws Throwable { createTable("CREATE TABLE %s (id int PRIMARY KEY, t text)"); execute("UPDATE %s SET t = '111' WHERE id = 1"); execute("ALTER TABLE %s ADD l list<text>"); assertRows(execute("SELECT * FROM %s"), row(1, null, "111")); execute("ALTER TABLE %s ADD m map<int, text>"); assertRows(execute("SELECT * FROM %s"), row(1, null, null, "111")); } /** * Test for 7744, * migrated from cql_tests.py:TestCQL.downgrade_to_compact_bug_test() */ @Test public void testDowngradeToCompact() throws Throwable { createTable("create table %s (k int primary key, v set<text>)"); execute("insert into %s (k, v) VALUES (0, {'f'})"); flush(); execute("alter table %s drop v"); execute("alter table %s add v int"); } @Test // tests CASSANDRA-9565 public void testDoubleWith() throws Throwable { String[] stmts = { "ALTER KEYSPACE WITH WITH DURABLE_WRITES = true", "ALTER KEYSPACE ks WITH WITH DURABLE_WRITES = true" }; for (String stmt : stmts) { assertInvalidSyntaxMessage("no viable alternative at input 'WITH'", stmt); } } @Test public void testAlterTableWithCompression() throws Throwable { createTable("CREATE TABLE %s (a text, b int, c int, primary key (a, 
b))"); assertRows(execute(format("SELECT compression FROM %s.%s WHERE keyspace_name = ? and table_name = ?;", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES), KEYSPACE, currentTable()), row(map("chunk_length_in_kb", "64", "class", "org.apache.cassandra.io.compress.LZ4Compressor", "min_compress_ratio", "1.1"))); execute("ALTER TABLE %s WITH compression = { 'class' : 'SnappyCompressor', 'chunk_length_in_kb' : 32 };"); assertRows(execute(format("SELECT compression FROM %s.%s WHERE keyspace_name = ? and table_name = ?;", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES), KEYSPACE, currentTable()), row(map("chunk_length_in_kb", "32", "class", "org.apache.cassandra.io.compress.SnappyCompressor", "min_compress_ratio", "1.1"))); execute("ALTER TABLE %s WITH compression = { 'sstable_compression' : 'LZ4Compressor', 'chunk_length_kb' : 64 };"); assertRows(execute(format("SELECT compression FROM %s.%s WHERE keyspace_name = ? and table_name = ?;", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES), KEYSPACE, currentTable()), row(map("chunk_length_in_kb", "64", "class", "org.apache.cassandra.io.compress.LZ4Compressor", "min_compress_ratio", "1.1"))); execute("ALTER TABLE %s WITH compression = { 'sstable_compression' : 'LZ4Compressor', 'min_compress_ratio' : 2 };"); assertRows(execute(format("SELECT compression FROM %s.%s WHERE keyspace_name = ? and table_name = ?;", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES), KEYSPACE, currentTable()), row(map("chunk_length_in_kb", "64", "class", "org.apache.cassandra.io.compress.LZ4Compressor", "min_compress_ratio", "2.0"))); execute("ALTER TABLE %s WITH compression = { 'sstable_compression' : 'LZ4Compressor', 'min_compress_ratio' : 0 };"); assertRows(execute(format("SELECT compression FROM %s.%s WHERE keyspace_name = ? 
and table_name = ?;", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES), KEYSPACE, currentTable()), row(map("chunk_length_in_kb", "64", "class", "org.apache.cassandra.io.compress.LZ4Compressor", "min_compress_ratio", "0.0"))); execute("ALTER TABLE %s WITH compression = { 'sstable_compression' : '', 'chunk_length_kb' : 32 };"); assertRows(execute(format("SELECT compression FROM %s.%s WHERE keyspace_name = ? and table_name = ?;", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES), KEYSPACE, currentTable()), row(map("enabled", "false"))); execute("ALTER TABLE %s WITH compression = { 'class' : 'SnappyCompressor', 'chunk_length_in_kb' : 32 };"); execute("ALTER TABLE %s WITH compression = { 'enabled' : 'false'};"); assertRows(execute(format("SELECT compression FROM %s.%s WHERE keyspace_name = ? and table_name = ?;", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES), KEYSPACE, currentTable()), row(map("enabled", "false"))); assertThrowsConfigurationException("Missing sub-option 'class' for the 'compression' option.", "ALTER TABLE %s WITH compression = {'chunk_length_in_kb' : 32};"); assertThrowsConfigurationException("The 'class' option must not be empty. 
To disable compression use 'enabled' : false", "ALTER TABLE %s WITH compression = { 'class' : ''};"); assertThrowsConfigurationException("If the 'enabled' option is set to false no other options must be specified", "ALTER TABLE %s WITH compression = { 'enabled' : 'false', 'class' : 'SnappyCompressor'};"); assertThrowsConfigurationException("The 'sstable_compression' option must not be used if the compression algorithm is already specified by the 'class' option", "ALTER TABLE %s WITH compression = { 'sstable_compression' : 'SnappyCompressor', 'class' : 'SnappyCompressor'};"); assertThrowsConfigurationException("The 'chunk_length_kb' option must not be used if the chunk length is already specified by the 'chunk_length_in_kb' option", "ALTER TABLE %s WITH compression = { 'class' : 'SnappyCompressor', 'chunk_length_kb' : 32 , 'chunk_length_in_kb' : 32 };"); assertThrowsConfigurationException("Invalid negative min_compress_ratio", "ALTER TABLE %s WITH compression = { 'class' : 'SnappyCompressor', 'min_compress_ratio' : -1 };"); } private void assertThrowsConfigurationException(String errorMsg, String alterStmt) throws Throwable { try { execute(alterStmt); Assert.fail("Query should be invalid but no error was thrown. Query is: " + alterStmt); } catch (ConfigurationException e) { assertEquals(errorMsg, e.getMessage()); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hyracks.algebricks.rewriter.rules; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.mutable.Mutable; import org.apache.commons.lang3.mutable.MutableObject; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator; import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext; import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag; import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag; import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable; import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression; import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression; import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression; import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression; 
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions; import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities; import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform; import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule; /** * Factors out common sub-expressions by assigning them to a variables, and replacing the common sub-expressions with references to those variables. * Preconditions/Assumptions: * Assumes no projects are in the plan. This rule ignores variable reference expressions and constants (other rules deal with those separately). * Postconditions/Examples: * Plan with extracted sub-expressions. Generates one assign operator per extracted expression. 
 * Example 1 - Simple Arithmetic Example (simplified)
 * Before plan:
 * assign [$$1] <- [5 + 6 - 10]
 * assign [$$0] <- [5 + 6 + 30]
 * After plan:
 * assign [$$1] <- [$$5 - 10]
 * assign [$$0] <- [$$5 + 30]
 * assign [$$5] <- [5 + 6]
 * Example 2 - Cleaning up 'Distinct By' (simplified)
 * Before plan: (notice how $$0 is not live after the distinct)
 * assign [$$3] <- [field-access($$0, 1)]
 * distinct ([%0->$$5])
 * assign [$$5] <- [field-access($$0, 1)]
 * unnest $$0 <- [scan-dataset]
 * After plan: (notice how the issue of $$0 is fixed)
 * assign [$$3] <- [$$5]
 * distinct ([$$5])
 * assign [$$5] <- [field-access($$0, 1)]
 * unnest $$0 <- [scan-dataset]
 * Example 3 - Pulling Common Expressions Above Joins (simplified)
 * Before plan:
 * assign [$$9] <- funcZ(funcY($$8))
 * join (funcX(funcY($$8)))
 * After plan:
 * assign [$$9] <- funcZ($$10))
 * select (funcX($$10))
 * assign [$$10] <- [funcY($$8)]
 * join (TRUE)
 */
public class ExtractCommonExpressionsRule implements IAlgebraicRewriteRule {

    // Scratch list: snapshot of an AssignOperator's expressions taken BEFORE the
    // substitution visitor rewrites them, so the pre-rewrite forms can also be
    // registered in the equivalence-class map afterwards.
    private final List<ILogicalExpression> originalAssignExprs = new ArrayList<ILogicalExpression>();

    private final CommonExpressionSubstitutionVisitor substVisitor = new CommonExpressionSubstitutionVisitor();

    // Maps each seen (non-variable, non-constant) expression to its equivalence class,
    // i.e. the first operator/expression-reference where it occurred and, once assigned,
    // the variable that replaces further occurrences. Cleared on every rewritePre call.
    private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap = new HashMap<ILogicalExpression, ExprEquivalenceClass>();

    // Set of operators for which common subexpression elimination should not be performed.
    private static final Set<LogicalOperatorTag> ignoreOps = new HashSet<LogicalOperatorTag>(6);

    static {
        ignoreOps.add(LogicalOperatorTag.UNNEST);
        ignoreOps.add(LogicalOperatorTag.UNNEST_MAP);
        ignoreOps.add(LogicalOperatorTag.ORDER);
        ignoreOps.add(LogicalOperatorTag.PROJECT);
        ignoreOps.add(LogicalOperatorTag.AGGREGATE);
        ignoreOps.add(LogicalOperatorTag.RUNNINGAGGREGATE);
    }

    /** This rule does all of its work in {@link #rewritePre}; the post-visit is a no-op. */
    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
        return false;
    }

    /**
     * Entry point: resets per-invocation state, eliminates common subexpressions under
     * {@code opRef}, and recomputes the root's type environment if anything changed.
     *
     * @return true iff the plan was modified
     */
    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
        exprEqClassMap.clear();
        substVisitor.setContext(context);
        boolean modified = removeCommonExpressions(opRef, context);
        if (modified) {
            context.computeAndSetTypeEnvironmentForOperator(opRef.getValue());
        }
        return modified;
    }

    /**
     * Registers (or refreshes) the equivalence class of {@code rhsExpr}, binding it to the
     * assign variable {@code lhs}. Note that an existing class keeps its original
     * first-operator/first-expression but has its variable overwritten.
     */
    private void updateEquivalenceClassMap(LogicalVariable lhs, Mutable<ILogicalExpression> rhsExprRef,
            ILogicalExpression rhsExpr, ILogicalOperator op) {
        ExprEquivalenceClass exprEqClass = exprEqClassMap.get(rhsExpr);
        if (exprEqClass == null) {
            exprEqClass = new ExprEquivalenceClass(op, rhsExprRef);
            exprEqClassMap.put(rhsExpr, exprEqClass);
        }
        exprEqClass.setVariable(lhs);
    }

    /**
     * Bottom-up traversal: recurses into the children of {@code opRef} first, then applies
     * the substitution visitor to this operator's expressions, then (for ASSIGN operators)
     * records both the rewritten and the original right-hand sides in the equivalence map.
     *
     * @return true iff this operator or any operator below it was modified
     */
    private boolean removeCommonExpressions(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
        if (context.checkIfInDontApplySet(this, opRef.getValue())) {
            return false;
        }

        boolean modified = false;
        // Recurse into children.
        for (Mutable<ILogicalOperator> inputOpRef : op.getInputs()) {
            if (removeCommonExpressions(inputOpRef, context)) {
                modified = true;
            }
        }

        // TODO: Deal with replicate properly. Currently, we just clear the expr equivalence map,
        // since we want to avoid incorrect expression replacement
        // (the resulting new variables should be assigned live below a replicate/split).
        if (op.getOperatorTag() == LogicalOperatorTag.REPLICATE || op.getOperatorTag() == LogicalOperatorTag.SPLIT) {
            exprEqClassMap.clear();
            return modified;
        }
        // Exclude these operators.
        if (ignoreOps.contains(op.getOperatorTag())) {
            return modified;
        }

        // Remember a copy of the original assign expressions, so we can add them to the equivalence class map
        // after replacing expressions within the assign operator itself.
        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
            AssignOperator assignOp = (AssignOperator) op;
            originalAssignExprs.clear();
            int numVars = assignOp.getVariables().size();
            for (int i = 0; i < numVars; i++) {
                Mutable<ILogicalExpression> exprRef = assignOp.getExpressions().get(i);
                ILogicalExpression expr = exprRef.getValue();
                originalAssignExprs.add(expr.cloneExpression());
            }
        }

        // Perform common subexpression elimination.
        substVisitor.setOperator(op);
        if (op.acceptExpressionTransform(substVisitor)) {
            modified = true;
        }

        // Update equivalence class map.
        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
            AssignOperator assignOp = (AssignOperator) op;
            int numVars = assignOp.getVariables().size();
            for (int i = 0; i < numVars; i++) {
                Mutable<ILogicalExpression> exprRef = assignOp.getExpressions().get(i);
                ILogicalExpression expr = exprRef.getValue();
                if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE
                        || expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
                    continue;
                }
                // Update equivalence class map.
                LogicalVariable lhs = assignOp.getVariables().get(i);
                updateEquivalenceClassMap(lhs, exprRef, exprRef.getValue(), op);

                // Update equivalence class map with original assign expression.
                updateEquivalenceClassMap(lhs, exprRef, originalAssignExprs.get(i), op);
            }
        }

        // TODO: For now do not perform replacement in nested plans
        // due to the complication of figuring out whether the firstOp in an equivalence class is within a subplan,
        // and the resulting variable will not be visible to the outside.
        // Since subplans should be eliminated in most cases, this behavior is acceptable for now.
        /*
        if (op.hasNestedPlans()) {
            AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) op;
            for (ILogicalPlan nestedPlan : opWithNestedPlan.getNestedPlans()) {
                for (Mutable<ILogicalOperator> rootRef : nestedPlan.getRoots()) {
                    if (removeCommonExpressions(rootRef, context)) {
                        modified = true;
                    }
                }
            }
        }
        */

        if (modified) {
            context.computeAndSetTypeEnvironmentForOperator(op);
            context.addToDontApplySet(this, op);
        }
        return modified;
    }

    /**
     * Expression-reference transformer that performs the actual substitution: replaces a
     * recognized common subexpression with a variable reference, creating a new assign
     * (possibly pulled out of a join condition into a select) when the equivalence class
     * has no variable yet.
     */
    private class CommonExpressionSubstitutionVisitor implements ILogicalExpressionReferenceTransform {

        private IOptimizationContext context;

        // The operator whose expressions are currently being transformed.
        private ILogicalOperator op;

        public void setContext(IOptimizationContext context) {
            this.context = context;
        }

        public void setOperator(ILogicalOperator op) throws AlgebricksException {
            this.op = op;
        }

        /**
         * Rewrites {@code exprRef} in place if it matches a known equivalence class;
         * otherwise registers it and descends into function-call arguments.
         *
         * @return true iff the referenced expression (or a sub-expression) was replaced
         */
        @Override
        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
            boolean modified = false;
            ExprEquivalenceClass exprEqClass = exprEqClassMap.get(expr);
            if (exprEqClass != null) {
                // Replace common subexpression with existing variable.
                if (exprEqClass.variableIsSet()) {
                    // Only functional (side-effect-free) expressions may be deduplicated.
                    if (expr.isFunctional()) {
                        Set<LogicalVariable> liveVars = new HashSet<>();
                        List<LogicalVariable> usedVars = new ArrayList<>();
                        VariableUtilities.getLiveVariables(op, liveVars);
                        VariableUtilities.getUsedVariables(op, usedVars);
                        // Check if the replacing variable is live at this op.
                        // However, if the op is already using variables that are not live,
                        // then a replacement may enable fixing the plan.
                        // This behavior is necessary to, e.g., properly deal with distinct by.
                        // Also just replace the expr if we are replacing common exprs from within the same operator.
                        if (liveVars.contains(exprEqClass.getVariable()) || !liveVars.containsAll(usedVars)
                                || op == exprEqClass.getFirstOperator()) {
                            exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
                            // Do not descend into children since this expr has been completely replaced.
                            return true;
                        }
                    }
                } else {
                    if (expr.isFunctional() && assignCommonExpression(exprEqClass, expr)) {
                        //re-obtain the live vars after rewriting in the method called in the if condition
                        Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
                        VariableUtilities.getLiveVariables(op, liveVars);
                        //rewrite only when the variable is live
                        if (liveVars.contains(exprEqClass.getVariable())) {
                            exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
                            // Do not descend into children since this expr has been completely replaced.
                            return true;
                        }
                    }
                }
            } else {
                // First sighting of this expression: register it (variables and constants
                // are never treated as common subexpressions).
                if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE
                        && expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
                    exprEqClass = new ExprEquivalenceClass(op, exprRef);
                    exprEqClassMap.put(expr, exprEqClass);
                }
            }

            // Descend into function arguments.
            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
                AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
                for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
                    if (transform(arg)) {
                        modified = true;
                    }
                }
            }
            return modified;
        }

        /**
         * Materializes the common expression of {@code exprEqClass} as a new assign below
         * its first operator. If the first operator is a join, the enclosing boolean factor
         * is first pulled out of the join condition into a new select above the join input.
         *
         * @return true iff a new assign was created and the class's variable was set
         */
        private boolean assignCommonExpression(ExprEquivalenceClass exprEqClass, ILogicalExpression expr)
                throws AlgebricksException {
            AbstractLogicalOperator firstOp = (AbstractLogicalOperator) exprEqClass.getFirstOperator();
            Mutable<ILogicalExpression> firstExprRef = exprEqClass.getFirstExpression();
            if (firstOp.getOperatorTag() == LogicalOperatorTag.INNERJOIN
                    || firstOp.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
                // Do not extract common expressions from within the same join operator.
                if (firstOp == op) {
                    return false;
                }
                AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) firstOp;
                Mutable<ILogicalExpression> joinCond = joinOp.getCondition();
                ILogicalExpression enclosingExpr = getEnclosingExpression(joinCond, firstExprRef.getValue());
                if (enclosingExpr == null) {
                    // No viable enclosing expression that we can pull out from the join.
                    return false;
                }
                // Place a Select operator beneath op that contains the enclosing expression.
                SelectOperator selectOp = new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr),
                        false, null);
                selectOp.getInputs().add(new MutableObject<ILogicalOperator>(op.getInputs().get(0).getValue()));
                op.getInputs().get(0).setValue(selectOp);
                // Set firstOp to be the select below op, since we want to assign the common subexpr there.
                firstOp = selectOp;
            } else if (firstOp.getInputs().size() > 1) {
                // Bail for any non-join operator with multiple inputs.
                return false;
            }
            LogicalVariable newVar = context.newVar();
            AssignOperator newAssign = new AssignOperator(newVar,
                    new MutableObject<ILogicalExpression>(firstExprRef.getValue().cloneExpression()));
            // Place assign below firstOp.
            newAssign.getInputs().add(new MutableObject<ILogicalOperator>(firstOp.getInputs().get(0).getValue()));
            newAssign.setExecutionMode(firstOp.getExecutionMode());
            firstOp.getInputs().get(0).setValue(newAssign);
            // Replace original expr with variable reference, and set var in expression equivalence class.
            firstExprRef.setValue(new VariableReferenceExpression(newVar));
            exprEqClass.setVariable(newVar);
            context.computeAndSetTypeEnvironmentForOperator(newAssign);
            context.computeAndSetTypeEnvironmentForOperator(firstOp);
            return true;
        }

        /**
         * Finds and detaches the boolean factor of a join condition that contains
         * {@code commonSubExpr}, so it can be moved into a select. Returns null when the
         * condition is not a function call, when the subexpression could serve as an
         * equi-join predicate (kept in the join on purpose), or when it is not found.
         * NOTE(review): mutates {@code conditionExprRef} — the extracted factor is removed
         * from an AND/OR argument list, or the whole condition is replaced by TRUE.
         */
        private ILogicalExpression getEnclosingExpression(Mutable<ILogicalExpression> conditionExprRef,
                ILogicalExpression commonSubExpr) {
            ILogicalExpression conditionExpr = conditionExprRef.getValue();
            if (conditionExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
                return null;
            }
            if (isEqJoinCondition(commonSubExpr)) {
                // Do not eliminate the common expression if we could use it for an equi-join.
                return null;
            }
            AbstractFunctionCallExpression conditionFuncExpr = (AbstractFunctionCallExpression) conditionExpr;
            // Boolean expression that encloses the common subexpression.
            ILogicalExpression enclosingBoolExpr = null;
            // We are not dealing with arbitrarily nested and/or expressions here.
            FunctionIdentifier funcIdent = conditionFuncExpr.getFunctionIdentifier();
            if (funcIdent.equals(AlgebricksBuiltinFunctions.AND) || funcIdent.equals(AlgebricksBuiltinFunctions.OR)) {
                Iterator<Mutable<ILogicalExpression>> argIter = conditionFuncExpr.getArguments().iterator();
                while (argIter.hasNext()) {
                    Mutable<ILogicalExpression> argRef = argIter.next();
                    if (containsExpr(argRef.getValue(), commonSubExpr)) {
                        enclosingBoolExpr = argRef.getValue();
                        // Remove the enclosing expression from the argument list.
                        // We are going to pull it out into a new select operator.
                        argIter.remove();
                        break;
                    }
                }
                // If and/or only has a single argument left, pull it out and remove the and/or function.
                if (conditionFuncExpr.getArguments().size() == 1) {
                    conditionExprRef.setValue(conditionFuncExpr.getArguments().get(0).getValue());
                }
            } else {
                if (!containsExpr(conditionExprRef.getValue(), commonSubExpr)) {
                    return null;
                }
                enclosingBoolExpr = conditionFuncExpr;
                // Replace the enclosing expression with TRUE.
                conditionExprRef.setValue(ConstantExpression.TRUE);
            }
            return enclosingBoolExpr;
        }
    }

    /**
     * Recursively checks whether {@code searchExpr} occurs (by reference identity) within
     * {@code expr} or any of its function-call arguments.
     */
    private boolean containsExpr(ILogicalExpression expr, ILogicalExpression searchExpr) {
        if (expr == searchExpr) {
            return true;
        }
        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
            return false;
        }
        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
        for (Mutable<ILogicalExpression> argRef : funcExpr.getArguments()) {
            if (containsExpr(argRef.getValue(), searchExpr)) {
                return true;
            }
        }
        return false;
    }

    /**
     * True iff {@code expr} has the shape of an equi-join predicate: EQ over two variable
     * references. NOTE(review): the cast assumes {@code expr} is a function call — callers
     * reach here only for expressions registered as common subexpressions, which excludes
     * bare variables and constants.
     */
    private boolean isEqJoinCondition(ILogicalExpression expr) {
        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
        if (funcExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ)) {
            ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
            ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
            if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
                    && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
                return true;
            }
        }
        return false;
    }

    /**
     * Record of where an expression was first seen (operator + expression reference) and,
     * once extracted, the variable holding its value.
     */
    private final class ExprEquivalenceClass {
        // First operator in which expression is used.
        private final ILogicalOperator firstOp;

        // Reference to expression in first op.
        private final Mutable<ILogicalExpression> firstExprRef;

        // Variable that this expression has been assigned to.
        private LogicalVariable var;

        public ExprEquivalenceClass(ILogicalOperator firstOp, Mutable<ILogicalExpression> firstExprRef) {
            this.firstOp = firstOp;
            this.firstExprRef = firstExprRef;
        }

        public ILogicalOperator getFirstOperator() {
            return firstOp;
        }

        public Mutable<ILogicalExpression> getFirstExpression() {
            return firstExprRef;
        }

        public void setVariable(LogicalVariable var) {
            this.var = var;
        }

        public LogicalVariable getVariable() {
            return var;
        }

        public boolean variableIsSet() {
            return var != null;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.segment.file; import static org.apache.jackrabbit.oak.segment.SegmentCache.newSegmentCache; import static org.apache.jackrabbit.oak.segment.data.SegmentData.newSegmentData; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Set; import java.util.UUID; import java.util.function.Consumer; import org.apache.jackrabbit.oak.api.jmx.CacheStatsMBean; import org.apache.jackrabbit.oak.segment.CachingSegmentReader; import org.apache.jackrabbit.oak.segment.RecordType; import org.apache.jackrabbit.oak.segment.Revisions; import org.apache.jackrabbit.oak.segment.Segment; import org.apache.jackrabbit.oak.segment.Segment.RecordConsumer; import org.apache.jackrabbit.oak.segment.SegmentBlob; import org.apache.jackrabbit.oak.segment.SegmentBufferMonitor; import org.apache.jackrabbit.oak.segment.SegmentCache; import org.apache.jackrabbit.oak.segment.SegmentId; import org.apache.jackrabbit.oak.segment.SegmentIdFactory; import org.apache.jackrabbit.oak.segment.SegmentIdProvider; import org.apache.jackrabbit.oak.segment.SegmentNodeState; import org.apache.jackrabbit.oak.segment.SegmentNotFoundException; import 
org.apache.jackrabbit.oak.segment.SegmentReader;
import org.apache.jackrabbit.oak.segment.SegmentStore;
import org.apache.jackrabbit.oak.segment.SegmentTracker;
import org.apache.jackrabbit.oak.segment.SegmentWriter;
import org.apache.jackrabbit.oak.segment.file.tar.EntryRecovery;
import org.apache.jackrabbit.oak.segment.file.tar.GCGeneration;
import org.apache.jackrabbit.oak.segment.file.tar.TarFiles;
import org.apache.jackrabbit.oak.segment.file.tar.TarRecovery;
import org.apache.jackrabbit.oak.segment.spi.monitor.IOMonitor;
import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentNodeStorePersistence;
import org.apache.jackrabbit.oak.segment.spi.persistence.Buffer;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.stats.StatsOptions;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The storage implementation for tar files.
 */
public abstract class AbstractFileStore implements SegmentStore, Closeable {

    private static final Logger log = LoggerFactory.getLogger(AbstractFileStore.class);

    /**
     * The minimum supported store version. It is possible for an implementation
     * to support in a transparent and backwards-compatible way older versions
     * of a repository. In this case, the minimum supported store version
     * identifies the store format that can still be processed by the
     * implementation. The minimum store version has to be greater than zero and
     * less than or equal to the maximum store version.
     */
    private static final int MIN_STORE_VERSION = 1;

    /**
     * The maximum supported store version. It is possible for an implementation
     * to support in a transparent and forwards-compatible way newer version of
     * a repository. In this case, the maximum supported store version
     * identifies the store format that can still be processed by the
     * implementation. The maximum supported store version has to be greater
     * than zero and greater than or equal to the minimum store version.
     */
    private static final int MAX_STORE_VERSION = 2;

    /**
     * Creates a checker that validates the store's manifest against the
     * supported version range. With strict checking the lower bound is raised
     * to {@link #MAX_STORE_VERSION}, i.e. the store must already be at the
     * newest format; otherwise any version in [MIN, MAX] is accepted.
     */
    static ManifestChecker newManifestChecker(SegmentNodeStorePersistence persistence, boolean strictVersionCheck) throws IOException {
        return ManifestChecker.newManifestChecker(
                persistence.getManifestFile(),
                persistence.segmentFilesExist(),
                strictVersionCheck ? MAX_STORE_VERSION : MIN_STORE_VERSION,
                MAX_STORE_VERSION
        );
    }

    // Issues new segment ids and tracks live SegmentId instances.
    @NotNull
    final SegmentTracker tracker;

    // Reader with string/template caching layered on top of raw segment access.
    @NotNull
    final CachingSegmentReader segmentReader;

    final File directory;

    private final BlobStore blobStore;

    final boolean memoryMapping;

    final boolean offHeapAccess;

    @NotNull
    final SegmentCache segmentCache;

    // Recovery strategy handed to the TAR layer: each recovered entry is routed
    // through writeSegment(), which also rebuilds the graph and binary-reference
    // indexes for data segments.
    final TarRecovery recovery = new TarRecovery() {

        @Override
        public void recoverEntry(UUID uuid, byte[] data, EntryRecovery entryRecovery) throws IOException {
            writeSegment(uuid, data, entryRecovery);
        }

    };

    @NotNull
    private final SegmentBufferMonitor segmentBufferMonitor;

    protected final IOMonitor ioMonitor;

    AbstractFileStore(final FileStoreBuilder builder) {
        this.directory = builder.getDirectory();
        // Segment ids created by the tracker record cache hits against the
        // segment cache, so recently used segments stay cached.
        this.tracker = new SegmentTracker(new SegmentIdFactory() {

            @Override
            @NotNull
            public SegmentId newSegmentId(long msb, long lsb) {
                return new SegmentId(AbstractFileStore.this, msb, lsb, segmentCache::recordHit);
            }

        });
        this.blobStore = builder.getBlobStore();
        this.segmentCache = newSegmentCache(builder.getSegmentCacheSize());
        this.segmentReader = new CachingSegmentReader(
                this::getWriter,
                blobStore,
                builder.getStringCacheSize(),
                builder.getTemplateCacheSize(),
                builder.getStatsProvider().getMeter("oak.segment.reads", StatsOptions.DEFAULT)
        );
        this.memoryMapping = builder.getMemoryMapping();
        this.offHeapAccess = builder.getOffHeapAccess();
        this.ioMonitor = builder.getIOMonitor();
        this.segmentBufferMonitor = new SegmentBufferMonitor(builder.getStatsProvider());
    }

    // Unwraps a nested SegmentNotFoundException if present, otherwise wraps the
    // given exception in a new one for the given segment id.
    static SegmentNotFoundException asSegmentNotFoundException(Exception e, SegmentId id) {
        if (e.getCause() instanceof SegmentNotFoundException) {
            return (SegmentNotFoundException) e.getCause();
        }
        return new SegmentNotFoundException(id, e);
    }

    @NotNull
    public CacheStatsMBean getSegmentCacheStats() {
        return segmentCache.getCacheStats();
    }

    @NotNull
    public CacheStatsMBean getStringCacheStats() {
        return segmentReader.getStringCacheStats();
    }

    @NotNull
    public CacheStatsMBean getTemplateCacheStats() {
        return segmentReader.getTemplateCacheStats();
    }

    @NotNull
    public abstract SegmentWriter getWriter();

    @NotNull
    public SegmentReader getReader() {
        return segmentReader;
    }

    @NotNull
    public SegmentIdProvider getSegmentIdProvider() {
        return tracker;
    }

    /**
     * @return the {@link Revisions} object bound to the current store.
     */
    public abstract Revisions getRevisions();

    /**
     * Convenience method for accessing the root node for the current head.
     * This is equivalent to
     * <pre>
     * fileStore.getReader().readHeadState(fileStore.getRevisions())
     * </pre>
     * @return the current head node state
     */
    @NotNull
    public SegmentNodeState getHead() {
        return segmentReader.readHeadState(getRevisions());
    }

    /**
     * @return the external BlobStore (if configured) with this store, {@code null} otherwise.
     */
    @Nullable
    public BlobStore getBlobStore() {
        return blobStore;
    }

    /**
     * Re-writes a recovered segment and, for data segments, restores the
     * ancillary TAR indexes (segment graph and binary references) that are
     * normally maintained at write time.
     */
    private void writeSegment(UUID id, byte[] data, EntryRecovery w) throws IOException {
        long msb = id.getMostSignificantBits();
        long lsb = id.getLeastSignificantBits();
        Buffer buffer = Buffer.wrap(data);
        // Only data segments carry a GC generation; bulk segments use NULL.
        GCGeneration generation = SegmentId.isDataSegmentId(lsb)
                ? Segment.getGcGeneration(newSegmentData(buffer), id)
                : GCGeneration.NULL;
        w.recoverEntry(msb, lsb, data, 0, data.length, generation);
        if (SegmentId.isDataSegmentId(lsb)) {
            Segment segment = new Segment(tracker, segmentReader, tracker.newSegmentId(msb, lsb), buffer);
            populateTarGraph(segment, w);
            populateTarBinaryReferences(segment, w);
        }
    }

    // Records one graph edge per segment referenced by the given segment.
    private static void populateTarGraph(Segment segment, EntryRecovery w) {
        UUID from = segment.getSegmentId().asUUID();
        for (int i = 0; i < segment.getReferencedSegmentIdCount(); i++) {
            w.recoverGraphEdge(from, segment.getReferencedSegmentId(i));
        }
    }

    // Records a binary reference for every BLOB_ID record in the segment.
    private static void populateTarBinaryReferences(final Segment segment, final EntryRecovery w) {
        final GCGeneration generation = segment.getGcGeneration();
        final UUID id = segment.getSegmentId().asUUID();
        segment.forEachRecord((number, type, offset) -> {
            if (type == RecordType.BLOB_ID) {
                w.recoverBinaryReference(generation, id, SegmentBlob.readBlobId(segment, number));
            }
        });
    }

    // Collects the ids of all segments referenced by the given segment.
    static Set<UUID> readReferences(Segment segment) {
        Set<UUID> references = new HashSet<>();
        for (int i = 0; i < segment.getReferencedSegmentIdCount(); i++) {
            references.add(segment.getReferencedSegmentId(i));
        }
        return references;
    }

    // Collects the blob ids of all BLOB_ID records in the given segment.
    static Set<String> readBinaryReferences(final Segment segment) {
        final Set<String> binaryReferences = new HashSet<>();
        segment.forEachRecord(new RecordConsumer() {

            @Override
            public void consume(int number, RecordType type, int offset) {
                if (type == RecordType.BLOB_ID) {
                    binaryReferences.add(SegmentBlob.readBlobId(segment, number));
                }
            }

        });
        return binaryReferences;
    }

    // Best-effort close: null-safe, and a failing close() is logged rather than
    // propagated so that shutdown paths can continue closing other resources.
    static void closeAndLogOnFail(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (IOException ioe) {
                // ignore and log
                log.error(ioe.getMessage(), ioe);
            }
        }
    }

    // Reads a segment directly from the TAR files, bypassing the segment cache.
    // Throws SegmentNotFoundException if no TAR file contains the segment.
    Segment readSegmentUncached(TarFiles tarFiles, SegmentId id) {
        Buffer buffer = tarFiles.readSegment(id.getMostSignificantBits(), id.getLeastSignificantBits());
        if (buffer == null) {
            throw new SegmentNotFoundException(id);
        }
        segmentBufferMonitor.trackAllocation(buffer);
        return new Segment(tracker, segmentReader, id, buffer);
    }

    /**
     * Finds all external blob references that are currently accessible
     * in this repository and adds them to the given collector. Useful
     * for collecting garbage in an external data store.
     * <p>
     * Note that this method only collects blob references that are already
     * stored in the repository (at the time when this method is called), so
     * the garbage collector will need some other mechanism for tracking
     * in-memory references and references stored while this method is
     * running.
     * @param collector reference collector called back for each blob reference found
     */
    public abstract void collectBlobReferences(Consumer<String> collector) throws IOException;

}
/*
 * Copyright 2016 The Bazel Authors. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.idea.blaze.android.run.binary;

import com.android.tools.idea.run.ValidationError;
import com.android.tools.idea.run.editor.AndroidProfilersPanel;
import com.android.tools.idea.run.editor.ProfilerState;
import com.android.tools.idea.run.util.LaunchUtils;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.idea.blaze.android.run.BlazeAndroidRunConfigurationCommonState;
import com.google.idea.blaze.android.run.binary.AndroidBinaryLaunchMethodsUtils.AndroidBinaryLaunchMethod;
import com.google.idea.blaze.base.run.state.RunConfigurationState;
import com.google.idea.blaze.base.run.state.RunConfigurationStateEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import java.util.Map;
import org.jdom.Element;

/** State specific to the android binary run configuration. */
public final class BlazeAndroidBinaryRunConfigurationState implements RunConfigurationState {
  /** Element name used to group the {@link ProfilerState} settings */
  private static final String PROFILERS_ELEMENT_NAME = "Profilers";

  // Values persisted for the MODE attribute; these strings are part of the
  // saved run-configuration format and must not be changed.
  public static final String LAUNCH_DEFAULT_ACTIVITY = "default_activity";
  public static final String LAUNCH_SPECIFIC_ACTIVITY = "specific_activity";
  public static final String DO_NOTHING = "do_nothing";
  public static final String LAUNCH_DEEP_LINK = "launch_deep_link";

  // XML attribute names used by read/writeExternal.
  private static final String LAUNCH_METHOD_ATTR = "launch-method";
  // Remove once v2 becomes default.
  private static final String USE_SPLIT_APKS_IF_POSSIBLE = "use-split-apks-if-possible";
  private static final String WORK_PROFILE_ATTR = "use-work-profile-if-present";
  private static final String USER_ID_ATTR = "user-id";

  private AndroidBinaryLaunchMethod launchMethod = AndroidBinaryLaunchMethod.MOBILE_INSTALL;
  private boolean useSplitApksIfPossible = false;
  private boolean useWorkProfileIfPresent = false;
  // Nullable: null means "no explicit user id"; may also be derived from legacy
  // activity flags in readExternal.
  private Integer userId;
  private static final String SHOW_LOGCAT_AUTOMATICALLY = "show-logcat-automatically";
  private boolean showLogcatAutomatically = false;
  private ProfilerState profilerState;

  // Legacy keys read from old-style <option name=... value=...> children.
  private static final String DEEP_LINK = "DEEP_LINK";
  private static final String ACTIVITY_CLASS = "ACTIVITY_CLASS";
  private static final String MODE = "MODE";
  private static final String ACTIVITY_EXTRA_FLAGS = "ACTIVITY_EXTRA_FLAGS";
  private String deepLink = "";
  private String activityClass = "";
  private String mode = LAUNCH_DEFAULT_ACTIVITY;

  private static final String AM_START_OPTIONS = "AM_START_OPTIONS";
  private String amStartOptions = "";

  private final BlazeAndroidRunConfigurationCommonState commonState;

  BlazeAndroidBinaryRunConfigurationState(String buildSystemName) {
    commonState = new BlazeAndroidRunConfigurationCommonState(buildSystemName);
    profilerState = new ProfilerState();
  }

  public BlazeAndroidRunConfigurationCommonState getCommonState() {
    return commonState;
  }

  public AndroidBinaryLaunchMethod getLaunchMethod() {
    return launchMethod;
  }

  @VisibleForTesting
  public void setLaunchMethod(AndroidBinaryLaunchMethod launchMethod) {
    this.launchMethod = launchMethod;
  }

  // This method is deprecated, as unused by mobile-install v2.
  // TODO(b/120300546): Remove once mobile-install v1 is completely deprecated.
  public boolean useSplitApksIfPossible() {
    return useSplitApksIfPossible;
  }

  // This method is deprecated, as unused by mobile-install v2.
  // TODO(b/120300546): Remove once mobile-install v1 is completely deprecated.
  void setUseSplitApksIfPossible(boolean useSplitApksIfPossible) {
    this.useSplitApksIfPossible = useSplitApksIfPossible;
  }

  public boolean useWorkProfileIfPresent() {
    return useWorkProfileIfPresent;
  }

  void setUseWorkProfileIfPresent(boolean useWorkProfileIfPresent) {
    this.useWorkProfileIfPresent = useWorkProfileIfPresent;
  }

  Integer getUserId() {
    return userId;
  }

  void setUserId(Integer userId) {
    this.userId = userId;
  }

  public boolean showLogcatAutomatically() {
    return showLogcatAutomatically;
  }

  public void setShowLogcatAutomatically(boolean showLogcatAutomatically) {
    this.showLogcatAutomatically = showLogcatAutomatically;
  }

  public String getDeepLink() {
    return deepLink;
  }

  public void setDeepLink(String deepLink) {
    this.deepLink = deepLink;
  }

  public String getActivityClass() {
    return activityClass;
  }

  public void setActivityClass(String activityClass) {
    this.activityClass = activityClass;
  }

  public String getMode() {
    return mode;
  }

  public void setMode(String mode) {
    this.mode = mode;
  }

  public ProfilerState getProfilerState() {
    return profilerState;
  }

  public void setAmStartOptions(String amStartOptions) {
    this.amStartOptions = amStartOptions;
  }

  public String getAmStartOptions() {
    return amStartOptions;
  }

  /**
   * We collect errors rather than throwing to avoid missing fatal errors by exiting early for a
   * warning.
   */
  public ImmutableList<ValidationError> validate(Project project) {
    ImmutableList.Builder<ValidationError> errors = ImmutableList.builder();
    errors.addAll(commonState.validate(project));
    if (commonState.isNativeDebuggingEnabled()
        && AndroidBinaryLaunchMethodsUtils.useMobileInstall(launchMethod)) {
      errors.add(
          ValidationError.fatal("Native debugging is not supported when using mobile-install."));
    }
    return errors.build();
  }

  /**
   * Restores this state from the persisted run-configuration XML. Reads the
   * current attribute-based format first, then overlays any legacy
   * {@code <option>} values (see {@link #getLegacyValues}).
   */
  @Override
  public void readExternal(Element element) throws InvalidDataException {
    commonState.readExternal(element);

    // Group profiler settings under its own section.
    Element profilersElement = element.getChild(PROFILERS_ELEMENT_NAME);
    if (profilersElement != null) {
      profilerState.readExternal(profilersElement);
    }

    setDeepLink(Strings.nullToEmpty(element.getAttributeValue(DEEP_LINK)));
    setActivityClass(Strings.nullToEmpty(element.getAttributeValue(ACTIVITY_CLASS)));
    String modeValue = element.getAttributeValue(MODE);
    setMode(Strings.isNullOrEmpty(modeValue) ? LAUNCH_DEFAULT_ACTIVITY : modeValue);
    String launchMethodAttribute = element.getAttributeValue(LAUNCH_METHOD_ATTR);
    if (launchMethodAttribute != null) {
      // NOTE(review): valueOf throws IllegalArgumentException for an
      // unrecognized persisted value — confirm callers tolerate that.
      launchMethod = AndroidBinaryLaunchMethod.valueOf(launchMethodAttribute);
    } else {
      launchMethod = AndroidBinaryLaunchMethod.MOBILE_INSTALL;
    }
    setUseSplitApksIfPossible(
        Boolean.parseBoolean(element.getAttributeValue(USE_SPLIT_APKS_IF_POSSIBLE)));
    setUseWorkProfileIfPresent(Boolean.parseBoolean(element.getAttributeValue(WORK_PROFILE_ATTR)));

    String userIdString = element.getAttributeValue(USER_ID_ATTR);
    if (userIdString != null) {
      // NOTE(review): parseInt throws NumberFormatException on malformed input.
      setUserId(Integer.parseInt(userIdString));
    }

    setShowLogcatAutomatically(
        Boolean.parseBoolean(element.getAttributeValue(SHOW_LOGCAT_AUTOMATICALLY)));

    String amStartOptionsString = element.getAttributeValue(AM_START_OPTIONS);
    if (amStartOptionsString != null) {
      setAmStartOptions(amStartOptionsString);
    }

    // Overlay values stored in the old reflective-JDOM format. The legacy
    // user id is only used if no explicit user-id attribute was present.
    for (Map.Entry<String, String> entry : getLegacyValues(element).entrySet()) {
      String value = entry.getValue();
      switch (entry.getKey()) {
        case DEEP_LINK:
          deepLink = Strings.nullToEmpty(value);
          break;
        case ACTIVITY_CLASS:
          activityClass = Strings.nullToEmpty(value);
          break;
        case MODE:
          mode = Strings.isNullOrEmpty(value) ? LAUNCH_DEFAULT_ACTIVITY : value;
          break;
        case ACTIVITY_EXTRA_FLAGS:
          if (userId == null) {
            userId = LaunchUtils.getUserIdFromFlags(value);
          }
          break;
        default:
          break;
      }
    }
  }

  /** Persists this state into the run-configuration XML element. */
  @Override
  public void writeExternal(Element element) throws WriteExternalException {
    commonState.writeExternal(element);

    // Group profiler settings under its own section. Previously written profiler info
    // are replaced manually because ProfilerState#writeExternal does not handle the removal
    // process; unlike i.e, implementers of RunConfigurationState.
    Element profilersElement = new Element(PROFILERS_ELEMENT_NAME);
    element.removeChildren(PROFILERS_ELEMENT_NAME);
    element.addContent(profilersElement);
    profilerState.writeExternal(profilersElement);

    element.setAttribute(DEEP_LINK, deepLink);
    element.setAttribute(ACTIVITY_CLASS, activityClass);
    element.setAttribute(MODE, mode);
    element.setAttribute(LAUNCH_METHOD_ATTR, launchMethod.name());
    element.setAttribute(USE_SPLIT_APKS_IF_POSSIBLE, Boolean.toString(useSplitApksIfPossible));
    element.setAttribute(WORK_PROFILE_ATTR, Boolean.toString(useWorkProfileIfPresent));
    element.setAttribute(SHOW_LOGCAT_AUTOMATICALLY, Boolean.toString(showLogcatAutomatically));
    element.setAttribute(AM_START_OPTIONS, amStartOptions);

    // userId is optional: write it when set, otherwise remove any stale value.
    if (userId != null) {
      element.setAttribute(USER_ID_ATTR, Integer.toString(userId));
    } else {
      element.removeAttribute(USER_ID_ATTR);
    }
  }

  /** Imports legacy values in the old reflective JDOM externalizer manner. Can be removed ~2.0+. */
  private static Map<String, String> getLegacyValues(Element element) {
    Map<String, String> result = Maps.newHashMap();
    for (Element option : element.getChildren("option")) {
      String name = option.getAttributeValue("name");
      String value = option.getAttributeValue("value");
      result.put(name, value);
    }
    return result;
  }

  @Override
  public RunConfigurationStateEditor getEditor(Project project) {
    return new BlazeAndroidBinaryRunConfigurationStateEditor(
        commonState.getEditor(project), new AndroidProfilersPanel(project, profilerState), project);
  }
}
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.ofbiz.entity.transaction; import java.sql.Connection; import java.sql.SQLException; import java.sql.Timestamp; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import javax.sql.XAConnection; import javax.transaction.HeuristicMixedException; import javax.transaction.HeuristicRollbackException; import javax.transaction.InvalidTransactionException; import javax.transaction.NotSupportedException; import javax.transaction.RollbackException; import javax.transaction.Status; import javax.transaction.Synchronization; import javax.transaction.SystemException; import javax.transaction.Transaction; import javax.transaction.TransactionManager; import javax.transaction.UserTransaction; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; import javax.transaction.xa.Xid; import org.apache.commons.collections.map.ListOrderedMap; import org.ofbiz.base.util.Debug; import 
org.ofbiz.base.util.UtilDateTime; import org.ofbiz.base.util.UtilGenerics; import org.ofbiz.base.util.UtilValidate; import org.ofbiz.entity.GenericEntityConfException; import org.ofbiz.entity.GenericEntityException; import org.ofbiz.entity.config.model.Datasource; import org.ofbiz.entity.config.model.EntityConfig; import org.ofbiz.entity.datasource.GenericHelperInfo; import org.ofbiz.entity.jdbc.CursorConnection; /** * <p>Transaction Utility to help with some common transaction tasks * <p>Provides a wrapper around the transaction objects to allow for changes in underlying implementations in the future. */ public class TransactionUtil implements Status { // Debug module name public static final String module = TransactionUtil.class.getName(); private static ThreadLocal<List<Transaction>> suspendedTxStack = new ThreadLocal<List<Transaction>>(); private static ThreadLocal<List<Exception>> suspendedTxLocationStack = new ThreadLocal<List<Exception>>(); private static ThreadLocal<Exception> transactionBeginStack = new ThreadLocal<Exception>(); private static ThreadLocal<List<Exception>> transactionBeginStackSave = new ThreadLocal<List<Exception>>(); private static ThreadLocal<RollbackOnlyCause> setRollbackOnlyCause = new ThreadLocal<RollbackOnlyCause>(); private static ThreadLocal<List<RollbackOnlyCause>> setRollbackOnlyCauseSave = new ThreadLocal<List<RollbackOnlyCause>>(); private static ThreadLocal<Timestamp> transactionStartStamp = new ThreadLocal<Timestamp>(); private static ThreadLocal<Timestamp> transactionLastNowStamp = new ThreadLocal<Timestamp>(); private static final boolean debugResources = readDebugResources(); public static Map<Xid, DebugXaResource> debugResMap = Collections.<Xid, DebugXaResource>synchronizedMap(new HashMap<Xid, DebugXaResource>()); // in order to improve performance allThreadsTransactionBeginStack and allThreadsTransactionBeginStackSave are only maintained when logging level INFO is on private static Map<Long, Exception> 
allThreadsTransactionBeginStack = Collections.<Long, Exception>synchronizedMap(new HashMap<Long, Exception>()); private static Map<Long, List<Exception>> allThreadsTransactionBeginStackSave = Collections.<Long, List<Exception>>synchronizedMap(new HashMap<Long, List<Exception>>()); public static <V> V doNewTransaction(Callable<V> callable, String ifErrorMessage, int timeout, boolean printException) throws GenericEntityException { return noTransaction(inTransaction(callable, ifErrorMessage, timeout, printException)).call(); } public static <V> V doTransaction(Callable<V> callable, String ifErrorMessage, int timeout, boolean printException) throws GenericEntityException { return inTransaction(callable, ifErrorMessage, timeout, printException).call(); } public static <V> NoTransaction<V> noTransaction(Callable<V> callable) { return new NoTransaction<V>(callable); } // This syntax is groovy compatible, with the primary(callable) as the first arg. // You could do: // use (TransactionUtil) { // Callable callable = .... // Object result = callable.noTransaction().inTransaction(ifError, timeout, print).call() // } public static <V> InTransaction<V> inTransaction(Callable<V> callable, String ifErrorMessage, int timeout, boolean printException) { return new InTransaction<V>(callable, ifErrorMessage, timeout, printException); } /** Begins a transaction in the current thread IF transactions are available; only * tries if the current transaction status is ACTIVE, if not active it returns false. * If and on only if it begins a transaction it will return true. In other words, if * a transaction is already in place it will return false and do nothing. */ public static boolean begin() throws GenericTransactionException { return begin(0); } /** Begins a transaction in the current thread IF transactions are available; only * tries if the current transaction status is ACTIVE, if not active it returns false. * If and on only if it begins a transaction it will return true. 
In other words, if * a transaction is already in place it will return false and do nothing. */ public static boolean begin(int timeout) throws GenericTransactionException { UserTransaction ut = TransactionFactoryLoader.getInstance().getUserTransaction(); if (ut != null) { try { int currentStatus = ut.getStatus(); if (Debug.verboseOn()) { Debug.logVerbose("Current status : " + getTransactionStateString(currentStatus), module); } if (currentStatus == Status.STATUS_ACTIVE) { if (Debug.verboseOn()) { Debug.logVerbose("Active transaction in place, so no transaction begun", module); } return false; } else if (currentStatus == Status.STATUS_MARKED_ROLLBACK) { Exception e = getTransactionBeginStack(); if (e != null) { Debug.logWarning(e, "Active transaction marked for rollback in place, so no transaction begun; this stack trace shows when the exception began: ", module); } else { Debug.logWarning("Active transaction marked for rollback in place, so no transaction begun", module); } RollbackOnlyCause roc = getSetRollbackOnlyCause(); // do we have a cause? if so, throw special exception if (UtilValidate.isNotEmpty(roc)) { throw new GenericTransactionException("The current transaction is marked for rollback, not beginning a new transaction and aborting current operation; the rollbackOnly was caused by: " + roc.getCauseMessage(), roc.getCauseThrowable()); } else { return false; } } internalBegin(ut, timeout); // reset the transaction stamps, just in case... 
clearTransactionStamps(); // initialize the start stamp getTransactionStartStamp(); // set the tx begin stack placeholder setTransactionBeginStack(); // initialize the debug resource if (debugResources()) { DebugXaResource dxa = new DebugXaResource(); try { dxa.enlist(); } catch (XAException e) { Debug.logError(e, module); } } return true; } catch (NotSupportedException e) { throw new GenericTransactionException("Not Supported error, could not begin transaction (probably a nesting problem)", e); } catch (SystemException e) { throw new GenericTransactionException("System error, could not begin transaction", e); } } else { if (Debug.infoOn()) Debug.logInfo("No user transaction, so no transaction begun", module); return false; } } protected static void internalBegin(UserTransaction ut, int timeout) throws SystemException, NotSupportedException { // set the timeout for THIS transaction if (timeout > 0) { ut.setTransactionTimeout(timeout); if (Debug.verboseOn()) { Debug.logVerbose("Set transaction timeout to : " + timeout + " seconds", module); } } // begin the transaction ut.begin(); if (Debug.verboseOn()) { Debug.logVerbose("Transaction begun", module); } // reset the timeout to the default if (timeout > 0) { ut.setTransactionTimeout(0); } } /** Gets the status of the transaction in the current thread IF * transactions are available, otherwise returns STATUS_NO_TRANSACTION */ public static int getStatus() throws GenericTransactionException { UserTransaction ut = TransactionFactoryLoader.getInstance().getUserTransaction(); if (ut != null) { try { return ut.getStatus(); } catch (SystemException e) { throw new GenericTransactionException("System error, could not get status", e); } } else { return STATUS_NO_TRANSACTION; } } public static String getStatusString() throws GenericTransactionException { return getTransactionStateString(getStatus()); } public static boolean isTransactionInPlace() throws GenericTransactionException { int status = getStatus(); if (status == 
STATUS_NO_TRANSACTION) { return false; } else { return true; } } /** Commits the transaction in the current thread IF transactions are available * AND if beganTransaction is true */ public static void commit(boolean beganTransaction) throws GenericTransactionException { if (beganTransaction) { TransactionUtil.commit(); } } /** Commits the transaction in the current thread IF transactions are available */ public static void commit() throws GenericTransactionException { UserTransaction ut = TransactionFactoryLoader.getInstance().getUserTransaction(); if (ut != null) { try { int status = ut.getStatus(); Debug.logVerbose("Current status : " + getTransactionStateString(status), module); if (status != STATUS_NO_TRANSACTION && status != STATUS_COMMITTING && status != STATUS_COMMITTED && status != STATUS_ROLLING_BACK && status != STATUS_ROLLEDBACK) { ut.commit(); // clear out the stamps to keep it clean clearTransactionStamps(); // clear out the stack too clearTransactionBeginStack(); clearSetRollbackOnlyCause(); Debug.logVerbose("Transaction committed", module); } else { Debug.logWarning("Not committing transaction, status is " + getStatusString(), module); } } catch (RollbackException e) { RollbackOnlyCause rollbackOnlyCause = getSetRollbackOnlyCause(); if (rollbackOnlyCause != null) { // the transaction is now definitely over, so clear stuff as normal now that we have the info from it that we want clearTransactionStamps(); clearTransactionBeginStack(); clearSetRollbackOnlyCause(); Debug.logError(e, "Rollback Only was set when trying to commit transaction here; throwing rollbackOnly cause exception", module); throw new GenericTransactionException("Roll back error, could not commit transaction, was rolled back instead because of: " + rollbackOnlyCause.getCauseMessage(), rollbackOnlyCause.getCauseThrowable()); } else { Throwable t = e.getCause() == null ? 
e : e.getCause(); throw new GenericTransactionException("Roll back error (with no rollbackOnly cause found), could not commit transaction, was rolled back instead: " + t.toString(), t); } } catch (IllegalStateException e) { Throwable t = e.getCause() == null ? e : e.getCause(); throw new GenericTransactionException("Could not commit transaction, IllegalStateException exception: " + t.toString(), t); } catch (HeuristicMixedException e) { Throwable t = e.getCause() == null ? e : e.getCause(); throw new GenericTransactionException("Could not commit transaction, HeuristicMixed exception: " + t.toString(), t); } catch (HeuristicRollbackException e) { Throwable t = e.getCause() == null ? e : e.getCause(); throw new GenericTransactionException("Could not commit transaction, HeuristicRollback exception: " + t.toString(), t); } catch (SystemException e) { Throwable t = e.getCause() == null ? e : e.getCause(); throw new GenericTransactionException("System error, could not commit transaction: " + t.toString(), t); } } else { Debug.logInfo("UserTransaction is null, not committing", module); } } /** Rolls back transaction in the current thread IF transactions are available * AND if beganTransaction is true; if beganTransaction is not true, * setRollbackOnly is called to insure that the transaction will be rolled back */ public static void rollback(boolean beganTransaction, String causeMessage, Throwable causeThrowable) throws GenericTransactionException { if (beganTransaction) { TransactionUtil.rollback(causeThrowable); } else { TransactionUtil.setRollbackOnly(causeMessage, causeThrowable); } } /** Rolls back transaction in the current thread IF transactions are available */ public static void rollback() throws GenericTransactionException { rollback(null); } /** Rolls back transaction in the current thread IF transactions are available */ public static void rollback(Throwable causeThrowable) throws GenericTransactionException { UserTransaction ut = 
TransactionFactoryLoader.getInstance().getUserTransaction(); if (ut != null) { try { int status = ut.getStatus(); Debug.logVerbose("Current status : " + getTransactionStateString(status), module); if (status != STATUS_NO_TRANSACTION) { //if (Debug.infoOn()) Thread.dumpStack(); if (causeThrowable == null && Debug.infoOn()) { Exception newE = new Exception("Stack Trace"); Debug.logError(newE, "[TransactionUtil.rollback]", module); } // clear out the stamps to keep it clean clearTransactionStamps(); // clear out the stack too clearTransactionBeginStack(); clearSetRollbackOnlyCause(); ut.rollback(); Debug.logInfo("Transaction rolled back", module); } else { Debug.logWarning("Transaction not rolled back, status is STATUS_NO_TRANSACTION", module); } } catch (IllegalStateException e) { Throwable t = e.getCause() == null ? e : e.getCause(); throw new GenericTransactionException("Could not rollback transaction, IllegalStateException exception: " + t.toString(), t); } catch (SystemException e) { Throwable t = e.getCause() == null ? 
e : e.getCause(); throw new GenericTransactionException("System error, could not rollback transaction: " + t.toString(), t); } } else { Debug.logInfo("No UserTransaction, transaction not rolled back", module); } } /** Makes a rollback the only possible outcome of the transaction in the current thread IF transactions are available */ public static void setRollbackOnly(String causeMessage, Throwable causeThrowable) throws GenericTransactionException { UserTransaction ut = TransactionFactoryLoader.getInstance().getUserTransaction(); if (ut != null) { try { int status = ut.getStatus(); Debug.logVerbose("Current code : " + getTransactionStateString(status), module); if (status != STATUS_NO_TRANSACTION) { if (status != STATUS_MARKED_ROLLBACK) { if (Debug.warningOn()) { Debug.logWarning(new Exception(causeMessage), "Calling transaction setRollbackOnly; this stack trace shows where this is happening:", module); } ut.setRollbackOnly(); setSetRollbackOnlyCause(causeMessage, causeThrowable); } else { Debug.logInfo("Transaction rollback only not set, rollback only is already set.", module); } } else { Debug.logWarning("Transaction rollback only not set, status is STATUS_NO_TRANSACTION", module); } } catch (IllegalStateException e) { Throwable t = e.getCause() == null ? e : e.getCause(); throw new GenericTransactionException("Could not set rollback only on transaction, IllegalStateException exception: " + t.toString(), t); } catch (SystemException e) { Throwable t = e.getCause() == null ? 
e : e.getCause();
            // preserve the underlying cause when rethrowing as the project's checked exception
            throw new GenericTransactionException("System error, could not set rollback only on transaction: " + t.toString(), t);
        }
    } else {
        Debug.logInfo("No UserTransaction, transaction rollback only not set", module);
    }
}

/**
 * Suspends the transaction active in the current thread, if any, and returns it.
 * Also clears the thread-local begin-stack / rollback-only-cause markers and saves
 * them so {@link #resume} can restore them. Returns null when there is no active
 * transaction or no TransactionManager is available.
 */
public static Transaction suspend() throws GenericTransactionException {
    try {
        if (TransactionUtil.getStatus() != STATUS_NO_TRANSACTION) {
            TransactionManager txMgr = TransactionFactoryLoader.getInstance().getTransactionManager();
            if (txMgr != null) {
                // save the per-thread debugging state before detaching the transaction
                pushTransactionBeginStackSave(clearTransactionBeginStack());
                pushSetRollbackOnlyCauseSave(clearSetRollbackOnlyCause());
                Transaction trans = txMgr.suspend();
                pushSuspendedTransaction(trans);
                return trans;
            } else {
                return null;
            }
        } else {
            Debug.logWarning("No transaction in place, so not suspending.", module);
            return null;
        }
    } catch (SystemException e) {
        throw new GenericTransactionException("System error, could not suspend transaction", e);
    }
}

/**
 * Resumes a transaction previously returned by {@link #suspend()}, restoring the
 * per-thread begin-stack / rollback-only-cause state saved at suspend time.
 * A null argument is a no-op.
 */
public static void resume(Transaction parentTx) throws GenericTransactionException {
    if (parentTx == null) {
        return;
    }
    TransactionManager txMgr = TransactionFactoryLoader.getInstance().getTransactionManager();
    try {
        if (txMgr != null) {
            setTransactionBeginStack(popTransactionBeginStackSave());
            setSetRollbackOnlyCause(popSetRollbackOnlyCauseSave());
            txMgr.resume(parentTx);
            removeSuspendedTransaction(parentTx);
        }
    } catch (InvalidTransactionException e) {
        throw new GenericTransactionException("System error, could not resume transaction", e);
    } catch (SystemException e) {
        throw new GenericTransactionException("System error, could not resume transaction", e);
    }
}

/** Sets the timeout of the transaction in the current thread IF transactions are available */
public static void setTransactionTimeout(int seconds) throws GenericTransactionException {
    UserTransaction ut = TransactionFactoryLoader.getInstance().getUserTransaction();
    if (ut != null) {
        try {
            ut.setTransactionTimeout(seconds);
        } catch (SystemException e) {
            throw new GenericTransactionException("System error, could not set transaction timeout", e);
        }
    }
}

/** Enlists the given XAConnection and if a transaction is active in the current thread, returns a plain JDBC Connection */
public static Connection enlistConnection(XAConnection xacon) throws GenericTransactionException {
    if (xacon == null) {
        return null;
    }
    try {
        XAResource resource = xacon.getXAResource();
        TransactionUtil.enlistResource(resource);
        return xacon.getConnection();
    } catch (SQLException e) {
        throw new GenericTransactionException("SQL error, could not enlist connection in transaction even though transactions are available", e);
    }
}

/**
 * Enlists the given XAResource in the current thread's transaction, if there is an
 * active one. A null resource or an absent/inactive transaction is a silent no-op.
 */
public static void enlistResource(XAResource resource) throws GenericTransactionException {
    if (resource == null) {
        return;
    }
    try {
        TransactionManager tm = TransactionFactoryLoader.getInstance().getTransactionManager();
        if (tm != null && tm.getStatus() == STATUS_ACTIVE) {
            Transaction tx = tm.getTransaction();
            if (tx != null) {
                tx.enlistResource(resource);
            }
        }
    } catch (RollbackException e) {
        //This is Java 1.4 only, but useful for certain debuggins: Throwable t = e.getCause() == null ? e : e.getCause();
        throw new GenericTransactionException("Roll Back error, could not enlist resource in transaction even though transactions are available, current transaction rolled back", e);
    } catch (SystemException e) {
        //This is Java 1.4 only, but useful for certain debuggins: Throwable t = e.getCause() == null ? e : e.getCause();
        throw new GenericTransactionException("System error, could not enlist resource in transaction even though transactions are available", e);
    }
}

/** Maps a javax.transaction.Status code to a human-readable label (used for logging). */
public static String getTransactionStateString(int state) {
    /*
     * javax.transaction.Status
     * STATUS_ACTIVE           0
     * STATUS_MARKED_ROLLBACK  1
     * STATUS_PREPARED         2
     * STATUS_COMMITTED        3
     * STATUS_ROLLEDBACK       4
     * STATUS_UNKNOWN          5
     * STATUS_NO_TRANSACTION   6
     * STATUS_PREPARING        7
     * STATUS_COMMITTING       8
     * STATUS_ROLLING_BACK     9
     */
    switch (state) {
        case Status.STATUS_ACTIVE:
            return "Transaction Active (" + state + ")";
        case Status.STATUS_COMMITTED:
            return "Transaction Committed (" + state + ")";
        case Status.STATUS_COMMITTING:
            return "Transaction Committing (" + state + ")";
        case Status.STATUS_MARKED_ROLLBACK:
            return "Transaction Marked Rollback (" + state + ")";
        case Status.STATUS_NO_TRANSACTION:
            return "No Transaction (" + state + ")";
        case Status.STATUS_PREPARED:
            return "Transaction Prepared (" + state + ")";
        case Status.STATUS_PREPARING:
            return "Transaction Preparing (" + state + ")";
        case Status.STATUS_ROLLEDBACK:
            return "Transaction Rolledback (" + state + ")";
        case Status.STATUS_ROLLING_BACK:
            return "Transaction Rolling Back (" + state + ")";
        case Status.STATUS_UNKNOWN:
            return "Transaction Status Unknown (" + state + ")";
        default:
            return "Not a valid state code (" + state + ")";
    }
}

/** Reads the debug-xa-resources flag from entity config; false on config errors. */
private static boolean readDebugResources() {
    try {
        return EntityConfig.getInstance().getDebugXaResources().getValue();
    } catch (GenericEntityConfException gece) {
        Debug.logWarning(gece, module);
    }
    return false;
}

/** Returns the cached debug-xa-resources setting. */
public static boolean debugResources() {
    return debugResources;
}

/** Logs every tracked DebugXaResource when XA debugging is enabled. */
public static void logRunningTx() {
    if (debugResources()) {
        if (UtilValidate.isNotEmpty(debugResMap)) {
            for (DebugXaResource dxa: debugResMap.values()) {
                dxa.log();
            }
        }
    }
}

/**
 * Registers a Synchronization callback with the current thread's active transaction.
 * A null sync or an absent/inactive transaction is a silent no-op.
 */
public static void registerSynchronization(Synchronization sync) throws GenericTransactionException {
    if (sync == null) {
        return;
    }
    try {
        TransactionManager tm =
TransactionFactoryLoader.getInstance().getTransactionManager();
        if (tm != null && tm.getStatus() == STATUS_ACTIVE) {
            Transaction tx = tm.getTransaction();
            if (tx != null) {
                tx.registerSynchronization(sync);
            }
        }
    } catch (RollbackException e) {
        throw new GenericTransactionException("Roll Back error, could not register synchronization in transaction even though transactions are available, current transaction rolled back", e);
    } catch (SystemException e) {
        throw new GenericTransactionException("System error, could not register synchronization in transaction even though transactions are available", e);
    }
}

// =======================================
// SUSPENDED TRANSACTIONS
// =======================================

/** BE VERY CAREFUL WHERE YOU CALL THIS!! Resumes and rolls back every suspended transaction on this thread; returns how many were cleaned. */
public static int cleanSuspendedTransactions() throws GenericTransactionException {
    Transaction trans = null;
    int num = 0;
    while ((trans = popSuspendedTransaction()) != null) {
        resume(trans);
        rollback();
        num++;
    }
    // no transaction stamps to remember anymore ;-)
    clearTransactionStartStampStack();
    return num;
}

/** Returns true when this thread has at least one suspended transaction on its stack. */
public static boolean suspendedTransactionsHeld() {
    List<Transaction> tl = suspendedTxStack.get();
    return UtilValidate.isNotEmpty(tl);
}

/** Returns this thread's suspended-transaction stack, lazily creating it. */
public static List<Transaction> getSuspendedTxStack() {
    List<Transaction> tl = suspendedTxStack.get();
    if (tl == null) {
        tl = new LinkedList<Transaction>();
        suspendedTxStack.set(tl);
    }
    return tl;
}

/** Returns this thread's stack of suspend-location Exceptions (for debugging), lazily creating it. */
public static List<Exception> getSuspendedTxLocationsStack() {
    List<Exception> tl = suspendedTxLocationStack.get();
    if (tl == null) {
        tl = new LinkedList<Exception>();
        suspendedTxLocationStack.set(tl);
    }
    return tl;
}

/** Pushes a just-suspended transaction (plus a location marker and its start stamp) onto the thread-local stacks. */
protected static void pushSuspendedTransaction(Transaction t) {
    List<Transaction> tl = getSuspendedTxStack();
    tl.add(0, t);
    List<Exception> stls = getSuspendedTxLocationsStack();
    stls.add(0, new Exception("TX Suspend Location"));
    // save the current transaction start stamp
    pushTransactionStartStamp(t);
}

/** Pops the most recently suspended transaction off this thread's stack, or null when the stack is empty. */
protected static Transaction popSuspendedTransaction() {
    List<Transaction> tl = suspendedTxStack.get();
    if (UtilValidate.isNotEmpty(tl)) {
        // restore the transaction start stamp
        popTransactionStartStamp();
        List<Exception> stls = suspendedTxLocationStack.get();
        if (UtilValidate.isNotEmpty(stls)) {
            stls.remove(0);
        }
        return tl.remove(0);
    } else {
        return null;
    }
}

/** Removes the given transaction from this thread's suspended stack (used by resume). */
protected static void removeSuspendedTransaction(Transaction t) {
    List<Transaction> tl = suspendedTxStack.get();
    if (UtilValidate.isNotEmpty(tl)) {
        tl.remove(t);
        List<Exception> stls = suspendedTxLocationStack.get();
        if (UtilValidate.isNotEmpty(stls)) {
            // NOTE(review): always removes index 0, not necessarily the location that
            // matches 't' — looks intentional-but-fragile; confirm against callers
            stls.remove(0);
        }
        popTransactionStartStamp(t);
    }
}

// =======================================
// TRANSACTION BEGIN STACK
// =======================================

/** Saves a begin-location Exception for the current thread (and in the shared all-threads map when info logging is on). */
private static void pushTransactionBeginStackSave(Exception e) {
    // use the ThreadLocal one because it is more reliable than the all threads Map
    List<Exception> el = transactionBeginStackSave.get();
    if (el == null) {
        el = new LinkedList<Exception>();
        transactionBeginStackSave.set(el);
    }
    el.add(0, e);
    if (Debug.infoOn()) {
        Long curThreadId = Thread.currentThread().getId();
        List<Exception> ctEl = allThreadsTransactionBeginStackSave.get(curThreadId);
        if (ctEl == null) {
            ctEl = new LinkedList<Exception>();
            allThreadsTransactionBeginStackSave.put(curThreadId, ctEl);
        }
        ctEl.add(0, e);
    }
}

/** Pops and returns the most recently saved begin-location for this thread, or null when none is saved. */
private static Exception popTransactionBeginStackSave() {
    if (Debug.infoOn()) {
        // do the unofficial all threads Map one first, and don't do a real return
        Long curThreadId = Thread.currentThread().getId();
        List<Exception> ctEl = allThreadsTransactionBeginStackSave.get(curThreadId);
        if (UtilValidate.isNotEmpty(ctEl)) {
            ctEl.remove(0);
        }
    }
    // then do the more reliable ThreadLocal one
    List<Exception> el = transactionBeginStackSave.get();
    if (UtilValidate.isNotEmpty(el)) {
        return el.remove(0);
    } else {
        return null;
    }
}

/** Returns the depth of this thread's saved begin-location stack. */
public static int getTransactionBeginStackSaveSize() {
    List<Exception> el = transactionBeginStackSave.get();
    if (el != null) {
        return el.size();
    } else {
        return 0;
    }
}

/** Returns a defensive copy of this thread's saved begin-location stack. */
public static List<Exception> getTransactionBeginStackSave() {
    List<Exception> el = transactionBeginStackSave.get();
    // NOTE(review): elClone.addAll(el) will NPE when nothing was ever pushed on this
    // thread (el == null) — confirm callers guard with getTransactionBeginStackSaveSize()
    List<Exception> elClone = new LinkedList<Exception>();
    elClone.addAll(el);
    return elClone;
}

/** Returns a snapshot copy of the all-threads begin-location map (debug aid). */
public static Map<Long, List<Exception>> getAllThreadsTransactionBeginStackSave() {
    Map<Long, List<Exception>> attbssMap = allThreadsTransactionBeginStackSave;
    Map<Long, List<Exception>> attbssMapClone = new HashMap<Long, List<Exception>>();
    attbssMapClone.putAll(attbssMap);
    return attbssMapClone;
}

/** Logs the current begin stack and saved history for every tracked thread; no-op unless info logging is on. */
public static void printAllThreadsTransactionBeginStacks() {
    if (!Debug.infoOn()) {
        return;
    }
    for (Map.Entry<Long, Exception> attbsMapEntry : allThreadsTransactionBeginStack.entrySet()) {
        Long curThreadId = attbsMapEntry.getKey();
        Exception transactionBeginStack = attbsMapEntry.getValue();
        List<Exception> txBeginStackList = allThreadsTransactionBeginStackSave.get(curThreadId);
        Debug.logInfo(transactionBeginStack, "===================================================\n===================================================\n Current tx begin stack for thread [" + curThreadId + "]:", module);
        if (UtilValidate.isNotEmpty(txBeginStackList)) {
            int stackLevel = 0;
            for (Exception stack : txBeginStackList) {
                Debug.logInfo(stack, "===================================================\n===================================================\n Tx begin stack history for thread [" + curThreadId + "] history number [" + stackLevel + "]:", module);
                stackLevel++;
            }
        } else {
            Debug.logInfo("========================================== No tx begin stack history found for thread [" + curThreadId + "]", module);
        }
    }
}

/** Records the current location as this thread's transaction-begin placeholder. */
private static void setTransactionBeginStack() {
    Exception e = new Exception("Tx Stack Placeholder");
    setTransactionBeginStack(e);
}

/** Installs the given Exception as this thread's begin placeholder; warns when one was already present. */
private static void setTransactionBeginStack(Exception newExc) {
    if (transactionBeginStack.get() != null) {
        Exception e = transactionBeginStack.get();
        Debug.logWarning(e, "In setTransactionBeginStack a stack placeholder was already in place, here is where the transaction began: ", module);
        Exception e2 = new Exception("Current Stack Trace");
        Debug.logWarning(e2, "In setTransactionBeginStack a stack placeholder was already in place, here is the current location: ", module);
    }
    transactionBeginStack.set(newExc);
    if (Debug.infoOn()) {
        Long curThreadId = Thread.currentThread().getId();
        allThreadsTransactionBeginStack.put(curThreadId, newExc);
    }
}

/** Clears and returns this thread's begin placeholder; warns and returns null when none was set. */
private static Exception clearTransactionBeginStack() {
    if (Debug.infoOn()) {
        Long curThreadId = Thread.currentThread().getId();
        allThreadsTransactionBeginStack.remove(curThreadId);
    }
    Exception e = transactionBeginStack.get();
    if (e == null) {
        Exception e2 = new Exception("Current Stack Trace");
        Debug.logWarning(e2, "In clearTransactionBeginStack no stack placeholder was in place, here is the current location: ", module);
        return null;
    } else {
        transactionBeginStack.set(null);
        return e;
    }
}

/** Returns this thread's begin placeholder without clearing it; warns when none is set. */
public static Exception getTransactionBeginStack() {
    Exception e = transactionBeginStack.get();
    if (e == null) {
        Exception e2 = new Exception("Current Stack Trace");
        Debug.logWarning(e2, "In getTransactionBeginStack no stack placeholder was in place, here is the current location: ", module);
    }
    return e;
}

// =======================================
// ROLLBACK ONLY CAUSE
// =======================================

/** Value object pairing the message and Throwable that caused a setRollbackOnly. */
private static class RollbackOnlyCause {
    protected String causeMessage;
    protected Throwable causeThrowable;

    public RollbackOnlyCause(String causeMessage, Throwable causeThrowable) {
        this.causeMessage = causeMessage;
        this.causeThrowable = causeThrowable;
    }
    /** Message text with the throwable's toString appended when present. */
    public String getCauseMessage() {
        return this.causeMessage + (this.causeThrowable == null ? "" : this.causeThrowable.toString());
    }
    public Throwable getCauseThrowable() {
        return this.causeThrowable;
    }
    /** Logs the cause at error level, prefixed by the optional message. */
    public void logError(String message) {
        Debug.logError(this.getCauseThrowable(), (message == null ?
"" : message) + this.getCauseMessage(), module);
    }
    /** True when neither a message nor a throwable was recorded. */
    public boolean isEmpty() {
        return (UtilValidate.isEmpty(this.getCauseMessage()) && this.getCauseThrowable() == null);
    }
}

/** Saves a rollback-only cause for later restore by {@link #resume} (suspend-time bookkeeping). */
private static void pushSetRollbackOnlyCauseSave(RollbackOnlyCause e) {
    List<RollbackOnlyCause> el = setRollbackOnlyCauseSave.get();
    if (el == null) {
        el = new LinkedList<RollbackOnlyCause>();
        setRollbackOnlyCauseSave.set(el);
    }
    el.add(0, e);
}

/** Pops the most recently saved rollback-only cause, or null when none is saved. */
private static RollbackOnlyCause popSetRollbackOnlyCauseSave() {
    List<RollbackOnlyCause> el = setRollbackOnlyCauseSave.get();
    if (UtilValidate.isNotEmpty(el)) {
        return el.remove(0);
    } else {
        return null;
    }
}

/** Records the reason the current transaction was marked rollback-only. */
private static void setSetRollbackOnlyCause(String causeMessage, Throwable causeThrowable) {
    RollbackOnlyCause roc = new RollbackOnlyCause(causeMessage, causeThrowable);
    setSetRollbackOnlyCause(roc);
}

/** Installs the given cause as this thread's rollback-only marker; warns when one was already present. */
private static void setSetRollbackOnlyCause(RollbackOnlyCause newRoc) {
    if (setRollbackOnlyCause.get() != null) {
        RollbackOnlyCause roc = setRollbackOnlyCause.get();
        roc.logError("In setSetRollbackOnlyCause a stack placeholder was already in place, here is the original rollbackOnly cause: ");
        Exception e2 = new Exception("Current Stack Trace");
        Debug.logWarning(e2, "In setSetRollbackOnlyCause a stack placeholder was already in place, here is the current location: ", module);
    }
    setRollbackOnlyCause.set(newRoc);
}

/** Clears and returns this thread's rollback-only cause; null when none was set. */
private static RollbackOnlyCause clearSetRollbackOnlyCause() {
    RollbackOnlyCause roc = setRollbackOnlyCause.get();
    if (roc == null) {
        /* this is an obnoxious message, leaving out for now; could be added manually if a problem with this is suspected
        if (Debug.verboseOn()) {
            // for this in particular, unlike the begin location, normally there will not be a setRollbackOnlyCause, so don't complain about it except in verbose
            Debug.logVerbose(new Exception("Current Stack Trace"), "In clearSetRollbackOnlyCause no stack placeholder was in place, here is the current location: ", module);
        }
        */
        return null;
    } else {
        setRollbackOnlyCause.set(null);
        return roc;
    }
}

/** Returns this thread's rollback-only cause without clearing it; warns when none is set. */
public static RollbackOnlyCause getSetRollbackOnlyCause() {
    if (setRollbackOnlyCause.get() == null) {
        Exception e = new Exception("Current Stack Trace");
        Debug.logWarning(e, "In getSetRollbackOnlyCause no stack placeholder was in place, here is the current location: ", module);
    }
    return setRollbackOnlyCause.get();
}

// =======================================
// SUSPENDED TRANSACTIONS START TIMESTAMPS
// =======================================

/**
 * Maintain the suspended transactions together with their timestamps
 */
private static ThreadLocal<Map<Transaction, Timestamp>> suspendedTxStartStamps = new ThreadLocal<Map<Transaction, Timestamp>>() {
    @Override
    public Map<Transaction, Timestamp> initialValue() {
        // ListOrderedMap keeps insertion order so popTransactionStartStamp() can take the last key
        return UtilGenerics.checkMap(new ListOrderedMap());
    }
};

/**
 * Put the stamp to remember later
 * @param t transaction just suspended
 */
private static void pushTransactionStartStamp(Transaction t) {
    Map<Transaction, Timestamp> map = suspendedTxStartStamps.get();
    Timestamp stamp = transactionStartStamp.get();
    if (stamp != null) {
        map.put(t, stamp);
    } else {
        Debug.logError("Error in transaction handling - no start stamp to push.", module);
    }
}

/**
 * Method called when the suspended stack gets cleaned by {@link #cleanSuspendedTransactions()}.
 */
private static void clearTransactionStartStampStack() {
    suspendedTxStartStamps.get().clear();
}

/**
 * Remove the stamp of the specified transaction from stack (when resuming)
 * and set it as current start stamp.
 * @param t transaction just resumed
 */
private static void popTransactionStartStamp(Transaction t) {
    Map<Transaction, Timestamp> map = suspendedTxStartStamps.get();
    if (map.size() > 0) {
        Timestamp stamp = map.remove(t);
        if (stamp != null) {
            transactionStartStamp.set(stamp);
        } else {
            Debug.logError("Error in transaction handling - no saved start stamp found - using NOW.", module);
            transactionStartStamp.set(UtilDateTime.nowTimestamp());
        }
    }
}

/**
 * Remove the stamp from stack (when resuming)
 */
private static void popTransactionStartStamp() {
    ListOrderedMap map = (ListOrderedMap) suspendedTxStartStamps.get();
    if (map.size() > 0) {
        // pop the most recently pushed stamp (last key of the ordered map)
        transactionStartStamp.set((Timestamp) map.remove(map.lastKey()));
    } else {
        Debug.logError("Error in transaction handling - no saved start stamp found - using NOW.", module);
        transactionStartStamp.set(UtilDateTime.nowTimestamp());
    }
}

/** Returns the current transaction's start stamp, initializing it (and the clearing synchronization) on first use. */
public static Timestamp getTransactionStartStamp() {
    Timestamp curStamp = transactionStartStamp.get();
    if (curStamp == null) {
        curStamp = UtilDateTime.nowTimestamp();
        transactionStartStamp.set(curStamp);
        // we know this is the first time set for this transaction, so make sure the StampClearSync is registered
        try {
            registerSynchronization(new StampClearSync());
        } catch (GenericTransactionException e) {
            Debug.logError(e, "Error registering StampClearSync synchronization, stamps will still be reset if begin/commit/rollback are call through TransactionUtil, but not if otherwise", module);
        }
    }
    return curStamp;
}

/** Returns a now-timestamp guaranteed unique within this thread by bumping 1ms past the last one handed out. */
public static Timestamp getTransactionUniqueNowStamp() {
    Timestamp lastNowStamp = transactionLastNowStamp.get();
    Timestamp nowTimestamp = UtilDateTime.nowTimestamp();
    // check for an overlap with the lastNowStamp, or if the lastNowStamp is in the future because of incrementing to make each stamp unique
    if (lastNowStamp != null && (lastNowStamp.equals(nowTimestamp) || lastNowStamp.after(nowTimestamp))) {
        nowTimestamp = new Timestamp(lastNowStamp.getTime() + 1);
    }
    transactionLastNowStamp.set(nowTimestamp);
return nowTimestamp;
}

/** Resets both per-thread timestamp slots; called after transaction completion. */
protected static void clearTransactionStamps() {
    transactionStartStamp.set(null);
    transactionLastNowStamp.set(null);
}

/** Synchronization that clears the per-thread transaction stamps once the transaction completes. */
public static class StampClearSync implements Synchronization {
    public void afterCompletion(int status) {
        TransactionUtil.clearTransactionStamps();
    }

    public void beforeCompletion() {
    }
}

/**
 * Callable wrapper that runs its delegate with any current transaction suspended,
 * resuming it in a finally block. Causes are unwrapped to the root before rethrow.
 */
public static final class NoTransaction<V> implements Callable<V> {
    private final Callable<V> callable;

    protected NoTransaction(Callable<V> callable) {
        this.callable = callable;
    }

    public V call() throws GenericEntityException {
        Transaction suspended = TransactionUtil.suspend();
        try {
            try {
                return callable.call();
            } catch (Throwable t) {
                // unwrap to the root cause so the outer catches classify the real failure
                while (t.getCause() != null) {
                    t = t.getCause();
                }
                throw t;
            }
        } catch (GenericEntityException e) {
            throw e;
        } catch (Error e) {
            throw e;
        } catch (RuntimeException e) {
            throw e;
        } catch (Throwable t) {
            throw new GenericEntityException(t);
        } finally {
            // always re-attach the suspended transaction to this thread
            TransactionUtil.resume(suspended);
        }
    }
}

/**
 * Callable wrapper that runs its delegate inside a (possibly newly begun) transaction,
 * committing on success and rolling back with the recorded cause on any failure.
 */
public static final class InTransaction<V> implements Callable<V> {
    private final Callable<V> callable;
    private final String ifErrorMessage;
    private final int timeout;
    private final boolean printException;

    protected InTransaction(Callable<V> callable, String ifErrorMessage, int timeout, boolean printException) {
        this.callable = callable;
        this.ifErrorMessage = ifErrorMessage;
        this.timeout = timeout;
        this.printException = printException;
    }

    public V call() throws GenericEntityException {
        // tx == true only when begin() actually started a new transaction here
        boolean tx = TransactionUtil.begin(timeout);
        Throwable transactionAbortCause = null;
        try {
            try {
                return callable.call();
            } catch (Throwable t) {
                // unwrap to the root cause so the outer catches classify the real failure
                while (t.getCause() != null) {
                    t = t.getCause();
                }
                throw t;
            }
        } catch (Error e) {
            transactionAbortCause = e;
            throw e;
        } catch (RuntimeException e) {
            transactionAbortCause = e;
            throw e;
        } catch (Throwable t) {
            transactionAbortCause = t;
            throw new GenericEntityException(t);
        } finally {
            if (transactionAbortCause == null) {
                TransactionUtil.commit(tx);
            } else {
                if (printException) {
                    transactionAbortCause.printStackTrace();
                }
                TransactionUtil.rollback(tx, ifErrorMessage, transactionAbortCause);
            }
        }
    }
}

/**
 * Wraps the given Connection in a cursor-connection proxy when the datasource is
 * configured with use-proxy-cursor and a fetch size greater than 1; otherwise
 * returns the connection unchanged.
 */
public static Connection getCursorConnection(GenericHelperInfo helperInfo, Connection con) {
    Datasource datasourceInfo = EntityConfig.getDatasource(helperInfo.getHelperBaseName());
    if (datasourceInfo == null) {
        Debug.logWarning("Could not find configuration for " + helperInfo.getHelperBaseName() + " datasource.", module);
        return con;
    } else if (datasourceInfo.getUseProxyCursor()) {
        try {
            if (datasourceInfo.getResultFetchSize() > 1)
                con = CursorConnection.newCursorConnection(con, datasourceInfo.getProxyCursorName(), datasourceInfo.getResultFetchSize());
        } catch (Exception ex) {
            Debug.logWarning(ex, "Error creating the cursor connection proxy " + helperInfo.getHelperBaseName() + " datasource.", module);
        }
    }
    return con;
}
}
/* * $Id: TestFormPanel.java,v 1.6 2007/09/18 11:20:49 agoubard Exp $ * * Copyright 2003-2007 Orange Nederland Breedband B.V. * See the COPYRIGHT file for redistribution and use restrictions. */ package org.xins.common.spec; import java.awt.*; import java.awt.event.ActionListener; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; import javax.swing.*; import javax.swing.text.JTextComponent; import org.xins.common.types.EnumItem; import org.xins.common.types.EnumType; import org.xins.common.types.Type; /** * Graphical user interface that allows to browse the specification of an API * and execute the functions of this API. * * @version $Revision: 1.6 $ $Date: 2007/09/18 11:20:49 $ * @author <a href="mailto:anthony.goubard@japplis.com">Anthony Goubard</a> * * @since XINS 2.1 */ public class TestFormPanel extends JPanel { private APISpec apiSpec; private String functionName; private java.util.List<JComponent> parameterComponents; private ActionListener submitListener; private Color tfBackground; private Color tfInvalidColor; /** * Constructs a new <code>SpecGUI</code>. * * @param apiSpec * the specification of the API. * * @param functionName * the specification of the API. */ public TestFormPanel(APISpec apiSpec, String functionName, ActionListener submitListener) { this.apiSpec = apiSpec; this.functionName = functionName; this.submitListener = submitListener; try { initUI(); } catch (Exception ex) { ex.printStackTrace(); } initData(); } /** * Creates the user interface. 
*/ protected void initUI() throws Exception { FunctionSpec functionSpec = apiSpec.getFunction(functionName); setLayout(new BorderLayout(5,5)); JLabel jlFunctionName = new JLabel(functionName + " function") { public void paint(Graphics g) { Graphics2D g2 = (Graphics2D) g; Color background = getBackground(); Paint oldPaint = g2.getPaint(); GradientPaint gradient = new GradientPaint(0.0f, 0.0f, background.brighter(), TestFormPanel.this.getWidth() + 0.1f, getHeight() + 0.1f, background.darker()); g2.setPaint(gradient); g2.fill(new Rectangle(TestFormPanel.this.getWidth(), getHeight())); g2.setPaint(oldPaint); super.paint(g); } }; jlFunctionName.setOpaque(false); jlFunctionName.setFont(jlFunctionName.getFont().deriveFont(20.0f)); jlFunctionName.setToolTipText(functionSpec.getDescription()); add(jlFunctionName, BorderLayout.NORTH); Map inputParameters = functionSpec.getInputParameters(); boolean hasInputDataSection = functionSpec.getInputDataSectionElements().size() > 0; parameterComponents = new ArrayList<JComponent>(); //JPanel paramNamesPanel = new JPanel(); //JPanel paramValuesPanel = new JPanel(); tfBackground = UIManager.getColor("TextField.background"); tfInvalidColor = new Color( Math.min(tfBackground.getRed() + 30, 255), Math.max(tfBackground.getGreen() - 15, 0), Math.max(tfBackground.getBlue() - 20, 0)); JPanel paramsPanel = new JPanel(); GridBagLayout gridbag = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); paramsPanel.setLayout(gridbag); c.fill = GridBagConstraints.HORIZONTAL; c.anchor = GridBagConstraints.WEST; c.insets = new Insets(2,5,2,5); Iterator itInputParameters = inputParameters.values().iterator(); while (itInputParameters.hasNext()) { ParameterSpec inputSpec = (ParameterSpec) itInputParameters.next(); JLabel jlInput = new JLabel(inputSpec.getName() + ":"); jlInput.setToolTipText(inputSpec.getDescription()); c.weightx = 0.2; c.gridwidth = 1; gridbag.setConstraints(jlInput, c); paramsPanel.add(jlInput); JComponent inputField = 
createInputComponent(inputSpec); c.weightx = 1.0; c.gridwidth = 2; gridbag.setConstraints(inputField, c); paramsPanel.add(inputField); parameterComponents.add(inputField); c.gridwidth = GridBagConstraints.REMAINDER; c.weightx = 0.2; JLabel jlBlank = new JLabel(); gridbag.setConstraints(jlBlank, c); paramsPanel.add(jlBlank); } if (hasInputDataSection) { JLabel jlInput = new JLabel("Data section:"); c.weightx = 0.2; c.gridwidth = 1; gridbag.setConstraints(jlInput, c); paramsPanel.add(jlInput); JTextArea inputField = new JTextArea(8,40); inputField.putClientProperty("PARAM_NAME", "_data"); c.weightx = 1.0; c.gridwidth = 2; c.fill = GridBagConstraints.BOTH; gridbag.setConstraints(inputField, c); paramsPanel.add(new JScrollPane(inputField)); parameterComponents.add(inputField); } add(paramsPanel, BorderLayout.CENTER); JPanel submitPanel = new JPanel(new FlowLayout(FlowLayout.RIGHT, 15, 5)); JButton jbSubmit = new JButton("Submit"); jbSubmit.addActionListener(submitListener); submitPanel.add(jbSubmit); add(submitPanel, BorderLayout.SOUTH); } protected void initData() { } protected JComponent createInputComponent(final ParameterSpec inputSpec) { final JComponent inputField; final Type inputType = inputSpec.getType(); String defaultValue = inputSpec.getDefault(); if (inputType instanceof EnumType) { inputField = new JComboBox(); Iterator itItems = ((EnumType) inputType).getEnumItems().iterator(); if (!inputSpec.isRequired()) { ((JComboBox) inputField).addItem(""); } while (itItems.hasNext()) { EnumItem item = (EnumItem) itItems.next(); ((JComboBox) inputField).addItem(item.getValue()); } } else if (inputType instanceof org.xins.common.types.standard.Boolean) { if (inputSpec.isRequired()) { inputField = new JCheckBox(); if ("true".equals(defaultValue)) { ((JCheckBox) inputField).setSelected(true); } } else { inputField = new JComboBox(); ((JComboBox) inputField).addItem(""); ((JComboBox) inputField).addItem("true"); ((JComboBox) inputField).addItem("false"); if 
(defaultValue != null) { ((JComboBox) inputField).setSelectedItem(defaultValue); } } } else { inputField = new JTextField(20); if (inputSpec.isRequired() && defaultValue == null) { inputField.setBackground(tfInvalidColor); } inputField.addKeyListener(new KeyAdapter() { public void keyTyped(KeyEvent ke) { String text = ((JTextField) inputField).getText(); if (!ke.isActionKey()) { text += ke.getKeyChar(); } if (inputType.isValidValue(text) || (text.equals("") && !inputSpec.isRequired())) { inputField.setBackground(tfBackground); } else { inputField.setBackground(tfInvalidColor); } } }); if (defaultValue != null) { ((JTextField) inputField).setText(defaultValue); } } inputField.setToolTipText(inputType.getName() + ": " + inputType.getDescription()); inputField.putClientProperty("PARAM_NAME", inputSpec.getName()); return inputField; } /** * Gets the list of parameters in a URL form. * * @return * the list of the parameters as it should be send to the URL * (starting with an '&') or an empty String if no parameter is set. */ public String getParameters() { String result = ""; Iterator<JComponent> itParameters = parameterComponents.iterator(); while (itParameters.hasNext()) { JComponent inputComponent = itParameters.next(); String paramName = (String) inputComponent.getClientProperty("PARAM_NAME"); String paramValue = ""; if (inputComponent instanceof JTextComponent) { paramValue = ((JTextComponent) inputComponent).getText(); } else if (inputComponent instanceof JComboBox) { paramValue = ((JComboBox) inputComponent).getSelectedItem().toString(); } else if (inputComponent instanceof JCheckBox) { paramValue = ((JCheckBox) inputComponent).isSelected() ? "true" : "false"; } if (!"".equals(paramValue)) { result += "&" + paramName + "=" + paramValue; } } return result; } }
/**************************************************************************** Copyright (c) 2010-2012 cocos2d-x.org Copyright (c) 2013-2014 Chukong Technologies Inc. http://www.cocos2d-x.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
****************************************************************************/
package org.cocos2dx.lib;

import android.app.Dialog;
import android.content.Context;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.os.Handler;
import android.text.InputFilter;
import android.text.InputType;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;

/**
 * Full-screen translucent dialog hosting a single EditText, used as the native
 * backing widget for the cocos2d-x EditBox control.
 */
public class Cocos2dxEditBoxDialog extends Dialog {
    // ===========================================================
    // Constants
    // ===========================================================

    /**
     * The user is allowed to enter any text, including line breaks.
     */
    private final int kEditBoxInputModeAny = 0;

    /**
     * The user is allowed to enter an e-mail address.
     */
    private final int kEditBoxInputModeEmailAddr = 1;

    /**
     * The user is allowed to enter an integer value.
     */
    private final int kEditBoxInputModeNumeric = 2;

    /**
     * The user is allowed to enter a phone number.
     */
    private final int kEditBoxInputModePhoneNumber = 3;

    /**
     * The user is allowed to enter a URL.
     */
    private final int kEditBoxInputModeUrl = 4;

    /**
     * The user is allowed to enter a real number value. This extends kEditBoxInputModeNumeric by allowing a decimal point.
     */
    private final int kEditBoxInputModeDecimal = 5;

    /**
     * The user is allowed to enter any text, except for line breaks.
     */
    private final int kEditBoxInputModeSingleLine = 6;

    /**
     * Indicates that the text entered is confidential data that should be obscured whenever possible. This implies EDIT_BOX_INPUT_FLAG_SENSITIVE.
     */
    private final int kEditBoxInputFlagPassword = 0;

    /**
     * Indicates that the text entered is sensitive data that the implementation must never store into a dictionary or table for use in predictive, auto-completing, or other accelerated input schemes. A credit card number is an example of sensitive data.
     */
    private final int kEditBoxInputFlagSensitive = 1;

    /**
     * This flag is a hint to the implementation that during text editing, the initial letter of each word should be capitalized.
     */
    private final int kEditBoxInputFlagInitialCapsWord = 2;

    /**
     * This flag is a hint to the implementation that during text editing, the initial letter of each sentence should be capitalized.
     */
    private final int kEditBoxInputFlagInitialCapsSentence = 3;

    /**
     * Capitalize all characters automatically.
     */
    private final int kEditBoxInputFlagInitialCapsAllCharacters = 4;

    // IME return-key styles mirroring the cocos2d-x KeyboardReturnType enum.
    private final int kKeyboardReturnTypeDefault = 0;
    private final int kKeyboardReturnTypeDone = 1;
    private final int kKeyboardReturnTypeSend = 2;
    private final int kKeyboardReturnTypeSearch = 3;
    private final int kKeyboardReturnTypeGo = 4;

    // ===========================================================
    // Fields
    // ===========================================================

    private EditText mInputEditText;
    private TextView mTextViewTitle;

    // Immutable configuration captured at construction time.
    private final String mTitle;
    private final String mMessage;
    private final int mInputMode;
    private final int mInputFlag;
    private final int mReturnType;
    private final int mMaxLength;

    // Derived InputType bit masks, computed in onCreate from mInputFlag / mInputMode.
    private int mInputFlagConstraints;
    private int mInputModeContraints;
    // NOTE(review): never assigned in the code visible here — presumably set elsewhere; verify.
    private boolean mIsMultiline;

    // ===========================================================
    // Constructors
    // ===========================================================

    /**
     * Builds the dialog; all UI setup is deferred to {@link #onCreate}.
     */
    public Cocos2dxEditBoxDialog(final Context pContext, final String pTitle, final String pMessage, final int pInputMode, final int pInputFlag, final int pReturnType, final int pMaxLength) {
        super(pContext, android.R.style.Theme_Translucent_NoTitleBar_Fullscreen);
        // super(context, R.style.Theme_Translucent);
        this.mTitle = pTitle;
        this.mMessage = pMessage;
        this.mInputMode = pInputMode;
        this.mInputFlag = pInputFlag;
        this.mReturnType = pReturnType;
        this.mMaxLength = pMaxLength;
    }

    @Override
    protected void onCreate(final Bundle pSavedInstanceState) {
        super.onCreate(pSavedInstanceState);

        // Semi-transparent black backdrop behind the input UI.
        this.getWindow().setBackgroundDrawable(new ColorDrawable(0x80000000));

        final LinearLayout layout = new LinearLayout(this.getContext());
        layout.setOrientation(LinearLayout.VERTICAL);

        final LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);

        this.mTextViewTitle = new TextView(this.getContext());
        final LinearLayout.LayoutParams textviewParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
        textviewParams.leftMargin = textviewParams.rightMargin = this.convertDipsToPixels(10);
        this.mTextViewTitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20);
        layout.addView(this.mTextViewTitle, textviewParams);

        this.mInputEditText = new EditText(this.getContext());
        final LinearLayout.LayoutParams editTextParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
        editTextParams.leftMargin = editTextParams.rightMargin = this.convertDipsToPixels(10);
        layout.addView(this.mInputEditText, editTextParams);

        this.setContentView(layout, layoutParams);

        this.getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

        this.mTextViewTitle.setText(this.mTitle);
        this.mInputEditText.setText(this.mMessage);

        // Keep the edit UI inline (no fullscreen extract view in landscape).
        int oldImeOptions = this.mInputEditText.getImeOptions();
        this.mInputEditText.setImeOptions(oldImeOptions | EditorInfo.IME_FLAG_NO_EXTRACT_UI);
        oldImeOptions = this.mInputEditText.getImeOptions();

        // Translate the cocos2d-x input mode into Android InputType bits.
        switch (this.mInputMode) {
            case kEditBoxInputModeAny:
                this.mInputModeContraints = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_MULTI_LINE;
                break;
            case kEditBoxInputModeEmailAddr:
                this.mInputModeContraints = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_EMAIL_ADDRESS;
                break;
            case kEditBoxInputModeNumeric:
                this.mInputModeContraints = InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_SIGNED;
                break;
            case kEditBoxInputModePhoneNumber:
                this.mInputModeContraints = InputType.TYPE_CLASS_PHONE;
                break;
            case kEditBoxInputModeUrl:
                this.mInputModeContraints = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_URI;
                break;
            case kEditBoxInputModeDecimal:
                this.mInputModeContraints = InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL | InputType.TYPE_NUMBER_FLAG_SIGNED;
                break;
            case kEditBoxInputModeSingleLine:
                this.mInputModeContraints = InputType.TYPE_CLASS_TEXT;
                break;
            default:
                break;
        }

        if (this.mIsMultiline) {
            this.mInputModeContraints |= InputType.TYPE_TEXT_FLAG_MULTI_LINE;
        }

        // NOTE(review): at this point mInputFlagConstraints has not yet been assigned
        // (the mInputFlag switch below sets it), so this first call effectively applies
        // only the mode bits; the second setInputType call below applies both. Looks
        // redundant/out of order — confirm before reordering.
        this.mInputEditText.setInputType(this.mInputModeContraints | this.mInputFlagConstraints);

        // Translate the cocos2d-x input flag into Android InputType bits.
        switch (this.mInputFlag) {
            case kEditBoxInputFlagPassword:
                this.mInputFlagConstraints = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_PASSWORD;
                break;
            case kEditBoxInputFlagSensitive:
                this.mInputFlagConstraints = InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS;
                break;
            case kEditBoxInputFlagInitialCapsWord:
                this.mInputFlagConstraints = InputType.TYPE_TEXT_FLAG_CAP_WORDS;
                break;
            case kEditBoxInputFlagInitialCapsSentence:
                this.mInputFlagConstraints = InputType.TYPE_TEXT_FLAG_CAP_SENTENCES;
                break;
            case kEditBoxInputFlagInitialCapsAllCharacters:
                this.mInputFlagConstraints = InputType.TYPE_TEXT_FLAG_CAP_CHARACTERS;
                break;
            default:
                break;
        }
        this.mInputEditText.setInputType(this.mInputFlagConstraints | this.mInputModeContraints);

        // Map the requested return-key style onto IME action flags.
        switch (this.mReturnType) {
            case kKeyboardReturnTypeDefault:
                this.mInputEditText.setImeOptions(oldImeOptions | EditorInfo.IME_ACTION_NONE);
                break;
            case kKeyboardReturnTypeDone:
                this.mInputEditText.setImeOptions(oldImeOptions | EditorInfo.IME_ACTION_DONE);
                break;
            case kKeyboardReturnTypeSend:
                this.mInputEditText.setImeOptions(oldImeOptions | EditorInfo.IME_ACTION_SEND);
                break;
            case kKeyboardReturnTypeSearch:
                this.mInputEditText.setImeOptions(oldImeOptions | EditorInfo.IME_ACTION_SEARCH);
                break;
            case kKeyboardReturnTypeGo:
                this.mInputEditText.setImeOptions(oldImeOptions | EditorInfo.IME_ACTION_GO);
                break;
            default:
                this.mInputEditText.setImeOptions(oldImeOptions | EditorInfo.IME_ACTION_NONE);
                break;
        }

        if (this.mMaxLength > 0) {
            this.mInputEditText.setFilters(new InputFilter[] { new InputFilter.LengthFilter(this.mMaxLength) });
        }

        // Give the window time to appear, then focus the field and pop the soft keyboard.
        final Handler initHandler = new Handler();
        initHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                Cocos2dxEditBoxDialog.this.mInputEditText.requestFocus();
                Cocos2dxEditBoxDialog.this.mInputEditText.setSelection(Cocos2dxEditBoxDialog.this.mInputEditText.length());
                Cocos2dxEditBoxDialog.this.openKeyboard();
            }
        }, 200);

        this.mInputEditText.setOnEditorActionListener(new OnEditorActionListener() {
            @Override
            public boolean onEditorAction(final TextView v, final int actionId, final KeyEvent event) {
                /* If user didn't set keyboard type, this callback will be invoked twice with 'KeyEvent.ACTION_DOWN' and 'KeyEvent.ACTION_UP'.
*/ if (actionId != EditorInfo.IME_NULL || (actionId == EditorInfo.IME_NULL && event != null && event.getAction() == KeyEvent.ACTION_DOWN)) { Cocos2dxHelper.setEditTextDialogResult(Cocos2dxEditBoxDialog.this.mInputEditText.getText().toString()); Cocos2dxEditBoxDialog.this.closeKeyboard(); Cocos2dxEditBoxDialog.this.dismiss(); return true; } return false; } }); } // =========================================================== // Getter & Setter // =========================================================== // =========================================================== // Methods for/from SuperClass/Interfaces // =========================================================== // =========================================================== // Methods // =========================================================== private int convertDipsToPixels(final float pDIPs) { final float scale = this.getContext().getResources().getDisplayMetrics().density; return Math.round(pDIPs * scale); } private void openKeyboard() { final InputMethodManager imm = (InputMethodManager) this.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); imm.showSoftInput(this.mInputEditText, 0); } private void closeKeyboard() { final InputMethodManager imm = (InputMethodManager) this.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(this.mInputEditText.getWindowToken(), 0); } // =========================================================== // Inner and Anonymous Classes // =========================================================== }
/**
 *
 * Copyright 2003-2004 The Apache Software Foundation
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.apache.geronimo.tomcat;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.PermissionCollection;
import java.security.Permissions;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import javax.management.ObjectName;
import javax.security.jacc.WebResourcePermission;
import javax.security.jacc.WebUserDataPermission;

import org.apache.geronimo.security.deploy.DefaultPrincipal;
import org.apache.geronimo.security.deploy.PrincipalInfo;
import org.apache.geronimo.security.deploy.Role;
import org.apache.geronimo.security.deploy.Security;
import org.apache.geronimo.security.deployment.SecurityBuilder;
import org.apache.geronimo.security.jacc.ComponentPermissions;

/**
 * Tests the JACC security for Tomcat
 *
 * @version $Revision$ $Date$
 */
public class JACCSecurityTest extends AbstractWebModuleTest {

    // Name of the web app started by startWebApp(); used by stopWebApp() to shut it down.
    ObjectName appName = null;

    /**
     * Test the explicit map feature. Only Alan should be able to log in.
     *
     * The test builds a security configuration whose only role mapping grants
     * "content-administrator" to the "it" group principal, deploys the secure web app
     * against localhost:8181, then drives form-based login over HTTP: user "alan" must
     * reach the protected resource, while user "izumi" (the default principal, not in
     * the mapped role) must be refused with HTTP 403.
     *
     * @throws Exception thrown if an error in the test occurs
     */
    public void testExplicitMapping() throws Exception {
        Security securityConfig = new Security();
        securityConfig.setUseContextHandler(false);

        // "izumi" is the default (unauthenticated) principal for the app.
        DefaultPrincipal defaultPrincipal = new DefaultPrincipal();
        PrincipalInfo principalInfo = new PrincipalInfo("org.apache.geronimo.security.realm.providers.GeronimoUserPrincipal", "izumi", false);
        defaultPrincipal.setPrincipal(principalInfo);

        securityConfig.setDefaultPrincipal(defaultPrincipal);

        // Map the "content-administrator" role to the "it" group principal only.
        Role role = new Role();
        role.setRoleName("content-administrator");
        principalInfo = new PrincipalInfo("org.apache.geronimo.security.realm.providers.GeronimoGroupPrincipal", "it", false);
        role.getPrincipals().add(principalInfo);

        securityConfig.getRoleMappings().put(role.getRoleName(), role);

        Map roleDesignates = new HashMap();
        Map principalRoleMap = new HashMap();
        buildPrincipalRoleMap(securityConfig, roleDesignates, principalRoleMap);

        // JACC permissions: the login page itself is excluded; everything under
        // /protected/* requires the "content-administrator" role.
        PermissionCollection uncheckedPermissions = new Permissions();

        PermissionCollection excludedPermissions = new Permissions();
        excludedPermissions.add(new WebResourcePermission("/auth/login.html", ""));
        excludedPermissions.add(new WebUserDataPermission("/auth/login.html", ""));

        Map rolePermissions = new HashMap();
        PermissionCollection permissions = new Permissions();
        permissions.add(new WebUserDataPermission("/protected/*", ""));
        permissions.add(new WebResourcePermission("/protected/*", ""));
        rolePermissions.put("content-administrator", permissions);
        rolePermissions.put("auto-administrator", permissions);

        ComponentPermissions componentPermissions = new ComponentPermissions(excludedPermissions, uncheckedPermissions, rolePermissions);

        startWebApp(roleDesignates, principalRoleMap, componentPermissions, defaultPrincipal, permissions);

        //Begin the test
        // Unauthenticated request: the container serves the login page (HTTP 200).
        HttpURLConnection connection = (HttpURLConnection) new URL("http://localhost:8181/securetest/protected/hello.txt").openConnection();
        connection.setInstanceFollowRedirects(false);
        assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());

        //Be sure we have been given the login page
        BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        assertEquals("<!-- Login Page -->", reader.readLine());
        reader.close();

        // Keep the session cookie (strip attributes after the first ';') so the
        // form-login POST and the follow-up request share one session.
        String cookie = connection.getHeaderField("Set-Cookie");
        cookie = cookie.substring(0, cookie.lastIndexOf(';'));
        String location = "http://localhost:8181/securetest/protected/j_security_check?j_username=alan&j_password=starcraft";

        // Form login as "alan"; a successful login redirects (302) to the original URL.
        connection = (HttpURLConnection) new URL(location).openConnection();
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Referer","http://localhost:8181/securetest/auth/logon.html?param=test");
        connection.setRequestProperty("Cookie", cookie);
        connection.setInstanceFollowRedirects(false);
        assertEquals(HttpURLConnection.HTTP_MOVED_TEMP, connection.getResponseCode());

        // Authenticated as alan (member of the mapped role): the resource is served.
        connection = (HttpURLConnection) new URL("http://localhost:8181/securetest/protected/hello.txt").openConnection();
        connection.setRequestProperty("Cookie", cookie);
        connection.setInstanceFollowRedirects(false);
        reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
        assertEquals("Hello World", reader.readLine());
        connection.disconnect();

        //Now lets try it with izumi
        connection = (HttpURLConnection) new URL("http://localhost:8181/securetest/protected/hello.txt").openConnection();
        connection.setInstanceFollowRedirects(false);
        assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());

        // New session for the second user.
        cookie = connection.getHeaderField("Set-Cookie");
        cookie = cookie.substring(0, cookie.lastIndexOf(';'));

        //Be sure we have been given the login page
        reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        assertEquals("<!-- Login Page -->", reader.readLine());
        reader.close();

        location = "http://localhost:8181/securetest/protected/j_security_check?j_username=izumi&j_password=violin";

        // Login as "izumi" succeeds (valid credentials), so the container redirects...
        connection = (HttpURLConnection) new URL(location).openConnection();
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Cookie", cookie);
        connection.setInstanceFollowRedirects(false);
        assertEquals(HttpURLConnection.HTTP_MOVED_TEMP, connection.getResponseCode());

        // ...but izumi is not in the "content-administrator" role, so the protected
        // resource must come back 403; HttpURLConnection surfaces that as an
        // IOException when the input stream is opened.
        try {
            connection = (HttpURLConnection) new URL("http://localhost:8181/securetest/protected/hello.txt").openConnection();
            connection.setRequestProperty("Cookie", cookie);
            connection.setInstanceFollowRedirects(false);
            reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
            fail("Should throw an IOException for HTTP 403 response");
        } catch (IOException e) {
            // expected: getInputStream() fails for the 403 response
        }
        assertEquals(HttpURLConnection.HTTP_FORBIDDEN, connection.getResponseCode());
        connection.disconnect();

        stopWebApp();
    }

    /** Deploys the secure web app context and remembers its ObjectName for teardown. */
    protected void startWebApp( Map roleDesignates, Map principalRoleMap, ComponentPermissions componentPermissions, DefaultPrincipal defaultPrincipal, PermissionCollection checked) throws Exception {
        appName = setUpSecureAppContext(roleDesignates, principalRoleMap, componentPermissions, defaultPrincipal, checked);
    }

    /** Stops the web app deployed by startWebApp(). */
    protected void stopWebApp() throws Exception {
        stop(appName);
    }

    /**
     * Builds a principal-to-roles map from the security config: delegates to
     * SecurityBuilder for the role-to-principals direction, then inverts it.
     */
    public void buildPrincipalRoleMap(Security security, Map roleDesignates, Map principalRoleMap) {
        Map roleToPrincipalMap = new HashMap();
        SecurityBuilder.buildRolePrincipalMap(security, roleDesignates, roleToPrincipalMap, getClass().getClassLoader());
        invertMap(roleToPrincipalMap, principalRoleMap);
    }

    /**
     * Inverts role->principals into principal->roles, accumulating a HashSet of role
     * names per principal in the supplied map (which is also returned).
     */
    private static Map invertMap(Map roleToPrincipalMap, Map principalRoleMapping) {
        for (Iterator roles = roleToPrincipalMap.entrySet().iterator(); roles.hasNext();) {
            Map.Entry entry = (Map.Entry) roles.next();
            String role = (String) entry.getKey();
            Set principals = (Set) entry.getValue();
            for (Iterator iter = principals.iterator(); iter.hasNext();) {
                java.security.Principal principal = (java.security.Principal) iter.next();
                HashSet roleSet = (HashSet) principalRoleMapping.get(principal);
                if (roleSet == null) {
                    roleSet = new HashSet();
                    principalRoleMapping.put(principal, roleSet);
                }
                roleSet.add(role);
            }
        }
        return principalRoleMapping;
    }

    /** Installs the Geronimo realm and security service before each test. */
    protected void setUp() throws Exception {
        super.setUp("org.apache.geronimo.tomcat.realm.TomcatGeronimoRealm");
        setUpSecurity();
    }

    /** Tears down security first, then the base test fixture. */
    protected void tearDown() throws Exception {
        tearDownSecurity();
        super.tearDown();
    }
}
/* * Copyright 2007-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Project: JGentleFramework */ package org.jgentleframework.configure.objectmeta; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.jgentleframework.configure.enums.Scope; import org.jgentleframework.configure.enums.Types; import org.jgentleframework.context.injecting.scope.ScopeInstance; import org.jgentleframework.core.factory.InOutDependencyException; import org.jgentleframework.reflection.Identification; import org.jgentleframework.utils.Assertor; import org.jgentleframework.utils.ReflectUtils; import org.jgentleframework.utils.data.Pair; /** * The Class ObjectBindingConstantImpl. * * @author LE QUOC CHUNG - mailto: <a * href="mailto:skydunkpro@yahoo.com">skydunkpro@yahoo.com</a> * @date Oct 22, 2007 */ class ObjectBindingConstantImpl implements ObjectBindingConstant { /** The ID. */ private String ID = ""; /** The properties list. */ private List<String> propertiesList = new ArrayList<String>(); /** The in class. */ private Class<?> inClass = null; /** The scope. */ private ScopeInstance scope = Scope.SINGLETON; /** Default value is <b>true</b>. */ private boolean lazyInit = true; /** The injected value list. */ private Map<String, Object> injectedValueList = new HashMap<String, Object>(); /** The annotated value list. 
*/ private Map<Types, List<Pair<Identification<?>, Object>>> annotatedValueList = new HashMap<Types, List<Pair<Identification<?>, Object>>>(); /** * Constructor. */ public ObjectBindingConstantImpl() { } /** * Constructor. * * @param pairs * the pairs */ public ObjectBindingConstantImpl(Pair<String, Object>... pairs) { Assertor .notNull(pairs, "[Assertion failed] - this 'values' argument must not be null !"); init(pairs); } /** * Constructor. * * @param values * an array containing pairs of key and value which represent * name of properties and its value need to be injected. */ public ObjectBindingConstantImpl(Object[]... values) { Assertor .notNull(values, "[Assertion failed] - this 'values' argument must not be null !"); for (Object[] valuePair : values) { if (valuePair.length != 2) { throw new InOutDependencyException( "Size of binding array is not valid !"); } if (!ReflectUtils.isCast(String.class, valuePair[0])) { throw new InOutDependencyException( "The key of binding array ('" + valuePair[0] + "') must be String type."); } Pair<String, Object> item = new Pair<String, Object>( (String) valuePair[0], valuePair[1]); init(item); } } /** * Constructor. * * @param map * the given map containing pairs of key and value which * represent name of properties and its value need to be * injected. */ public ObjectBindingConstantImpl(Map<String, Object> map) { Assertor .notNull( map, "[Assertion failed] - the given map containing pairs of key and value must not be null !"); for (Entry<String, Object> entry : map.entrySet()) { Pair<String, Object> item = new Pair<String, Object>( entry.getKey(), entry.getValue()); init(item); } } /** * Constructor. * * @param properties * the properties */ public ObjectBindingConstantImpl(String... 
properties) { propertiesList.clear(); for (String property : properties) { if (this.propertiesList.contains(property)) { throw new InOutDependencyException("Property " + property + " is duplicated."); } this.propertiesList.add(property); } } /** * Init method. * * @param pairs * the pairs */ protected void init(Pair<String, Object>... pairs) { for (Pair<String, Object> pair : pairs) { init(pair); } } /** * Init method. * * @param pair * the pair */ protected void init(Pair<String, Object> pair) { if (pair.getKeyPair().isEmpty()) { throw new InOutDependencyException( "The value name must not be empty."); } synchronized (injectedValueList) { if (injectedValueList.containsKey(pair.getKey())) { throw new InOutDependencyException("Value '" + pair.getKey() + "' is duplicated."); } this.injectedValueList.put(pair.getKeyPair(), pair.getValuePair()); } } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #to(java.lang.Object[]) */ @Override public InClass to(Object... 
values) { Assertor .notNull(values, "[Assertion failed] - this 'values' argument must not be null !"); InClass result = null; if (this.propertiesList.size() != values.length) { throw new InOutDependencyException("Values size is invalid."); } for (int i = 0; i < values.length; i++) { this.injectedValueList.put(this.propertiesList.get(i), values[i]); } result = new InClassImpl(this); return result; } /* * (non-Javadoc) * @seeorg.jgentleframework.configure.objectmeta.ObjectBindingConstant# * getPropertiesList() */ @Override public List<String> getPropertiesList() { return propertiesList; } /* * (non-Javadoc) * @seeorg.jgentleframework.configure.objectmeta.ObjectBindingConstant# * setPropertiesList(java.util.List) */ @Override public void setPropertiesList(List<String> propertiesList) { this.propertiesList = propertiesList; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectMeta.ObjectBindingConstant * #getInClass() */ @Override public Class<?> getInClass() { return inClass; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectMeta.ObjectBindingConstant * #setInClass(java.lang.Class) */ @Override public void setInClass(Class<?> inClass) { this.inClass = inClass; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectMeta.ObjectBindingConstant * #getID() */ @Override public String getID() { return ID; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectMeta.ObjectBindingConstant * #setID(java.lang.String) */ @Override public void setID(String id) { ID = id; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #getScope() */ @Override public ScopeInstance getScope() { return scope; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #setScope(org.jgentleframework.context.injecting.scope.ScopeInstance) */ @Override public void setScope(ScopeInstance scope) { this.scope = 
scope; } /* * (non-Javadoc) * @seeorg.jgentleframework.configure.objectmeta.IPresentLoadingClass# * getPresentLoadingClasses() */ @Override public ArrayList<Class<?>> getPresentLoadingClasses() { ArrayList<Class<?>> result = new ArrayList<Class<?>>(); result.add(this.inClass); return result; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #lazy_init(boolean) */ @Override public ObjectBindingConstant lazyInit(boolean lazyInit) { setLazyInit(lazyInit); return this; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #isLazy_init() */ @Override public boolean isLazyInit() { return lazyInit; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #setLazy_init(boolean) */ @Override public void setLazyInit(boolean lazyInit) { this.lazyInit = lazyInit; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #getInjectedValueList() */ @Override public Map<String, Object> getInjectedValueList() { return injectedValueList; } /* * (non-Javadoc) * @seeorg.jgentleframework.configure.objectmeta.ObjectBindingConstant# * setInjectedValueList(java.util.Map) */ @Override public void setInjectedValueList(Map<String, Object> injectedValueList) { this.injectedValueList = injectedValueList; } /* * (non-Javadoc) * @see * org.jgentleframework.configure.injecting.objectmeta.ObjectBindingConstant * #getAnnotatedValueList() */ @Override public Map<Types, List<Pair<Identification<?>, Object>>> getAnnotatedValueList() { return annotatedValueList; } /* * (non-Javadoc) * @seeorg.jgentleframework.configure.objectmeta.ObjectBindingConstant# * setAnnotatedValueList(java.util.Map) */ @Override public void setAnnotatedValueList( Map<Types, List<Pair<Identification<?>, Object>>> annotatedValueList) { this.annotatedValueList = annotatedValueList; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.coord.zk; import static org.apache.drill.shaded.guava.com.google.common.collect.Collections2.transform; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.ArrayList; import java.util.Set; import java.util.HashSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.curator.framework.imps.DefaultACLProvider; import org.apache.drill.shaded.guava.com.google.common.base.Throwables; import org.apache.commons.collections.keyvalue.MultiKey; import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.api.ACLProvider; import org.apache.curator.framework.state.ConnectionState; import org.apache.curator.framework.state.ConnectionStateListener; import org.apache.curator.retry.RetryNTimes; import org.apache.curator.x.discovery.ServiceCache; import org.apache.curator.x.discovery.ServiceDiscovery; import 
org.apache.curator.x.discovery.ServiceDiscoveryBuilder; import org.apache.curator.x.discovery.ServiceInstance; import org.apache.curator.x.discovery.details.ServiceCacheListener; import org.apache.drill.common.AutoCloseables; import org.apache.drill.common.config.DrillConfig; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.coord.ClusterCoordinator; import org.apache.drill.exec.coord.DistributedSemaphore; import org.apache.drill.exec.coord.DrillServiceInstanceHelper; import org.apache.drill.exec.coord.store.CachingTransientStoreFactory; import org.apache.drill.exec.coord.store.TransientStore; import org.apache.drill.exec.coord.store.TransientStoreConfig; import org.apache.drill.exec.coord.store.TransientStoreFactory; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint.State; import org.apache.drill.shaded.guava.com.google.common.base.Function; /** * Manages cluster coordination utilizing zookeeper. 
* */ public class ZKClusterCoordinator extends ClusterCoordinator { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ZKClusterCoordinator.class); private CuratorFramework curator; private ServiceDiscovery<DrillbitEndpoint> discovery; private volatile Collection<DrillbitEndpoint> endpoints = Collections.emptyList(); private final String serviceName; private final CountDownLatch initialConnection = new CountDownLatch(1); private final TransientStoreFactory factory; private ServiceCache<DrillbitEndpoint> serviceCache; private DrillbitEndpoint endpoint; // endpointsMap maps Multikey( comprises of endoint address and port) to Drillbit endpoints private ConcurrentHashMap<MultiKey, DrillbitEndpoint> endpointsMap = new ConcurrentHashMap<MultiKey,DrillbitEndpoint>(); private static final Pattern ZK_COMPLEX_STRING = Pattern.compile("(^.*?)/(.*)/([^/]*)$"); public ZKClusterCoordinator(DrillConfig config, String connect) { this(config, connect, new DefaultACLProvider()); } public ZKClusterCoordinator(DrillConfig config, ACLProvider aclProvider) { this(config, null, aclProvider); } public ZKClusterCoordinator(DrillConfig config, String connect, ACLProvider aclProvider) { connect = connect == null || connect.isEmpty() ? config.getString(ExecConstants.ZK_CONNECTION) : connect; String clusterId = config.getString(ExecConstants.SERVICE_NAME); String zkRoot = config.getString(ExecConstants.ZK_ROOT); // check if this is a complex zk string. If so, parse into components. 
Matcher m = ZK_COMPLEX_STRING.matcher(connect); if(m.matches()) { connect = m.group(1); zkRoot = m.group(2); clusterId = m.group(3); } logger.debug("Connect {}, zkRoot {}, clusterId: " + clusterId, connect, zkRoot); this.serviceName = clusterId; RetryPolicy rp = new RetryNTimes(config.getInt(ExecConstants.ZK_RETRY_TIMES), config.getInt(ExecConstants.ZK_RETRY_DELAY)); curator = CuratorFrameworkFactory.builder() .namespace(zkRoot) .connectionTimeoutMs(config.getInt(ExecConstants.ZK_TIMEOUT)) .retryPolicy(rp) .connectString(connect) .aclProvider(aclProvider) .build(); curator.getConnectionStateListenable().addListener(new InitialConnectionListener()); curator.start(); discovery = newDiscovery(); factory = CachingTransientStoreFactory.of(new ZkTransientStoreFactory(curator)); } public CuratorFramework getCurator() { return curator; } @Override public void start(long millisToWait) throws Exception { logger.debug("Starting ZKClusterCoordination."); discovery.start(); if(millisToWait != 0) { boolean success = this.initialConnection.await(millisToWait, TimeUnit.MILLISECONDS); if (!success) { throw new IOException(String.format("Failure to connect to the zookeeper cluster service within the allotted time of %d milliseconds.", millisToWait)); } }else{ this.initialConnection.await(); } serviceCache = discovery .serviceCacheBuilder() .name(serviceName) .build(); serviceCache.addListener(new EndpointListener()); serviceCache.start(); updateEndpoints(); } private class InitialConnectionListener implements ConnectionStateListener{ @Override public void stateChanged(CuratorFramework client, ConnectionState newState) { if(newState == ConnectionState.CONNECTED) { ZKClusterCoordinator.this.initialConnection.countDown(); client.getConnectionStateListenable().removeListener(this); } } } private class EndpointListener implements ServiceCacheListener { @Override public void stateChanged(CuratorFramework client, ConnectionState newState) { } @Override public void cacheChanged() { 
logger.debug("Got cache changed --> updating endpoints"); updateEndpoints(); } } @Override public void close() throws Exception { // discovery attempts to close its caches(ie serviceCache) already. however, being good citizens we make sure to // explicitly close serviceCache. Not only that we make sure to close serviceCache before discovery to prevent // double releasing and disallowing jvm to spit bothering warnings. simply put, we are great! AutoCloseables.close(serviceCache, discovery, curator, factory); } @Override public RegistrationHandle register(DrillbitEndpoint data) { try { data = data.toBuilder().setState(State.ONLINE).build(); ServiceInstance<DrillbitEndpoint> serviceInstance = newServiceInstance(data); discovery.registerService(serviceInstance); return new ZKRegistrationHandle(serviceInstance.getId(),data); } catch (Exception e) { Throwables.throwIfUnchecked(e); throw new RuntimeException(e); } } @Override public void unregister(RegistrationHandle handle) { if (!(handle instanceof ZKRegistrationHandle)) { throw new UnsupportedOperationException("Unknown handle type: " + handle.getClass().getName()); } // when Drillbit is unregistered, clean all the listeners registered in CC. this.listeners.clear(); ZKRegistrationHandle h = (ZKRegistrationHandle) handle; try { ServiceInstance<DrillbitEndpoint> serviceInstance = ServiceInstance.<DrillbitEndpoint>builder() .address("") .port(0) .id(h.id) .name(serviceName) .build(); discovery.unregisterService(serviceInstance); } catch (Exception e) { Throwables.throwIfUnchecked(e); throw new RuntimeException(e); } } /** * Update drillbit endpoint state. Drillbit advertises its * state in Zookeeper when a shutdown request of drillbit is * triggered. State information is used during planning and * initial client connection phases. 
*/
  public RegistrationHandle update(RegistrationHandle handle, State state) {
    ZKRegistrationHandle h = (ZKRegistrationHandle) handle;
    try {
      // Re-publish this bit's service entry carrying the new state so that peers
      // watching ZooKeeper observe the transition (e.g. ONLINE -> QUIESCENT).
      endpoint = h.endpoint.toBuilder().setState(state).build();
      ServiceInstance<DrillbitEndpoint> serviceInstance = ServiceInstance.<DrillbitEndpoint>builder()
          .name(serviceName)
          .id(h.id)
          .payload(endpoint)
          .build();
      discovery.updateService(serviceInstance);
    } catch (Exception e) {
      Throwables.throwIfUnchecked(e);
      throw new RuntimeException(e);
    }
    return handle;
  }

  @Override
  public Collection<DrillbitEndpoint> getAvailableEndpoints() {
    return this.endpoints;
  }

  /*
   * Get a collection of ONLINE Drillbit endpoints by excluding the drillbits
   * that are in QUIESCENT state (drillbits shutting down). Primarily used by the planner
   * to plan queries only on ONLINE drillbits and used by the client during initial connection
   * phase to connect to a drillbit (foreman)
   * @return A collection of ONLINE endpoints
   */
  @Override
  public Collection<DrillbitEndpoint> getOnlineEndPoints() {
    Collection<DrillbitEndpoint> runningEndPoints = new ArrayList<>();
    for (DrillbitEndpoint endpoint : endpoints) {
      if (isDrillbitInState(endpoint, State.ONLINE)) {
        runningEndPoints.add(endpoint);
      }
    }
    // Parameterized logging: the message is only rendered when debug is enabled.
    logger.debug("Online endpoints in ZK are {}", runningEndPoints);
    return runningEndPoints;
  }

  @Override
  public DistributedSemaphore getSemaphore(String name, int maximumLeases) {
    return new ZkDistributedSemaphore(curator, "/semaphore/" + name, maximumLeases);
  }

  @Override
  public <V> TransientStore<V> getOrCreateTransientStore(final TransientStoreConfig<V> config) {
    final ZkEphemeralStore<V> store = (ZkEphemeralStore<V>) factory.getOrCreateStore(config);
    return store;
  }

  /**
   * Refresh the cached endpoint map from ZooKeeper and notify listeners of bits
   * that have newly registered or unregistered since the last refresh.
   */
  private synchronized void updateEndpoints() {
    try {
      // All active bits currently registered in ZooKeeper.
      Collection<DrillbitEndpoint> newDrillbitSet = transform(discovery.queryForInstances(serviceName),
          new Function<ServiceInstance<DrillbitEndpoint>, DrillbitEndpoint>() {
            @Override
            public DrillbitEndpoint apply(ServiceInstance<DrillbitEndpoint> input) {
              return input.getPayload();
            }
          });

      // Set of newly dead bits : original bits - new set of active bits.
      Set<DrillbitEndpoint> unregisteredBits = new HashSet<>();
      // Set of newly live bits : new set of active bits - original bits.
      Set<DrillbitEndpoint> registeredBits = new HashSet<>();

      // Updates the endpoints map if there is a change in state of the endpoint or with the addition
      // of new drillbit endpoints. Registered endpoints is set to newly live drillbit endpoints.
      for (DrillbitEndpoint endpoint : newDrillbitSet) {
        String endpointAddress = endpoint.getAddress();
        int endpointPort = endpoint.getUserPort();
        if (!endpointsMap.containsKey(new MultiKey(endpointAddress, endpointPort))) {
          registeredBits.add(endpoint);
        }
        endpointsMap.put(new MultiKey(endpointAddress, endpointPort), endpoint);
      }

      // Collect the keys of newly dead endpoints first and remove them after the scan:
      // removing from endpointsMap while iterating over its keySet() view risks a
      // ConcurrentModificationException.
      ArrayList<MultiKey> deadKeys = new ArrayList<>();
      for (MultiKey key : endpointsMap.keySet()) {
        DrillbitEndpoint candidate = endpointsMap.get(key);
        if (!newDrillbitSet.contains(candidate)) {
          unregisteredBits.add(candidate);
          deadKeys.add(key);
        }
      }
      for (MultiKey key : deadKeys) {
        endpointsMap.remove(key);
      }

      endpoints = endpointsMap.values();

      if (logger.isDebugEnabled()) {
        StringBuilder builder = new StringBuilder();
        builder.append("Active drillbit set changed. Now includes ");
        builder.append(newDrillbitSet.size());
        builder.append(" total bits. New active drillbits:\n");
        builder.append("Address | User Port | Control Port | Data Port | Version | State\n");
        for (DrillbitEndpoint bit : newDrillbitSet) {
          builder.append(bit.getAddress()).append(" | ");
          builder.append(bit.getUserPort()).append(" | ");
          builder.append(bit.getControlPort()).append(" | ");
          builder.append(bit.getDataPort()).append(" | ");
          builder.append(bit.getVersion()).append(" | "); // consistent " | " column separator
          builder.append(bit.getState()).append(" | ");
          builder.append('\n');
        }
        logger.debug(builder.toString());
      }

      // Notify listeners of newly unregistered Drillbits.
      if (!unregisteredBits.isEmpty()) {
        drillbitUnregistered(unregisteredBits);
      }
      // Notify listeners of newly registered Drillbits.
      if (!registeredBits.isEmpty()) {
        drillbitRegistered(registeredBits);
      }
    } catch (Exception e) {
      logger.error("Failure while update Drillbit service location cache.", e);
    }
  }

  protected ServiceInstance<DrillbitEndpoint> newServiceInstance(DrillbitEndpoint endpoint) throws Exception {
    return ServiceInstance.<DrillbitEndpoint>builder()
        .name(serviceName)
        .payload(endpoint)
        .build();
  }

  protected ServiceDiscovery<DrillbitEndpoint> newDiscovery() {
    return ServiceDiscoveryBuilder
        .builder(DrillbitEndpoint.class)
        .basePath("/")
        .client(curator)
        .serializer(DrillServiceInstanceHelper.SERIALIZER)
        .build();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache30;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import javax.naming.Context;
import javax.transaction.UserTransaction;

import org.junit.Ignore;
import org.junit.Test;

import org.apache.geode.CopyHelper;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.CacheEvent;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheListener;
import org.apache.geode.cache.CacheLoader;
import org.apache.geode.cache.CacheLoaderException;
import org.apache.geode.cache.CacheTransactionManager;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.LoaderHelper;
import org.apache.geode.cache.PartitionAttributes;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.TransactionEvent;
import org.apache.geode.cache.TransactionListener;
import org.apache.geode.cache.query.IndexType;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.transaction.Person;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.cache.util.TransactionListenerAdapter;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;

/**
 * Test the order of operations done on the farside of a tx.
 *
 * @since GemFire 5.0
 */
public class TXOrderDUnitTest extends JUnit4CacheTestCase {

  // Operation selectors passed to doTest() to choose which tx op to exercise.
  private final int TEST_PUT = 0;
  private final int TEST_INVALIDATE = 1;
  private final int TEST_DESTROY = 2;

  private transient Region r;
  protected transient int invokeCount;

  // Keys in the order the far-side CacheListener is expected to see them.
  List expectedKeys;
  // Cursor into expectedKeys, advanced once per listener callback.
  int clCount = 0;

  /** Returns the peer VM used as the "other side" of the transaction. */
  private VM getOtherVm() {
    Host host = Host.getHost(0);
    return host.getVM(0);
  }

  /**
   * In the other VM, create a nested region hierarchy (r1/r2/r3) and commit a
   * transaction whose puts deliberately interleave across the three regions;
   * the far side must observe the events in this same order.
   */
  private void doCommitOtherVm() {
    VM vm = getOtherVm();
    vm.invoke(new CacheSerializableRunnable("create root") {
      public void run2() throws CacheException {
        AttributesFactory af = new AttributesFactory();
        af.setScope(Scope.DISTRIBUTED_ACK);
        Region r1 = createRootRegion("r1", af.create());
        Region r2 = r1.createSubregion("r2", af.create());
        Region r3 = r2.createSubregion("r3", af.create());
        CacheTransactionManager ctm = getCache().getCacheTransactionManager();
        ctm.begin();
        r2.put("b", "value1");
        r3.put("c", "value2");
        r1.put("a", "value3");
        r1.put("a2", "value4");
        r3.put("c2", "value5");
        r2.put("b2", "value6");
        ctm.commit();
      }
    });
  }

  /** Returns the next expected key and advances the listener cursor. */
  Object getCurrentExpectedKey() {
    Object result = this.expectedKeys.get(this.clCount);
    this.clCount += 1;
    return result;
  }

  /**
   * make sure listeners get invoked in correct order on far side of tx
   */
  @Test
  public void testFarSideOrder() throws CacheException {
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.REPLICATE);
    af.setScope(Scope.DISTRIBUTED_ACK);
    // CacheListener checks each afterCreate arrives in the expected key order.
    CacheListener cl1 = new CacheListenerAdapter() {
      public void afterCreate(EntryEvent e) {
        assertEquals(getCurrentExpectedKey(), e.getKey());
      }
    };
    af.addCacheListener(cl1);
    Region r1 = createRootRegion("r1", af.create());
    Region r2 = r1.createSubregion("r2", af.create());
    r2.createSubregion("r3", af.create());
    // TransactionListener checks the committed event list as a whole.
    TransactionListener tl1 = new TransactionListenerAdapter() {
      public void afterCommit(TransactionEvent e) {
        assertEquals(6, e.getEvents().size());
        ArrayList keys = new ArrayList();
        Iterator it = e.getEvents().iterator();
        while (it.hasNext()) {
          EntryEvent ee = (EntryEvent) it.next();
          keys.add(ee.getKey());
          assertEquals(null, ee.getCallbackArgument());
          assertEquals(true, ee.isCallbackArgumentAvailable());
        }
        assertEquals(TXOrderDUnitTest.this.expectedKeys, keys);
        TXOrderDUnitTest.this.invokeCount = 1;
      }
    };
    CacheTransactionManager ctm = getCache().getCacheTransactionManager();
    ctm.addListener(tl1);
    this.invokeCount = 0;
    this.clCount = 0;
    this.expectedKeys = Arrays.asList(new String[] {"b", "c", "a", "a2", "c2", "b2"});
    doCommitOtherVm();
    assertEquals(1, this.invokeCount);
    assertEquals(6, this.clCount);
  }

  /**
   * Tests fix for #40870 Remote CacheListeners invoke afterCreate with Operation.LOCAL_LOAD_CREATE
   * when create executed transactionally"
   */
  @Ignore("TODO: test is disabled")
  @Test
  public void testFarSideOpForLoad() throws Exception {
    Host host = Host.getHost(0);
    VM vm1 = host.getVM(0);
    VM vm2 = host.getVM(1);
    // vm1: loader-equipped region; local listener must see a local-load op.
    vm1.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        AttributesFactory af = new AttributesFactory();
        af.setDataPolicy(DataPolicy.REPLICATE);
        af.setScope(Scope.DISTRIBUTED_ACK);
        CacheListener cl1 = new CacheListenerAdapter() {
          public void afterCreate(EntryEvent e) {
            assertTrue(e.getOperation().isLocalLoad());
          }
        };
        af.addCacheListener(cl1);
        CacheLoader cl = new CacheLoader() {
          public Object load(LoaderHelper helper) throws CacheLoaderException {
            LogWriterUtils.getLogWriter().info("Loading value:" + helper.getKey() + "_value");
            return helper.getKey() + "_value";
          }

          public void close() {}
        };
        af.setCacheLoader(cl);
        createRootRegion("r1", af.create());
        return null;
      }
    });
    // vm2: remote listener must NOT see a local-load op for the same create.
    vm2.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        AttributesFactory af = new AttributesFactory();
        af.setDataPolicy(DataPolicy.REPLICATE);
        af.setScope(Scope.DISTRIBUTED_ACK);
        CacheListener cl1 = new CacheListenerAdapter() {
          public void afterCreate(EntryEvent e) {
            LogWriterUtils.getLogWriter().info("op:" + e.getOperation().toString());
            assertTrue(!e.getOperation().isLocalLoad());
          }
        };
        af.addCacheListener(cl1);
        createRootRegion("r1", af.create());
        return null;
      }
    });
    // Trigger the loader inside a transaction from vm1.
    vm1.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        Region r = getRootRegion("r1");
        getCache().getCacheTransactionManager().begin();
        r.get("obj_2");
        getCache().getCacheTransactionManager().commit();
        return null;
      }
    });
  }

  @Test
  public void testInternalRegionNotExposed() {
    Host host = Host.getHost(0);
    VM vm1 = host.getVM(0);
    VM vm2 = host.getVM(1);
    SerializableCallable createRegion = new SerializableCallable() {
      public Object call() throws Exception {
        ExposedRegionTransactionListener tl = new ExposedRegionTransactionListener();
        CacheTransactionManager ctm = getCache().getCacheTransactionManager();
        ctm.addListener(tl);
        ExposedRegionCacheListener cl = new ExposedRegionCacheListener();
        AttributesFactory af = new AttributesFactory();
        PartitionAttributes pa =
            new PartitionAttributesFactory().setRedundantCopies(1).setTotalNumBuckets(1).create();
        af.setPartitionAttributes(pa);
        af.addCacheListener(cl);
        Region pr = createRootRegion("testTxEventForRegion", af.create());
        return null;
      }
    };
    vm1.invoke(createRegion);
    vm2.invoke(createRegion);
    // Seed some entries outside the tx, then run create/update/invalidate/destroy inside one.
    vm1.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        Region pr = getRootRegion("testTxEventForRegion");
        CacheTransactionManager ctm = getCache().getCacheTransactionManager();
        pr.put(2, "tw");
        pr.put(3, "three");
        pr.put(4, "four");
        ctm.begin();
        pr.put(1, "one");
        pr.put(2, "two");
        pr.invalidate(3);
        pr.destroy(4);
        ctm.commit();
        return null;
      }
    });
    // Both listeners must only ever have seen the user-visible region path.
    SerializableCallable verifyListener = new SerializableCallable() {
      public Object call() throws Exception {
        Region pr = getRootRegion("testTxEventForRegion");
        CacheTransactionManager ctm = getCache().getCacheTransactionManager();
        ExposedRegionTransactionListener tl =
            (ExposedRegionTransactionListener) ctm.getListeners()[0];
        ExposedRegionCacheListener cl =
            (ExposedRegionCacheListener) pr.getAttributes().getCacheListeners()[0];
        assertFalse(tl.exceptionOccurred);
        assertFalse(cl.exceptionOccurred);
        return null;
      }
    };
    vm1.invoke(verifyListener);
    vm2.invoke(verifyListener);
  }

  /** Records whether any tx event exposed an internal (non-user) region. */
  private static class ExposedRegionTransactionListener extends TransactionListenerAdapter {
    private boolean exceptionOccurred = false;

    @Override
    public void afterCommit(TransactionEvent event) {
      List<CacheEvent<?, ?>> events = event.getEvents();
      for (CacheEvent<?, ?> e : events) {
        if (!"/testTxEventForRegion".equals(e.getRegion().getFullPath())) {
          exceptionOccurred = true;
        }
      }
    }
  }

  /** Records whether any cache event exposed an internal (non-user) region. */
  private static class ExposedRegionCacheListener extends CacheListenerAdapter {
    private boolean exceptionOccurred = false;

    @Override
    public void afterCreate(EntryEvent event) {
      verifyRegion(event);
    }

    @Override
    public void afterUpdate(EntryEvent event) {
      verifyRegion(event);
    }

    private void verifyRegion(EntryEvent event) {
      if (!"/testTxEventForRegion".equals(event.getRegion().getFullPath())) {
        exceptionOccurred = true;
      }
    }
  }

  /**
   * verify that queries on indexes work with transaction
   */
  @Test
  public void testFarSideIndexOnPut() throws Exception {
    doTest(TEST_PUT);
  }

  @Test
  public void testFarSideIndexOnInvalidate() throws Exception {
    doTest(TEST_INVALIDATE);
  }

  @Test
  public void testFarSideIndexOnDestroy() throws Exception {
    doTest(TEST_DESTROY);
  }

  /**
   * Create an indexed region in two VMs, transactionally insert then remove a
   * matching entry in vm1 using the given op, and verify the index-backed query
   * sees consistent results in both VMs.
   */
  private void doTest(final int op) throws Exception {
    Host host = Host.getHost(0);
    VM vm1 = host.getVM(0);
    VM vm2 = host.getVM(1);
    SerializableCallable createRegionAndIndex = new SerializableCallable() {
      public Object call() throws Exception {
        AttributesFactory af = new AttributesFactory();
        af.setDataPolicy(DataPolicy.REPLICATE);
        af.setScope(Scope.DISTRIBUTED_ACK);
        Region region = createRootRegion("sample", af.create());
        QueryService qs = getCache().getQueryService();
        qs.createIndex("foo", IndexType.FUNCTIONAL, "age", "/sample");
        return null;
      }
    };
    vm1.invoke(createRegionAndIndex);
    vm2.invoke(createRegionAndIndex);
    // do transactional puts in vm1
    vm1.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        Context ctx = getCache().getJNDIContext();
        UserTransaction utx = (UserTransaction) ctx.lookup("java:/UserTransaction");
        Region region = getRootRegion("sample");
        Integer x = new Integer(0);
        utx.begin();
        region.create(x, new Person("xyz", 45));
        utx.commit();
        QueryService qs = getCache().getQueryService();
        Query q = qs.newQuery("select * from /sample where age < 50");
        assertEquals(1, ((SelectResults) q.execute()).size());
        // Work on a copy so the cached value isn't mutated outside the tx.
        Person dsample = (Person) CopyHelper.copy(region.get(x));
        dsample.setAge(55);
        utx.begin();
        switch (op) {
          case TEST_PUT:
            region.put(x, dsample);
            break;
          case TEST_INVALIDATE:
            region.invalidate(x);
            break;
          case TEST_DESTROY:
            region.destroy(x);
            break;
          default:
            fail("unknown op");
        }
        utx.commit();
        assertEquals(0, ((SelectResults) q.execute()).size());
        return null;
      }
    });
    // run query and verify results in other vm
    vm2.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        QueryService qs = getCache().getQueryService();
        Query q = qs.newQuery("select * from /sample where age < 50");
        assertEquals(0, ((SelectResults) q.execute()).size());
        return null;
      }
    });
  }

  @Test
  public void testBug43353() {
    Host host = Host.getHost(0);
    VM vm1 = host.getVM(0);
    VM vm2 = host.getVM(1);

    SerializableCallable createRegion = new SerializableCallable() {
      public Object call() throws Exception {
        getCache().createRegionFactory(RegionShortcut.REPLICATE).create(getTestMethodName());
        return null;
      }
    };

    vm1.invoke(createRegion);
    vm2.invoke(createRegion);

    // Mix a byte[] put with an invalidate in the same tx and verify far-side state.
    vm1.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        Region r = getCache().getRegion(getTestMethodName());
        r.put("ikey", "value");
        getCache().getCacheTransactionManager().begin();
        r.put("key1", new byte[20]);
        r.invalidate("ikey");
        getCache().getCacheTransactionManager().commit();
        return null;
      }
    });

    vm2.invoke(new SerializableCallable() {
      public Object call() throws Exception {
        Region r = getCache().getRegion(getTestMethodName());
        Object v = r.get("key1");
        assertNotNull(v);
        assertTrue(v instanceof byte[]);
        assertNull(r.get("ikey"));
        return null;
      }
    });
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.reindex;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.bulk.Retry;
import org.elasticsearch.client.ParentTaskAssigningClient;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableList;
import static org.elasticsearch.action.bulk.BackoffPolicy.exponentialBackoff;
import static org.elasticsearch.common.unit.TimeValue.timeValueNanos;
import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES;
import static org.elasticsearch.rest.RestStatus.CONFLICT;
import static org.elasticsearch.search.sort.SortBuilders.fieldSort;

/**
 * Abstract base for scrolling across a search and executing bulk actions on all results. All package private methods are package private so
 * their tests can use them. Most methods run in the listener thread pool because they are meant to be fast and don't expect to block.
 */
public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBulkByScrollRequest<Request>> {
    protected final Logger logger;
    protected final WorkingBulkByScrollTask task;
    protected final ThreadPool threadPool;
    /**
     * The request for this action. Named mainRequest because we create lots of <code>request</code> variables all representing child
     * requests of this mainRequest.
     */
    protected final Request mainRequest;

    // Nanotime when start() fired the initial search; -1 until then. Used for the "took" value.
    private final AtomicLong startTime = new AtomicLong(-1);
    // Indices modified by this request; refreshed at the end when the request asks for refresh.
    private final Set<String> destinationIndices = Collections.newSetFromMap(new ConcurrentHashMap<>());

    private final ParentTaskAssigningClient client;
    private final ActionListener<BulkIndexByScrollResponse> listener;
    private final Retry bulkRetry;
    private final ScrollableHitSource scrollSource;

    public AbstractAsyncBulkByScrollAction(WorkingBulkByScrollTask task, Logger logger, ParentTaskAssigningClient client,
            ThreadPool threadPool, Request mainRequest, ActionListener<BulkIndexByScrollResponse> listener) {
        this.task = task;
        this.logger = logger;
        this.client = client;
        this.threadPool = threadPool;
        this.mainRequest = mainRequest;
        this.listener = listener;
        BackoffPolicy backoffPolicy = buildBackoffPolicy();
        // Retry bulk rejections with the configured backoff, counting each retry on the task.
        bulkRetry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.wrap(backoffPolicy, task::countBulkRetry));
        scrollSource = buildScrollableResultSource(backoffPolicy);
        /*
         * Default to sorting by doc. We can't do this in the request itself because it is normal to *add* to the sorts rather than replace
         * them and if we add _doc as the first sort by default then sorts will never work.... So we add it here, only if there isn't
         * another sort.
         */
        List<SortBuilder<?>> sorts = mainRequest.getSearchRequest().source().sorts();
        if (sorts == null || sorts.isEmpty()) {
            mainRequest.getSearchRequest().source().sort(fieldSort("_doc"));
        }
        mainRequest.getSearchRequest().source().version(needsSourceDocumentVersions());
    }

    /**
     * Does this operation need the versions of the source documents?
     */
    protected abstract boolean needsSourceDocumentVersions();

    // Build the bulk request for a batch of scroll hits; implemented per action (reindex, update-by-query, ...).
    protected abstract BulkRequest buildBulk(Iterable<? extends ScrollableHitSource.Hit> docs);

    protected ScrollableHitSource buildScrollableResultSource(BackoffPolicy backoffPolicy) {
        return new ClientScrollableHitSource(logger, backoffPolicy, threadPool, task::countSearchRetry, this::finishHim, client,
                mainRequest.getSearchRequest());
    }

    /**
     * Build the response for reindex actions.
     */
    protected BulkIndexByScrollResponse buildResponse(TimeValue took, List<BulkItemResponse.Failure> indexingFailures,
            List<SearchFailure> searchFailures, boolean timedOut) {
        return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut);
    }

    /**
     * Start the action by firing the initial search request.
     */
    public void start() {
        if (task.isCancelled()) {
            finishHim(null);
            return;
        }
        try {
            startTime.set(System.nanoTime());
            scrollSource.start(response -> onScrollResponse(timeValueNanos(System.nanoTime()), 0, response));
        } catch (Exception e) {
            finishHim(e);
        }
    }

    /**
     * Process a scroll response.
     * @param lastBatchStartTime the time when the last batch started. Used to calculate the throttling delay.
     * @param lastBatchSize the size of the last batch. Used to calculate the throttling delay.
     * @param response the scroll response to process
     */
    void onScrollResponse(TimeValue lastBatchStartTime, int lastBatchSize, ScrollableHitSource.Response response) {
        if (task.isCancelled()) {
            finishHim(null);
            return;
        }
        if (    // If any of the shards failed that should abort the request.
                (response.getFailures().size() > 0)
                // Timeouts aren't shard failures but we still need to pass them back to the user.
                || response.isTimedOut()
                ) {
            refreshAndFinish(emptyList(), response.getFailures(), response.isTimedOut());
            return;
        }
        long total = response.getTotalHits();
        if (mainRequest.getSize() > 0) {
            total = min(total, mainRequest.getSize());
        }
        task.setTotal(total);
        AbstractRunnable prepareBulkRequestRunnable = new AbstractRunnable() {
            @Override
            protected void doRun() throws Exception {
                /*
                 * It is important that the batch start time be calculated from here, scroll response to scroll response. That way the time
                 * waiting on the scroll doesn't count against this batch in the throttle.
                 */
                prepareBulkRequest(timeValueNanos(System.nanoTime()), response);
            }

            @Override
            public void onFailure(Exception e) {
                finishHim(e);
            }
        };
        // Carry the caller's thread context into the (possibly delayed) bulk preparation.
        prepareBulkRequestRunnable = (AbstractRunnable) threadPool.getThreadContext().preserveContext(prepareBulkRequestRunnable);
        task.delayPrepareBulkRequest(threadPool, lastBatchStartTime, lastBatchSize, prepareBulkRequestRunnable);
    }

    /**
     * Prepare the bulk request. Called on the generic thread pool after some preflight checks have been done on the SearchResponse and any
     * delay has been slept. Uses the generic thread pool because reindex is rare enough not to need its own thread pool and because the
     * thread may be blocked by the user script.
     */
    void prepareBulkRequest(TimeValue thisBatchStartTime, ScrollableHitSource.Response response) {
        if (task.isCancelled()) {
            finishHim(null);
            return;
        }
        if (response.getHits().isEmpty()) {
            refreshAndFinish(emptyList(), emptyList(), false);
            return;
        }
        task.countBatch();
        List<? extends ScrollableHitSource.Hit> hits = response.getHits();
        if (mainRequest.getSize() != SIZE_ALL_MATCHES) {
            // Truncate the hits if we have more than the request size
            long remaining = max(0, mainRequest.getSize() - task.getSuccessfullyProcessed());
            if (remaining < hits.size()) {
                hits = hits.subList(0, (int) remaining);
            }
        }
        BulkRequest request = buildBulk(hits);
        if (request.requests().isEmpty()) {
            /*
             * If we noop-ed the entire batch then just skip to the next batch or the BulkRequest would fail validation.
             */
            startNextScroll(thisBatchStartTime, 0);
            return;
        }
        request.timeout(mainRequest.getTimeout());
        request.waitForActiveShards(mainRequest.getWaitForActiveShards());
        if (logger.isDebugEnabled()) {
            logger.debug("sending [{}] entry, [{}] bulk request", request.requests().size(),
                    new ByteSizeValue(request.estimatedSizeInBytes()));
        }
        sendBulkRequest(thisBatchStartTime, request);
    }

    /**
     * Send a bulk request, handling retries.
     */
    void sendBulkRequest(TimeValue thisBatchStartTime, BulkRequest request) {
        if (task.isCancelled()) {
            finishHim(null);
            return;
        }
        bulkRetry.withAsyncBackoff(client, request, new ActionListener<BulkResponse>() {
            @Override
            public void onResponse(BulkResponse response) {
                onBulkResponse(thisBatchStartTime, response);
            }

            @Override
            public void onFailure(Exception e) {
                finishHim(e);
            }
        });
    }

    /**
     * Processes bulk responses, accounting for failures.
     */
    void onBulkResponse(TimeValue thisBatchStartTime, BulkResponse response) {
        try {
            List<Failure> failures = new ArrayList<Failure>();
            Set<String> destinationIndicesThisBatch = new HashSet<>();
            for (BulkItemResponse item : response) {
                if (item.isFailed()) {
                    recordFailure(item.getFailure(), failures);
                    continue;
                }
                switch (item.getOpType()) {
                    case CREATE:
                    case INDEX:
                        if (item.getResponse().getResult() == DocWriteResponse.Result.CREATED) {
                            task.countCreated();
                        } else {
                            task.countUpdated();
                        }
                        break;
                    case UPDATE:
                        task.countUpdated();
                        break;
                    case DELETE:
                        task.countDeleted();
                        break;
                }
                // Track the indexes we've seen so we can refresh them if requested
                destinationIndicesThisBatch.add(item.getIndex());
            }

            if (task.isCancelled()) {
                finishHim(null);
                return;
            }

            addDestinationIndices(destinationIndicesThisBatch);

            if (false == failures.isEmpty()) {
                refreshAndFinish(unmodifiableList(failures), emptyList(), false);
                return;
            }

            if (mainRequest.getSize() != SIZE_ALL_MATCHES && task.getSuccessfullyProcessed() >= mainRequest.getSize()) {
                // We've processed all the requested docs.
                refreshAndFinish(emptyList(), emptyList(), false);
                return;
            }

            startNextScroll(thisBatchStartTime, response.getItems().length);
        } catch (Exception t) {
            finishHim(t);
        }
    }

    /**
     * Start the next scroll request.
     *
     * @param lastBatchSize the number of requests sent in the last batch. This is used to calculate the throttling values which are applied
     *        when the scroll returns
     */
    void startNextScroll(TimeValue lastBatchStartTime, int lastBatchSize) {
        if (task.isCancelled()) {
            finishHim(null);
            return;
        }
        TimeValue extraKeepAlive = task.throttleWaitTime(lastBatchStartTime, lastBatchSize);
        scrollSource.startNextScroll(extraKeepAlive, response -> {
            onScrollResponse(lastBatchStartTime, lastBatchSize, response);
        });
    }

    // Version conflicts only count as failures when the request aborts on them.
    private void recordFailure(Failure failure, List<Failure> failures) {
        if (failure.getStatus() == CONFLICT) {
            task.countVersionConflict();
            if (false == mainRequest.isAbortOnVersionConflict()) {
                return;
            }
        }
        failures.add(failure);
    }

    /**
     * Start terminating a request that finished non-catastrophically by refreshing the modified indices and then proceeding to
     * {@link #finishHim(Exception, List, List, boolean)}.
     */
    void refreshAndFinish(List<Failure> indexingFailures, List<SearchFailure> searchFailures, boolean timedOut) {
        if (task.isCancelled() || false == mainRequest.isRefresh() || destinationIndices.isEmpty()) {
            finishHim(null, indexingFailures, searchFailures, timedOut);
            return;
        }
        RefreshRequest refresh = new RefreshRequest();
        refresh.indices(destinationIndices.toArray(new String[destinationIndices.size()]));
        client.admin().indices().refresh(refresh, new ActionListener<RefreshResponse>() {
            @Override
            public void onResponse(RefreshResponse response) {
                finishHim(null, indexingFailures, searchFailures, timedOut);
            }

            @Override
            public void onFailure(Exception e) {
                finishHim(e);
            }
        });
    }

    /**
     * Finish the request.
     *
     * @param failure if non null then the request failed catastrophically with this exception
     */
    void finishHim(Exception failure) {
        finishHim(failure, emptyList(), emptyList(), false);
    }

    /**
     * Finish the request.
     * @param failure if non null then the request failed catastrophically with this exception
     * @param indexingFailures any indexing failures accumulated during the request
     * @param searchFailures any search failures accumulated during the request
     * @param timedOut have any of the sub-requests timed out?
     */
    void finishHim(Exception failure, List<Failure> indexingFailures, List<SearchFailure> searchFailures, boolean timedOut) {
        scrollSource.close();
        if (failure == null) {
            listener.onResponse(
                    buildResponse(timeValueNanos(System.nanoTime() - startTime.get()), indexingFailures, searchFailures, timedOut));
        } else {
            listener.onFailure(failure);
        }
    }

    /**
     * Get the backoff policy for use with retries.
     */
    BackoffPolicy buildBackoffPolicy() {
        return exponentialBackoff(mainRequest.getRetryBackoffInitialTime(), mainRequest.getMaxRetries());
    }

    /**
     * Add to the list of indices that were modified by this request. This is the list of indices refreshed at the end of the request if the
     * request asks for a refresh.
     */
    void addDestinationIndices(Collection<String> indices) {
        destinationIndices.addAll(indices);
    }

    /**
     * Set the last returned scrollId. Exists entirely for testing.
     */
    void setScroll(String scroll) {
        scrollSource.setScroll(scroll);
    }
}
/* Generated By:JavaCC: Do not edit this line. AtmelParserConstants.java */
package avrora.syntax.atmel;

/**
 * Token-kind constants for the JavaCC-generated Atmel AVR assembly parser.
 * NOTE: this file is generated by JavaCC from the grammar; regenerate rather
 * than editing by hand.
 */
public interface AtmelParserConstants {

  // Structural and literal token kinds.
  int EOF = 0;
  int SINGLE_LINE_COMMENT = 7;
  int INTEGER_LITERAL = 9;
  int DECIMAL_LITERAL = 10;
  int HEX_LITERAL = 11;
  int BIN_LITERAL = 12;
  int OCTAL_LITERAL = 13;
  int CHARACTER_LITERAL = 14;
  int STRING_LITERAL = 15;

  // Built-in assembler functions (low/high byte extraction, paging, etc.).
  int LOW = 16; int HIGH = 17; int LO8 = 18; int HI8 = 19;
  int BYTE2 = 20; int BYTE3 = 21; int BYTE4 = 22;
  int LWRD = 23; int HWRD = 24; int PAGE = 25; int EXP2 = 26; int LOG2 = 27;

  // AVR instruction mnemonics.
  int ADD = 28; int ADC = 29; int ADIW = 30; int AND = 31; int ANDI = 32; int ASR = 33;
  int BCLR = 34; int BLD = 35; int BRBC = 36; int BRBS = 37; int BRCC = 38; int BRCS = 39;
  int BREAK = 40; int BREQ = 41; int BRGE = 42; int BRHC = 43; int BRHS = 44; int BRID = 45;
  int BRIE = 46; int BRLO = 47; int BRLT = 48; int BRMI = 49; int BRNE = 50; int BRPL = 51;
  int BRSH = 52; int BRTC = 53; int BRTS = 54; int BRVC = 55; int BRVS = 56; int BSET = 57;
  int BST = 58; int CALL = 59; int CBI = 60; int CBR = 61; int CLC = 62; int CLH = 63;
  int CLI = 64; int CLN = 65; int CLR = 66; int CLS = 67; int CLT = 68; int CLV = 69;
  int CLZ = 70; int COM = 71; int CP = 72; int CPC = 73; int CPI = 74; int CPSE = 75;
  int DEC = 76; int EICALL = 77; int EIJMP = 78; int ELPM = 79; int EOR = 80; int FMUL = 81;
  int FMULS = 82; int FMULSU = 83; int ICALL = 84; int IJMP = 85; int IN = 86; int INC = 87;
  int JMP = 88; int LD = 89; int LDD = 90; int LDI = 91; int LDS = 92; int LPM = 93;
  int LSL = 94; int LSR = 95; int MOV = 96; int MOVW = 97; int MUL = 98; int MULS = 99;
  int MULSU = 100; int NEG = 101; int NOP = 102; int OR = 103; int ORI = 104; int OUT = 105;
  int POP = 106; int PUSH = 107; int RCALL = 108; int RET = 109; int RETI = 110; int RJMP = 111;
  int ROL = 112; int ROR = 113; int SBC = 114; int SBCI = 115; int SBI = 116; int SBIC = 117;
  int SBIS = 118; int SBIW = 119; int SBR = 120; int SBRC = 121; int SBRS = 122; int SEC = 123;
  int SEH = 124; int SEI = 125; int SEN = 126; int SER = 127; int SES = 128; int SET = 129;
  int SEV = 130; int SEZ = 131; int SLEEP = 132; int SPM = 133; int ST = 134; int STD = 135;
  int STS = 136; int SUB = 137; int SUBI = 138; int SWAP = 139; int TST = 140; int WDR = 141;

  // Identifier and character-class token kinds.
  int IDENTIFIER = 142;
  int LETTER = 143;
  int DIGIT = 144;

  // Lexical states of the generated token manager.
  int DEFAULT = 0;
  int IN_SINGLE_LINE_COMMENT = 1;

  // Printable image of each token kind, indexed by the constants above.
  String[] tokenImage = {
    "<EOF>", "\" \"", "\"\\t\"", "\"\\n\"", "\"\\r\"", "\"\\f\"", "\";\"",
    "<SINGLE_LINE_COMMENT>", "<token of kind 8>", "<INTEGER_LITERAL>",
    "<DECIMAL_LITERAL>", "<HEX_LITERAL>", "<BIN_LITERAL>", "<OCTAL_LITERAL>",
    "<CHARACTER_LITERAL>", "<STRING_LITERAL>",
    "\"low\"", "\"high\"", "\"lo8\"", "\"hi8\"", "\"byte2\"", "\"byte3\"", "\"byte4\"",
    "\"lwrd\"", "\"hwrd\"", "\"page\"", "\"exp2\"", "\"log2\"",
    "\"add\"", "\"adc\"", "\"adiw\"", "\"and\"", "\"andi\"", "\"asr\"", "\"bclr\"",
    "\"bld\"", "\"brbc\"", "\"brbs\"", "\"brcc\"", "\"brcs\"", "\"break\"", "\"breq\"",
    "\"brge\"", "\"brhc\"", "\"brhs\"", "\"brid\"", "\"brie\"", "\"brlo\"", "\"brlt\"",
    "\"brmi\"", "\"brne\"", "\"brpl\"", "\"brsh\"", "\"brtc\"", "\"brts\"", "\"brvc\"",
    "\"brvs\"", "\"bset\"", "\"bst\"", "\"call\"", "\"cbi\"", "\"cbr\"", "\"clc\"",
    "\"clh\"", "\"cli\"", "\"cln\"", "\"clr\"", "\"cls\"", "\"clt\"", "\"clv\"",
    "\"clz\"", "\"com\"", "\"cp\"", "\"cpc\"", "\"cpi\"", "\"cpse\"", "\"dec\"",
    "\"eicall\"", "\"eijmp\"", "\"elpm\"", "\"eor\"", "\"fmul\"", "\"fmuls\"",
    "\"fmulsu\"", "\"icall\"", "\"ijmp\"", "\"in\"", "\"inc\"", "\"jmp\"", "\"ld\"",
    "\"ldd\"", "\"ldi\"", "\"lds\"", "\"lpm\"", "\"lsl\"", "\"lsr\"", "\"mov\"",
    "\"movw\"", "\"mul\"", "\"muls\"", "\"mulsu\"", "\"neg\"", "\"nop\"", "\"or\"",
    "\"ori\"", "\"out\"", "\"pop\"", "\"push\"", "\"rcall\"", "\"ret\"", "\"reti\"",
    "\"rjmp\"", "\"rol\"", "\"ror\"", "\"sbc\"", "\"sbci\"", "\"sbi\"", "\"sbic\"",
    "\"sbis\"", "\"sbiw\"", "\"sbr\"", "\"sbrc\"", "\"sbrs\"", "\"sec\"", "\"seh\"",
    "\"sei\"", "\"sen\"", "\"ser\"", "\"ses\"", "\"set\"", "\"sev\"", "\"sez\"",
    "\"sleep\"", "\"spm\"", "\"st\"", "\"std\"", "\"sts\"", "\"sub\"", "\"subi\"",
    "\"swap\"", "\"tst\"", "\"wdr\"",
    "<IDENTIFIER>", "<LETTER>", "<DIGIT>",
    "\",\"", "\"+\"", "\"-\"", "\":\"", "\".equ\"", "\"=\"", "\".org\"", "\".byte\"",
    "\".db\"", "\".dw\"", "\".dd\"", "\".def\"", "\".include\"", "\".exit\"",
    "\".nolist\"", "\".list\"", "\".dseg\"", "\".cseg\"", "\".eseg\"",
    "\"||\"", "\"&&\"", "\"|\"", "\"^\"", "\"&\"", "\"==\"", "\"!=\"", "\">\"",
    "\">=\"", "\"<\"", "\"<=\"", "\"<<\"", "\">>\"", "\"*\"", "\"/\"", "\"!\"",
    "\"~\"", "\"(\"", "\")\"",
  };

}
package com.jetbrains.edu.learning.actions;

import com.intellij.execution.ExecutionException;
import com.intellij.execution.process.CapturingProcessHandler;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.ide.projectView.ProjectView;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.KeyboardShortcut;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.TaskInfo;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.BalloonBuilder;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.ex.StatusBarEx;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.jetbrains.edu.EduDocumentListener;
import com.jetbrains.edu.EduUtils;
import com.jetbrains.edu.courseFormat.AnswerPlaceholder;
import com.jetbrains.edu.courseFormat.StudyStatus;
import com.jetbrains.edu.courseFormat.Task;
import com.jetbrains.edu.courseFormat.TaskFile;
import com.jetbrains.edu.learning.StudyState;
import com.jetbrains.edu.learning.StudyTaskManager;
import com.jetbrains.edu.learning.StudyUtils;
import com.jetbrains.edu.learning.editor.StudyEditor;
import com.jetbrains.edu.learning.navigation.StudyNavigator;
import com.jetbrains.edu.learning.run.StudySmartChecker;
import com.jetbrains.edu.learning.run.StudyTestRunner;
import com.jetbrains.edu.stepic.EduStepicConnector;
import com.jetbrains.edu.stepic.StudySettings;
import icons.InteractiveLearningIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * Action that checks the student's current task: it flushes placeholder "windows" to disk,
 * runs the task's test process in a background {@link com.intellij.openapi.progress.Task.Backgroundable},
 * posts the attempt result to Stepik, and updates per-file study statuses and the UI.
 */
public class StudyCheckAction extends DumbAwareAction {
  private static final Logger LOG = Logger.getInstance(StudyCheckAction.class.getName());
  // Suffix appended to the copied file that has all answer placeholders filled with
  // their "possible answer" text (see getCopyWithAnswers).
  private static final String ANSWERS_POSTFIX = "_answers";
  public static final String ACTION_ID = "CheckAction";
  public static final String SHORTCUT = "ctrl alt pressed ENTER";
  // True while a background check is running; update() disables the action meanwhile.
  // NOTE(review): read/written from action update, a write action and background-task
  // callbacks without synchronization — confirm all accesses happen on the EDT.
  boolean checkInProgress = false;

  public StudyCheckAction() {
    // Action text includes the human-readable shortcut, e.g. "Check Task (Ctrl+Alt+Enter)".
    super("Check Task (" + KeymapUtil.getShortcutText(new KeyboardShortcut(KeyStroke.getKeyStroke(SHORTCUT), null)) + ")", "Check current task", InteractiveLearningIcons.Resolve);
  }

  /** Flushes every task file's placeholder windows to its file under {@code taskDir}. */
  private static void flushWindows(@NotNull final Task task, @NotNull final VirtualFile taskDir) {
    for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
      String name = entry.getKey();
      TaskFile taskFile = entry.getValue();
      VirtualFile virtualFile = taskDir.findChild(name);
      if (virtualFile == null) {
        // File listed in the task but missing on disk — nothing to flush.
        continue;
      }
      EduUtils.flushWindows(taskFile, virtualFile, true);
    }
  }

  /** Redraws answer-placeholder highlights in every open study editor of the task. */
  private static void drawAllPlaceholders(@NotNull final Project project, @NotNull final Task task, @NotNull final VirtualFile taskDir) {
    for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
      String name = entry.getKey();
      TaskFile taskFile = entry.getValue();
      VirtualFile virtualFile = taskDir.findChild(name);
      if (virtualFile == null) {
        continue;
      }
      FileEditor fileEditor = FileEditorManager.getInstance(project).getSelectedEditor(virtualFile);
      // Only study editors know how to render placeholder windows.
      if (fileEditor instanceof StudyEditor) {
        StudyEditor studyEditor = (StudyEditor)fileEditor;
        StudyUtils.drawAllWindows(studyEditor.getEditor(), taskFile);
      }
    }
  }

  /**
   * Entry point of the check: validates the current study editor state, runs the task's
   * "run" action, spawns the test process and hands it to a background progress task.
   * Bails out silently when indexing is in progress, no study editor is selected,
   * another background process is already running, or the test process cannot be created.
   */
  public void check(@NotNull final Project project) {
    if (DumbService.isDumb(project)) {
      DumbService.getInstance(project).showDumbModeNotification("Check Action is not available while indexing is in progress");
      return;
    }
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
      @Override
      public void run() {
        // Undo-transparent: flushing windows must not pollute the user's undo stack.
        CommandProcessor.getInstance().runUndoTransparentAction(new Runnable() {
          @Override
          public void run() {
            final StudyEditor selectedEditor = StudyUtils.getSelectedStudyEditor(project);
            if (selectedEditor == null) return;
            final StudyState studyState = new StudyState(selectedEditor);
            if (!studyState.isValid()) {
              LOG.error("StudyCheckAction was invoked outside study editor");
              return;
            }
            // Refuse to start while any other background process is shown in the status bar.
            final IdeFrame frame = ((WindowManagerEx)WindowManager.getInstance()).findFrameFor(project);
            final StatusBarEx statusBar = frame == null ? null : (StatusBarEx)frame.getStatusBar();
            if (statusBar != null) {
              final List<Pair<TaskInfo, ProgressIndicator>> processes = statusBar.getBackgroundProcesses();
              if (!processes.isEmpty()) return;
            }
            final Task task = studyState.getTask();
            final VirtualFile taskDir = studyState.getTaskDir();
            // Persist placeholder windows before running tests against the files.
            flushWindows(task, taskDir);
            final StudyRunAction runAction = (StudyRunAction)ActionManager.getInstance().getAction(StudyRunAction.ACTION_ID);
            if (runAction == null) {
              return;
            }
            runAction.run(project);
            // Return focus to the editor after the run action finishes.
            ApplicationManager.getApplication().invokeLater(new Runnable() {
              @Override
              public void run() {
                IdeFocusManager.getInstance(project).requestFocus(studyState.getEditor().getComponent(), true);
              }
            });
            final StudyTestRunner testRunner = StudyUtils.getTestRunner(task, taskDir);
            Process testProcess = null;
            String commandLine = "";
            try {
              final VirtualFile executablePath = getTaskVirtualFile(studyState, task, taskDir);
              if (executablePath != null) {
                commandLine = executablePath.getPath();
                testProcess = testRunner.createCheckProcess(project, commandLine);
              }
            }
            catch (ExecutionException e) {
              LOG.error(e);
            }
            if (testProcess == null) {
              return;
            }
            checkInProgress = true;
            ProgressManager.getInstance().run(getCheckTask(studyState, testRunner, testProcess, commandLine, project, selectedEditor));
          }

          /**
           * Picks the file to check: the last task file that has answer placeholders,
           * falling back to the currently selected file when none does.
           */
          @Nullable
          private VirtualFile getTaskVirtualFile(@NotNull final StudyState studyState, @NotNull final Task task, @NotNull final VirtualFile taskDir) {
            VirtualFile taskVirtualFile = studyState.getVirtualFile();
            for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
              String name = entry.getKey();
              TaskFile taskFile = entry.getValue();
              VirtualFile virtualFile = taskDir.findChild(name);
              if (virtualFile != null) {
                if (!taskFile.getAnswerPlaceholders().isEmpty()) {
                  taskVirtualFile = virtualFile;
                }
              }
            }
            return taskVirtualFile;
          }
        });
      }
    });
  }

  /**
   * Builds the background task that captures the test-process output, interprets it via
   * {@code testRunner}, posts the attempt to Stepik and updates statuses/UI accordingly.
   * On cancel the task status is rolled back to {@code statusBeforeCheck}.
   */
  @NotNull
  private com.intellij.openapi.progress.Task.Backgroundable getCheckTask(final StudyState studyState,
                                                                         final StudyTestRunner testRunner,
                                                                         final Process testProcess,
                                                                         @NotNull final String commandLine,
                                                                         @NotNull final Project project,
                                                                         final StudyEditor selectedEditor) {
    final Task task = studyState.getTask();
    final VirtualFile taskDir = studyState.getTaskDir();
    final StudyTaskManager taskManager = StudyTaskManager.getInstance(project);
    // Remembered so onCancel() can restore the pre-check status.
    final StudyStatus statusBeforeCheck = taskManager.getStatus(task);
    return new com.intellij.openapi.progress.Task.Backgroundable(project, "Checking Task", true) {
      @Override
      public void onSuccess() {
        StudyUtils.updateToolWindows(project);
        drawAllPlaceholders(project, task, taskDir);
        ProjectView.getInstance(project).refresh();
        EduUtils.deleteWindowDescriptions(task, taskDir);
        checkInProgress = false;
      }

      @Override
      public void onCancel() {
        // Roll back: a cancelled check must not change the task's status.
        taskManager.setStatus(task, statusBeforeCheck);
        EduUtils.deleteWindowDescriptions(task, taskDir);
        checkInProgress = false;
      }

      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        final Map<String, TaskFile> taskFiles = task.getTaskFiles();
        final CapturingProcessHandler handler = new CapturingProcessHandler(testProcess, null, commandLine);
        final ProcessOutput output = handler.runProcessWithProgressIndicator(indicator);
        if (indicator.isCanceled()) {
          ApplicationManager.getApplication().invokeLater(new Runnable() {
            @Override
            public void run() {
              showTestResultPopUp("Tests check cancelled.", MessageType.WARNING.getPopupBackground(), project);
            }
          });
          return;
        }
        final StudyTestRunner.TestsOutput testsOutput = testRunner.getTestsOutput(output);
        String stderr = output.getStderr();
        if (!stderr.isEmpty()) {
          LOG.info("#educational " + stderr);
        }
        final StudySettings studySettings = StudySettings.getInstance();
        final String login = studySettings.getLogin();
        // Without a login, the password is irrelevant — use an empty string.
        final String password = StringUtil.isEmptyOrSpaces(login) ? "" : studySettings.getPassword();
        if (testsOutput.isSuccess()) {
          taskManager.setStatus(task, StudyStatus.Solved);
          EduStepicConnector.postAttempt(task, true, login, password);
          ApplicationManager.getApplication().invokeLater(new Runnable() {
            @Override
            public void run() {
              showTestResultPopUp(testsOutput.getMessage(), MessageType.INFO.getPopupBackground(), project);
            }
          });
        }
        else {
          // Failure path: mark the task failed, run the "smart check" per placeholder-rich
          // file to pinpoint which placeholders are wrong, then navigate to the first failure.
          ApplicationManager.getApplication().invokeLater(new Runnable() {
            @Override
            public void run() {
              if (taskDir == null) return;
              EduStepicConnector.postAttempt(task, false, login, password);
              taskManager.setStatus(task, StudyStatus.Failed);
              for (Map.Entry<String, TaskFile> entry : taskFiles.entrySet()) {
                final String name = entry.getKey();
                final TaskFile taskFile = entry.getValue();
                if (taskFile.getAnswerPlaceholders().size() < 2) {
                  // With fewer than two placeholders there is nothing to disambiguate:
                  // the whole file is simply marked failed.
                  taskManager.setStatus(taskFile, StudyStatus.Failed);
                  continue;
                }
                CommandProcessor.getInstance().runUndoTransparentAction(new Runnable() {
                  @Override
                  public void run() {
                    ApplicationManager.getApplication().runWriteAction(new Runnable() {
                      @Override
                      public void run() {
                        runSmartTestProcess(taskDir, testRunner, name, taskFile, project);
                      }
                    });
                  }
                });
              }
              showTestResultPopUp(testsOutput.getMessage(), MessageType.ERROR.getPopupBackground(), project);
              navigateToFailedPlaceholder(studyState, task, taskDir, project);
            }
          });
        }
      }
    };
  }

  /**
   * Moves focus and caret to the first failed answer placeholder: prefers the currently
   * selected task file, otherwise scans the task's files for one with failed placeholders.
   */
  private static void navigateToFailedPlaceholder(@NotNull final StudyState studyState, @NotNull final Task task, @NotNull final VirtualFile taskDir, @NotNull final Project project) {
    TaskFile selectedTaskFile = studyState.getTaskFile();
    Editor editor = studyState.getEditor();
    TaskFile taskFileToNavigate = selectedTaskFile;
    VirtualFile fileToNavigate = studyState.getVirtualFile();
    final StudyTaskManager taskManager = StudyTaskManager.getInstance(project);
    if (!taskManager.hasFailedAnswerPlaceholders(selectedTaskFile)) {
      for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
        String name = entry.getKey();
        TaskFile taskFile = entry.getValue();
        if (taskManager.hasFailedAnswerPlaceholders(taskFile)) {
          taskFileToNavigate = taskFile;
          VirtualFile virtualFile = taskDir.findChild(name);
          if (virtualFile == null) {
            continue;
          }
          FileEditor fileEditor = FileEditorManager.getInstance(project).getSelectedEditor(virtualFile);
          if (fileEditor instanceof StudyEditor) {
            StudyEditor studyEditor = (StudyEditor)fileEditor;
            editor = studyEditor.getEditor();
          }
          fileToNavigate = virtualFile;
          break;
        }
      }
    }
    if (fileToNavigate != null) {
      FileEditorManager.getInstance(project).openFile(fileToNavigate, true);
    }
    // Effectively-final copy for the anonymous Runnable below.
    final Editor editorToNavigate = editor;
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        IdeFocusManager.getInstance(project).requestFocus(editorToNavigate.getContentComponent(), true);
      }
    });
    StudyNavigator.navigateToFirstFailedAnswerPlaceholder(editor, taskFileToNavigate);
  }

  /**
   * Runs the fine-grained "smart check" for one task file: creates a copy with all
   * placeholders answered, checks each valid placeholder against it, then deletes the copy.
   */
  private void runSmartTestProcess(@NotNull final VirtualFile taskDir,
                                   @NotNull final StudyTestRunner testRunner,
                                   final String taskFileName,
                                   @NotNull final TaskFile taskFile,
                                   @NotNull final Project project) {
    final TaskFile answerTaskFile = new TaskFile();
    answerTaskFile.name = taskFileName;
    final VirtualFile virtualFile = taskDir.findChild(taskFileName);
    if (virtualFile == null) {
      return;
    }
    final VirtualFile answerFile = getCopyWithAnswers(taskDir, virtualFile, taskFile, answerTaskFile);
    for (final AnswerPlaceholder answerPlaceholder : answerTaskFile.getAnswerPlaceholders()) {
      final Document document = FileDocumentManager.getInstance().getDocument(virtualFile);
      if (document == null) {
        continue;
      }
      if (!answerPlaceholder.isValid(document)) {
        continue;
      }
      StudySmartChecker.smartCheck(answerPlaceholder, project, answerFile, answerTaskFile, taskFile, testRunner, virtualFile, document);
    }
    // The answers copy is a temporary artifact; remove it once checking is done.
    StudyUtils.deleteFile(answerFile);
  }

  /**
   * Copies {@code file} next to itself as "name_answers.ext" and replaces every valid
   * placeholder's current text with its stored possible answer, saving the document.
   *
   * @return the saved copy, or {@code null} if the copy could not be created
   */
  private VirtualFile getCopyWithAnswers(@NotNull final VirtualFile taskDir,
                                         @NotNull final VirtualFile file,
                                         @NotNull final TaskFile source,
                                         @NotNull final TaskFile target) {
    VirtualFile copy = null;
    try {
      copy = file.copy(this, taskDir, file.getNameWithoutExtension() + ANSWERS_POSTFIX + "." + file.getExtension());
      final FileDocumentManager documentManager = FileDocumentManager.getInstance();
      final Document document = documentManager.getDocument(copy);
      if (document != null) {
        TaskFile.copy(source, target);
        // Listener keeps the target's placeholder offsets in sync while we edit the copy.
        EduDocumentListener listener = new EduDocumentListener(target);
        document.addDocumentListener(listener);
        for (AnswerPlaceholder answerPlaceholder : target.getAnswerPlaceholders()) {
          if (!answerPlaceholder.isValid(document)) {
            continue;
          }
          final int start = answerPlaceholder.getRealStartOffset(document);
          final int end = start + answerPlaceholder.getLength();
          final String text = answerPlaceholder.getPossibleAnswer();
          document.replaceString(start, end, text);
        }
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          @Override
          public void run() {
            documentManager.saveDocument(document);
          }
        });
      }
    }
    catch (IOException e) {
      LOG.error(e);
    }
    return copy;
  }

  /** Shows the check result as an HTML balloon popup near the check button. */
  private static void showTestResultPopUp(final String text, Color color, @NotNull final Project project) {
    BalloonBuilder balloonBuilder = JBPopupFactory.getInstance().createHtmlTextBalloonBuilder(text, null, color, null);
    final Balloon balloon = balloonBuilder.createBalloon();
    StudyUtils.showCheckPopUp(project, balloon);
  }

  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    Project project = e.getProject();
    if (project != null) {
      check(project);
    }
  }

  @Override
  public void update(AnActionEvent e) {
    final Presentation presentation = e.getPresentation();
    StudyUtils.updateAction(e);
    // Keep the action disabled while a previous check is still running.
    if (presentation.isEnabled()) {
      presentation.setEnabled(!checkInProgress);
    }
  }
}
// This file was generated by Mendix Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.

package myfirstmodule.proxies;

/**
 * Generated proxy for the MyFirstModule.StringFromFile entity, a FileDocument
 * specialization that exposes the file's contents as a string plus its encoding.
 */
public class StringFromFile extends system.proxies.FileDocument {
  /**
   * Internal name of this entity
   */
  public static final java.lang.String entityName = "MyFirstModule.StringFromFile";

  /**
   * Enum describing members of this entity
   */
  public enum MemberNames {
    ContentAsString("ContentAsString"),
    Encoding("Encoding"),
    FileID("FileID"),
    Name("Name"),
    DeleteAfterDownload("DeleteAfterDownload"),
    Contents("Contents"),
    HasContents("HasContents"),
    Size("Size");

    // Mendix meta-model name of the member, returned by toString().
    private java.lang.String metaName;

    MemberNames(java.lang.String s) {
      metaName = s;
    }

    @java.lang.Override
    public java.lang.String toString() {
      return metaName;
    }
  }

  /** Creates a new StringFromFile object in the given context. */
  public StringFromFile(com.mendix.systemwideinterfaces.core.IContext context) {
    this(context, com.mendix.core.Core.instantiate(context, "MyFirstModule.StringFromFile"));
  }

  /** Wraps an existing Mendix object; rejects objects that are not of (a subtype of) this entity. */
  protected StringFromFile(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject stringFromFileMendixObject) {
    super(context, stringFromFileMendixObject);
    if (!com.mendix.core.Core.isSubClassOf("MyFirstModule.StringFromFile", stringFromFileMendixObject.getType()))
      throw new java.lang.IllegalArgumentException("The given object is not a MyFirstModule.StringFromFile");
  }

  /**
   * @deprecated Use 'StringFromFile.load(IContext, IMendixIdentifier)' instead.
   */
  @java.lang.Deprecated
  public static myfirstmodule.proxies.StringFromFile initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException {
    return myfirstmodule.proxies.StringFromFile.load(context, mendixIdentifier);
  }

  /**
   * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
   * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.createSudoClone() can be used to obtain sudo access).
   */
  public static myfirstmodule.proxies.StringFromFile initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject mendixObject) {
    return new myfirstmodule.proxies.StringFromFile(context, mendixObject);
  }

  /** Loads the object with the given identifier from the database. */
  public static myfirstmodule.proxies.StringFromFile load(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException {
    com.mendix.systemwideinterfaces.core.IMendixObject mendixObject = com.mendix.core.Core.retrieveId(context, mendixIdentifier);
    return myfirstmodule.proxies.StringFromFile.initialize(context, mendixObject);
  }

  /** Loads all objects of this entity matching the given XPath constraint. */
  public static java.util.List<myfirstmodule.proxies.StringFromFile> load(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String xpathConstraint) throws com.mendix.core.CoreException {
    java.util.List<myfirstmodule.proxies.StringFromFile> result = new java.util.ArrayList<myfirstmodule.proxies.StringFromFile>();
    for (com.mendix.systemwideinterfaces.core.IMendixObject obj : com.mendix.core.Core.retrieveXPathQuery(context, "//MyFirstModule.StringFromFile" + xpathConstraint))
      result.add(myfirstmodule.proxies.StringFromFile.initialize(context, obj));
    return result;
  }

  /**
   * @return value of ContentAsString
   */
  public final java.lang.String getContentAsString() {
    return getContentAsString(getContext());
  }

  /**
   * @param context
   * @return value of ContentAsString
   */
  public final java.lang.String getContentAsString(com.mendix.systemwideinterfaces.core.IContext context) {
    return (java.lang.String) getMendixObject().getValue(context, MemberNames.ContentAsString.toString());
  }

  /**
   * Set value of ContentAsString
   * @param contentasstring
   */
  public final void setContentAsString(java.lang.String contentasstring) {
    setContentAsString(getContext(), contentasstring);
  }

  /**
   * Set value of ContentAsString
   * @param context
   * @param contentasstring
   */
  public final void setContentAsString(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String contentasstring) {
    getMendixObject().setValue(context, MemberNames.ContentAsString.toString(), contentasstring);
  }

  /**
   * @return value of Encoding
   */
  public final communitycommons.proxies.StandardEncodings getEncoding() {
    return getEncoding(getContext());
  }

  /**
   * @param context
   * @return value of Encoding
   */
  public final communitycommons.proxies.StandardEncodings getEncoding(com.mendix.systemwideinterfaces.core.IContext context) {
    // The enum is stored as its String name; null means "not set".
    Object obj = getMendixObject().getValue(context, MemberNames.Encoding.toString());
    if (obj == null)
      return null;
    return communitycommons.proxies.StandardEncodings.valueOf((java.lang.String) obj);
  }

  /**
   * Set value of Encoding
   * @param encoding
   */
  public final void setEncoding(communitycommons.proxies.StandardEncodings encoding) {
    setEncoding(getContext(), encoding);
  }

  /**
   * Set value of Encoding
   * @param context
   * @param encoding
   */
  public final void setEncoding(com.mendix.systemwideinterfaces.core.IContext context, communitycommons.proxies.StandardEncodings encoding) {
    if (encoding != null)
      getMendixObject().setValue(context, MemberNames.Encoding.toString(), encoding.toString());
    else
      getMendixObject().setValue(context, MemberNames.Encoding.toString(), null);
  }

  // Equality is delegated to the underlying IMendixObject; only objects of the
  // exact same proxy class are compared.
  @java.lang.Override
  public boolean equals(Object obj) {
    if (obj == this)
      return true;
    if (obj != null && getClass().equals(obj.getClass())) {
      final myfirstmodule.proxies.StringFromFile that = (myfirstmodule.proxies.StringFromFile) obj;
      return getMendixObject().equals(that.getMendixObject());
    }
    return false;
  }

  @java.lang.Override
  public int hashCode() {
    return getMendixObject().hashCode();
  }

  /**
   * @return String name of this class
   */
  public static java.lang.String getType() {
    return "MyFirstModule.StringFromFile";
  }

  /**
   * @return String GUID from this object, format: ID_0000000000
   * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public java.lang.String getGUID() {
    return "ID_" + getMendixObject().getId().toLong();
  }
}
package org.bzewdu.tools.chart.soffice;

import com.sun.star.beans.PropertyVetoException;
import com.sun.star.beans.UnknownPropertyException;
import com.sun.star.container.NoSuchElementException;
import com.sun.star.container.XIndexAccess;
import com.sun.star.lang.IllegalArgumentException;
import com.sun.star.lang.Locale;
import com.sun.star.lang.WrappedTargetException;
import com.sun.star.sheet.XSpreadsheet;
import com.sun.star.sheet.XSpreadsheetDocument;
import com.sun.star.sheet.XSpreadsheets;
import com.sun.star.table.XCell;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.util.XNumberFormatsSupplier;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.*;

/**
 * Fills OpenOffice spreadsheet sheets with benchmark scores per build and a second
 * table of relative-performance formulas, then lets {@code Chart} draw a chart per sheet.
 *
 * Layout per sheet: an absolute-score table starting at row 0, and below it (offset by
 * ROW_OFFSET + number of benchmarks) a percentage table computed by spreadsheet formulas.
 */
public class Builder {
  // Blank rows separating the absolute-score table from the percentage table.
  private static final int ROW_OFFSET = 2;
  // Standard percent number format id for the document's locale, applied to the
  // relative-performance cell range.
  private int nNumberFormat;

  /**
   * Looks up the document's standard PERCENT number format once so populateSheet()
   * can apply it to the formula cells.
   */
  public Builder(XSpreadsheetDocument _theSpreadsheetDocument) {
    XNumberFormatsSupplier xNumberFormatsSupplier =
        (XNumberFormatsSupplier) UnoRuntime.queryInterface(XNumberFormatsSupplier.class, _theSpreadsheetDocument);
    Locale aLocale = new Locale();
    com.sun.star.util.XNumberFormats xNumberFormats = xNumberFormatsSupplier.getNumberFormats();
    com.sun.star.util.XNumberFormatTypes xNumberFormatTypes =
        (com.sun.star.util.XNumberFormatTypes) UnoRuntime.queryInterface(com.sun.star.util.XNumberFormatTypes.class, xNumberFormats);
    nNumberFormat = xNumberFormatTypes.getStandardFormat(com.sun.star.util.NumberFormat.PERCENT, aLocale);
  }

  /**
   * Populates one sheet: headers for both tables, the percent format on the formula
   * range, and all data/formula cells. Also records the formula cell range (in A1
   * notation) on the config for later charting.
   */
  private void populateSheet(sheetConfig sc, XSpreadsheet xSheet) {
    List<String> lb = Arrays.asList(sc.getBenchList());
    List<Object> lm = Arrays.asList(sc.getBuildMap().keySet().toArray());
    populateHeaders(sc, lb, xSheet, lm);
    // Column letters are derived from 'A' (65); the range covers the percentage table.
    sc.setCellRange(((char) (ROW_OFFSET + lb.size() + 65) + "" + 1)
        + ":"
        + ((char) ((ROW_OFFSET + (2 * lb.size())) + 65) + "" + (lm.size() + 1)));
    formatCells(xSheet, ROW_OFFSET + lb.size() + 1, 1, ROW_OFFSET + (2 * lb.size()), (lm.size() + 1));
    populateCells(sc, lb, lm, xSheet);
  }

  /**
   * Writes the mean score of each (benchmark, build) pair into the absolute table and
   * a "=1-((baseline-specimen)/baseline)" formula into the percentage table. Row 2 of
   * each benchmark column serves as the baseline.
   */
  private void populateCells(sheetConfig sc, List<String> lb, List<Object> lm, XSpreadsheet xSheet) {
    for (String benchmark : sc.getBenchList()) {
      for (String buildName : sc.getBuildMap().keySet()) {
        // Declared inside the loop: each pair gets its own result list.
        List<BigDecimal> scores = getResultData(sc, buildName, benchmark);
        if (scores != null) {
          insertIntoCell(
              lb.indexOf(benchmark) + 1,
              lm.indexOf(buildName) + 1,
              "" + Helper.bd_getMean(scores, false),
              xSheet,
              "V");
          String baseline = (char) ((lb.indexOf(benchmark)) + 1 + 65) + "" + 2;
          String specimen = (char) ((lb.indexOf(benchmark)) + 1 + 65) + "" + (lm.indexOf(buildName) + 2);
          insertIntoCell(
              lb.indexOf(benchmark) + 1 + ROW_OFFSET + lb.size(),
              lm.indexOf(buildName) + 1,
              "=1-((" + baseline + "-" + specimen + ")/" + baseline + ")",
              xSheet,
              "");
        }
      }
    }
  }

  /**
   * Reads the score for one build/benchmark from its results file.
   *
   * @return a list containing the single parsed score, or an empty list when the
   *         results file is missing or unreadable (never null)
   */
  private List<BigDecimal> getResultData(sheetConfig sc, String buildName, String benchmark) {
    List<BigDecimal> scores = new ArrayList<BigDecimal>();
    String buildScorePath = sc.getBuildMap().get(buildName);
    // NOTE(review): the "results.<benchmark>" path component is intentionally doubled
    // (directory and file share the name) — confirm against the result-file layout.
    BigDecimal score = getScoreFromResultFile(buildScorePath + File.separator
        + "results." + benchmark + File.separator
        + "results." + benchmark);
    if (score != null)
      scores.add(score);
    return scores;
  }

  /**
   * Parses the "score" property out of a results property file.
   *
   * @return the score, or null when the file cannot be read or parsed
   */
  private static synchronized BigDecimal getScoreFromResultFile(String resultFileName) {
    FileInputStream fis = null;
    try {
      fis = new FileInputStream(resultFileName);
      Properties scoreProps = new Properties();
      scoreProps.load(fis);
      return new BigDecimal(scoreProps.getProperty("score"));
    }
    catch (IOException e) {
      e.printStackTrace();
    }
    finally {
      if (fis != null) {
        try {
          fis.close();
        }
        catch (IOException e) {
          e.printStackTrace();
        }
      }
    }
    return null;
  }

  /** Writes benchmark names down column 0 and build names across row 0 of both tables. */
  private void populateHeaders(sheetConfig sc, List<String> lb, XSpreadsheet xSheet, List<Object> lm) {
    for (String benchmark : sc.getBenchList()) {
      insertIntoCell(lb.indexOf(benchmark) + 1, 0, benchmark, xSheet, "");
      insertIntoCell(lb.indexOf(benchmark) + 1 + ROW_OFFSET + lb.size(), 0, benchmark, xSheet, "");
    }
    for (String buildName : sc.getBuildMap().keySet()) {
      insertIntoCell(0, lm.indexOf(buildName) + 1, buildName, xSheet, "");
      insertIntoCell(ROW_OFFSET + lb.size(), lm.indexOf(buildName) + 1, buildName, xSheet, "");
    }
  }

  /**
   * Creates one sheet per configured sheetConfig, then populates each sheet and draws
   * its chart on a worker thread, joining all workers before returning.
   */
  public void renderSheets(XSpreadsheets xSheets) {
    final Chart chart = new Chart();
    HashMap<sheetConfig, XSpreadsheet> xSheetsMap = new HashMap<sheetConfig, XSpreadsheet>();
    // Sheet creation and lookup happen serially; only population/charting is threaded.
    for (final sheetConfig sc : Config.getInstance().getSheetConfigs()) {
      xSheets.insertNewByName(sc.getDocumentTitle(), (short) 1);
      XIndexAccess oIndexSheets = (XIndexAccess) UnoRuntime.queryInterface(XIndexAccess.class, xSheets);
      try {
        xSheetsMap.put(sc, (XSpreadsheet) UnoRuntime.queryInterface(XSpreadsheet.class, oIndexSheets.getByIndex(1)));
      }
      catch (com.sun.star.lang.IndexOutOfBoundsException e) {
        e.printStackTrace();
      }
      catch (WrappedTargetException e) {
        e.printStackTrace();
      }
    }
    List<Thread> threadList = new ArrayList<Thread>();
    for (final sheetConfig sc : xSheetsMap.keySet()) {
      final XSpreadsheet xSheet = xSheetsMap.get(sc);
      // Fix: create/start the thread in one place instead of the dead
      // "Thread thread = null; threadList.add(thread = ...)" assignment dance.
      Thread worker = new Thread() {
        public void run() {
          populateSheet(sc, xSheet);
          try {
            chart.draw(xSheet, sc);
          }
          catch (NoSuchElementException e) {
            e.printStackTrace();
          }
          catch (WrappedTargetException e) {
            e.printStackTrace();
          }
          catch (InterruptedException e) {
            e.printStackTrace();
          }
          catch (UnknownPropertyException e) {
            e.printStackTrace();
          }
          catch (PropertyVetoException e) {
            e.printStackTrace();
          }
          catch (com.sun.star.lang.IllegalArgumentException e) {
            e.printStackTrace();
          }
        }
      };
      threadList.add(worker);
      worker.start();
    }
    try {
      for (Thread t : threadList)
        t.join();
    }
    catch (InterruptedException e) {
      e.printStackTrace();
    }
  }

  /**
   * Writes one cell: as a numeric value when {@code flag} is "V", otherwise as a
   * formula/text. Index-out-of-bounds errors are logged and leave xCell null
   * (the subsequent dereference then fails fast, as before).
   */
  private void insertIntoCell(int CellX, int CellY, String theValue, XSpreadsheet TT1, String flag) {
    XCell xCell = null;
    try {
      xCell = TT1.getCellByPosition(CellX, CellY);
    }
    catch (com.sun.star.lang.IndexOutOfBoundsException ex) {
      // Fix: the debug flag was tested twice in a row; one test suffices.
      if (Config.debug)
        System.err.println("Could not get Cell");
      ex.printStackTrace(System.err);
    }
    if (flag.equals("V")) {
      assert xCell != null;
      // Fix: Float.parseFloat replaces the deprecated Float(String) boxing
      // constructor; the parsed float widens to double exactly as before.
      xCell.setValue(Float.parseFloat(theValue));
    }
    else {
      assert xCell != null;
      xCell.setFormula(theValue);
    }
  }

  /** Applies the cached percent number format to the cell range (i,j)..(k,l). */
  private void formatCells(XSpreadsheet xSheet, int i, int j, int k, int l) {
    com.sun.star.table.XCellRange xCellRange = null;
    try {
      xCellRange = xSheet.getCellRangeByPosition(i, j, k, l);
    }
    catch (com.sun.star.lang.IndexOutOfBoundsException e) {
      e.printStackTrace();
    }
    com.sun.star.beans.XPropertySet xCellProp =
        (com.sun.star.beans.XPropertySet) UnoRuntime.queryInterface(com.sun.star.beans.XPropertySet.class, xCellRange);
    try {
      xCellProp.setPropertyValue("NumberFormat", nNumberFormat);
    }
    catch (UnknownPropertyException e) {
      e.printStackTrace();
    }
    catch (PropertyVetoException e) {
      e.printStackTrace();
    }
    catch (IllegalArgumentException e) {
      e.printStackTrace();
    }
    catch (WrappedTargetException e) {
      e.printStackTrace();
    }
  }
}
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.devtools.build.lib.analysis.util.BuildViewTestCase; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.packages.ConstantRuleVisibility; import com.google.devtools.build.lib.packages.NoSuchTargetException; import com.google.devtools.build.lib.packages.Preprocessor; import com.google.devtools.build.lib.packages.util.SubincludePreprocessor; import com.google.devtools.build.lib.pkgcache.PackageCacheOptions; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils; import com.google.devtools.build.lib.testutil.ManualClock; import com.google.devtools.build.lib.util.BlazeClock; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import 
com.google.devtools.build.lib.vfs.Dirent; import com.google.devtools.build.lib.vfs.FileStatus; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.ModifiedFileSet; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import com.google.devtools.build.skyframe.ErrorInfo; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.common.options.Options; import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Set; import java.util.UUID; import javax.annotation.Nullable; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Unit tests of specific functionality of PackageFunction. Note that it's already tested * indirectly in several other places. */ @RunWith(JUnit4.class) public class PackageFunctionTest extends BuildViewTestCase { private CustomInMemoryFs fs = new CustomInMemoryFs(new ManualClock()); private void preparePackageLoading(Path... 
roots) { PackageCacheOptions packageCacheOptions = Options.getDefaults(PackageCacheOptions.class); packageCacheOptions.defaultVisibility = ConstantRuleVisibility.PUBLIC; packageCacheOptions.showLoadingProgress = true; packageCacheOptions.globbingThreads = 7; getSkyframeExecutor() .preparePackageLoading( new PathPackageLocator(outputBase, ImmutableList.copyOf(roots)), packageCacheOptions, "", UUID.randomUUID(), ImmutableMap.<String, String>of(), new TimestampGranularityMonitor(BlazeClock.instance())); } @Override protected Preprocessor.Factory.Supplier getPreprocessorFactorySupplier() { return new SubincludePreprocessor.FactorySupplier(); } @Override protected FileSystem createFileSystem() { return fs; } private PackageValue validPackage(SkyKey skyKey) throws InterruptedException { EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); if (result.hasError()) { fail(result.getError(skyKey).getException().getMessage()); } PackageValue value = result.get(skyKey); assertFalse(value.getPackage().containsErrors()); return value; } @Test public void testValidPackage() throws Exception { scratch.file("pkg/BUILD"); validPackage(PackageValue.key(PackageIdentifier.parse("@//pkg"))); } @Test public void testInconsistentNewPackage() throws Exception { scratch.file("pkg/BUILD", "subinclude('//foo:sub')"); scratch.file("foo/sub"); preparePackageLoading(rootDirectory); SkyKey pkgLookupKey = PackageLookupValue.key(new PathFragment("foo")); EvaluationResult<PackageLookupValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), pkgLookupKey, /*keepGoing=*/false, reporter); assertFalse(result.hasError()); assertFalse(result.get(pkgLookupKey).packageExists()); scratch.file("foo/BUILD"); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//pkg")); result = SkyframeExecutorTestUtils.evaluate(getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); 
assertTrue(result.hasError()); Throwable exception = result.getError(skyKey).getException(); assertThat(exception.getMessage()).contains("Inconsistent filesystem operations"); assertThat(exception.getMessage()).contains("Unexpected package"); } @Test public void testInconsistentMissingPackage() throws Exception { reporter.removeHandler(failFastHandler); Path root1 = fs.getPath("/root1"); scratch.file("/root1/WORKSPACE"); scratch.file("/root1/foo/sub"); scratch.file("/root1/pkg/BUILD", "subinclude('//foo:sub')"); Path root2 = fs.getPath("/root2"); scratch.file("/root2/foo/BUILD"); scratch.file("/root2/foo/sub"); preparePackageLoading(root1, root2); SkyKey pkgLookupKey = PackageLookupValue.key(PackageIdentifier.parse("@//foo")); EvaluationResult<PackageLookupValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), pkgLookupKey, /*keepGoing=*/false, reporter); assertFalse(result.hasError()); assertEquals(root2, result.get(pkgLookupKey).getRoot()); scratch.file("/root1/foo/BUILD"); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//pkg")); result = SkyframeExecutorTestUtils.evaluate(getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); Throwable exception = result.getError(skyKey).getException(); System.out.println("exception: " + exception.getMessage()); assertThat(exception.getMessage()).contains("Inconsistent filesystem operations"); assertThat(exception.getMessage()).contains("Inconsistent package location"); } @Test public void testPropagatesFilesystemInconsistencies() throws Exception { reporter.removeHandler(failFastHandler); RecordingDifferencer differencer = getSkyframeExecutor().getDifferencerForTesting(); Path pkgRoot = getSkyframeExecutor().getPathEntries().get(0); Path fooBuildFile = scratch.file("foo/BUILD"); Path fooDir = fooBuildFile.getParentDirectory(); // Our custom filesystem says "foo/BUILD" exists but its parent "foo" is a file. 
FileStatus inconsistentParentFileStatus = new FileStatus() { @Override public boolean isFile() { return true; } @Override public boolean isDirectory() { return false; } @Override public boolean isSymbolicLink() { return false; } @Override public boolean isSpecialFile() { return false; } @Override public long getSize() throws IOException { return 0; } @Override public long getLastModifiedTime() throws IOException { return 0; } @Override public long getLastChangeTime() throws IOException { return 0; } @Override public long getNodeId() throws IOException { return 0; } }; fs.stubStat(fooDir, inconsistentParentFileStatus); RootedPath pkgRootedPath = RootedPath.toRootedPath(pkgRoot, fooDir); SkyValue fooDirValue = FileStateValue.create(pkgRootedPath, tsgm); differencer.inject(ImmutableMap.of(FileStateValue.key(pkgRootedPath), fooDirValue)); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); String expectedMessage = "/workspace/foo/BUILD exists but its parent path /workspace/foo isn't " + "an existing directory"; EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); ErrorInfo errorInfo = result.getError(skyKey); String errorMessage = errorInfo.getException().getMessage(); assertThat(errorMessage).contains("Inconsistent filesystem operations"); assertThat(errorMessage).contains(expectedMessage); } @Test public void testPropagatesFilesystemInconsistencies_Globbing() throws Exception { reporter.removeHandler(failFastHandler); RecordingDifferencer differencer = getSkyframeExecutor().getDifferencerForTesting(); Path pkgRoot = getSkyframeExecutor().getPathEntries().get(0); scratch.file("foo/BUILD", "subinclude('//a:a')", "sh_library(name = 'foo', srcs = glob(['bar/**/baz.sh']))"); scratch.file("a/BUILD"); scratch.file("a/a"); Path bazFile = scratch.file("foo/bar/baz/baz.sh"); Path bazDir = bazFile.getParentDirectory(); Path barDir = 
bazDir.getParentDirectory(); // Our custom filesystem says "foo/bar/baz" does not exist but it also says that "foo/bar" // has a child directory "baz". fs.stubStat(bazDir, null); RootedPath barDirRootedPath = RootedPath.toRootedPath(pkgRoot, barDir); FileStateValue barDirFileStateValue = FileStateValue.create(barDirRootedPath, tsgm); FileValue barDirFileValue = FileValue.value(barDirRootedPath, barDirFileStateValue, barDirRootedPath, barDirFileStateValue); DirectoryListingValue barDirListing = DirectoryListingValue.value(barDirRootedPath, barDirFileValue, DirectoryListingStateValue.create(ImmutableList.of( new Dirent("baz", Dirent.Type.DIRECTORY)))); differencer.inject(ImmutableMap.of(DirectoryListingValue.key(barDirRootedPath), barDirListing)); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); String expectedMessage = "/workspace/foo/bar/baz is no longer an existing directory"; EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); ErrorInfo errorInfo = result.getError(skyKey); String errorMessage = errorInfo.getException().getMessage(); assertThat(errorMessage).contains("Inconsistent filesystem operations"); assertThat(errorMessage).contains(expectedMessage); } /** Regression test for unexpected exception type from PackageValue. 
*/ @Test public void testDiscrepancyBetweenLegacyAndSkyframePackageLoadingErrors() throws Exception { reporter.removeHandler(failFastHandler); Path fooBuildFile = scratch.file("foo/BUILD", "sh_library(name = 'foo', srcs = glob(['bar/*.sh']))"); Path fooDir = fooBuildFile.getParentDirectory(); Path barDir = fooDir.getRelative("bar"); scratch.file("foo/bar/baz.sh"); fs.scheduleMakeUnreadableAfterReaddir(barDir); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); String expectedMessage = "Encountered error 'Directory is not readable'"; EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); ErrorInfo errorInfo = result.getError(skyKey); String errorMessage = errorInfo.getException().getMessage(); assertThat(errorMessage).contains("Inconsistent filesystem operations"); assertThat(errorMessage).contains(expectedMessage); } @Test public void testMultipleSubincludesFromSamePackage() throws Exception { scratch.file("foo/BUILD", "subinclude('//bar:a')", "subinclude('//bar:b')"); scratch.file("bar/BUILD", "exports_files(['a', 'b'])"); scratch.file("bar/a"); scratch.file("bar/b"); preparePackageLoading(rootDirectory); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); validPackage(skyKey); } @Test public void testTransitiveSubincludesStoredInPackage() throws Exception { scratch.file("foo/BUILD", "subinclude('//bar:a')"); scratch.file("bar/BUILD", "exports_files(['a'])"); scratch.file("bar/a", "subinclude('//baz:b')"); scratch.file("baz/BUILD", "exports_files(['b', 'c'])"); scratch.file("baz/b"); scratch.file("baz/c"); preparePackageLoading(rootDirectory); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); PackageValue value = validPackage(skyKey); assertThat(value.getPackage().getSubincludeLabels()).containsExactly( Label.parseAbsolute("//bar:a"), Label.parseAbsolute("//baz:b")); scratch.overwriteFile("bar/a", 
"subinclude('//baz:c')"); getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter, ModifiedFileSet.builder().modify(new PathFragment("bar/a")).build(), rootDirectory); value = validPackage(skyKey); assertThat(value.getPackage().getSubincludeLabels()).containsExactly( Label.parseAbsolute("//bar:a"), Label.parseAbsolute("//baz:c")); } @SuppressWarnings("unchecked") // Cast of srcs attribute to Iterable<Label>. @Test public void testGlobOrderStable() throws Exception { scratch.file("foo/BUILD", "sh_library(name = 'foo', srcs = glob(['**/*.txt']))"); scratch.file("foo/b.txt"); scratch.file("foo/c/c.txt"); preparePackageLoading(rootDirectory); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); PackageValue value = validPackage(skyKey); assertThat( (Iterable<Label>) value .getPackage() .getTarget("foo") .getAssociatedRule() .getAttributeContainer() .getAttr("srcs")) .containsExactly( Label.parseAbsoluteUnchecked("//foo:b.txt"), Label.parseAbsoluteUnchecked("//foo:c/c.txt")) .inOrder(); scratch.file("foo/d.txt"); getSkyframeExecutor() .invalidateFilesUnderPathForTesting( reporter, ModifiedFileSet.builder().modify(new PathFragment("foo/d.txt")).build(), rootDirectory); value = validPackage(skyKey); assertThat( (Iterable<Label>) value .getPackage() .getTarget("foo") .getAssociatedRule() .getAttributeContainer() .getAttr("srcs")) .containsExactly( Label.parseAbsoluteUnchecked("//foo:b.txt"), Label.parseAbsoluteUnchecked("//foo:c/c.txt"), Label.parseAbsoluteUnchecked("//foo:d.txt")) .inOrder(); } @SuppressWarnings("unchecked") // Cast of srcs attribute to Iterable<Label>. 
@Test public void testGlobOrderStableWithLegacyAndSkyframeComponents() throws Exception { scratch.file("foo/BUILD", "sh_library(name = 'foo', srcs = glob(['*.txt']))"); scratch.file("foo/b.txt"); scratch.file("foo/a.config"); preparePackageLoading(rootDirectory); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); PackageValue value = validPackage(skyKey); assertThat( (Iterable<Label>) value .getPackage() .getTarget("foo") .getAssociatedRule() .getAttributeContainer() .getAttr("srcs")) .containsExactly(Label.parseAbsoluteUnchecked("//foo:b.txt")); scratch.overwriteFile( "foo/BUILD", "sh_library(name = 'foo', srcs = glob(['*.txt', '*.config']))"); getSkyframeExecutor() .invalidateFilesUnderPathForTesting( reporter, ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(), rootDirectory); value = validPackage(skyKey); assertThat( (Iterable<Label>) value .getPackage() .getTarget("foo") .getAssociatedRule() .getAttributeContainer() .getAttr("srcs")) .containsExactly( Label.parseAbsoluteUnchecked("//foo:a.config"), Label.parseAbsoluteUnchecked("//foo:b.txt")) .inOrder(); scratch.overwriteFile( "foo/BUILD", "sh_library(name = 'foo', srcs = glob(['*.txt', '*.config'])) # comment"); getSkyframeExecutor() .invalidateFilesUnderPathForTesting( reporter, ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(), rootDirectory); value = validPackage(skyKey); assertThat( (Iterable<Label>) value .getPackage() .getTarget("foo") .getAssociatedRule() .getAttributeContainer() .getAttr("srcs")) .containsExactly( Label.parseAbsoluteUnchecked("//foo:a.config"), Label.parseAbsoluteUnchecked("//foo:b.txt")) .inOrder(); getSkyframeExecutor().resetEvaluator(); PackageCacheOptions packageCacheOptions = Options.getDefaults(PackageCacheOptions.class); packageCacheOptions.defaultVisibility = ConstantRuleVisibility.PUBLIC; packageCacheOptions.showLoadingProgress = true; packageCacheOptions.globbingThreads = 7; getSkyframeExecutor() 
.preparePackageLoading( new PathPackageLocator(outputBase, ImmutableList.<Path>of(rootDirectory)), packageCacheOptions, "", UUID.randomUUID(), ImmutableMap.<String, String>of(), tsgm); value = validPackage(skyKey); assertThat( (Iterable<Label>) value .getPackage() .getTarget("foo") .getAssociatedRule() .getAttributeContainer() .getAttr("srcs")) .containsExactly( Label.parseAbsoluteUnchecked("//foo:a.config"), Label.parseAbsoluteUnchecked("//foo:b.txt")) .inOrder(); } @Test public void testIncludeInMainAndDefaultRepository() throws Exception { scratch.file("foo/BUILD", "subinclude('//baz:a')"); scratch.file("bar/BUILD", "subinclude('@//baz:a')"); scratch.file("baz/BUILD", "exports_files(['a'])"); scratch.file("baz/a"); preparePackageLoading(rootDirectory); SkyKey fooKey = PackageValue.key(PackageIdentifier.parse("@//foo")); PackageValue fooValue = validPackage(fooKey); assertThat(fooValue.getPackage().getSubincludeLabels()).containsExactly( Label.parseAbsolute("//baz:a")); SkyKey barKey = PackageValue.key(PackageIdentifier.parse("@//bar")); PackageValue barValue = validPackage(barKey); assertThat(barValue.getPackage().getSubincludeLabels()).containsExactly( Label.parseAbsolute("@//baz:a")); } @Test public void testTransitiveSkylarkDepsStoredInPackage() throws Exception { scratch.file("foo/BUILD", "load('/bar/ext', 'a')"); scratch.file("bar/BUILD"); scratch.file("bar/ext.bzl", "load('/baz/ext', 'b')", "a = b"); scratch.file("baz/BUILD"); scratch.file("baz/ext.bzl", "b = 1"); scratch.file("qux/BUILD"); scratch.file("qux/ext.bzl", "c = 1"); preparePackageLoading(rootDirectory); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); PackageValue value = validPackage(skyKey); assertThat(value.getPackage().getSkylarkFileDependencies()).containsExactly( Label.parseAbsolute("//bar:ext.bzl"), Label.parseAbsolute("//baz:ext.bzl")); scratch.overwriteFile("bar/ext.bzl", "load('/qux/ext', 'c')", "a = c"); 
getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter, ModifiedFileSet.builder().modify(new PathFragment("bar/ext.bzl")).build(), rootDirectory); value = validPackage(skyKey); assertThat(value.getPackage().getSkylarkFileDependencies()).containsExactly( Label.parseAbsolute("//bar:ext.bzl"), Label.parseAbsolute("//qux:ext.bzl")); } @Test public void testNonExistingSkylarkExtension() throws Exception { reporter.removeHandler(failFastHandler); scratch.file("test/skylark/BUILD", "load('/test/skylark/bad_extension', 'some_symbol')", "genrule(name = gr,", " outs = ['out.txt'],", " cmd = 'echo hello >@')"); invalidatePackages(); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark")); EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); ErrorInfo errorInfo = result.getError(skyKey); String expectedMsg = "error loading package 'test/skylark': " + "Extension file not found. Unable to load file '//test/skylark:bad_extension.bzl': " + "file doesn't exist or isn't a file"; assertThat(errorInfo.getException()) .hasMessage(expectedMsg); } @Test public void testNonExistingSkylarkExtensionWithPythonPreprocessing() throws Exception { reporter.removeHandler(failFastHandler); scratch.file("foo/BUILD", "exports_files(['a'])"); scratch.file("foo/a", "load('/test/skylark/bad_extension', 'some_symbol')"); scratch.file("test/skylark/BUILD", "subinclude('//foo:a')"); invalidatePackages(); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark")); EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); ErrorInfo errorInfo = result.getError(skyKey); String expectedMsg = "error loading package 'test/skylark': " + "Extension file not found. 
Unable to load file '//test/skylark:bad_extension.bzl': " + "file doesn't exist or isn't a file"; assertThat(errorInfo.getException()) .hasMessage(expectedMsg); } @Test public void testNonExistingSkylarkExtensionFromExtension() throws Exception { reporter.removeHandler(failFastHandler); scratch.file("test/skylark/extension.bzl", "load('/test/skylark/bad_extension', 'some_symbol')", "a = 'a'"); scratch.file("test/skylark/BUILD", "load('/test/skylark/extension', 'a')", "genrule(name = gr,", " outs = ['out.txt'],", " cmd = 'echo hello >@')"); invalidatePackages(); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark")); EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); ErrorInfo errorInfo = result.getError(skyKey); assertThat(errorInfo.getException()) .hasMessage("error loading package 'test/skylark': Extension file not found. " + "Unable to load file '//test/skylark:bad_extension.bzl': " + "file doesn't exist or isn't a file"); } @Test public void testSymlinkCycleWithSkylarkExtension() throws Exception { reporter.removeHandler(failFastHandler); Path extensionFilePath = scratch.resolve("/workspace/test/skylark/extension.bzl"); FileSystemUtils.ensureSymbolicLink(extensionFilePath, new PathFragment("extension.bzl")); scratch.file("test/skylark/BUILD", "load('/test/skylark/extension', 'a')", "genrule(name = gr,", " outs = ['out.txt'],", " cmd = 'echo hello >@')"); invalidatePackages(); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark")); EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); ErrorInfo errorInfo = result.getError(skyKey); assertEquals(skyKey, errorInfo.getRootCauseOfException()); assertThat(errorInfo.getException()) .hasMessage( "error loading package 'test/skylark': Encountered 
error while reading extension " + "file 'test/skylark/extension.bzl': Symlink cycle"); } @Test public void testIOErrorLookingForSubpackageForLabelIsHandled() throws Exception { reporter.removeHandler(failFastHandler); scratch.file("foo/BUILD", "sh_library(name = 'foo', srcs = ['bar/baz.sh'])"); Path barBuildFile = scratch.file("foo/bar/BUILD"); fs.stubStatError(barBuildFile, new IOException("nope")); SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo")); EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter); assertTrue(result.hasError()); assertContainsEvent("nope"); } @Test public void testLoadRelativePath() throws Exception { scratch.file("pkg/BUILD", "load('ext', 'a')"); scratch.file("pkg/ext.bzl", "a = 1"); validPackage(PackageValue.key(PackageIdentifier.parse("@//pkg"))); } @Test public void testLoadAbsolutePath() throws Exception { scratch.file("pkg1/BUILD"); scratch.file("pkg2/BUILD", "load('/pkg1/ext', 'a')"); scratch.file("pkg1/ext.bzl", "a = 1"); validPackage(PackageValue.key(PackageIdentifier.parse("@//pkg2"))); } @Test public void testBadWorkspaceFile() throws Exception { Path workspacePath = scratch.overwriteFile("WORKSPACE", "junk"); SkyKey skyKey = PackageValue.key(PackageIdentifier.createInMainRepo("external")); getSkyframeExecutor() .invalidate( Predicates.equalTo( FileStateValue.key( RootedPath.toRootedPath( workspacePath.getParentDirectory(), new PathFragment(workspacePath.getBaseName()))))); reporter.removeHandler(failFastHandler); EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate( getSkyframeExecutor(), skyKey, /*keepGoing=*/ false, reporter); assertFalse(result.hasError()); assertTrue(result.get(skyKey).getPackage().containsErrors()); } // Regression test for the two ugly consequences of a bug where GlobFunction incorrectly matched // dangling symlinks. 
// Regression test (part 1): a package with a warm Skyframe glob cache must still exclude
// dangling symlinks from glob results after an incremental reload.
@Test
public void testIncrementalSkyframeHybridGlobbingOnDanglingSymlink() throws Exception {
  Path packageDirPath =
      scratch.file("foo/BUILD", "exports_files(glob(['*.txt']))").getParentDirectory();
  scratch.file("foo/existing.txt");
  // "dangling.txt" points at "nope", which does not exist yet, so the symlink dangles.
  FileSystemUtils.ensureSymbolicLink(packageDirPath.getChild("dangling.txt"), "nope");

  preparePackageLoading(rootDirectory);
  SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
  PackageValue value = validPackage(skyKey);
  assertFalse(value.getPackage().containsErrors());
  assertThat(value.getPackage().getTarget("existing.txt").getName()).isEqualTo("existing.txt");
  // The dangling symlink must not have produced a target.
  try {
    value.getPackage().getTarget("dangling.txt");
    fail();
  } catch (NoSuchTargetException expected) {
  }

  // Touch only the BUILD file so the reload takes the hybrid path with a Skyframe glob cache hit.
  scratch.overwriteFile("foo/BUILD", "exports_files(glob(['*.txt'])),",
      "#some-irrelevant-comment");
  getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
      ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(), rootDirectory);
  value = validPackage(skyKey);
  assertFalse(value.getPackage().containsErrors());
  assertThat(value.getPackage().getTarget("existing.txt").getName()).isEqualTo("existing.txt");
  try {
    value.getPackage().getTarget("dangling.txt");
    fail();
  } catch (NoSuchTargetException expected) {
    // One consequence of the bug was that dangling symlinks were matched by globs evaluated by
    // Skyframe globbing, meaning there would incorrectly be corresponding targets in packages
    // that had skyframe cache hits during skyframe hybrid globbing.
  }

  // Part 2: creating the symlink's target must invalidate the package and surface the new target.
  scratch.file("foo/nope");
  getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
      ModifiedFileSet.builder().modify(new PathFragment("foo/nope")).build(), rootDirectory);
  PackageValue newValue = validPackage(skyKey);
  assertFalse(newValue.getPackage().containsErrors());
  assertThat(newValue.getPackage().getTarget("existing.txt").getName()).isEqualTo("existing.txt");
  // Another consequence of the bug is that change pruning would incorrectly cut off changes that
  // caused a dangling symlink potentially matched by a glob to come into existence.
  assertThat(newValue.getPackage().getTarget("dangling.txt").getName()).isEqualTo("dangling.txt");
  assertThat(newValue.getPackage()).isNotSameAs(value.getPackage());
}

// Regression test for Skyframe globbing incorrectly matching the package's directory path on
// 'glob(['**'], exclude_directories = 0)'. We test for this directly by triggering
// hybrid globbing (gives coverage for both legacy globbing and skyframe globbing).
@Test
public void testRecursiveGlobNeverMatchesPackageDirectory() throws Exception {
  scratch.file("foo/BUILD",
      "[sh_library(name = x + '-matched') for x in glob(['**'], exclude_directories = 0)]");
  scratch.file("foo/bar");

  preparePackageLoading(rootDirectory);
  SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
  PackageValue value = validPackage(skyKey);
  assertFalse(value.getPackage().containsErrors());
  assertThat(value.getPackage().getTarget("bar-matched").getName()).isEqualTo("bar-matched");
  // A target named "-matched" would mean the glob matched the package directory itself ('' + x).
  try {
    value.getPackage().getTarget("-matched");
    fail();
  } catch (NoSuchTargetException expected) {
  }

  // Same check after an incremental reload (hybrid globbing path).
  scratch.overwriteFile("foo/BUILD",
      "[sh_library(name = x + '-matched') for x in glob(['**'], exclude_directories = 0)]",
      "#some-irrelevant-comment");
  getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
      ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(), rootDirectory);
  value = validPackage(skyKey);
  assertFalse(value.getPackage().containsErrors());
  assertThat(value.getPackage().getTarget("bar-matched").getName()).isEqualTo("bar-matched");
  try {
    value.getPackage().getTarget("-matched");
    fail();
  } catch (NoSuchTargetException expected) {
  }
}

// Test-only filesystem that lets individual tests stub stat() results (or errors) per path, and
// make a directory unreadable immediately after its first readdir().
private static class CustomInMemoryFs extends InMemoryFileSystem {
  // Holds either a stubbed FileStatus (null = "does not exist") or an IOException to throw.
  private abstract static class FileStatusOrException {
    abstract FileStatus get() throws IOException;

    private static class ExceptionImpl extends FileStatusOrException {
      private final IOException exn;

      private ExceptionImpl(IOException exn) {
        this.exn = exn;
      }

      @Override
      FileStatus get() throws IOException {
        throw exn;
      }
    }

    private static class FileStatusImpl extends FileStatusOrException {
      @Nullable
      private final FileStatus fileStatus;

      private FileStatusImpl(@Nullable FileStatus fileStatus) {
        this.fileStatus = fileStatus;
      }

      @Override
      @Nullable
      FileStatus get() {
        return fileStatus;
      }
    }
  }

  // Paths with stubbed stat behavior; consulted before delegating to the real in-memory fs.
  private Map<Path, FileStatusOrException> stubbedStats = Maps.newHashMap();
  // Directories to mark unreadable right after they are first listed.
  private Set<Path> makeUnreadableAfterReaddir = Sets.newHashSet();

  public CustomInMemoryFs(ManualClock manualClock) {
    super(manualClock);
  }

  // Makes stat(path) return stubbedResult (pass null to simulate "no such file").
  public void stubStat(Path path, @Nullable FileStatus stubbedResult) {
    stubbedStats.put(path, new FileStatusOrException.FileStatusImpl(stubbedResult));
  }

  // Makes stat(path) throw stubbedResult.
  public void stubStatError(Path path, IOException stubbedResult) {
    stubbedStats.put(path, new FileStatusOrException.ExceptionImpl(stubbedResult));
  }

  @Override
  public FileStatus stat(Path path, boolean followSymlinks) throws IOException {
    if (stubbedStats.containsKey(path)) {
      return stubbedStats.get(path).get();
    }
    return super.stat(path, followSymlinks);
  }

  public void scheduleMakeUnreadableAfterReaddir(Path path) {
    makeUnreadableAfterReaddir.add(path);
  }

  @Override
  public Collection<Dirent> readdir(Path path, boolean followSymlinks) throws IOException {
    Collection<Dirent> result = super.readdir(path, followSymlinks);
    if (makeUnreadableAfterReaddir.contains(path)) {
      // Flip readability only after the listing succeeds, so the *next* access fails.
      path.setReadable(false);
    }
    return result;
  }
}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.plugin.password; import com.google.common.base.VerifyException; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.UncheckedExecutionException; import io.airlift.log.Logger; import io.prestosql.spi.security.AccessDeniedException; import io.prestosql.spi.security.BasicPrincipal; import io.prestosql.spi.security.PasswordAuthenticator; import javax.inject.Inject; import javax.naming.AuthenticationException; import javax.naming.NamingEnumeration; import javax.naming.NamingException; import javax.naming.directory.DirContext; import javax.naming.directory.SearchControls; import javax.naming.directory.SearchResult; import java.security.Principal; import java.util.Map; import java.util.Objects; import java.util.Optional; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Throwables.throwIfInstanceOf; import static io.prestosql.plugin.password.jndi.JndiUtils.createDirContext; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static javax.naming.Context.INITIAL_CONTEXT_FACTORY; import static javax.naming.Context.PROVIDER_URL; import static 
javax.naming.Context.SECURITY_AUTHENTICATION;
import static javax.naming.Context.SECURITY_CREDENTIALS;
import static javax.naming.Context.SECURITY_PRINCIPAL;

/**
 * {@link PasswordAuthenticator} backed by an LDAP directory.
 * <p>
 * A user is authenticated by binding to the LDAP server with a DN derived from
 * {@code userBindSearchPattern} and the supplied password. Optionally, membership in an
 * authorized group is verified by running {@code groupAuthorizationSearchPattern} under
 * {@code userBaseDistinguishedName}. Authentication results are cached for the configured
 * TTL to limit LDAP round trips.
 */
public class LdapAuthenticator
        implements PasswordAuthenticator
{
    private static final Logger log = Logger.get(LdapAuthenticator.class);

    private final String userBindSearchPattern;
    private final Optional<String> groupAuthorizationSearchPattern;
    private final Optional<String> userBaseDistinguishedName;
    private final Map<String, String> basicEnvironment;
    // NOTE(review): cache keys retain the plaintext password in memory for the TTL; confirm
    // that this trade-off (fewer LDAP round trips vs. password retention) is intended.
    private final LoadingCache<Credentials, Principal> authenticationCache;

    @Inject
    public LdapAuthenticator(LdapConfig serverConfig)
    {
        String ldapUrl = requireNonNull(serverConfig.getLdapUrl(), "ldapUrl is null");
        this.userBindSearchPattern = requireNonNull(serverConfig.getUserBindSearchPattern(), "userBindSearchPattern is null");
        this.groupAuthorizationSearchPattern = Optional.ofNullable(serverConfig.getGroupAuthorizationSearchPattern());
        this.userBaseDistinguishedName = Optional.ofNullable(serverConfig.getUserBaseDistinguishedName());
        if (groupAuthorizationSearchPattern.isPresent()) {
            // Group authorization needs a search base to run the membership query under.
            checkState(userBaseDistinguishedName.isPresent(), "Base distinguished name (DN) for user is null");
        }

        Map<String, String> environment = ImmutableMap.<String, String>builder()
                .put(INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory")
                .put(PROVIDER_URL, ldapUrl)
                .build();
        this.basicEnvironment = environment;

        this.authenticationCache = CacheBuilder.newBuilder()
                .expireAfterWrite(serverConfig.getLdapCacheTtl().toMillis(), MILLISECONDS)
                .build(CacheLoader.from(this::authenticate));
    }

    /**
     * Authenticates {@code user}/{@code password}, consulting the cache first.
     *
     * @throws AccessDeniedException if the credentials are invalid or the user is not a member
     *         of the authorized group
     */
    @Override
    public Principal createAuthenticatedPrincipal(String user, String password)
    {
        try {
            return authenticationCache.getUnchecked(new Credentials(user, password));
        }
        catch (UncheckedExecutionException e) {
            // The cache wraps loader exceptions; surface access denial to the caller unchanged.
            throwIfInstanceOf(e.getCause(), AccessDeniedException.class);
            throw e;
        }
    }

    private Principal authenticate(Credentials credentials)
    {
        return authenticate(credentials.getUser(), credentials.getPassword());
    }

    private Principal authenticate(String user, String password)
    {
        Map<String, String> environment = createEnvironment(user, password);
        DirContext context = null;
        try {
            // Binding with the user's DN and password performs the actual credential check.
            context = createDirContext(environment);
            checkForGroupMembership(user, context);

            log.debug("Authentication successful for user [%s]", user);
            return new BasicPrincipal(user);
        }
        catch (AuthenticationException e) {
            log.debug("Authentication failed for user [%s]: %s", user, e.getMessage());
            throw new AccessDeniedException("Invalid credentials");
        }
        catch (NamingException e) {
            log.debug(e, "Authentication error for user [%s]", user);
            throw new RuntimeException("Authentication error");
        }
        finally {
            if (context != null) {
                closeContext(context);
            }
        }
    }

    private Map<String, String> createEnvironment(String user, String password)
    {
        return ImmutableMap.<String, String>builder()
                .putAll(basicEnvironment)
                .put(SECURITY_AUTHENTICATION, "simple")
                .put(SECURITY_PRINCIPAL, createPrincipal(user))
                .put(SECURITY_CREDENTIALS, password)
                .build();
    }

    private String createPrincipal(String user)
    {
        return replaceUser(userBindSearchPattern, user);
    }

    /**
     * Verifies that {@code user} matches the configured group authorization search, if any.
     *
     * @throws AccessDeniedException if the search returns no results
     */
    private void checkForGroupMembership(String user, DirContext context)
    {
        if (!groupAuthorizationSearchPattern.isPresent()) {
            return;
        }

        // Constructor invariant: the base DN is always present when a group pattern is set.
        String userBase = userBaseDistinguishedName.orElseThrow(VerifyException::new);
        String searchFilter = replaceUser(groupAuthorizationSearchPattern.get(), user);
        SearchControls searchControls = new SearchControls();
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);

        boolean authorized;
        try {
            NamingEnumeration<SearchResult> search = context.search(userBase, searchFilter, searchControls);
            try {
                authorized = search.hasMoreElements();
            }
            finally {
                // Close in finally so the enumeration is released even if reading it fails
                // (previously it leaked on that path).
                search.close();
            }
        }
        catch (NamingException e) {
            log.debug("Authentication error for user [%s]: %s", user, e.getMessage());
            throw new RuntimeException("Authentication error");
        }

        if (!authorized) {
            String message = format("User [%s] not a member of the authorized group", user);
            log.debug(message);
            throw new AccessDeniedException(message);
        }
    }

    private static String replaceUser(String pattern, String user)
    {
        // Literal replacement of every "${USER}" occurrence. The previous regex-based
        // replaceAll() treated '$' and '\' in the user name as replacement-string syntax,
        // which could corrupt the bind DN / search filter or throw for such user names.
        return pattern.replace("${USER}", user);
    }

    private static void closeContext(DirContext context)
    {
        try {
            context.close();
        }
        catch (NamingException ignored) {
            // Best-effort close; nothing useful to do on failure.
        }
    }

    /**
     * Cache key pairing a user name with the password presented for it.
     */
    private static class Credentials
    {
        private final String user;
        private final String password;

        private Credentials(String user, String password)
        {
            this.user = requireNonNull(user);
            this.password = requireNonNull(password);
        }

        public String getUser()
        {
            return user;
        }

        public String getPassword()
        {
            return password;
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Credentials that = (Credentials) o;
            return Objects.equals(this.user, that.user) &&
                    Objects.equals(this.password, that.password);
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(user, password);
        }

        @Override
        public String toString()
        {
            // Deliberately omits the password: toString() output can end up in logs and
            // debuggers and must never expose credentials (the original printed it).
            return toStringHelper(this)
                    .add("user", user)
                    .toString();
        }
    }
}
package org.openestate.is24.restapi.xml.offerlistelement;

import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Calendar;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.jvnet.jaxb2_commons.lang.CopyStrategy2;
import org.jvnet.jaxb2_commons.lang.CopyTo2;
import org.jvnet.jaxb2_commons.lang.Equals2;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy2;
import org.jvnet.jaxb2_commons.lang.JAXBCopyStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString2;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy2;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
import org.openestate.is24.restapi.xml.Adapter1;
import org.openestate.is24.restapi.xml.Adapter3;
import org.openestate.is24.restapi.xml.common.CourtageInfo;
import org.openestate.is24.restapi.xml.common.Price;


/**
 * Elements for flat-share (WG) rooms.
 *
 * <p>Java class for OfferFlatShareRoom complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="OfferFlatShareRoom"&gt;
 *   &lt;complexContent&gt;
 *     &lt;extension base="{http://rest.immobilienscout24.de/schema/offer/listelement/1.0}OfferRealEstateForList"&gt;
 *       &lt;group ref="{http://rest.immobilienscout24.de/schema/common/1.0}BaseFlatShareRoomGroup"/&gt;
 *     &lt;/extension&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 *
 */
// NOTE: machine-generated by the JAXB XJC compiler (see @Generated below).
// Do not hand-edit the logic — regenerate from the XML schema instead.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "OfferFlatShareRoom", propOrder = {
    "price",
    "roomSize",
    "startRentalDate",
    "courtage"
})
@Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
public class OfferFlatShareRoom
    extends OfferRealEstateForList
    implements Serializable, Cloneable, CopyTo2, Equals2, ToString2
{

    // Rental price of the room (required by the schema).
    @XmlElement(required = true)
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    protected Price price;
    // Room size in square meters; marshalled as xsd:double via Adapter1.
    @XmlElement(required = true, type = String.class)
    @XmlJavaTypeAdapter(Adapter1 .class)
    @XmlSchemaType(name = "double")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    protected BigDecimal roomSize;
    // First possible rental date; marshalled as xsd:date via Adapter3.
    @XmlElement(required = true, type = String.class)
    @XmlJavaTypeAdapter(Adapter3 .class)
    @XmlSchemaType(name = "date")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    protected Calendar startRentalDate;
    // Broker commission information (required by the schema).
    @XmlElement(required = true)
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    protected CourtageInfo courtage;

    /**
     * Gets the value of the price property.
     *
     * @return
     *     possible object is
     *     {@link Price }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public Price getPrice() {
        return price;
    }

    /**
     * Sets the value of the price property.
     *
     * @param value
     *     allowed object is
     *     {@link Price }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public void setPrice(Price value) {
        this.price = value;
    }

    /**
     * Gets the value of the roomSize property.
     *
     * @return
     *     possible object is
     *     {@link BigDecimal }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public BigDecimal getRoomSize() {
        return roomSize;
    }

    /**
     * Sets the value of the roomSize property.
     *
     * @param value
     *     allowed object is
     *     {@link BigDecimal }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public void setRoomSize(BigDecimal value) {
        this.roomSize = value;
    }

    /**
     * Gets the value of the startRentalDate property.
     *
     * @return
     *     possible object is
     *     {@link Calendar }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public Calendar getStartRentalDate() {
        return startRentalDate;
    }

    /**
     * Sets the value of the startRentalDate property.
     *
     * @param value
     *     allowed object is
     *     {@link Calendar }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public void setStartRentalDate(Calendar value) {
        this.startRentalDate = value;
    }

    /**
     * Gets the value of the courtage property.
     *
     * @return
     *     possible object is
     *     {@link CourtageInfo }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public CourtageInfo getCourtage() {
        return courtage;
    }

    /**
     * Sets the value of the courtage property.
     *
     * @param value
     *     allowed object is
     *     {@link CourtageInfo }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public void setCourtage(CourtageInfo value) {
        this.courtage = value;
    }

    // Renders this object (including inherited fields) via the JAXB2 toString strategy.
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public String toString() {
        final ToStringStrategy2 strategy = JAXBToStringStrategy.INSTANCE2;
        final StringBuilder buffer = new StringBuilder();
        append(null, buffer, strategy);
        return buffer.toString();
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        strategy.appendStart(locator, this, buffer);
        appendFields(locator, buffer, strategy);
        strategy.appendEnd(locator, this, buffer);
        return buffer;
    }

    // Appends each local field after the superclass fields; the last argument
    // tells the strategy whether the field was actually set (non-null).
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        super.appendFields(locator, buffer, strategy);
        {
            Price thePrice;
            thePrice = this.getPrice();
            strategy.appendField(locator, this, "price", buffer, thePrice, (this.price!= null));
        }
        {
            BigDecimal theRoomSize;
            theRoomSize = this.getRoomSize();
            strategy.appendField(locator, this, "roomSize", buffer, theRoomSize, (this.roomSize!= null));
        }
        {
            Calendar theStartRentalDate;
            theStartRentalDate = this.getStartRentalDate();
            strategy.appendField(locator, this, "startRentalDate", buffer, theStartRentalDate, (this.startRentalDate!= null));
        }
        {
            CourtageInfo theCourtage;
            theCourtage = this.getCourtage();
            strategy.appendField(locator, this, "courtage", buffer, theCourtage, (this.courtage!= null));
        }
        return buffer;
    }

    // Deep copy via the JAXB2 copy strategy (clone() delegates to copyTo()).
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public Object clone() {
        return copyTo(createNewInstance());
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(Object target) {
        final CopyStrategy2 strategy = JAXBCopyStrategy.INSTANCE2;
        return copyTo(null, target, strategy);
    }

    // Tri-state per field: TRUE -> copy the value, FALSE -> null it out on the
    // target, null -> leave the target's value untouched.
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(ObjectLocator locator, Object target, CopyStrategy2 strategy) {
        final Object draftCopy = ((target == null)?createNewInstance():target);
        super.copyTo(locator, draftCopy, strategy);
        if (draftCopy instanceof OfferFlatShareRoom) {
            final OfferFlatShareRoom copy = ((OfferFlatShareRoom) draftCopy);
            {
                Boolean priceShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.price!= null));
                if (priceShouldBeCopiedAndSet == Boolean.TRUE) {
                    Price sourcePrice;
                    sourcePrice = this.getPrice();
                    Price copyPrice = ((Price) strategy.copy(LocatorUtils.property(locator, "price", sourcePrice), sourcePrice, (this.price!= null)));
                    copy.setPrice(copyPrice);
                } else {
                    if (priceShouldBeCopiedAndSet == Boolean.FALSE) {
                        copy.price = null;
                    }
                }
            }
            {
                Boolean roomSizeShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.roomSize!= null));
                if (roomSizeShouldBeCopiedAndSet == Boolean.TRUE) {
                    BigDecimal sourceRoomSize;
                    sourceRoomSize = this.getRoomSize();
                    BigDecimal copyRoomSize = ((BigDecimal) strategy.copy(LocatorUtils.property(locator, "roomSize", sourceRoomSize), sourceRoomSize, (this.roomSize!= null)));
                    copy.setRoomSize(copyRoomSize);
                } else {
                    if (roomSizeShouldBeCopiedAndSet == Boolean.FALSE) {
                        copy.roomSize = null;
                    }
                }
            }
            {
                Boolean startRentalDateShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.startRentalDate!= null));
                if (startRentalDateShouldBeCopiedAndSet == Boolean.TRUE) {
                    Calendar sourceStartRentalDate;
                    sourceStartRentalDate = this.getStartRentalDate();
                    Calendar copyStartRentalDate = ((Calendar) strategy.copy(LocatorUtils.property(locator, "startRentalDate", sourceStartRentalDate), sourceStartRentalDate, (this.startRentalDate!= null)));
                    copy.setStartRentalDate(copyStartRentalDate);
                } else {
                    if (startRentalDateShouldBeCopiedAndSet == Boolean.FALSE) {
                        copy.startRentalDate = null;
                    }
                }
            }
            {
                Boolean courtageShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.courtage!= null));
                if (courtageShouldBeCopiedAndSet == Boolean.TRUE) {
                    CourtageInfo sourceCourtage;
                    sourceCourtage = this.getCourtage();
                    CourtageInfo copyCourtage = ((CourtageInfo) strategy.copy(LocatorUtils.property(locator, "courtage", sourceCourtage), sourceCourtage, (this.courtage!= null)));
                    copy.setCourtage(copyCourtage);
                } else {
                    if (courtageShouldBeCopiedAndSet == Boolean.FALSE) {
                        copy.courtage = null;
                    }
                }
            }
        }
        return draftCopy;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public Object createNewInstance() {
        return new OfferFlatShareRoom();
    }

    // Strict equality: requires the exact same runtime class, then compares the
    // superclass state followed by each local field via the strategy.
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy2 strategy) {
        if ((object == null)||(this.getClass()!= object.getClass())) {
            return false;
        }
        if (this == object) {
            return true;
        }
        if (!super.equals(thisLocator, thatLocator, object, strategy)) {
            return false;
        }
        final OfferFlatShareRoom that = ((OfferFlatShareRoom) object);
        {
            Price lhsPrice;
            lhsPrice = this.getPrice();
            Price rhsPrice;
            rhsPrice = that.getPrice();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "price", lhsPrice), LocatorUtils.property(thatLocator, "price", rhsPrice), lhsPrice, rhsPrice, (this.price!= null), (that.price!= null))) {
                return false;
            }
        }
        {
            BigDecimal lhsRoomSize;
            lhsRoomSize = this.getRoomSize();
            BigDecimal rhsRoomSize;
            rhsRoomSize = that.getRoomSize();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "roomSize", lhsRoomSize), LocatorUtils.property(thatLocator, "roomSize", rhsRoomSize), lhsRoomSize, rhsRoomSize, (this.roomSize!= null), (that.roomSize!= null))) {
                return false;
            }
        }
        {
            Calendar lhsStartRentalDate;
            lhsStartRentalDate = this.getStartRentalDate();
            Calendar rhsStartRentalDate;
            rhsStartRentalDate = that.getStartRentalDate();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "startRentalDate", lhsStartRentalDate), LocatorUtils.property(thatLocator, "startRentalDate", rhsStartRentalDate), lhsStartRentalDate, rhsStartRentalDate, (this.startRentalDate!= null), (that.startRentalDate!= null))) {
                return false;
            }
        }
        {
            CourtageInfo lhsCourtage;
            lhsCourtage = this.getCourtage();
            CourtageInfo rhsCourtage;
            rhsCourtage = that.getCourtage();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "courtage", lhsCourtage), LocatorUtils.property(thatLocator, "courtage", rhsCourtage), lhsCourtage, rhsCourtage, (this.courtage!= null), (that.courtage!= null))) {
                return false;
            }
        }
        return true;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(Object object) {
        final EqualsStrategy2 strategy = JAXBEqualsStrategy.INSTANCE2;
        return equals(null, null, object, strategy);
    }

}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2014 The Voxel Plugineering Team
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.voxelplugineering.voxelsniper.bukkit.world;

import static com.google.common.base.Preconditions.checkNotNull;

import java.util.List;
import java.util.Map;

import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
import com.voxelplugineering.voxelsniper.api.entity.Entity;
import com.voxelplugineering.voxelsniper.api.registry.MaterialRegistry;
import com.voxelplugineering.voxelsniper.api.world.Block;
import com.voxelplugineering.voxelsniper.api.world.Chunk;
import com.voxelplugineering.voxelsniper.api.world.biome.Biome;
import com.voxelplugineering.voxelsniper.api.world.material.Material;
import com.voxelplugineering.voxelsniper.bukkit.entity.BukkitEntity;
import com.voxelplugineering.voxelsniper.bukkit.world.biome.BukkitBiome;
import com.voxelplugineering.voxelsniper.bukkit.world.material.BukkitMaterial;
import com.voxelplugineering.voxelsniper.core.Gunsmith;
import com.voxelplugineering.voxelsniper.core.util.math.Vector3i;
import com.voxelplugineering.voxelsniper.core.world.AbstractWorld;
import com.voxelplugineering.voxelsniper.core.world.CommonBlock;
import com.voxelplugineering.voxelsniper.core.world.CommonLocation;

/**
 * A wrapper for bukkit's {@link org.bukkit.World}s.
 */
public class BukkitWorld extends AbstractWorld<org.bukkit.World>
{

    // Registry used to translate bukkit material names into API materials.
    private final MaterialRegistry<org.bukkit.Material> materials;
    // Weak-keyed caches so wrappers are dropped when bukkit unloads the underlying object.
    private final Map<org.bukkit.Chunk, Chunk> chunks;
    private final Map<org.bukkit.entity.Entity, Entity> entitiesCache;
    // The server's main thread; off-thread access is restricted to already-loaded chunks.
    private final Thread worldThread;

    /**
     * Creates a new {@link BukkitWorld}.
     *
     * @param world the world
     * @param materialRegistry the registry
     * @param thread The world Thread
     */
    public BukkitWorld(org.bukkit.World world, MaterialRegistry<org.bukkit.Material> materialRegistry, Thread thread)
    {
        super(world);
        this.materials = checkNotNull(materialRegistry);
        this.chunks = new MapMaker().weakKeys().makeMap();
        this.entitiesCache = new MapMaker().weakKeys().makeMap();
        this.worldThread = thread;
    }

    @Override
    public String getName()
    {
        return getThis().getName();
    }

    /**
     * Returns the (possibly cached) wrapper for the chunk at the given chunk
     * coordinates, or absent when called off the world thread for an unloaded
     * chunk, or when bukkit yields no chunk.
     */
    @Override
    public Optional<Chunk> getChunk(int x, int y, int z)
    {
        if (!checkAsyncChunkAccess(x, y, z))
        {
            return Optional.absent();
        }
        org.bukkit.Chunk chunk = getThis().getChunkAt(x, z);
        if (chunk == null)
        {
            return Optional.absent();
        }
        if (this.chunks.containsKey(chunk))
        {
            return Optional.of(this.chunks.get(chunk));
        }
        BukkitChunk newChunk = new BukkitChunk(chunk, this);
        this.chunks.put(chunk, newChunk);
        return Optional.<Chunk>of(newChunk);
    }

    // Off the world thread, only already-loaded chunks may be touched; forcing a
    // chunk load from another thread is unsafe in bukkit.
    private boolean checkAsyncChunkAccess(int x, int y, int z)
    {
        if (Thread.currentThread() != this.worldThread)
        {
            if (!getThis().isChunkLoaded(x, z))
            {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns the block at the given world coordinates, or absent when the
     * chunk is not loaded (off-thread) or the material is unknown to the
     * registry.
     */
    @Override
    public Optional<Block> getBlock(int x, int y, int z)
    {
        if (!checkAsyncBlockAccess(x, y, z))
        {
            return Optional.absent();
        }
        org.bukkit.block.Block b = getThis().getBlockAt(x, y, z);
        CommonLocation l = new CommonLocation(this, b.getX(), b.getY(), b.getZ());
        Optional<Material> m = this.materials.getMaterial(b.getType().name());
        if (!m.isPresent())
        {
            return Optional.absent();
        }
        return Optional.<Block>of(new CommonBlock(l, m.get()));
    }

    // Converts block coordinates to chunk coordinates and checks the chunk is
    // loaded before any off-thread access.
    private boolean checkAsyncBlockAccess(int x, int y, int z)
    {
        if (Thread.currentThread() != this.worldThread)
        {
            // Math.floorDiv rounds toward negative infinity, which is the
            // correct block->chunk mapping for negative coordinates. The
            // previous `x / size - 1` form was off by one for exact negative
            // multiples of the chunk size (e.g. x = -16 mapped to chunk -2
            // instead of -1), and the positive branch hard-coded 16 instead of
            // using the declared chunk size.
            int cx = Math.floorDiv(x, getChunkSize().getX());
            int cz = Math.floorDiv(z, getChunkSize().getZ());
            if (!getThis().isChunkLoaded(cx, cz))
            {
                return false;
            }
        }
        return true;
    }

    /**
     * Sets the block at the given coordinates; out-of-range y values and
     * non-bukkit materials are silently ignored.
     */
    @Override
    public void setBlock(Material material, int x, int y, int z)
    {
        checkNotNull(material);
        // NOTE(review): 256 assumes the vanilla world height — confirm against
        // getThis().getMaxHeight() for custom worlds.
        if (y < 0 || y >= 256)
        {
            return;
        }
        if (material instanceof BukkitMaterial)
        {
            BukkitMaterial bukkitMaterial = (BukkitMaterial) material;
            getThis().getBlockAt(x, y, z).setType(bukkitMaterial.getThis());
        }
    }

    @Override
    public Optional<Biome> getBiome(int x, int y, int z)
    {
        // Bukkit biomes are column-based; y is ignored.
        org.bukkit.block.Biome biome = getThis().getBiome(x, z);
        return Gunsmith.getBiomeRegistry().getBiome(biome.name());
    }

    @Override
    public void setBiome(Biome biome, int x, int y, int z)
    {
        checkNotNull(biome);
        if (biome instanceof BukkitBiome)
        {
            BukkitBiome bukkitBiome = (BukkitBiome) biome;
            getThis().setBiome(x, z, bukkitBiome.getThis());
        }
    }

    @Override
    public MaterialRegistry<?> getMaterialRegistry()
    {
        return this.materials;
    }

    /**
     * Returns wrappers for all entities currently loaded in the world,
     * reusing cached wrappers where possible.
     */
    @Override
    public Iterable<Entity> getLoadedEntities()
    {
        List<Entity> entities = Lists.newArrayList();
        for (org.bukkit.entity.Entity e : getThis().getEntities())
        {
            if (this.entitiesCache.containsKey(e))
            {
                entities.add(this.entitiesCache.get(e));
            } else
            {
                Entity ent = new BukkitEntity(e);
                this.entitiesCache.put(e, ent);
                entities.add(ent);
            }
        }
        return entities;
    }

    @Override
    public Vector3i getChunkSize()
    {
        return BukkitChunk.CHUNK_SIZE;
    }

    /**
     * Gets a live copy of the entities cache.
     *
     * @return The entities cache
     */
    protected Map<org.bukkit.entity.Entity, Entity> getEntityCache()
    {
        return this.entitiesCache;
    }

}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer;

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import io.druid.common.utils.UUIDUtils;
import io.druid.java.util.common.StringUtils;
import junit.framework.Assert;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Tests {@link JobHelper}'s classpath-upload helpers against an in-process
 * MiniDFSCluster: snapshot vs. non-snapshot jar handling and concurrent
 * uploads of the same jar.
 */
public class HdfsClasspathSetupTest
{
    private static MiniDFSCluster miniCluster;
    private static File hdfsTmpDir;
    private static Configuration conf;
    private static String dummyJarString = "This is a test jar file.";
    private File dummyJarFile;
    private Path finalClasspath;
    private Path intermediatePath;

    @Rule
    public final TemporaryFolder tempFolder = new TemporaryFolder();

    @BeforeClass
    public static void setupStatic() throws IOException, ClassNotFoundException
    {
        // Mini DFS needs a dedicated local directory; create-then-delete gives a
        // unique path without leaving the placeholder file behind.
        hdfsTmpDir = File.createTempFile("hdfsClasspathSetupTest", "dir");
        if (!hdfsTmpDir.delete()) {
            throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
        }
        conf = new Configuration(true);
        conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath());
        miniCluster = new MiniDFSCluster.Builder(conf).build();
    }

    @Before
    public void setUp() throws IOException
    {
        // intermediatePath and finalClasspath are relative to hdfsTmpDir directory.
        // The literals were previously swapped (intermediatePath pointed at
        // /tmp/classpath and vice versa); they now match the field names so
        // cluster paths are self-describing when debugging test failures.
        intermediatePath = new Path(String.format("/tmp/intermediate/%s", UUIDUtils.generateUuid()));
        finalClasspath = new Path(String.format("/tmp/classpath/%s", UUIDUtils.generateUuid()));
        dummyJarFile = tempFolder.newFile("dummy-test.jar");
        Files.copy(
            new ByteArrayInputStream(StringUtils.toUtf8(dummyJarString)),
            dummyJarFile.toPath(),
            StandardCopyOption.REPLACE_EXISTING
        );
    }

    @AfterClass
    public static void tearDownStatic() throws IOException
    {
        if (miniCluster != null) {
            miniCluster.shutdown(true);
        }
        FileUtils.deleteDirectory(hdfsTmpDir);
    }

    @After
    public void tearDown() throws IOException
    {
        dummyJarFile.delete();
        Assert.assertFalse(dummyJarFile.exists());
        miniCluster.getFileSystem().delete(finalClasspath, true);
        Assert.assertFalse(miniCluster.getFileSystem().exists(finalClasspath));
        miniCluster.getFileSystem().delete(intermediatePath, true);
        Assert.assertFalse(miniCluster.getFileSystem().exists(intermediatePath));
    }

    @Test
    public void testAddSnapshotJarToClasspath() throws IOException
    {
        Job job = Job.getInstance(conf, "test-job");
        DistributedFileSystem fs = miniCluster.getFileSystem();
        Path intermediatePath = new Path("/tmp/classpath");
        JobHelper.addSnapshotJarToClassPath(dummyJarFile, intermediatePath, fs, job);
        Path expectedJarPath = new Path(intermediatePath, dummyJarFile.getName());
        // check file gets uploaded to HDFS
        Assert.assertTrue(fs.exists(expectedJarPath));
        // check file gets added to the classpath
        Assert.assertEquals(expectedJarPath.toString(), job.getConfiguration().get(MRJobConfig.CLASSPATH_FILES));
        Assert.assertEquals(dummyJarString, StringUtils.fromUtf8(IOUtils.toByteArray(fs.open(expectedJarPath))));
    }

    @Test
    public void testAddNonSnapshotJarToClasspath() throws IOException
    {
        Job job = Job.getInstance(conf, "test-job");
        DistributedFileSystem fs = miniCluster.getFileSystem();
        JobHelper.addJarToClassPath(dummyJarFile, finalClasspath, intermediatePath, fs, job);
        Path expectedJarPath = new Path(finalClasspath, dummyJarFile.getName());
        // check file gets uploaded to final HDFS path
        Assert.assertTrue(fs.exists(expectedJarPath));
        // check that the intermediate file gets deleted
        Assert.assertFalse(fs.exists(new Path(intermediatePath, dummyJarFile.getName())));
        // check file gets added to the classpath
        Assert.assertEquals(expectedJarPath.toString(), job.getConfiguration().get(MRJobConfig.CLASSPATH_FILES));
        Assert.assertEquals(dummyJarString, StringUtils.fromUtf8(IOUtils.toByteArray(fs.open(expectedJarPath))));
    }

    @Test
    public void testIsSnapshot()
    {
        Assert.assertTrue(JobHelper.isSnapshot(new File("test-SNAPSHOT.jar")));
        Assert.assertTrue(JobHelper.isSnapshot(new File("test-SNAPSHOT-selfcontained.jar")));
        Assert.assertFalse(JobHelper.isSnapshot(new File("test.jar")));
        Assert.assertFalse(JobHelper.isSnapshot(new File("test-selfcontained.jar")));
        Assert.assertFalse(JobHelper.isSnapshot(new File("iAmNotSNAPSHOT.jar")));
        Assert.assertFalse(JobHelper.isSnapshot(new File("iAmNotSNAPSHOT-selfcontained.jar")));
    }

    @Test
    public void testConcurrentUpload() throws IOException, InterruptedException, ExecutionException, TimeoutException
    {
        final int concurrency = 10;
        ListeningExecutorService pool = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(concurrency));
        // barrier ensures that all jobs try to add files to classpath at same time.
        final CyclicBarrier barrier = new CyclicBarrier(concurrency);
        final DistributedFileSystem fs = miniCluster.getFileSystem();
        final Path expectedJarPath = new Path(finalClasspath, dummyJarFile.getName());
        List<ListenableFuture<Boolean>> futures = new ArrayList<>();

        for (int i = 0; i < concurrency; i++) {
            futures.add(
                pool.submit(
                    // Typed Callable<Boolean> (was a raw Callable, which made the
                    // add() above an unchecked assignment).
                    new Callable<Boolean>()
                    {
                        @Override
                        public Boolean call() throws Exception
                        {
                            int id = barrier.await();
                            Job job = Job.getInstance(conf, "test-job-" + id);
                            Path intermediatePathForJob = new Path(intermediatePath, "job-" + id);
                            JobHelper.addJarToClassPath(dummyJarFile, finalClasspath, intermediatePathForJob, fs, job);
                            // check file gets uploaded to final HDFS path
                            Assert.assertTrue(fs.exists(expectedJarPath));
                            // check that the intermediate file is not present
                            Assert.assertFalse(fs.exists(new Path(intermediatePathForJob, dummyJarFile.getName())));
                            // check file gets added to the classpath
                            Assert.assertEquals(
                                expectedJarPath.toString(),
                                job.getConfiguration().get(MRJobConfig.CLASSPATH_FILES)
                            );
                            return true;
                        }
                    }
                )
            );
        }

        Futures.allAsList(futures).get(30, TimeUnit.SECONDS);
        pool.shutdownNow();
    }
}
package uk.ac.ebi.spot.ols.neo4j.model;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.neo4j.graphdb.Direction;
import org.springframework.data.annotation.TypeAlias;
import org.springframework.data.neo4j.annotation.*;
import org.springframework.data.neo4j.fieldaccess.DynamicProperties;
import org.springframework.data.neo4j.fieldaccess.DynamicPropertiesContainer;

import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import static uk.ac.ebi.spot.ols.neo4j.model.Neo4JNodePropertyNameConstants.*;

/**
 * Neo4j node entity for an ontology property (e.g. an OWL object/annotation property).
 * Labels, synonyms, descriptions and arbitrary annotations may carry per-language
 * variants stored in {@link DynamicProperties} containers; the {@code *ByLang}
 * accessors fall back to the default (English) values when no localized value exists.
 *
 * @author Simon Jupp
 * @date 17/08/2015
 * Samples, Phenotypes and Ontologies Team, EMBL-EBI
 */
@NodeEntity
@TypeAlias(value = "Property")
public class Property {

    @GraphId
    @JsonIgnore
    Long id;

    @JsonIgnore
    private String olsId;

    @GraphProperty(propertyName = "iri")
    @JsonProperty(value = "iri")
    private String iri;

    // Default-language (English) label; localized variants live in localizedLabels.
    @GraphProperty(propertyName = "label")
    private String label;

    @GraphProperty(propertyName = "synonym")
    private Set<String> synonym;

    @GraphProperty(propertyName = "description")
    private Set<String> description;

    // Dynamic containers keyed by language code — assumed to map lang -> String[] values.
    @GraphProperty(propertyName = LOCALIZED_LABELS)
    @JsonProperty(value = LOCALIZED_LABELS)
    private DynamicProperties localizedLabels = new DynamicPropertiesContainer();

    @GraphProperty(propertyName = LOCALIZED_SYNONYMS)
    @JsonProperty(value = LOCALIZED_SYNONYMS)
    private DynamicProperties localizedSynonyms = new DynamicPropertiesContainer();

    @GraphProperty(propertyName = LOCALIZED_DESCRIPTIONS)
    @JsonProperty(value = LOCALIZED_DESCRIPTIONS)
    private DynamicProperties localizedDescriptions = new DynamicPropertiesContainer();

    @GraphProperty(propertyName = "ontology_name")
    @JsonProperty(value = "ontology_name")
    private String ontologyName;

    @GraphProperty(propertyName = "ontology_prefix")
    @JsonProperty(value = "ontology_prefix")
    private String ontologyPrefix;

    @GraphProperty(propertyName = "ontology_iri")
    @JsonProperty(value = "ontology_iri")
    private String ontologyIri;

    @GraphProperty(propertyName = "is_obsolete")
    @JsonProperty(value = "is_obsolete")
    private boolean isObsolete;

    @GraphProperty(propertyName = "is_defining_ontology")
    @JsonProperty(value = "is_defining_ontology")
    private boolean isLocal;

    @GraphProperty(propertyName = "has_children")
    @JsonProperty(value = "has_children")
    private boolean hasChildren;

    @GraphProperty(propertyName = "is_root")
    @JsonProperty(value = "is_root")
    private boolean isRoot;

    @GraphProperty(propertyName = "short_form")
    @JsonProperty(value = "short_form")
    private String shortForm;

    @GraphProperty(propertyName = "obo_id")
    @JsonProperty(value = "obo_id")
    private String oboId;

    // Default-language annotations plus their localized counterpart (see getAnnotationByLang).
    private DynamicProperties annotation = new DynamicPropertiesContainer();

    private DynamicProperties localizedAnnotation = new DynamicPropertiesContainer();

    @RelatedTo(type = "SUBPROPERTYOF", direction = Direction.OUTGOING)
    @Fetch
    Set<Property> parent;

    public String getIri() {
        return iri;
    }

    /**
     * Descriptions for the given language, falling back to the default descriptions,
     * then to an empty array. Never returns null.
     */
    public String[] getDescriptionsByLang(String lang) {
        String[] localized = (String[]) this.localizedDescriptions.getProperty(lang);
        if (localized != null && localized.length > 0) {
            return localized;
        }
        if (description != null) {
            return description.toArray(new String[0]);
        }
        return new String[0];
    }

    /**
     * First label for the given language, or null when no label exists in any language.
     */
    public String getLabelByLang(String lang) {
        String[] labels = getLabelsByLang(lang);
        // Guard: the previous implementation indexed [0] unconditionally and threw
        // ArrayIndexOutOfBoundsException for a property with no label at all.
        return labels.length > 0 ? labels[0] : null;
    }

    /**
     * Labels for the given language, falling back to the default label, then to an
     * empty array. Never returns null.
     */
    public String[] getLabelsByLang(String lang) {
        String[] localized = (String[]) this.localizedLabels.getProperty(lang);
        if (localized != null && localized.length > 0) {
            return localized;
        }
        if (label != null) {
            return new String[]{label};
        }
        return new String[0];
    }

    /**
     * Synonyms for the given language, falling back to the default synonyms, then to
     * an empty array. Never returns null.
     */
    public String[] getSynonymsByLang(String lang) {
        String[] localized = (String[]) this.localizedSynonyms.getProperty(lang);
        if (localized != null) {
            return localized;
        }
        if (synonym != null) {
            return synonym.toArray(new String[0]);
        }
        return new String[0];
    }

    @JsonProperty(value = "ontology_name")
    public String getOntologyName() {
        return ontologyName;
    }

    public String getOntologyPrefix() {
        return ontologyPrefix;
    }

    public String getOntologyIri() {
        return ontologyIri;
    }

    @JsonProperty(value = "is_obsolete")
    public boolean isObsolete() {
        return isObsolete;
    }

    @JsonProperty(value = "is_defining_ontology")
    public boolean isLocal() {
        return isLocal;
    }

    @JsonProperty(value = "has_children")
    public boolean hasChildren() {
        return hasChildren;
    }

    @JsonProperty(value = "is_root")
    public boolean isRoot() {
        return isRoot;
    }

    public String getShortForm() {
        return shortForm;
    }

    public String getOboId() {
        return oboId;
    }

    /**
     * Annotations applicable to the given language. English (and en-* variants) gets
     * the default, un-prefixed annotations; localized annotations are merged in by
     * stripping their language prefix from the key.
     */
    public Map<String, Object> getAnnotationByLang(String lang) {
        Map<String, Object> localizedAnnotations = localizedAnnotation.asMap();
        Map<String, Object> res = new TreeMap<>();

        if ((lang.equals("en") || lang.startsWith("en-")) && annotation != null) {
            res.putAll(annotation.asMap());
        }

        // Localized annotation keys are assumed to be "<lang>-<annotationName>" —
        // TODO confirm against the loader. BUGFIX: the previous code compared a
        // substring of `lang` against `lang` itself (never equal), so localized
        // annotations were silently never returned.
        String prefix = lang + "-";
        for (String k : localizedAnnotations.keySet()) {
            if (k.regionMatches(true, 0, prefix, 0, prefix.length())) {
                res.put(k.substring(prefix.length()), localizedAnnotations.get(k));
            }
        }
        return res;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.processor.internals;

import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.processor.StateRestoreCallback;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.state.internals.ThreadCache;

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * Base implementation of {@link InternalProcessorContext} holding the task-scoped
 * configuration, metrics, state manager and record cache, plus the per-record
 * context ({@link ProcessorRecordContext}) that is set while a record is processed.
 * Record-scoped accessors ({@code topic()}, {@code partition()}, ...) throw
 * {@link IllegalStateException} when called outside record processing.
 */
public abstract class AbstractProcessorContext implements InternalProcessorContext {

    static final String NONEXIST_TOPIC = "__null_topic__";
    private final TaskId taskId;
    private final String applicationId;
    private final StreamsConfig config;
    private final StreamsMetricsImpl metrics;
    private final Serde<?> keySerde;
    private final ThreadCache cache;
    private final Serde<?> valueSerde;
    // Flips to true once initialization completes; state stores may only be
    // registered before that point.
    private boolean initialized;
    protected ProcessorRecordContext recordContext;
    protected ProcessorNode currentNode;
    final StateManager stateManager;

    public AbstractProcessorContext(final TaskId taskId,
                                    final StreamsConfig config,
                                    final StreamsMetricsImpl metrics,
                                    final StateManager stateManager,
                                    final ThreadCache cache) {
        this.taskId = taskId;
        this.applicationId = config.getString(StreamsConfig.APPLICATION_ID_CONFIG);
        this.config = config;
        this.metrics = metrics;
        this.stateManager = stateManager;
        valueSerde = config.defaultValueSerde();
        keySerde = config.defaultKeySerde();
        this.cache = cache;
    }

    @Override
    public String applicationId() {
        return applicationId;
    }

    @Override
    public TaskId taskId() {
        return taskId;
    }

    @Override
    public Serde<?> keySerde() {
        return keySerde;
    }

    @Override
    public Serde<?> valueSerde() {
        return valueSerde;
    }

    @Override
    public File stateDir() {
        return stateManager.baseDir();
    }

    @Override
    public StreamsMetricsImpl metrics() {
        return metrics;
    }

    /**
     * Registers a state store with the state manager.
     *
     * @throws IllegalStateException if called after initialization
     * @throws NullPointerException if {@code store} is null
     */
    @Override
    public void register(final StateStore store,
                         final StateRestoreCallback stateRestoreCallback) {
        if (initialized) {
            throw new IllegalStateException("Can only create state stores during initialization.");
        }
        Objects.requireNonNull(store, "store must not be null");
        stateManager.register(store, stateRestoreCallback);
    }

    /**
     * Ensures a record is currently being processed.
     *
     * @param methodName the caller's name, used in the exception message
     * @throws IllegalStateException if no record context is set
     */
    private void ensureRecordContext(final String methodName) {
        if (recordContext == null) {
            throw new IllegalStateException("This should not happen as " + methodName
                + " should only be called while a record is processed");
        }
    }

    /**
     * @return the current record's topic, or {@code null} for the internal sentinel topic
     * @throws IllegalStateException if the task's record is null
     */
    @Override
    public String topic() {
        ensureRecordContext("topic()");
        final String topic = recordContext.topic();
        if (topic.equals(NONEXIST_TOPIC)) {
            return null;
        }
        return topic;
    }

    /**
     * @throws IllegalStateException if partition is null
     */
    @Override
    public int partition() {
        ensureRecordContext("partition()");
        return recordContext.partition();
    }

    /**
     * @throws IllegalStateException if offset is null
     */
    @Override
    public long offset() {
        ensureRecordContext("offset()");
        return recordContext.offset();
    }

    @Override
    public Headers headers() {
        ensureRecordContext("headers()");
        return recordContext.headers();
    }

    /**
     * @throws IllegalStateException if timestamp is null
     */
    @Override
    public long timestamp() {
        ensureRecordContext("timestamp()");
        return recordContext.timestamp();
    }

    @Override
    public Map<String, Object> appConfigs() {
        final Map<String, Object> combined = new HashMap<>();
        // Both original (unparsed) and parsed values, so callers see custom configs too.
        combined.putAll(config.originals());
        combined.putAll(config.values());
        return combined;
    }

    @Override
    public Map<String, Object> appConfigsWithPrefix(final String prefix) {
        return config.originalsWithPrefix(prefix);
    }

    @Override
    public void setRecordContext(final ProcessorRecordContext recordContext) {
        this.recordContext = recordContext;
    }

    @Override
    public ProcessorRecordContext recordContext() {
        return recordContext;
    }

    @Override
    public void setCurrentNode(final ProcessorNode currentNode) {
        this.currentNode = currentNode;
    }

    @Override
    public ProcessorNode currentNode() {
        return currentNode;
    }

    @Override
    public ThreadCache getCache() {
        return cache;
    }

    @Override
    public void initialized() {
        initialized = true;
    }

    @Override
    public void uninitialize() {
        initialized = false;
    }
}
/*
 * Copyright 2014 Guidewire Software, Inc.
 */
package gw.util.perf.objectsize;

import gw.util.perf.InvocationCounter;
import gw.util.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Stack;

/**
 * Estimates the deep (retained) size of an object graph by iterative reflective
 * traversal. The model assumes a 32-bit-style layout: 4-byte references
 * ({@link #getPointerSize()}), 8-byte object headers, 16-byte array headers, and
 * instance sizes rounded up to 8-byte boundaries. Results are estimates, not exact
 * JVM sizes.
 */
public class ObjectSizeUtil {
  /** When true, prints a per-object trace and a size histogram to stdout. */
  public static boolean VERBOSE = true;

  private static final String INDENT = "  ";
  private static final IObjectSizeFilter DEFAULT_FILTER = new DefaultObjectSizeFilter();
  // NOTE(review): DecimalFormat is not thread-safe; this class assumes
  // single-threaded use — confirm before calling concurrently.
  private static DecimalFormat decimalFormat = new DecimalFormat("##.#");

  // Byte size of each primitive field type. IdentityHashMap is safe here because
  // Class objects are canonical. (Previously a raw map filled with `new Integer(..)`.)
  private static final Map<Class<?>, Integer> primitiveSizes = new IdentityHashMap<Class<?>, Integer>();
  static {
    primitiveSizes.put(boolean.class, 1);
    primitiveSizes.put(byte.class, 1);
    primitiveSizes.put(char.class, 2);
    primitiveSizes.put(short.class, 2);
    primitiveSizes.put(int.class, 4);
    primitiveSizes.put(float.class, 4);
    primitiveSizes.put(double.class, 8);
    primitiveSizes.put(long.class, 8);
  }

  /**
   * @return the size in bytes a field of the given type occupies: the primitive's
   *         size, or the pointer size for reference types
   */
  public static int getFieldSize(Class clazz) {
    Integer size = primitiveSizes.get(clazz);
    return size != null ? size.intValue() : getPointerSize();
  }

  /** @return the assumed reference size in bytes (32-bit layout). */
  public static int getPointerSize() {
    return 4;
  }

  /**
   * Calculates full size of object iterating over its hierarchy graph.
   *
   * @param obj object to calculate size of
   * @param filter the filter used to ignore fields or objects
   * @param maxObjects the max numbers of objects to traverse
   * @return object size; marked incomplete if the object limit was hit
   */
  public static ObjectSize deepSizeOf(Object obj, IObjectSizeFilter filter, int maxObjects) {
    // Identity map tracks visited objects so shared references are counted once
    // and cycles terminate.
    Map<Object, Object> visited = new IdentityHashMap<Object, Object>();
    Stack<ObjectEntry> stack = new Stack<ObjectEntry>();
    InvocationCounter sizeHistogram = new InvocationCounter(false);
    long result = internalSizeOf(new ObjectEntry(obj, "", ""), stack, visited, filter, "");
    sizeHistogram.recordInvocation(obj.getClass().getName(), (int) result);
    int n = 1;
    while (!stack.isEmpty()) {
      ObjectEntry entry = stack.pop();
      long size = internalSizeOf(entry, stack, visited, filter, entry.indent);
      result += size;
      n++;
      sizeHistogram.recordInvocation(entry.object.getClass().getName(), (int) size);
      if (n >= maxObjects) {
        // Budget exhausted: return the partial total, flagged as incomplete.
        return new ObjectSize(result, false);
      }
    }
    visited.clear();
    if (VERBOSE) {
      System.out.println();
      System.out.println("-------------------------------------------------");
      sizeHistogram.print();
    }
    return new ObjectSize(result, true);
  }

  /** Convenience overload: default filter, no traversal limit. */
  public static ObjectSize deepSizeOf(Object obj) {
    try {
      return deepSizeOf(obj, DEFAULT_FILTER, Integer.MAX_VALUE);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  // An object is skipped when null, already counted, or excluded by the filter.
  private static boolean skipObject(Object obj, Map<Object, Object> visited, IObjectSizeFilter filter) {
    return obj == null || visited.containsKey(obj) || filter.skipObject(obj);
  }

  private static boolean skipField(Field field, IObjectSizeFilter filter) {
    return filter.skipField(field);
  }

  // Shallow array size: 16-byte header plus per-element primitive size or pointer size.
  private static long getArrayShallowSize(Object obj) {
    long result = 16;
    int length = Array.getLength(obj);
    if (length != 0) {
      Class<?> arrayElementClazz = obj.getClass().getComponentType();
      if (arrayElementClazz.isPrimitive()) {
        result += length * getFieldSize(arrayElementClazz);
      } else {
        result += length * getPointerSize();
      }
    }
    return result;
  }

  /**
   * Shallow-sizes one entry and pushes its unvisited references onto the stack.
   * Returns 0 for objects that are skipped.
   */
  private static long internalSizeOf(ObjectEntry entry,
                                     Stack<ObjectEntry> stack,
                                     Map<Object, Object> visited,
                                     IObjectSizeFilter filter,
                                     String indent) {
    Object obj = entry.object;
    if (skipObject(obj, visited, filter)) {
      return 0;
    }
    visited.put(obj, null);
    long result = 0;
    Class<?> clazz = obj.getClass();
    if (clazz.isArray()) {
      result += getArrayShallowSize(obj);
      // Process all array elements, but skip one-dimensional primitive arrays —
      // their JVM type names (e.g. "[I") are exactly 2 characters long.
      if (clazz.getName().length() != 2) {
        int length = Array.getLength(obj);
        for (int i = length - 1; i >= 0; i--) {
          Object o = Array.get(obj, i);
          if (!skipObject(o, visited, filter)) {
            stack.add(new ObjectEntry(o, indent + INDENT, "[" + i + "]: "));
          }
        }
      }
    } else {
      result = 8; // object header
      // Walk the class hierarchy summing instance-field sizes and queueing references.
      while (clazz != null) {
        Field[] fields = clazz.getDeclaredFields();
        for (int i = 0; i < fields.length; i++) {
          if (!Modifier.isStatic(fields[i].getModifiers())) {
            Field field = fields[i];
            result += getFieldSize(field.getType());
            if (!field.getType().isPrimitive() && !skipField(field, filter)) {
              field.setAccessible(true);
              try {
                // objects to be estimated are put to stack
                Object objectToAdd = field.get(obj);
                if (!skipObject(objectToAdd, visited, filter)) {
                  stack.add(new ObjectEntry(objectToAdd, indent + INDENT, field.getName() + ": "));
                }
              } catch (IllegalAccessException ex) {
                assert false;
              }
            }
          }
        }
        clazz = clazz.getSuperclass();
      }
    }
    result = roundUpToNearestEightBytes(result);
    if (VERBOSE) {
      // Verbose trace: for HashMap/ArrayList also print fill ratio via reflection.
      String extra = "";
      if (obj instanceof HashMap) {
        try {
          Method m = obj.getClass().getDeclaredMethod("capacity");
          m.setAccessible(true);
          int capacity = (Integer) m.invoke(obj);
          extra = " (" + decimalFormat.format(100.0 * ((HashMap) obj).size() / capacity) + "%) ";
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
      if (obj instanceof ArrayList) {
        try {
          Field f = obj.getClass().getDeclaredField("elementData");
          f.setAccessible(true);
          int capacity = Array.getLength(f.get(obj));
          if (capacity == 0) {
            extra = " (empty) ";
          } else {
            extra = " (" + decimalFormat.format(100.0 * ((ArrayList) obj).size() / capacity) + "%) ";
          }
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
      System.out.println(indent + entry.info + obj.getClass().getName() + extra + " - " + result);
    }
    return result;
  }

  // JVM object sizes are padded to 8-byte multiples.
  private static long roundUpToNearestEightBytes(long result) {
    if ((result % 8) != 0) {
      result += 8 - (result % 8);
    }
    return result;
  }

  /** Work-list entry: the object plus trace-formatting metadata. */
  private static class ObjectEntry {
    public Object object;
    public String indent;
    public String info;

    public ObjectEntry(Object object, String indent, String info) {
      this.object = object;
      this.indent = indent;
      this.info = info;
    }
  }

  public static void main(String[] args) throws Exception {
    String[] s = new String[11 * 1024 * 1024];
    System.out.println(ObjectSizeUtil.deepSizeOf(s));
  }
}
/**
 * Twoverse Planetary System Object
 *
 * by Christopher Peplin (chris.peplin@rhubarbtech.com)
 * for August 23, 1966 (GROCS Project Group)
 * University of Michigan, 2009
 *
 * http://august231966.com
 * http://www.dc.umich.edu/grocs
 *
 * Copyright 2009 Christopher Peplin
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package twoverse.object;

import java.io.Serializable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Properties;
import java.util.Vector;
import java.util.logging.Level;

import nu.xom.Attribute;
import nu.xom.Element;
import processing.core.PApplet;
import twoverse.object.applet.AppletBodyInterface;
import twoverse.object.applet.AppletPlanetarySystem;
import twoverse.util.PhysicsVector3d;
import twoverse.util.Point;
import twoverse.util.XmlExceptions.UnexpectedXmlElementException;

/**
 * A planetary system: a {@link CelestialBody} with a central body id and a total
 * mass. Persists itself to the "planetary_system" table via shared prepared
 * statements (call {@link #prepareDatabaseStatements(Connection)} once first) and
 * serializes to/from XML elements named by the class config file.
 * A center id of 0 means "no center" and is stored as SQL NULL.
 */
public class PlanetarySystem extends CelestialBody implements Serializable {
    private static final long serialVersionUID = -1152118681822794656L;
    private static Properties sConfigFile;
    private int mCenterId;
    private double mMass;
    private static PreparedStatement sSelectAllPlanetarySystemsStatement;
    private static PreparedStatement sInsertPlanetarySystemStatement;
    private static PreparedStatement sUpdatePlanetarySystemStatement;
    private static Connection sConnection;

    /** Creates a brand-new system (no database id yet). */
    public PlanetarySystem(int ownerId, String name, int parentId,
            Point position, PhysicsVector3d velocity,
            PhysicsVector3d acceleration, int centerStarId, double mass) {
        super(ownerId, name, parentId, position, velocity, acceleration);
        loadConfig();
        initialize(centerStarId, mass);
    }

    /** Reconstructs a fully-known system (e.g. from the database). */
    public PlanetarySystem(int id, int ownerId, String name,
            Timestamp birthTime, Timestamp deathTime, int parentId,
            Point position, PhysicsVector3d velocity,
            PhysicsVector3d acceleration, Vector<Integer> children,
            int centerStarId, double mass) {
        super(id, ownerId, name, birthTime, deathTime, parentId, position,
                velocity, acceleration, children);
        loadConfig();
        initialize(centerStarId, mass);
    }

    /** Wraps an existing body with system-specific attributes. */
    public PlanetarySystem(CelestialBody body, int centerStarId, double mass) {
        super(body);
        loadConfig();
        initialize(centerStarId, mass);
    }

    /**
     * Parses a system from its XML element.
     *
     * @throws UnexpectedXmlElementException if the element's tag is not the
     *         configured planetary-system tag
     */
    public PlanetarySystem(Element element) {
        super(element.getFirstChildElement(CelestialBody.XML_TAG));
        loadConfig();
        if(!element.getLocalName()
                .equals(sConfigFile.getProperty("PLANETARY_SYSTEM_TAG"))) {
            throw new UnexpectedXmlElementException("Element is not a planetary system");
        }
        int centerStarId =
                Integer.valueOf(element.getAttribute(sConfigFile.getProperty("CENTER_ID_ATTRIBUTE_TAG"))
                        .getValue());
        double mass =
                Double.valueOf(element.getAttribute(sConfigFile.getProperty("MASS_ATTRIBUTE_TAG"))
                        .getValue());
        initialize(centerStarId, mass);
    }

    /** Copy constructor. */
    public PlanetarySystem(PlanetarySystem system) {
        super(system);
        initialize(system.getCenterId(), system.getMass());
    }

    private void initialize(int centerStarId, double mass) {
        setCenter(centerStarId);
        setMass(mass);
    }

    // Lazily loads the shared class configuration exactly once.
    private synchronized void loadConfig() {
        if(sConfigFile == null) {
            sConfigFile = loadConfigFile("PlanetarySystem");
        }
    }

    @Override
    public AppletBodyInterface getAsApplet(PApplet parent) {
        return new AppletPlanetarySystem(parent, this);
    }

    /** Must be called once before any database operation on this class. */
    public static void prepareDatabaseStatements(Connection connection)
            throws SQLException {
        sConnection = connection;
        sSelectAllPlanetarySystemsStatement =
                sConnection.prepareStatement("SELECT * FROM object "
                        + "NATURAL JOIN planetary_system "
                        + "LEFT JOIN (user) "
                        + "ON (object.owner = user.id)");
        sInsertPlanetarySystemStatement =
                sConnection.prepareStatement("INSERT INTO planetary_system (id, centerid, mass) "
                        + "VALUES (?, ?, ?)");
        sUpdatePlanetarySystemStatement =
                sConnection.prepareStatement("UPDATE planetary_system "
                        + "SET centerid = ?,"
                        + " mass = ? "
                        + "WHERE id = ?");
    }

    /**
     * Loads every planetary system from the database, keyed by object id.
     * SQL failures are logged and yield a (possibly partial) map.
     */
    public static synchronized HashMap<Integer, CelestialBody> selectAllFromDatabase()
            throws SQLException {
        HashMap<Integer, CelestialBody> systems =
                new HashMap<Integer, CelestialBody>();
        try {
            ResultSet resultSet =
                    sSelectAllPlanetarySystemsStatement.executeQuery();
            ArrayList<CelestialBody> bodies = parseAll(resultSet);
            // Re-walk the same result set in step with the parsed bodies to pick
            // up the system-specific columns.
            resultSet.beforeFirst();
            for(CelestialBody body : bodies) {
                if(!resultSet.next()) {
                    throw new SQLException("Mismatch between systems and celestial bodies");
                }
                PlanetarySystem system =
                        new PlanetarySystem(body,
                                resultSet.getInt("centerid"),
                                resultSet.getDouble("mass"));
                system.setDirty(false);
                systems.put(system.getId(), system);
            }
            resultSet.close();
        } catch(SQLException e) {
            sLogger.log(Level.WARNING, "Unable to get planetary systems", e);
        }
        return systems;
    }

    /** Inserts this system; a center id of 0 is stored as SQL NULL. */
    @Override
    public synchronized void insertInDatabase() throws SQLException {
        sLogger.log(Level.INFO, "Attempting to add system: " + this);
        try {
            super.insertInDatabase();
            sInsertPlanetarySystemStatement.setInt(1, getId());
            if(getCenterId() != 0) {
                sInsertPlanetarySystemStatement.setInt(2, getCenterId());
            } else {
                sInsertPlanetarySystemStatement.setNull(2, Types.INTEGER);
            }
            sInsertPlanetarySystemStatement.setDouble(3, getMass());
            sInsertPlanetarySystemStatement.executeUpdate();
            setDirty(false);
        } catch(SQLException e) {
            sLogger.log(Level.WARNING, "Could not add system " + this, e);
        }
    }

    /** Updates this system's row; mirrors insertInDatabase's NULL-center rule. */
    @Override
    public synchronized void updateInDatabase() throws SQLException {
        sLogger.log(Level.INFO, "Attempting to update system: " + this);
        try {
            super.updateInDatabase();
            // Consistent with insertInDatabase: 0 means "no center" -> SQL NULL.
            if(getCenterId() != 0) {
                sUpdatePlanetarySystemStatement.setInt(1, getCenterId());
            } else {
                sUpdatePlanetarySystemStatement.setNull(1, Types.INTEGER);
            }
            sUpdatePlanetarySystemStatement.setDouble(2, getMass());
            // BUGFIX: the id is an integer key; it was previously bound with
            // setDouble, which can fail or mismatch on strict drivers.
            sUpdatePlanetarySystemStatement.setInt(3, getId());
            sUpdatePlanetarySystemStatement.executeUpdate();
            setDirty(false);
        } catch(SQLException e) {
            sLogger.log(Level.WARNING,
                    "Could not update planetary system " + this,
                    e);
        }
    }

    public void setCenter(int center) {
        mCenterId = center;
    }

    public int getCenterId() {
        return mCenterId;
    }

    public void setMass(double mass) {
        mMass = mass;
    }

    public double getMass() {
        return mMass;
    }

    /** Serializes this system as a wrapper element around the base body's XML. */
    @Override
    public Element toXmlElement() {
        loadConfig();
        Element root =
                new Element(sConfigFile.getProperty("PLANETARY_SYSTEM_TAG"));
        root.appendChild(super.toXmlElement());
        root.addAttribute(new Attribute(sConfigFile.getProperty("CENTER_ID_ATTRIBUTE_TAG"),
                String.valueOf(mCenterId)));
        root.addAttribute(new Attribute(sConfigFile.getProperty("MASS_ATTRIBUTE_TAG"),
                String.valueOf(mMass)));
        return root;
    }
}
/*
 * Copyright (C) 2016-2022 Philip Helger (www.helger.com)
 * philip[at]helger[dot]com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.helger.xservlet.filter;

import java.util.concurrent.atomic.AtomicBoolean;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.helger.commons.ValueEnforcer;
import com.helger.commons.annotation.OverrideOnDemand;
import com.helger.commons.debug.GlobalDebug;
import com.helger.commons.http.CHttp;
import com.helger.commons.http.EHttpMethod;
import com.helger.commons.http.HttpHeaderMap;
import com.helger.commons.state.EContinue;
import com.helger.commons.string.StringHelper;
import com.helger.http.EHttpVersion;
import com.helger.servlet.request.RequestHelper;
import com.helger.servlet.response.ResponseHelper;
import com.helger.servlet.response.StatusAwareHttpResponseWrapper;

/**
 * Handle special content related stuff that needs to be processed for every
 * servlet. After each request this filter sanity-checks the response (status
 * code, character encoding, content type, headers) and logs warnings for
 * suspicious combinations. Currently handled are:
 * <ul>
 * <li>Request fallback charset</li>
 * <li>Response fallback charset</li>
 * </ul>
 *
 * @author Philip Helger
 * @since 9.0.0
 */
public class XServletFilterConsistency implements IXServletLowLevelFilter
{
  public static final XServletFilterConsistency INSTANCE = new XServletFilterConsistency ();
  private static final Logger LOGGER = LoggerFactory.getLogger (XServletFilterConsistency.class);
  private static final AtomicBoolean SILENT_MODE = new AtomicBoolean (GlobalDebug.DEFAULT_SILENT_MODE);

  protected XServletFilterConsistency ()
  {}

  /**
   * @return <code>true</code> if logging is disabled, <code>false</code> if it
   *         is enabled.
   * @since 9.1.7
   */
  public static boolean isSilentMode ()
  {
    return SILENT_MODE.get ();
  }

  /**
   * Enable or disable certain regular log messages.
   *
   * @param bSilentMode
   *        <code>true</code> to disable logging, <code>false</code> to enable
   *        logging
   * @return The previous value of the silent mode.
   * @since 9.1.7
   */
  public static boolean setSilentMode (final boolean bSilentMode)
  {
    return SILENT_MODE.getAndSet (bSilentMode);
  }

  /**
   * This filter never blocks a request; all checks happen in
   * {@link #afterRequest(HttpServletRequest, HttpServletResponse, EHttpVersion, EHttpMethod, boolean, Throwable, boolean)}.
   */
  @Override
  @Nonnull
  public EContinue beforeRequest (@Nonnull final HttpServletRequest aHttpRequest,
                                  @Nonnull final HttpServletResponse aHttpResponse,
                                  @Nonnull final EHttpVersion eHttpVersion,
                                  @Nonnull final EHttpMethod eHttpMethod)
  {
    return EContinue.CONTINUE;
  }

  /**
   * Warns on non-success, non-redirect status codes.
   *
   * @param sRequestURL
   *        The request URL.
   * @param nStatusCode
   *        The response status code.
   * @param eHttpMethod
   *        Used HTTP Method
   */
  @OverrideOnDemand
  protected void checkStatusCode (@Nonnull final String sRequestURL,
                                  final int nStatusCode,
                                  @Nonnull final EHttpMethod eHttpMethod)
  {
    // < 200 || >= 400?
    if (nStatusCode < CHttp.HTTP_OK || nStatusCode >= CHttp.HTTP_BAD_REQUEST)
      if (!isSilentMode ())
        if (LOGGER.isWarnEnabled ())
          LOGGER.warn ("HTTP status code " +
                       nStatusCode +
                       " in response to " +
                       eHttpMethod.getName () +
                       " '" +
                       sRequestURL +
                       "'");
  }

  // A response body is expected for 2xx codes that are not explicitly body-less.
  private static boolean _isContentExpected (final int nStatusCode)
  {
    // >= 200 && < 300
    return nStatusCode >= CHttp.HTTP_OK &&
           nStatusCode < CHttp.HTTP_MULTIPLE_CHOICES &&
           !ResponseHelper.isEmptyStatusCode (nStatusCode);
  }

  /**
   * Warns when a response that should carry content has no character encoding.
   *
   * @param sRequestURL
   *        The request URL.
   * @param sCharacterEncoding
   *        The response character encoding.
   * @param nStatusCode
   *        The response status code.
   * @param eHttpMethod
   *        Used HTTP Method
   */
  @OverrideOnDemand
  protected void checkCharacterEncoding (@Nonnull final String sRequestURL,
                                         @Nullable final String sCharacterEncoding,
                                         final int nStatusCode,
                                         @Nonnull final EHttpMethod eHttpMethod)
  {
    if (StringHelper.hasNoText (sCharacterEncoding) && _isContentExpected (nStatusCode))
      if (!isSilentMode ())
        if (LOGGER.isWarnEnabled ())
          LOGGER.warn ("No character encoding on HTTP " +
                       nStatusCode +
                       " response to " +
                       eHttpMethod.getName () +
                       " '" +
                       sRequestURL +
                       "'");
  }

  /**
   * Warns when a response that should carry content has no content type.
   *
   * @param sRequestURL
   *        The request URL.
   * @param sContentType
   *        The response content type.
   * @param nStatusCode
   *        The response status code.
   * @param eHttpMethod
   *        Used HTTP Method
   */
  @OverrideOnDemand
  protected void checkContentType (@Nonnull final String sRequestURL,
                                   @Nullable final String sContentType,
                                   final int nStatusCode,
                                   @Nonnull final EHttpMethod eHttpMethod)
  {
    if (StringHelper.hasNoText (sContentType) && _isContentExpected (nStatusCode))
      if (!isSilentMode ())
        if (LOGGER.isWarnEnabled ())
          LOGGER.warn ("No content type on HTTP " +
                       nStatusCode +
                       " response to " +
                       eHttpMethod.getName () +
                       " '" +
                       sRequestURL +
                       "'");
  }

  /**
   * Header check — intentionally disabled (<code>if (false)</code>) because the
   * default headers from UnifiedResponseDefaultSettings trigger it on every
   * response.
   *
   * @param sRequestURL
   *        The request URL.
   * @param aHeaders
   *        All response HTTP headers.
   * @param nStatusCode
   *        The response status code.
   * @param eHttpMethod
   *        Used HTTP Method
   */
  @OverrideOnDemand
  protected void checkHeaders (@Nonnull final String sRequestURL,
                               @Nonnull final HttpHeaderMap aHeaders,
                               final int nStatusCode,
                               @Nonnull final EHttpMethod eHttpMethod)
  {
    // Happens because of the default headers in the
    // UnifiedResponseDefaultSettings
    if (false)
      if (nStatusCode != CHttp.HTTP_OK && aHeaders.isNotEmpty ())
        if (!isSilentMode ())
          if (LOGGER.isWarnEnabled ())
            LOGGER.warn ("Headers on HTTP " +
                         nStatusCode +
                         " response to " +
                         eHttpMethod.getName () +
                         " '" +
                         sRequestURL +
                         "': " +
                         aHeaders);
  }

  /**
   * Runs all consistency checks against the finished response. Requires the
   * response to be a {@link StatusAwareHttpResponseWrapper} so the status code
   * and headers can be read back.
   */
  @Override
  public void afterRequest (@Nonnull final HttpServletRequest aHttpRequest,
                            @Nonnull final HttpServletResponse aHttpResponse,
                            @Nonnull final EHttpVersion eHttpVersion,
                            @Nonnull final EHttpMethod eHttpMethod,
                            final boolean bInvokeHandler,
                            @Nullable final Throwable aCaughtException,
                            final boolean bIsHandledAsync)
  {
    ValueEnforcer.isTrue (aHttpResponse instanceof StatusAwareHttpResponseWrapper,
                          "Must be a StatusAwareHttpResponseWrapper");
    final String sRequestURL = RequestHelper.getURLDecoded (aHttpRequest);
    final int nStatusCode = ((StatusAwareHttpResponseWrapper) aHttpResponse).getStatusCode ();
    final HttpHeaderMap aHeaders = ((StatusAwareHttpResponseWrapper) aHttpResponse).headerMap ();
    final String sCharacterEncoding = aHttpResponse.getCharacterEncoding ();
    final String sContentType = aHttpResponse.getContentType ();

    checkStatusCode (sRequestURL, nStatusCode, eHttpMethod);
    checkCharacterEncoding (sRequestURL, sCharacterEncoding, nStatusCode, eHttpMethod);
    // Async responses may set the content type later - don't check it here
    if (!bIsHandledAsync)
      checkContentType (sRequestURL, sContentType, nStatusCode, eHttpMethod);
    checkHeaders (sRequestURL, aHeaders, nStatusCode, eHttpMethod);
  }
}
/* * Copyright 2004-2010 the Seasar Foundation and the Others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.seasar.doma.internal.apt.domain; import org.seasar.doma.internal.apt.AptTestCase; import org.seasar.doma.internal.apt.DomainProcessor; import org.seasar.doma.internal.apt.lombok.Value; import org.seasar.doma.message.Message; /** * @author taedium * */ public class DomainProcessorTest extends AptTestCase { @Override protected void setUp() throws Exception { super.setUp(); addOption("-Adoma.test=true"); } public void testSalary() throws Exception { Class<?> target = Salary.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testPrimitiveValue() throws Exception { Class<?> target = PrimitiveValueDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testEnum() throws Exception { Class<?> target = EnumDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testUnsupportedValueType() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); 
addCompilationUnit(UnsupportedValueTypeDomain.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4102); } public void testConstrutorNotFound() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(ConstrutorNotFoundDomain.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4103); } public void testAccessorNotFound() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(AccessorNotFoundDomain.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4104); } public void testInner() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(Outer.class); compile(); assertGeneratedSource(Outer.Inner.class); assertTrue(getCompiledResult()); } public void testInner_deep() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(Outer_deepInner.class); compile(); assertGeneratedSource(Outer_deepInner.Middle.Inner.class); assertTrue(getCompiledResult()); } public void testInner_nonStatic() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(Outer_nonStaticInner.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4275); } public void testInner_nonPublic() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(Outer_nonPublicInner.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4275); } public void testInner_illegalName() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(Outer__illegalName.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4277); } public void testMiddle_nonPublic() throws Exception { 
DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(Outer_nonPublicMiddle.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4275); } public void testPackagePrivate() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(PackagePrivateDomain.class); compile(); assertTrue(getCompiledResult()); } public void testJobType() throws Exception { Class<?> target = JobType.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4184); } public void testAbstractDomain() throws Exception { Class<?> target = AbstractDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4132); } public void testOfSalary() throws Exception { Class<?> target = OfSalary.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testOfPrimitiveValue() throws Exception { Class<?> target = OfPrimitiveValueDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testOfEnum() throws Exception { Class<?> target = OfEnumDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testOfJobType() throws Exception { Class<?> target = OfJobType.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); 
assertTrue(getCompiledResult()); } public void testOfPrimitiveValueType() throws Exception { Class<?> target = OfPrimitiveValueType.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testOfAbstractDomain() throws Exception { Class<?> target = OfAbstractDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testGenericDomain() throws Exception { Class<?> target = SpecificDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testVersionCheckSuppressed() throws Exception { addOption("-Adoma.version.validation=false"); Class<?> target = VersionCheckSuppressedDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testParametarizedSalary() throws Exception { Class<?> target = ParametarizedSalary.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testParametarizedOfSalary() throws Exception { Class<?> target = ParametarizedOfSalary.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testIllegalSizeParametarizedOfSalary() throws Exception { Class<?> target = IllegalSizeParametarizedOfSalary.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); 
addCompilationUnit(target); compile(); assertMessage(Message.DOMA4106); } public void testIllegalTypeParametarizedOfSalary() throws Exception { Class<?> target = IllegalTypeParametarizedOfSalary.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertMessage(Message.DOMA4106); } public void testNullRejection() throws Exception { Class<?> target = NullRejectionDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testIllegalAcceptNullDomain() throws Exception { Class<?> target = IllegalAcceptNullDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertMessage(Message.DOMA4251); } public void testObjectDomain() throws Exception { Class<?> target = ObjectDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testInterface() throws Exception { Class<?> target = InterfaceDomain.class; DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testInterfaceFactoryOfAttributeMustNotBeNew() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(InterfaceNew.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4268); } public void testInterfaceInner() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(InterfaceOuter.class); compile(); assertGeneratedSource(InterfaceOuter.Inner.class); assertTrue(getCompiledResult()); } public void 
testAnnotationMustNotBeDomainClass() throws Exception { DomainProcessor processor = new DomainProcessor(); addProcessor(processor); addCompilationUnit(AnnotationDomain.class); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4105); } public void testLombokValue() throws Exception { addOption("-Adoma.lombok.Value=" + Value.class.getName()); DomainProcessor processor = new DomainProcessor(); Class<?> target = LombokValue.class; addProcessor(processor); addCompilationUnit(target); compile(); assertGeneratedSource(target); assertTrue(getCompiledResult()); } public void testLombokValueStaticConstructor() throws Exception { addOption("-Adoma.lombok.Value=" + Value.class.getName()); DomainProcessor processor = new DomainProcessor(); Class<?> target = LombokValueStaticConstructor.class; addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4428); } public void testLombokValueNoField() throws Exception { addOption("-Adoma.lombok.Value=" + Value.class.getName()); DomainProcessor processor = new DomainProcessor(); Class<?> target = LombokValueNoField.class; addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4430); } public void testLombokValueTwoFields() throws Exception { addOption("-Adoma.lombok.Value=" + Value.class.getName()); DomainProcessor processor = new DomainProcessor(); Class<?> target = LombokValueTwoFields.class; addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4431); } public void testLombokValueTypeNotAssignable() throws Exception { addOption("-Adoma.lombok.Value=" + Value.class.getName()); DomainProcessor processor = new DomainProcessor(); Class<?> target = LombokValueTypeNotAssignable.class; addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4432); } 
public void testLombokValueAccessorMethod() throws Exception { addOption("-Adoma.lombok.Value=" + Value.class.getName()); DomainProcessor processor = new DomainProcessor(); Class<?> target = LombokValueAccessorMethod.class; addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4429); } public void testLombokValueAccessorMethod_boolean() throws Exception { addOption("-Adoma.lombok.Value=" + Value.class.getName()); DomainProcessor processor = new DomainProcessor(); Class<?> target = LombokValueAccessorMethod_boolean.class; addProcessor(processor); addCompilationUnit(target); compile(); assertFalse(getCompiledResult()); assertMessage(Message.DOMA4429); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.history.integration.revertion;

import com.intellij.history.LocalHistory;
import com.intellij.history.core.Content;
import com.intellij.history.core.Paths;
import com.intellij.history.core.changes.*;
import com.intellij.history.core.tree.Entry;
import com.intellij.history.integration.IdeaGateway;
import com.intellij.openapi.command.impl.DocumentUndoProvider;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.io.ReadOnlyAttributeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * A {@link ChangeVisitor} that undoes local-history changes in the range
 * {@code [fromChangeId, toChangeId)} by applying the inverse VFS operation for
 * each visited change. Content and read-only-status restorations are not applied
 * immediately: they are queued as {@link DelayedApply} entries and executed in
 * {@link #finished()}, after all structural changes have been reverted.
 */
public class UndoChangeRevertingVisitor extends ChangeVisitor {
  private final IdeaGateway myGateway;
  private final Set<DelayedApply> myDelayedApplies = new HashSet<>();

  private final long myFromChangeId;
  private final long myToChangeId;

  // Becomes true once the change with myFromChangeId is seen; only changes from
  // that point on are reverted.
  private boolean isReverting;

  /**
   * @param fromChangeId id of the first change to revert
   * @param toChangeId   id of the change at which to stop, or {@code null} to
   *                     revert to the end (-1 never matches a change id)
   */
  public UndoChangeRevertingVisitor(IdeaGateway gw, @NotNull Long fromChangeId, @Nullable Long toChangeId) {
    myGateway = gw;
    myFromChangeId = fromChangeId;
    myToChangeId = toChangeId == null ? -1 : toChangeId;
  }

  /** Returns true once the starting change has been reached; content changes are handled separately. */
  protected boolean shouldRevert(Change c) {
    if (c.getId() == myFromChangeId) {
      isReverting = true;
    }
    return isReverting && !(c instanceof ContentChange);
  }

  /** Stops visiting when the terminating change id is reached. */
  protected void checkShouldStop(Change c) throws StopVisitingException {
    if (c.getId() == myToChangeId) stop();
  }

  @Override
  public void visit(CreateEntryChange c) throws StopVisitingException {
    if (shouldRevert(c)) {
      VirtualFile f = myGateway.findVirtualFile(c.getPath());
      if (f != null) {
        // Anything queued for this file (or its children) is moot once it is deleted.
        unregisterDelayedApplies(f);
        try {
          f.delete(LocalHistory.VFS_EVENT_REQUESTOR);
        }
        catch (IOException e) {
          throw new RuntimeIOException(e);
        }
      }
    }
    checkShouldStop(c);
  }

  @Override
  public void visit(ContentChange c) throws StopVisitingException {
    if (shouldRevert(c)) {
      try {
        VirtualFile f = myGateway.findOrCreateFileSafely(c.getPath(), false);
        registerDelayedContentApply(f, c.getOldContent());
      }
      catch (IOException e) {
        throw new RuntimeIOException(e);
      }
    }
    checkShouldStop(c);
  }

  @Override
  public void visit(RenameChange c) throws StopVisitingException {
    if (shouldRevert(c)) {
      VirtualFile f = myGateway.findVirtualFile(c.getPath());
      if (f != null) {
        VirtualFile existing = f.getParent().findChild(c.getOldName());
        try {
          // A different file already occupying the old name must give way.
          if (existing != null && !Comparing.equal(existing, f)) {
            existing.delete(LocalHistory.VFS_EVENT_REQUESTOR);
          }
          f.rename(LocalHistory.VFS_EVENT_REQUESTOR, c.getOldName());
        }
        catch (IOException e) {
          throw new RuntimeIOException(e);
        }
      }
    }
    checkShouldStop(c);
  }

  @Override
  public void visit(ROStatusChange c) throws StopVisitingException {
    if (shouldRevert(c)) {
      VirtualFile f = myGateway.findVirtualFile(c.getPath());
      if (f != null) {
        registerDelayedROStatusApply(f, c.getOldStatus());
      }
    }
    checkShouldStop(c);
  }

  @Override
  public void visit(MoveChange c) throws StopVisitingException {
    if (shouldRevert(c)) {
      VirtualFile f = myGateway.findVirtualFile(c.getPath());
      if (f != null) {
        try {
          VirtualFile parent = myGateway.findOrCreateFileSafely(c.getOldParent(), true);
          VirtualFile existing = parent.findChild(f.getName());
          if (existing != null) existing.delete(LocalHistory.VFS_EVENT_REQUESTOR);
          f.move(LocalHistory.VFS_EVENT_REQUESTOR, parent);
        }
        catch (IOException e) {
          throw new RuntimeIOException(e);
        }
      }
    }
    checkShouldStop(c);
  }

  @Override
  public void visit(DeleteChange c) throws StopVisitingException {
    if (shouldRevert(c)) {
      try {
        VirtualFile parent = myGateway.findOrCreateFileSafely(Paths.getParentOf(c.getPath()), true);
        revertDeletion(parent, c.getDeletedEntry());
      }
      catch (IOException e) {
        throw new RuntimeIOException(e);
      }
    }
    checkShouldStop(c);
  }

  /** Recursively recreates a deleted entry; file content/RO-status is restored lazily. */
  private void revertDeletion(VirtualFile parent, Entry e) throws IOException {
    VirtualFile f = myGateway.findOrCreateFileSafely(parent, e.getName(), e.isDirectory());
    if (e.isDirectory()) {
      for (Entry child : e.getChildren()) revertDeletion(f, child);
    }
    else {
      registerDelayedContentApply(f, e.getContent());
      registerDelayedROStatusApply(f, e.isReadOnly());
    }
  }

  private void registerDelayedContentApply(VirtualFile f, Content content) {
    registerDelayedApply(new DelayedContentApply(f, content));
  }

  private void registerDelayedROStatusApply(VirtualFile f, boolean isReadOnly) {
    registerDelayedApply(new DelayedROStatusApply(f, isReadOnly));
  }

  private void registerDelayedApply(DelayedApply a) {
    // Set.add would keep the old (equal) entry, so remove first to make the
    // latest registration win.
    myDelayedApplies.remove(a);
    myDelayedApplies.add(a);
  }

  /** Drops all pending applies targeting {@code fileOrDir} or any file beneath it. */
  private void unregisterDelayedApplies(VirtualFile fileOrDir) {
    List<DelayedApply> toRemove = new ArrayList<>();
    for (DelayedApply a : myDelayedApplies) {
      if (VfsUtil.isAncestor(fileOrDir, a.getFile(), false)) {
        toRemove.add(a);
      }
    }
    for (DelayedApply a : toRemove) {
      myDelayedApplies.remove(a);
    }
  }

  @Override
  public void finished() {
    try {
      for (DelayedApply a : myDelayedApplies) a.apply();
    }
    catch (IOException e) {
      throw new RuntimeIOException(e);
    }
  }

  /**
   * A deferred per-file operation. Equality is (concrete class, file), so at
   * most one pending apply of each kind exists per file.
   */
  private abstract static class DelayedApply {
    protected final VirtualFile myFile;

    protected DelayedApply(VirtualFile f) {
      myFile = f;
    }

    public VirtualFile getFile() {
      return myFile;
    }

    public abstract void apply() throws IOException;

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      // Guard against null and foreign types: the previous getClass().equals(o.getClass())
      // threw NullPointerException for a null argument, violating the equals contract.
      if (o == null || !getClass().equals(o.getClass())) return false;
      return myFile.equals(((DelayedApply)o).myFile);
    }

    @Override
    public int hashCode() {
      return getClass().hashCode() + 32 * myFile.hashCode();
    }
  }

  private static class DelayedContentApply extends DelayedApply {
    private final Content myContent;

    DelayedContentApply(VirtualFile f, Content content) {
      super(f);
      myContent = content;
    }

    @Override
    public void apply() throws IOException {
      if (!myContent.isAvailable()) return;

      // Temporarily lift the read-only flag so the content can be written,
      // then restore the flag to its previous state.
      boolean isReadOnly = !myFile.isWritable();
      ReadOnlyAttributeUtil.setReadOnlyAttribute(myFile, false);

      Document doc = FileDocumentManager.getInstance().getCachedDocument(myFile);
      DocumentUndoProvider.startDocumentUndo(doc);
      try {
        myFile.setBinaryContent(myContent.getBytes());
      }
      finally {
        DocumentUndoProvider.finishDocumentUndo(doc);
      }

      ReadOnlyAttributeUtil.setReadOnlyAttribute(myFile, isReadOnly);
    }
  }

  private static class DelayedROStatusApply extends DelayedApply {
    private final boolean isReadOnly;

    private DelayedROStatusApply(VirtualFile f, boolean isReadOnly) {
      super(f);
      this.isReadOnly = isReadOnly;
    }

    @Override
    public void apply() throws IOException {
      ReadOnlyAttributeUtil.setReadOnlyAttribute(myFile, isReadOnly);
    }
  }

  /** Unchecked wrapper so IOExceptions can cross the visitor API. */
  public static class RuntimeIOException extends RuntimeException {
    public RuntimeIOException(Throwable cause) {
      super(cause);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.rsgroup; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.MetaTableAccessor.DefaultVisitorBase; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import 
org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableState; import org.apache.hadoop.hbase.constraint.ConstraintException; import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.master.ServerListener; import org.apache.hadoop.hbase.master.TableStateManager; import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.protobuf.ProtobufMagic; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos; import org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos; import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy; import org.apache.hadoop.hbase.security.access.AccessControlLists; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import com.google.protobuf.ServiceException; /** * This is an 
implementation of {@link RSGroupInfoManager} which makes * use of an HBase table as the persistence store for the group information. * It also makes use of zookeeper to store group information needed * for bootstrapping during offline mode. * * <h2>Concurrency</h2> * RSGroup state is kept locally in Maps. There is an rsgroup name to cached * RSGroupInfo Map at {@link #rsGroupMap} and a Map of tables to the name of the * rsgroup they belong to (in {@link #tableMap}). These Maps are persisted to the * hbase:rsgroup table (and cached in zk) on each modification. * * <p>Mutations on state are synchronized but reads can continue without having * to wait on an instance monitor, mutations do wholesale replace of the Maps on * update -- Copy-On-Write; the local Maps of state are read-only, just-in-case * (see flushConfig). * * <p>Reads must not block else there is a danger we'll deadlock. * * <p>Clients of this class, the {@link RSGroupAdminEndpoint} for example, want to query and * then act on the results of the query modifying cache in zookeeper without another thread * making intermediate modifications. These clients synchronize on the 'this' instance so * no other thread has access concurrently. Reads must be able to continue concurrently. 
*/ @InterfaceAudience.Private class RSGroupInfoManagerImpl implements RSGroupInfoManager { private static final Log LOG = LogFactory.getLog(RSGroupInfoManagerImpl.class); /** Table descriptor for <code>hbase:rsgroup</code> catalog table */ private final static HTableDescriptor RSGROUP_TABLE_DESC; static { RSGROUP_TABLE_DESC = new HTableDescriptor(RSGROUP_TABLE_NAME); RSGROUP_TABLE_DESC.addFamily(new HColumnDescriptor(META_FAMILY_BYTES)); RSGROUP_TABLE_DESC.setRegionSplitPolicyClassName(DisabledRegionSplitPolicy.class.getName()); try { RSGROUP_TABLE_DESC.addCoprocessor( MultiRowMutationEndpoint.class.getName(), null, Coprocessor.PRIORITY_SYSTEM, null); } catch (IOException ex) { throw new RuntimeException(ex); } } // There two Maps are immutable and wholesale replaced on each modification // so are safe to access concurrently. See class comment. private volatile Map<String, RSGroupInfo> rsGroupMap = Collections.emptyMap(); private volatile Map<TableName, String> tableMap = Collections.emptyMap(); private final MasterServices masterServices; private Table rsGroupTable; private final ClusterConnection conn; private final ZooKeeperWatcher watcher; private final RSGroupStartupWorker rsGroupStartupWorker = new RSGroupStartupWorker(); // contains list of groups that were last flushed to persistent store private Set<String> prevRSGroups = new HashSet<>(); private final ServerEventsListenerThread serverEventsListenerThread = new ServerEventsListenerThread(); private RSGroupInfoManagerImpl(MasterServices masterServices) throws IOException { this.masterServices = masterServices; this.watcher = masterServices.getZooKeeper(); this.conn = masterServices.getClusterConnection(); } private synchronized void init() throws IOException{ refresh(); rsGroupStartupWorker.start(); serverEventsListenerThread.start(); masterServices.getServerManager().registerListener(serverEventsListenerThread); } static RSGroupInfoManager getInstance(MasterServices master) throws IOException { 
RSGroupInfoManagerImpl instance = new RSGroupInfoManagerImpl(master); instance.init(); return instance; } @Override public synchronized void addRSGroup(RSGroupInfo rsGroupInfo) throws IOException { checkGroupName(rsGroupInfo.getName()); if (rsGroupMap.get(rsGroupInfo.getName()) != null || rsGroupInfo.getName().equals(RSGroupInfo.DEFAULT_GROUP)) { throw new DoNotRetryIOException("Group already exists: "+ rsGroupInfo.getName()); } Map<String, RSGroupInfo> newGroupMap = Maps.newHashMap(rsGroupMap); newGroupMap.put(rsGroupInfo.getName(), rsGroupInfo); flushConfig(newGroupMap); } private RSGroupInfo getRSGroupInfo(final String groupName) throws DoNotRetryIOException { RSGroupInfo rsGroupInfo = getRSGroup(groupName); if (rsGroupInfo == null) { throw new DoNotRetryIOException("RSGroup " + groupName + " does not exist"); } return rsGroupInfo; } @Override public synchronized Set<Address> moveServers(Set<Address> servers, String srcGroup, String dstGroup) throws IOException { RSGroupInfo src = getRSGroupInfo(srcGroup); RSGroupInfo dst = getRSGroupInfo(dstGroup); // If destination is 'default' rsgroup, only add servers that are online. If not online, drop // it. If not 'default' group, add server to 'dst' rsgroup EVEN IF IT IS NOT online (could be a // rsgroup of dead servers that are to come back later). Set<Address> onlineServers = dst.getName().equals(RSGroupInfo.DEFAULT_GROUP)? 
Utility.getOnlineServers(this.masterServices): null; for (Address el: servers) { src.removeServer(el); if (onlineServers != null) { if (!onlineServers.contains(el)) { if (LOG.isDebugEnabled()) { LOG.debug("Dropping " + el + " during move-to-default rsgroup because not online"); } continue; } } dst.addServer(el); } Map<String,RSGroupInfo> newGroupMap = Maps.newHashMap(rsGroupMap); newGroupMap.put(src.getName(), src); newGroupMap.put(dst.getName(), dst); flushConfig(newGroupMap); return dst.getServers(); } @Override public RSGroupInfo getRSGroupOfServer(Address serverHostPort) throws IOException { for (RSGroupInfo info: rsGroupMap.values()) { if (info.containsServer(serverHostPort)) { return info; } } return null; } @Override public RSGroupInfo getRSGroup(String groupName) { return rsGroupMap.get(groupName); } @Override public String getRSGroupOfTable(TableName tableName) { return tableMap.get(tableName); } @Override public synchronized void moveTables(Set<TableName> tableNames, String groupName) throws IOException { if (groupName != null && !rsGroupMap.containsKey(groupName)) { throw new DoNotRetryIOException("Group "+groupName+" does not exist or is a special group"); } Map<String,RSGroupInfo> newGroupMap = Maps.newHashMap(rsGroupMap); for(TableName tableName: tableNames) { if (tableMap.containsKey(tableName)) { RSGroupInfo src = new RSGroupInfo(newGroupMap.get(tableMap.get(tableName))); src.removeTable(tableName); newGroupMap.put(src.getName(), src); } if(groupName != null) { RSGroupInfo dst = new RSGroupInfo(newGroupMap.get(groupName)); dst.addTable(tableName); newGroupMap.put(dst.getName(), dst); } } flushConfig(newGroupMap); } @Override public synchronized void removeRSGroup(String groupName) throws IOException { if (!rsGroupMap.containsKey(groupName) || groupName.equals(RSGroupInfo.DEFAULT_GROUP)) { throw new DoNotRetryIOException("Group " + groupName + " does not exist or is a reserved " + "group"); } Map<String,RSGroupInfo> newGroupMap = 
Maps.newHashMap(rsGroupMap); newGroupMap.remove(groupName); flushConfig(newGroupMap); } @Override public List<RSGroupInfo> listRSGroups() { return Lists.newLinkedList(rsGroupMap.values()); } @Override public boolean isOnline() { return rsGroupStartupWorker.isOnline(); } @Override public void moveServersAndTables(Set<Address> servers, Set<TableName> tables, String srcGroup, String dstGroup) throws IOException { //get server's group RSGroupInfo srcGroupInfo = getRSGroupInfo(srcGroup); RSGroupInfo dstGroupInfo = getRSGroupInfo(dstGroup); //move servers for (Address el: servers) { srcGroupInfo.removeServer(el); dstGroupInfo.addServer(el); } //move tables for(TableName tableName: tables) { srcGroupInfo.removeTable(tableName); dstGroupInfo.addTable(tableName); } //flush changed groupinfo Map<String,RSGroupInfo> newGroupMap = Maps.newHashMap(rsGroupMap); newGroupMap.put(srcGroupInfo.getName(), srcGroupInfo); newGroupMap.put(dstGroupInfo.getName(), dstGroupInfo); flushConfig(newGroupMap); } List<RSGroupInfo> retrieveGroupListFromGroupTable() throws IOException { List<RSGroupInfo> rsGroupInfoList = Lists.newArrayList(); for (Result result : rsGroupTable.getScanner(new Scan())) { RSGroupProtos.RSGroupInfo proto = RSGroupProtos.RSGroupInfo.parseFrom( result.getValue(META_FAMILY_BYTES, META_QUALIFIER_BYTES)); rsGroupInfoList.add(RSGroupProtobufUtil.toGroupInfo(proto)); } return rsGroupInfoList; } List<RSGroupInfo> retrieveGroupListFromZookeeper() throws IOException { String groupBasePath = ZKUtil.joinZNode(watcher.znodePaths.baseZNode, rsGroupZNode); List<RSGroupInfo> RSGroupInfoList = Lists.newArrayList(); //Overwrite any info stored by table, this takes precedence try { if(ZKUtil.checkExists(watcher, groupBasePath) != -1) { for(String znode: ZKUtil.listChildrenAndWatchForNewChildren(watcher, groupBasePath)) { byte[] data = ZKUtil.getData(watcher, ZKUtil.joinZNode(groupBasePath, znode)); if(data.length > 0) { ProtobufUtil.expectPBMagicPrefix(data); ByteArrayInputStream bis = 
new ByteArrayInputStream( data, ProtobufUtil.lengthOfPBMagic(), data.length); RSGroupInfoList.add(RSGroupProtobufUtil.toGroupInfo( RSGroupProtos.RSGroupInfo.parseFrom(bis))); } } LOG.debug("Read ZK GroupInfo count:" + RSGroupInfoList.size()); } } catch (KeeperException|DeserializationException|InterruptedException e) { throw new IOException("Failed to read rsGroupZNode",e); } return RSGroupInfoList; } @Override public void refresh() throws IOException { refresh(false); } /** * Read rsgroup info from the source of truth, the hbase:rsgroup table. * Update zk cache. Called on startup of the manager. */ private synchronized void refresh(boolean forceOnline) throws IOException { List<RSGroupInfo> groupList = new LinkedList<>(); // Overwrite anything read from zk, group table is source of truth // if online read from GROUP table if (forceOnline || isOnline()) { LOG.debug("Refreshing in Online mode."); if (rsGroupTable == null) { rsGroupTable = conn.getTable(RSGROUP_TABLE_NAME); } groupList.addAll(retrieveGroupListFromGroupTable()); } else { LOG.debug("Refreshing in Offline mode."); groupList.addAll(retrieveGroupListFromZookeeper()); } // refresh default group, prune NavigableSet<TableName> orphanTables = new TreeSet<>(); for(String entry: masterServices.getTableDescriptors().getAll().keySet()) { orphanTables.add(TableName.valueOf(entry)); } final List<TableName> specialTables; if(!masterServices.isInitialized()) { specialTables = Arrays.asList(AccessControlLists.ACL_TABLE_NAME, TableName.META_TABLE_NAME, TableName.NAMESPACE_TABLE_NAME, RSGROUP_TABLE_NAME); } else { specialTables = masterServices.listTableNamesByNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR); } for (TableName table : specialTables) { orphanTables.add(table); } for (RSGroupInfo group: groupList) { if(!group.getName().equals(RSGroupInfo.DEFAULT_GROUP)) { orphanTables.removeAll(group.getTables()); } } // This is added to the last of the list so it overwrites the 'default' rsgroup loaded // from 
region group table or zk groupList.add(new RSGroupInfo(RSGroupInfo.DEFAULT_GROUP, getDefaultServers(), orphanTables)); // populate the data HashMap<String, RSGroupInfo> newGroupMap = Maps.newHashMap(); HashMap<TableName, String> newTableMap = Maps.newHashMap(); for (RSGroupInfo group : groupList) { newGroupMap.put(group.getName(), group); for(TableName table: group.getTables()) { newTableMap.put(table, group.getName()); } } resetRSGroupAndTableMaps(newGroupMap, newTableMap); updateCacheOfRSGroups(rsGroupMap.keySet()); } private synchronized Map<TableName,String> flushConfigTable(Map<String,RSGroupInfo> groupMap) throws IOException { Map<TableName,String> newTableMap = Maps.newHashMap(); List<Mutation> mutations = Lists.newArrayList(); // populate deletes for(String groupName : prevRSGroups) { if(!groupMap.containsKey(groupName)) { Delete d = new Delete(Bytes.toBytes(groupName)); mutations.add(d); } } // populate puts for(RSGroupInfo RSGroupInfo : groupMap.values()) { RSGroupProtos.RSGroupInfo proto = RSGroupProtobufUtil.toProtoGroupInfo(RSGroupInfo); Put p = new Put(Bytes.toBytes(RSGroupInfo.getName())); p.addColumn(META_FAMILY_BYTES, META_QUALIFIER_BYTES, proto.toByteArray()); mutations.add(p); for(TableName entry: RSGroupInfo.getTables()) { newTableMap.put(entry, RSGroupInfo.getName()); } } if(mutations.size() > 0) { multiMutate(mutations); } return newTableMap; } private synchronized void flushConfig() throws IOException { flushConfig(this.rsGroupMap); } private synchronized void flushConfig(Map<String, RSGroupInfo> newGroupMap) throws IOException { Map<TableName, String> newTableMap; // For offline mode persistence is still unavailable // We're refreshing in-memory state but only for default servers if (!isOnline()) { Map<String, RSGroupInfo> m = Maps.newHashMap(rsGroupMap); RSGroupInfo oldDefaultGroup = m.remove(RSGroupInfo.DEFAULT_GROUP); RSGroupInfo newDefaultGroup = newGroupMap.remove(RSGroupInfo.DEFAULT_GROUP); if (!m.equals(newGroupMap) || 
!oldDefaultGroup.getTables().equals(newDefaultGroup.getTables())) { throw new IOException("Only default servers can be updated during offline mode"); } newGroupMap.put(RSGroupInfo.DEFAULT_GROUP, newDefaultGroup); rsGroupMap = newGroupMap; return; } newTableMap = flushConfigTable(newGroupMap); // Make changes visible after having been persisted to the source of truth resetRSGroupAndTableMaps(newGroupMap, newTableMap); try { String groupBasePath = ZKUtil.joinZNode(watcher.znodePaths.baseZNode, rsGroupZNode); ZKUtil.createAndFailSilent(watcher, groupBasePath, ProtobufMagic.PB_MAGIC); List<ZKUtil.ZKUtilOp> zkOps = new ArrayList<>(newGroupMap.size()); for(String groupName : prevRSGroups) { if(!newGroupMap.containsKey(groupName)) { String znode = ZKUtil.joinZNode(groupBasePath, groupName); zkOps.add(ZKUtil.ZKUtilOp.deleteNodeFailSilent(znode)); } } for (RSGroupInfo RSGroupInfo : newGroupMap.values()) { String znode = ZKUtil.joinZNode(groupBasePath, RSGroupInfo.getName()); RSGroupProtos.RSGroupInfo proto = RSGroupProtobufUtil.toProtoGroupInfo(RSGroupInfo); LOG.debug("Updating znode: "+znode); ZKUtil.createAndFailSilent(watcher, znode); zkOps.add(ZKUtil.ZKUtilOp.deleteNodeFailSilent(znode)); zkOps.add(ZKUtil.ZKUtilOp.createAndFailSilent(znode, ProtobufUtil.prependPBMagic(proto.toByteArray()))); } LOG.debug("Writing ZK GroupInfo count: " + zkOps.size()); ZKUtil.multiOrSequential(watcher, zkOps, false); } catch (KeeperException e) { LOG.error("Failed to write to rsGroupZNode", e); masterServices.abort("Failed to write to rsGroupZNode", e); throw new IOException("Failed to write to rsGroupZNode",e); } updateCacheOfRSGroups(newGroupMap.keySet()); } /** * Make changes visible. * Caller must be synchronized on 'this'. */ private void resetRSGroupAndTableMaps(Map<String, RSGroupInfo> newRSGroupMap, Map<TableName, String> newTableMap) { // Make maps Immutable. 
this.rsGroupMap = Collections.unmodifiableMap(newRSGroupMap); this.tableMap = Collections.unmodifiableMap(newTableMap); } /** * Update cache of rsgroups. * Caller must be synchronized on 'this'. * @param currentGroups Current list of Groups. */ private void updateCacheOfRSGroups(final Set<String> currentGroups) { this.prevRSGroups.clear(); this.prevRSGroups.addAll(currentGroups); } // Called by getDefaultServers. Presume it has lock in place. private List<ServerName> getOnlineRS() throws IOException { if (masterServices != null) { return masterServices.getServerManager().getOnlineServersList(); } LOG.debug("Reading online RS from zookeeper"); List<ServerName> servers = new LinkedList<>(); try { for (String el: ZKUtil.listChildrenNoWatch(watcher, watcher.znodePaths.rsZNode)) { servers.add(ServerName.parseServerName(el)); } } catch (KeeperException e) { throw new IOException("Failed to retrieve server list from zookeeper", e); } return servers; } // Called by ServerEventsListenerThread. Presume it has lock on this manager when it runs. private SortedSet<Address> getDefaultServers() throws IOException { SortedSet<Address> defaultServers = Sets.newTreeSet(); for (ServerName serverName : getOnlineRS()) { Address server = Address.fromParts(serverName.getHostname(), serverName.getPort()); boolean found = false; for(RSGroupInfo rsgi: listRSGroups()) { if(!RSGroupInfo.DEFAULT_GROUP.equals(rsgi.getName()) && rsgi.containsServer(server)) { found = true; break; } } if (!found) { defaultServers.add(server); } } return defaultServers; } // Called by ServerEventsListenerThread. Synchronize on this because redoing // the rsGroupMap then writing it out. 
private synchronized void updateDefaultServers(SortedSet<Address> servers) throws IOException { RSGroupInfo info = rsGroupMap.get(RSGroupInfo.DEFAULT_GROUP); RSGroupInfo newInfo = new RSGroupInfo(info.getName(), servers, info.getTables()); HashMap<String, RSGroupInfo> newGroupMap = Maps.newHashMap(rsGroupMap); newGroupMap.put(newInfo.getName(), newInfo); flushConfig(newGroupMap); } /** * Calls {@link RSGroupInfoManagerImpl#updateDefaultServers(SortedSet)} to update list of known * servers. Notifications about server changes are received by registering {@link ServerListener}. * As a listener, we need to return immediately, so the real work of updating the servers is * done asynchronously in this thread. */ private class ServerEventsListenerThread extends Thread implements ServerListener { private final Log LOG = LogFactory.getLog(ServerEventsListenerThread.class); private boolean changed = false; ServerEventsListenerThread() { setDaemon(true); } @Override public void serverAdded(ServerName serverName) { serverChanged(); } @Override public void serverRemoved(ServerName serverName) { serverChanged(); } private synchronized void serverChanged() { changed = true; this.notify(); } @Override public void run() { setName(ServerEventsListenerThread.class.getName() + "-" + masterServices.getServerName()); SortedSet<Address> prevDefaultServers = new TreeSet<>(); while(isMasterRunning(masterServices)) { try { LOG.info("Updating default servers."); SortedSet<Address> servers = RSGroupInfoManagerImpl.this.getDefaultServers(); if (!servers.equals(prevDefaultServers)) { RSGroupInfoManagerImpl.this.updateDefaultServers(servers); prevDefaultServers = servers; LOG.info("Updated with servers: "+servers.size()); } try { synchronized (this) { if(!changed) { wait(); } changed = false; } } catch (InterruptedException e) { LOG.warn("Interrupted", e); } } catch (IOException e) { LOG.warn("Failed to update default servers", e); } } } } private class RSGroupStartupWorker extends Thread { 
private final Log LOG = LogFactory.getLog(RSGroupStartupWorker.class); private volatile boolean online = false; RSGroupStartupWorker() { setDaemon(true); } @Override public void run() { setName(RSGroupStartupWorker.class.getName() + "-" + masterServices.getServerName()); if (waitForGroupTableOnline()) { LOG.info("GroupBasedLoadBalancer is now online"); } } private boolean waitForGroupTableOnline() { final List<HRegionInfo> foundRegions = new LinkedList<>(); final List<HRegionInfo> assignedRegions = new LinkedList<>(); final AtomicBoolean found = new AtomicBoolean(false); final TableStateManager tsm = masterServices.getTableStateManager(); boolean createSent = false; while (!found.get() && isMasterRunning(masterServices)) { foundRegions.clear(); assignedRegions.clear(); found.set(true); try { conn.getTable(TableName.NAMESPACE_TABLE_NAME); conn.getTable(RSGROUP_TABLE_NAME); boolean rootMetaFound = masterServices.getMetaTableLocator().verifyMetaRegionLocation( conn, masterServices.getZooKeeper(), 1); final AtomicBoolean nsFound = new AtomicBoolean(false); if (rootMetaFound) { MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() { @Override public boolean visitInternal(Result row) throws IOException { HRegionInfo info = MetaTableAccessor.getHRegionInfo(row); if (info != null) { Cell serverCell = row.getColumnLatestCell(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER); if (RSGROUP_TABLE_NAME.equals(info.getTable()) && serverCell != null) { ServerName sn = ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell)); if (sn == null) { found.set(false); } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) { try { ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn); ClientProtos.GetRequest request = RequestConverter.buildGetRequest(info.getRegionName(), new Get(ROW_KEY)); rs.get(null, request); assignedRegions.add(info); } catch(Exception ex) { LOG.debug("Caught exception while verifying group region", ex); } } 
foundRegions.add(info); } if (TableName.NAMESPACE_TABLE_NAME.equals(info.getTable())) { Cell cell = row.getColumnLatestCell(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER); ServerName sn = null; if(cell != null) { sn = ServerName.parseVersionedServerName(CellUtil.cloneValue(cell)); } if (tsm.isTableState(TableName.NAMESPACE_TABLE_NAME, TableState.State.ENABLED)) { try { ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn); ClientProtos.GetRequest request = RequestConverter.buildGetRequest(info.getRegionName(), new Get(ROW_KEY)); rs.get(null, request); nsFound.set(true); } catch(Exception ex) { LOG.debug("Caught exception while verifying group region", ex); } } } } return true; } }; MetaTableAccessor.fullScanRegions(conn, visitor); // if no regions in meta then we have to create the table if (foundRegions.size() < 1 && rootMetaFound && !createSent && nsFound.get()) { createRSGroupTable(); createSent = true; } LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get() + ", regionCount=" + foundRegions.size() + ", assignCount=" + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound); found.set(found.get() && assignedRegions.size() == foundRegions.size() && foundRegions.size() > 0); } else { LOG.info("Waiting for catalog tables to come online"); found.set(false); } if (found.get()) { LOG.debug("With group table online, refreshing cached information."); RSGroupInfoManagerImpl.this.refresh(true); online = true; //flush any inconsistencies between ZK and HTable RSGroupInfoManagerImpl.this.flushConfig(); } } catch (RuntimeException e) { throw e; } catch(Exception e) { found.set(false); LOG.warn("Failed to perform check", e); } try { Thread.sleep(100); } catch (InterruptedException e) { LOG.info("Sleep interrupted", e); } } return found.get(); } private void createRSGroupTable() throws IOException { Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC); // wait for region to be online int tries = 600; while 
(!(masterServices.getMasterProcedureExecutor().isFinished(procId)) && masterServices.getMasterProcedureExecutor().isRunning() && tries > 0) { try { Thread.sleep(100); } catch (InterruptedException e) { throw new IOException("Wait interrupted ", e); } tries--; } if(tries <= 0) { throw new IOException("Failed to create group table in a given time."); } else { Procedure<?> result = masterServices.getMasterProcedureExecutor().getResult(procId); if (result != null && result.isFailed()) { throw new IOException("Failed to create group table. " + result.getException().unwrapRemoteIOException()); } } } public boolean isOnline() { return online; } } private static boolean isMasterRunning(MasterServices masterServices) { return !masterServices.isAborted() && !masterServices.isStopped(); } private void multiMutate(List<Mutation> mutations) throws IOException { CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY); MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder = MultiRowMutationProtos.MutateRowsRequest.newBuilder(); for (Mutation mutation : mutations) { if (mutation instanceof Put) { mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT, mutation)); } else if (mutation instanceof Delete) { mmrBuilder.addMutationRequest( org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto. 
MutationType.DELETE, mutation)); } else { throw new DoNotRetryIOException("multiMutate doesn't support " + mutation.getClass().getName()); } } MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service = MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel); try { service.mutateRows(null, mmrBuilder.build()); } catch (ServiceException ex) { ProtobufUtil.toIOException(ex); } } private void checkGroupName(String groupName) throws ConstraintException { if (!groupName.matches("[a-zA-Z0-9_]+")) { throw new ConstraintException("RSGroup name should only contain alphanumeric characters"); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.jobgraph.tasks;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.SerializedValue;
import org.apache.flink.util.function.ThrowingRunnable;

import java.io.IOException;
import java.util.concurrent.Future;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * This is the abstract base class for every task that can be executed by a TaskManager.
 * Concrete tasks extend this class, for example the streaming and batch tasks.
 *
 * <p>The TaskManager invokes the {@link #invoke()} method when executing a
 * task. All operations of the task happen in this method (setting up input
 * output stream readers and writers as well as the task's core operation).
 *
 * <p>All classes that extend must offer a constructor {@code MyTask(Environment, TaskStateSnapshot)}.
 * Tasks that are always stateless can, for convenience, also only implement the constructor
 * {@code MyTask(Environment)}.
 *
 * <p><i>Developer note: While constructors cannot be enforced at compile time, we did not yet venture
 * on the endeavor of introducing factories (it is only an internal API after all, and with Java 8,
 * one can use {@code Class::new} almost like a factory lambda.</i>
 *
 * <p><b>NOTE:</b> There is no constructor that accepts an initial task state snapshot
 * and stores it in a variable. That is on purpose, because the AbstractInvokable itself
 * does not need the state snapshot (only subclasses such as StreamTask do need the state)
 * and we do not want to store a reference indefinitely, thus preventing cleanup of
 * the initial state structure by the Garbage Collector.
 *
 * <p>Any subclass that supports recoverable state and participates in
 * checkpointing needs to override {@link #triggerCheckpointAsync(CheckpointMetaData, CheckpointOptions, boolean)},
 * {@link #triggerCheckpointOnBarrier(CheckpointMetaData, CheckpointOptions, CheckpointMetrics)},
 * {@link #abortCheckpointOnBarrier(long, Throwable)} and {@link #notifyCheckpointCompleteAsync(long)}.
 */
public abstract class AbstractInvokable {

	/** The environment assigned to this invokable. */
	private final Environment environment;

	/** Flag whether cancellation should interrupt the executing thread. */
	private volatile boolean shouldInterruptOnCancel = true;

	/**
	 * Create an Invokable task and set its environment.
	 *
	 * @param environment The environment assigned to this invokable.
	 */
	public AbstractInvokable(Environment environment) {
		this.environment = checkNotNull(environment);
	}

	// ------------------------------------------------------------------------
	//  Core methods
	// ------------------------------------------------------------------------

	/**
	 * Starts the execution.
	 *
	 * <p>Must be overwritten by the concrete task implementation. This method
	 * is called by the task manager when the actual execution of the task
	 * starts.
	 *
	 * <p>All resources should be cleaned up when the method returns. Make sure
	 * to guard the code with <code>try-finally</code> blocks where necessary.
	 *
	 * @throws Exception
	 *         Tasks may forward their exceptions for the TaskManager to handle through failure/recovery.
	 */
	public abstract void invoke() throws Exception;

	/**
	 * This method is called when a task is canceled either as a result of a user abort or an execution failure. It can
	 * be overwritten to respond to shut down the user code properly.
	 *
	 * @throws Exception
	 *         thrown if any exception occurs during the execution of the user code
	 */
	public void cancel() throws Exception {
		// The default implementation does nothing.
	}

	/**
	 * Sets whether the thread that executes the {@link #invoke()} method should be
	 * interrupted during cancellation. This method sets the flag for both the initial
	 * interrupt, as well as for the repeated interrupt. Setting the interruption to
	 * false at some point during the cancellation procedure is a way to stop further
	 * interrupts from happening.
	 */
	public void setShouldInterruptOnCancel(boolean shouldInterruptOnCancel) {
		this.shouldInterruptOnCancel = shouldInterruptOnCancel;
	}

	/**
	 * Checks whether the task should be interrupted during cancellation.
	 * This method is checked both for the initial interrupt, as well as for the
	 * repeated interrupt. Setting the interruption to false via
	 * {@link #setShouldInterruptOnCancel(boolean)} is a way to stop further interrupts
	 * from happening.
	 */
	public boolean shouldInterruptOnCancel() {
		return shouldInterruptOnCancel;
	}

	// ------------------------------------------------------------------------
	//  Access to Environment and Configuration
	// ------------------------------------------------------------------------

	/**
	 * Returns the environment of this task.
	 *
	 * @return The environment of this task.
	 */
	public final Environment getEnvironment() {
		return this.environment;
	}

	/**
	 * Returns the user code class loader of this invokable.
	 *
	 * @return user code class loader of this invokable.
	 */
	public final ClassLoader getUserCodeClassLoader() {
		return getEnvironment().getUserCodeClassLoader().asClassLoader();
	}

	/**
	 * Returns the current number of subtasks the respective task is split into.
	 *
	 * @return the current number of subtasks the respective task is split into
	 */
	public int getCurrentNumberOfSubtasks() {
		return this.environment.getTaskInfo().getNumberOfParallelSubtasks();
	}

	/**
	 * Returns the index of this subtask in the subtask group.
	 *
	 * @return the index of this subtask in the subtask group
	 */
	public int getIndexInSubtaskGroup() {
		return this.environment.getTaskInfo().getIndexOfThisSubtask();
	}

	/**
	 * Returns the task configuration object which was attached to the original {@link org.apache.flink.runtime.jobgraph.JobVertex}.
	 *
	 * @return the task configuration object which was attached to the original {@link org.apache.flink.runtime.jobgraph.JobVertex}
	 */
	public final Configuration getTaskConfiguration() {
		return this.environment.getTaskConfiguration();
	}

	/**
	 * Returns the job configuration object which was attached to the original {@link org.apache.flink.runtime.jobgraph.JobGraph}.
	 *
	 * @return the job configuration object which was attached to the original {@link org.apache.flink.runtime.jobgraph.JobGraph}
	 */
	public Configuration getJobConfiguration() {
		return this.environment.getJobConfiguration();
	}

	/**
	 * Returns the global ExecutionConfig.
	 */
	public ExecutionConfig getExecutionConfig() {
		return this.environment.getExecutionConfig();
	}

	// ------------------------------------------------------------------------
	//  Checkpointing Methods
	// ------------------------------------------------------------------------

	/**
	 * This method is called to trigger a checkpoint, asynchronously by the checkpoint
	 * coordinator.
	 *
	 * <p>This method is called for tasks that start the checkpoints by injecting the initial barriers,
	 * i.e., the source tasks. In contrast, checkpoints on downstream operators, which are the result of
	 * receiving checkpoint barriers, invoke the {@link #triggerCheckpointOnBarrier(CheckpointMetaData, CheckpointOptions, CheckpointMetrics)}
	 * method.
	 *
	 * @param checkpointMetaData Meta data about this checkpoint
	 * @param checkpointOptions Options for performing this checkpoint
	 * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline
	 *                              to fire any registered event-time timers
	 *
	 * @return future with value of {@code false} if the checkpoint was not carried out, {@code true} otherwise
	 */
	public Future<Boolean> triggerCheckpointAsync(
			CheckpointMetaData checkpointMetaData,
			CheckpointOptions checkpointOptions,
			boolean advanceToEndOfEventTime) {
		throw new UnsupportedOperationException(String.format("triggerCheckpointAsync not supported by %s", this.getClass().getName()));
	}

	/**
	 * This method is called when a checkpoint is triggered as a result of receiving checkpoint
	 * barriers on all input streams.
	 *
	 * @param checkpointMetaData Meta data about this checkpoint
	 * @param checkpointOptions Options for performing this checkpoint
	 * @param checkpointMetrics Metrics about this checkpoint
	 *
	 * @throws Exception Exceptions thrown as the result of triggering a checkpoint are forwarded.
	 */
	public void triggerCheckpointOnBarrier(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, CheckpointMetrics checkpointMetrics) throws IOException {
		throw new UnsupportedOperationException(String.format("triggerCheckpointOnBarrier not supported by %s", this.getClass().getName()));
	}

	/**
	 * This method performs some action asynchronously in the task thread.
	 *
	 * @param runnable the action to perform
	 * @param descriptionFormat the optional description for the command that is used for debugging and error-reporting.
	 * @param descriptionArgs the parameters used to format the final description string.
	 */
	public <E extends Exception> void executeInTaskThread(
			ThrowingRunnable<E> runnable,
			String descriptionFormat,
			Object... descriptionArgs) throws E {
		throw new UnsupportedOperationException(
			String.format("executeInTaskThread not supported by %s", getClass().getName()));
	}

	/**
	 * Aborts a checkpoint as the result of receiving possibly some checkpoint barriers,
	 * but at least one {@link org.apache.flink.runtime.io.network.api.CancelCheckpointMarker}.
	 *
	 * <p>This requires implementing tasks to forward a
	 * {@link org.apache.flink.runtime.io.network.api.CancelCheckpointMarker} to their outputs.
	 *
	 * @param checkpointId The ID of the checkpoint to be aborted.
	 * @param cause The reason why the checkpoint was aborted during alignment
	 */
	public void abortCheckpointOnBarrier(long checkpointId, Throwable cause) throws IOException {
		throw new UnsupportedOperationException(String.format("abortCheckpointOnBarrier not supported by %s", this.getClass().getName()));
	}

	/**
	 * Invoked when a checkpoint has been completed, i.e., when the checkpoint coordinator has received
	 * the notification from all participating tasks.
	 *
	 * @param checkpointId The ID of the checkpoint that is complete.
	 *
	 * @return future that completes when the notification has been processed by the task.
	 */
	public Future<Void> notifyCheckpointCompleteAsync(long checkpointId) {
		throw new UnsupportedOperationException(String.format("notifyCheckpointCompleteAsync not supported by %s", this.getClass().getName()));
	}

	/**
	 * Invoked when a checkpoint has been aborted, i.e., when the checkpoint coordinator has received a decline message
	 * from one task and try to abort the targeted checkpoint by notification.
	 *
	 * @param checkpointId The ID of the checkpoint that is aborted.
	 *
	 * @return future that completes when the notification has been processed by the task.
	 */
	public Future<Void> notifyCheckpointAbortAsync(long checkpointId) {
		throw new UnsupportedOperationException(String.format("notifyCheckpointAbortAsync not supported by %s", this.getClass().getName()));
	}

	/**
	 * Dispatches an operator event delivered by the operator coordinator to the target operator.
	 * Default implementation rejects the event; tasks hosting coordinated operators override this.
	 */
	public void dispatchOperatorEvent(OperatorID operator, SerializedValue<OperatorEvent> event) throws FlinkException {
		throw new UnsupportedOperationException("dispatchOperatorEvent not supported by " + getClass().getName());
	}
}
/*
 * The MIT License
 *
 * Copyright (c) 2010, InfraDNA, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson.model.labels;

import antlr.ANTLRException;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import hudson.model.FreeStyleProject.DescriptorImpl;
import hudson.model.Label;
import hudson.model.Node.Mode;
import hudson.slaves.DumbSlave;
import hudson.slaves.RetentionStrategy;
import org.jvnet.hudson.test.Bug;
import org.jvnet.hudson.test.HudsonTestCase;
import org.jvnet.hudson.test.SequenceLock;
import org.jvnet.hudson.test.TestBuilder;

import java.io.IOException;
import java.lang.reflect.Field;
import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;

/**
 * Tests for label expression parsing and label-based scheduling.
 *
 * @author Kohsuke Kawaguchi
 */
public class LabelExpressionTest extends HudsonTestCase {
    /**
     * Verifies the queueing behavior in the presence of the expression.
     */
    public void testQueueBehavior() throws Exception {
        DumbSlave w32 = createSlave("win 32bit",null);
        DumbSlave w64 = createSlave("win 64bit",null);
        createSlave("linux 32bit",null);

        final SequenceLock seq = new SequenceLock();

        FreeStyleProject p1 = createFreeStyleProject();
        p1.getBuildersList().add(new TestBuilder() {
            public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
                seq.phase(0); // first, make sure the w32 slave is occupied
                seq.phase(2);
                seq.done();
                return true;
            }
        });
        p1.setAssignedLabel(jenkins.getLabel("win && 32bit"));

        FreeStyleProject p2 = createFreeStyleProject();
        p2.setAssignedLabel(jenkins.getLabel("win && 32bit"));

        FreeStyleProject p3 = createFreeStyleProject();
        p3.setAssignedLabel(jenkins.getLabel("win"));

        Future<FreeStyleBuild> f1 = p1.scheduleBuild2(0);

        seq.phase(1); // we schedule p2 build after w32 slave is occupied
        Future<FreeStyleBuild> f2 = p2.scheduleBuild2(0);

        Thread.sleep(1000); // time window to ensure queue has tried to assign f2 build

        // p3 is tied to 'win', so even though p1 is busy, this should still go ahead and complete
        FreeStyleBuild b3 = assertBuildStatusSuccess(p3.scheduleBuild2(0));
        assertSame(w64,b3.getBuiltOn());

        seq.phase(3);   // once we confirm that p3 build is over, we let p1 proceed

        // p1 should have been built on w32
        FreeStyleBuild b1 = assertBuildStatusSuccess(f1);
        assertSame(w32,b1.getBuiltOn());

        // and so is p2
        FreeStyleBuild b2 = assertBuildStatusSuccess(f2);
        assertSame(w32,b2.getBuiltOn());
    }

    /**
     * Push the build around to different nodes via the assignment
     * to make sure it gets where we need it to.
     */
    public void testQueueBehavior2() throws Exception {
        DumbSlave s = createSlave("win",null);

        FreeStyleProject p = createFreeStyleProject();

        p.setAssignedLabel(jenkins.getLabel("!win"));
        FreeStyleBuild b = assertBuildStatusSuccess(p.scheduleBuild2(0));
        assertSame(jenkins,b.getBuiltOn());

        p.setAssignedLabel(jenkins.getLabel("win"));
        b = assertBuildStatusSuccess(p.scheduleBuild2(0));
        assertSame(s,b.getBuiltOn());

        p.setAssignedLabel(jenkins.getLabel("!win"));
        b = assertBuildStatusSuccess(p.scheduleBuild2(0));
        assertSame(jenkins,b.getBuiltOn());
    }

    /**
     * Make sure we can reset the label of an existing slave.
     */
    public void testSetLabelString() throws Exception {
        DumbSlave s = createSlave("foo","",null);

        // BUG FIX: the original used assertSame, which compares object references; for string
        // values, assertEquals expresses the intended value comparison and is not dependent
        // on interning / returning the identical String instance.
        assertEquals("", s.getLabelString());

        s.setLabelString("bar");

        assertEquals("bar", s.getLabelString());
    }

    /**
     * Tests the expression parser.
     */
    public void testParser() throws Exception {
        parseAndVerify("foo", "foo");
        parseAndVerify("32bit.dot", "32bit.dot");
        parseAndVerify("foo||bar", "foo || bar");

        // user-given parenthesis is preserved
        parseAndVerify("foo||bar&&zot", "foo||bar&&zot");
        parseAndVerify("foo||(bar&&zot)", "foo||(bar&&zot)");
        parseAndVerify("(foo||bar)&&zot", "(foo||bar)&&zot");

        parseAndVerify("foo->bar", "foo ->\tbar");
        parseAndVerify("!foo<->bar", "!foo <-> bar");
    }

    @Bug(8537)
    public void testParser2() throws Exception {
        parseAndVerify("aaa&&bbb&&ccc","aaa&&bbb&&ccc");
    }

    // Parses the expression and checks that its canonical name matches the expected string.
    private void parseAndVerify(String expected, String expr) throws ANTLRException {
        assertEquals(expected, LabelExpression.parseExpression(expr).getName());
    }

    public void testParserError() throws Exception {
        parseShouldFail("foo bar");
        parseShouldFail("foo (bar)");
    }

    public void testLaxParsing() {
        // this should parse as an atom
        LabelAtom l = (LabelAtom) jenkins.getLabel("lucene.zones.apache.org (Solaris 10)");
        assertEquals(l.getName(),"lucene.zones.apache.org (Solaris 10)");
        assertEquals(l.getExpression(),"\"lucene.zones.apache.org (Solaris 10)\"");
    }

    public void testDataCompatibilityWithHostNameWithWhitespace() throws Exception {
        DumbSlave slave = new DumbSlave("abc def (xyz) : test", "dummy",
                createTmpDir().getPath(), "1", Mode.NORMAL, "", createComputerLauncher(null), RetentionStrategy.NOOP, Collections.EMPTY_LIST);
        jenkins.addNode(slave);

        FreeStyleProject p = createFreeStyleProject();
        p.setAssignedLabel(jenkins.getLabel("abc def"));
        assertEquals("abc def",p.getAssignedLabel().getName());
        assertEquals("\"abc def\"",p.getAssignedLabel().getExpression());

        // expression should be persisted, not the name
        Field f = AbstractProject.class.getDeclaredField("assignedNode");
        f.setAccessible(true);
        assertEquals("\"abc def\"",f.get(p));

        // but if the name is set, we'd still like to parse it
        f.set(p,"a:b c");
        assertEquals("a:b c",p.getAssignedLabel().getName());
    }

    public void testQuote() {
        Label l = jenkins.getLabel("\"abc\\\\\\\"def\"");
        assertEquals("abc\\\"def",l.getName());

        l = jenkins.getLabel("label1||label2"); // create label expression
        l = jenkins.getLabel("\"label1||label2\"");
        assertEquals("label1||label2",l.getName());
    }

    /**
     * The name should have parenthesis at the right place to preserve the tree structure.
     */
    public void testComposite() {
        LabelAtom x = jenkins.getLabelAtom("x");
        assertEquals("!!x",x.not().not().getName());
        assertEquals("(x||x)&&x",x.or(x).and(x).getName());
        assertEquals("x&&x||x",x.and(x).or(x).getName());
    }

    public void testDash() {
        jenkins.getLabelAtom("solaris-x86");
    }

    // Asserts that the given expression is rejected by the parser.
    private void parseShouldFail(String expr) {
        try {
            LabelExpression.parseExpression(expr);
            fail(expr + " should fail to parse");
        } catch (ANTLRException e) {
            // expected
        }
    }

    public void testFormValidation() throws Exception {
        executeOnServer(new Callable<Object>() {
            public Object call() throws Exception {
                DescriptorImpl d = jenkins.getDescriptorByType(DescriptorImpl.class);
                Label l = jenkins.getLabel("foo");
                DumbSlave s = createSlave(l);
                String msg = d.doCheckAssignedLabelString("goo").renderHtml();
                assertTrue(msg.contains("foo"));
                assertTrue(msg.contains("goo"));

                msg = d.doCheckAssignedLabelString("master && goo").renderHtml();
                assertTrue(msg.contains("foo"));
                assertTrue(msg.contains("goo"));
                return null;
            }
        });
    }
}
/* $This file is distributed under the terms of the license in LICENSE$ */ package edu.cornell.mannlib.vitro.webapp.controller.freemarker; import static edu.cornell.mannlib.vitro.webapp.controller.freemarker.ImageUploadController.PARAMETER_UPLOADED_FILE; import static edu.cornell.mannlib.vitro.webapp.controller.freemarker.ImageUploadController.THUMBNAIL_HEIGHT; import static edu.cornell.mannlib.vitro.webapp.controller.freemarker.ImageUploadController.THUMBNAIL_WIDTH; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.ServletContext; import org.apache.commons.fileupload.FileItem; import org.apache.commons.io.FilenameUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import edu.cornell.mannlib.vitro.webapp.application.ApplicationUtils; import edu.cornell.mannlib.vitro.webapp.beans.Individual; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.ImageUploadController.UserMistakeException; import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory; import edu.cornell.mannlib.vitro.webapp.filestorage.TempFileHolder; import edu.cornell.mannlib.vitro.webapp.filestorage.UploadedFileHelper; import edu.cornell.mannlib.vitro.webapp.filestorage.model.FileInfo; import edu.cornell.mannlib.vitro.webapp.modules.fileStorage.FileAlreadyExistsException; import edu.cornell.mannlib.vitro.webapp.modules.fileStorage.FileStorage; import edu.cornell.mannlib.vitro.webapp.modules.imageProcessor.ImageProcessor.CropRectangle; import edu.cornell.mannlib.vitro.webapp.modules.imageProcessor.ImageProcessor.Dimensions; import edu.cornell.mannlib.vitro.webapp.modules.imageProcessor.ImageProcessor.ImageProcessorException; /** * Handle the mechanics of validating, storing, and deleting file images. 
*/ public class ImageUploadHelper { private static final Log log = LogFactory.getLog(ImageUploadHelper.class); /* * Keys to text strings for error messages. */ private static final String ERROR_CODE_NO_IMAGE_TO_CROP = "imageUpload.errorNoImageForCropping"; private static final String ERROR_CODE_IMAGE_TOO_SMALL = "imageUpload.errorImageTooSmall"; private static final String ERROR_CODE_UNKNOWN = "imageUpload.errorUnknown"; private static final String ERROR_CODE_UNRECOGNIZED_FILE_TYPE = "imageUpload.errorUnrecognizedFileType"; private static final String ERROR_CODE_NO_PHOTO_SELECTED = "imageUpload.errorNoPhotoSelected"; private static final String ERROR_CODE_BAD_MULTIPART_REQUEST = "imageUpload.errorBadMultipartRequest"; private static final String ERROR_CODE_FORM_FIELD_MISSING = "imageUpload.errorFormFieldMissing"; /** * When they upload a new image, store it as this session attribute until * we're ready to attach it to the Individual. */ public static final String ATTRIBUTE_TEMP_FILE = "ImageUploadHelper.tempFile"; /** * If the main image is larger than this, it will be displayed at reduced * scale. */ public static final int MAXIMUM_IMAGE_DISPLAY_WIDTH = 500; /** Recognized file extensions mapped to MIME-types. */ private static final Map<String, String> RECOGNIZED_FILE_TYPES = createFileTypesMap(); /** Browser-specific MIME-types mapped to recognized MIME-types. */ private static final Map<String, String> NON_STANDARD_MIME_TYPES = createNonStandardMimeTypesMap(); private static Map<String, String> createFileTypesMap() { Map<String, String> map = new HashMap<String, String>(); map.put(".gif", "image/gif"); map.put(".png", "image/png"); map.put(".jpg", "image/jpeg"); map.put(".jpeg", "image/jpeg"); map.put(".jpe", "image/jpeg"); return Collections.unmodifiableMap(map); } /** * Internet Explorer can tell us that an image has a funky * Microsoft-specific MIME-type, and we can replace it with one that * everyone recognizes. This table records those types. 
*/ private static Map<String, String> createNonStandardMimeTypesMap() { Map<String, String> map = new HashMap<String, String>(); map.put("image/x-png", "image/png"); map.put("image/pjpeg", "image/jpeg"); return Collections.unmodifiableMap(map); } private final FileStorage fileStorage; private final UploadedFileHelper uploadedFileHelper; ImageUploadHelper(FileStorage fileStorage, WebappDaoFactory webAppDaoFactory, ServletContext ctx) { this.fileStorage = fileStorage; this.uploadedFileHelper = new UploadedFileHelper(fileStorage, webAppDaoFactory, ctx); } /** * The image must be present and non-empty, and must have a mime-type that * represents an image we support. * * We rely on the fact that a FileUploadServletRequest will always * have a map of {@link FileItem}s, even if it is empty. However, that map * may not contain the field that we want, or that field may contain an * empty file. * * @throws UserMistakeException * if there is no file, if it is empty, or if it is not an image * file. */ FileItem validateImageFromRequest(VitroRequest vreq) throws UserMistakeException { Map<String, List<FileItem>> map = vreq.getFiles(); if (map == null) { throw new IllegalStateException(ERROR_CODE_BAD_MULTIPART_REQUEST); } List<FileItem> list = map.get(PARAMETER_UPLOADED_FILE); if ((list == null) || list.isEmpty()) { throw new UserMistakeException(ERROR_CODE_FORM_FIELD_MISSING, PARAMETER_UPLOADED_FILE); } FileItem file = list.get(0); if (file.getSize() == 0) { throw new UserMistakeException(ERROR_CODE_NO_PHOTO_SELECTED); } String filename = getSimpleFilename(file); String mimeType = getMimeType(file); if (!RECOGNIZED_FILE_TYPES.containsValue(mimeType)) { log.debug("Unrecognized MIME type: '" + mimeType + "'"); throw new UserMistakeException(ERROR_CODE_UNRECOGNIZED_FILE_TYPE, filename); } return file; } /** * The user has uploaded a new main image, but we're not ready to assign it * to them. 
* * Put it into the file storage system, and attach it as a temp file on the * session until we need it. */ FileInfo storeNewImage(FileItem fileItem, VitroRequest vreq) { InputStream inputStream = null; try { inputStream = fileItem.getInputStream(); String mimeType = getMimeType(fileItem); String filename = getSimpleFilename(fileItem); FileInfo fileInfo = uploadedFileHelper.createFile(filename, mimeType, inputStream); TempFileHolder.attach(vreq.getSession(), ATTRIBUTE_TEMP_FILE, fileInfo); return fileInfo; } catch (IOException e) { throw new IllegalStateException("Can't create the new image file.", e); } finally { if (inputStream != null) { try { inputStream.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * Find out how big this image is. * * @throws UserMistakeException * if the image is smaller than a thumbnail. */ Dimensions getNewImageSize(FileInfo fileInfo) throws UserMistakeException { InputStream source = null; try { String uri = fileInfo.getBytestreamUri(); String filename = fileInfo.getFilename(); source = fileStorage.getInputStream(uri, filename); Dimensions size = ApplicationUtils.instance().getImageProcessor() .getDimensions(source); log.debug("new image size is " + size); if ((size.height < THUMBNAIL_HEIGHT) || (size.width < THUMBNAIL_WIDTH)) { throw new UserMistakeException(ERROR_CODE_IMAGE_TOO_SMALL, THUMBNAIL_HEIGHT, THUMBNAIL_WIDTH); } return size; } catch (FileNotFoundException e) { throw new IllegalStateException("File not found: " + fileInfo, e); } catch (IOException e) { throw new IllegalStateException("Can't read image file: " + fileInfo, e); } catch (UserMistakeException e) { throw e; } catch (Exception e) { log.warn("Unexpected exception in image handling", e); throw new UserMistakeException(ERROR_CODE_UNKNOWN); } finally { if (source != null) { try { source.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * Get the info for the new image, from where we stored it in the session. 
* * @throws UserMistakeException * if it isn't there. */ FileInfo getNewImageInfo(VitroRequest vreq) throws UserMistakeException { FileInfo fileInfo = TempFileHolder.remove(vreq.getSession(), ATTRIBUTE_TEMP_FILE); if (fileInfo == null) { throw new UserMistakeException(ERROR_CODE_NO_IMAGE_TO_CROP); } return fileInfo; } /** * Crop the main image to create the thumbnail, and put it into the file * storage system. */ FileInfo generateThumbnail(CropRectangle crop, FileInfo newImage) { InputStream mainStream = null; InputStream thumbStream = null; try { String mainBytestreamUri = newImage.getBytestreamUri(); String mainFilename = newImage.getFilename(); mainStream = fileStorage.getInputStream(mainBytestreamUri, mainFilename); thumbStream = ApplicationUtils .instance() .getImageProcessor() .cropAndScale(mainStream, crop, new Dimensions(THUMBNAIL_WIDTH, THUMBNAIL_HEIGHT)); String mimeType = RECOGNIZED_FILE_TYPES.get(".jpg"); String filename = createThumbnailFilename(mainFilename); FileInfo fileInfo = uploadedFileHelper.createFile(filename, mimeType, thumbStream); log.debug("Created thumbnail: " + fileInfo); return fileInfo; } catch (FileAlreadyExistsException e) { throw new IllegalStateException("Can't create the thumbnail file: " + e.getMessage(), e); } catch (IOException e) { throw new IllegalStateException("Can't create the thumbnail file", e); } catch (ImageProcessorException e) { throw new IllegalStateException("Failed to scale the image", e); } finally { if (mainStream != null) { try { mainStream.close(); } catch (IOException e) { e.printStackTrace(); } } if (thumbStream != null) { try { thumbStream.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * If this entity already had a main image, remove it. If the image and the * thumbnail are no longer used by anyone, throw them away. */ void removeExistingImage(Individual person) { uploadedFileHelper.removeMainImage(person); } /** * Store the image on the entity, and the thumbnail on the image. 
*/ void storeImageFiles(Individual entity, FileInfo newImage, FileInfo thumbnail) { uploadedFileHelper.setImagesOnEntity(entity.getURI(), newImage, thumbnail); } /** * Internet Explorer and Opera will give you the full path along with the * filename. This will remove the path. */ private String getSimpleFilename(FileItem item) { String fileName = item.getName(); if (fileName == null) { return null; } else { return FilenameUtils.getName(fileName); } } /** * Get the MIME type as supplied by the browser. If none, try to infer it * from the filename extension and the map of recognized MIME types. */ private String getMimeType(FileItem file) { String mimeType = file.getContentType(); if (mimeType != null) { // If the browser supplied the MIME type, we may need to // replace it with the standard value. if (NON_STANDARD_MIME_TYPES.containsKey(mimeType)) { mimeType = NON_STANDARD_MIME_TYPES.get(mimeType); } return mimeType; } String filename = getSimpleFilename(file); int periodHere = filename.lastIndexOf('.'); if (periodHere == -1) { return null; } String extension = filename.substring(periodHere); return RECOGNIZED_FILE_TYPES.get(extension); } /** * Create a name for the thumbnail from the name of the original file. * "myPicture.anything" becomes "thumbnail_myPicture.jpg". */ private String createThumbnailFilename(String filename) { String prefix = "thumbnail_"; String extension = ".jpg"; int periodHere = filename.lastIndexOf('.'); if (periodHere == -1) { return prefix + filename + extension; } else { return prefix + filename.substring(0, periodHere) + extension; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.webdav.jcr; import org.apache.jackrabbit.JcrConstants; import org.apache.jackrabbit.webdav.DavConstants; import org.apache.jackrabbit.webdav.DavException; import org.apache.jackrabbit.webdav.DavLocatorFactory; import org.apache.jackrabbit.webdav.DavResource; import org.apache.jackrabbit.webdav.DavResourceFactory; import org.apache.jackrabbit.webdav.DavResourceLocator; import org.apache.jackrabbit.webdav.DavServletResponse; import org.apache.jackrabbit.webdav.MultiStatus; import org.apache.jackrabbit.webdav.MultiStatusResponse; import org.apache.jackrabbit.webdav.jcr.property.JcrDavPropertyNameSet; import org.apache.jackrabbit.webdav.property.DavPropertyName; import org.apache.jackrabbit.webdav.property.DefaultDavProperty; import org.apache.jackrabbit.webdav.property.HrefProperty; import org.apache.jackrabbit.webdav.property.DavProperty; import org.apache.jackrabbit.webdav.property.PropEntry; import org.apache.jackrabbit.webdav.version.LabelInfo; import org.apache.jackrabbit.webdav.version.MergeInfo; import org.apache.jackrabbit.webdav.version.UpdateInfo; import org.apache.jackrabbit.webdav.version.VersionControlledResource; import 
org.apache.jackrabbit.webdav.version.VersionHistoryResource; import org.apache.jackrabbit.webdav.version.VersionResource; import org.apache.jackrabbit.webdav.version.VersionableResource; import org.apache.jackrabbit.webdav.version.report.ReportType; import org.apache.jackrabbit.webdav.version.report.SupportedReportSetProperty; import org.apache.jackrabbit.webdav.xml.DomUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Element; import javax.jcr.Item; import javax.jcr.Node; import javax.jcr.NodeIterator; import javax.jcr.RepositoryException; import javax.jcr.Session; import javax.jcr.Value; import javax.jcr.ValueFormatException; import javax.jcr.observation.EventListener; import javax.jcr.version.Version; import javax.jcr.version.VersionHistory; import javax.jcr.version.VersionManager; import java.util.List; import java.util.Collections; /** * <code>VersionControlledItemCollection</code> represents a JCR node item and * covers all functionality related to versioning of {@link Node}s. * * @see Node */ public class VersionControlledItemCollection extends DefaultItemCollection implements VersionControlledResource { private static Logger log = LoggerFactory.getLogger(VersionControlledItemCollection.class); /** * Create a new <code>VersionControlledItemCollection</code>. * * @param locator * @param session * @param factory * @param item */ public VersionControlledItemCollection(DavResourceLocator locator, JcrDavSession session, DavResourceFactory factory, Item item) { super(locator, session, factory, item); if (exists() && !(item instanceof Node)) { throw new IllegalArgumentException("A collection resource can not be constructed from a Property item."); } } //----------------------------------------------< DavResource interface >--- /** * Return a comma separated string listing the supported method names. * * @return the supported method names. 
* @see org.apache.jackrabbit.webdav.DavResource#getSupportedMethods() */ @Override public String getSupportedMethods() { StringBuffer sb = new StringBuffer(super.getSupportedMethods()); // Versioning support sb.append(", ").append(VersionableResource.METHODS); if (isVersionControlled()) { try { if (((Node)item).isCheckedOut()) { sb.append(", ").append(VersionControlledResource.methods_checkedOut); } else { sb.append(", ").append(VersionControlledResource.methods_checkedIn); } } catch (RepositoryException e) { // should not occur. log.error(e.getMessage()); } } return sb.toString(); } @Override public DavProperty<?> getProperty(DavPropertyName name) { DavProperty prop = super.getProperty(name); if (prop == null && isVersionControlled()) { Node n = (Node) item; // properties defined by RFC 3253 for version-controlled resources // workspace property already set in AbstractResource.initProperties() try { if (VERSION_HISTORY.equals(name)) { // DAV:version-history (computed) String vhHref = getLocatorFromItem(n.getVersionHistory()).getHref(true); prop = new HrefProperty(VERSION_HISTORY, vhHref, true); } else if (CHECKED_OUT.equals(name) && n.isCheckedOut()) { // DAV:checked-out property (protected) String baseVHref = getLocatorFromItem(n.getBaseVersion()).getHref(true); prop = new HrefProperty(CHECKED_OUT, baseVHref, true); } else if (CHECKED_IN.equals(name) && !n.isCheckedOut()) { // DAV:checked-in property (protected) String baseVHref = getLocatorFromItem(n.getBaseVersion()).getHref(true); prop = new HrefProperty(CHECKED_IN, baseVHref, true); } } catch (RepositoryException e) { log.error(e.getMessage()); } } return prop; } /** * @param changeList * @throws DavException * @see DefaultItemCollection#alterProperties(List) * for additional description of non-compliant behaviour. */ @Override public MultiStatusResponse alterProperties(List<? 
extends PropEntry> changeList) throws DavException { /* first resolve merge conflict since they cannot be handled by setting property values in jcr (and are persisted immediately). NOTE: this violates RFC 2518 that requires that proppatch is processed in the order entries are present in the xml and that required that no changes must be persisted if any set/remove fails. */ // TODO: solve violation of RFC 2518 resolveMergeConflict(changeList); // alter other properties only if merge-conflicts could be handled return super.alterProperties(changeList); } /** * Resolve one or multiple merge conflicts present on this resource. Please * note that the 'setProperties' or 'removeProperties' set my contain additional * resource properties, that need to be changed. Those properties are left * untouched, whereas the {@link #AUTO_MERGE_SET DAV:auto-merge-set}, is * removed from the list upon successful resolution of a merge conflict.<br> * If the removeProperties or setProperties set do not contain the mentioned * merge conflict resource properties or if the value of those properties do * not allow for a resolution of an existing merge conflict, this method * returns silently. * * @param changeList * @throws org.apache.jackrabbit.webdav.DavException * @see Node#doneMerge(Version) * @see Node#cancelMerge(Version) */ private void resolveMergeConflict(List<? extends PropEntry> changeList) throws DavException { if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } try { Node n = (Node) item; VersionManager vMgr = getVersionManager(); String path = item.getPath(); DavProperty<?> autoMergeSet = null; DavProperty<?> predecessorSet = null; /* find DAV:auto-merge-set entries. 
If none exists no attempt is made to resolve merge conflict > return silently */ for (int i = 0; i < changeList.size(); i++) { PropEntry propEntry = changeList.get(i); // If DAV:auto-merge-set is DavPropertyName all remaining merge // conflicts are resolved with 'cancel' if (propEntry instanceof DavPropertyName && AUTO_MERGE_SET.equals(propEntry)) { // retrieve the current jcr:mergeFailed property values if (!n.hasProperty(JcrConstants.JCR_MERGEFAILED)) { throw new DavException(DavServletResponse.SC_CONFLICT, "Attempt to resolve non-existing merge conflicts."); } Value[] mergeFailed = n.getProperty(JcrConstants.JCR_MERGEFAILED).getValues(); for (Value value : mergeFailed) { vMgr.cancelMerge(path, (Version) getRepositorySession().getNodeByIdentifier(value.getString())); } // remove this entry from the changeList changeList.remove(propEntry); } else if (propEntry instanceof DavProperty) { if (AUTO_MERGE_SET.equals(((DavProperty<?>)propEntry).getName())) { autoMergeSet = (DavProperty<?>) propEntry; } else if (PREDECESSOR_SET.equals(((DavProperty<?>)propEntry).getName())) { predecessorSet = (DavProperty<?>) propEntry; } } } // If DAV:auto-merge-set is a DavProperty merge conflicts need to be // resolved individually according to the DAV:predecessor-set property. if (autoMergeSet != null) { // retrieve the current jcr:mergeFailed property values if (!n.hasProperty(JcrConstants.JCR_MERGEFAILED)) { throw new DavException(DavServletResponse.SC_CONFLICT, "Attempt to resolve non-existing merge conflicts."); } List<String> mergeset = new HrefProperty(autoMergeSet).getHrefs(); List<String> predecL; if (predecessorSet == null) { predecL = Collections.emptyList(); } else { predecL = new HrefProperty(predecessorSet).getHrefs(); } Session session = getRepositorySession(); // loop over the mergeFailed values (versions) and test whether they are // removed from the DAV:auto-merge-set thus indicating resolution. 
Value[] mergeFailed = n.getProperty(JcrConstants.JCR_MERGEFAILED).getValues(); for (Value value : mergeFailed) { // build version-href from each entry in the jcr:mergeFailed property // in order to be able to compare to the entries in the HrefProperty. Version version = (Version) session.getNodeByIdentifier(value.getString()); String href = getLocatorFromItem(version).getHref(true); // Test if that version has been removed from the merge-set. // thus indicating that this merge conflict needs to be resolved. if (!mergeset.contains(href)) { // If the conflict value has been moved over from DAV:auto-merge-set // to the predecessor-set, resolution with 'doneMerge' is // appropriate. If the value has been removed from the // merge-set but not added to the predecessors 'cancelMerge' // must be called. if (predecL.contains(href)) { vMgr.doneMerge(path, version); } else { vMgr.cancelMerge(path, version); } } } // after successful resolution of merge-conflicts according to // DAV:auto-merge-set and DAV:predecessor-set remove these entries // from the changeList. changeList.remove(autoMergeSet); if (predecessorSet != null) { changeList.remove(predecessorSet); } } } catch (RepositoryException e) { throw new JcrDavException(e); } } //--------------------------------------< VersionableResource interface >--- /** * Adds version control to this resource. If the resource is already under * version control, this method has no effect. * * @throws org.apache.jackrabbit.webdav.DavException if this resource does not * exist yet or if an error occurs while making the underlying node versionable. 
* @see org.apache.jackrabbit.webdav.version.VersionableResource#addVersionControl() */ @Override public void addVersionControl() throws DavException { if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } if (!isVersionControlled()) { try { ((Node)item).addMixin(JcrConstants.MIX_VERSIONABLE); item.save(); } catch (RepositoryException e) { throw new JcrDavException(e); } } // else: is already version controlled -> ignore } //--------------------------------< VersionControlledResource interface >--- /** * Calls {@link javax.jcr.Node#checkin()} on the underlying repository node. * * @throws org.apache.jackrabbit.webdav.DavException * @see org.apache.jackrabbit.webdav.version.VersionControlledResource#checkin() */ @Override public String checkin() throws DavException { if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } if (!isVersionControlled()) { throw new DavException(DavServletResponse.SC_METHOD_NOT_ALLOWED); } try { Version v = getVersionManager().checkin(item.getPath()); String versionHref = getLocatorFromItem(v).getHref(true); return versionHref; } catch (RepositoryException e) { // UnsupportedRepositoryException should not occur throw new JcrDavException(e); } } /** * Calls {@link javax.jcr.Node#checkout()} on the underlying repository node. * * @throws org.apache.jackrabbit.webdav.DavException * @see org.apache.jackrabbit.webdav.version.VersionControlledResource#checkout() */ @Override public void checkout() throws DavException { if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } if (!isVersionControlled()) { throw new DavException(DavServletResponse.SC_METHOD_NOT_ALLOWED); } try { getVersionManager().checkout(item.getPath()); } catch (RepositoryException e) { // UnsupportedRepositoryException should not occur throw new JcrDavException(e); } } /** * Not implemented. 
Always throws a <code>DavException</code> with error code * {@link org.apache.jackrabbit.webdav.DavServletResponse#SC_NOT_IMPLEMENTED}. * * @throws org.apache.jackrabbit.webdav.DavException * @see org.apache.jackrabbit.webdav.version.VersionControlledResource#uncheckout() */ @Override public void uncheckout() throws DavException { throw new DavException(DavServletResponse.SC_NOT_IMPLEMENTED); } /** * Perform an update on this resource. Depending on the format of the <code>updateInfo</code> * this is translated to one of the following methods defined by the JCR API: * <ul> * <li>{@link Node#restore(javax.jcr.version.Version, boolean)}</li> * <li>{@link Node#restore(javax.jcr.version.Version, String, boolean)}</li> * <li>{@link Node#restoreByLabel(String, boolean)}</li> * <li>{@link Node#update(String)}</li> * </ul> * <p> * Limitation: note that the <code>MultiStatus</code> returned by this method * will not list any nodes that have been removed due to an Uuid conflict. * * @param updateInfo * @return * @throws org.apache.jackrabbit.webdav.DavException * @see org.apache.jackrabbit.webdav.version.VersionControlledResource#update(org.apache.jackrabbit.webdav.version.UpdateInfo) */ //TODO: with jcr the node must not be versionable in order to perform Node.update. @Override public MultiStatus update(UpdateInfo updateInfo) throws DavException { if (updateInfo == null) { throw new DavException(DavServletResponse.SC_BAD_REQUEST, "Valid update request body required."); } if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } MultiStatus ms = new MultiStatus(); try { Node node = (Node)item; Element udElem = updateInfo.getUpdateElement(); boolean removeExisting = DomUtil.hasChildElement(udElem, XML_REMOVEEXISTING, NAMESPACE); // register eventListener in order to be able to report the modified resources. 
EventListener el = new EListener(updateInfo.getPropertyNameSet(), ms); registerEventListener(el, node.getPath()); // perform the update/restore according to the update info if (updateInfo.getVersionHref() != null) { String[] hrefs = updateInfo.getVersionHref(); if (hrefs.length != 1) { throw new DavException(DavServletResponse.SC_BAD_REQUEST, "Invalid update request body missing version href or containing multiple version hrefs."); } String versionPath = getLocatorFromHref(hrefs[0]).getRepositoryPath(); String versionName = getItemName(versionPath); String relPath = DomUtil.getChildText(udElem, XML_RELPATH, NAMESPACE); if (relPath == null) { // restore version by name node.restore(versionName, removeExisting); } else if (node.hasNode(relPath)) { Version v = node.getNode(relPath).getVersionHistory().getVersion(versionName); node.restore(v, relPath, removeExisting); } else { Version v = (Version) getRepositorySession().getNode(versionPath); node.restore(v, relPath, removeExisting); } } else if (updateInfo.getLabelName() != null) { String[] labels = updateInfo.getLabelName(); if (labels.length != 1) { throw new DavException(DavServletResponse.SC_BAD_REQUEST, "Invalid update request body: Multiple labels specified."); } node.restoreByLabel(labels[0], removeExisting); } else if (updateInfo.getWorkspaceHref() != null) { String href = obtainAbsolutePathFromUri(updateInfo.getWorkspaceHref()); String workspaceName = getLocatorFromHref(href).getWorkspaceName(); node.update(workspaceName); } else { throw new DavException(DavServletResponse.SC_BAD_REQUEST, "Invalid update request body."); } // unregister the event listener again unregisterEventListener(el); } catch (RepositoryException e) { throw new JcrDavException(e); } return ms; } /** * Merge the repository node represented by this resource according to the * information present in the given {@link MergeInfo} object. 
* * @param mergeInfo * @return <code>MultiStatus</code> recording all repository items modified * by this merge call as well as the resources that a client must modify to * complete the merge (see <a href="http://www.webdav.org/specs/rfc3253.html#METHOD_MERGE">RFC 3253</a>) * @throws org.apache.jackrabbit.webdav.DavException * @see org.apache.jackrabbit.webdav.version.VersionControlledResource#merge(org.apache.jackrabbit.webdav.version.MergeInfo) * @see Node#merge(String, boolean) */ //TODO: with jcr the node must not be versionable in order to perform Node.merge @Override public MultiStatus merge(MergeInfo mergeInfo) throws DavException { if (mergeInfo == null) { throw new DavException(DavServletResponse.SC_BAD_REQUEST); } if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } MultiStatus ms = new MultiStatus(); try { // NOTE: RFC requires that all modified resources are reported in the // multistatus response. this doesn't work however with the remoting // there is no way to distinguish the 'failedId's from any other // resources that got modified by this merge operation -> omitted. 
// todo: RFC allows multiple href elements inside the DAV:source element String workspaceName = getLocatorFromHref(mergeInfo.getSourceHrefs()[0]).getWorkspaceName(); String depth = DomUtil.getChildTextTrim(mergeInfo.getMergeElement(), DavConstants.XML_DEPTH, DavConstants.NAMESPACE); boolean isShallow = "0".equals(depth); NodeIterator failed = getVersionManager().merge(item.getPath(), workspaceName, !mergeInfo.isNoAutoMerge(), isShallow); // add resources to the multistatus, that failed to be merged while (failed.hasNext()) { Node failedNode = failed.nextNode(); DavResourceLocator loc = getLocatorFromItem(failedNode); DavResource res = createResourceFromLocator(loc); ms.addResponse(new MultiStatusResponse(res, mergeInfo.getPropertyNameSet())); } } catch (RepositoryException e) { throw new JcrDavException(e); } return ms; } /** * Modify the labels present with the versions of this resource. * * @param labelInfo * @throws DavException * @see VersionHistory#addVersionLabel(String, String, boolean) * @see VersionHistory#removeVersionLabel(String) */ @Override public void label(LabelInfo labelInfo) throws DavException { if (labelInfo == null) { throw new DavException(DavServletResponse.SC_BAD_REQUEST, "Valid label request body required."); } if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } try { if (!isVersionControlled() || ((Node)item).isCheckedOut()) { throw new DavException(DavServletResponse.SC_PRECONDITION_FAILED, "A LABEL request may only be applied to a version-controlled, checked-in resource."); } DavResource[] resArr = this.getReferenceResources(CHECKED_IN); if (resArr.length == 1 && resArr[0] instanceof VersionResource) { ((VersionResource)resArr[0]).label(labelInfo); } else { throw new DavException(DavServletResponse.SC_INTERNAL_SERVER_ERROR, "DAV:checked-in property on '" + getHref() + "' did not point to a single VersionResource."); } } catch (RepositoryException e) { throw new JcrDavException(e); } } /** * Returns the {@link 
VersionHistory} associated with the repository node. * If the node is not versionable an exception is thrown. * * @return the {@link VersionHistoryResource} associated with this resource. * @throws org.apache.jackrabbit.webdav.DavException * @see org.apache.jackrabbit.webdav.version.VersionControlledResource#getVersionHistory() * @see javax.jcr.Node#getVersionHistory() */ @Override public VersionHistoryResource getVersionHistory() throws DavException { if (!exists()) { throw new DavException(DavServletResponse.SC_NOT_FOUND); } try { VersionHistory vh = ((Node)item).getVersionHistory(); DavResourceLocator loc = getLocatorFromItem(vh); return (VersionHistoryResource) createResourceFromLocator(loc); } catch (RepositoryException e) { throw new JcrDavException(e); } } //-------------------------------------------------------------------------- /** * Define the set of reports supported by this resource. * * @see SupportedReportSetProperty */ @Override protected void initSupportedReports() { super.initSupportedReports(); if (exists()) { supportedReports.addReportType(ReportType.LOCATE_BY_HISTORY); if (this.isVersionControlled()) { supportedReports.addReportType(ReportType.VERSION_TREE); } } } @Override protected void initPropertyNames() { super.initPropertyNames(); if (isVersionControlled()) { names.addAll(JcrDavPropertyNameSet.VERSIONABLE_SET); Node n = (Node) item; try { if (n.isCheckedOut()) { names.add(CHECKED_OUT); if (n.hasProperty(JcrConstants.JCR_PREDECESSORS)) { names.add(PREDECESSOR_SET); } if (n.hasProperty(JcrConstants.JCR_MERGEFAILED)) { names.add(AUTO_MERGE_SET); } // todo: checkout-fork, checkin-fork } else { names.add(CHECKED_IN); } } catch (RepositoryException e) { log.warn(e.getMessage()); } } } /** * Fill the property set for this resource. 
*/ @Override protected void initProperties() { super.initProperties(); if (isVersionControlled()) { Node n = (Node)item; // properties defined by RFC 3253 for version-controlled resources // workspace property already set in AbstractResource.initProperties() try { // DAV:version-history (computed) String vhHref = getLocatorFromItem(n.getVersionHistory()).getHref(true); properties.add(new HrefProperty(VERSION_HISTORY, vhHref, true)); // DAV:auto-version property: there is no auto version, explicit CHECKOUT is required. properties.add(new DefaultDavProperty<String>(AUTO_VERSION, null, false)); String baseVHref = getLocatorFromItem(n.getBaseVersion()).getHref(true); if (n.isCheckedOut()) { // DAV:predecessors property if (n.hasProperty(JcrConstants.JCR_PREDECESSORS)) { Value[] predec = n.getProperty(JcrConstants.JCR_PREDECESSORS).getValues(); addHrefProperty(PREDECESSOR_SET, predec, false); } // DAV:auto-merge-set property. NOTE: the DAV:merge-set // never occurs, because merging without bestEffort flag // being set results in an exception on failure. if (n.hasProperty(JcrConstants.JCR_MERGEFAILED)) { Value[] mergeFailed = n.getProperty(JcrConstants.JCR_MERGEFAILED).getValues(); addHrefProperty(AUTO_MERGE_SET, mergeFailed, false); } // todo: checkout-fork, checkin-fork } } catch (RepositoryException e) { log.error(e.getMessage()); } } } /** * Add a {@link org.apache.jackrabbit.webdav.property.HrefProperty} with the * specified property name and values. * * @param name * @param values Array of {@link Value}s. 
* @param isProtected * @throws javax.jcr.ValueFormatException * @throws IllegalStateException * @throws javax.jcr.RepositoryException */ private void addHrefProperty(DavPropertyName name, Value[] values, boolean isProtected) throws ValueFormatException, IllegalStateException, RepositoryException { Node[] nodes = new Node[values.length]; for (int i = 0; i < values.length; i++) { nodes[i] = getRepositorySession().getNodeByIdentifier(values[i].getString()); } addHrefProperty(name, nodes, isProtected); } /** * @return true, if this resource represents an existing repository node * that has the mixin nodetype 'mix:versionable' set. */ private boolean isVersionControlled() { boolean vc = false; if (exists()) { try { vc = ((Node) item).isNodeType(JcrConstants.MIX_VERSIONABLE); } catch (RepositoryException e) { log.warn(e.getMessage()); } } return vc; } /** * Build a new locator for the given href. * * @param href * @return */ private DavResourceLocator getLocatorFromHref(String href) { DavLocatorFactory f = getLocator().getFactory(); String prefix = getLocator().getPrefix(); return f.createResourceLocator(prefix, href); } private VersionManager getVersionManager() throws RepositoryException { return getRepositorySession().getWorkspace().getVersionManager(); } private static String obtainAbsolutePathFromUri(String uri) { try { java.net.URI u = new java.net.URI(uri); StringBuilder sb = new StringBuilder(); sb.append(u.getRawPath()); if (u.getRawQuery() != null) { sb.append("?").append(u.getRawQuery()); } return sb.toString(); } catch (java.net.URISyntaxException ex) { log.warn("parsing " + uri, ex); return uri; } } }
package com.luantc.test; /** * Created by luantruong on 6/30/16. */ import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import android.animation.ValueAnimator; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.CornerPathEffect; import android.graphics.Paint; import android.graphics.Path; import android.graphics.PointF; import android.graphics.PorterDuff; import android.graphics.PorterDuffXfermode; import android.graphics.RectF; import android.graphics.Xfermode; import android.graphics.Region.Op; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Handler; import android.util.AttributeSet; import android.util.Log; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.LinearLayout; import com.luantc.test.animation.ChartAnimator; import com.luantc.test.animation.Easing; import com.luantc.test.animation.EasingFunction; public class CircleLayout extends ViewGroup { public static final int LAYOUT_NORMAL = 1; public static final int LAYOUT_PIE = 2; private int mLayoutMode = LAYOUT_NORMAL; private Drawable mInnerCircle; private float mAngleOffset; private float mAngleRange; private float mDividerWidth; private int mInnerRadius; private Paint mDividerPaint; private Paint mCirclePaint; private RectF mBounds = new RectF(); private Bitmap mDst; private Bitmap mSrc; private Canvas mSrcCanvas; private Canvas mDstCanvas; private Xfermode mXfer; private Paint mXferPaint; private View mMotionTarget; private Bitmap mDrawingCache; private Canvas mCachedCanvas; private Set<View> mDirtyViews = new HashSet<View>(); private boolean mCached = false; ChartAnimator mAnimator; private boolean 
isAnimationOnly = false; private boolean isAnimationView = true; private float sweepAngle; private List<Float> mSweeps; private static final float SWEEP_INC = 100f; /** * holds the raw version of the current rotation angle of the chart */ private float mRawRotationAngle = 270f; private float mPadding = 7.5f; Context mContext; public CircleLayout(Context context) { this(context, null); mContext = context; } @SuppressLint("NewApi") public CircleLayout(Context context, AttributeSet attrs) { super(context, attrs); mContext = context; mDividerPaint = new Paint(Paint.ANTI_ALIAS_FLAG); mCirclePaint = new Paint(Paint.ANTI_ALIAS_FLAG); TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CircleLayout, 0, 0); try { int dividerColor = a.getColor(R.styleable.CircleLayout_sliceDivider, android.R.color.darker_gray); mInnerCircle = a.getDrawable(R.styleable.CircleLayout_innerCircle); if (mInnerCircle instanceof ColorDrawable) { int innerColor = a.getColor(R.styleable.CircleLayout_innerCircle, android.R.color.white); mCirclePaint.setColor(innerColor); } mDividerPaint.setColor(dividerColor); mAngleOffset = a.getFloat(R.styleable.CircleLayout_angleOffset, 0f); mAngleRange = a.getFloat(R.styleable.CircleLayout_angleRange, 360f); mDividerWidth = a.getDimensionPixelSize(R.styleable.CircleLayout_dividerWidth, 1); mInnerRadius = a.getDimensionPixelSize(R.styleable.CircleLayout_innerRadius, 80); mLayoutMode = a.getColor(R.styleable.CircleLayout_layoutMode, LAYOUT_NORMAL); } finally { a.recycle(); } mDividerPaint.setStrokeWidth(mDividerWidth); mXfer = new PorterDuffXfermode(PorterDuff.Mode.SRC_IN); mXferPaint = new Paint(Paint.ANTI_ALIAS_FLAG); //Turn off hardware acceleration if possible if (Build.VERSION.SDK_INT >= 11) { setLayerType(LAYER_TYPE_SOFTWARE, null); } } public void setAnimationOnly(boolean isAnimationOnly) { this.isAnimationOnly = isAnimationOnly; } public void setLayoutMode(int mode) { mLayoutMode = mode; requestLayout(); invalidate(); } public int 
getLayoutMode() { return mLayoutMode; } public int getRadius() { final int width = getWidth(); final int height = getHeight(); final float minDimen = width > height ? height : width; float radius = (minDimen - mInnerRadius) / 2f; return (int) radius; } public void getCenter(PointF p) { p.set(getWidth() / 2f, getHeight() / 2); } public void setAngleOffset(float offset) { mAngleOffset = offset; requestLayout(); invalidate(); } public float getAngleOffset() { return mAngleOffset; } public void setInnerRadius(int radius) { mInnerRadius = radius; requestLayout(); invalidate(); } public int getInnerRadius() { return mInnerRadius; } public void setInnerCircle(Drawable d) { mInnerCircle = d; requestLayout(); invalidate(); } public void setInnerCircle(int res) { mInnerCircle = getContext().getResources().getDrawable(res); requestLayout(); invalidate(); } public void setInnerCircleColor(int color) { mInnerCircle = new ColorDrawable(color); requestLayout(); invalidate(); } public Drawable getInnerCircle() { return mInnerCircle; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { final int count = getChildCount(); int maxHeight = 0; int maxWidth = 0; // Find rightmost and bottommost child for (int i = 0; i < count; i++) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { measureChild(child, widthMeasureSpec, heightMeasureSpec); //maxWidth = Math.max(maxWidth, child.getMeasuredWidth()); //maxHeight = Math.max(maxHeight, child.getMeasuredHeight()); } } // Check against our minimum height and width maxHeight = Math.max(maxHeight, getSuggestedMinimumHeight()); maxWidth = Math.max(maxWidth, getSuggestedMinimumWidth()); int width = resolveSize(maxWidth, widthMeasureSpec); int height = resolveSize(maxHeight, heightMeasureSpec); setMeasuredDimension(width, height); if (mSrc != null && (mSrc.getWidth() != width || mSrc.getHeight() != height)) { /*mDst.recycle(); mSrc.recycle(); mDrawingCache.recycle();*/ mDst = null; mSrc = null; 
mDrawingCache = null; } if (mSrc == null) { mSrc = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); mDst = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); mDrawingCache = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); mSrcCanvas = new Canvas(mSrc); mDstCanvas = new Canvas(mDst); mCachedCanvas = new Canvas(mDrawingCache); // Handle when drawborder dont mSrc.eraseColor(Color.TRANSPARENT); mDst.eraseColor(Color.TRANSPARENT); mDrawingCache.eraseColor(Color.TRANSPARENT); } } public LayoutParams layoutParams(View child) { return (LayoutParams) child.getLayoutParams(); } @Override @SuppressWarnings("deprecation") protected void onLayout(boolean changed, int l, int t, int r, int b) { final int childs = getChildCount(); float totalWeight = 0f; for (int i = 0; i < childs; i++) { final View child = getChildAt(i); LayoutParams lp = layoutParams(child); totalWeight += lp.weight; } final int width = getWidth(); final int height = getHeight(); final float minDimen = width > height ? 
height : width; final float radius = (minDimen - mInnerRadius) / 2f; mBounds.set(width / 2 - minDimen / 2 + mPadding, height / 2 - minDimen / 2 + mPadding, width / 2 + minDimen / 2 - mPadding, height / 2 + minDimen / 2 - mPadding); float startAngle = mAngleOffset; for (int i = 0; i < childs; i++) { final View child = getChildAt(i); final LayoutParams lp = layoutParams(child); //final float angle = mAngleRange /totalWeight * lp.weight; float angle = 0; if (i == (childs - 1)) { } ViewModel model = (ViewModel) child.getTag(); angle = mAngleRange * (model.getPercentage() / 100); final float centerAngle = startAngle + angle / 2f; final int x; final int y; if (childs > 1) { x = (int) (radius * Math.cos(Math.toRadians(centerAngle))) + width / 2; y = (int) (radius * Math.sin(Math.toRadians(centerAngle))) + height / 2; } else { x = width / 2; y = height / 2; } final int halfChildWidth = child.getMeasuredWidth() / 2; final int halfChildHeight = child.getMeasuredHeight() / 2; final int left = lp.width != LayoutParams.FILL_PARENT ? x - halfChildWidth : 0; final int top = lp.height != LayoutParams.FILL_PARENT ? y - halfChildHeight : 0; final int right = lp.width != LayoutParams.FILL_PARENT ? x + halfChildWidth : width; final int bottom = lp.height != LayoutParams.FILL_PARENT ? 
y + halfChildHeight : height; child.layout(left, top, right, bottom); if (left != child.getLeft() || top != child.getTop() || right != child.getRight() || bottom != child.getBottom() || lp.startAngle != startAngle || lp.endAngle != startAngle + angle) { mCached = false; } lp.startAngle = startAngle; startAngle += angle; lp.endAngle = startAngle; } invalidate(); } @Override protected LayoutParams generateDefaultLayoutParams() { return new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); } @Override protected LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { LayoutParams lp = new LayoutParams(p.width, p.height); if (p instanceof LinearLayout.LayoutParams) { lp.weight = ((LinearLayout.LayoutParams) p).weight; } return lp; } @Override public LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams; } @Override public boolean dispatchTouchEvent(MotionEvent ev) { if (mLayoutMode == LAYOUT_NORMAL) { return super.dispatchTouchEvent(ev); } final int action = ev.getAction(); final float x = ev.getX() - getWidth() / 2f; final float y = ev.getY() - getHeight() / 2f; if (action == MotionEvent.ACTION_DOWN) { if (mMotionTarget != null) { MotionEvent cancelEvent = MotionEvent.obtain(ev); cancelEvent.setAction(MotionEvent.ACTION_CANCEL); cancelEvent.offsetLocation(-mMotionTarget.getLeft(), -mMotionTarget.getTop()); mMotionTarget.dispatchTouchEvent(cancelEvent); cancelEvent.recycle(); mMotionTarget = null; } final float radius = (float) Math.sqrt(x * x + y * y); if (radius < mInnerRadius || radius > getWidth() / 2f || radius > getHeight() / 2f) { return false; } float angle = (float) Math.toDegrees(Math.atan2(y, x)); if (angle < 0) angle += mAngleRange; final int childs = getChildCount(); for (int i = 0; i < childs; i++) { final View child = getChildAt(i); final LayoutParams lp = layoutParams(child); 
float startAngle = lp.startAngle % mAngleRange; float endAngle = lp.endAngle % mAngleRange; float touchAngle = angle; if (startAngle > endAngle) { if (touchAngle < startAngle && touchAngle < endAngle) { touchAngle += mAngleRange; } endAngle += mAngleRange; } if (startAngle <= touchAngle && endAngle >= touchAngle) { ev.offsetLocation(-child.getLeft(), -child.getTop()); boolean dispatched = child.dispatchTouchEvent(ev); if (dispatched) { mMotionTarget = child; return true; } else { ev.setLocation(0f, 0f); return onTouchEvent(ev); } } } } else if (mMotionTarget != null) { ev.offsetLocation(-mMotionTarget.getLeft(), -mMotionTarget.getTop()); mMotionTarget.dispatchTouchEvent(ev); if (action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL) { mMotionTarget = null; } } return onTouchEvent(ev); } private void drawChild(int i,Canvas canvas, View child, LayoutParams lp, boolean animation) { mSrcCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); mDstCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); mSrcCanvas.save(); int childLeft = child.getLeft(); int childTop = child.getTop(); int childRight = child.getRight(); int childBottom = child.getBottom(); mSrcCanvas.clipRect(childLeft, childTop, childRight, childBottom, Op.REPLACE); mSrcCanvas.translate(childLeft, childTop); child.draw(mSrcCanvas); mSrcCanvas.restore(); mXferPaint.setXfermode(null); mXferPaint.setColor(Color.BLACK); sweepAngle = (lp.endAngle - lp.startAngle); if (isAnimationOnly) { if (mSweeps.get(i) < sweepAngle) { mSweeps.set(i,(mSweeps.get(i) + SWEEP_INC)); Log.d("Sweep","View " + i + " : " + mSweeps.get(i) + " / " + sweepAngle); mDstCanvas.drawArc(mBounds, lp.startAngle, mSweeps.get(i), true, mXferPaint); mXferPaint.setXfermode(mXfer); mDstCanvas.drawBitmap(mSrc, 0f, 0f, mXferPaint); invalidate(); isAnimationView = true; } else { mDstCanvas.drawArc(mBounds, lp.startAngle, sweepAngle, true, mXferPaint); mXferPaint.setXfermode(mXfer); mDstCanvas.drawBitmap(mSrc, 0f, 0f, 
mXferPaint); isAnimationView = false; if (animation) isAnimationOnly = false; } } else { mDstCanvas.drawArc(mBounds, lp.startAngle, sweepAngle, true, mXferPaint); mXferPaint.setXfermode(mXfer); mDstCanvas.drawBitmap(mSrc, 0f, 0f, mXferPaint); ViewModel model = (ViewModel) child.getTag(); if (model.isNeedHighlight()) { Paint border = new Paint(Paint.ANTI_ALIAS_FLAG); border.setXfermode(null); border.setAntiAlias(true); border.setDither(true); border.setColor(Color.WHITE); border.setStyle(Paint.Style.STROKE); border.setStrokeJoin(Paint.Join.ROUND); border.setStrokeCap(Paint.Cap.ROUND); border.setStrokeWidth(5f); // set stroke width mDstCanvas.drawArc(mBounds, lp.startAngle, sweepAngle, true, border); } } canvas.drawBitmap(mDst, 0f, 0f, null); } private void drawDividers(Canvas canvas, float halfWidth, float halfHeight, float radius) { final int childs = getChildCount(); if (childs < 2) { return; } for (int i = 0; i < childs; i++) { final View child = getChildAt(i); LayoutParams lp = layoutParams(child); canvas.drawLine(halfWidth, halfHeight, radius * (float) Math.cos(Math.toRadians(lp.startAngle)) + halfWidth, radius * (float) Math.sin(Math.toRadians(lp.startAngle)) + halfHeight, mDividerPaint); if (i == childs - 1) { canvas.drawLine(halfWidth, halfHeight, radius * (float) Math.cos(Math.toRadians(lp.endAngle)) + halfWidth, radius * (float) Math.sin(Math.toRadians(lp.endAngle)) + halfHeight, mDividerPaint); } } } private void drawInnerCircle(Canvas canvas, float halfWidth, float halfHeight) { if (mInnerCircle != null) { if (!(mInnerCircle instanceof ColorDrawable)) { mInnerCircle.setBounds( (int) halfWidth - mInnerRadius, (int) halfHeight - mInnerRadius, (int) halfWidth + mInnerRadius, (int) halfHeight + mInnerRadius); mInnerCircle.draw(canvas); } else { canvas.drawCircle(halfWidth, halfHeight, mInnerRadius, mCirclePaint); } } } boolean isListFloat = true; @Override protected void dispatchDraw(Canvas canvas) { if (mLayoutMode == LAYOUT_NORMAL) { 
super.dispatchDraw(canvas); return; } if (mSrc == null || mDst == null || mSrc.isRecycled() || mDst.isRecycled()) { return; } final int childs = getChildCount(); if (isListFloat){ mSweeps = new ArrayList<>(); for (int i = 0 ;i<childs;i++){ mSweeps.add(i,0f); } isListFloat = false; } final float halfWidth = getWidth() / 2f; final float halfHeight = getHeight() / 2f; final float radius = halfWidth > halfHeight ? halfHeight : halfWidth; /*if (mCached && mDrawingCache != null && !mDrawingCache.isRecycled() && mDirtyViews.size() < childs / 2) { canvas.drawBitmap(mDrawingCache, 0f, 0f, null); redrawDirty(canvas); drawDividers(canvas, halfWidth, halfHeight, radius); drawInnerCircle(canvas, halfWidth, halfHeight); return; } else { mCached = false; }*/ /*Canvas sCanvas = null; if (mCachedCanvas != null) { sCanvas = canvas; canvas = mCachedCanvas; }*/ Drawable bkg = getBackground(); if (bkg != null) { bkg.draw(canvas); } for (int i = 0; i < childs; i++) { final View child = getChildAt(i); LayoutParams lp = layoutParams(child); if (i == childs - 1) drawChild(i,canvas, child, lp, true); else{ drawChild(i,canvas, child, lp, false); if (isAnimationOnly){ if (isAnimationView) break; else continue; } } } drawDividers(canvas, halfWidth, halfHeight, radius); drawInnerCircle(canvas, halfWidth, halfHeight); /*if (mCachedCanvas != null) { sCanvas.drawBitmap(mDrawingCache, 0f, 0f, null); mDirtyViews.clear(); mCached = true; }*/ } public static class LayoutParams extends ViewGroup.LayoutParams { public float startAngle; public float endAngle; public float weight = 1f; public LayoutParams(int width, int height) { super(width, height); } public LayoutParams(Context context, AttributeSet attrs) { super(context, attrs); } } }
/* * Copyright (c) 2008-2014 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb; import java.io.IOException; import java.io.InterruptedIOException; import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; import static com.mongodb.ClusterConnectionMode.Multiple; import static com.mongodb.ClusterConnectionMode.Single; import static com.mongodb.ClusterType.ReplicaSet; import static com.mongodb.ClusterType.Sharded; import static com.mongodb.ClusterType.Unknown; import static com.mongodb.MongoAuthority.Type.Set; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static org.bson.util.Assertions.isTrue; /** * @deprecated This class is NOT part of the public API. It will be dropped in 3.x releases. 
*/ @Deprecated public class DBTCPConnector implements DBConnector { private static final AtomicInteger NEXT_CLUSTER_ID = new AtomicInteger(1); private volatile boolean _closed; private final Mongo _mongo; private ScheduledExecutorService scheduledExecutorService; private Cluster cluster; private final MyPort _myPort = new MyPort(); private final ClusterConnectionMode connectionMode; private ClusterType type = ClusterType.Unknown; private MongosHAServerSelector mongosHAServerSelector; /** * @param mongo the Mongo instance * @throws MongoException */ public DBTCPConnector( Mongo mongo ) { _mongo = mongo; connectionMode = _mongo.getAuthority().getType() == Set || _mongo.getMongoOptions().getRequiredReplicaSetName() != null ? Multiple : Single; } public void start() { isTrue("open", !_closed); MongoOptions options = _mongo.getMongoOptions(); String clusterId = Integer.toString(NEXT_CLUSTER_ID.getAndIncrement()); scheduledExecutorService = Executors.newScheduledThreadPool(options.heartbeatThreadCount > 0 ? options.heartbeatThreadCount : _mongo.getAuthority().getServerAddresses().size(), new DefaultThreadFactory(clusterId)); cluster = Clusters.create(clusterId, ClusterSettings.builder() .hosts(_mongo.getAuthority().getServerAddresses()) .mode(connectionMode) .requiredReplicaSetName(_mongo.getMongoOptions().getRequiredReplicaSetName()) .build(), ServerSettings.builder() .heartbeatFrequency(options.heartbeatFrequencyMS, MILLISECONDS) .heartbeatConnectRetryFrequency(options.heartbeatConnectRetryFrequencyMS, MILLISECONDS) .heartbeatSocketSettings(SocketSettings.builder() .connectTimeout(options.heartbeatConnectTimeoutMS, MILLISECONDS) .readTimeout(options.heartbeatReadTimeoutMS, MILLISECONDS) .socketFactory(_mongo.getMongoOptions().getSocketFactory()) .build()) .build(), scheduledExecutorService, null, _mongo); } /** * Start a "request". * * A "request" is a group of operations in which order matters. 
Examples * include inserting a document and then performing a query which expects * that document to have been inserted, or performing an operation and * then using com.mongodb.Mongo.getLastError to perform error-checking * on that operation. When a thread performs operations in a "request", all * operations will be performed on the same socket, so they will be * correctly ordered. */ @Override public void requestStart(){ isTrue("open", !_closed); _myPort.requestStart(); } /** * End the current "request", if this thread is in one. * * By ending a request when it is safe to do so the built-in connection- * pool is allowed to reassign requests to different sockets in order to * more effectively balance load. See requestStart for more information. */ @Override public void requestDone(){ isTrue("open", !_closed); _myPort.requestDone(); } /** * @throws MongoException */ @Override public void requestEnsureConnection(){ isTrue("open", !_closed); _myPort.requestEnsureConnection(); } private WriteResult _checkWriteError( DB db, DBPort port , WriteConcern concern ) throws IOException{ CommandResult e = port.runCommand( db , concern.getCommand() ); e.throwOnError(); return new WriteResult( e , concern ); } /** * @param db * @param m * @param concern * @return * @throws MongoException */ @Override public WriteResult say( DB db , OutMessage m , WriteConcern concern ){ isTrue("open", !_closed); return say( db , m , concern , (ServerAddress) null); } /** * @param db * @param m * @param concern * @param hostNeeded * @return * @throws MongoException */ @Override public WriteResult say( DB db , OutMessage m , WriteConcern concern , ServerAddress hostNeeded ){ isTrue("open", !_closed); DBPort port = _myPort.get(true, ReadPreference.primary(), hostNeeded); try { return say(db, m, concern, port); } finally { _myPort.done(port); } } WriteResult say(final DB db, final OutMessage m, final WriteConcern concern, final DBPort port){ isTrue("open", !_closed); if (concern == null) { throw new 
IllegalArgumentException("Write concern is null"); } try { return doOperation(db, port, new DBPort.Operation<WriteResult>() { @Override public WriteResult execute() throws IOException { port.say(m); if (concern.callGetLastError()) { return _checkWriteError(db, port, concern); } else { return new WriteResult(db, port, concern); } } }); } catch (MongoException.Network e) { if ( concern.raiseNetworkErrors() ) throw e; CommandResult res = new CommandResult(port.serverAddress()); res.put( "ok" , false ); res.put( "$err" , "NETWORK ERROR" ); return new WriteResult( res , concern ); } finally { m.doneWithMessage(); } } <T> T doOperation(final DB db, final DBPort port, final DBPort.Operation<T> operation){ isTrue("open", !_closed); try { port.checkAuth( db.getMongo() ); return operation.execute(); } catch ( IOException ioe ){ _myPort.error(port, ioe); throw new MongoException.Network("Operation on server " + port.getAddress() + " failed" , ioe ); } catch ( RuntimeException re ){ _myPort.error(port, re); throw re; } } /** * @param db * @param coll * @param m * @param hostNeeded * @param decoder * @return * @throws MongoException */ @Override public Response call( DB db , DBCollection coll , OutMessage m, ServerAddress hostNeeded, DBDecoder decoder ){ isTrue("open", !_closed); return call( db , coll , m , hostNeeded , 2, null, decoder ); } /** * @param db * @param coll * @param m * @param hostNeeded * @param retries * @return * @throws MongoException */ @Override public Response call( DB db , DBCollection coll , OutMessage m , ServerAddress hostNeeded , int retries ){ isTrue("open", !_closed); return call(db, coll, m, hostNeeded, retries, null, null); } /** * @param db * @param coll * @param m * @param hostNeeded * @param readPref * @param decoder * @return * @throws MongoException */ @Override public Response call( DB db, DBCollection coll, OutMessage m, ServerAddress hostNeeded, int retries, ReadPreference readPref, DBDecoder decoder ){ isTrue("open", !_closed); try { 
return innerCall(db, coll, m, hostNeeded, retries, readPref, decoder); } finally { m.doneWithMessage(); } } // This method is recursive. It calls itself to implement query retry logic. private Response innerCall(final DB db, final DBCollection coll, final OutMessage m, final ServerAddress hostNeeded, final int remainingRetries, ReadPreference readPref, final DBDecoder decoder) { if (readPref == null) readPref = ReadPreference.primary(); if (readPref == ReadPreference.primary() && m.hasOption( Bytes.QUERYOPTION_SLAVEOK )) readPref = ReadPreference.secondaryPreferred(); final DBPort port = _myPort.get(false, readPref, hostNeeded); Response res = null; boolean retry = false; try { port.checkAuth( db.getMongo() ); res = port.call( m , coll, decoder ); if ( res._responseTo != m.getId() ) throw new MongoException( "ids don't match" ); } catch ( IOException ioe ){ _myPort.error(port, ioe); retry = shouldRetryQuery(readPref, coll, ioe, remainingRetries); if ( !retry ){ throw new MongoException.Network("Read operation to server " + port.host() + " failed on database " + db , ioe ); } } catch ( RuntimeException re ){ _myPort.error(port, re); throw re; } finally { _myPort.done(port); } if (retry) return innerCall( db , coll , m , hostNeeded , remainingRetries - 1 , readPref, decoder ); ServerError err = res.getError(); if ( err != null && err.isNotMasterError() ){ if ( remainingRetries <= 0 ){ throw new MongoException( "not talking to master and retries used up" ); } return innerCall( db , coll , m , hostNeeded , remainingRetries -1, readPref, decoder ); } return res; } public ServerAddress getAddress() { isTrue("open", !_closed); ClusterDescription clusterDescription = getClusterDescription(); if (connectionMode == Single) { return clusterDescription.getAny().get(0).getAddress(); } if (clusterDescription.getPrimaries().isEmpty()) { return null; } return clusterDescription.getPrimaries().get(0).getAddress(); } /** * Gets the list of seed server addresses * @return */ public 
List<ServerAddress> getAllAddress() { isTrue("open", !_closed); return _mongo._authority.getServerAddresses(); } /** * Gets the list of server addresses currently seen by the connector. * This includes addresses auto-discovered from a replica set. * @return * @throws MongoException */ public List<ServerAddress> getServerAddressList() { isTrue("open", !_closed); List<ServerAddress> serverAddressList = new ArrayList<ServerAddress>(); ClusterDescription clusterDescription = getClusterDescription(); for (ServerDescription serverDescription : clusterDescription.getAll()) { serverAddressList.add(serverDescription.getAddress()); } return serverAddressList; } public ReplicaSetStatus getReplicaSetStatus() { isTrue("open", !_closed); return getType() == ReplicaSet && connectionMode == Multiple ? new ReplicaSetStatus(getClusterDescription()) : null; } // This call can block if it's not yet known. boolean isMongosConnection() { isTrue("open", !_closed); return getType() == Sharded; } public String getConnectPoint(){ isTrue("open", !_closed); ServerAddress master = getAddress(); return master != null ? 
master.toString() : null; } private boolean shouldRetryQuery(ReadPreference readPreference, final DBCollection coll, final IOException ioe, final int remainingRetries) { if (remainingRetries == 0) { return false; } if (coll._name.equals("$cmd")) { return false; } if (ioe instanceof SocketTimeoutException) { return false; } if (readPreference.equals(ReadPreference.primary())) { return false; } return connectionMode == Multiple && getType() == ReplicaSet; } private ClusterDescription getClusterDescription() { return cluster.getDescription(getClusterWaitTimeMS(), MILLISECONDS); } private int getClusterWaitTimeMS() { return Math.min(_mongo.getMongoOptions().maxWaitTime, _mongo.getMongoOptions().connectTimeout); } private int getConnectionWaitTimeMS() { return _mongo.getMongoOptions().maxWaitTime; } DBPort getPrimaryPort() { isTrue("open", !_closed); return _myPort.get(true, ReadPreference.primary(), null); } void releasePort(final DBPort port) { isTrue("open", !_closed); _myPort.done(port); } ServerDescription getServerDescription(final ServerAddress address) { isTrue("open", !_closed); return getClusterDescription().getByServerAddress(address); } class MyPort { DBPort get( boolean keep , ReadPreference readPref, ServerAddress hostNeeded ){ DBPort pinnedRequestPort = getPinnedRequestPortForThread(); if ( hostNeeded != null ) { if (pinnedRequestPort != null && pinnedRequestPort.serverAddress().equals(hostNeeded)) { return pinnedRequestPort; } // asked for a specific host return getConnection(new ServerAddressSelector(hostNeeded)); } if ( pinnedRequestPort != null ){ // we are within a request, and have a port, should stick to it if ( portIsAPrimary(pinnedRequestPort) || !keep ) { // if keep is false, it's a read, so we use port even if primary changed return pinnedRequestPort; } // it's write and primary has changed // we fall back on new primary and try to go on with request // this may not be best behavior if spec of request is to stick with same server 
pinnedRequestPort.getProvider().release(pinnedRequestPort); setPinnedRequestPortForThread(null); } DBPort port = getConnection(createServerSelector(readPref)); // if within request, remember port to stick to same server if (threadHasPinnedRequest()) { setPinnedRequestPortForThread(port); } return port; } private boolean portIsAPrimary(final DBPort pinnedRequestPort) { for (ServerDescription cur : getClusterDescription().getPrimaries()) { if (cur.getAddress().equals(pinnedRequestPort.serverAddress())) { return true; } } return false; } void done( DBPort port ) { Connection requestPort = getPinnedRequestPortForThread(); // keep request port if (port != requestPort) { port.getProvider().release(port); } } /** * call this method when there is an IOException or other low level error on port. * @param port * @param e */ void error( DBPort port , Exception e ){ if (!(e instanceof InterruptedIOException)) { getServer(new ServerAddressSelector(port.getAddress())).invalidate(); } port.close(); pinnedRequestStatusThreadLocal.remove(); } void requestEnsureConnection(){ if ( !threadHasPinnedRequest() ) return; if ( getPinnedRequestPortForThread() != null ) return; setPinnedRequestPortForThread(getConnection(createServerSelector(ReadPreference.primary()))); } private DBPort getConnection(final ServerSelector serverSelector) { return (DBPort) getServer(serverSelector).getConnection(getConnectionWaitTimeMS(), MILLISECONDS); } void requestStart() { PinnedRequestStatus current = getPinnedRequestStatusForThread(); if (current == null) { pinnedRequestStatusThreadLocal.set(new PinnedRequestStatus()); } else { current.nestedBindings++; } } void requestDone(){ PinnedRequestStatus current = getPinnedRequestStatusForThread(); if (current != null) { if (current.nestedBindings > 0) { current.nestedBindings--; } else { pinnedRequestStatusThreadLocal.remove(); if (current.requestPort != null) current.requestPort.getProvider().release(current.requestPort); } } } PinnedRequestStatus 
getPinnedRequestStatusForThread() { return pinnedRequestStatusThreadLocal.get(); } boolean threadHasPinnedRequest() { return pinnedRequestStatusThreadLocal.get() != null; } DBPort getPinnedRequestPortForThread() { return threadHasPinnedRequest() ? pinnedRequestStatusThreadLocal.get().requestPort : null; } void setPinnedRequestPortForThread(final DBPort port) { pinnedRequestStatusThreadLocal.get().requestPort = port; } private final ThreadLocal<PinnedRequestStatus> pinnedRequestStatusThreadLocal = new ThreadLocal<PinnedRequestStatus>(); } private ServerSelector createServerSelector(final ReadPreference readPreference) { if (connectionMode == Multiple) { List<ServerSelector> serverSelectorList = new ArrayList<ServerSelector>(); if (getType() == Sharded) { serverSelectorList.add(getMongosHAServerSelector()); } else if (getType() == ReplicaSet) { serverSelectorList.add(new ReadPreferenceServerSelector(readPreference)); } else { serverSelectorList.add(new AnyServerSelector()); } serverSelectorList.add(new LatencyMinimizingServerSelector(_mongo.getMongoOptions().acceptableLatencyDifferenceMS, MILLISECONDS)); return new CompositeServerSelector(serverSelectorList); } else { return new AnyServerSelector(); } } private synchronized ClusterType getType() { if (type == Unknown) { type = getClusterDescription().getType(); } return type; } // There needs to be just one instance of this because it's stateful between requests private synchronized MongosHAServerSelector getMongosHAServerSelector() { if (mongosHAServerSelector == null) { mongosHAServerSelector = new MongosHAServerSelector(); } return mongosHAServerSelector; } static class PinnedRequestStatus { DBPort requestPort; public int nestedBindings; } public String debugString(){ return getClusterDescription().getShortDescription(); } public void close(){ _closed = true; if (cluster != null) { cluster.close(); cluster = null; } if (scheduledExecutorService != null) { scheduledExecutorService.shutdownNow(); 
scheduledExecutorService = null; } } /** * Assigns a new DBPortPool for a given ServerAddress. * This is used to obtain a new pool when the resolved IP of a host changes, for example. * User application should not have to call this method directly. * @param addr */ public void updatePortPool(ServerAddress addr) { } /** * Gets the DBPortPool associated with a ServerAddress. * @param addr * @return */ public DBPortPool getDBPortPool(ServerAddress addr) { throw new UnsupportedOperationException(); } public boolean isOpen(){ return !_closed; } @Override public CommandResult authenticate(MongoCredential credentials) { final DBPort port = _myPort.get(false, ReadPreference.primaryPreferred(), null); try { CommandResult result = port.authenticate(_mongo, credentials); _mongo.getAuthority().getCredentialsStore().add(credentials); return result; } finally { _myPort.done(port); } } /** * Gets the maximum size for a BSON object supported by the current master server. * Note that this value may change over time depending on which server is master. * @return the maximum size, or 0 if not obtained from servers yet. */ public int getMaxBsonObjectSize() { ClusterDescription clusterDescription = getClusterDescription(); if (clusterDescription.getPrimaries().isEmpty()) { return Bytes.MAX_OBJECT_SIZE; } return clusterDescription.getPrimaries().get(0).getMaxDocumentSize(); } // expose for unit testing MyPort getMyPort() { return _myPort; } private Server getServer(final ServerSelector serverSelector) { return cluster.getServer(serverSelector, getClusterWaitTimeMS(), MILLISECONDS); } // Custom thread factory for scheduled executor service that creates daemon threads. Otherwise, // applications that neglect to close the MongoClient will not exit. 
/**
 * Thread factory for the scheduled executor that produces daemon threads,
 * so applications that forget to close the client can still exit.
 * Threads are named "cluster-&lt;clusterId&gt;-thread-&lt;n&gt;" with n counting up from 1.
 */
static class DefaultThreadFactory implements ThreadFactory {
    private final AtomicInteger threadNumber = new AtomicInteger(1);
    private final String clusterId;

    DefaultThreadFactory(final String clusterId) {
        this.clusterId = clusterId;
    }

    @Override
    public Thread newThread(Runnable runnable) {
        final String threadName = "cluster-" + clusterId + "-thread-" + threadNumber.getAndIncrement();
        final Thread thread = new Thread(runnable, threadName);
        thread.setDaemon(true);
        return thread;
    }
}
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package org.codehaus.groovy.reflection;

import groovy.lang.GroovyRuntimeException;
import groovy.lang.MetaMethod;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Base class for generated DGM (default Groovy methods) meta methods.
 * Stores the method identity (name, declaring class, return type, parameter
 * types) and reports itself as a public method.
 */
public abstract class GeneratedMetaMethod extends MetaMethod {
    private final String name;
    private final CachedClass declaringClass;
    private final Class returnType;

    public GeneratedMetaMethod(String name, CachedClass declaringClass, Class returnType, Class[] parameters) {
        this.name = name;
        this.declaringClass = declaringClass;
        this.returnType = returnType;
        // nativeParamTypes is inherited from MetaMethod.
        nativeParamTypes = parameters;
    }

    @Override
    public int getModifiers() {
        return Modifier.PUBLIC;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public Class getReturnType() {
        return returnType;
    }

    @Override
    public CachedClass getDeclaringClass() {
        return declaringClass;
    }

    /**
     * Lazily-loading placeholder for a generated meta method: the real
     * implementation class is loaded and instantiated on first use and all
     * calls are delegated to it.
     */
    public static class Proxy extends GeneratedMetaMethod {
        // volatile makes the double-checked lazy init in proxy() safe.
        private volatile MetaMethod proxy;
        private final String className;

        public Proxy(String className, String name, CachedClass declaringClass, Class returnType, Class[] parameters) {
            super(name, declaringClass, returnType, parameters);
            this.className = className;
        }

        @Override
        public boolean isValidMethod(Class[] arguments) {
            return proxy().isValidMethod(arguments);
        }

        @Override
        public Object doMethodInvoke(Object object, Object[] argumentArray) {
            return proxy().doMethodInvoke(object, argumentArray);
        }

        @Override
        public Object invoke(Object object, Object[] arguments) {
            return proxy().invoke(object, arguments);
        }

        /**
         * Returns the delegate meta method, creating it on first call.
         * Uses double-checked locking on the volatile {@code proxy} field.
         */
        public final MetaMethod proxy() {
            if (proxy == null) {
                synchronized (this) {
                    if (proxy == null) createProxy();
                }
            }
            return proxy;
        }

        /**
         * Loads the implementation class named by {@code className} (slashes
         * allowed, converted to dots) and instantiates it with this method's
         * identity.
         *
         * @throws GroovyRuntimeException wrapping any load/instantiation failure
         */
        private void createProxy() {
            try {
                Class<?> aClass = getClass().getClassLoader().loadClass(className.replace('/', '.'));
                Constructor<?> constructor = aClass.getConstructor(String.class, CachedClass.class, Class.class, Class[].class);
                proxy = (MetaMethod) constructor.newInstance(getName(), getDeclaringClass(), getReturnType(), getNativeParameterTypes());
            } catch (Throwable t) {
                // The cause travels with the exception; no need to also dump
                // the stack trace to stderr here.
                throw new GroovyRuntimeException("Failed to create DGM method proxy : " + t, t);
            }
        }
    }

    /**
     * Serializable description of one generated DGM method, plus the binary
     * save/load routines for the {@code META-INF/dgminfo} resource.
     *
     * File format: a class-name table terminated by an empty UTF string
     * (well-known primitive/array classes get implicit ids 0..N-1 and are not
     * written by id lookup on load), followed by a record count and, per
     * record, className, methodName, return-type id, parameter count and
     * parameter-type ids.
     */
    public static class DgmMethodRecord implements Serializable {
        private static final long serialVersionUID = -5639988016452884450L;

        public String className;
        public String methodName;
        public Class returnType;
        public Class[] parameters;

        // Classes with fixed, implicit ids; must stay in this exact order so
        // save and load agree on the numbering.
        private static final Class[] PRIMITIVE_CLASSES = {
                Boolean.TYPE, Character.TYPE, Byte.TYPE, Short.TYPE,
                Integer.TYPE, Long.TYPE, Double.TYPE, Float.TYPE, Void.TYPE,

                boolean[].class, char[].class, byte[].class, short[].class,
                int[].class, long[].class, double[].class, float[].class,

                int[][].class, long[][].class, double[][].class,

                Object[].class, String[].class,
                Class[].class, Byte[].class, CharSequence[].class,
        };

        /**
         * Writes the given records to {@code file} in the dgminfo format.
         *
         * @param records method records to persist
         * @param file    destination file path
         * @throws IOException on any write failure
         */
        public static void saveDgmInfo(List<DgmMethodRecord> records, String file) throws IOException {
            try (DataOutputStream out =
                         new DataOutputStream(
                                 new BufferedOutputStream(
                                         new FileOutputStream(file)))) {
                Map<String, Integer> classes = new LinkedHashMap<>();
                int nextClassId = 0;

                // Pre-assign ids to the well-known classes.
                for (Class primitive : PRIMITIVE_CLASSES) {
                    classes.put(primitive.getName(), nextClassId++);
                }

                // Assign ids to every class referenced by any record.
                for (DgmMethodRecord record : records) {
                    String name = record.returnType.getName();
                    Integer id = classes.get(name);
                    if (id == null) {
                        id = nextClassId++;
                        classes.put(name, id);
                    }

                    for (int i = 0; i < record.parameters.length; i++) {
                        name = record.parameters[i].getName();
                        id = classes.get(name);
                        if (id == null) {
                            id = nextClassId++;
                            classes.put(name, id);
                        }
                    }
                }

                // Class-name table, terminated by an empty name.
                for (Map.Entry<String, Integer> stringIntegerEntry : classes.entrySet()) {
                    out.writeUTF(stringIntegerEntry.getKey());
                    out.writeInt(stringIntegerEntry.getValue());
                }
                out.writeUTF("");

                // Records, referencing classes by id.
                out.writeInt(records.size());
                for (DgmMethodRecord record : records) {
                    out.writeUTF(record.className);
                    out.writeUTF(record.methodName);
                    out.writeInt(classes.get(record.returnType.getName()));

                    out.writeInt(record.parameters.length);
                    for (int i = 0; i < record.parameters.length; i++) {
                        Integer key = classes.get(record.parameters[i].getName());
                        out.writeInt(key);
                    }
                }
            }
        }

        /**
         * Reads the records from the {@code META-INF/dgminfo} classpath
         * resource. Classes that cannot be loaded (e.g. in restricted
         * environments) are skipped, along with any record that references
         * them.
         *
         * @return the readable records, in file order
         * @throws IOException when the resource is missing or unreadable
         */
        public static List<DgmMethodRecord> loadDgmInfo() throws IOException {
            ClassLoader loader = DgmMethodRecord.class.getClassLoader();
            // getResourceAsStream returns null for a missing resource; fail
            // with a clear message instead of an NPE on first read.
            InputStream dgmInfo = loader.getResourceAsStream("META-INF/dgminfo");
            if (dgmInfo == null) {
                throw new IOException("Could not find META-INF/dgminfo resource on the classpath");
            }

            try (DataInputStream in = new DataInputStream(new BufferedInputStream(dgmInfo))) {
                Map<Integer, Class> classes = new HashMap<>();
                for (int i = 0; i < PRIMITIVE_CLASSES.length; i++) {
                    classes.put(i, PRIMITIVE_CLASSES[i]);
                }

                // Read the class-name table; the first PRIMITIVE_CLASSES.length
                // entries are already mapped above and are skipped.
                int skip = 0;
                for (; ; ) {
                    String name = in.readUTF();
                    if (name.isEmpty()) break;

                    int key = in.readInt();

                    if (skip++ < PRIMITIVE_CLASSES.length) continue;

                    Class cls = null;
                    try {
                        cls = loader.loadClass(name);
                    } catch (ClassNotFoundException e) {
                        // under certain restrictive environments, loading certain classes may be forbidden
                        // and could yield a ClassNotFoundException (Google App Engine)
                        continue;
                    }
                    classes.put(key, cls);
                }

                int size = in.readInt();
                List<DgmMethodRecord> res = new ArrayList<>(size);
                for (int i = 0; i != size; ++i) {
                    boolean skipRecord = false;
                    DgmMethodRecord record = new DgmMethodRecord();
                    record.className = in.readUTF();
                    record.methodName = in.readUTF();
                    record.returnType = classes.get(in.readInt());

                    if (record.returnType == null) {
                        skipRecord = true;
                    }

                    int psize = in.readInt();
                    record.parameters = new Class[psize];
                    for (int j = 0; j < record.parameters.length; j++) {
                        record.parameters[j] = classes.get(in.readInt());

                        if (record.parameters[j] == null) {
                            skipRecord = true;
                        }
                    }
                    if (!skipRecord) {
                        res.add(record);
                    }
                }
                return res;
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.cassandra.db.rows;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;

import org.apache.cassandra.db.DeletionTime;
import org.apache.cassandra.db.partitions.UnfilteredPartitionIterator;
import org.apache.cassandra.utils.AbstractIterator;
import org.apache.cassandra.utils.CloseableIterator;

import com.google.common.annotations.VisibleForTesting;

/**
 * A utility class to split the given {@link UnfilteredRowIterator} into smaller chunks each
 * having at most {@link #throttle} + 1 unfiltereds.
 *
 * Only the first output contains partition level info: {@link UnfilteredRowIterator#partitionLevelDeletion}
 * and {@link UnfilteredRowIterator#staticRow}.
 *
 * Besides splitting, this iterator will also ensure each chunk does not finish with an open tombstone marker,
 * by closing any opened tombstone markers and re-opening on the next chunk.
 *
 * The lifecycle of an output {@link UnfilteredRowIterator} only lasts until the next call to {@link #next()}.
 *
 * A subsequent {@link #next} call will exhaust the previously returned iterator before computing the next,
 * effectively skipping unfiltereds up to the throttle size.
 *
 * Closing this iterator will close the underlying iterator.
 */
public class ThrottledUnfilteredIterator extends AbstractIterator<UnfilteredRowIterator> implements CloseableIterator<UnfilteredRowIterator>
{
    private final UnfilteredRowIterator origin;
    private final int throttle;

    // internal mutable state: the batch handed out by the last computeNext() call
    private UnfilteredRowIterator throttledItr;

    // extra unfiltereds from previous iteration (re-opened marker and/or the row
    // that triggered the early close), consumed first by the next batch
    private Iterator<Unfiltered> overflowed = Collections.emptyIterator();

    @VisibleForTesting
    ThrottledUnfilteredIterator(UnfilteredRowIterator origin, int throttle)
    {
        assert origin != null;
        assert throttle > 1 : "Throttle size must be higher than 1 to properly support open and close tombstone boundaries.";
        this.origin = origin;
        this.throttle = throttle;
        this.throttledItr = null;
    }

    @Override
    protected UnfilteredRowIterator computeNext()
    {
        // exhaust previous throttled iterator, so each batch starts where the last one stopped
        while (throttledItr != null && throttledItr.hasNext())
            throttledItr.next();

        // The original UnfilteredRowIterator may have only partition deletion or static column but without unfiltereds.
        // Return the original UnfilteredRowIterator
        if (!origin.hasNext())
        {
            if (throttledItr != null)
                return endOfData();
            return throttledItr = origin;
        }

        throttledItr = new WrappingUnfilteredRowIterator(origin)
        {
            // number of unfiltereds handed out in this batch (closeMarker not counted)
            private int count = 0;
            private boolean isFirst = throttledItr == null;

            // current batch's openMarker. if it's generated in previous batch,
            // it must be consumed as first element of current batch
            private RangeTombstoneMarker openMarker;

            // current batch's closeMarker.
            // it must be consumed as last element of current batch
            private RangeTombstoneMarker closeMarker = null;

            @Override
            public boolean hasNext()
            {
                return (withinLimit() && wrapped.hasNext()) || closeMarker != null;
            }

            @Override
            public Unfiltered next()
            {
                // a pending closeMarker is always the last element of the batch
                if (closeMarker != null)
                {
                    assert count == throttle;
                    Unfiltered toReturn = closeMarker;
                    closeMarker = null;
                    return toReturn;
                }

                Unfiltered next;
                assert withinLimit();
                // in the beginning of the batch, there might be remaining unfiltereds from previous iteration
                if (overflowed.hasNext())
                    next = overflowed.next();
                else
                    next = wrapped.next();
                recordNext(next);
                return next;
            }

            private void recordNext(Unfiltered unfiltered)
            {
                count++;
                if (unfiltered.isRangeTombstoneMarker())
                    updateMarker((RangeTombstoneMarker) unfiltered);
                // when reach throttle with a remaining openMarker, we need to create corresponding closeMarker.
                if (count == throttle && openMarker != null)
                {
                    assert wrapped.hasNext();
                    closeOpenMarker(wrapped.next());
                }
            }

            private boolean withinLimit()
            {
                return count < throttle;
            }

            private void updateMarker(RangeTombstoneMarker marker)
            {
                openMarker = marker.isOpen(isReverseOrder()) ? marker : null;
            }

            /**
             * There are 3 cases for next: 1. if it's a boundaryMarker, we split it as closeMarker for the current
             * batch and openMarker for the next batch 2. if it's a boundMarker, it must be a closeMarker. 3. if
             * it's a Row, create a corresponding closeMarker for the current batch, and the next openMarker for
             * the next batch including the current Row.
             */
            private void closeOpenMarker(Unfiltered next)
            {
                assert openMarker != null;

                if (next.isRangeTombstoneMarker())
                {
                    RangeTombstoneMarker marker = (RangeTombstoneMarker) next;
                    // if it's boundary, create closeMarker for current batch and openMarker for next batch
                    if (marker.isBoundary())
                    {
                        RangeTombstoneBoundaryMarker boundary = (RangeTombstoneBoundaryMarker) marker;
                        closeMarker = boundary.createCorrespondingCloseMarker(isReverseOrder());
                        overflowed = Collections.singleton((Unfiltered) boundary.createCorrespondingOpenMarker(isReverseOrder())).iterator();
                    }
                    else
                    {
                        // if it's bound, it must be closeMarker.
                        assert marker.isClose(isReverseOrder());
                        updateMarker(marker);
                        closeMarker = marker;
                    }
                }
                else
                {
                    // it's Row, need to create closeMarker for current batch and openMarker for next batch
                    DeletionTime openDeletion = openMarker.openDeletionTime(isReverseOrder());
                    ByteBuffer[] buffers = next.clustering().getRawValues();
                    closeMarker = RangeTombstoneBoundMarker.exclusiveClose(isReverseOrder(), buffers, openDeletion);

                    // for next batch
                    overflowed = Arrays.asList(RangeTombstoneBoundMarker.inclusiveOpen(isReverseOrder(), buffers, openDeletion), next).iterator();
                }
            }

            @Override
            public DeletionTime partitionLevelDeletion()
            {
                // only the first batch exposes the partition-level deletion
                return isFirst ? wrapped.partitionLevelDeletion() : DeletionTime.LIVE;
            }

            @Override
            public Row staticRow()
            {
                // only the first batch exposes the static row
                return isFirst ? wrapped.staticRow() : Rows.EMPTY_STATIC_ROW;
            }

            @Override
            public void close()
            {
                // no op — the origin iterator is owned and closed by the outer class
            }
        };
        return throttledItr;
    }

    public void close()
    {
        if (origin != null)
            origin.close();
    }

    /**
     * Splits a {@link UnfilteredPartitionIterator} in {@link UnfilteredRowIterator} batches with size no higher than
     * <b>maxBatchSize</b>
     *
     * @param partitionIterator the source partitions
     * @param maxBatchSize max number of unfiltereds in the UnfilteredRowIterator. if 0 is given, it means no throttle.
     * @return a closeable iterator over the throttled batches
     */
    public static CloseableIterator<UnfilteredRowIterator> throttle(UnfilteredPartitionIterator partitionIterator, int maxBatchSize)
    {
        if (maxBatchSize == 0) // opt out
            return partitionIterator;

        return new AbstractIterator<UnfilteredRowIterator>()
        {
            ThrottledUnfilteredIterator current = null;

            protected UnfilteredRowIterator computeNext()
            {
                if (current != null && !current.hasNext())
                {
                    current.close();
                    current = null;
                }

                if (current == null && partitionIterator.hasNext())
                {
                    current = new ThrottledUnfilteredIterator(partitionIterator.next(), maxBatchSize);
                }

                if (current != null && current.hasNext())
                    return current.next();

                return endOfData();
            }

            public void close()
            {
                // NOTE(review): this closes only the current batch, not partitionIterator —
                // presumably the caller retains ownership of partitionIterator; verify against callers.
                if (current != null)
                    current.close();
            }
        };
    }
}
/*
Copyright 2012 Olaf Delgado-Friedrichs

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.gavrog.joss.dsyms.generators;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;

import org.gavrog.box.collections.IteratorAdapter;
import org.gavrog.box.collections.Iterators;
import org.gavrog.box.collections.NiftyList;
import org.gavrog.joss.dsyms.basic.DSymbol;
import org.gavrog.joss.dsyms.basic.DynamicDSymbol;
import org.gavrog.joss.dsyms.basic.IndexList;

/**
 * Augments a tile in all possible ways by splitting edges (introducing
 * vertices of degree two) so that the symbol for the resulting tile has a
 * specified size.
 */
class SplitEdges2d extends IteratorAdapter<DSymbol> {
    // index pair selecting (0,2)-orbits, i.e. the edges of the 2d symbol
    final static private IndexList idcsEdge2d = new IndexList(0, 2);

    final private DSymbol base;
    final private int targetSize;
    // per (0,2)-orbit: a representative element, the orbit size, and how many
    // elements have been added to that orbit so far
    final int orbRep[];
    final int orbSize[];
    final int orbAdded[];
    // current size of the symbol under construction
    int currentSize;
    // invariants of symbols already emitted, to suppress duplicates
    final Set<NiftyList<Integer>> results = new HashSet<NiftyList<Integer>>();

    /**
     * Constructs an instance.
     * @param base the symbol representing the input tile
     * @param size the target size for the augmented symbol
     */
    public SplitEdges2d(final DSymbol base, final int size) {
        // --- store parameters
        this.base = base;
        this.targetSize = size;
        this.currentSize = base.size();

        // --- collect (0,2)-orbits
        final List<List<Integer>> orbits = new ArrayList<List<Integer>>();
        for (final int D: base.orbitReps(idcsEdge2d)) {
            final List<Integer> orbit =
                    Iterators.asList(base.orbit(idcsEdge2d, D));
            orbits.add(orbit);
        }

        // --- sort by decreasing size
        // (sizes are small and non-negative, so subtraction cannot overflow)
        Collections.sort(orbits, new Comparator<List<?>>() {
            public int compare(final List<?> l0, final List<?> l1) {
                return l1.size() - l0.size();
            }
        });

        // --- create arrays
        final int n = orbits.size();
        this.orbRep = new int[n];
        this.orbSize = new int[n];
        this.orbAdded = new int[n];

        // --- fill arrays
        for (int i = 0; i < n; ++i) {
            final List<Integer> orb = orbits.get(i);
            this.orbRep[i] = orb.get(0);
            this.orbSize[i] = orb.size();
            this.orbAdded[i] = 0;
        }

        // --- a little trick to make findNext() code simpler: pre-charge the
        //     last orbit negatively so the first findNext() step lands exactly
        //     on the unmodified base symbol when it already has the target size
        if (this.currentSize == this.targetSize) {
            this.orbAdded[n-1] = -this.orbSize[n-1];
            this.currentSize -= this.orbSize[n-1];
        }
    }

    /* (non-Javadoc)
     * @see org.gavrog.box.collections.IteratorAdapter#findNext()
     *
     * Enumerates distributions of added elements over the orbits, odometer
     * style (rightmost position advances first), emitting each augmented
     * symbol of target size whose invariant has not been seen before.
     */
    protected DSymbol findNext() throws NoSuchElementException {
        while (true) {
            final int target = this.targetSize;
            int i = this.orbRep.length - 1;
            // back off positions that cannot take another increment
            while (i >= 0 && this.currentSize + this.orbSize[i] > target) {
                this.currentSize -= this.orbAdded[i];
                this.orbAdded[i] = 0;
                --i;
            }
            if (i < 0) {
                throw new NoSuchElementException("at end");
            }
            this.orbAdded[i] += this.orbSize[i];
            this.currentSize += this.orbSize[i];

            if (this.currentSize == this.targetSize) {
                final DSymbol ds = augmented();
                final NiftyList<Integer> invariant = ds.invariant();
                // only emit symbols not seen before (up to invariant)
                if (!this.results.contains(invariant)) {
                    this.results.add(invariant);
                    return ds;
                }
            }
        }
    }

    /**
     * Constructs the augmented symbol based on the current values in
     * <code>orbAdded</code>.
     * @return the augmented symbol.
     */
    private DSymbol augmented() {
        final DynamicDSymbol ds = new DynamicDSymbol(this.base);
        for (int i = 0; i < this.orbRep.length; ++i) {
            final int size = this.orbSize[i];
            final int added = this.orbAdded[i];
            if (added == 0) {
                continue;
            }

            // --- holds orbit and added elements
            final int D[] = new int[size + added];
            // --- encodes original 0 operation
            final int op0[];
            // --- encodes original 2 operation
            final int op2[];

            // --- fill "op" arrays and start "D" array
            D[0] = this.orbRep[i];
            switch (size) {
            case 1:
                op0 = new int[] { 0 };
                op2 = new int[] { 0 };
                break;
            case 2:
                if (ds.op(0, D[0]).equals(D[0])) {
                    D[1] = ds.op(2, D[0]);
                    op0 = new int[] { 0, 1 };
                } else {
                    D[1] = ds.op(0, D[0]);
                    op0 = new int[] { 1, 0 };
                }
                if (ds.op(2, D[0]).equals(D[0])) {
                    op2 = new int[] { 0, 1 };
                } else {
                    op2 = new int[] { 1, 0 };
                }
                break;
            case 4:
                D[1] = ds.op(0, D[0]);
                D[2] = ds.op(2, D[1]);
                D[3] = ds.op(0, D[2]);
                op0 = new int[] { 1, 0, 3, 2 };
                op2 = new int[] { 3, 2, 1, 0 };
                break;
            default:
                // a (0,2)-orbit can only have size 1, 2 or 4
                throw new RuntimeException("this should not happen");
            }

            // --- remember v01 values and remove 0 edges
            final int v[] = new int[size];
            for (int k = 0; k < size; ++k) {
                final int E = D[k];
                v[k] = ds.v(0, 1, E);
                ds.undefineOp(0, E);
            }

            // --- add new elements for augmentation to array
            final List<Integer> newElements = ds.grow(added);
            for (int k = 0; k < added; ++k) {
                D[size + k] = newElements.get(k);
            }

            // --- set some v values
            int n = added / size;
            for (int k = 1; k <= n; ++k) {
                for (int m = 0; m < size; ++m) {
                    ds.redefineV(0, 1, D[k*size + m], v[m]);
                }
            }

            // --- connect the elements; the 0- and 1-operations alternate
            //     along the chain of inserted copies (idx flips per level)
            int idx = 0;
            for (int k = 0; k <= n; ++k) {
                for (int m = 0; m < size; ++m) {
                    final int E = D[k*size + m];
                    final int E2 = D[k*size + op2[m]];
                    ds.redefineOp(2, E, E2);
                    final int Ei;
                    if (k < n) {
                        Ei = D[(k+1)*size + m];
                    } else {
                        Ei = D[k*size + op0[m]];
                    }
                    ds.redefineOp(idx, E, Ei);
                }
                idx = 1 - idx;
            }

            // --- set more v values so the new vertices have degree two
            for (int k = size; k < D.length; ++k) {
                final int E = D[k];
                ds.redefineV(1, 2, E, 2 / ds.r(1, 2, E));
            }
        }

        return new DSymbol(ds);
    }
}
package minesim.entities;

import java.io.Serializable;
import java.util.ArrayList;

import minesim.contexts.PeonStatusContextControllerHandler;
import minesim.entities.items.Item;

/**
 * Each peon will have their own inventory where inventory items are stored. An
 * inventory currently has 9 slots for items, however a peon can possess 10
 * items at a time including the one item they are equipped with.
 *
 * @author alana_clover
 **/
@SuppressWarnings("serial")
public class Inventory implements Serializable {

    // Slot type indices
    final int head = 0;
    final int body = 1;
    final int legs = 2;
    final int shoes = 3;
    final int equip = 4;

    // A peon's inventory of inventory items
    ArrayList<InventoryItem> peonInventory;
    // The maximum number of different inventory items a peon can have in their
    // inventory
    int defaultMaxNumberOfItemSlots = 9;
    // The maximum number of inventory items a peon can have of one type
    int defaultMaxItemStackLimit = 99;
    // The item variable for a peon's head wear
    Item headSlot = null;
    // The item variable for a peon's shirt wear
    Item bodySlot = null;
    // The item variable for a peon's shorts
    Item legsSlot = null;
    // The item variable for a peon's footwear
    Item shoesSlot = null;
    // The current inventory item currently in use by the peon
    Item equipSlot = null;
    // The maximum number of items a peon can use at once
    int defaultMaxInventoryItemsInUse = 1;
    // NOTE: unused here but retained for serialization compatibility
    private int amount;
    // NOTE: unused here but retained for serialization compatibility
    private int stackLimit;
    private Peon peon;

    /**
     * Constructor method for a peon's Inventory
     *
     * @param defaultNumberOfItemSlots
     *            -the number of inventory slots in a peon's inventory
     *            (inventory capacity)
     * @param defaultItemStackLimit
     *            -the maximum number of inventory items of the same type that
     *            can be stored in one inventory slot
     * @param head
     *            -the slot for apparel items worn on head
     * @param body
     *            -the slot for apparel to be equipped on the body
     * @param legs
     *            -leg slot for equippable apparel items
     * @param shoes
     *            -feet slot for wearable apparel items/shoes
     * @param currentItemEquipped
     *            the inventory item currently being used
     * @param peonInventory
     *            a peon's inventory of inventory items (ignored; the inventory
     *            always starts empty)
     * @param peon
     *            the peon that owns this inventory
     **/
    public Inventory(int defaultNumberOfItemSlots, int defaultItemStackLimit,
            Item head, Item body, Item legs, Item shoes,
            Item currentItemEquipped, ArrayList<InventoryItem> peonInventory,
            Peon peon) {
        this.defaultMaxNumberOfItemSlots = defaultNumberOfItemSlots;
        this.defaultMaxItemStackLimit = defaultItemStackLimit;
        this.peonInventory = new ArrayList<InventoryItem>();
        this.bodySlot = body;
        this.headSlot = head;
        this.legsSlot = legs;
        this.shoesSlot = shoes;
        this.equipSlot = currentItemEquipped;
        this.peon = peon;
    }

    /**
     * Best-effort UI refresh of the owning peon's status display. Failures are
     * swallowed deliberately: the handler is unavailable in unit tests.
     */
    private void refreshPeonStatus() {
        try {
            PeonStatusContextControllerHandler.getInstance().showPeonStatus(
                    this.peon);
        } catch (Exception e) {
            // this exception will only happen in tests
        }
    }

    /**
     * Adds an item into a peon's inventory. If the item already exists and the
     * stack limit has not been exceeded, the item count will be incremented by
     * 1. If the item does not exist or if the stack limit of a pre-existing
     * item has been reached, the item will be added into a new "slot" of a
     * peon's inventory providing there is a free slot. Otherwise the item is
     * dropped by the peon.
     *
     * @param item
     *            -the item to be added to the inventory
     **/
    public void addItem(Item item) {
        InventoryItem newItem = new InventoryItem(item, 1);
        // If the item to be added already exists as an inventory item
        if (findIndexOfItemInInventory(item) >= 0) {
            // store index of existing item
            int itemIndex = findIndexOfItemInInventory(item);
            // If the existing inventory item stack is not full
            if (!peonInventory.get(itemIndex).stackFull()) {
                peonInventory.get(itemIndex).incrementAmount(1);
                refreshPeonStatus();
                return;
            } else {
                // Stack is full: start a new stack if a slot is free
                if (inventorySlotsLeft() != 0) {
                    peonInventory.add(newItem);
                    refreshPeonStatus();
                    return;
                }
            }
            // If item to be added does not already exist, add it to the
            // inventory if there is a spare slot
        } else {
            if (inventorySlotsLeft() != 0) {
                peonInventory.add(newItem);
                refreshPeonStatus();
                return;
            }
        }
        // No room anywhere: the peon drops the item instead
        this.peon.dropItem(item, false);
        refreshPeonStatus();
    }

    /**
     * Getter method for peon inventory size
     *
     * @return the current size (number of occupied slots) in a peon's inventory
     **/
    public int getInventorySize() {
        return peonInventory.size();
    }

    /**
     * Removes an item from a peon's inventory. If there is only one of the
     * item, the item will be completely removed. If there is more than one of
     * this item, the amount of the item will be decremented by 1.
     *
     * @param item
     *            -the item to be removed
     */
    public void removeItem(Item item) {
        if (findIndexOfItemInInventory(item) >= 0) {
            int itemIndex = findIndexOfItemInInventory(item);
            if (peonInventory.get(itemIndex).getAmount() > 1) {
                peonInventory.get(itemIndex).decrementAmount(1);
            } else {
                peonInventory.remove(itemIndex);
            }
        }
        refreshPeonStatus();
    }

    /**
     * Finds the index for a given item in a peon inventory
     *
     * @param item
     *            the item to be found
     * @return the index of the item or -1 if it does not exist
     */
    public int findIndexOfItemInInventory(Item item) {
        for (int i = peonInventory.size() - 1; i >= 0; i--) {
            // FIX: compare names with equals(), not reference equality (==)
            if (peonInventory.get(i).getItem().getName().equals(item.getName())) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Finds the index for a given item name in a peon inventory
     *
     * @param itemName
     *            the item name to be found
     * @return the index of the item that corresponds to the item name and -1 if
     *         it does not exist
     */
    public int findIndexOfItemInInventoryByName(String itemName) {
        for (int i = peonInventory.size() - 1; i >= 0; i--) {
            // FIX: compare names with equals(), not reference equality (==)
            if (peonInventory.get(i).getItem().getName().equals(itemName)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Getter method for peon inventory
     *
     * @return an ArrayList of InventoryItems, which is an inventory subclass
     **/
    public ArrayList<InventoryItem> getInventory() {
        return this.peonInventory;
    }

    /**
     * Clears/removes all inventory items from a peon inventory
     */
    public void clearInventory() {
        peonInventory.clear();
    }

    /**
     * Returns true if a peon inventory contains the item passed and false if it
     * does not
     *
     * @param item
     *            -the item to be checked for containing
     * @return true if item is in inventory, false otherwise
     */
    public boolean doesInventoryContain(Item item) {
        for (int i = 0; i < peonInventory.size(); i++) {
            // FIX: compare names with equals(), not reference equality (==)
            if (peonInventory.get(i).getItem().getName().equals(item.getName())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns true if a peon inventory contains an item with the name passed
     * and false if otherwise
     *
     * @param itemName
     *            the item name to be checked for in the peon inventory
     * @return true if the item name corresponds to an item in the inventory,
     *         false if otherwise
     */
    public boolean doesInventoryContainItemWithName(String itemName) {
        for (int i = 0; i < peonInventory.size(); i++) {
            // FIX: compare names with equals(), not reference equality (==)
            if (peonInventory.get(i).getItem().getName().equals(itemName)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Getter method for a peon's digging tool (i.e. shovel)
     *
     * @param name
     *            the name of the digging tool (currently always "shovel")
     * @return the digging tool item, or null if the peon does not carry one
     */
    public Item getDiggingTool(String name) {
        for (int i = 0; i < peonInventory.size(); i++) {
            // FIX: compare names with equals(), not reference equality (==)
            if (peonInventory.get(i).getItem().getName().equals(name)) {
                return peonInventory.get(i).getItem();
            }
        }
        return null;
    }

    /**
     * Method removes the digging tool (i.e. shovel) from the peon's inventory
     *
     * @param name
     *            the name of the digging tool (currently always "shovel") that
     *            corresponds to the item to be removed
     */
    public void removeDiggingTool(String name) {
        // FIX: iterate backwards — removing while iterating forwards shifts
        // indices and skips the element right after each removal; also use
        // equals() instead of == for the name comparison.
        for (int i = peonInventory.size() - 1; i >= 0; i--) {
            if (peonInventory.get(i).getItem().getName().equals(name)) {
                peonInventory.remove(i);
            }
        }
    }

    /**
     * Returns true if the item is currently equipped by the peon
     *
     * @param item
     *            the item being observed in the equip slot
     * @return true if equipped, false otherwise
     **/
    public boolean isItemEquipped(Item item) {
        return item.getItem().equals(this.equipSlot);
    }

    /**
     * Returns the number of remaining empty slots in a peon inventory
     *
     * @return the number of available slots in a peon inventory
     **/
    public int inventorySlotsLeft() {
        return this.defaultMaxNumberOfItemSlots - this.peonInventory.size();
    }

    /**
     * Returns string summary of inventory contents by combining them in a
     * stringbuilder and converting it to a normal string.
     *
     * @return String representation of inventory contents, with each item and
     *         its amount being returned as tuples
     **/
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        if (this.peonInventory.isEmpty()) {
            return "Inventory is empty";
        } else {
            sb.append("Inventory contains: ");
            for (InventoryItem item : this.peonInventory) {
                sb.append(item.getAmount() + " " + item.getItem());
            }
        }
        return sb.toString();
    }

    /**
     * Method equips a peon with the given item
     *
     * @param item
     *            the item to be equipped by the peon
     * @return true if the item was successfully equipped, false if otherwise
     */
    public boolean equipItem(Item item) {
        // FIX: compare type with equals(), not reference equality (==)
        if ("none".equals(item.getType())) {
            return false;
        } else if (!(this.peon.getClass().equals(item.getPeonClass()) || "all"
                .equals(item.getPeonClass()))) {
            // item is restricted to a different peon class
            return false;
        }
        switch (item.getType()) {
        case "head":
            if (this.headSlot == null) {
                this.headSlot = item;
            } else {
                // return the previously worn item to the inventory first
                addItem(this.headSlot);
                this.headSlot = item;
            }
            removeItem(item);
            return true;
        case "body":
            if (this.bodySlot == null) {
                this.bodySlot = item;
            } else {
                addItem(this.bodySlot);
                this.bodySlot = item;
            }
            removeItem(item);
            return true;
        case "legs":
            if (this.legsSlot == null) {
                this.legsSlot = item;
            } else {
                addItem(this.legsSlot);
                this.legsSlot = item;
            }
            removeItem(item);
            return true;
        case "shoes":
            if (this.shoesSlot == null) {
                this.shoesSlot = item;
            } else {
                addItem(this.shoesSlot);
                this.shoesSlot = item;
            }
            removeItem(item);
            return true;
        case "equip":
            if (this.equipSlot == null) {
                this.equipSlot = item;
            } else {
                addItem(this.equipSlot);
                this.equipSlot = item;
            }
            removeItem(item);
            return true;
        }
        return false;
    }

    /**
     * Method dequips an item from a peon
     *
     * @param item
     *            the item to be dequipped
     */
    public void dequipItem(Item item) {
        switch (item.getType()) {
        case "head":
            this.headSlot = null;
            addItem(item);
            break;
        case "body":
            this.bodySlot = null;
            addItem(item);
            break;
        case "legs":
            this.legsSlot = null;
            addItem(item);
            break;
        case "shoes":
            this.shoesSlot = null;
            addItem(item);
            break;
        case "equip":
            this.equipSlot = null;
            addItem(item);
            break;
        }
    }

    public Item getHeadSlot() {
        return this.headSlot;
    }

    public Item getBodySlot() {
        return this.bodySlot;
    }

    public Item getLegsSlot() {
        return this.legsSlot;
    }

    public Item getShoesSlot() {
        return this.shoesSlot;
    }

    public Item getEquipSlot() {
        return this.equipSlot;
    }

    /**
     * The InventoryItem class represents an item in a peon's inventory, and
     * stores the item itself and the quantity of that item held. InventoryItem
     * is therefore a tuple, of an item and an amount. InventoryItems are stored
     * in a peonInventory. The stack limit of an item is the pre-defined
     * defaultMaxItemStackLimit.
     **/
    public class InventoryItem extends Item {
        // The item held in this slot
        private Item item;
        // The quantity of the item held in the inventory slot
        private int amount;
        // The maximum amount of an item that can be held in one inventory slot
        private int stackLimit;

        /**
         * Constructor method that defines an inventory item
         *
         * @param item
         *            -the name of the item being stored
         * @param amount
         *            -the quantity of the item in the inventory. Max amount =
         *            stackLimit
         **/
        public InventoryItem(Item item, int amount) {
            super(0, 0, 0, 0, item.getName(), amount);
            this.item = item;
            this.amount = amount;
            this.stackLimit = item.getStackLimit();
        }

        /**
         * Getter method for the name of an inventory item
         *
         * @return the Item object tied to this slot
         **/
        public Item getItem() {
            return this.item;
        }

        /**
         * Getter method for the amount of an inventory item
         *
         * @return amount of items in slot, as an int
         **/
        public int getAmount() {
            return this.amount;
        }

        /**
         * Increments the count of an inventory item by the specified amount
         *
         * @param incrementAmount
         *            the amount to increase the item count by
         **/
        public void incrementAmount(int incrementAmount) {
            this.amount += incrementAmount;
        }

        /**
         * Decrements the count of an inventory item by the specified amount
         *
         * @param decrementAmount
         *            the amount to decrease the item count by
         **/
        public void decrementAmount(int decrementAmount) {
            this.amount -= decrementAmount;
        }

        /**
         * Returns true if the amount of an inventory item in a stack has
         * reached the stack limit.
         *
         * @return true if item amount has reached stack limit, false otherwise
         **/
        public boolean stackFull() {
            return (this.amount == this.stackLimit);
        }
    }
}
/* * Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved. */ package jsystem.extensions.report.html; import java.io.Externalizable; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.logging.Level; import java.util.logging.Logger; import jsystem.extensions.report.html.summary.HtmlSummaryReporter; import jsystem.framework.FrameworkOptions; import jsystem.framework.JSystemProperties; import jsystem.framework.RunnerStatePersistencyManager; import jsystem.framework.report.ExtendTestListener; import jsystem.framework.report.ExtendTestReporter; import jsystem.framework.report.ListenerstManager; import jsystem.framework.report.Reporter; import jsystem.framework.report.Summary; import jsystem.framework.report.TestInfo; import jsystem.framework.scenario.JTestContainer; import jsystem.framework.scenario.ScenariosManager; import jsystem.framework.scenario.flow_control.AntForLoop; import jsystem.framework.sut.SutFactory; import jsystem.utils.BrowserLauncher; import jsystem.utils.DateUtils; import jsystem.utils.FileUtils; import jsystem.utils.StringUtils; import junit.framework.AssertionFailedError; import junit.framework.NamedTest; import junit.framework.Test; /** * A TestReporter implementation that write the report to html. 
* @author Guy Arieli
*/
public class HtmlTestReporter implements ExtendTestReporter, ExtendTestListener, Externalizable {

	protected static Logger log = Logger.getLogger(HtmlTestReporter.class
			.getName());

	// Writer that emits the actual HTML report files.
	protected HtmlWriter writer = null;

	// Root log directory (from the LOG_FOLDER preference).
	private File logDirectory = null;

	// Directory holding the currently-running execution's logs.
	private File logCurrent = null;

	// Directory where previous runs are zipped/backed up.
	private File logOld = null;

	private File logIndexFile = null;

	private HtmlSummaryReporter summary;

	private String lastTestFileName;

	// Status of the test currently running; updated by report()/addError()/etc.
	protected int lastTestStatus = Reporter.PASS;

	private String lastTestClassName = null;

	private TestInfo lastTestInfo;

	private long startTestTime = 0;

	private String reportDir;

	boolean isTemp = false;

	// Timestamp string injected into the next TestReport, then cleared.
	private String timeStampToReplace = "";

	/**
	 * If set to true (by the html.zip.disable=true in the jsystem.properties
	 * file) the log will not be zipped/backed up on runner exit.
	 */
	private boolean isZipLogDisable = false;

	public void setLastTestStatus(int lastTestStatus) {
		this.lastTestStatus = lastTestStatus;
	}

	public int getLastTestStatus() {
		return lastTestStatus;
	}

	/**
	 * Default constructor used when running inside the reporter VM. Backs up
	 * any un-backed-up logs, initializes the log directories and registers a
	 * shutdown hook that zips the logs on runner exit (unless disabled).
	 *
	 * @throws Exception if invoked outside the reporter VM
	 */
	public HtmlTestReporter() throws Exception {
		if (!JSystemProperties.getInstance().isReporterVm()) {
			throw new Exception("You are trying to activate the HTML reporter from the test, while test is communicating with the runner. "
					+ "A common reason for this problem is activating the reporter from the RunnerListenersManager. Use the ListenersManager instead");
		}
		boolean loadReportersState = RunnerStatePersistencyManager.getInstance().getLoadReporters();
		if (loadReportersState){
			// State will be restored via readExternal(); skip fresh init.
			return;
		}
		updateLogDir();
		/*
		 * Init the isZipLogDisable option. By default it's not set so it's not
		 * disabled.
		 */
		isZipLogDisable = ("true".equals(JSystemProperties.getInstance()
				.getPreference(FrameworkOptions.HTML_ZIP_DISABLE)));
		setLogDirectory(new File(reportDir + File.separator + "current"));
		// check if old reports were not backed up; if so, back up before
		// deletion
		logCurrent = new File(getLogDirectory(), "test_1");
		File zipped = new File(getLogDirectory(), ".zipped");
		if (logCurrent.exists() && !zipped.exists()) {
			init(!isZipLogDisable, true);
			log.info("Logs were found without a backup, creating a backup of logs and deleting old logs...");
		} else {
			init(false, true);
		}
		/*
		 * If the ZIP log option is not disabled
		 */
		if (!isTemp) {
			// zip on runner close
			ZipDeleteLogDirectory dl = new ZipDeleteLogDirectory(logCurrent, logOld, false,true);
			addToShutdownHook(dl);
		}
	}

	/**
	 * Re-read html log directory from the jsystem.properties file.
	 */
	protected void updateLogDir(){
		reportDir = JSystemProperties.getInstance().getPreference(FrameworkOptions.LOG_FOLDER);
		if (reportDir == null || reportDir.equals("./log")) {
			reportDir = "log";
			JSystemProperties.getInstance().setPreference(FrameworkOptions.LOG_FOLDER,reportDir);
		}
	}

	/**
	 * Constructor that writes logs under an explicit directory.
	 *
	 * @param directory the root directory for the report
	 * @param isTemp true when this is a temporary reporter (no shutdown-hook zip)
	 * @throws Exception on initialization failure
	 */
	public HtmlTestReporter(String directory, boolean isTemp) throws Exception {
		reportDir = directory;
		this.isTemp = isTemp;
		init();
		summary = new HtmlSummaryReporter(logCurrent);
	}

	public void init() {
		isZipLogDisable = ("true".equals(JSystemProperties.getInstance()
				.getPreference(FrameworkOptions.HTML_ZIP_DISABLE)));
		try {
			init(!isZipLogDisable, true);
		} catch (Exception e) {
			log.log(Level.SEVERE, "Fail to init HtmlTestReporter", e);
		}
	}

	/**
	 * Init current logs: creates the "current" and "old" directories, runs the
	 * zip/delete worker synchronously, then creates the summary and writer.
	 *
	 * @param zipFirst if true will zip before deletion
	 * @param deleteCurrent if true will delete current logs
	 * @throws Exception on directory/writer initialization failure
	 */
	public void init(boolean zipFirst, boolean deleteCurrent) throws Exception {
		updateLogDir();
		setLogDirectory(new File(reportDir));
		if (!getLogDirectory().exists()) {
			getLogDirectory().mkdirs();
		}
		logCurrent = new File(getLogDirectory(), "current");
		if (!logCurrent.exists()) {
			logCurrent.mkdirs();
		}
		String oDir = JSystemProperties.getInstance().getPreference(
				FrameworkOptions.HTML_OLD_DIRECTORY);
		if (oDir != null && !oDir.equals("")) {
			logOld = new File(oDir);
		} else {
			logOld = new File(getLogDirectory(), "old");
		}
		if (!logOld.exists()) {
			logOld.mkdirs();
		}
		// Run the zip/delete worker and wait for it to finish before
		// creating the new writer over the (possibly cleaned) directory.
		ZipDeleteLogDirectory dl = new ZipDeleteLogDirectory(logCurrent, logOld, deleteCurrent,zipFirst);
		dl.start();
		try {
			dl.join();
		} catch (InterruptedException e) {
			return;
		}
		summary = new HtmlSummaryReporter(logCurrent);
		writer = new HtmlWriter(logCurrent.getPath());
	}

	// Guards against registering the zip shutdown hook more than once.
	private static boolean addZip = false;

	private void addToShutdownHook(ZipDeleteLogDirectory dl) {
		if (!addZip) {
			Runtime.getRuntime().addShutdownHook(dl);
			addZip = true;
		}
	}

	/** Opens the report index file in the default browser. */
	public void initReporterManager() throws IOException {
		BrowserLauncher.openURL(getIndexFile().getAbsolutePath());
	}

	public boolean asUI() {
		return true;
	}

	/**
	 * Adds a pass/fail report entry. A failing entry also marks the current
	 * test as failed.
	 */
	public void report(String title, String message, boolean isPass,boolean bold, boolean ignore) {
		try {
			int status;
			if (isPass) {
				status = Reporter.PASS;
			} else {
				status = Reporter.FAIL;
				lastTestStatus = status;
			}
			writer.addReport(new TestReport(title, String.valueOf(message),status, bold, ignore, null));
		} catch (Exception e) {
			log.log(Level.WARNING, "Fail to add report", e);
		}
	}

	public void report(String title, String message, boolean isPass, boolean bold) {
		report(title, message, isPass, bold, false);
	}

	public String getName() {
		return "html report";
	}

	public void addError(Test test, Throwable t) {
		lastTestStatus = Reporter.FAIL;
	}

	public void addFailure(Test test, AssertionFailedError t) {
		lastTestStatus = Reporter.FAIL;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see jsystem.framework.system.ExtendTestListener#addWarning(junit.framework.Test)
	 */
	public void addWarning(Test test) {
		lastTestStatus = Reporter.WARNING;
	}

	/**
	 * Ends the current test: records it in the summary (unless it is hidden
	 * and did not fail) and closes the writer's test entry.
	 */
	public void endTest(String packageName, String testName, String methodName,long time) {
		if (methodName != null && !(lastTestStatus != Reporter.FAIL && lastTestInfo.isHiddenInHTML) ) {
			summary.endTest(testName + "." + methodName, packageName,lastTestFileName, lastTestStatus, time);
		}
		try {
			if (lastTestStatus != Reporter.FAIL) {
				writer.endTest(time, false);
			} else {
				writer.endTest(time, true);
			}
		} catch (Exception e) {
			log.log(Level.WARNING, "End test notification fail", e);
		}
	}

	public void endTest(Test test) {
		String testName;
		String testClass = lastTestClassName;
		String packageName = StringUtils.getPackageName(testClass);
		testName = StringUtils.getClassName(testClass);
		String methodName = null;
		if (test instanceof NamedTest) {
			methodName = ((NamedTest) test).getMethodName();
		}
		endTest(packageName, testName, methodName, System.currentTimeMillis()- startTestTime);
	}

	/** Resets per-test state and opens a new test entry in the writer. */
	public void startTest(TestInfo testInfo) {
		startTestTime = System.currentTimeMillis();
		lastTestStatus = Reporter.PASS;
		lastTestClassName = testInfo.className;
		lastTestInfo = testInfo;
		try {
			lastTestFileName = writer.newTestStart(testInfo);
		} catch (Exception e) {
			log.log(Level.WARNING, "Start test notification failed", e);
		}
	}

	public void startTest(Test test) {
		// not implemented
	}

	public File getLogDirectory() {
		return logDirectory;
	}

	public void setLogDirectory(File logDirectory) {
		this.logDirectory = logDirectory;
	}

	public File getCurrentDirectory() {
		return logCurrent;
	}

	public File getIndexFile() {
		return logIndexFile;
	}

	public String getReportDir() {
		return reportDir;
	}

	public void setReportDir(String reportDir) {
		this.reportDir = reportDir;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see jsystem.framework.report.ExtendTestReporter#saveFile(java.lang.String,
	 * java.io.InputStream)
	 */
	public void saveFile(String fileName, byte[] content) {
		try {
			File file = new File(ListenerstManager.getInstance()
					.getCurrentTestFolder(), fileName);
			file.getParentFile().mkdirs();
			FileOutputStream out = new FileOutputStream(file);
			try {
				out.write(content);
			}finally{
				out.close();
			}
		} catch (IOException e) {
			log.log(Level.WARNING, "Fail to save file", e);
		}
	}

	/** Returns true if the file name ends with a known image extension. */
	public static boolean isImage(String fileName) {
		String[] imagesTypes = { ".jpg", ".png", ".gif" };
		for (int i = 0; i < imagesTypes.length; i++) {
			if (String.valueOf(fileName).toLowerCase().endsWith(imagesTypes[i])) {
				return true;
			}
		}
		return false;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see jsystem.framework.report.ExtendTestReporter#report(java.lang.String,
	 * java.lang.String, int, boolean)
	 */
	public void report(String title, String message, int status, boolean bold, boolean html, boolean link) {
		TestReport tr = null;
		if (link) {
			// Link reports: images are inlined as <img> tags, other files
			// become file links.
			if (isImage(message)) {
				String mm = "<img src=\"" + message.replace('\\', '/') + "\">";
				tr = new TestReport(title, mm, status, false, false,null);
				tr.setHtmlMessage(true);
			} else {
				tr = new TestReport(title, null, status, false, false,null);
				tr.setFileName(String.valueOf(message));
			}
		} else {
			// Escalate the running test status: FAIL wins over WARNING,
			// WARNING wins over PASS.
			if (status == Reporter.FAIL) {
				lastTestStatus = Reporter.FAIL;
			} else if (lastTestStatus == Reporter.PASS && status == Reporter.WARNING) {
				lastTestStatus = Reporter.WARNING;
			}
			tr = new TestReport(title, message, status, bold, false, null);
			tr.setTime(timeStampToReplace);
			timeStampToReplace = "";
			if (html) {
				tr.setHtmlMessage(true);
			}
		}
		try {
			writer.addReport(tr);
		} catch (Exception e) {
			log.log(Level.WARNING, "Fail to add report", e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see jsystem.framework.report.ExtendTestReporter#startSection()
	 */
	public void startSection() {
		try {
			writer.addReport(new SectionReport(true));
		} catch (Exception e) {
			log.log(Level.WARNING, "Fail to add report", e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see jsystem.framework.report.ExtendTestReporter#endSection()
	 */
	public void endSection() {
		try {
			writer.addReport(new SectionReport(false));
		} catch (Exception e) {
			log.log(Level.WARNING, "Fail to add report", e);
		}
	}

	public void setData(String data) {
		// not implemented
	}

	public void endRun() {
		writer.runEnded();
	}

	public void addProperty(String key, String value) {
		String title = "Added Property: " + key + "=" + value;
		report(title, null, 0, false, false, false);
	}

	// NOTE(review): this overload collapses any non-PASS status (including
	// WARNING) to a failing boolean report — confirm that is intended.
	public void report(String title, String message, int status, boolean bold) {
		report(title, message, status == Reporter.PASS, bold);
	}

	/**
	 *
	 * The HtmlTestReporter was refactored to serialize itself and de-serialize itself,
	 * this was done to allow agent restart support. After restart, we don't want the report to be zipped
	 * and backed up, we want the user to see a continuous report, to achieve that, before agent restart, we
	 * save html report state to a binary file, and after restart we load state from file, and thus we can
	 * continue running almost as if there was no restart.
	 * There is one thing however, in order to fully support saving reporter state we need to serialize
	 * the EventParser, and this is something I can't do so late in 5.5.
	 * The fact that the event parser is not serialized and de-serialized causes a bug in html report hierarchy.
	 *
	 * The fast and most correct solution at this stage is that the run after restart will be seen
	 * as a new hierarchy. To achieve that I'm pointing the currentContainer to tree root.
*/
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
	// Restores reporter state after agent restart. Read order MUST match
	// the write order in writeExternal().
	writer = (HtmlWriter) in.readObject();
	// See class comment above: the run after restart starts a new hierarchy.
	writer.resetCurrentContainer();
	logDirectory = (File) in.readObject();
	logCurrent = (File) in.readObject();
	logOld = (File) in.readObject();
	logIndexFile = (File) in.readObject();
	summary = (HtmlSummaryReporter) in.readObject();
	lastTestFileName = (String) in.readObject();
	lastTestStatus = (Integer) in.readObject();
	lastTestClassName = (String) in.readObject();
	startTestTime = (Long) in.readObject();
	reportDir = (String) in.readObject();
	isTemp = (Boolean) in.readObject();
	isZipLogDisable = (Boolean) in.readObject();
}

@Override
public void writeExternal(ObjectOutput out) throws IOException {
	// Persists reporter state before agent restart. Write order MUST match
	// the read order in readExternal().
	out.writeObject(writer);
	out.writeObject(logDirectory);
	out.writeObject(logCurrent);
	out.writeObject(logOld);
	out.writeObject(logIndexFile);
	out.writeObject(summary);
	out.writeObject(lastTestFileName);
	out.writeObject(lastTestStatus);
	out.writeObject(lastTestClassName);
	out.writeObject(startTestTime);
	out.writeObject(reportDir);
	out.writeObject(isTemp);
	out.writeObject(isZipLogDisable);
}

@Override
public void endContainer(JTestContainer container) {
	writer.endContainer(container);
}

@Override
public void endLoop(AntForLoop loop, int count) {
	writer.endLoop(loop, count);
}

@Override
public void startContainer(JTestContainer container) {
	writer.startContainer(container);
}

@Override
public void startLoop(AntForLoop loop, int count) {
	writer.startLoop(loop, count);
}

/** Flushes the HTML writer and saves the summary file. */
public void flush() throws Exception {
	writer.flush();
	summary.saveFile();
}

@Override
public void setContainerProperties(int ancestorLevel, String key, String value) {
	writer.setContainerProperties(ancestorLevel, key, value);
}

public void setTimeStampToReplace(String timeStampToReplace) {
	this.timeStampToReplace = timeStampToReplace;
}
}

/**
 * Worker thread that optionally zips the current log directory into the
 * "old" directory (and a setup/version tree) and then deletes or marks it.
 */
class ZipDeleteLogDirectory extends Thread {

	private static Logger log = Logger.getLogger(ZipDeleteLogDirectory.class
			.getName());

	// Directory whose contents are zipped and/or deleted.
	File toDelete = null;

	// Destination directory for the zip backups.
	File oldDir = null;

	boolean deleteCurrent = false;

	boolean zipFirst = true;

	public static final File ZIP_FILE = new File(".zipped");

	public ZipDeleteLogDirectory(File toDelete, File oldDir, boolean deleteCurrent,boolean zipFirst) {
		super("ZipDeleteLogDirectory");
		this.toDelete = toDelete;
		this.oldDir = oldDir;
		this.deleteCurrent = deleteCurrent;
		this.zipFirst = zipFirst;
	}

	public void run() {
		boolean disableZipLog = "true".equals(JSystemProperties.getInstance()
				.getPreference(FrameworkOptions.HTML_ZIP_DISABLE));
		if (disableZipLog || !zipFirst) {
			// Zipping disabled or not requested — at most delete and return.
			if (deleteCurrent) {
				deleteLogDirectory();
			}
			return;
		}
		if (JSystemProperties.getInstance().isJsystemRunner()) {
			System.out.println("Log backup process ... (don't close)");
		}
		/*
		 * If the date was not set in the beginning of test execution set it to
		 * the current time.
		 */
		String date = Summary.getInstance().getProperties().getProperty("Date");
		if (date == null) {
			date = DateUtils.getDate();
			if (date == null) {
				date = Long.toString(System.currentTimeMillis());
			}
		}
		// Sanitize the date into a file-system-safe zip name; append _N to
		// avoid clobbering an existing zip.
		String fileName = "log_" + date.replace(':', '_').replace(' ', '_').replace('+', '_');
		File zipFile = new File(oldDir, fileName + ".zip");
		int index = 1;
		String oFileName = fileName;
		while (zipFile.exists()) {
			fileName = oFileName + "_" + index;
			zipFile = new File(oldDir, fileName + ".zip");
			index++;
		}
		try {
			String[] toDeleteList = toDelete.list();
			if (toDeleteList != null && toDeleteList.length > 0) {
				FileUtils.zipDirectory(toDelete.getPath(), "", zipFile
						.getPath(), JSystemProperties.getInstance()
						.isJsystemRunner());
			}
		} catch (Exception e) {
			log.log(Level.WARNING, "Fail to zip old log - Current logs are not deleted!!!", e);
			return;
		}
		File sutFile = SutFactory.getInstance().getSutFile(false);
		if (sutFile != null) {
			// sutFile == null means no sut - probably someone tampered with
			// the jsystem.properties file; handled in the else branch below.
			String setup = null;
			setup = sutFile.getName();
			if (setup != null && setup.toLowerCase().endsWith(".xml")) {
				setup = setup.substring(0, setup.length() - 4);
			}
			String oldPath = JSystemProperties.getInstance().getPreference(
					FrameworkOptions.HTML_OLD_PATH);
			File dest;
			if (oldPath == null) {
				dest = new File(oldDir.getPath() + File.separator + "setup-" + setup + File.separator + "version-"
						+ Summary.getInstance().getProperties().getProperty("Version"));
			} else {
				dest = findTreePath(oldDir, oldPath);
			}
			dest.mkdirs();
			try {
				if (zipFile.exists()){
					FileUtils.copyFile(zipFile, new File(dest, fileName + ".zip"));
				}
			} catch (IOException e1) {
				log.log(Level.WARNING, "Fail to copy old log to Hierarchical folders of Sut and Version", e1);
				return;
			}
			/**
			 * if html.tree is set to true the log zip will be only in the tree.
			 */
			String htmlTree = JSystemProperties.getInstance().getPreference(
					FrameworkOptions.HTML_ZIP_TREE_ONLY);
			if (htmlTree != null && htmlTree.toLowerCase().equals("true")) {
				zipFile.delete();
			}
		}else{
			log.info("Skipped Html zip tree - No Sut!");
		}
		if (deleteCurrent) {
			deleteLogDirectory();
		} else {
			// Mark the directory as backed up so a future run does not
			// re-zip it (see the .zipped check in HtmlTestReporter()).
			try {
				FileUtils.write(toDelete.getPath() + File.separator + ".zipped", "");
			} catch (IOException e) {
				log.warning("Creating .zip file was failed");
			}
		}
	}

	/**
	 * Builds a destination directory under root from a ';'-separated path
	 * template; the tokens "setup", "version" and "scenario" are expanded
	 * from the current run, other tokens are looked up as summary properties.
	 */
	private File findTreePath(File root, String pathString) {
		String[] paths = pathString.split(";");
		File toReturn = root;
		for (int i = 0; i < paths.length; i++) {
			if (paths[i].toLowerCase().equals("setup")) {
				String setup = SutFactory.getInstance().getSutFile().getName();
				if (setup != null && setup.toLowerCase().endsWith(".xml")) {
					setup = setup.substring(0, setup.length() - 4);
				}
				toReturn = new File(toReturn, "setup-" + setup);
			} else if (paths[i].toLowerCase().equals("version")) {
				String version = Summary.getInstance().getProperties()
						.getProperty("Version");
				toReturn = new File(toReturn, "version-" + version);
			} else if (paths[i].toLowerCase().equals("scenario")) {
				String scenario = ScenariosManager.getInstance()
						.getCurrentScenario().getName();
				toReturn = new File(toReturn, "scenario-" + scenario);
			} else {
				String value = Summary.getInstance().getProperties()
						.getProperty(paths[i]);
				if (value == null) {
					value = paths[i];
				}
				toReturn = new File(toReturn, value);
			}
		}
		return toReturn;
	}

	/**
	 * Deletes the current log directory tree and recreates it empty; logs
	 * (but does not throw) on failure.
	 */
	public void deleteLogDirectory() {
		if (!toDelete.exists()) {
			return;
		}
		FileUtils.deltree(toDelete);
		if (toDelete.exists()){
			log.info("Failed to delete current log directory: "+toDelete.getAbsolutePath());
		} else {
			toDelete.mkdirs();
		}
	}
}
package org.hisp.dhis.analytics.table; /* * Copyright (c) 2004-2015, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.lang3.StringUtils; import org.hisp.dhis.analytics.AggregationType; import org.hisp.dhis.analytics.AnalyticsTable; import org.hisp.dhis.analytics.DataQueryParams; import org.hisp.dhis.common.ValueType; import org.hisp.dhis.commons.util.TextUtils; import org.hisp.dhis.dataelement.CategoryOptionGroupSet; import org.hisp.dhis.dataelement.DataElementCategory; import org.hisp.dhis.dataelement.DataElementGroupSet; import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet; import org.hisp.dhis.organisationunit.OrganisationUnitLevel; import org.hisp.dhis.period.PeriodType; import org.hisp.dhis.system.util.DateUtils; import org.hisp.dhis.system.util.MathUtils; import org.hisp.dhis.util.ObjectUtils; import org.springframework.scheduling.annotation.Async; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Future; import static org.hisp.dhis.commons.util.TextUtils.getQuotedCommaDelimitedString; import static org.hisp.dhis.dataapproval.DataApprovalLevelService.APPROVAL_LEVEL_UNAPPROVED; /** * This class manages the analytics table. The analytics table is a denormalized * table designed for analysis which contains raw data values. It has columns for * each organisation unit group set and organisation unit level. Also, columns * for dataelementid, periodid, organisationunitid, categoryoptioncomboid, value. * <p> * The analytics table is horizontally partitioned. The partition key is the start * date of the period of the data record. The table is partitioned according to * time span with one partition per calendar quarter. * <p> * The data records in this table are not aggregated. 
Typically, queries will
* aggregate in organisation unit hierarchy dimension, in the period/time dimension,
* and the category dimensions, as well as organisation unit group set dimensions.
*
* @author Lars Helge Overland
*/
public class JdbcAnalyticsTableManager
    extends AbstractJdbcTableManager
{
    // -------------------------------------------------------------------------
    // Implementation
    // -------------------------------------------------------------------------

    /**
     * Returns null when the system is in a valid state for analytics table
     * generation, otherwise a message describing why it is not.
     */
    @Override
    public String validState()
    {
        boolean hasData = jdbcTemplate.queryForRowSet( "select dataelementid from datavalue limit 1" ).next();

        if ( !hasData )
        {
            return "No data values exist, not updating aggregate analytics tables";
        }

        int orgUnitLevels = organisationUnitService.getNumberOfOrganisationalLevels();

        if ( orgUnitLevels == 0 )
        {
            return "No organisation unit levels exist, not updating aggregate analytics tables";
        }

        log.info( "Approval enabled: " + isApprovalEnabled() );

        return null;
    }

    @Override
    public String getTableName()
    {
        return ANALYTICS_TABLE_NAME;
    }

    /**
     * Generates the data approval resource table when approval is enabled,
     * before the analytics tables are created.
     */
    @Override
    public void preCreateTables()
    {
        if ( isApprovalEnabled() )
        {
            resourceTableService.generateDataApprovalMinLevelTable();
        }
    }

    /**
     * Drops and re-creates the temporary analytics table with one column per
     * dimension plus daysxvalue, daysno, value and textvalue.
     */
    @Override
    public void createTable( AnalyticsTable table )
    {
        final String tableName = table.getTempTableName();
        final String dbl = statementBuilder.getDoubleColumnType();

        final String sqlDrop = "drop table " + tableName;

        executeSilently( sqlDrop );

        String sqlCreate = "create table " + tableName + " (";

        List<String[]> columns = getDimensionColumns( table );

        validateDimensionColumns( columns );

        for ( String[] col : columns )
        {
            sqlCreate += col[0] + " " + col[1] + ",";
        }

        sqlCreate += "daysxvalue " + dbl + ", daysno integer not null, value " + dbl + ", textvalue varchar(50000)) ";

        sqlCreate += statementBuilder.getTableOptions( false );

        log.info( "Creating table: " + tableName + ", columns: " + columns.size() );

        log.debug( "Create SQL: " + sqlCreate );

        executeSilently( sqlCreate );
    }

    /**
     * Drains the queue of table partitions and populates each one, handling
     * numeric, boolean and text value types separately.
     */
    @Override
    @Async
    public Future<?> populateTableAsync( ConcurrentLinkedQueue<AnalyticsTable> tables )
    {
        final String dbl = statementBuilder.getDoubleColumnType();
        final String approvalClause = getApprovalJoinClause();

        taskLoop: while ( true )
        {
            AnalyticsTable table = tables.poll();

            if ( table == null )
            {
                break taskLoop;
            }

            // Accept lenient-numeric values, excluding insignificant zeros
            // unless the element averages or declares zero as significant.
            String intClause =
                "dv.value " + statementBuilder.getRegexpMatch() + " '" + MathUtils.NUMERIC_LENIENT_REGEXP + "' " +
                "and ( dv.value != '0' or de.aggregationtype in ('" + AggregationType.AVERAGE + ',' + AggregationType.AVERAGE_SUM_ORG_UNIT + "') " +
                "or de.zeroissignificant = true ) ";

            populateTable( table, "cast(dv.value as " + dbl + ")", "null", ValueType.NUMERIC_TYPES, intClause, approvalClause );

            // NOTE(review): TRUE_ONLY 'true' values appear to be populated
            // both here (BOOLEAN + TRUE_ONLY) and again by the fourth call
            // below — looks like duplicate inserts; confirm intent.
            populateTable( table, "1", "null", Sets.newHashSet( ValueType.BOOLEAN, ValueType.TRUE_ONLY ), "dv.value = 'true'", approvalClause );

            populateTable( table, "0", "null", Sets.newHashSet( ValueType.BOOLEAN ), "dv.value = 'false'", approvalClause );

            populateTable( table, "1", "null", Sets.newHashSet( ValueType.TRUE_ONLY ), "dv.value = 'true'", approvalClause );

            populateTable( table, "null", "dv.value", ValueType.TEXT_TYPES, null, approvalClause );
        }

        return null;
    }

    /**
     * Populates the given analytics table.
     *
     * @param table analytics table to populate.
     * @param valueExpression numeric value expression.
     * @param textValueExpression textual value expression.
     * @param valueTypes data element value types to include data for.
     * @param whereClause where clause to constrain data query.
     * @param approvalClause join clause for approval level, or empty string.
     */
    private void populateTable( AnalyticsTable table, String valueExpression,
        String textValueExpression, Set<ValueType> valueTypes, String whereClause, String approvalClause )
    {
        final String start = DateUtils.getMediumDateString( table.getPeriod().getStartDate() );
        final String end = DateUtils.getMediumDateString( table.getPeriod().getEndDate() );
        final String tableName = table.getTempTableName();
        final String valTypes = TextUtils.getQuotedCommaDelimitedString( ObjectUtils.asStringList( valueTypes ) );

        String sql = "insert into " + table.getTempTableName() + " (";

        List<String[]> columns = getDimensionColumns( table );

        validateDimensionColumns( columns );

        for ( String[] col : columns )
        {
            sql += col[0] + ",";
        }

        sql += "daysxvalue, daysno, value, textvalue) select ";

        for ( String[] col : columns )
        {
            sql += col[2] + ",";
        }

        sql +=
            valueExpression + " * ps.daysno as daysxvalue, " +
            "ps.daysno as daysno, " +
            valueExpression + " as value, " +
            textValueExpression + " as textvalue " +
            "from datavalue dv " +
            "left join _dataelementgroupsetstructure degs on dv.dataelementid=degs.dataelementid " +
            "left join _organisationunitgroupsetstructure ougs on dv.sourceid=ougs.organisationunitid " +
            "left join _categoryoptiongroupsetstructure cogs on dv.categoryoptioncomboid=cogs.categoryoptioncomboid " +
            "left join _categoryoptiongroupsetstructure aogs on dv.attributeoptioncomboid=aogs.categoryoptioncomboid " +
            "left join _categorystructure dcs on dv.categoryoptioncomboid=dcs.categoryoptioncomboid " +
            "left join _categorystructure acs on dv.attributeoptioncomboid=acs.categoryoptioncomboid " +
            "left join _orgunitstructure ous on dv.sourceid=ous.organisationunitid " +
            "left join _dataelementstructure des on dv.dataelementid = des.dataelementid " +
            "inner join dataelement de on dv.dataelementid=de.dataelementid " +
            "inner join categoryoptioncombo co on dv.categoryoptioncomboid=co.categoryoptioncomboid " +
            "inner join categoryoptioncombo ao on dv.attributeoptioncomboid=ao.categoryoptioncomboid " +
            "inner join _categoryoptioncomboname aon on dv.attributeoptioncomboid=aon.categoryoptioncomboid " +
            "inner join period pe on dv.periodid=pe.periodid " +
            "inner join _periodstructure ps on dv.periodid=ps.periodid " +
            "inner join organisationunit ou on dv.sourceid=ou.organisationunitid " +
            approvalClause +
            "where de.valuetype in (" + valTypes + ") " +
            "and de.domaintype = 'AGGREGATE' " +
            "and pe.startdate >= '" + start + "' " +
            "and pe.startdate <= '" + end + "' " +
            "and dv.value is not null ";

        if ( whereClause != null )
        {
            sql += "and " + whereClause;
        }

        populateAndLog( sql, tableName + ", " + valueTypes );
    }

    /**
     * Returns sub-query for approval level. First looks for approval level in
     * data element resource table which will indicate level 0 (highest) if approval
     * is not required. Then looks for highest level in dataapproval table.
     */
    private String getApprovalJoinClause()
    {
        if ( isApprovalEnabled() )
        {
            String sql =
                "left join _dataapprovalminlevel da " +
                "on des.datasetid=da.datasetid and da.periodid=dv.periodid and da.attributeoptioncomboid=dv.attributeoptioncomboid " +
                "and (";

            Set<OrganisationUnitLevel> levels = dataApprovalLevelService.getOrganisationUnitApprovalLevels();

            for ( OrganisationUnitLevel level : levels )
            {
                sql += "ous.idlevel" + level.getLevel() + " = da.organisationunitid or ";
            }

            return TextUtils.removeLastOr( sql ) + ") ";
        }

        return StringUtils.EMPTY;
    }

    /**
     * Returns the dimension columns as {name, type, select-alias} triples:
     * group sets, categories, org unit levels, period types, fixed columns
     * and, when approval is enabled, the approval level.
     */
    @Override
    public List<String[]> getDimensionColumns( AnalyticsTable table )
    {
        List<String[]> columns = new ArrayList<>();

        List<DataElementGroupSet> dataElementGroupSets =
            idObjectManager.getDataDimensionsNoAcl( DataElementGroupSet.class );

        List<OrganisationUnitGroupSet> orgUnitGroupSets =
            idObjectManager.getDataDimensionsNoAcl( OrganisationUnitGroupSet.class );

        List<CategoryOptionGroupSet> disaggregationCategoryOptionGroupSets =
            categoryService.getDisaggregationCategoryOptionGroupSetsNoAcl();

        List<CategoryOptionGroupSet> attributeCategoryOptionGroupSets =
            categoryService.getAttributeCategoryOptionGroupSetsNoAcl();

        List<DataElementCategory> disaggregationCategories =
            categoryService.getDisaggregationDataDimensionCategoriesNoAcl();

        List<DataElementCategory> attributeCategories =
            categoryService.getAttributeDataDimensionCategoriesNoAcl();

        List<OrganisationUnitLevel> levels =
            organisationUnitService.getFilledOrganisationUnitLevels();

        for ( DataElementGroupSet groupSet : dataElementGroupSets )
        {
            String[] col = { quote( groupSet.getUid() ), "character(11)", "degs." + quote( groupSet.getUid() ) };
            columns.add( col );
        }

        for ( OrganisationUnitGroupSet groupSet : orgUnitGroupSets )
        {
            String[] col = { quote( groupSet.getUid() ), "character(11)", "ougs." + quote( groupSet.getUid() ) };
            columns.add( col );
        }

        for ( CategoryOptionGroupSet groupSet : disaggregationCategoryOptionGroupSets )
        {
            String[] col = { quote( groupSet.getUid() ), "character(11)", "cogs." + quote( groupSet.getUid() ) };
            columns.add( col );
        }

        for ( CategoryOptionGroupSet groupSet : attributeCategoryOptionGroupSets )
        {
            String[] col = { quote( groupSet.getUid() ), "character(11)", "aogs." + quote( groupSet.getUid() ) };
            columns.add( col );
        }

        for ( DataElementCategory category : disaggregationCategories )
        {
            String[] col = { quote( category.getUid() ), "character(11)", "dcs." + quote( category.getUid() ) };
            columns.add( col );
        }

        for ( DataElementCategory category : attributeCategories )
        {
            String[] col = { quote( category.getUid() ), "character(11)", "acs." + quote( category.getUid() ) };
            columns.add( col );
        }

        for ( OrganisationUnitLevel level : levels )
        {
            String column = quote( PREFIX_ORGUNITLEVEL + level.getLevel() );
            String[] col = { column, "character(11)", "ous." + column };
            columns.add( col );
        }

        List<PeriodType> periodTypes = PeriodType.getAvailablePeriodTypes();

        for ( PeriodType periodType : periodTypes )
        {
            String column = quote( periodType.getName().toLowerCase() );
            String[] col = { column, "character varying(15)", "ps." + column };
            columns.add( col );
        }

        String[] de = { quote( "dx" ), "character(11) not null", "de.uid" };
        String[] co = { quote( "co" ), "character(11) not null", "co.uid" };
        String[] ao = { quote( "ao" ), "character(11) not null", "ao.uid" };
        String[] ou = { quote( "ou" ), "character(11) not null", "ou.uid" };
        String[] level = { quote( "level" ), "integer", "ous.level" };

        columns.addAll( Lists.newArrayList( de, co, ao, ou, level ) );

        if ( isApprovalEnabled() )
        {
            // Fall back through data set level, attribute combo level and
            // min approval level, defaulting to unapproved.
            String col = "coalesce(des.datasetapprovallevel, aon.approvallevel, da.minlevel, " + APPROVAL_LEVEL_UNAPPROVED + ")";

            String[] al = { quote( "approvallevel" ), "integer", col };
            columns.add( al );
        }

        return columns;
    }

    /**
     * Returns the distinct years with data, optionally limited to periods
     * starting on or after the given earliest date.
     */
    @Override
    public List<Integer> getDataYears( Date earliest )
    {
        String sql =
            "select distinct(extract(year from pe.startdate)) " +
            "from datavalue dv " +
            "inner join period pe on dv.periodid=pe.periodid " +
            "where pe.startdate is not null ";

        if ( earliest != null )
        {
            sql += "and pe.startdate >= '" + DateUtils.getMediumDateString( earliest ) + "'";
        }

        return jdbcTemplate.queryForList( sql, Integer.class );
    }

    /**
     * Nulls out org unit level columns above the aggregation level for the
     * given data elements, on each table partition in the queue.
     */
    @Override
    @Async
    public Future<?> applyAggregationLevels( ConcurrentLinkedQueue<AnalyticsTable> tables,
        Collection<String> dataElements, int aggregationLevel )
    {
        taskLoop: while ( true )
        {
            AnalyticsTable table = tables.poll();

            if ( table == null )
            {
                break taskLoop;
            }

            StringBuilder sql = new StringBuilder( "update " + table.getTempTableName() + " set " );

            for ( int i = 0; i < aggregationLevel; i++ )
            {
                int level = i + 1;

                String column = quote( DataQueryParams.LEVEL_PREFIX + level );

                sql.append( column + " = null," );
            }

            // Remove the trailing comma from the set clause.
            sql.deleteCharAt( sql.length() - ",".length() );

            sql.append( " where level > " + aggregationLevel );
            sql.append( " and dx in (" + getQuotedCommaDelimitedString( dataElements ) + ")" );

            log.debug( "Aggregation level SQL: " + sql.toString() );

            jdbcTemplate.execute( sql.toString() );
        }

        return null;
    }

    /**
     * Vacuums each table partition in the queue.
     */
    @Override
    @Async
    public Future<?> vacuumTablesAsync( ConcurrentLinkedQueue<AnalyticsTable> tables )
    {
        taskLoop: while ( true )
        {
            AnalyticsTable table = tables.poll();

            if ( table == null )
            {
                break taskLoop;
            }

            final String sql = statementBuilder.getVacuum( table.getTempTableName() );

            log.debug( "Vacuum SQL: " + sql );

            jdbcTemplate.execute( sql );
        }

        return null;
    }

    /**
     * Indicates whether the system should ignore data which has not been approved
     * in analytics tables.
     */
    private boolean isApprovalEnabled()
    {
        boolean setting = systemSettingManager.hideUnapprovedDataInAnalytics();
        boolean levels = !dataApprovalLevelService.getAllDataApprovalLevels().isEmpty();

        return setting && levels;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.hadoop.shuffle.collections; import java.io.DataInput; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.concurrent.ConcurrentLinkedQueue; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; import org.apache.ignite.internal.processors.hadoop.HadoopSerialization; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.shuffle.streams.HadoopDataInStream; import org.apache.ignite.internal.processors.hadoop.shuffle.streams.HadoopDataOutStream; import org.apache.ignite.internal.processors.hadoop.shuffle.streams.HadoopOffheapBuffer; import org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.internal.processors.hadoop.HadoopJobProperty.SHUFFLE_OFFHEAP_PAGE_SIZE; import static org.apache.ignite.internal.processors.hadoop.HadoopJobProperty.get; /** * Base class for all multimaps. 
*/ public abstract class HadoopMultimapBase implements HadoopMultimap { /** Default offheap page size. */ private static final int DFLT_OFFHEAP_PAGE_SIZE = 1024 * 1024; /** */ protected final GridUnsafeMemory mem; /** */ protected final int pageSize; /** */ private final Collection<Page> allPages = new ConcurrentLinkedQueue<>(); /** * @param jobInfo Job info. * @param mem Memory. */ protected HadoopMultimapBase(HadoopJobInfo jobInfo, GridUnsafeMemory mem) { assert jobInfo != null; assert mem != null; this.mem = mem; pageSize = get(jobInfo, SHUFFLE_OFFHEAP_PAGE_SIZE, DFLT_OFFHEAP_PAGE_SIZE); } /** * @param page Page. */ private void deallocate(Page page) { assert page != null; mem.release(page.ptr, page.size); } /** * @param valPtr Value page pointer. * @param nextValPtr Next value page pointer. */ protected void nextValue(long valPtr, long nextValPtr) { mem.writeLong(valPtr, nextValPtr); } /** * @param valPtr Value page pointer. * @return Next value page pointer. */ protected long nextValue(long valPtr) { return mem.readLong(valPtr); } /** * @param valPtr Value page pointer. * @param size Size. */ protected void valueSize(long valPtr, int size) { mem.writeInt(valPtr + 8, size); } /** * @param valPtr Value page pointer. * @return Value size. */ protected int valueSize(long valPtr) { return mem.readInt(valPtr + 8); } /** {@inheritDoc} */ @Override public void close() { for (Page page : allPages) deallocate(page); } /** * Reader for key and value. */ protected class ReaderBase implements AutoCloseable { /** */ private Object tmp; /** */ private final HadoopSerialization ser; /** */ private final HadoopDataInStream in = new HadoopDataInStream(mem); /** * @param ser Serialization. */ protected ReaderBase(HadoopSerialization ser) { assert ser != null; this.ser = ser; } /** * @param valPtr Value page pointer. * @return Value. 
*/ public Object readValue(long valPtr) { assert valPtr > 0 : valPtr; try { return read(valPtr + 12, valueSize(valPtr)); } catch (IgniteCheckedException e) { throw new IgniteException(e); } } /** * Resets temporary object to the given one. * * @param tmp Temporary object for reuse. */ public void resetReusedObject(Object tmp) { this.tmp = tmp; } /** * @param ptr Pointer. * @param size Object size. * @return Object. */ protected Object read(long ptr, long size) throws IgniteCheckedException { in.buffer().set(ptr, size); tmp = ser.read(in, tmp); return tmp; } /** {@inheritDoc} */ @Override public void close() throws IgniteCheckedException { ser.close(); } } /** * Base class for adders. */ protected abstract class AdderBase implements Adder { /** */ protected final HadoopSerialization keySer; /** */ protected final HadoopSerialization valSer; /** */ private final HadoopDataOutStream out; /** */ private long writeStart; /** Current page. */ private Page curPage; /** * @param ctx Task context. * @throws IgniteCheckedException If failed. */ protected AdderBase(HadoopTaskContext ctx) throws IgniteCheckedException { valSer = ctx.valueSerialization(); keySer = ctx.keySerialization(); out = new HadoopDataOutStream(mem) { @Override public long move(long size) { long ptr = super.move(size); if (ptr == 0) // Was not able to move - not enough free space. ptr = allocateNextPage(size); assert ptr != 0; return ptr; } }; } /** * @param requestedSize Requested size. * @return Next write pointer. */ private long allocateNextPage(long requestedSize) { int writtenSize = writtenSize(); long newPageSize = nextPageSize(writtenSize + requestedSize); long newPagePtr = mem.allocate(newPageSize); HadoopOffheapBuffer b = out.buffer(); b.set(newPagePtr, newPageSize); if (writtenSize != 0) { mem.copyMemory(writeStart, newPagePtr, writtenSize); b.move(writtenSize); } writeStart = newPagePtr; // At this point old page is not needed, so we release it. 
Page oldPage = curPage; curPage = new Page(newPagePtr, newPageSize); if (oldPage != null) allPages.add(oldPage); return b.move(requestedSize); } /** * Get next page size. * * @param required Required amount of data. * @return Next page size. */ private long nextPageSize(long required) { long pages = (required / pageSize) + 1; long pagesPow2 = nextPowerOfTwo(pages); return pagesPow2 * pageSize; } /** * Get next power of two which greater or equal to the given number. Naive implementation. * * @param val Number * @return Nearest pow2. */ private long nextPowerOfTwo(long val) { long res = 1; while (res < val) res = res << 1; if (res < 0) throw new IllegalArgumentException("Value is too big to find positive pow2: " + val); return res; } /** * @return Fixed pointer. */ private long fixAlignment() { HadoopOffheapBuffer b = out.buffer(); long ptr = b.pointer(); if ((ptr & 7L) != 0) { // Address is not aligned by octet. ptr = (ptr + 8L) & ~7L; b.pointer(ptr); } return ptr; } /** * @param off Offset. * @param o Object. * @return Page pointer. * @throws IgniteCheckedException If failed. */ protected long write(int off, Object o, HadoopSerialization ser) throws IgniteCheckedException { writeStart = fixAlignment(); if (off != 0) out.move(off); ser.write(out, o); return writeStart; } /** * @param size Size. * @return Pointer. */ protected long allocate(int size) { writeStart = fixAlignment(); out.move(size); return writeStart; } /** * Rewinds local allocation pointer to the given pointer if possible. * * @param ptr Pointer. */ protected void localDeallocate(long ptr) { HadoopOffheapBuffer b = out.buffer(); if (b.isInside(ptr)) b.pointer(ptr); else b.reset(); } /** * @return Written size. 
*/ protected int writtenSize() { return (int)(out.buffer().pointer() - writeStart); } /** {@inheritDoc} */ @Override public Key addKey(DataInput in, @Nullable Key reuse) throws IgniteCheckedException { throw new UnsupportedOperationException(); } /** {@inheritDoc} */ @Override public void close() throws IgniteCheckedException { if (curPage != null) allPages.add(curPage); keySer.close(); valSer.close(); } } /** * Iterator over values. */ protected class ValueIterator implements Iterator<Object> { /** */ private long valPtr; /** */ private final ReaderBase valReader; /** * @param valPtr Value page pointer. * @param valReader Value reader. */ protected ValueIterator(long valPtr, ReaderBase valReader) { this.valPtr = valPtr; this.valReader = valReader; } /** * @param valPtr Head value pointer. */ public void head(long valPtr) { this.valPtr = valPtr; } /** {@inheritDoc} */ @Override public boolean hasNext() { return valPtr != 0; } /** {@inheritDoc} */ @Override public Object next() { if (!hasNext()) throw new NoSuchElementException(); Object res = valReader.readValue(valPtr); valPtr = nextValue(valPtr); return res; } /** {@inheritDoc} */ @Override public void remove() { throw new UnsupportedOperationException(); } } /** * Page. */ private static class Page { /** Pointer. */ private final long ptr; /** Size. */ private final long size; /** * Constructor. * * @param ptr Pointer. * @param size Size. */ public Page(long ptr, long size) { this.ptr = ptr; this.size = size; } } }
/* * Copyright (c) 2008-2015 Citrix Systems, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.citrix.netscaler.nitro.resource.config.lb; import com.citrix.netscaler.nitro.resource.base.*; import com.citrix.netscaler.nitro.service.nitro_service; import com.citrix.netscaler.nitro.service.options; import com.citrix.netscaler.nitro.util.*; import com.citrix.netscaler.nitro.exception.nitro_exception; class lbmonbindings_response extends base_response { public lbmonbindings[] lbmonbindings; } /** * Configuration for monitro bindings resource. 
*/ public class lbmonbindings extends base_resource { private String monitorname; //------- Read only Parameter ---------; private String type; private String state; private Long __count; /** * <pre> * The name of the monitor.<br> Minimum length = 1 * </pre> */ public void set_monitorname(String monitorname) throws Exception{ this.monitorname = monitorname; } /** * <pre> * The name of the monitor.<br> Minimum length = 1 * </pre> */ public String get_monitorname() throws Exception { return this.monitorname; } /** * <pre> * The type of monitor.<br> Possible values = PING, TCP, HTTP, TCP-ECV, HTTP-ECV, UDP-ECV, DNS, FTP, LDNS-PING, LDNS-TCP, LDNS-DNS, RADIUS, USER, HTTP-INLINE, SIP-UDP, LOAD, FTP-EXTENDED, SMTP, SNMP, NNTP, MYSQL, MYSQL-ECV, MSSQL-ECV, ORACLE-ECV, LDAP, POP3, CITRIX-XML-SERVICE, CITRIX-WEB-INTERFACE, DNS-TCP, RTSP, ARP, CITRIX-AG, CITRIX-AAC-LOGINPAGE, CITRIX-AAC-LAS, CITRIX-XD-DDC, ND6, CITRIX-WI-EXTENDED, DIAMETER, RADIUS_ACCOUNTING, STOREFRONT * </pre> */ public String get_type() throws Exception { return this.type; } /** * <pre> * The state of the monitor.<br> Possible values = ENABLED, DISABLED * </pre> */ public String get_state() throws Exception { return this.state; } /** * <pre> * converts nitro response into object and returns the object array in case of get request. 
* </pre> */ protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception{ lbmonbindings_response result = (lbmonbindings_response) service.get_payload_formatter().string_to_resource(lbmonbindings_response.class, response); if(result.errorcode != 0) { if (result.errorcode == 444) { service.clear_session(); } if(result.severity != null) { if (result.severity.equals("ERROR")) throw new nitro_exception(result.message,result.errorcode); } else { throw new nitro_exception(result.message,result.errorcode); } } return result.lbmonbindings; } /** * <pre> * Returns the value of object identifier argument * </pre> */ protected String get_object_name() { return this.monitorname; } /** * Use this API to fetch lbmonbindings resource of given name . */ public static lbmonbindings get(nitro_service service, String monitorname) throws Exception{ lbmonbindings obj = new lbmonbindings(); obj.set_monitorname(monitorname); lbmonbindings response = (lbmonbindings) obj.get_resource(service); return response; } /** * Use this API to fetch lbmonbindings resources of given names . */ public static lbmonbindings[] get(nitro_service service, String monitorname[]) throws Exception{ if (monitorname !=null && monitorname.length>0) { lbmonbindings response[] = new lbmonbindings[monitorname.length]; lbmonbindings obj[] = new lbmonbindings[monitorname.length]; for (int i=0;i<monitorname.length;i++) { obj[i] = new lbmonbindings(); obj[i].set_monitorname(monitorname[i]); response[i] = (lbmonbindings) obj[i].get_resource(service); } return response; } return null; } /** * Use this API to fetch filtered set of lbmonbindings resources. * filter string should be in JSON format.eg: "port:80,servicetype:HTTP". 
*/ public static lbmonbindings[] get_filtered(nitro_service service, lbmonbindings obj, String filter) throws Exception{ options option = new options(); option.set_filter(filter); option.set_args(nitro_util.object_to_string_withoutquotes(obj)); lbmonbindings[] response = (lbmonbindings[]) obj.getfiltered(service, option); return response; } /** * Use this API to fetch filtered set of lbmonbindings resources. * set the filter parameter values in filtervalue object. */ public static lbmonbindings[] get_filtered(nitro_service service, lbmonbindings obj, filtervalue[] filter) throws Exception{ options option = new options(); option.set_filter(filter); option.set_args(nitro_util.object_to_string_withoutquotes(obj)); lbmonbindings[] response = (lbmonbindings[]) obj.getfiltered(service, option); return response; } /** * Use this API to count the lbmonbindings resources configured on NetScaler. */ public static long count(nitro_service service, lbmonbindings obj) throws Exception{ options option = new options(); option.set_count(true); option.set_args(nitro_util.object_to_string_withoutquotes(obj)); lbmonbindings[] response = (lbmonbindings[])obj.get_resources(service, option); if (response != null) { return response[0].__count; } return 0; } /** * Use this API to count filtered the set of lbmonbindings resources. * filter string should be in JSON format.eg: "port:80,servicetype:HTTP". */ public static long count_filtered(nitro_service service, lbmonbindings obj, String filter) throws Exception{ options option = new options(); option.set_count(true); option.set_filter(filter); option.set_args(nitro_util.object_to_string_withoutquotes(obj)); lbmonbindings[] response = (lbmonbindings[]) obj.getfiltered(service, option); if (response != null) { return response[0].__count; } return 0; } /** * Use this API to count the filtered set of lbmonbindings resources. * set the filter parameter values in filtervalue object. 
*/ public static long count_filtered(nitro_service service, lbmonbindings obj, filtervalue[] filter) throws Exception{ options option = new options(); option.set_count(true); option.set_filter(filter); option.set_args(nitro_util.object_to_string_withoutquotes(obj)); lbmonbindings[] response = (lbmonbindings[]) obj.getfiltered(service, option); if (response != null) { return response[0].__count; } return 0; } public static class stateEnum { public static final String ENABLED = "ENABLED"; public static final String DISABLED = "DISABLED"; } public static class typeEnum { public static final String PING = "PING"; public static final String TCP = "TCP"; public static final String HTTP = "HTTP"; public static final String TCP_ECV = "TCP-ECV"; public static final String HTTP_ECV = "HTTP-ECV"; public static final String UDP_ECV = "UDP-ECV"; public static final String DNS = "DNS"; public static final String FTP = "FTP"; public static final String LDNS_PING = "LDNS-PING"; public static final String LDNS_TCP = "LDNS-TCP"; public static final String LDNS_DNS = "LDNS-DNS"; public static final String RADIUS = "RADIUS"; public static final String USER = "USER"; public static final String HTTP_INLINE = "HTTP-INLINE"; public static final String SIP_UDP = "SIP-UDP"; public static final String LOAD = "LOAD"; public static final String FTP_EXTENDED = "FTP-EXTENDED"; public static final String SMTP = "SMTP"; public static final String SNMP = "SNMP"; public static final String NNTP = "NNTP"; public static final String MYSQL = "MYSQL"; public static final String MYSQL_ECV = "MYSQL-ECV"; public static final String MSSQL_ECV = "MSSQL-ECV"; public static final String ORACLE_ECV = "ORACLE-ECV"; public static final String LDAP = "LDAP"; public static final String POP3 = "POP3"; public static final String CITRIX_XML_SERVICE = "CITRIX-XML-SERVICE"; public static final String CITRIX_WEB_INTERFACE = "CITRIX-WEB-INTERFACE"; public static final String DNS_TCP = "DNS-TCP"; public static final String 
RTSP = "RTSP"; public static final String ARP = "ARP"; public static final String CITRIX_AG = "CITRIX-AG"; public static final String CITRIX_AAC_LOGINPAGE = "CITRIX-AAC-LOGINPAGE"; public static final String CITRIX_AAC_LAS = "CITRIX-AAC-LAS"; public static final String CITRIX_XD_DDC = "CITRIX-XD-DDC"; public static final String ND6 = "ND6"; public static final String CITRIX_WI_EXTENDED = "CITRIX-WI-EXTENDED"; public static final String DIAMETER = "DIAMETER"; public static final String RADIUS_ACCOUNTING = "RADIUS_ACCOUNTING"; public static final String STOREFRONT = "STOREFRONT"; } }
/* * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1997-1999 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Bob Jervis * Google Inc. * * Alternatively, the contents of this file may be used under the terms of * the GNU General Public License Version 2 or later (the "GPL"), in which * case the provisions of the GPL are applicable instead of those above. If * you wish to allow use of your version of this file only under the terms of * the GPL and not to allow others to use your version of this file under the * MPL, indicate your decision by deleting the provisions above and replacing * them with the notice and other provisions required by the GPL. If you do * not delete the provisions above, a recipient may use your version of this * file under either the MPL or the GPL. * * ***** END LICENSE BLOCK ***** */ package com.google.javascript.rhino.jstype; import com.google.common.base.Preconditions; import com.google.javascript.rhino.ErrorReporter; /** * A {@code NamedType} is a named reference to some other type. This provides * a convenient mechanism for implementing forward references to types; a * {@code NamedType} can be used as a placeholder until its reference is * resolved. 
It is also useful for representing type names in jsdoc type
 * annotations, some of which may never be resolved (as they may refer to
 * types in host systems not yet supported by JSCompiler, such as the JVM.)<p>
 *
 * An important distinction: {@code NamedType} is a type name reference,
 * whereas {@link ObjectType} is a named type object, such as an Enum name.
 * The Enum itself is typically used only in a dot operator to name one of its
 * constants, or in a declaration, where its name will appear in a
 * NamedType.<p>
 *
 * A {@code NamedType} is not currently a full-fledged typedef, because it
 * cannot resolve to any JavaScript type. It can only resolve to a named
 * {@link JSTypeRegistry} type, or to {@link FunctionType} or
 * {@link EnumType}.<p>
 *
 * If full typedefs are to be supported, then each method on each type class
 * needs to be reviewed to make sure that everything works correctly through
 * typedefs. Alternatively, we would need to walk through the parse tree and
 * unroll each reference to a {@code NamedType} to its resolved type before
 * applying the rest of the analysis.<p>
 *
 * TODO(user): Revisit all of this logic.<p>
 *
 * The existing typing logic is hacky. Unresolved types should get processed
 * in a more consistent way, but with the Rhino merge coming, there will be
 * much that has to be changed.<p>
 *
 *
 */
class NamedType extends ProxyObjectType {
  private static final long serialVersionUID = 1L;

  // The (possibly dotted) type name this reference stands for, e.g. "foo.bar.Baz".
  private final String reference;

  // Source position of the reference, used only when reporting warnings.
  private final String sourceName;
  private final int lineno;
  private final int charno;

  /**
   * If true, don't warn about unresolvable type names.
   *
   * NOTE(nicksantos): A lot of third-party code doesn't use our type syntax.
   * They have code like
   * {@code @return} the bus.
   * and they clearly don't mean that "the" is a type. In these cases, we're
   * forgiving and try to guess whether or not "the" is a type when it's not
   * clear.
   */
  private boolean forgiving = false;

  /**
   * Create a named type based on the reference. The type starts out proxying
   * the native UNKNOWN type until it is resolved.
   */
  NamedType(JSTypeRegistry registry, String reference,
      String sourceName, int lineno, int charno) {
    super(registry, registry.getNativeObjectType(JSTypeNative.UNKNOWN_TYPE));
    Preconditions.checkNotNull(reference);
    this.reference = reference;
    this.sourceName = sourceName;
    this.lineno = lineno;
    this.charno = charno;
  }

  /** {@inheritDoc} Suppresses unknown-type warnings for this reference. */
  @Override
  void forgiveUnknownNames() {
    forgiving = true;
  }

  /** Returns the type to which this refers (which is unknown if unresolved). */
  public JSType getReferencedType() {
    return referencedType;
  }

  @Override
  public String getReferenceName() {
    return reference;
  }

  @Override
  public String toString() {
    return reference;
  }

  @Override
  public boolean hasReferenceName() {
    return true;
  }

  @Override
  boolean isNamedType() {
    return true;
  }

  @Override
  public boolean isNominalType() {
    return true;
  }

  /**
   * Two named types are equivalent if they are the same {@code
   * ObjectType} object. This is complicated by the fact that isEquivalent
   * is sometimes called before we have a chance to resolve the type
   * names.
   *
   * @return {@code true} iff {@code that} == {@code this} or {@code that}
   *         is a {@link NamedType} whose reference is the same as ours,
   *         or {@code that} is the type we reference.
   */
  @Override
  public boolean isEquivalentTo(JSType that) {
    if (this == that) {
      return true;
    }

    // Any nominal ObjectType with the same reference name matches;
    // this covers both other NamedTypes and the referenced type itself.
    ObjectType objType = ObjectType.cast(that);
    if (objType != null) {
      return objType.isNominalType() &&
          reference.equals(objType.getReferenceName());
    }

    return false;
  }

  @Override
  public int hashCode() {
    return reference.hashCode();
  }

  /**
   * Resolve the referenced type within the enclosing scope.
   */
  @Override
  JSType resolveInternal(ErrorReporter t, StaticScope<JSType> enclosing) {
    // TODO(user): Investigate whether it is really necessary to keep two
    // different mechanisms for resolving named types, and if so, which order
    // makes more sense. Now, resolution via registry is first in order to
    // avoid triggering the warnings built into the resolution via properties.
    boolean resolved = resolveViaRegistry(t, enclosing);
    if (detectImplicitPrototypeCycle()) {
      handleTypeCycle(t);
    }

    if (resolved) {
      super.resolveInternal(t, enclosing);
      return referencedType;
    }

    // Registry lookup failed; fall back to walking the scope's properties.
    resolveViaProperties(t, enclosing);
    if (detectImplicitPrototypeCycle()) {
      handleTypeCycle(t);
    }

    super.resolveInternal(t, enclosing);
    return referencedType;
  }

  /**
   * Resolves a named type by looking it up in the registry.
   * @return True if we resolved successfully.
   */
  private boolean resolveViaRegistry(
      ErrorReporter t, StaticScope<JSType> enclosing) {
    ObjectType type = ObjectType.cast(registry.getType(reference));
    if (type != null) {
      setReferencedType(type, t, enclosing);
      return true;
    }
    return false;
  }

  /**
   * Resolves a named type by looking up its first component in the scope, and
   * subsequent components as properties. The scope must have been fully
   * parsed and a symbol table constructed.
   */
  private void resolveViaProperties(ErrorReporter t,
                                    StaticScope<JSType> enclosing) {
    // Split keeps trailing empty strings, so "a..b" or "a." yields an
    // empty component, which is rejected below.
    String[] componentNames = reference.split("\\.", -1);
    if (componentNames[0].length() == 0) {
      handleUnresolvedType(t);
      return;
    }
    StaticSlot<JSType> slot = enclosing.getSlot(componentNames[0]);
    if (slot == null) {
      handleUnresolvedType(t);
      return;
    }
    // If the first component has a type of 'Unknown', then any type
    // names using it should be regarded as silently 'Unknown' rather than be
    // noisy about it.
    // NOTE(review): the check below actually tests for the All/No types rather
    // than Unknown — confirm whether the comment or the condition is stale.
    JSType slotType = slot.getType();
    if (slotType == null || slotType.isAllType() || slotType.isNoType()) {
      handleUnresolvedType(t);
      return;
    }
    JSType value = getTypedefType(t, slot, componentNames[0]);
    if (value == null) {
      handleUnresolvedType(t);
      return;
    }

    // resolving component by component
    for (int i = 1; i < componentNames.length; i++) {
      ObjectType parentClass = ObjectType.cast(value);
      if (parentClass == null) {
        handleUnresolvedType(t);
        return;
      }
      if (componentNames[i].length() == 0) {
        handleUnresolvedType(t);
        return;
      }
      value = parentClass.getPropertyType(componentNames[i]);
    }

    // last component of the chain: only constructors/interfaces (resolving to
    // their instance type) and enums (resolving to their element type) are
    // accepted as named types.
    if (value instanceof FunctionType) {
      FunctionType functionType = (FunctionType) value;
      if (functionType.isConstructor() || functionType.isInterface()) {
        setReferencedType(functionType.getInstanceType(), t, enclosing);
      } else {
        handleUnresolvedType(t);
      }
    } else if (value instanceof EnumType) {
      setReferencedType(((EnumType) value).getElementsType(), t, enclosing);
    } else {
      handleUnresolvedType(t);
    }
  }

  /**
   * Records the resolved type, checking for enum-element cycles, and marks
   * this reference as resolved.
   */
  private void setReferencedType(ObjectType type, ErrorReporter t,
      StaticScope<JSType> enclosing) {
    referencedType = type;
    checkEnumElementCycle(t);
    setResolvedTypeInternal(referencedType);
  }

  /**
   * Falls back to the UNKNOWN type and warns about the inheritance cycle.
   */
  private void handleTypeCycle(ErrorReporter t) {
    referencedType = registry.getNativeObjectType(JSTypeNative.UNKNOWN_TYPE);
    t.warning("Cycle detected in inheritance chain of type " + reference,
        sourceName, lineno, null, charno);
    setResolvedTypeInternal(referencedType);
  }

  /**
   * Detects the degenerate case where an enum's element type points back to
   * this reference, which would otherwise recurse forever.
   */
  private void checkEnumElementCycle(ErrorReporter t) {
    if (referencedType instanceof EnumElementType &&
        ((EnumElementType) referencedType).getPrimitiveType() == this) {
      handleTypeCycle(t);
    }
  }

  // Warns about this type being unresolved iff it's not a forward-declared
  // type name. Forward-declared and forgiven names silently resolve to the
  // CHECKED_UNKNOWN type instead.
  private void handleUnresolvedType(ErrorReporter t) {
    if (!registry.isForwardDeclaredType(reference) &&
        !forgiving &&
        registry.isLastGeneration()) {
      t.warning("Unknown type " + reference, sourceName, lineno, null, charno);
    } else {
      referencedType = registry.getNativeObjectType(
          JSTypeNative.CHECKED_UNKNOWN_TYPE);
    }

    setResolvedTypeInternal(referencedType);
  }

  /**
   * Returns the slot's type, or null (after reporting an unresolved-type
   * warning) when the slot has no type.
   */
  JSType getTypedefType(ErrorReporter t, StaticSlot<JSType> slot, String name) {
    JSType type = slot.getType();
    if (type != null) {
      return type;
    }
    handleUnresolvedType(t);
    return null;
  }
}
// TODO mix jdbc and tx test
package za.sabob.olive.jdbc.mixed;

import java.sql.*;
import java.util.*;
import javax.sql.*;
import org.testng.*;
import org.testng.annotations.*;
import za.sabob.olive.domain.*;
import za.sabob.olive.jdbc.*;
import za.sabob.olive.jdbc.context.*;
import za.sabob.olive.postgres.*;
import za.sabob.olive.ps.*;
import za.sabob.olive.query.*;
import static za.sabob.olive.util.DBTestUtils.isTimeout;
import za.sabob.olive.util.*;

/**
 * Multi-threaded stress test that interleaves plain JDBC operations with
 * transactions. With a 1-connection pool and a 0 checkout timeout there must
 * be no deadlock, because Olive reuses a single root {@link JDBCContext}
 * (and thus a single connection) per thread.
 */
public class DeadlockTest extends PostgresBaseTest {

    // Number of persons observed by the most recent successful thread run.
    int personsCount = 0;

    @BeforeClass(alwaysRun = true)
    public void beforeClass() {
        ds = PostgresTestUtils.createDS( 1 );
        System.out.println( "Postgres created" );
        PostgresTestUtils.createPersonTable( ds );

        // There should be no deadlocks because Olive uses only 1 connection per thread,
        // so a 0 (infinite) checkout timeout is safe here.
        ds.setCheckoutTimeout( 0 );
    }

    /**
     * Inserts two rows, then nests a JDBC operation (which itself nests a
     * transaction), and finally verifies the context is still the root and is
     * fully cleaned up. Runs 100 times across 20 threads.
     *
     * @throws Exception if the run times out or any operation fails
     */
    @Test(successPercentage = 100, threadPoolSize = 20, invocationCount = 100, timeOut = 1110000)
    public void threadTest() throws Exception {

        JDBCContext ctx = null;
        PreparedStatement ps = null;

        try {
            ctx = JDBC.beginOperation( ds );

            SqlParams params = new SqlParams();
            params.set( "name", "Bob" );
            ps = OliveUtils.prepareStatement( ctx.getConnection(), "insert into person (name) values(:name)", params );
            int count = ps.executeUpdate();

            params.set( "name", "John" );
            count = ps.executeUpdate();

            nestedJDBC( ds );

            List<Person> persons = getJDBCPersons();
            personsCount = persons.size();

        } catch ( Throwable e ) {
            if ( isTimeout( e ) ) {
                throw new IllegalStateException( "Oh no! Timeout!! ", e );
            } else {
                throw new RuntimeException( e );
            }

        } finally {
            // FIX: the PreparedStatement was previously leaked. Close it
            // (best-effort) before releasing the context/connection.
            if ( ps != null ) {
                try {
                    ps.close();
                } catch ( SQLException ignored ) {
                    // best-effort cleanup only; the connection is released below
                }
            }

            try {
                boolean connectionCreated = ctx != null;

                if ( connectionCreated ) {
                    boolean isRoot = ctx.isRootContext();
                    Assert.assertTrue( isRoot, "JDBC Connection was created, we must be root" );
                    Assert.assertTrue( ctx.isRootConnectionHolder() );

                    JDBC.cleanupOperation( ctx );

                    // After cleanup the context is still root, but no longer
                    // holds the root connection.
                    Assert.assertTrue( ctx.isRootContext() );
                    Assert.assertFalse( ctx.isRootConnectionHolder() );
                }

            } catch ( Throwable e ) {
                e.printStackTrace();
                System.out.println( "WHY: " + e.getMessage() );
                JDBC.cleanupOperation( ctx );
            }
        }
    }

    /**
     * Begins a nested JDBC operation, nests a transaction inside it, and
     * cleans the operation up. Timeouts are tolerated; other failures are
     * rethrown as runtime exceptions.
     */
    public void nestedJDBC( DataSource ds ) {

        JDBCContext ctx = JDBC.beginOperation( ds );
        ctx.isRootContext();

        try {
            nestedTX( ds );
            List<Person> persons = getJDBCPersons();

        } catch ( Throwable e ) {
            if ( isTimeout( e ) ) {
                // ignore: timeouts are an expected failure mode in this stress test
            } else {
                throw new RuntimeException( e );
            }

        } finally {
            try {
                boolean isRoot = ctx.isRootContext();

                JDBC.cleanupOperation( ctx );

                isRoot = ctx.isRootContext();
                if ( !isRoot ) {
                    System.out.println( "BUG 1.2" );
                }

            } catch ( Throwable e ) {
                System.out.println( "SERIOUS PROBLEM 1.1" + e.getMessage() );
                JDBC.cleanupOperation( ctx );
            }
        }
    }

    /**
     * Begins a transaction, queries persons through it, and cleans the
     * transaction up, asserting the context ends as the root context.
     */
    public void nestedTX( DataSource ds ) {

        JDBCContext ctx = JDBC.beginTransaction( ds );

        try {
            List<Person> persons = getTXPersons();

        } catch ( Exception ex ) {
            if ( isTimeout( ex ) ) {
                // ignore: expected under load
            } else {
                throw new RuntimeException( ex );
            }

        } finally {
            try {
                boolean isRoot = ctx.isRootConnectionHolder();
                boolean connectionCreated = ctx != null;

                JDBC.cleanupTransaction( ctx );

                isRoot = ctx.isRootContext();
                Assert.assertTrue( isRoot, "TX Connection was closed, this must be root Context" );

                if ( connectionCreated ) {
                    Assert.assertTrue( isRoot, "TX Connection was created, we must be root connection holder " );
                } else {
                    if ( isRoot ) {
                        System.out.println( "BUG TX, conn creted?: " + connectionCreated + ", isRoot: " + isRoot );
                    }
                }

            } catch ( Exception e ) {
                System.out.println( "SERIOUS PROBLEM 2.2" + e.getMessage() );
            }
        }
    }

    /**
     * Loads all persons through the given context, mapping each row to a
     * {@link Person}. Any failure is wrapped in a RuntimeException.
     */
    public List<Person> getPersons( JDBCContext ctx ) {

        try {
            PreparedStatement ps = OliveUtils.prepareStatement( ctx, "select * from person" );

            List<Person> persons = OliveUtils.mapToBeans( ps, new RowMapper<Person>() {
                @Override
                public Person map( ResultSet rs, int rowNum ) throws SQLException {
                    Person person = new Person();
                    person.id = rs.getLong( "id" );
                    person.name = rs.getString( "name" );
                    return person;
                }
            } );

            return persons;

        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Loads persons via the latest non-transactional (auto-commit) context. */
    public List<Person> getJDBCPersons() {
        JDBCContext ctx = DSF.getLatestJDBCContext( ds );

        boolean isAutoCommit = OliveUtils.getAutoCommit( ctx.getConnection() );
        Assert.assertTrue( isAutoCommit, " Connection should not be a transactional connection." );

        return getPersons( ctx );
    }

    /** Loads persons via the latest transactional (non-auto-commit) context. */
    public List<Person> getTXPersons() {
        JDBCContext ctx = DSF.getLatestJDBCContext( ds );

        boolean isTransaction = !OliveUtils.getAutoCommit( ctx.getConnection() );
        Assert.assertTrue( isTransaction, " Connection should be a transactional connection." );

        return getPersons( ctx );
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/licenses/publicdomain
 */
/**
 * Misc utilities in JSR166 performance tests
 */
package com.gemstone.gemfire.internal.util.concurrent.cm;

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

class LoopHelpers {

  static final SimpleRandom staticRNG = new SimpleRandom();

  // Some mindless computation to do between synchronizations...

  /**
   * generates 32 bit pseudo-random numbers.
   * Adapted from http://www.snippets.org
   */
  public static int compute1(int x) {
    int lo = 16807 * (x & 0xFFFF);
    int hi = 16807 * (x >>> 16);
    lo += (hi & 0x7FFF) << 16;
    if ((lo & 0x80000000) != 0) {
      lo &= 0x7fffffff;
      ++lo;
    }
    lo += hi >>> 15;
    if (lo == 0 || (lo & 0x80000000) != 0) {
      lo &= 0x7fffffff;
      ++lo;
    }
    return lo;
  }

  /**
   * Computes a linear congruential random number a random number
   * of times (0-7 iterations, chosen from bits 4-6 of the input).
   */
  public static int compute2(int x) {
    int loops = (x >>> 4) & 7;
    while (loops-- > 0) {
      x = (x * 2147483647) % 16807;
    }
    return x;
  }

  /**
   * Yet another random number generator
   */
  public static int compute3(int x) {
    int t = (x % 127773) * 16807 - (x / 127773) * 2836;
    return (t > 0) ? t : t + 0x7fffffff;
  }

  /**
   * Yet another random number generator
   */
  public static int compute4(int x) {
    return x * 134775813 + 1;
  }

  /**
   * Yet another random number generator
   */
  public static int compute5(int x) {
    return 36969 * (x & 65535) + (x >> 16);
  }

  /**
   * Marsaglia xorshift (1, 3, 10)
   */
  public static int compute6(int seed) {
    seed ^= seed << 1;
    seed ^= seed >>> 3;
    seed ^= (seed << 10);
    return seed;
  }

  /**
   * Marsaglia xorshift (6, 21, 7)
   */
  public static int compute7(int y) {
    y ^= y << 6;
    y ^= y >>> 21;
    y ^= (y << 7);
    return y;
  }

  /**
   * Marsaglia xorshift for longs
   */
  public static long compute8(long x) {
    x ^= x << 13;
    x ^= x >>> 7;
    x ^= (x << 17);
    return x;
  }

  /** 32-bit xorshift RNG; distinct instances get distinct seeds via {@code seq}. */
  public static final class XorShift32Random {
    static final AtomicInteger seq = new AtomicInteger(8862213);
    int x = -1831433054;

    public XorShift32Random(int seed) {
      x = seed;
    }

    public XorShift32Random() {
      this((int) System.nanoTime() + seq.getAndAdd(129));
    }

    public int next() {
      x ^= x << 6;
      x ^= x >>> 21;
      x ^= (x << 7);
      return x;
    }
  }

  /** Multiplication-free RNG from Marsaglia "Xorshift RNGs" paper */
  public static final class MarsagliaRandom {
    static final AtomicInteger seq = new AtomicInteger(3122688);
    int x;
    int y = 842502087;
    int z = -715159705;
    int w = 273326509;

    public MarsagliaRandom(int seed) {
      x = seed;
    }

    public MarsagliaRandom() {
      this((int) System.nanoTime() + seq.getAndAdd(129));
    }

    public int next() {
      int t = x ^ (x << 11);
      x = y;
      y = z;
      z = w;
      return w = (w ^ (w >>> 19) ^ (t ^ (t >>> 8)));
    }
  }

  /**
   * Unsynchronized version of java.util.Random algorithm.
   * Returns non-negative ints only.
   */
  public static final class SimpleRandom {
    private static final long multiplier = 0x5DEECE66DL;
    private static final long addend = 0xBL;
    private static final long mask = (1L << 48) - 1;
    static final AtomicLong seq = new AtomicLong(-715159705);
    private long seed;

    SimpleRandom(long s) {
      seed = s;
    }

    SimpleRandom() {
      seed = System.nanoTime() + seq.getAndAdd(129);
    }

    public void setSeed(long s) {
      seed = s;
    }

    public int next() {
      long nextseed = (seed * multiplier + addend) & mask;
      seed = nextseed;
      return ((int) (nextseed >>> 17)) & 0x7FFFFFFF;
    }
  }

  /**
   * Timer intended to be run from a CyclicBarrier action: the first call
   * records the start time, the second records the end time.
   */
  public static class BarrierTimer implements Runnable {
    volatile boolean started;
    volatile long startTime;
    volatile long endTime;

    public void run() {
      long t = System.nanoTime();
      if (!started) {
        started = true;
        startTime = t;
      } else
        endTime = t;
    }

    public void clear() {
      started = false;
    }

    /** Elapsed nanoseconds between the two barrier trips. */
    public long getTime() {
      return endTime - startTime;
    }
  }

  /**
   * Right-justifies {@code n} in a field of 10 spaces; returns the bare
   * number when it is wider than the field.
   */
  public static String rightJustify(long n) {
    String field = "        ";
    String num = Long.toString(n);
    if (num.length() >= field.length())
      return num;
    // FIX: use StringBuilder — no synchronization is needed here, so the
    // legacy StringBuffer was pure overhead.
    StringBuilder b = new StringBuilder(field);
    b.replace(b.length() - num.length(), b.length(), num);
    return b.toString();
  }
}
package org.apereo.cas.config;

import org.apereo.cas.CentralAuthenticationService;
import org.apereo.cas.adaptors.radius.JRadiusServerImpl;
import org.apereo.cas.adaptors.radius.RadiusClientFactory;
import org.apereo.cas.adaptors.radius.RadiusProtocol;
import org.apereo.cas.adaptors.radius.authentication.RadiusMultifactorAuthenticationProvider;
import org.apereo.cas.adaptors.radius.authentication.RadiusTokenAuthenticationHandler;
import org.apereo.cas.adaptors.radius.RadiusAuthenticationMetaDataPopulator;
import org.apereo.cas.adaptors.radius.web.flow.RadiusAuthenticationWebflowAction;
import org.apereo.cas.adaptors.radius.web.flow.RadiusAuthenticationWebflowEventResolver;
import org.apereo.cas.adaptors.radius.web.flow.RadiusMultifactorWebflowConfigurer;
import org.apereo.cas.authentication.AuthenticationSystemSupport;
import org.apereo.cas.authentication.principal.DefaultPrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.services.MultifactorAuthenticationProvider;
import org.apereo.cas.services.MultifactorAuthenticationProviderSelector;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.ticket.registry.TicketRegistrySupport;
import org.apereo.cas.web.flow.CasWebflowConfigurer;
import org.apereo.cas.web.flow.authentication.FirstMultifactorAuthenticationProviderSelector;
import org.apereo.cas.web.flow.resolver.CasWebflowEventResolver;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.util.CookieGenerator;
import org.springframework.webflow.config.FlowDefinitionRegistryBuilder;
import org.springframework.webflow.definition.registry.FlowDefinitionRegistry;
import org.springframework.webflow.engine.builder.support.FlowBuilderServices;
import org.springframework.webflow.execution.Action;

import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * This is {@link RadiusMultifactorConfiguration}.
 *
 * Spring configuration that wires the RADIUS multifactor authentication
 * provider, its webflow, and the supporting authentication handler beans.
 *
 * @author Misagh Moayyed
 * @author Nagai Takayuki
 * @since 5.0.0
 */
@Configuration("radiusMfaConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class RadiusMultifactorConfiguration {

    @Autowired
    private CasConfigurationProperties casProperties;

    @Autowired
    private ApplicationContext applicationContext;

    // NOTE(review): raw Map — presumably keyed by AuthenticationHandler with a
    // resolver value (see initializeRootApplicationContext, which puts a
    // handler with a null value); confirm the declared generic types upstream.
    @Autowired
    @Qualifier("authenticationHandlersResolvers")
    private Map authenticationHandlersResolvers;

    // NOTE(review): raw List — metadata populators; this class prepends to it.
    @Autowired
    @Qualifier("authenticationMetadataPopulators")
    private List authenticationMetadataPopulators;

    // NOTE(review): this field is also named "builder" and is shadowed by a
    // local variable in radiusFlowRegistry(); the method disambiguates with
    // this.builder, but a rename would aid readability.
    @Autowired
    @Qualifier("builder")
    private FlowBuilderServices builder;

    @Autowired
    @Qualifier("loginFlowRegistry")
    private FlowDefinitionRegistry loginFlowDefinitionRegistry;

    @Autowired
    private FlowBuilderServices flowBuilderServices;

    @Autowired
    @Qualifier("centralAuthenticationService")
    private CentralAuthenticationService centralAuthenticationService;

    @Autowired
    @Qualifier("defaultAuthenticationSystemSupport")
    private AuthenticationSystemSupport authenticationSystemSupport;

    @Autowired
    @Qualifier("defaultTicketRegistrySupport")
    private TicketRegistrySupport ticketRegistrySupport;

    @Autowired
    @Qualifier("servicesManager")
    private ServicesManager servicesManager;

    // Optional injection: falls back to the first-provider selector when no
    // bean named "multifactorAuthenticationProviderSelector" exists.
    @Autowired(required = false)
    @Qualifier("multifactorAuthenticationProviderSelector")
    private MultifactorAuthenticationProviderSelector multifactorAuthenticationProviderSelector = new FirstMultifactorAuthenticationProviderSelector();

    @Autowired
    @Qualifier("warnCookieGenerator")
    private CookieGenerator warnCookieGenerator;

    /**
     * Radius flow registry flow definition registry.
     *
     * @return the flow definition registry
     */
    @RefreshScope
    @Bean
    public FlowDefinitionRegistry radiusFlowRegistry() {
        final FlowDefinitionRegistryBuilder builder = new FlowDefinitionRegistryBuilder(this.applicationContext, this.builder);
        builder.setBasePath("classpath*:/webflow");
        builder.addFlowLocationPattern("/mfa-radius/*-webflow.xml");
        return builder.build();
    }

    /**
     * Radius servers list, built from the MFA RADIUS client/server properties.
     *
     * @return the list
     */
    @RefreshScope
    @Bean
    public List radiusTokenServers() {
        final List<JRadiusServerImpl> list = new ArrayList<>();

        final RadiusClientFactory factory = new RadiusClientFactory();
        factory.setAccountingPort(casProperties.getAuthn().getMfa().getRadius().getClient().getAccountingPort());
        factory.setAuthenticationPort(casProperties.getAuthn().getMfa().getRadius().getClient().getAuthenticationPort());
        factory.setInetAddress(casProperties.getAuthn().getMfa().getRadius().getClient().getInetAddress());
        factory.setSharedSecret(casProperties.getAuthn().getMfa().getRadius().getClient().getSharedSecret());
        factory.setSocketTimeout(casProperties.getAuthn().getMfa().getRadius().getClient().getSocketTimeout());

        final RadiusProtocol protocol = RadiusProtocol.valueOf(casProperties.getAuthn().getMfa().getRadius().getServer().getProtocol());

        final JRadiusServerImpl impl = new JRadiusServerImpl(protocol, factory);
        impl.setRetries(casProperties.getAuthn().getMfa().getRadius().getServer().getRetries());
        impl.setNasIdentifier(casProperties.getAuthn().getMfa().getRadius().getServer().getNasIdentifier());
        impl.setNasPort(casProperties.getAuthn().getMfa().getRadius().getServer().getNasPort());
        impl.setNasPortId(casProperties.getAuthn().getMfa().getRadius().getServer().getNasPortId());
        impl.setNasRealPort(casProperties.getAuthn().getMfa().getRadius().getServer().getNasRealPort());
        impl.setNasIpAddress(casProperties.getAuthn().getMfa().getRadius().getServer().getNasIpAddress());
        impl.setNasIpv6Address(casProperties.getAuthn().getMfa().getRadius().getServer().getNasIpv6Address());

        list.add(impl);
        return list;
    }

    /** RADIUS multifactor provider backed by the token authentication handler. */
    @RefreshScope
    @Bean
    public MultifactorAuthenticationProvider radiusAuthenticationProvider() {
        final RadiusMultifactorAuthenticationProvider p = new RadiusMultifactorAuthenticationProvider();
        p.setRadiusAuthenticationHandler(radiusTokenAuthenticationHandler());
        return p;
    }

    /** Populator that records the RADIUS MFA context attribute on authentications. */
    @Bean
    @RefreshScope
    public RadiusAuthenticationMetaDataPopulator radiusAuthenticationMetaDataPopulator() {
        final RadiusAuthenticationMetaDataPopulator pop = new RadiusAuthenticationMetaDataPopulator();
        pop.setAuthenticationContextAttribute(casProperties.getAuthn().getMfa().getAuthenticationContextAttribute());
        pop.setAuthenticationHandler(radiusTokenAuthenticationHandler());
        pop.setProvider(radiusAuthenticationProvider());
        return pop;
    }

    /** Handler that validates RADIUS tokens against the configured servers. */
    @RefreshScope
    @Bean
    public RadiusTokenAuthenticationHandler radiusTokenAuthenticationHandler() {
        final RadiusTokenAuthenticationHandler a = new RadiusTokenAuthenticationHandler();
        a.setPrincipalFactory(radiusTokenPrincipalFactory());
        a.setServicesManager(servicesManager);
        a.setServers(radiusTokenServers());
        a.setFailoverOnAuthenticationFailure(
                casProperties.getAuthn().getMfa().getRadius().isFailoverOnAuthenticationFailure());
        a.setFailoverOnException(casProperties.getAuthn().getMfa().getRadius().isFailoverOnException());
        return a;
    }

    /** Default principal factory for RADIUS-authenticated principals. */
    @Bean
    public PrincipalFactory radiusTokenPrincipalFactory() {
        return new DefaultPrincipalFactory();
    }

    /** Webflow action that delegates to the RADIUS event resolver. */
    @Bean
    public Action radiusAuthenticationWebflowAction() {
        final RadiusAuthenticationWebflowAction w = new RadiusAuthenticationWebflowAction();
        w.setRadiusAuthenticationWebflowEventResolver(radiusAuthenticationWebflowEventResolver());
        return w;
    }

    /** Event resolver wired with CAS core services for the RADIUS flow. */
    @Bean
    public CasWebflowEventResolver radiusAuthenticationWebflowEventResolver() {
        final RadiusAuthenticationWebflowEventResolver r = new RadiusAuthenticationWebflowEventResolver();
        r.setAuthenticationSystemSupport(authenticationSystemSupport);
        r.setCentralAuthenticationService(centralAuthenticationService);
        r.setMultifactorAuthenticationProviderSelector(multifactorAuthenticationProviderSelector);
        r.setServicesManager(servicesManager);
        r.setTicketRegistrySupport(ticketRegistrySupport);
        r.setWarnCookieGenerator(warnCookieGenerator);
        return r;
    }

    /** Registers the mfa-radius flow into the login flow; overridable by name. */
    @ConditionalOnMissingBean(name = "radiusMultifactorWebflowConfigurer")
    @Bean
    public CasWebflowConfigurer radiusMultifactorWebflowConfigurer() {
        final RadiusMultifactorWebflowConfigurer w = new RadiusMultifactorWebflowConfigurer();
        w.setRadiusFlowRegistry(radiusFlowRegistry());
        w.setLoginFlowDefinitionRegistry(loginFlowDefinitionRegistry);
        w.setFlowBuilderServices(flowBuilderServices);
        return w;
    }

    // Registers this module's handler and (at index 0) metadata populator into
    // the shared CAS collections after the context is constructed.
    @PostConstruct
    protected void initializeRootApplicationContext() {
        authenticationHandlersResolvers.put(radiusTokenAuthenticationHandler(), null);
        authenticationMetadataPopulators.add(0, radiusAuthenticationMetaDataPopulator());
    }
}
package org.apache.solr.cloud; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import static org.apache.solr.cloud.OverseerCollectionProcessor.MAX_SHARDS_PER_NODE; import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES; import static org.apache.solr.cloud.OverseerCollectionProcessor.REPLICATION_FACTOR; import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER; import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP; import static org.apache.solr.common.params.ShardParams._ROUTE_; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Future; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.lucene.util.Constants; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util._TestUtil; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; import 
org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.ImplicitDocRouter; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkNodeProps; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CollectionParams.CollectionAction; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.update.DirectUpdateHandler2; import org.apache.solr.util.DefaultSolrThreadFactory; import org.junit.Before; import org.junit.BeforeClass; /** * Tests the Custom Sharding API. 
*/ @Slow public class CustomCollectionTest extends AbstractFullDistribZkTestBase { private static final String DEFAULT_COLLECTION = "collection1"; private static final boolean DEBUG = false; ThreadPoolExecutor executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 5, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), new DefaultSolrThreadFactory("testExecutor")); CompletionService<Object> completionService; Set<Future<Object>> pending; @BeforeClass public static void beforeThisClass2() throws Exception { assumeFalse("FIXME: This test fails under Java 8 all the time, see SOLR-4711", Constants.JRE_IS_MINIMUM_JAVA8); } @Before @Override public void setUp() throws Exception { super.setUp(); System.setProperty("numShards", Integer.toString(sliceCount)); System.setProperty("solr.xml.persist", "true"); } protected String getSolrXml() { return "solr-no-core.xml"; } public CustomCollectionTest() { fixShardCount = true; sliceCount = 2; shardCount = 4; completionService = new ExecutorCompletionService<Object>(executor); pending = new HashSet<Future<Object>>(); checkCreatedVsState = false; } @Override protected void setDistributedParams(ModifiableSolrParams params) { if (r.nextBoolean()) { // don't set shards, let that be figured out from the cloud state } else { // use shard ids rather than physical locations StringBuilder sb = new StringBuilder(); for (int i = 0; i < shardCount; i++) { if (i > 0) sb.append(','); sb.append("shard" + (i + 3)); } params.set("shards", sb.toString()); } } @Override public void doTest() throws Exception { testCustomCollectionsAPI(); testRouteFieldForHashRouter(); testCreateShardRepFactor(); if (DEBUG) { super.printLayout(); } } private void testCustomCollectionsAPI() throws Exception { String COLL_PREFIX = "implicitcoll"; // TODO: fragile - because we dont pass collection.confName, it will only // find a default if a conf set with a name matching the collection name is found, or // if there is only one conf set. 
That and the fact that other tests run first in this // env make this pretty fragile // create new collections rapid fire Map<String,List<Integer>> collectionInfos = new HashMap<String,List<Integer>>(); int replicationFactor = _TestUtil.nextInt(random(), 0, 3) + 2; int cnt = random().nextInt(6) + 1; for (int i = 0; i < cnt; i++) { int numShards = 3; int maxShardsPerNode = ((((numShards+1) * replicationFactor) / getCommonCloudSolrServer() .getZkStateReader().getClusterState().getLiveNodes().size())) + 1; CloudSolrServer client = null; try { if (i == 0) { // Test if we can create a collection through CloudSolrServer where // you havnt set default-collection // This is nice because you want to be able to create you first // collection using CloudSolrServer, and in such case there is // nothing reasonable to set as default-collection client = createCloudClient(null); } else if (i == 1) { // Test if we can create a collection through CloudSolrServer where // you have set default-collection to a non-existing collection // This is nice because you want to be able to create you first // collection using CloudSolrServer, and in such case there is // nothing reasonable to set as default-collection, but you might want // to use the same CloudSolrServer throughout the entire // lifetime of your client-application, so it is nice to be able to // set a default-collection on this CloudSolrServer once and for all // and use this CloudSolrServer to create the collection client = createCloudClient(COLL_PREFIX + i); } Map<String, Object> props = ZkNodeProps.makeMap( "router.name", ImplicitDocRouter.NAME, REPLICATION_FACTOR, replicationFactor, MAX_SHARDS_PER_NODE, maxShardsPerNode, SHARDS_PROP,"a,b,c"); createCollection(collectionInfos, COLL_PREFIX + i,props,client); } finally { if (client != null) client.shutdown(); } } Set<Entry<String,List<Integer>>> collectionInfosEntrySet = collectionInfos.entrySet(); for (Entry<String,List<Integer>> entry : collectionInfosEntrySet) { String 
collection = entry.getKey(); List<Integer> list = entry.getValue(); checkForCollection(collection, list, null); String url = getUrlFromZk(getCommonCloudSolrServer().getZkStateReader().getClusterState(), collection); HttpSolrServer collectionClient = new HttpSolrServer(url); // poll for a second - it can take a moment before we are ready to serve waitForNon403or404or503(collectionClient); } ZkStateReader zkStateReader = getCommonCloudSolrServer().getZkStateReader(); for (int j = 0; j < cnt; j++) { waitForRecoveriesToFinish(COLL_PREFIX + j, zkStateReader, false); } ClusterState clusterState = zkStateReader.getClusterState(); DocCollection coll = clusterState.getCollection(COLL_PREFIX + 0); assertEquals("implicit", ((Map)coll.get(ROUTER)).get("name") ); assertNotNull(coll.getStr(REPLICATION_FACTOR)); assertNotNull(coll.getStr(MAX_SHARDS_PER_NODE)); assertNull("A shard of a Collection configured with implicit router must have null range", coll.getSlice("a").getRange()); List<String> collectionNameList = new ArrayList<String>(); collectionNameList.addAll(collectionInfos.keySet()); log.info("Collections created : "+collectionNameList ); String collectionName = collectionNameList.get(random().nextInt(collectionNameList.size())); String url = getUrlFromZk(getCommonCloudSolrServer().getZkStateReader().getClusterState(), collectionName); HttpSolrServer collectionClient = new HttpSolrServer(url); // lets try and use the solrj client to index a couple documents collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1, "humpty dumpy sat on a wall", _ROUTE_,"a")); collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1, "humpty dumpy3 sat on a walls", _ROUTE_,"a")); collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1, "humpty dumpy2 sat on a walled", _ROUTE_,"a")); collectionClient.commit(); assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound()); assertEquals(0, collectionClient.query(new 
SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound()); assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound()); collectionClient.deleteByQuery("*:*"); collectionClient.commit(true,true); assertEquals(0, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound()); UpdateRequest up = new UpdateRequest(); up.setParam(_ROUTE_, "c"); up.setParam("commit","true"); up.add(getDoc(id, 9, i1, -600, tlong, 600, t1, "humpty dumpy sat on a wall")); up.add(getDoc(id, 10, i1, -600, tlong, 600, t1, "humpty dumpy3 sat on a walls")); up.add(getDoc(id, 11, i1, -600, tlong, 600, t1, "humpty dumpy2 sat on a walled")); collectionClient.request(up); assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound()); assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound()); assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"c")).getResults().getNumFound()); //Testing CREATESHARD ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionAction.CREATESHARD.toString()); params.set("collection", collectionName); params.set("shard", "x"); SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); createNewSolrServer("", getBaseUrl((HttpSolrServer) clients.get(0))).request(request); waitForCollection(zkStateReader,collectionName,4); //wait for all the replicas to become active int attempts = 0; while(true){ if(attempts>30 ) fail("Not enough active replicas in the shard 'x'"); attempts++; int activeReplicaCount = 0; for (Replica x : zkStateReader.getClusterState().getCollection(collectionName).getSlice("x").getReplicas()) { if("active".equals(x.getStr("state"))) activeReplicaCount++; } Thread.sleep(500); if(activeReplicaCount >= replicationFactor) break; } log.info(zkStateReader.getClusterState().toString()); collectionClient.add(getDoc(id, 66, i1, -600, 
            // (continuation of the implicit-router test whose head is above this chunk:
            // the document built here is routed explicitly to shard "x" via _ROUTE_)
            tlong, 600, t1, "humpty dumpy sat on a wall", _ROUTE_,"x"));
    collectionClient.commit();
    // the doc must be visible when querying shard "x" directly
    assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"x")).getResults().getNumFound());

    // Second half of the test: an implicit-router collection that routes by a
    // document *field* (router.field) instead of the _ROUTE_ request parameter.
    int numShards = 4;
    replicationFactor = _TestUtil.nextInt(random(), 0, 3) + 2;
    // spread numShards*replicationFactor cores over the live nodes, rounding up
    int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrServer()
        .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;

    CloudSolrServer client = null;
    String shard_fld = "shard_s";
    try {
      client = createCloudClient(null);
      Map<String, Object> props = ZkNodeProps.makeMap(
          "router.name", ImplicitDocRouter.NAME,
          REPLICATION_FACTOR, replicationFactor,
          MAX_SHARDS_PER_NODE, maxShardsPerNode,
          SHARDS_PROP,"a,b,c,d",
          "router.field", shard_fld);

      collectionName = COLL_PREFIX + "withShardField";
      createCollection(collectionInfos, collectionName,props,client);
    } finally {
      if (client != null) client.shutdown();
    }

    List<Integer> list = collectionInfos.get(collectionName);
    // verify the collection and all its slices/replicas actually came up
    checkForCollection(collectionName, list, null);

    url = getUrlFromZk(getCommonCloudSolrServer().getZkStateReader().getClusterState(), collectionName);

    collectionClient = new HttpSolrServer(url);

    // poll for a second - it can take a moment before we are ready to serve
    waitForNon403or404or503(collectionClient);

    collectionClient = new HttpSolrServer(url);
    // lets try and use the solrj client to index a couple documents
    // all three docs carry shard_s="a", so they must all land on shard "a"
    collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
        "humpty dumpy sat on a wall", shard_fld,"a"));
    collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
        "humpty dumpy3 sat on a walls", shard_fld,"a"));
    collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
        "humpty dumpy2 sat on a walled", shard_fld,"a"));
    collectionClient.commit();

    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
    // nothing was routed to shard "b"
    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
    //TODO debug the following case
    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
  }

  /**
   * Creates a collection using the (default) hash router but with routing driven by
   * a document field ({@code router.field=shard_s}); verifies that documents sharing
   * the same field value are co-located and queryable via {@code _ROUTE_}, including
   * composite-id style values ("b!doc1" queried with route "b!").
   */
  private void testRouteFieldForHashRouter()throws Exception{
    String collectionName = "routeFieldColl";
    int numShards = 4;
    int replicationFactor = 2;
    // spread numShards*replicationFactor cores over the live nodes, rounding up
    int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrServer()
        .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;

    HashMap<String, List<Integer>> collectionInfos = new HashMap<String, List<Integer>>();
    CloudSolrServer client = null;
    String shard_fld = "shard_s";
    try {
      client = createCloudClient(null);
      Map<String, Object> props = ZkNodeProps.makeMap(
          REPLICATION_FACTOR, replicationFactor,
          MAX_SHARDS_PER_NODE, maxShardsPerNode,
          NUM_SLICES,numShards,
          "router.field", shard_fld);

      createCollection(collectionInfos, collectionName,props,client);
    } finally {
      if (client != null) client.shutdown();
    }

    List<Integer> list = collectionInfos.get(collectionName);
    checkForCollection(collectionName, list, null);

    String url = getUrlFromZk(getCommonCloudSolrServer().getZkStateReader().getClusterState(), collectionName);

    HttpSolrServer collectionClient = new HttpSolrServer(url);

    // poll for a second - it can take a moment before we are ready to serve
    waitForNon403or404or503(collectionClient);

    collectionClient = new HttpSolrServer(url);
    // lets try and use the solrj client to index a couple documents
    collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
        "humpty dumpy sat on a wall", shard_fld,"a"));
    collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
        "humpty dumpy3 sat on a walls", shard_fld,"a"));
    collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
        "humpty dumpy2 sat on a walled", shard_fld,"a"));
    collectionClient.commit();

    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
    //TODO debug the following case
    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());

    // composite-id style route value in the routing field: "b!doc1" must be
    // addressable with route prefix "b!"
    collectionClient.deleteByQuery("*:*");
    collectionClient.commit();

    collectionClient.add (getDoc( id,100,shard_fld, "b!doc1"));
    collectionClient.commit();
    assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "b!")).getResults().getNumFound());
  }

  /**
   * Verifies that the CREATESHARD collection-API call on an implicit-router
   * collection honours the collection's replicationFactor (here 1): the new shard
   * "x" must come up with exactly one replica.
   */
  private void testCreateShardRepFactor() throws Exception {
    String collectionName = "testCreateShardRepFactor";
    HashMap<String, List<Integer>> collectionInfos = new HashMap<String, List<Integer>>();
    CloudSolrServer client = null;
    try {
      client = createCloudClient(null);
      Map<String, Object> props = ZkNodeProps.makeMap(
          REPLICATION_FACTOR, 1,
          MAX_SHARDS_PER_NODE, 5,
          NUM_SLICES, 2,
          "shards", "a,b",
          "router.name", "implicit");

      createCollection(collectionInfos, collectionName, props, client);
    } finally {
      if (client != null) client.shutdown();
    }

    ZkStateReader zkStateReader = getCommonCloudSolrServer().getZkStateReader();
    waitForRecoveriesToFinish(collectionName, zkStateReader, false);

    // issue CREATESHARD for a new shard "x" through the collections admin API
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("action", CollectionAction.CREATESHARD.toString());
    params.set("collection", collectionName);
    params.set("shard", "x");
    SolrRequest request = new QueryRequest(params);
    request.setPath("/admin/collections");
    createNewSolrServer("", getBaseUrl((HttpSolrServer) clients.get(0))).request(request);

    waitForRecoveriesToFinish(collectionName, zkStateReader, false);

    // poll cluster state (up to ~15s) until shard "x" shows at least one replica
    int replicaCount = 0;
    int attempts = 0;
    while (true) {
      if (attempts > 30) fail("Not enough active replicas in the shard 'x'");
      zkStateReader.updateClusterState(true);
      attempts++;
      replicaCount = zkStateReader.getClusterState().getSlice(collectionName, "x").getReplicas().size();
      if (replicaCount >= 1) break;
      Thread.sleep(500);
    }

    assertEquals("CREATESHARD API created more than replicationFactor number of replicas", 1, replicaCount);
  }

  /**
   * Randomly routes queries either through the base-class path or through the
   * common cloud client, optionally pinning the "collection" parameter — exercises
   * several client-side query paths across test runs.
   */
  @Override
  protected QueryResponse queryServer(ModifiableSolrParams params) throws SolrServerException {
    if (r.nextBoolean())
      return super.queryServer(params);

    if (r.nextBoolean())
      params.set("collection",DEFAULT_COLLECTION);

    QueryResponse rsp = getCommonCloudSolrServer().query(params);
    return rsp;
  }

  /** Clears test-scoped system properties and restores commit-on-close. */
  @Override
  public void tearDown() throws Exception {
    super.tearDown();
    System.clearProperty("numShards");
    System.clearProperty("zkHost");
    System.clearProperty("solr.xml.persist");

    // insurance
    DirectUpdateHandler2.commitOnClose = true;
  }
}
/*
 * Copyright 2016 Lutz Fischer <l.fischer@ed.ac.uk>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.rappsilber.gui.components.memory;

import java.awt.Component;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.beans.PropertyChangeSupport;
import java.util.LinkedList;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JFrame;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.event.AncestorEvent;
import javax.swing.event.AncestorListener;
import org.rappsilber.gui.components.StackTraceMonitor;
import org.rappsilber.utils.ObjectWrapper;
import org.rappsilber.utils.RArrayUtils;
import org.rappsilber.utils.StringUtils;
import org.rappsilber.utils.UStackTraces;

/**
 * Swing panel that periodically samples JVM memory usage (free/total/max via
 * {@link Runtime}) and shows it in a text field, with a popup menu offering
 * manual GC, optional usage logging, automatic GC on low memory, and stack-trace
 * display/logging.
 *
 * @author Lutz Fischer <l.fischer@ed.ac.uk>
 */
public class Memory extends javax.swing.JPanel {
    // support for bound-property change notifications (used by setShowGCButton)
    private final transient PropertyChangeSupport propertyChangeSupport = new java.beans.PropertyChangeSupport(this);
    /** Bound-property name fired when the GC-button visibility changes. */
    public static final String PROP_SHOWGCBUTTON = "showGCButton";
    // daemon timer that drives the periodic memory scan
    java.util.Timer m_scanTimer = new java.util.Timer("Timer - FreeMemoryScan",true);
    // NOTE(review): m_timeout is never read in this class — TODO confirm it is unused
    private int m_timeout = 600;
    Runtime runtime = Runtime.getRuntime();
    private boolean showLogButton = true;
    private boolean showAutoGCButton = true;
    private boolean showGCButton = true;
    // lazily created window showing thread stack traces
    private StackTraceMonitor stacktracemonitor;
    //private boolean agc = false;

    /**
     * Periodic task that samples used memory, keeps a sliding window of recent
     * samples, updates the display/log every {@code updateSteps} samples, and can
     * trigger an automatic GC when less than 10&nbsp;MB headroom remains.
     */
    protected class ScanTask extends TimerTask {
        // guards against overlapping executions of run()
        AtomicBoolean running = new AtomicBoolean(false);
        double recentMinFreeMem = 0;
        double recentMaxFreeMem = 0;
        // sliding window of recent used-memory samples (bytes)
        LinkedList<Double> recent = new LinkedList<>();
        int maxRecent=100;
        // UI/log update happens every this many samples
        int updateSteps=10;
        int logMemory = 0;
        // cooldown counter: suppresses auto-GC for 100 cycles after one fired
        int didgc = 0;
        int step=0;

        @Override
        public void run() {
            // skip this tick entirely if a previous run is still in progress
            if (running.compareAndSet(false, true)) {
                try {
                    double fm = runtime.freeMemory();
                    String fmu = "B"; // NOTE(review): unused local
                    double mm = runtime.maxMemory();
                    double tm = runtime.totalMemory();
                    double um = tm-fm; // used = total - free
                    recent.add(um);
                    if (recent.size()>maxRecent) {
                        recent.removeFirst();
                    }
                    if (++step==updateSteps) {
                        step=0;
                        // min/max/average of the recent used-memory window
                        ObjectWrapper<Double> min= new ObjectWrapper<>();
                        ObjectWrapper<Double> max = new ObjectWrapper<>();
                        ObjectWrapper<Double> average = new ObjectWrapper<>();
                        RArrayUtils.minmaxaverage(recent,min,max,average);
                        String message = "Used: " + StringUtils.toHuman(um) + " of " + StringUtils.toHuman(mm) +
                                " (Free:" + StringUtils.toHuman(fm) + " Total:" + StringUtils.toHuman(tm) +
                                " Max:"+ StringUtils.toHuman(mm) +") (recent used:[" + StringUtils.toHuman(min.value) +
                                ".."+ StringUtils.toHuman(average.value) +".." + StringUtils.toHuman(max.value) +"])";
                        if (mckLog.isSelected()) {
                            // log only every 60th update to keep the log readable
                            if (logMemory++ % 60 == 0 ) {
                                Logger.getLogger(Memory.class.getName()).log(Level.INFO,message);
                            }
                        } else
                            logMemory = 0;
                        if (txtMemory!=null) {
                            txtMemory.setText(message);
                        }
                        // auto-GC when headroom (max - used) drops below 10 MB and
                        // no auto-GC fired recently (didgc cooldown)
                        if (mckAGC.isSelected() && mm-um < 10*1024*1024 && didgc== 0) {
                            Logger.getLogger(Memory.class.getName()).log(Level.INFO,"AutoGC triggered");
                            message = "Used: " + StringUtils.toHuman(um) + " of " + StringUtils.toHuman(mm) +
                                    " (Free:" + StringUtils.toHuman(fm) + " Total:" + StringUtils.toHuman(tm) +
                                    " Max:"+ StringUtils.toHuman(mm) +")";
                            Logger.getLogger(Memory.class.getName()).log(Level.INFO,"Memory before GC:" + message);
                            System.gc();
                            System.gc();
                            // re-sample and log the effect of the GC
                            fm = runtime.freeMemory();
                            mm = runtime.maxMemory();
                            tm = runtime.totalMemory();
                            um = tm-fm;
                            message = "Used: " + StringUtils.toHuman(um) + " of " + StringUtils.toHuman(mm) +
                                    " (Free:" + StringUtils.toHuman(fm) + " Total:" + StringUtils.toHuman(tm) +
                                    " Max:"+ StringUtils.toHuman(mm) +")";
                            Logger.getLogger(Memory.class.getName()).log(Level.INFO,"Memory after GC:" + message);
                            didgc=100;
                        } else if (didgc>0) {
                            didgc--;
                        }
                    }
                } catch (Exception e) {
                    Logger.getLogger(Memory.class.getName()).log(Level.INFO,"Error during memory display:",e);
                }
                running.set(false);
            }
        }
    }

    /**
     * Creates new form Memory
     */
    public Memory() {
        initComponents();
        // NOTE(review): a ScanTask is scheduled here at 100 ms AND another one at
        // 1000 ms in windowOpened below, so two tasks may run concurrently once the
        // window opens — confirm whether both schedules are intended.
        m_scanTimer.scheduleAtFixedRate(new ScanTask(), 100, 100);
        // hook the enclosing window (once this panel is added to a hierarchy) so the
        // timer is cancelled when the window closes
        this.addAncestorListener(new AncestorListener() {
            public void ancestorAdded(AncestorEvent event) {
                // walk up to the top-level container
                Component c = Memory.this;
                while (c.getParent() != null) {
                    c = c.getParent();
                }
                if (c instanceof java.awt.Window) {
                    ((java.awt.Window) c).addWindowListener(new WindowListener() {
                        public void windowOpened(WindowEvent e) {
                            m_scanTimer.scheduleAtFixedRate(new ScanTask(), 1000, 1000);
                        }
                        public void windowClosing(WindowEvent e) {
                        }
                        public void windowClosed(WindowEvent e) {
                            // stop scanning when the hosting window goes away
                            m_scanTimer.cancel();
                        }
                        public void windowIconified(WindowEvent e) {
                        }
                        public void windowDeiconified(WindowEvent e) {
                        }
                        public void windowActivated(WindowEvent e) {
                        }
                        public void windowDeactivated(WindowEvent e) {
                        }
                    });
                }
            }
            public void ancestorRemoved(AncestorEvent event) {
            }
            public void ancestorMoved(AncestorEvent event) {
            }
        });
    }

    // NOTE(review): finalize() is deprecated and not guaranteed to run; the
    // windowClosed handler above already cancels the timer (which is a daemon
    // timer anyway) — consider removing this override.
    @Override
    protected void finalize() {
        m_scanTimer.cancel();
    }

    /**
     * @return the showLogButton
     */
    public boolean isShowLogButton() {
        return showLogButton;
    }

    /**
     * @return the showAutoGCButton
     */
    public boolean isShowAutoGCButton() {
        return showAutoGCButton;
    }

    /**
     * @return the showGCButton
     */
    public boolean isShowGCButton() {
        return showGCButton;
    }

    /**
     * @param showGCButton the showGCButton to set
     */
    public void setShowGCButton(boolean showGCButton) {
        boolean oldShowGCButton = this.showGCButton;
        this.showGCButton = showGCButton;
        // fire the bound-property event before toggling visibility
        propertyChangeSupport.firePropertyChange(PROP_SHOWGCBUTTON, oldShowGCButton, showGCButton);
        this.gc.setVisible(showGCButton);
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        utilsmenu = new javax.swing.JMenu();
        mGC = new javax.swing.JMenuItem();
        mckLog = new javax.swing.JCheckBoxMenuItem();
        mckAGC = new javax.swing.JCheckBoxMenuItem();
        mStackTraces = new javax.swing.JMenuItem();
        mStackTracesLog = new javax.swing.JMenuItem();
        txtMemory = new javax.swing.JTextField();
        gc = new javax.swing.JButton();

        utilsmenu.setText("jMenu1");

        mGC.setText("Garbage Collection");
        mGC.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                mGCActionPerformed(evt);
            }
        });
        utilsmenu.add(mGC);

        mckLog.setText("Log Memory Usage");
        utilsmenu.add(mckLog);

        mckAGC.setText("Automatic GC on low memory");
        utilsmenu.add(mckAGC);

        mStackTraces.setText("show statck traces");
        mStackTraces.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                mStackTracesActionPerformed(evt);
            }
        });
        utilsmenu.add(mStackTraces);

        mStackTracesLog.setText("log stack traces");
        mStackTracesLog.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                mStackTracesLogActionPerformed(evt);
            }
        });
        utilsmenu.add(mStackTracesLog);

        gc.setText("...");
        gc.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                gcActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addComponent(txtMemory, javax.swing.GroupLayout.DEFAULT_SIZE, 293, Short.MAX_VALUE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(gc)
                .addGap(0, 0, 0))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addComponent(txtMemory)
                    .addComponent(gc, javax.swing.GroupLayout.PREFERRED_SIZE, 19, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(0, 0, 0))
        );
    }// </editor-fold>//GEN-END:initComponents

    // The "..." button opens the utils popup menu (GC / logging / stack traces)
    // rather than triggering a GC directly.
    private void gcActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_gcActionPerformed
//        utilsmenu.setVisible(true);
        utilsmenu.getPopupMenu().show(this, gc.getX(), gc.getY());
//        doGC();
    }//GEN-LAST:event_gcActionPerformed

    /**
     * Runs an explicit garbage collection, logging memory usage before and after.
     */
    protected void doGC() {
        double fm = runtime.freeMemory();
        double mm = runtime.maxMemory();
        double tm = runtime.totalMemory();
        double um = tm-fm;
        Logger.getLogger(this.getClass().getName()).log(Level.INFO,"GC triggered");
        String message = "Used: " + StringUtils.toHuman(um) + " of " + StringUtils.toHuman(mm) +
                " (Free:" + StringUtils.toHuman(fm) + " Total:" + StringUtils.toHuman(tm) +
                " Max:"+ StringUtils.toHuman(mm) +")";
        Logger.getLogger(Memory.class.getName()).log(Level.INFO,"Memory before GC:" + message);
        System.gc();
        System.gc();
        fm = runtime.freeMemory();
        mm = runtime.maxMemory();
        tm = runtime.totalMemory();
        um = tm-fm;
        message = "Used: " + StringUtils.toHuman(um) + " of " + StringUtils.toHuman(mm) +
                " (Free:" + StringUtils.toHuman(fm) + " Total:" + StringUtils.toHuman(tm) +
                " Max:"+ StringUtils.toHuman(mm) +")";
        Logger.getLogger(Memory.class.getName()).log(Level.INFO,"Memory after GC:" + message);
    }

    private void mGCActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_mGCActionPerformed
        doGC();
    }//GEN-LAST:event_mGCActionPerformed

    // Shows the stack-trace monitor window (created lazily on first use).
    private void mStackTracesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_mStackTracesActionPerformed
        // NOTE(review): the toString() result is discarded — presumably a leftover
        // debug call; confirm whether it can be removed
        UStackTraces.getStackTraces().toString();
        if (stacktracemonitor == null) {
            stacktracemonitor = new StackTraceMonitor();
        }
        stacktracemonitor.setVisible(true);
    }//GEN-LAST:event_mStackTracesActionPerformed

    // Writes all current thread stack traces to the log at INFO level.
    private void mStackTracesLogActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_mStackTracesLogActionPerformed
        UStackTraces.logStackTraces(Level.INFO);
    }//GEN-LAST:event_mStackTracesLogActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton gc;
    private javax.swing.JMenuItem mGC;
    private javax.swing.JMenuItem mStackTraces;
    private javax.swing.JMenuItem mStackTracesLog;
    private javax.swing.JCheckBoxMenuItem mckAGC;
    private javax.swing.JCheckBoxMenuItem mckLog;
    private javax.swing.JTextField txtMemory;
    private javax.swing.JMenu utilsmenu;
    // End of variables declaration//GEN-END:variables
}
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.grpc.SearchPb;
import com.netflix.conductor.grpc.TaskServicePb;
import com.netflix.conductor.proto.TaskPb;
import com.netflix.conductor.proto.TaskSummaryPb;
import com.netflix.conductor.service.ExecutionService;
import com.netflix.conductor.service.TaskService;

import io.grpc.stub.StreamObserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;

/**
 * Unit tests for {@link TaskServiceImpl}'s search/searchV2 gRPC endpoints: a page
 * size above the configured maximum must terminate the stream with
 * INVALID_ARGUMENT, and a valid request must relay the {@link TaskService} result
 * converted to protobuf.
 */
public class TaskServiceImplTest {

    /** Maximum search window the service under test is configured with. */
    private static final int MAX_SEARCH_SIZE = 5000;

    /** How long each test waits for the observed stream to terminate. */
    private static final long AWAIT_MS = 10;

    @Mock private TaskService taskService;

    @Mock private ExecutionService executionService;

    private TaskServiceImpl taskServiceImpl;

    @Before
    public void init() {
        initMocks(this);
        taskServiceImpl = new TaskServiceImpl(executionService, taskService, MAX_SEARCH_SIZE);
    }

    /**
     * Observer that records the last emitted value and the terminal error, and
     * releases a latch on either terminal event (onError/onCompleted). Replaces the
     * four near-identical anonymous observers the tests previously duplicated.
     */
    private static final class RecordingObserver<T> implements StreamObserver<T> {
        final CountDownLatch terminated = new CountDownLatch(1);
        final AtomicReference<T> value = new AtomicReference<>();
        final AtomicReference<Throwable> error = new AtomicReference<>();

        @Override
        public void onNext(T v) {
            value.set(v);
        }

        @Override
        public void onError(Throwable t) {
            error.set(t);
            terminated.countDown();
        }

        @Override
        public void onCompleted() {
            terminated.countDown();
        }

        /**
         * Waits for stream termination. Unlike the original tests, the await result
         * is asserted: on a timeout this fails with a clear message instead of a
         * NullPointerException from the follow-up value/error access.
         */
        void awaitTerminated() throws InterruptedException {
            assertTrue(
                    "stream did not terminate within " + AWAIT_MS + " ms",
                    terminated.await(AWAIT_MS, TimeUnit.MILLISECONDS));
        }
    }

    /** Builds the standard search request used by every test, varying only the page size. */
    private static SearchPb.Request searchRequest(int size) {
        return SearchPb.Request.newBuilder()
                .setStart(1)
                .setSize(size)
                .setSort("strings")
                .setQuery("")
                .setFreeText("*")
                .build();
    }

    @Test
    public void searchExceptionTest() throws InterruptedException {
        RecordingObserver<TaskServicePb.TaskSummarySearchResult> observer = new RecordingObserver<>();

        // A page size above MAX_SEARCH_SIZE must be rejected with INVALID_ARGUMENT.
        taskServiceImpl.search(searchRequest(50000), observer);
        observer.awaitTerminated();

        assertEquals(
                "INVALID_ARGUMENT: Cannot return more than 5000 results",
                observer.error.get().getMessage());
    }

    @Test
    public void searchV2ExceptionTest() throws InterruptedException {
        RecordingObserver<TaskServicePb.TaskSearchResult> observer = new RecordingObserver<>();

        taskServiceImpl.searchV2(searchRequest(50000), observer);
        observer.awaitTerminated();

        assertEquals(
                "INVALID_ARGUMENT: Cannot return more than 5000 results",
                observer.error.get().getMessage());
    }

    @Test
    public void searchTest() throws InterruptedException {
        // One TaskSummary hit returned by the mocked TaskService.
        SearchResult<TaskSummary> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(new TaskSummary()));
        when(taskService.search(1, 1, "strings", "*", "")).thenReturn(searchResult);

        RecordingObserver<TaskServicePb.TaskSummarySearchResult> observer = new RecordingObserver<>();
        taskServiceImpl.search(searchRequest(1), observer);
        observer.awaitTerminated();

        TaskServicePb.TaskSummarySearchResult result = observer.value.get();
        assertEquals(1, result.getTotalHits());
        // A default-constructed TaskSummary converts to the default proto message.
        assertEquals(
                TaskSummaryPb.TaskSummary.newBuilder().build(),
                result.getResultsList().get(0));
    }

    @Test
    public void searchV2Test() throws InterruptedException {
        // One Task hit returned by the mocked TaskService for the V2 endpoint.
        SearchResult<Task> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(new Task()));
        when(taskService.searchV2(1, 1, "strings", "*", "")).thenReturn(searchResult);

        RecordingObserver<TaskServicePb.TaskSearchResult> observer = new RecordingObserver<>();
        taskServiceImpl.searchV2(searchRequest(1), observer);
        observer.awaitTerminated();

        TaskServicePb.TaskSearchResult result = observer.value.get();
        assertEquals(1, result.getTotalHits());
        assertEquals(
                TaskPb.Task.newBuilder().setCallbackFromWorker(true).build(),
                result.getResultsList().get(0));
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1/vulnerability.proto package com.google.cloud.securitycenter.v1; /** * * * <pre> * Additional Links * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.Reference} */ public final class Reference extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.Reference) ReferenceOrBuilder { private static final long serialVersionUID = 0L; // Use Reference.newBuilder() to construct. 
private Reference(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Reference() { source_ = ""; uri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Reference(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Reference( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); source_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); uri_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_Reference_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_Reference_fieldAccessorTable 
.ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.Reference.class, com.google.cloud.securitycenter.v1.Reference.Builder.class); } public static final int SOURCE_FIELD_NUMBER = 1; private volatile java.lang.Object source_; /** * * * <pre> * Source of the reference e.g. NVD * </pre> * * <code>string source = 1;</code> * * @return The source. */ @java.lang.Override public java.lang.String getSource() { java.lang.Object ref = source_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); source_ = s; return s; } } /** * * * <pre> * Source of the reference e.g. NVD * </pre> * * <code>string source = 1;</code> * * @return The bytes for source. */ @java.lang.Override public com.google.protobuf.ByteString getSourceBytes() { java.lang.Object ref = source_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); source_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int URI_FIELD_NUMBER = 2; private volatile java.lang.Object uri_; /** * * * <pre> * Uri for the mentioned source e.g. * https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527. * </pre> * * <code>string uri = 2;</code> * * @return The uri. */ @java.lang.Override public java.lang.String getUri() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } } /** * * * <pre> * Uri for the mentioned source e.g. * https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527. * </pre> * * <code>string uri = 2;</code> * * @return The bytes for uri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(source_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, source_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uri_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(source_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, source_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uri_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v1.Reference)) { return super.equals(obj); } com.google.cloud.securitycenter.v1.Reference other = (com.google.cloud.securitycenter.v1.Reference) obj; if (!getSource().equals(other.getSource())) return false; if (!getUri().equals(other.getUri())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return 
true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SOURCE_FIELD_NUMBER; hash = (53 * hash) + getSource().hashCode(); hash = (37 * hash) + URI_FIELD_NUMBER; hash = (53 * hash) + getUri().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1.Reference parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.Reference parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Reference parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.Reference parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Reference parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.Reference parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Reference parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, 
input); } public static com.google.cloud.securitycenter.v1.Reference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Reference parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.Reference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Reference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.Reference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.securitycenter.v1.Reference prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// NOTE(review): protoc-generated code for google.cloud.securitycenter.v1.Reference
// (tail of toBuilder(), the Builder class, default instance and parser). Generated
// sources must never be hand-edited — regenerate from the .proto instead. This
// region was re-wrapped onto proper lines (the flattened form broke the embedded
// "//" comments); all code tokens are unchanged. The leading fragment below is the
// second half of toBuilder(), whose first half precedes this chunk.
new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Additional Links
   * </pre>
   *
   * Protobuf type {@code google.cloud.securitycenter.v1.Reference}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.Reference)
      com.google.cloud.securitycenter.v1.ReferenceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycenter.v1.VulnerabilityProto
          .internal_static_google_cloud_securitycenter_v1_Reference_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycenter.v1.VulnerabilityProto
          .internal_static_google_cloud_securitycenter_v1_Reference_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycenter.v1.Reference.class,
              com.google.cloud.securitycenter.v1.Reference.Builder.class);
    }

    // Construct using com.google.cloud.securitycenter.v1.Reference.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      source_ = "";
      uri_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycenter.v1.VulnerabilityProto
          .internal_static_google_cloud_securitycenter_v1_Reference_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1.Reference getDefaultInstanceForType() {
      return com.google.cloud.securitycenter.v1.Reference.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1.Reference build() {
      com.google.cloud.securitycenter.v1.Reference result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1.Reference buildPartial() {
      com.google.cloud.securitycenter.v1.Reference result =
          new com.google.cloud.securitycenter.v1.Reference(this);
      result.source_ = source_;
      result.uri_ = uri_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.securitycenter.v1.Reference) {
        return mergeFrom((com.google.cloud.securitycenter.v1.Reference) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.securitycenter.v1.Reference other) {
      if (other == com.google.cloud.securitycenter.v1.Reference.getDefaultInstance()) return this;
      if (!other.getSource().isEmpty()) {
        source_ = other.source_;
        onChanged();
      }
      if (!other.getUri().isEmpty()) {
        uri_ = other.uri_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.securitycenter.v1.Reference parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.securitycenter.v1.Reference) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object source_ = "";

    /**
     *
     *
     * <pre>
     * Source of the reference e.g. NVD
     * </pre>
     *
     * <code>string source = 1;</code>
     *
     * @return The source.
     */
    public java.lang.String getSource() {
      java.lang.Object ref = source_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        source_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Source of the reference e.g. NVD
     * </pre>
     *
     * <code>string source = 1;</code>
     *
     * @return The bytes for source.
     */
    public com.google.protobuf.ByteString getSourceBytes() {
      java.lang.Object ref = source_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        source_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Source of the reference e.g. NVD
     * </pre>
     *
     * <code>string source = 1;</code>
     *
     * @param value The source to set.
     * @return This builder for chaining.
     */
    public Builder setSource(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      source_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Source of the reference e.g. NVD
     * </pre>
     *
     * <code>string source = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSource() {
      source_ = getDefaultInstance().getSource();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Source of the reference e.g. NVD
     * </pre>
     *
     * <code>string source = 1;</code>
     *
     * @param value The bytes for source to set.
     * @return This builder for chaining.
     */
    public Builder setSourceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      source_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object uri_ = "";

    /**
     *
     *
     * <pre>
     * Uri for the mentioned source e.g.
     * https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527.
     * </pre>
     *
     * <code>string uri = 2;</code>
     *
     * @return The uri.
     */
    public java.lang.String getUri() {
      java.lang.Object ref = uri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        uri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Uri for the mentioned source e.g.
     * https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527.
     * </pre>
     *
     * <code>string uri = 2;</code>
     *
     * @return The bytes for uri.
     */
    public com.google.protobuf.ByteString getUriBytes() {
      java.lang.Object ref = uri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        uri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Uri for the mentioned source e.g.
     * https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527.
     * </pre>
     *
     * <code>string uri = 2;</code>
     *
     * @param value The uri to set.
     * @return This builder for chaining.
     */
    public Builder setUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      uri_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Uri for the mentioned source e.g.
     * https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527.
     * </pre>
     *
     * <code>string uri = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUri() {
      uri_ = getDefaultInstance().getUri();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Uri for the mentioned source e.g.
     * https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527.
     * </pre>
     *
     * <code>string uri = 2;</code>
     *
     * @param value The bytes for uri to set.
     * @return This builder for chaining.
     */
    public Builder setUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      uri_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.Reference)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.Reference)
  private static final com.google.cloud.securitycenter.v1.Reference DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.Reference();
  }

  public static com.google.cloud.securitycenter.v1.Reference getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<Reference> PARSER =
      new com.google.protobuf.AbstractParser<Reference>() {
        @java.lang.Override
        public Reference parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Reference(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<Reference> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Reference> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securitycenter.v1.Reference getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.client.logging; import static org.mockito.Mockito.never; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.kaaproject.kaa.client.channel.KaaChannelManager; import org.kaaproject.kaa.client.channel.LogTransport; import org.kaaproject.kaa.client.channel.failover.FailoverManager; import org.kaaproject.kaa.client.context.ExecutorContext; import org.kaaproject.kaa.client.logging.future.RecordFuture; import org.kaaproject.kaa.common.endpoint.gen.LogDeliveryErrorCode; import org.kaaproject.kaa.common.endpoint.gen.LogDeliveryStatus; import org.kaaproject.kaa.common.endpoint.gen.LogSyncRequest; import org.kaaproject.kaa.common.endpoint.gen.LogSyncResponse; import org.kaaproject.kaa.common.endpoint.gen.SyncResponseResultType; import org.kaaproject.kaa.schema.base.Log; import org.mockito.Mockito; import org.springframework.test.util.ReflectionTestUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.concurrent.AbstractExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import 
java.util.concurrent.TimeUnit;

/**
 * Unit tests for {@code DefaultLogCollector}: upload-threshold triggering, storage-status
 * driven uploads, success/failure delivery callbacks, delivery timeouts, per-record
 * futures, and the max-parallel-uploads limit.
 *
 * <p>Formatting was restored (the file had been whitespace-mangled); all code tokens are
 * unchanged. Several tests rely on real wall-clock waits ({@code Mockito.timeout},
 * {@code Thread.sleep}), so they are timing-sensitive — NOTE(review): statement order
 * matters throughout and must not be rearranged.
 */
public class DefaultLogCollectorTest {
    // Shared mocked ExecutorContext: API executor runs tasks inline on the caller's
    // thread; callback and scheduled executors share one real single-thread executor.
    private static ExecutorContext executorContext;
    private static ScheduledExecutorService executor;

    @BeforeClass
    public static void beforeSuite() {
        executorContext = Mockito.mock(ExecutorContext.class);
        executor = Executors.newSingleThreadScheduledExecutor();
        Mockito.when(executorContext.getApiExecutor()).thenReturn(new AbstractExecutorService() {
            @Override
            public void execute(Runnable command) {
                // Run synchronously so test assertions see API-executor work immediately.
                command.run();
            }

            @Override
            public List<Runnable> shutdownNow() {
                // NOTE(review): stub returns null, violating the ExecutorService contract
                // (should be an empty list); harmless here because tests never call it.
                return null;
            }

            @Override
            public void shutdown() {
                // No-op stub; lifecycle is not exercised by these tests.
            }

            @Override
            public boolean isTerminated() {
                // No-op stub; lifecycle is not exercised by these tests.
                return false;
            }

            @Override
            public boolean isShutdown() {
                // No-op stub; lifecycle is not exercised by these tests.
                return false;
            }

            @Override
            public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
                // No-op stub; lifecycle is not exercised by these tests.
                return false;
            }
        });
        Mockito.when(executorContext.getCallbackExecutor()).thenReturn(executor);
        Mockito.when(executorContext.getScheduledExecutor()).thenReturn(executor);
    }

    @AfterClass
    public static void afterSuite() {
        executor.shutdown();
    }

    /** Count threshold of 5: no sync after 4 records, one sync after the 5th. */
    @Test
    public void testDefaultUploadConfiguration() {
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        DefaultLogUploadStrategy strategy = new DefaultLogUploadStrategy();
        strategy.setCountThreshold(5);
        logCollector.setStrategy(strategy);
        Log record = new Log();
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        verify(transport, times(0)).sync();
        logCollector.addLogRecord(record);
        verify(transport, Mockito.timeout(1000).times(1)).sync();
    }

    /** Upload decision follows the storage status: small volume -> no sync, 1MB -> sync. */
    @Test
    public void testStorageStatusAffect() {
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        LogStorage storage = Mockito.mock(LogStorage.class);
        logCollector.setStorage(storage);
        Log record = new Log();
        Mockito.when(storage.addLogRecord(Mockito.any(LogRecord.class))).thenReturn(new BucketInfo(1, 1));
        Mockito.when(storage.getStatus()).thenReturn(new LogStorageStatus() {
            @Override
            public long getRecordCount() {
                return 1;
            }

            @Override
            public long getConsumedVolume() {
                return 1;
            }
        });
        logCollector.addLogRecord(record);
        verify(transport, times(0)).sync();
        Mockito.when(storage.getStatus()).thenReturn(new LogStorageStatus() {
            @Override
            public long getRecordCount() {
                return 1;
            }

            @Override
            public long getConsumedVolume() {
                return 1024 * 1024;
            }
        });
        logCollector.addLogRecord(record);
        verify(transport, Mockito.timeout(1000).times(1)).sync();
    }

    /** A SUCCESS delivery status triggers the success listener and a follow-up sync. */
    @Test
    public void testLogUploadRequestAndSuccessResponse() throws Exception {
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogDeliveryListener deliveryListener = Mockito.mock(LogDeliveryListener.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        DefaultLogUploadStrategy strategy = Mockito.spy(new DefaultLogUploadStrategy());
        logCollector.setStrategy(strategy);
        LogStorage storage = Mockito.mock(LogStorage.class);
        logCollector.setStorage(storage);
        Log record = new Log();
        Mockito.when(storage.addLogRecord(Mockito.any(LogRecord.class))).thenReturn(new BucketInfo(1, 1));
        Mockito.when(storage.getStatus()).thenReturn(new LogStorageStatus() {
            @Override
            public long getRecordCount() {
                return 1;
            }

            @Override
            public long getConsumedVolume() {
                return 1;
            }
        });
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        // Re-stub to a large consumed volume so the next record forces an upload.
        Mockito.when(storage.getStatus()).thenReturn(new LogStorageStatus() {
            @Override
            public long getRecordCount() {
                return 1;
            }

            @Override
            public long getConsumedVolume() {
                return 1024 * 1024;
            }
        });
        logCollector.addLogRecord(record);
        Mockito.when(storage.getNextBucket()).thenReturn(
                new LogBucket(1, Arrays.asList(new LogRecord(record), new LogRecord(record), new LogRecord(record))));
        LogSyncRequest request1 = new LogSyncRequest();
        logCollector.fillSyncRequest(request1);
        Assert.assertEquals(3, request1.getLogEntries().size());
        LogSyncResponse uploadResponse = new LogSyncResponse();
        LogDeliveryStatus status = new LogDeliveryStatus(request1.getRequestId(), SyncResponseResultType.SUCCESS, null);
        uploadResponse.setDeliveryStatuses(Collections.singletonList(status));
        logCollector.setLogDeliveryListener(deliveryListener);
        logCollector.onLogResponse(uploadResponse);
        verify(deliveryListener, Mockito.timeout(1000)).onLogDeliverySuccess(Mockito.any(BucketInfo.class));
        verify(transport, Mockito.timeout(1000).times(2)).sync();
    }

    /**
     * A FAILURE status routes through the failure listener and the strategy's onFailure;
     * with retry period 0 no further sync is scheduled afterwards.
     */
    @Test
    public void testLogUploadAndFailureResponse() throws IOException, InterruptedException {
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogDeliveryListener deliveryListener = Mockito.mock(LogDeliveryListener.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        DefaultLogUploadStrategy strategy = Mockito.spy(new DefaultLogUploadStrategy());
        strategy.setRetryPeriod(0);
        logCollector.setStrategy(strategy);
        LogStorage storage = Mockito.mock(LogStorage.class);
        logCollector.setStorage(storage);
        Log record = new Log();
        Mockito.when(storage.addLogRecord(Mockito.any(LogRecord.class))).thenReturn(new BucketInfo(1, 1));
        Mockito.when(storage.getStatus()).thenReturn(new LogStorageStatus() {
            @Override
            public long getRecordCount() {
                return 1;
            }

            @Override
            public long getConsumedVolume() {
                return 1;
            }
        });
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        Mockito.when(storage.addLogRecord(Mockito.any(LogRecord.class))).thenReturn(new BucketInfo(1, 1));
        Mockito.when(storage.getStatus()).thenReturn(new LogStorageStatus() {
            @Override
            public long getRecordCount() {
                return 1;
            }

            @Override
            public long getConsumedVolume() {
                return 1024 * 1024;
            }
        });
        logCollector.addLogRecord(record);
        Mockito.when(storage.getNextBucket()).thenReturn(
                new LogBucket(1, Arrays.asList(new LogRecord(record), new LogRecord(record), new LogRecord(record))));
        LogSyncRequest request1 = new LogSyncRequest();
        logCollector.fillSyncRequest(request1);
        Assert.assertEquals(3, request1.getLogEntries().size());
        LogSyncResponse uploadResponse = new LogSyncResponse();
        LogDeliveryStatus status = new LogDeliveryStatus(request1.getRequestId(), SyncResponseResultType.FAILURE,
                LogDeliveryErrorCode.NO_APPENDERS_CONFIGURED);
        uploadResponse.setDeliveryStatuses(Collections.singletonList(status));
        logCollector.setLogDeliveryListener(deliveryListener);
        logCollector.onLogResponse(uploadResponse);
        // The failover controller is private; reach it via reflection to assert the
        // strategy callback receives the exact same instance.
        LogFailoverCommand controller = (LogFailoverCommand) ReflectionTestUtils.getField(logCollector, "controller");
        verify(deliveryListener, Mockito.timeout(1000)).onLogDeliveryFailure(Mockito.any(BucketInfo.class));
        verify(strategy, Mockito.timeout(1000)).onFailure(controller, LogDeliveryErrorCode.NO_APPENDERS_CONFIGURED);
        verify(transport, Mockito.timeout(1000).times(2)).sync();
        reset(transport);
        // NOTE(review): sleep-based negative check — flaky by nature; kept as-is.
        Thread.sleep(1000);
        verify(transport, never()).sync();
    }

    /** Delivery timeout fires the timeout listener and the strategy's onTimeout once. */
    @Test
    public void testTimeout() throws Exception {
        int timeout = 2; // in seconds
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        LogDeliveryListener deliveryListener = Mockito.mock(LogDeliveryListener.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        DefaultLogUploadStrategy tmp = new DefaultLogUploadStrategy();
        tmp.setTimeout(timeout);
        LogUploadStrategy strategy = Mockito.spy(tmp);
        logCollector.setLogDeliveryListener(deliveryListener);
        logCollector.setStrategy(strategy);
        Log record = new Log();
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        logCollector.addLogRecord(record);
        Mockito.verify(strategy, Mockito.times(0)).onTimeout(Mockito.any(LogFailoverCommand.class));
        LogSyncRequest request1 = Mockito.mock(LogSyncRequest.class);
        logCollector.fillSyncRequest(request1);
        // Half the timeout has elapsed: no onTimeout yet.
        Thread.sleep(timeout / 2 * 1000);
        Mockito.verify(strategy, Mockito.times(0)).onTimeout(Mockito.any(LogFailoverCommand.class));
        // Full timeout elapsed; the next record triggers the timeout check.
        Thread.sleep(timeout / 2 * 1000);
        logCollector.addLogRecord(record);
        verify(deliveryListener, Mockito.timeout(1000)).onLogDeliveryTimeout(Mockito.any(BucketInfo.class));
        Mockito.verify(strategy, Mockito.timeout(1000).times(1)).onTimeout(Mockito.any(LogFailoverCommand.class));
    }

    /** Each RecordFuture resolves with the bucket the record was assigned to. */
    @Test
    public void testBucketFuture() throws Exception {
        int defaultId = 42;
        int logCount = 5;
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        LogStorage storage = Mockito.mock(LogStorage.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        logCollector.setStorage(storage);
        Mockito.when(storage.getStatus()).thenReturn(new LogStorageStatus() {
            @Override
            public long getRecordCount() {
                return 1;
            }

            @Override
            public long getConsumedVolume() {
                return 1;
            }
        });
        DefaultLogUploadStrategy strategy = new DefaultLogUploadStrategy() {
            @Override
            public LogUploadStrategyDecision isUploadNeeded(LogStorageStatus status) {
                return LogUploadStrategyDecision.UPLOAD;
            }
        };
        logCollector.setStrategy(strategy);
        LogSyncResponse response = new LogSyncResponse();
        List<LogDeliveryStatus> statuses = new ArrayList<>();
        LogDeliveryStatus status = new LogDeliveryStatus(defaultId, SyncResponseResultType.SUCCESS, null);
        statuses.add(status);
        response.setDeliveryStatuses(statuses);
        BucketInfo bucketInfo = new BucketInfo(status.getRequestId(), logCount);
        Mockito.when(storage.addLogRecord(Mockito.any(LogRecord.class))).thenReturn(bucketInfo);
        List<LogRecord> logRecords = new ArrayList<>();
        logRecords.add(new LogRecord());
        LogBucket logBlock = new LogBucket(defaultId, logRecords);
        Mockito.when(storage.getNextBucket()).thenReturn(logBlock);
        List<RecordFuture> deliveryFutures = new LinkedList<RecordFuture>();
        for (int i = 0; i < logCount; ++i) {
            deliveryFutures.add(logCollector.addLogRecord(new Log()));
        }
        LogSyncRequest request = new LogSyncRequest();
        logCollector.fillSyncRequest(request);
        logCollector.onLogResponse(response);
        for (RecordFuture future : deliveryFutures) {
            Assert.assertEquals(defaultId, future.get().getBucketInfo().getBucketId());
        }
    }

    @Test
    public void testMaxParallelUpload() throws Exception {
        testMaxParallelUploadHelper(0);
        testMaxParallelUploadHelper(3);
        testMaxParallelUploadHelper(5);
    }

    /**
     * Fills requests up to the parallel-upload cap, then verifies the extra record gets
     * no new request id until a response frees a slot.
     */
    private void testMaxParallelUploadHelper(int maxParallelUpload) throws Exception {
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        DefaultLogUploadStrategy strategy = Mockito.spy(new DefaultLogUploadStrategy());
        strategy.setMaxParallelUploads(maxParallelUpload);
        logCollector.setStrategy(strategy);
        LogSyncRequest request = Mockito.spy(new LogSyncRequest());
        List<LogDeliveryStatus> statuses = new ArrayList<>();
        for (int i = 0; i < maxParallelUpload; i++) {
            logCollector.addLogRecord(new Log());
            logCollector.fillSyncRequest(request);
            statuses.add(new LogDeliveryStatus(request.getRequestId(), SyncResponseResultType.SUCCESS, null));
        }
        logCollector.addLogRecord(new Log());
        logCollector.fillSyncRequest(request);
        // Only the capped number of fills may have assigned a request id.
        Mockito.verify(request, Mockito.times(maxParallelUpload)).setRequestId(Mockito.any(Integer.class));
        if (statuses.isEmpty() && maxParallelUpload == 0) {
            return;
        }
        LogSyncResponse response = new LogSyncResponse(statuses);
        logCollector.onLogResponse(response);
        Mockito.reset(request);
        logCollector.fillSyncRequest(request);
        Mockito.verify(request, Mockito.times(1)).setRequestId(Mockito.any(Integer.class));
    }

    @Test
    public void testMaxParallelUploadWithSyncAll() throws Exception {
        testMaxParallelUploadSyncHelper(0);
        testMaxParallelUploadSyncHelper(3);
        testMaxParallelUploadSyncHelper(5);
    }

    /** With an always-UPLOAD strategy, sync() is called exactly once per allowed slot. */
    private void testMaxParallelUploadSyncHelper(int maxParallelUpload) throws Exception {
        KaaChannelManager channelManager = Mockito.mock(KaaChannelManager.class);
        FailoverManager failoverManager = Mockito.mock(FailoverManager.class);
        LogTransport transport = Mockito.mock(LogTransport.class);
        AbstractLogCollector logCollector =
                new DefaultLogCollector(transport, executorContext, channelManager, failoverManager);
        DefaultLogUploadStrategy strategy = new DefaultLogUploadStrategy() {
            @Override
            public LogUploadStrategyDecision isUploadNeeded(LogStorageStatus status) {
                return LogUploadStrategyDecision.UPLOAD;
            }
        };
        strategy.setMaxParallelUploads(maxParallelUpload);
        logCollector.setStrategy(strategy);
        LogSyncRequest request = new LogSyncRequest();
        List<LogDeliveryStatus> statuses = new ArrayList<>();
        for (int i = 0; i < maxParallelUpload; i++) {
            logCollector.addLogRecord(new Log());
            logCollector.fillSyncRequest(request);
        }
        LogSyncResponse response = new LogSyncResponse(statuses);
        logCollector.onLogResponse(response);
        Mockito.verify(transport, Mockito.times(maxParallelUpload)).sync();
    }
}
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ponyvillelive.pvlmobile.ui; import android.app.Activity; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.view.ViewDebug; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.lang.reflect.Method; import java.net.InetAddress; import java.net.ServerSocket; import java.net.Socket; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * <p>This class can be used to enable the use of HierarchyViewer inside an * application. HierarchyViewer is an Android SDK tool that can be used * to inspect and debug the user interface of running applications. For * security reasons, HierarchyViewer does not work on production builds * (for instance phones bought in store.) By using this class, you can * make HierarchyViewer work on any device. 
You must be very careful * however to only enable HierarchyViewer when debugging your * application.</p> * * <p>To use this view server, your application must require the INTERNET * permission.</p> */ public class SocketActivityHierarchyServer implements Runnable, ActivityHierarchyServer { /** * The default port used to start view servers. */ private static final int VIEW_SERVER_DEFAULT_PORT = 4939; private static final int VIEW_SERVER_MAX_CONNECTIONS = 10; private static final String LOG_TAG = "SocketActivityHierarchyServer"; private static final String VALUE_PROTOCOL_VERSION = "4"; private static final String VALUE_SERVER_VERSION = "4"; // Protocol commands // Returns the protocol version private static final String COMMAND_PROTOCOL_VERSION = "PROTOCOL"; // Returns the server version private static final String COMMAND_SERVER_VERSION = "SERVER"; // Lists all of the available windows in the system private static final String COMMAND_WINDOW_MANAGER_LIST = "LIST"; // Keeps a connection open and notifies when the list of windows changes private static final String COMMAND_WINDOW_MANAGER_AUTOLIST = "AUTOLIST"; // Returns the focused window private static final String COMMAND_WINDOW_MANAGER_GET_FOCUS = "GET_FOCUS"; private ServerSocket mServer; private final int mPort; private Thread mThread; private ExecutorService mThreadPool; private final List<WindowListener> mListeners = new CopyOnWriteArrayList<>(); private final HashMap<View, String> mWindows = new HashMap<>(); private final ReentrantReadWriteLock mWindowsLock = new ReentrantReadWriteLock(); private View mFocusedWindow; private final ReentrantReadWriteLock mFocusLock = new ReentrantReadWriteLock(); /** * Creates a new ActivityHierarchyServer associated with the specified window manager on the * default local port. The server is not started by default. * * @see #start() */ public SocketActivityHierarchyServer() { mPort = SocketActivityHierarchyServer.VIEW_SERVER_DEFAULT_PORT; } /** * Starts the server. 
* * @return True if the server was successfully created, or false if it already exists. * @throws java.io.IOException If the server cannot be created. */ public boolean start() throws IOException { if (mThread != null) { return false; } mThread = new Thread(this, "Local View Server [port=" + mPort + "]"); mThreadPool = Executors.newFixedThreadPool(VIEW_SERVER_MAX_CONNECTIONS); mThread.start(); return true; } @Override public void onActivityCreated(Activity activity, Bundle bundle) { String name = activity.getTitle().toString(); if (TextUtils.isEmpty(name)) { name = activity.getClass().getCanonicalName() + "/0x" + System.identityHashCode(activity); } else { name += " (" + activity.getClass().getCanonicalName() + ")"; } mWindowsLock.writeLock().lock(); try { mWindows.put(activity.getWindow().getDecorView().getRootView(), name); } finally { mWindowsLock.writeLock().unlock(); } fireWindowsChangedEvent(); } @Override public void onActivityStarted(Activity activity) { } @Override public void onActivityResumed(Activity activity) { View view = activity.getWindow().getDecorView(); mFocusLock.writeLock().lock(); try { mFocusedWindow = view == null ? 
null : view.getRootView(); } finally { mFocusLock.writeLock().unlock(); } fireFocusChangedEvent(); } @Override public void onActivityPaused(Activity activity) { } @Override public void onActivityStopped(Activity activity) { } @Override public void onActivitySaveInstanceState(Activity activity, Bundle bundle) { } @Override public void onActivityDestroyed(Activity activity) { mWindowsLock.writeLock().lock(); try { mWindows.remove(activity.getWindow().getDecorView().getRootView()); } finally { mWindowsLock.writeLock().unlock(); } fireWindowsChangedEvent(); } public void run() { try { mServer = new ServerSocket(mPort, VIEW_SERVER_MAX_CONNECTIONS, InetAddress.getLocalHost()); } catch (Exception e) { Log.w(LOG_TAG, "Starting ServerSocket error: ", e); } while (mServer != null && Thread.currentThread() == mThread) { // Any uncaught exception will crash the system process try { Socket client = mServer.accept(); if (mThreadPool != null) { mThreadPool.submit(new ViewServerWorker(client)); } else { try { client.close(); } catch (IOException e) { e.printStackTrace(); } } } catch (Exception e) { Log.w(LOG_TAG, "Connection error: ", e); } } } private static boolean writeValue(Socket client, String value) { boolean result; BufferedWriter out = null; try { OutputStream clientStream = client.getOutputStream(); out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024); out.write(value); out.write("\n"); out.flush(); result = true; } catch (Exception e) { result = false; } finally { if (out != null) { try { out.close(); } catch (IOException e) { result = false; } } } return result; } private void fireWindowsChangedEvent() { for (WindowListener listener : mListeners) { listener.windowsChanged(); } } private void fireFocusChangedEvent() { for (WindowListener listener : mListeners) { listener.focusChanged(); } } private void addWindowListener(WindowListener listener) { if (!mListeners.contains(listener)) { mListeners.add(listener); } } private void 
removeWindowListener(WindowListener listener) { mListeners.remove(listener); } private interface WindowListener { void windowsChanged(); void focusChanged(); } private static class UncloseableOutputStream extends OutputStream { private final OutputStream mStream; UncloseableOutputStream(OutputStream stream) { mStream = stream; } public void close() throws IOException { // Don't close the stream } public boolean equals(Object o) { return mStream.equals(o); } public void flush() throws IOException { mStream.flush(); } public int hashCode() { return mStream.hashCode(); } public String toString() { return mStream.toString(); } public void write(byte[] buffer, int offset, int count) throws IOException { mStream.write(buffer, offset, count); } public void write(byte[] buffer) throws IOException { mStream.write(buffer); } public void write(int oneByte) throws IOException { mStream.write(oneByte); } } private class ViewServerWorker implements Runnable, WindowListener { private Socket mClient; private boolean mNeedWindowListUpdate; private boolean mNeedFocusedWindowUpdate; private final Object[] mLock = new Object[0]; public ViewServerWorker(Socket client) { mClient = client; mNeedWindowListUpdate = false; mNeedFocusedWindowUpdate = false; } public void run() { BufferedReader in = null; try { in = new BufferedReader(new InputStreamReader(mClient.getInputStream()), 1024); final String request = in.readLine(); String command; String parameters; int index = request.indexOf(' '); if (index == -1) { command = request; parameters = ""; } else { command = request.substring(0, index); parameters = request.substring(index + 1); } boolean result; if (COMMAND_PROTOCOL_VERSION.equalsIgnoreCase(command)) { result = writeValue(mClient, VALUE_PROTOCOL_VERSION); } else if (COMMAND_SERVER_VERSION.equalsIgnoreCase(command)) { result = writeValue(mClient, VALUE_SERVER_VERSION); } else if (COMMAND_WINDOW_MANAGER_LIST.equalsIgnoreCase(command)) { result = listWindows(mClient); } else if 
(COMMAND_WINDOW_MANAGER_GET_FOCUS.equalsIgnoreCase(command)) { result = getFocusedWindow(mClient); } else if (COMMAND_WINDOW_MANAGER_AUTOLIST.equalsIgnoreCase(command)) { result = windowManagerAutolistLoop(); } else { result = windowCommand(mClient, command, parameters); } if (!result) { Log.w(LOG_TAG, "An error occurred with the command: " + command); } } catch (IOException e) { Log.w(LOG_TAG, "Connection error: ", e); } finally { if (in != null) { try { in.close(); } catch (IOException e) { e.printStackTrace(); } } if (mClient != null) { try { mClient.close(); } catch (IOException e) { e.printStackTrace(); } } } } private boolean windowCommand(Socket client, String command, String parameters) { boolean success = true; BufferedWriter out = null; try { // Find the hash code of the window int index = parameters.indexOf(' '); if (index == -1) { index = parameters.length(); } final String code = parameters.substring(0, index); int hashCode = (int) Long.parseLong(code, 16); // Extract the command's parameter after the window description if (index < parameters.length()) { parameters = parameters.substring(index + 1); } else { parameters = ""; } final View window = findWindow(hashCode); if (window == null) { return false; } // call stuff final Method dispatch = ViewDebug.class.getDeclaredMethod("dispatchCommand", View.class, String.class, String.class, OutputStream.class); dispatch.setAccessible(true); dispatch.invoke(null, window, command, parameters, new UncloseableOutputStream(client.getOutputStream())); if (!client.isOutputShutdown()) { out = new BufferedWriter(new OutputStreamWriter(client.getOutputStream())); out.write("DONE\n"); out.flush(); } } catch (Exception e) { Log.w(LOG_TAG, "Could not send command " + command + " with parameters " + parameters, e); success = false; } finally { if (out != null) { try { out.close(); } catch (IOException e) { success = false; } } } return success; } private View findWindow(int hashCode) { if (hashCode == -1) { View window = 
null; mWindowsLock.readLock().lock(); try { window = mFocusedWindow; } finally { mWindowsLock.readLock().unlock(); } return window; } mWindowsLock.readLock().lock(); try { for (Entry<View, String> entry : mWindows.entrySet()) { if (System.identityHashCode(entry.getKey()) == hashCode) { return entry.getKey(); } } } finally { mWindowsLock.readLock().unlock(); } return null; } private boolean listWindows(Socket client) { boolean result = true; BufferedWriter out = null; try { mWindowsLock.readLock().lock(); OutputStream clientStream = client.getOutputStream(); out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024); for (Entry<View, String> entry : mWindows.entrySet()) { out.write(Integer.toHexString(System.identityHashCode(entry.getKey()))); out.write(' '); out.append(entry.getValue()); out.write('\n'); } out.write("DONE.\n"); out.flush(); } catch (Exception e) { result = false; } finally { mWindowsLock.readLock().unlock(); if (out != null) { try { out.close(); } catch (IOException e) { result = false; } } } return result; } private boolean getFocusedWindow(Socket client) { boolean result = true; String focusName = null; BufferedWriter out = null; try { OutputStream clientStream = client.getOutputStream(); out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024); View focusedWindow = null; mFocusLock.readLock().lock(); try { focusedWindow = mFocusedWindow; } finally { mFocusLock.readLock().unlock(); } if (focusedWindow != null) { mWindowsLock.readLock().lock(); try { focusName = mWindows.get(mFocusedWindow); } finally { mWindowsLock.readLock().unlock(); } out.write(Integer.toHexString(System.identityHashCode(focusedWindow))); out.write(' '); out.append(focusName); } out.write('\n'); out.flush(); } catch (Exception e) { result = false; } finally { if (out != null) { try { out.close(); } catch (IOException e) { result = false; } } } return result; } public void windowsChanged() { synchronized (mLock) { mNeedWindowListUpdate = true; 
mLock.notifyAll(); } } public void focusChanged() { synchronized (mLock) { mNeedFocusedWindowUpdate = true; mLock.notifyAll(); } } private boolean windowManagerAutolistLoop() { addWindowListener(this); BufferedWriter out = null; try { out = new BufferedWriter(new OutputStreamWriter(mClient.getOutputStream())); while (!Thread.interrupted()) { boolean needWindowListUpdate = false; boolean needFocusedWindowUpdate = false; synchronized (mLock) { while (!mNeedWindowListUpdate && !mNeedFocusedWindowUpdate) { mLock.wait(); } if (mNeedWindowListUpdate) { mNeedWindowListUpdate = false; needWindowListUpdate = true; } if (mNeedFocusedWindowUpdate) { mNeedFocusedWindowUpdate = false; needFocusedWindowUpdate = true; } } if (needWindowListUpdate) { out.write("LIST UPDATE\n"); out.flush(); } if (needFocusedWindowUpdate) { out.write("FOCUS UPDATE\n"); out.flush(); } } } catch (Exception e) { Log.w(LOG_TAG, "Connection error: ", e); } finally { if (out != null) { try { out.close(); } catch (IOException e) { // Ignore } } removeWindowListener(this); } return true; } } }
/*
 * Copyright 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.DefinitionsRemover.Definition;
import com.google.javascript.jscomp.NodeTraversal.Callback;
import com.google.javascript.jscomp.Scope.Var;
import com.google.javascript.jscomp.graph.DiGraph;
import com.google.javascript.jscomp.graph.FixedPointGraphTraversal;
import com.google.javascript.jscomp.graph.FixedPointGraphTraversal.EdgeCallback;
import com.google.javascript.jscomp.graph.LinkedDirectedGraph;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Compiler pass that computes function purity.  A function is pure if
 * it has no outside visible side effects, and the result of the
 * computation does not depend on external factors that are beyond the
 * control of the application; repeated calls to the function should
 * return the same value as long as global state hasn't changed.
 *
 * Date.now is an example of a function that has no side effects but
 * is not pure.
 *
 *
 * We will prevail, in peace and freedom from fear, and in true
 * health, through the purity and essence of our natural... fluids.
 *                                    - General Turgidson
 */
class PureFunctionIdentifier implements CompilerPass {
  static final DiagnosticType INVALID_NO_SIDE_EFFECT_ANNOTATION =
      DiagnosticType.error(
          "JSC_INVALID_NO_SIDE_EFFECT_ANNOTATION",
          "@nosideeffects may only appear in externs files.");

  private final AbstractCompiler compiler;
  private final DefinitionProvider definitionProvider;

  // Function node -> function side effects map
  private final Map<Node, FunctionInformation> functionSideEffectMap;

  // List of all function call sites; used to iterate in markPureFunctionCalls.
  private final List<Node> allFunctionCalls;

  // Externs and ast tree root, for use in getDebugReport.  These two
  // fields are null until process is called.
  private Node externs;
  private Node root;

  public PureFunctionIdentifier(AbstractCompiler compiler,
                                DefinitionProvider definitionProvider) {
    this.compiler = compiler;
    this.definitionProvider = definitionProvider;
    this.functionSideEffectMap = Maps.newHashMap();
    this.allFunctionCalls = Lists.newArrayList();
    this.externs = null;
    this.root = null;
  }

  @Override
  public void process(Node externsAst, Node srcAst) {
    // This pass accumulates state in its maps; a second run on the same
    // instance would see stale data, so it is explicitly forbidden.
    if (externs != null || root != null) {
      throw new IllegalStateException(
          "It is illegal to call PureFunctionIdentifier.process " +
          "twice the same instance.  Please use a new " +
          "PureFunctionIdentifier instance each time.");
    }

    externs = externsAst;
    root = srcAst;

    NodeTraversal.traverse(compiler, externs, new FunctionAnalyzer(true));
    NodeTraversal.traverse(compiler, root, new FunctionAnalyzer(false));

    propagateSideEffects();

    markPureFunctionCalls();
  }

  /**
   * Compute debug report that includes:
   *  - List of all pure functions.
   *  - Reasons we think the remaining functions have side effects.
   */
  String getDebugReport() {
    Preconditions.checkNotNull(externs);
    Preconditions.checkNotNull(root);

    StringBuilder sb = new StringBuilder();

    FunctionNames functionNames = new FunctionNames(compiler);
    functionNames.process(null, externs);
    functionNames.process(null, root);

    sb.append("Pure functions:\n");
    for (Map.Entry<Node, FunctionInformation> entry :
             functionSideEffectMap.entrySet()) {
      Node function = entry.getKey();
      FunctionInformation functionInfo = entry.getValue();

      boolean isPure =
          functionInfo.mayBePure() && !functionInfo.mayHaveSideEffects();
      if (isPure) {
        sb.append(" " + functionNames.getFunctionName(function) + "\n");
      }
    }
    sb.append("\n");

    for (Map.Entry<Node, FunctionInformation> entry :
             functionSideEffectMap.entrySet()) {
      Node function = entry.getKey();
      FunctionInformation functionInfo = entry.getValue();

      Set<String> depFunctionNames = Sets.newHashSet();
      for (Node callSite : functionInfo.getCallsInFunctionBody()) {
        Collection<Definition> defs =
            getCallableDefinitions(definitionProvider,
                                   callSite.getFirstChild());

        if (defs == null) {
          depFunctionNames.add("<null def list>");
          continue;
        }

        for (Definition def : defs) {
          depFunctionNames.add(
              functionNames.getFunctionName(def.getRValue()));
        }
      }

      sb.append(functionNames.getFunctionName(function) + " " +
                functionInfo.toString() +
                " Calls: " + depFunctionNames + "\n");
    }

    return sb.toString();
  }

  /**
   * Query the DefinitionProvider for the list of definitions that
   * correspond to a given qualified name subtree.  Return null if
   * DefinitionProvider does not contain an entry for a given name,
   * one or more of the values returned by getDeclarations is not
   * callable, or the "name" node is not a GETPROP or NAME.
   *
   * @param definitionProvider The name reference graph
   * @param name Query node
   * @return non-empty definition list or null
   */
  private static Collection<Definition> getCallableDefinitions(
      DefinitionProvider definitionProvider, Node name) {
    if (NodeUtil.isGetProp(name) || NodeUtil.isName(name)) {
      List<Definition> result = Lists.newArrayList();

      Collection<Definition> decls =
          definitionProvider.getDefinitionsReferencedAt(name);
      if (decls == null) {
        return null;
      }

      for (Definition current : decls) {
        Node rValue = current.getRValue();
        if ((rValue != null) && NodeUtil.isFunction(rValue)) {
          result.add(current);
        } else {
          // Any non-function definition poisons the whole set.
          return null;
        }
      }

      return result;
    } else if (name.getType() == Token.OR || name.getType() == Token.HOOK) {
      // For "a || b" both operands may be called; for "c ? a : b" only the
      // two value branches (skip the condition).  Merge the definition sets
      // of both possible callees.
      Node firstVal;
      if (name.getType() == Token.HOOK) {
        firstVal = name.getFirstChild().getNext();
      } else {
        firstVal = name.getFirstChild();
      }

      Collection<Definition> defs1 = getCallableDefinitions(definitionProvider,
                                                            firstVal);
      Collection<Definition> defs2 = getCallableDefinitions(definitionProvider,
                                                            firstVal.getNext());
      if (defs1 != null && defs2 != null) {
        defs1.addAll(defs2);
        return defs1;
      } else {
        return null;
      }
    } else if (NodeUtil.isFunctionExpression(name)) {
      // The anonymous function reference is also the definition.
      // TODO(user) Change SimpleDefinitionFinder so it is possible to query for
      // function expressions by function node.

      // isExtern is false in the call to the constructor for the
      // FunctionExpressionDefinition below because we know that
      // getCallableDefinitions() will only be called on the first
      // child of a call and thus the function expression
      // definition will never be an extern.
      return Lists.newArrayList(
          (Definition)
              new DefinitionsRemover.FunctionExpressionDefinition(name, false));
    } else {
      return null;
    }
  }

  /**
   * Propagate side effect information by building a graph based on
   * call site information stored in FunctionInformation and the
   * DefinitionProvider and then running GraphReachability to
   * determine the set of functions that have side effects.
   */
  private void propagateSideEffects() {
    // Nodes are function declarations; Edges are function call sites.
    DiGraph<FunctionInformation, Node> sideEffectGraph =
        new LinkedDirectedGraph<FunctionInformation, Node>();

    // create graph nodes
    for (FunctionInformation functionInfo : functionSideEffectMap.values()) {
      sideEffectGraph.createNode(functionInfo);
    }

    // add connections to called functions and side effect root.
    for (FunctionInformation functionInfo : functionSideEffectMap.values()) {
      if (!functionInfo.mayHaveSideEffects()) {
        continue;
      }

      for (Node callSite : functionInfo.getCallsInFunctionBody()) {
        Node callee = callSite.getFirstChild();
        Collection<Definition> defs =
            getCallableDefinitions(definitionProvider, callee);
        if (defs == null) {
          // Definition set is not complete or eligible.  Possible
          // causes include:
          //  * "callee" is not of type NAME or GETPROP.
          //  * One or more definitions are not functions.
          //  * One or more definitions are complex.
          //    (i.e. return value of a call that returns a function).
          functionInfo.setTaintsUnknown();
          break;
        }

        for (Definition def : defs) {
          Node defValue = def.getRValue();
          FunctionInformation dep = functionSideEffectMap.get(defValue);
          Preconditions.checkNotNull(dep);
          sideEffectGraph.connect(dep, callSite, functionInfo);
        }
      }
    }

    // Propagate side effect information to a fixed point.
    FixedPointGraphTraversal.newTraversal(new SideEffectPropagationCallback())
        .computeFixedPoint(sideEffectGraph);

    // Mark remaining functions "pure".
    for (FunctionInformation functionInfo : functionSideEffectMap.values()) {
      if (functionInfo.mayBePure()) {
        functionInfo.setIsPure();
      }
    }
  }

  /**
   * Set no side effect property at pure-function call sites.
   */
  private void markPureFunctionCalls() {
    for (Node callNode : allFunctionCalls) {
      Node name = callNode.getFirstChild();
      Collection<Definition> defs =
          getCallableDefinitions(definitionProvider, name);
      // Default to "has side effects" when the callee cannot be resolved.
      boolean hasSideEffects = true;
      if (defs != null) {
        hasSideEffects = false;
        for (Definition def : defs) {
          FunctionInformation functionInfo =
              functionSideEffectMap.get(def.getRValue());
          Preconditions.checkNotNull(functionInfo);
          // A constructor call ("new") does not observe "this" mutation of
          // the callee, so only global-state taint and throws count there.
          if ((NodeUtil.isCall(callNode)
                  && functionInfo.mayHaveSideEffects())
              || (NodeUtil.isNew(callNode)
                  && (functionInfo.mutatesGlobalState()
                      || functionInfo.functionThrows()))) {
            hasSideEffects = true;
            break;
          }
        }
      }

      // Handle special cases (Math, RegEx)
      if (NodeUtil.isCall(callNode)) {
        Preconditions.checkState(compiler != null);
        if (!NodeUtil.functionCallHasSideEffects(callNode, compiler)) {
          hasSideEffects = false;
        }
      } else if (NodeUtil.isNew(callNode)) {
        // Handle known cases now (Object, Date, RegExp, etc)
        if (!NodeUtil.constructorCallHasSideEffects(callNode)) {
          hasSideEffects = false;
        }
      }

      if (!hasSideEffects) {
        callNode.setIsNoSideEffectsCall();
      }
    }
  }

  /**
   * Gather list of functions, functions with @nosideeffect
   * annotations, call sites, and functions that may mutate variables
   * not defined in the local scope.
   */
  private class FunctionAnalyzer implements Callback {
    private final boolean inExterns;

    FunctionAnalyzer(boolean inExterns) {
      this.inExterns = inExterns;
    }

    @Override
    public boolean shouldTraverse(NodeTraversal traversal,
                                  Node node,
                                  Node parent) {
      // Functions need to be processed as part of pre-traversal so an
      // entry for the enclosing function exists in the
      // FunctionInformation map when processing assignments and calls
      // inside visit.
      if (NodeUtil.isFunction(node)) {
        Node gramp = parent.getParent();
        visitFunction(traversal, node, parent, gramp);
      }

      return true;
    }

    @Override
    public void visit(NodeTraversal traversal, Node node, Node parent) {
      if (inExterns) {
        return;
      }

      if (!NodeUtil.nodeTypeMayHaveSideEffects(node)) {
        return;
      }

      if (NodeUtil.isCall(node) || NodeUtil.isNew(node)) {
        allFunctionCalls.add(node);
      }

      Node enclosingFunction = traversal.getEnclosingFunction();
      if (enclosingFunction != null) {
        FunctionInformation sideEffectInfo =
            functionSideEffectMap.get(enclosingFunction);
        Preconditions.checkNotNull(sideEffectInfo);

        if (NodeUtil.isAssignmentOp(node)) {
          visitAssignmentOrUnaryOperatorLhs(
              sideEffectInfo, traversal.getScope(), node.getFirstChild());
        } else {
          switch(node.getType()) {
            case Token.CALL:
            case Token.NEW:
              visitCall(sideEffectInfo, node);
              break;
            case Token.DELPROP:
            case Token.DEC:
            case Token.INC:
              visitAssignmentOrUnaryOperatorLhs(
                  sideEffectInfo, traversal.getScope(), node.getFirstChild());
              break;
            case Token.NAME:
              // Variable definition are not side effects.
              // Just check that the name appears in the context of a
              // variable declaration.
              Preconditions.checkArgument(
                  NodeUtil.isVarDeclaration(node));
              break;
            case Token.THROW:
              visitThrow(sideEffectInfo);
              break;
            default:
              throw new IllegalArgumentException(
                  "Unhandled side effect node type " +
                  Token.name(node.getType()));
          }
        }
      }
    }

    /**
     * Record information about the side effects caused by an
     * assignment or mutating unary operator.
     *
     * If the operation modifies this or taints global state, mark the
     * enclosing function as having those side effects.
     */
    private void visitAssignmentOrUnaryOperatorLhs(
        FunctionInformation sideEffectInfo, Scope scope, Node lhs) {
      if (NodeUtil.isName(lhs)) {
        Var var = scope.getVar(lhs.getString());
        // Assigning to a name not declared in this function's own scope
        // taints global state.
        if (var == null || var.scope != scope) {
          sideEffectInfo.setTaintsGlobalState();
        }
      } else if (NodeUtil.isGetProp(lhs)) {
        if (NodeUtil.isThis(lhs.getFirstChild())) {
          sideEffectInfo.setTaintsThis();
        } else {
          sideEffectInfo.setTaintsUnknown();
        }
      } else {
        sideEffectInfo.setTaintsUnknown();
      }
    }

    /**
     * Record information about a call site.
     */
    private void visitCall(FunctionInformation sideEffectInfo, Node node) {
      // Handle special cases (Math, RegEx)
      if (NodeUtil.isCall(node)
          && !NodeUtil.functionCallHasSideEffects(node, compiler)) {
        return;
      }

      // Handle known cases now (Object, Date, RegExp, etc)
      if (NodeUtil.isNew(node)
          && !NodeUtil.constructorCallHasSideEffects(node)) {
        return;
      }

      sideEffectInfo.appendCall(node);
    }

    /**
     * Record function and check for @nosideeffects annotations.
     */
    private void visitFunction(NodeTraversal traversal,
                               Node node,
                               Node parent,
                               Node gramp) {
      Preconditions.checkArgument(!functionSideEffectMap.containsKey(node));
      FunctionInformation sideEffectInfo = new FunctionInformation(inExterns);
      functionSideEffectMap.put(node, sideEffectInfo);

      if (hasNoSideEffectsAnnotation(node, parent, gramp)) {
        if (inExterns) {
          sideEffectInfo.setIsPure();
        } else {
          // @nosideeffects is only trusted (and legal) in externs.
          traversal.report(node, INVALID_NO_SIDE_EFFECT_ANNOTATION);
        }
      } else if (inExterns) {
        // Unannotated extern functions are assumed to be worst-case.
        sideEffectInfo.setTaintsGlobalState();
      }
    }

    /**
     * Record that the enclosing function throws.
     */
    private void visitThrow(FunctionInformation sideEffectInfo) {
      sideEffectInfo.setFunctionThrows();
    }

    /**
     * Get the value of the @nosideeffects annotation stored in the
     * doc info.  The annotation may be attached to the function node
     * itself, to the enclosing var declaration, or to the enclosing
     * assignment.
     */
    private boolean hasNoSideEffectsAnnotation(Node node,
                                               Node parent,
                                               Node gramp) {
      {
        JSDocInfo docInfo = node.getJSDocInfo();
        if (docInfo != null && docInfo.isNoSideEffects()) {
          return true;
        }
      }

      if (NodeUtil.isName(parent)) {
        JSDocInfo docInfo = gramp.getJSDocInfo();
        return gramp.hasOneChild() &&
            docInfo != null &&
            docInfo.isNoSideEffects();
      } else if (NodeUtil.isAssign(parent)) {
        JSDocInfo docInfo = parent.getJSDocInfo();
        return docInfo != null && docInfo.isNoSideEffects();
      } else {
        return false;
      }
    }
  }

  /**
   * Callback that propagates side effect information across call sites.
   */
  private static class SideEffectPropagationCallback
      implements EdgeCallback<FunctionInformation, Node> {
    public boolean traverseEdge(FunctionInformation callee,
                                Node callSite,
                                FunctionInformation caller) {
      Preconditions.checkArgument(callSite.getType() == Token.CALL ||
                                  callSite.getType() == Token.NEW);

      boolean changed = false;
      if (!caller.mutatesGlobalState() && callee.mutatesGlobalState()) {
        caller.setTaintsGlobalState();
        changed = true;
      }

      if (!caller.functionThrows() && callee.functionThrows()) {
        caller.setFunctionThrows();
        changed = true;
      }

      if (callee.mutatesThis()) {
        // Side effects only propagate via regular calls.
        // Calling a constructor that modifies "this" has no side effects.
        if (callSite.getType() != Token.NEW) {
          Node objectNode = getCallThisObject(callSite);
          if (objectNode != null && NodeUtil.isThis(objectNode)) {
            // The callee mutates the caller's "this".
            if (!caller.mutatesThis()) {
              caller.setTaintsThis();
              changed = true;
            }
          } else if (!caller.mutatesGlobalState()) {
            // "this" is some other object (or the global object), so the
            // mutation escapes into global state from the caller's view.
            caller.setTaintsGlobalState();
            changed = true;
          }
        }
      }

      return changed;
    }
  }

  /**
   * Analyze a call site and extract the node that will act as
   * "this" inside the call, which is either the object part of the
   * qualified function name, the first argument to the call in the
   * case of ".call" and ".apply" or null if object is not specified
   * in either of those ways.
   *
   * @return node that will act as "this" for the call.
   */
  private static Node getCallThisObject(Node callSite) {
    Node foo = callSite.getFirstChild();
    if (!NodeUtil.isGetProp(foo)) {
      // "this" is not specified explicitly; call modifies global "this".
      return null;
    }

    String propString = foo.getLastChild().getString();
    if (propString.equals("call") || propString.equals("apply")) {
      // For f.call(obj, ...) / f.apply(obj, ...) the first argument is "this".
      return foo.getNext();
    } else {
      // For obj.f(...) the receiver is "this".
      return foo.getFirstChild();
    }
  }

  /**
   * Keeps track of a function's known side effects by type and the
   * list of calls that appear in a function's body.
   */
  private static class FunctionInformation {
    private final boolean extern;
    private final List<Node> callsInFunctionBody = Lists.newArrayList();
    private boolean pureFunction = false;
    private boolean functionThrows = false;
    private boolean taintsGlobalState = false;
    private boolean taintsThis = false;
    private boolean taintsUnknown = false;

    FunctionInformation(boolean extern) {
      this.extern = extern;
      checkInvariant();
    }

    /**
     * Function appeared in externs file.
     */
    boolean isExtern() {
      return extern;
    }

    /**
     * @return false if function known to have side effects.
     */
    boolean mayBePure() {
      return !(functionThrows ||
               taintsGlobalState ||
               taintsThis ||
               taintsUnknown);
    }

    /**
     * @return false if function known to be pure.
     */
    boolean mayHaveSideEffects() {
      return !pureFunction;
    }

    /**
     * Mark the function as being pure.
     */
    void setIsPure() {
      pureFunction = true;
      checkInvariant();
    }

    /**
     * Marks the function as having "modifies globals" side effects.
     */
    void setTaintsGlobalState() {
      taintsGlobalState = true;
      checkInvariant();
    }

    /**
     * Marks the function as having "modifies this" side effects.
     */
    void setTaintsThis() {
      taintsThis = true;
      checkInvariant();
    }

    /**
     * Marks the function as having "throw" side effects.
     */
    void setFunctionThrows() {
      functionThrows = true;
      checkInvariant();
    }

    /**
     * Marks the function as having "complex" side effects that are
     * not otherwise explicitly tracked.
     */
    void setTaintsUnknown() {
      taintsUnknown = true;
      checkInvariant();
    }

    /**
     * Returns true if function mutates global state.
     */
    boolean mutatesGlobalState() {
      return taintsGlobalState || taintsUnknown;
    }

    /**
     * Returns true if function mutates "this".
     */
    boolean mutatesThis() {
      return taintsThis;
    }

    /**
     * Returns true if function has an explicit "throw".
     */
    boolean functionThrows() {
      return functionThrows;
    }

    /**
     * Verify internal consistency.  Should be called at the end of
     * every method that mutates internal state.
     */
    private void checkInvariant() {
      boolean invariant = mayBePure() || mayHaveSideEffects();
      if (!invariant) {
        throw new IllegalStateException("Invariant failed.  " + toString());
      }
    }

    /**
     * Add a CALL or NEW node to the list of calls this function makes.
     */
    void appendCall(Node callNode) {
      callsInFunctionBody.add(callNode);
    }

    /**
     * Gets the list of CALL and NEW nodes.
     */
    List<Node> getCallsInFunctionBody() {
      return callsInFunctionBody;
    }

    @Override
    public String toString() {
      List<String> status = Lists.newArrayList();
      if (extern) {
        status.add("extern");
      }

      if (pureFunction) {
        status.add("pure");
      }

      if (taintsThis) {
        status.add("this");
      }

      if (taintsGlobalState) {
        status.add("global");
      }

      if (functionThrows) {
        status.add("throw");
      }

      if (taintsUnknown) {
        status.add("complex");
      }

      return "Side effects: " + status.toString();
    }
  }
}
/*
 * Copyright 2000-2011 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.util.importProject;

import com.intellij.ide.util.projectWizard.importSources.DetectedContentRoot;
import com.intellij.ide.util.projectWizard.importSources.DetectedProjectRoot;
import com.intellij.ide.util.projectWizard.importSources.DetectedSourceRoot;
import com.intellij.ide.util.projectWizard.importSources.ProjectStructureDetector;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileSystemUtil;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;

import java.io.File;
import java.io.IOException;
import java.util.*;

/**
 * Walks a directory tree and asks each registered {@link ProjectStructureDetector}
 * to find project roots (source roots, content roots) for the project import wizard.
 *
 * @author nik
 */
public class RootDetectionProcessor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.util.importProject.RootDetectionProcessor");
  private final File myBaseDir;
  private final ProjectStructureDetector[] myDetectors;
  // Parallel array: myDetectedRoots[i] collects roots found by myDetectors[i].
  private final List<DetectedProjectRoot>[] myDetectedRoots;
  private final FileTypeManager myTypeManager;
  private final ProgressIndicator myProgressIndicator;

  /**
   * Convenience entry point: detects roots under {@code baseProjectFile} using all
   * registered {@link ProjectStructureDetector} extensions.
   */
  @NotNull
  public static List<DetectedRootData> detectRoots(@NotNull File baseProjectFile) {
    return new RootDetectionProcessor(baseProjectFile, ProjectStructureDetector.EP_NAME.getExtensions()).detectRoots();
  }

  public RootDetectionProcessor(File baseDir, final ProjectStructureDetector[] detectors) {
    myBaseDir = getCanonicalDir(baseDir);
    myDetectors = detectors;
    //noinspection unchecked
    myDetectedRoots = new List[myDetectors.length];
    myTypeManager = FileTypeManager.getInstance();
    myProgressIndicator = ProgressManager.getInstance().getProgressIndicator();
  }

  /**
   * Resolves Windows 8.3 short names to a canonical path; falls back to the
   * original directory if resolution fails.
   */
  private static File getCanonicalDir(File baseDir) {
    try {
      return new File(FileUtil.resolveShortWindowsName(baseDir.getAbsolutePath()));
    }
    catch (IOException e) {
      LOG.info(e);
      return baseDir;
    }
  }

  /**
   * Groups the selected roots of each {@link DetectedRootData} by the detector
   * that is selected to handle them.
   */
  public static MultiMap<ProjectStructureDetector, DetectedProjectRoot> createRootsMap(List<DetectedRootData> list) {
    MultiMap<ProjectStructureDetector, DetectedProjectRoot> roots = new MultiMap<>();
    for (final DetectedRootData rootData : list) {
      for (ProjectStructureDetector detector : rootData.getSelectedDetectors()) {
        roots.putValue(detector, rootData.getSelectedRoot());
      }
    }
    return roots;
  }

  /**
   * Runs every detector over the tree rooted at the base directory.
   *
   * @return detectors that found at least one root, mapped to their roots
   */
  public Map<ProjectStructureDetector, List<DetectedProjectRoot>> runDetectors() {
    if (!myBaseDir.isDirectory()) {
      return Collections.emptyMap();
    }

    BitSet enabledDetectors = new BitSet(myDetectors.length);
    enabledDetectors.set(0, myDetectors.length);
    for (int i = 0; i < myDetectors.length; i++) {
      myDetectedRoots[i] = new ArrayList<>();
    }

    // Collect ancestors of the base dir so symlink cycles pointing back up
    // the tree can be detected in processRecursively.
    Set<File> parentDirectories = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
    File parent = myBaseDir.getParentFile();
    while (parent != null) {
      parentDirectories.add(parent);
      parent = parent.getParentFile();
    }
    processRecursively(myBaseDir, enabledDetectors, parentDirectories);

    final Map<ProjectStructureDetector, List<DetectedProjectRoot>> result = new LinkedHashMap<>();
    for (int i = 0; i < myDetectors.length; i++) {
      if (!myDetectedRoots[i].isEmpty()) {
        result.put(myDetectors[i], myDetectedRoots[i]);
      }
    }
    return result;
  }

  /**
   * Recursively feeds {@code dir} and its children to the enabled detectors.
   *
   * @param enabledDetectors indices of detectors still active for this subtree
   * @param parentDirectories directories currently on the recursion path (for symlink-cycle detection)
   * @return pairs of (ancestor directory to skip, detector index) that should be
   *         disabled when the recursion unwinds up to that ancestor
   */
  private List<Pair<File, Integer>> processRecursively(File dir, BitSet enabledDetectors, Set<File> parentDirectories) {
    List<Pair<File, Integer>> parentsToSkip = new SmartList<>();

    if (myTypeManager.isFileIgnored(dir.getName())) {
      return parentsToSkip;
    }
    if (myProgressIndicator != null) {
      if (myProgressIndicator.isCanceled()) {
        return parentsToSkip;
      }
      myProgressIndicator.setText2(dir.getPath());
    }

    // Bail out if a symlink points back to a directory we're already inside of.
    if (FileSystemUtil.isSymLink(dir)) {
      try {
        if (parentDirectories.contains(dir.getCanonicalFile())) {
          return parentsToSkip;
        }
      }
      catch (IOException ignored) {
        // Can't canonicalize — proceed and hope for the best.
      }
    }

    try {
      parentDirectories.add(dir);
      File[] children = dir.listFiles();
      if (children == null) {
        children = ArrayUtil.EMPTY_FILE_ARRAY;
      }

      // Copy-on-write: only clone the BitSet when a detector is disabled for
      // the children, so the common case allocates nothing.
      BitSet enabledForChildren = enabledDetectors;
      for (int i = 0, detectorsLength = myDetectors.length; i < detectorsLength; i++) {
        if (!enabledDetectors.get(i)) continue;

        final ProjectStructureDetector.DirectoryProcessingResult result =
          myDetectors[i].detectRoots(dir, children, myBaseDir, myDetectedRoots[i]);

        if (!result.isProcessChildren()) {
          if (enabledForChildren == enabledDetectors) {
            enabledForChildren = new BitSet();
            enabledForChildren.or(enabledDetectors);
          }
          enabledForChildren.set(i, false);
        }

        final File parentToSkip = result.getParentToSkip();
        if (parentToSkip != null && !FileUtil.filesEqual(parentToSkip, dir)) {
          parentsToSkip.add(Pair.create(parentToSkip, i));
        }
      }

      if (!enabledForChildren.isEmpty()) {
        for (File child : children) {
          if (child.isDirectory()) {
            final List<Pair<File, Integer>> toSkip = processRecursively(child, enabledForChildren, parentDirectories);
            if (!toSkip.isEmpty()) {
              if (enabledForChildren == enabledDetectors) {
                enabledForChildren = new BitSet();
                enabledForChildren.or(enabledDetectors);
              }
              for (Pair<File, Integer> pair : toSkip) {
                enabledForChildren.set(pair.getSecond(), false);
                // Propagate the skip request further up unless it targets this dir.
                if (!FileUtil.filesEqual(pair.getFirst(), dir)) {
                  parentsToSkip.add(pair);
                }
              }
              if (enabledForChildren.isEmpty()) {
                break;
              }
            }
          }
        }
      }
      return parentsToSkip;
    }
    finally {
      parentDirectories.remove(dir);
    }
  }

  /**
   * Drops from {@code rootData} every root that lies under {@code root} but
   * cannot legally be contained by it; removes entries left empty.
   */
  private static void removeIncompatibleRoots(DetectedProjectRoot root, Map<File, DetectedRootData> rootData) {
    // Snapshot the values: we mutate the map while iterating.
    DetectedRootData[] allRoots = rootData.values().toArray(new DetectedRootData[0]);
    for (DetectedRootData child : allRoots) {
      final File childDirectory = child.getDirectory();
      if (FileUtil.isAncestor(root.getDirectory(), childDirectory, true)) {
        for (DetectedProjectRoot projectRoot : child.getAllRoots()) {
          if (!root.canContainRoot(projectRoot)) {
            child.removeRoot(projectRoot);
          }
        }
        if (child.isEmpty()) {
          rootData.remove(childDirectory);
        }
      }
    }
  }

  /**
   * Returns true if some already-recorded ancestor root cannot contain {@code root}.
   */
  private static boolean isUnderIncompatibleRoot(DetectedProjectRoot root, Map<File, DetectedRootData> rootData) {
    File directory = root.getDirectory().getParentFile();
    while (directory != null) {
      final DetectedRootData data = rootData.get(directory);
      if (data != null) {
        for (DetectedProjectRoot parentRoot : data.getAllRoots()) {
          if (!parentRoot.canContainRoot(root)) {
            return true;
          }
        }
      }
      directory = directory.getParentFile();
    }
    return false;
  }

  /**
   * Runs the detectors, reconciles conflicting roots, and merges redundant
   * content roots into a single result list for the import wizard.
   */
  private List<DetectedRootData> detectRoots() {
    Map<ProjectStructureDetector, List<DetectedProjectRoot>> roots = runDetectors();
    if (myProgressIndicator != null) {
      // NOTE(review): roots.values().size() is the number of detectors with
      // results, not the number of roots — confirm whether the total root
      // count was intended here.
      myProgressIndicator.setText2("Processing " + roots.values().size() + " project roots...");
    }

    Map<File, DetectedRootData> rootData = new LinkedHashMap<>();
    for (ProjectStructureDetector detector : roots.keySet()) {
      for (DetectedProjectRoot detectedRoot : roots.get(detector)) {
        if (isUnderIncompatibleRoot(detectedRoot, rootData)) {
          continue;
        }

        final DetectedRootData data = rootData.get(detectedRoot.getDirectory());
        if (data == null) {
          rootData.put(detectedRoot.getDirectory(), new DetectedRootData(detector, detectedRoot));
        }
        else {
          detectedRoot = data.addRoot(detector, detectedRoot);
        }
        removeIncompatibleRoots(detectedRoot, rootData);
      }
    }

    List<DetectedRootData> dataCollection = mergeContentRoots(rootData);
    if (myProgressIndicator != null) {
      myProgressIndicator.setText2("");
    }
    return dataCollection;
  }

  /**
   * Collapses detected content roots: when source roots exist (or all content
   * roots share a single surviving module type), individual content roots are
   * removed and at most one content root for the base directory is kept.
   */
  private List<DetectedRootData> mergeContentRoots(Map<File, DetectedRootData> rootData) {
    LOG.debug(rootData.size() + " roots found, merging content roots");
    boolean hasSourceRoots = false;
    Set<ModuleType> typesToReplace = new HashSet<>();
    Set<ModuleType> moduleTypes = new HashSet<>();
    for (DetectedRootData data : rootData.values()) {
      for (DetectedProjectRoot root : data.getAllRoots()) {
        if (root instanceof DetectedContentRoot) {
          Collections.addAll(typesToReplace, ((DetectedContentRoot)root).getTypesToReplace());
          moduleTypes.add(((DetectedContentRoot)root).getModuleType());
        }
        else if (root instanceof DetectedSourceRoot) {
          LOG.debug("Source root found: " + root.getDirectory() + ", content roots will be ignored");
          hasSourceRoots = true;
          break;
        }
      }
    }
    moduleTypes.removeAll(typesToReplace);

    if (hasSourceRoots || moduleTypes.size() <= 1) {
      Iterator<DetectedRootData> iterator = rootData.values().iterator();
      DetectedContentRoot firstRoot = null;
      ProjectStructureDetector firstDetector = null;
      while (iterator.hasNext()) {
        DetectedRootData data = iterator.next();
        for (DetectedProjectRoot root : data.getAllRoots()) {
          if (root instanceof DetectedContentRoot) {
            LOG.debug("Removed detected " + root.getRootTypeName() + " content root: " + root.getDirectory());
            Collection<ProjectStructureDetector> detectors = data.removeRoot(root);
            // Remember the first removed root of a surviving module type so a
            // replacement can be created at the base directory below.
            if ((firstRoot == null || firstDetector == null) && moduleTypes.contains(((DetectedContentRoot)root).getModuleType())) {
              firstRoot = (DetectedContentRoot)root;
              firstDetector = ContainerUtil.getFirstItem(detectors);
            }
          }
        }
        if (data.isEmpty()) {
          iterator.remove();
        }
      }
      if (!hasSourceRoots && firstRoot != null && firstDetector != null) {
        DetectedContentRoot baseRoot = new DetectedContentRoot(myBaseDir, firstRoot.getRootTypeName(), firstRoot.getModuleType());
        DetectedRootData data = rootData.get(myBaseDir);
        if (data == null) {
          rootData.put(myBaseDir, new DetectedRootData(firstDetector, baseRoot));
        }
        else {
          data.addRoot(firstDetector, baseRoot);
        }
        LOG.debug("Added " + firstRoot.getRootTypeName() + " content root for " + myBaseDir);
      }
    }
    return new ArrayList<>(rootData.values());
  }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 */
package bpsim.impl;

import bpsim.BpsimPackage;
import bpsim.Calendar;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Calendar</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link bpsim.impl.CalendarImpl#getValue <em>Value</em>}</li>
 *   <li>{@link bpsim.impl.CalendarImpl#getId <em>Id</em>}</li>
 *   <li>{@link bpsim.impl.CalendarImpl#getName <em>Name</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
// NOTE(review): every member below carries an EMF "@generated" tag — per EMF convention such
// code is regenerated from the model, so manual edits here would be overwritten unless the tag
// is changed to "@generated NOT". Code is therefore left byte-identical; comments only.
public class CalendarImpl extends EObjectImpl implements Calendar {
    /**
     * The default value of the '{@link #getValue() <em>Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getValue()
     * @generated
     * @ordered
     */
    protected static final String VALUE_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getValue() <em>Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getValue()
     * @generated
     * @ordered
     */
    protected String value = VALUE_EDEFAULT;

    /**
     * The default value of the '{@link #getId() <em>Id</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getId()
     * @generated
     * @ordered
     */
    protected static final String ID_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getId() <em>Id</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getId()
     * @generated
     * @ordered
     */
    protected String id = ID_EDEFAULT;

    /**
     * The default value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected static final String NAME_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected String name = NAME_EDEFAULT;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected CalendarImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return BpsimPackage.Literals.CALENDAR;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getValue() {
        return value;
    }

    /**
     * Sets the value and fires a SET notification when adapters are attached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setValue(String newValue) {
        String oldValue = value;
        value = newValue;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.CALENDAR__VALUE, oldValue, value));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the id and fires a SET notification when adapters are attached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setId(String newId) {
        String oldId = id;
        id = newId;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.CALENDAR__ID, oldId, id));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the name and fires a SET notification when adapters are attached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setName(String newName) {
        String oldName = name;
        name = newName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.CALENDAR__NAME, oldName, name));
    }

    /**
     * Reflective feature read used by the EMF runtime.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case BpsimPackage.CALENDAR__VALUE:
                return getValue();
            case BpsimPackage.CALENDAR__ID:
                return getId();
            case BpsimPackage.CALENDAR__NAME:
                return getName();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective feature write used by the EMF runtime.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case BpsimPackage.CALENDAR__VALUE:
                setValue((String)newValue);
                return;
            case BpsimPackage.CALENDAR__ID:
                setId((String)newValue);
                return;
            case BpsimPackage.CALENDAR__NAME:
                setName((String)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Restores a feature to its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case BpsimPackage.CALENDAR__VALUE:
                setValue(VALUE_EDEFAULT);
                return;
            case BpsimPackage.CALENDAR__ID:
                setId(ID_EDEFAULT);
                return;
            case BpsimPackage.CALENDAR__NAME:
                setName(NAME_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reports whether a feature differs from its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case BpsimPackage.CALENDAR__VALUE:
                return VALUE_EDEFAULT == null ? value != null : !VALUE_EDEFAULT.equals(value);
            case BpsimPackage.CALENDAR__ID:
                return ID_EDEFAULT == null ? id != null : !ID_EDEFAULT.equals(id);
            case BpsimPackage.CALENDAR__NAME:
                return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy())
            return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (value: ");
        result.append(value);
        result.append(", id: ");
        result.append(id);
        result.append(", name: ");
        result.append(name);
        result.append(')');
        return result.toString();
    }
} //CalendarImpl
package de.danoeh.antennapod.adapter; import android.content.Context; import android.content.res.TypedArray; import android.text.format.DateUtils; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Adapter; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.ProgressBar; import android.widget.TextView; import de.danoeh.antennapod.R; import de.danoeh.antennapod.feed.FeedItem; import de.danoeh.antennapod.feed.FeedManager; import de.danoeh.antennapod.feed.FeedMedia; import de.danoeh.antennapod.feed.MediaType; import de.danoeh.antennapod.storage.DownloadRequester; import de.danoeh.antennapod.util.Converter; import de.danoeh.antennapod.util.ThemeUtils; /** List adapter for items of feeds that the user has already subscribed to. */ public class InternalFeedItemlistAdapter extends DefaultFeedItemlistAdapter { private ActionButtonCallback callback; private boolean showFeedtitle; private int selectedItemIndex; public static final int SELECTION_NONE = -1; public InternalFeedItemlistAdapter(Context context, DefaultFeedItemlistAdapter.ItemAccess itemAccess, ActionButtonCallback callback, boolean showFeedtitle) { super(context, itemAccess); this.callback = callback; this.showFeedtitle = showFeedtitle; this.selectedItemIndex = SELECTION_NONE; } @Override public View getView(final int position, View convertView, ViewGroup parent) { Holder holder; final FeedItem item = getItem(position); if (convertView == null) { holder = new Holder(); LayoutInflater inflater = (LayoutInflater) getContext() .getSystemService(Context.LAYOUT_INFLATER_SERVICE); convertView = inflater.inflate(R.layout.feeditemlist_item, null); holder.title = (TextView) convertView .findViewById(R.id.txtvItemname); holder.lenSize = (TextView) convertView .findViewById(R.id.txtvLenSize); holder.butAction = (ImageButton) convertView .findViewById(R.id.butAction); 
holder.published = (TextView) convertView .findViewById(R.id.txtvPublished); holder.inPlaylist = (ImageView) convertView .findViewById(R.id.imgvInPlaylist); holder.downloaded = (ImageView) convertView .findViewById(R.id.imgvDownloaded); holder.type = (ImageView) convertView.findViewById(R.id.imgvType); holder.downloading = (ImageView) convertView .findViewById(R.id.imgvDownloading); if (showFeedtitle) { holder.feedtitle = (TextView) convertView .findViewById(R.id.txtvFeedname); } holder.statusPlaying = (View) convertView .findViewById(R.id.statusPlaying); holder.statusUnread = (View) convertView .findViewById(R.id.statusUnread); holder.episodeProgress = (ProgressBar) convertView .findViewById(R.id.pbar_episode_progress); convertView.setTag(holder); } else { holder = (Holder) convertView.getTag(); } if (!(getItemViewType(position) == Adapter.IGNORE_ITEM_VIEW_TYPE)) { convertView.setVisibility(View.VISIBLE); if (position == selectedItemIndex) { convertView.setBackgroundColor(convertView.getResources() .getColor(ThemeUtils.getSelectionBackgroundColor())); } else { convertView.setBackgroundResource(0); } holder.title.setText(item.getTitle()); if (showFeedtitle) { holder.feedtitle.setVisibility(View.VISIBLE); holder.feedtitle.setText(item.getFeed().getTitle()); } FeedItem.State state = item.getState(); switch (state) { case PLAYING: holder.statusPlaying.setVisibility(View.VISIBLE); holder.statusUnread.setVisibility(View.GONE); holder.episodeProgress.setVisibility(View.VISIBLE); break; case IN_PROGRESS: holder.statusPlaying.setVisibility(View.GONE); holder.statusUnread.setVisibility(View.GONE); holder.episodeProgress.setVisibility(View.VISIBLE); break; case NEW: holder.statusPlaying.setVisibility(View.GONE); holder.statusUnread.setVisibility(View.VISIBLE); holder.episodeProgress.setVisibility(View.GONE); break; default: holder.statusPlaying.setVisibility(View.GONE); holder.statusUnread.setVisibility(View.GONE); holder.episodeProgress.setVisibility(View.GONE); break; } 
holder.published.setText(convertView.getResources().getString( R.string.published_prefix) + DateUtils.getRelativeTimeSpanString( item.getPubDate().getTime(), System.currentTimeMillis(), 0, 0)); FeedMedia media = item.getMedia(); if (media == null) { holder.downloaded.setVisibility(View.GONE); holder.downloading.setVisibility(View.GONE); holder.inPlaylist.setVisibility(View.GONE); holder.type.setVisibility(View.GONE); holder.lenSize.setVisibility(View.GONE); } else { if (state == FeedItem.State.PLAYING || state == FeedItem.State.IN_PROGRESS) { if (media.getDuration() > 0) { holder.episodeProgress .setProgress((int) (((double) media .getPosition()) / media.getDuration() * 100)); holder.lenSize.setText(Converter .getDurationStringLong(media.getDuration() - media.getPosition())); } } else if (!media.isDownloaded()) { holder.lenSize.setText(getContext().getString( R.string.size_prefix) + Converter.byteToString(media.getSize())); } else { holder.lenSize.setText(getContext().getString( R.string.length_prefix) + Converter.getDurationStringLong(media .getDuration())); } holder.lenSize.setVisibility(View.VISIBLE); if (FeedManager.getInstance().isInQueue(item)) { holder.inPlaylist.setVisibility(View.VISIBLE); } else { holder.inPlaylist.setVisibility(View.GONE); } if (item.getMedia().isDownloaded()) { holder.downloaded.setVisibility(View.VISIBLE); } else { holder.downloaded.setVisibility(View.GONE); } if (DownloadRequester.getInstance().isDownloadingFile( item.getMedia())) { holder.downloading.setVisibility(View.VISIBLE); } else { holder.downloading.setVisibility(View.GONE); } TypedArray typeDrawables = getContext().obtainStyledAttributes( new int[] { R.attr.type_audio, R.attr.type_video }); MediaType mediaType = item.getMedia().getMediaType(); if (mediaType == MediaType.AUDIO) { holder.type.setImageDrawable(typeDrawables.getDrawable(0)); holder.type.setVisibility(View.VISIBLE); } else if (mediaType == MediaType.VIDEO) { 
holder.type.setImageDrawable(typeDrawables.getDrawable(1)); holder.type.setVisibility(View.VISIBLE); } else { holder.type.setImageBitmap(null); holder.type.setVisibility(View.GONE); } } holder.butAction.setFocusable(false); holder.butAction.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { callback.onActionButtonPressed(item); } }); } else { convertView.setVisibility(View.GONE); } return convertView; } static class Holder extends DefaultFeedItemlistAdapter.Holder { TextView feedtitle; ImageView inPlaylist; ImageView downloaded; ImageView downloading; ImageButton butAction; View statusUnread; View statusPlaying; ProgressBar episodeProgress; } public int getSelectedItemIndex() { return selectedItemIndex; } public void setSelectedItemIndex(int selectedItemIndex) { this.selectedItemIndex = selectedItemIndex; notifyDataSetChanged(); } }
package com.iobeam.api.resource;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/** Unit tests for {@code DataStore}: construction, JSON (de)serialization, merging, splitting. */
public class DataStoreTest {

    /** Constructor must copy the provided Set so later mutations don't affect the store. */
    @Test
    public void testDefensiveConstructorSet() throws Exception {
        Set<String> fields = new HashSet<String>();
        fields.add("a");
        fields.add("b");
        fields.add("c");
        DataStore batch = new DataStore(fields);
        List<String> batchFields = batch.getColumns();
        assertEquals(fields.size(), batchFields.size());
        fields.remove("a");
        assertEquals(fields.size() + 1, batch.getColumns().size());
    }

    /** Constructor must copy the provided List (order preserved, mutations isolated). */
    @Test
    public void testDefensiveConstructorList() throws Exception {
        List<String> fields = new ArrayList<String>();
        fields.add("a");
        fields.add("b");
        fields.add("c");
        DataStore batch = new DataStore(fields);
        List<String> batchFields = batch.getColumns();
        assertEquals(fields.size(), batchFields.size());
        for (int i = 0; i < fields.size(); i++) {
            assertEquals(fields.get(i), batchFields.get(i));
        }
        fields.set(0, "aa");
        assertEquals("a", batch.getColumns().get(0));
    }

    /** getColumns() must return a copy; mutating it must not affect the store. */
    @Test
    public void testDefensiveGetterFields() throws Exception {
        Set<String> fields = new HashSet<String>();
        fields.add("a");
        fields.add("b");
        fields.add("c");
        DataStore batch = new DataStore(fields);
        List<String> batchFields = batch.getColumns();
        batchFields.set(0, "aa");
        assertEquals("a", batch.getColumns().get(0));
    }

    @Test
    public void testFromJson() throws Exception {
        JSONObject obj = new JSONObject();
        JSONArray fields = new JSONArray();
        fields.put("time");
        fields.put("a");
        fields.put("b");
        obj.put("fields", fields);
        JSONArray data = new JSONArray();
        for (int i = 0; i < 3; i++) {
            JSONArray row = new JSONArray();
            row.put(i);
            row.put(i);
            row.put(i * 10);
            data.put(row);
        }
        obj.put("data", data);

        DataStore ds = DataStore.fromJson(obj);
        List<String> cols = ds.getColumns();
        assertEquals(2, cols.size());
        assertTrue(cols.contains("a"));
        assertTrue(cols.contains("b"));
        Map<Long, Map<String, Object>> rows = ds.getRows();
        assertEquals(3, rows.size());
        long i = 0;
        for (Long l : rows.keySet()) {
            assertEquals(i, l.longValue());
            if (rows.get(l).get("a") instanceof Integer) {
                assertEquals(i, ((Integer) rows.get(l).get("a")).longValue());
            } else if (rows.get(l).get("a") instanceof Long) {
                // FIX: this branch checked "instanceof Integer" twice (making the Long
                // cast below unreachable / a latent ClassCastException); it must check
                // Long, mirroring the "b" column handling.
                assertEquals(i, ((Long) rows.get(l).get("a")).longValue());
            } else {
                assertEquals(i, rows.get(l).get("a"));
            }
            if (rows.get(l).get("b") instanceof Integer) {
                assertEquals(i * 10, ((Integer) rows.get(l).get("b")).longValue());
            } else if (rows.get(l).get("b") instanceof Long) {
                assertEquals(i * 10, ((Long) rows.get(l).get("b")).longValue());
            } else {
                assertEquals(i, rows.get(l).get("b"));
            }
            i++;
        }
    }

    /** Rows wider than the declared fields must be rejected while parsing. */
    @Test(expected = JSONException.class)
    public void testFromJsonInvalidCols() throws Exception {
        JSONObject obj = new JSONObject();
        JSONArray fields = new JSONArray();
        fields.put("a");
        fields.put("b");
        obj.put("fields", fields);
        JSONArray data = new JSONArray();
        for (int i = 0; i < 3; i++) {
            JSONArray row = new JSONArray();
            row.put(i);
            row.put(i);
            row.put(i * 10);
            data.put(row);
        }
        obj.put("data", data);
        DataStore.fromJson(obj);
    }

    @Test(expected = DataStore.ReservedColumnException.class)
    public void testReservedColumn() throws Exception {
        DataStore ds = new DataStore("time", "colA");
        ds.clear();
    }

    @Test(expected = DataStore.ReservedColumnException.class)
    public void testReservedColumn2() throws Exception {
        DataStore ds = new DataStore("time_offset", "colA");
        ds.clear();
    }

    @Test(expected = DataStore.ReservedColumnException.class)
    public void testReservedColumn3() throws Exception {
        DataStore ds = new DataStore("colA", "all");
        ds.clear();
    }

    @Test(expected = DataStore.ReservedColumnException.class)
    public void testReservedColumn4() throws Exception {
        Set<String> cols = new HashSet<String>();
        cols.add("AlL");
        cols.add("colB");
        DataStore ds = new DataStore(cols);
        ds.clear();
    }

    /** Reserved-name checks must be case-insensitive. */
    @Test(expected = DataStore.ReservedColumnException.class)
    public void testReservedColumnDifferentCase() throws Exception {
        DataStore ds = new DataStore("tIMe", "colA");
        ds.clear();
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNullColumn() throws Exception {
        DataStore ds = new DataStore("good", null);
        ds.clear();
    }

    @Test(expected = IllegalArgumentException.class)
    public void testEmptyColumn() throws Exception {
        DataStore ds = new DataStore("good", "");
        ds.clear();
    }

    /** Adding a second column at an existing timestamp merges into the same row. */
    @Test
    public void testDuplicateAddTime() throws Exception {
        DataStore ds = new DataStore("col1", "col2");
        ds.add(0, "col1", 5);
        Map<String, Object> data = ds.getRows().get(0L);
        assertEquals(1, data.size());
        assertEquals(5, data.get("col1"));
        ds.add(0, "col2", 10);
        data = ds.getRows().get(0L);
        assertEquals(2, data.size());
        assertEquals(5, data.get("col1"));
        assertEquals(10, data.get("col2"));
    }

    /** All primitive wrapper types plus String are accepted as values. */
    @Test
    public void testAddSingleValue() throws Exception {
        DataStore ds = new DataStore("col1");
        ds.add("col1", 1);
        ds.add("col1", 100L);
        ds.add("col1", 1.0F);
        ds.add("col1", 5.0D);
        ds.add("col1", true);
        ds.add("col1", "string");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testAddBadSingleValue() throws Exception {
        DataStore ds = new DataStore("col1");
        ds.add("col1", new Object[]{1.0});
    }

    @Test
    public void testToJson() throws Exception {
        Set<String> fields = new HashSet<String>();
        fields.add("b");
        fields.add("a");
        DataStore batch = new DataStore(fields);
        List<Object[]> want = new ArrayList<Object[]>();
        Map<String, Object> row = new HashMap<String, Object>();
        row.put("a", 100);
        row.put("b", 200);
        batch.add(0, row);
        want.add(new Object[]{0, 100, 200});
        row.put("a", 300);
        row.remove("b");
        batch.add(100, row);
        want.add(new Object[]{100, 300, null});

        JSONObject json = batch.toJson();
        assertTrue(json.has("fields"));
        JSONArray jsonFields = json.getJSONArray("fields");
        // "time" field + those passed in = 1 + fields.length
        assertEquals(1 + fields.size(), jsonFields.length());
        assertEquals("time", jsonFields.get(0));
        for (int i = 0; i < fields.size(); i++) {
            assertTrue(fields.contains(jsonFields.getString(i + 1)));
        }
        assertTrue(json.has("data"));
        JSONArray data = json.getJSONArray("data");
        assertEquals(want.size(), data.length());
        for (int i = 0; i < data.length(); i++) {
            JSONArray r = data.getJSONArray(i);
            Object[] w = want.get(i);
            assertEquals(w.length, r.length());
            for (int j = 0; j < w.length; j++) {
                // The JSON lib assumes that a 'null' return value for a JSONArray.get() call
                // means it is an index out of bounds. So we have to do this weird hack to
                // handle nulls correctly (we already know the arrays are the same size).
                Integer have;
                try {
                    have = r.getInt(j);
                } catch (JSONException e) {
                    // getInt threw, so the stored value is null — valid only if we wanted null.
                    if (w[j] != null) {
                        fail("expected " + w[j] + " at [" + i + "][" + j + "] but found null");
                    }
                    continue;
                }
                if (w[j] == null) {
                    fail("expected null at [" + i + "][" + j + "] but found " + have);
                } else {
                    assertEquals(Integer.valueOf(w[j].toString()).intValue(), have.intValue());
                }
            }
        }
    }

    @Test
    public void testDataSize() throws Exception {
        DataStore b = new DataStore("a", "b", "c");
        assertEquals(0, b.getDataSize());
        b.add(0, new String[]{"a", "b", "c"}, new Object[]{1, 2, 3});
        assertEquals(3, b.getDataSize());
        b.add(1, new String[]{"a", "b"}, new Object[]{4, 5});
        assertEquals(6, b.getDataSize());
        b.reset();
        assertEquals(0, b.getDataSize());
    }

    /** Column-set equality is order-insensitive and null-safe. */
    @Test
    public void testHasSameColumns() throws Exception {
        DataStore b1 = new DataStore("a", "b", "c");
        assertTrue(b1.hasSameColumns(b1));
        assertFalse(b1.hasSameColumns(null));
        DataStore b2 = new DataStore("a", "c");
        assertFalse(b1.hasSameColumns(b2));
        assertFalse(b2.hasSameColumns(b1));
        DataStore b3 = new DataStore("a", "c", "b");
        assertTrue(b1.hasSameColumns(b3));
        assertTrue(b3.hasSameColumns(b1));
    }

    @Test
    public void testHasColumns() throws Exception {
        DataStore b1 = new DataStore("a", "b", "c");
        assertFalse(b1.hasColumns(null));
        assertFalse(b1.hasColumns(Collections.<String>emptyList()));
        String[] cols = {"a", "b", "c"};
        assertTrue(b1.hasColumns(Arrays.asList(cols)));
        cols = new String[]{"c", "a", "b"};
        assertTrue(b1.hasColumns(Arrays.asList(cols)));
        cols = new String[]{"a", "b"};
        assertFalse(b1.hasColumns(Arrays.asList(cols)));
        cols = new String[]{"a", "b", "c", "d"};
        assertFalse(b1.hasColumns(Arrays.asList(cols)));
    }

    @Test
    public void testMerge() throws Exception {
        DataStore b1 = new DataStore("a");
        b1.add(1, new String[]{"a"}, new Object[]{1});
        b1.add(2, new String[]{"a"}, new Object[]{2});
        b1.add(3, new String[]{"a"}, new Object[]{3});
        DataStore b2 = new DataStore("a");
        b2.add(4, new String[]{"a"}, new Object[]{4});
        b2.add(5, new String[]{"a"}, new Object[]{5});
        b2.add(6, new String[]{"a"}, new Object[]{6});
        b1.merge(b2);
        assertEquals(6, b1.getDataSize());

        DataStore wrong = new DataStore("b");
        wrong.add(7, new String[]{"b"}, new Object[]{7});
        try {
            b1.merge(wrong);
            fail("merge should reject a store with different columns");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    @Test
    public void testMergeWrong() throws Exception {
        DataStore b1 = new DataStore("a");
        DataStore wrong = new DataStore("b");
        wrong.add(7, new String[]{"b"}, new Object[]{7});
        try {
            b1.merge(wrong);
            fail("merge should reject a store with different columns");
        } catch (IllegalArgumentException e) {
            // expected
        }
        wrong = new DataStore("a", "b");
        wrong.add(7, new String[]{"a"}, new Object[]{7});
        try {
            b1.merge(wrong);
            fail("merge should reject a store with extra columns");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** split(batch, n) divides rows into ceil(rows / ceil(rows/n))-sized stores. */
    @Test
    public void testSplit() throws Exception {
        List<String> fields = new ArrayList<String>();
        fields.add("a");
        DataStore batch = new DataStore(fields);
        Map<String, Object> row = new HashMap<String, Object>();
        row.put("a", 100);
        batch.add(0, row);
        batch.add(1, row);
        batch.add(2, row);
        batch.add(3, row);
        List<DataStore> splits = DataStore.split(batch, 2);
        assertEquals(2, splits.size());
        splits = DataStore.split(batch, 3);
        assertEquals(2, splits.size());
        splits = DataStore.split(batch, 4);
        assertEquals(1, splits.size());
    }
}
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.primitives;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkElementIndex;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndexes;

import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Converter;

import java.io.Serializable;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.RandomAccess;

import javax.annotation.CheckReturnValue;

/**
 * Static utility methods pertaining to {@code short} primitives, that are not
 * already found in either {@link Short} or {@link Arrays}.
 *
 * <p>See the Guava User Guide article on <a href=
 * "https://github.com/google/guava/wiki/PrimitivesExplained">
 * primitive utilities</a>.
 *
 * @author Kevin Bourrillion
 * @since 1.0
 */
@CheckReturnValue
@GwtCompatible(emulated = true)
public final class Shorts {
  // Utility class: no instances.
  private Shorts() {}

  /**
   * The number of bytes required to represent a primitive {@code short}
   * value.
   */
  public static final int BYTES = Short.SIZE / Byte.SIZE;

  /**
   * The largest power of two that can be represented as a {@code short}.
   *
   * @since 10.0
   */
  public static final short MAX_POWER_OF_TWO = 1 << (Short.SIZE - 2);

  /**
   * Returns a hash code for {@code value}; equal to the result of invoking
   * {@code ((Short) value).hashCode()}.
   *
   * @param value a primitive {@code short} value
   * @return a hash code for the value
   */
  public static int hashCode(short value) {
    return value;
  }

  /**
   * Returns the {@code short} value that is equal to {@code value}, if
   * possible.
   *
   * @param value any value in the range of the {@code short} type
   * @return the {@code short} value that equals {@code value}
   * @throws IllegalArgumentException if {@code value} is greater than {@link
   *     Short#MAX_VALUE} or less than {@link Short#MIN_VALUE}
   */
  public static short checkedCast(long value) {
    short result = (short) value;
    if (result != value) {
      // don't use checkArgument here, to avoid boxing
      throw new IllegalArgumentException("Out of range: " + value);
    }
    return result;
  }

  /**
   * Returns the {@code short} nearest in value to {@code value}.
   *
   * @param value any {@code long} value
   * @return the same value cast to {@code short} if it is in the range of the
   *     {@code short} type, {@link Short#MAX_VALUE} if it is too large,
   *     or {@link Short#MIN_VALUE} if it is too small
   */
  public static short saturatedCast(long value) {
    if (value > Short.MAX_VALUE) {
      return Short.MAX_VALUE;
    }
    if (value < Short.MIN_VALUE) {
      return Short.MIN_VALUE;
    }
    return (short) value;
  }

  /**
   * Compares the two specified {@code short} values. The sign of the value
   * returned is the same as that of {@code ((Short) a).compareTo(b)}.
   *
   * <p><b>Note for Java 7 and later:</b> this method should be treated as
   * deprecated; use the equivalent {@link Short#compare} method instead.
   *
   * @param a the first {@code short} to compare
   * @param b the second {@code short} to compare
   * @return a negative value if {@code a} is less than {@code b}; a positive
   *     value if {@code a} is greater than {@code b}; or zero if they are equal
   */
  public static int compare(short a, short b) {
    // Subtraction cannot overflow here: both operands widen to int first,
    // and the short range is far smaller than the int range.
    return a - b; // safe due to restricted range
  }

  /**
   * Returns {@code true} if {@code target} is present as an element anywhere in
   * {@code array}.
   *
   * @param array an array of {@code short} values, possibly empty
   * @param target a primitive {@code short} value
   * @return {@code true} if {@code array[i] == target} for some value of {@code
   *     i}
   */
  public static boolean contains(short[] array, short target) {
    for (short value : array) {
      if (value == target) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns the index of the first appearance of the value {@code target} in
   * {@code array}.
   *
   * @param array an array of {@code short} values, possibly empty
   * @param target a primitive {@code short} value
   * @return the least index {@code i} for which {@code array[i] == target}, or
   *     {@code -1} if no such index exists.
   */
  public static int indexOf(short[] array, short target) {
    return indexOf(array, target, 0, array.length);
  }

  // TODO(kevinb): consider making this public
  // Linear scan over the half-open range [start, end).
  private static int indexOf(short[] array, short target, int start, int end) {
    for (int i = start; i < end; i++) {
      if (array[i] == target) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Returns the start position of the first occurrence of the specified {@code
   * target} within {@code array}, or {@code -1} if there is no such occurrence.
   *
   * <p>More formally, returns the lowest index {@code i} such that {@code
   * java.util.Arrays.copyOfRange(array, i, i + target.length)} contains exactly
   * the same elements as {@code target}.
   *
   * @param array the array to search for the sequence {@code target}
   * @param target the array to search for as a sub-sequence of {@code array}
   */
  public static int indexOf(short[] array, short[] target) {
    checkNotNull(array, "array");
    checkNotNull(target, "target");
    if (target.length == 0) {
      return 0; // empty sequence matches at position 0 by convention
    }

    // Naive O(n*m) sub-array search; labeled continue restarts at the next offset.
    outer:
    for (int i = 0; i < array.length - target.length + 1; i++) {
      for (int j = 0; j < target.length; j++) {
        if (array[i + j] != target[j]) {
          continue outer;
        }
      }
      return i;
    }
    return -1;
  }

  /**
   * Returns the index of the last appearance of the value {@code target} in
   * {@code array}.
   *
   * @param array an array of {@code short} values, possibly empty
   * @param target a primitive {@code short} value
   * @return the greatest index {@code i} for which {@code array[i] == target},
   *     or {@code -1} if no such index exists.
   */
  public static int lastIndexOf(short[] array, short target) {
    return lastIndexOf(array, target, 0, array.length);
  }

  // TODO(kevinb): consider making this public
  // Reverse linear scan over the half-open range [start, end).
  private static int lastIndexOf(short[] array, short target, int start, int end) {
    for (int i = end - 1; i >= start; i--) {
      if (array[i] == target) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Returns the least value present in {@code array}.
   *
   * @param array a <i>nonempty</i> array of {@code short} values
   * @return the value present in {@code array} that is less than or equal to
   *     every other value in the array
   * @throws IllegalArgumentException if {@code array} is empty
   */
  public static short min(short... array) {
    checkArgument(array.length > 0);
    short min = array[0];
    for (int i = 1; i < array.length; i++) {
      if (array[i] < min) {
        min = array[i];
      }
    }
    return min;
  }

  /**
   * Returns the greatest value present in {@code array}.
   *
   * @param array a <i>nonempty</i> array of {@code short} values
   * @return the value present in {@code array} that is greater than or equal to
   *     every other value in the array
   * @throws IllegalArgumentException if {@code array} is empty
   */
  public static short max(short... array) {
    checkArgument(array.length > 0);
    short max = array[0];
    for (int i = 1; i < array.length; i++) {
      if (array[i] > max) {
        max = array[i];
      }
    }
    return max;
  }

  /**
   * Returns the values from each provided array combined into a single array.
   * For example, {@code concat(new short[] {a, b}, new short[] {}, new
   * short[] {c}} returns the array {@code {a, b, c}}.
   *
   * @param arrays zero or more {@code short} arrays
   * @return a single array containing all the values from the source arrays, in
   *     order
   */
  public static short[] concat(short[]... arrays) {
    // First pass sizes the result exactly; second pass bulk-copies each source.
    int length = 0;
    for (short[] array : arrays) {
      length += array.length;
    }
    short[] result = new short[length];
    int pos = 0;
    for (short[] array : arrays) {
      System.arraycopy(array, 0, result, pos, array.length);
      pos += array.length;
    }
    return result;
  }

  /**
   * Returns a big-endian representation of {@code value} in a 2-element byte
   * array; equivalent to {@code
   * ByteBuffer.allocate(2).putShort(value).array()}. For example, the input
   * value {@code (short) 0x1234} would yield the byte array {@code {0x12,
   * 0x34}}.
   *
   * <p>If you need to convert and concatenate several values (possibly even of
   * different types), use a shared {@link java.nio.ByteBuffer} instance, or use
   * {@link com.google.common.io.ByteStreams#newDataOutput()} to get a growable
   * buffer.
   */
  @GwtIncompatible("doesn't work")
  public static byte[] toByteArray(short value) {
    return new byte[] {
        (byte) (value >> 8), (byte) value};
  }

  /**
   * Returns the {@code short} value whose big-endian representation is
   * stored in the first 2 bytes of {@code bytes}; equivalent to {@code
   * ByteBuffer.wrap(bytes).getShort()}. For example, the input byte array
   * {@code {0x54, 0x32}} would yield the {@code short} value {@code 0x5432}.
   *
   * <p>Arguably, it's preferable to use {@link java.nio.ByteBuffer}; that
   * library exposes much more flexibility at little cost in readability.
   *
   * @throws IllegalArgumentException if {@code bytes} has fewer than 2
   *     elements
   */
  @GwtIncompatible("doesn't work")
  public static short fromByteArray(byte[] bytes) {
    checkArgument(bytes.length >= BYTES,
        "array too small: %s < %s", bytes.length, BYTES);
    return fromBytes(bytes[0], bytes[1]);
  }

  /**
   * Returns the {@code short} value whose byte representation is the given 2
   * bytes, in big-endian order; equivalent to {@code Shorts.fromByteArray(new
   * byte[] {b1, b2})}.
   *
   * @since 7.0
   */
  @GwtIncompatible("doesn't work")
  public static short fromBytes(byte b1, byte b2) {
    // Mask b2 with 0xFF so its sign extension doesn't clobber the high byte.
    return (short) ((b1 << 8) | (b2 & 0xFF));
  }

  // Bidirectional String <-> Short converter backed by Short.decode/toString.
  private static final class ShortConverter
      extends Converter<String, Short> implements Serializable {
    static final ShortConverter INSTANCE = new ShortConverter();

    @Override
    protected Short doForward(String value) {
      return Short.decode(value);
    }

    @Override
    protected String doBackward(Short value) {
      return value.toString();
    }

    @Override
    public String toString() {
      return "Shorts.stringConverter()";
    }

    // Preserve the singleton across serialization round-trips.
    private Object readResolve() {
      return INSTANCE;
    }
    private static final long serialVersionUID = 1;
  }

  /**
   * Returns a serializable converter object that converts between strings and
   * shorts using {@link Short#decode} and {@link Short#toString()}.
   *
   * @since 16.0
   */
  @Beta
  public static Converter<String, Short> stringConverter() {
    return ShortConverter.INSTANCE;
  }

  /**
   * Returns an array containing the same values as {@code array}, but
   * guaranteed to be of a specified minimum length. If {@code array} already
   * has a length of at least {@code minLength}, it is returned directly.
   * Otherwise, a new array of size {@code minLength + padding} is returned,
   * containing the values of {@code array}, and zeroes in the remaining places.
   *
   * @param array the source array
   * @param minLength the minimum length the returned array must guarantee
   * @param padding an extra amount to "grow" the array by if growth is
   *     necessary
   * @throws IllegalArgumentException if {@code minLength} or {@code padding} is
   *     negative
   * @return an array containing the values of {@code array}, with guaranteed
   *     minimum length {@code minLength}
   */
  public static short[] ensureCapacity(
      short[] array, int minLength, int padding) {
    checkArgument(minLength >= 0, "Invalid minLength: %s", minLength);
    checkArgument(padding >= 0, "Invalid padding: %s", padding);
    return (array.length < minLength)
        ? copyOf(array, minLength + padding)
        : array;
  }

  // Arrays.copyOf() requires Java 6
  private static short[] copyOf(short[] original, int length) {
    short[] copy = new short[length];
    System.arraycopy(original, 0, copy, 0, Math.min(original.length, length));
    return copy;
  }

  /**
   * Returns a string containing the supplied {@code short} values separated
   * by {@code separator}. For example, {@code join("-", (short) 1, (short) 2,
   * (short) 3)} returns the string {@code "1-2-3"}.
   *
   * @param separator the text that should appear between consecutive values in
   *     the resulting string (but not at the start or end)
   * @param array an array of {@code short} values, possibly empty
   */
  public static String join(String separator, short... array) {
    checkNotNull(separator);
    if (array.length == 0) {
      return "";
    }

    // For pre-sizing a builder, just get the right order of magnitude
    StringBuilder builder = new StringBuilder(array.length * 6);
    builder.append(array[0]);
    for (int i = 1; i < array.length; i++) {
      builder.append(separator).append(array[i]);
    }
    return builder.toString();
  }

  /**
   * Returns a comparator that compares two {@code short} arrays
   * lexicographically. That is, it compares, using {@link
   * #compare(short, short)}), the first pair of values that follow any
   * common prefix, or when one array is a prefix of the other, treats the
   * shorter array as the lesser.
For example, {@code [] < [(short) 1] < * [(short) 1, (short) 2] < [(short) 2]}. * * <p>The returned comparator is inconsistent with {@link * Object#equals(Object)} (since arrays support only identity equality), but * it is consistent with {@link Arrays#equals(short[], short[])}. * * @see <a href="http://en.wikipedia.org/wiki/Lexicographical_order"> * Lexicographical order article at Wikipedia</a> * @since 2.0 */ public static Comparator<short[]> lexicographicalComparator() { return LexicographicalComparator.INSTANCE; } private enum LexicographicalComparator implements Comparator<short[]> { INSTANCE; @Override public int compare(short[] left, short[] right) { int minLength = Math.min(left.length, right.length); for (int i = 0; i < minLength; i++) { int result = Shorts.compare(left[i], right[i]); if (result != 0) { return result; } } return left.length - right.length; } } /** * Returns an array containing each value of {@code collection}, converted to * a {@code short} value in the manner of {@link Number#shortValue}. * * <p>Elements are copied from the argument collection as if by {@code * collection.toArray()}. Calling this method is as thread-safe as calling * that method. * * @param collection a collection of {@code Number} instances * @return an array containing the same values as {@code collection}, in the * same order, converted to primitives * @throws NullPointerException if {@code collection} or any of its elements * is null * @since 1.0 (parameter was {@code Collection<Short>} before 12.0) */ public static short[] toArray(Collection<? 
extends Number> collection) { if (collection instanceof ShortArrayAsList) { return ((ShortArrayAsList) collection).toShortArray(); } Object[] boxedArray = collection.toArray(); int len = boxedArray.length; short[] array = new short[len]; for (int i = 0; i < len; i++) { // checkNotNull for GWT (do not optimize) array[i] = ((Number) checkNotNull(boxedArray[i])).shortValue(); } return array; } /** * Returns a fixed-size list backed by the specified array, similar to {@link * Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)}, * but any attempt to set a value to {@code null} will result in a {@link * NullPointerException}. * * <p>The returned list maintains the values, but not the identities, of * {@code Short} objects written to or read from it. For example, whether * {@code list.get(0) == list.get(0)} is true for the returned list is * unspecified. * * @param backingArray the array to back the list * @return a list view of the array */ public static List<Short> asList(short... 
backingArray) { if (backingArray.length == 0) { return Collections.emptyList(); } return new ShortArrayAsList(backingArray); } @GwtCompatible private static class ShortArrayAsList extends AbstractList<Short> implements RandomAccess, Serializable { final short[] array; final int start; final int end; ShortArrayAsList(short[] array) { this(array, 0, array.length); } ShortArrayAsList(short[] array, int start, int end) { this.array = array; this.start = start; this.end = end; } @Override public int size() { return end - start; } @Override public boolean isEmpty() { return false; } @Override public Short get(int index) { checkElementIndex(index, size()); return array[start + index]; } @Override public boolean contains(Object target) { // Overridden to prevent a ton of boxing return (target instanceof Short) && Shorts.indexOf(array, (Short) target, start, end) != -1; } @Override public int indexOf(Object target) { // Overridden to prevent a ton of boxing if (target instanceof Short) { int i = Shorts.indexOf(array, (Short) target, start, end); if (i >= 0) { return i - start; } } return -1; } @Override public int lastIndexOf(Object target) { // Overridden to prevent a ton of boxing if (target instanceof Short) { int i = Shorts.lastIndexOf(array, (Short) target, start, end); if (i >= 0) { return i - start; } } return -1; } @Override public Short set(int index, Short element) { checkElementIndex(index, size()); short oldValue = array[start + index]; // checkNotNull for GWT (do not optimize) array[start + index] = checkNotNull(element); return oldValue; } @Override public List<Short> subList(int fromIndex, int toIndex) { int size = size(); checkPositionIndexes(fromIndex, toIndex, size); if (fromIndex == toIndex) { return Collections.emptyList(); } return new ShortArrayAsList(array, start + fromIndex, start + toIndex); } @Override public boolean equals(Object object) { if (object == this) { return true; } if (object instanceof ShortArrayAsList) { ShortArrayAsList that = 
(ShortArrayAsList) object; int size = size(); if (that.size() != size) { return false; } for (int i = 0; i < size; i++) { if (array[start + i] != that.array[that.start + i]) { return false; } } return true; } return super.equals(object); } @Override public int hashCode() { int result = 1; for (int i = start; i < end; i++) { result = 31 * result + Shorts.hashCode(array[i]); } return result; } @Override public String toString() { StringBuilder builder = new StringBuilder(size() * 6); builder.append('[').append(array[start]); for (int i = start + 1; i < end; i++) { builder.append(", ").append(array[i]); } return builder.append(']').toString(); } short[] toShortArray() { // Arrays.copyOfRange() is not available under GWT int size = size(); short[] result = new short[size]; System.arraycopy(array, start, result, 0, size); return result; } private static final long serialVersionUID = 0; } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/entity_type.proto
package com.google.cloud.dialogflow.cx.v3;
/** * * * <pre> * The response message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.ListEntityTypesResponse} */
public final class ListEntityTypesResponse extends com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.ListEntityTypesResponse)
ListEntityTypesResponseOrBuilder { private static final long serialVersionUID = 0L;
// Use ListEntityTypesResponse.newBuilder() to construct.
// NOTE(review): protoc-generated message class — do not hand-edit; change entity_type.proto and regenerate.
// The parsing constructor below builds the repeated entity_types list lazily (bit 0x1 tracks
// whether the mutable ArrayList was created) and freezes it as unmodifiable in the finally block.
private ListEntityTypesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEntityTypesResponse() { entityTypes_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEntityTypesResponse(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListEntityTypesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { entityTypes_ = new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3.EntityType>(); mutable_bitField0_ |= 0x00000001; } entityTypes_.add( input.readMessage( com.google.cloud.dialogflow.cx.v3.EntityType.parser(), extensionRegistry)); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); nextPageToken_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { entityTypes_ = java.util.Collections.unmodifiableList(entityTypes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final
// NOTE(review): generated descriptor plumbing, field accessors (entity_types, next_page_token),
// and wire-format serialization (writeTo/getSerializedSize) — do not hand-edit.
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse.class, com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse.Builder.class); } public static final int ENTITY_TYPES_FIELD_NUMBER = 1; private java.util.List<com.google.cloud.dialogflow.cx.v3.EntityType> entityTypes_; /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.dialogflow.cx.v3.EntityType> getEntityTypesList() { return entityTypes_; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.dialogflow.cx.v3.EntityTypeOrBuilder> getEntityTypesOrBuilderList() { return entityTypes_; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ @java.lang.Override public int getEntityTypesCount() { return entityTypes_.size(); } /** * * * <pre> * The list of entity types. 
 There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3.EntityType getEntityTypes(int index) { return entityTypes_.get(index); } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3.EntityTypeOrBuilder getEntityTypesOrBuilder(int index) { return entityTypes_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; private volatile java.lang.Object nextPageToken_; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
 */ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < entityTypes_.size(); i++) { output.writeMessage(1, entityTypes_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < entityTypes_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, entityTypes_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse other = (com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse) obj; if (!getEntityTypesList().equals(other.getEntityTypesList())) return false; if
// NOTE(review): generated equals/hashCode and the standard family of parseFrom/parseDelimitedFrom
// overloads delegating to PARSER — do not hand-edit.
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getEntityTypesCount() > 0) { hash = (37 * hash) + ENTITY_TYPES_FIELD_NUMBER; hash = (53 * hash) + getEntityTypesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
 com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
// NOTE(review): generated Builder for ListEntityTypesResponse — clear/build/mergeFrom plus the
// RepeatedFieldBuilderV3 plumbing for entity_types; do not hand-edit.
com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.ListEntityTypesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.ListEntityTypesResponse)
com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse.class, com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse.Builder.class); }
// Construct using com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
 (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEntityTypesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (entityTypesBuilder_ == null) { entityTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { entityTypesBuilder_.clear(); } nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesResponse_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse build() { com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse buildPartial() { com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse result = new com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse(this); int from_bitField0_ = bitField0_; if (entityTypesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { entityTypes_ = java.util.Collections.unmodifiableList(entityTypes_); bitField0_ = (bitField0_ & ~0x00000001); } result.entityTypes_ = entityTypes_; } else { result.entityTypes_ = entityTypesBuilder_.build(); } result.nextPageToken_ = nextPageToken_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
 clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse) { return mergeFrom((com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse other) { if (other == com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse.getDefaultInstance()) return this; if (entityTypesBuilder_ == null) { if (!other.entityTypes_.isEmpty()) { if (entityTypes_.isEmpty()) { entityTypes_ = other.entityTypes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureEntityTypesIsMutable(); entityTypes_.addAll(other.entityTypes_); } onChanged(); } } else { if (!other.entityTypes_.isEmpty()) { if (entityTypesBuilder_.isEmpty()) { entityTypesBuilder_.dispose(); entityTypesBuilder_ = null; entityTypes_ = other.entityTypes_; bitField0_ = (bitField0_ & ~0x00000001); entityTypesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
 getEntityTypesFieldBuilder() : null; } else { entityTypesBuilder_.addAllMessages(other.entityTypes_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<com.google.cloud.dialogflow.cx.v3.EntityType> entityTypes_ = java.util.Collections.emptyList(); private void ensureEntityTypesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { entityTypes_ = new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3.EntityType>(entityTypes_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.EntityType, com.google.cloud.dialogflow.cx.v3.EntityType.Builder, com.google.cloud.dialogflow.cx.v3.EntityTypeOrBuilder> entityTypesBuilder_; /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. 
 * NOTE(review): generated repeated-field mutators for entity_types below (get/set/add/addAll/clear/remove) — edit entity_type.proto, not this file.
 * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public java.util.List<com.google.cloud.dialogflow.cx.v3.EntityType> getEntityTypesList() { if (entityTypesBuilder_ == null) { return java.util.Collections.unmodifiableList(entityTypes_); } else { return entityTypesBuilder_.getMessageList(); } } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public int getEntityTypesCount() { if (entityTypesBuilder_ == null) { return entityTypes_.size(); } else { return entityTypesBuilder_.getCount(); } } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public com.google.cloud.dialogflow.cx.v3.EntityType getEntityTypes(int index) { if (entityTypesBuilder_ == null) { return entityTypes_.get(index); } else { return entityTypesBuilder_.getMessage(index); } } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder setEntityTypes(int index, com.google.cloud.dialogflow.cx.v3.EntityType value) { if (entityTypesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntityTypesIsMutable(); entityTypes_.set(index, value); onChanged(); } else { entityTypesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. 
 * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder setEntityTypes( int index, com.google.cloud.dialogflow.cx.v3.EntityType.Builder builderForValue) { if (entityTypesBuilder_ == null) { ensureEntityTypesIsMutable(); entityTypes_.set(index, builderForValue.build()); onChanged(); } else { entityTypesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder addEntityTypes(com.google.cloud.dialogflow.cx.v3.EntityType value) { if (entityTypesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntityTypesIsMutable(); entityTypes_.add(value); onChanged(); } else { entityTypesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder addEntityTypes(int index, com.google.cloud.dialogflow.cx.v3.EntityType value) { if (entityTypesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntityTypesIsMutable(); entityTypes_.add(index, value); onChanged(); } else { entityTypesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. 
 * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder addEntityTypes( com.google.cloud.dialogflow.cx.v3.EntityType.Builder builderForValue) { if (entityTypesBuilder_ == null) { ensureEntityTypesIsMutable(); entityTypes_.add(builderForValue.build()); onChanged(); } else { entityTypesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder addEntityTypes( int index, com.google.cloud.dialogflow.cx.v3.EntityType.Builder builderForValue) { if (entityTypesBuilder_ == null) { ensureEntityTypesIsMutable(); entityTypes_.add(index, builderForValue.build()); onChanged(); } else { entityTypesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder addAllEntityTypes( java.lang.Iterable<? extends com.google.cloud.dialogflow.cx.v3.EntityType> values) { if (entityTypesBuilder_ == null) { ensureEntityTypesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, entityTypes_); onChanged(); } else { entityTypesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. 
 * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder clearEntityTypes() { if (entityTypesBuilder_ == null) { entityTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { entityTypesBuilder_.clear(); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public Builder removeEntityTypes(int index) { if (entityTypesBuilder_ == null) { ensureEntityTypesIsMutable(); entityTypes_.remove(index); onChanged(); } else { entityTypesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public com.google.cloud.dialogflow.cx.v3.EntityType.Builder getEntityTypesBuilder(int index) { return getEntityTypesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public com.google.cloud.dialogflow.cx.v3.EntityTypeOrBuilder getEntityTypesOrBuilder( int index) { if (entityTypesBuilder_ == null) { return entityTypes_.get(index); } else { return entityTypesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public java.util.List<? 
extends com.google.cloud.dialogflow.cx.v3.EntityTypeOrBuilder> getEntityTypesOrBuilderList() { if (entityTypesBuilder_ != null) { return entityTypesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(entityTypes_); } } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public com.google.cloud.dialogflow.cx.v3.EntityType.Builder addEntityTypesBuilder() { return getEntityTypesFieldBuilder() .addBuilder(com.google.cloud.dialogflow.cx.v3.EntityType.getDefaultInstance()); } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public com.google.cloud.dialogflow.cx.v3.EntityType.Builder addEntityTypesBuilder(int index) { return getEntityTypesFieldBuilder() .addBuilder(index, com.google.cloud.dialogflow.cx.v3.EntityType.getDefaultInstance()); } /** * * * <pre> * The list of entity types. There will be a maximum number of items returned * based on the page_size field in the request. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3.EntityType entity_types = 1;</code> */ public java.util.List<com.google.cloud.dialogflow.cx.v3.EntityType.Builder> getEntityTypesBuilderList() { return getEntityTypesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.EntityType, com.google.cloud.dialogflow.cx.v3.EntityType.Builder, com.google.cloud.dialogflow.cx.v3.EntityTypeOrBuilder> getEntityTypesFieldBuilder() { if (entityTypesBuilder_ == null) { entityTypesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.EntityType, com.google.cloud.dialogflow.cx.v3.EntityType.Builder, com.google.cloud.dialogflow.cx.v3.EntityTypeOrBuilder>( entityTypes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); entityTypes_ = null; } return entityTypesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.ListEntityTypesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.ListEntityTypesResponse) private static final com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse(); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEntityTypesResponse> PARSER = new com.google.protobuf.AbstractParser<ListEntityTypesResponse>() { @java.lang.Override public ListEntityTypesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListEntityTypesResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListEntityTypesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEntityTypesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.dbcp2.cpdsadapter;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.apache.commons.dbcp2.DelegatingConnection;
import org.apache.commons.dbcp2.DelegatingPreparedStatement;

/**
 * The {@code Connection} handed out by {@code PooledConnectionImpl.getConnection()}.
 * <p>
 * Most methods simply delegate to the wrapped JDBC 1.x {@code Connection}; the
 * notable exceptions are {@link #close()}, which returns the physical connection
 * to the pool instead of closing it, and the {@code prepareStatement} overloads,
 * which route through the owning {@code PooledConnectionImpl} so that prepared
 * statements can be pooled. Per the JDBC specification, once {@code close()} has
 * been called any further use of this object results in an {@code SQLException}.
 * <p>
 * Extending {@code DelegatingConnection} allows (optionally restricted) access
 * to the underlying physical connection.
 *
 * @author John D. McNally
 * @version $Id: ConnectionImpl.java 1658644 2015-02-10 08:59:07Z tn $
 * @since 2.0
 */
class ConnectionImpl extends DelegatingConnection<Connection> {

    /** Whether {@link #getDelegate()}/{@link #getInnermostDelegate()} may expose the raw connection. */
    private final boolean accessToUnderlyingConnectionAllowed;

    /** The pooled connection that created this wrapper; notified when this logical handle closes. */
    private final PooledConnectionImpl pooledConnection;

    /**
     * Creates a {@code ConnectionImpl}.
     *
     * @param pooledConnection the {@code PooledConnectionImpl} that owns this handle
     * @param connection the JDBC 1.x {@code Connection} being wrapped
     * @param accessToUnderlyingConnectionAllowed if {@code true}, the underlying
     *        connection is reachable through the delegate accessors
     */
    ConnectionImpl(PooledConnectionImpl pooledConnection, Connection connection,
            boolean accessToUnderlyingConnectionAllowed) {
        super(connection);
        this.pooledConnection = pooledConnection;
        this.accessToUnderlyingConnectionAllowed = accessToUnderlyingConnectionAllowed;
    }

    /**
     * Marks this handle as closed and notifies the pool that the physical
     * connection is available again. In accordance with the JDBC specification,
     * this connection cannot be used after {@code close()} is called; any
     * further usage results in an {@code SQLException}.
     *
     * @throws SQLException if the connection could not be passivated
     */
    @Override
    public void close() throws SQLException {
        if (isClosedInternal()) {
            return; // already closed: closing twice is a no-op
        }
        try {
            passivate();
        } finally {
            // Even if passivation fails, flag the handle closed and let the
            // pool reclaim the physical connection.
            setClosedInternal(true);
            pooledConnection.notifyListeners();
        }
    }

    /** Wraps a (possibly pooled) statement so callers see this connection as its parent. */
    private PreparedStatement wrap(PreparedStatement statement) {
        return new DelegatingPreparedStatement(this, statement);
    }

    /**
     * If {@code PreparedStatement} pooling is enabled in {@link DriverAdapterCPDS},
     * a pooled statement may be returned; otherwise this delegates to the wrapped
     * JDBC 1.x {@link java.sql.Connection}.
     *
     * @param sql the SQL statement to prepare
     * @return the prepared statement
     * @throws SQLException if this connection is closed or the wrapped connection fails
     */
    @Override
    public PreparedStatement prepareStatement(String sql) throws SQLException {
        checkOpen();
        try {
            return wrap(pooledConnection.prepareStatement(sql));
        } catch (SQLException e) {
            handleException(e); // rethrows; the return below is never reached
            return null;
        }
    }

    /**
     * If {@code PreparedStatement} pooling is enabled in {@link DriverAdapterCPDS},
     * a pooled statement may be returned; otherwise this delegates to the wrapped
     * JDBC 1.x {@link java.sql.Connection}.
     *
     * @throws SQLException if this connection is closed or the wrapped connection fails
     */
    @Override
    public PreparedStatement prepareStatement(String sql, int resultSetType,
            int resultSetConcurrency) throws SQLException {
        checkOpen();
        try {
            return wrap(pooledConnection.prepareStatement(sql, resultSetType, resultSetConcurrency));
        } catch (SQLException e) {
            handleException(e); // rethrows
            return null;
        }
    }

    @Override
    public PreparedStatement prepareStatement(String sql, int resultSetType,
            int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        checkOpen();
        try {
            return wrap(pooledConnection.prepareStatement(sql, resultSetType,
                    resultSetConcurrency, resultSetHoldability));
        } catch (SQLException e) {
            handleException(e); // rethrows
            return null;
        }
    }

    @Override
    public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
            throws SQLException {
        checkOpen();
        try {
            return wrap(pooledConnection.prepareStatement(sql, autoGeneratedKeys));
        } catch (SQLException e) {
            handleException(e); // rethrows
            return null;
        }
    }

    @Override
    public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
            throws SQLException {
        checkOpen();
        try {
            return wrap(pooledConnection.prepareStatement(sql, columnIndexes));
        } catch (SQLException e) {
            handleException(e); // rethrows
            return null;
        }
    }

    @Override
    public PreparedStatement prepareStatement(String sql, String[] columnNames)
            throws SQLException {
        checkOpen();
        try {
            return wrap(pooledConnection.prepareStatement(sql, columnNames));
        } catch (SQLException e) {
            handleException(e); // rethrows
            return null;
        }
    }

    //
    // Methods for accessing the delegate connection
    //

    /**
     * If {@code false}, {@link #getDelegate()} and {@link #getInnermostDelegate()}
     * return {@code null}.
     *
     * @return {@code true} if access to the underlying connection is allowed
     */
    public boolean isAccessToUnderlyingConnectionAllowed() {
        return accessToUnderlyingConnectionAllowed;
    }

    /**
     * Gets the delegated connection, if allowed.
     *
     * @return the internal connection, or {@code null} if access is not allowed
     * @see #isAccessToUnderlyingConnectionAllowed()
     */
    @Override
    public Connection getDelegate() {
        return isAccessToUnderlyingConnectionAllowed() ? getDelegateInternal() : null;
    }

    /**
     * Gets the innermost connection, if allowed.
     *
     * @return the innermost internal connection, or {@code null} if access is not allowed
     * @see #isAccessToUnderlyingConnectionAllowed()
     */
    @Override
    public Connection getInnermostDelegate() {
        return isAccessToUnderlyingConnectionAllowed() ? super.getInnermostDelegateInternal() : null;
    }
}