gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Copyright 2015-2017 Red Hat, Inc, and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.swarm; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLClassLoader; import java.nio.file.Files; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.UUID; import java.util.jar.JarFile; import java.util.logging.LogManager; import java.util.stream.Collectors; import java.util.zip.ZipEntry; import javax.enterprise.inject.Vetoed; import org.jboss.jandex.ClassInfo; import org.jboss.jandex.DotName; import org.jboss.jandex.Index; import org.jboss.jandex.Indexer; import org.jboss.modules.Module; import org.jboss.modules.ModuleLoadException; import org.jboss.modules.ModuleLoader; import org.jboss.modules.Resource; import org.jboss.modules.filter.PathFilters; import org.jboss.modules.log.StreamModuleLogger; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.ArchivePath; import org.jboss.shrinkwrap.api.Domain; import org.jboss.shrinkwrap.api.Node; import org.jboss.shrinkwrap.api.ShrinkWrap; import 
org.jboss.shrinkwrap.api.exporter.ExplodedExporter; import org.jboss.shrinkwrap.api.exporter.ZipExporter; import org.jboss.shrinkwrap.api.importer.ExplodedImporter; import org.jboss.shrinkwrap.api.importer.ZipImporter; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.jboss.shrinkwrap.impl.base.exporter.ExplodedExporterImpl; import org.jboss.shrinkwrap.impl.base.exporter.zip.ZipExporterImpl; import org.jboss.shrinkwrap.impl.base.importer.ExplodedImporterImpl; import org.jboss.shrinkwrap.impl.base.importer.zip.ZipImporterImpl; import org.jboss.shrinkwrap.impl.base.spec.JavaArchiveImpl; import org.jboss.shrinkwrap.impl.base.spec.WebArchiveImpl; import org.wildfly.swarm.bootstrap.env.ApplicationEnvironment; import org.wildfly.swarm.bootstrap.logging.BackingLoggerManager; import org.wildfly.swarm.bootstrap.logging.BootstrapLogger; import org.wildfly.swarm.bootstrap.modules.BootModuleLoader; import org.wildfly.swarm.bootstrap.performance.Performance; import org.wildfly.swarm.bootstrap.util.BootstrapProperties; import org.wildfly.swarm.bootstrap.util.BootstrapUtil; import org.wildfly.swarm.cli.CommandLine; import org.wildfly.swarm.container.DeploymentException; import org.wildfly.swarm.container.config.ClassLoaderConfigLocator; import org.wildfly.swarm.container.config.ConfigViewFactory; import org.wildfly.swarm.container.internal.Server; import org.wildfly.swarm.container.internal.ServerBootstrap; import org.wildfly.swarm.container.internal.WeldShutdown; import org.wildfly.swarm.internal.OutboundSocketBindingRequest; import org.wildfly.swarm.internal.SocketBindingRequest; import org.wildfly.swarm.internal.SwarmMessages; import org.wildfly.swarm.jdk.specific.JarFiles; import org.wildfly.swarm.spi.api.ArtifactLookup; import org.wildfly.swarm.spi.api.ConfigurationFilter; import org.wildfly.swarm.spi.api.Fraction; import org.wildfly.swarm.spi.api.OutboundSocketBinding; import 
org.wildfly.swarm.spi.api.SocketBinding; import org.wildfly.swarm.spi.api.StageConfig; import org.wildfly.swarm.spi.api.SwarmProperties; import org.wildfly.swarm.spi.api.config.ConfigView; /** * Default {@code main(...)} if an application does not provide one. * * <p>This simply constructs a default container, starts it and performs * a default deployment. Typically only useful for barren WAR applications.</p> * * <p>If providing their own {@code main(...)}, then the following needs to be known:</p> * * <ul> * <li>Any usage of {@code java.util.logging} may only follow the initial constructor * of {@code new Swarm()}.</li> * <li>While this object may appear to be thread-safe, it does rely on general * static instances for some facilities. Therefore, it should not be instantiated * several times concurrently.</li> * <li>It can be instantiated multiple times <b>serially</b>, as long as one instance * is disposed before another is created and used. This limitation may be removed * in a future version, if required.</li> * </ul> * * <p>If using this class either directly or implicit as a {@code main(...)}, certain * command-line facilities are available. If used directly, the user should pass the * {@code String...args} from his own {@code main(...)} to the constructor of this * class if these command-line facilities are desired.</p> * * <p>Many internal aspects of the runtime container may be configured using the Java * APIs for various fractions, XML configuration files, YAML configuration files, and * Java system properties.</p> * * <p>Configuration ordering works as follows: Fractions configured through an XML * configuration file takes precedence over the same fraction configured through the * Java API. YAML or system properties may override portions or attributes of fractions * defined either way. 
A system property override binds more strongly than YAML configuration.</p>
 *
 * @author Bob McWhirter
 * @author Ken Finnigan
 */
@Vetoed
public class Swarm {

    // Global handle to the most-recently-started container; assigned in start().
    // NOTE(review): public mutable static kept for backward compatibility.
    public static Swarm INSTANCE = null;

    private static final String BOOT_MODULE_PROPERTY = "boot.module.loader";

    public static final String APPLICATION_MODULE_NAME = "thorntail.application";

    private static final String CONTAINER_MODULE_NAME = "swarm.container";

    // NOTE(review): not referenced in this file; possibly used by tooling — kept.
    private static final String PROJECT_STAGES_FILE = "project-stages.yml";

    // Instance created by the default main(); used by stopMain()/displayUsage().
    private static Swarm swarm;

    private final CommandLine commandLine;

    /**
     * Construct a new, un-started container.
     *
     * @throws Exception If an error occurs performing classloading and initialization magic.
     */
    public Swarm() throws Exception {
        this(Boolean.getBoolean(SwarmProperties.DEBUG_BOOTSTRAP));
    }

    /**
     * Construct a new, un-started container.
     *
     * @param debugBootstrap - flag to indicate if the module layer should be put into bootstrap debug mode. Same as
     *                       the jboss-module -debuglog mode which enables trace logging to System.out during the
     *                       initial bootstrap of the module layer.
     * @throws Exception If an error occurs performing classloading and initialization magic.
     */
    public Swarm(boolean debugBootstrap) throws Exception {
        this(debugBootstrap, new String[]{});
    }

    /**
     * Construct a new, un-started container, configured using command-line arguments.
     *
     * @param args The command-line arguments.
     * @throws Exception If an error occurs performing classloading and initialization magic.
     */
    public Swarm(String... args) throws Exception {
        this(false, args);
    }

    /**
     * Construct a new, un-started container, configured using command-line arguments.
     *
     * @param debugBootstrap - flag to indicate if the module layer should be put into bootstrap debug mode. Same as
     *                       the jboss-module -debuglog mode which enables trace logging to System.out during the
     *                       initial bootstrap of the module layer.
     * @param args           The command-line arguments.
     * @throws Exception If an error occurs performing classloading and initialization magic.
     */
    public Swarm(boolean debugBootstrap, String... args) throws Exception {
        this(debugBootstrap, null, null, args);
    }

    public Swarm(Properties properties, String... args) throws Exception {
        this(false, properties, null, args);
    }

    public Swarm(boolean debug, Properties properties, String... args) throws Exception {
        this(debug, properties, null, args);
    }

    public Swarm(Properties properties, Map<String, String> environment, String... args) throws Exception {
        this(false, properties, environment, args);
    }

    /**
     * Master constructor: all other constructors delegate here.
     *
     * @param debugBootstrap enable jboss-modules trace logging to System.err during bootstrap.
     * @param properties     optional seed properties for the configuration view (may be null).
     * @param environment    optional seed environment map for the configuration view (may be null).
     * @param args           command-line arguments.
     * @throws Exception If an error occurs performing classloading and initialization magic.
     */
    public Swarm(boolean debugBootstrap, Properties properties, Map<String, String> environment, String... args) throws Exception {
        if (System.getProperty(BOOT_MODULE_PROPERTY) == null) {
            System.setProperty(BOOT_MODULE_PROPERTY, BootModuleLoader.class.getName());
        }
        if (debugBootstrap) {
            Module.setModuleLogger(new StreamModuleLogger(System.err));
        }

        BootstrapUtil.convertSwarmSystemPropertiesToThorntail();

        setArgs(args);
        this.debugBootstrap = debugBootstrap;

        // Need to setup Logging here so that Weld doesn't default to JUL.
        try {
            Module loggingModule = Module.getBootModuleLoader().loadModule("org.wildfly.swarm.logging:runtime");

            ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(loggingModule.getClassLoader());
                System.setProperty("java.util.logging.manager", "org.jboss.logmanager.LogManager");
                // force logging init
                LogManager.getLogManager();
                Class<?> logManagerClass = loggingModule.getClassLoader()
                        .loadClass("org.wildfly.swarm.container.runtime.logging.JBossLoggingManager");
                BootstrapLogger.setBackingLoggerManager((BackingLoggerManager) logManagerClass.newInstance());
            } finally {
                Thread.currentThread().setContextClassLoader(originalCl);
            }
        } catch (ModuleLoadException e) {
            // Deliberate best-effort: the container still runs without configured logging.
            System.err.println("[WARN] logging not available, logging will not be configured");
        }

        installModuleMBeanServer();
        createShrinkWrapDomain();

        this.commandLine = CommandLine.parse(args);
        this.configView = ConfigViewFactory.defaultFactory(properties, environment);

        if (ApplicationEnvironment.get().isHollow()) {
            if (!this.commandLine.extraArguments().isEmpty()) {
                // The first extra argument of a hollow jar is the deployment; expose its
                // config resources to the config view. The classloader is intentionally
                // left open: the locator retains it for the life of the container.
                URLClassLoader firstDeploymentCL = new URLClassLoader(new URL[]{
                        new File(this.commandLine.extraArguments().get(0)).toURI().toURL()
                });
                this.configView.addLocator(new ClassLoaderConfigLocator(firstDeploymentCL));
            }
        }

        this.commandLine.apply(this);
        initializeConfigView(properties);
        // From here on, withConfig()/withProfile() take effect immediately instead of
        // being deferred (see isConstructing checks in those methods).
        this.isConstructing = false;
    }

    /**
     * Retrieve the parsed command-line from this instance.
     *
     * <p>This method is only applicable if the {@code String...args} was passed through
     * the constructor or {@link #setArgs(String...)} was called to provide the command-line
     * arguments.</p>
     *
     * @return The parsed command-line.
     */
    public CommandLine getCommandLine() {
        return this.commandLine;
    }

    /**
     * Pass the effective command-line arguments to this instance.
     *
     * @param args The arguments.
     */
    public void setArgs(String... args) {
        this.args = args;
    }

    /**
     * Specify an XML configuration file (in usual WildFly {@code standalone.xml}) format.
     *
     * <p>Usage of an XML configuration file is <b>not</b> exclusive with other configuration
     * methods.</p>
     *
     * @param url The URL of the XML configuration file.
     * @return This instance.
     * @see #withConfig(URL)
     */
    public Swarm withXmlConfig(URL url) {
        this.xmlConfig = Optional.of(url);
        return this;
    }

    /**
     * Add a YAML configuration URL. Applied immediately after construction completes;
     * during construction it is queued and loaded by {@link #initializeConfigView(Properties)}.
     *
     * @param url The URL of the YAML configuration.
     * @return This instance.
     */
    public Swarm withConfig(URL url) throws IOException {
        if (!isConstructing) {
            String uuid = UUID.randomUUID().toString();
            this.configView.load(uuid, url);
            this.configView.withProfile(uuid);
        }
        this.configs.add(url);
        return this;
    }

    /**
     * Activate a named configuration profile. Applied immediately after construction
     * completes; during construction it is queued.
     *
     * @param name The profile name.
     * @return This instance.
     */
    public Swarm withProfile(String name) {
        if (!isConstructing) {
            this.configView.load(name);
            this.configView.withProfile(name);
        }
        this.profiles.add(name);
        return this;
    }

    /**
     * Set a single configuration property.
     *
     * @param name  The property name.
     * @param value The property value.
     * @return This instance.
     */
    public Swarm withProperty(String name, String value) {
        this.configView.withProperty(name, value);
        return this;
    }

    /**
     * Add a fraction to the container.
     *
     * @param fraction The fraction to add.
     * @return The container.
     */
    public Swarm fraction(Fraction fraction) {
        this.explicitlyInstalledFractions.add(fraction);
        return this;
    }

    /**
     * Register a user component class with the container's CDI environment.
     *
     * @param cls The component class.
     * @return The container.
     */
    public Swarm component(Class<?> cls) {
        this.userComponentClasses.add(cls);
        return this;
    }

    /**
     * Add an outbound socket-binding to the container.
     *
     * <p>In the event the specified {@code socketBindingGroup} does not exist, the socket-binding
     * will be completely ignored.</p>
     *
     * TODO fix the above-mentioned issue.
     *
     * @param socketBindingGroup The name of the socket-binding group to attach a binding to.
     * @param binding            The outbound socket-binding to add.
     * @return This container.
     */
    public Swarm outboundSocketBinding(String socketBindingGroup, OutboundSocketBinding binding) {
        this.outboundSocketBindings.add(new OutboundSocketBindingRequest(socketBindingGroup, binding));
        return this;
    }

    /**
     * Add an inbound socket-binding to the container.
     *
     * <p>In the event the specified {@code socketBindingGroup} does not exist, the socket-binding
     * will be completely ignored.</p>
     *
     * TODO fix the above-mentioned issue.
     *
     * @param socketBindingGroup The name of the socket-binding group to attach a binding to.
     * @param binding            The inbound socket-binding to add.
     * @return This container.
     */
    public Swarm socketBinding(String socketBindingGroup, SocketBinding binding) {
        this.socketBindings.add(new SocketBindingRequest(socketBindingGroup, binding));
        return this;
    }

    /**
     * Start the container.
     *
     * <p>This is a blocking call, which guarantees that when it returns without error, the
     * container is fully started.</p>
     *
     * @return The container.
     * @throws Exception if an error occurs.
     */
    public Swarm start() throws Exception {
        INSTANCE = this;
        try (AutoCloseable handle = Performance.time("Thorntail.start()")) {
            // The real bootstrap lives in the container module; load it reflectively so
            // this class has no compile-time dependency on the runtime implementation.
            Module module = Module.getBootModuleLoader().loadModule(CONTAINER_MODULE_NAME);
            Class<?> bootstrapClass = module.getClassLoader()
                    .loadClass("org.wildfly.swarm.container.runtime.ServerBootstrapImpl");
            ServerBootstrap bootstrap = (ServerBootstrap) bootstrapClass.newInstance();
            bootstrap
                    .withArguments(this.args)
                    .withBootstrapDebug(this.debugBootstrap)
                    .withExplicitlyInstalledFractions(this.explicitlyInstalledFractions)
                    .withSocketBindings(this.socketBindings)
                    .withOutboundSocketBindings(this.outboundSocketBindings)
                    .withUserComponents(this.userComponentClasses)
                    .withXmlConfig(this.xmlConfig)
                    .withConfigView(this.configView.get(true));

            this.server = bootstrap.bootstrap();
            return this;
        }
    }

    /**
     * Start the container with a deployment.
     *
     * <p>Effectively calls {@code start().deploy(deployment)}</p>
     *
     * @param deployment The deployment to deploy.
     * @return The container.
     * @throws Exception if an error occurs.
     * @see #start()
     * @see #deploy(Archive)
     */
    public Swarm start(Archive<?> deployment) throws Exception {
        return start().deploy(deployment);
    }

    /**
     * Stop the container, first undeploying all deployments.
     *
     * @return The container.
     * @throws Exception If an error occurs.
     */
    public Swarm stop() throws Exception {
        if (this.server == null) {
            throw SwarmMessages.MESSAGES.containerNotStarted("stop()");
        }

        this.server.stop();
        this.server = null;

        // Shut down Weld reflectively, mirroring the reflective bootstrap in start().
        Module module = Module.getBootModuleLoader().loadModule(CONTAINER_MODULE_NAME);
        Class<?> shutdownClass = module.getClassLoader()
                .loadClass("org.wildfly.swarm.container.runtime.WeldShutdownImpl");
        WeldShutdown shutdown = (WeldShutdown) shutdownClass.newInstance();
        shutdown.shutdown();

        return this;
    }

    /**
     * Perform a default deployment.
     *
     * <p>For regular uberjars, it is effectively a short-cut for {@code deploy(swarm.createDefaultDeployment())},
     * deploying the baked-in deployment.</p>
     *
     * <p>For hollow uberjars, it deploys whatever deployments were passed through the command-line, as
     * none are baked-in.</p>
     *
     * @return The container.
     * @throws DeploymentException   if an error occurs.
     * @throws IllegalStateException if the container has not already been started.
     * @see #Swarm(String...)
     * @see #setArgs(String...)
     * @see #deploy(Archive)
     * @see #createDefaultDeployment()
     */
    public Swarm deploy() throws IllegalStateException, DeploymentException {
        if (this.server == null) {
            throw SwarmMessages.MESSAGES.containerNotStarted("deploy()");
        }

        if (ApplicationEnvironment.get().isHollow()) {
            this.server.deployer().deploy(
                    getCommandLine().extraArguments()
                            .stream()
                            .map(Paths::get)
                            .collect(Collectors.toList())
            );
        } else {
            this.server.deployer().deploy();
        }
        return this;
    }

    /**
     * Deploy an archive.
     *
     * @param deployment The ShrinkWrap archive to deploy.
     * @return The container.
     * @throws DeploymentException if an error occurs.
     */
    public Swarm deploy(Archive<?> deployment) throws Exception {
        if (this.server == null) {
            throw SwarmMessages.MESSAGES.containerNotStarted("deploy(Archive<?>)");
        }
        this.server.deployer().deploy(deployment);
        return this;
    }

    /**
     * Retrieve the default ShrinkWrap deployment.
     *
     * @return The default deployment, unmodified.
     */
    public Archive<?> createDefaultDeployment() throws Exception {
        if (this.server == null) {
            throw SwarmMessages.MESSAGES.containerNotStarted("createDefaultDeployment()");
        }
        return this.server.deployer().createDefaultDeployment();
    }

    /**
     * Install the application module's classloader-aware ShrinkWrap extension overrides
     * so archives can be created without service-loader discovery across module boundaries.
     * Failures are logged, not fatal.
     */
    private void createShrinkWrapDomain() {
        ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
        try {
            if (isFatJar()) {
                Module appModule = Module.getBootModuleLoader().loadModule(APPLICATION_MODULE_NAME);
                Thread.currentThread().setContextClassLoader(appModule.getClassLoader());
            }
            Domain domain = ShrinkWrap.getDefaultDomain();
            domain.getConfiguration().getExtensionLoader().addOverride(ZipExporter.class, ZipExporterImpl.class);
            domain.getConfiguration().getExtensionLoader().addOverride(ZipImporter.class, ZipImporterImpl.class);
            domain.getConfiguration().getExtensionLoader().addOverride(ExplodedExporter.class, ExplodedExporterImpl.class);
            domain.getConfiguration().getExtensionLoader().addOverride(ExplodedImporter.class, ExplodedImporterImpl.class);
            domain.getConfiguration().getExtensionLoader().addOverride(JavaArchive.class, JavaArchiveImpl.class);
            domain.getConfiguration().getExtensionLoader().addOverride(WebArchive.class, WebArchiveImpl.class);
        } catch (Exception e) {
            SwarmMessages.MESSAGES.shrinkwrapDomainSetupFailed(e);
        } finally {
            Thread.currentThread().setContextClassLoader(originalCl);
        }
    }

    /**
     * Determine whether we are running from an uberjar, and if so, promote the jar's
     * {@code META-INF/wildfly-swarm.properties} entries to system properties (without
     * overwriting properties already set).
     *
     * @return {@code true} if running from an uberjar, otherwise {@code false}.
     */
    private static boolean isFatJar() throws IOException {
        URL location = Swarm.class.getProtectionDomain().getCodeSource().getLocation();
        Path root = null;
        if (location.getProtocol().equals("file")) {
            try {
                root = Paths.get(location.toURI());
            } catch (URISyntaxException e) {
                throw new IOException(e);
            }
        } else if (location.toExternalForm().startsWith("jar:file:")) {
            return true;
        }

        // FIX: root is null for any other protocol (e.g. a plain "jar:" or remote URL);
        // the original called Files.isRegularFile(null) and threw a NullPointerException.
        if (root != null && Files.isRegularFile(root)) {
            try (JarFile jar = JarFiles.create(root.toFile())) {
                ZipEntry propsEntry = jar.getEntry("META-INF/wildfly-swarm.properties");
                if (propsEntry != null) {
                    try (InputStream in = jar.getInputStream(propsEntry)) {
                        Properties props = new Properties();
                        props.load(in);
                        if (props.containsKey(BootstrapProperties.APP_ARTIFACT)) {
                            System.setProperty(BootstrapProperties.APP_ARTIFACT,
                                               props.getProperty(BootstrapProperties.APP_ARTIFACT));
                        }

                        Set<String> names = props.stringPropertyNames();
                        for (String name : names) {
                            String value = props.getProperty(name);
                            // Explicitly-set system properties win over baked-in ones.
                            if (System.getProperty(name) == null) {
                                System.setProperty(name, value);
                            }
                        }
                    }
                    return true;
                }
            }
        }

        return false;
    }

    /**
     * Load stage/profile YAML configuration in precedence order: explicit stage file,
     * activated project stages, CLI-provided configs, queued profiles, then defaults.
     *
     * @param props optional seed properties (may be null).
     */
    private void initializeConfigView(Properties props) throws IOException, ModuleLoadException {
        try (AutoCloseable handle = Performance.time("Loading YAML")) {
            if (System.getProperty(SwarmProperties.PROJECT_STAGE_FILE) != null) {
                String file = System.getProperty(SwarmProperties.PROJECT_STAGE_FILE);
                boolean loaded = false;
                try {
                    Path path = Paths.get(file);
                    if (Files.exists(path)) {
                        this.configView.load("stages", path.toUri().toURL());
                        loaded = true;
                    }
                } catch (InvalidPathException e) {
                    // ignore: the value may be a URL rather than a filesystem path
                }
                if (!loaded) {
                    // try it as a URL
                    try {
                        URL url = new URL(file);
                        this.configView.load("stages", url);
                    } catch (MalformedURLException e) {
                        // deliberate best-effort: neither a path nor a URL — skip it
                    }
                }
            }

            String projectStageProp = System.getProperty(SwarmProperties.PROJECT_STAGE);
            if (projectStageProp == null && props != null) {
                projectStageProp = props.getProperty(SwarmProperties.PROJECT_STAGE);
            }
            if (projectStageProp == null) {
                // Fall back to the config view itself; "NOT_FOUND" is a sentinel meaning absent.
                projectStageProp = this.configView.get()
                        .resolve(SwarmProperties.PROJECT_STAGE).withDefault("NOT_FOUND").getValue();
                if (projectStageProp != null && projectStageProp.equals("NOT_FOUND")) {
                    projectStageProp = null;
                }
            }

            if (projectStageProp != null) {
                String[] activated = projectStageProp.split(",");
                for (String each : activated) {
                    this.configView.load(each);
                    this.configView.withProfile(each);
                }
            }

            int counter = 0;
            for (URL config : this.configs) {
                String syntheticName = "cli-" + (++counter);
                this.configView.load(syntheticName, config);
                this.configView.withProfile(syntheticName);
            }

            this.configView.load("stages");

            for (String profile : this.profiles) {
                this.configView.load(profile);
                this.configView.withProfile(profile);
            }

            this.configView.load("defaults");
            initializeConfigFilters();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Dispatch to the appropriate filter-discovery strategy for the packaging mode.
    private void initializeConfigFilters() throws ModuleLoadException, IOException, ClassNotFoundException {
        if (isFatJar()) {
            initializeConfigFiltersFatJar();
        } else {
            initializeConfigFiltersClassPath();
        }
    }

    /**
     * Discover and register {@link ConfigurationFilter} implementations by Jandex-indexing
     * every class resource of the application module.
     */
    private void initializeConfigFiltersFatJar() throws ModuleLoadException, IOException, ClassNotFoundException {
        Indexer indexer = new Indexer();
        Module appModule = Module.getBootModuleLoader().loadModule(APPLICATION_MODULE_NAME);
        Iterator<Resource> iter = appModule.iterateResources(PathFilters.acceptAll());
        while (iter.hasNext()) {
            Resource each = iter.next();
            if (each.getName().endsWith(".class")) {
                if (!each.getName().equals("module-info.class")) {
                    try (InputStream is = each.openStream()) {
                        indexer.index(is);
                    } catch (IOException e) {
                        // ignore: skip unreadable class resources, keep indexing the rest
                    }
                }
            }
        }
        Index index = indexer.complete();

        Set<ClassInfo> impls = index.getAllKnownImplementors(DotName.createSimple(ConfigurationFilter.class.getName()));
        for (ClassInfo each : impls) {
            String name = each.name().toString();
            // Jandex guarantees these implement ConfigurationFilter, so the cast is safe.
            @SuppressWarnings("unchecked")
            Class<? extends ConfigurationFilter> cls =
                    (Class<? extends ConfigurationFilter>) appModule.getClassLoader().loadClass(name);
            try {
                ConfigurationFilter filter = cls.newInstance();
                this.configView.withFilter(filter);
            } catch (InstantiationException | IllegalAccessException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Discover and register {@link ConfigurationFilter} implementations by Jandex-indexing
     * every class on {@code java.class.path} (exploded directories and jars alike).
     */
    private void initializeConfigFiltersClassPath() throws IOException, ClassNotFoundException {
        String classpath = System.getProperty("java.class.path");
        String[] locations = classpath.split(File.pathSeparator);

        Indexer indexer = new Indexer();

        for (String location : locations) {
            File file = new File(location);
            JavaArchive archive = null;
            if (file.exists()) {
                if (file.isDirectory()) {
                    archive = ShrinkWrap.create(ExplodedImporter.class).importDirectory(file).as(JavaArchive.class);
                } else {
                    archive = ShrinkWrap.create(ZipImporter.class).importFrom(file).as(JavaArchive.class);
                }

                Map<ArchivePath, Node> content = archive.getContent();
                for (ArchivePath path : content.keySet()) {
                    if (path.get().endsWith(".class") && !path.get().endsWith("module-info.class")) {
                        Node node = content.get(path);
                        // FIX: close the asset stream (the fat-jar variant already did;
                        // the original leaked one stream per indexed class here).
                        try (InputStream is = node.getAsset().openStream()) {
                            indexer.index(is);
                        } catch (IOException e) {
                            // ignore: skip unreadable entries, keep indexing the rest
                        }
                    }
                }
            }
        }

        Index index = indexer.complete();

        Set<ClassInfo> impls = index.getAllKnownImplementors(DotName.createSimple(ConfigurationFilter.class.getName()));
        for (ClassInfo each : impls) {
            String name = each.name().toString();
            // Jandex guarantees these implement ConfigurationFilter, so the cast is safe.
            @SuppressWarnings("unchecked")
            Class<? extends ConfigurationFilter> cls = (Class<? extends ConfigurationFilter>) Class.forName(name);
            try {
                ConfigurationFilter filter = cls.newInstance();
                this.configView.withFilter(filter);
            } catch (InstantiationException | IllegalAccessException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Main entry-point if a user does not specify a custom {@code main(...)}-containing class.
     *
     * <p>The default behaviour of this {@code main(...)} is to start the container entirely
     * with defaults and deploy the default deployment.</p>
     *
     * @param args The command-line arguments from the invocation.
     * @throws Exception if an error occurs.
     */
    public static void main(String... args) throws Exception {
        if (System.getProperty(BOOT_MODULE_PROPERTY) == null) {
            // Literal name (not BootModuleLoader.class.getName()) to avoid touching the
            // class before the boot module loader property is in place.
            System.setProperty(BOOT_MODULE_PROPERTY, "org.wildfly.swarm.bootstrap.modules.BootModuleLoader");
        }

        swarm = new Swarm(args);
        try {
            swarm.start();
            if (System.getProperty("thorntail.inhibit.default-deployment") == null) {
                swarm.deploy();
            }
        } catch (final VirtualMachineError vme) {
            // Don't even try to swarm.stop() in case of OOM etc.
            vme.printStackTrace();
            System.exit(1);
        } catch (final Throwable t) {
            if (System.getProperty("thorntail.inhibit.auto-stop") == null) {
                t.printStackTrace();
                tryToStopAfterStartupError(t, swarm);
            }
            throw t;
        }
        displayUsage();
    }

    /**
     * Stop the container created by {@link #main(String...)}, if any.
     * Failures during shutdown are reported but never propagated.
     */
    public static void stopMain() throws Exception {
        try {
            if (swarm != null) {
                swarm.stop();
            }
        } catch (Exception e) {
            // FIX: previously swallowed silently; shutdown failures are still suppressed
            // (stopMain is best-effort) but now at least reported.
            e.printStackTrace();
        }
    }

    private static void tryToStopAfterStartupError(final Throwable errorCause, final Swarm swarm) {
        // Try to swarm.stop() if needed.
        if (swarm.server != null) {
            // Server was apparently started but might be in an inconsistent state and stop() might therefore fail.
            // So, to avoid overlaying/shadowing of errorCause we need to perform a "failsafe" stop().
            try {
                swarm.stop();
            } catch (final Throwable t) {
                // To avoid keeping the potentially inconsistent server/JVM running, we explicitly kill it.
                // SwarmMessages is not usable here because swarm.start() might not even have made it past logging setup.
                errorCause.printStackTrace();
                t.printStackTrace();
                System.exit(1);
            }
        } else {
            // errors can be thrown before swarm.server is created
            errorCause.printStackTrace();
            System.exit(1);
        }
    }

    private static void displayUsage() throws Exception {
        swarm.server.displayUsage();
    }

    private static ArtifactLookup artifactLookup() {
        return ArtifactLookup.get();
    }

    /**
     * Retrieve an artifact that was part of the original build using a
     * full or simplified Maven GAV specifier.
     *
     * <p>The following formats of GAVs are supported:</p>
     *
     * <ul>
     * <li>groupId:artifactId</li>
     * <li>groupId:artifactId:version</li>
     * <li>groupId:artifactId:packaging:version</li>
     * <li>groupId:artifactId:packaging:version:classifier</li>
     * </ul>
     *
     * <p>Only artifacts that were compiled with the user's project with
     * a scope of {@code compile} are available through lookup.</p>
     *
     * <p>In the variants that include a {@code version} parameter, it may be
     * replaced by a literal asterisk in order to avoid hard-coding versions
     * into the application.</p>
     *
     * @param gav The Maven GAV.
     * @return The located artifact, as a {@code JavaArchive}.
     * @throws Exception If the specified artifact is not locatable.
     */
    public static JavaArchive artifact(String gav) throws Exception {
        return artifactLookup().artifact(gav);
    }

    /**
     * Retrieve an artifact that was part of the original build using a
     * full or simplified Maven GAV specifier, returning an archive with a
     * specified name.
     *
     * @param gav    The Maven GAV.
     * @param asName The name for the returned archive.
     * @return The located artifact, as a {@code JavaArchive} with the specified name.
     * @throws Exception If the specified artifact is not locatable.
     * @see #artifact(String)
     */
    public static JavaArchive artifact(String gav, String asName) throws Exception {
        return artifactLookup().artifact(gav, asName);
    }

    /**
     * Retrieve all dependency artifacts for the user's project.
     *
     * @return All dependencies, as {@code JavaArchive} objects.
     * @throws Exception If the lookup fails.
     */
    public static List<JavaArchive> allArtifacts() throws Exception {
        return artifactLookup().allArtifacts();
    }

    /**
     * Installs the Module MBeanServer. The installer method is package-private in
     * jboss-modules, hence the reflective access. Failure is logged, not fatal.
     */
    private void installModuleMBeanServer() {
        try {
            Method method = ModuleLoader.class.getDeclaredMethod("installMBeanServer");
            method.setAccessible(true);
            method.invoke(null);
        } catch (Exception e) {
            SwarmMessages.MESSAGES.moduleMBeanServerNotInstalled(e);
        }
    }

    /**
     * Retrieve the configuration view.
     *
     * @return The configuration view.
     */
    public ConfigView configView() {
        return this.configView.get();
    }

    /**
     * Retrieve the configuration view in a deprecated manner.
     *
     * @return The {@code ConfigView} through a deprecated interface.
     * @see #configView()
     */
    @SuppressWarnings("deprecation")
    @Deprecated
    public StageConfig stageConfig() {
        return this.configView.get();
    }

    private String[] args;

    private Server server;

    private Set<Class<?>> userComponentClasses = new HashSet<>();

    private List<SocketBindingRequest> socketBindings = new ArrayList<>();

    private List<OutboundSocketBindingRequest> outboundSocketBindings = new ArrayList<>();

    private List<Fraction> explicitlyInstalledFractions = new ArrayList<>();

    private ConfigViewFactory configView;

    private Optional<URL> xmlConfig = Optional.empty();

    private List<URL> configs = new ArrayList<>();

    private List<String> profiles = new ArrayList<>();

    private boolean debugBootstrap;

    // True only while the master constructor runs; gates deferred vs. immediate
    // application of withConfig()/withProfile().
    private boolean isConstructing = true;
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.corona;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransportException;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.List;

/**
 * The Tool that can talk to the cluster manager to work with the sessions
 * List, Kill, etc
 */
public class CoronaClient extends Configured implements Tool {

  static {
    Configuration.addDefaultResource("mapred-default.xml");
    Configuration.addDefaultResource("mapred-site.xml");
  }

  /**
   * Print the usage instructions for this class.
   * If a specific command is given only prints the usage for that command
   * otherwise lists all the commands and their usages
   * @param cmd the command to list the usage for
   */
  public static void printUsage(String cmd) {
    if ("-kill".equals(cmd)) {
      System.err.println("Usage: CoronaClient -kill <session id>");
    } else {
      System.err.println("Usage: CoronaClient");
      System.err.println("\t\t[-list]");
      System.err.println("\t\t[-kill <session id>]");
    }
  }

  @Override
  public int run(String[] args) throws Exception {
    if (args.length < 1) {
      printUsage("");
      return -1;
    }
    int i = 0;
    String cmd = args[i++];
    if ("-list".equals(cmd)) {
      if (args.length > 1) {
        printUsage("");
        return -1;
      }
      return listSessions();
    } else if ("-kill".equals(cmd)) {
      if (args.length != 2) {
        printUsage(cmd);
        return -1;
      }
      return killSession(args[i]);
    }
    // FIX: an unrecognized command previously fell through and returned 0
    // (success) without any message; report usage and fail instead.
    printUsage("");
    return -1;
  }

  /**
   * Tells the cluster manager to kill the session with a given id
   * @param sessionId the id of the session to kill
   * @return 0 in case of success, non zero value on error
   * @throws IOException on communication failure or when the ClusterManager
   *         is in safe mode
   */
  private int killSession(String sessionId) throws IOException {
    try {
      // FIX: printf without a trailing newline left the status glued to
      // subsequent output; %n emits the platform line separator.
      System.out.printf("Killing %s%n", sessionId);
      ClusterManagerService.Client client = getCMSClient();
      try {
        client.killSession(sessionId);
      } catch (SafeModeException e) {
        throw new IOException(
          "Cannot kill session yet, ClusterManager is in Safe Mode");
      }
      System.err.printf("%s killed%n", sessionId);
    } catch (TException e) {
      throw new IOException(e);
    }
    return 0;
  }

  /**
   * Tells the cluster manager to kill the session with a given id, using
   * an explicitly supplied configuration (static variant for callers that
   * are not running as a Tool).
   * @param sessionId the id of the session to kill
   * @param conf the configuration to locate the cluster manager with
   * @throws IOException on communication failure or safe mode
   */
  public static void killSession(String sessionId, Configuration conf)
      throws IOException {
    try {
      ClusterManagerService.Client client = getCMSClient(new CoronaConf(conf));
      client.killSession(sessionId);
    } catch (SafeModeException e) {
      throw new IOException(e);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  /**
   * Gets a list of the sessions from the cluster manager
   * and outputs them on the console
   * @return 0 in case of success, non zero value on error
   * @throws IOException on communication failure or when the ClusterManager
   *         is in safe mode
   */
  private int listSessions() throws IOException {
    try {
      ClusterManagerService.Client client = getCMSClient();
      List<RunningSession> sessions;
      try {
        sessions = client.getSessions();
      } catch (SafeModeException e) {
        throw new IOException(
          "Cannot list sessions, ClusterManager is in Safe Mode");
      }
      System.out.printf("%d sessions currently running:\n", sessions.size());
      System.out.printf("SessionID\t" +
        "Session Name\t" +
        "Session User\t" +
        "Session Poolgroup\t" +
        "Session Pool\t" +
        "Session Priority\t" +
        "Running Mappers\t" +
        "Running Reducers\t" +
        "Running Jobtrackers\n");
      for (RunningSession session : sessions) {
        // Older cluster managers may omit the priority; default to NORMAL.
        SessionPriority priority = session.getPriority();
        if (priority == null) {
          priority = SessionPriority.NORMAL;
        }
        // Escape tabs/newlines in the free-form session name so it cannot
        // break the tab-separated table format.
        System.out.printf("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n",
          session.getHandle(),
          session.getName().replace("\t", "\\t").replace("\n", "\\n"),
          session.getUserId(),
          session.getPoolInfo().getPoolGroupName(),
          session.getPoolInfo().getPoolName(),
          priority,
          session.getRunningResources().get(ResourceType.MAP),
          session.getRunningResources().get(ResourceType.REDUCE),
          session.getRunningResources().get(ResourceType.JOBTRACKER));
      }
    } catch (TException e) {
      throw new IOException(e);
    }
    return 0;
  }

  /**
   * Get the thrift client to communicate with the cluster manager
   * @return a thrift client initialized to talk to the cluster manager
   * @throws TTransportException if the transport cannot be opened
   */
  private ClusterManagerService.Client getCMSClient()
      throws TTransportException {
    // Get the current configuration
    CoronaConf conf = new CoronaConf(getConf());
    return getCMSClient(conf);
  }

  /**
   * Get the thrift client to communicate with the cluster manager
   * @return a thrift client initialized to talk to the cluster manager
   * @param conf The configuration.
   * @throws TTransportException if the transport cannot be opened
   */
  private static ClusterManagerService.Client getCMSClient(CoronaConf conf)
      throws TTransportException {
    InetSocketAddress address = NetUtils.createSocketAddr(conf
      .getClusterManagerAddress());
    TFramedTransport transport = new TFramedTransport(
      new TSocket(address.getHostName(), address.getPort()));
    ClusterManagerService.Client client = new ClusterManagerService.Client(
      new TBinaryProtocol(transport));
    transport.open();
    // NOTE(review): the opened transport is never closed by any caller; this
    // is tolerated only because the CLI process exits right after each command.
    return client;
  }

  public static void main(String[] args) throws Exception {
    int result = ToolRunner.run(new CoronaClient(), args);
    System.exit(result);
  }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package rdpclient.rdp; import streamer.BaseElement; import streamer.ByteBuffer; import streamer.Element; import streamer.Link; import streamer.Pipeline; import streamer.PipelineImpl; import streamer.debug.MockSink; import streamer.debug.MockSource; public class ServerIOChannelRouter extends BaseElement { /** * Demand Active PDU. */ public static final int PDUTYPE_DEMANDACTIVEPDU = 0x1; /** * Confirm Active PDU. */ public static final int PDUTYPE_CONFIRMACTIVEPDU = 0x3; /** * Deactivate All PDU. */ public static final int PDUTYPE_DEACTIVATEALLPDU = 0x6; /** * Data PDU (actual type is revealed by the pduType2 field in the Share Data * Header). */ public static final int PDUTYPE_DATAPDU = 0x7; /** * Enhanced Security Server Redirection PDU. 
*/ public static final int PDUTYPE_SERVER_REDIR_PKT = 0xA; protected RdpState state; public ServerIOChannelRouter(String id, RdpState state) { super(id); this.state = state; } /** * @see http://msdn.microsoft.com/en-us/library/cc240576.aspx */ @Override public void handleData(ByteBuffer buf, Link link) { if (verbose) System.out.println("[" + this + "] INFO: Data received: " + buf + "."); int length = buf.readUnsignedShortLE(); if (buf.length != length) { // It is ServerErrorAlert-ValidClient // Ignore it //throw new RuntimeException("[" + this + "] ERROR: Incorrect PDU length: " + length + ", data: " + buf + "."); } int type = buf.readUnsignedShortLE() & 0xf; // int sourceId = buf.readUnsignedShortLE(); buf.skipBytes(2); switch (type) { case PDUTYPE_DEMANDACTIVEPDU: pushDataToPad("demand_active", buf); break; case PDUTYPE_CONFIRMACTIVEPDU: throw new RuntimeException("Unexpected client CONFIRM ACTIVE PDU. Data: " + buf + "."); case PDUTYPE_DEACTIVATEALLPDU: // pushDataToPad("deactivate_all", buf); /* ignore */buf.unref(); break; case PDUTYPE_DATAPDU: handleDataPdu(buf); break; case PDUTYPE_SERVER_REDIR_PKT: // pushDataToPad("server_redir", buf); /* ignore */buf.unref(); break; default: throw new RuntimeException("[" + this + "] ERROR: Unknown PDU type: " + type + ", data: " + buf + "."); } } /** * Graphics Update PDU. */ public static final int PDUTYPE2_UPDATE = 0x02; /** * Control PDU. */ public static final int PDUTYPE2_CONTROL = 0x14; /** * Pointer Update PDU. */ public static final int PDUTYPE2_POINTER = 0x1B; /** * Input Event PDU. */ public static final int PDUTYPE2_INPUT = 0x1C; /** * Synchronize PDU. */ public static final int PDUTYPE2_SYNCHRONIZE = 0x1F; /** * Refresh Rect PDU. */ public static final int PDUTYPE2_REFRESH_RECT = 0x21; /** * Play Sound PDU. */ public static final int PDUTYPE2_PLAY_SOUND = 0x22; /** * Suppress Output PDU. */ public static final int PDUTYPE2_SUPPRESS_OUTPUT = 0x23; /** * Shutdown Request PDU. 
*/ public static final int PDUTYPE2_SHUTDOWN_REQUEST = 0x24; /** * Shutdown Request Denied PDU. */ public static final int PDUTYPE2_SHUTDOWN_DENIED = 0x25; /** * Save Session Info PDU. */ public static final int PDUTYPE2_SAVE_SESSION_INFO = 0x26; /** * Font List PDU. */ public static final int PDUTYPE2_FONTLIST = 0x27; /** * Font Map PDU. */ public static final int PDUTYPE2_FONTMAP = 0x28; /** * Set Keyboard Indicators PDU. */ public static final int PDUTYPE2_SET_KEYBOARD_INDICATORS = 0x29; /** * Persistent Key List PDU. */ public static final int PDUTYPE2_BITMAPCACHE_PERSISTENT_LIST = 0x2B; /** * Bitmap Cache Error PDU. */ public static final int PDUTYPE2_BITMAPCACHE_ERROR_PDU = 0x2C; /** * Set Keyboard IME Status PDU. */ public static final int PDUTYPE2_SET_KEYBOARD_IME_STATUS = 0x2D; /** * Offscreen Bitmap Cache Error PDU. */ public static final int PDUTYPE2_OFFSCRCACHE_ERROR_PDU = 0x2E; /** * Set Error Info PDU. */ public static final int PDUTYPE2_SET_ERROR_INFO_PDU = 0x2F; /** * DrawNineGrid Cache Error PDU. */ public static final int PDUTYPE2_DRAWNINEGRID_ERROR_PDU = 0x30; /** * GDI+ Error PDU. */ public static final int PDUTYPE2_DRAWGDIPLUS_ERROR_PDU = 0x31; /** * Auto-Reconnect Status PDU. */ public static final int PDUTYPE2_ARC_STATUS_PDU = 0x32; /** * Status Info PDU. */ public static final int PDUTYPE2_STATUS_INFO_PDU = 0x36; /** * Monitor Layout PDU. */ public static final int PDUTYPE2_MONITOR_LAYOUT_PDU = 0x37; /** * Indicates an Orders Update. */ public static final int UPDATETYPE_ORDERS = 0x0000; /** * Indicates a Bitmap Graphics Update. */ public static final int UPDATETYPE_BITMAP = 0x0001; /** * Indicates a Palette Update. */ public static final int UPDATETYPE_PALETTE = 0x0002; /** * Indicates a Synchronize Update. */ public static final int UPDATETYPE_SYNCHRONIZE = 0x0003; /** * @see http://msdn.microsoft.com/en-us/library/cc240577.aspx */ protected void handleDataPdu(ByteBuffer buf) { // (4 bytes): A 32-bit, unsigned integer. 
Share identifier for the packet. long shareId = buf.readUnsignedIntLE(); if (shareId != state.serverShareId) throw new RuntimeException("Unexpected share ID: " + shareId + "."); // buf.skipBytes(4); // Padding. buf.skipBytes(1); // (1 byte): An 8-bit, unsigned integer. The stream identifier for the // packet. // int streamId = buf.readUnsignedByte(); buf.skipBytes(1); // (2 bytes): A 16-bit, unsigned integer. The uncompressed length of the // packet in bytes. int uncompressedLength = buf.readUnsignedShortLE(); // (1 byte): An 8-bit, unsigned integer. The type of Data PDU. int type2 = buf.readUnsignedByte(); // (1 byte): An 8-bit, unsigned integer. The compression type and flags // specifying the data following the Share Data Header int compressedType = buf.readUnsignedByte(); if (compressedType != 0) throw new RuntimeException("Compression of protocol packets is not supported. Data: " + buf + "."); // (2 bytes): A 16-bit, unsigned integer. The compressed length of the // packet in bytes. int compressedLength = buf.readUnsignedShortLE(); if (compressedLength != 0) throw new RuntimeException("Compression of protocol packets is not supported. Data: " + buf + "."); ByteBuffer data = buf.readBytes(uncompressedLength - 18); buf.unref(); switch (type2) { case PDUTYPE2_UPDATE: { // (2 bytes): A 16-bit, unsigned integer. Type of the graphics update. 
int updateType = data.readUnsignedShortLE(); ByteBuffer payload = data.readBytes(data.length - data.cursor); data.unref(); switch (updateType) { case UPDATETYPE_ORDERS: pushDataToPad("orders", payload); break; case UPDATETYPE_BITMAP: pushDataToPad("bitmap", payload); break; case UPDATETYPE_PALETTE: pushDataToPad("palette", payload); break; case UPDATETYPE_SYNCHRONIZE: // Ignore payload.unref(); break; } break; } case PDUTYPE2_CONTROL: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_CONTROL ignored."); // Ignore data.unref(); break; case PDUTYPE2_POINTER: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_POINTER ignored."); // Ignore data.unref(); break; case PDUTYPE2_INPUT: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_INPUT ignored."); // Ignore data.unref(); break; case PDUTYPE2_SYNCHRONIZE: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SYNCHRONIZE ignored."); // Ignore data.unref(); break; case PDUTYPE2_REFRESH_RECT: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_REFRESH_RECT ignored."); // Ignore data.unref(); break; case PDUTYPE2_PLAY_SOUND: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_PLAY_SOUND ignored."); // Ignore data.unref(); break; case PDUTYPE2_SUPPRESS_OUTPUT: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SUPPRESS_OUTPUT ignored."); // Ignore data.unref(); break; case PDUTYPE2_SHUTDOWN_REQUEST: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SHUTDOWN_REQUEST ignored."); // Ignore data.unref(); break; case PDUTYPE2_SHUTDOWN_DENIED: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SHUTDOWN_DENIED ignored."); // Ignore data.unref(); break; case PDUTYPE2_SAVE_SESSION_INFO: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SAVE_SESSION_INFO ignored."); // Ignore data.unref(); break; case PDUTYPE2_FONTLIST: if (verbose) 
System.out.println("[" + this + "] INFO: Packet PDUTYPE2_FONTLIST ignored."); // Ignore data.unref(); break; case PDUTYPE2_FONTMAP: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_FONTMAP ignored."); // Ignore data.unref(); break; case PDUTYPE2_SET_KEYBOARD_INDICATORS: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SET_KEYBOARD_INDICATORS ignored."); // Ignore data.unref(); break; case PDUTYPE2_BITMAPCACHE_PERSISTENT_LIST: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_BITMAPCACHE_PERSISTENT_LIST ignored."); // Ignore data.unref(); break; case PDUTYPE2_BITMAPCACHE_ERROR_PDU: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_BITMAPCACHE_ERROR_PDU ignored."); // Ignore data.unref(); break; case PDUTYPE2_SET_KEYBOARD_IME_STATUS: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SET_KEYBOARD_IME_STATUS ignored."); // Ignore data.unref(); break; case PDUTYPE2_OFFSCRCACHE_ERROR_PDU: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_OFFSCRCACHE_ERROR_PDU ignored."); // Ignore data.unref(); break; case PDUTYPE2_SET_ERROR_INFO_PDU: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_SET_ERROR_INFO_PDU ignored."); // Ignore data.unref(); break; case PDUTYPE2_DRAWNINEGRID_ERROR_PDU: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_DRAWNINEGRID_ERROR_PDU ignored."); // Ignore data.unref(); break; case PDUTYPE2_DRAWGDIPLUS_ERROR_PDU: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_DRAWGDIPLUS_ERROR_PDU ignored."); // Ignore data.unref(); break; case PDUTYPE2_ARC_STATUS_PDU: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_ARC_STATUS_PDU ignored."); // Ignore data.unref(); break; case PDUTYPE2_STATUS_INFO_PDU: if (verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_STATUS_INFO_PDU ignored."); // Ignore data.unref(); break; case PDUTYPE2_MONITOR_LAYOUT_PDU: if 
(verbose) System.out.println("[" + this + "] INFO: Packet PDUTYPE2_MONITOR_LAYOUT_PDU ignored."); // Ignore data.unref(); break; default: throw new RuntimeException("Unknow data PDU type: " + type2 + ", data: " + buf + "."); } } /** * Example. * */ public static void main(String args[]) { // System.setProperty("streamer.Link.debug", "true"); System.setProperty("streamer.Element.debug", "true"); // System.setProperty("streamer.Pipeline.debug", "true"); byte[] packet = new byte[] { // TPKT (byte)0x03, (byte)0x00, // TPKT Header: TPKT version = 3 (byte)0x00, (byte)0x1B, // TPKT length: 27 bytes // X224 (byte)0x02, // X224 Length: 2 bytes (byte)0xF0, // X224 Type: Data (byte)0x80, // X224 EOT // MCS // Type: send data indication: 26 (0x1a, top 6 bits) (byte)0x68, // ?? (byte)0x00, (byte)0x01, // User ID: 1002 (1001+1) (byte)0x03, (byte)0xEB, // Channel ID: 1003 (byte)0x70, // Data priority: high, segmentation: begin|end (byte)0x0D, // Payload length: 13 bytes // Deactivate all PDU (byte)0x0D, (byte)0x00, // Length: 13 bytes (LE) // - PDUType: (0x16, LE) // Type: (............0110) TS_PDUTYPE_DEACTIVATEALLPDU // ProtocolVersion: (000000000001....) 
1 (byte)0x16, (byte)0x00, (byte)0xEA, (byte)0x03, // PDU source: 1002 (LE) (byte)0xEA, (byte)0x03, (byte)0x01, (byte)0x00, // ShareID = 66538 (byte)0x01, (byte)0x00, // Length if source descriptor: 1 (LE) (byte)0x00, // Source descriptor (should be set to 0): 0 }; MockSource source = new MockSource("source", ByteBuffer.convertByteArraysToByteBuffers(packet)); RdpState rdpState = new RdpState() { { serverShareId = 66538; } }; Element channel1003 = new ServerIOChannelRouter("channel_1003", rdpState); Element mcs = new ServerMCSPDU("mcs"); Element tpkt = new ServerTpkt("tpkt"); Element x224 = new ServerX224DataPdu("x224"); Element sink = new MockSink("sink", ByteBuffer.convertByteArraysToByteBuffers(new byte[] { // Deactivate all PDU (byte)0x0D, (byte)0x00, // Length: 13 bytes (LE) // - PDUType: 22 (0x16, LE) // Type: (............0110) TS_PDUTYPE_DEACTIVATEALLPDU // ProtocolVersion: (000000000001....) 1 (byte)0x16, (byte)0x00, (byte)0xEA, (byte)0x03, // PDU source: 1002 (LE) (byte)0xEA, (byte)0x03, (byte)0x01, (byte)0x00, // ShareID = 66538 (byte)0x01, (byte)0x00, // Length if source descriptor: 1 (LE) (byte)0x00, // Source descriptor (should be set to 0): 0 })); Pipeline pipeline = new PipelineImpl("test"); pipeline.add(source, tpkt, x224, mcs, channel1003, sink); pipeline.link("source", "tpkt", "x224", "mcs >channel_1003", "channel_1003 >deactivate_all", "sink"); pipeline.runMainLoop("source", STDOUT, false, false); } }
/*
 * Copyright (C) 2015-2016 The Helenus Driver Project Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.helenus.driver.impl;

import java.lang.reflect.Modifier;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.commons.lang3.tuple.Pair;
import org.helenus.commons.collections.DirectedGraph;
import org.helenus.commons.collections.GraphUtils;
import org.helenus.commons.collections.graph.ConcurrentHashDirectedGraph;
import org.helenus.commons.lang3.reflect.ReflectionUtils;
import org.helenus.driver.AlterSchemas;
import org.helenus.driver.Clause;
import org.helenus.driver.ExcludedKeyspaceKeyException;
import org.helenus.driver.GroupableStatement;
import org.helenus.driver.SequenceableStatement;
import org.helenus.driver.StatementBridge;
import org.helenus.driver.VoidFuture;
import org.helenus.driver.info.ClassInfo;
import org.helenus.driver.persistence.Keyspace;
import org.helenus.driver.persistence.Table;
import org.reflections.Reflections;

/**
 * The <code>AlterSchemasImpl</code> class provides support for a statement
 * which will create and/or alter all the required elements (keyspace, tables,
 * types, and indexes) to support the schema for a given package of POJOs. It
 * will take care of creating and/or altering the required keyspace, tables,
 * types, and indexes.
 *
 * @copyright 2015-2016 The Helenus Driver Project Authors
 *
 * @author The Helenus Driver Project Authors
 * @version 1 - Apr 2, 2015 - paouelle - Creation
 *
 * @since 1.0
 */
public class AlterSchemasImpl
  extends SequenceStatementImpl<Void, VoidFuture, Void>
  implements AlterSchemas {
  /**
   * Holds the packages for all POJO classes for which to alter schemas.
   *
   * @author paouelle
   */
  private final Object[] pkgs;

  /**
   * Flag indicating if only POJOs with keyspace names that can be computed
   * based on exactly the set of keyspace keys provided should be considered.
   *
   * @author paouelle
   */
  private final boolean matching;

  /**
   * Set of POJO class infos with their keyspace to be altered.
   *
   * @author paouelle
   */
  private final Map<Keyspace, List<ClassInfoImpl<?>>> keyspaces;

  /**
   * Holds the cache of contexts for POJOs that will have schemas altered.
   * Lazily computed by {@link #getContexts} and invalidated by
   * {@link #setDirty} (hence volatile).
   *
   * @author paouelle
   */
  private volatile List<ClassInfoImpl<?>.Context> contexts;

  /**
   * Holds the where statement part.
   *
   * @author paouelle
   */
  private final WhereImpl where;

  /**
   * Instantiates a new <code>AlterSchemasImpl</code> object.
   *
   * @author paouelle
   *
   * @param pkgs the packages and/or classes where to find all POJO classes to
   *        alter schemas for associated with this statement
   * @param matching <code>true</code> to only consider POJOs with keyspace names
   *        that can be computed with exactly the set of keyspace keys provided
   * @param mgr the non-<code>null</code> statement manager
   * @param bridge the non-<code>null</code> statement bridge
   * @throws NullPointerException if <code>pkgs</code> is <code>null</code>
   * @throws IllegalArgumentException if an @Entity or @RootEntity annotated
   *         class is missing the @Keyspace annotation or two entities defines
   *         the same keyspace with different options or an entity class doesn't
   *         represent a valid POJO class or if no entities are found
   */
  public AlterSchemasImpl(
    String[] pkgs,
    boolean matching,
    StatementManagerImpl mgr,
    StatementBridge bridge
  ) {
    super(Void.class, (String)null, mgr, bridge);
    org.apache.commons.lang3.Validate.notNull(pkgs, "invalid null packages");
    // drop null entries so Reflections never sees them
    this.pkgs = Stream.of(pkgs).filter(p -> p != null).toArray();
    this.matching = matching;
    this.keyspaces = findKeyspaces();
    this.where = new WhereImpl(this);
  }

  /**
   * Find all keyspaces.
   *
   * @author paouelle
   *
   * @return a map of all the keyspaces along with the list of POJO class info
   *         associated with them
   * @throws IllegalArgumentException if an @Entity annotated class is missing the
   *         Keyspace annotation or two entities defines the same keyspace with
   *         different options or an @Entity annotated class doesn't represent
   *         a valid POJO class
   */
  private Map<Keyspace, List<ClassInfoImpl<?>>> findKeyspaces() {
    // keyed by keyspace name so duplicate @Keyspace definitions (same name,
    // different attributes) can be detected across all entity kinds
    final Map<String, Keyspace> keyspaces = new LinkedHashMap<>(25);
    final Reflections reflections = new Reflections(pkgs);
    // search for all POJO annotated classes with @UDTEntity
    // because of interdependencies between UDT, we need to build a graph
    // to detect circular dependencies and also to ensure a proper creation
    // order later
    final Map<Keyspace, DirectedGraph<UDTClassInfoImpl<?>>> udtcinfos
      = new HashMap<>(25);

    for (final Class<?> clazz: reflections.getTypesAnnotatedWith(
      org.helenus.driver.persistence.UDTEntity.class, true
    )) {
      // skip abstract POJO classes
      if (Modifier.isAbstract(clazz.getModifiers())) {
        continue;
      }
      final UDTClassInfoImpl<?> cinfo
        = (UDTClassInfoImpl<?>)mgr.getClassInfoImpl(clazz);
      final Keyspace k = cinfo.getKeyspace();
      final Keyspace old = keyspaces.put(k.name(), k);
      DirectedGraph<UDTClassInfoImpl<?>> cs = udtcinfos.get(k);

      if (cs == null) {
        cs = new ConcurrentHashDirectedGraph<>();
        udtcinfos.put(k, cs);
      }
      cs.add(cinfo, cinfo.udts()); // add dependencies
      if ((old != null) && !k.equals(old)) {
        // duplicate annotation found with different attribute
        throw new IllegalArgumentException(
          "two different @Keyspace annotations found with class '"
          + clazz.getName()
          + "': "
          + old
          + " and: "
          + k
        );
      }
    }
    // search for all POJO annotated classes with @UDTRootEntity
    for (final Class<?> clazz: reflections.getTypesAnnotatedWith(
      org.helenus.driver.persistence.UDTRootEntity.class, true
    )) {
      // skip classes that are not directly annotated
      if (ReflectionUtils.findFirstClassAnnotatedWith(
            clazz, org.helenus.driver.persistence.UDTRootEntity.class
          ) != clazz) {
        continue;
      }
      final UDTClassInfoImpl<?> cinfo
        = (UDTClassInfoImpl<?>)mgr.getClassInfoImpl(clazz);
      final Keyspace k = cinfo.getKeyspace();
      final Keyspace old = keyspaces.put(k.name(), k);
      DirectedGraph<UDTClassInfoImpl<?>> cs = udtcinfos.get(k);

      if (cs == null) {
        cs = new ConcurrentHashDirectedGraph<>();
        udtcinfos.put(k, cs);
      }
      cs.add(cinfo, cinfo.udts());
      if ((old != null) && !k.equals(old)) {
        // duplicate annotation found with different attribute
        throw new IllegalArgumentException(
          "two different @Keyspace annotations found with class '"
          + clazz.getName()
          + "': "
          + old
          + " and: "
          + k
        );
      }
      // now add all @UDTTypeEntity for this @UDTRootEntity
      final DirectedGraph<UDTClassInfoImpl<?>> fcs = cs;

      ((UDTRootClassInfoImpl<?>)cinfo).typeImpls()
        .forEach(tcinfo -> fcs.add(tcinfo, tcinfo.udts()));
    }
    // now we are done with types, do a reverse topological sort of all keyspace
    // graphs such that we end up creating udts in the dependent order
    // and populate the resulting cinfos map with that sorted list
    @SuppressWarnings({"cast", "unchecked", "rawtypes"})
    final Map<Keyspace, List<ClassInfoImpl<?>>> cinfos
      = udtcinfos.entrySet().stream()
        .collect(Collectors.toMap(
          e -> ((Map.Entry<Keyspace, DirectedGraph<UDTClassInfoImpl<?>>>)e).getKey(),
          e -> {
            final List<UDTClassInfoImpl<?>> l = GraphUtils.sort(
              ((Map.Entry<Keyspace, DirectedGraph<UDTClassInfoImpl<?>>>)e).getValue(),
              o -> o.getObjectClass(),
              o -> o.getObjectClass().getSimpleName()
            );

            Collections.reverse(l);
            return (List<ClassInfoImpl<?>>)(List)l;
          }
        ));

    // search for all POJO annotated classes with @Entity
    for (final Class<?> clazz: reflections.getTypesAnnotatedWith(
      org.helenus.driver.persistence.Entity.class, true
    )) {
      // skip abstract POJO classes
      if (Modifier.isAbstract(clazz.getModifiers())) {
        continue;
      }
      final ClassInfoImpl<?> cinfo = mgr.getClassInfoImpl(clazz);
      final Keyspace k = cinfo.getKeyspace();
      final Keyspace old = keyspaces.put(k.name(), k);
      List<ClassInfoImpl<?>> cs = cinfos.get(k);

      if (cs == null) {
        cs = new ArrayList<>(25);
        cinfos.put(k, cs);
      }
      cs.add(cinfo);
      if ((old != null) && !k.equals(old)) {
        // duplicate annotation found with different attribute
        throw new IllegalArgumentException(
          "two different @Keyspace annotations found with class '"
          + clazz.getName()
          + "': "
          + old
          + " and: "
          + k
        );
      }
    }
    // search for all POJO annotated classes with @RootEntity
    for (final Class<?> clazz: reflections.getTypesAnnotatedWith(
      org.helenus.driver.persistence.RootEntity.class, true
    )) {
      // skip classes that are not directly annotated
      if (ReflectionUtils.findFirstClassAnnotatedWith(
            clazz, org.helenus.driver.persistence.RootEntity.class
          ) != clazz) {
        continue;
      }
      final ClassInfoImpl<?> cinfo = mgr.getClassInfoImpl(clazz);
      final Keyspace k = cinfo.getKeyspace();
      final Keyspace old = keyspaces.put(k.name(), k);
      List<ClassInfoImpl<?>> cs = cinfos.get(k);

      if (cs == null) {
        cs = new ArrayList<>(32);
        cinfos.put(k, cs);
      }
      cs.add(cinfo);
      if ((old != null) && !k.equals(old)) {
        // duplicate annotation found with different attribute
        throw new IllegalArgumentException(
          "two different @Keyspace annotations found with class '"
          + clazz.getName()
          + "': "
          + old
          + " and: "
          + k
        );
      }
      // now add all @TypeEntity for this @RootEntity
      final List<ClassInfoImpl<?>> fcs = cs;

      ((RootClassInfoImpl<?>)cinfo).typeImpls()
        .forEach(tcinfo -> fcs.add(tcinfo));
    }
    return cinfos;
  }

  /**
   * Gets the contexts for all POJO classes for which we should alter schemas.
   *
   * @author paouelle
   *
   * @return the list of contexts for all POJO for which we are altering schemas
   * @throws IllegalArgumentException if the value for a provided keyspace key doesn't
   *         match the POJO's definition for that keyspace key
   */
  @SuppressWarnings({"synthetic-access", "unchecked", "rawtypes"})
  private List<ClassInfoImpl<?>.Context> getContexts() {
    if (contexts == null) {
      // split UDT from the rest so we create and report those first
      // NOTE: these locals shadow the field of the same name; the field is
      // only assigned at the bottom of this block
      final List<ClassInfoImpl<?>.Context> ucontexts = new ArrayList<>(32);
      final List<ClassInfoImpl<?>.Context> contexts = new ArrayList<>(32);

      next_keyspace:
      for (final List<ClassInfoImpl<?>> cinfos: keyspaces.values()) {
        // create contexts for all the classes associated with the keyspace
        next_class:
        for (final ClassInfoImpl<?> cinfo: cinfos) {
          final ClassInfoImpl<?>.Context context = cinfo.newContext();
          IllegalArgumentException iae = null;
          int found = 0;

          // populate the required keyspace keys
          for (final Map.Entry<String, FieldInfoImpl<?>> e: (Set<Map.Entry<String, FieldInfoImpl<?>>>)(Set)cinfo.getKeyspaceKeyTypes().entrySet()) {
            final String type = e.getKey();
            final FieldInfoImpl<?> finfo = e.getValue();

            if (!where.keyspaceKeys.containsKey(type)) {
              // we are missing a required keyspace key for this POJO and since all pojos
              // references the same @Keyspace, all of them will have the same problem
              // -- so continue with next keyspace and ignore any errors that might
              // occurred for keyspace keys before that
              continue next_keyspace;
            }
            found++;
            // don't forget to convert the type into the keyspace key name for the
            // associated POJO
            try {
              context.addKeyspaceKey(finfo.getKeyspaceKey().name(), where.keyspaceKeys.get(type));
            } catch (ExcludedKeyspaceKeyException ee) {
              // ignore and skip this class
              continue next_class;
            } catch (IllegalArgumentException ee) {
              if (iae == null) {
                // keep only first one
                iae = ee;
              }
            }
          }
          // if we get here then we must have found all required keyspace keys in the
          // where clause so whether or not we consider it depends on whether or
          // not we required the exact same number of keyspace keys as defined if we
          // are matching or if we are not matching (and there would potentially
          // be more keyspace keys defined than we needed)
          if ((found == where.keyspaceKeys.size()) || !matching) {
            // if we got an error on one of the keyspace key, throw it back
            if (iae != null) {
              throw iae;
            }
            if (cinfo instanceof UDTClassInfoImpl) {
              ucontexts.add(context);
            } else {
              contexts.add(context);
            }
          }
        }
      }
      // publish the cache with all UDT contexts first
      this.contexts = new ArrayList<>(ucontexts.size() + contexts.size());
      this.contexts.addAll(ucontexts);
      this.contexts.addAll(contexts);
    }
    // refers to the field here (the shadowing locals are out of scope)
    return contexts;
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.impl.StatementImpl#setDirty()
   */
  @Override
  protected void setDirty() {
    super.setDirty();
    // invalidate the cached contexts so they get recomputed on next access
    this.contexts = null;
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.impl.SequenceStatementImpl#buildSequencedStatements()
   */
  @Override
  protected final List<StatementImpl<?, ?, ?>> buildSequencedStatements() {
    final List<ClassInfoImpl<?>.Context> contexts = getContexts();
    // we do not want to alter the same keyspace or table so many times for nothing
    final Set<Pair<String, Keyspace>> keyspaces = new HashSet<>(contexts.size() * 3);
    final Map<Pair<String, Keyspace>, Set<Table>> tables = new HashMap<>(contexts.size() * 3);
    // create groups to aggregate all alter keyspaces, table, create index, and initial objects
    final GroupImpl kgroup = init(new GroupImpl(
      Optional.empty(), new GroupableStatement<?, ?>[0], mgr, bridge
    ));
    final GroupImpl tgroup = init(new GroupImpl(
      Optional.empty(), new GroupableStatement<?, ?>[0], mgr, bridge
    ));
    final GroupImpl igroup = init(new GroupImpl(
      Optional.empty(), new GroupableStatement<?, ?>[0], mgr, bridge
    ));
    final SequenceImpl yseq = init(new SequenceImpl(
      Optional.empty(), new SequenceableStatement<?, ?>[0], mgr, bridge
    ));
    final GroupImpl group = init(new GroupImpl(
      Optional.empty(), new GroupableStatement<?, ?>[0], mgr, bridge
    ));

    contexts.forEach(c -> {
      @SuppressWarnings({"rawtypes", "unchecked"})
      final AlterSchemaImpl<?> as = init(new AlterSchemaImpl(c, mgr, bridge));

      as.buildSequencedStatements(
        keyspaces, tables, kgroup, tgroup, igroup, yseq, group
      );
    });
    // only keep the non-empty groups/sequences, in execution order
    return Stream.of(kgroup, yseq, tgroup, igroup, group)
      .filter(g -> !g.isEmpty())
      .collect(Collectors.toList());
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.impl.StatementImpl#appendGroupSubType(java.lang.StringBuilder)
   */
  @Override
  protected void appendGroupSubType(StringBuilder builder) {
    builder.append(" ALTER");
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.impl.SequenceStatementImpl#appendGroupType(java.lang.StringBuilder)
   */
  @Override
  protected void appendGroupType(StringBuilder builder) {
    builder.append("SCHEMAS");
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#getObjectClasses()
   */
  @Override
  public Set<Class<?>> getObjectClasses() {
    return objectClasses()
      .sequential()
      .collect(Collectors.toSet());
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#getClassInfos()
   */
  @Override
  public Set<ClassInfo<?>> getClassInfos() {
    return classInfos()
      .sequential()
      .collect(Collectors.toCollection(LinkedHashSet::new));
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#getDefinedClassInfos()
   */
  @Override
  public Set<ClassInfo<?>> getDefinedClassInfos() {
    return definedClassInfos()
      .sequential()
      .collect(Collectors.toCollection(LinkedHashSet::new));
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#objectClasses()
   */
  @SuppressWarnings({"cast", "rawtypes", "unchecked"})
  @Override
  public Stream<Class<?>> objectClasses() {
    return (Stream<Class<?>>)(Stream)getContexts().stream()
      .flatMap(c -> c.getClassInfo().objectClasses())
      .distinct();
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#classInfos()
   */
  @SuppressWarnings({"cast", "rawtypes", "unchecked"})
  @Override
  public Stream<ClassInfo<?>> classInfos() {
    return (Stream<ClassInfo<?>>)(Stream)getContexts().stream()
      .flatMap(c -> c.getClassInfo().classInfos())
      .distinct();
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#definedClassInfos()
   */
  @SuppressWarnings({"cast", "rawtypes", "unchecked"})
  @Override
  public Stream<ClassInfo<?>> definedClassInfos() {
    return (Stream<ClassInfo<?>>)(Stream)keyspaces.values().stream()
      .flatMap(cl -> cl.stream())
      .flatMap(cl -> cl.classInfos())
      .distinct();
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#where(org.helenus.driver.Clause)
   */
  @Override
  public Where where(Clause clause) {
    return where.and(clause);
  }

  /**
   * {@inheritDoc}
   *
   * @author paouelle
   *
   * @see org.helenus.driver.CreateSchemas#where()
   */
  @Override
  public Where where() {
    return where;
  }

  /**
   * The <code>WhereImpl</code> class defines a WHERE clause for the ALTER
   * SCHEMAS statement which can be used to specify keyspace key types used for
   * keyspace names.
   *
   * @copyright 2015-2016 The Helenus Driver Project Authors
   *
   * @author The Helenus Driver Project Authors
   * @version 1 - Apr 2, 2015 - paouelle - Creation
   *
   * @since 1.0
   */
  public static class WhereImpl
    extends ForwardingStatementImpl<Void, VoidFuture, Void, AlterSchemasImpl>
    implements Where {
    /**
     * Holds the keyspace keys with their values.
     *
     * @author paouelle
     */
    private final Map<String, Object> keyspaceKeys = new HashMap<>(8);

    /**
     * Instantiates a new <code>WhereImpl</code> object.
     *
     * @author paouelle
     *
     * @param statement the encapsulated statement
     */
    WhereImpl(AlterSchemasImpl statement) {
      super(statement);
    }

    /**
     * {@inheritDoc}
     *
     * @author paouelle
     *
     * @see org.helenus.driver.CreateTable.Where#and(org.helenus.driver.Clause)
     */
    @Override
    public Where and(Clause clause) {
      org.apache.commons.lang3.Validate.notNull(clause, "invalid null clause");
      org.apache.commons.lang3.Validate.isTrue(
        clause instanceof ClauseImpl,
        "unsupported class of clauses: %s",
        clause.getClass().getName()
      );
      org.apache.commons.lang3.Validate.isTrue(
        !(clause instanceof ClauseImpl.DelayedWithObject),
        "unsupported clause '%s' for a ALTER SCHEMAS statement",
        clause
      );
      if (clause instanceof ClauseImpl.Delayed) {
        for (final Clause c: ((ClauseImpl.Delayed)clause).processWith(statement.getContext().getClassInfo())) {
          and(c); // recurse to add the processed clause
        }
      } else {
        final ClauseImpl c = (ClauseImpl)clause;

        // only equality clauses make sense for keyspace keys
        org.apache.commons.lang3.Validate.isTrue(
          clause instanceof Clause.Equality,
          "unsupported class of clauses: %s",
          clause.getClass().getName()
        );
        keyspaceKeys.put(c.getColumnName().toString(), c.firstValue());
        setDirty();
      }
      return this;
    }
  }
}
package basicgraph; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import util.GraphLoader; /** An abstract class that implements a directed graph. * The graph may have self-loops, parallel edges. * Vertices are labeled by integers 0 .. n-1 * and may also have String labels. * The edges of the graph are not labeled. * Representation of edges is left abstract. * * @author UCSD MOOC development team and YOU * */ public abstract class Graph { private int numVertices; private int numEdges; //optional association of String labels to vertices private Map<Integer,String> vertexLabels; /** * Create a new empty Graph */ public Graph() { numVertices = 0; numEdges = 0; vertexLabels = null; } /** * Report size of vertex set * @return The number of vertices in the graph. */ public int getNumVertices() { return numVertices; } /** * Report size of edge set * @return The number of edges in the graph. */ public int getNumEdges() { return numEdges; } /** * Add new vertex to the graph. This vertex will * have as its index the next available integer. * Precondition: contiguous integers are used to * index vertices. * @return index of newly added vertex */ public int addVertex() { implementAddVertex(); numVertices ++; return (numVertices-1); } /** * Abstract method implementing adding a new * vertex to the representation of the graph. */ public abstract void implementAddVertex(); /** * Add new edge to the graph between given vertices, * @param u Index of the start point of the edge to be added. * @param v Index of the end point of the edge to be added. */ public void addEdge(int v , int w) { numEdges ++; if (v < numVertices && w < numVertices) { implementAddEdge(v , w); } else { throw new IndexOutOfBoundsException(); } } /** * Abstract method implementing adding a new * edge to the representation of the graph. 
*/ public abstract void implementAddEdge(int v, int w); /** * Get all (out-)neighbors of a given vertex. * @param v Index of vertex in question. * @return List of indices of all vertices that are adjacent to v * via outgoing edges from v. */ public abstract List<Integer> getNeighbors(int v); /** * Get all in-neighbors of a given vertex. * @param v Index of vertex in question. * @return List of indices of all vertices that are adjacent to v * via incoming edges to v. */ public abstract List<Integer> getInNeighbors(int v); /** * The degree sequence of a graph is a sorted (organized in numerical order * from largest to smallest, possibly with repetitions) list of the degrees * of the vertices in the graph. * * @return The degree sequence of this graph. */ public List<Integer> degreeSequence() { // Implement in part 1 of week 1 List<Integer> degreeSequence = new ArrayList<>(); for (int i = 0; i < numVertices; i++) degreeSequence.add(getInNeighbors(i).size() + getNeighbors(i).size()); Collections.sort(degreeSequence, new Comparator<Integer>() { @Override public int compare(Integer o1, Integer o2) { return o2.compareTo(o1); } }); return degreeSequence; } /** * Get all the vertices that are 2 away from the vertex in question. * @param v The starting vertex * @return A list of the vertices that can be reached in exactly two hops (by * following two edges) from vertex v. * XXX: Implement in part 2 of week 1 for each subclass of Graph */ public abstract List<Integer> getDistance2(int v); /** Return a String representation of the graph * @return A string representation of the graph */ public String toString() { String s = "\nGraph with " + numVertices + " vertices and " + numEdges + " edges.\n"; s += "Degree sequence: " + degreeSequence() + ".\n"; if (numVertices <= 20) s += adjacencyString(); return s; } /** * Generate string representation of adjacency list * @return the String */ public abstract String adjacencyString(); // The next methods implement labeled vertices. 
// Basic graphs may or may not have labeled vertices. /** * Create a new map of vertex indices to string labels * (Optional: only if using labeled vertices.) */ public void initializeLabels() { vertexLabels = new HashMap<Integer,String>(); } /** * Test whether some vertex in the graph is labeled * with a given index. * @param The index being checked * @return True if there's a vertex in the graph with this index; false otherwise. */ public boolean hasVertex(int v) { return v < getNumVertices(); } /** * Test whether some vertex in the graph is labeled * with a given String label * @param The String label being checked * @return True if there's a vertex in the graph with this label; false otherwise. */ public boolean hasVertex(String s) { return vertexLabels.containsValue(s); } /** * Add label to an unlabeled vertex in the graph. * @param The index of the vertex to be labeled. * @param The label to be assigned to this vertex. */ public void addLabel(int v, String s) { if (v < getNumVertices() && !vertexLabels.containsKey(v)) { vertexLabels.put(v, s); } else { System.out.println("ERROR: tried to label a vertex that is out of range or already labeled"); } } /** * Report label of vertex with given index * @param The integer index of the vertex * @return The String label of this vertex */ public String getLabel(int v) { if (vertexLabels.containsKey(v)) { return vertexLabels.get(v); } else return null; } /** * Report index of vertex with given label. * (Assume distinct labels for vertices.) 
* @param The String label of the vertex * @return The integer index of this vertex */ public int getIndex(String s) { for (Map.Entry<Integer,String> entry : vertexLabels.entrySet()) { if (entry.getValue().equals(s)) return entry.getKey(); } System.out.println("ERROR: No vertex with this label"); return -1; } public static void main (String[] args) { GraphLoader.createIntersectionsFile("data/maps/myucsd.map", "data/intersections/myucsd.intersections"); // For testing of Part 1 functionality // Add your tests here to make sure your degreeSequence method is returning // the correct list, after examining the graphs. System.out.println("Loading graphs based on real data..."); System.out.println("Goal: use degree sequence to analyse graphs."); System.out.println("****"); System.out.println("Roads / intersections:"); GraphAdjList graphFromFile = new GraphAdjList(); GraphLoader.loadRoadMap("data/testdata/simpletest.map", graphFromFile); System.out.println(graphFromFile); System.out.println("Observe all degrees are <= 12."); System.out.println("****"); System.out.println("\n****"); // You can test with real road data here. Use the data files in data/maps System.out.println("Flight data:"); GraphAdjList airportGraph = new GraphAdjList(); GraphLoader.loadRoutes("data/airports/routesUA.dat", airportGraph); System.out.println(airportGraph); System.out.println("Observe most degrees are small (1-30), eight are over 100."); System.out.println("****"); //For testing Part 2 functionality // Test your distance2 code here. System.out.println("Testing distance-two methods on sample graphs..."); System.out.println("Goal: implement method using two approaches."); } }
/*
 * oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
 *
 * Copyright (c) 2014, Gluu
 */
package org.gluu.oxtrust.ldap.service.uma;

import org.gluu.search.filter.Filter;
import org.gluu.oxtrust.ldap.service.OrganizationService;
import org.gluu.oxtrust.util.OxTrustConstants;
import org.gluu.persist.ldap.impl.LdapEntryManager;
import org.gluu.persist.model.base.SimpleBranch;
import org.slf4j.Logger;
import org.xdi.oxauth.model.uma.persistence.UmaResource;
import org.xdi.util.INumGenerator;
import org.xdi.util.StringHelper;

import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.Serializable;
import java.util.List;

/**
 * Provides operations with UMA resources stored under the
 * "ou=resources,ou=uma" branch of the organization's LDAP subtree.
 *
 * @author Yuriy Movchan Date: 12/06/2012
 */
@Stateless
@Named("umaResourcesService")
public class ResourceSetService implements Serializable {

	private static final long serialVersionUID = -1537567020929600777L;

	@Inject
	private LdapEntryManager ldapEntryManager;

	@Inject
	private OrganizationService organizationService;

	@Inject
	private Logger log;

	/**
	 * Create the "ou=resources" organizational unit branch entry.
	 */
	public void addBranch() {
		SimpleBranch branch = new SimpleBranch();
		branch.setOrganizationalUnitName("resources");
		branch.setDn(getDnForResource(null));

		ldapEntryManager.persist(branch);
	}

	/**
	 * Check whether the "ou=resources" branch entry already exists.
	 *
	 * @return True if the branch exists
	 */
	public boolean containsBranch() {
		return ldapEntryManager.contains(SimpleBranch.class, getDnForResource(null));
	}

	/**
	 * Create resource branch if needed
	 */
	public void prepareResourceBranch() {
		if (!containsBranch()) {
			addBranch();
		}
	}

	/**
	 * Add new resource entry
	 *
	 * @param resource Resource
	 */
	public void addResource(UmaResource resource) {
		ldapEntryManager.persist(resource);
	}

	/**
	 * Update resource entry
	 *
	 * @param resource Resource
	 */
	public void updateResource(UmaResource resource) {
		ldapEntryManager.merge(resource);
	}

	/**
	 * Remove resource entry
	 *
	 * @param resource Resource
	 */
	public void removeResource(UmaResource resource) {
		ldapEntryManager.remove(resource);
	}

	/**
	 * Check if LDAP server contains resource with specified attributes
	 *
	 * @param resource Resource used as the search example
	 * @return True if resource with specified attributes exist
	 */
	public boolean containsResource(UmaResource resource) {
		return ldapEntryManager.contains(resource);
	}

	/**
	 * Get all resources, up to the given maximum number of entries.
	 *
	 * @param sizeLimit Maximum count of results
	 * @return List of resources
	 */
	public List<UmaResource> getAllResources(int sizeLimit) {
		return ldapEntryManager.findEntries(getDnForResource(null), UmaResource.class, null, sizeLimit);
	}

	/**
	 * Get all resources
	 *
	 * @param ldapReturnAttributes Attributes to fetch for each entry
	 * @return List of resources
	 */
	public List<UmaResource> getAllResources(String... ldapReturnAttributes) {
		return ldapEntryManager.findEntries(getDnForResource(null), UmaResource.class, null, ldapReturnAttributes);
	}

	/**
	 * Search resources by pattern. Matches the pattern as a substring of
	 * either the oxId or the display name.
	 *
	 * @param pattern Pattern
	 * @param sizeLimit Maximum count of results
	 * @return List of resources
	 */
	public List<UmaResource> findResources(String pattern, int sizeLimit) {
		String[] targetArray = new String[] { pattern };
		Filter oxIdFilter = Filter.createSubstringFilter("oxId", null, targetArray, null);
		Filter displayNameFilter = Filter.createSubstringFilter(OxTrustConstants.displayName, null, targetArray, null);
		Filter searchFilter = Filter.createORFilter(oxIdFilter, displayNameFilter);

		List<UmaResource> result = ldapEntryManager.findEntries(getDnForResource(null), UmaResource.class, searchFilter, sizeLimit);

		return result;
	}

	/**
	 * Get resources by example
	 *
	 * @param resource Resource
	 * @return List of Resources which conform example
	 */
	// NOTE(review): named "findResourceSets" while every sibling uses
	// "findResources*" - likely a leftover from the old "resource set"
	// terminology; renaming would break callers, so left as-is.
	public List<UmaResource> findResourceSets(UmaResource resource) {
		return ldapEntryManager.findEntries(resource);
	}

	/**
	 * Get resources by Id
	 *
	 * @param id Id
	 * @return List of Resources with the specified id
	 */
	public List<UmaResource> findResourcesById(String id) {
		return ldapEntryManager.findEntries(getDnForResource(null), UmaResource.class, Filter.createEqualityFilter("oxId", id));
	}

	/**
	 * Get resource set by DN
	 *
	 * @param dn Resource set DN
	 * @return Resource set
	 */
	public UmaResource getResourceByDn(String dn) {
		return ldapEntryManager.find(UmaResource.class, dn);
	}

	/**
	 * Generate new inum for resource set. Loops until an inum is found whose
	 * corresponding DN does not already exist in LDAP.
	 *
	 * @return New inum for resource set
	 */
	public String generateInumForNewResource() {
		UmaResource resource = new UmaResource();
		String newInum = null;
		do {
			newInum = generateInumForNewResourceImpl();
			String newDn = getDnForResource(newInum);
			resource.setDn(newDn);
		} while (ldapEntryManager.contains(resource));

		return newInum;
	}

	/**
	 * Generate a candidate inum for a new resource set: the organization inum
	 * plus a 2-part random suffix (uniqueness is verified by the caller).
	 *
	 * @return New inum for resource set
	 */
	private String generateInumForNewResourceImpl() {
		String orgInum = organizationService.getInumForOrganization();
		return orgInum + OxTrustConstants.inumDelimiter + INumGenerator.generate(2);
	}

	/**
	 * Build DN string for resource.
	 *
	 * @param oxId the resource id, or null/empty for the branch DN itself
	 * @return the DN "oxId=...,ou=resources,ou=uma,&lt;orgDn&gt;", or the
	 *         branch DN "ou=resources,ou=uma,&lt;orgDn&gt;" when oxId is empty
	 */
	public String getDnForResource(String oxId) {
		String orgDn = organizationService.getDnForOrganization();
		if (StringHelper.isEmpty(oxId)) {
			return String.format("ou=resources,ou=uma,%s", orgDn);
		}
		return String.format("oxId=%s,ou=resources,ou=uma,%s", oxId, orgDn);
	}

	/**
	 * Get resources by scope
	 *
	 * @param scopeId the scope id to match against oxAuthUmaScope
	 * @return List of Resources with the specified scope
	 */
	public List<UmaResource> findResourcesByScope(String scopeId) {
		return ldapEntryManager.findEntries(getDnForResource(null), UmaResource.class, Filter.createEqualityFilter("oxAuthUmaScope", scopeId));
	}
}
/* Copyright 2007-2009 Selenium committers Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium.remote; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.openqa.selenium.Cookie; import org.openqa.selenium.Platform; import org.openqa.selenium.Proxy; import org.openqa.selenium.WebDriverException; import org.openqa.selenium.browserlaunchers.DoNotUseProxyPac; import org.openqa.selenium.logging.LogEntries; import org.openqa.selenium.logging.LogEntry; import org.openqa.selenium.logging.LogType; import org.openqa.selenium.logging.LoggingPreferences; import java.awt.*; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @RunWith(JUnit4.class) public class BeanToJsonConverterTest { @Test public void 
testShouldBeAbleToConvertASimpleString() throws Exception { String json = new BeanToJsonConverter().convert("cheese"); assertThat(json, is("cheese")); } @Test public void testShouldConvertAMapIntoAJsonObject() throws Exception { Map<String, String> toConvert = new HashMap<String, String>(); toConvert.put("cheese", "cheddar"); toConvert.put("fish", "nice bit of haddock"); String json = new BeanToJsonConverter().convert(toConvert); JSONObject converted = new JSONObject(json); assertThat((String) converted.get("cheese"), is("cheddar")); } @Test public void testShouldConvertASimpleJavaBean() throws Exception { String json = new BeanToJsonConverter().convert(new SimpleBean()); JSONObject converted = new JSONObject(json); assertThat((String) converted.get("foo"), is("bar")); assertThat((Boolean) converted.get("simple"), is(true)); assertThat((Double) converted.get("number"), is(123.456)); } @Test public void testShouldConvertArrays() throws Exception { String json = new BeanToJsonConverter().convert(new BeanWithArray()); JSONObject converted = new JSONObject(json); JSONArray allNames = (JSONArray) converted.get("names"); assertThat(allNames.length(), is(3)); } @Test public void testShouldConvertCollections() throws Exception { String json = new BeanToJsonConverter().convert(new BeanWithCollection()); JSONObject converted = new JSONObject(json); JSONArray allNames = (JSONArray) converted.get("something"); assertThat(allNames.length(), is(2)); } @Test public void testShouldConvertNumbersAsLongs() throws Exception { String json = new BeanToJsonConverter().convert(new Exception()); Map<?,?> map = new JsonToBeanConverter().convert(Map.class, json); List<?> stack = (List<?>) map.get("stackTrace"); Map<?,?> line = (Map<?,?>) stack.get(0); Object o = line.get("lineNumber"); assertTrue("line number is of type: " + o.getClass(), o instanceof Long); } @Test public void testShouldNotChokeWhenCollectionIsNull() throws Exception { try { new BeanToJsonConverter().convert(new 
BeanWithNullCollection()); } catch (Exception e) { e.printStackTrace(); fail("That shouldn't have happened"); } } @Test public void testShouldConvertEnumsToStrings() throws Exception { // If this doesn't hang indefinitely, we're all good new BeanToJsonConverter().convert(State.INDIFFERENT); } @Test public void testShouldConvertEnumsWithMethods() throws Exception { // If this doesn't hang indefinitely, we're all good new BeanToJsonConverter().convert(WithMethods.CHEESE); } @Test public void testNullAndAnEmptyStringAreEncodedDifferently() throws Exception { BeanToJsonConverter converter = new BeanToJsonConverter(); String nullValue = converter.convert(null); String emptyString = converter.convert(""); assertFalse(emptyString.equals(nullValue)); } @Test public void testShouldBeAbleToConvertAPoint() throws Exception { Point point = new Point(65, 75); try { new BeanToJsonConverter().convert(point); } catch (StackOverflowError e) { fail("This should never happen"); } } @Test public void testShouldEncodeClassNameAsClassProperty() throws Exception { String json = new BeanToJsonConverter().convert(new SimpleBean()); JSONObject converted = new JSONObject(json); assertEquals(SimpleBean.class.getName(), converted.get("class")); } @Test public void testShouldBeAbleToConvertASessionId() throws JSONException { SessionId sessionId = new SessionId("some id"); String json = new BeanToJsonConverter().convert(sessionId); JSONObject converted = new JSONObject(json); assertEquals("some id", converted.getString("value")); } @Test public void testShouldBeAbleToConvertAJsonObject() throws JSONException { JSONObject obj = new JSONObject(); obj.put("key", "value"); String json = new BeanToJsonConverter().convert(obj); JSONObject converted = new JSONObject(json); assertEquals("value", converted.getString("key")); } @Test public void testShouldBeAbleToConvertACapabilityObject() throws JSONException { DesiredCapabilities caps = new DesiredCapabilities(); caps.setCapability("key", "alpha"); 
String json = new BeanToJsonConverter().convert(caps); JSONObject converted = new JSONObject(json); assertEquals("alpha", converted.getString("key")); } @Test public void testShouldConvertAProxyPacProperly() throws JSONException { DoNotUseProxyPac pac = new DoNotUseProxyPac(); pac.map("*/selenium/*").toProxy("http://localhost:8080/selenium-server"); pac.map("/[a-zA-Z]{4}.microsoft.com/").toProxy("http://localhost:1010/selenium-server/"); pac.map("/flibble*").toNoProxy(); pac.mapHost("www.google.com").toProxy("http://fishy.com/"); pac.mapHost("seleniumhq.org").toNoProxy(); pac.defaults().toNoProxy(); String json = new BeanToJsonConverter().convert(pac); JSONObject converted = new JSONObject(json); assertEquals("http://localhost:8080/selenium-server", converted.getJSONObject("proxiedUrls").get("*/selenium/*")); assertEquals("http://localhost:1010/selenium-server/", converted.getJSONObject("proxiedRegexUrls").get("/[a-zA-Z]{4}.microsoft.com/")); assertEquals("/flibble*", converted.getJSONArray("directUrls").get(0)); assertEquals("seleniumhq.org", converted.getJSONArray("directHosts").get(0)); assertEquals("http://fishy.com/", converted.getJSONObject("proxiedHosts").get("www.google.com")); assertEquals("'DIRECT'", converted.get("defaultProxy")); } @Test public void testShouldConvertAProxyCorrectly() throws JSONException { Proxy proxy = new Proxy(); proxy.setHttpProxy("localhost:4444"); DesiredCapabilities caps = new DesiredCapabilities("foo", "1", Platform.LINUX); caps.setCapability(CapabilityType.PROXY, proxy); Map<String, ?> asMap = ImmutableMap.of("desiredCapabilities", caps); Command command = new Command(new SessionId("empty"), DriverCommand.NEW_SESSION, asMap); String json = new BeanToJsonConverter().convert(command.getParameters()); JSONObject converted = new JSONObject(json); JSONObject capsAsMap = converted.getJSONObject("desiredCapabilities"); assertEquals(json, proxy.getHttpProxy(), capsAsMap.getJSONObject(CapabilityType.PROXY).get("httpProxy")); } @Test 
public void testShouldCallToJsonMethodIfPresent() { String json = new BeanToJsonConverter().convert(new JsonAware("converted")); assertEquals("converted", json); } private void verifyStackTraceInJson(String json, StackTraceElement[] stackTrace) { int posOfLastStackTraceElement = 0; for (StackTraceElement e : stackTrace) { if (e.getFileName() != null) { // Native methods may have null filenames assertTrue("Filename not found", json.contains("\"fileName\":\"" + e.getFileName() + "\"")); } assertTrue("Line number not found", json.contains("\"lineNumber\":" + e.getLineNumber() + "")); assertTrue("class not found.", json.contains("\"class\":\"" + e.getClass().getName() + "\"")); assertTrue("class name not found", json.contains("\"className\":\"" + e.getClassName() + "\"")); assertTrue("method name not found.", json.contains("\"methodName\":\"" + e.getMethodName() + "\"")); int posOfCurrStackTraceElement = json.indexOf(e.getMethodName()); assertTrue("Mismatch in order of stack trace elements.", posOfCurrStackTraceElement > posOfLastStackTraceElement); } } @Test public void testShouldBeAbleToConvertARuntimeException() { RuntimeException clientError = new RuntimeException("foo bar baz!"); StackTraceElement[] stackTrace = clientError.getStackTrace(); String json = new BeanToJsonConverter().convert(clientError); assertTrue(json.contains("\"message\":\"foo bar baz!\"")); assertTrue(json.contains("\"class\":\"java.lang.RuntimeException\"")); assertTrue(json.contains("\"stackTrace\"")); verifyStackTraceInJson(json, stackTrace); } @Test public void testShouldBeAbleToConvertAWebDriverException() throws JSONException { RuntimeException clientError = new WebDriverException("foo bar baz!"); StackTraceElement[] stackTrace = clientError.getStackTrace(); String raw = new BeanToJsonConverter().convert(clientError); JSONObject json = new JSONObject(raw); assertTrue(raw, json.has("buildInformation")); assertTrue(raw, json.has("systemInformation")); assertTrue(raw, 
json.has("additionalInformation")); assertTrue(raw, json.has("message")); assertThat(json.getString("message"), containsString("foo bar baz!")); assertThat(json.getString("class"), is(WebDriverException.class.getName())); assertTrue(raw, json.has("stackTrace")); verifyStackTraceInJson(raw, stackTrace); } @Test public void testShouldConvertDatesToMillisecondsInUtcTime() { String jsonStr = new BeanToJsonConverter().convert(new Date(0)); assertEquals(0, Integer.valueOf(jsonStr).intValue()); } @Test public void testShouldConvertDateFieldsToSecondsSince1970InUtcTime() throws JSONException { class Bean { private final Date date; Bean(Date date) { this.date = date; } public Date getDate() { return date; } } Date date = new Date(123456789L); Bean bean = new Bean(date); String jsonStr = new BeanToJsonConverter().convert(bean); JSONObject json = new JSONObject(jsonStr); assertTrue(json.has("date")); assertEquals(123456L, json.getLong("date")); } @Test public void testShouldBeAbleToConvertACookie() throws JSONException { Date expiry = new Date(); Cookie cookie = new Cookie("name", "value", "domain", "/path", expiry, true); String jsonStr = new BeanToJsonConverter().convert(cookie); JSONObject json = new JSONObject(jsonStr); assertEquals("name", json.getString("name")); assertEquals("value", json.getString("value")); assertEquals("domain", json.getString("domain")); assertEquals("/path", json.getString("path")); assertTrue(json.getBoolean("secure")); assertEquals(TimeUnit.MILLISECONDS.toSeconds(expiry.getTime()), json.getLong("expiry")); } @Test public void testUnsetCookieFieldsAreUndefined() { Cookie cookie = new Cookie("name", "value"); String jsonStr = new BeanToJsonConverter().convert(cookie); // assertThat(jsonStr, not(containsString("path"))); assertThat(jsonStr, not(containsString("domain"))); assertThat(jsonStr, not(containsString("expiry"))); } @Test public void testProperlyConvertsNulls() { Map<String, Object> frameId = Maps.newHashMap(); frameId.put("id", null); 
String payload = new BeanToJsonConverter().convert(frameId); assertEquals("{\"id\":null}", payload); } @Test public void testConvertLoggingPreferencesToJson() throws JSONException { LoggingPreferences prefs = new LoggingPreferences(); prefs.enable(LogType.CLIENT, Level.FINE); prefs.enable(LogType.DRIVER, Level.ALL); JSONObject json = new JSONObject(new BeanToJsonConverter().convert(prefs)); assertEquals("FINE", json.getString(LogType.CLIENT)); assertEquals("ALL", json.getString(LogType.DRIVER)); } @Test public void testConvertsLogEntryToJson() throws JSONException { String raw = new BeanToJsonConverter().convert(new LogEntry(Level.OFF, 17, "foo")); JSONObject object = new JSONObject(raw); assertEquals("foo", object.get("message")); assertEquals(17, object.get("timestamp")); assertEquals("OFF", object.get("level")); } @Test public void testConvertLogEntriesToJson() throws JSONException { long timestamp = new Date().getTime(); final LogEntry entry1 = new LogEntry(Level.OFF, timestamp, "entry1"); final LogEntry entry2 = new LogEntry(Level.WARNING, timestamp, "entry2"); LogEntries entries = new LogEntries(Lists.<LogEntry>newArrayList(entry1, entry2)); JSONArray json = new JSONArray(new BeanToJsonConverter().convert(entries)); JSONObject obj1 = (JSONObject) json.get(0); JSONObject obj2 = (JSONObject) json.get(1); assertEquals("OFF", obj1.get("level")); assertEquals(timestamp, obj1.get("timestamp")); assertEquals("entry1", obj1.get("message")); assertEquals("WARNING", obj2.get("level")); assertEquals(timestamp, obj2.get("timestamp")); assertEquals("entry2", obj2.get("message")); } @SuppressWarnings("unused") private static class SimpleBean { public String getFoo() { return "bar"; } public boolean isSimple() { return true; } public double getNumber() { return 123.456; } } @SuppressWarnings("unused") private static class BeanWithArray { public String[] getNames() { return new String[] {"peter", "paul", "mary"}; } } private static class BeanWithCollection { 
@SuppressWarnings("unused") public Set<?> getSomething() { Set<Integer> integers = new HashSet<Integer>(); integers.add(1); integers.add(43); return integers; } } private static class BeanWithNullCollection { @SuppressWarnings("unused") public List<?> getList() { return null; } } public static enum State { GOOD, BAD, INDIFFERENT } public static enum WithMethods { CHEESE() { @Override public void eat(String foodStuff) { // Does nothing } }, EGGS() { @Override public void eat(String foodStuff) { // Does nothing too } }; public abstract void eat(String foodStuff); } public class JsonAware { private String convertedValue; public JsonAware(String convertedValue) { this.convertedValue = convertedValue; } public String toJson() { return convertedValue; } } }
/* * Copyright (c) 2000, 2006, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.jmx.snmp.agent; // java imports // import java.io.Serializable; import java.util.Date; import java.util.Vector; import java.util.Enumeration; import java.util.List; import java.util.ArrayList; // jmx imports // import javax.management.Notification; import javax.management.ObjectName; import javax.management.NotificationFilter; import javax.management.NotificationListener; import javax.management.NotificationBroadcaster; import javax.management.MBeanNotificationInfo; import javax.management.ListenerNotFoundException; import com.sun.jmx.snmp.SnmpOid; import com.sun.jmx.snmp.SnmpValue; import com.sun.jmx.snmp.SnmpVarBind; import com.sun.jmx.snmp.SnmpStatusException; /** * This class is an abstraction for an SNMP table. 
 * Base class for implementing SNMP tables in the MBean world.
 *
 * <p>Its responsibility is to synchronize the MBean view of the table
 * (list of entries) with the MIB view (array of OID indexes). Each
 * object of this class is bound to the metadata object
 * ({@link com.sun.jmx.snmp.agent.SnmpMibTable}) which manages the same
 * SNMP table within the MIB.</p>
 *
 * <p>For each table defined in a MIB, mibgen generates a specific
 * subclass called Table<i>TableName</i>, plus a corresponding
 * <i>TableName</i>Meta class extending SnmpMibTable for the MIB view
 * of the same table.</p>
 *
 * <p>Objects of this class are instantiated by MBeans representing
 * the SNMP group to which the table belongs.</p>
 *
 * <p><b>This API is a Sun Microsystems internal API and is subject
 * to change without notice.</b></p>
 *
 * @see com.sun.jmx.snmp.agent.SnmpTableEntryFactory
 * @see com.sun.jmx.snmp.agent.SnmpMibTable
 */
public abstract class SnmpTableSupport implements SnmpTableEntryFactory,
        // NPCTE fix for bugId 4499265, esc 0, MR 04 sept 2001:
        // Serializable added so table MBeans survive serialization.
        SnmpTableCallbackHandler, Serializable {
    // end of NPCTE fix for bugId 4499265

    //-----------------------------------------------------------------
    //
    //  Protected Variables
    //
    //-----------------------------------------------------------------

    // The MBean-side list of entries, kept in sync with the meta
    // object's OID index array via the addEntryCb/removeEntryCb
    // callbacks below. May be null if allocateTable() is overridden
    // to return null (then getEntry() always returns null).
    protected List<Object> entries;

    // The associated metadata object (MIB view of this table).
    protected SnmpMibTable meta;

    // The MIB to which this table belongs.
    protected SnmpMib theMib;

    //-----------------------------------------------------------------
    //
    //  Private Variables
    //
    //-----------------------------------------------------------------

    // Initialized in bindWithTableMeta() from the meta object; when
    // true, entries must be registered MBeans and an ObjectName is
    // mandatory when adding them.
    private boolean registrationRequired = false;

    //-----------------------------------------------------------------
    //
    //  Constructor
    //
    //-----------------------------------------------------------------

    /**
     * Initializes the table:
     * <ul>
     *   <li>retrieves the corresponding metadata object from the MIB,</li>
     *   <li>binds this object to that metadata object (registering
     *       itself as the entry factory),</li>
     *   <li>allocates the entry storage list.</li>
     * </ul>
     *
     * @param mib The MIB to which this table belongs.
     */
    protected SnmpTableSupport(SnmpMib mib) {
        theMib = mib;
        meta = getRegisteredTableMeta(mib);
        bindWithTableMeta();
        entries = allocateTable();
    }

    //-----------------------------------------------------------------
    //
    //  Implementation of the SnmpTableEntryFactory interface
    //
    //-----------------------------------------------------------------

    /**
     * Creates a new entry in the table.
     *
     * This factory method is generated by mibgen and used internally.
     * It is part of the
     * {@link com.sun.jmx.snmp.agent.SnmpTableEntryFactory} interface.
     * Subclass to implement application-specific creation behaviour.
     *
     * @exception SnmpStatusException if the entry cannot be created.
     */
    public abstract void createNewEntry(SnmpMibSubRequest request,
                                        SnmpOid rowOid, int depth,
                                        SnmpMibTable meta)
        throws SnmpStatusException;

    //-----------------------------------------------------------------
    //
    //  Public methods
    //
    //-----------------------------------------------------------------

    /**
     * Returns the entry located at the given position in the table.
     *
     * @return The entry at the given position, or <code>null</code>
     *         if no entry can be found at this position.
     */
    // XXXX xxxx zzz ZZZZ => public? or protected?
    public Object getEntry(int pos) {
        if (entries == null) return null;
        return entries.get(pos);
    }

    /**
     * Returns the number of entries registered in the table
     * (delegated to the metadata object).
     *
     * @return The number of entries registered in the table.
     */
    public int getSize() {
        return meta.getSize();
    }

    /**
     * Dynamically switches the remote entry creation policy by calling
     * <code>setCreationEnabled()</code> on the metadata object.
     *
     * <ul>
     *   <li><CODE>setCreationEnabled(true)</CODE> enables remote entry
     *       creation via SET operations.</li>
     *   <li><CODE>setCreationEnabled(false)</CODE> disables it.</li>
     * </ul>
     * <p>By default remote entry creation via SET operation is
     * disabled.</p>
     *
     * @param remoteCreationFlag Whether remote entry creation must be
     *        enabled or disabled.
     *
     * @see com.sun.jmx.snmp.agent.SnmpMibTable
     */
    public void setCreationEnabled(boolean remoteCreationFlag) {
        meta.setCreationEnabled(remoteCreationFlag);
    }

    /**
     * Tells whether a new entry should be created when a SET operation
     * is received for an entry that does not exist yet, by calling
     * <code>isCreationEnabled()</code> on the metadata object.
     *
     * @return true if a new entry must be created, false otherwise.<br>
     *         [default: returns <CODE>false</CODE>]
     *
     * @see com.sun.jmx.snmp.agent.SnmpMibTable
     */
    public boolean isCreationEnabled() {
        return meta.isCreationEnabled();
    }

    /**
     * Tells whether the metadata object to which this table is linked
     * requires entries to be registered. In this case passing an
     * ObjectName when registering entries is mandatory.
     *
     * @return <code>true</code> if the associated metadata requires
     *         entries to be registered (mibgen generated generic
     *         metadata).
     */
    public boolean isRegistrationRequired() {
        return registrationRequired;
    }

    /**
     * Builds an entry SnmpIndex from its row OID.
     *
     * This method is generated by mibgen and used internally.
     *
     * @param rowOid The SnmpOid object identifying a table entry.
     *
     * @return The SnmpIndex of the entry identified by
     *         <code>rowOid</code>.
     *
     * @exception SnmpStatusException if the index cannot be built from
     *            the given OID.
     */
    public SnmpIndex buildSnmpIndex(SnmpOid rowOid)
        throws SnmpStatusException {
        return buildSnmpIndex(rowOid.longValue(false), 0);
    }

    /**
     * Builds an SnmpOid from an SnmpIndex object.
     *
     * This method is generated by mibgen and used internally.
     *
     * @param index An SnmpIndex object identifying a table entry.
     *
     * @return The SnmpOid form of the given entry index.
     *
     * @exception SnmpStatusException if the given index is not valid.
     */
    public abstract SnmpOid buildOidFromIndex(SnmpIndex index)
        throws SnmpStatusException;

    /**
     * Builds the default ObjectName of an entry from the SnmpIndex
     * identifying this entry. No access is made on the entry itself.
     *
     * This method is generated by mibgen and used internally.
     * Subclass to change the default ObjectName policy; this is only
     * meaningful when entries are registered MBeans.
     *
     * @param index The SnmpIndex identifying the entry from which we
     *        want to build the default ObjectName.
     *
     * @return The default ObjectName for the entry identified by the
     *         given index.
     *
     * @exception SnmpStatusException if the given index is not valid.
     */
    public abstract ObjectName buildNameFromIndex(SnmpIndex index)
        throws SnmpStatusException;

    //-----------------------------------------------------------------
    //
    //  Implementation of the SnmpTableCallbackHandler interface
    //
    //-----------------------------------------------------------------

    /**
     * Callback invoked by the associated metadata object when a new
     * table entry has been registered in the table metadata; keeps the
     * <code>entries</code> list in sync.
     *
     * @param pos   The position at which the new entry was inserted.
     * @param row   The row OID of the new entry.
     * @param name  The ObjectName of the new entry (as specified by
     *              the factory).
     * @param entry The new entry (as returned by the factory).
     * @param meta  The table metadata object.
     *
     * @exception SnmpStatusException noSuchName if the entry cannot be
     *            inserted at <code>pos</code> (any failure is mapped
     *            to this SNMP error so the agent can report it).
     */
    public void addEntryCb(int pos, SnmpOid row, ObjectName name,
                           Object entry, SnmpMibTable meta)
        throws SnmpStatusException {
        try {
            if (entries != null) entries.add(pos, entry);
        } catch (Exception e) {
            throw new SnmpStatusException(SnmpStatusException.noSuchName);
        }
    }

    /**
     * Callback invoked by the associated metadata object when a table
     * entry has been removed from the table metadata; keeps the
     * <code>entries</code> list in sync.
     *
     * @param pos   The position from which the entry was deleted.
     * @param row   The row OID of the deleted entry.
     * @param name  The ObjectName of the deleted entry (may be null if
     *              ObjectNames were not required).
     * @param entry The deleted entry (may be null if only ObjectNames
     *              were required).
     * @param meta  The table metadata object.
     */
    public void removeEntryCb(int pos, SnmpOid row, ObjectName name,
                              Object entry, SnmpMibTable meta)
        throws SnmpStatusException {
        try {
            if (entries != null) entries.remove(pos);
        } catch (Exception e) {
            // Deliberately swallowed: removal from the MIB view has
            // already happened; a stale/out-of-range position in the
            // MBean-side list must not abort the removal.
        }
    }

    /**
     * Adds an SNMP entry listener to this <CODE>SnmpMibTable</CODE>
     * (delegated to the metadata object).
     *
     * @param listener The listener object which will handle the
     *        notifications emitted by the registered MBean.
     * @param filter The filter object. If filter is null, no filtering
     *        will be performed before handling notifications.
     * @param handback The context to be sent to the listener when a
     *        notification is emitted.
     *
     * @exception IllegalArgumentException Listener parameter is null.
     */
    public void addNotificationListener(NotificationListener listener,
                                        NotificationFilter filter,
                                        Object handback) {
        meta.addNotificationListener(listener, filter, handback);
    }

    /**
     * Removes an SNMP entry listener from this
     * <CODE>SnmpMibTable</CODE>, dropping all information related to
     * this listener (delegated to the metadata object).
     *
     * @param listener The listener object which handled the
     *        notifications emitted by the registered MBean.
     *
     * @exception ListenerNotFoundException The listener is not
     *            registered in the MBean.
     */
    public synchronized void
        removeNotificationListener(NotificationListener listener)
        throws ListenerNotFoundException {
        meta.removeNotificationListener(listener);
    }

    /**
     * Returns a <CODE>NotificationInfo</CODE> object containing the
     * notification class and the notification type sent by the
     * <CODE>SnmpMibTable</CODE>.
     */
    public MBeanNotificationInfo[] getNotificationInfo() {
        return meta.getNotificationInfo();
    }

    //-----------------------------------------------------------------
    //
    //  Protected Abstract methods
    //
    //-----------------------------------------------------------------

    /**
     * Builds an SnmpIndex object from the index part of an OID.
     *
     * This method is generated by mibgen and used internally.
     *
     * @param oid The OID from which to build the index, represented
     *        as an array of long.
     * @param start The position where to start from in the OID array.
     *
     * @return The SnmpIndex built from the given OID fragment.
     *
     * @exception SnmpStatusException if the given index is not valid.
     */
    protected abstract SnmpIndex buildSnmpIndex(long oid[], int start)
        throws SnmpStatusException;

    /**
     * Returns the metadata object associated with this table.
     *
     * This method is generated by mibgen and used internally.
     *
     * @param mib The SnmpMib object holding the metadata corresponding
     *        to this table.
     *
     * @return The metadata object associated with this table, or
     *         <code>null</code> if this implementation of the MIB
     *         doesn't support this table.
     */
    protected abstract SnmpMibTable getRegisteredTableMeta(SnmpMib mib);

    //-----------------------------------------------------------------
    //
    //  Protected methods
    //
    //-----------------------------------------------------------------

    /**
     * Allocates an ArrayList for storing table entries.
     *
     * Called from the constructor. Any {@link java.util.List}
     * implementation can be returned by an override. If
     * <code>null</code> is returned, no entry is stored in the list
     * and getEntry() always returns null.
     *
     * @return A new list in which to store entries.
     */
    protected List<Object> allocateTable() {
        return new ArrayList<Object>();
    }

    /**
     * Adds an entry in this table, synchronizing with the associated
     * table metadata object.
     *
     * Assumes that the given entry will not be registered, or will be
     * registered with its default ObjectName built from the associated
     * SnmpIndex.
     * <p>
     * If the entry is going to be registered, then
     * {@link com.sun.jmx.snmp.agent.SnmpTableSupport#addEntry(SnmpIndex, ObjectName, Object)}
     * should be preferred.
     * <br>This method is mainly provided for backward compatibility.
     *
     * @param index The SnmpIndex built from the given entry.
     * @param entry The entry that should be added in the table.
     *
     * @exception SnmpStatusException if the entry cannot be registered
     *            with the given index.
     */
    protected void addEntry(SnmpIndex index, Object entry)
        throws SnmpStatusException {
        SnmpOid oid = buildOidFromIndex(index);
        ObjectName name = null;
        // Only compute a default ObjectName when the meta object
        // requires registered entries.
        if (isRegistrationRequired()) {
            name = buildNameFromIndex(index);
        }
        meta.addEntry(oid, name, entry);
    }

    /**
     * Adds an entry in this table, synchronizing with the associated
     * table metadata object.
     *
     * @param index The SnmpIndex built from the given entry.
     * @param name  The ObjectName with which this entry will be
     *              registered.
     * @param entry The entry that should be added in the table.
     *
     * @exception SnmpStatusException if the entry cannot be registered
     *            with the given index.
     */
    protected void addEntry(SnmpIndex index, ObjectName name,
                            Object entry)
        throws SnmpStatusException {
        SnmpOid oid = buildOidFromIndex(index);
        meta.addEntry(oid, name, entry);
    }

    /**
     * Removes an entry from this table, synchronizing with the
     * associated table metadata object.
     *
     * @param index The SnmpIndex identifying the entry.
     * @param entry The entry that should be removed from the table.
     *        Optional: may be omitted if it doesn't need to be passed
     *        along to the <code>removeEntryCb()</code> callback
     *        defined in the
     *        {@link com.sun.jmx.snmp.agent.SnmpTableCallbackHandler}
     *        interface.
     *
     * @exception SnmpStatusException if the entry cannot be
     *            unregistered.
     */
    protected void removeEntry(SnmpIndex index, Object entry)
        throws SnmpStatusException {
        SnmpOid oid = buildOidFromIndex(index);
        meta.removeEntry(oid, entry);
    }

    // protected void removeEntry(ObjectName name, Object entry)
    //     throws SnmpStatusException {
    //     meta.removeEntry(name,entry);
    // }

    /**
     * Returns the entries in the table.
     *
     * @return An Object[] array containing the entries registered in
     *         the table, or <code>null</code> when the entries list
     *         itself is null.
     */
    protected Object[] getBasicEntries() {
        if (entries == null) return null;
        Object[] array = new Object[entries.size()];
        entries.toArray(array);
        return array;
    }

    /**
     * Binds this table with its associated metadata, registering
     * itself as an SnmpTableEntryFactory. Also caches the meta
     * object's registration-required flag. A null meta is tolerated
     * (table unsupported by this MIB implementation).
     */
    protected void bindWithTableMeta() {
        if (meta == null) return;
        registrationRequired = meta.isRegistrationRequired();
        meta.registerEntryFactory(this);
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.plugin.hive; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMultimap; import com.google.common.net.HostAndPort; import io.airlift.concurrent.BoundedExecutor; import io.airlift.json.JsonCodec; import io.airlift.slice.Slice; import io.airlift.stats.CounterStat; import io.prestosql.GroupByHashPageIndexerFactory; import io.prestosql.plugin.base.CatalogName; import io.prestosql.plugin.hive.AbstractTestHive.HiveTransaction; import io.prestosql.plugin.hive.AbstractTestHive.Transaction; import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; import io.prestosql.plugin.hive.authentication.HiveAuthenticationConfig; import io.prestosql.plugin.hive.authentication.HiveIdentity; import io.prestosql.plugin.hive.authentication.NoHdfsAuthentication; import io.prestosql.plugin.hive.metastore.Column; import io.prestosql.plugin.hive.metastore.Database; import io.prestosql.plugin.hive.metastore.HiveMetastore; import io.prestosql.plugin.hive.metastore.PrincipalPrivileges; import io.prestosql.plugin.hive.metastore.Table; import io.prestosql.plugin.hive.metastore.cache.CachingHiveMetastore; import io.prestosql.plugin.hive.metastore.thrift.BridgingHiveMetastore; import io.prestosql.plugin.hive.metastore.thrift.MetastoreLocator; import io.prestosql.plugin.hive.metastore.thrift.TestingMetastoreLocator; import io.prestosql.plugin.hive.metastore.thrift.ThriftHiveMetastore; import 
io.prestosql.plugin.hive.metastore.thrift.ThriftMetastoreConfig; import io.prestosql.plugin.hive.security.SqlStandardAccessControlMetadata; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorMetadata; import io.prestosql.spi.connector.ConnectorOutputTableHandle; import io.prestosql.spi.connector.ConnectorPageSink; import io.prestosql.spi.connector.ConnectorPageSinkProvider; import io.prestosql.spi.connector.ConnectorPageSource; import io.prestosql.spi.connector.ConnectorPageSourceProvider; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.connector.ConnectorSplit; import io.prestosql.spi.connector.ConnectorSplitManager; import io.prestosql.spi.connector.ConnectorSplitSource; import io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.connector.TableNotFoundException; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.security.ConnectorIdentity; import io.prestosql.spi.type.Type; import io.prestosql.sql.gen.JoinCompiler; import io.prestosql.testing.MaterializedResult; import io.prestosql.testing.MaterializedRow; import io.prestosql.testing.TestingNodeManager; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.io.IOException; import java.io.UncheckedIOException; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.OptionalLong; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.stream.IntStream; import static 
com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService; import static io.airlift.concurrent.MoreFutures.getFutureValue; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; import static io.prestosql.plugin.hive.AbstractTestHive.createTableProperties; import static io.prestosql.plugin.hive.AbstractTestHive.filterNonHiddenColumnHandles; import static io.prestosql.plugin.hive.AbstractTestHive.filterNonHiddenColumnMetadata; import static io.prestosql.plugin.hive.AbstractTestHive.getAllSplits; import static io.prestosql.plugin.hive.AbstractTestHive.getSplits; import static io.prestosql.plugin.hive.HiveTestUtils.PAGE_SORTER; import static io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER; import static io.prestosql.plugin.hive.HiveTestUtils.getDefaultHiveFileWriterFactories; import static io.prestosql.plugin.hive.HiveTestUtils.getDefaultHivePageSourceFactories; import static io.prestosql.plugin.hive.HiveTestUtils.getDefaultHiveRecordCursorProviders; import static io.prestosql.plugin.hive.HiveTestUtils.getHiveSession; import static io.prestosql.plugin.hive.HiveTestUtils.getHiveSessionProperties; import static io.prestosql.plugin.hive.HiveTestUtils.getTypes; import static io.prestosql.plugin.hive.util.HiveWriteUtils.getRawFileSystem; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.testing.MaterializedResult.materializeSourceDataStream; import static io.prestosql.testing.QueryAssertions.assertEqualsIgnoreOrder; import static java.util.Locale.ENGLISH; import static java.util.UUID.randomUUID; import static java.util.concurrent.Executors.newCachedThreadPool; import static java.util.concurrent.Executors.newScheduledThreadPool; import 
static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

/**
 * Base class for tests that exercise the Hive connector against a
 * remote/alternative file system (e.g. S3, ABFS). Subclasses supply
 * the file-system-specific base path via {@link #getBasePath()} and
 * call {@link #setup} with a reachable Thrift metastore.
 *
 * <p>Because the metastore under test is not configured for the
 * remote file system, {@link TestingHiveMetastore} rewrites table
 * locations (see the "hack" comments below) so that metastore-side
 * location validation does not fail.</p>
 */
public abstract class AbstractTestHiveFileSystem
{
    protected static final HdfsContext TESTING_CONTEXT = new HdfsContext(ConnectorIdentity.ofUser("test"));

    protected String database;
    // Pre-existing external tables expected to be present in the target database.
    protected SchemaTableName table;
    protected SchemaTableName tableWithHeader;
    protected SchemaTableName tableWithHeaderAndFooter;
    // Randomized name used by testTableCreation (created and dropped per format).
    protected SchemaTableName temporaryCreateTable;

    protected HdfsEnvironment hdfsEnvironment;
    protected LocationService locationService;
    protected TestingHiveMetastore metastoreClient;
    protected HiveMetadataFactory metadataFactory;
    protected HiveTransactionManager transactionManager;
    protected ConnectorSplitManager splitManager;
    protected ConnectorPageSinkProvider pageSinkProvider;
    protected ConnectorPageSourceProvider pageSourceProvider;

    private ExecutorService executor;
    private HiveConfig config;
    private ScheduledExecutorService heartbeatService;

    @BeforeClass
    public void setUp()
    {
        executor = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
        heartbeatService = newScheduledThreadPool(1);
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
    {
        // Null out after shutdown so a repeated tearDown is a no-op.
        if (executor != null) {
            executor.shutdownNow();
            executor = null;
        }
        if (heartbeatService != null) {
            heartbeatService.shutdownNow();
            heartbeatService = null;
        }
    }

    /** The file-system-specific root under which test tables live. */
    protected abstract Path getBasePath();

    /** Hook for subclasses to run extra initialization after setup(). */
    protected void onSetupComplete() {}

    /**
     * Wires up the full connector stack (metastore client, metadata
     * factory, split manager, page sink/source providers) against the
     * given Thrift metastore host/port and HDFS configuration.
     *
     * @param host metastore Thrift host
     * @param port metastore Thrift port
     * @param databaseName database containing the pre-created test tables
     * @param s3SelectPushdownEnabled whether to enable S3 Select pushdown in HiveConfig
     * @param hdfsConfiguration file-system-specific HDFS configuration
     */
    protected void setup(String host, int port, String databaseName, boolean s3SelectPushdownEnabled, HdfsConfiguration hdfsConfiguration)
    {
        database = databaseName;
        table = new SchemaTableName(database, "presto_test_external_fs");
        tableWithHeader = new SchemaTableName(database, "presto_test_external_fs_with_header");
        tableWithHeaderAndFooter = new SchemaTableName(database, "presto_test_external_fs_with_header_and_footer");

        String random = randomUUID().toString().toLowerCase(ENGLISH).replace("-", "");
        temporaryCreateTable = new SchemaTableName(database, "tmp_presto_test_create_" + random);

        config = new HiveConfig().setS3SelectPushdownEnabled(s3SelectPushdownEnabled);

        Optional<HostAndPort> proxy = Optional.ofNullable(System.getProperty("hive.metastore.thrift.client.socks-proxy"))
                .map(HostAndPort::fromString);

        MetastoreLocator metastoreLocator = new TestingMetastoreLocator(proxy, HostAndPort.fromParts(host, port));

        // NOTE(review): this local executor shadows the field initialized in
        // setUp() and is never shut down here or in tearDown() — presumably
        // acceptable because daemonThreadsNamed produces daemon threads;
        // confirm this shadowing is intentional.
        ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
        HivePartitionManager hivePartitionManager = new HivePartitionManager(config);

        hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, new HdfsConfig(), new NoHdfsAuthentication());
        metastoreClient = new TestingHiveMetastore(
                new BridgingHiveMetastore(new ThriftHiveMetastore(metastoreLocator, new HiveConfig(), new ThriftMetastoreConfig(), hdfsEnvironment, false)),
                executor,
                getBasePath(),
                hdfsEnvironment);
        locationService = new HiveLocationService(hdfsEnvironment, new HiveAuthenticationConfig());
        JsonCodec<PartitionUpdate> partitionUpdateCodec = JsonCodec.jsonCodec(PartitionUpdate.class);
        metadataFactory = new HiveMetadataFactory(
                new CatalogName("hive"),
                config,
                metastoreClient,
                hdfsEnvironment,
                hivePartitionManager,
                newDirectExecutorService(),
                heartbeatService,
                TYPE_MANAGER,
                locationService,
                partitionUpdateCodec,
                new NodeVersion("test_version"),
                SqlStandardAccessControlMetadata::new);
        transactionManager = new HiveTransactionManager();
        splitManager = new HiveSplitManager(
                transactionHandle -> ((HiveMetadata) transactionManager.get(transactionHandle)).getMetastore(),
                hivePartitionManager,
                new NamenodeStats(),
                hdfsEnvironment,
                new CachingDirectoryLister(new HiveConfig()),
                new BoundedExecutor(executor, config.getMaxSplitIteratorThreads()),
                new CounterStat(),
                config.getMaxOutstandingSplits(),
                config.getMaxOutstandingSplitsSize(),
                config.getMinPartitionBatchSize(),
                config.getMaxPartitionBatchSize(),
                config.getMaxInitialSplits(),
                config.getSplitLoaderConcurrency(),
                config.getMaxSplitsPerSecond(),
                config.getRecursiveDirWalkerEnabled(),
                TYPE_MANAGER);
        pageSinkProvider = new HivePageSinkProvider(
                getDefaultHiveFileWriterFactories(config, hdfsEnvironment),
                hdfsEnvironment,
                PAGE_SORTER,
                metastoreClient,
                new GroupByHashPageIndexerFactory(new JoinCompiler(createTestMetadataManager())),
                TYPE_MANAGER,
                config,
                locationService,
                partitionUpdateCodec,
                new TestingNodeManager("fake-environment"),
                new HiveEventClient(),
                getHiveSessionProperties(config),
                new HiveWriterStats());
        pageSourceProvider = new HivePageSourceProvider(
                TYPE_MANAGER,
                hdfsEnvironment,
                getDefaultHivePageSourceFactories(hdfsEnvironment, config),
                getDefaultHiveRecordCursorProviders(config, hdfsEnvironment),
                new GenericHiveRecordCursorProvider(hdfsEnvironment, config));

        onSetupComplete();
    }

    protected ConnectorSession newSession()
    {
        return getHiveSession(config);
    }

    protected Transaction newTransaction()
    {
        return new HiveTransaction(transactionManager, (HiveMetadata) metadataFactory.create());
    }

    // Expected values below correspond to fixture files pre-loaded in
    // the target database for each compression codec.
    @Test
    public void testGetRecords()
            throws Exception
    {
        assertEqualsIgnoreOrder(
                readTable(table),
                MaterializedResult.resultBuilder(newSession(), BIGINT)
                        .row(3L).row(14L).row(15L) // test_table.csv
                        .row(92L).row(65L).row(35L) // test_table.csv.gz
                        .row(89L).row(79L).row(32L) // test_table.csv.bz2
                        .row(38L).row(46L).row(26L) // test_table.csv.lz4
                        .build());
    }

    @Test
    public void testGetRecordsWithHeader()
            throws IOException
    {
        assertEqualsIgnoreOrder(
                readTable(tableWithHeader),
                MaterializedResult.resultBuilder(newSession(), BIGINT)
                        .row(2L).row(71L).row(82L) // test_table_with_header.csv
                        .row(81L).row(82L).row(84L) // test_table_with_header.csv.gz
                        .row(59L).row(4L).row(52L) // test_table_with_header.csv.bz2
                        .row(35L).row(36L).row(2L) // test_table_with_header.csv.lz4
                        .build());
    }

    @Test
    public void testGetRecordsWithHeaderAndFooter()
            throws IOException
    {
        assertEqualsIgnoreOrder(
                readTable(tableWithHeaderAndFooter),
                MaterializedResult.resultBuilder(newSession(), BIGINT)
                        .row(1L).row(41L).row(42L) // test_table_with_header_and_footer.csv
                        .row(13L).row(56L).row(23L) // test_table_with_header_and_footer.csv.gz
                        .row(73L).row(9L).row(50L) // test_table_with_header_and_footer.csv.bz2
                        .row(48L).row(80L).row(16L) // test_table_with_header_and_footer.csv.lz4
                        .build());
    }

    @Test
    public void testGetFileStatus()
            throws Exception
    {
        Path basePath = getBasePath();
        Path tablePath = new Path(basePath, "presto_test_external_fs");
        Path filePath = new Path(tablePath, "test_table.csv");
        FileSystem fs = hdfsEnvironment.getFileSystem(TESTING_CONTEXT, basePath);

        assertTrue(fs.getFileStatus(basePath).isDirectory(), "basePath should be considered a directory");
        assertTrue(fs.getFileStatus(tablePath).isDirectory(), "tablePath should be considered a directory");
        assertTrue(fs.getFileStatus(filePath).isFile(), "filePath should be considered a file");
        assertFalse(fs.getFileStatus(filePath).isDirectory(), "filePath should not be considered a directory");
        assertFalse(fs.exists(new Path(basePath, "foo-" + randomUUID())), "foo-random path should be found not to exist");
        assertFalse(fs.exists(new Path(basePath, "foo")), "foo path should be found not to exist");
    }

    @Test
    public void testRename()
            throws Exception
    {
        Path basePath = new Path(getBasePath(), randomUUID().toString());
        FileSystem fs = hdfsEnvironment.getFileSystem(TESTING_CONTEXT, basePath);
        assertFalse(fs.exists(basePath));

        // create file foo.txt
        Path path = new Path(basePath, "foo.txt");
        assertTrue(fs.createNewFile(path));
        assertTrue(fs.exists(path));

        // rename foo.txt to bar.txt when bar does not exist
        Path newPath = new Path(basePath, "bar.txt");
        assertFalse(fs.exists(newPath));
        assertTrue(fs.rename(path, newPath));
        assertFalse(fs.exists(path));
        assertTrue(fs.exists(newPath));

        // rename foo.txt to foo.txt when foo.txt does not exist
        assertFalse(fs.rename(path, path));

        // create file foo.txt and rename to existing bar.txt
        assertTrue(fs.createNewFile(path));
        assertFalse(fs.rename(path, newPath));

        // rename foo.txt to foo.txt when foo.txt exists
        // (ABFS reports self-rename success, unlike other file systems)
        assertEquals(fs.rename(path, path), getRawFileSystem(fs) instanceof AzureBlobFileSystem);

        // delete foo.txt
        assertTrue(fs.delete(path, false));
        assertFalse(fs.exists(path));

        // create directory source with file
        Path source = new Path(basePath, "source");
        assertTrue(fs.createNewFile(new Path(source, "test.txt")));

        // rename source to non-existing target
        Path target = new Path(basePath, "target");
        assertFalse(fs.exists(target));
        assertTrue(fs.rename(source, target));
        assertFalse(fs.exists(source));
        assertTrue(fs.exists(target));

        // create directory source with file
        assertTrue(fs.createNewFile(new Path(source, "test.txt")));

        // rename source to existing target: source ends up nested under target
        assertTrue(fs.rename(source, target));
        assertFalse(fs.exists(source));
        target = new Path(target, "source");
        assertTrue(fs.exists(target));
        assertTrue(fs.exists(new Path(target, "test.txt")));

        // delete target
        target = new Path(basePath, "target");
        assertTrue(fs.exists(target));
        assertTrue(fs.delete(target, true));
        assertFalse(fs.exists(target));

        // cleanup
        fs.delete(basePath, true);
    }

    @Test
    public void testTableCreation()
            throws Exception
    {
        for (HiveStorageFormat storageFormat : HiveStorageFormat.values()) {
            if (storageFormat == HiveStorageFormat.CSV) {
                // CSV supports only unbounded VARCHAR type
                continue;
            }
            createTable(temporaryCreateTable, storageFormat);
            dropTable(temporaryCreateTable);
        }
    }

    /**
     * Creates a single-BIGINT-column table in the given storage format,
     * writes three rows through the page sink, then reads them back and
     * verifies metadata and data in a second transaction.
     */
    private void createTable(SchemaTableName tableName, HiveStorageFormat storageFormat)
            throws Exception
    {
        List<ColumnMetadata> columns = ImmutableList.<ColumnMetadata>builder()
                .add(new ColumnMetadata("id", BIGINT))
                .build();

        MaterializedResult data = MaterializedResult.resultBuilder(newSession(), BIGINT)
                .row(1L)
                .row(3L)
                .row(2L)
                .build();

        try (Transaction transaction = newTransaction()) {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorSession session = newSession();

            // begin creating the table
            ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName, columns, createTableProperties(storageFormat));
            ConnectorOutputTableHandle outputHandle = metadata.beginCreateTable(session, tableMetadata, Optional.empty());

            // write the records
            ConnectorPageSink sink = pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, outputHandle);
            sink.appendPage(data.toPage());
            Collection<Slice> fragments = getFutureValue(sink.finish());

            // commit the table
            metadata.finishCreateTable(session, outputHandle, fragments, ImmutableList.of());

            transaction.commit();

            // Hack to work around the metastore not being configured for S3 or other FS.
            // The metastore tries to validate the location when creating the
            // table, which fails without explicit configuration for file system.
            // We work around that by using a dummy location when creating the
            // table and update it here to the correct location.
            metastoreClient.updateTableLocation(
                    database,
                    tableName.getTableName(),
                    locationService.getTableWriteInfo(((HiveOutputTableHandle) outputHandle).getLocationHandle(), false).getTargetPath().toString());
        }

        try (Transaction transaction = newTransaction()) {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorSession session = newSession();

            // load the new table
            ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
            List<ColumnHandle> columnHandles = filterNonHiddenColumnHandles(metadata.getColumnHandles(session, tableHandle).values());

            // verify the metadata
            ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(session, getTableHandle(metadata, tableName));
            assertEquals(filterNonHiddenColumnMetadata(tableMetadata.getColumns()), columns);

            // verify the data
            metadata.beginQuery(session);
            ConnectorSplitSource splitSource = getSplits(splitManager, transaction, session, tableHandle);
            ConnectorSplit split = getOnlyElement(getAllSplits(splitSource));

            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, tableHandle, columnHandles, TupleDomain.all())) {
                MaterializedResult result = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
                assertEqualsIgnoreOrder(result.getMaterializedRows(), data.getMaterializedRows());
            }

            metadata.cleanupQuery(session);
        }
    }

    private void dropTable(SchemaTableName table)
    {
        try (Transaction transaction = newTransaction()) {
            transaction.getMetastore().dropTable(newSession(), table.getSchemaName(), table.getTableName());
            transaction.commit();
        }
    }

    /**
     * Reads the whole table through the split/page-source machinery and
     * materializes the non-hidden columns into a MaterializedResult.
     */
    protected MaterializedResult readTable(SchemaTableName tableName)
            throws IOException
    {
        try (Transaction transaction = newTransaction()) {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorSession session = newSession();

            ConnectorTableHandle table = getTableHandle(metadata, tableName);
            List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(session, table).values());

            metadata.beginQuery(session);
            ConnectorSplitSource splitSource = getSplits(splitManager, transaction, session, table);

            // Pages are read with all columns (including hidden ones) but
            // only non-hidden columns are materialized into the result.
            List<Type> allTypes = getTypes(columnHandles);
            List<Type> dataTypes = getTypes(columnHandles.stream()
                    .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden())
                    .collect(toImmutableList()));
            MaterializedResult.Builder result = MaterializedResult.resultBuilder(session, dataTypes);

            List<ConnectorSplit> splits = getAllSplits(splitSource);
            for (ConnectorSplit split : splits) {
                try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, table, columnHandles, TupleDomain.all())) {
                    MaterializedResult pageSourceResult = materializeSourceDataStream(session, pageSource, allTypes);
                    for (MaterializedRow row : pageSourceResult.getMaterializedRows()) {
                        Object[] dataValues = IntStream.range(0, row.getFieldCount())
                                .filter(channel -> !((HiveColumnHandle) columnHandles.get(channel)).isHidden())
                                .mapToObj(row::getField)
                                .toArray();
                        result.row(dataValues);
                    }
                }
            }

            metadata.cleanupQuery(session);
            return result.build();
        }
    }

    private ConnectorTableHandle getTableHandle(ConnectorMetadata metadata, SchemaTableName tableName)
    {
        ConnectorTableHandle handle = metadata.getTableHandle(newSession(), tableName);
        checkArgument(handle != null, "table not found: %s", tableName);
        return handle;
    }

    /**
     * Metastore wrapper that forces all database/table locations onto
     * the test file system's base path, working around the metastore
     * not being configured for S3 or other file systems.
     */
    protected static class TestingHiveMetastore
            extends CachingHiveMetastore
    {
        private final Path basePath;
        private final HdfsEnvironment hdfsEnvironment;

        public TestingHiveMetastore(HiveMetastore delegate, Executor executor, Path basePath, HdfsEnvironment hdfsEnvironment)
        {
            super(delegate, executor, OptionalLong.empty(), OptionalLong.empty(), 0, StatsRecording.ENABLED);
            this.basePath = basePath;
            this.hdfsEnvironment = hdfsEnvironment;
        }

        @Override
        public Optional<Database> getDatabase(String databaseName)
        {
            // Rewrite the database location to the test base path.
            return super.getDatabase(databaseName)
                    .map(database -> Database.builder(database)
                            .setLocation(Optional.of(basePath.toString()))
                            .build());
        }

        @Override
        public void createTable(HiveIdentity identity, Table table, PrincipalPrivileges privileges)
        {
            // hack to work around the metastore not being configured for S3 or other FS:
            // create with a dummy location; updateTableLocation() fixes it afterwards
            Table.Builder tableBuilder = Table.builder(table);
            tableBuilder.getStorageBuilder().setLocation("/");
            super.createTable(identity, tableBuilder.build(), privileges);
        }

        @Override
        public void dropTable(HiveIdentity identity, String databaseName, String tableName, boolean deleteData)
        {
            try {
                Optional<Table> table = getTable(identity, databaseName, tableName);
                if (table.isEmpty()) {
                    throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
                }

                // hack to work around the metastore not being configured for S3 or other FS:
                // capture real data locations first, then reset the location to "/" so the
                // metastore-side drop does not try to touch the unconfigured file system
                List<String> locations = listAllDataPaths(identity, databaseName, tableName);

                Table.Builder tableBuilder = Table.builder(table.get());
                tableBuilder.getStorageBuilder().setLocation("/");

                // drop table
                replaceTable(identity, databaseName, tableName, tableBuilder.build(), new PrincipalPrivileges(ImmutableMultimap.of(), ImmutableMultimap.of()));
                delegate.dropTable(identity, databaseName, tableName, false);

                // drop data ourselves, using the locations captured above
                if (deleteData) {
                    for (String location : locations) {
                        Path path = new Path(location);
                        hdfsEnvironment.getFileSystem(TESTING_CONTEXT, path).delete(path, true);
                    }
                }
            }
            catch (IOException e) {
                throw new UncheckedIOException(e);
            }
            finally {
                invalidateTable(databaseName, tableName);
            }
        }

        public void updateTableLocation(String databaseName, String tableName, String location)
        {
            HiveIdentity identity = new HiveIdentity(TESTING_CONTEXT.getIdentity());
            Optional<Table> table = getTable(identity, databaseName, tableName);
            if (table.isEmpty()) {
                throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
            }

            Table.Builder tableBuilder = Table.builder(table.get());
            tableBuilder.getStorageBuilder().setLocation(location);

            // NOTE: this clears the permissions
            replaceTable(identity, databaseName, tableName, tableBuilder.build(), new PrincipalPrivileges(ImmutableMultimap.of(), ImmutableMultimap.of()));
        }

        /**
         * Collects the table location plus every partition location that
         * lives outside the table directory.
         */
        private List<String> listAllDataPaths(HiveIdentity identity, String schemaName, String tableName)
        {
            ImmutableList.Builder<String> locations = ImmutableList.builder();
            Table table = getTable(identity, schemaName, tableName).get();
            List<String> partitionColumnNames = table.getPartitionColumns().stream().map(Column::getName).collect(toImmutableList());
            if (table.getStorage().getLocation() != null) {
                // For partitioned table, there should be nothing directly under this directory.
                // But including this location in the set makes the directory content assert more
                // extensive, which is desirable.
                locations.add(table.getStorage().getLocation());
            }

            Optional<List<String>> partitionNames = getPartitionNamesByFilter(identity, schemaName, tableName, partitionColumnNames, TupleDomain.all());
            if (partitionNames.isPresent()) {
                getPartitionsByNames(identity, table, partitionNames.get()).values().stream()
                        .map(Optional::get)
                        .map(partition -> partition.getStorage().getLocation())
                        // Only add partition locations not already covered by the table location.
                        .filter(location -> !location.startsWith(table.getStorage().getLocation()))
                        .forEach(locations::add);
            }
            return locations.build();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.rest.controller; import java.io.IOException; import java.io.StringWriter; import java.util.List; import org.apache.kylin.cube.CubeInstance; import org.apache.kylin.cube.CubeSegment; import org.apache.kylin.cube.model.CubeDesc; import org.apache.kylin.cube.model.DimensionDesc; import org.apache.kylin.metadata.model.SegmentRange.TSRange; import org.apache.kylin.metadata.realization.RealizationStatusEnum; import org.apache.kylin.rest.exception.InternalErrorException; import org.apache.kylin.rest.exception.NotFoundException; import org.apache.kylin.rest.request.CubeRequest; import org.apache.kylin.rest.response.CubeInstanceResponse; import org.apache.kylin.rest.response.GeneralResponse; import org.apache.kylin.rest.service.CubeService; import org.apache.kylin.rest.service.JobService; import org.apache.kylin.rest.service.ServiceTestBase; import org.apache.kylin.rest.service.StreamingService; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; 
/** * @author xduo */ public class CubeControllerTest extends ServiceTestBase { private CubeController cubeController; private CubeDescController cubeDescController; @Autowired @Qualifier("cubeMgmtService") CubeService cubeService; @Autowired @Qualifier("jobService") JobService jobService; @Autowired @Qualifier("streamingMgmtService") StreamingService streamingService; @Before public void setup() throws Exception { super.setup(); cubeController = new CubeController(); cubeController.setCubeService(cubeService); cubeController.setJobService(jobService); cubeDescController = new CubeDescController(); cubeDescController.setCubeService(cubeService); } @Test public void testBasics() throws IOException { CubeDesc[] cubes = cubeDescController.getCube("test_kylin_cube_with_slr_ready"); Assert.assertNotNull(cubes); Assert.assertNotNull(cubeController.getSql("test_kylin_cube_with_slr_ready", "20130331080000_20131212080000")); Assert.assertNotNull(cubeController.getCubes(null, null, null, 0, 5)); CubeDesc cube = cubes[0]; CubeDesc newCube = new CubeDesc(); String newCubeName = cube.getName() + "_test_save"; try { cubeController.deleteCube(newCubeName); } catch (Exception e) { // it may not exist, ignore the exception } newCube.setName(newCubeName); newCube.setModelName(cube.getModelName()); newCube.setModel(cube.getModel()); newCube.setDimensions(cube.getDimensions()); newCube.setHbaseMapping(cube.getHbaseMapping()); newCube.setMeasures(cube.getMeasures()); newCube.setRowkey(cube.getRowkey()); newCube.setAggregationGroups(cube.getAggregationGroups()); newCube.getModel().setLastModified(0); ObjectMapper cubeDescMapper = new ObjectMapper(); StringWriter cubeDescWriter = new StringWriter(); cubeDescMapper.writeValue(cubeDescWriter, newCube); ObjectMapper modelDescMapper = new ObjectMapper(); StringWriter modelDescWriter = new StringWriter(); modelDescMapper.writeValue(modelDescWriter, newCube.getModel()); CubeRequest cubeRequest = new CubeRequest(); 
cubeRequest.setCubeDescData(cubeDescWriter.toString()); cubeRequest.setCubeName(newCube.getName()); cubeRequest = cubeController.saveCubeDesc(cubeRequest); List<String> notifyList = Lists.newArrayList(); notifyList.add("john@example.com"); cubeController.updateNotifyList(newCubeName, notifyList); List<CubeInstanceResponse> cubeInstances = cubeController.getCubes(newCubeName, cube.getModelName(), "default", 1, 0); CubeInstance cubeInstance = cubeController.getCube(cubeInstances.get(0).getName()); Assert.assertTrue(cubeInstance.getDescriptor().getNotifyList().contains("john@example.com")); Assert.assertTrue(cubeInstance.getCost() == 495); cubeController.deleteCube(newCubeName); } @Test(expected = InternalErrorException.class) public void testDeleteSegmentNew() throws IOException { String cubeName = "test_kylin_cube_with_slr_ready_3_segments"; CubeDesc[] cubes = cubeDescController.getCube(cubeName); Assert.assertNotNull(cubes); String segmentName = "20131212000000_20140112000000"; CubeInstance cube = cubeService.getCubeManager().getCube(cubeName); CubeSegment toDelete = null; for (CubeSegment seg : cube.getSegments()) { if (seg.getName().equals(segmentName)) { toDelete = seg; break; } } Assert.assertNotNull(toDelete); String segId = toDelete.getUuid(); cubeController.deleteSegment(cubeName, segmentName); // delete success, no related job 'NEW' segment can be delete if (cubeService.isOrphonSegment(cube, segId)){ throw new InternalErrorException(); } } @Test(expected = NotFoundException.class) public void testDeleteSegmentNotExist() throws IOException { String cubeName = "test_kylin_cube_with_slr_ready_3_segments"; CubeDesc[] cubes = cubeDescController.getCube(cubeName); Assert.assertNotNull(cubes); cubeController.deleteSegment(cubeName, "not_exist_segment"); } @Test public void testDeleteSegmentFromHead() throws IOException { String cubeName = "test_kylin_cube_with_slr_ready_3_segments"; CubeDesc[] cubes = cubeDescController.getCube(cubeName); 
Assert.assertNotNull(cubes); int segNumber = cubeService.getCubeManager().getCube(cubeName).getSegments().size(); cubeController.deleteSegment(cubeName, "19691231160000_20131112000000"); int newSegNumber = cubeService.getCubeManager().getCube(cubeName).getSegments().size(); Assert.assertTrue(segNumber == newSegNumber + 1); } @Test public void testGetHoles() throws IOException { String cubeName = "test_kylin_cube_with_slr_ready_3_segments"; CubeDesc[] cubes = cubeDescController.getCube(cubeName); Assert.assertNotNull(cubes); CubeInstance cube = cubeService.getCubeManager().getCube(cubeName); List<CubeSegment> segments = cube.getSegments(); final long dateEnd = segments.get(segments.size() - 1).getTSRange().end.v; final long ONEDAY = 24 * 60 * 60000; cubeService.getCubeManager().appendSegment(cube, new TSRange(dateEnd + ONEDAY, dateEnd + ONEDAY * 2)); List<CubeSegment> holes = cubeController.getHoles(cubeName); Assert.assertTrue(holes.size() == 1); CubeSegment hole = holes.get(0); Assert.assertTrue(hole.getTSRange().equals(new TSRange(dateEnd, dateEnd + ONEDAY))); } @Test public void testGetCubes() { List<CubeInstanceResponse> cubes = cubeController.getCubes(null, null, null, 1, 0); Assert.assertTrue(cubes.size() == 1); } @Test public void testGetSql() { GeneralResponse response = cubeController.getSql("test_kylin_cube_with_slr_ready"); String sql = response.getProperty("sql"); CubeDesc cubeDesc = cubeDescController.getDesc("test_kylin_cube_with_slr_ready"); for (DimensionDesc dimensionDesc : cubeDesc.getDimensions()) { if (dimensionDesc.getDerived() != null) { for (String derivedDimension : dimensionDesc.getDerived()) { Assert.assertTrue(sql.contains(derivedDimension)); } } } } @Test public void tesDeleteDescBrokenCube() throws Exception { final String cubeName = "ci_left_join_cube"; CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName); CubeDesc cubeDesc = cubeInstance.getDescriptor(); cubeDesc.setModel(null); 
cubeInstance.setStatus(RealizationStatusEnum.DESCBROKEN); cubeController.deleteCube(cubeName); Assert.assertNull(cubeService.getCubeManager().getCube(cubeName)); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive.security; import com.facebook.presto.hive.HiveConnectorId; import com.facebook.presto.hive.HiveTransactionManager; import com.facebook.presto.hive.TransactionalMetadata; import com.facebook.presto.hive.metastore.Database; import com.facebook.presto.hive.metastore.MetastoreContext; import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.connector.ConnectorAccessControl; import com.facebook.presto.spi.connector.ConnectorTransactionHandle; import com.facebook.presto.spi.security.AccessControlContext; import com.facebook.presto.spi.security.AccessDeniedException; import com.facebook.presto.spi.security.ConnectorIdentity; import com.facebook.presto.spi.security.PrestoPrincipal; import com.facebook.presto.spi.security.Privilege; import com.facebook.presto.spi.security.RoleGrant; import javax.inject.Inject; import java.util.Optional; import java.util.Set; import static com.facebook.presto.hive.metastore.Database.DEFAULT_DATABASE_NAME; import static com.facebook.presto.hive.metastore.HivePrivilegeInfo.HivePrivilege; import static com.facebook.presto.hive.metastore.HivePrivilegeInfo.HivePrivilege.DELETE; import static com.facebook.presto.hive.metastore.HivePrivilegeInfo.HivePrivilege.INSERT; import static com.facebook.presto.hive.metastore.HivePrivilegeInfo.HivePrivilege.OWNERSHIP; import static 
com.facebook.presto.hive.metastore.HivePrivilegeInfo.HivePrivilege.SELECT; import static com.facebook.presto.hive.metastore.HivePrivilegeInfo.toHivePrivilege; import static com.facebook.presto.hive.metastore.thrift.ThriftMetastoreUtil.isRoleApplicable; import static com.facebook.presto.hive.metastore.thrift.ThriftMetastoreUtil.isRoleEnabled; import static com.facebook.presto.hive.metastore.thrift.ThriftMetastoreUtil.listApplicableRoles; import static com.facebook.presto.hive.metastore.thrift.ThriftMetastoreUtil.listApplicableTablePrivileges; import static com.facebook.presto.hive.metastore.thrift.ThriftMetastoreUtil.listEnabledTablePrivileges; import static com.facebook.presto.spi.security.AccessDeniedException.denyAddColumn; import static com.facebook.presto.spi.security.AccessDeniedException.denyCreateRole; import static com.facebook.presto.spi.security.AccessDeniedException.denyCreateSchema; import static com.facebook.presto.spi.security.AccessDeniedException.denyCreateTable; import static com.facebook.presto.spi.security.AccessDeniedException.denyCreateView; import static com.facebook.presto.spi.security.AccessDeniedException.denyCreateViewWithSelect; import static com.facebook.presto.spi.security.AccessDeniedException.denyDeleteTable; import static com.facebook.presto.spi.security.AccessDeniedException.denyDropColumn; import static com.facebook.presto.spi.security.AccessDeniedException.denyDropRole; import static com.facebook.presto.spi.security.AccessDeniedException.denyDropSchema; import static com.facebook.presto.spi.security.AccessDeniedException.denyDropTable; import static com.facebook.presto.spi.security.AccessDeniedException.denyDropView; import static com.facebook.presto.spi.security.AccessDeniedException.denyGrantRoles; import static com.facebook.presto.spi.security.AccessDeniedException.denyGrantTablePrivilege; import static com.facebook.presto.spi.security.AccessDeniedException.denyInsertTable; import static 
com.facebook.presto.spi.security.AccessDeniedException.denyRenameColumn;
import static com.facebook.presto.spi.security.AccessDeniedException.denyRenameSchema;
import static com.facebook.presto.spi.security.AccessDeniedException.denyRenameTable;
import static com.facebook.presto.spi.security.AccessDeniedException.denyRevokeRoles;
import static com.facebook.presto.spi.security.AccessDeniedException.denyRevokeTablePrivilege;
import static com.facebook.presto.spi.security.AccessDeniedException.denySelectTable;
import static com.facebook.presto.spi.security.AccessDeniedException.denySetCatalogSessionProperty;
import static com.facebook.presto.spi.security.AccessDeniedException.denySetRole;
import static com.facebook.presto.spi.security.AccessDeniedException.denyShowRoles;
import static com.facebook.presto.spi.security.PrincipalType.ROLE;
import static com.facebook.presto.spi.security.PrincipalType.USER;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toSet;

/**
 * SQL-standard authorization for the Hive connector. Decisions are derived from
 * Hive metastore state: the "admin" role bypasses all checks, database/table
 * ownership gates DDL, and table privileges (SELECT/INSERT/DELETE, with or
 * without grant option) gate DML. All metastore lookups go through the
 * transaction-scoped {@link SemiTransactionalHiveMetastore}.
 */
public class SqlStandardAccessControl
        implements ConnectorAccessControl
{
    public static final String ADMIN_ROLE_NAME = "admin";
    private static final String INFORMATION_SCHEMA_NAME = "information_schema";
    // information_schema.roles is readable by admins only (see checkTablePermission)
    private static final SchemaTableName ROLES = new SchemaTableName(INFORMATION_SCHEMA_NAME, "roles");

    private final String connectorId;
    private final HiveTransactionManager hiveTransactionManager;

    @Inject
    public SqlStandardAccessControl(
            HiveConnectorId connectorId,
            HiveTransactionManager hiveTransactionManager)
    {
        this.connectorId = requireNonNull(connectorId, "connectorId is null").toString();
        this.hiveTransactionManager = requireNonNull(hiveTransactionManager, "hiveTransactionManager is null");
    }

    @Override
    public void checkCanCreateSchema(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, String schemaName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isAdmin(transaction, identity, metastoreContext)) {
            denyCreateSchema(schemaName);
        }
    }

    @Override
    public void checkCanDropSchema(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, String schemaName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isDatabaseOwner(transaction, identity, metastoreContext, schemaName)) {
            denyDropSchema(schemaName);
        }
    }

    @Override
    public void checkCanRenameSchema(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, String schemaName, String newSchemaName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isDatabaseOwner(transaction, identity, metastoreContext, schemaName)) {
            denyRenameSchema(schemaName, newSchemaName);
        }
    }

    // listing schemas is always allowed; visibility is not filtered here
    @Override
    public void checkCanShowSchemas(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context)
    {
    }

    @Override
    public Set<String> filterSchemas(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, Set<String> schemaNames)
    {
        return schemaNames;
    }

    @Override
    public void checkCanCreateTable(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isDatabaseOwner(transaction, identity, metastoreContext, tableName.getSchemaName())) {
            denyCreateTable(tableName.toString());
        }
    }

    @Override
    public void checkCanDropTable(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isTableOwner(transaction, identity, metastoreContext, tableName)) {
            denyDropTable(tableName.toString());
        }
    }

    @Override
    public void checkCanRenameTable(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName, SchemaTableName newTableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isTableOwner(transaction, identity, metastoreContext, tableName)) {
            denyRenameTable(tableName.toString(), newTableName.toString());
        }
    }

    @Override
    public void checkCanShowTablesMetadata(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, String schemaName)
    {
    }

    @Override
    public Set<SchemaTableName> filterTables(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, Set<SchemaTableName> tableNames)
    {
        return tableNames;
    }

    @Override
    public void checkCanAddColumn(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isTableOwner(transaction, identity, metastoreContext, tableName)) {
            denyAddColumn(tableName.toString());
        }
    }

    @Override
    public void checkCanDropColumn(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isTableOwner(transaction, identity, metastoreContext, tableName)) {
            denyDropColumn(tableName.toString());
        }
    }

    @Override
    public void checkCanRenameColumn(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isTableOwner(transaction, identity, metastoreContext, tableName)) {
            denyRenameColumn(tableName.toString());
        }
    }

    @Override
    public void checkCanSelectFromColumns(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName, Set<String> columnNames)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        // TODO: Implement column level access control
        if (!checkTablePermission(transaction, identity, metastoreContext, tableName, SELECT, false)) {
            denySelectTable(tableName.toString());
        }
    }

    @Override
    public void checkCanInsertIntoTable(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!checkTablePermission(transaction, identity, metastoreContext, tableName, INSERT, false)) {
            denyInsertTable(tableName.toString());
        }
    }

    @Override
    public void checkCanDeleteFromTable(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!checkTablePermission(transaction, identity, metastoreContext, tableName, DELETE, false)) {
            denyDeleteTable(tableName.toString());
        }
    }

    @Override
    public void checkCanCreateView(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName viewName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isDatabaseOwner(transaction, identity, metastoreContext, viewName.getSchemaName())) {
            denyCreateView(viewName.toString());
        }
    }

    @Override
    public void checkCanDropView(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName viewName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isTableOwner(transaction, identity, metastoreContext, viewName)) {
            denyDropView(viewName.toString());
        }
    }

    @Override
    public void checkCanCreateViewWithSelectFromColumns(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName, Set<String> columnNames)
    {
        // creating a view requires plain SELECT plus SELECT WITH GRANT OPTION on the source
        checkCanSelectFromColumns(transaction, identity, context, tableName, columnNames);
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());

        // TODO implement column level access control
        if (!checkTablePermission(transaction, identity, metastoreContext, tableName, SELECT, true)) {
            denyCreateViewWithSelect(tableName.toString(), identity);
        }
    }

    @Override
    public void checkCanSetCatalogSessionProperty(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, String propertyName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isAdmin(transaction, identity, metastoreContext)) {
            denySetCatalogSessionProperty(connectorId, propertyName);
        }
    }

    @Override
    public void checkCanGrantTablePrivilege(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, Privilege privilege, SchemaTableName tableName, PrestoPrincipal grantee, boolean withGrantOption)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        // owners may grant anything on their table
        if (isTableOwner(transaction, identity, metastoreContext, tableName)) {
            return;
        }

        if (!hasGrantOptionForPrivilege(transaction, identity, metastoreContext, privilege, tableName)) {
            denyGrantTablePrivilege(privilege.name(), tableName.toString());
        }
    }

    @Override
    public void checkCanRevokeTablePrivilege(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, Privilege privilege, SchemaTableName tableName, PrestoPrincipal revokee, boolean grantOptionFor)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (isTableOwner(transaction, identity, metastoreContext, tableName)) {
            return;
        }

        if (!hasGrantOptionForPrivilege(transaction, identity, metastoreContext, privilege, tableName)) {
            denyRevokeTablePrivilege(privilege.name(), tableName.toString());
        }
    }

    @Override
    public void checkCanCreateRole(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, String role, Optional<PrestoPrincipal> grantor)
    {
        // currently specifying grantor is supported by metastore, but it is not supported by Hive itself
        if (grantor.isPresent()) {
            throw new AccessDeniedException("Hive Connector does not support WITH ADMIN statement");
        }
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isAdmin(transactionHandle, identity, metastoreContext)) {
            denyCreateRole(role);
        }
    }

    @Override
    public void checkCanDropRole(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, String role)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isAdmin(transactionHandle, identity, metastoreContext)) {
            denyDropRole(role);
        }
    }

    @Override
    public void checkCanGrantRoles(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, Set<String> roles, Set<PrestoPrincipal> grantees, boolean withAdminOption, Optional<PrestoPrincipal> grantor, String catalogName)
    {
        // currently specifying grantor is supported by metastore, but it is not supported by Hive itself
        if (grantor.isPresent()) {
            throw new AccessDeniedException("Hive Connector does not support GRANTED BY statement");
        }
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!hasAdminOptionForRoles(transactionHandle, identity, metastoreContext, roles)) {
            denyGrantRoles(roles, grantees);
        }
    }

    @Override
    public void checkCanRevokeRoles(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, Set<String> roles, Set<PrestoPrincipal> grantees, boolean adminOptionFor, Optional<PrestoPrincipal> grantor, String catalogName)
    {
        // currently specifying grantor is supported by metastore, but it is not supported by Hive itself
        if (grantor.isPresent()) {
            throw new AccessDeniedException("Hive Connector does not support GRANTED BY statement");
        }
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!hasAdminOptionForRoles(transactionHandle, identity, metastoreContext, roles)) {
            denyRevokeRoles(roles, grantees);
        }
    }

    @Override
    public void checkCanSetRole(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, String role, String catalogName)
    {
        SemiTransactionalHiveMetastore metastore = getMetastore(transaction);
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isRoleApplicable(metastore, identity, new PrestoPrincipal(USER, identity.getUser()), metastoreContext, role)) {
            denySetRole(role);
        }
    }

    @Override
    public void checkCanShowRoles(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, String catalogName)
    {
        MetastoreContext metastoreContext = new MetastoreContext(identity, context.getQueryId().getId(), context.getClientInfo(), context.getSource());
        if (!isAdmin(transactionHandle, identity, metastoreContext)) {
            denyShowRoles(catalogName);
        }
    }

    @Override
    public void checkCanShowCurrentRoles(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, String catalogName)
    {
    }

    @Override
    public void checkCanShowRoleGrants(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, AccessControlContext context, String catalogName)
    {
    }

    // true when the "admin" role is enabled for the current identity
    private boolean isAdmin(ConnectorTransactionHandle transaction, ConnectorIdentity identity, MetastoreContext metastoreContext)
    {
        SemiTransactionalHiveMetastore metastore = getMetastore(transaction);
        return isRoleEnabled(identity, (PrestoPrincipal p) -> metastore.listRoleGrants(metastoreContext, p), ADMIN_ROLE_NAME);
    }

    private boolean isDatabaseOwner(ConnectorTransactionHandle transaction, ConnectorIdentity identity, MetastoreContext metastoreContext, String databaseName)
    {
        // all users are "owners" of the default database
        if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(databaseName)) {
            return true;
        }

        if (isAdmin(transaction, identity, metastoreContext)) {
            return true;
        }

        SemiTransactionalHiveMetastore metastore = getMetastore(transaction);
        Optional<Database> databaseMetadata = metastore.getDatabase(metastoreContext, databaseName);
        if (!databaseMetadata.isPresent()) {
            return false;
        }

        Database database = databaseMetadata.get();

        // a database can be owned by a user or role
        if (database.getOwnerType() == USER && identity.getUser().equals(database.getOwnerName())) {
            return true;
        }
        if (database.getOwnerType() == ROLE && isRoleEnabled(identity, (PrestoPrincipal p) -> metastore.listRoleGrants(metastoreContext, p), database.getOwnerName())) {
            return true;
        }
        return false;
    }

    // table ownership is modeled as the OWNERSHIP privilege
    private boolean isTableOwner(ConnectorTransactionHandle transaction, ConnectorIdentity identity, MetastoreContext metastoreContext, SchemaTableName tableName)
    {
        return checkTablePermission(transaction, identity, metastoreContext, tableName, OWNERSHIP, false);
    }

    private boolean checkTablePermission(
            ConnectorTransactionHandle transaction,
            ConnectorIdentity identity,
            MetastoreContext metastoreContext,
            SchemaTableName tableName,
            HivePrivilege requiredPrivilege,
            boolean grantOptionRequired)
    {
        if (isAdmin(transaction, identity, metastoreContext)) {
            return true;
        }

        // information_schema.roles is restricted to admins (handled above);
        // the rest of information_schema is open to everyone
        if (tableName.equals(ROLES)) {
            return false;
        }

        if (INFORMATION_SCHEMA_NAME.equals(tableName.getSchemaName())) {
            return true;
        }

        SemiTransactionalHiveMetastore metastore = getMetastore(transaction);
        return listEnabledTablePrivileges(metastore, tableName.getSchemaName(), tableName.getTableName(), identity, metastoreContext)
                .filter(privilegeInfo -> !grantOptionRequired || privilegeInfo.isGrantOption())
                .anyMatch(privilegeInfo -> privilegeInfo.getHivePrivilege().equals(requiredPrivilege));
    }

    private boolean hasGrantOptionForPrivilege(ConnectorTransactionHandle transaction, ConnectorIdentity identity, MetastoreContext metastoreContext, Privilege privilege, SchemaTableName tableName)
    {
        if (isAdmin(transaction, identity, metastoreContext)) {
            return true;
        }

        SemiTransactionalHiveMetastore metastore = getMetastore(transaction);
        return listApplicableTablePrivileges(
                metastore,
                identity,
                metastoreContext,
                tableName.getSchemaName(), tableName.getTableName(), identity.getUser())
                .anyMatch(privilegeInfo -> privilegeInfo.getHivePrivilege().equals(toHivePrivilege(privilege)) && privilegeInfo.isGrantOption());
    }

    private boolean hasAdminOptionForRoles(ConnectorTransactionHandle transaction, ConnectorIdentity identity, MetastoreContext metastoreContext, Set<String> roles)
    {
        if (isAdmin(transaction, identity, metastoreContext)) {
            return true;
        }

        SemiTransactionalHiveMetastore metastore = getMetastore(transaction);
        // every requested role must be grantable (WITH ADMIN OPTION) by the current user
        Set<String> rolesWithGrantOption = listApplicableRoles(new PrestoPrincipal(USER, identity.getUser()), (PrestoPrincipal p) -> metastore.listRoleGrants(metastoreContext, p))
                .filter(RoleGrant::isGrantable)
                .map(RoleGrant::getRoleName)
                .collect(toSet());
        return rolesWithGrantOption.containsAll(roles);
    }

    private SemiTransactionalHiveMetastore getMetastore(ConnectorTransactionHandle transaction)
    {
        TransactionalMetadata metadata = hiveTransactionManager.get(transaction);
        return metadata.getMetastore();
    }
}
/* * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package com.sun.jmx.snmp; import com.sun.jmx.snmp.Enumerated; /** * Represents an SNMP integer. * * <p><b>This API is a Sun Microsystems internal API and is subject * to change without notice.</b></p> */ public class SnmpInt extends SnmpValue { private static final long serialVersionUID = -7163624758070343373L; // CONSTRUCTORS //------------- /** * Constructs a new <CODE>SnmpInt</CODE> from the specified integer value. * @param v The initialization value. * @exception IllegalArgumentException The specified value is smaller than <CODE>Integer.MIN_VALUE</CODE> * or larger than <CODE>Integer.MAX_VALUE</CODE>. */ public SnmpInt(int v) throws IllegalArgumentException { if ( isInitValueValid(v) == false ) { throw new IllegalArgumentException() ; } value = (long)v ; } /** * Constructs a new <CODE>SnmpInt</CODE> from the specified <CODE>Integer</CODE> value. * @param v The initialization value. * @exception IllegalArgumentException The specified value is smaller than <CODE>Integer.MIN_VALUE</CODE> * or larger than <CODE>Integer.MAX_VALUE</CODE>. */ public SnmpInt(Integer v) throws IllegalArgumentException { this(v.intValue()) ; } /** * Constructs a new <CODE>SnmpInt</CODE> from the specified long value. * @param v The initialization value. * @exception IllegalArgumentException The specified value is smaller than <CODE>Integer.MIN_VALUE</CODE> * or larger than <CODE>Integer.MAX_VALUE</CODE>. */ public SnmpInt(long v) throws IllegalArgumentException { if ( isInitValueValid(v) == false ) { throw new IllegalArgumentException() ; } value = v ; } /** * Constructs a new <CODE>SnmpInt</CODE> from the specified <CODE>Long</CODE> value. * @param v The initialization value. 
* @exception IllegalArgumentException The specified value is smaller than <CODE>Integer.MIN_VALUE</CODE> * or larger than <CODE>Integer.MAX_VALUE</CODE>. */ public SnmpInt(Long v) throws IllegalArgumentException { this(v.longValue()) ; } /** * Constructs a new <CODE>SnmpInt</CODE> from the specified <CODE>Enumerated</CODE> value. * @param v The initialization value. * @exception IllegalArgumentException The specified value is smaller than <CODE>Integer.MIN_VALUE</CODE> * or larger than <CODE>Integer.MAX_VALUE</CODE>. * @see Enumerated */ public SnmpInt(Enumerated v) throws IllegalArgumentException { this(v.intValue()) ; } /** * Constructs a new <CODE>SnmpInt</CODE> from the specified boolean value. * This constructor applies rfc1903 rule: * <p><blockquote><pre> * TruthValue ::= TEXTUAL-CONVENTION * STATUS current * DESCRIPTION * "Represents a boolean value." * SYNTAX INTEGER { true(1), false(2) } * </pre></blockquote> * @param v The initialization value. */ public SnmpInt(boolean v) { value = v ? 1 : 2 ; } // PUBLIC METHODS //--------------- /** * Returns the long value of this <CODE>SnmpInt</CODE>. * @return The value. */ public long longValue() { return value ; } /** * Converts the integer value to its <CODE>Long</CODE> form. * @return The <CODE>Long</CODE> representation of the value. */ public Long toLong() { return new Long(value) ; } /** * Converts the integer value to its integer form. * @return The integer representation of the value. */ public int intValue() { return (int) value ; } /** * Converts the integer value to its <CODE>Integer</CODE> form. * @return The <CODE>Integer</CODE> representation of the value. */ public Integer toInteger() { return new Integer((int)value) ; } /** * Converts the integer value to its <CODE>String</CODE> form. * @return The <CODE>String</CODE> representation of the value. */ public String toString() { return String.valueOf(value) ; } /** * Converts the integer value to its <CODE>SnmpOid</CODE> form. 
* @return The OID representation of the value. */ public SnmpOid toOid() { return new SnmpOid(value) ; } /** * Extracts the integer from an index OID and returns its * value converted as an <CODE>SnmpOid</CODE>. * @param index The index array. * @param start The position in the index array. * @return The OID representing the integer value. * @exception SnmpStatusException There is no integer value * available at the start position. */ public static SnmpOid toOid(long[] index, int start) throws SnmpStatusException { try { return new SnmpOid(index[start]) ; } catch(IndexOutOfBoundsException e) { throw new SnmpStatusException(SnmpStatusException.noSuchName) ; } } /** * Scans an index OID, skips the integer value and returns the position * of the next value. * @param index The index array. * @param start The position in the index array. * @return The position of the next value. * @exception SnmpStatusException There is no integer value * available at the start position. */ public static int nextOid(long[] index, int start) throws SnmpStatusException { if (start >= index.length) { throw new SnmpStatusException(SnmpStatusException.noSuchName) ; } else { return start + 1 ; } } /** * Appends an <CODE>SnmpOid</CODE> representing an <CODE>SnmpInt</CODE> to another OID. * @param source An OID representing an <CODE>SnmpInt</CODE> value. * @param dest Where source should be appended. */ public static void appendToOid(SnmpOid source, SnmpOid dest) { if (source.getLength() != 1) { throw new IllegalArgumentException() ; } dest.append(source) ; } /** * Performs a clone action. This provides a workaround for the * <CODE>SnmpValue</CODE> interface. * @return The <CODE>SnmpValue</CODE> clone. */ final synchronized public SnmpValue duplicate() { return (SnmpValue) clone() ; } /** * Clones the <CODE>SnmpInt</CODE> object, making a copy of its data. * @return The object clone. 
*/ final synchronized public Object clone() { SnmpInt newclone = null ; try { newclone = (SnmpInt) super.clone() ; newclone.value = value ; } catch (CloneNotSupportedException e) { throw new InternalError(e) ; // vm bug. } return newclone ; } /** * Returns a textual description of the type object. * @return ASN.1 textual description. */ public String getTypeName() { return name ; } /** * This method has been defined to allow the sub-classes * of SnmpInt to perform their own control at intialization time. */ boolean isInitValueValid(int v) { if ((v < Integer.MIN_VALUE) || (v > Integer.MAX_VALUE)) { return false; } return true; } /** * This method has been defined to allow the sub-classes * of SnmpInt to perform their own control at intialization time. */ boolean isInitValueValid(long v) { if ((v < Integer.MIN_VALUE) || (v > Integer.MAX_VALUE)) { return false; } return true; } // VARIABLES //---------- /** * Name of the type. */ final static String name = "Integer32" ; /** * This is where the value is stored. This long is signed. * @serial */ protected long value = 0 ; }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.stress;

import static com.google.common.base.Charsets.UTF_8;

import java.io.*;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.cassandra.client.ClientLibrary;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ConfigurationException;
import org.apache.cassandra.db.ColumnFamilyType;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.LamportClock;
import org.apache.commons.cli.*;
import org.apache.commons.lang.StringUtils;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

/**
 * Parsed configuration and shared mutable counters for one cassandra-stress run.
 * The constructor parses the command line, validates option combinations, and
 * (for some operations) pre-generates per-server key sets; it may call
 * System.exit directly on certain options/errors.
 */
public class Session implements Serializable
{
    // command line options
    public static final Options availableOptions = new Options();

    public static final String DEFAULT_COMPARATOR = "AsciiType";
    public static final String DEFAULT_VALIDATOR = "BytesType";

    private static InetAddress localInetAddress;

    // live run statistics, shared across worker threads
    public final AtomicInteger operations;
    public final AtomicInteger keys;
    public final AtomicInteger columnCount;
    public final AtomicLong bytes;
    public final AtomicLong latency;
    public final ConcurrentLinkedQueue<Long> latencies;

    // Registers every supported command-line option exactly once, at class load.
    // NOTE(review): the COPS-specific options below use "" as the short-option
    // name; verify commons-cli accepts an empty short option in the version in use.
    static
    {
        availableOptions.addOption("h",  "help",                 false,  "Show this help message and exit");
        availableOptions.addOption("n",  "num-keys",             true,   "Number of keys, default:1000000");
        availableOptions.addOption("F",  "num-different-keys",   true,   "Number of different keys (if < NUM-KEYS, the same key will re-used multiple times), default:NUM-KEYS");
        availableOptions.addOption("N",  "skip-keys",            true,   "Fraction of keys to skip initially, default:0");
        availableOptions.addOption("t",  "threads",              true,   "Number of threads to use, default:50");
        availableOptions.addOption("c",  "columns",              true,   "Number of columns per key, default:5");
        availableOptions.addOption("S",  "column-size",          true,   "Size of column values in bytes, default:34");
        availableOptions.addOption("C",  "cardinality",          true,   "Number of unique values stored in columns, default:50");
        availableOptions.addOption("d",  "nodes",                true,   "Host nodes (comma separated), default:locahost");
        availableOptions.addOption("D",  "nodesfile",            true,   "File containing host nodes (one per line)");
        availableOptions.addOption("s",  "stdev",                true,   "Standard Deviation Factor, default:0.1");
        availableOptions.addOption("r",  "random",               false,  "Use random key generator (STDEV will have no effect), default:false");
        availableOptions.addOption("f",  "file",                 true,   "Write output to given file");
        availableOptions.addOption("p",  "port",                 true,   "Thrift port, default:9160");
        availableOptions.addOption("m",  "unframed",             false,  "Use unframed transport, default:false");
        availableOptions.addOption("o",  "operation",            true,   "Operation to perform (INSERT, READ, RANGE_SLICE, INDEXED_RANGE_SLICE, MULTI_GET, COUNTER_ADD, COUNTER_GET), default:INSERT");
        availableOptions.addOption("u",  "supercolumns",         true,   "Number of super columns per key, default:1");
        availableOptions.addOption("y",  "family-type",          true,   "Column Family Type (Super, Standard), default:Standard");
        availableOptions.addOption("K",  "keep-trying",          true,   "Retry on-going operation N times (in case of failure). positive integer, default:10");
        availableOptions.addOption("k",  "keep-going",           false,  "Ignore errors inserting or reading (when set, --keep-trying has no effect), default:false");
        availableOptions.addOption("i",  "progress-interval",    true,   "Progress Report Interval (seconds), default:10");
        availableOptions.addOption("g",  "keys-per-call",        true,   "Number of keys to get_range_slices or multiget per call, default:1000");
        availableOptions.addOption("l",  "replication-factor",   true,   "Replication Factor to use when creating needed column families, default:1");
        availableOptions.addOption("L",  "enable-cql",           false,  "Perform queries using CQL (Cassandra Query Language).");
        availableOptions.addOption("e",  "consistency-level",    true,   "Consistency Level to use (ONE, QUORUM, LOCAL_QUORUM, EACH_QUORUM, ALL, ANY), default:ONE");
        availableOptions.addOption("x",  "create-index",         true,   "Type of index to create on needed column families (KEYS)");
        availableOptions.addOption("R",  "replication-strategy", true,   "Replication strategy to use (only on insert if keyspace does not exist), default:org.apache.cassandra.locator.SimpleStrategy");
        availableOptions.addOption("O",  "strategy-properties",  true,   "Replication strategy properties in the following format <dc_name>:<num>,<dc_name>:<num>,...");
        availableOptions.addOption("W",  "no-replicate-on-write",false,  "Set replicate_on_write to false for counters. Only counter add with CL=ONE will work");
        availableOptions.addOption("V",  "average-size-values",  false,  "Generate column values of average rather than specific size");
        availableOptions.addOption("T",  "send-to",              true,   "Send this as a request to the stress daemon at specified address.");
        availableOptions.addOption("I",  "compression",          true,   "Specify the compression to use for sstable, default:no compression");
        availableOptions.addOption("Q",  "query-names",          true,   "Comma-separated list of column names to retrieve from each row.");
        availableOptions.addOption("Z",  "compaction-strategy",  true,   "CompactionStrategy to use.");
        availableOptions.addOption("U",  "comparator",           true,   "Column Comparator to use. Currently supported types are: TimeUUIDType, AsciiType, UTF8Type.");
        availableOptions.addOption("A",  "num-dependencies",     true,   "Number of dependencies to attach to each operation.");
        availableOptions.addOption("",   "just-create-keyspace", false,  "Only create the keyspace and then exit");
        availableOptions.addOption("",   "stress-index",         true,   "Index of this stress client out of STRESS-COUNT. Allows for disjoint INSERTS on different servers.");
        availableOptions.addOption("",   "stress-count",         true,   "Total number of coordinating stress clients");
        availableOptions.addOption("",   "write-fraction",       true,   "Fraction of ops to be writes, 0-1");
        availableOptions.addOption("",   "columns-per-key-read", true,   "");
        availableOptions.addOption("",   "columns-per-key-write",true,   "");
        availableOptions.addOption("",   "keys-per-read",        true,   "");
        availableOptions.addOption("",   "keys-per-write",       true,   "");
        availableOptions.addOption("",   "write-transaction-fraction", true, "Fraction of ops to be transactions, 0-1");
        availableOptions.addOption("",   "num-servers",          true,   "The number of servers in each cluster, required for write-txn workload");
        availableOptions.addOption("",   "keys-per-server",      true,   "The number of keys to write on each server in a write txn");
        availableOptions.addOption("",   "servers-per-txn",      true,   "The number of servers to include in each write txn");
        availableOptions.addOption("",   "server-index",         true,   "Index of the server (out of num-servers) to load for DYNAMIC_ONE_SERVER");
    }

    // parsed option values with their documented defaults
    private int numKeys = 1000 * 1000;
    private int numDifferentKeys = numKeys;
    private float skipKeys = 0;
    private int threads = 50;
    private int columns = 5;
    private int columnSize = 34;
    private int cardinality = 50;
    private String[] nodes = new String[] { "127.0.0.1" };
    private boolean random = false;
    private boolean unframed = false;
    private int retryTimes = 10;
    private int port = 9160;
    private int superColumns = 1;
    private String compression = null;
    private String compactionStrategy = null;
    private int progressInterval = 10;
    private int keysPerCall = 1000;
    private boolean replicateOnWrite = true;
    private boolean ignoreErrors = false;
    private boolean enable_cql = false;

    private final String outFileName;

    private IndexType indexType = null;
    private Stress.Operations operation = Stress.Operations.INSERT;
    private ColumnFamilyType columnFamilyType = ColumnFamilyType.Standard;
    private ConsistencyLevel consistencyLevel = ConsistencyLevel.ONE;
    private String replicationStrategy = "org.apache.cassandra.locator.SimpleStrategy";
    private final Map<String, String> replicationStrategyOptions = new HashMap<String, String>();

    // if we know exactly column names that we want to read (set by -Q option)
    public final List<ByteBuffer> columnNames;

    public final boolean averageSizeValues;

    // required by Gaussian distribution.
    protected int mean;
    protected float sigma;

    public final InetAddress sendToDaemon;
    public final String comparator;
    public final boolean timeUUIDComparator;

    //COPS specific microbenchmarking options
    private int numDependencies = 0;
    private final Set<Dep> pregeneratedDependencies = new HashSet<Dep>();
    private int stressIndex = 0;
    private int stressCount = 1;
    private final int keysOffset = 0;
    private boolean justCreateKeyspace = false;

    //COPS dynamic workload generator options
    Map<String, Integer> localServerIPAndPorts = new HashMap<String, Integer>(); //we'll piggyback this off hosts and just use that and assume 9160 for the port
    private double write_fraction = -1; //value size already an option
    private int columns_per_key_read = 0;
    private int columns_per_key_write = 0;
    private int keys_per_read = 0;
    private int keys_per_write = 0;
    private double write_transaction_fraction = -1;

    // for write txn experiment where we want to control the exact number of keys being accessed on each server
    private int num_servers = 0;
    private int keys_per_server = 0;
    private int servers_per_txn = 0;
    private int server_index = -1;
    private static ArrayList<ArrayList<ByteBuffer>> generatedKeysByServer;

    /**
     * Parses the command line and populates all fields.
     * Throws IllegalArgumentException for parse failures (and for -h/--help),
     * and may call System.exit for stray arguments or invalid -Z/-U values.
     */
    public Session(String[] arguments) throws IllegalArgumentException
    {
        float STDev = 0.1f;
        CommandLineParser parser = new PosixParser();

        try
        {
            CommandLine cmd = parser.parse(availableOptions, arguments);

            if (cmd.getArgs().length > 0)
            {
                System.err.println("Application does not allow arbitrary arguments: " + StringUtils.join(cmd.getArgList(), ", "));
                System.exit(1);
            }

            if (cmd.hasOption("h"))
                throw new IllegalArgumentException("help");

            if (cmd.hasOption("n"))
                numKeys = Integer.parseInt(cmd.getOptionValue("n"));

            if (cmd.hasOption("F"))
                numDifferentKeys = Integer.parseInt(cmd.getOptionValue("F"));
            else
                numDifferentKeys = numKeys;

            if (cmd.hasOption("N"))
                skipKeys = Float.parseFloat(cmd.getOptionValue("N"));

            if (cmd.hasOption("t"))
                threads = Integer.parseInt(cmd.getOptionValue("t"));

            if (cmd.hasOption("c"))
                columns = Integer.parseInt(cmd.getOptionValue("c"));

            if (cmd.hasOption("S"))
                columnSize = Integer.parseInt(cmd.getOptionValue("S"));

            if (cmd.hasOption("C"))
                cardinality = Integer.parseInt(cmd.getOptionValue("C"));

            if (cmd.hasOption("d"))
                nodes = cmd.getOptionValue("d").split(",");

            // -D: read host list from a file, one host per line (blank lines skipped)
            if (cmd.hasOption("D"))
            {
                try
                {
                    String node = null;
                    List<String> tmpNodes = new ArrayList<String>();
                    BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(cmd.getOptionValue("D"))));
                    while ((node = in.readLine()) != null)
                    {
                        if (node.length() > 0)
                            tmpNodes.add(node);
                    }
                    nodes = tmpNodes.toArray(new String[tmpNodes.size()]);
                    in.close();
                }
                catch(IOException ioe)
                {
                    throw new RuntimeException(ioe);
                }
            }

            if (cmd.hasOption("s"))
                STDev = Float.parseFloat(cmd.getOptionValue("s"));

            if (cmd.hasOption("r"))
                random = true;

            outFileName = (cmd.hasOption("f")) ? cmd.getOptionValue("f") : null;

            if (cmd.hasOption("p"))
                port = Integer.parseInt(cmd.getOptionValue("p"));

            if (cmd.hasOption("m"))
                unframed = Boolean.parseBoolean(cmd.getOptionValue("m"));

            if (cmd.hasOption("o"))
                operation = Stress.Operations.valueOf(cmd.getOptionValue("o").toUpperCase());

            if (cmd.hasOption("u"))
                superColumns = Integer.parseInt(cmd.getOptionValue("u"));

            if (cmd.hasOption("y"))
                columnFamilyType = ColumnFamilyType.valueOf(cmd.getOptionValue("y"));

            if (cmd.hasOption("K"))
            {
                retryTimes = Integer.valueOf(cmd.getOptionValue("K"));

                if (retryTimes <= 0)
                {
                    throw new RuntimeException("--keep-trying option value should be > 0");
                }
            }

            // -k overrides -K: a single attempt, with errors ignored
            if (cmd.hasOption("k"))
            {
                retryTimes = 1;
                ignoreErrors = true;
            }

            if (cmd.hasOption("i"))
                progressInterval = Integer.parseInt(cmd.getOptionValue("i"));

            if (cmd.hasOption("g"))
                keysPerCall = Integer.parseInt(cmd.getOptionValue("g"));

            if (cmd.hasOption("e"))
                consistencyLevel = ConsistencyLevel.valueOf(cmd.getOptionValue("e").toUpperCase());

            if (cmd.hasOption("x"))
                indexType = IndexType.valueOf(cmd.getOptionValue("x").toUpperCase());

            if (cmd.hasOption("R"))
                replicationStrategy = cmd.getOptionValue("R");

            if (cmd.hasOption("l"))
                replicationStrategyOptions.put("replication_factor", String.valueOf(Integer.parseInt(cmd.getOptionValue("l"))));
            else if (replicationStrategy.endsWith("SimpleStrategy"))
                replicationStrategyOptions.put("replication_factor", "1");

            if (cmd.hasOption("L"))
                enable_cql = true;

            if (cmd.hasOption("O"))
            {
                String[] pairs = StringUtils.split(cmd.getOptionValue("O"), ',');

                for (String pair : pairs)
                {
                    String[] keyAndValue = StringUtils.split(pair, ':');

                    if (keyAndValue.length != 2)
                        throw new RuntimeException("Invalid --strategy-properties value.");

                    replicationStrategyOptions.put(keyAndValue[0], keyAndValue[1]);
                }
            }

            if (cmd.hasOption("W"))
                replicateOnWrite = false;

            if (cmd.hasOption("I"))
                compression = cmd.getOptionValue("I");

            averageSizeValues = cmd.hasOption("V");

            try
            {
                sendToDaemon = cmd.hasOption("send-to") ? InetAddress.getByName(cmd.getOptionValue("send-to")) : null;
            }
            catch (UnknownHostException e)
            {
                throw new RuntimeException(e);
            }

            // -Q: resolve requested column names through the default comparator
            if (cmd.hasOption("Q"))
            {
                AbstractType comparator = TypeParser.parse(DEFAULT_COMPARATOR);

                String[] names = StringUtils.split(cmd.getOptionValue("Q"), ",");
                columnNames = new ArrayList<ByteBuffer>(names.length);

                for (String columnName : names)
                    columnNames.add(comparator.fromString(columnName));
            }
            else
            {
                columnNames = null;
            }

            if (cmd.hasOption("Z"))
            {
                compactionStrategy = cmd.getOptionValue("Z");

                try
                {
                    // validate compaction strategy class
                    CFMetaData.createCompactionStrategy(compactionStrategy);
                }
                catch (ConfigurationException e)
                {
                    System.err.println(e.getMessage());
                    System.exit(1);
                }
            }

            if (cmd.hasOption("U"))
            {
                AbstractType parsed = null;

                try
                {
                    parsed = TypeParser.parse(cmd.getOptionValue("U"));
                }
                catch (ConfigurationException e)
                {
                    System.err.println(e.getMessage());
                    System.exit(1);
                }

                comparator = cmd.getOptionValue("U");
                timeUUIDComparator = parsed instanceof TimeUUIDType;

                if (!(parsed instanceof TimeUUIDType || parsed instanceof AsciiType || parsed instanceof UTF8Type))
                {
                    System.err.println("Currently supported types are: TimeUUIDType, AsciiType, UTF8Type.");
                    System.exit(1);
                }
            }
            else
            {
                comparator = null;
                timeUUIDComparator = false;
            }

            //num-dependencies
            if (cmd.hasOption("A"))
            {
                numDependencies = Integer.parseInt(cmd.getOptionValue("A"));
                for (int i = 0; i < numDependencies; ++i)
                {
                    //we'll just include dummy dependencies, this is for microbenchmarks so they shouldn't be checked
                    ByteBuffer locator_key = ByteBufferUtil.bytes(String.valueOf(i));
                    long timestamp = i;
                    pregeneratedDependencies.add(new Dep(locator_key, timestamp));
                }
            }

            if (cmd.hasOption("stress-index"))
            {
                stressIndex = Integer.parseInt(cmd.getOptionValue("stress-index"));
                if (stressIndex < 0)
                    throw new RuntimeException("Invalid --stress-index value");
            }

            if (cmd.hasOption("stress-count"))
            {
                stressCount = Integer.parseInt(cmd.getOptionValue("stress-count"));
                if (stressCount <= 0)
                    throw new RuntimeException("Invalid --stress-count value");
            }

            if (cmd.hasOption("just-create-keyspace"))
            {
                justCreateKeyspace = true;
            }

            if (cmd.hasOption("write-fraction"))
            {
                write_fraction = Double.parseDouble(cmd.getOptionValue("write-fraction"));
                if (write_fraction < 0 || write_fraction > 1)
                    throw new RuntimeException("Invalid --write-fraction value");
            }

            if (cmd.hasOption("columns-per-key-read"))
            {
                columns_per_key_read = Integer.parseInt(cmd.getOptionValue("columns-per-key-read"));
                if (columns_per_key_read <= 0)
                    throw new RuntimeException("Invalid columns-per-key-read value");
            }

            if (cmd.hasOption("columns-per-key-write"))
            {
                columns_per_key_write = Integer.parseInt(cmd.getOptionValue("columns-per-key-write"));
                if (columns_per_key_write <= 0)
                    throw new RuntimeException("Invalid columns-per-key-write value");
            }

            if (cmd.hasOption("keys-per-read"))
            {
                keys_per_read = Integer.parseInt(cmd.getOptionValue("keys-per-read"));
                if (keys_per_read <= 0)
                    throw new RuntimeException("Invalid keys-per-read value");
            }

            if (cmd.hasOption("keys-per-write"))
            {
                keys_per_write = Integer.parseInt(cmd.getOptionValue("keys-per-write"));
                if (keys_per_write <= 0)
                    throw new RuntimeException("Invalid keys-per-write value");
            }

            if (cmd.hasOption("write-transaction-fraction"))
            {
                write_transaction_fraction = Double.parseDouble(cmd.getOptionValue("write-transaction-fraction"));
                if (write_transaction_fraction < 0 || write_transaction_fraction > 1)
                    throw new RuntimeException("Invalid --write-transaction-fraction value");
            }

            if (cmd.hasOption("num-servers"))
            {
                num_servers = Integer.parseInt(cmd.getOptionValue("num-servers"));
                if (num_servers <= 0)
                    throw new RuntimeException("Invalid num-servers value");
            }

            if (cmd.hasOption("keys-per-server"))
            {
                keys_per_server = Integer.parseInt(cmd.getOptionValue("keys-per-server"));
                if (keys_per_server <= 0)
                    throw new RuntimeException("Invalid key-per-server value");
            }

            if (cmd.hasOption("servers-per-txn"))
            {
                servers_per_txn = Integer.parseInt(cmd.getOptionValue("servers-per-txn"));
                if (servers_per_txn <= 0)
                    throw new RuntimeException("Invalid servers-per-txn value");
            }

            if (cmd.hasOption("server-index"))
            {
                server_index = Integer.parseInt(cmd.getOptionValue("server-index"));
                if (server_index < 0)
                    throw new RuntimeException("Invalid server-index value");
            }
        }
        catch (ParseException e)
        {
            throw new IllegalArgumentException(e.getMessage(), e);
        }
        catch (ConfigurationException e)
        {
            throw new IllegalStateException(e.getMessage(), e);
        }

        if (operation == Stress.Operations.DYNAMIC || operation == Stress.Operations.DYNAMIC_ONE_SERVER)
        {
            //Must set all dynamic workload parameters or none
            // NOTE(review): "columnSize != 34" uses the default (34) as a sentinel for
            // "user set -S"; passing -S 34 explicitly will fail this check — confirm intended.
            if (! (write_fraction >= 0 && columns_per_key_read != 0 && columns_per_key_write != 0 && keys_per_read != 0 && keys_per_write != 0 && write_transaction_fraction >= 0 && columnSize != 34))
            {
                throw new RuntimeException("All dynamic options must be set");
            }

            if (operation == Stress.Operations.DYNAMIC_ONE_SERVER)
            {
                if (num_servers == 0 || server_index == -1)
                {
                    throw new RuntimeException("Dynamic One Server requires num-servers, and server-index");
                }
                //DYNAMIC_ONE_SERVER should get a numDifferentKeys==totalKeys written in the system, just like normal dynamic...
                dynamicOneServerGenerateKeysForEachServer(num_servers, numDifferentKeys);
            }
        }

        if (operation == Stress.Operations.WRITE_TXN || operation == Stress.Operations.BATCH_MUTATE)
        {
            if (num_servers == 0 || keys_per_server == 0 || servers_per_txn == 0 || numKeys == 0 || columns_per_key_write == 0)
            {
                throw new RuntimeException("Write txn required num-servers, keys-per-server, servers-per-txn, columns-per-key-write, num-keys, and num-different-keys options");
            }
            assert servers_per_txn <= num_servers;

            generateKeysForEachServer(num_servers, numDifferentKeys);
        }

        // every node is assumed to speak thrift on 9160 (see localServerIPAndPorts comment)
        for (String node : nodes)
        {
            localServerIPAndPorts.put(node, 9160);
        }

        if (justCreateKeyspace)
        {
            this.createKeySpaces();
            System.exit(0);
        }

        mean  = numDifferentKeys / 2;
        sigma = numDifferentKeys * STDev;

        operations = new AtomicInteger();
        keys = new AtomicInteger();
        columnCount = new AtomicInteger();
        bytes = new AtomicLong();
        latency = new AtomicLong();
        latencies = new ConcurrentLinkedQueue<Long>();
    }

    // simple accessors for the parsed configuration
    public int getCardinality()
    {
        return cardinality;
    }

    public int getColumnSize()
    {
        return columnSize;
    }

    public boolean isUnframed()
    {
        return unframed;
    }

    public int getColumnsPerKey()
    {
        return columns;
    }

    public ColumnFamilyType getColumnFamilyType()
    {
        return columnFamilyType;
    }

    public int getNumKeys()
    {
        return numKeys;
    }

    public int getNumDifferentKeys()
    {
        return numDifferentKeys;
    }

    // each coordinating stress client works on its own disjoint key range
    public int getKeysOffset()
    {
        return numDifferentKeys*stressIndex;
    }

    public int getThreads()
    {
        return threads;
    }

    public float getSkipKeys()
    {
        return skipKeys;
    }

    public int getSuperColumns()
    {
        return superColumns;
    }

    public int getKeysPerThread()
    {
        return numKeys / threads;
    }

    public int getTotalKeysLength()
    {
        //return Integer.toString(numDifferentKeys*stressCount).length();
        // fixed zero-padded key width of 10 digits
        return 10;
    }

    public int getNumTotalKeys()
    {
        return numDifferentKeys*stressCount;
    }

    public ConsistencyLevel getConsistencyLevel()
    {
        return consistencyLevel;
    }

    public int getRetryTimes()
    {
        return retryTimes;
    }

    public boolean ignoreErrors()
    {
        return ignoreErrors;
    }

    public Stress.Operations getOperation()
    {
        return operation;
    }

    // opens a fresh stream per call when -f was given; otherwise stdout
    public PrintStream getOutputStream()
    {
        try
        {
            return (outFileName == null) ? System.out : new PrintStream(new FileOutputStream(outFileName));
        }
        catch (FileNotFoundException e)
        {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    public int getProgressInterval()
    {
        return progressInterval;
    }

    public boolean useRandomGenerator()
    {
        return random;
    }

    public int getKeysPerCall()
    {
        return keysPerCall;
    }

    // required by Gaussian distribution
    public int getMean()
    {
        return mean;
    }

    // required by Gaussian distribution
    public float getSigma()
    {
        return sigma;
    }

    public boolean isCQL()
    {
        return enable_cql;
    }

    public Set<Dep> getPregeneratedDependencies()
    {
        return pregeneratedDependencies;
    }

    public double getWrite_fraction()
    {
        return write_fraction;
    }

    public int getColumns_per_key_read()
    {
        return columns_per_key_read;
    }

    public int getColumns_per_key_write()
    {
        return columns_per_key_write;
    }

    public int getKeys_per_read()
    {
        return keys_per_read;
    }

    public int getKeys_per_write()
    {
        return keys_per_write;
    }

    public double getWrite_transaction_fraction()
    {
        return write_transaction_fraction;
    }

    public int getNum_servers()
    {
        return num_servers;
    }

    public int getKeys_per_server()
    {
        return keys_per_server;
    }

    public int getServers_per_txn()
    {
        return servers_per_txn;
    }

    public int getServerIndex()
    {
        assert server_index != -1;
        return server_index;
    }

    /**
     * Create Keyspace1 with Standard1 and Super1 column families
     */
    public void createKeySpaces()
    {
        KsDef keyspace = new KsDef();
        String defaultComparator = comparator == null ? DEFAULT_COMPARATOR : comparator;

        // column family for standard columns
        CfDef standardCfDef = new CfDef("Keyspace1", "Standard1");
        Map<String, String> compressionOptions = new HashMap<String, String>();
        if (compression != null)
            compressionOptions.put("sstable_compression", compression);

        standardCfDef.setComparator_type(defaultComparator)
                     .setDefault_validation_class(DEFAULT_VALIDATOR)
                     .setCompression_options(compressionOptions);
        standardCfDef.setCaching("all");
        standardCfDef.setRead_repair_chance(0);

        if (indexType != null)
        {
            ColumnDef standardColumn = new ColumnDef(ByteBufferUtil.bytes("C1"), "BytesType");
            standardColumn.setIndex_type(indexType).setIndex_name("Idx1");
            standardCfDef.setColumn_metadata(Arrays.asList(standardColumn));
        }

        // column family with super columns
        CfDef superCfDef = new CfDef("Keyspace1", "Super1").setColumn_type("Super");
        superCfDef.setComparator_type(DEFAULT_COMPARATOR)
                  .setSubcomparator_type(defaultComparator)
                  .setDefault_validation_class(DEFAULT_VALIDATOR)
                  .setCompression_options(compressionOptions)
                  .setRead_repair_chance(0);

        // column family for standard counters
        CfDef counterCfDef = new CfDef("Keyspace1", "Counter1");
        counterCfDef.setDefault_validation_class("CounterColumnType")
                    .setReplicate_on_write(replicateOnWrite)
                    .setCompression_options(compressionOptions)
                    .setRead_repair_chance(0);

        // column family with counter super columns
        CfDef counterSuperCfDef = new CfDef("Keyspace1", "SuperCounter1");
        counterSuperCfDef.setDefault_validation_class("CounterColumnType")
                         .setReplicate_on_write(replicateOnWrite)
                         .setColumn_type("Super")
                         .setCompression_options(compressionOptions)
                         .setRead_repair_chance(0);

        keyspace.setName("Keyspace1");
        keyspace.setStrategy_class(replicationStrategy);

        if (!replicationStrategyOptions.isEmpty())
        {
            keyspace.setStrategy_options(replicationStrategyOptions);
        }

        if (compactionStrategy != null)
        {
            standardCfDef.setCompaction_strategy(compactionStrategy);
            superCfDef.setCompaction_strategy(compactionStrategy);
            counterCfDef.setCompaction_strategy(compactionStrategy);
            counterSuperCfDef.setCompaction_strategy(compactionStrategy);
        }

        keyspace.setCf_defs(new ArrayList<CfDef>(Arrays.asList(standardCfDef, superCfDef, counterCfDef, counterSuperCfDef)));

        Cassandra.Client client = getClient(false);

        try
        {
            client.system_add_keyspace(keyspace);

            int sleepTime = 5;
            System.out.println(String.format("Created keyspaces. Sleeping %ss for propagation.", sleepTime));
            Thread.sleep(sleepTime * 1000); // seconds
        }
        catch (InvalidRequestException e)
        {
            System.err.println("Unable to create stress keyspace: " + e.getWhy());
        }
        catch (Exception e)
        {
            System.err.println(e.getMessage());
        }
    }

    // Builds a COPS client library connected to all known nodes on Keyspace1.
    public ClientLibrary getClientLibrary()
    {
        // Allow use of client library with other consistency levels for micro-benchmarks
        //if (this.getConsistencyLevel() != ConsistencyLevel.LOCAL_QUORUM) {
        //    throw new RuntimeException("Session.getClientLibrary is only meant for use with consistency level LOCAL_QUORUM");
        //}

        try
        {
            return new ClientLibrary(localServerIPAndPorts, "Keyspace1", this.getConsistencyLevel());
        }
        catch (Exception e)
        {
            e.printStackTrace();
            throw new RuntimeException(e.getMessage());
        }
    }

    // Picks one of serverNum's pre-generated keys uniformly at random.
    public ByteBuffer getRandGeneratedKey(int serverNum)
    {
        int serverKeyCount = generatedKeysByServer.get(serverNum).size();
        return generatedKeysByServer.get(serverNum).get(Stress.randomizer.nextInt(serverKeyCount));
    }

    // Randomly generates keys until each server bucket holds totalNumKeys/numServers keys,
    // assigning each key to a server by where its md5 token falls in the ring.
    private void generateKeysForEachServer(int numServers, int totalNumKeys)
    {
        int keysPerServer = totalNumKeys/numServers;

        generatedKeysByServer = new ArrayList<ArrayList<ByteBuffer>>(numServers);
        for (int i = 0; i < numServers; i++)
        {
            generatedKeysByServer.add(new ArrayList<ByteBuffer>(keysPerServer));
        }

        // Assuming we're using the random partitioner, which we are.
        // We need to generate keys for servers by randomly generating keys
        // and then matching them to whatever their md5 maps to.
        boolean allServersFull;
        Random randomizer = new Random();
        do
        {
            String randKeyStr = String.format("%0" + (getTotalKeysLength()) + "d", randomizer.nextInt(10*(getNumDifferentKeys() - 1)));
            ByteBuffer randKey = ByteBuffer.wrap(randKeyStr.getBytes(UTF_8));

            double hashedRandKey = FBUtilities.hashToBigInteger(randKey).doubleValue();

            //Cassandra's keyspace is [0, 2**127)
            double keyrangeSize = Math.pow(2, 127) / numServers;
            int serverIndex = (int) (hashedRandKey / keyrangeSize);

            if (generatedKeysByServer.get(serverIndex).size() < keysPerServer)
            {
                generatedKeysByServer.get(serverIndex).add(randKey);
            }

            allServersFull = true;
            for (int i = 0; i < numServers; i++)
            {
                //System.out.println("Server " + i + " has " + generatedKeysByServer.get(i).size() + " keys");
                if (generatedKeysByServer.get(i).size() < keysPerServer)
                {
                    allServersFull = false;
                    break;
                }
            }
        } while (!allServersFull);
    }

    // Deterministically buckets the first numPopulatedKeys zero-padded keys by the
    // server their md5 token maps to (no per-server quota, unlike the method above).
    private void dynamicOneServerGenerateKeysForEachServer(int numServers, int numPopulatedKeys)
    {
        generatedKeysByServer = new ArrayList<ArrayList<ByteBuffer>>(numServers);
        for (int i = 0; i < numServers; i++)
        {
            generatedKeysByServer.add(new ArrayList<ByteBuffer>());
        }

        // Assuming we're using the random partitioner, which we are.
        // We need to generate keys for servers by randomly generating keys
        // and then matching them to whatever their md5 maps to.
        for (int keyI = 0; keyI < numPopulatedKeys; keyI++)
        {
            String keyStr = String.format("%0" + (getTotalKeysLength()) + "d", keyI);
            ByteBuffer key = ByteBuffer.wrap(keyStr.getBytes(UTF_8));

            double hashedKey = FBUtilities.hashToBigInteger(key).doubleValue();

            //Cassandra's keyspace is [0, 2**127)
            double keyrangeSize = Math.pow(2, 127) / numServers;
            int serverIndex = (int) (hashedKey / keyrangeSize);

            generatedKeysByServer.get(serverIndex).add(key);
        }
    }

    /**
     * Thrift client connection with Keyspace1 set.
     * @return cassandra client connection
     */
    public Cassandra.Client getClient()
    {
        return getClient(true);
    }

    /**
     * Thrift client connection
     * @param setKeyspace - should we set keyspace for client or not
     * @return cassandra client connection
     */
    public Cassandra.Client getClient(boolean setKeyspace)
    {
        // random node selection for fake load balancing
        String currentNode = nodes[Stress.randomizer.nextInt(nodes.length)];

        TSocket socket = new TSocket(currentNode, port);
        TTransport transport = (isUnframed()) ? socket : new TFramedTransport(socket);
        Cassandra.Client client = new Cassandra.Client(new TBinaryProtocol(transport));

        try
        {
            transport.open();

            if (setKeyspace)
            {
                client.set_keyspace("Keyspace1", LamportClock.sendTimestamp());
            }
        }
        catch (InvalidRequestException e)
        {
            throw new RuntimeException(e.getWhy());
        }
        catch (Exception e)
        {
            throw new RuntimeException(e.getMessage());
        }

        return client;
    }

    // Lazily caches this host's InetAddress (not thread-safe; worst case a duplicate lookup).
    public static InetAddress getLocalAddress()
    {
        if (localInetAddress == null)
        {
            try
            {
                localInetAddress = InetAddress.getLocalHost();
            }
            catch (UnknownHostException e)
            {
                throw new RuntimeException(e);
            }
        }

        return localInetAddress;
    }
}
/* * Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* ******************************************************************************* * Copyright (C) 2009, International Business Machines Corporation and * * others. All Rights Reserved. 
* ******************************************************************************* */ package sun.util.locale; public final class AsciiUtil { public static boolean caseIgnoreMatch(String s1, String s2) { if (s1 == s2) { return true; } int len = s1.length(); if (len != s2.length()) { return false; } int i = 0; while (i < len) { char c1 = s1.charAt(i); char c2 = s2.charAt(i); if (c1 != c2 && toLower(c1) != toLower(c2)) { break; } i++; } return (i == len); } public static int caseIgnoreCompare(String s1, String s2) { if (s1 == s2) { return 0; } return AsciiUtil.toLowerString(s1).compareTo(AsciiUtil.toLowerString(s2)); } public static char toUpper(char c) { if (c >= 'a' && c <= 'z') { c -= 0x20; } return c; } public static char toLower(char c) { if (c >= 'A' && c <= 'Z') { c += 0x20; } return c; } public static String toLowerString(String s) { int idx = 0; for (; idx < s.length(); idx++) { char c = s.charAt(idx); if (c >= 'A' && c <= 'Z') { break; } } if (idx == s.length()) { return s; } StringBuilder buf = new StringBuilder(s.substring(0, idx)); for (; idx < s.length(); idx++) { buf.append(toLower(s.charAt(idx))); } return buf.toString(); } public static String toUpperString(String s) { int idx = 0; for (; idx < s.length(); idx++) { char c = s.charAt(idx); if (c >= 'a' && c <= 'z') { break; } } if (idx == s.length()) { return s; } StringBuilder buf = new StringBuilder(s.substring(0, idx)); for (; idx < s.length(); idx++) { buf.append(toUpper(s.charAt(idx))); } return buf.toString(); } public static String toTitleString(String s) { if (s.length() == 0) { return s; } int idx = 0; char c = s.charAt(idx); if (!(c >= 'a' && c <= 'z')) { for (idx = 1; idx < s.length(); idx++) { if (c >= 'A' && c <= 'Z') { break; } } } if (idx == s.length()) { return s; } StringBuilder buf = new StringBuilder(s.substring(0, idx)); if (idx == 0) { buf.append(toUpper(s.charAt(idx))); idx++; } for (; idx < s.length(); idx++) { buf.append(toLower(s.charAt(idx))); } return buf.toString(); } public 
static boolean isAlpha(char c) { return (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z'); } public static boolean isAlphaString(String s) { boolean b = true; for (int i = 0; i < s.length(); i++) { if (!isAlpha(s.charAt(i))) { b = false; break; } } return b; } public static boolean isNumeric(char c) { return (c >= '0' && c <= '9'); } public static boolean isNumericString(String s) { boolean b = true; for (int i = 0; i < s.length(); i++) { if (!isNumeric(s.charAt(i))) { b = false; break; } } return b; } public static boolean isAlphaNumeric(char c) { return isAlpha(c) || isNumeric(c); } public static boolean isAlphaNumericString(String s) { boolean b = true; for (int i = 0; i < s.length(); i++) { if (!isAlphaNumeric(s.charAt(i))) { b = false; break; } } return b; } public static class CaseInsensitiveKey { private String _key; private int _hash; public CaseInsensitiveKey(String key) { _key = key; _hash = AsciiUtil.toLowerString(key).hashCode(); } public boolean equals(Object o) { if (this == o) { return true; } if (o instanceof CaseInsensitiveKey) { return AsciiUtil.caseIgnoreMatch(_key, ((CaseInsensitiveKey)o)._key); } return false; } public int hashCode() { return _hash; } } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.svn.integrate;

import com.intellij.configurationStore.StoreUtil;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.IconUtil;
import com.intellij.util.PlatformIcons;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.SvnBundle;
import org.jetbrains.idea.svn.SvnConfiguration;
import org.jetbrains.idea.svn.SvnUtil;
import org.jetbrains.idea.svn.SvnVcs;
import org.jetbrains.idea.svn.api.Url;
import org.jetbrains.idea.svn.branchConfig.SvnBranchMapperManager;
import org.jetbrains.idea.svn.info.Info;

import javax.swing.*;
import java.io.File;
import java.util.*;

import static com.intellij.openapi.vfs.VfsUtilCore.virtualToIoFile;
import static org.jetbrains.idea.svn.SvnUtil.isAncestor;

/**
 * Modal dialog for the "Integrate changes" SVN action: lets the user pick a
 * local working copy of the target branch (and, optionally, merge parameters
 * such as dry-run and whitespace handling) before running the integration.
 * <p>
 * Known working copies per branch URL are persisted via
 * {@link SvnBranchMapperManager}; the add/remove toolbar actions update that
 * mapping as well as the visible list.
 */
public class IntegratedSelectedOptionsDialog extends DialogWrapper {
  // Bound from the .form UI file — do not rename without updating the form.
  private JPanel contentPane;
  private JCheckBox myDryRunCheckbox;
  private JList myWorkingCopiesList;
  private JComponent myToolbar;
  private JLabel mySourceInfoLabel;
  private JLabel myTargetInfoLabel;
  private JLabel myWcListTitleLabel;
  private JCheckBox myIgnoreWhitespacesCheckBox;

  private final Project myProject;
  @NotNull private final Url mySelectedBranchUrl;
  private final SvnVcs myVcs;
  // UUID of the repository the *source* branch lives in; null when it cannot be determined.
  private final String mySelectedRepositoryUUID;

  private DefaultActionGroup myGroup;

  // When true, OK stays disabled until a working copy is selected
  // (cleared by selectWcopyRootOnly()).
  private boolean myMustSelectBeforeOk;

  /**
   * @param project           current project
   * @param currentBranch     URL of the branch changes are integrated FROM
   * @param selectedBranchUrl URL of the branch changes are integrated INTO
   */
  public IntegratedSelectedOptionsDialog(final Project project, final Url currentBranch, @NotNull Url selectedBranchUrl) {
    super(project, true);
    myMustSelectBeforeOk = true;
    myProject = project;
    mySelectedBranchUrl = selectedBranchUrl;
    myVcs = SvnVcs.getInstance(myProject);
    mySelectedRepositoryUUID = SvnUtil.getRepositoryUUID(myVcs, currentBranch);

    setTitle(SvnBundle.message("action.Subversion.integrate.changes.dialog.title"));
    init();

    myWorkingCopiesList.setModel(new DefaultListModel());
    // Keep the OK button in sync with the selection (unless selection is optional).
    myWorkingCopiesList
      .addListSelectionListener(e -> setOKActionEnabled((!myMustSelectBeforeOk) || (myWorkingCopiesList.getSelectedIndex() != -1)));
    setOKActionEnabled((! myMustSelectBeforeOk) || (myWorkingCopiesList.getSelectedIndex() != -1));

    // Populate the list with the working copies previously registered for the target branch.
    final List<WorkingCopyInfo> workingCopyInfoList = new ArrayList<>();
    final Set<String> workingCopies = SvnBranchMapperManager.getInstance().get(mySelectedBranchUrl);
    if (workingCopies != null) {
      for (String workingCopy : workingCopies) {
        workingCopyInfoList.add(new WorkingCopyInfo(workingCopy, underProject(new File(workingCopy))));
      }
    }
    Collections.sort(workingCopyInfoList, WorkingCopyInfoComparator.getInstance());
    for (WorkingCopyInfo info : workingCopyInfoList) {
      ((DefaultListModel)myWorkingCopiesList.getModel()).addElement(info);
    }
    if (!workingCopyInfoList.isEmpty()) {
      myWorkingCopiesList.setSelectedIndex(0);
    }

    SvnConfiguration svnConfig = myVcs.getSvnConfiguration();
    myDryRunCheckbox.setSelected(svnConfig.isMergeDryRun());
    myIgnoreWhitespacesCheckBox.setSelected(svnConfig.isIgnoreSpacesInMerge());

    mySourceInfoLabel.setText(SvnBundle.message("action.Subversion.integrate.changes.branch.info.source.label.text", currentBranch));
    myTargetInfoLabel
      .setText(SvnBundle.message("action.Subversion.integrate.changes.branch.info.target.label.text", selectedBranchUrl.toDecodedString()));

    // Toolbar action: let the user add a working-copy directory by hand.
    final String addText = SvnBundle.message("action.Subversion.integrate.changes.dialog.add.wc.text");
    final AnAction addAction = new DumbAwareAction(addText, addText, IconUtil.getAddIcon()) {
      {
        registerCustomShortcutSet(CommonShortcuts.INSERT, myWorkingCopiesList);
      }

      @Override
      public void actionPerformed(@NotNull final AnActionEvent e) {
        final VirtualFile vFile = FileChooser.chooseFile(FileChooserDescriptorFactory.createSingleFolderDescriptor(), myProject, null);
        if (vFile != null) {
          final File file = virtualToIoFile(vFile);
          if (hasDuplicate(file)) {
            return; // silently do not add duplicate
          }

          final String repositoryUUID = SvnUtil.getRepositoryUUID(myVcs, file);
          // local not consistent copy can not prevent us from integration: only remote local copy is really involved
          // If the chosen directory belongs to a different repository (or none), warn and
          // let the user confirm before adding it anyway.
          if ((mySelectedRepositoryUUID != null) && (! mySelectedRepositoryUUID.equals(repositoryUUID))) {
            if (Messages.OK == Messages.showOkCancelDialog(
                  (repositoryUUID == null) ? SvnBundle.message("action.Subversion.integrate.changes.message.not.under.control.text")
                                           : SvnBundle.message("action.Subversion.integrate.changes.message.another.wc.text"),
                  getTitle(), UIUtil.getWarningIcon())) {
              onOkToAdd(file);
            }
          }
          else {
            onOkToAdd(file);
          }
        }
      }
    };
    myGroup.add(addAction);

    // Toolbar action: remove the selected working copy from the list and the persistent mapping.
    final String removeText = SvnBundle.message("action.Subversion.integrate.changes.dialog.remove.wc.text");
    myGroup.add(new DumbAwareAction(removeText, removeText, PlatformIcons.DELETE_ICON) {
      {
        registerCustomShortcutSet(CommonShortcuts.getDelete(), myWorkingCopiesList);
      }

      @Override
      public void update(@NotNull final AnActionEvent e) {
        final Presentation presentation = e.getPresentation();
        // NOTE(review): the null check presumably guards against update() running
        // before the form fields are bound — confirm before removing.
        final int idx = (myWorkingCopiesList == null) ? -1 : myWorkingCopiesList.getSelectedIndex();
        presentation.setEnabled(idx != -1);
      }

      @Override
      public void actionPerformed(@NotNull final AnActionEvent e) {
        final int idx = myWorkingCopiesList.getSelectedIndex();
        if (idx != -1) {
          final DefaultListModel model = (DefaultListModel)myWorkingCopiesList.getModel();
          final WorkingCopyInfo info = (WorkingCopyInfo)model.get(idx);
          model.removeElementAt(idx);
          SvnBranchMapperManager.getInstance().remove(mySelectedBranchUrl, new File(info.getLocalPath()));
        }
      }
    });
  }

  /** Selects (and scrolls to) the list entry whose local path equals {@code path}; no-op if absent. */
  public void setSelectedWcPath(final String path) {
    final ListModel model = myWorkingCopiesList.getModel();
    final int size = model.getSize();
    for (int i = 0; i < size; i++) {
      final WorkingCopyInfo info = (WorkingCopyInfo) model.getElementAt(i);
      if (info.getLocalPath().equals(path)) {
        myWorkingCopiesList.setSelectedValue(info, true);
        return;
      }
    }
  }

  /**
   * Switches the dialog into "just pick a working copy" mode: hides the merge
   * options, changes the title, and allows OK without a list selection.
   */
  public void selectWcopyRootOnly() {
    myMustSelectBeforeOk = false;
    setTitle(SvnBundle.message("dialog.Subversion.select.working.copy.title"));
    myIgnoreWhitespacesCheckBox.setVisible(false);
    myDryRunCheckbox.setVisible(false);
    myWcListTitleLabel.setText(SvnBundle.message("dialog.Subversion.select.working.copy.wcopy.list.title"));
  }

  // Called reflectively by the UI designer runtime for custom-created components.
  private void createUIComponents() {
    myGroup = new DefaultActionGroup();
    final ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar("SvnIntegratedSelectedOptionsDialog", myGroup, false);
    myToolbar = actionToolbar.getComponent();
  }

  /** Returns true if {@code file}'s absolute path is already present in the list model. */
  private boolean hasDuplicate(final File file) {
    final DefaultListModel model = (DefaultListModel)myWorkingCopiesList.getModel();
    final String path = file.getAbsolutePath();
    for (int i = 0; i < model.getSize(); i++) {
      final WorkingCopyInfo info = (WorkingCopyInfo)model.getElementAt(i);
      if (path.equals(info.getLocalPath())) {
        return true;
      }
    }
    return false;
  }

  /** Adds {@code file} to the list, selects it, and persists it for the target branch. */
  private void onOkToAdd(final File file) {
    final WorkingCopyInfo info = new WorkingCopyInfo(file.getAbsolutePath(), underProject(file));
    final DefaultListModel model = (DefaultListModel)myWorkingCopiesList.getModel();
    model.addElement(info);
    myWorkingCopiesList.setSelectedValue(info, true);
    SvnBranchMapperManager.getInstance().put(mySelectedBranchUrl, file);
  }

  /**
   * Whether {@code file} is inside the project content.
   * NOTE(review): a path with no VirtualFile counts as "under project" here
   * (returns true when vf == null) — presumably deliberate; confirm.
   */
  private boolean underProject(final File file) {
    return ReadAction.compute(() -> {
      final VirtualFile vf = SvnUtil.getVirtualFile(file.getAbsolutePath());
      return (vf == null) || ServiceManager.getService(myProject, FileIndexFacade.class).isInContent(vf);
    });
  }

  /** @return the currently selected working copy, or null when nothing is selected */
  public WorkingCopyInfo getSelectedWc() {
    return (WorkingCopyInfo)myWorkingCopiesList.getSelectedValue();
  }

  /** Writes the dry-run / ignore-whitespace checkbox states back to the SVN configuration. */
  public void saveOptions() {
    SvnConfiguration svnConfig = myVcs.getSvnConfiguration();
    svnConfig.setMergeDryRun(myDryRunCheckbox.isSelected());
    svnConfig.setIgnoreSpacesInMerge(myIgnoreWhitespacesCheckBox.isSelected());
  }

  @Override
  protected JComponent createCenterPanel() {
    return contentPane;
  }

  /** Orders working copies alphabetically by local path (singleton). */
  public static class WorkingCopyInfoComparator implements Comparator<WorkingCopyInfo> {
    private static final WorkingCopyInfoComparator instance = new WorkingCopyInfoComparator();

    public static WorkingCopyInfoComparator getInstance() {
      return instance;
    }

    private WorkingCopyInfoComparator() {
    }

    @Override
    public int compare(final WorkingCopyInfo o1, final WorkingCopyInfo o2) {
      return o1.getLocalPath().compareTo(o2.getLocalPath());
    }
  }

  /**
   * Resolves the actual URL the working copy points at, or null when the copy
   * is not under {@code targetBranchUrl} (i.e. not a checkout of the target branch).
   */
  @Nullable
  private static Url realTargetUrl(@NotNull SvnVcs vcs, @NotNull WorkingCopyInfo info, @NotNull Url targetBranchUrl) {
    Info svnInfo = vcs.getInfo(info.getLocalPath());
    Url url = svnInfo != null ? svnInfo.getUrl() : null;
    return url != null && isAncestor(targetBranchUrl, url) ? url : null;
  }

  /**
   * Shows the dialog and returns the chosen working copy plus its resolved URL,
   * or null when the user cancels or the chosen copy is invalid (missing
   * directory / not a checkout of the target branch — an error dialog is shown
   * in those cases).
   */
  @Nullable
  public static Pair<WorkingCopyInfo, Url> selectWorkingCopy(final Project project, final Url currentBranch, @NotNull Url targetBranch,
                                                             final boolean showIntegrationParameters,
                                                             final String selectedLocalBranchPath, final String dialogTitle) {
    final IntegratedSelectedOptionsDialog dialog = new IntegratedSelectedOptionsDialog(project, currentBranch, targetBranch);
    if (!showIntegrationParameters) {
      dialog.selectWcopyRootOnly();
    }
    if (selectedLocalBranchPath != null) {
      dialog.setSelectedWcPath(selectedLocalBranchPath);
    }
    if (dialogTitle != null) {
      dialog.setTitle(dialogTitle);
    }
    if (dialog.showAndGet()) {
      // Flush unsaved documents/settings before running the (potentially long) merge.
      StoreUtil.saveDocumentsAndProjectSettings(project);
      dialog.saveOptions();
      final WorkingCopyInfo info = dialog.getSelectedWc();
      if (info != null) {
        final File file = new File(info.getLocalPath());
        if ((!file.exists()) || (!file.isDirectory())) {
          Messages.showErrorDialog(SvnBundle.message("action.Subversion.integrate.changes.error.target.not.dir.text"),
                                   SvnBundle.message("action.Subversion.integrate.changes.messages.title"));
          return null;
        }

        final Url targetUrl = realTargetUrl(SvnVcs.getInstance(project), info, targetBranch);

        if (targetUrl == null) {
          Messages.showErrorDialog(SvnBundle.message("action.Subversion.integrate.changes.error.not.versioned.text"),
                                   SvnBundle.message("action.Subversion.integrate.changes.messages.title"));
          return null;
        }
        return Pair.create(info, targetUrl);
      }
    }
    return null;
  }
}
/*
 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sbql4j8.com.sun.tools.javac.util;

import java.util.AbstractQueue;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;

/** A class for constructing lists by appending elements. Modelled after
 *  java.lang.StringBuffer.
 *  <p>
 *  Internally backed by the compiler's immutable-tail {@code List} (the
 *  javac cons-list from this package, NOT {@code java.util.List}). After
 *  {@link #toList()} hands the internal list out, the buffer is marked
 *  {@code shared} and the next mutation defensively re-copies the cells so
 *  the published list is never altered.
 *
 *  <p><b>This is NOT part of any supported API.
 *  If you write code that depends on this, you do so at your own risk.
 *  This code and its internal interfaces are subject to change or
 *  deletion without notice.</b>
 */
public class ListBuffer<A> extends AbstractQueue<A> {

    /** Convenience factory: a new buffer containing the single element {@code x}. */
    public static <T> ListBuffer<T> of(T x) {
        ListBuffer<T> lb = new ListBuffer<T>();
        lb.add(x);
        return lb;
    }

    /** The list of elements of this buffer.
     */
    private List<A> elems;

    /** A pointer pointing to the last element of 'elems' containing data,
     *  or null if the list is empty.
     */
    private List<A> last;

    /** The number of element in this buffer.
     */
    private int count;

    /** Has a list been created from this buffer yet?
     *  While true, the cells in 'elems' may be referenced by callers of
     *  toList() and must not be mutated in place.
     */
    private boolean shared;

    /** Create a new initially empty list buffer.
     */
    public ListBuffer() {
        clear();
    }

    /** Reset to the empty state. Does not touch any previously shared list. */
    public final void clear() {
        this.elems = List.nil();
        this.last = null;
        count = 0;
        shared = false;
    }

    /** Return the number of elements in this buffer.
     */
    public int length() {
        return count;
    }

    /** Same as {@link #length()}; satisfies the {@link java.util.Collection} contract. */
    public int size() {
        return count;
    }

    /** Is buffer empty?
     */
    public boolean isEmpty() {
        return count == 0;
    }

    /** Is buffer not empty?
     */
    public boolean nonEmpty() {
        return count != 0;
    }

    /** Copy list and sets last.
     *  Rebuilds fresh cons cells for every element so a list previously
     *  handed out via toList() is left untouched.
     *  NOTE(review): this does not reset 'shared', so every subsequent
     *  append re-copies the whole list (O(n) per append while shared) —
     *  looks accidental; confirm against upstream javac before changing.
     */
    private void copy() {
        if (elems.nonEmpty()) {
            List<A> orig = elems;

            elems = last = List.<A>of(orig.head);
            while ((orig = orig.tail).nonEmpty()) {
                last.tail = List.<A>of(orig.head);
                last = last.tail;
            }
        }
    }

    /** Prepend an element to buffer.
     *  Safe while shared: prepending allocates a new head cell and never
     *  mutates existing cells, so no copy() is needed.
     */
    public ListBuffer<A> prepend(A x) {
        elems = elems.prepend(x);
        if (last == null) last = elems;
        count++;
        return this;
    }

    /** Append an element to buffer.
     *  @throws NullPointerException if {@code x} is null (elements must be non-null)
     */
    public ListBuffer<A> append(A x) {
        x.getClass(); // null check
        if (shared) copy(); // protect a list already handed out by toList()
        List<A> newLast = List.<A>of(x);
        if (last != null) {
            last.tail = newLast; // in-place tail mutation of the (unshared) last cell
            last = newLast;
        } else {
            elems = last = newLast;
        }
        count++;
        return this;
    }

    /** Append all elements in a list to buffer.
     */
    public ListBuffer<A> appendList(List<A> xs) {
        while (xs.nonEmpty()) {
            append(xs.head);
            xs = xs.tail;
        }
        return this;
    }

    /** Append all elements in a list to buffer.
     */
    public ListBuffer<A> appendList(ListBuffer<A> xs) {
        return appendList(xs.toList());
    }

    /** Append all elements in an array to buffer.
     */
    public ListBuffer<A> appendArray(A[] xs) {
        for (int i = 0; i < xs.length; i++) {
            append(xs[i]);
        }
        return this;
    }

    /** Convert buffer to a list of all its elements.
     *  Marks the buffer shared: the returned list aliases the internal
     *  cells, and the next mutation will copy them first.
     */
    public List<A> toList() {
        shared = true;
        return elems;
    }

    /** Does the list contain the specified element?
     */
    public boolean contains(Object x) {
        return elems.contains(x);
    }

    /** Convert buffer to an array
     */
    public <T> T[] toArray(T[] vec) {
        return elems.toArray(vec);
    }
    public Object[] toArray() {
        return toArray(new Object[size()]);
    }

    /** The first element in this buffer.
     */
    public A first() {
        return elems.head;
    }

    /** Return first element in this buffer and remove
     *  NOTE(review): on an empty buffer this returns the head of the nil
     *  sentinel (presumably null) rather than throwing — callers appear to
     *  rely on that via poll(); confirm before tightening.
     */
    public A next() {
        A x = elems.head;
        if (!elems.isEmpty()) {
            elems = elems.tail;
            if (elems.isEmpty()) last = null;
            count--;
        }
        return x;
    }

    /** An enumeration of all elements in this buffer.
     *  Snapshots the current head; elements appended later to cells already
     *  traversed are not revisited. remove() is unsupported.
     */
    public Iterator<A> iterator() {
        return new Iterator<A>() {
            List<A> elems = ListBuffer.this.elems;
            public boolean hasNext() {
                return !elems.isEmpty();
            }
            public A next() {
                if (elems.isEmpty())
                    throw new NoSuchElementException();
                A elem = elems.head;
                elems = elems.tail;
                return elem;
            }
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /** Collection-style add; always succeeds (delegates to append). */
    public boolean add(A a) {
        append(a);
        return true;
    }

    /** Not supported — the buffer only grows or is cleared wholesale. */
    public boolean remove(Object o) {
        throw new UnsupportedOperationException();
    }

    public boolean containsAll(Collection<?> c) {
        for (Object x: c) {
            if (!contains(x))
                return false;
        }
        return true;
    }

    public boolean addAll(Collection<? extends A> c) {
        for (A a: c)
            append(a);
        return true;
    }

    /** Not supported. */
    public boolean removeAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    /** Not supported. */
    public boolean retainAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    /** Queue-style offer; always succeeds (delegates to append). */
    public boolean offer(A a) {
        append(a);
        return true;
    }

    /** Queue-style poll; see the note on {@link #next()} for empty-buffer behavior. */
    public A poll() {
        return next();
    }

    /** Queue-style peek without removal. */
    public A peek() {
        return first();
    }

    /** The last element appended, or null when the buffer is empty. */
    public A last() {
        return last != null ? last.head : null;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.model; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; import javax.xml.bind.annotation.XmlType; import org.apache.camel.Endpoint; import org.apache.camel.ErrorHandlerFactory; import org.apache.camel.ShutdownRoute; import org.apache.camel.ShutdownRunningTask; import org.apache.camel.builder.EndpointConsumerBuilder; import org.apache.camel.builder.ErrorHandlerBuilderRef; import org.apache.camel.model.rest.RestBindingDefinition; import org.apache.camel.model.rest.RestDefinition; import org.apache.camel.reifier.errorhandler.ErrorHandlerReifier; import org.apache.camel.spi.AsEndpointUri; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.RoutePolicy; /** * A Camel route */ @Metadata(label = "configuration") @XmlRootElement(name = "route") @XmlType(propOrder = {"input", "inputType", 
"outputType", "outputs", "routeProperties"}) @XmlAccessorType(XmlAccessType.PROPERTY) // must use XmlAccessType.PROPERTY as there is some custom logic needed to be executed in the setter methods public class RouteDefinition extends ProcessorDefinition<RouteDefinition> implements OutputNode { private final AtomicBoolean prepared = new AtomicBoolean(false); private FromDefinition input; private List<ProcessorDefinition<?>> outputs = new ArrayList<>(); private String group; private String streamCache; private String trace; private String messageHistory; private String logMask; private String handleFault; private String delayer; private String autoStartup; private Integer startupOrder; private List<RoutePolicy> routePolicies; private String routePolicyRef; private ShutdownRoute shutdownRoute; private ShutdownRunningTask shutdownRunningTask; private String errorHandlerRef; private ErrorHandlerFactory errorHandlerFactory; // keep state whether the error handler is context scoped or not // (will by default be context scoped of no explicit error handler configured) private boolean contextScopedErrorHandler = true; private Boolean rest; private RestDefinition restDefinition; private RestBindingDefinition restBindingDefinition; private InputTypeDefinition inputType; private OutputTypeDefinition outputType; private List<PropertyDefinition> routeProperties; public RouteDefinition() { } public RouteDefinition(@AsEndpointUri String uri) { from(uri); } public RouteDefinition(Endpoint endpoint) { from(endpoint); } /** * This route is created from the REST DSL. */ public void fromRest(@AsEndpointUri String uri) { from(uri); rest = true; } /** * Check if the route has been prepared * * @return wether the route has been prepared or not * @see RouteDefinitionHelper#prepareRoute(ModelCamelContext, RouteDefinition) */ public boolean isPrepared() { return prepared.get(); } /** * Marks the route definition as prepared. 
     * <p/>
     * This is needed if routes have been created by components such as
     * <tt>camel-spring</tt> or <tt>camel-blueprint</tt>.
     * Usually they share logic in the <tt>camel-core-xml</tt> module which prepares the routes.
     */
    public void markPrepared() {
        prepared.set(true);
    }

    /**
     * Marks the route definition as un-prepared.
     * <p/>
     * This is needed if routes have been created by components such as
     * <tt>camel-scala</tt>. To unset the prepare so the routes can be prepared
     * at a later stage when scala has built the routes completely.
     */
    public void markUnprepared() {
        prepared.set(false);
    }

    @Override
    public String toString() {
        // Include the route id when one was assigned; input may be null for an unfinished route.
        if (getId() != null) {
            return "Route(" + getId() + ")[" + (input != null ? input : "") + " -> " + outputs + "]";
        } else {
            return "Route[" + input + " -> " + outputs + "]";
        }
    }

    @Override
    public String getShortName() {
        return "route";
    }

    // Fluent API
    // -----------------------------------------------------------------------

    /**
     * Creates an input to the route
     *
     * @param uri the from uri
     * @return the builder
     */
    public RouteDefinition from(@AsEndpointUri String uri) {
        setInput(new FromDefinition(uri));
        return this;
    }

    /**
     * Creates an input to the route
     *
     * @param endpoint the from endpoint
     * @return the builder
     */
    public RouteDefinition from(Endpoint endpoint) {
        setInput(new FromDefinition(endpoint));
        return this;
    }

    /**
     * Creates an input to the route
     *
     * @param endpoint the from endpoint
     * @return the builder
     */
    public RouteDefinition from(EndpointConsumerBuilder endpoint) {
        setInput(new FromDefinition(endpoint));
        return this;
    }

    /**
     * Set the group name for this route
     *
     * @param name the group name
     * @return the builder
     */
    public RouteDefinition group(String name) {
        setGroup(name);
        return this;
    }

    /**
     * Set the route group for this route (alias of {@link #group(String)})
     *
     * @param group the route group
     * @return the builder
     */
    public RouteDefinition routeGroup(String group) {
        setGroup(group);
        return this;
    }

    /**
     * Set the route id for this route
     *
     * @param id the route id
     * @return the builder
     */
    public RouteDefinition routeId(String id) {
        // A custom id may only be assigned once; a second call is a user error.
        if (hasCustomIdAssigned()) {
            throw new IllegalArgumentException("You can only set routeId one time per route.");
        }
        setId(id);
        return this;
    }

    /**
     * Set the route description for this route
     *
     * @param description the route description
     * @return the builder
     */
    public RouteDefinition routeDescription(String description) {
        DescriptionDefinition desc = new DescriptionDefinition();
        desc.setText(description);
        setDescription(desc);
        return this;
    }

    /**
     * Disable stream caching for this route.
     *
     * @return the builder
     */
    public RouteDefinition noStreamCaching() {
        setStreamCache("false");
        return this;
    }

    /**
     * Enable stream caching for this route.
     *
     * @return the builder
     */
    public RouteDefinition streamCaching() {
        setStreamCache("true");
        return this;
    }

    /**
     * Enable stream caching for this route.
     *
     * @param streamCache whether to use stream caching (true or false), the value can be a property placeholder
     * @return the builder
     */
    public RouteDefinition streamCaching(String streamCache) {
        setStreamCache(streamCache);
        return this;
    }

    /**
     * Disable tracing for this route.
     *
     * @return the builder
     */
    public RouteDefinition noTracing() {
        setTrace("false");
        return this;
    }

    /**
     * Enable tracing for this route.
     *
     * @return the builder
     */
    public RouteDefinition tracing() {
        setTrace("true");
        return this;
    }

    /**
     * Enable tracing for this route.
     *
     * @param tracing whether to use tracing (true or false), the value can be a property placeholder
     * @return the builder
     */
    public RouteDefinition tracing(String tracing) {
        setTrace(tracing);
        return this;
    }

    /**
     * Enable message history for this route.
     *
     * @return the builder
     */
    public RouteDefinition messageHistory() {
        setMessageHistory("true");
        return this;
    }

    /**
     * Enable message history for this route.
     *
     * @param messageHistory whether to use message history (true or false), the value can be a property placeholder
     * @return the builder
     */
    public RouteDefinition messageHistory(String messageHistory) {
        setMessageHistory(messageHistory);
        return this;
    }

    /**
     * Enable security mask for Logging on this route.
     *
     * @return the builder
     */
    public RouteDefinition logMask() {
        setLogMask("true");
        return this;
    }

    /**
     * Sets whether security mask for logging is enabled on this route.
     *
     * @param logMask whether to enable security mask for Logging (true or false), the value can be a property placeholder
     * @return the builder
     */
    public RouteDefinition logMask(String logMask) {
        setLogMask(logMask);
        return this;
    }

    /**
     * Disable message history for this route.
     *
     * @return the builder
     */
    public RouteDefinition noMessageHistory() {
        setMessageHistory("false");
        return this;
    }

    /**
     * Disable handle fault for this route.
     *
     * @return the builder
     */
    public RouteDefinition noHandleFault() {
        setHandleFault("false");
        return this;
    }

    /**
     * Enable handle fault for this route.
     *
     * @return the builder
     */
    public RouteDefinition handleFault() {
        setHandleFault("true");
        return this;
    }

    /**
     * Disable delayer for this route.
     *
     * @return the builder
     */
    public RouteDefinition noDelayer() {
        // A delay of 0 means "no delay", i.e. the delayer is effectively off.
        setDelayer("0");
        return this;
    }

    /**
     * Enable delayer for this route.
     *
     * @param delay delay in millis
     * @return the builder
     */
    public RouteDefinition delayer(long delay) {
        setDelayer("" + delay);
        return this;
    }

    /**
     * Installs the given <a href="http://camel.apache.org/error-handler.html">error handler</a> builder.
     *
     * @param errorHandlerBuilder the error handler to be used by default for all child routes
     * @return the current builder with the error handler configured
     */
    public RouteDefinition errorHandler(ErrorHandlerFactory errorHandlerBuilder) {
        setErrorHandlerFactory(errorHandlerBuilder);
        // we are now using a route scoped error handler
        contextScopedErrorHandler = false;
        return this;
    }

    /**
     * Disables this route from being auto started when Camel starts.
     *
     * @return the builder
     */
    public RouteDefinition noAutoStartup() {
        setAutoStartup("false");
        return this;
    }

    /**
     * Sets the auto startup property on this route.
     *
     * @param autoStartup whether to auto startup (true or false), the value can be a property placeholder
     * @return the builder
     */
    public RouteDefinition autoStartup(String autoStartup) {
        setAutoStartup(autoStartup);
        return this;
    }

    /**
     * Sets the auto startup property on this route.
     *
     * @param autoStartup - boolean indicator
     * @return the builder
     */
    public RouteDefinition autoStartup(boolean autoStartup) {
        setAutoStartup(Boolean.toString(autoStartup));
        return this;
    }

    /**
     * Configures the startup order for this route
     * <p/>
     * Camel will reorder routes and start them ordered by 0..N where 0 is the lowest number and N the highest number.
     * Camel will stop routes in reverse order when it is stopping.
     *
     * @param order the order represented as a number
     * @return the builder
     */
    public RouteDefinition startupOrder(int order) {
        setStartupOrder(order);
        return this;
    }

    /**
     * Configures route policies for this route
     *
     * @param policies the route policies
     * @return the builder
     */
    public RouteDefinition routePolicy(RoutePolicy... policies) {
        // Lazily created: most routes have no policies.
        if (routePolicies == null) {
            routePolicies = new ArrayList<>();
        }
        for (RoutePolicy policy : policies) {
            routePolicies.add(policy);
        }
        return this;
    }

    /**
     * Configures a route policy for this route
     *
     * @param routePolicyRef reference to a {@link RoutePolicy} to lookup and use.
     *                       You can specify multiple references by separating using comma.
* @return the builder */ public RouteDefinition routePolicyRef(String routePolicyRef) { setRoutePolicyRef(routePolicyRef); return this; } /** * Configures a shutdown route option. * * @param shutdownRoute the option to use when shutting down this route * @return the builder */ public RouteDefinition shutdownRoute(ShutdownRoute shutdownRoute) { setShutdownRoute(shutdownRoute); return this; } /** * Configures a shutdown running task option. * * @param shutdownRunningTask the option to use when shutting down and how to act upon running tasks. * @return the builder */ public RouteDefinition shutdownRunningTask(ShutdownRunningTask shutdownRunningTask) { setShutdownRunningTask(shutdownRunningTask); return this; } /** * Declare the expected data type of the input message. If the actual message type is different * at runtime, camel look for a required {@link org.apache.camel.spi.Transformer} and apply if exists. * The type name consists of two parts, 'scheme' and 'name' connected with ':'. For Java type 'name' * is a fully qualified class name. For example {@code java:java.lang.String}, {@code json:ABCOrder}. * * @see org.apache.camel.spi.Transformer * * @param urn input type URN * @return the builder */ public RouteDefinition inputType(String urn) { inputType = new InputTypeDefinition(); inputType.setUrn(urn); inputType.setValidate(false); return this; } /** * Declare the expected data type of the input message with content validation enabled. * If the actual message type is different at runtime, camel look for a required * {@link org.apache.camel.spi.Transformer} and apply if exists, and then applies * {@link org.apache.camel.spi.Validator} as well. * The type name consists of two parts, 'scheme' and 'name' connected with ':'. For Java type 'name' * is a fully qualified class name. For example {@code java:java.lang.String}, {@code json:ABCOrder}. 
* * @see org.apache.camel.spi.Transformer * @see org.apache.camel.spi.Validator * * @param urn input type URN * @return the builder */ public RouteDefinition inputTypeWithValidate(String urn) { inputType = new InputTypeDefinition(); inputType.setUrn(urn); inputType.setValidate(true); return this; } /** * Declare the expected data type of the input message by Java class. * If the actual message type is different at runtime, camel look for a required * {@link org.apache.camel.spi.Transformer} and apply if exists. * * @see org.apache.camel.spi.Transformer * * @param clazz Class object of the input type * @return the builder */ public RouteDefinition inputType(Class clazz) { inputType = new InputTypeDefinition(); inputType.setJavaClass(clazz); inputType.setValidate(false); return this; } /** * Declare the expected data type of the input message by Java class with content validation enabled. * If the actual message type is different at runtime, camel look for a required * {@link org.apache.camel.spi.Transformer} and apply if exists, and then applies * {@link org.apache.camel.spi.Validator} as well. * * @see org.apache.camel.spi.Transformer * @see org.apache.camel.spi.Validator * * @param clazz Class object of the input type * @return the builder */ public RouteDefinition inputTypeWithValidate(Class clazz) { inputType = new InputTypeDefinition(); inputType.setJavaClass(clazz); inputType.setValidate(true); return this; } /** * Declare the expected data type of the output message. If the actual message type is different * at runtime, camel look for a required {@link org.apache.camel.spi.Transformer} and apply if exists. * The type name consists of two parts, 'scheme' and 'name' connected with ':'. For Java type 'name' * is a fully qualified class name. For example {@code java:java.lang.String}, {@code json:ABCOrder}. 
* * @see org.apache.camel.spi.Transformer * * @param urn output type URN * @return the builder */ public RouteDefinition outputType(String urn) { outputType = new OutputTypeDefinition(); outputType.setUrn(urn); outputType.setValidate(false); return this; } /** * Declare the expected data type of the output message with content validation enabled. * If the actual message type is different at runtime, Camel look for a required * {@link org.apache.camel.spi.Transformer} and apply if exists, and then applies * {@link org.apache.camel.spi.Validator} as well. * The type name consists of two parts, 'scheme' and 'name' connected with ':'. For Java type 'name' * is a fully qualified class name. For example {@code java:java.lang.String}, {@code json:ABCOrder}. * * @see org.apache.camel.spi.Transformer * @see org.apache.camel.spi.Validator * * @param urn output type URN * @return the builder */ public RouteDefinition outputTypeWithValidate(String urn) { outputType = new OutputTypeDefinition(); outputType.setUrn(urn); outputType.setValidate(true); return this; } /** * Declare the expected data type of the output message by Java class. * If the actual message type is different at runtime, camel look for a required * {@link org.apache.camel.spi.Transformer} and apply if exists. * * @see org.apache.camel.spi.Transformer * * @param clazz Class object of the output type * @return the builder */ public RouteDefinition outputType(Class clazz) { outputType = new OutputTypeDefinition(); outputType.setJavaClass(clazz); outputType.setValidate(false); return this; } /** * Declare the expected data type of the ouput message by Java class with content validation enabled. * If the actual message type is different at runtime, camel look for a required * {@link org.apache.camel.spi.Transformer} and apply if exists, and then applies * {@link org.apache.camel.spi.Validator} as well. 
* * @see org.apache.camel.spi.Transformer * @see org.apache.camel.spi.Validator * @param clazz Class object of the output type * @return the builder */ public RouteDefinition outputTypeWithValidate(Class clazz) { outputType = new OutputTypeDefinition(); outputType.setJavaClass(clazz); outputType.setValidate(true); return this; } /** * Adds a custom property on the route. */ public RouteDefinition routeProperty(String key, String value) { if (routeProperties == null) { routeProperties = new ArrayList<>(); } PropertyDefinition prop = new PropertyDefinition(); prop.setKey(key); prop.setValue(value); routeProperties.add(prop); return this; } // Properties // ----------------------------------------------------------------------- public FromDefinition getInput() { return input; } /** * Input to the route. */ @XmlElementRef(required = false) public void setInput(FromDefinition input) { // required = false: in rest-dsl you can embed an in-lined route which // does not have a <from> as its implied to be the rest endpoint this.input = input; } public List<ProcessorDefinition<?>> getOutputs() { return outputs; } /** * Outputs are processors that determines how messages are processed by this route. */ @XmlElementRef public void setOutputs(List<ProcessorDefinition<?>> outputs) { this.outputs = outputs; if (outputs != null) { for (ProcessorDefinition<?> output : outputs) { configureChild(output); } } } /** * The group that this route belongs to; could be the name of the RouteBuilder class * or be explicitly configured in the XML. * <p/> * May be null. */ public String getGroup() { return group; } /** * The group that this route belongs to; could be the name of the RouteBuilder class * or be explicitly configured in the XML. * <p/> * May be null. */ @XmlAttribute public void setGroup(String group) { this.group = group; } /** * Whether stream caching is enabled on this route. 
*/ public String getStreamCache() { return streamCache; } /** * Whether stream caching is enabled on this route. */ @XmlAttribute public void setStreamCache(String streamCache) { this.streamCache = streamCache; } /** * Whether tracing is enabled on this route. */ public String getTrace() { return trace; } /** * Whether tracing is enabled on this route. */ @XmlAttribute public void setTrace(String trace) { this.trace = trace; } /** * Whether message history is enabled on this route. */ public String getMessageHistory() { return messageHistory; } /** * Whether message history is enabled on this route. */ @XmlAttribute @Metadata(defaultValue = "true") public void setMessageHistory(String messageHistory) { this.messageHistory = messageHistory; } /** * Whether security mask for Logging is enabled on this route. */ public String getLogMask() { return logMask; } /** * Whether security mask for Logging is enabled on this route. */ @XmlAttribute public void setLogMask(String logMask) { this.logMask = logMask; } /** * Whether handle fault is enabled on this route. */ public String getHandleFault() { return handleFault; } /** * Whether handle fault is enabled on this route. */ @XmlAttribute public void setHandleFault(String handleFault) { this.handleFault = handleFault; } /** * Whether to slow down processing messages by a given delay in msec. */ public String getDelayer() { return delayer; } /** * Whether to slow down processing messages by a given delay in msec. 
*/ @XmlAttribute public void setDelayer(String delayer) { this.delayer = delayer; } /** * Whether to auto start this route */ public String getAutoStartup() { return autoStartup; } /** * Whether to auto start this route */ @XmlAttribute @Metadata(defaultValue = "true") public void setAutoStartup(String autoStartup) { this.autoStartup = autoStartup; } /** * To configure the ordering of the routes being started */ public Integer getStartupOrder() { return startupOrder; } /** * To configure the ordering of the routes being started */ @XmlAttribute public void setStartupOrder(Integer startupOrder) { this.startupOrder = startupOrder; } /** * Sets the bean ref name of the error handler builder to use on this route */ @XmlAttribute public void setErrorHandlerRef(String errorHandlerRef) { this.errorHandlerRef = errorHandlerRef; // we use an specific error handler ref (from Spring DSL) then wrap that // with a error handler build ref so Camel knows its not just the default one setErrorHandlerFactory(new ErrorHandlerBuilderRef(errorHandlerRef)); } /** * Sets the bean ref name of the error handler builder to use on this route */ public String getErrorHandlerRef() { return errorHandlerRef; } /** * Sets the error handler if one is not already set */ public void setErrorHandlerFactoryIfNull(ErrorHandlerFactory errorHandlerFactory) { if (this.errorHandlerFactory == null) { setErrorHandlerFactory(errorHandlerFactory); } } /** * Reference to custom {@link org.apache.camel.spi.RoutePolicy} to use by the route. * Multiple policies can be configured by separating values using comma. */ @XmlAttribute public void setRoutePolicyRef(String routePolicyRef) { this.routePolicyRef = routePolicyRef; } /** * Reference to custom {@link org.apache.camel.spi.RoutePolicy} to use by the route. * Multiple policies can be configured by separating values using comma. 
*/ public String getRoutePolicyRef() { return routePolicyRef; } public List<RoutePolicy> getRoutePolicies() { return routePolicies; } @XmlTransient public void setRoutePolicies(List<RoutePolicy> routePolicies) { this.routePolicies = routePolicies; } public ShutdownRoute getShutdownRoute() { return shutdownRoute; } /** * To control how to shutdown the route. */ @XmlAttribute @Metadata(defaultValue = "Default") public void setShutdownRoute(ShutdownRoute shutdownRoute) { this.shutdownRoute = shutdownRoute; } /** * To control how to shutdown the route. */ public ShutdownRunningTask getShutdownRunningTask() { return shutdownRunningTask; } /** * To control how to shutdown the route. */ @XmlAttribute @Metadata(defaultValue = "CompleteCurrentTaskOnly") public void setShutdownRunningTask(ShutdownRunningTask shutdownRunningTask) { this.shutdownRunningTask = shutdownRunningTask; } private ErrorHandlerFactory createErrorHandlerBuilder() { if (errorHandlerRef != null) { return new ErrorHandlerBuilderRef(errorHandlerRef); } // return a reference to the default error handler return new ErrorHandlerBuilderRef(ErrorHandlerReifier.DEFAULT_ERROR_HANDLER_BUILDER); } @XmlTransient public ErrorHandlerFactory getErrorHandlerFactory() { if (errorHandlerFactory == null) { errorHandlerFactory = createErrorHandlerBuilder(); } return errorHandlerFactory; } /** * Sets the error handler to use with processors created by this builder */ public void setErrorHandlerFactory(ErrorHandlerFactory errorHandlerFactory) { this.errorHandlerFactory = errorHandlerFactory; } @XmlAttribute public Boolean isRest() { return rest; } public RestDefinition getRestDefinition() { return restDefinition; } @XmlTransient public void setRestDefinition(RestDefinition restDefinition) { this.restDefinition = restDefinition; } public RestBindingDefinition getRestBindingDefinition() { return restBindingDefinition; } @XmlTransient public void setRestBindingDefinition(RestBindingDefinition restBindingDefinition) { 
this.restBindingDefinition = restBindingDefinition; } public boolean isContextScopedErrorHandler() { return contextScopedErrorHandler; } @XmlElementRef(required = false) public void setInputType(InputTypeDefinition inputType) { this.inputType = inputType; } public InputTypeDefinition getInputType() { return this.inputType; } @XmlElementRef(required = false) public void setOutputType(OutputTypeDefinition outputType) { this.outputType = outputType; } public OutputTypeDefinition getOutputType() { return this.outputType; } public List<PropertyDefinition> getRouteProperties() { return routeProperties; } /** * To set metadata as properties on the route. */ @XmlElement(name = "routeProperty") @Metadata(label = "advanced") public void setRouteProperties(List<PropertyDefinition> routeProperties) { this.routeProperties = routeProperties; } // **************************** // Static helpers // **************************** public static RouteDefinition fromUri(String uri) { return new RouteDefinition().from(uri); } public static RouteDefinition fromEndpoint(Endpoint endpoint) { return new RouteDefinition().from(endpoint); } }
/*
 * Copyright 2014 Soichiro Kashima
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.github.ksoichiro.android.observablescrollview;

import android.content.Context;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ScrollView;

/**
 * ScrollView that its scroll position can be observed.
 */
public class ObservableScrollView extends ScrollView implements Scrollable {

    // Fields that should be saved onSaveInstanceState
    private int mPrevScrollY;
    private int mScrollY;

    // Fields that don't need to be saved onSaveInstanceState
    private ObservableScrollViewCallbacks mCallbacks;
    private ScrollState mScrollState;
    private boolean mFirstScroll;
    private boolean mDragging;
    private boolean mIntercepted;
    private MotionEvent mPrevMoveEvent;
    private ViewGroup mTouchInterceptionViewGroup;

    public ObservableScrollView(Context context) {
        super(context);
    }

    public ObservableScrollView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public ObservableScrollView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        // Restore the two scroll positions persisted by onSaveInstanceState.
        SavedState ss = (SavedState) state;
        mPrevScrollY = ss.prevScrollY;
        mScrollY = ss.scrollY;
        super.onRestoreInstanceState(ss.getSuperState());
    }

    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        SavedState ss = new SavedState(superState);
        ss.prevScrollY = mPrevScrollY;
        ss.scrollY = mScrollY;
        return ss;
    }

    @Override
    protected void onScrollChanged(int l, int t, int oldl, int oldt) {
        super.onScrollChanged(l, t, oldl, oldt);
        if (mCallbacks != null) {
            mScrollY = t;

            mCallbacks.onScrollChanged(t, mFirstScroll, mDragging);
            if (mFirstScroll) {
                mFirstScroll = false;
            }

            // Derive scroll direction by comparing with the previous Y position.
            if (mPrevScrollY < t) {
                mScrollState = ScrollState.UP;
            } else if (t < mPrevScrollY) {
                mScrollState = ScrollState.DOWN;
            //} else {
                // Keep previous state while dragging.
                // Never makes it STOP even if scrollY not changed.
                // Before Android 4.4, onTouchEvent calls onScrollChanged directly for ACTION_MOVE,
                // which makes mScrollState always STOP when onUpOrCancelMotionEvent is called.
                // STOP state is now meaningless for ScrollView.
            }
            mPrevScrollY = t;
        }
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        if (mCallbacks != null) {
            switch (ev.getActionMasked()) {
                case MotionEvent.ACTION_DOWN:
                    // Whether or not motion events are consumed by children,
                    // flag initializations which are related to ACTION_DOWN events should be executed.
                    // Because if the ACTION_DOWN is consumed by children and only ACTION_MOVEs are
                    // passed to parent (this view), the flags will be invalid.
                    // Also, applications might implement initialization codes to onDownMotionEvent,
                    // so call it here.
                    mFirstScroll = mDragging = true;
                    mCallbacks.onDownMotionEvent();
                    break;
            }
        }
        return super.onInterceptTouchEvent(ev);
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        if (mCallbacks != null) {
            switch (ev.getActionMasked()) {
                case MotionEvent.ACTION_UP:
                case MotionEvent.ACTION_CANCEL:
                    // Gesture ended: clear drag state and notify with the last known direction.
                    mIntercepted = false;
                    mDragging = false;
                    mCallbacks.onUpOrCancelMotionEvent(mScrollState);
                    break;
                case MotionEvent.ACTION_MOVE:
                    if (mPrevMoveEvent == null) {
                        mPrevMoveEvent = ev;
                    }
                    float diffY = ev.getY() - mPrevMoveEvent.getY();
                    mPrevMoveEvent = MotionEvent.obtainNoHistory(ev);
                    if (getCurrentScrollY() - diffY <= 0) {
                        // Can't scroll anymore.

                        if (mIntercepted) {
                            // Already dispatched ACTION_DOWN event to parents, so stop here.
                            return false;
                        }

                        // Apps can set the interception target other than the direct parent.
                        final ViewGroup parent;
                        if (mTouchInterceptionViewGroup == null) {
                            parent = (ViewGroup) getParent();
                        } else {
                            parent = mTouchInterceptionViewGroup;
                        }

                        // Get offset to parents. If the parent is not the direct parent,
                        // we should aggregate offsets from all of the parents.
                        float offsetX = 0;
                        float offsetY = 0;
                        for (View v = this; v != null && v != parent; v = (View) v.getParent()) {
                            offsetX += v.getLeft() - v.getScrollX();
                            offsetY += v.getTop() - v.getScrollY();
                        }
                        final MotionEvent event = MotionEvent.obtainNoHistory(ev);
                        event.offsetLocation(offsetX, offsetY);

                        if (parent.onInterceptTouchEvent(event)) {
                            mIntercepted = true;

                            // If the parent wants to intercept ACTION_MOVE events,
                            // we pass ACTION_DOWN event to the parent
                            // as if these touch events just have began now.
                            event.setAction(MotionEvent.ACTION_DOWN);

                            // Return this onTouchEvent() first and set ACTION_DOWN event for parent
                            // to the queue, to keep events sequence.
                            post(new Runnable() {
                                @Override
                                public void run() {
                                    parent.dispatchTouchEvent(event);
                                }
                            });
                            return false;
                        }
                        // Even when this can't be scrolled anymore,
                        // simply returning false here may cause subView's click,
                        // so delegate it to super.
                        return super.onTouchEvent(ev);
                    }
                    break;
            }
        }
        return super.onTouchEvent(ev);
    }

    @Override
    public void setScrollViewCallbacks(ObservableScrollViewCallbacks listener) {
        mCallbacks = listener;
    }

    @Override
    public void setTouchInterceptionViewGroup(ViewGroup viewGroup) {
        mTouchInterceptionViewGroup = viewGroup;
    }

    @Override
    public void scrollVerticallyTo(int y) {
        scrollTo(0, y);
    }

    @Override
    public int getCurrentScrollY() {
        return mScrollY;
    }

    /**
     * Parcelable state holder for the two scroll positions that must survive configuration changes.
     */
    static class SavedState extends BaseSavedState {
        int prevScrollY;
        int scrollY;

        /**
         * Called by onSaveInstanceState.
         */
        private SavedState(Parcelable superState) {
            super(superState);
        }

        /**
         * Called by CREATOR.
         */
        private SavedState(Parcel in) {
            super(in);
            prevScrollY = in.readInt();
            scrollY = in.readInt();
        }

        @Override
        public void writeToParcel(Parcel out, int flags) {
            super.writeToParcel(out, flags);
            // Order must match the CREATOR constructor's reads.
            out.writeInt(prevScrollY);
            out.writeInt(scrollY);
        }

        public static final Parcelable.Creator<SavedState> CREATOR =
                new Parcelable.Creator<SavedState>() {
                    @Override
                    public SavedState createFromParcel(Parcel in) {
                        return new SavedState(in);
                    }

                    @Override
                    public SavedState[] newArray(int size) {
                        return new SavedState[size];
                    }
                };
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.ui.tree; import com.intellij.ide.util.treeView.AbstractTreeBuilder; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.ActionCallback; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.ui.ScrollingUtil; import com.intellij.ui.SimpleColoredComponent; import com.intellij.ui.awt.RelativePoint; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.Range; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.plaf.basic.BasicTreeUI; import javax.swing.tree.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.util.*; import java.util.List; public final class TreeUtil { private static final Logger LOG = Logger.getInstance("#com.intellij.util.ui.tree.TreeUtil"); @NonNls @NotNull private static final String TREE_UTIL_SCROLL_TIME_STAMP = "TreeUtil.scrollTimeStamp"; private TreeUtil() {} /** * @param tree JTree to collect expanded paths from. * @param paths output parameter. 
*/ public static void collectExpandedPaths(@NotNull final JTree tree, @NotNull final List<TreePath> paths){ final TreeModel model = tree.getModel(); final Object root = model.getRoot(); LOG.assertTrue(root != null); collectExpandedPathsImpl(tree, paths, new TreePath(root)); } @NotNull public static List<TreePath> collectExpandedPaths(@NotNull final JTree tree){ final ArrayList<TreePath> result = new ArrayList<TreePath>(); final Object root = tree.getModel().getRoot(); final TreePath rootPath = new TreePath(root); result.addAll(collectExpandedPaths(tree, rootPath)); return result; } @NotNull public static <T> List<T> collectSelectedObjectsOfType(@NotNull JTree tree, @NotNull Class<T> clazz) { final TreePath[] selections = tree.getSelectionPaths(); if (selections != null) { final ArrayList<T> result = new ArrayList<T>(); for (TreePath selection : selections) { final DefaultMutableTreeNode node = (DefaultMutableTreeNode)selection.getLastPathComponent(); final Object userObject = node.getUserObject(); if (clazz.isInstance(userObject)) { //noinspection unchecked result.add((T)userObject); } } return result; } return Collections.emptyList(); } @NotNull public static List<TreePath> collectExpandedPaths(@NotNull final JTree tree, @NotNull TreePath path){ final ArrayList<TreePath> result = new ArrayList<TreePath>(); if (!tree.isExpanded(path)) return result; final Object lastPathComponent = path.getLastPathComponent(); final TreeModel model = tree.getModel(); if (model.isLeaf(lastPathComponent)) { result.add(path); } else { boolean pathWasAdded = false; for(int i = model.getChildCount(lastPathComponent) - 1; i >= 0 ; i--){ final TreePath childPath = path.pathByAddingChild(model.getChild(lastPathComponent, i)); if (model.isLeaf(lastPathComponent)) { if (!pathWasAdded) { result.add(path); pathWasAdded= true; } } else if (tree.isExpanded(childPath)) { result.addAll(collectExpandedPaths(tree, childPath)); } else { if (!pathWasAdded) { result.add(path); pathWasAdded= true; } } } 
} return result; } private static boolean collectExpandedPathsImpl(@NotNull final JTree tree, @NotNull final Collection<TreePath> paths, @NotNull final TreePath path){ final TreeModel model = tree.getModel(); final Object lastPathComponent = path.getLastPathComponent(); if(model.isLeaf(lastPathComponent)){ return false; } boolean hasExpandedChildren = false; for(int i = model.getChildCount(lastPathComponent) - 1; i >= 0 ; i--){ hasExpandedChildren |= collectExpandedPathsImpl(tree, paths, path.pathByAddingChild(model.getChild(lastPathComponent, i))); } if(!hasExpandedChildren){ paths.add(path); return true; } else{ return false; } } /** * Expands specified paths. * @param tree JTree to apply expansion status to * @param paths to expand. See {@link #collectExpandedPaths(javax.swing.JTree, java.util.List)} */ public static void restoreExpandedPaths(@NotNull final JTree tree, @NotNull final List<TreePath> paths){ for(int i = paths.size() - 1; i >= 0; i--){ tree.expandPath(paths.get(i)); } } @NotNull public static TreePath getPath(final TreeNode aRootNode, @NotNull final TreeNode aNode) { final List<TreeNode> pathStack = new ArrayList<TreeNode>(); addEach(aRootNode, aNode, pathStack); final Object[] pathElements = new Object[pathStack.size()]; for (int i = pathStack.size() - 1; i >= 0; i--) { pathElements[pathStack.size() - i - 1] = pathStack.get(i); } return new TreePath(pathElements); } public static boolean isAncestor(final TreeNode ancestor, final TreeNode node) { TreeNode parent = node; while (parent != null) { if (parent == ancestor) return true; parent = parent.getParent(); } return false; } private static boolean isAncestor(@NotNull final TreePath ancestor, @NotNull final TreePath path) { if (path.getPathCount() < ancestor.getPathCount()) return false; for (int i = 0; i < ancestor.getPathCount(); i++) if (!path.getPathComponent(i).equals(ancestor.getPathComponent(i))) return false; return true; } private static boolean isDescendants(@NotNull final TreePath path, 
                                       @NotNull final TreePath[] paths) {
    for (final TreePath ancestor : paths) {
      if (isAncestor(ancestor, path)) return true;
    }
    return false;
  }

  /** Builds the path from the tree root down to {@code node} by following parent links. */
  @NotNull
  public static TreePath getPathFromRoot(@NotNull TreeNode node) {
    final ArrayList<TreeNode> path = new ArrayList<TreeNode>();
    do {
      path.add(node);
      node = node.getParent();
    }
    while (node != null);
    Collections.reverse(path);
    return new TreePath(path.toArray());
  }

  /**
   * Looks only among the direct children of {@code parent} for a node whose user object
   * equals {@code object}; returns null when none matches.
   * NOTE(review): children are cast to DefaultMutableTreeNode and a null user object
   * would throw NPE here — presumably guaranteed by callers; verify.
   */
  @Nullable
  public static TreeNode findNodeWithObject(final Object object, @NotNull final TreeModel model, final Object parent) {
    for (int i = 0; i < model.getChildCount(parent); i++) {
      final DefaultMutableTreeNode childNode = (DefaultMutableTreeNode) model.getChild(parent, i);
      if (childNode.getUserObject().equals(object)) return childNode;
    }
    return null;
  }

  /**
   * Removes last component in the current selection path.
   * @param tree to remove selected node from.
   */
  public static void removeSelected(@NotNull final JTree tree) {
    TreePath[] paths = tree.getSelectionPaths();
    if (paths == null) {
      return;
    }
    for (TreePath path : paths) {
      // Remove each selected node, restoring a sensible selection afterwards.
      removeLastPathComponent((DefaultTreeModel) tree.getModel(), path).restoreSelection(tree);
    }
  }

  /** Removes the node at {@code pathToBeRemoved} from the tree's model and restores selection. */
  public static void removeLastPathComponent(@NotNull final JTree tree, @NotNull final TreePath pathToBeRemoved){
    removeLastPathComponent((DefaultTreeModel)tree.getModel(), pathToBeRemoved).restoreSelection(tree);
  }

  /** Depth-first search of the subtree rooted at {@code aRoot} for a node whose user object equals {@code aObject}. */
  @Nullable
  public static DefaultMutableTreeNode findNodeWithObject(@NotNull final DefaultMutableTreeNode aRoot, final Object aObject) {
    if (Comparing.equal(aRoot.getUserObject(), aObject)) {
      return aRoot;
    }
    else {
      for (int i = 0; i < aRoot.getChildCount(); i++) {
        final DefaultMutableTreeNode candidate = findNodeWithObject((DefaultMutableTreeNode) aRoot.getChildAt(i), aObject);
        if (null != candidate) {
          return candidate;
        }
      }
      return null;
    }
  }

  /**
   * Returns the longest common prefix path of the given paths.
   * Precondition (asserted): all paths share at least the first component.
   */
  @NotNull
  public static TreePath findCommonPath(@NotNull final TreePath[] treePaths) {
    LOG.assertTrue(areComponentsEqual(treePaths, 0));
    TreePath result = new TreePath(treePaths[0].getPathComponent(0));
    int
        pathIndex = 1;
    // Keep appending components while every path still agrees at that index.
    while (areComponentsEqual(treePaths, pathIndex)) {
      result = result.pathByAddingChild(treePaths[0].getPathComponent(pathIndex));
      pathIndex++;
    }
    return result;
  }

  /** Selects the first visible node (the root, or its first child when the root is hidden). */
  @NotNull
  public static ActionCallback selectFirstNode(@NotNull JTree tree) {
    TreePath selectionPath = getFirstNodePath(tree);
    return selectPath(tree, selectionPath);
  }

  /** Returns the path of the first node the user can see: the root, or its first child when the root is hidden. */
  @NotNull
  public static TreePath getFirstNodePath(@NotNull JTree tree) {
    final TreeModel model = tree.getModel();
    final Object root = model.getRoot();
    TreePath selectionPath = new TreePath(root);
    if (!tree.isRootVisible() && model.getChildCount(root) > 0) {
      selectionPath = selectionPath.pathByAddingChild(model.getChild(root, 0));
    }
    return selectionPath;
  }

  /** Follows first children from the root down to the first leaf and returns that path. */
  @NotNull
  public static TreePath getFirstLeafNodePath(@NotNull JTree tree) {
    final TreeModel model = tree.getModel();
    Object root = model.getRoot();
    TreePath selectionPath = new TreePath(root);
    while (model.getChildCount(root) > 0) {
      final Object child = model.getChild(root, 0);
      selectionPath = selectionPath.pathByAddingChild(child);
      root = child;
    }
    return selectionPath;
  }

  /**
   * Collects {@code aNode} and its ancestors (up to and including {@code aRootNode})
   * into {@code aPathStack}, deepest node first.
   */
  private static void addEach(final TreeNode aRootNode, @NotNull final TreeNode aNode, @NotNull final List<TreeNode> aPathStack) {
    aPathStack.add(aNode);
    if (aNode != aRootNode) {
      addEach(aRootNode, aNode.getParent(), aPathStack);
    }
  }

  /**
   * Removes the last node of {@code pathToBeRemoved} from {@code model} and returns the
   * selection state captured before the removal, so callers can restore selection.
   * A node without a parent (e.g. the root) is left untouched.
   */
  @NotNull
  private static IndexTreePathState removeLastPathComponent(@NotNull final DefaultTreeModel model, @NotNull final TreePath pathToBeRemoved) {
    final IndexTreePathState selectionState = new IndexTreePathState(pathToBeRemoved);
    if (((MutableTreeNode) pathToBeRemoved.getLastPathComponent()).getParent() == null) return selectionState;
    model.removeNodeFromParent((MutableTreeNode)pathToBeRemoved.getLastPathComponent());
    return selectionState;
  }

  /** True when every path has a component at {@code componentIndex} and those components are all equal. */
  private static boolean areComponentsEqual(@NotNull final TreePath[] paths, final int componentIndex) {
    if (paths[0].getPathCount() <= componentIndex) return false;
    final Object pathComponent = paths[0].getPathComponent(componentIndex);
    for
(final TreePath treePath : paths) { if (treePath.getPathCount() <= componentIndex) return false; if (!pathComponent.equals(treePath.getPathComponent(componentIndex))) return false; } return true; } @NotNull private static TreePath[] removeDuplicates(@NotNull final TreePath[] paths) { final ArrayList<TreePath> result = new ArrayList<TreePath>(); for (final TreePath path : paths) { if (!result.contains(path)) result.add(path); } return result.toArray(new TreePath[result.size()]); } @NotNull public static TreePath[] selectMaximals(@Nullable final TreePath[] paths) { if (paths == null) return new TreePath[0]; final TreePath[] noDuplicates = removeDuplicates(paths); final ArrayList<TreePath> result = new ArrayList<TreePath>(); for (final TreePath path : noDuplicates) { final ArrayList<TreePath> otherPaths = new ArrayList<TreePath>(Arrays.asList(noDuplicates)); otherPaths.remove(path); if (!isDescendants(path, otherPaths.toArray(new TreePath[otherPaths.size()]))) result.add(path); } return result.toArray(new TreePath[result.size()]); } public static void sort(@NotNull final DefaultTreeModel model, final Comparator comparator) { sort((DefaultMutableTreeNode) model.getRoot(), comparator); } public static void sort(@NotNull final DefaultMutableTreeNode node, final Comparator comparator) { final List<TreeNode> children = childrenToArray(node); Collections.sort(children, comparator); node.removeAllChildren(); addChildrenTo(node, children); for (int i = 0; i < node.getChildCount(); i++) { sort((DefaultMutableTreeNode) node.getChildAt(i), comparator); } } public static void addChildrenTo(@NotNull final MutableTreeNode node, @NotNull final List<TreeNode> children) { for (final Object aChildren : children) { final MutableTreeNode child = (MutableTreeNode)aChildren; node.insert(child, node.getChildCount()); } } public static boolean traverse(@NotNull final TreeNode node, @NotNull final Traverse traverse) { final int childCount = node.getChildCount(); for (int i = 0; i < 
                childCount; i++){
      if (!traverse(node.getChildAt(i), traverse)) return false;
    }
    return traverse.accept(node);
  }

  /** Pre-order traversal: offers {@code node} first, then its children; stops early on false. */
  public static boolean traverseDepth(@NotNull final TreeNode node, @NotNull final Traverse traverse) {
    if (!traverse.accept(node)) return false;
    final int childCount = node.getChildCount();
    for (int i = 0; i < childCount; i++)
      if (!traverseDepth(node.getChildAt(i), traverse)) return false;
    return true;
  }

  /** Makes {@code path} visible, selects it and centers it vertically in the viewport. */
  @NotNull
  public static ActionCallback selectPath(@NotNull final JTree tree, final TreePath path) {
    return selectPath(tree, path, true);
  }

  /** Makes {@code path} visible and selects it, optionally centering the row in the viewport. */
  @NotNull
  public static ActionCallback selectPath(@NotNull final JTree tree, final TreePath path, boolean center) {
    tree.makeVisible(path);
    if (center) {
      return showRowCentred(tree, tree.getRowForPath(path));
    }
    else {
      final int row = tree.getRowForPath(path);
      // Keep a small padding of rows visible around the selected row.
      return showAndSelect(tree, row - ScrollingUtil.ROW_PADDING, row + ScrollingUtil.ROW_PADDING, row, -1);
    }
  }

  /** Moves the lead selection one row down (no-op on the last row). */
  @NotNull
  public static ActionCallback moveDown(@NotNull final JTree tree) {
    final int size = tree.getRowCount();
    int row = tree.getLeadSelectionRow();
    if (row < size - 1) {
      row++;
      return showAndSelect(tree, row, row + 2, row, getSelectedRow(tree), false, true, true);
    }
    else {
      return ActionCallback.DONE;
    }
  }

  /** Moves the lead selection one row up (no-op on the first row). */
  @NotNull
  public static ActionCallback moveUp(@NotNull final JTree tree) {
    int row = tree.getLeadSelectionRow();
    if (row > 0) {
      row--;
      return showAndSelect(tree, row - 2, row, row, getSelectedRow(tree), false, true, true);
    }
    else {
      return ActionCallback.DONE;
    }
  }

  /** Moves the selection one page up, scrolling so roughly one previously visible row stays in view. */
  @NotNull
  public static ActionCallback movePageUp(@NotNull final JTree tree) {
    final int visible = getVisibleRowCount(tree);
    if (visible <= 0){
      // Viewport height unknown/empty: fall back to jumping to the first row.
      return moveHome(tree);
    }
    final int decrement = visible - 1;
    final int row = Math.max(getSelectedRow(tree) - decrement, 0);
    final int top = getFirstVisibleRow(tree) - decrement;
    final int bottom = top + visible - 1;
    return showAndSelect(tree, top, bottom, row, getSelectedRow(tree));
  }

  /** Moves the selection one page down, scrolling so roughly one previously visible row stays in view. */
  @NotNull
  public static ActionCallback movePageDown(@NotNull final JTree tree) {
    final int visible = getVisibleRowCount(tree);
    if (visible <= 0){
      // Viewport height unknown/empty: fall back to jumping to the last row.
      return moveEnd(tree);
    }
    final int size = tree.getRowCount();
    final int increment = visible - 1;
    final int index = Math.min(getSelectedRow(tree) + increment, size - 1);
    final int top = getFirstVisibleRow(tree) + increment;
    final int bottom = top + visible - 1;
    return showAndSelect(tree, top, bottom, index, getSelectedRow(tree));
  }

  /** Scrolls to and selects the first row. */
  @NotNull
  private static ActionCallback moveHome(@NotNull final JTree tree) {
    return showRowCentred(tree, 0);
  }

  /** Scrolls to and selects the last row. */
  @NotNull
  private static ActionCallback moveEnd(@NotNull final JTree tree) {
    return showRowCentred(tree, tree.getRowCount() - 1);
  }

  /** Selects {@code row} and centers it, including horizontally. */
  @NotNull
  private static ActionCallback showRowCentred(@NotNull final JTree tree, final int row) {
    return showRowCentered(tree, row, true);
  }

  /** Selects {@code row} and centers it vertically; horizontal behavior per {@code centerHorizontally}. */
  @NotNull
  public static ActionCallback showRowCentered(@NotNull final JTree tree, final int row, final boolean centerHorizontally) {
    return showRowCentered(tree, row, centerHorizontally, true);
  }

  /**
   * Selects {@code row}, computing a window of rows around it so the row ends up
   * roughly in the middle of the viewport; scrolling is optional.
   * NOTE(review): {@code centerHorizontally} is accepted but not used in the visible code — confirm intended.
   */
  @NotNull
  public static ActionCallback showRowCentered(@NotNull final JTree tree, final int row, final boolean centerHorizontally, boolean scroll) {
    final int visible = getVisibleRowCount(tree);
    // Place `row` in the middle of a window `visible` rows tall.
    final int top = visible > 0 ? row - (visible - 1)/ 2 : row;
    final int bottom = visible > 0 ?
             top + visible - 1 : row;
    return showAndSelect(tree, top, bottom, row, -1, false, scroll, false);
  }

  /** Scrolls rows {@code top..bottom} into view and selects {@code row}. */
  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous) {
    return showAndSelect(tree, top, bottom, row, previous, false);
  }

  /** Same as above, optionally adding to the current selection instead of replacing it. */
  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous, boolean addToSelection) {
    return showAndSelect(tree, top, bottom, row, previous, addToSelection, true, false);
  }

  /** Same as above with explicit control over whether scrolling happens at all. */
  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous, final boolean addToSelection, final boolean scroll) {
    return showAndSelect(tree, top, bottom, row, previous, addToSelection, scroll, false);
  }

  /**
   * Workhorse: selects {@code row} and (optionally) scrolls the row range {@code top..bottom} into view.
   *
   * @param previous       previously selected row (not used in the visible code — kept for callers)
   * @param addToSelection add {@code row} to the selection instead of replacing it
   * @param scroll         when false, only the selection is changed
   * @param resetSelection re-apply the selection even when {@code row} is already selected
   */
  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous, final boolean addToSelection, final boolean scroll, final boolean resetSelection) {
    final TreePath path = tree.getPathForRow(row);
    if (path == null) return ActionCallback.DONE;
    final int size = tree.getRowCount();
    if (size == 0) {
      tree.clearSelection();
      return ActionCallback.DONE;
    }
    // Clamp the requested window to the rows that actually exist.
    if (top < 0){
      top = 0;
    }
    if (bottom >= size){
      bottom = size - 1;
    }
    if (row >= tree.getRowCount()) return ActionCallback.DONE;
    boolean okToScroll = true;
    if (tree.isShowing()) {
      if (!tree.isValid()) {
        tree.validate();
      }
    }
    else {
      // Tree not on screen: in unit-test mode skip scrolling entirely.
      Application app = ApplicationManager.getApplication();
      if (app != null && app.isUnitTestMode()) {
        okToScroll = false;
      }
    }
    // Selection change is factored into a runnable so it can run with or without scrolling.
    Runnable selectRunnable = new Runnable() {
      @Override
      public void run() {
        if (!tree.isRowSelected(row)) {
          if (addToSelection) {
            tree.getSelectionModel().addSelectionPath(tree.getPathForRow(row));
          }
          else {
            tree.setSelectionRow(row);
          }
        }
        else if (resetSelection) {
          if (!addToSelection) {
            tree.setSelectionRow(row);
          }
        }
      }
    };
    if (!okToScroll) {
      selectRunnable.run();
      return ActionCallback.DONE;
    }
    final
          Rectangle rowBounds = tree.getRowBounds(row);
    if (rowBounds == null) return ActionCallback.DONE;
    Rectangle topBounds = tree.getRowBounds(top);
    if (topBounds == null) {
      topBounds = rowBounds;
    }
    Rectangle bottomBounds = tree.getRowBounds(bottom);
    if (bottomBounds == null) {
      bottomBounds = rowBounds;
    }
    // Target rectangle: vertical span of top..bottom, horizontal extent of the row itself.
    Rectangle bounds = topBounds.union(bottomBounds);
    bounds.x = rowBounds.x;
    bounds.width = rowBounds.width;
    final Rectangle visible = tree.getVisibleRect();
    if (visible.contains(bounds)) {
      // Already fully visible: no scrolling needed.
      bounds = null;
    }
    else {
      final Component comp =
        tree.getCellRenderer().getTreeCellRendererComponent(tree, path.getLastPathComponent(), true, true, false, row, false);
      if (comp instanceof SimpleColoredComponent) {
        // Use the renderer's real preferred width so the full text can be scrolled to.
        final SimpleColoredComponent renderer = (SimpleColoredComponent)comp;
        final Dimension scrollableSize = renderer.computePreferredSize(true);
        bounds.width = scrollableSize.width;
      }
    }
    final ActionCallback callback = new ActionCallback();
    selectRunnable.run();
    if (bounds != null) {
      // Widen the rectangle to include the row's expand/collapse control, when present.
      final Range<Integer> range = getExpandControlRange(tree, path);
      if (range != null) {
        int delta = bounds.x - range.getFrom().intValue();
        bounds.x -= delta;
        bounds.width -= delta;
      }
      if (visible.width < bounds.width) {
        bounds.width = visible.width;
      }
      if (tree instanceof Tree && !((Tree)tree).isHorizontalAutoScrollingEnabled()) {
        bounds.x = 0;
      }
      final Rectangle b1 = bounds;
      final Runnable runnable = new Runnable() {
        @Override
        public void run() {
          if (scroll) {
            AbstractTreeBuilder builder = AbstractTreeBuilder.getBuilderFor(tree);
            if (builder != null) {
              // Async tree: scroll only once the builder has finished its updates.
              builder.getReady(TreeUtil.class).doWhenDone(new Runnable() {
                @Override
                public void run() {
                  tree.scrollRectToVisible(b1);
                }
              });
              callback.setDone();
            }
            else {
              tree.scrollRectToVisible(b1);
              // Bump a per-tree timestamp so that only the latest queued scroll request wins
              // when several are posted via invokeLater.
              Long ts = (Long)tree.getClientProperty(TREE_UTIL_SCROLL_TIME_STAMP);
              if (ts == null) {
                ts = 0L;
              }
              ts = ts.longValue() + 1;
              tree.putClientProperty(TREE_UTIL_SCROLL_TIME_STAMP, ts);
              final long targetValue = ts.longValue();
              SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                  Long actual = (Long)tree.getClientProperty(TREE_UTIL_SCROLL_TIME_STAMP);
                  // A newer scroll request has superseded this one — bail out.
                  if (actual == null || targetValue < actual.longValue()) return;
                  if (!tree.getVisibleRect().contains(b1)) {
                    tree.scrollRectToVisible(b1);
                  }
                  callback.setDone();
                }
              });
            }
          }
          // NOTE(review): done is signalled here even when the deferred scroll above
          // has not completed yet — confirm this is intended.
          callback.setDone();
        }
      };
      runnable.run();
    }
    else {
      callback.setDone();
    }
    return callback;
  }

  // this method returns FIRST selected row but not LEAD
  private static int getSelectedRow(@NotNull final JTree tree) {
    return tree.getRowForPath(tree.getSelectionPath());
  }

  /** Returns the index of the first row fully inside the visible rect, or -1 when none is. */
  private static int getFirstVisibleRow(@NotNull final JTree tree) {
    final Rectangle visible = tree.getVisibleRect();
    int row = -1;
    for (int i=0; i < tree.getRowCount(); i++) {
      final Rectangle bounds = tree.getRowBounds(i);
      if (visible.y <= bounds.y && visible.y + visible.height >= bounds.y + bounds.height) {
        row = i;
        break;
      }
    }
    return row;
  }

  /** Counts rows that are fully visible in the viewport; correct for variable row heights. */
  public static int getVisibleRowCount(@NotNull final JTree tree) {
    final Rectangle visible = tree.getVisibleRect();
    if (visible == null) return 0;
    int count = 0;
    for (int i=0; i < tree.getRowCount(); i++) {
      final Rectangle bounds = tree.getRowBounds(i);
      if (bounds == null) continue;
      if (visible.y <= bounds.y && visible.y + visible.height >= bounds.y + bounds.height) {
        count++;
      }
    }
    return count;
  }

  /**
   * works correctly for trees with fixed row height only.
   * For variable height trees (e.g. trees with custom tree node renderer) use the {@link #getVisibleRowCount(JTree)} which is slower
   */
  public static int getVisibleRowCountForFixedRowHeight(@NotNull final JTree tree) {
    // myTree.getVisibleRowCount returns 20
    Rectangle bounds = tree.getRowBounds(0);
    // Row height taken from the first row; zero means it cannot be measured yet.
    int rowHeight = bounds == null ? 0 : bounds.height;
    return rowHeight == 0 ?
           tree.getVisibleRowCount() : tree.getVisibleRect().height / rowHeight;
  }

  /** Wires standard keyboard navigation (page up/down, arrows, home/end) into {@code tree}'s action map. */
  @SuppressWarnings({"HardCodedStringLiteral"})
  public static void installActions(@NotNull final JTree tree) {
    tree.getActionMap().put("scrollUpChangeSelection", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        movePageUp(tree);
      }
    });
    tree.getActionMap().put("scrollDownChangeSelection", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        movePageDown(tree);
      }
    });
    tree.getActionMap().put("selectPrevious", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        moveUp(tree);
      }
    });
    tree.getActionMap().put("selectNext", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        moveDown(tree);
      }
    });
    // Make the "change lead" variants behave like the plain select actions.
    copyAction(tree, "selectLast", "selectLastChangeLead");
    copyAction(tree, "selectFirst", "selectFirstChangeLead");
    InputMap inputMap = tree.getInputMap(JComponent.WHEN_FOCUSED);
    // Only install a binding when the keystroke is not already taken.
    UIUtil.maybeInstall(inputMap, "scrollUpChangeSelection", KeyStroke.getKeyStroke(KeyEvent.VK_PAGE_UP, 0));
    UIUtil.maybeInstall(inputMap, "scrollDownChangeSelection", KeyStroke.getKeyStroke(KeyEvent.VK_PAGE_DOWN, 0));
    UIUtil.maybeInstall(inputMap, "selectNext", KeyStroke.getKeyStroke(KeyEvent.VK_DOWN, 0));
    UIUtil.maybeInstall(inputMap, "selectPrevious", KeyStroke.getKeyStroke(KeyEvent.VK_UP, 0));
    UIUtil.maybeInstall(inputMap, "selectLast", KeyStroke.getKeyStroke(KeyEvent.VK_END, 0));
    UIUtil.maybeInstall(inputMap, "selectFirst", KeyStroke.getKeyStroke(KeyEvent.VK_HOME, 0));
  }

  /** Copies the action registered under {@code original} to the {@code copyTo} key, when present. */
  private static void copyAction(@NotNull final JTree tree, String original, String copyTo) {
    final Action action = tree.getActionMap().get(original);
    if (action != null) {
      tree.getActionMap().put(copyTo, action);
    }
  }

  /**
   * Collapses every row of the tree, then re-expands the root and restores selection
   * truncated to {@code keepSelectionLevel} components of the previous lead selection
   * path (a negative level keeps the full path).
   */
  public static void collapseAll(@NotNull final JTree tree, final int keepSelectionLevel) {
    final TreePath leadSelectionPath = tree.getLeadSelectionPath();
    // Collapse all
    int row = tree.getRowCount() - 1;
    while (row >= 0) {
      tree.collapseRow(row);
      row--;
    }
    final DefaultMutableTreeNode root = (DefaultMutableTreeNode)tree.getModel().getRoot();
    tree.expandPath(new TreePath(root));
    if (leadSelectionPath != null) {
      final Object[] path = leadSelectionPath.getPath();
      // Truncate the old lead path to keepSelectionLevel components (when non-negative).
      final Object[] pathToSelect = new Object[path.length > keepSelectionLevel && keepSelectionLevel >= 0 ? keepSelectionLevel : path.length];
      System.arraycopy(path, 0, pathToSelect, 0, pathToSelect.length);
      if (pathToSelect.length == 0) return;
      selectPath(tree, new TreePath(pathToSelect));
    }
  }

  /** Selects the given node by its path from the root. */
  public static void selectNode(@NotNull final JTree tree, final TreeNode node) {
    selectPath(tree, getPathFromRoot(node));
  }

  /**
   * Moves the selected node by {@code direction} positions among its siblings
   * (e.g. -1 = up, +1 = down) and keeps it selected.
   * NOTE(review): assumes a selection exists and the node has a parent; no bounds
   * check on {@code idx + direction} is visible here — callers must guarantee validity.
   */
  public static void moveSelectedRow(@NotNull final JTree tree, final int direction){
    final TreePath selectionPath = tree.getSelectionPath();
    final DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode)selectionPath.getLastPathComponent();
    final DefaultMutableTreeNode parent = (DefaultMutableTreeNode)treeNode.getParent();
    final int idx = parent.getIndex(treeNode);
    ((DefaultTreeModel)tree.getModel()).removeNodeFromParent(treeNode);
    ((DefaultTreeModel)tree.getModel()).insertNodeInto(treeNode, parent, idx + direction);
    selectNode(tree, treeNode);
  }

  /** Snapshots the direct children of {@code node} into a list. */
  @NotNull
  public static ArrayList<TreeNode> childrenToArray(@NotNull final TreeNode node) {
    //ApplicationManager.getApplication().assertIsDispatchThread();
    final int size = node.getChildCount();
    final ArrayList<TreeNode> result = new ArrayList<TreeNode>(size);
    for(int i = 0; i < size; i++){
      TreeNode child = node.getChildAt(i);
      LOG.assertTrue(child != null);
      result.add(child);
    }
    return result;
  }

  /** Expands the root and, when the root has exactly one child, that child too (runs on the EDT). */
  public static void expandRootChildIfOnlyOne(@Nullable final JTree tree) {
    if (tree == null) return;
    final Runnable runnable = new Runnable() {
      @Override
      public void run() {
        final DefaultMutableTreeNode root = (DefaultMutableTreeNode)tree.getModel().getRoot();
        tree.expandPath(new TreePath(new Object[]{root}));
        if (root.getChildCount() == 1) {
          TreeNode firstChild = root.getFirstChild();
          tree.expandPath(new TreePath(new Object[]{root, firstChild}));
        }
      }
    };
    UIUtil.invokeLaterIfNeeded(runnable);
  }

  /**
   * Expands every row of the tree; repeats until the row count stops growing,
   * because expanding rows can reveal new expandable rows.
   */
  public static void expandAll(@NotNull final JTree tree) {
    tree.expandPath(new TreePath(tree.getModel().getRoot()));
    int oldRowCount = 0;
    do {
      int rowCount = tree.getRowCount();
      if (rowCount == oldRowCount) break;
      oldRowCount = rowCount;
      for (int i = 0; i < rowCount; i++) {
        tree.expandRow(i);
      }
    }
    while (true);
  }

  /**
   * Expands n levels of the tree counting from the root
   * @param tree to expand nodes of
   * @param levels depth of the expansion
   */
  public static void expand(@NotNull JTree tree, int levels) {
    expand(tree, new TreePath(tree.getModel().getRoot()), levels);
  }

  /** Recursive helper for {@link #expand(JTree, int)}: expands {@code path} and descends while levels remain. */
  private static void expand(@NotNull JTree tree, @NotNull TreePath path, int levels) {
    if (levels == 0) return;
    tree.expandPath(path);
    TreeNode node = (TreeNode)path.getLastPathComponent();
    Enumeration children = node.children();
    while (children.hasMoreElements()) {
      expand(tree, path.pathByAddingChild(children.nextElement()) , levels - 1);
    }
  }

  /** Expands, selects and vertically centers {@code node}; optionally moves focus to the tree. */
  @NotNull
  public static ActionCallback selectInTree(DefaultMutableTreeNode node, boolean requestFocus, @NotNull JTree tree) {
    return selectInTree(node, requestFocus, tree, true);
  }

  /** Expands and selects {@code node} (null node is a no-op); optionally centers and focuses. */
  @NotNull
  public static ActionCallback selectInTree(@Nullable DefaultMutableTreeNode node, boolean requestFocus, @NotNull JTree tree, boolean center) {
    if (node == null) return ActionCallback.DONE;
    final TreePath treePath = new TreePath(node.getPath());
    tree.expandPath(treePath);
    if (requestFocus) {
      tree.requestFocus();
    }
    return selectPath(tree, treePath, center);
  }

  /**
   * Project-aware variant: focus is requested through the IdeFocusManager and the returned
   * callback completes only after both the focus request and the selection are done.
   */
  @NotNull
  public static ActionCallback selectInTree(Project project, @Nullable DefaultMutableTreeNode node, boolean requestFocus, @NotNull JTree tree, boolean center) {
    if (node == null) return ActionCallback.DONE;
    final TreePath treePath = new TreePath(node.getPath());
    tree.expandPath(treePath);
    if (requestFocus) {
      // Callback with two "done" conditions: focus + selection.
      ActionCallback result = new ActionCallback(2);
      IdeFocusManager.getInstance(project).requestFocus(tree,
                  true).notifyWhenDone(result);
      selectPath(tree, treePath, center).notifyWhenDone(result);
      return result;
    }
    return selectPath(tree, treePath, center);
  }

  /** Returns the currently selected paths that lie under {@code treePath} (descendants or the path itself). */
  @NotNull
  public static List<TreePath> collectSelectedPaths(@NotNull final JTree tree, @NotNull final TreePath treePath) {
    final ArrayList<TreePath> result = new ArrayList<TreePath>();
    final TreePath[] selections = tree.getSelectionPaths();
    if (selections != null) {
      for (TreePath selection : selections) {
        if (treePath.isDescendant(selection)) {
          result.add(selection);
        }
      }
    }
    return result;
  }

  /** Deselects every selected path that is a strict descendant of {@code node}. */
  public static void unselect(@NotNull JTree tree, @NotNull final DefaultMutableTreeNode node) {
    final TreePath rootPath = new TreePath(node.getPath());
    final TreePath[] selectionPaths = tree.getSelectionPaths();
    if (selectionPaths != null) {
      for (TreePath selectionPath : selectionPaths) {
        if (selectionPath.getPathCount() > rootPath.getPathCount() && rootPath.isDescendant(selectionPath)) {
          tree.removeSelectionPath(selectionPath);
        }
      }
    }
  }

  /**
   * Computes the horizontal pixel range occupied by the expand/collapse control ("handle")
   * of the row at {@code path}; returns null for leaves or a null path.
   */
  @Nullable
  public static Range<Integer> getExpandControlRange(@NotNull final JTree aTree, @Nullable final TreePath path) {
    TreeModel treeModel = aTree.getModel();
    final BasicTreeUI basicTreeUI = (BasicTreeUI)aTree.getUI();
    Icon expandedIcon = basicTreeUI.getExpandedIcon();
    Range<Integer> box = null;
    if (path != null && !treeModel.isLeaf(path.getLastPathComponent())) {
      int boxWidth;
      Insets i = aTree.getInsets();
      if (expandedIcon != null) {
        boxWidth = expandedIcon.getIconWidth();
      }
      else {
        // No icon installed: fall back to a nominal 8px-wide handle.
        boxWidth = 8;
      }
      int boxLeftX = i != null ?
                i.left : 0;
      boolean leftToRight = aTree.getComponentOrientation().isLeftToRight();
      int depthOffset = getDepthOffset(aTree);
      int totalChildIndent = basicTreeUI.getLeftChildIndent() + basicTreeUI.getRightChildIndent();
      if (leftToRight) {
        // Offset by the indent of this depth, then center the box on the child-indent anchor.
        boxLeftX += (path.getPathCount() + depthOffset - 2) * totalChildIndent + basicTreeUI.getLeftChildIndent() - boxWidth / 2;
      }
      int boxRightX = boxLeftX + boxWidth;
      box = new Range<Integer>(boxLeftX, boxRightX);
    }
    return box;
  }

  /** Depth correction used by the UI math above, depending on root visibility and root handles. */
  public static int getDepthOffset(@NotNull JTree aTree) {
    if (aTree.isRootVisible()) {
      return aTree.getShowsRootHandles() ? 1 : 0;
    }
    else {
      return aTree.getShowsRootHandles() ? 0 : -1;
    }
  }

  /** Point under the last selected row, or the tree's center when nothing is selected (e.g. for popups). */
  @NotNull
  public static RelativePoint getPointForSelection(@NotNull JTree aTree) {
    final int[] rows = aTree.getSelectionRows();
    if (rows == null || rows.length == 0) {
      return RelativePoint.getCenterOf(aTree);
    }
    return getPointForRow(aTree, rows[rows.length - 1]);
  }

  /** Point under the given row. */
  @NotNull
  public static RelativePoint getPointForRow(@NotNull JTree aTree, int aRow) {
    return getPointForPath(aTree, aTree.getPathForRow(aRow));
  }

  /** Point under the given path, shifted 20px right of the row's left edge. */
  @NotNull
  public static RelativePoint getPointForPath(@NotNull JTree aTree, TreePath path) {
    final Rectangle rowBounds = aTree.getPathBounds(path);
    rowBounds.x += 20;
    return getPointForBounds(aTree, rowBounds);
  }

  /** Bottom-left corner of {@code aBounds}, expressed as a RelativePoint of {@code aComponent}. */
  @NotNull
  public static RelativePoint getPointForBounds(JComponent aComponent, @NotNull final Rectangle aBounds) {
    return new RelativePoint(aComponent, new Point(aBounds.x, (int)aBounds.getMaxY()));
  }

  /** True when {@code point} is over a row that is currently selected. */
  public static boolean isOverSelection(@NotNull final JTree tree, @NotNull final Point point) {
    TreePath path = tree.getPathForLocation(point.x, point.y);
    return path != null && tree.getSelectionModel().isPathSelected(path);
  }

  /** Clears the selection except for the path under {@code treePoint}; no-op when nothing is there. */
  public static void dropSelectionButUnderPoint(@NotNull JTree tree, @NotNull Point treePoint) {
    final TreePath toRetain = tree.getPathForLocation(treePoint.x, treePoint.y);
    if (toRetain == null) return;
    TreePath[] selection = tree.getSelectionModel().getSelectionPaths();
    selection = selection == null ?
                new TreePath[0] : selection;
    for (TreePath each : selection) {
      if (toRetain.equals(each)) continue;
      tree.getSelectionModel().removeSelectionPath(each);
    }
  }

  /** Visitor callback for {@link #traverse} / {@link #traverseDepth}; return false to stop the walk. */
  public interface Traverse{
    boolean accept(Object node);
  }

  /** Ensures something visible is selected: keeps a valid current selection, else selects the first visible row. */
  public static void ensureSelection(@NotNull JTree tree) {
    final TreePath[] paths = tree.getSelectionPaths();
    if (paths != null) {
      for (TreePath each : paths) {
        if (tree.getRowForPath(each) >= 0 && tree.isVisible(each)) {
          // An existing selected path is still shown — nothing to do.
          return;
        }
      }
    }
    for (int eachRow = 0; eachRow < tree.getRowCount(); eachRow++) {
      TreePath eachPath = tree.getPathForRow(eachRow);
      if (eachPath != null && tree.isVisible(eachPath)) {
        tree.setSelectionPath(eachPath);
        break;
      }
    }
  }

  /**
   * Binary search among the children of {@code parent}, assumed sorted by {@code comparator}.
   * Returns the index of {@code key}, or {@code -(insertionPoint + 1)} when absent —
   * same contract as {@link java.util.Collections#binarySearch}.
   * NOTE(review): (low + high) / 2 can overflow only beyond 2^30 children — acceptable here.
   */
  public static int indexedBinarySearch(@NotNull TreeNode parent, @NotNull TreeNode key, Comparator comparator) {
    int low = 0;
    int high = parent.getChildCount() - 1;
    while (low <= high) {
      int mid = (low + high) / 2;
      TreeNode treeNode = parent.getChildAt(mid);
      int cmp = comparator.compare(treeNode, key);
      if (cmp < 0) {
        low = mid + 1;
      }
      else if (cmp > 0) {
        high = mid - 1;
      }
      else {
        return mid; // key found
      }
    }
    return -(low + 1); // key not found
  }

  /**
   * Orders paths by their current row index in {@code tree} (top to bottom).
   * NOTE(review): subtraction of row indices cannot overflow for realistic row counts.
   */
  @NotNull
  public static Comparator<TreePath> getDisplayOrderComparator(@NotNull final JTree tree) {
    return new Comparator<TreePath>() {
      @Override
      public int compare(TreePath path1, TreePath path2) {
        return tree.getRowForPath(path1) - tree.getRowForPath(path2);
      }
    };
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.concurrent.RejectedExecutionException; import org.apache.camel.AsyncCallback; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.MessageHistory; import org.apache.camel.Processor; import org.apache.camel.Route; import org.apache.camel.StatefulService; import org.apache.camel.StreamCache; import org.apache.camel.api.management.PerformanceCounter; import org.apache.camel.management.DelegatePerformanceCounter; import org.apache.camel.management.mbean.ManagedPerformanceCounter; import org.apache.camel.model.ProcessorDefinition; import org.apache.camel.model.ProcessorDefinitionHelper; import org.apache.camel.processor.interceptor.BacklogDebugger; import org.apache.camel.processor.interceptor.BacklogTracer; import org.apache.camel.processor.interceptor.DefaultBacklogTracerEventMessage; import org.apache.camel.spi.InflightRepository; import org.apache.camel.spi.MessageHistoryFactory; import org.apache.camel.spi.RouteContext; import org.apache.camel.spi.RoutePolicy; import org.apache.camel.spi.StreamCachingStrategy; import 
org.apache.camel.spi.UnitOfWork;
import org.apache.camel.util.MessageHelper;
import org.apache.camel.util.StopWatch;
import org.apache.camel.util.UnitOfWorkHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Internal {@link Processor} that Camel routing engine used during routing for cross cutting functionality such as:
 * <ul>
 *   <li>Execute {@link UnitOfWork}</li>
 *   <li>Keeping track which route currently is being routed</li>
 *   <li>Execute {@link RoutePolicy}</li>
 *   <li>Gather JMX performance statistics</li>
 *   <li>Tracing</li>
 *   <li>Debugging</li>
 *   <li>Message History</li>
 *   <li>Stream Caching</li>
 * </ul>
 * ... and more.
 * <p/>
 * This implementation executes this cross cutting functionality as a {@link CamelInternalProcessorAdvice} advice (before and after advice)
 * by executing the {@link CamelInternalProcessorAdvice#before(org.apache.camel.Exchange)} and
 * {@link CamelInternalProcessorAdvice#after(org.apache.camel.Exchange, Object)} callbacks in correct order during routing.
 * This reduces number of stack frames needed during routing, and reduce the number of lines in stacktraces, as well
 * makes debugging the routing engine easier for end users.
 * <p/>
 * <b>Debugging tips:</b> Camel end users who want to debug their Camel applications with the Camel source code, then make sure to
 * read the source code of this class about the debugging tips, which you can find in the
 * {@link #process(org.apache.camel.Exchange, org.apache.camel.AsyncCallback)} method.
 */
public class CamelInternalProcessor extends DelegateAsyncProcessor {

    private static final Logger LOG = LoggerFactory.getLogger(CamelInternalProcessor.class);
    // Advices run around every exchange: before() in registration order,
    // after() in reverse order (handled by the InternalCallback).
    private final List<CamelInternalProcessorAdvice> advices = new ArrayList<CamelInternalProcessorAdvice>();

    /** Creates an internal processor with no delegate and no advices. */
    public CamelInternalProcessor() {
    }

    /** Creates an internal processor delegating to the given processor. */
    public CamelInternalProcessor(Processor processor) {
        super(processor);
    }

    /**
     * Adds an {@link CamelInternalProcessorAdvice} advice to the list of advices to execute by this internal processor.
     *
     * @param advice the advice to add
     */
    public void addAdvice(CamelInternalProcessorAdvice advice) {
        advices.add(advice);
    }

    /**
     * Gets the advice with the given type.
     *
     * @param type the type of the advice
     * @return the advice if exists, or <tt>null</tt> if no advices has been added with the given type.
     */
    public <T> T getAdvice(Class<T> type) {
        // Linear scan: the advice list is small and built once during route setup.
        for (CamelInternalProcessorAdvice task : advices) {
            if (type.isInstance(task)) {
                return type.cast(task);
            }
        }
        return null;
    }

    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        // ----------------------------------------------------------
        // CAMEL END USER - READ ME FOR DEBUGGING TIPS
        // ----------------------------------------------------------
        // If you want to debug the Camel routing engine, then there is a lot of internal functionality
        // the routing engine executes during routing messages. You can skip debugging this internal
        // functionality and instead debug where the routing engine continues routing to the next node
        // in the routes. The CamelInternalProcessor is a vital part of the routing engine, as its
        // being used in between the nodes. As an end user you can just debug the code in this class
        // in between the:
        //   CAMEL END USER - DEBUG ME HERE +++ START +++
        //   CAMEL END USER - DEBUG ME HERE +++ END +++
        // you can see in the code below.
// ---------------------------------------------------------- if (processor == null || !continueProcessing(exchange)) { // no processor or we should not continue then we are done callback.done(true); return true; } final List<Object> states = new ArrayList<Object>(advices.size()); for (CamelInternalProcessorAdvice task : advices) { try { Object state = task.before(exchange); states.add(state); } catch (Throwable e) { exchange.setException(e); callback.done(true); return true; } } // create internal callback which will execute the advices in reverse order when done callback = new InternalCallback(states, exchange, callback); // UNIT_OF_WORK_PROCESS_SYNC is @deprecated and we should remove it from Camel 3.0 Object synchronous = exchange.removeProperty(Exchange.UNIT_OF_WORK_PROCESS_SYNC); if (exchange.isTransacted() || synchronous != null) { // must be synchronized for transacted exchanges if (LOG.isTraceEnabled()) { if (exchange.isTransacted()) { LOG.trace("Transacted Exchange must be routed synchronously for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } else { LOG.trace("Synchronous UnitOfWork Exchange must be routed synchronously for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } } // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ START +++ // ---------------------------------------------------------- try { processor.process(exchange); } catch (Throwable e) { exchange.setException(e); } // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ END +++ // ---------------------------------------------------------- callback.done(true); return true; } else { final UnitOfWork uow = exchange.getUnitOfWork(); // allow unit of work to wrap callback in case it need to do some special work // for example the MDCUnitOfWork AsyncCallback async = callback; if (uow != null) { async = uow.beforeProcess(processor, exchange, callback); } // 
---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ START +++ // ---------------------------------------------------------- if (LOG.isTraceEnabled()) { LOG.trace("Processing exchange for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } boolean sync = processor.process(exchange, async); // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ END +++ // ---------------------------------------------------------- // execute any after processor work (in current thread, not in the callback) if (uow != null) { uow.afterProcess(processor, exchange, callback, sync); } if (LOG.isTraceEnabled()) { LOG.trace("Exchange processed and is continued routed {} for exchangeId: {} -> {}", new Object[]{sync ? "synchronously" : "asynchronously", exchange.getExchangeId(), exchange}); } return sync; } } @Override public String toString() { return processor != null ? processor.toString() : super.toString(); } /** * Internal callback that executes the after advices. 
*/ private final class InternalCallback implements AsyncCallback { private final List<Object> states; private final Exchange exchange; private final AsyncCallback callback; private InternalCallback(List<Object> states, Exchange exchange, AsyncCallback callback) { this.states = states; this.exchange = exchange; this.callback = callback; } @Override public void done(boolean doneSync) { // NOTE: if you are debugging Camel routes, then all the code in the for loop below is internal only // so you can step straight to the finally block and invoke the callback // we should call after in reverse order try { for (int i = advices.size() - 1; i >= 0; i--) { CamelInternalProcessorAdvice task = advices.get(i); Object state = states.get(i); try { task.after(exchange, state); } catch (Exception e) { exchange.setException(e); // allow all advices to complete even if there was an exception } } } finally { // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ START +++ // ---------------------------------------------------------- // callback must be called callback.done(doneSync); // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ END +++ // ---------------------------------------------------------- } } } /** * Strategy to determine if we should continue processing the {@link Exchange}. 
*/ protected boolean continueProcessing(Exchange exchange) { Object stop = exchange.getProperty(Exchange.ROUTE_STOP); if (stop != null) { boolean doStop = exchange.getContext().getTypeConverter().convertTo(Boolean.class, stop); if (doStop) { LOG.debug("Exchange is marked to stop routing: {}", exchange); return false; } } // determine if we can still run, or the camel context is forcing a shutdown boolean forceShutdown = exchange.getContext().getShutdownStrategy().forceShutdown(this); if (forceShutdown) { String msg = "Run not allowed as ShutdownStrategy is forcing shutting down, will reject executing exchange: " + exchange; LOG.debug(msg); if (exchange.getException() == null) { exchange.setException(new RejectedExecutionException(msg)); } return false; } // yes we can continue return true; } /** * Advice to invoke callbacks for before and after routing. */ public static class RouteLifecycleAdvice implements CamelInternalProcessorAdvice<Object> { private Route route; public void setRoute(Route route) { this.route = route; } @Override public Object before(Exchange exchange) throws Exception { UnitOfWork uow = exchange.getUnitOfWork(); if (uow != null) { uow.beforeRoute(exchange, route); } return null; } @Override public void after(Exchange exchange, Object object) throws Exception { UnitOfWork uow = exchange.getUnitOfWork(); if (uow != null) { uow.afterRoute(exchange, route); } } } /** * Advice for JMX instrumentation of the process being invoked. * <p/> * This advice keeps track of JMX metrics for performance statistics. * <p/> * The current implementation of this advice is only used for route level statistics. For processor levels * they are still wrapped in the route processor chains. 
*/ public static class InstrumentationAdvice implements CamelInternalProcessorAdvice<StopWatch> { private PerformanceCounter counter; private String type; public InstrumentationAdvice(String type) { this.type = type; } public void setCounter(Object counter) { ManagedPerformanceCounter mpc = null; if (counter instanceof ManagedPerformanceCounter) { mpc = (ManagedPerformanceCounter) counter; } if (this.counter instanceof DelegatePerformanceCounter) { ((DelegatePerformanceCounter) this.counter).setCounter(mpc); } else if (mpc != null) { this.counter = mpc; } else if (counter instanceof PerformanceCounter) { this.counter = (PerformanceCounter) counter; } } protected void beginTime(Exchange exchange) { counter.processExchange(exchange); } protected void recordTime(Exchange exchange, long duration) { if (LOG.isTraceEnabled()) { LOG.trace("{}Recording duration: {} millis for exchange: {}", new Object[]{type != null ? type + ": " : "", duration, exchange}); } if (!exchange.isFailed() && exchange.getException() == null) { counter.completedExchange(exchange, duration); } else { counter.failedExchange(exchange); } } public String getType() { return type; } public void setType(String type) { this.type = type; } @Override public StopWatch before(Exchange exchange) throws Exception { // only record time if stats is enabled StopWatch answer = counter != null && counter.isStatisticsEnabled() ? 
new StopWatch() : null; if (answer != null) { beginTime(exchange); } return answer; } @Override public void after(Exchange exchange, StopWatch watch) throws Exception { // record end time if (watch != null) { recordTime(exchange, watch.stop()); } } } /** * Advice to inject the current {@link RouteContext} into the {@link UnitOfWork} on the {@link Exchange} * * @deprecated this logic has been merged into {@link org.apache.camel.processor.CamelInternalProcessor.UnitOfWorkProcessorAdvice} */ @Deprecated public static class RouteContextAdvice implements CamelInternalProcessorAdvice<UnitOfWork> { private final RouteContext routeContext; public RouteContextAdvice(RouteContext routeContext) { this.routeContext = routeContext; } @Override public UnitOfWork before(Exchange exchange) throws Exception { // push the current route context final UnitOfWork unitOfWork = exchange.getUnitOfWork(); if (unitOfWork != null) { unitOfWork.pushRouteContext(routeContext); } return unitOfWork; } @Override public void after(Exchange exchange, UnitOfWork unitOfWork) throws Exception { if (unitOfWork != null) { unitOfWork.popRouteContext(); } } } /** * Advice to keep the {@link InflightRepository} up to date. */ public static class RouteInflightRepositoryAdvice implements CamelInternalProcessorAdvice { private final InflightRepository inflightRepository; private final String id; public RouteInflightRepositoryAdvice(InflightRepository inflightRepository, String id) { this.inflightRepository = inflightRepository; this.id = id; } @Override public Object before(Exchange exchange) throws Exception { inflightRepository.add(exchange, id); return null; } @Override public void after(Exchange exchange, Object state) throws Exception { inflightRepository.remove(exchange, id); } } /** * Advice to execute any {@link RoutePolicy} a route may have been configured with. 
*/ public static class RoutePolicyAdvice implements CamelInternalProcessorAdvice { private final List<RoutePolicy> routePolicies; private Route route; public RoutePolicyAdvice(List<RoutePolicy> routePolicies) { this.routePolicies = routePolicies; } public void setRoute(Route route) { this.route = route; } /** * Strategy to determine if this policy is allowed to run * * @param policy the policy * @return <tt>true</tt> to run */ protected boolean isRoutePolicyRunAllowed(RoutePolicy policy) { if (policy instanceof StatefulService) { StatefulService ss = (StatefulService) policy; return ss.isRunAllowed(); } return true; } @Override public Object before(Exchange exchange) throws Exception { // invoke begin for (RoutePolicy policy : routePolicies) { try { if (isRoutePolicyRunAllowed(policy)) { policy.onExchangeBegin(route, exchange); } } catch (Exception e) { LOG.warn("Error occurred during onExchangeBegin on RoutePolicy: " + policy + ". This exception will be ignored", e); } } return null; } @Override public void after(Exchange exchange, Object data) throws Exception { // do not invoke it if Camel is stopping as we don't want // the policy to start a consumer during Camel is stopping if (isCamelStopping(exchange.getContext())) { return; } for (RoutePolicy policy : routePolicies) { try { if (isRoutePolicyRunAllowed(policy)) { policy.onExchangeDone(route, exchange); } } catch (Exception e) { LOG.warn("Error occurred during onExchangeDone on RoutePolicy: " + policy + ". This exception will be ignored", e); } } } private static boolean isCamelStopping(CamelContext context) { if (context instanceof StatefulService) { StatefulService ss = (StatefulService) context; return ss.isStopping() || ss.isStopped(); } return false; } } /** * Advice to execute the {@link BacklogTracer} if enabled. 
*/ public static final class BacklogTracerAdvice implements CamelInternalProcessorAdvice { private final BacklogTracer backlogTracer; private final ProcessorDefinition<?> processorDefinition; private final ProcessorDefinition<?> routeDefinition; private final boolean first; public BacklogTracerAdvice(BacklogTracer backlogTracer, ProcessorDefinition<?> processorDefinition, ProcessorDefinition<?> routeDefinition, boolean first) { this.backlogTracer = backlogTracer; this.processorDefinition = processorDefinition; this.routeDefinition = routeDefinition; this.first = first; } @Override public Object before(Exchange exchange) throws Exception { if (backlogTracer.shouldTrace(processorDefinition, exchange)) { Date timestamp = new Date(); String toNode = processorDefinition.getId(); String exchangeId = exchange.getExchangeId(); String messageAsXml = MessageHelper.dumpAsXml(exchange.getIn(), true, 4, backlogTracer.isBodyIncludeStreams(), backlogTracer.isBodyIncludeFiles(), backlogTracer.getBodyMaxChars()); // if first we should add a pseudo trace message as well, so we have a starting message (eg from the route) String routeId = routeDefinition != null ? routeDefinition.getId() : null; if (first) { Date created = exchange.getProperty(Exchange.CREATED_TIMESTAMP, timestamp, Date.class); DefaultBacklogTracerEventMessage pseudo = new DefaultBacklogTracerEventMessage(backlogTracer.incrementTraceCounter(), created, routeId, null, exchangeId, messageAsXml); backlogTracer.traceEvent(pseudo); } DefaultBacklogTracerEventMessage event = new DefaultBacklogTracerEventMessage(backlogTracer.incrementTraceCounter(), timestamp, routeId, toNode, exchangeId, messageAsXml); backlogTracer.traceEvent(event); } return null; } @Override public void after(Exchange exchange, Object data) throws Exception { // noop } } /** * Advice to execute the {@link org.apache.camel.processor.interceptor.BacklogDebugger} if enabled. 
*/ public static final class BacklogDebuggerAdvice implements CamelInternalProcessorAdvice<StopWatch> { private final BacklogDebugger backlogDebugger; private final Processor target; private final ProcessorDefinition<?> definition; private final String nodeId; public BacklogDebuggerAdvice(BacklogDebugger backlogDebugger, Processor target, ProcessorDefinition<?> definition) { this.backlogDebugger = backlogDebugger; this.target = target; this.definition = definition; this.nodeId = definition.getId(); } @Override public StopWatch before(Exchange exchange) throws Exception { if (backlogDebugger.isEnabled() && (backlogDebugger.hasBreakpoint(nodeId) || backlogDebugger.isSingleStepMode())) { StopWatch watch = new StopWatch(); backlogDebugger.beforeProcess(exchange, target, definition); return watch; } else { return null; } } @Override public void after(Exchange exchange, StopWatch stopWatch) throws Exception { if (stopWatch != null) { backlogDebugger.afterProcess(exchange, target, definition, stopWatch.stop()); } } } /** * Advice to inject new {@link UnitOfWork} to the {@link Exchange} if needed, and as well to ensure * the {@link UnitOfWork} is done and stopped. 
*/ public static class UnitOfWorkProcessorAdvice implements CamelInternalProcessorAdvice<UnitOfWork> { private final RouteContext routeContext; public UnitOfWorkProcessorAdvice(RouteContext routeContext) { this.routeContext = routeContext; } @Override public UnitOfWork before(Exchange exchange) throws Exception { // if the exchange doesn't have from route id set, then set it if it originated // from this unit of work if (routeContext != null && exchange.getFromRouteId() == null) { String routeId = routeContext.getRoute().idOrCreate(routeContext.getCamelContext().getNodeIdFactory()); exchange.setFromRouteId(routeId); } // only return UnitOfWork if we created a new as then its us that handle the lifecycle to done the created UoW UnitOfWork created = null; if (exchange.getUnitOfWork() == null) { // If there is no existing UoW, then we should start one and // terminate it once processing is completed for the exchange. created = createUnitOfWork(exchange); exchange.setUnitOfWork(created); created.start(); } // for any exchange we should push/pop route context so we can keep track of which route we are routing if (routeContext != null) { UnitOfWork existing = exchange.getUnitOfWork(); if (existing != null) { existing.pushRouteContext(routeContext); } } return created; } @Override public void after(Exchange exchange, UnitOfWork uow) throws Exception { UnitOfWork existing = exchange.getUnitOfWork(); // execute done on uow if we created it, and the consumer is not doing it if (uow != null) { UnitOfWorkHelper.doneUow(uow, exchange); } // after UoW is done lets pop the route context which must be done on every existing UoW if (routeContext != null && existing != null) { existing.popRouteContext(); } } protected UnitOfWork createUnitOfWork(Exchange exchange) { return exchange.getContext().getUnitOfWorkFactory().createUnitOfWork(exchange); } } /** * Advice when an EIP uses the <tt>shareUnitOfWork</tt> functionality. 
*/ public static class ChildUnitOfWorkProcessorAdvice extends UnitOfWorkProcessorAdvice { private final UnitOfWork parent; public ChildUnitOfWorkProcessorAdvice(RouteContext routeContext, UnitOfWork parent) { super(routeContext); this.parent = parent; } @Override protected UnitOfWork createUnitOfWork(Exchange exchange) { // let the parent create a child unit of work to be used return parent.createChildUnitOfWork(exchange); } } /** * Advice when an EIP uses the <tt>shareUnitOfWork</tt> functionality. */ public static class SubUnitOfWorkProcessorAdvice implements CamelInternalProcessorAdvice<UnitOfWork> { @Override public UnitOfWork before(Exchange exchange) throws Exception { // begin savepoint exchange.getUnitOfWork().beginSubUnitOfWork(exchange); return exchange.getUnitOfWork(); } @Override public void after(Exchange exchange, UnitOfWork unitOfWork) throws Exception { // end sub unit of work unitOfWork.endSubUnitOfWork(exchange); } } /** * Advice when Message History has been enabled. */ @SuppressWarnings("unchecked") public static class MessageHistoryAdvice implements CamelInternalProcessorAdvice<MessageHistory> { private final MessageHistoryFactory factory; private final ProcessorDefinition<?> definition; private final String routeId; public MessageHistoryAdvice(MessageHistoryFactory factory, ProcessorDefinition<?> definition) { this.factory = factory; this.definition = definition; this.routeId = ProcessorDefinitionHelper.getRouteId(definition); } @Override public MessageHistory before(Exchange exchange) throws Exception { List<MessageHistory> list = exchange.getProperty(Exchange.MESSAGE_HISTORY, List.class); if (list == null) { list = new ArrayList<MessageHistory>(); exchange.setProperty(Exchange.MESSAGE_HISTORY, list); } MessageHistory history = factory.newMessageHistory(routeId, definition, new Date()); list.add(history); return history; } @Override public void after(Exchange exchange, MessageHistory history) throws Exception { if (history != null) { 
history.nodeProcessingDone(); } } } /** * Advice for {@link org.apache.camel.spi.StreamCachingStrategy} */ public static class StreamCachingAdvice implements CamelInternalProcessorAdvice<StreamCache> { private final StreamCachingStrategy strategy; public StreamCachingAdvice(StreamCachingStrategy strategy) { this.strategy = strategy; } @Override public StreamCache before(Exchange exchange) throws Exception { // check if body is already cached Object body = exchange.getIn().getBody(); if (body == null) { return null; } else if (body instanceof StreamCache) { StreamCache sc = (StreamCache) body; // reset so the cache is ready to be used before processing sc.reset(); return sc; } // cache the body and if we could do that replace it as the new body StreamCache sc = strategy.cache(exchange); if (sc != null) { exchange.getIn().setBody(sc); } return sc; } @Override public void after(Exchange exchange, StreamCache sc) throws Exception { Object body; if (exchange.hasOut()) { body = exchange.getOut().getBody(); } else { body = exchange.getIn().getBody(); } if (body != null && body instanceof StreamCache) { // reset so the cache is ready to be reused after processing ((StreamCache) body).reset(); } } } /** * Advice for delaying */ public static class DelayerAdvice implements CamelInternalProcessorAdvice { private final long delay; public DelayerAdvice(long delay) { this.delay = delay; } @Override public Object before(Exchange exchange) throws Exception { try { LOG.trace("Sleeping for: {} millis", delay); Thread.sleep(delay); } catch (InterruptedException e) { LOG.debug("Sleep interrupted"); Thread.currentThread().interrupt(); throw e; } return null; } @Override public void after(Exchange exchange, Object data) throws Exception { // noop } } }
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.file.formats.android.oat; import ghidra.app.cmd.disassemble.DisassembleCommand; import ghidra.app.util.bin.BinaryReader; import ghidra.app.util.bin.StructConverter; import ghidra.app.util.bin.format.elf.ElfSectionHeaderConstants; import ghidra.app.util.importer.MessageLog; import ghidra.file.analyzers.FileFormatAnalyzer; import ghidra.file.formats.android.dex.format.DexHeader; import ghidra.file.formats.android.oat.oatdexfile.OatDexFile; import ghidra.program.model.address.Address; import ghidra.program.model.address.AddressSetView; import ghidra.program.model.data.*; import ghidra.program.model.listing.*; import ghidra.program.model.mem.Memory; import ghidra.program.model.mem.MemoryBlock; import ghidra.program.model.scalar.Scalar; import ghidra.program.model.symbol.*; import ghidra.util.exception.CancelledException; import ghidra.util.task.TaskMonitor; public class OatHeaderAnalyzer extends FileFormatAnalyzer { @Override public String getName() { return "Android OAT Header Format"; } @Override public boolean getDefaultEnablement(Program program) { return true; } @Override public String getDescription() { return "Analyzes the Android OAT sections (oatdata and oatexec) in this program."; } @Override public boolean canAnalyze(Program program) { return OatConstants.isOAT(program); } @Override public boolean isPrototype() { return false; } @Override public boolean analyze(Program program, 
AddressSetView set, TaskMonitor monitor, MessageLog log) throws Exception { clearIfNeeded(program, monitor, log); Symbol oatDataSymbol = OatUtilities.getOatDataSymbol(program); Address address = oatDataSymbol.getAddress(); BinaryReader reader = OatUtilities.getBinaryReader(program); if (reader == null) { return false; } OatHeader oatHeader = null; try { oatHeader = OatHeaderFactory.newOatHeader(reader); OatHeaderFactory.parseOatHeader(oatHeader, program, reader, monitor, log); } catch (UnsupportedOatVersionException e) { log.appendMsg(e.getMessage()); return false; } try { DataType headerDataType = oatHeader.toDataType(); Data headerData = createData(program, address, headerDataType); address = address.add(headerDataType.getLength()); markupClassOffsets(program, oatDataSymbol, oatHeader, headerData, monitor, log); monitor.setMessage("Applying OAT DEX headers..."); monitor.initialize(oatHeader.getOatDexFileList().size()); for (int i = 0; i < oatHeader.getOatDexFileList().size(); ++i) { monitor.checkCanceled(); monitor.setMessage("Applying OAT DEX class offsets [ Pass " + i + " of " + oatHeader.getOatDexFileList().size() + " ]..."); monitor.incrementProgress(1); OatDexFile oatDexFileHeader = oatHeader.getOatDexFileList().get(i); oatDexFileHeader.markup(oatHeader, program, monitor, log); applyDexHeader(program, oatDexFileHeader, oatDataSymbol, i); } markupOatPatches(program, oatHeader, monitor, log); } catch (Exception e) { throw e; } finally { oatHeader = null; } return true; } /** * Ghidra sometimes applies undefined1[x] at "oatdata" and "oatexec". * This method checks for these arrays and clears if they exist. 
*/ private void clearIfNeeded(Program program, TaskMonitor monitor, MessageLog log) { Symbol oatDataSymbol = OatUtilities.getOatDataSymbol(program); Data oatDataSymbolData = program.getListing().getDefinedDataAt(oatDataSymbol.getAddress()); if (oatDataSymbolData != null && oatDataSymbolData.isArray()) { Array array = (Array) oatDataSymbolData.getDataType(); if (array.getDataType().isEquivalent(new Undefined1DataType())) { program.getListing() .clearCodeUnits(oatDataSymbolData.getMinAddress(), oatDataSymbolData.getMaxAddress(), false); } } Symbol oatExecSymbol = OatUtilities.getOatExecSymbol(program); if (oatExecSymbol != null) { Data oatExecSymbolData = program.getListing().getDefinedDataAt(oatExecSymbol.getAddress()); if (oatExecSymbolData != null && oatExecSymbolData.isArray()) { Array array = (Array) oatExecSymbolData.getBaseDataType(); if (array.getDataType().isEquivalent(new Undefined1DataType())) { program.getListing() .clearCodeUnits(oatExecSymbolData.getMinAddress(), oatExecSymbolData.getMaxAddress(), false); } } } Symbol oatLastWordSymbol = OatUtilities.getOatLastWordSymbol(program); if (oatLastWordSymbol != null) { Data oatLastWordSymbolData = program.getListing().getDefinedDataAt(oatLastWordSymbol.getAddress()); if (oatLastWordSymbolData != null) { program.getListing() .clearCodeUnits(oatLastWordSymbolData.getMinAddress(), oatLastWordSymbolData.getMaxAddress(), false); } } } /** * Annotates the listing for the ".oat_patches" section(s). * The format of the section changes based on the OAT version. 
*/ private void markupOatPatches(Program program, OatHeader oatHeader, TaskMonitor monitor, MessageLog log) throws CancelledException { monitor.setMessage("Annotating OAT Patches..."); Memory memory = program.getMemory(); if (oatHeader.getVersion().equals(OatConstants.VERSION_LOLLIPOP_MR1_FI_RELEASE)) { MemoryBlock oatBlock = memory.getBlock(OatConstants.DOT_OAT_PATCHES_SECTION_NAME); MemoryBlock destinationBlock = findOatPatchesDestinationBlock(program, oatBlock); if (oatBlock == null || destinationBlock == null) { log.appendMsg("Could not locate OAT patches source / destination block."); return; } DataType dataType = new DWordDataType(); monitor.setProgress(0); long numberOfElements = oatBlock.getSize() / dataType.getLength(); monitor.setMaximum(numberOfElements); for (int i = 0; i < numberOfElements; ++i) { monitor.checkCanceled(); monitor.incrementProgress(1); try { Address address = oatBlock.getStart().add(i * dataType.getLength()); Data data = createData(program, address, dataType); Scalar scalar = data.getScalar(0); Address toAddr = destinationBlock.getStart().add(scalar.getUnsignedValue()); program.getListing().setComment(address, CodeUnit.EOL_COMMENT, "->" + toAddr); } catch (Exception e) { log.appendException(e); return; } } } else if (oatHeader.getVersion().equals(OatConstants.VERSION_MARSHMALLOW_RELEASE)) { //TODO } else if (oatHeader.getVersion().equals(OatConstants.VERSION_NOUGAT_MR1_RELEASE)) { //TODO } else if (oatHeader.getVersion().equals(OatConstants.VERSION_OREO_RELEASE)) { //TODO } else if (oatHeader.getVersion().equals(OatConstants.VERSION_OREO_M2_RELEASE)) { //TODO } } private MemoryBlock findOatPatchesDestinationBlock(Program program, MemoryBlock oatPatchesBlock) { int pos = oatPatchesBlock.getName().indexOf(OatConstants.DOT_OAT_PATCHES_SECTION_NAME); if (pos == 0) {//the block's full name is ".oat_patches" return program.getMemory().getBlock(ElfSectionHeaderConstants.dot_text); } //the block has a prefix, that is the destination name String 
destinationBlockName = oatPatchesBlock.getName().substring(0, pos); return program.getMemory().getBlock(destinationBlockName); } private void applyDexHeader(Program program, OatDexFile oatDexFileHeader, Symbol oatDataSymbol, int index) throws Exception { Address address = oatDataSymbol.getAddress().add(oatDexFileHeader.getDexFileOffset()); DexHeader dexHeader = oatDexFileHeader.getDexHeader(); if (dexHeader == null) { return; } if (oatDexFileHeader.isDexHeaderExternal()) { return; } DataType dexHeaderDataType = dexHeader.toDataType(); try { dexHeaderDataType.setName(dexHeaderDataType.getName() + "_" + index); } catch (Exception e) { //ignore } createData(program, address, dexHeaderDataType); address = address.add(dexHeaderDataType.getLength()); int dexRemainder = dexHeader.getFileSize() - dexHeaderDataType.getLength(); if (dexRemainder > 0) { DataType paddingDataType = new ArrayDataType(StructConverter.BYTE, dexRemainder, StructConverter.BYTE.getLength()); createData(program, address, paddingDataType); } } private void markupClassOffsets(Program program, Symbol oatDataSymbol, OatHeader oatHeader, Data headerData, TaskMonitor monitor, MessageLog log) throws CancelledException { SymbolTable symbolTable = program.getSymbolTable(); ReferenceManager referenceManager = program.getReferenceManager(); EquateTable equateTable = program.getEquateTable(); for (int i = 0; i < headerData.getNumComponents(); ++i) { monitor.checkCanceled(); if (!headerData.getComponent(i).getFieldName().equals("executable_offset_") && headerData.getComponent(i).getFieldName().endsWith("_offset_")) { Scalar scalar = headerData.getComponent(i).getScalar(0); if (scalar.getUnsignedValue() > 0) { Address toAddr = oatDataSymbol.getAddress().add(scalar.getUnsignedValue()); toAddr = OatUtilities.adjustForThumbAsNeeded(oatHeader, program, toAddr, log); referenceManager.addMemoryReference(headerData.getComponent(i).getMinAddress(), toAddr, RefType.DATA, SourceType.ANALYSIS, 0); try { 
symbolTable.createLabel(toAddr, headerData.getComponent(i).getFieldName(), SourceType.ANALYSIS); disassembleAsNeeded(program, toAddr); } catch (Exception e) { //ignore... } } } else if (headerData.getComponent(i) .getFieldName() .equals(OatInstructionSet.DISPLAY_NAME)) { try { Scalar scalar = headerData.getComponent(i).getScalar(0); OatInstructionSet instructionSet = OatInstructionSet.valueOf((int) scalar.getUnsignedValue()); Equate equate = equateTable.createEquate(instructionSet.name(), scalar.getUnsignedValue()); equate.addReference(headerData.getComponent(i).getMinAddress(), 0); } catch (Exception e) { //ignore... } } } } /** * Check to see if points to instructions and not undefined, if so then disassemble. */ private void disassembleAsNeeded(Program program, Address toAddr) { if (program.getMemory().contains(toAddr) && program.getMemory().getBlock(toAddr).isExecute()) { if (program.getListing().isUndefined(toAddr, toAddr)) { DisassembleCommand cmd = new DisassembleCommand(toAddr, null, false); cmd.applyTo(program); } } } }
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution.junit;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.execution.*;
import com.intellij.execution.junit2.info.MethodLocation;
import com.intellij.execution.testframework.SourceScope;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiClassUtil;
import com.intellij.util.Processor;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;

/**
 * Static helpers that decide whether PSI elements (classes/methods) are JUnit 3 or
 * JUnit 4 tests, suite methods, or test configuration methods, and that locate the
 * {@code junit.framework.TestCase} class in a given search scope.
 */
@SuppressWarnings({"UtilityClassWithoutPrivateConstructor"})
public class JUnitUtil {
  @NonNls public static final String TESTCASE_CLASS = "junit.framework.TestCase";
  @NonNls private static final String TEST_INTERFACE = "junit.framework.Test";
  @NonNls private static final String TESTSUITE_CLASS = "junit.framework.TestSuite";
  @NonNls public static final String TEST_ANNOTATION = "org.junit.Test";
  @NonNls public static final String IGNORE_ANNOTATION = "org.junit.Ignore";
  @NonNls public static final String RUN_WITH = "org.junit.runner.RunWith";
  @NonNls public static final String DATA_POINT = "org.junit.experimental.theories.DataPoint";
  @NonNls public static final String SUITE_METHOD_NAME = "suite";

  public static final String BEFORE_ANNOTATION_NAME = "org.junit.Before";
  public static final String AFTER_ANNOTATION_NAME = "org.junit.After";

  private static final String PARAMETRIZED_PARAMETERS_ANNOTATION_NAME = "org.junit.runners.Parameterized.Parameters";
  private static final String AFTER_CLASS_ANNOTATION_NAME = "org.junit.AfterClass";
  private static final String BEFORE_CLASS_ANNOTATION_NAME = "org.junit.BeforeClass";
  private static final String PARAMETERIZED_CLASS_NAME = "org.junit.runners.Parameterized";

  /**
   * Returns {@code true} if the method matches the JUnit 3 suite-method shape:
   * {@code public static}, not a constructor, no parameters, and a return type that is
   * {@code junit.framework.Test}, {@code junit.framework.TestSuite}, or an inheritor of
   * {@code junit.framework.Test}.
   */
  public static boolean isSuiteMethod(@NotNull PsiMethod psiMethod) {
    if (!psiMethod.hasModifierProperty(PsiModifier.PUBLIC)) return false;
    if (!psiMethod.hasModifierProperty(PsiModifier.STATIC)) return false;
    if (psiMethod.isConstructor()) return false;
    if (psiMethod.getParameterList().getParametersCount() > 0) return false;
    final PsiType returnType = psiMethod.getReturnType();
    // Primitive return types (including void) can never be a Test/TestSuite.
    if (returnType == null || returnType instanceof PsiPrimitiveType) return false;
    return returnType.equalsToText(TEST_INTERFACE)|| returnType.equalsToText(TESTSUITE_CLASS) ||
           InheritanceUtil.isInheritor(returnType, TEST_INTERFACE);
  }

  public static boolean isTestMethod(final Location<? extends PsiMethod> location) {
    return isTestMethod(location, true);
  }

  /**
   * Decides whether the located method is a runnable test method (JUnit 3 or 4).
   *
   * @param checkAbstract when {@code true}, abstract containing classes are rejected by
   *                      {@link #isTestClass}
   */
  public static boolean isTestMethod(final Location<? extends PsiMethod> location, boolean checkAbstract) {
    final PsiMethod psiMethod = location.getPsiElement();
    // A MethodLocation may pin the method to a specific (possibly inherited-into) class.
    final PsiClass aClass = location instanceof MethodLocation ? ((MethodLocation)location).getContainingClass() : psiMethod.getContainingClass();
    if (aClass == null || !isTestClass(aClass, checkAbstract, true)) return false;
    if (isTestAnnotated(psiMethod)) return true;
    if (psiMethod.isConstructor()) return false;
    if (!psiMethod.hasModifierProperty(PsiModifier.PUBLIC)) return false;
    if (psiMethod.hasModifierProperty(PsiModifier.ABSTRACT)) return false;
    // Theory data points are fields of test input, not tests themselves.
    if (AnnotationUtil.isAnnotated(psiMethod, DATA_POINT, false)) return false;
    // Inside a @RunWith class every remaining method is considered runnable by the runner.
    if (AnnotationUtil.isAnnotated(aClass, RUN_WITH, true)) return true;
    if (psiMethod.getParameterList().getParametersCount() > 0) return false;
    // A static "suite" method is a suite, not a test method.
    if (psiMethod.hasModifierProperty(PsiModifier.STATIC) && SUITE_METHOD_NAME.equals(psiMethod.getName())) return false;
    // JUnit 3 convention: test methods start with "test" and the class extends TestCase.
    if (!psiMethod.getName().startsWith("test")) return false;
    PsiClass testCaseClass = getTestCaseClassOrNull(location);
    return testCaseClass != null && psiMethod.getContainingClass().isInheritor(testCaseClass, true);
  }

  // True when the class (validly) inherits from junit.framework.TestCase resolvable in its scope.
  private static boolean isTestCaseInheritor(final PsiClass aClass) {
    if (!aClass.isValid()) return false;
    Location<PsiClass> location = PsiLocation.fromPsiElement(aClass);
    PsiClass testCaseClass = getTestCaseClassOrNull(location);
    return testCaseClass != null && aClass.isInheritor(testCaseClass, true);
  }

  public static boolean isTestClass(final PsiClass psiClass) {
    return isTestClass(psiClass, true, true);
  }

  /**
   * Decides whether the class is a test class: runnable, and either a TestCase inheritor
   * (when {@code checkForTestCaseInheritance}), annotated with {@code @RunWith}, or
   * containing at least one suite method or {@code @Test}-annotated method.
   */
  public static boolean isTestClass(@NotNull PsiClass psiClass, boolean checkAbstract, boolean checkForTestCaseInheritance) {
    if (!PsiClassUtil.isRunnableClass(psiClass, true, checkAbstract)) return false;

    if (checkForTestCaseInheritance && isTestCaseInheritor(psiClass)) return true;
    final PsiModifierList modifierList = psiClass.getModifierList();
    if (modifierList == null) return false;
    if (AnnotationUtil.isAnnotated(psiClass, RUN_WITH, true)) return true;

    for (final PsiMethod method : psiClass.getAllMethods()) {
      // Scanning all methods can be slow on large hierarchies; stay cancellable.
      ProgressManager.checkCanceled();
      if (isSuiteMethod(method)) return true;
      if (isTestAnnotated(method)) return true;
    }

    return false;
  }

  public static boolean isJUnit3TestClass(final PsiClass clazz) {
    return isTestCaseInheritor(clazz);
  }

  public static boolean isJUnit4TestClass(final PsiClass psiClass) {
    return isJUnit4TestClass(psiClass, true);
  }

  // Like isTestClass but without the JUnit 3 TestCase-inheritance and suite-method checks.
  private static boolean isJUnit4TestClass(final PsiClass psiClass, boolean checkAbstract) {
    if (!PsiClassUtil.isRunnableClass(psiClass, true, checkAbstract)) return false;
    final PsiModifierList modifierList = psiClass.getModifierList();
    if (modifierList == null) return false;
    if (AnnotationUtil.isAnnotated(psiClass, RUN_WITH, true)) return true;

    for (final PsiMethod method : psiClass.getAllMethods()) {
      ProgressManager.checkCanceled();
      if (isTestAnnotated(method)) return true;
    }

    return false;
  }

  /**
   * Returns {@code true} if the method is (or will be after compilation) annotated with
   * {@code @Test} — except when the containing class hierarchy runs with
   * {@code Parameterized}, where individual {@code @Test} methods are not directly runnable.
   */
  public static boolean isTestAnnotated(final PsiMethod method) {
    if (AnnotationUtil.isAnnotated(method, TEST_ANNOTATION, false) || JUnitRecognizer.willBeAnnotatedAfterCompilation(method)) {
      final PsiAnnotation annotation = AnnotationUtil.findAnnotationInHierarchy(method.getContainingClass(), Collections.singleton(RUN_WITH));
      if (annotation != null) {
        final PsiNameValuePair[] attributes = annotation.getParameterList().getAttributes();
        for (PsiNameValuePair attribute : attributes) {
          final PsiAnnotationMemberValue value = attribute.getValue();
          // @RunWith(Parameterized.class) is written as a class-object access expression.
          if (value instanceof PsiClassObjectAccessExpression ) {
            final PsiTypeElement typeElement = ((PsiClassObjectAccessExpression)value).getOperand();
            if (typeElement.getType().getCanonicalText().equals(PARAMETERIZED_CLASS_NAME)) {
              return false;
            }
          }
        }
      }
      return true;
    }
    return false;
  }

  // Resolves junit.framework.TestCase in the runtime scope of the module owning the location.
  @Nullable
  private static PsiClass getTestCaseClassOrNull(final Location<?> location) {
    final Location<PsiClass> ancestorOrSelf = location.getAncestorOrSelf(PsiClass.class);
    if (ancestorOrSelf == null) return null;
    final PsiClass aClass = ancestorOrSelf.getPsiElement();
    Module module = JavaExecutionUtil.findModule(aClass);
    if (module == null) return null;
    GlobalSearchScope scope = GlobalSearchScope.moduleRuntimeScope(module, true);
    return getTestCaseClassOrNull(scope, module.getProject());
  }

  /** @throws NoJUnitException when the module is null or TestCase is not on its runtime path */
  public static PsiClass getTestCaseClass(final Module module) throws NoJUnitException {
    if (module == null) throw new NoJUnitException();
    final GlobalSearchScope scope = GlobalSearchScope.moduleRuntimeScope(module, true);
    return getTestCaseClass(scope, module.getProject());
  }

  /** @throws NoJUnitException when the scope is null or TestCase is not in its libraries */
  public static PsiClass getTestCaseClass(final SourceScope scope) throws NoJUnitException {
    if (scope == null) throw new NoJUnitException();
    return getTestCaseClass(scope.getLibrariesScope(), scope.getProject());
  }

  private static PsiClass getTestCaseClass(final GlobalSearchScope scope, final Project project) throws NoJUnitException {
    PsiClass testCaseClass = getTestCaseClassOrNull(scope, project);
    if (testCaseClass == null) throw new NoJUnitException(scope.getDisplayName());
    return testCaseClass;
  }

  @Nullable
  private static PsiClass getTestCaseClassOrNull(final GlobalSearchScope scope, final Project project) {
    return JavaPsiFacade.getInstance(project).findClass(TESTCASE_CLASS, scope);
  }

  /**
   * Returns {@code true} for test methods and for lifecycle/configuration methods
   * (setUp/tearDown/suite, @Before/@After, and static @BeforeClass/@AfterClass/@Parameters).
   * An abstract containing class counts only if it has a non-abstract inheritor.
   */
  public static boolean isTestMethodOrConfig(@NotNull PsiMethod psiMethod) {
    if (isTestMethod(PsiLocation.fromPsiElement(psiMethod), false)) {
      final PsiClass containingClass = psiMethod.getContainingClass();
      assert containingClass != null : psiMethod + "; " + psiMethod.getClass() + "; " + psiMethod.getParent();
      if (containingClass.hasModifierProperty(PsiModifier.ABSTRACT)) {
        // A test inside an abstract class runs only via a concrete subclass.
        final boolean[] foundNonAbstractInheritor = new boolean[1];
        ClassInheritorsSearch.search(containingClass).forEach(new Processor<PsiClass>() {
          @Override
          public boolean process(PsiClass psiClass) {
            if (!psiClass.hasModifierProperty(PsiModifier.ABSTRACT)) {
              foundNonAbstractInheritor[0] = true;
              return false; // stop the search once one concrete inheritor is found
            }
            return true;
          }
        });
        if (foundNonAbstractInheritor[0]) {
          return true;
        }
      }
      else {
        return true;
      }
    }
    final String name = psiMethod.getName();
    if (psiMethod.hasModifierProperty(PsiModifier.PUBLIC) && !psiMethod.hasModifierProperty(PsiModifier.ABSTRACT)) {
      if (SUITE_METHOD_NAME.equals(name) || "setUp".equals(name) || "tearDown".equals(name)) {
        return true;
      }
      if (psiMethod.hasModifierProperty(PsiModifier.STATIC)) {
        if (AnnotationUtil.isAnnotated(psiMethod, Arrays.asList(BEFORE_CLASS_ANNOTATION_NAME, AFTER_CLASS_ANNOTATION_NAME,
                                                                PARAMETRIZED_PARAMETERS_ANNOTATION_NAME))) {
          return true;
        }
      }
      else {
        if (AnnotationUtil.isAnnotated(psiMethod, Arrays.asList(BEFORE_ANNOTATION_NAME, AFTER_ANNOTATION_NAME))) return true;
      }
    }
    return false;
  }

  /** Returns the first declared method that is a test or suite method, or {@code null}. */
  @Nullable
  public static PsiMethod findFirstTestMethod(PsiClass clazz) {
    PsiMethod testMethod = null;
    for (PsiMethod method : clazz.getMethods()) {
      if (isTestMethod(MethodLocation.elementInClass(method, clazz)) || isSuiteMethod(method)) {
        testMethod = method;
        break;
      }
    }
    return testMethod;
  }

  /** Returns the class's own {@code suite()} method when it matches the suite shape, or {@code null}. */
  @Nullable
  public static PsiMethod findSuiteMethod(PsiClass clazz) {
    final PsiMethod[] suiteMethods = clazz.findMethodsByName(SUITE_METHOD_NAME, false);
    for (PsiMethod method : suiteMethods) {
      if (isSuiteMethod(method)) return method;
    }
    return null;
  }

  /** Condition that accepts methods which are test methods of a fixed class. */
  public static class TestMethodFilter implements Condition<PsiMethod> {
    private final PsiClass myClass;

    public TestMethodFilter(final PsiClass aClass) {
      myClass = aClass;
    }

    public boolean value(final PsiMethod method) {
      return isTestMethod(MethodLocation.elementInClass(method, myClass));
    }
  }

  // Finds a class by qualified name in the module's dependency scope, or the whole project.
  public static PsiClass findPsiClass(final String qualifiedName, final Module module, final Project project) {
    final GlobalSearchScope scope = module == null ? GlobalSearchScope.projectScope(project) : GlobalSearchScope.moduleWithDependenciesScope(module);
    return JavaPsiFacade.getInstance(project).findClass(qualifiedName, scope);
  }

  public static PsiPackage getContainingPackage(final PsiClass psiClass) {
    return JavaDirectoryService.getInstance().getPackage(psiClass.getContainingFile().getContainingDirectory());
  }

  public static PsiClass getTestClass(final PsiElement element) {
    return getTestClass(PsiLocation.fromPsiElement(element));
  }

  /**
   * Walks enclosing classes (innermost first) and returns the first test class; failing that,
   * returns the sole class of a single-class file, else {@code null}.
   */
  public static PsiClass getTestClass(final Location<?> location) {
    for (Iterator<Location<PsiClass>> iterator = location.getAncestors(PsiClass.class, false); iterator.hasNext();) {
      final Location<PsiClass> classLocation = iterator.next();
      if (isTestClass(classLocation.getPsiElement(), false, true)) return classLocation.getPsiElement();
    }
    PsiElement element = location.getPsiElement();
    if (element instanceof PsiClassOwner) {
      PsiClass[] classes = ((PsiClassOwner)element).getClasses();
      if (classes.length == 1) return classes[0];
    }
    return null;
  }

  public static PsiMethod getTestMethod(final PsiElement element) {
    return getTestMethod(element, true);
  }

  /** Walks enclosing methods (innermost first) and returns the first test method, or {@code null}. */
  public static PsiMethod getTestMethod(final PsiElement element, boolean checkAbstract) {
    final PsiManager manager = element.getManager();
    final Location<PsiElement> location = PsiLocation.fromPsiElement(manager.getProject(), element);
    for (Iterator<Location<PsiMethod>> iterator = location.getAncestors(PsiMethod.class, false); iterator.hasNext();) {
      final Location<? extends PsiMethod> methodLocation = iterator.next();
      if (isTestMethod(methodLocation, checkAbstract)) return methodLocation.getPsiElement();
    }
    return null;
  }

  /** Signals that JUnit could not be found in the relevant scope. */
  public static class NoJUnitException extends CantRunException {
    public NoJUnitException() {
      super(ExecutionBundle.message("no.junit.error.message"));
    }

    public NoJUnitException(final String message) {
      super(ExecutionBundle.message("no.junit.in.scope.error.message", message));
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.harmony.luni.tests.java.io;

import java.io.File;
import java.io.ObjectStreamClass;
import java.io.ObjectStreamField;
import java.io.Serializable;
import java.lang.reflect.Proxy;
import java.net.URL;
import java.net.URLClassLoader;

import junit.framework.TestCase;

/**
 * Unit tests for {@link java.io.ObjectStreamClass}: class lookup, field access,
 * serialVersionUID computation (including synthetic/inner classes, classes loaded
 * via a foreign class loader, proxies and enums).
 */
public class ObjectStreamClassTest extends TestCase {

    /** Simple serializable fixture with a known serialVersionUID and two fields. */
    static class DummyClass implements Serializable {
        private static final long serialVersionUID = 999999999999999L;

        long bam = 999L;

        int ham = 9999;

        public static long getUID() {
            return serialVersionUID;
        }
    }

    /**
     * @tests java.io.ObjectStreamClass#forClass()
     */
    public void test_forClass() {
        // Need to test during serialization to be sure an instance is
        // returned
        ObjectStreamClass osc = ObjectStreamClass.lookup(DummyClass.class);
        assertEquals("forClass returned an object: " + osc.forClass(),
                DummyClass.class, osc.forClass());
    }

    /**
     * @tests java.io.ObjectStreamClass#getField(java.lang.String)
     */
    public void test_getFieldLjava_lang_String() {
        ObjectStreamClass osc = ObjectStreamClass.lookup(DummyClass.class);
        // 'J' is the JVM type code for long.
        assertEquals("getField did not return correct field", 'J', osc
                .getField("bam").getTypeCode());
        assertNull("getField did not null for non-existent field", osc
                .getField("wham"));
    }

    /**
     * @tests java.io.ObjectStreamClass#getFields()
     */
    public void test_getFields() {
        ObjectStreamClass osc = ObjectStreamClass.lookup(DummyClass.class);
        ObjectStreamField[] osfArray = osc.getFields();
        assertEquals(
                "Array of fields should be of length 2 but is instead of length: "
                        + osfArray.length, 2, osfArray.length);
    }

    /**
     * @tests java.io.ObjectStreamClass#getName()
     */
    public void test_getName() {
        ObjectStreamClass osc = ObjectStreamClass.lookup(DummyClass.class);
        assertEquals(
                "getName returned incorrect name: " + osc.getName(),
                "org.apache.harmony.luni.tests.java.io.ObjectStreamClassTest$DummyClass",
                osc.getName());
    }

    /**
     * @tests java.io.ObjectStreamClass#getSerialVersionUID()
     */
    public void test_getSerialVersionUID() {
        ObjectStreamClass osc = ObjectStreamClass.lookup(DummyClass.class);
        assertEquals("getSerialversionUID returned incorrect uid: "
                + osc.getSerialVersionUID() + " instead of "
                + DummyClass.getUID(), DummyClass.getUID(), osc
                .getSerialVersionUID());
    }

    /** Fixture whose inner class forces a synthetic accessor into the UID hash. */
    static class SyntheticTest implements Serializable {
        private int i;

        private class X implements Serializable {
            public int get() {
                return i;
            }
        }

        public X foo() {
            return new X();
        }
    }

    /**
     * @tests java.io.ObjectStreamClass#getSerialVersionUID()
     */
    public void test_getSerialVersionUID_inner_private_class() {
        ObjectStreamClass osc1 = ObjectStreamClass.lookup(SyntheticTest.class);
        assertEquals("SyntheticTest unexpected UID: "
                + osc1.getSerialVersionUID(), -7784078941584535183L, osc1
                .getSerialVersionUID());

        ObjectStreamClass osc2 = ObjectStreamClass
                .lookup(SyntheticTest.X.class);
        assertEquals("SyntheticTest.X unexpected UID: "
                + osc2.getSerialVersionUID(), -7703000075736397332L, osc2
                .getSerialVersionUID());
    }

    /**
     * @tests java.io.ObjectStreamClass#getSerialVersionUID()
     */
    public void test_getSerialVersionUID_classloader() throws Exception {
        File file = new File(
                "resources/org/apache/harmony/luni/tests/ObjectStreamClassTest.jar");
        // File.toURL() is deprecated because it does not escape illegal URL
        // characters; toURI().toURL() is the documented replacement.
        ClassLoader loader = new URLClassLoader(new URL[] { file.toURI().toURL() },
                null);
        // Load without initializing: only the class structure matters for the UID.
        Class<?> cl1 = Class.forName("Test1$TestVarArgs", false, loader);
        ObjectStreamClass osc1 = ObjectStreamClass.lookup(cl1);
        assertEquals("Test1$TestVarArgs unexpected UID: "
                + osc1.getSerialVersionUID(), -6051121963037986215L, osc1
                .getSerialVersionUID());

        Class<?> cl2 = Class.forName("Test1$TestBridge", false, loader);
        ObjectStreamClass osc2 = ObjectStreamClass.lookup(cl2);
        assertEquals("Test1$TestBridge unexpected UID: "
                + osc2.getSerialVersionUID(), 568585976855071180L, osc2
                .getSerialVersionUID());
    }

    /**
     * @tests java.io.ObjectStreamClass#lookup(java.lang.Class)
     */
    public void test_lookupLjava_lang_Class() {
        ObjectStreamClass osc = ObjectStreamClass.lookup(DummyClass.class);
        assertEquals(
                "lookup returned wrong class: " + osc.getName(),
                "org.apache.harmony.luni.tests.java.io.ObjectStreamClassTest$DummyClass",
                osc.getName());
    }

    /**
     * @tests java.io.ObjectStreamClass#toString()
     */
    public void test_toString() {
        ObjectStreamClass osc = ObjectStreamClass.lookup(DummyClass.class);
        String oscString = osc.toString();

        // The previous test was more specific than the spec so it was replaced
        // with the test below
        assertTrue("toString returned incorrect string: " + osc.toString(),
                oscString.indexOf("serialVersionUID") >= 0
                        && oscString.indexOf("999999999999999L") >= 0);
    }

    public void testSerialization() {
        // ObjectStreamClass itself serializes with no declared fields.
        ObjectStreamClass osc = ObjectStreamClass
                .lookup(ObjectStreamClass.class);
        assertEquals(0, osc.getFields().length);
    }

    public void test_specialTypes() {
        // Proxy and enum classes are special-cased by the serialization spec:
        // both have serialVersionUID 0 and no serializable fields.
        Class<?> proxyClass = Proxy.getProxyClass(this.getClass()
                .getClassLoader(), new Class<?>[] { Runnable.class });

        ObjectStreamClass proxyStreamClass = ObjectStreamClass
                .lookup(proxyClass);

        assertEquals("Proxy classes should have zero serialVersionUID", 0,
                proxyStreamClass.getSerialVersionUID());
        ObjectStreamField[] proxyFields = proxyStreamClass.getFields();
        assertEquals("Proxy classes should have no serialized fields", 0,
                proxyFields.length);

        ObjectStreamClass enumStreamClass = ObjectStreamClass
                .lookup(Thread.State.class);

        assertEquals("Enum classes should have zero serialVersionUID", 0,
                enumStreamClass.getSerialVersionUID());
        ObjectStreamField[] enumFields = enumStreamClass.getFields();
        assertEquals("Enum classes should have no serialized fields", 0,
                enumFields.length);
    }
}
package de.bluebiz.bluelytics.api.query.plan.expressions.operands.string; import de.bluebiz.bluelytics.api.query.plan.expressions.MathOperators; import de.bluebiz.bluelytics.api.query.plan.expressions.operands.Operand; import de.bluebiz.bluelytics.api.query.plan.expressions.operands.bool.BooleanOperand; import de.bluebiz.bluelytics.api.query.plan.expressions.operands.numeric.NumericOperand; import de.bluebiz.bluelytics.api.query.plan.expressions.targets.Predicate; import de.bluebiz.bluelytics.api.query.plan.expressions.targets.PredicateRelational; import de.bluebiz.bluelytics.api.query.plan.expressions.targets.PredicateSimple; /** * Represents a string operand. * It provides all functions that can be applied to a string, * like lower, upper, concat or equals. * * @author: bluebiz */ public abstract class StringOperand extends Operand { /** * Length numeric operand. * * @return the numeric operand */ public NumericOperand length() { return createNumericFunction(this, "length"); } /** * Concat string operand. * * @param operand the operand * @return the string operand */ public StringOperand concat(StringOperand operand) { return createStringFunction(this, "concat", operand); } /** * Lower string operand. * * @return the string operand */ public StringOperand lower() { return createStringFunction(this, "lower"); } /** * Starts with predicate. * * @param operand the operand * @return the predicate */ public Predicate startsWith(StringOperand operand) { BooleanOperand func = createBooleanFunction(this, "startsWith", operand); return new PredicateSimple(func); } /** * Starts with ignore case predicate. * * @param operand the operand * @return the predicate */ public Predicate startsWithIgnoreCase(StringOperand operand) { return lower().startsWith(operand.lower()); } /** * Ends with predicate. 
* * @param operand the operand * @return the predicate */ public Predicate endsWith(StringOperand operand) { BooleanOperand func = createBooleanFunction(this, "endsWith", operand); return new PredicateSimple(func); } /** * Ends with ignore case predicate. * * @param operand the operand * @return the predicate */ public Predicate endsWithIgnoreCase(StringOperand operand) { return lower().endsWith(operand.lower()); } /** * Contains predicate. * * @param operand the operand * @return the predicate */ public Predicate contains(StringOperand operand) { BooleanOperand func = createBooleanFunction(this, "strcontains", operand); return new PredicateSimple(func); } /** * Substring string operand. * * @param beginIndex the begin index * @return the string operand */ public StringOperand substring(NumericOperand beginIndex) { return createStringFunction(this, "substring", beginIndex); } /** * Substring string operand. * * @param beginIndex the begin index * @param endIndex the end index * @return the string operand */ public StringOperand substring(NumericOperand beginIndex, NumericOperand endIndex) { return createStringFunction(this, "substring", beginIndex, endIndex); } /** * Upper string operand. * * @return the string operand */ public StringOperand upper() { return createStringFunction(this, "upper"); } /** * Equals predicate. * * @param operand the operand * @return the predicate */ public Predicate equals(StringOperand operand) { return createPredicate(MathOperators.Relational.Equal, operand); } /** * Equals ignore case predicate. * * @param operand the operand * @return the predicate */ public Predicate equalsIgnoreCase(StringOperand operand) { return new PredicateRelational(this.lower(), MathOperators.Relational.Equal, operand.lower()); } /** * Not equals predicate. 
* * @param operand the operand * @return the predicate */ public Predicate notEquals(StringOperand operand) { return createPredicate(MathOperators.Relational.NotEqual, operand); } /** * Colognephonetic string operand. * * @return the string operand */ public StringOperand colognephonetic() { return createStringFunction(this, "colognephonetic"); } /** * Levenstein numeric operand. * * @param other the other * @return the numeric operand */ public NumericOperand levenstein(StringOperand other) { return createNumericFunction(this, "colognephonetic", other); } /** * Metaphone string operand. * * @return the string operand */ public StringOperand metaphone() { return createStringFunction(this, "metaphone"); } /** * Soundex string operand. * * @return the string operand */ public StringOperand soundex() { return createStringFunction(this, "soundex"); } /** * To char string operand. * * @return the string operand */ public StringOperand toChar() { return createStringFunction(this, "toChar"); } /** * To float numeric operand. * * @return the numeric operand */ public NumericOperand toFloat() { return createNumericFunction(this, "toFloat"); } /** * To double numeric operand. * * @return the numeric operand */ public NumericOperand toDouble() { return createNumericFunction(this, "toDouble"); } /** * To integer numeric operand. * * @return the numeric operand */ public NumericOperand toInteger() { return createNumericFunction(this, "toInteger"); } /** * To long numeric operand. * * @return the numeric operand */ public NumericOperand toLong() { return createNumericFunction(this, "toLong"); } /** * To short numeric operand. * * @return the numeric operand */ public NumericOperand toShort() { return createNumericFunction(this, "toShort"); } /** * To number numeric operand. * * @return the numeric operand */ public NumericOperand toNumber() { return createNumericFunction(this, "toNumber"); } /** * To unsigned int 16 numeric operand. 
* * @return the numeric operand */ public NumericOperand toUnsignedInt16() { return createNumericFunction(this, "toUnsignedInt16"); } }
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.model.Person;
import org.kie.workbench.common.forms.dynamic.service.shared.impl.MapModelRenderingContext;
import org.kie.workbench.common.forms.fields.shared.fieldTypes.relations.multipleSubform.definition.MultipleSubFormFieldDefinition;
import org.kie.workbench.common.forms.model.FieldDefinition;
import org.kie.workbench.common.forms.model.FormDefinition;
import org.kie.workbench.common.forms.model.JavaFormModel;
import org.kie.workbench.common.forms.model.TypeKind;
import org.kie.workbench.common.forms.model.impl.PortableJavaModel;
import org.kie.workbench.common.forms.model.impl.TypeInfoImpl;
import org.mockito.junit.MockitoJUnitRunner;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Tests reading, editing, removing and creating nested models (a list of Person)
 * through the backend form rendering context manager using a multiple-subform field.
 */
@RunWith(MockitoJUnitRunner.class)
public class MultipleSubformBackendFormRenderingContextManagerTest extends AbstractBackendFormRenderingContextManagerTest {

    protected SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy");

    // Fixture built by generateFormData(); index-aligned with the maps under "persons".
    protected List<Person> persons;

    @Test
    public void testReadNestedData() {
        doReadNestedData();
    }

    /**
     * Asserts that the rendering context exposes the fixture as a list of maps under
     * the "persons" key, with every Person property copied, and returns the model map.
     */
    protected Map<String, Object> doReadNestedData() {
        Map<String, Object> result = context.getRenderingContext().getModel();

        assertFalse("There should be some validations for model",
                    context.getRenderingContext().getModelConstraints().isEmpty());

        assertNotNull("Result cannot be null ",
                      result);
        assertTrue("Result must contain only one entry",
                   result.size() == 1);

        assertTrue("Processed map must contain value for field 'persons'",
                   result.containsKey("persons"));
        assertNotNull("Processed map must contain value for field 'persons'",
                      result.get("persons"));

        List<Map<String, Object>> personMaps = (List<Map<String, Object>>) result.get("persons");

        assertEquals("There must be 4 persons",
                     4,
                     personMaps.size());

        for (int i = 0; i < personMaps.size(); i++) {
            Person person = persons.get(i);
            Map<String, Object> personMap = personMaps.get(i);

            assertEquals("Id must be equal",
                         person.getId(),
                         personMap.get("id"));
            assertEquals("Name must be equal",
                         person.getName(),
                         personMap.get("name"));
            assertEquals("LastName must be equal",
                         person.getLastName(),
                         personMap.get("lastName"));
            assertEquals("Birthday must be equal",
                         person.getBirthday(),
                         personMap.get("birthday"));
        }

        return result;
    }

    @Test
    public void testEditExistingObjects() {
        Map<String, Object> formValues = doReadNestedData();

        List<Map<String, Object>> personMaps = (List<Map<String, Object>>) formValues.get("persons");

        String[] names = new String[]{"Tyrion", "Jaime", "Cersei", "Tywin"};

        for (int i = 0; i < personMaps.size(); i++) {
            Map<String, Object> person = personMaps.get(i);
            person.put("name",
                       names[i]);
            person.put("lastName",
                       "Lannister");
            // Mark the map as edited so the context manager writes it back to the bean.
            person.put(MapModelRenderingContext.FORM_ENGINE_EDITED_OBJECT,
                       Boolean.TRUE);
        }

        Map<String, Object> result = contextManager.updateContextData(context.getTimestamp(),
                                                                      formValues).getFormData();

        assertNotNull("Result cannot be null ",
                      result);
        assertTrue("Result must contain only one entry",
                   result.size() == 1);

        assertTrue("Processed map must contain value for field 'person'",
                   result.containsKey("persons"));
        assertNotNull("Processed map must contain value for field 'person'",
                      result.get("persons"));

        assertTrue("Persons must be a List",
                   result.get("persons") instanceof List);

        List value = (List) result.get("persons");

        assertEquals("There should be 4 persons",
                     4,
                     value.size());

        for (int i = 0; i < persons.size(); i++) {
            assertEquals("Name must be equal",
                         names[i],
                         persons.get(i).getName());
            assertEquals("LastName must be equal",
                         "Lannister",
                         persons.get(i).getLastName());
        }
    }

    @Test
    public void testRemovingExistingInstances() {
        Map<String, Object> formValues = doReadNestedData();

        List<Map<String, Object>> personMaps = (List<Map<String, Object>>) formValues.get("persons");

        // Drop the first two entries (Ned and Catelyn); Rob and John remain.
        personMaps.remove(0);
        personMaps.remove(0);

        Map<String, Object> result = contextManager.updateContextData(context.getTimestamp(),
                                                                      formValues).getFormData();

        assertNotNull("Result cannot be null ",
                      result);
        assertTrue("Result must contain only one entry",
                   result.size() == 1);

        assertTrue("Processed map must contain value for field 'person'",
                   result.containsKey("persons"));
        assertNotNull("Processed map must contain value for field 'person'",
                      result.get("persons"));

        assertTrue("Persons must be a List",
                   result.get("persons") instanceof List);

        List<Person> value = (List) result.get("persons");

        assertEquals("There should be 2 persons",
                     2,
                     value.size());

        String[] names = new String[]{"Rob", "John"};

        for (int i = 0; i < value.size(); i++) {
            assertEquals("Name must be equal",
                         names[i],
                         value.get(i).getName());
        }
    }

    @Test
    public void testWriteNestedModelWithoutModelContentMarshaller() {
        testCreateNestedModels(true);
    }

    @Test
    public void testCreateInstancesWithClassOnClassPath() {
        testCreateNestedModels(false);
    }

    /**
     * Adds two new person maps and verifies they are materialized as Person instances.
     *
     * @param classOnContentMarshaller whether Person is made available to the content
     *                                 marshaller class loader
     */
    protected void testCreateNestedModels(boolean classOnContentMarshaller) {
        try {
            initContentMarshallerClassLoader(Person.class,
                                             classOnContentMarshaller);

            Map<String, Object> formValues = doReadNestedData();

            List<Map<String, Object>> personMaps = (List<Map<String, Object>>) formValues.get("persons");

            Map<String, Object> bran = new HashMap<>();
            bran.put("id",
                     4);
            bran.put("name",
                     "Bran");
            bran.put("lastName",
                     "Stark");
            bran.put("birthday",
                     sdf.parse("14-01-2000"));

            Map<String, Object> sansa = new HashMap<>();
            sansa.put("id",
                      5);
            sansa.put("name",
                      "Sansa");
            sansa.put("lastName",
                      "Stark");
            sansa.put("birthday",
                      sdf.parse("14-11-2005"));

            personMaps.add(bran);
            personMaps.add(sansa);

            Map<String, Object> result = contextManager.updateContextData(context.getTimestamp(),
                                                                          formValues).getFormData();

            assertNotNull("Result cannot be null ",
                          result);
            assertTrue("Result must contain only one entry",
                       result.size() == 1);

            assertTrue("Processed map must contain value for field 'person'",
                       result.containsKey("persons"));
            assertNotNull("Processed map must contain value for field 'person'",
                          result.get("persons"));

            assertTrue("Persons must be a List",
                       result.get("persons") instanceof List);

            List<Person> value = (List) result.get("persons");

            assertEquals("There should be 6 persons",
                         6,
                         value.size());

            String[] names = new String[]{"Ned", "Catelyn", "Rob", "John", "Bran", "Sansa"};

            for (int i = 0; i < value.size(); i++) {
                assertEquals("Name must be equal",
                             names[i],
                             value.get(i).getName());
            }
        } catch (Exception e) {
            // BUGFIX: previously only e.printStackTrace(), which let the test pass
            // vacuously when setup or the context update threw. Rethrow unchecked
            // so a failure here fails the test.
            throw new IllegalStateException("testCreateNestedModels failed",
                                            e);
        }
    }

    @Override
    protected FormDefinition[] getNestedForms() {
        JavaFormModel model = new PortableJavaModel(Person.class.getName());

        // Creation form: id, name, lastName, birthday.
        FormDefinition creationForm = new FormDefinition(model);
        creationForm.setId("person-creation");

        FieldDefinition field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(Long.class.getName()));
        field.setName("id");
        field.setBinding("id");
        creationForm.getFields().add(field);

        field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(String.class.getName()));
        field.setName("name");
        field.setBinding("name");
        creationForm.getFields().add(field);

        field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(String.class.getName()));
        field.setName("lastName");
        field.setBinding("lastName");
        creationForm.getFields().add(field);

        field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(Date.class.getName()));
        field.setName("birthday");
        field.setBinding("birthday");
        creationForm.getFields().add(field);

        // Edition form: same four fields, separate definition.
        FormDefinition editionForm = new FormDefinition(model);
        editionForm.setId("person-edition");

        field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(Long.class.getName()));
        field.setName("id");
        field.setBinding("id");
        editionForm.getFields().add(field);

        field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(String.class.getName()));
        field.setName("name");
        field.setBinding("name");
        editionForm.getFields().add(field);

        field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(String.class.getName()));
        field.setName("lastName");
        field.setBinding("lastName");
        editionForm.getFields().add(field);

        field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(Date.class.getName()));
        field.setName("birthday");
        field.setBinding("birthday");
        editionForm.getFields().add(field);

        return new FormDefinition[]{creationForm, editionForm};
    }

    @Override
    protected FormDefinition getRootForm() {
        FormDefinition form = new FormDefinition(new PortableJavaModel(Person.class.getName()));
        form.setId("form");

        FieldDefinition field = fieldManager.getDefinitionByDataType(new TypeInfoImpl(TypeKind.OBJECT,
                                                                                      Person.class.getName(),
                                                                                      true));
        field.setName("persons");
        field.setBinding("persons");

        MultipleSubFormFieldDefinition multpleSubForm = (MultipleSubFormFieldDefinition) field;
        multpleSubForm.setCreationForm("person-creation");
        multpleSubForm.setEditionForm("person-edition");

        form.getFields().add(field);

        return form;
    }

    @Override
    protected Map<String, Object> generateFormData() {
        persons = new ArrayList<>();
        try {
            persons.add(new Person(0,
                                   "Ned",
                                   "Stark",
                                   sdf.parse("24-02-1981")));
            persons.add(new Person(1,
                                   "Catelyn",
                                   "Stark",
                                   sdf.parse("04-05-1983")));
            persons.add(new Person(2,
                                   "Rob",
                                   "Stark",
                                   sdf.parse("12-04-2013")));
            persons.add(new Person(3,
                                   "John",
                                   "Snow",
                                   sdf.parse("21-05-2015")));
        } catch (ParseException e) {
            // BUGFIX: previously swallowed, leaving the fixture list short and every
            // downstream assertion confusingly broken. The dates are constants, so a
            // parse failure means a broken fixture — fail loudly.
            throw new IllegalStateException("Invalid fixture date",
                                            e);
        }

        Map<String, Object> data = new HashMap<>();
        data.put("persons",
                 persons);
        return data;
    }
}
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package software.amazon.awssdk.core.internal.http.pipeline.stages; import static software.amazon.awssdk.core.interceptor.SdkInternalExecutionAttribute.SDK_HTTP_EXECUTION_ATTRIBUTES; import static software.amazon.awssdk.core.internal.http.timers.TimerUtils.resolveTimeoutInMillis; import static software.amazon.awssdk.http.Header.CONTENT_LENGTH; import java.nio.ByteBuffer; import java.time.Duration; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; import java.util.function.Supplier; import org.reactivestreams.Subscriber; import software.amazon.awssdk.annotations.SdkInternalApi; import software.amazon.awssdk.core.Response; import software.amazon.awssdk.core.async.AsyncRequestBody; import software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption; import software.amazon.awssdk.core.client.config.SdkClientOption; import software.amazon.awssdk.core.exception.ApiCallAttemptTimeoutException; import software.amazon.awssdk.core.exception.SdkClientException; import software.amazon.awssdk.core.interceptor.ExecutionAttributes; import software.amazon.awssdk.core.interceptor.SdkInternalExecutionAttribute; import software.amazon.awssdk.core.internal.http.HttpClientDependencies; import software.amazon.awssdk.core.internal.http.RequestExecutionContext; import 
software.amazon.awssdk.core.internal.http.TransformingAsyncResponseHandler; import software.amazon.awssdk.core.internal.http.async.SimpleHttpContentPublisher; import software.amazon.awssdk.core.internal.http.pipeline.RequestPipeline; import software.amazon.awssdk.core.internal.http.timers.TimeoutTracker; import software.amazon.awssdk.core.internal.http.timers.TimerUtils; import software.amazon.awssdk.core.internal.util.MetricUtils; import software.amazon.awssdk.core.metrics.CoreMetric; import software.amazon.awssdk.http.SdkHttpFullRequest; import software.amazon.awssdk.http.SdkHttpMethod; import software.amazon.awssdk.http.async.AsyncExecuteRequest; import software.amazon.awssdk.http.async.SdkAsyncHttpClient; import software.amazon.awssdk.http.async.SdkHttpContentPublisher; import software.amazon.awssdk.metrics.MetricCollector; import software.amazon.awssdk.utils.CompletableFutureUtils; import software.amazon.awssdk.utils.Logger; /** * Delegate to the HTTP implementation to make an HTTP request and receive the response. 
 */
@SdkInternalApi
public final class MakeAsyncHttpRequestStage<OutputT>
    implements RequestPipeline<CompletableFuture<SdkHttpFullRequest>, CompletableFuture<Response<OutputT>>> {

    // NOTE(review): not referenced in this class's visible code; presumably kept for ad-hoc debugging.
    private static final Logger log = Logger.loggerFor(MakeAsyncHttpRequestStage.class);

    // The non-blocking HTTP client that actually performs the request (SdkClientOption.ASYNC_HTTP_CLIENT).
    private final SdkAsyncHttpClient sdkAsyncHttpClient;

    // Transforms the raw HTTP response into a Response<OutputT>; also supplies the per-attempt result future.
    private final TransformingAsyncResponseHandler<Response<OutputT>> responseHandler;

    // Executor used to complete the stage's returned future off the HTTP client's event-loop threads.
    private final Executor futureCompletionExecutor;

    // Scheduler used to arm the per-attempt timeout.
    private final ScheduledExecutorService timeoutExecutor;

    // Client-level default attempt timeout; may be overridden per request (see setupAttemptTimer).
    private final Duration apiCallAttemptTimeout;

    /**
     * Creates the stage, pulling the HTTP client, timeout, and executors from the
     * client configuration carried by {@code dependencies}.
     *
     * @param responseHandler handler that converts the wire response into {@code Response<OutputT>}
     * @param dependencies    shared client dependencies providing the configuration options
     */
    public MakeAsyncHttpRequestStage(TransformingAsyncResponseHandler<Response<OutputT>> responseHandler,
                                     HttpClientDependencies dependencies) {
        this.responseHandler = responseHandler;
        this.futureCompletionExecutor =
            dependencies.clientConfiguration().option(SdkAdvancedAsyncClientOption.FUTURE_COMPLETION_EXECUTOR);
        this.sdkAsyncHttpClient = dependencies.clientConfiguration().option(SdkClientOption.ASYNC_HTTP_CLIENT);
        this.apiCallAttemptTimeout = dependencies.clientConfiguration().option(SdkClientOption.API_CALL_ATTEMPT_TIMEOUT);
        this.timeoutExecutor = dependencies.clientConfiguration().option(SdkClientOption.SCHEDULED_EXECUTOR_SERVICE);
    }

    /**
     * Executes the HTTP request once the upstream request future resolves, wiring
     * cancellation/exception propagation in both directions between the incoming
     * request future, the returned future, and the in-flight execution future.
     *
     * @param requestFuture future producing the fully-marshalled HTTP request
     * @param context       per-request execution state
     * @return a future completed with the service response, or exceptionally on failure
     */
    @Override
    public CompletableFuture<Response<OutputT>> execute(CompletableFuture<SdkHttpFullRequest> requestFuture,
                                                        RequestExecutionContext context) {
        CompletableFuture<Response<OutputT>> toReturn = new CompletableFuture<>();

        // Setup the cancellations. If the caller fails to provide a request, forward the exception to the future we
        // return
        CompletableFutureUtils.forwardExceptionTo(requestFuture, toReturn);

        // On the other hand, if the future we return is completed exceptionally, throw the exception back up to the
        // request future
        CompletableFutureUtils.forwardExceptionTo(toReturn, requestFuture);

        requestFuture.thenAccept(request -> {
            // At this point, we have a request that we're ready to execute; we do everything in a try-catch in case the
            // method call to executeHttpRequest throws directly
            try {
                CompletableFuture<Response<OutputT>> executeFuture = executeHttpRequest(request, context);

                executeFuture.whenComplete((r, t) -> {
                    if (t != null) {
                        toReturn.completeExceptionally(t);
                    } else {
                        toReturn.complete(r);
                    }
                });

                // Similar to cancelling the request future, but we've now started the request execution, so if our
                // returned future gets an exception, forward to the HTTP execution future
                CompletableFutureUtils.forwardExceptionTo(toReturn, executeFuture);
            } catch (Throwable t) {
                toReturn.completeExceptionally(t);
            }
        });

        return toReturn;
    }

    /**
     * Builds the {@link AsyncExecuteRequest} (content publisher, content-length header,
     * full-duplex flag, metric collector, optional HTTP execution attributes), submits it
     * to the HTTP client, arms the attempt timeout, and bridges the response handler's
     * future onto the returned future via the future-completion executor.
     */
    private CompletableFuture<Response<OutputT>> executeHttpRequest(SdkHttpFullRequest request,
                                                                    RequestExecutionContext context) {
        CompletableFuture<Response<OutputT>> responseFuture = new CompletableFuture<>();

        // prepare() must be called before execution so the handler can hand us its result future.
        CompletableFuture<Response<OutputT>> responseHandlerFuture = responseHandler.prepare();

        // Streaming operations supply their own request body publisher; otherwise publish the marshalled content.
        SdkHttpContentPublisher requestProvider = context.requestProvider() == null
                                                  ? new SimpleHttpContentPublisher(request)
                                                  : new SdkHttpContentPublisherAdapter(context.requestProvider());

        // Set content length if it hasn't been set already.
        SdkHttpFullRequest requestWithContentLength = getRequestWithContentLength(request, requestProvider);

        MetricCollector httpMetricCollector = MetricUtils.createHttpMetricsCollector(context);

        AsyncExecuteRequest.Builder executeRequestBuilder = AsyncExecuteRequest.builder()
            .request(requestWithContentLength)
            .requestContentPublisher(requestProvider)
            .responseHandler(responseHandler)
            .fullDuplex(isFullDuplex(context.executionAttributes()))
            .metricCollector(httpMetricCollector);

        // Pass through any caller-supplied HTTP-level execution attributes.
        if (context.executionAttributes().getAttribute(SDK_HTTP_EXECUTION_ATTRIBUTES) != null) {
            executeRequestBuilder.httpExecutionAttributes(
                context.executionAttributes()
                       .getAttribute(SDK_HTTP_EXECUTION_ATTRIBUTES));
        }

        CompletableFuture<Void> httpClientFuture = doExecuteHttpRequest(context, executeRequestBuilder.build());

        TimeoutTracker timeoutTracker = setupAttemptTimer(responseFuture, context);
        context.apiCallAttemptTimeoutTracker(timeoutTracker);

        // Forward the cancellation
        responseFuture.whenComplete((r, t) -> {
            if (t != null) {
                httpClientFuture.completeExceptionally(t);
            }
        });

        // Offload the completion of the future returned from this stage onto
        // the future completion executor
        responseHandlerFuture.whenCompleteAsync((r, t) -> {
            if (t == null) {
                responseFuture.complete(r);
            } else {
                responseFuture.completeExceptionally(t);
            }
        }, futureCompletionExecutor);

        return responseFuture;
    }

    /**
     * Submits the request to the HTTP client and records the wall-clock duration of the
     * call as the SERVICE_CALL_DURATION metric on the attempt's collector.
     */
    private CompletableFuture<Void> doExecuteHttpRequest(RequestExecutionContext context,
                                                         AsyncExecuteRequest executeRequest) {
        MetricCollector metricCollector = context.attemptMetricCollector();
        long callStart = System.nanoTime();
        CompletableFuture<Void> httpClientFuture = sdkAsyncHttpClient.execute(executeRequest);

        CompletableFuture<Void> result = httpClientFuture.whenComplete((r, t) -> {
            long duration = System.nanoTime() - callStart;
            metricCollector.reportMetric(CoreMetric.SERVICE_CALL_DURATION, Duration.ofNanos(duration));
        });

        // Make sure failures on the result future are forwarded to the http client future.
        CompletableFutureUtils.forwardExceptionTo(result, httpClientFuture);
        return result;
    }

    /** Whether the IS_FULL_DUPLEX attribute is present and set to true. */
    private boolean isFullDuplex(ExecutionAttributes executionAttributes) {
        return executionAttributes.getAttribute(SdkInternalExecutionAttribute.IS_FULL_DUPLEX) != null
               && executionAttributes.getAttribute(SdkInternalExecutionAttribute.IS_FULL_DUPLEX);
    }

    /**
     * Returns a copy of the request with Content-Length set from the publisher, when
     * {@link #shouldSetContentLength} says it is both needed and available; otherwise
     * returns the request unchanged.
     */
    private SdkHttpFullRequest getRequestWithContentLength(SdkHttpFullRequest request, SdkHttpContentPublisher requestProvider) {
        if (shouldSetContentLength(request, requestProvider)) {
            // .get() is safe here: shouldSetContentLength verified contentLength() is present.
            return request.toBuilder()
                          .putHeader(CONTENT_LENGTH, String.valueOf(requestProvider.contentLength().get()))
                          .build();
        }
        return request;
    }

    /**
     * True only when the method can carry a body (not GET/HEAD), no Content-Length
     * header is already present, and the publisher actually knows its content length.
     */
    private boolean shouldSetContentLength(SdkHttpFullRequest request, SdkHttpContentPublisher requestProvider) {
        if (request.method() == SdkHttpMethod.GET || request.method() == SdkHttpMethod.HEAD
            || request.firstMatchingHeader(CONTENT_LENGTH).isPresent()) {
            return false;
        }

        return Optional.ofNullable(requestProvider).flatMap(SdkHttpContentPublisher::contentLength).isPresent();
    }

    /**
     * Arms the per-attempt timeout (request-level override wins over the client default)
     * so {@code executeFuture} is failed with {@link ApiCallAttemptTimeoutException} if it
     * does not complete in time. TimerUtils skips scheduling when the timeout is disabled.
     */
    private TimeoutTracker setupAttemptTimer(CompletableFuture<Response<OutputT>> executeFuture, RequestExecutionContext ctx) {
        long timeoutMillis = resolveTimeoutInMillis(ctx.requestConfig()::apiCallAttemptTimeout, apiCallAttemptTimeout);
        Supplier<SdkClientException> exceptionSupplier = () -> ApiCallAttemptTimeoutException.create(timeoutMillis);
        return TimerUtils.timeAsyncTaskIfNeeded(executeFuture, timeoutExecutor, exceptionSupplier, timeoutMillis);
    }

    /**
     * When an operation has a streaming input, the customer must supply an {@link AsyncRequestBody} to
     * provide the request content in a non-blocking manner. This adapts that interface to the
     * {@link SdkHttpContentPublisher} which the HTTP client SPI expects.
     */
    private static final class SdkHttpContentPublisherAdapter implements SdkHttpContentPublisher {

        private final AsyncRequestBody asyncRequestBody;

        private SdkHttpContentPublisherAdapter(AsyncRequestBody asyncRequestBody) {
            this.asyncRequestBody = asyncRequestBody;
        }

        @Override
        public Optional<Long> contentLength() {
            return asyncRequestBody.contentLength();
        }

        @Override
        public void subscribe(Subscriber<? super ByteBuffer> s) {
            asyncRequestBody.subscribe(s);
        }

    }
}
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.context.support; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.locks.ReentrantLock; import org.springframework.context.ResourceLoaderAware; import org.springframework.core.io.DefaultResourceLoader; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.core.io.support.ResourcePropertiesPersister; import org.springframework.lang.Nullable; import org.springframework.util.PropertiesPersister; import org.springframework.util.StringUtils; /** * Spring-specific {@link org.springframework.context.MessageSource} implementation * that accesses resource bundles using specified basenames, participating in the * Spring {@link org.springframework.context.ApplicationContext}'s resource loading. 
* * <p>In contrast to the JDK-based {@link ResourceBundleMessageSource}, this class uses * {@link java.util.Properties} instances as its custom data structure for messages, * loading them via a {@link org.springframework.util.PropertiesPersister} strategy * from Spring {@link Resource} handles. This strategy is not only capable of * reloading files based on timestamp changes, but also of loading properties files * with a specific character encoding. It will detect XML property files as well. * * <p>Note that the basenames set as {@link #setBasenames "basenames"} property * are treated in a slightly different fashion than the "basenames" property of * {@link ResourceBundleMessageSource}. It follows the basic ResourceBundle rule of not * specifying file extension or language codes, but can refer to any Spring resource * location (instead of being restricted to classpath resources). With a "classpath:" * prefix, resources can still be loaded from the classpath, but "cacheSeconds" values * other than "-1" (caching forever) might not work reliably in this case. * * <p>For a typical web application, message files could be placed in {@code WEB-INF}: * e.g. a "WEB-INF/messages" basename would find a "WEB-INF/messages.properties", * "WEB-INF/messages_en.properties" etc arrangement as well as "WEB-INF/messages.xml", * "WEB-INF/messages_en.xml" etc. Note that message definitions in a <i>previous</i> * resource bundle will override ones in a later bundle, due to sequential lookup. * <p>This MessageSource can easily be used outside of an * {@link org.springframework.context.ApplicationContext}: it will use a * {@link org.springframework.core.io.DefaultResourceLoader} as default, * simply getting overridden with the ApplicationContext's resource loader * if running in a context. It does not have any other specific dependencies. * * <p>Thanks to Thomas Achleitner for providing the initial implementation of * this message source! 
 * @author Juergen Hoeller
 * @see #setCacheSeconds
 * @see #setBasenames
 * @see #setDefaultEncoding
 * @see #setFileEncodings
 * @see #setPropertiesPersister
 * @see #setResourceLoader
 * @see ResourcePropertiesPersister
 * @see org.springframework.core.io.DefaultResourceLoader
 * @see ResourceBundleMessageSource
 * @see java.util.ResourceBundle
 */
public class ReloadableResourceBundleMessageSource extends AbstractResourceBasedMessageSource
        implements ResourceLoaderAware {

    private static final String PROPERTIES_SUFFIX = ".properties";

    private static final String XML_SUFFIX = ".xml";

    // Optional per-file charset overrides: keys are bundle filenames, values are charset names.
    @Nullable
    private Properties fileEncodings;

    // When true, stale cached properties keep being served while one thread refreshes them.
    private boolean concurrentRefresh = true;

    private PropertiesPersister propertiesPersister = ResourcePropertiesPersister.INSTANCE;

    private ResourceLoader resourceLoader = new DefaultResourceLoader();

    // Cache to hold filename lists per Locale
    private final ConcurrentMap<String, Map<Locale, List<String>>> cachedFilenames = new ConcurrentHashMap<>();

    // Cache to hold already loaded properties per filename
    private final ConcurrentMap<String, PropertiesHolder> cachedProperties = new ConcurrentHashMap<>();

    // Cache to hold merged loaded properties per Locale (only used with cacheMillis < 0, i.e. cache-forever mode)
    private final ConcurrentMap<Locale, PropertiesHolder> cachedMergedProperties = new ConcurrentHashMap<>();


    /**
     * Set per-file charsets to use for parsing properties files.
     * <p>Only applies to classic properties files, not to XML files.
     * @param fileEncodings a Properties with filenames as keys and charset
     * names as values. Filenames have to match the basename syntax,
     * with optional locale-specific components: e.g. "WEB-INF/messages"
     * or "WEB-INF/messages_en".
     * @see #setBasenames
     * @see org.springframework.util.PropertiesPersister#load
     */
    public void setFileEncodings(Properties fileEncodings) {
        this.fileEncodings = fileEncodings;
    }

    /**
     * Specify whether to allow for concurrent refresh behavior, i.e. one thread
     * locked in a refresh attempt for a specific cached properties file whereas
     * other threads keep returning the old properties for the time being, until
     * the refresh attempt has completed.
     * <p>Default is "true": this behavior is new as of Spring Framework 4.1,
     * minimizing contention between threads. If you prefer the old behavior,
     * i.e. to fully block on refresh, switch this flag to "false".
     * @since 4.1
     * @see #setCacheSeconds
     */
    public void setConcurrentRefresh(boolean concurrentRefresh) {
        this.concurrentRefresh = concurrentRefresh;
    }

    /**
     * Set the PropertiesPersister to use for parsing properties files.
     * <p>The default is ResourcePropertiesPersister.
     * @see ResourcePropertiesPersister#INSTANCE
     */
    public void setPropertiesPersister(@Nullable PropertiesPersister propertiesPersister) {
        this.propertiesPersister =
                (propertiesPersister != null ? propertiesPersister : ResourcePropertiesPersister.INSTANCE);
    }

    /**
     * Set the ResourceLoader to use for loading bundle properties files.
     * <p>The default is a DefaultResourceLoader. Will get overridden by the
     * ApplicationContext if running in a context, as it implements the
     * ResourceLoaderAware interface. Can be manually overridden when
     * running outside of an ApplicationContext.
     * @see org.springframework.core.io.DefaultResourceLoader
     * @see org.springframework.context.ResourceLoaderAware
     */
    @Override
    public void setResourceLoader(@Nullable ResourceLoader resourceLoader) {
        this.resourceLoader = (resourceLoader != null ? resourceLoader : new DefaultResourceLoader());
    }


    /**
     * Resolves the given message code as key in the retrieved bundle files,
     * returning the value found in the bundle as-is (without MessageFormat parsing).
     * <p>Returns {@code null} when the code is not found in any bundle.
     */
    @Override
    protected String resolveCodeWithoutArguments(String code, Locale locale) {
        if (getCacheMillis() < 0) {
            // Cache-forever mode: resolve against the single merged properties holder.
            PropertiesHolder propHolder = getMergedProperties(locale);
            String result = propHolder.getProperty(code);
            if (result != null) {
                return result;
            }
        }
        else {
            // Timed-cache mode: walk basenames/filenames sequentially; first match wins.
            for (String basename : getBasenameSet()) {
                List<String> filenames = calculateAllFilenames(basename, locale);
                for (String filename : filenames) {
                    PropertiesHolder propHolder = getProperties(filename);
                    String result = propHolder.getProperty(code);
                    if (result != null) {
                        return result;
                    }
                }
            }
        }
        return null;
    }

    /**
     * Resolves the given message code as key in the retrieved bundle files,
     * using a cached MessageFormat instance per message code.
     */
    @Override
    @Nullable
    protected MessageFormat resolveCode(String code, Locale locale) {
        if (getCacheMillis() < 0) {
            PropertiesHolder propHolder = getMergedProperties(locale);
            MessageFormat result = propHolder.getMessageFormat(code, locale);
            if (result != null) {
                return result;
            }
        }
        else {
            for (String basename : getBasenameSet()) {
                List<String> filenames = calculateAllFilenames(basename, locale);
                for (String filename : filenames) {
                    PropertiesHolder propHolder = getProperties(filename);
                    MessageFormat result = propHolder.getMessageFormat(code, locale);
                    if (result != null) {
                        return result;
                    }
                }
            }
        }
        return null;
    }


    /**
     * Get a PropertiesHolder that contains the actually visible properties
     * for a Locale, after merging all specified resource bundles.
     * Either fetches the holder from the cache or freshly loads it.
     * <p>Only used when caching resource bundle contents forever, i.e.
     * with cacheSeconds &lt; 0. Therefore, merged properties are always
     * cached forever.
     */
    protected PropertiesHolder getMergedProperties(Locale locale) {
        PropertiesHolder mergedHolder = this.cachedMergedProperties.get(locale);
        if (mergedHolder != null) {
            return mergedHolder;
        }
        Properties mergedProps = newProperties();
        long latestTimestamp = -1;
        String[] basenames = StringUtils.toStringArray(getBasenameSet());
        // Iterate in reverse so that earlier basenames/filenames override later ones on merge.
        for (int i = basenames.length - 1; i >= 0; i--) {
            List<String> filenames = calculateAllFilenames(basenames[i], locale);
            for (int j = filenames.size() - 1; j >= 0; j--) {
                String filename = filenames.get(j);
                PropertiesHolder propHolder = getProperties(filename);
                if (propHolder.getProperties() != null) {
                    mergedProps.putAll(propHolder.getProperties());
                    if (propHolder.getFileTimestamp() > latestTimestamp) {
                        latestTimestamp = propHolder.getFileTimestamp();
                    }
                }
            }
        }
        mergedHolder = new PropertiesHolder(mergedProps, latestTimestamp);
        // Another thread may have merged concurrently; keep whichever holder won the race.
        PropertiesHolder existing = this.cachedMergedProperties.putIfAbsent(locale, mergedHolder);
        if (existing != null) {
            mergedHolder = existing;
        }
        return mergedHolder;
    }

    /**
     * Calculate all filenames for the given bundle basename and Locale.
     * Will calculate filenames for the given Locale, the system Locale
     * (if applicable), and the default file.
     * @param basename the basename of the bundle
     * @param locale the locale
     * @return the List of filenames to check
     * @see #setFallbackToSystemLocale
     * @see #calculateFilenamesForLocale
     */
    protected List<String> calculateAllFilenames(String basename, Locale locale) {
        Map<Locale, List<String>> localeMap = this.cachedFilenames.get(basename);
        if (localeMap != null) {
            List<String> filenames = localeMap.get(locale);
            if (filenames != null) {
                return filenames;
            }
        }

        // Filenames for given Locale
        List<String> filenames = new ArrayList<>(7);
        filenames.addAll(calculateFilenamesForLocale(basename, locale));

        // Filenames for default Locale, if any
        Locale defaultLocale = getDefaultLocale();
        if (defaultLocale != null && !defaultLocale.equals(locale)) {
            List<String> fallbackFilenames = calculateFilenamesForLocale(basename, defaultLocale);
            for (String fallbackFilename : fallbackFilenames) {
                if (!filenames.contains(fallbackFilename)) {
                    // Entry for fallback locale that isn't already in filenames list.
                    filenames.add(fallbackFilename);
                }
            }
        }

        // Filename for default bundle file
        filenames.add(basename);

        if (localeMap == null) {
            localeMap = new ConcurrentHashMap<>();
            Map<Locale, List<String>> existing = this.cachedFilenames.putIfAbsent(basename, localeMap);
            if (existing != null) {
                localeMap = existing;
            }
        }
        localeMap.put(locale, filenames);
        return filenames;
    }

    /**
     * Calculate the filenames for the given bundle basename and Locale,
     * appending language code, country code, and variant code.
     * <p>For example, basename "messages", Locale "de_AT_oo" &rarr; "messages_de_AT_OO",
     * "messages_de_AT", "messages_de".
     * <p>Follows the rules defined by {@link java.util.Locale#toString()}.
     * @param basename the basename of the bundle
     * @param locale the locale
     * @return the List of filenames to check
     */
    protected List<String> calculateFilenamesForLocale(String basename, Locale locale) {
        List<String> result = new ArrayList<>(3);
        String language = locale.getLanguage();
        String country = locale.getCountry();
        String variant = locale.getVariant();
        StringBuilder temp = new StringBuilder(basename);

        // Most-specific filename is inserted at index 0, so result ends up ordered
        // from most specific (language_country_variant) to least specific (language).
        temp.append('_');
        if (language.length() > 0) {
            temp.append(language);
            result.add(0, temp.toString());
        }

        temp.append('_');
        if (country.length() > 0) {
            temp.append(country);
            result.add(0, temp.toString());
        }

        if (variant.length() > 0 && (language.length() > 0 || country.length() > 0)) {
            temp.append('_').append(variant);
            result.add(0, temp.toString());
        }

        return result;
    }


    /**
     * Get a PropertiesHolder for the given filename, either from the
     * cache or freshly loaded.
     * @param filename the bundle filename (basename + Locale)
     * @return the current PropertiesHolder for the bundle
     */
    protected PropertiesHolder getProperties(String filename) {
        PropertiesHolder propHolder = this.cachedProperties.get(filename);
        // -2 is the sentinel for "never refreshed"; -1 means "cached forever".
        long originalTimestamp = -2;

        if (propHolder != null) {
            originalTimestamp = propHolder.getRefreshTimestamp();
            if (originalTimestamp == -1 || originalTimestamp > System.currentTimeMillis() - getCacheMillis()) {
                // Up to date
                return propHolder;
            }
        }
        else {
            propHolder = new PropertiesHolder();
            PropertiesHolder existingHolder = this.cachedProperties.putIfAbsent(filename, propHolder);
            if (existingHolder != null) {
                propHolder = existingHolder;
            }
        }

        // At this point, we need to refresh...
        if (this.concurrentRefresh && propHolder.getRefreshTimestamp() >= 0) {
            // A populated but stale holder -> could keep using it.
            if (!propHolder.refreshLock.tryLock()) {
                // Getting refreshed by another thread already ->
                // let's return the existing properties for the time being.
                return propHolder;
            }
        }
        else {
            propHolder.refreshLock.lock();
        }
        try {
            PropertiesHolder existingHolder = this.cachedProperties.get(filename);
            // Re-check under the lock: another thread may have refreshed while we waited.
            if (existingHolder != null && existingHolder.getRefreshTimestamp() > originalTimestamp) {
                return existingHolder;
            }
            return refreshProperties(filename, propHolder);
        }
        finally {
            propHolder.refreshLock.unlock();
        }
    }

    /**
     * Refresh the PropertiesHolder for the given bundle filename.
     * The holder can be {@code null} if not cached before, or a timed-out cache entry
     * (potentially getting re-validated against the current last-modified timestamp).
     * @param filename the bundle filename (basename + Locale)
     * @param propHolder the current PropertiesHolder for the bundle
     */
    protected PropertiesHolder refreshProperties(String filename, @Nullable PropertiesHolder propHolder) {
        long refreshTimestamp = (getCacheMillis() < 0 ? -1 : System.currentTimeMillis());

        // Try the plain properties file first, then fall back to the XML variant.
        Resource resource = this.resourceLoader.getResource(filename + PROPERTIES_SUFFIX);
        if (!resource.exists()) {
            resource = this.resourceLoader.getResource(filename + XML_SUFFIX);
        }

        if (resource.exists()) {
            long fileTimestamp = -1;
            if (getCacheMillis() >= 0) {
                // Last-modified timestamp of file will just be read if caching with timeout.
                try {
                    fileTimestamp = resource.lastModified();
                    if (propHolder != null && propHolder.getFileTimestamp() == fileTimestamp) {
                        if (logger.isDebugEnabled()) {
                            logger.debug("Re-caching properties for filename [" + filename + "] - file hasn't been modified");
                        }
                        propHolder.setRefreshTimestamp(refreshTimestamp);
                        return propHolder;
                    }
                }
                catch (IOException ex) {
                    // Probably a class path resource: cache it forever.
                    if (logger.isDebugEnabled()) {
                        logger.debug(resource + " could not be resolved in the file system - assuming that it hasn't changed", ex);
                    }
                    fileTimestamp = -1;
                }
            }
            try {
                Properties props = loadProperties(resource, filename);
                propHolder = new PropertiesHolder(props, fileTimestamp);
            }
            catch (IOException ex) {
                if (logger.isWarnEnabled()) {
                    logger.warn("Could not parse properties file [" + resource.getFilename() + "]", ex);
                }
                // Empty holder representing "not valid".
                propHolder = new PropertiesHolder();
            }
        }

        else {
            // Resource does not exist.
            if (logger.isDebugEnabled()) {
                logger.debug("No properties file found for [" + filename + "] - neither plain properties nor XML");
            }
            // Empty holder representing "not found".
            propHolder = new PropertiesHolder();
        }

        propHolder.setRefreshTimestamp(refreshTimestamp);
        this.cachedProperties.put(filename, propHolder);
        return propHolder;
    }

    /**
     * Load the properties from the given resource.
     * @param resource the resource to load from
     * @param filename the original bundle filename (basename + Locale)
     * @return the populated Properties instance
     * @throws IOException if properties loading failed
     */
    protected Properties loadProperties(Resource resource, String filename) throws IOException {
        Properties props = newProperties();
        try (InputStream is = resource.getInputStream()) {
            String resourceFilename = resource.getFilename();
            if (resourceFilename != null && resourceFilename.endsWith(XML_SUFFIX)) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Loading properties [" + resource.getFilename() + "]");
                }
                this.propertiesPersister.loadFromXml(props, is);
            }
            else {
                // Charset precedence: per-file override, then the configured default, then platform behavior.
                String encoding = null;
                if (this.fileEncodings != null) {
                    encoding = this.fileEncodings.getProperty(filename);
                }
                if (encoding == null) {
                    encoding = getDefaultEncoding();
                }
                if (encoding != null) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Loading properties [" + resource.getFilename() + "] with encoding '" + encoding + "'");
                    }
                    this.propertiesPersister.load(props, new InputStreamReader(is, encoding));
                }
                else {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Loading properties [" + resource.getFilename() + "]");
                    }
                    this.propertiesPersister.load(props, is);
                }
            }
            return props;
        }
    }

    /**
     * Template method for creating a plain new {@link Properties} instance.
     * The default implementation simply calls {@link Properties#Properties()}.
     * <p>Allows for returning a custom {@link Properties} extension in subclasses.
     * Overriding methods should just instantiate a custom {@link Properties} subclass,
     * with no further initialization or population to be performed at that point.
     * @return a plain Properties instance
     * @since 4.2
     */
    protected Properties newProperties() {
        return new Properties();
    }


    /**
     * Clear the resource bundle cache.
     * Subsequent resolve calls will lead to reloading of the properties files.
     */
    public void clearCache() {
        logger.debug("Clearing entire resource bundle cache");
        this.cachedProperties.clear();
        this.cachedMergedProperties.clear();
    }

    /**
     * Clear the resource bundle caches of this MessageSource and all its ancestors.
     * @see #clearCache
     */
    public void clearCacheIncludingAncestors() {
        clearCache();
        if (getParentMessageSource() instanceof ReloadableResourceBundleMessageSource) {
            ((ReloadableResourceBundleMessageSource) getParentMessageSource()).clearCacheIncludingAncestors();
        }
    }


    @Override
    public String toString() {
        return getClass().getName() + ": basenames=" + getBasenameSet();
    }


    /**
     * PropertiesHolder for caching.
     * Stores the last-modified timestamp of the source file for efficient
     * change detection, and the timestamp of the last refresh attempt
     * (updated every time the cache entry gets re-validated).
     */
    protected class PropertiesHolder {

        @Nullable
        private final Properties properties;

        private final long fileTimestamp;

        // -2 = never refreshed, -1 = cached forever, otherwise the last refresh wall-clock time.
        private volatile long refreshTimestamp = -2;

        private final ReentrantLock refreshLock = new ReentrantLock();

        /** Cache to hold already generated MessageFormats per message code. */
        private final ConcurrentMap<String, Map<Locale, MessageFormat>> cachedMessageFormats =
                new ConcurrentHashMap<>();

        public PropertiesHolder() {
            this.properties = null;
            this.fileTimestamp = -1;
        }

        public PropertiesHolder(Properties properties, long fileTimestamp) {
            this.properties = properties;
            this.fileTimestamp = fileTimestamp;
        }

        @Nullable
        public Properties getProperties() {
            return this.properties;
        }

        public long getFileTimestamp() {
            return this.fileTimestamp;
        }

        public void setRefreshTimestamp(long refreshTimestamp) {
            this.refreshTimestamp = refreshTimestamp;
        }

        public long getRefreshTimestamp() {
            return this.refreshTimestamp;
        }

        @Nullable
        public String getProperty(String code) {
            if (this.properties == null) {
                return null;
            }
            return this.properties.getProperty(code);
        }

        @Nullable
        public MessageFormat getMessageFormat(String code, Locale locale) {
            if (this.properties == null) {
                return null;
            }
            Map<Locale, MessageFormat> localeMap = this.cachedMessageFormats.get(code);
            if (localeMap != null) {
                MessageFormat result = localeMap.get(locale);
                if (result != null) {
                    return result;
                }
            }
            String msg = this.properties.getProperty(code);
            if (msg != null) {
                if (localeMap == null) {
                    localeMap = new ConcurrentHashMap<>();
                    Map<Locale, MessageFormat> existing = this.cachedMessageFormats.putIfAbsent(code, localeMap);
                    if (existing != null) {
                        localeMap = existing;
                    }
                }
                MessageFormat result = createMessageFormat(msg, locale);
                localeMap.put(locale, result);
                return result;
            }
            return null;
        }
    }

}
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.core.position.impl; import java.math.BigDecimal; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.google.common.collect.Maps; import com.opengamma.DataNotFoundException; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.core.change.ChangeManager; import com.opengamma.core.change.DummyChangeManager; import com.opengamma.core.position.Portfolio; import com.opengamma.core.position.PortfolioNode; import com.opengamma.core.position.Position; import com.opengamma.core.position.PositionSource; import com.opengamma.core.position.Trade; import com.opengamma.core.security.impl.NonVersionedRedisSecuritySource; import com.opengamma.core.security.impl.SimpleSecurityLink; import com.opengamma.id.ExternalId; import com.opengamma.id.ExternalIdBundle; import com.opengamma.id.ObjectId; import com.opengamma.id.UniqueId; import com.opengamma.id.VersionCorrection; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.GUIDGenerator; import com.opengamma.util.metric.OpenGammaMetricRegistry; /* * REDIS DATA STRUCTURES: * Portfolio Names: * Key["PORTFOLIOS"] -> Hash * Hash[Name] -> UniqueId for the portfolio * Portfolio Unique ID Lookups: * Key["NAME-"Name] -> Hash * Hash[UNIQUE_ID] -> UniqueId for the portfolio * Portfolio objects themselves: * Key["PRT-"UniqueId] -> Hash * Hash[NAME] -> Name * HASH["ATT-"AttributeName] -> Attribute Value * Portfolio contents: * Key["PRTPOS-"UniqueId] -> Set * Each item in the list is a UniqueId for a position * Positions: * Key["POS-"UniqueId] -> 
Hash * Hash[QTY] -> Quantity * Hash[SEC] -> ExternalId for the security * Hash["ATT-"AttributeName] -> Attribute Value * Position contents: * Key["POSTRADES-"UniqueId] -> Set * Each item in the list is a UniqueId for a trade * Trades: * Key["TRADE-"UniqueId] -> Hash * Hash[QTY] -> Quantity * Hash[SEC] -> ExternalId for the security * Hash["ATT-"AttributeName] -> Attribute Value */ /** * A lightweight {@link PositionSource} that cannot handle any versioning, and * which stores all positions and portfolios as Redis-native data structures * (rather than Fudge encoding). */ public class NonVersionedRedisPositionSource implements PositionSource { private static final Logger s_logger = LoggerFactory.getLogger(NonVersionedRedisSecuritySource.class); /** * The default scheme for unique identifiers. */ public static final String IDENTIFIER_SCHEME_DEFAULT = "RedisPos"; /** * The default scheme for trade unique identifiers. */ public static final String TRADE_IDENTIFIER_SCHEME_DEFAULT = "RedisTrade"; private static final String PORTFOLIOS_HASH_KEY_NAME = "PORTFOLIOS"; private final JedisPool _jedisPool; private final String _redisPrefix; private final String _portfoliosHashKeyName; private Timer _getPortfolioTimer = new Timer(); private Timer _getPositionTimer = new Timer(); private Timer _portfolioStoreTimer = new Timer(); private Timer _positionStoreTimer = new Timer(); private Timer _positionSetTimer = new Timer(); private Timer _positionAddTimer = new Timer(); public NonVersionedRedisPositionSource(JedisPool jedisPool) { this(jedisPool, ""); } public NonVersionedRedisPositionSource(JedisPool jedisPool, String redisPrefix) { ArgumentChecker.notNull(jedisPool, "jedisPool"); ArgumentChecker.notNull(redisPrefix, "redisPrefix"); _jedisPool = jedisPool; _redisPrefix = redisPrefix.intern(); _portfoliosHashKeyName = constructallPortfoliosRedisKey(); registerMetrics(OpenGammaMetricRegistry.getSummaryInstance(), OpenGammaMetricRegistry.getDetailedInstance(), 
"NonVersionedRedisPositionSource"); } /** * Gets the jedisPool. * @return the jedisPool */ public JedisPool getJedisPool() { return _jedisPool; } /** * Gets the redisPrefix. * @return the redisPrefix */ public String getRedisPrefix() { return _redisPrefix; } public void registerMetrics(MetricRegistry summaryRegistry, MetricRegistry detailRegistry, String namePrefix) { _getPortfolioTimer = summaryRegistry.timer(namePrefix + ".getPortfolio"); _getPositionTimer = summaryRegistry.timer(namePrefix + ".getPosition"); _portfolioStoreTimer = summaryRegistry.timer(namePrefix + ".portfolioStore"); _positionStoreTimer = summaryRegistry.timer(namePrefix + ".positionStore"); _positionSetTimer = summaryRegistry.timer(namePrefix + ".positionSet"); _positionAddTimer = summaryRegistry.timer(namePrefix + ".positionAdd"); } protected static UniqueId generateUniqueId() { return UniqueId.of(IDENTIFIER_SCHEME_DEFAULT, GUIDGenerator.generate().toString()); } protected static UniqueId generateTradeUniqueId() { return UniqueId.of(TRADE_IDENTIFIER_SCHEME_DEFAULT, GUIDGenerator.generate().toString()); } // --------------------------------------------------------------------------------------- // REDIS KEY MANAGEMENT // --------------------------------------------------------------------------------------- protected final String toRedisKey(String id, String intermediate) { StringBuilder sb = new StringBuilder(); if (!getRedisPrefix().isEmpty()) { sb.append(getRedisPrefix()); sb.append("-"); } sb.append(intermediate); sb.append(id); String keyText = sb.toString(); return keyText; } protected final String toRedisKey(UniqueId uniqueId, String intermediate) { return toRedisKey(uniqueId.toString(), intermediate); } protected final String toPortfolioRedisKey(UniqueId uniqueId) { return toRedisKey(uniqueId, "PRT-"); } protected final String toPortfolioPositionsRedisKey(UniqueId uniqueId) { return toRedisKey(uniqueId, "PRTPOS-"); } protected final String toPositionRedisKey(UniqueId uniqueId) { return 
toRedisKey(uniqueId, "POS-"); } protected final String toTradeRedisKey(UniqueId uniqueId) { return toRedisKey(uniqueId, "TRADE-"); } protected final String toPositionTradesRedisKey(UniqueId uniqueId) { return toRedisKey(uniqueId, "POSTRADE-"); } protected final String constructallPortfoliosRedisKey() { return toRedisKey(PORTFOLIOS_HASH_KEY_NAME, ""); } protected final String toPortfolioNameRedisKey(String portfolioName) { return toRedisKey(portfolioName, "NAME-"); } // --------------------------------------------------------------------------------------- // DATA MANIPULATION // --------------------------------------------------------------------------------------- /** * Deep store an entire portfolio, including all positions. * The portfolio itself is not modified, including setting the unique ID. * * @param portfolio The portfolio to store. * @return the UniqueId of the portfolio. */ public UniqueId storePortfolio(Portfolio portfolio) { ArgumentChecker.notNull(portfolio, "portfolio"); UniqueId uniqueId = null; try (Timer.Context context = _portfolioStoreTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { uniqueId = storePortfolio(jedis, portfolio); storePortfolioNodes(jedis, toPortfolioPositionsRedisKey(uniqueId), portfolio.getRootNode()); getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to store portfolio " + portfolio, e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to store portfolio " + portfolio, e); } } return uniqueId; } public UniqueId storePosition(Position position) { ArgumentChecker.notNull(position, "position"); UniqueId uniqueId = null; try (Timer.Context context = _positionStoreTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { uniqueId = storePosition(jedis, position); getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to store position " + position, e); getJedisPool().returnBrokenResource(jedis); throw new 
OpenGammaRuntimeException("Unable to store position " + position, e); } } return uniqueId; } /** * A special fast-pass method to just update a position quantity, without * updating any of the other fields. Results in a single Redis write. * * @param position The position, which must already be in the source. */ public void updatePositionQuantity(Position position) { ArgumentChecker.notNull(position, "position"); try (Timer.Context context = _positionSetTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { String redisKey = toPositionRedisKey(position.getUniqueId()); jedis.hset(redisKey, "QTY", position.getQuantity().toPlainString()); getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to store position " + position, e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to store position " + position, e); } } } /** * Store a new position and attach it to the specified portfolio. * @param portfolio the existing portfolio. Must already be in this source. * @param position the new position to store and attach. * @return map of id to position, not-null. */ public Map<String, Position> addPositionToPortfolio(Portfolio portfolio, Position position) { return addPositionsToPortfolio(portfolio, Collections.singleton(position)); } /** * Store a new set of positions and attach it to the specified portfolio. * @param portfolio the existing portfolio. Must already be in this source. * @param positions the new positions to store and attach. * @return map of id to position, not-null. 
*/ public Map<String, Position> addPositionsToPortfolio(Portfolio portfolio, Collection<Position> positions) { ArgumentChecker.notNull(portfolio, "portfolio"); ArgumentChecker.notNull(portfolio.getUniqueId(), "portfolio UniqueId"); ArgumentChecker.notNull(positions, "position"); Map<String, Position> id2position = Maps.newLinkedHashMap(); try (Timer.Context context = _positionAddTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { String[] uniqueIdStrings = new String[positions.size()]; int i = 0; for (Position position : positions) { String uniqueId = storePosition(jedis, position).toString(); uniqueIdStrings[i] = uniqueId; i++; id2position.put(uniqueId, position); } UniqueId portfolioUniqueId = portfolio.getUniqueId(); String portfolioPositionsKey = toPortfolioPositionsRedisKey(portfolioUniqueId); // NOTE kirk 2013-06-18 -- The following call is a known performance bottleneck. // I spent a full day attempting almost every single way I could imagine to // figure out what was going on, before I gave up for the time being. // When we're running in a far more realistic way we need to second guess // it, but it is a known performance issue on large portfolio loading. 
jedis.sadd(portfolioPositionsKey, uniqueIdStrings); getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to store positions " + positions, e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to store positions " + positions, e); } } return id2position; } protected UniqueId storePortfolio(Jedis jedis, Portfolio portfolio) { UniqueId uniqueId = portfolio.getUniqueId(); if (uniqueId == null) { uniqueId = generateUniqueId(); } String uniqueIdKey = toPortfolioRedisKey(uniqueId); String portfolioNameKey = toPortfolioNameRedisKey(portfolio.getName()); jedis.hset(portfolioNameKey, "UNIQUE_ID", uniqueId.toString()); jedis.hset(_portfoliosHashKeyName, portfolio.getName(), uniqueId.toString()); jedis.hset(uniqueIdKey, "NAME", portfolio.getName()); for (Map.Entry<String, String> attribute : portfolio.getAttributes().entrySet()) { jedis.hset(uniqueIdKey, "ATT-" + attribute.getKey(), attribute.getValue()); } return uniqueId; } protected void storePortfolioNodes(Jedis jedis, String redisKey, PortfolioNode node) { Set<String> positionUniqueIds = new HashSet<String>(); for (Position position : node.getPositions()) { UniqueId uniqueId = storePosition(jedis, position); positionUniqueIds.add(uniqueId.toString()); } if (!positionUniqueIds.isEmpty()) { jedis.sadd(redisKey, positionUniqueIds.toArray(new String[0])); } if (!node.getChildNodes().isEmpty()) { s_logger.warn("Possible misuse. Portfolio has a deep structure, but this source flattens. 
Positions being stored flat."); } for (PortfolioNode childNode : node.getChildNodes()) { storePortfolioNodes(jedis, redisKey, childNode); } } protected UniqueId storePosition(Jedis jedis, Position position) { UniqueId uniqueId = position.getUniqueId(); if (uniqueId == null) { uniqueId = generateUniqueId(); } String redisKey = toPositionRedisKey(uniqueId); jedis.hset(redisKey, "QTY", position.getQuantity().toPlainString()); ExternalIdBundle securityBundle = position.getSecurityLink().getExternalId(); if (securityBundle == null) { throw new OpenGammaRuntimeException("Can only store positions with a link to an ExternalId"); } if (securityBundle.size() != 1) { s_logger.warn("Bundle {} not exactly one. Possible misuse of this source.", securityBundle); } ExternalId securityId = securityBundle.iterator().next(); jedis.hset(redisKey, "SEC", securityId.toString()); for (Map.Entry<String, String> attribute : position.getAttributes().entrySet()) { jedis.hset(redisKey, "ATT-" + attribute.getKey(), attribute.getValue()); } if (position.getTrades() != null) { Set<String> tradeUniqueIds = new HashSet<>(); for (Trade trade : position.getTrades()) { UniqueId tradeId = storeTrade(jedis, trade); tradeUniqueIds.add(tradeId.toString()); } jedis.sadd(toPositionTradesRedisKey(uniqueId), tradeUniqueIds.toArray(new String[tradeUniqueIds.size()])); } return uniqueId; } protected UniqueId storeTrade(Jedis jedis, Trade trade) { UniqueId uniqueId = trade.getUniqueId(); if (uniqueId == null) { uniqueId = generateTradeUniqueId(); } String redisKey = toTradeRedisKey(uniqueId); jedis.hset(redisKey, "QTY", trade.getQuantity().toPlainString()); ExternalIdBundle securityBundle = trade.getSecurityLink().getExternalId(); if (securityBundle == null) { throw new OpenGammaRuntimeException("Can only store positions with a link to an ExternalId"); } if (securityBundle.size() != 1) { s_logger.warn("Bundle {} not exactly one. 
Possible misuse of this source.", securityBundle); } ExternalId securityId = securityBundle.iterator().next(); jedis.hset(redisKey, "SEC", securityId.toString()); for (Map.Entry<String, String> attribute : trade.getAttributes().entrySet()) { jedis.hset(redisKey, "ATT-" + attribute.getKey(), attribute.getValue()); } return uniqueId; } // --------------------------------------------------------------------------------------- // QUERIES OUTSIDE OF POSITION SOURCE INTERFACE // --------------------------------------------------------------------------------------- public Portfolio getByName(String portfolioName) { ArgumentChecker.notNull(portfolioName, "portfolioName"); Portfolio portfolio = null; try (Timer.Context context = _getPortfolioTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { String nameKey = toPortfolioNameRedisKey(portfolioName); String uniqueIdString = jedis.hget(nameKey, "UNIQUE_ID"); if (uniqueIdString != null) { UniqueId uniqueId = UniqueId.parse(uniqueIdString); portfolio = getPortfolioWithJedis(jedis, uniqueId); } getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to get portfolio by name " + portfolioName, e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to get portfolio by name " + portfolioName, e); } } return portfolio; } public Map<String, UniqueId> getAllPortfolioNames() { Map<String, UniqueId> result = new TreeMap<String, UniqueId>(); Jedis jedis = getJedisPool().getResource(); try { Map<String, String> portfolioNames = jedis.hgetAll(_portfoliosHashKeyName); for (Map.Entry<String, String> entry : portfolioNames.entrySet()) { result.put(entry.getKey(), UniqueId.parse(entry.getValue())); } getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to get portfolio names", e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to get portfolio names", e); } return result; } // 
--------------------------------------------------------------------------------------- // IMPLEMENTATION OF POSITION SOURCE // --------------------------------------------------------------------------------------- @Override public ChangeManager changeManager() { return DummyChangeManager.INSTANCE; } @Override public Portfolio getPortfolio(UniqueId uniqueId, VersionCorrection versionCorrection) { ArgumentChecker.notNull(uniqueId, "uniqueId"); SimplePortfolio portfolio = null; try (Timer.Context context = _getPortfolioTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { portfolio = getPortfolioWithJedis(jedis, uniqueId); getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to get portfolio " + uniqueId, e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to get portfolio " + uniqueId, e); } } if (portfolio == null) { throw new DataNotFoundException("Unable to locate portfolio with UniqueId " + uniqueId); } return portfolio; } protected SimplePortfolio getPortfolioWithJedis(Jedis jedis, UniqueId uniqueId) { SimplePortfolio portfolio = null; String redisKey = toPortfolioRedisKey(uniqueId); if (jedis.exists(redisKey)) { Map<String, String> hashFields = jedis.hgetAll(redisKey); portfolio = new SimplePortfolio(hashFields.get("NAME")); portfolio.setUniqueId(uniqueId); for (Map.Entry<String, String> field : hashFields.entrySet()) { if (!field.getKey().startsWith("ATT-")) { continue; } String attributeName = field.getKey().substring(4); portfolio.addAttribute(attributeName, field.getValue()); } SimplePortfolioNode portfolioNode = new SimplePortfolioNode(); portfolioNode.setName(portfolio.getName()); String portfolioPositionsKey = toPortfolioPositionsRedisKey(portfolio.getUniqueId()); Set<String> positionUniqueIds = jedis.smembers(portfolioPositionsKey); for (String positionUniqueId : positionUniqueIds) { Position position = getPosition(jedis, UniqueId.parse(positionUniqueId)); if (position 
!= null) { portfolioNode.addPosition(position); } } portfolio.setRootNode(portfolioNode); } return portfolio; } @Override public Portfolio getPortfolio(ObjectId objectId, VersionCorrection versionCorrection) { return getPortfolio(UniqueId.of(objectId, null), null); } @Override public PortfolioNode getPortfolioNode(UniqueId uniqueId, VersionCorrection versionCorrection) { throw new UnsupportedOperationException("Trades not supported."); } @Override public Position getPosition(UniqueId uniqueId) { ArgumentChecker.notNull(uniqueId, "uniqueId"); SimplePosition position = null; try (Timer.Context context = _getPositionTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { position = getPosition(jedis, uniqueId); getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to get position " + uniqueId, e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to get position " + uniqueId, e); } } if (position == null) { throw new DataNotFoundException("Unable to find position with UniqueId " + uniqueId); } return position; } protected SimplePosition getPosition(Jedis jedis, UniqueId uniqueId) { String redisKey = toPositionRedisKey(uniqueId); if (!jedis.exists(redisKey)) { return null; } SimplePosition position = new SimplePosition(); position.setUniqueId(uniqueId); Map<String, String> hashFields = jedis.hgetAll(redisKey); position.setQuantity(new BigDecimal(hashFields.get("QTY"))); ExternalId secId = ExternalId.parse(hashFields.get("SEC")); SimpleSecurityLink secLink = new SimpleSecurityLink(); secLink.addExternalId(secId); position.setSecurityLink(secLink); for (Map.Entry<String, String> field : hashFields.entrySet()) { if (!field.getKey().startsWith("ATT-")) { continue; } String attributeName = field.getKey().substring(4); position.addAttribute(attributeName, field.getValue()); } // trades String tradesKey = toPositionTradesRedisKey(position.getUniqueId()); Set<String> tradesUniqueIds = 
jedis.smembers(tradesKey); for (String tradesUniqueId : tradesUniqueIds) { Trade trade = getTrade(jedis, UniqueId.parse(tradesUniqueId)); if (trade != null) { position.addTrade(trade); } } return position; } protected SimpleTrade getTrade(Jedis jedis, UniqueId uniqueId) { String redisKey = toTradeRedisKey(uniqueId); if (!jedis.exists(redisKey)) { return null; } SimpleTrade trade = new SimpleTrade(); trade.setUniqueId(uniqueId); Map<String, String> hashFields = jedis.hgetAll(redisKey); trade.setQuantity(new BigDecimal(hashFields.get("QTY"))); ExternalId secId = ExternalId.parse(hashFields.get("SEC")); SimpleSecurityLink secLink = new SimpleSecurityLink(); secLink.addExternalId(secId); trade.setSecurityLink(secLink); for (Map.Entry<String, String> field : hashFields.entrySet()) { if (!field.getKey().startsWith("ATT-")) { continue; } String attributeName = field.getKey().substring(4); trade.addAttribute(attributeName, field.getValue()); } return trade; } @Override public Position getPosition(ObjectId objectId, VersionCorrection versionCorrection) { return getPosition(UniqueId.of(objectId, null)); } @Override public Trade getTrade(UniqueId uniqueId) { ArgumentChecker.notNull(uniqueId, "uniqueId"); SimpleTrade trade = null; try (Timer.Context context = _getPositionTimer.time()) { Jedis jedis = getJedisPool().getResource(); try { trade = getTrade(jedis, uniqueId); getJedisPool().returnResource(jedis); } catch (Exception e) { s_logger.error("Unable to get position " + uniqueId, e); getJedisPool().returnBrokenResource(jedis); throw new OpenGammaRuntimeException("Unable to get trade " + uniqueId, e); } } if (trade == null) { throw new DataNotFoundException("Unable to find position with UniqueId " + uniqueId); } return trade; } }
package io.craigmiller160.stockmarket.stock; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.math.BigDecimal; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.net.UnknownHostException; import java.util.Calendar; import java.util.Collections; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Scanner; import net.jcip.annotations.ThreadSafe; import org.joda.time.DateTime; /** * Implemented <tt>StockDownloader</tt> utilizing Yahoo!Finance's online * service to acquire stock data. This class downloads both detailed field data * and an extended history of the stock. * <p> * <b>SUPPORTED STOCK FIELDS:</b> This downloader only supports the following fields. * All names for these fields are constant values in the <tt>AbstractStock</tt> class. * Using any fields that are not listed here in with the <tt>downloadStockDetails()</tt> * method will throw an <tt>IllegalArgumentException</tt>. * <p> * ASK<br> * ASK_SIZE<br> * AVERAGE_DAILY_VOLUME<br> * BID<br> * CHANGE<br> * CHANGE_IN_PERCENT<br> * CHANGE_50_DAY_AVG<br> * CHANGE_50_DAY_AVG_PERCENT<br> * CHANGE_200_DAY_AVG<br> * CHANGE_200_DAY_AVG_PERCENT<br> * CHANGE_YEAR_HIGH<br> * CHANGE_YEAR_HIGH_PERCENT<br> * CHANGE_YEAR_LOW<br> * CHANGE_YEAR_LOW_PERCENT<br> * CURRENT_PRICE<br> * CURRENCY<br> * DAYS_HIGH<br> * DAYS_LOW<br> * DAYS_RANGE<br> * FIFTY_DAY_AVG<br> * LAST_TRADE_DATE<br> * LAST_TRADE_SIZE<br> * LAST_TRADE_TIME<br> * MARKET_CAPITALIZATION<br> * NAME<br> * OPEN<br> * PREVIOUS_CLOSE<br> * REVENUE<br> * SYMBOL<br> * TWO_HUNDRED_DAY_AVG<br> * VOLUME<br> * YEAR_HIGH<br> * YEAR_LOW<br> * YEAR_RANGE * <p> * <b>THREAD SAFETY:</b> This class has no mutable state. All variables are either * static and final, or local and confined to the individual thread stack. 
Therefore * any instance of this class can be successfully used in a concurrent environment. * * @author craig * @version 2.3 */ @ThreadSafe public final class YahooStockDownloader implements StockDownloader { /** * A <tt>Map</tt> of property codes, to be checked against a list * of field names provided to the <tt>downloadStockDetails()</tt> method. */ private static final Map<String,String> propertyCodeMap; /** * Map value for the symbol field, used to identify it and excluse it while parsing the * field array. Because of how Yahoo's csv downloading tool and the mechanism * developed to check the validity of the stock, actually including this property * in the URL query would give a false positive verification. */ private static final String SYMBOL_FIELD = "Symbol"; /** * Static initializer creates the <tt>propertyCodeMap</tt>. */ static{ Map<String,String> tempMap = new HashMap<>(); tempMap.put(AbstractStock.ASK, "a"); tempMap.put(AbstractStock.ASK_SIZE, "a5"); tempMap.put(AbstractStock.AVERAGE_DAILY_VOLUME, "a2"); tempMap.put(AbstractStock.BID, "b"); tempMap.put(AbstractStock.BID_SIZE, "b6"); tempMap.put(AbstractStock.CHANGE_TODAY, "c1"); tempMap.put(AbstractStock.CHANGE_TODAY_PERCENT, "p2"); tempMap.put(AbstractStock.CHANGE_50_DAY_AVG, "m7"); tempMap.put(AbstractStock.CHANGE_50_DAY_AVG_PERCENT, "m8"); tempMap.put(AbstractStock.CHANGE_200_DAY_AVG, "m5"); tempMap.put(AbstractStock.CHANGE_200_DAY_AVG_PERCENT, "m6"); tempMap.put(AbstractStock.CHANGE_YEAR_HIGH, "k4"); tempMap.put(AbstractStock.CHANGE_YEAR_HIGH_PERCENT, "k5"); tempMap.put(AbstractStock.CHANGE_YEAR_LOW, "j5"); tempMap.put(AbstractStock.CHANGE_YEAR_LOW_PERCENT, "j6"); //On YahooFinance website, this property is called LastTradePrice tempMap.put(AbstractStock.CURRENT_PRICE, "l1"); tempMap.put(AbstractStock.CURRENCY, "c4"); tempMap.put(AbstractStock.DAYS_HIGH, "h"); tempMap.put(AbstractStock.DAYS_LOW, "g"); tempMap.put(AbstractStock.DAYS_RANGE, "m"); tempMap.put(AbstractStock.FIFTY_DAY_AVG, "m3"); 
tempMap.put(AbstractStock.LAST_TRADE_DATE, "d1"); tempMap.put(AbstractStock.LAST_TRADE_SIZE, "k3"); tempMap.put(AbstractStock.LAST_TRADE_TIME, "t1"); tempMap.put(AbstractStock.MARKET_CAPITALIZATION, "j1"); tempMap.put(AbstractStock.NAME, "n"); tempMap.put(AbstractStock.OPEN, "o"); tempMap.put(AbstractStock.PREVIOUS_CLOSE, "p"); tempMap.put(AbstractStock.REVENUE, "s6"); tempMap.put(AbstractStock.SYMBOL, SYMBOL_FIELD); tempMap.put(AbstractStock.TWO_HUNDRED_DAY_AVG, "m4"); tempMap.put(AbstractStock.VOLUME, "v"); tempMap.put(AbstractStock.YEAR_HIGH, "k"); tempMap.put(AbstractStock.YEAR_LOW, "j"); tempMap.put(AbstractStock.YEAR_RANGE, "w"); propertyCodeMap = Collections.unmodifiableMap(tempMap); } /** * Constructs a downloader object using Yahoo!Finance's service. */ public YahooStockDownloader() { } @Override public Map<String,String> downloadStockDetails(String symbol, String[] fields) throws InvalidStockException, UnknownHostException, IOException{ StringBuffer csvFile = new StringBuffer(); //Construct URL and connect to it URL url = createUrlForDetails(symbol, fields); URLConnection connection = url.openConnection(); //Download stock data try(BufferedReader reader = new BufferedReader( new InputStreamReader(connection.getInputStream()))) { String line = ""; while((line = reader.readLine()) != null){ csvFile.append(line); } } //Parse downloaded stock data Map<String,String> stockDataMap = null; if(!csvFile.toString().equals("")){ try{ stockDataMap = parseCsvForDetails(csvFile.toString(), fields); } catch(InvalidStockException ex){ throw new InvalidStockException(symbol); } } stockDataMap.put(AbstractStock.SYMBOL, symbol); return stockDataMap; } /** * Construct the URL from the values provided. * * @param symbol the symbol of the stock. * @param fields the fields to get information for. * @return the constructed URL. * @throws MalformedURLException if the URL created is not valid. 
*/ private URL createUrlForDetails(String symbol, String[] fields) throws MalformedURLException{ String urlStart = "http://download.finance.yahoo.com/d/quotes.csv?s="; String urlProperties = getURLProperties(fields); String urlEnd = "&e=.csv"; return new URL(urlStart + symbol + urlProperties + urlEnd); } /** * Parse the downloaded csv text and pair each entry with the appropriate * field name in a <tt>Map</tt>. * * @param csvText the raw csv text to be parsed. * @param fields the fields to be paired with the raw data from the csv. * @return a <tt>Map</tt> containing data parsed from the csv text. * @throws InvalidStockException if the stock is not a valid marketplace stock. */ private Map<String, String> parseCsvForDetails(String csvText, String[] fields) throws InvalidStockException{ Map<String,String> stockDataMap = new HashMap<>(); String[] tempData = csvText.split(","); if(!verifyStock(tempData)){ throw new InvalidStockException(); } for(int i = 0, n = 0; i < tempData.length; i++){ if(fields[n] != AbstractStock.SYMBOL){ //As long as the value of the field array at this index //isn't SYMBOL, parse the tempData for a value String dataEntry = null; if((tempData[i].charAt(0) == '"') && (tempData[i].charAt(tempData[i].length() - 1) != '"')){ //If the first char is ", but the last char is NOT " //Only occurs when Name has comma in the middle, combine fields to fix dataEntry = tempData[i] + tempData[i + 1]; dataEntry = dataEntry.substring(1, dataEntry.length() - 1); i++; } else if((tempData[i].charAt(0) == '"') && (tempData[i].charAt(tempData[i].length() - 1) == '"')){ //If the first and last char are " //Remove the quotes, helps with number parsing later on. dataEntry = tempData[i].substring(1, tempData[i].length() - 1); } else{ //Nothing special, add to final map. 
dataEntry = tempData[i]; } if(dataEntry.charAt(dataEntry.length() - 1) == '%'){ //If there's a % sign at the end of the string, remove it to help with number parsing dataEntry = dataEntry.substring(0, dataEntry.length() - 1); } stockDataMap.put(fields[n], dataEntry); } else{ //If the field array value is SYMBOL, decrement i to avoid tempData moving ahead i--; } //At the end of every operation, increment the index for the field array n++; } return stockDataMap; } /** * Verify that the stock being downloaded is a valid marketplace stock. * If it is, this method returns true, if not it returns false. * * @param stockData the data to parse to confirm the stock's validitiy. * @return true if the stock is valid, false if it is not. */ private boolean verifyStock(String[] stockData){ boolean verified = false; for(String s : stockData){ if(!s.equals("N/A")){ verified = true; break; } } return verified; } /** * Prepare a <tt>String</tt> of property values to be added to the URL. * These property codes are prepared based on the array of field names * passed as a parameter. If any of the fields are not supported by this * downloader, an <tt>IllegalArgumentException</tt> is thrown. * * @param fields the list of field names for the property codes to be * added to the URL. * @return a <tt>String</tt> of property codes to be added to the URL. * @throws IllegalArgumentException if one or more of the property codes * aren't supported by this downloader. 
*/ private String getURLProperties(String[] fields){ StringBuffer urlProperties = new StringBuffer("&f="); for(int i = 0; i < fields.length; i++){ String code = propertyCodeMap.get(fields[i]); if(code == SYMBOL_FIELD){ //do nothing } else if(code == null){ throw new IllegalArgumentException(fields[i] + " is not supported by this downloader"); } else{ urlProperties.append(code); } } return urlProperties.toString(); } @Override public List<HistoricalQuote> downloadStockHistory(String symbol, int months) throws InvalidStockException, UnknownHostException, IOException{ //Create URL & connect URL url = createUrlForHistory(symbol, months); URLConnection connection = url.openConnection(); //Download the csv data StringBuffer csvFile = new StringBuffer(); try(BufferedReader reader = new BufferedReader( new InputStreamReader(connection.getInputStream()))) { String line = ""; while((line = reader.readLine()) != null){ csvFile.append(line + "\n"); } } catch(FileNotFoundException ex){ //FileNotFoundException is thrown if the stock is invalid and //yahoo can't produce the chart. throw new InvalidStockException(symbol); } //Parse the csv data for the stock history List<HistoricalQuote> historyList = null; if(!(csvFile.toString().equals(""))){ historyList = parseCsvForHistory(csvFile.toString(), symbol); } return historyList; } private URL createUrlForHistory(String symbol, int months) throws MalformedURLException{ String urlStart = "http://ichart.yahoo.com/table.csv?s="; String urlFromDate = getFromDateURLCode(months); String urlToDate = getToDateURLCode(); String urlInterval = "&g=d"; //Daily quote interval String urlEnd = "&ignore=.csv"; return new URL(urlStart + symbol + urlFromDate + urlToDate + urlInterval + urlEnd); } /** * Parse the csv data for the stock history. Individual quotes are identified and * stores in <tt>HistoricalQuote</tt> objects, a list of which is returned to the * caller. * * @param csvData the raw csv data being parsed for historical stock information. 
* @return a list of <tt>HistoricalQuote</tt> objects composed from the raw data. */ private List<HistoricalQuote> parseCsvForHistory(String csvData, String symbol){ List<HistoricalQuote> historyList = new StockHistoryList(symbol); try(Scanner scan = new Scanner(csvData)){ int lineCount = 0; //First line is header, needs to be skipped String[] historyData = null; while(scan.hasNext()){ if(lineCount > 0){ historyData = scan.nextLine().split(","); int[] calNums = parseCalendarNumbers(historyData[0]); Calendar date = new GregorianCalendar(calNums[2], calNums[0], calNums[1]); BigDecimal closeValue = new BigDecimal(Double.parseDouble(historyData[4])); historyList.add(new HistoricalQuote(date, closeValue)); } else{ scan.nextLine(); } lineCount++; } } return historyList; } /** * Parse raw calendar number values from a <tt>String</tt> and return * their <tt>int</tt> values. * * @param nums a <tt>String</tt> containing calendar number values. * @return the <tt>int</tt> calendar values. */ private int[] parseCalendarNumbers(String nums){ int[] calNums = new int[3]; String[] calText = nums.split("-"); calNums[0] = Integer.parseInt(calText[1]) - 1; //Month calNums[1] = Integer.parseInt(calText[2]); //Day calNums[2] = Integer.parseInt(calText[0]); //Year return calNums; } /** * Composes the URL code for the "from date", the date the history starts at. * * @param months the number of months ago the "from date" is. * @return the URL code for the "from date". */ private String getFromDateURLCode(int months){ Calendar from = new DateTime(DateTime.now().minusMonths(months)).toGregorianCalendar(); int sMonth = from.get(Calendar.MONTH); int sDay = from.get(Calendar.DAY_OF_MONTH); int sYear = from.get(Calendar.YEAR); return "&a=" + sMonth + "&b=" + sDay + "&c=" + sYear; } /** * Composes the URL code for the "to date", the date the history ends at. * * @return the URL code for the "to date". 
*/ private String getToDateURLCode(){ Calendar today = Calendar.getInstance(); int eMonth = today.get(Calendar.MONTH); int eDay = today.get(Calendar.DAY_OF_MONTH); int eYear = today.get(Calendar.YEAR); return "&d=" + eMonth + "&e=" + eDay + "&f=" + eYear; } }
package br.com.makadu.makaduevento.DAO.dao.entityDao;

import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;

import java.util.ArrayList;
import java.util.List;

import br.com.makadu.makaduevento.DAO.dao.Dao;
import br.com.makadu.makaduevento.DAO.table.EventTable;
import br.com.makadu.makaduevento.model.Event;

/**
 * SQLite DAO for {@link Event} rows in {@link EventTable#TABLE_EVENT}.
 *
 * Created by lucasschwalbeferreira on 10/1/15.
 */
public class EventDao extends Dao<Event> {

    public EventDao(Context context) {
        super(context);
    }

    /**
     * Inserts the event, or updates it if a row with the same id already
     * exists. A single upsert always marks the event as active.
     *
     * @param entity the event to persist; ignored when {@code null}.
     */
    public void upsert(Event entity) {
        if (entity == null) {
            return;
        }
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        try {
            ContentValues contentValues = toContentValues(entity);
            contentValues.put(EventTable.COLUMN_ACTIVE, 1);
            // NOTE(review): COLUMN_UPDATED_AT is intentionally not persisted here.
            insertOrUpdate(db, entity, contentValues);
        } catch (Exception e) {
            Log.e("Error EventDao", "upsert: " + e.getMessage());
        } finally {
            // BUG FIX: was "db != null || db.isOpen()", which dereferences a
            // null db when the first operand is false.
            if (db != null && db.isOpen()) {
                db.close();
            }
        }
    }

    /**
     * Replaces the local event table with the given list: removes all rows,
     * then upserts each event, preserving each event's own active flag.
     *
     * @param events the events to persist.
     */
    public void upsert(List<Event> events) {
        Log.v("LOG_DAO", " INI");
        removeAll();
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        try {
            Log.v("LOG_DAO", " POS TRY");
            for (Event entity : events) {
                ContentValues contentValues = toContentValues(entity);
                // Unlike the single-event upsert, the batch variant keeps the
                // entity's own active flag.
                contentValues.put(EventTable.COLUMN_ACTIVE, entity.active);
                insertOrUpdate(db, entity, contentValues);
            }
        } catch (Exception e) {
            Log.e("Error EventDao", "upsert: " + e.getMessage());
        } finally {
            // BUG FIX: was "||", see upsert(Event).
            if (db != null && db.isOpen()) {
                db.close();
            }
        }
    }

    /** Not implemented; always returns {@code null}. Use {@link #upsert(Event)}. */
    public Event save(Event entity) {
        return null;
    }

    /** Not implemented; always returns 0. Use {@link #upsert(Event)}. */
    public int update(Event entity) {
        return 0;
    }

    /** Not implemented; always returns 0. */
    public int remove(Event entity) {
        return 0;
    }

    /**
     * Deletes every row from the event table.
     *
     * @return the number of rows removed.
     */
    public int removeAll() {
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        int countRows = 0;
        try {
            // A null whereClause deletes all rows (equivalent to the previous "").
            countRows = db.delete(EventTable.TABLE_EVENT, null, null);
        } catch (Exception e) {
            Log.e("Error DAO", e.getMessage());
        } finally {
            if (db != null && db.isOpen()) {
                db.close();
            }
        }
        return countRows;
    }

    /**
     * @param id the event id, as stored in the _id column.
     * @return {@code true} when the event's type column equals "Privado".
     */
    public boolean eventPrivate(String id) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        boolean privado = false;
        Cursor cursor = null;
        try {
            // Bind the id as a selection argument instead of concatenating the
            // caller-supplied string into the WHERE clause.
            cursor = db.query(EventTable.TABLE_EVENT, null, "_id = ?",
                    new String[]{id}, null, null, EventTable.COLUMN_TITLE + " asc");
            if (cursor.moveToNext()) {
                privado = cursor.getString(11).equalsIgnoreCase("Privado");
            }
        } catch (Exception e) {
            Log.e("Error CandidateDAO", e.getMessage());
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            db.close();
        }
        return privado;
    }

    /**
     * @param id the event id, as stored in the _id column.
     * @return the event's password column, or "" when the event is not found.
     */
    public String eventPassword(String id) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        String ePassword = "";
        Cursor cursor = null;
        try {
            cursor = db.query(EventTable.TABLE_EVENT, null, "_id = ?",
                    new String[]{id}, null, null, EventTable.COLUMN_TITLE + " asc");
            if (cursor.moveToNext()) {
                ePassword = cursor.getString(12);
            }
        } catch (Exception e) {
            Log.e("Error CandidateDAO", e.getMessage());
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            db.close();
        }
        return ePassword;
    }

    /**
     * @param pk the event primary key.
     * @return the matching event, or {@code null} when absent or on error.
     */
    public Event getById(long pk) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        Event event = null;
        Cursor cursor = null;
        try {
            cursor = db.query(EventTable.TABLE_EVENT, null, "_id = " + pk,
                    null, null, null, EventTable.COLUMN_TITLE + " asc");
            if (cursor.moveToNext()) {
                event = readEvent(cursor);
            }
        } catch (Exception e) {
            Log.e("Error CandidateDAO", e.getMessage());
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            db.close();
        }
        return event;
    }

    /**
     * @return all active events ordered by title; empty list on error.
     */
    public List<Event> getAll() {
        List<Event> list = new ArrayList<Event>();
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        Cursor cursor = null;
        try {
            cursor = db.query(EventTable.TABLE_EVENT, null, "active = 1",
                    null, null, null, EventTable.COLUMN_TITLE + " asc");
            while (cursor.moveToNext()) {
                list.add(readEvent(cursor));
            }
        } catch (Exception e) {
            Log.e("Error EventDao", "getAll: " + e.getMessage());
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            // BUG FIX: was "||", see upsert(Event).
            if (db != null && db.isOpen()) {
                db.close();
            }
        }
        return list;
    }

    /** @return {@code true} when a row with the entity's id already exists. */
    private boolean containEvent(Event entity) {
        Log.e("LOG_DAO", "entrou containEvent ID: " + entity.getId());
        boolean contain = false;
        // Long.parseLong never returns null (it throws on bad input), so the
        // previous "if (id != null)" guard was dead code.
        long id = Long.parseLong(entity.getId());
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        Cursor cursor = null;
        try {
            cursor = db.query(EventTable.TABLE_EVENT, null,
                    EventTable.COLUMN_ID + " = " + id, null, null, null,
                    EventTable.COLUMN_ID);
            Log.e("LOG_DAO", "entrou containEvent getCount: " + cursor.getCount());
            contain = cursor.getCount() != 0;
        } catch (Exception e) {
            Log.e("Error EventDAO", "containEvent:" + e.getMessage());
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            // Deliberately not closing db here: the caller (upsert) still
            // holds an open handle from the same SQLiteOpenHelper.
        }
        return contain;
    }

    /**
     * Maps an event to ContentValues for every column except COLUMN_ACTIVE,
     * which the two upsert variants set differently.
     */
    private ContentValues toContentValues(Event entity) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(EventTable.COLUMN_ID, Long.parseLong(entity.getId()));
        contentValues.put(EventTable.COLUMN_TITLE, entity.getTitle());
        contentValues.put(EventTable.COLUMN_DESCRIPTION, entity.getDescription());
        contentValues.put(EventTable.COLUMN_ADDRESS, entity.getAddress());
        contentValues.put(EventTable.COLUMN_VANUE, entity.getVenue());
        contentValues.put(EventTable.COLUMN_CITY, entity.getCity());
        contentValues.put(EventTable.COLUMN_STRING_LOGO, entity.logo);
        contentValues.put(EventTable.COLUMN_STATE, entity.getState());
        contentValues.put(EventTable.COLUMN_START_DATE, entity.getStart_date());
        contentValues.put(EventTable.COLUMN_END_DATE, entity.getEnd_date());
        contentValues.put(EventTable.COLUMN_EVENT_TYPE, entity.event_type);
        contentValues.put(EventTable.COLUMN_EVENT_PASSWORD, entity.password);
        contentValues.put(EventTable.COLUMN_HAVE_PAPERS, entity.have_papers ? 1 : 0);
        return contentValues;
    }

    /** Updates the row when it already exists, inserts it otherwise. */
    private void insertOrUpdate(SQLiteDatabase db, Event entity, ContentValues contentValues) {
        if (containEvent(entity)) {
            db.update(EventTable.TABLE_EVENT, contentValues,
                    "_id = " + Long.parseLong(entity.getId()), null);
            Log.v("LOG_DAO", "Atualizou:" + entity.getTitle());
        } else {
            db.insert(EventTable.TABLE_EVENT, null, contentValues);
            Log.v("LOG_DAO", "Inseriu:" + entity.getTitle());
        }
    }

    /**
     * Reads one Event from the cursor's current row. Column 10 (updated_at)
     * is intentionally not read, matching the original mapping.
     */
    private Event readEvent(Cursor cursor) {
        Event event = new Event();
        event.setId(cursor.getLong(0) + "");
        event.setTitle(cursor.getString(1));
        event.setDescription(cursor.getString(2));
        event.setAddress(cursor.getString(3));
        event.setVenue(cursor.getString(4));
        event.setCity(cursor.getString(5));
        event.logo = cursor.getString(6);
        event.setState(cursor.getString(7));
        event.setStart_date(cursor.getString(8));
        event.setEnd_date(cursor.getString(9));
        event.event_type = cursor.getString(11);
        event.password = cursor.getString(12);
        event.have_papers = cursor.getInt(13) == 1;
        return event;
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.video.stitcher.v1;

import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Immutable resource name of the form
 * projects/{project}/locations/{location}/vodSessions/{vod_session}/vodAdTagDetails/{vod_ad_tag_detail}.
 */
@Generated("by gapic-generator-java")
public class VodAdTagDetailName implements ResourceName {
  private static final PathTemplate PROJECT_LOCATION_VOD_SESSION_VOD_AD_TAG_DETAIL =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/vodSessions/{vod_session}/vodAdTagDetails/{vod_ad_tag_detail}");
  // Lazily built via double-checked locking in getFieldValuesMap().
  private volatile Map<String, String> fieldValuesMap;
  private final String project;
  private final String location;
  private final String vodSession;
  private final String vodAdTagDetail;

  @Deprecated
  protected VodAdTagDetailName() {
    project = null;
    location = null;
    vodSession = null;
    vodAdTagDetail = null;
  }

  private VodAdTagDetailName(Builder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    vodSession = Preconditions.checkNotNull(builder.getVodSession());
    vodAdTagDetail = Preconditions.checkNotNull(builder.getVodAdTagDetail());
  }

  public String getProject() {
    return project;
  }

  public String getLocation() {
    return location;
  }

  public String getVodSession() {
    return vodSession;
  }

  public String getVodAdTagDetail() {
    return vodAdTagDetail;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  public static VodAdTagDetailName of(
      String project, String location, String vodSession, String vodAdTagDetail) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setVodSession(vodSession)
        .setVodAdTagDetail(vodAdTagDetail)
        .build();
  }

  public static String format(
      String project, String location, String vodSession, String vodAdTagDetail) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setVodSession(vodSession)
        .setVodAdTagDetail(vodAdTagDetail)
        .build()
        .toString();
  }

  /** Parses a formatted string into a name; returns null for the empty string. */
  public static VodAdTagDetailName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PROJECT_LOCATION_VOD_SESSION_VOD_AD_TAG_DETAIL.validatedMatch(
            formattedString, "VodAdTagDetailName.parse: formattedString not in valid format");
    return of(
        matchMap.get("project"),
        matchMap.get("location"),
        matchMap.get("vod_session"),
        matchMap.get("vod_ad_tag_detail"));
  }

  public static List<VodAdTagDetailName> parseList(List<String> formattedStrings) {
    List<VodAdTagDetailName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  public static List<String> toStringList(List<VodAdTagDetailName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (VodAdTagDetailName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_VOD_SESSION_VOD_AD_TAG_DETAIL.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (vodSession != null) {
            fieldMapBuilder.put("vod_session", vodSession);
          }
          if (vodAdTagDetail != null) {
            fieldMapBuilder.put("vod_ad_tag_detail", vodAdTagDetail);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return PROJECT_LOCATION_VOD_SESSION_VOD_AD_TAG_DETAIL.instantiate(
        "project",
        project,
        "location",
        location,
        "vod_session",
        vodSession,
        "vod_ad_tag_detail",
        vodAdTagDetail);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // BUG FIX: was "o != null || getClass() == o.getClass()". With ||,
    // equals(null) dereferenced o.getClass() (NPE), and any non-null argument
    // short-circuited past the class check and hit a ClassCastException on the
    // cast below. The equals contract requires returning false in both cases.
    if (o != null && getClass() == o.getClass()) {
      VodAdTagDetailName that = ((VodAdTagDetailName) o);
      return Objects.equals(this.project, that.project)
          && Objects.equals(this.location, that.location)
          && Objects.equals(this.vodSession, that.vodSession)
          && Objects.equals(this.vodAdTagDetail, that.vodAdTagDetail);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(vodSession);
    h *= 1000003;
    h ^= Objects.hashCode(vodAdTagDetail);
    return h;
  }

  /**
   * Builder for
   * projects/{project}/locations/{location}/vodSessions/{vod_session}/vodAdTagDetails/{vod_ad_tag_detail}.
   */
  public static class Builder {
    private String project;
    private String location;
    private String vodSession;
    private String vodAdTagDetail;

    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getVodSession() {
      return vodSession;
    }

    public String getVodAdTagDetail() {
      return vodAdTagDetail;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setVodSession(String vodSession) {
      this.vodSession = vodSession;
      return this;
    }

    public Builder setVodAdTagDetail(String vodAdTagDetail) {
      this.vodAdTagDetail = vodAdTagDetail;
      return this;
    }

    private Builder(VodAdTagDetailName vodAdTagDetailName) {
      this.project = vodAdTagDetailName.project;
      this.location = vodAdTagDetailName.location;
      this.vodSession = vodAdTagDetailName.vodSession;
      this.vodAdTagDetail = vodAdTagDetailName.vodAdTagDetail;
    }

    public VodAdTagDetailName build() {
      return new VodAdTagDetailName(this);
    }
  }
}
package jsky.app.ot.gemini.michelle;

import jsky.util.gui.NumberBoxWidget;
import jsky.util.gui.TextBoxWidget;

import javax.swing.*;
import java.awt.*;
/*
 * Created by JFormDesigner on Wed Nov 02 16:06:30 CET 2005
 */

/**
 * Swing form for the Michelle instrument editor: a GridBagLayout panel of
 * labeled widgets (filter, exposure/on-source times, nod settings, focal
 * plane mask, position angle, disperser/central wavelength, chop settings,
 * science FOV readout, auto-configure and polarimetry radio groups).
 *
 * NOTE(review): the body of initComponents() is JFormDesigner-generated code
 * between GEN-BEGIN/GEN-END markers — edit it through the form designer, not
 * by hand.
 *
 * @author User #1
 */
public class MichelleForm extends JPanel {
    public MichelleForm() {
        // All widget construction and layout happens in the generated method.
        initComponents();
    }

    private void initComponents() {
        // JFormDesigner - Component initialization - DO NOT MODIFY //GEN-BEGIN:initComponents
        JPanel top1 = new JPanel();
        filterLabel = new JLabel();
        totalOnSourceTimeLabel = new JLabel();
        filterComboBox = new JComboBox();
        filterOverride = new JLabel();
        totalOnSourceTime = new NumberBoxWidget();
        totalOnSourceTimeUnitsLabel = new JLabel();
        nodIntervalLabel = new JLabel();
        nodInterval = new NumberBoxWidget();
        nodIntervalUnitsLabel = new JLabel();
        focalPlaneMaskLabel = new JLabel();
        posAngleLabel = new JLabel();
        focalPlaneMaskComboBox = new JComboBox();
        posAngle = new NumberBoxWidget();
        posAngleUnitsLabel = new JLabel();
        disperserLabel = new JLabel();
        centralWavelengthLabel = new JLabel();
        disperserComboBox = new JComboBox();
        centralWavelength = new NumberBoxWidget();
        chopAngleLabel = new JLabel();
        chopThrowLabel = new JLabel();
        chopAngle = new NumberBoxWidget();
        chopThrow = new NumberBoxWidget();
        chopAngleUnitsLabel = new JLabel();
        JLabel scienceFOVLabel = new JLabel();
        centralWavelengthUnitsLabel = new JLabel();
        chopThrowUnitsLabel = new JLabel();
        scienceFOV = new JLabel();
        exposureTimeLabel = new JLabel();
        exposureTime = new TextBoxWidget();
        exposureTimeUnitsLabel = new JLabel();
        autoConfigureLabel = new JLabel();
        JPanel autoConfigurePanel = new JPanel();
        autoConfigureYesButton = new JRadioButton();
        autoConfigureNoButton = new JRadioButton();
        nodOrientationLabel = new JLabel();
        nodOrientationComboBox = new JComboBox();
        maskOverride = new JLabel();
        JLabel polarimetryLabel = new JLabel();
        JPanel polarimetryPanel = new JPanel();
        polarimetryYesButton = new JRadioButton();
        polarimetryNoButton = new JRadioButton();
        JPanel jPanel2 = new JPanel();

        //======== this ========
        setLayout(new GridBagLayout());

        //======== top1 ======== (main grid: rows 0-13, columns 0-3)
        {
            top1.setLayout(new GridBagLayout());

            //---- filterLabel ----
            filterLabel.setLabelFor(null);
            filterLabel.setText("Filter");
            top1.add(filterLabel, new GridBagConstraints(0, 0, 1, 1, 0.2, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- totalOnSourceTimeLabel ----
            totalOnSourceTimeLabel.setLabelFor(null);
            totalOnSourceTimeLabel.setText("Total On-Source Time");
            top1.add(totalOnSourceTimeLabel, new GridBagConstraints(2, 0, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- filterComboBox ----
            filterComboBox.setFont(new Font("Dialog", Font.PLAIN, 12));
            filterComboBox.setToolTipText("Select the Filter to use");
            top1.add(filterComboBox, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));
            top1.add(filterOverride, new GridBagConstraints(1, 1, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 5, 0, 0), 0, 0));

            //---- totalOnSourceTime ----
            totalOnSourceTime.setToolTipText("Enter the Total On-Source Time in Seconds");
            top1.add(totalOnSourceTime, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- totalOnSourceTimeUnitsLabel ----
            totalOnSourceTimeUnitsLabel.setText("sec");
            top1.add(totalOnSourceTimeUnitsLabel, new GridBagConstraints(3, 1, 1, 1, 1.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 6, 0, 0), 0, 0));

            //---- nodIntervalLabel ----
            nodIntervalLabel.setLabelFor(null);
            nodIntervalLabel.setText("Nod Interval");
            top1.add(nodIntervalLabel, new GridBagConstraints(2, 4, 2, 1, 1.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- nodInterval ----
            nodInterval.setToolTipText("Enter the Nod Interval in Seconds");
            top1.add(nodInterval, new GridBagConstraints(2, 5, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- nodIntervalUnitsLabel ----
            nodIntervalUnitsLabel.setText("sec");
            top1.add(nodIntervalUnitsLabel, new GridBagConstraints(3, 5, 1, 1, 1.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 6, 0, 0), 0, 0));

            //---- focalPlaneMaskLabel ----
            focalPlaneMaskLabel.setLabelFor(null);
            focalPlaneMaskLabel.setText("Focal Plane Mask");
            top1.add(focalPlaneMaskLabel, new GridBagConstraints(0, 6, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- posAngleLabel ----
            posAngleLabel.setLabelFor(null);
            posAngleLabel.setText("Position Angle");
            top1.add(posAngleLabel, new GridBagConstraints(2, 6, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- focalPlaneMaskComboBox ----
            focalPlaneMaskComboBox.setToolTipText("Select the Focal Plane Mask to use");
            top1.add(focalPlaneMaskComboBox, new GridBagConstraints(0, 7, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- posAngle ----
            posAngle.setToolTipText("Enter the Position Angle in Degrees East of North");
            top1.add(posAngle, new GridBagConstraints(2, 7, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- posAngleUnitsLabel ----
            posAngleUnitsLabel.setText("deg E of N");
            top1.add(posAngleUnitsLabel, new GridBagConstraints(3, 7, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 6, 0, 0), 0, 0));

            //---- disperserLabel ----
            disperserLabel.setLabelFor(null);
            disperserLabel.setText("Disperser");
            top1.add(disperserLabel, new GridBagConstraints(0, 8, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- centralWavelengthLabel ----
            centralWavelengthLabel.setLabelFor(null);
            centralWavelengthLabel.setText("Grating Central Wavelength");
            top1.add(centralWavelengthLabel, new GridBagConstraints(2, 8, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- disperserComboBox ----
            disperserComboBox.setToolTipText("Select the Disperser to use");
            top1.add(disperserComboBox, new GridBagConstraints(0, 9, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- centralWavelength ----
            centralWavelength.setToolTipText("Enter the Grating Central Wavelength in um");
            top1.add(centralWavelength, new GridBagConstraints(2, 9, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- chopAngleLabel ----
            chopAngleLabel.setLabelFor(null);
            chopAngleLabel.setText("Chop Angle");
            top1.add(chopAngleLabel, new GridBagConstraints(0, 10, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- chopThrowLabel ----
            chopThrowLabel.setLabelFor(null);
            chopThrowLabel.setText("Chop Throw");
            top1.add(chopThrowLabel, new GridBagConstraints(2, 10, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- chopAngle ----
            chopAngle.setToolTipText("Enter the Chop Angle in degrees East of North");
            top1.add(chopAngle, new GridBagConstraints(0, 11, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- chopThrow ----
            chopThrow.setToolTipText("Enter the Chop Throw in arcsec");
            top1.add(chopThrow, new GridBagConstraints(2, 11, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- chopAngleUnitsLabel ----
            chopAngleUnitsLabel.setText("deg E of N");
            top1.add(chopAngleUnitsLabel, new GridBagConstraints(1, 11, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 6, 0, 0), 0, 0));

            //---- scienceFOVLabel ----
            scienceFOVLabel.setText("Science FOV");
            top1.add(scienceFOVLabel, new GridBagConstraints(0, 12, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- centralWavelengthUnitsLabel ----
            centralWavelengthUnitsLabel.setText("um");
            top1.add(centralWavelengthUnitsLabel, new GridBagConstraints(3, 9, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 6, 0, 0), 0, 0));

            //---- chopThrowUnitsLabel ----
            chopThrowUnitsLabel.setText("arcsec");
            top1.add(chopThrowUnitsLabel, new GridBagConstraints(3, 11, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 6, 0, 0), 0, 0));

            //---- scienceFOV ---- (read-only display; value is set elsewhere)
            scienceFOV.setToolTipText("The Calculated Field of View");
            scienceFOV.setText("000.000 arcsec");
            top1.add(scienceFOV, new GridBagConstraints(0, 13, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- exposureTimeLabel ----
            exposureTimeLabel.setToolTipText("");
            exposureTimeLabel.setLabelFor(null);
            exposureTimeLabel.setText("Exposure (Frame) Time");
            top1.add(exposureTimeLabel, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //---- exposureTime ----
            exposureTime.setToolTipText("Set the Exposure (Frame) Time in Seconds");
            top1.add(exposureTime, new GridBagConstraints(0, 3, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));

            //---- exposureTimeUnitsLabel ----
            exposureTimeUnitsLabel.setText("sec");
            top1.add(exposureTimeUnitsLabel, new GridBagConstraints(1, 3, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 6, 0, 0), 0, 0));

            //---- autoConfigureLabel ----
            autoConfigureLabel.setLabelFor(null);
            autoConfigureLabel.setText("Auto-Configure");
            top1.add(autoConfigureLabel, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //======== autoConfigurePanel ======== (Yes/No radio pair, grouped below)
            {
                autoConfigurePanel.setLayout(new GridBagLayout());

                //---- autoConfigureYesButton ----
                autoConfigureYesButton.setToolTipText("Automatically configure the instrument exposure time");
                autoConfigureYesButton.setText("Yes");
                autoConfigurePanel.add(autoConfigureYesButton, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0,
                    GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 11, 0, 0), 0, 0));

                //---- autoConfigureNoButton ----
                autoConfigureNoButton.setToolTipText("Do not automatically configure the instrument exposure time");
                autoConfigureNoButton.setText("No");
                autoConfigurePanel.add(autoConfigureNoButton, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
                    GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 11, 0, 0), 0, 0));
            }
            top1.add(autoConfigurePanel, new GridBagConstraints(2, 3, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));

            //---- nodOrientationLabel ----
            nodOrientationLabel.setText("Nod Orientation");
            top1.add(nodOrientationLabel, new GridBagConstraints(0, 4, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));
            top1.add(nodOrientationComboBox, new GridBagConstraints(0, 5, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 11, 0, 0), 0, 0));
            top1.add(maskOverride, new GridBagConstraints(1, 7, 1, 1, 0.0, 0.0,
                GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 5, 0, 0), 0, 0));

            //---- polarimetryLabel ----
            polarimetryLabel.setText("Polarimetry");
            top1.add(polarimetryLabel, new GridBagConstraints(2, 12, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(11, 11, 0, 0), 0, 0));

            //======== polarimetryPanel ======== (Yes/No radio pair, grouped below)
            {
                polarimetryPanel.setLayout(new GridBagLayout());

                //---- polarimetryYesButton ----
                polarimetryYesButton.setText("Yes");
                polarimetryPanel.add(polarimetryYesButton, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0,
                    GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 11, 0, 0), 0, 0));

                //---- polarimetryNoButton ----
                polarimetryNoButton.setText("No");
                polarimetryPanel.add(polarimetryNoButton, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
                    GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 11, 0, 0), 0, 0));
            }
            top1.add(polarimetryPanel, new GridBagConstraints(2, 13, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        }
        add(top1, new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 37, 0), 0, 0));

        //======== jPanel2 ======== (empty filler panel that absorbs extra vertical space)
        {
            jPanel2.setLayout(new FlowLayout());
        }
        add(jPanel2, new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0,
            GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));

        //---- autoConfigureButtonGroup ---- (makes Yes/No mutually exclusive)
        ButtonGroup autoConfigureButtonGroup = new ButtonGroup();
        autoConfigureButtonGroup.add(autoConfigureYesButton);
        autoConfigureButtonGroup.add(autoConfigureNoButton);

        //---- polarimetryButtonGroup ---- (makes Yes/No mutually exclusive)
        ButtonGroup polarimetryButtonGroup = new ButtonGroup();
        polarimetryButtonGroup.add(polarimetryYesButton);
        polarimetryButtonGroup.add(polarimetryNoButton);
        // JFormDesigner - End of component initialization //GEN-END:initComponents
    }

    // Package-private widget fields; presumably accessed directly by the
    // companion editor class — TODO confirm before narrowing visibility.
    // JFormDesigner - Variables declaration - DO NOT MODIFY //GEN-BEGIN:variables
    JLabel filterLabel;
    JLabel totalOnSourceTimeLabel;
    JComboBox filterComboBox;
    JLabel filterOverride;
    NumberBoxWidget totalOnSourceTime;
    JLabel totalOnSourceTimeUnitsLabel;
    JLabel nodIntervalLabel;
    NumberBoxWidget nodInterval;
    JLabel nodIntervalUnitsLabel;
    JLabel focalPlaneMaskLabel;
    JLabel posAngleLabel;
    JComboBox focalPlaneMaskComboBox;
    NumberBoxWidget posAngle;
    JLabel posAngleUnitsLabel;
    JLabel disperserLabel;
    JLabel centralWavelengthLabel;
    JComboBox disperserComboBox;
    NumberBoxWidget centralWavelength;
    JLabel chopAngleLabel;
    JLabel chopThrowLabel;
    NumberBoxWidget chopAngle;
    NumberBoxWidget chopThrow;
    JLabel chopAngleUnitsLabel;
    JLabel centralWavelengthUnitsLabel;
    JLabel chopThrowUnitsLabel;
    JLabel scienceFOV;
    JLabel exposureTimeLabel;
    TextBoxWidget exposureTime;
    JLabel exposureTimeUnitsLabel;
    JLabel autoConfigureLabel;
    JRadioButton autoConfigureYesButton;
    JRadioButton autoConfigureNoButton;
    JLabel nodOrientationLabel;
    JComboBox nodOrientationComboBox;
    JLabel maskOverride;
    JRadioButton polarimetryYesButton;
    JRadioButton polarimetryNoButton;
    // JFormDesigner - End of variables declaration //GEN-END:variables
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.state.internals;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
import org.apache.kafka.streams.processor.internals.ProcessorRecordContext;
import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.StateSerdes;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;

/**
 * A {@link WindowStore} wrapper that layers a {@link ThreadCache} in front of an
 * underlying byte store. Writes go to the cache first; when the cache evicts or
 * flushes a dirty entry, {@link #putAndMaybeForward} pushes it into the wrapped
 * store and (if registered) notifies the {@link CacheFlushListener} so downstream
 * processors see the change. Reads merge cached entries with the wrapped store.
 */
class CachingWindowStore
    extends WrappedStateStore<WindowStore<Bytes, byte[]>, byte[], byte[]>
    implements WindowStore<Bytes, byte[]>, CachedStateStore<byte[], byte[]> {

    // Fixed window size used to reconstruct Windowed keys from stored binary keys.
    private final long windowSize;
    private final SegmentedBytesStore.KeySchema keySchema = new WindowKeySchema();

    // Cache namespace: "<taskId>-<storeName>", assigned in initInternal().
    private String name;
    private ThreadCache cache;
    private boolean sendOldValues;

    private InternalProcessorContext context;
    private StateSerdes<Bytes, byte[]> bytesSerdes;
    private CacheFlushListener<byte[], byte[]> flushListener;

    // Maps store keys into segmented cache keys and back.
    private final SegmentedCacheFunction cacheFunction;

    CachingWindowStore(final WindowStore<Bytes, byte[]> underlying,
                       final long windowSize,
                       final long segmentInterval) {
        super(underlying);
        this.windowSize = windowSize;
        this.cacheFunction = new SegmentedCacheFunction(keySchema, segmentInterval);
    }

    @Override
    public void init(final ProcessorContext context, final StateStore root) {
        // Wire up the cache before delegating init to the wrapped store.
        initInternal((InternalProcessorContext) context);
        super.init(context, root);
    }

    @SuppressWarnings("unchecked")
    private void initInternal(final InternalProcessorContext context) {
        this.context = context;
        final String topic = ProcessorStateManager.storeChangelogTopic(context.applicationId(), name());
        bytesSerdes = new StateSerdes<>(
            topic,
            Serdes.Bytes(),
            Serdes.ByteArray());
        // Qualify the cache namespace with the task id so tasks sharing a thread
        // cache do not collide on the store name alone.
        name = context.taskId() + "-" + name();
        cache = this.context.getCache();

        // Dirty entries evicted/flushed from the cache are written through to the
        // underlying store (and possibly forwarded downstream) one at a time.
        cache.addDirtyEntryFlushListener(name, entries -> {
            for (final ThreadCache.DirtyEntry entry : entries) {
                putAndMaybeForward(entry, context);
            }
        });
    }

    /**
     * Writes a dirty cache entry through to the wrapped store and, when a flush
     * listener is registered, forwards the change downstream with the record
     * context that produced the entry. Statement order here is significant: the
     * old value must be fetched from the wrapped store BEFORE the new value is
     * written over it.
     */
    private void putAndMaybeForward(final ThreadCache.DirtyEntry entry,
                                    final InternalProcessorContext context) {
        final byte[] binaryWindowKey = cacheFunction.key(entry.key()).get();
        final Windowed<Bytes> windowedKeyBytes = WindowKeySchema.fromStoreBytesKey(binaryWindowKey, windowSize);
        final long windowStartTimestamp = windowedKeyBytes.window().start();
        final Bytes binaryKey = windowedKeyBytes.key();
        if (flushListener != null) {
            final byte[] rawNewValue = entry.newValue();
            // Only fetch the prior value when it is actually needed: either the
            // new value is a tombstone (null) or the listener wants old values.
            final byte[] rawOldValue = rawNewValue == null || sendOldValues ?
                wrapped().fetch(binaryKey, windowStartTimestamp) : null;

            // this is an optimization: if this key did not exist in underlying store and also not in the cache,
            // we can skip flushing to downstream as well as writing to underlying store
            if (rawNewValue != null || rawOldValue != null) {
                // we need to get the old values if needed, and then put to store, and then flush
                wrapped().put(binaryKey, entry.newValue(), windowStartTimestamp);

                // Temporarily swap in the record context captured when the entry
                // was cached, so downstream sees the originating topic/offset/ts;
                // always restore the current context afterwards.
                final ProcessorRecordContext current = context.recordContext();
                context.setRecordContext(entry.entry().context());
                try {
                    flushListener.apply(
                        binaryWindowKey,
                        rawNewValue,
                        sendOldValues ? rawOldValue : null,
                        entry.entry().context().timestamp());
                } finally {
                    context.setRecordContext(current);
                }
            }
        } else {
            // No listener: plain write-through, tombstones included.
            wrapped().put(binaryKey, entry.newValue(), windowStartTimestamp);
        }
    }

    @Override
    public boolean setFlushListener(final CacheFlushListener<byte[], byte[]> flushListener,
                                    final boolean sendOldValues) {
        this.flushListener = flushListener;
        this.sendOldValues = sendOldValues;
        return true;
    }

    @Override
    public synchronized void put(final Bytes key,
                                 final byte[] value) {
        // Default the window start to the current record timestamp.
        put(key, value, context.timestamp());
    }

    @Override
    public synchronized void put(final Bytes key,
                                 final byte[] value,
                                 final long windowStartTimestamp) {
        // since this function may not access the underlying inner store, we need to validate
        // if store is open outside as well.
        validateStoreOpen();

        // Sequence number 0: the caching layer keeps one entry per (key, window).
        final Bytes keyBytes = WindowKeySchema.toStoreKeyBinary(key, windowStartTimestamp, 0);
        final LRUCacheEntry entry =
            new LRUCacheEntry(
                value,
                context.headers(),
                true,
                context.offset(),
                context.timestamp(),
                context.partition(),
                context.topic());
        cache.put(name, cacheFunction.cacheKey(keyBytes), entry);
    }

    @Override
    public byte[] fetch(final Bytes key,
                        final long timestamp) {
        validateStoreOpen();
        final Bytes bytesKey = WindowKeySchema.toStoreKeyBinary(key, timestamp, 0);
        final Bytes cacheKey = cacheFunction.cacheKey(bytesKey);
        // NOTE(review): this null-cache guard is absent from fetchAll()/all()
        // below — presumably cache is always non-null once init has run; confirm.
        if (cache == null) {
            return wrapped().fetch(key, timestamp);
        }
        final LRUCacheEntry entry = cache.get(name, cacheKey);
        if (entry == null) {
            // Cache miss: fall through to the wrapped store.
            return wrapped().fetch(key, timestamp);
        } else {
            return entry.value();
        }
    }

    @SuppressWarnings("deprecation") // note, this method must be kept if super#fetch(...) is removed
    @Override
    public synchronized WindowStoreIterator<byte[]> fetch(final Bytes key,
                                                          final long timeFrom,
                                                          final long timeTo) {
        // since this function may not access the underlying inner store, we need to validate
        // if store is open outside as well.
        validateStoreOpen();

        final WindowStoreIterator<byte[]> underlyingIterator = wrapped().fetch(key, timeFrom, timeTo);
        if (cache == null) {
            return underlyingIterator;
        }

        // Range over the cache for this single key, filter with the key schema's
        // hasNextCondition, then lazily merge with the wrapped store's iterator.
        final Bytes cacheKeyFrom = cacheFunction.cacheKey(keySchema.lowerRangeFixedSize(key, timeFrom));
        final Bytes cacheKeyTo = cacheFunction.cacheKey(keySchema.upperRangeFixedSize(key, timeTo));
        final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, cacheKeyFrom, cacheKeyTo);

        final HasNextCondition hasNextCondition = keySchema.hasNextCondition(key, key, timeFrom, timeTo);
        final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCacheIterator = new FilteredCacheIterator(
            cacheIterator, hasNextCondition, cacheFunction
        );

        return new MergedSortedCacheWindowStoreIterator(filteredCacheIterator, underlyingIterator);
    }

    @SuppressWarnings("deprecation") // note, this method must be kept if super#fetch(...) is removed
    @Override
    public KeyValueIterator<Windowed<Bytes>, byte[]> fetch(final Bytes from,
                                                           final Bytes to,
                                                           final long timeFrom,
                                                           final long timeTo) {
        // since this function may not access the underlying inner store, we need to validate
        // if store is open outside as well.
        validateStoreOpen();

        final KeyValueIterator<Windowed<Bytes>, byte[]> underlyingIterator =
            wrapped().fetch(from, to, timeFrom, timeTo);
        if (cache == null) {
            return underlyingIterator;
        }

        // Same merge strategy as the single-key fetch, but over a key range.
        final Bytes cacheKeyFrom = cacheFunction.cacheKey(keySchema.lowerRange(from, timeFrom));
        final Bytes cacheKeyTo = cacheFunction.cacheKey(keySchema.upperRange(to, timeTo));
        final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, cacheKeyFrom, cacheKeyTo);

        final HasNextCondition hasNextCondition = keySchema.hasNextCondition(from, to, timeFrom, timeTo);
        final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCacheIterator =
            new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction);

        return new MergedSortedCacheWindowStoreKeyValueIterator(
            filteredCacheIterator,
            underlyingIterator,
            bytesSerdes,
            windowSize,
            cacheFunction
        );
    }

    @SuppressWarnings("deprecation") // note, this method must be kept if super#fetchAll(...) is removed
    @Override
    public KeyValueIterator<Windowed<Bytes>, byte[]> fetchAll(final long timeFrom,
                                                              final long timeTo) {
        validateStoreOpen();

        // NOTE(review): unlike fetch(...), no null-cache guard here — verify
        // fetchAll is never reached before initInternal().
        final KeyValueIterator<Windowed<Bytes>, byte[]> underlyingIterator = wrapped().fetchAll(timeFrom, timeTo);
        final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(name);

        final HasNextCondition hasNextCondition = keySchema.hasNextCondition(null, null, timeFrom, timeTo);
        final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCacheIterator =
            new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction);
        return new MergedSortedCacheWindowStoreKeyValueIterator(
            filteredCacheIterator,
            underlyingIterator,
            bytesSerdes,
            windowSize,
            cacheFunction
        );
    }

    @Override
    public KeyValueIterator<Windowed<Bytes>, byte[]> all() {
        validateStoreOpen();

        final KeyValueIterator<Windowed<Bytes>, byte[]> underlyingIterator = wrapped().all();
        final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(name);

        return new MergedSortedCacheWindowStoreKeyValueIterator(
            cacheIterator,
            underlyingIterator,
            bytesSerdes,
            windowSize,
            cacheFunction
        );
    }

    @Override
    public synchronized void flush() {
        // Flush cache first so dirty entries are written through (via the flush
        // listener registered in initInternal) before the wrapped store flushes.
        cache.flush(name);
        wrapped().flush();
    }

    @Override
    public void close() {
        flush();
        cache.close(name);
        wrapped().close();
    }
}
/*
 * Copyright 2012 Dynastream Innovations Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.surfsite.iconsole;

import android.os.RemoteException;
import android.util.Log;

import com.dsi.ant.channel.AntChannel;
import com.dsi.ant.channel.AntCommandFailedException;
import com.dsi.ant.channel.IAntChannelEventHandler;
import com.dsi.ant.message.ChannelId;
import com.dsi.ant.message.ChannelType;
import com.dsi.ant.message.EventCode;
import com.dsi.ant.message.fromant.ChannelEventMessage;
import com.dsi.ant.message.fromant.MessageFromAntType;
import com.dsi.ant.message.ipc.AntMessageParcel;

import java.util.Random;

/**
 * Simulates an ANT+ bike-speed sensor on a master (TX) channel.
 *
 * <p>The channel is opened in the constructor. On every TX channel event the
 * {@link ChannelEventCallback} converts the current {@link #speed} (km/h) into
 * accumulated wheel revolutions and broadcasts the standard speed payload
 * (event time in 1/1024 s plus cumulative revolution count), interleaving the
 * common data pages 1-3 every 65th message as the profile requires.
 */
public class SpeedChannelController {

    // The device type and transmission type to be part of the channel ID message
    private static final int CHANNEL_SPEED_DEVICE_TYPE = 0x7B;
    private static final int CHANNEL_SPEED_TRANSMISSION_TYPE = 1;

    // The period and frequency values the channel will be configured to
    private static final int CHANNEL_SPEED_PERIOD = 8118; // 1 Hz
    private static final int CHANNEL_SPEED_FREQUENCY = 57;

    private static final String TAG = SpeedChannelController.class.getSimpleName();

    /** Full 32-bit sensor serial; only the low 16 bits fit the channel ID device number. */
    public static final int SPEED_SENSOR_ID = 0x9e3d4b65;

    // NOTE(review): appears unused in this class — kept for compatibility.
    private static Random randGen = new Random();

    private AntChannel mAntChannel;

    private ChannelEventCallback mChannelEventCallback = new ChannelEventCallback();

    private boolean mIsOpen;

    /** Current speed in km/h, written externally and read by the TX handler. */
    double speed = 0.0;

    public SpeedChannelController(AntChannel antChannel) {
        mAntChannel = antChannel;
        openChannel();
    }

    /**
     * Assigns, configures, and opens the ANT channel as a bidirectional master.
     *
     * @return {@code true} if the channel is open after this call
     */
    boolean openChannel() {
        if (null != mAntChannel) {
            if (mIsOpen) {
                Log.w(TAG, "Channel was already open");
            } else {
                // Channel ID message contains device number, type and transmission type. In
                // order for master (TX) channels and slave (RX) channels to connect, they
                // must have the same channel ID, or wildcard (0) is used.
                ChannelId channelId = new ChannelId(SPEED_SENSOR_ID & 0xFFFF,
                        CHANNEL_SPEED_DEVICE_TYPE, CHANNEL_SPEED_TRANSMISSION_TYPE);

                try {
                    // Setting the channel event handler so that we can receive messages from ANT
                    mAntChannel.setChannelEventHandler(mChannelEventCallback);

                    // Performs channel assignment by assigning the type to the channel. Additional
                    // features (such as, background scanning and frequency agility) can be enabled
                    // by passing an ExtendedAssignment object to assign(ChannelType, ExtendedAssignment).
                    mAntChannel.assign(ChannelType.BIDIRECTIONAL_MASTER);

                    /*
                     * Configures the channel ID, messaging period and rf frequency after assigning,
                     * then opening the channel.
                     *
                     * For any additional ANT features such as proximity search or background scanning, refer to
                     * the ANT Protocol Doc found at:
                     * http://www.thisisant.com/resources/ant-message-protocol-and-usage/
                     */
                    mAntChannel.setChannelId(channelId);
                    mAntChannel.setPeriod(CHANNEL_SPEED_PERIOD);
                    mAntChannel.setRfFrequency(CHANNEL_SPEED_FREQUENCY);
                    mAntChannel.open();
                    mIsOpen = true;

                    Log.d(TAG, "Opened channel with device number: " + SPEED_SENSOR_ID);

                } catch (RemoteException e) {
                    channelError(e);
                } catch (AntCommandFailedException e) {
                    // This will release, and therefore unassign if required
                    channelError("Open failed", e);
                }
            }
        } else {
            Log.w(TAG, "No channel available");
        }

        return mIsOpen;
    }

    /**
     * Logs a failure to communicate with the remote ANT service.
     *
     * @param e the cause, included in the log so the stack trace is not lost
     */
    void channelError(RemoteException e) {
        String logString = "Remote service communication failed.";

        // FIX: previously the exception was discarded (only the fixed string was
        // logged); pass it through so the cause and stack trace are preserved.
        Log.e(TAG, logString, e);
    }

    /**
     * Logs a failed ANT command with as much detail as the failure provides,
     * then releases the channel (which also unassigns it if necessary).
     *
     * @param error context string describing the attempted operation
     * @param e     the command failure; may or may not carry a response message
     */
    void channelError(String error, AntCommandFailedException e) {
        StringBuilder logString;

        if (e.getResponseMessage() != null) {
            // The command got a response from ANT: report its message id and code.
            String initiatingMessageId = "0x" + Integer.toHexString(
                    e.getResponseMessage().getInitiatingMessageId());
            String rawResponseCode = "0x" + Integer.toHexString(
                    e.getResponseMessage().getRawResponseCode());

            logString = new StringBuilder(error)
                    .append(". Command ")
                    .append(initiatingMessageId)
                    .append(" failed with code ")
                    .append(rawResponseCode);
        } else {
            // No response: report what was attempted and why it failed locally.
            String attemptedMessageId = "0x" + Integer.toHexString(
                    e.getAttemptedMessageType().getMessageId());
            String failureReason = e.getFailureReason().toString();

            logString = new StringBuilder(error)
                    .append(". Command ")
                    .append(attemptedMessageId)
                    .append(" failed with reason ")
                    .append(failureReason);
        }

        Log.e(TAG, logString.toString(), e);

        mAntChannel.release();
        Log.e(TAG, "ANT Command Failed");
    }

    /** Releases the channel; the {@link AntChannel} instance cannot be reused afterwards. */
    public void close() {
        // TODO kill all our resources
        if (null != mAntChannel) {
            mIsOpen = false;

            // Releasing the channel to make it available for others.
            // After releasing, the AntChannel instance cannot be reused.
            mAntChannel.release();
            mAntChannel = null;
        }
        Log.e(TAG, "Channel Closed");
    }

    /**
     * Implements the Channel Event Handler Interface so that messages can be
     * received and channel death events can be handled.
     */
    public class ChannelEventCallback implements IAntChannelEventHandler {

        // Cumulative wheel revolutions broadcast in payload bytes 6-7.
        int revCounts = 0;
        // Counts TX events to decide when to interleave common pages (every 65th).
        int ucMessageCount = 0;
        // Page-change toggle: bit 7 of byte 0 toggles every 4 messages.
        byte ucPageChange = 0;
        // Which common page (1-3) to send next during the interleave window.
        byte ucExtMesgType = 1;
        // Unix time (seconds) of the previous TX event; 0 means "not yet seen".
        long lastTime = 0;
        // Distance travelled since last event (m), incl. the sub-revolution remainder.
        double way;
        int rev;
        double remWay;
        // Wheel circumference in metres.
        // NOTE(review): 0.1 m is unusually small for a bike wheel — confirm intent.
        double wheel = 0.1;

        @Override
        public void onChannelDeath() {
            // Display channel death message when channel dies
            Log.e(TAG, "Channel Death");
        }

        @Override
        public void onReceiveMessage(MessageFromAntType messageType, AntMessageParcel antParcel) {
            Log.d(TAG, "Rx: " + antParcel);
            Log.d(TAG, "Message Type: " + messageType);

            // Switching on message type to handle different types of messages
            switch (messageType) {
                // If data message, construct from parcel and update channel data
                case BROADCAST_DATA:
                    // Rx Data
                    //updateData(new BroadcastDataMessage(antParcel).getPayload());
                    break;
                case ACKNOWLEDGED_DATA:
                    // Rx Data
                    //updateData(new AcknowledgedDataMessage(antParcel).getPayload());
                    break;
                case CHANNEL_EVENT:
                    // Constructing channel event message from parcel
                    ChannelEventMessage eventMessage = new ChannelEventMessage(antParcel);
                    EventCode code = eventMessage.getEventCode();
                    Log.d(TAG, "Event Code: " + code);

                    // Switching on event code to handle the different types of channel events
                    switch (code) {
                        case TX:
                            long unixTime = System.currentTimeMillis() / 1000L;
                            if (lastTime != 0) {
                                // Convert km/h to m/s (divide by 3.6), integrate over the
                                // elapsed time, and carry the sub-revolution remainder.
                                way = speed * (unixTime - lastTime) / 3.6 + remWay;
                                rev = (int) (way / wheel + 0.5);
                                remWay = way - rev * wheel;
                                revCounts += rev;
                            }
                            lastTime = unixTime;

                            // Toggle bit 7 every fourth message via the rolling 0x20 add.
                            ucPageChange += 0x20;
                            ucPageChange &= 0xF0;
                            ucMessageCount += 1;

                            byte[] payload = new byte[8];

                            if (ucMessageCount >= 65) {
                                // Interleave common pages 1-3 for messages 65-68.
                                if (ucExtMesgType >= 4)
                                    ucExtMesgType = 1;

                                if (ucExtMesgType == 1) {
                                    // Page 1: cumulative operating time in 2 s units.
                                    int halfunixTime = (int) (unixTime / 2L);
                                    payload[0] = (byte) ((byte) 0x01 | (byte) (ucPageChange & (byte) 0x80));
                                    payload[1] = (byte) (halfunixTime & 0xFF);
                                    payload[2] = (byte) ((halfunixTime >> 8) & 0xFF);
                                    payload[3] = (byte) ((halfunixTime >> 16) & 0xFF);
                                } else if (ucExtMesgType == 2) {
                                    // Page 2: manufacturer ID and upper serial bytes.
                                    payload[0] = (byte) ((byte) 0x02 | (byte) (ucPageChange & (byte) 0x80));
                                    payload[1] = (byte) 0xFF;
                                    payload[2] = (byte) ((SPEED_SENSOR_ID >> 16) & 0xFF);
                                    payload[3] = (byte) ((SPEED_SENSOR_ID >> 24) & 0xFF);
                                } else if (ucExtMesgType == 3) {
                                    // Page 3: hardware/software/model version.
                                    payload[0] = (byte) ((byte) 0x03 | (byte) (ucPageChange & (byte) 0x80));
                                    payload[1] = (byte) 0x01;
                                    payload[2] = (byte) 0x01;
                                    payload[3] = (byte) 0x01;
                                }
                                if (ucMessageCount >= 68) {
                                    // Window done: restart the count, advance to the next page.
                                    ucMessageCount = 0;
                                    ucExtMesgType += 1;
                                }
                            } else {
                                // Default page 0: bytes 1-3 reserved (0xFF).
                                payload[0] = (byte) (ucPageChange & 0x80);
                                payload[1] = (byte) 0xFF;
                                payload[2] = (byte) 0xFF;
                                payload[3] = (byte) 0xFF;
                            }

                            // Bytes 4-7: event time (1/1024 s) and cumulative revolutions,
                            // both little-endian 16-bit rollover counters.
                            int unixTime1024 = (int) (unixTime * 1024);
                            payload[4] = (byte) (unixTime1024 & 0xFF);
                            payload[5] = (byte) ((unixTime1024 >> 8) & 0xFF);
                            payload[6] = (byte) (revCounts & 0xFF);
                            payload[7] = (byte) ((revCounts >> 8) & 0xFF);

                            if (mIsOpen) {
                                try {
                                    // Setting the data to be broadcast on the next channel period
                                    mAntChannel.setBroadcastData(payload);
                                } catch (RemoteException e) {
                                    channelError(e);
                                }
                            }
                            break;
                        case CHANNEL_COLLISION:
                            // Keep the page-change and message counters advancing even
                            // though this period's broadcast was lost.
                            ucPageChange += 0x20;
                            ucPageChange &= 0xF0;
                            ucMessageCount += 1;
                            break;
                        case RX_SEARCH_TIMEOUT:
                            // TODO May want to keep searching
                            Log.e(TAG, "No Device Found");
                            break;
                        case CHANNEL_CLOSED:
                        case RX_FAIL:
                        case RX_FAIL_GO_TO_SEARCH:
                        case TRANSFER_RX_FAILED:
                        case TRANSFER_TX_COMPLETED:
                        case TRANSFER_TX_FAILED:
                        case TRANSFER_TX_START:
                        case UNKNOWN:
                            // TODO More complex communication will need to handle these events
                            break;
                    }
                    break;
                case ANT_VERSION:
                case BURST_TRANSFER_DATA:
                case CAPABILITIES:
                case CHANNEL_ID:
                case CHANNEL_RESPONSE:
                case CHANNEL_STATUS:
                case SERIAL_NUMBER:
                case OTHER:
                    // TODO More complex communication will need to handle these message types
                    break;
            }
        }
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.groovy.util;

import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileSystem;
import com.intellij.openapi.vfs.newvfs.ArchiveFileSystem;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiClass;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.config.GroovyConfigUtils;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Utilities for locating Groovy SDK jars and home directories among the
 * libraries of a module, its (transitive, exported) module dependencies,
 * and the global library table.
 *
 * @author ilyas
 */
public class LibrariesUtil {
  /** A class known to live in every Groovy distribution; used to detect the SDK jar. */
  public static final String SOME_GROOVY_CLASS = "org.codehaus.groovy.control.CompilationUnit";

  private LibrariesUtil() {
  }

  /**
   * Collects, under a read action, all libraries reachable from {@code module}
   * (directly, or through exported entries of dependent modules) that satisfy
   * {@code condition}. Returns an empty array for a {@code null} module.
   */
  public static Library[] getLibrariesByCondition(final Module module, final Condition<Library> condition) {
    if (module == null) return Library.EMPTY_ARRAY;

    final ArrayList<Library> collected = new ArrayList<>();
    ApplicationManager.getApplication().runReadAction(
      () -> populateOrderEntries(module, condition, collected, false, new THashSet<>()));
    return collected.toArray(Library.EMPTY_ARRAY);
  }

  /**
   * Depth-first walk over order entries; module dependencies are followed with
   * {@code exportedOnly == true} so only exported libraries of dependencies count.
   * The {@code visited} set breaks dependency cycles.
   */
  private static void populateOrderEntries(@NotNull Module module,
                                           Condition<Library> condition,
                                           ArrayList<Library> libraries,
                                           boolean exportedOnly,
                                           Set<Module> visited) {
    if (!visited.add(module)) return;

    for (OrderEntry entry : ModuleRootManager.getInstance(module).getOrderEntries()) {
      if (entry instanceof ModuleOrderEntry) {
        final Module dependency = ((ModuleOrderEntry)entry).getModule();
        if (dependency != null) {
          populateOrderEntries(dependency, condition, libraries, true, visited);
        }
        continue;
      }
      if (!(entry instanceof LibraryOrderEntry)) continue;

      final LibraryOrderEntry libraryEntry = (LibraryOrderEntry)entry;
      if (exportedOnly && !libraryEntry.isExported()) continue;

      final Library library = libraryEntry.getLibrary();
      if (condition.value(library)) {
        libraries.add(library);
      }
    }
  }

  /** Returns every library in the application-level table matching {@code condition}. */
  public static Library[] getGlobalLibraries(Condition<Library> condition) {
    final LibraryTable table = LibraryTablesRegistrar.getInstance().getLibraryTable();
    final List<Library> matching = ContainerUtil.findAll(table.getLibraries(), condition);
    return matching.toArray(Library.EMPTY_ARRAY);
  }

  /** Like {@link #getGroovyLibraryHome(VirtualFile[])} but never returns {@code null}. */
  @NotNull
  public static String getGroovyLibraryHome(Library library) {
    final VirtualFile[] classRoots = library.getFiles(OrderRootType.CLASSES);
    final String home = getGroovyLibraryHome(classRoots);
    if (home == null) {
      return "";
    }
    return home;
  }

  public static boolean hasGroovySdk(@Nullable Module module) {
    if (module == null) return false;
    return getGroovyHomePath(module) != null;
  }

  /**
   * Finds the local jar file containing {@code classQName} within the module's
   * dependencies-and-libraries scope, or {@code null} if no such jar exists.
   */
  @Nullable
  public static VirtualFile findJarWithClass(@NotNull Module module, final String classQName) {
    final GlobalSearchScope scope = GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module);
    for (PsiClass psiClass : JavaPsiFacade.getInstance(module.getProject()).findClasses(classQName, scope)) {
      final VirtualFile jar = getLocalFor(psiClass.getContainingFile().getVirtualFile());
      if (jar != null) return jar;
    }
    return null;
  }

  /** Maps an archive entry back to its local jar file; {@code null} when not applicable. */
  private static VirtualFile getLocalFor(VirtualFile virtualFile) {
    if (virtualFile == null) return null;

    final VirtualFileSystem fileSystem = virtualFile.getFileSystem();
    if (!(fileSystem instanceof ArchiveFileSystem)) return null;

    return ((ArchiveFileSystem)fileSystem).getLocalByEntry(virtualFile);
  }

  /**
   * Resolves the Groovy home directory for a module: first by locating the jar
   * that provides {@link #SOME_GROOVY_CLASS} (skipped while indexing), then by
   * scanning library/SDK class roots.
   */
  @Nullable
  public static String getGroovyHomePath(@NotNull Module module) {
    if (!DumbService.isDumb(module.getProject())) {
      final VirtualFile local = findJarWithClass(module, SOME_GROOVY_CLASS);
      if (local != null) {
        final VirtualFile parent = local.getParent();
        if (parent != null) {
          // A jar under lib/ or embeddable/ means home is one level further up.
          final boolean knownSubdir = "lib".equals(parent.getName()) || "embeddable".equals(parent.getName());
          if (knownSubdir && parent.getParent() != null) {
            return parent.getParent().getPath();
          }
          return parent.getPath();
        }
      }
    }

    final VirtualFile[] roots = OrderEnumerator.orderEntries(module).getAllLibrariesAndSdkClassesRoots();
    final String home = getGroovyLibraryHome(roots);
    if (StringUtil.isEmpty(home)) {
      return null;
    }
    return home;
  }

  /** Derives the SDK home from a groovy-*.jar class root, or {@code null}. */
  @Nullable
  private static String getGroovySdkHome(VirtualFile[] classRoots) {
    for (VirtualFile root : classRoots) {
      if (!GroovyConfigUtils.GROOVY_JAR_PATTERN.matcher(root.getName()).matches()) continue;

      final File jarOnDisk = new File(root.getPresentableUrl());
      if (!jarOnDisk.exists()) continue;

      final File containingDir = jarOnDisk.getParentFile();
      if (containingDir == null) continue;

      // groovy.jar under lib/ means home is the grandparent directory.
      return "lib".equals(containingDir.getName()) ? containingDir.getParent() : containingDir.getPath();
    }
    return null;
  }

  /** Finds a groovy-all jar among the class roots, or {@code null}. */
  @Nullable
  private static String getEmbeddableGroovyJar(VirtualFile[] classRoots) {
    for (VirtualFile root : classRoots) {
      if (!GroovyConfigUtils.matchesGroovyAll(root.getName())) continue;

      final File jarOnDisk = new File(root.getPresentableUrl());
      if (jarOnDisk.exists()) {
        return jarOnDisk.getPath();
      }
    }
    return null;
  }

  /**
   * Resolves a Groovy home from class roots: plain SDK layout first, then the
   * embeddable (groovy-all) layout.
   */
  @Nullable
  public static String getGroovyLibraryHome(VirtualFile[] classRoots) {
    final String sdkHome = getGroovySdkHome(classRoots);
    if (sdkHome != null) {
      return sdkHome;
    }

    final String embeddable = getEmbeddableGroovyJar(classRoots);
    if (embeddable == null) return null;

    final File embeddableJar = new File(embeddable);
    if (!embeddableJar.exists()) return null;

    final File parent = embeddableJar.getParentFile();
    if ("embeddable".equals(parent.getName()) || "lib".equals(parent.getName())) {
      return parent.getParent();
    }
    return parent.getPath();
  }

  /** Returns the local jar for an archive entry, or the file itself when not in an archive. */
  @NotNull
  public static VirtualFile getLocalFile(@NotNull VirtualFile libFile) {
    final VirtualFile local = getLocalFor(libFile);
    return local == null ? libFile : local;
  }

  /**
   * Moves the library entry (expected at the end of {@code model}'s order) to
   * just after the module-source entry, shifting everything in between down.
   */
  public static void placeEntryToCorrectPlace(ModifiableRootModel model, LibraryOrderEntry addedEntry) {
    final OrderEntry[] order = model.getOrderEntries();
    //place library after module sources
    assert order[order.length - 1] == addedEntry;

    int insertionPoint = -1;
    for (int i = 0; i < order.length - 1; i++) {
      if (order[i] instanceof ModuleSourceOrderEntry) {
        insertionPoint = i + 1;
        break;
      }
    }
    if (insertionPoint >= 0) {
      // Shift [insertionPoint, len-1) one slot right, then drop the entry in.
      System.arraycopy(order, insertionPoint, order, insertionPoint + 1, order.length - 1 - insertionPoint);
      order[insertionPoint] = addedEntry;
      model.rearrangeOrderEntries(order);
    }
  }

  /** Lists files in {@code dirPath} whose names match {@code pattern}; empty array if unreadable. */
  public static File[] getFilesInDirectoryByPattern(String dirPath, final Pattern pattern) {
    final File[] matches = new File(dirPath).listFiles((dir, name) -> pattern.matcher(name).matches());
    return matches == null ? new File[0] : matches;
  }
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package com.android.systemui.navigation.smartbar;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.CanvasProperty;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.drawable.Drawable;
import android.view.DisplayListCanvas;
import android.view.RenderNodeAnimator;
import android.view.View;
import android.view.animation.Interpolator;
import android.view.animation.PathInterpolator;

import com.android.internal.utils.du.DUActionUtils;
import com.android.systemui.R;

import java.util.ArrayList;
import java.util.HashSet;

/**
 * Press-state ripple drawable for navigation-bar buttons. Has two render
 * paths selected per-draw by {@link Canvas#isHardwareAccelerated()}: a
 * software path driven by {@link ObjectAnimator} over glowAlpha/glowScale,
 * and a hardware path driven by {@link RenderNodeAnimator} over
 * {@link CanvasProperty} values drawn on a {@link DisplayListCanvas}.
 */
public class SmartButtonRipple extends Drawable {

    // Maximum scale of the glow relative to the base ripple size.
    private static final float GLOW_MAX_SCALE_FACTOR = 1.35f;
    private static final float GLOW_MAX_ALPHA = 0.2f;
    private static final int ANIMATION_DURATION_SCALE = 350;
    private static final int ANIMATION_DURATION_FADE = 450;

    private Paint mRipplePaint;
    // Hardware-path round-rect geometry and paint, animated on the render thread.
    private CanvasProperty<Float> mLeftProp;
    private CanvasProperty<Float> mTopProp;
    private CanvasProperty<Float> mRightProp;
    private CanvasProperty<Float> mBottomProp;
    private CanvasProperty<Float> mRxProp;
    private CanvasProperty<Float> mRyProp;
    private CanvasProperty<Paint> mPaintProp;
    private float mGlowAlpha = 0f;
    private float mGlowScale = 1f;
    private boolean mPressed;
    // True while the hardware glow should be drawn; cleared when all animations end unpressed.
    private boolean mDrawingHardwareGlow;
    private int mMaxWidth;

    private final Interpolator mInterpolator = new LogInterpolator();
    private final Interpolator mAlphaExitInterpolator = new PathInterpolator(0.4f, 0f, 1f, 1f);
    private boolean mSupportHardware;
    // View hosting this drawable; RenderNodeAnimators must be targeted at it.
    private final View mTargetView;

    private final HashSet<Animator> mRunningAnimations = new HashSet<>();
    private final ArrayList<Animator> mTmpArray = new ArrayList<>();
    private int mRippleColor;
    private boolean mEnabled;

    public SmartButtonRipple(Context ctx, View targetView) {
        final Context context = ctx;
        mMaxWidth = context.getResources().getDimensionPixelSize(R.dimen.key_button_ripple_max_width);
        mTargetView = targetView;
        mRippleColor = context.getResources().getColor(R.color.navbutton_ripple_color);
    }

    // Lazily creates the shared ripple paint with the configured color.
    private Paint getRipplePaint() {
        if (mRipplePaint == null) {
            mRipplePaint = new Paint();
            mRipplePaint.setAntiAlias(true);
            mRipplePaint.setColor(mRippleColor);
        }
        return mRipplePaint;
    }

    // Software path: draw a centered round-rect sized by glowScale with glowAlpha opacity.
    private void drawSoftware(Canvas canvas) {
        if (mGlowAlpha > 0f) {
            final Paint p = getRipplePaint();
            p.setAlpha((int)(mGlowAlpha * 255f));

            final float w = getBounds().width();
            final float h = getBounds().height();
            final boolean horizontal = w > h;
            final float diameter = getRippleSize() * mGlowScale;
            final float radius = diameter * .5f;
            final float cx = w * .5f;
            final float cy = h * .5f;
            // Along the long axis the rect extends by `radius`; the short axis is capped
            // at half the bounds so the rect never escapes the button.
            final float rx = horizontal ? radius : cx;
            final float ry = horizontal ? cy : radius;
            final float corner = horizontal ? cy : cx;

            canvas.drawRoundRect(cx - rx, cy - ry,
                    cx + rx, cy + ry,
                    corner, corner, p);
        }
    }

    public void setEnabled(boolean enabled) {
        mEnabled = enabled;
    }

    @Override
    public void draw(Canvas canvas) {
        if (mEnabled) {
            // Decide the render path per-draw; setPressed() consults this flag later.
            mSupportHardware = canvas.isHardwareAccelerated();
            if (mSupportHardware) {
                drawHardware((DisplayListCanvas) canvas);
            } else {
                drawSoftware(canvas);
            }
        }
    }

    @Override
    public void setAlpha(int alpha) {
        // Not supported.
    }

    @Override
    public void setColorFilter(ColorFilter colorFilter) {
        // Not supported.
    }

    @Override
    public int getOpacity() {
        return PixelFormat.TRANSLUCENT;
    }

    private boolean isHorizontal() {
        return getBounds().width() > getBounds().height();
    }

    private void drawHardware(DisplayListCanvas c) {
        if (mDrawingHardwareGlow) {
            // Geometry and paint are CanvasProperties animated by RenderNodeAnimators.
            c.drawRoundRect(mLeftProp, mTopProp, mRightProp, mBottomProp, mRxProp, mRyProp,
                    mPaintProp);
        }
    }

    // Property accessors used reflectively by ObjectAnimator ("glowAlpha"/"glowScale").
    public float getGlowAlpha() {
        return mGlowAlpha;
    }

    public void setGlowAlpha(float x) {
        mGlowAlpha = x;
        invalidateSelf();
    }

    public float getGlowScale() {
        return mGlowScale;
    }

    public void setGlowScale(float x) {
        mGlowScale = x;
        invalidateSelf();
    }

    @Override
    protected boolean onStateChange(int[] state) {
        // Trigger enter/exit animations only on an actual pressed-state transition.
        boolean pressed = false;
        for (int i = 0; i < state.length; i++) {
            if (state[i] == android.R.attr.state_pressed) {
                pressed = true;
                break;
            }
        }
        if (pressed != mPressed) {
            setPressed(pressed);
            mPressed = pressed;
            return true;
        } else {
            return false;
        }
    }

    @Override
    public void jumpToCurrentState() {
        cancelAnimations();
    }

    @Override
    public boolean isStateful() {
        return true;
    }

    public void setPressed(boolean pressed) {
        if (mSupportHardware) {
            setPressedHardware(pressed);
        } else {
            setPressedSoftware(pressed);
        }
    }

    private void cancelAnimations() {
        // Cancel via a copy: Animator.cancel() fires onAnimationEnd, which mutates
        // mRunningAnimations; iterating the live set would throw.
        mTmpArray.addAll(mRunningAnimations);
        int size = mTmpArray.size();
        for (int i = 0; i < size; i++) {
            Animator a = mTmpArray.get(i);
            a.cancel();
        }
        mTmpArray.clear();
        mRunningAnimations.clear();
    }

    private void setPressedSoftware(boolean pressed) {
        if (pressed) {
            enterSoftware();
        } else {
            exitSoftware();
        }
    }

    // Press (software): jump alpha to max, animate scale 0 -> max.
    private void enterSoftware() {
        cancelAnimations();
        mGlowAlpha = GLOW_MAX_ALPHA;
        ObjectAnimator scaleAnimator = ObjectAnimator.ofFloat(this, "glowScale",
                0f, GLOW_MAX_SCALE_FACTOR);
        scaleAnimator.setInterpolator(mInterpolator);
        scaleAnimator.setDuration(ANIMATION_DURATION_SCALE);
        scaleAnimator.addListener(mAnimatorListener);
        scaleAnimator.start();
        mRunningAnimations.add(scaleAnimator);
    }

    // Release (software): fade alpha from its current value to 0.
    private void exitSoftware() {
        ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(this, "glowAlpha", mGlowAlpha, 0f);
        alphaAnimator.setInterpolator(mAlphaExitInterpolator);
        alphaAnimator.setDuration(ANIMATION_DURATION_FADE);
        alphaAnimator.addListener(mAnimatorListener);
        alphaAnimator.start();
        mRunningAnimations.add(alphaAnimator);
    }

    private void setPressedHardware(boolean pressed) {
        if (pressed) {
            enterHardware();
        } else {
            exitHardware();
        }
    }

    /**
     * Sets the left/top property for the round rect to {@code prop} depending on whether we are
     * horizontal or vertical mode.
     */
    private void setExtendStart(CanvasProperty<Float> prop) {
        if (isHorizontal()) {
            mLeftProp = prop;
        } else {
            mTopProp = prop;
        }
    }

    private CanvasProperty<Float> getExtendStart() {
        return isHorizontal() ? mLeftProp : mTopProp;
    }

    /**
     * Sets the right/bottom property for the round rect to {@code prop} depending on whether we are
     * horizontal or vertical mode.
     */
    private void setExtendEnd(CanvasProperty<Float> prop) {
        if (isHorizontal()) {
            mRightProp = prop;
        } else {
            mBottomProp = prop;
        }
    }

    private CanvasProperty<Float> getExtendEnd() {
        return isHorizontal() ? mRightProp : mBottomProp;
    }

    // Length of the button along its long axis.
    private int getExtendSize() {
        return isHorizontal() ? getBounds().width() : getBounds().height();
    }

    // Base ripple size: the long-axis length, capped at the configured max width.
    private int getRippleSize() {
        int size = isHorizontal() ? getBounds().width() : getBounds().height();
        return Math.min(size, mMaxWidth);
    }

    // Press (hardware): animate the long-axis edges outward from center on the
    // render thread; the short axis and corner radii are fixed for this press.
    private void enterHardware() {
        cancelAnimations();
        mDrawingHardwareGlow = true;
        setExtendStart(CanvasProperty.createFloat(getExtendSize() / 2));
        final RenderNodeAnimator startAnim = new RenderNodeAnimator(getExtendStart(),
                getExtendSize()/2 - GLOW_MAX_SCALE_FACTOR * getRippleSize()/2);
        startAnim.setDuration(ANIMATION_DURATION_SCALE);
        startAnim.setInterpolator(mInterpolator);
        startAnim.addListener(mAnimatorListener);
        startAnim.setTarget(mTargetView);

        setExtendEnd(CanvasProperty.createFloat(getExtendSize() / 2));
        final RenderNodeAnimator endAnim = new RenderNodeAnimator(getExtendEnd(),
                getExtendSize()/2 + GLOW_MAX_SCALE_FACTOR * getRippleSize()/2);
        endAnim.setDuration(ANIMATION_DURATION_SCALE);
        endAnim.setInterpolator(mInterpolator);
        endAnim.addListener(mAnimatorListener);
        endAnim.setTarget(mTargetView);

        // Fix the cross-axis extent and make the corners fully rounded.
        if (isHorizontal()) {
            mTopProp = CanvasProperty.createFloat(0f);
            mBottomProp = CanvasProperty.createFloat(getBounds().height());
            mRxProp = CanvasProperty.createFloat(getBounds().height()/2);
            mRyProp = CanvasProperty.createFloat(getBounds().height()/2);
        } else {
            mLeftProp = CanvasProperty.createFloat(0f);
            mRightProp = CanvasProperty.createFloat(getBounds().width());
            mRxProp = CanvasProperty.createFloat(getBounds().width()/2);
            mRyProp = CanvasProperty.createFloat(getBounds().width()/2);
        }

        mGlowScale = GLOW_MAX_SCALE_FACTOR;
        mGlowAlpha = GLOW_MAX_ALPHA;
        mRipplePaint = getRipplePaint();
        mRipplePaint.setAlpha((int) (mGlowAlpha * 255));
        mPaintProp = CanvasProperty.createPaint(mRipplePaint);

        startAnim.start();
        endAnim.start();
        mRunningAnimations.add(startAnim);
        mRunningAnimations.add(endAnim);

        invalidateSelf();
    }

    // Release (hardware): fade the paint alpha to 0 on the render thread.
    private void exitHardware() {
        mPaintProp = CanvasProperty.createPaint(getRipplePaint());
        final RenderNodeAnimator opacityAnim = new RenderNodeAnimator(mPaintProp,
                RenderNodeAnimator.PAINT_ALPHA, 0);
        opacityAnim.setDuration(ANIMATION_DURATION_FADE);
        opacityAnim.setInterpolator(mAlphaExitInterpolator);
        opacityAnim.addListener(mAnimatorListener);
        opacityAnim.setTarget(mTargetView);

        opacityAnim.start();
        mRunningAnimations.add(opacityAnim);

        invalidateSelf();
    }

    // Stops drawing the hardware glow once the last animation ends while unpressed.
    private final AnimatorListenerAdapter mAnimatorListener = new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(Animator animation) {
            mRunningAnimations.remove(animation);
            if (mRunningAnimations.isEmpty() && !mPressed) {
                mDrawingHardwareGlow = false;
                invalidateSelf();
            }
        }
    };

    /**
     * Interpolator with a smooth log deceleration
     */
    private static final class LogInterpolator implements Interpolator {
        @Override
        public float getInterpolation(float input) {
            return 1 - (float) Math.pow(400, -input * 1.4);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.fct.companian.compare;

import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import de.fct.fdmm.FDKind;
import de.fct.fdmm.FDRating;
import de.fct.fdmm.FDifference;
import de.fct.fdmm.apidl.APIDlProxy;
import de.fct.fdmm.apidl.MethodDescription;
import de.fct.fdmm.hotspot.BindingCapability;
import de.fct.fdmm.hotspot.CodingUnit;
import de.fct.fdmm.hotspot.Constraint;
import de.fct.fdmm.hotspot.DeploymentCapability;
import de.fct.fdmm.hotspot.HookCall;
import de.fct.fdmm.hotspot.HookProtocol;
import de.fct.fdmm.hotspot.HotSpot;
import de.fct.fdmm.hotspot.HotSpotUnit;

/**
 * Compares the hot spot definitions of two framework versions and reports every
 * change as a tree of {@link FDifference} instances.
 * <p>
 * Each {@code compare*} method returns {@code null} when the compared elements
 * are equal, or a {@link FDKind#ChangedElement} difference whose children
 * describe the individual created/deleted/changed sub-elements. Matching of
 * list elements between the two versions is delegated to
 * {@code AbstractCompare.getFromList}; an element present only on the left is
 * reported as deleted, one present only on the right as created.
 * <p>
 * The collaborating comparators ({@link CompareBasis}, {@link CompareCDL},
 * {@link ComparePDL}, {@link CompareAPIDL}) and the {@link APIDlProxy} must be
 * injected via the setters before any comparison is run.
 */
public class CompareHotspot extends AbstractCompare {

    private static final Logger log = LoggerFactory.getLogger(CompareHotspot.class);

    private CompareBasis basis;

    private CompareCDL cdl;

    private ComparePDL pdl;

    private CompareAPIDL apidl;

    private APIDlProxy apiProxy;

    public CompareHotspot(Map<String,Object> context) {
        super(context);
    }

    /**
     * Compares two hot spots (description, binding, deployment, constraints and
     * units) and aggregates all sub-differences.
     *
     * @param leftHs  the hot spot of the older (left) version
     * @param rightHs the matching hot spot of the newer (right) version
     * @return an aggregated difference, or {@code null} if nothing changed
     */
    protected FDifference compareHotSpots(HotSpot leftHs, HotSpot rightHs) {
        log.debug("comparing hot spots {} <> {}", leftHs, rightHs);
        FDifference diff = null;

        FDifference descDiff = basis.compareDescriptions(leftHs, rightHs, leftHs);
        FDifference bindDiff = this.compareHsBinding(leftHs.getBinding(), rightHs.getBinding());
        FDifference deplDiff = this.compareHsDeployment(leftHs.getDeployment(), rightHs.getDeployment());
        FDifference consDiff = this.compareHsConstraints(leftHs, rightHs);
        FDifference unitDiff = this.compareHsUnits(leftHs, rightHs);

        if (descDiff != null || bindDiff != null || deplDiff != null || consDiff != null
            || unitDiff != null) {
            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
            diff.setSource(leftHs);
            diff.setDescription("The Hotspot '" + leftHs.getName() + "' has changed in version "
                                + this.getRightVersion());
            diff.addDifference(descDiff);
            diff.addDifference(bindDiff);
            diff.addDifference(deplDiff);
            diff.addDifference(consDiff);
            diff.addDifference(unitDiff);
        }

        return diff;
    }

    /** Compares the binding capabilities by name (delegated to {@link CompareBasis}). */
    private FDifference compareHsBinding(BindingCapability leftBinding, BindingCapability rightBinding) {
        return basis.compareNamedElements("binding", leftBinding, rightBinding, leftBinding);
    }

    /** Compares the deployment capabilities by name (delegated to {@link CompareBasis}). */
    private FDifference compareHsDeployment(DeploymentCapability leftDeployment,
                                            DeploymentCapability rightDeployment) {
        return basis.compareNamedElements("deployment", leftDeployment, rightDeployment,
                                          leftDeployment);
    }

    /**
     * Compares the constraint lists of two hot spots, reporting changed, deleted
     * and created constraints.
     *
     * @return the aggregated difference, or {@code null} if the lists are equal
     */
    private FDifference compareHsConstraints(HotSpot leftHs, HotSpot rightHs) {
        FDifference diff = null;

        if (leftHs.getConstraints() != null) {
            for (Constraint leftConstr : leftHs.getConstraints()) {
                Constraint rightConstr = (Constraint) this.getFromList(rightHs.getConstraints(),
                                                                       leftConstr);
                if (rightConstr != null) {
                    FDifference subDiff = this.compareConstraints(leftConstr, rightConstr);
                    if (subDiff != null) {
                        if (diff == null) {
                            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                        }
                        diff.addDifference(subDiff);
                    }
                } else {
                    // Deleted Constraint
                    FDifference subDiff = new FDifference(FDKind.DeletedElement, FDRating.Warning);
                    subDiff.setSource(leftConstr);
                    subDiff.setDescription("The constraint '" + leftConstr.getName()
                                           + "' for Hotspot '" + leftHs.getName()
                                           + "' was deleted in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (rightHs.getConstraints() != null) {
            for (Constraint rightConstr : rightHs.getConstraints()) {
                if (this.getFromList(leftHs.getConstraints(), rightConstr) == null) {
                    // Created Constraint
                    FDifference subDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
                    subDiff.setSource(rightConstr);
                    subDiff.setDescription("The constraint '" + rightConstr.getName()
                                           + "' for Hotspot '" + leftHs.getName()
                                           + "' was added in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (diff != null) {
            diff.setSource(leftHs);
            diff.setDescription("The constraints of Hotspot '" + leftHs.getName()
                                + "' have changed in version " + this.getRightVersion());
        }

        return diff;
    }

    /**
     * Compares a single pair of constraints: their descriptions and their
     * assertions (assertion comparison is delegated to {@link CompareCDL}).
     *
     * @return a difference wrapping the description/assertion changes, or
     *         {@code null} if the constraints are equal
     */
    private FDifference compareConstraints(Constraint leftConstr, Constraint rightConstr) {
        FDifference diff = null;

        FDifference descDiff = basis.compareDescriptions(leftConstr, rightConstr, leftConstr);

        FDifference assertDiff = null;
        if (leftConstr.getAssertion() != null) {
            if (rightConstr.getAssertion() != null) {
                assertDiff = cdl.compareConstraintAssertions(leftConstr.getAssertion(),
                                                             rightConstr.getAssertion());
            } else {
                // Deleted Assertion
                assertDiff = new FDifference(FDKind.DeletedElement, FDRating.Warning);
                assertDiff.setSource(leftConstr.getAssertion());
                assertDiff.setDescription("The assertion '" + leftConstr.getAssertion().getName()
                                          + "' of constraint '" + leftConstr.getName()
                                          + "' was deleted in version " + this.getRightVersion());
            }
        } else if (rightConstr.getAssertion() != null) {
            // Created Assertion
            // Fixed: this branch previously reported FDKind.DeletedElement although it
            // describes an assertion that "was added" in the right version.
            assertDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
            assertDiff.setSource(rightConstr.getAssertion());
            assertDiff.setDescription("The assertion '" + rightConstr.getAssertion().getName()
                                      + "' of constraint '" + leftConstr.getName()
                                      + "' was added in version " + this.getRightVersion());
        }

        if (descDiff != null || assertDiff != null) {
            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
            diff.addDifference(descDiff);
            diff.addDifference(assertDiff);
        }

        return diff;
    }

    /**
     * Compares the unit lists of two hot spots. A removed unit is rated as a
     * {@link FDRating#Conflict} because dependent client code may break.
     * Currently only {@link CodingUnit} instances are compared in depth; other
     * unit kinds are logged and skipped.
     *
     * @return the aggregated difference, or {@code null} if the lists are equal
     */
    private FDifference compareHsUnits(HotSpot leftHs, HotSpot rightHs) {
        FDifference diff = null;

        if (leftHs.getUnits() != null) {
            for (HotSpotUnit leftUnit : leftHs.getUnits()) {
                HotSpotUnit rightUnit = (HotSpotUnit) this.getFromList(rightHs.getUnits(), leftUnit);
                if (rightUnit != null) {
                    if (leftUnit instanceof CodingUnit) {
                        CodingUnit leftCodingUnit = (CodingUnit) leftUnit;
                        CodingUnit rightCodingUnit = (CodingUnit) rightUnit;
                        FDifference subDiff = this.compareCodingUnits(leftCodingUnit, rightCodingUnit);
                        if (subDiff != null) {
                            if (diff == null) {
                                diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                            }
                            diff.addDifference(subDiff);
                        }
                    } else {
                        log.warn("Unknown Hotspot unit kind {} for comparison.",
                                 leftUnit.getKind().name());
                    }
                } else {
                    // Deleted Unit
                    FDifference subDiff = new FDifference(FDKind.DeletedElement, FDRating.Conflict);
                    subDiff.setSource(leftUnit);
                    subDiff.setDescription("The Hotspot Unit '" + leftUnit.getName()
                                           + "' for Hotspot '" + leftHs.getName()
                                           + "' was removed in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (rightHs.getUnits() != null) {
            for (HotSpotUnit rightUnit : rightHs.getUnits()) {
                if (this.getFromList(leftHs.getUnits(), rightUnit) == null) {
                    // Created Unit
                    FDifference subDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
                    subDiff.setSource(rightUnit);
                    subDiff.setDescription("The Hotspot Unit '" + rightUnit.getName()
                                           + "' for Hotspot '" + leftHs.getName()
                                           + "' was added in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (diff != null) {
            diff.setSource(leftHs);
            diff.setDescription("The list of Hotspot Units of Hotspot '" + leftHs.getName()
                                + "' has changed in version " + this.getRightVersion());
        }

        return diff;
    }

    /**
     * Compares two coding units: their hooks, constraints, protocols and API
     * type description.
     *
     * @return the aggregated difference, or {@code null} if the units are equal
     */
    private FDifference compareCodingUnits(CodingUnit leftCodingUnit, CodingUnit rightCodingUnit) {
        FDifference hooksDiff = compareHooks(leftCodingUnit, rightCodingUnit);
        FDifference constrDiff = compareCodingUnitConstraints(leftCodingUnit, rightCodingUnit);
        FDifference protDiff = compareCodingUnitProtocols(leftCodingUnit, rightCodingUnit);
        FDifference typeDiff = compareCodingUnitType(leftCodingUnit, rightCodingUnit);

        FDifference diff = null;
        if (hooksDiff != null || constrDiff != null || protDiff != null || typeDiff != null) {
            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
            diff.setSource(leftCodingUnit);
            diff.setDescription("The Coding Unit '" + leftCodingUnit.getName()
                                + "' has changed in version " + this.getRightVersion());
            diff.addDifference(hooksDiff);
            diff.addDifference(constrDiff);
            diff.addDifference(protDiff);
            diff.addDifference(typeDiff);
        }

        return diff;
    }

    /**
     * Compares the hook lists of two coding units. A removed hook is rated as a
     * {@link FDRating#Conflict} because callers of that hook will break.
     *
     * @return the aggregated difference, or {@code null} if the lists are equal
     */
    private FDifference compareHooks(CodingUnit leftCodingUnit, CodingUnit rightCodingUnit) {
        FDifference diff = null;

        if (leftCodingUnit.getHooks() != null) {
            for (HookCall leftHook : leftCodingUnit.getHooks()) {
                HookCall rightHook = (HookCall) this.getFromList(rightCodingUnit.getHooks(), leftHook);
                if (rightHook != null) {
                    FDifference subDiff = this.compareHookCalls(leftHook, rightHook);
                    if (subDiff != null) {
                        if (diff == null) {
                            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                        }
                        diff.addDifference(subDiff);
                    }
                } else {
                    // Deleted HookCall
                    FDifference subDiff = new FDifference(FDKind.DeletedElement, FDRating.Conflict);
                    subDiff.setSource(leftHook);
                    subDiff.setDescription("The Hook '" + leftHook.getName() + "' of Coding Unit '"
                                           + leftCodingUnit.getName() + "' was removed in version "
                                           + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (rightCodingUnit.getHooks() != null) {
            for (HookCall rightHook : rightCodingUnit.getHooks()) {
                if (this.getFromList(leftCodingUnit.getHooks(), rightHook) == null) {
                    // Created HookCall
                    FDifference subDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
                    subDiff.setSource(rightHook);
                    subDiff.setDescription("The Hook '" + rightHook.getName() + "' of Coding Unit '"
                                           + leftCodingUnit.getName() + "' was added in version "
                                           + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (diff != null) {
            diff.setSource(leftCodingUnit);
            diff.setDescription("The list of Hooks of Coding Unit '" + leftCodingUnit.getName()
                                + "' has changed in version " + this.getRightVersion());
        }

        return diff;
    }

    /**
     * Compares two hook calls: the signature and signature description of the
     * referenced API method (resolved through the {@link APIDlProxy}) and the
     * hook's constraints. If either method description cannot be resolved the
     * signature comparison is silently skipped.
     *
     * @return the aggregated difference, or {@code null} if the hooks are equal
     */
    private FDifference compareHookCalls(HookCall leftHook, HookCall rightHook) {
        log.debug("comparing hook calls {} <> {}", leftHook, rightHook);

        FDifference signDiff = null;
        FDifference signDescrDiff = null;
        MethodDescription leftMethod = apiProxy.getMethodDescription(leftHook.getMethodAPIPath());
        MethodDescription rightMethod = apiProxy.getMethodDescription(rightHook.getMethodAPIPath());
        if (leftMethod != null && rightMethod != null) {
            signDiff = basis.compareValues("signature", leftMethod.getSignature().toString(),
                                           rightMethod.getSignature().toString(), leftMethod);
            signDescrDiff = basis.compareDescriptions("Hook " + leftMethod.getSignature(),
                                                      leftMethod.getSignature().getDescription(),
                                                      rightMethod.getSignature().getDescription(),
                                                      leftMethod);
        }

        FDifference constrDiff = this.compareHookCallConstraints(leftHook, rightHook);

        FDifference diff = null;
        if (constrDiff != null || signDiff != null || signDescrDiff != null) {
            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
            diff.setSource(leftHook);
            diff.setDescription("The Hook '" + leftHook.getName() + "' has changed in version "
                                + this.getRightVersion());
            diff.addDifference(constrDiff);
            diff.addDifference(signDiff);
            diff.addDifference(signDescrDiff);
        }

        return diff;
    }

    /**
     * Compares the constraint lists of two hook calls.
     *
     * @return the aggregated difference, or {@code null} if the lists are equal
     */
    private FDifference compareHookCallConstraints(HookCall leftHook, HookCall rightHook) {
        FDifference diff = null;

        if (leftHook.getConstraints() != null) {
            for (Constraint leftConstr : leftHook.getConstraints()) {
                Constraint rightConstr = (Constraint) this.getFromList(rightHook.getConstraints(),
                                                                       leftConstr);
                if (rightConstr != null) {
                    FDifference subDiff = this.compareConstraints(leftConstr, rightConstr);
                    if (subDiff != null) {
                        if (diff == null) {
                            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                        }
                        diff.addDifference(subDiff);
                    }
                } else {
                    // Deleted Constraint
                    FDifference subDiff = new FDifference(FDKind.DeletedElement, FDRating.Warning);
                    subDiff.setSource(leftConstr);
                    subDiff.setDescription("The constraint '" + leftConstr.getName()
                                           + "' for Hook '" + leftHook.getName()
                                           + "' was deleted in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (rightHook.getConstraints() != null) {
            for (Constraint rightConstr : rightHook.getConstraints()) {
                if (this.getFromList(leftHook.getConstraints(), rightConstr) == null) {
                    // Created Constraint
                    FDifference subDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
                    subDiff.setSource(rightConstr);
                    subDiff.setDescription("The constraint '" + rightConstr.getName()
                                           + "' for Hook '" + leftHook.getName()
                                           + "' was added in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (diff != null) {
            diff.setSource(leftHook);
            diff.setDescription("The constraints of Hook '" + leftHook.getName()
                                + "' have changed in version " + this.getRightVersion());
        }

        return diff;
    }

    /**
     * Compares the constraint lists of two coding units.
     *
     * @return the aggregated difference, or {@code null} if the lists are equal
     */
    private FDifference compareCodingUnitConstraints(CodingUnit leftCodingUnit,
                                                     CodingUnit rightCodingUnit) {
        FDifference diff = null;

        if (leftCodingUnit.getConstraints() != null) {
            for (Constraint leftConstr : leftCodingUnit.getConstraints()) {
                Constraint rightConstr = (Constraint) this.getFromList(rightCodingUnit.getConstraints(),
                                                                       leftConstr);
                if (rightConstr != null) {
                    FDifference subDiff = this.compareConstraints(leftConstr, rightConstr);
                    if (subDiff != null) {
                        if (diff == null) {
                            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                        }
                        diff.addDifference(subDiff);
                    }
                } else {
                    // Deleted Constraint
                    FDifference subDiff = new FDifference(FDKind.DeletedElement, FDRating.Warning);
                    subDiff.setSource(leftConstr);
                    subDiff.setDescription("The constraint '" + leftConstr.getName()
                                           + "' for Coding Unit '" + leftCodingUnit.getName()
                                           + "' was deleted in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (rightCodingUnit.getConstraints() != null) {
            for (Constraint rightConstr : rightCodingUnit.getConstraints()) {
                if (this.getFromList(leftCodingUnit.getConstraints(), rightConstr) == null) {
                    // Created Constraint
                    FDifference subDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
                    subDiff.setSource(rightConstr);
                    subDiff.setDescription("The constraint '" + rightConstr.getName()
                                           + "' for Coding Unit '" + leftCodingUnit.getName()
                                           + "' was added in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (diff != null) {
            diff.setSource(leftCodingUnit);
            diff.setDescription("The constraints of Coding Unit '" + leftCodingUnit.getName()
                                + "' have changed in version " + this.getRightVersion());
        }

        return diff;
    }

    /**
     * Compares the hook-protocol lists of two coding units (protocol comparison
     * is delegated to {@link ComparePDL}).
     *
     * @return the aggregated difference, or {@code null} if the lists are equal
     */
    private FDifference compareCodingUnitProtocols(CodingUnit leftCodingUnit,
                                                   CodingUnit rightCodingUnit) {
        FDifference diff = null;

        if (leftCodingUnit.getProtocols() != null) {
            for (HookProtocol leftProtocol : leftCodingUnit.getProtocols()) {
                HookProtocol rightProtocol = (HookProtocol) this.getFromList(rightCodingUnit.getProtocols(),
                                                                             leftProtocol);
                if (rightProtocol != null) {
                    FDifference subDiff = pdl.compareHookProtocols(leftProtocol, rightProtocol);
                    if (subDiff != null) {
                        if (diff == null) {
                            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                        }
                        diff.addDifference(subDiff);
                    }
                } else {
                    // Deleted Protocol
                    FDifference subDiff = new FDifference(FDKind.DeletedElement, FDRating.Warning);
                    subDiff.setSource(leftProtocol);
                    subDiff.setDescription("The Hook Protocol '" + leftProtocol.getName()
                                           + "' for Coding Unit '" + leftCodingUnit.getName()
                                           + "' was deleted in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (rightCodingUnit.getProtocols() != null) {
            for (HookProtocol rightProtocol : rightCodingUnit.getProtocols()) {
                if (this.getFromList(leftCodingUnit.getProtocols(), rightProtocol) == null) {
                    // Created Protocol
                    FDifference subDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
                    subDiff.setSource(rightProtocol);
                    subDiff.setDescription("The Hook Protocol '" + rightProtocol.getName()
                                           + "' for Coding Unit '" + leftCodingUnit.getName()
                                           + "' was added in version " + this.getRightVersion());
                    if (diff == null) {
                        diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
                    }
                    diff.addDifference(subDiff);
                }
            }
        }

        if (diff != null) {
            diff.setSource(leftCodingUnit);
            diff.setDescription("The Hook protocols of Coding Unit '" + leftCodingUnit.getName()
                                + "' have changed in version " + this.getRightVersion());
        }

        return diff;
    }

    /**
     * Compares the API type descriptions of two coding units: reports a
     * created/deleted type-description path, or delegates the in-depth type
     * comparison to {@link CompareAPIDL} when both sides declare one.
     *
     * @return the aggregated difference, or {@code null} if nothing changed
     */
    private FDifference compareCodingUnitType(CodingUnit leftCodingUnit, CodingUnit rightCodingUnit) {
        FDifference fileDiff = null;
        FDifference typeDiff = null;

        if (leftCodingUnit.getTypeAPIPath() != null) {
            if (rightCodingUnit.getTypeAPIPath() != null) {
                typeDiff = apidl.compareTypeDescriptions(leftCodingUnit, rightCodingUnit,
                                                         leftCodingUnit);
            } else {
                // Deleted type description
                fileDiff = new FDifference(FDKind.DeletedElement, FDRating.Warning);
                fileDiff.setSource(leftCodingUnit);
                fileDiff.setDescription("The path '" + leftCodingUnit.getTypeAPIPath()
                                        + "' to the API type description of coding unit '"
                                        + leftCodingUnit.getName() + "' was removed in version "
                                        + this.getRightVersion());
            }
        } else if (rightCodingUnit.getTypeAPIPath() != null) {
            // Created type description
            fileDiff = new FDifference(FDKind.CreatedElement, FDRating.Warning);
            fileDiff.setSource(rightCodingUnit);
            fileDiff.setDescription("The path '" + rightCodingUnit.getTypeAPIPath()
                                    + "' to the API type description of coding unit '"
                                    + leftCodingUnit.getName() + "' was added in version "
                                    + this.getRightVersion());
        }

        FDifference diff = null;
        if (fileDiff != null || typeDiff != null) {
            diff = new FDifference(FDKind.ChangedElement, FDRating.Warning);
            diff.setSource(leftCodingUnit);
            diff.setDescription("The API type description of coding unit '"
                                + leftCodingUnit.getName() + "' has changed in version "
                                + this.getRightVersion());
            diff.addDifference(fileDiff);
            diff.addDifference(typeDiff);
        }

        return diff;
    }

    public void setBasis(CompareBasis basis) {
        this.basis = basis;
    }

    public void setCdl(CompareCDL cdl) {
        this.cdl = cdl;
    }

    public void setPdl(ComparePDL pdl) {
        this.pdl = pdl;
    }

    public void setApidl(CompareAPIDL apidl) {
        this.apidl = apidl;
    }

    public void setApiProxy(APIDlProxy apiProxy) {
        this.apiProxy = apiProxy;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pdfbox.pdmodel.graphics.color;

import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSObject;
import org.apache.pdfbox.pdmodel.MissingResourceException;
import org.apache.pdfbox.pdmodel.PDResources;
import org.apache.pdfbox.pdmodel.common.COSObjectable;

import java.awt.*;
import java.awt.color.ColorSpace;
import java.awt.image.*;
import java.io.IOException;

/**
 * A color space specifies how the colours of graphics objects will be painted on the page.
 *
 * @author John Hewson
 * @author Ben Litchfield
 */
public abstract class PDColorSpace implements COSObjectable
{
    /**
     * Creates a color space space given a name or array.
     * @param colorSpace the color space COS object
     * @return a new color space
     * @throws IOException if the color space is unknown or cannot be created
     */
    public static PDColorSpace create(COSBase colorSpace) throws IOException
    {
        return create(colorSpace, null);
    }

    /**
     * Creates a color space given a name or array.
     * @param colorSpace the color space COS object
     * @param resources the current resources.
     * @return a new color space
     * @throws MissingResourceException if the color space is missing in the resources dictionary
     * @throws IOException if the color space is unknown or cannot be created
     */
    public static PDColorSpace create(COSBase colorSpace, PDResources resources)
            throws IOException
    {
        return create(colorSpace, resources, false);
    }

    /**
     * Creates a color space given a name or array.
     * @param colorSpace the color space COS object
     * @param resources the current resources.
     * @param wasDefault indicate is current color space used by default color space
     * @return a new color space
     * @throws MissingResourceException if the color space is missing in the resources dictionary
     * @throws IOException if the color space is unknown or cannot be created
     */
    public static PDColorSpace create(COSBase colorSpace, PDResources resources,
                                      boolean wasDefault) throws IOException
    {
        if (colorSpace instanceof COSObject)
        {
            // Fixed: propagate wasDefault through the indirect-object unwrap. The
            // previous call to the two-arg overload reset wasDefault to false, which
            // defeated the "!wasDefault" recursion guard below whenever a default
            // color space was stored as an indirect object, allowing endless
            // default-color-space lookups.
            return create(((COSObject) colorSpace).getObject(), resources, wasDefault);
        }
        else if (colorSpace instanceof COSName)
        {
            COSName name = (COSName)colorSpace;

            // default color spaces: a device color space may be remapped via the
            // resources dictionary (DefaultRGB etc.); wasDefault stops us from
            // resolving the replacement through the defaults again
            if (resources != null)
            {
                COSName defaultName = getDefaultValue(resources, name);
                if (resources.hasColorSpace(defaultName) && !wasDefault)
                {
                    return resources.getColorSpace(defaultName, true);
                }
            }

            // built-in color spaces
            if (name == COSName.DEVICECMYK)
            {
                return PDDeviceCMYK.INSTANCE;
            }
            else if (name == COSName.DEVICERGB)
            {
                return PDDeviceRGB.INSTANCE;
            }
            else if (name == COSName.DEVICEGRAY)
            {
                return PDDeviceGray.INSTANCE;
            }
            else if (name == COSName.PATTERN)
            {
                return new PDPattern(resources);
            }
            else if (resources != null)
            {
                // named color space defined in the resources dictionary
                if (!resources.hasColorSpace(name))
                {
                    throw new MissingResourceException("Missing color space: " + name.getName());
                }
                return resources.getColorSpace(name);
            }
            else
            {
                throw new MissingResourceException("Unknown color space: " + name.getName());
            }
        }
        else if (colorSpace instanceof COSArray)
        {
            COSArray array = (COSArray)colorSpace;
            COSName name = (COSName)array.getObject(0);

            // TODO cache these returned color spaces?

            if (name == COSName.CALGRAY)
            {
                return new PDCalGray(array);
            }
            else if (name == COSName.CALRGB)
            {
                return new PDCalRGB(array);
            }
            else if (name == COSName.DEVICEN)
            {
                return new PDDeviceN(array);
            }
            else if (name == COSName.INDEXED || name == COSName.I)
            {
                return new PDIndexed(array);
            }
            else if (name == COSName.SEPARATION)
            {
                return new PDSeparation(array);
            }
            else if (name == COSName.ICCBASED)
            {
                return new PDICCBased(array);
            }
            else if (name == COSName.LAB)
            {
                return new PDLab(array);
            }
            else if (name == COSName.PATTERN)
            {
                if (array.size() == 1)
                {
                    return new PDPattern(resources);
                }
                else
                {
                    // [/Pattern underlyingColorSpace]
                    return new PDPattern(resources, PDColorSpace.create(array.get(1)));
                }
            }
            else if (name == COSName.DEVICECMYK ||
                     name == COSName.DEVICERGB ||
                     name == COSName.DEVICEGRAY)
            {
                // not allowed in an array, but we sometimes encounter these regardless
                return create(name, resources, wasDefault);
            }
            else
            {
                throw new IOException("Invalid color space kind: " + name);
            }
        }
        else
        {
            throw new IOException("Expected a name or array but got: " + colorSpace);
        }
    }

    /**
     * Returns name of corresponding default color space if passed name
     * is device depended color space
     *
     * @param resources current resource dictionary
     * @param name color space name
     * @return name of corresponding default color space if passed name
     * is device depended color space, otherwise {@code null}
     */
    public static COSName getDefaultValue(PDResources resources, COSName name)
    {
        if (name.equals(COSName.DEVICECMYK) && resources.hasColorSpace(COSName.DEFAULT_CMYK))
        {
            return COSName.DEFAULT_CMYK;
        }
        else if (name.equals(COSName.DEVICERGB) && resources.hasColorSpace(COSName.DEFAULT_RGB))
        {
            return COSName.DEFAULT_RGB;
        }
        else if (name.equals(COSName.DEVICEGRAY) && resources.hasColorSpace(COSName.DEFAULT_GRAY))
        {
            return COSName.DEFAULT_GRAY;
        }
        else
        {
            return null;
        }
    }

    // array for the given parameters
    protected COSArray array;

    /**
     * Returns the name of the color space.
     * @return the name of the color space
     */
    public abstract String getName();

    /**
     * Returns the number of components in this color space
     * @return the number of components in this color space
     */
    public abstract int getNumberOfComponents();

    /**
     * Returns the default decode array for this color space.
     * @param bitsPerComponent the number of bits per sample component
     * @return the default decode array
     */
    public abstract float[] getDefaultDecode(int bitsPerComponent);

    /**
     * Returns the initial color value for this color space.
     * @return the initial color value for this color space
     */
    public abstract PDColor getInitialColor();

    /**
     * Returns the RGB equivalent of the given color value.
     * @param value a color value with component values between 0 and 1
     * @return an array of R,G,B value between 0 and 255
     * @throws IOException if the color conversion fails
     */
    public abstract float[] toRGB(float[] value) throws IOException;

    /**
     * Returns the (A)RGB equivalent of the given raster.
     * @param raster the source raster
     * @return an (A)RGB buffered image
     * @throws IOException if the color conversion fails
     */
    public abstract BufferedImage toRGBImage(WritableRaster raster) throws IOException;

    /**
     * Returns the (A)RGB equivalent of the given raster, using the given AWT color space
     * to perform the conversion.
     * @param raster the source raster
     * @param colorSpace the AWT color space used for the conversion
     * @return an (A)RGB buffered image
     */
    protected BufferedImage toRGBImageAWT(WritableRaster raster, ColorSpace colorSpace)
    {
        //
        // WARNING: this method is performance sensitive, modify with care!
        //

        // ICC Profile color transforms are only fast when performed using ColorConvertOp
        ColorModel colorModel = new ComponentColorModel(colorSpace,
            false, false, Transparency.OPAQUE, raster.getDataBuffer().getDataType());

        BufferedImage src = new BufferedImage(colorModel, raster, false, null);
        BufferedImage dest = new BufferedImage(raster.getWidth(), raster.getHeight(),
                                               BufferedImage.TYPE_INT_RGB);
        ColorConvertOp op = new ColorConvertOp(null);
        op.filter(src, dest);
        return dest;
    }

    @Override
    public COSBase getCOSObject()
    {
        return array;
    }
}
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.dashbuilder.displayer.client.widgets; import java.util.Arrays; import org.dashbuilder.dataset.DataSetFactory; import org.dashbuilder.dataset.client.DataSetReadyCallback; import org.dashbuilder.dataset.sort.SortOrder; import org.dashbuilder.displayer.DisplayerConstraints; import org.dashbuilder.displayer.DisplayerSettings; import org.dashbuilder.displayer.DisplayerSettingsFactory; import org.dashbuilder.displayer.DisplayerType; import org.dashbuilder.displayer.Position; import org.dashbuilder.displayer.client.DataSetHandler; import org.dashbuilder.displayer.client.Displayer; import org.dashbuilder.displayer.client.DisplayerLocator; import org.dashbuilder.displayer.client.RendererLibrary; import org.dashbuilder.displayer.client.RendererManager; import org.dashbuilder.displayer.client.events.DisplayerSettingsChangedEvent; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.invocation.InvocationOnMock; import org.mockito.runners.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import org.uberfire.ext.properties.editor.model.validators.PropertyFieldValidator; import org.uberfire.mocks.EventSourceMock; import static org.dashbuilder.displayer.DisplayerAttributeDef.*; import static org.dashbuilder.displayer.DisplayerAttributeGroupDef.*; import static org.junit.Assert.*; import static org.mockito.Matchers.any; import static 
org.mockito.Mockito.*;

/**
 * Unit tests for DisplayerSettingsEditor: verifies that the editor view is populated
 * with the properties supported by the displayer's constraints, and that attribute
 * changes made through the editor are written back into the DisplayerSettings.
 */
@RunWith(MockitoJUnitRunner.class)
public class DisplayerSettingsEditorTest {

    @Mock
    DisplayerSettingsEditor.View view;

    @Mock
    Displayer displayer;

    @Mock
    DataSetHandler dataSetHandler;

    @Mock
    EventSourceMock<DisplayerSettingsChangedEvent> event;

    @Mock
    DisplayerLocator displayerLocator;

    @Mock
    RendererManager rendererManager;

    @Mock
    RendererLibrary rendererA;

    @Mock
    RendererLibrary rendererB;

    // Presenter under test; (re)created for every test in init()
    DisplayerSettingsEditor presenter;

    /**
     * Common fixture: two renderers registered, rendererB selected by default,
     * a displayer that supports every attribute group, and a dataSetHandler stub
     * that triggers presenter.show() when a data set lookup completes.
     */
    @Before
    public void init() throws Exception {
        when(rendererManager.getRenderersForType(any(DisplayerType.class))).thenReturn(Arrays.asList(rendererA, rendererB));
        when(rendererA.getUUID()).thenReturn("rendererA");
        when(rendererB.getUUID()).thenReturn("rendererB");
        when(rendererManager.getRendererForDisplayer(any(DisplayerSettings.class))).thenReturn(rendererB);
        when(displayerLocator.lookupDisplayer(any(DisplayerSettings.class))).thenReturn(displayer);
        when(displayer.getDataSetHandler()).thenReturn(dataSetHandler);
        // By default the displayer claims support for all the attribute groups,
        // so every editor section gets initialized unless a test narrows this down.
        when(displayer.getDisplayerConstraints()).thenReturn(new DisplayerConstraints(null)
                .supportsAttribute(TYPE)
                .supportsAttribute(SUBTYPE)
                .supportsAttribute(RENDERER)
                .supportsAttribute(GENERAL_GROUP)
                .supportsAttribute(COLUMNS_GROUP)
                .supportsAttribute(REFRESH_GROUP)
                .supportsAttribute(FILTER_GROUP)
                .supportsAttribute(CHART_GROUP)
                .supportsAttribute(TABLE_GROUP)
                .supportsAttribute(AXIS_GROUP)
                .supportsAttribute(METER_GROUP));

        presenter = new DisplayerSettingsEditor(view, displayerLocator, rendererManager, event);

        // Call to init implies calling to presenter.show() internally
        doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
                presenter.show();
                return null;
            }
        }).when(dataSetHandler).lookupDataSet(any(DataSetReadyCallback.class));
    }

    /**
     * A bar chart with every general/chart/axis/filter/refresh/column attribute set
     * must expose all of them in the view, except the export flags (not supported here).
     */
    @Test
    public void testGeneralSettings() {
        when(dataSetHandler.getLastDataSet()).thenReturn(DataSetFactory.newDataSetBuilder()
                .label("dept")
                .number("amount")
                .buildDataSet());

        when(displayer.getDisplayerSettings()).thenReturn(DisplayerSettingsFactory.newBarChartSettings()
                .dataset("dset")
                .group("dept")
                .column("dept").format("Department")
                .column("amount").format("Total", "#.##0").expression("value/100")
                .title("Sales by dept")
                .titleVisible(true)
                .legendOn(Position.BOTTOM)
                .width(400).height(200)
                .xAxisTitle("Depts")
                .yAxisTitle("Amount $")
                .margins(10, 80, 80, 100)
                .filterOn(false, true, true)
                .refreshOn(3, false)
                .buildSettings());

        // Call to init implies calling to presenter.show() internally (see above)
        presenter.init(displayer);

        verify(view).clear();
        verify(view).addTextProperty(TITLE, "Sales by dept");
        verify(view).addBooleanProperty(TITLE_VISIBLE, true);
        // Export flags are not part of the supported groups in the default fixture
        verify(view, never()).addBooleanProperty(EXPORT_TO_CSV, false);
        verify(view, never()).addBooleanProperty(EXPORT_TO_XLS, false);
        verify(view).addTextProperty(eq(CHART_WIDTH), eq("400"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addTextProperty(eq(CHART_HEIGHT), eq("200"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addColorProperty(eq(CHART_BGCOLOR), anyString());
        verify(view).addBooleanProperty(CHART_3D, false);
        verify(view).addTextProperty(eq(CHART_MARGIN_TOP), eq("10"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addTextProperty(eq(CHART_MARGIN_BOTTOM), eq("80"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addTextProperty(eq(CHART_MARGIN_LEFT), eq("80"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addTextProperty(eq(CHART_MARGIN_RIGHT), eq("100"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addBooleanProperty(CHART_SHOWLEGEND, true);
        verify(view).addListProperty(eq(CHART_LEGENDPOSITION), anyListOf(String.class), anyString());
        verify(view).addBooleanProperty(XAXIS_SHOWLABELS, true);
        verify(view).addTextProperty(XAXIS_LABELSANGLE, "0");
        verify(view).addTextProperty(XAXIS_TITLE, "Depts");
        verify(view).addBooleanProperty(YAXIS_SHOWLABELS, true);
        verify(view).addTextProperty(YAXIS_TITLE, "Amount $");
        verify(view).addBooleanProperty(FILTER_ENABLED, true);
        verify(view).addBooleanProperty(FILTER_SELFAPPLY_ENABLED, false);
        verify(view).addBooleanProperty(FILTER_NOTIFICATION_ENABLED, true);
        verify(view).addBooleanProperty(FILTER_LISTENING_ENABLED, true);
        verify(view).addTextProperty(eq(REFRESH_INTERVAL), eq("3"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addBooleanProperty(REFRESH_STALE_DATA, false);
        verify(view).addTextProperty(eq("columns.dept.name"), anyString(), eq("Department"));
        verify(view).addTextProperty(eq("columns.amount.name"), anyString(), eq("Total"));
        verify(view).addTextProperty(eq("columns.amount.expression"), anyString(), eq("value/100"));
        verify(view).addTextProperty(eq("columns.amount.pattern"), anyString(), eq("#.##0"));
        verify(view).show();
    }

    /**
     * Attribute changes pushed through onAttributeChanged() must be written back
     * into the DisplayerSettings instance and fire a DisplayerSettingsChangedEvent.
     */
    @Test
    public void testChangeAttributes() {
        DisplayerSettings settings = DisplayerSettingsFactory.newBarChartSettings().buildSettings();
        when(displayer.getDisplayerSettings()).thenReturn(settings);

        presenter.init(displayer);
        presenter.onAttributeChanged(TITLE.getFullId(), "Test");
        presenter.onAttributeChanged(TITLE_VISIBLE.getFullId(), "true");
        presenter.onAttributeChanged(EXPORT_TO_CSV.getFullId(), "false");
        presenter.onAttributeChanged(EXPORT_TO_XLS.getFullId(), "false");
        presenter.onAttributeChanged(CHART_HEIGHT.getFullId(), "400");
        presenter.onAttributeChanged("columns.amount.name", "Total");
        presenter.onAttributeChanged("columns.amount.pattern", "#.###,00");
        presenter.onAttributeChanged("columns.amount.expression", "value");

        assertEquals(settings.getTitle(), "Test");
        assertEquals(settings.isTitleVisible(), true);
        assertEquals(settings.isCSVExportAllowed(), false);
        assertEquals(settings.isExcelExportAllowed(), false);
        assertEquals(settings.getChartHeight(), 400);
        assertEquals(settings.getColumnSettings("amount").getColumnName(), "Total");
        assertEquals(settings.getColumnSettings("amount").getValuePattern(), "#.###,00");
        assertEquals(settings.getColumnSettings("amount").getValueExpression(), "value");

        verify(event, atLeastOnce()).fire(any(DisplayerSettingsChangedEvent.class));
    }

    /**
     * A table displayer restricted to TABLE_GROUP + EXPORT_GROUP only exposes
     * table- and export-related properties.
     */
    @Test
    public void testTableSettings() {
        when(dataSetHandler.getLastDataSet()).thenReturn(DataSetFactory.newDataSetBuilder()
                .label("dept")
                .date("date")
                .number("amount")
                .buildDataSet());

        when(displayer.getDisplayerConstraints()).thenReturn(
                new DisplayerConstraints(null)
                        .supportsAttribute(TABLE_GROUP)
                        .supportsAttribute(EXPORT_GROUP));

        when(displayer.getDisplayerSettings()).thenReturn(DisplayerSettingsFactory.newTableSettings()
                .tablePageSize(10)
                .tableWidth(500)
                .tableOrderEnabled(true)
                .tableOrderDefault("date", SortOrder.ASCENDING)
                .tableColumnPickerEnabled(false)
                .allowCsvExport(true)
                .allowExcelExport(false)
                .buildSettings());

        presenter.init(displayer);

        verify(view).clear();
        verify(view).addTextProperty(eq(TABLE_WIDTH), eq("500"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addBooleanProperty(TABLE_SORTENABLED, true);
        verify(view).addListProperty(eq(TABLE_SORTCOLUMNID), anyListOf(String.class), eq("date"));
        verify(view).addListProperty(eq(TABLE_SORTORDER), anyListOf(String.class), eq(SortOrder.ASCENDING.toString()));
        verify(view).addBooleanProperty(TABLE_COLUMN_PICKER_ENABLED, false);
        verify(view).addBooleanProperty(EXPORT_TO_CSV, true);
        verify(view).addBooleanProperty(EXPORT_TO_XLS, false);
        verify(view).show();
    }

    /**
     * A meter displayer restricted to METER_GROUP exposes the four meter thresholds,
     * each guarded by a numeric (long) validator.
     */
    @Test
    public void testMeterSettings() {
        when(displayer.getDisplayerConstraints()).thenReturn(
                new DisplayerConstraints(null)
                        .supportsAttribute(METER_GROUP));

        when(displayer.getDisplayerSettings()).thenReturn(DisplayerSettingsFactory.newMeterChartSettings()
                .meter(0, 100, 500, 900)
                .buildSettings());

        presenter.init(displayer);

        verify(view).clear();
        verify(view).addTextProperty(eq(METER_START), eq("0"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addTextProperty(eq(METER_WARNING), eq("100"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addTextProperty(eq(METER_CRITICAL), eq("500"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).addTextProperty(eq(METER_END), eq("900"), any(DisplayerSettingsEditor.LongValidator.class));
        verify(view).show();
    }

    /**
     * The renderer selector lists every renderer registered for the displayer type
     * and pre-selects the one resolved by the RendererManager.
     */
    @Test
    public void testRenderer() {
        DisplayerSettings settings = DisplayerSettingsFactory.newBarChartSettings()
                .renderer("rendererB")
                .buildSettings();

        when(rendererManager.getRendererForDisplayer(settings)).thenReturn(rendererB);
        when(displayer.getDisplayerConstraints()).thenReturn(
                new DisplayerConstraints(null)
                        .supportsAttribute(RENDERER));
        when(displayer.getDisplayerSettings()).thenReturn(settings);

        presenter.init(displayer);

        verify(view).clear();
        verify(view).addListProperty(RENDERER, Arrays.asList("rendererA", "rendererB"), "rendererB");
        verify(view).show();
    }

    /**
     * isSupported() reflects the displayer constraints: single attributes are matched
     * directly and a supported group (FILTER_GROUP) implies all of its members.
     */
    @Test
    public void testSupportedAttrs() {
        when(displayer.getDisplayerConstraints()).thenReturn(new DisplayerConstraints(null)
                .supportsAttribute(TYPE)
                .supportsAttribute(SUBTYPE)
                .supportsAttribute(TITLE)
                .supportsAttribute(FILTER_GROUP));

        when(displayer.getDisplayerSettings()).thenReturn(DisplayerSettingsFactory.newBarChartSettings().buildSettings());

        presenter.init(displayer);

        assertEquals(presenter.isSupported(TYPE), true);
        assertEquals(presenter.isSupported(SUBTYPE), true);
        assertEquals(presenter.isSupported(TITLE), true);
        assertEquals(presenter.isSupported(TITLE_VISIBLE), false);
        assertEquals(presenter.isSupported(RENDERER), false);
        assertEquals(presenter.isSupported(FILTER_ENABLED), true);
        assertEquals(presenter.isSupported(FILTER_LISTENING_ENABLED), true);
        assertEquals(presenter.isSupported(FILTER_NOTIFICATION_ENABLED), true);
        assertEquals(presenter.isSupported(FILTER_SELFAPPLY_ENABLED), true);
    }

    /**
     * The long validator accepts plain integers only (no suffixes, no letters).
     */
    @Test
    public void testLongValidator() {
        PropertyFieldValidator validator = presenter.createLongValidator();
        assertEquals(validator.validate("500"), true);
        assertEquals(validator.validate("500d"), false);
        assertEquals(validator.validate("aaa"), false);
    }

    /**
     * Each meter threshold validator (index 0..3 = start/warning/critical/end) only
     * accepts values inside the window defined by its neighbouring thresholds.
     */
    @Test
    public void testMeterValidator() {
        DisplayerSettings settings = DisplayerSettingsFactory.newMeterChartSettings()
                .meter(0, 100, 500, 900)
                .buildSettings();

        when(displayer.getDisplayerSettings()).thenReturn(settings);
        presenter.init(displayer);

        // index 0 (start): anything <= warning (100); no lower bound
        PropertyFieldValidator validator = presenter.createMeterValidator(settings, 0);
        assertEquals(validator.validate("aaa"), false);
        assertEquals(validator.validate("0"), true);
        assertEquals(validator.validate("99"), true);
        assertEquals(validator.validate("100"), true);
        assertEquals(validator.validate("101"), false);
        assertEquals(validator.validate("-999999999999"), true);

        // index 1 (warning): between start (0) and critical (500)
        validator = presenter.createMeterValidator(settings, 1);
        assertEquals(validator.validate("0"), true);
        assertEquals(validator.validate("99"), true);
        assertEquals(validator.validate("100"), true);
        assertEquals(validator.validate("101"), true);
        assertEquals(validator.validate("500"), true);
        assertEquals(validator.validate("501"), false);
        assertEquals(validator.validate("-1"), false);

        // index 2 (critical): between warning (100) and end (900)
        validator = presenter.createMeterValidator(settings, 2);
        assertEquals(validator.validate("99"), false);
        assertEquals(validator.validate("100"), true);
        assertEquals(validator.validate("900"), true);
        assertEquals(validator.validate("901"), false);

        // index 3 (end): anything >= critical (500); no upper bound
        validator = presenter.createMeterValidator(settings, 3);
        assertEquals(validator.validate("499"), false);
        assertEquals(validator.validate("500"), true);
        assertEquals(validator.validate("900"), true);
        assertEquals(validator.validate("10000000000000"), true);
    }
}
/******************************************************************************* "FreePastry" Peer-to-Peer Application Development Substrate Copyright 2002-2007, Rice University. Copyright 2006-2007, Max Planck Institute for Software Systems. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Rice University (RICE), Max Planck Institute for Software Systems (MPI-SWS) nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. This software is provided by RICE, MPI-SWS and the contributors on an "as is" basis, without any representations or warranties of any kind, express or implied including, but not limited to, representations or warranties of non-infringement, merchantability or fitness for a particular purpose. In no event shall RICE, MPI-SWS or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. 
*******************************************************************************/ package org.mpisws.p2p.transport.simpleidentity; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Map; import org.mpisws.p2p.transport.ClosedChannelException; import org.mpisws.p2p.transport.ErrorHandler; import org.mpisws.p2p.transport.MessageCallback; import org.mpisws.p2p.transport.MessageRequestHandle; import org.mpisws.p2p.transport.P2PSocket; import org.mpisws.p2p.transport.P2PSocketReceiver; import org.mpisws.p2p.transport.SocketCallback; import org.mpisws.p2p.transport.SocketRequestHandle; import org.mpisws.p2p.transport.TransportLayer; import org.mpisws.p2p.transport.TransportLayerCallback; import org.mpisws.p2p.transport.util.InsufficientBytesException; import org.mpisws.p2p.transport.util.SocketInputBuffer; import org.mpisws.p2p.transport.util.SocketRequestHandleImpl; import org.mpisws.p2p.transport.util.SocketWrapperSocket; import rice.environment.Environment; import rice.environment.logging.Logger; import rice.p2p.util.rawserialization.SimpleOutputBuffer; /** * The purpose of this layer is to identify the opener of a TCP connection, because the * socket is using an ephemeral port. * * @author Jeff Hoye * * @param <Identifier> * @param <MessageType> */ public class SimpleIdentityTransportLayer<Identifier, MessageType> implements TransportLayer<Identifier, MessageType>, TransportLayerCallback<Identifier, MessageType>{ protected TransportLayer<Identifier, MessageType> tl; protected Logger logger; private TransportLayerCallback<Identifier, MessageType> callback; protected ErrorHandler<Identifier> errorHandler; protected Serializer<Identifier> serializer; LocalIdentifierStrategy<Identifier> localIdStrategy; /** * Sends the same identifier every time. 
* @author Jeff Hoye * * @param <Identifier> */ class DefaultLocalIdentifierStrategy<Identifier> implements LocalIdentifierStrategy<Identifier> { byte[] localIdentifierBytes; public DefaultLocalIdentifierStrategy(Identifier i) throws IOException { SimpleOutputBuffer sob = new SimpleOutputBuffer(); serializer.serialize(tl.getLocalIdentifier(), sob); localIdentifierBytes = sob.getBytes(); } public byte[] getLocalIdentifierBytes() { return localIdentifierBytes; } } public SimpleIdentityTransportLayer(TransportLayer<Identifier, MessageType> tl, Serializer<Identifier> serializer, LocalIdentifierStrategy<Identifier> localIdStrategy, Environment env, ErrorHandler<Identifier> handler) throws IOException { this.tl = tl; this.tl.setCallback(this); this.errorHandler = handler; this.logger = env.getLogManager().getLogger(getClass(), null); this.serializer = serializer; this.localIdStrategy = localIdStrategy; if (this.localIdStrategy == null) { this.localIdStrategy = new DefaultLocalIdentifierStrategy<Identifier>(tl.getLocalIdentifier()); } } public void acceptMessages(boolean b) { tl.acceptMessages(b); } public void acceptSockets(boolean b) { tl.acceptSockets(b); } public Identifier getLocalIdentifier() { return tl.getLocalIdentifier(); } public SocketRequestHandle<Identifier> openSocket(Identifier i, final SocketCallback<Identifier> deliverSocketToMe, Map<String, Object> options) { final SocketRequestHandleImpl<Identifier> ret = new SocketRequestHandleImpl<Identifier>(i,options,logger); ret.setSubCancellable(tl.openSocket(i, new SocketCallback<Identifier>() { public void receiveResult(SocketRequestHandle<Identifier> cancellable, P2PSocket<Identifier> sock) { // write the local identifier try { final ByteBuffer writeMe = ByteBuffer.wrap(localIdStrategy.getLocalIdentifierBytes()); new P2PSocketReceiver<Identifier>() { public void receiveSelectResult(P2PSocket<Identifier> socket, boolean canRead, boolean canWrite) throws IOException { if (socket.write(writeMe) < 0) { 
deliverSocketToMe.receiveException(ret, new ClosedChannelException("Socket closed.")); return; } if (writeMe.hasRemaining()) { socket.register(false, true, this); return; } // done deliverSocketToMe.receiveResult(ret, new SocketWrapperSocket<Identifier, Identifier>(socket.getIdentifier(),socket,logger,errorHandler,socket.getOptions())); } public void receiveException(P2PSocket<Identifier> socket, Exception ioe) { deliverSocketToMe.receiveException(ret, ioe); } }.receiveSelectResult(sock, false, true); } catch (IOException ioe) { deliverSocketToMe.receiveException(ret, ioe); } } public void receiveException(SocketRequestHandle<Identifier> s, Exception ex) { deliverSocketToMe.receiveException(ret, ex); } }, options)); return ret; } public MessageRequestHandle<Identifier, MessageType> sendMessage( Identifier i, MessageType m, MessageCallback<Identifier, MessageType> deliverAckToMe, Map<String, Object> options) { return tl.sendMessage(i, m, deliverAckToMe, options); } public void setCallback( TransportLayerCallback<Identifier, MessageType> callback) { this.callback = callback; } public void setErrorHandler(ErrorHandler<Identifier> handler) { this.errorHandler = handler; } public void destroy() { // TODO Auto-generated method stub } public void incomingSocket(P2PSocket<Identifier> s) throws IOException { final SocketInputBuffer sib = new SocketInputBuffer(s); new P2PSocketReceiver<Identifier>() { public void receiveSelectResult(P2PSocket<Identifier> socket, boolean canRead, boolean canWrite) throws IOException { try { Identifier remoteIdentifier = serializer.deserialize(sib, socket.getIdentifier(), socket.getOptions()); callback.incomingSocket(new SocketWrapperSocket<Identifier, Identifier>(remoteIdentifier,socket,logger,errorHandler,socket.getOptions())); } catch (InsufficientBytesException ibe) { socket.register(true, false, this); } // throw the rest } public void receiveException(P2PSocket<Identifier> socket, Exception ioe) { 
errorHandler.receivedException(socket.getIdentifier(), ioe); } }.receiveSelectResult(s, true, false); } public void messageReceived(Identifier i, MessageType m, Map<String, Object> options) throws IOException { callback.messageReceived(i, m, options); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.webmonitor.history; import org.apache.flink.api.common.JobID; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.HistoryServerOptions; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.runtime.history.FsJobArchivist; import org.apache.flink.runtime.jobgraph.JobStatus; import org.apache.flink.runtime.messages.webmonitor.MultipleJobsDetails; import org.apache.flink.runtime.rest.messages.JobsOverviewHeaders; import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.test.util.MiniClusterWithClientResource; import org.apache.flink.util.TestLogger; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.io.IOUtils; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; import 
java.io.InputStream;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Path;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.apache.flink.runtime.rest.handler.legacy.utils.ArchivedJobGenerationUtils.JACKSON_FACTORY;

/**
 * Tests for the HistoryServer.
 */
public class HistoryServerTest extends TestLogger {

    @ClassRule
    public static final TemporaryFolder TMP = new TemporaryFolder();

    private MiniClusterWithClientResource cluster;
    // directory the JobManager archives finished jobs into
    private File jmDirectory;
    // working/web directory for the HistoryServer under test
    private File hsDirectory;

    @Before
    public void setUp() throws Exception {
        jmDirectory = TMP.newFolder("jm");
        hsDirectory = TMP.newFolder("hs");

        Configuration clusterConfig = new Configuration();
        clusterConfig.setString(JobManagerOptions.ARCHIVE_DIR, jmDirectory.toURI().toString());

        cluster = new MiniClusterWithClientResource(
            new MiniClusterResourceConfiguration.Builder()
                .setConfiguration(clusterConfig)
                .setNumberTaskManagers(1)
                .setNumberSlotsPerTaskManager(1)
                .build());
        cluster.before();
    }

    @After
    public void tearDown() {
        if (cluster != null) {
            cluster.after();
        }
    }

    /**
     * Runs two jobs plus one legacy-format archive, waits until all three archives
     * exist, then checks that a freshly started HistoryServer serves all of them
     * through its REST jobs overview.
     */
    @Test
    public void testHistoryServerIntegration() throws Exception {
        final int numJobs = 2;
        for (int x = 0; x < numJobs; x++) {
            runJob();
        }
        createLegacyArchive(jmDirectory.toPath());

        CountDownLatch numFinishedPolls = new CountDownLatch(1);

        Configuration historyServerConfig = new Configuration();
        historyServerConfig.setString(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_DIRS, jmDirectory.toURI().toString());
        historyServerConfig.setString(HistoryServerOptions.HISTORY_SERVER_WEB_DIR, hsDirectory.getAbsolutePath());
        // port 0 = pick any free port; we query the actual port below
        historyServerConfig.setInteger(HistoryServerOptions.HISTORY_SERVER_WEB_PORT, 0);

        // the job is archived asynchronously after env.execute() returns
        File[] archives = jmDirectory.listFiles();
        while (archives == null || archives.length != numJobs + 1) {
            Thread.sleep(50);
            archives = jmDirectory.listFiles();
        }

        HistoryServer hs = new HistoryServer(historyServerConfig, numFinishedPolls);
        try {
            hs.start();
            String baseUrl = "http://localhost:" + hs.getWebPort();
            // wait (bounded) for the server to finish its first archive-fetch poll
            numFinishedPolls.await(10L, TimeUnit.SECONDS);

            ObjectMapper mapper = new ObjectMapper();
            String response = getFromHTTP(baseUrl + JobsOverviewHeaders.URL);
            MultipleJobsDetails overview = mapper.readValue(response, MultipleJobsDetails.class);

            // the two executed jobs plus the manually created legacy archive
            Assert.assertEquals(numJobs + 1, overview.getJobs().size());
        } finally {
            hs.stop();
        }
    }

    /** Executes a trivial streaming job whose completion triggers archiving. */
    private static void runJob() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements(1, 2, 3)
            .print();
        env.execute();
    }

    /**
     * Fetches {@code url} and returns the response body as a string. For HTTP error
     * codes (>= 400) the error stream is read instead so the body is still returned.
     * The response stream is now closed after reading (the original leaked it).
     */
    public static String getFromHTTP(String url) throws Exception {
        URL u = new URL(url);
        HttpURLConnection connection = (HttpURLConnection) u.openConnection();
        connection.setConnectTimeout(100000);
        connection.connect();

        String encoding = connection.getContentEncoding() != null
            ? connection.getContentEncoding()
            : "UTF-8";

        try (InputStream is = connection.getResponseCode() >= 400
                ? connection.getErrorStream()   // error! body lives on the error stream
                : connection.getInputStream()) {
            return IOUtils.toString(is, encoding);
        }
    }

    /**
     * Writes a minimal pre-unification ("legacy") job archive for a synthetic
     * finished job into {@code directory}, exercising the HistoryServer's
     * backwards-compatibility path.
     */
    private static void createLegacyArchive(Path directory) throws IOException {
        JobID jobID = JobID.generate();

        StringWriter sw = new StringWriter();
        try (JsonGenerator gen = JACKSON_FACTORY.createGenerator(sw)) {
            try (JsonObject root = new JsonObject(gen)) {
                try (JsonArray finished = new JsonArray(gen, "finished")) {
                    try (JsonObject job = new JsonObject(gen)) {
                        gen.writeStringField("jid", jobID.toString());
                        gen.writeStringField("name", "testjob");
                        gen.writeStringField("state", JobStatus.FINISHED.name());

                        gen.writeNumberField("start-time", 0L);
                        gen.writeNumberField("end-time", 1L);
                        gen.writeNumberField("duration", 1L);
                        gen.writeNumberField("last-modification", 1L);

                        try (JsonObject tasks = new JsonObject(gen, "tasks")) {
                            gen.writeNumberField("total", 0);
                            gen.writeNumberField("pending", 0);
                            gen.writeNumberField("running", 0);
                            gen.writeNumberField("finished", 0);
                            gen.writeNumberField("canceling", 0);
                            gen.writeNumberField("canceled", 0);
                            gen.writeNumberField("failed", 0);
                        }
                    }
                }
            }
        }

        String json = sw.toString();
        ArchivedJson archivedJson = new ArchivedJson("/joboverview", json);
        FsJobArchivist.archiveJob(new org.apache.flink.core.fs.Path(directory.toUri()), jobID, Collections.singleton(archivedJson));
    }

    /** AutoCloseable helper that opens a JSON object and closes it on close(). */
    private static final class JsonObject implements AutoCloseable {
        private final JsonGenerator gen;

        JsonObject(JsonGenerator gen) throws IOException {
            this.gen = gen;
            gen.writeStartObject();
        }

        private JsonObject(JsonGenerator gen, String name) throws IOException {
            this.gen = gen;
            gen.writeObjectFieldStart(name);
        }

        @Override
        public void close() throws IOException {
            gen.writeEndObject();
        }
    }

    /** AutoCloseable helper that opens a named JSON array and closes it on close(). */
    private static final class JsonArray implements AutoCloseable {
        private final JsonGenerator gen;

        JsonArray(JsonGenerator gen, String name) throws IOException {
            this.gen = gen;
            gen.writeArrayFieldStart(name);
        }

        @Override
        public void close() throws IOException {
            gen.writeEndArray();
        }
    }
}
package org.apache.lucene.index; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.Closeable; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.regex.Matcher; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.InfoStream; /* * This class keeps track of each SegmentInfos instance that * is still "live", either because it corresponds to a * segments_N file in the Directory (a "commit", i.e. a * committed SegmentInfos) or because it's an in-memory * SegmentInfos that a writer is actively updating but has * not yet committed. This class uses simple reference * counting to map the live SegmentInfos instances to * individual files in the Directory. * * The same directory file may be referenced by more than * one IndexCommit, i.e. more than one SegmentInfos. 
* Therefore we count how many commits reference each file. * When all the commits referencing a certain file have been * deleted, the refcount for that file becomes zero, and the * file is deleted. * * A separate deletion policy interface * (IndexDeletionPolicy) is consulted on creation (onInit) * and once per commit (onCommit), to decide when a commit * should be removed. * * It is the business of the IndexDeletionPolicy to choose * when to delete commit points. The actual mechanics of * file deletion, retrying, etc, derived from the deletion * of commit points is the business of the IndexFileDeleter. * * The current default deletion policy is {@link * KeepOnlyLastCommitDeletionPolicy}, which removes all * prior commits when a new commit has completed. This * matches the behavior before 2.2. * * Note that you must hold the write.lock before * instantiating this class. It opens segments_N file(s) * directly with no retry logic. */ final class IndexFileDeleter implements Closeable { /* Files that we tried to delete but failed (likely * because they are open and we are running on Windows), * so we will retry them again later: */ private Set<String> deletable; /* Reference count for all files in the index. * Counts how many existing commits reference a file. **/ private Map<String, RefCount> refCounts = new HashMap<>(); /* Holds all commits (segments_N) currently in the index. * This will have just 1 commit if you are using the * default delete policy (KeepOnlyLastCommitDeletionPolicy). 
* Other policies may leave commit points live for longer * in which case this list would be longer than 1: */ private List<CommitPoint> commits = new ArrayList<>(); /* Holds files we had incref'd from the previous * non-commit checkpoint: */ private final List<String> lastFiles = new ArrayList<>(); /* Commits that the IndexDeletionPolicy have decided to delete: */ private List<CommitPoint> commitsToDelete = new ArrayList<>(); private final InfoStream infoStream; private Directory directory; private IndexDeletionPolicy policy; final boolean startingCommitDeleted; private SegmentInfos lastSegmentInfos; /** Change to true to see details of reference counts when * infoStream is enabled */ public static boolean VERBOSE_REF_COUNTS = false; private final IndexWriter writer; // called only from assert private boolean locked() { return writer == null || Thread.holdsLock(writer); } /** * Initialize the deleter: find all previous commits in * the Directory, incref the files they reference, call * the policy to let it delete commits. This will remove * any files not referenced by any of the commits. 
* @throws IOException if there is a low-level IO error */ public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos, InfoStream infoStream, IndexWriter writer, boolean initialIndexExists) throws IOException { Objects.requireNonNull(writer); this.infoStream = infoStream; this.writer = writer; final String currentSegmentsFile = segmentInfos.getSegmentsFileName(); if (infoStream.isEnabled("IFD")) { infoStream.message("IFD", "init: current segments file is \"" + currentSegmentsFile + "\"; deletionPolicy=" + policy); } this.policy = policy; this.directory = directory; // First pass: walk the files and initialize our ref // counts: long currentGen = segmentInfos.getGeneration(); CommitPoint currentCommitPoint = null; String[] files = directory.listAll(); if (currentSegmentsFile != null) { Matcher m = IndexFileNames.CODEC_FILE_PATTERN.matcher(""); for (String fileName : files) { m.reset(fileName); if (!fileName.endsWith("write.lock") && (m.matches() || fileName.startsWith(IndexFileNames.SEGMENTS) || fileName.startsWith(IndexFileNames.PENDING_SEGMENTS))) { // Add this file to refCounts with initial count 0: getRefCount(fileName); if (fileName.startsWith(IndexFileNames.SEGMENTS) && !fileName.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { // This is a commit (segments or segments_N), and // it's valid (<= the max gen). Load it, then // incref all files it refers to: if (infoStream.isEnabled("IFD")) { infoStream.message("IFD", "init: load commit \"" + fileName + "\""); } SegmentInfos sis = null; try { sis = SegmentInfos.readCommit(directory, fileName); } catch (FileNotFoundException | NoSuchFileException e) { // LUCENE-948: on NFS (and maybe others), if // you have writers switching back and forth // between machines, it's very likely that the // dir listing will be stale and will claim a // file segments_X exists when in fact it // doesn't. 
So, we catch this and handle it // as if the file does not exist if (infoStream.isEnabled("IFD")) { infoStream.message("IFD", "init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point"); } } catch (IOException e) { if (SegmentInfos.generationFromSegmentsFileName(fileName) <= currentGen && directory.fileLength(fileName) > 0) { throw e; } else { // Most likely we are opening an index that // has an aborted "future" commit, so suppress // exc in this case } } if (sis != null) { final CommitPoint commitPoint = new CommitPoint(commitsToDelete, directory, sis); if (sis.getGeneration() == segmentInfos.getGeneration()) { currentCommitPoint = commitPoint; } commits.add(commitPoint); incRef(sis, true); if (lastSegmentInfos == null || sis.getGeneration() > lastSegmentInfos.getGeneration()) { lastSegmentInfos = sis; } } } } } } if (currentCommitPoint == null && currentSegmentsFile != null && initialIndexExists) { // We did not in fact see the segments_N file // corresponding to the segmentInfos that was passed // in. Yet, it must exist, because our caller holds // the write lock. This can happen when the directory // listing was stale (eg when index accessed via NFS // client with stale directory listing cache). 
So we // try now to explicitly open this commit point: SegmentInfos sis = null; try { sis = SegmentInfos.readCommit(directory, currentSegmentsFile); } catch (IOException e) { throw new CorruptIndexException("unable to read current segments_N file", currentSegmentsFile, e); } if (infoStream.isEnabled("IFD")) { infoStream.message("IFD", "forced open of current segments file " + segmentInfos.getSegmentsFileName()); } currentCommitPoint = new CommitPoint(commitsToDelete, directory, sis); commits.add(currentCommitPoint); incRef(sis, true); } // We keep commits list in sorted order (oldest to newest): CollectionUtil.timSort(commits); // refCounts only includes "normal" filenames (does not include write.lock) inflateGens(segmentInfos, refCounts.keySet(), infoStream); // Now delete anything with ref count at 0. These are // presumably abandoned files eg due to crash of // IndexWriter. for(Map.Entry<String, RefCount> entry : refCounts.entrySet() ) { RefCount rc = entry.getValue(); final String fileName = entry.getKey(); if (0 == rc.count) { if (infoStream.isEnabled("IFD")) { infoStream.message("IFD", "init: removing unreferenced file \"" + fileName + "\""); } deleteFile(fileName); } } // Finally, give policy a chance to remove things on // startup: policy.onInit(commits); // Always protect the incoming segmentInfos since // sometime it may not be the most recent commit checkpoint(segmentInfos, false); startingCommitDeleted = currentCommitPoint == null ? false : currentCommitPoint.isDeleted(); deleteCommits(); } /** Set all gens beyond what we currently see in the directory, to avoid double-write in cases where the previous IndexWriter did not * gracefully close/rollback (e.g. os/machine crashed or lost power). 
   */
  static void inflateGens(SegmentInfos infos, Collection<String> files, InfoStream infoStream) {
    long maxSegmentGen = Long.MIN_VALUE;
    int maxSegmentName = Integer.MIN_VALUE;

    // Confusingly, this is the union of liveDocs, field infos, doc values
    // (and maybe others, in the future) gens. This is somewhat messy,
    // since it means DV updates will suddenly write to the next gen after
    // live docs' gen, for example, but we don't have the APIs to ask the
    // codec which file is which:
    Map<String,Long> maxPerSegmentGen = new HashMap<>();

    for(String fileName : files) {
      if (fileName.equals(IndexFileNames.OLD_SEGMENTS_GEN) || fileName.equals(IndexWriter.WRITE_LOCK_NAME)) {
        // do nothing
      } else if (fileName.startsWith(IndexFileNames.SEGMENTS)) {
        try {
          maxSegmentGen = Math.max(SegmentInfos.generationFromSegmentsFileName(fileName), maxSegmentGen);
        } catch (NumberFormatException ignore) {
          // trash file: we have to handle this since we allow anything starting with 'segments' here
        }
      } else if (fileName.startsWith(IndexFileNames.PENDING_SEGMENTS)) {
        // strip the "pending_" prefix (8 chars) so the generation parser sees a segments_N name
        try {
          maxSegmentGen = Math.max(SegmentInfos.generationFromSegmentsFileName(fileName.substring(8)), maxSegmentGen);
        } catch (NumberFormatException ignore) {
          // trash file: we have to handle this since we allow anything starting with 'pending_segments' here
        }
      } else {
        String segmentName = IndexFileNames.parseSegmentName(fileName);
        assert segmentName.startsWith("_"): "wtf? file=" + fileName;
        // segment names are "_<base36 int>"; track the largest so infos.counter can be inflated past it
        maxSegmentName = Math.max(maxSegmentName, Integer.parseInt(segmentName.substring(1), Character.MAX_RADIX));
        Long curGen = maxPerSegmentGen.get(segmentName);
        if (curGen == null) {
          curGen = 0L;
        }
        try {
          curGen = Math.max(curGen, IndexFileNames.parseGeneration(fileName));
        } catch (NumberFormatException ignore) {
          // trash file: we have to handle this since codec regex is only so good
        }
        maxPerSegmentGen.put(segmentName, curGen);
      }
    }

    // Generation is advanced before write:
    infos.setGeneration(Math.max(infos.getGeneration(), maxSegmentGen));
    if (infos.counter < 1+maxSegmentName) {
      if (infoStream.isEnabled("IFD")) {
        infoStream.message("IFD", "init: inflate infos.counter to " + (1+maxSegmentName) + " vs current=" + infos.counter);
      }
      infos.counter = 1+maxSegmentName;
    }
    // Push each segment's per-file gens (del/fieldInfos/docValues) past the max gen seen on disk:
    for(SegmentCommitInfo info : infos) {
      Long gen = maxPerSegmentGen.get(info.info.name);
      assert gen != null;
      long genLong = gen;
      if (info.getNextWriteDelGen() < genLong+1) {
        if (infoStream.isEnabled("IFD")) {
          infoStream.message("IFD", "init: seg=" + info.info.name + " set nextWriteDelGen=" + (genLong+1) + " vs current=" + info.getNextWriteDelGen());
        }
        info.setNextWriteDelGen(genLong+1);
      }
      if (info.getNextWriteFieldInfosGen() < genLong+1) {
        if (infoStream.isEnabled("IFD")) {
          infoStream.message("IFD", "init: seg=" + info.info.name + " set nextWriteFieldInfosGen=" + (genLong+1) + " vs current=" + info.getNextWriteFieldInfosGen());
        }
        info.setNextWriteFieldInfosGen(genLong+1);
      }
      if (info.getNextWriteDocValuesGen() < genLong+1) {
        if (infoStream.isEnabled("IFD")) {
          infoStream.message("IFD", "init: seg=" + info.info.name + " set nextWriteDocValuesGen=" + (genLong+1) + " vs current=" + info.getNextWriteDocValuesGen());
        }
        info.setNextWriteDocValuesGen(genLong+1);
      }
    }
  }

  /** Verifies the owning writer is still usable; throws if it was closed or hit a tragic exception. */
  void ensureOpen() throws AlreadyClosedException {
    writer.ensureOpen(false);
    // since we allow 'closing' state, we must still check this, we could be closing because we hit e.g. OOM
    if (writer.tragedy != null) {
      throw new AlreadyClosedException("refusing to delete any files: this IndexWriter hit an unrecoverable exception", writer.tragedy);
    }
  }

  // for testing
  boolean isClosed() {
    try {
      ensureOpen();
      return false;
    } catch (AlreadyClosedException ace) {
      return true;
    }
  }

  /** Returns the most recent SegmentInfos this deleter was checkpointed/initialized with. */
  public SegmentInfos getLastSegmentInfos() {
    return lastSegmentInfos;
  }

  /**
   * Remove the CommitPoints in the commitsToDelete List by
   * DecRef'ing all files from each SegmentInfos.
   */
  private void deleteCommits() {
    int size = commitsToDelete.size();

    if (size > 0) {

      // First decref all files that had been referred to by
      // the now-deleted commits:
      Throwable firstThrowable = null;
      for(int i=0;i<size;i++) {
        CommitPoint commit = commitsToDelete.get(i);
        if (infoStream.isEnabled("IFD")) {
          infoStream.message("IFD", "deleteCommits: now decRef commit \"" + commit.getSegmentsFileName() + "\"");
        }
        try {
          decRef(commit.files);
        } catch (Throwable t) {
          // keep decRef'ing remaining commits; rethrow the first failure below
          if (firstThrowable == null) {
            firstThrowable = t;
          }
        }
      }
      commitsToDelete.clear();

      // NOTE: does nothing if firstThrowable is null
      IOUtils.reThrowUnchecked(firstThrowable);

      // Now compact commits to remove deleted ones (preserving the sort):
      size = commits.size();
      int readFrom = 0;
      int writeTo = 0;
      while(readFrom < size) {
        CommitPoint commit = commits.get(readFrom);
        if (!commit.deleted) {
          if (writeTo != readFrom) {
            commits.set(writeTo, commits.get(readFrom));
          }
          writeTo++;
        }
        readFrom++;
      }

      while(size > writeTo) {
        commits.remove(size-1);
        size--;
      }
    }
  }

  /**
   * Writer calls this when it has hit an error and had to
   * roll back, to tell us that there may now be
   * unreferenced files in the filesystem. So we re-list
   * the filesystem and delete such files. If segmentName
   * is non-null, we will only delete files corresponding to
   * that segment.
   */
  void refresh(String segmentName) throws IOException {
    assert locked();

    String[] files = directory.listAll();
    String segmentPrefix1;
    String segmentPrefix2;
    if (segmentName != null) {
      // match both "<seg>.<ext>" and "<seg>_<gen>.<ext>" style file names
      segmentPrefix1 = segmentName + ".";
      segmentPrefix2 = segmentName + "_";
    } else {
      segmentPrefix1 = null;
      segmentPrefix2 = null;
    }

    // reusable matcher; reset per file below
    Matcher m = IndexFileNames.CODEC_FILE_PATTERN.matcher("");

    for(int i=0;i<files.length;i++) {
      String fileName = files[i];
      m.reset(fileName);
      if ((segmentName == null || fileName.startsWith(segmentPrefix1) || fileName.startsWith(segmentPrefix2))
          && !fileName.endsWith("write.lock")
          && !refCounts.containsKey(fileName)
          && (m.matches() || fileName.startsWith(IndexFileNames.SEGMENTS)
              // we only try to clear out pending_segments_N during rollback(), because we don't ref-count it
              // TODO: this is sneaky, should we do this, or change TestIWExceptions? rollback closes anyway, and
              // any leftover file will be deleted/retried on next IW bootup anyway...
              || (segmentName == null && fileName.startsWith(IndexFileNames.PENDING_SEGMENTS)))) {
        // Unreferenced file, so remove it
        if (infoStream.isEnabled("IFD")) {
          infoStream.message("IFD", "refresh [prefix=" + segmentName + "]: removing newly created unreferenced file \"" + fileName + "\"");
        }
        deleteFile(fileName);
      }
    }
  }

  /** Re-lists the whole directory and deletes all unreferenced files. */
  void refresh() throws IOException {
    // Set to null so that we regenerate the list of pending
    // files; else we can accumulate same file more than
    // once
    assert locked();
    deletable = null;
    refresh(null);
  }

  @Override
  public void close() {
    // DecRef old files from the last checkpoint, if any:
    assert locked();

    if (!lastFiles.isEmpty()) {
      try {
        decRef(lastFiles);
      } finally {
        // always clear, even if decRef throws, so close is idempotent
        lastFiles.clear();
      }
    }

    deletePendingFiles();
  }

  /**
   * Revisits the {@link IndexDeletionPolicy} by calling its
   * {@link IndexDeletionPolicy#onCommit(List)} again with the known commits.
   * This is useful in cases where a deletion policy which holds onto index
   * commits is used. The application may know that some commits are not held by
   * the deletion policy anymore and call
   * {@link IndexWriter#deleteUnusedFiles()}, which will attempt to delete the
   * unused commits again.
   */
  void revisitPolicy() throws IOException {
    assert locked();
    if (infoStream.isEnabled("IFD")) {
      infoStream.message("IFD", "now revisitPolicy");
    }

    if (commits.size() > 0) {
      policy.onCommit(commits);
      deleteCommits();
    }
  }

  /** Retries deletion of files that previously failed to delete (e.g. still open on Windows). */
  public void deletePendingFiles() {
    assert locked();
    if (deletable != null) {
      // swap out the set first so deleteFile() can re-queue failures without us re-looping forever
      Set<String> oldDeletable = deletable;
      deletable = null;
      for(String fileName : oldDeletable) {
        if (infoStream.isEnabled("IFD")) {
          infoStream.message("IFD", "delete pending file " + fileName);
        }
        RefCount rc = refCounts.get(fileName);
        if (rc != null && rc.count > 0) {
          // LUCENE-5904: should never happen! This means we are about to pending-delete a referenced index file
          assert false: "fileName=" + fileName + " is in pending delete list but also has refCount=" + rc.count;
        } else {
          deleteFile(fileName);
        }
      }
    }
  }

  /**
   * For definition of "check point" see IndexWriter comments:
   * "Clarification: Check Points (and commits)".
   *
   * Writer calls this when it has made a "consistent
   * change" to the index, meaning new files are written to
   * the index and the in-memory SegmentInfos have been
   * modified to point to those files.
   *
   * This may or may not be a commit (segments_N may or may
   * not have been written).
   *
   * We simply incref the files referenced by the new
   * SegmentInfos and decref the files we had previously
   * seen (if any).
   *
   * If this is a commit, we also call the policy to give it
   * a chance to remove other commits. If any commits are
   * removed, we decref their files as well.
   */
  public void checkpoint(SegmentInfos segmentInfos, boolean isCommit) throws IOException {
    assert locked();
    assert Thread.holdsLock(writer);
    long t0 = 0;
    if (infoStream.isEnabled("IFD")) {
      t0 = System.nanoTime();
      infoStream.message("IFD", "now checkpoint \"" + writer.segString(writer.toLiveInfos(segmentInfos)) + "\" [" + segmentInfos.size() + " segments " + "; isCommit = " + isCommit + "]");
    }

    // Try again now to delete any previously un-deletable
    // files (because they were in use, on Windows):
    deletePendingFiles();

    // Incref the files:
    incRef(segmentInfos, isCommit);

    if (isCommit) {
      // Append to our commits list:
      commits.add(new CommitPoint(commitsToDelete, directory, segmentInfos));

      // Tell policy so it can remove commits:
      policy.onCommit(commits);

      // Decref files for commits that were deleted by the policy:
      deleteCommits();
    } else {
      // DecRef old files from the last checkpoint, if any:
      try {
        decRef(lastFiles);
      } finally {
        lastFiles.clear();
      }

      // Save files so we can decr on next checkpoint/commit:
      lastFiles.addAll(segmentInfos.files(false));
    }
    if (infoStream.isEnabled("IFD")) {
      long t1 = System.nanoTime();
      infoStream.message("IFD", ((t1-t0)/1000000) + " msec to checkpoint");
    }
  }

  /** Increfs every file referenced by the given SegmentInfos; includes segments_N itself when isCommit. */
  void incRef(SegmentInfos segmentInfos, boolean isCommit) throws IOException {
    assert locked();
    // If this is a commit point, also incRef the
    // segments_N file:
    for(final String fileName: segmentInfos.files(isCommit)) {
      incRef(fileName);
    }
  }

  /** Increfs each of the given file names. */
  void incRef(Collection<String> files) {
    assert locked();
    for(final String file : files) {
      incRef(file);
    }
  }

  /** Increfs a single file, creating its RefCount entry on first reference. */
  void incRef(String fileName) {
    assert locked();
    RefCount rc = getRefCount(fileName);
    if (infoStream.isEnabled("IFD")) {
      if (VERBOSE_REF_COUNTS) {
        infoStream.message("IFD", " IncRef \"" + fileName + "\": pre-incr count is " + rc.count);
      }
    }
    rc.IncRef();
  }

  /** Decrefs all provided files, even on exception; throws first exception hit, if any.
   *  they are new (have not yet been incref'd). */
  void deleteNewFiles(Collection<String> files) {
    assert locked();
    for (final String fileName: files) {
      // NOTE: it's very unusual yet possible for the
      // refCount to be present and 0: it can happen if you
      // open IW on a crashed index, and it removes a bunch
      // of unref'd files, and then you add new docs / do
      // merging, and it reuses that segment name.
      // TestCrash.testCrashAfterReopen can hit this:
      if (!refCounts.containsKey(fileName) || refCounts.get(fileName).count == 0) {
        if (infoStream.isEnabled("IFD")) {
          infoStream.message("IFD", "delete new file \"" + fileName + "\"");
        }
        deleteFile(fileName);
      }
    }
  }

  /** Deletes one file from the directory; on failure the file is queued on {@code deletable} for a later retry. */
  void deleteFile(String fileName) {
    assert locked();
    ensureOpen();
    try {
      if (infoStream.isEnabled("IFD")) {
        infoStream.message("IFD", "delete \"" + fileName + "\"");
      }
      directory.deleteFile(fileName);
    } catch (IOException e) {
      // if delete fails
      // Some operating systems (e.g. Windows) don't
      // permit a file to be deleted while it is opened
      // for read (e.g. by another process or thread). So
      // we assume that when a delete fails it is because
      // the file is open in another process, and queue
      // the file for subsequent deletion.
      if (infoStream.isEnabled("IFD")) {
        infoStream.message("IFD", "unable to remove file \"" + fileName + "\": " + e.toString() + "; Will re-try later.");
      }
      if (deletable == null) {
        deletable = new HashSet<>();
      }
      deletable.add(fileName);
      // add to deletable
    }
  }

  /**
   * Tracks the reference count for a single index file:
   */
  final private static class RefCount {

    // fileName used only for better assert error messages
    final String fileName;
    // true once the first IncRef has happened; allows the very first increment from 0
    boolean initDone;

    RefCount(String fileName) {
      this.fileName = fileName;
    }

    int count;

    public int IncRef() {
      if (!initDone) {
        initDone = true;
      } else {
        assert count > 0: Thread.currentThread().getName() + ": RefCount is 0 pre-increment for file \"" + fileName + "\"";
      }
      return ++count;
    }

    public int DecRef() {
      assert count > 0: Thread.currentThread().getName() + ": RefCount is 0 pre-decrement for file \"" + fileName + "\"";
      return --count;
    }
  }

  /**
   * Holds details for each commit point. This class is
   * also passed to the deletion policy. Note: this class
   * has a natural ordering that is inconsistent with
   * equals.
   */
  final private static class CommitPoint extends IndexCommit {

    Collection<String> files;
    String segmentsFileName;
    boolean deleted;
    Directory directory;
    Collection<CommitPoint> commitsToDelete;
    long generation;
    final Map<String,String> userData;
    private final int segmentCount;

    public CommitPoint(Collection<CommitPoint> commitsToDelete, Directory directory, SegmentInfos segmentInfos) throws IOException {
      this.directory = directory;
      this.commitsToDelete = commitsToDelete;
      userData = segmentInfos.getUserData();
      segmentsFileName = segmentInfos.getSegmentsFileName();
      generation = segmentInfos.getGeneration();
      // snapshot of all files (including segments_N) referenced by this commit
      files = Collections.unmodifiableCollection(segmentInfos.files(true));
      segmentCount = segmentInfos.size();
    }

    @Override
    public String toString() {
      return "IndexFileDeleter.CommitPoint(" + segmentsFileName + ")";
    }

    @Override
    public int getSegmentCount() {
      return segmentCount;
    }

    @Override
    public String getSegmentsFileName() {
      return segmentsFileName;
    }

    @Override
    public Collection<String> getFileNames() {
      return files;
    }

    @Override
    public Directory getDirectory() {
      return directory;
    }

    @Override
    public long getGeneration() {
      return generation;
    }

    @Override
    public Map<String,String> getUserData() {
      return userData;
    }

    /**
     * Called only by the deletion policy, to remove this
     * commit point from the index.
     */
    @Override
    public void delete() {
      if (!deleted) {
        deleted = true;
        commitsToDelete.add(this);
      }
    }

    @Override
    public boolean isDeleted() {
      return deleted;
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.engine;

import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene87.Lucene87Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.suggest.document.Completion84PostingsFormat;
import org.apache.lucene.search.suggest.document.SuggestField;
import org.apache.lucene.store.Directory;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.search.suggest.completion.CompletionStats;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

/**
 * Tests for {@code CompletionStatsCache}: exceptions must not be cached, concurrent
 * callers must share one stats computation, and refreshes must invalidate correctly.
 */
public class CompletionStatsCacheTests extends ESTestCase {

    // A failed stats computation must not be cached: each get() re-runs the supplier.
    public void testExceptionsAreNotCached() {
        final AtomicInteger openCount = new AtomicInteger();
        final CompletionStatsCache completionStatsCache = new CompletionStatsCache(() -> {
            throw new ElasticsearchException("simulated " + openCount.incrementAndGet());
        });

        assertThat(expectThrows(ElasticsearchException.class, completionStatsCache::get).getMessage(), equalTo("simulated 1"));
        assertThat(expectThrows(ElasticsearchException.class, completionStatsCache::get).getMessage(), equalTo("simulated 2"));
    }

    public void testCompletionStatsCache() throws IOException, InterruptedException {
        final IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
        final PostingsFormat postingsFormat = new Completion84PostingsFormat();
        indexWriterConfig.setCodec(new Lucene87Codec() {
            @Override
            public PostingsFormat getPostingsFormatForField(String field) {
                return postingsFormat; // all fields are suggest fields
            }
        });

        // never cache queries, so the searcher lifecycle is fully driven by this test
        final QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() {
            @Override
            public void onUse(Query query) {
            }

            @Override
            public boolean shouldCache(Query query) {
                return false;
            }
        };

        try (Directory directory = newDirectory();
             IndexWriter indexWriter = new IndexWriter(directory, indexWriterConfig)) {

            final Document document = new Document();
            document.add(new SuggestField("suggest1", "val", 1));
            document.add(new SuggestField("suggest2", "val", 1));
            document.add(new SuggestField("suggest2", "anotherval", 1));
            document.add(new SuggestField("otherfield", "val", 1));
            document.add(new SuggestField("otherfield", "anotherval", 1));
            document.add(new SuggestField("otherfield", "yetmoreval", 1));
            indexWriter.addDocument(document);

            // counts how many times the cache actually opened (and closed) a searcher
            final OpenCloseCounter openCloseCounter = new OpenCloseCounter();
            final CompletionStatsCache completionStatsCache = new CompletionStatsCache(() -> {
                openCloseCounter.countOpened();
                try {
                    final DirectoryReader directoryReader = DirectoryReader.open(indexWriter);
                    return new Engine.Searcher("test", directoryReader, null, null, queryCachingPolicy, () -> {
                        openCloseCounter.countClosed();
                        IOUtils.close(directoryReader);
                    });
                } catch (IOException e) {
                    throw new AssertionError(e);
                }
            });

            // six concurrent callers with different field patterns, released together by the harness barrier
            final int threadCount = 6;
            final TestHarness testHarness = new TestHarness(completionStatsCache, threadCount);
            final Thread[] threads = new Thread[threadCount];
            threads[0] = new Thread(() -> testHarness.getStats(0, "*"));
            threads[1] = new Thread(() -> testHarness.getStats(1, "suggest1", "suggest2"));
            threads[2] = new Thread(() -> testHarness.getStats(2, "sug*"));
            threads[3] = new Thread(() -> testHarness.getStats(3, "no match*"));
            threads[4] = new Thread(() -> testHarness.getStats(4));
            threads[5] = new Thread(() -> testHarness.getStats(5, (String[]) null));

            for (Thread thread : threads) {
                thread.start();
            }

            testHarness.start();

            for (Thread thread : threads) {
                thread.join();
            }

            // 0: "*" should match all fields:
            final long suggest1Size = testHarness.getResult(0).getFields().get("suggest1");
            final long suggest2Size = testHarness.getResult(0).getFields().get("suggest2");
            final long otherFieldSize = testHarness.getResult(0).getFields().get("otherfield");
            final long totalSizeInBytes = testHarness.getResult(0).getSizeInBytes();
            assertThat(suggest1Size, greaterThan(0L));
            assertThat(suggest2Size, greaterThan(0L));
            assertThat(otherFieldSize, greaterThan(0L));
            assertThat(totalSizeInBytes, equalTo(suggest1Size + suggest2Size + otherFieldSize));

            // 1: enumerating fields omits the other ones
            assertThat(testHarness.getResult(1).getSizeInBytes(), equalTo(totalSizeInBytes));
            assertThat(testHarness.getResult(1).getFields().get("suggest1"), equalTo(suggest1Size));
            assertThat(testHarness.getResult(1).getFields().get("suggest2"), equalTo(suggest2Size));
            assertFalse(testHarness.getResult(1).getFields().containsField("otherfield"));

            // 2: wildcards also exclude some fields
            assertThat(testHarness.getResult(2).getSizeInBytes(), equalTo(totalSizeInBytes));
            assertThat(testHarness.getResult(2).getFields().get("suggest1"), equalTo(suggest1Size));
            assertThat(testHarness.getResult(2).getFields().get("suggest2"), equalTo(suggest2Size));
            assertFalse(testHarness.getResult(2).getFields().containsField("otherfield"));

            // 3: non-matching wildcard returns empty set of fields
            assertThat(testHarness.getResult(3).getSizeInBytes(), equalTo(totalSizeInBytes));
            assertFalse(testHarness.getResult(3).getFields().containsField("suggest1"));
            assertFalse(testHarness.getResult(3).getFields().containsField("suggest2"));
            assertFalse(testHarness.getResult(3).getFields().containsField("otherfield"));

            // 4: no fields means per-fields stats is null
            assertThat(testHarness.getResult(4).getSizeInBytes(), equalTo(totalSizeInBytes));
            assertNull(testHarness.getResult(4).getFields());

            // 5: null fields means per-fields stats is null
            assertThat(testHarness.getResult(5).getSizeInBytes(), equalTo(totalSizeInBytes));
            assertNull(testHarness.getResult(5).getFields());

            // the stats were only computed once
            openCloseCounter.assertCount(1);

            // the stats are not recomputed on a refresh
            completionStatsCache.afterRefresh(true);
            openCloseCounter.assertCount(1);

            // but they are recomputed on the next get
            completionStatsCache.get();
            openCloseCounter.assertCount(2);

            // and they do update
            final Document document2 = new Document();
            document2.add(new SuggestField("suggest1", "foo", 1));
            document2.add(new SuggestField("suggest2", "bar", 1));
            document2.add(new SuggestField("otherfield", "baz", 1));
            indexWriter.addDocument(document2);
            completionStatsCache.afterRefresh(true);
            final CompletionStats updatedStats = completionStatsCache.get();
            assertThat(updatedStats.getSizeInBytes(), greaterThan(totalSizeInBytes));
            openCloseCounter.assertCount(3);

            // beforeRefresh does not invalidate the cache
            completionStatsCache.beforeRefresh();
            completionStatsCache.get();
            openCloseCounter.assertCount(3);

            // afterRefresh does not invalidate the cache if no refresh took place
            completionStatsCache.afterRefresh(false);
            completionStatsCache.get();
            openCloseCounter.assertCount(3);
        }
    }

    /** Counts searcher open/close pairs; every opened searcher must also be closed. */
    private static class OpenCloseCounter {
        private final AtomicInteger openCount = new AtomicInteger();
        private final AtomicInteger closeCount = new AtomicInteger();

        void countOpened() {
            openCount.incrementAndGet();
        }

        void countClosed() {
            closeCount.incrementAndGet();
        }

        void assertCount(int expectedCount) {
            assertThat(openCount.get(), equalTo(expectedCount));
            assertThat(closeCount.get(), equalTo(expectedCount));
        }
    }

    /** Releases all worker threads at once via a barrier and collects their per-thread results. */
    private static class TestHarness {
        private final CompletionStatsCache completionStatsCache;
        private final CyclicBarrier cyclicBarrier;
        private final CompletionStats[] results;

        TestHarness(CompletionStatsCache completionStatsCache, int resultCount) {
            this.completionStatsCache = completionStatsCache;
            results = new CompletionStats[resultCount];
            // +1 party for the main thread, which releases the workers via start()
            cyclicBarrier = new CyclicBarrier(resultCount + 1);
        }

        void getStats(int threadIndex, String... fieldPatterns) {
            start();
            results[threadIndex] = completionStatsCache.get(fieldPatterns);
        }

        void start() {
            try {
                cyclicBarrier.await();
            } catch (InterruptedException | BrokenBarrierException e) {
                throw new AssertionError(e);
            }
        }

        CompletionStats getResult(int index) {
            return results[index];
        }
    }
}
/** * */ package securbank.controller; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.util.List; import java.util.UUID; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import securbank.exceptions.Exceptions; import securbank.models.CreditCard; import securbank.models.CreditCardStatement; import securbank.models.Transaction; import securbank.models.Transfer; import securbank.models.User; import securbank.models.ViewAuthorization; import securbank.services.CreditCardService; import securbank.services.OtpService; import securbank.services.PDFService; import securbank.services.TransactionService; import securbank.services.TransferService; import securbank.services.UserService; import securbank.services.ViewAuthorizationService; import securbank.validators.EditUserFormValidator; import securbank.validators.NewMerchantPaymentFormValidator; import securbank.validators.NewTransactionFormValidator; import securbank.validators.NewTransferFormValidator; import securbank.validators.NewUserFormValidator; /** * @author Ayush Gupta * */ @Controller public class MerchantController { @Autowired UserService userService; @Autowired PDFService pdfService; @Autowired private TransactionService transactionService; @Autowired 
NewTransactionFormValidator transactionFormValidator; @Autowired private TransferService transferService; @Autowired NewTransferFormValidator transferFormValidator; @Autowired public HttpSession session; @Autowired CreditCardService creditCardService; @Autowired ViewAuthorizationService viewAuthorizationService; @Autowired NewUserFormValidator userFormValidator; @Autowired EditUserFormValidator editUserFormValidator; @Autowired NewMerchantPaymentFormValidator merchantPaymentFormValidator; @Autowired OtpService otpService; final static Logger logger = LoggerFactory.getLogger(MerchantController.class); @GetMapping("/merchant/details") public String currentUserDetails(Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { //return "redirect:/error?code=400&path=user-notfound"; throw new Exceptions("400","User Not Found !"); } model.addAttribute("user", user); logger.info("GET request: External user detail"); return "merchant/detail"; } @GetMapping("/merchant") public String currentMerchant(Model model) throws Exceptions { return "redirect:/merchant/details"; } @GetMapping("/merchant/createtransaction") public String newTransactionForm(Model model){ model.addAttribute("transaction", new Transaction()); logger.info("GET request: Extrernal user transaction creation request"); return "merchant/createtransaction"; } @GetMapping("/merchant/transaction/otp") public String createTransactionOtp(Model model){ model.addAttribute("transaction", new Transaction()); logger.info("GET request: Extrernal user transaction generate OTP"); User currentUser = userService.getCurrentUser(); otpService.createOtpForUser(currentUser); return "redirect:/merchant/createtransaction"; } @PostMapping("/merchant/createtransaction") public String submitNewTransaction(@ModelAttribute Transaction transaction, BindingResult bindingResult) throws Exceptions { logger.info("POST request: Submit transaction"); transactionFormValidator.validate(transaction, 
bindingResult); if(bindingResult.hasErrors()){ logger.info("POST request: createtransaction form with validation errors"); return "redirect:/merchant/createtransaction"; } if(transaction.getType().contentEquals("CREDIT")){ if (transactionService.initiateCredit(transaction) == null) { //return "redirect:/error?code=400&path=transaction-error"; throw new Exceptions("400","Transaction Error !"); } } else { if (transactionService.initiateDebit(transaction) == null) { //return "redirect:/error?code=400&path=transaction-error"; throw new Exceptions("400","Transaction Error !"); } } //deactivate current otp otpService.deactivateOtpByUser(userService.getCurrentUser()); return "redirect:/merchant/createtransaction?successTransaction=true"; } @GetMapping("/merchant/createtransfer") public String newTransferForm(Model model){ model.addAttribute("transfer", new Transfer()); logger.info("GET request: Extrernal user transfer creation request"); return "merchant/createtransfer"; } @GetMapping("/merchant/transfer/otp") public String createTransferOtp(Model model){ model.addAttribute("transfer", new Transfer()); logger.info("GET request: Extrernal user transfer generate OTP"); User currentUser = userService.getCurrentUser(); otpService.createOtpForUser(currentUser); return "redirect:/merchant/createtransfer"; } @PostMapping("merchant/createtransfer") public String submitNewTransfer(@ModelAttribute Transfer transfer, BindingResult bindingResult) throws Exceptions { logger.info("POST request: Submit transfer"); transferFormValidator.validate(transfer, bindingResult); if(bindingResult.hasErrors()){ logger.info("POST request: createtransfer form with validation errors"); return "redirect:merchant/createtransfer"; } if(transferService.initiateTransfer(transfer)==null){ //return "redirect:/error?code=400&path=transfer-error"; throw new Exceptions("400","Transfer Error !"); } //deactivate current otp otpService.deactivateOtpByUser(userService.getCurrentUser()); return 
"redirect:/merchant/createtransfer?successTransaction=true";
    }

    /** Renders the merchant payment form. */
    @GetMapping("/merchant/payment")
    public String newMerchantTransferForm(Model model){
        model.addAttribute("transfer", new Transfer());
        logger.info("GET request: Extrernal user transfer creation request");

        return "merchant/payment";
    }

    /** Generates a fresh OTP for the current user, then returns to the payment form. */
    @GetMapping("/merchant/payment/otp")
    public String createMerchantTransferOtp(Model model){
        model.addAttribute("transfer", new Transfer());
        logger.info("GET request: Extrernal user transfer generate OTP");
        User currentUser = userService.getCurrentUser();
        otpService.createOtpForUser(currentUser);

        return "redirect:/merchant/payment";
    }

    /**
     * Submits an OTP-protected merchant payment request.
     *
     * @throws Exceptions 400 on missing/mismatched OTP, validation errors, or
     *                    a failed payment initiation
     */
    @PostMapping("/merchant/payment")
    public String submitNewMerchantTransfer(@ModelAttribute Transfer transfer, BindingResult bindingResult) throws Exceptions {
        logger.info("POST request: Submit transfer");

        merchantPaymentFormValidator.validate(transfer, bindingResult);

        // BUG FIX: getOtpByUser may return null when no OTP was generated (or it
        // expired); previously this caused a NullPointerException (HTTP 500)
        // instead of a clean 400 response.
        if (otpService.getOtpByUser(userService.getCurrentUser()) == null) {
            logger.info("No active OTP for current user");
            //return "redirect:/error?code=400&path=transfer-error";
            throw new Exceptions("400","Transfer Error !");
        }
        String otp = otpService.getOtpByUser(userService.getCurrentUser()).getCode();
        // compare with the known-non-null code first so a missing form OTP cannot NPE
        if(!otp.equals(transfer.getOtp())){
            logger.info("Otp mismatch");
            //return "redirect:/error?code=400&path=transfer-error";
            throw new Exceptions("400","Transfer Error !");
        }

        if(bindingResult.hasErrors()){
            logger.info("POST request: createtransfer form with validation errors");
            //return "redirect:/error?code=400&path=transfer-error";
            throw new Exceptions("400","Transfer Error !");
        }

        if(transferService.initiateMerchantPaymentRequest(transfer)==null){
            //return "redirect:/error?code=400&path=transfer-error";
            throw new Exceptions("400","Transfer Error !");
        }

        //deactivate current otp
        otpService.deactivateOtpByUser(userService.getCurrentUser());

        return "redirect:/merchant/payment?successPayment=true";
    }

    /** Renders the profile edit form for the current merchant. */
    @GetMapping("/merchant/edit")
    public String editUser(Model model) {
        User user = userService.getCurrentUser();
        if (user == null) {
            return "redirect:/error";
        }
        model.addAttribute("user", user);

        return "merchant/edit";
    }

    @PostMapping("/merchant/edit")
    public String editSubmit(@ModelAttribute User user, BindingResult
bindingResult) {
        // Validates the edit form and files a modification request for approval.
        editUserFormValidator.validate(user, bindingResult);
        if (bindingResult.hasErrors()) {
            return "merchant/edit";
        }

        // create request
        userService.createExternalModificationRequest(user);

        return "redirect:/merchant/details?successEdit=true";
    }

    /** Lists the current merchant's transfers that are still waiting for action. */
    @GetMapping("/merchant/transfers")
    public String getTransfers(Model model) throws Exceptions {
        logger.info("GET request: All pending transfers");
        List<Transfer> transfers = transferService.getTransfersByStatusAndUser(userService.getCurrentUser(),"Waiting");
        if (transfers == null) {
            //return "redirect:/error?code=404&path=transfers-not-found";
            throw new Exceptions("404","Transfer Not Found !");
        }
        model.addAttribute("transfers", transfers);
        model.addAttribute("currentUser", userService.getCurrentUser());

        return "merchant/pendingtransfers";
    }

    /**
     * Approves or rejects a pending transfer addressed to this merchant.
     * Only the owner of the source account may act, and the destination must
     * be a merchant account.
     */
    @PostMapping("/merchant/transfer/request/{id}")
    public String approveRejectTransfer(@ModelAttribute Transfer trans, @PathVariable() UUID id, BindingResult bindingResult) throws Exceptions {
        Transfer transfer = transferService.getTransferById(id);
        if (transfer == null) {
            //return "redirect:/error?code=404&path=request-invalid";
            throw new Exceptions("404","Transfer Not Found !");
        }

        // checks if user is authorized for the request to approve
        if (!transfer.getFromAccount().getUser().getEmail().equalsIgnoreCase(userService.getCurrentUser().getEmail())) {
            logger.warn("Transafer made TO non external account");
            //return "redirect:/error?code=401&path=request-unauthorised";
            throw new Exceptions("401"," ");
        }

        if (!transfer.getToAccount().getUser().getRole().equalsIgnoreCase("ROLE_MERCHANT")) {
            logger.warn("Transafer made FROM non merchant account");
            //return "redirect:/error?code=401&path=request-unauthorised";
            throw new Exceptions("401"," ");
        }

        if("approved".equalsIgnoreCase(trans.getStatus())){
            //check if transfer is valid in case modified
            if(transferService.isTransferValid(transfer)==false){
                //return "redirect:/error?code=401&path=amount-invalid";
                throw new Exceptions("401","Amount Invalid !");
            }
            transferService.approveTransferToPending(transfer);
        }
        else if ("rejected".equalsIgnoreCase(trans.getStatus())) {
            transferService.declineTransfer(transfer);
        }

        logger.info("GET request: Manager approve/decline external transaction requests");

        return "redirect:/merchant/transfers?successAction=true";
    }

    /** Shows a single transfer request after the same ownership checks as above. */
    @GetMapping("/merchant/transfer/{id}")
    public String getTransferRequest(Model model, @PathVariable() UUID id) throws Exceptions {
        Transfer transfer = transferService.getTransferById(id);
        if (transfer == null) {
            //return "redirect:/error?code=404&path=request-invalid";
            throw new Exceptions("404","Invalid Request !");
        }

        // checks if user is authorized for the request to approve
        if (!transfer.getFromAccount().getUser().getEmail().equalsIgnoreCase(userService.getCurrentUser().getEmail())) {
            logger.warn("Transafer made TO non external account");
            //return "redirect:/error?code=401&path=request-unauthorised";
            throw new Exceptions("401"," ");
        }

        if (!transfer.getToAccount().getUser().getRole().equalsIgnoreCase("ROLE_MERCHANT")) {
            logger.warn("Transafer made FROM non merchant account");
            //return "redirect:/error?code=401&path=request-unauthorised";
            throw new Exceptions("401"," ");
        }

        model.addAttribute("transfer", transfer);
        logger.info("GET request: User merchant transfer request by ID");

        return "merchant/approverequests";
    }

    /** Lists pending view-authorization requests addressed to this merchant. */
    @GetMapping("/merchant/request")
    public String getRequest(Model model) throws Exceptions {
        User user = userService.getCurrentUser();
        if (user == null) {
            //return "redirect:/error";
            throw new Exceptions("401"," ");
        }

        model.addAttribute("viewrequests", viewAuthorizationService.getPendingAuthorization(user));

        return "merchant/accessrequests";
    }

    /** Shows one view-authorization request; the current user must be its target. */
    @GetMapping("/merchant/request/view/{id}")
    public String getRequest(@PathVariable UUID id, Model model) throws Exceptions {
        User user = userService.getCurrentUser();
        if (user == null) {
            return "redirect:/login";
        }
        ViewAuthorization authorization = viewAuthorizationService.getAuthorizationById(id);
        if (authorization == null) {
            //return "redirect:/error?code=404";
            throw new Exceptions("404"," ");
        }
        if (authorization.getExternal() != user) {
            //return "redirect:/error?code=401";
            throw new Exceptions("401"," ");
        }

        model.addAttribute("viewrequest", authorization);

        return "merchant/accessrequest_detail";
    }

    /** Approves or rejects a view-authorization request ("approved"/"rejected" only). */
    @PostMapping("/merchant/request/{id}")
    public String getRequests(@PathVariable UUID id, @ModelAttribute ViewAuthorization request, BindingResult bindingResult) throws Exceptions {
        User user = userService.getCurrentUser();
        if (user == null) {
            return "redirect:/login";
        }
        String status = request.getStatus();
        if (status == null || !(status.equals("approved") || status.equals("rejected"))) {
            //return "redirect:/error?code=400";
            throw new Exceptions("400"," ");
        }
        ViewAuthorization authorization = viewAuthorizationService.getAuthorizationById(id);
        if (authorization == null) {
            //return "redirect:/error?code=404";
            throw new Exceptions("404"," ");
        }
        // NOTE(review): reference-equality check; presumably both sides come from the
        // same persistence context — confirm, otherwise this should use equals().
        if (authorization.getExternal() != user) {
            //return "redirect:/error?code=401";
            throw new Exceptions("401"," ");
        }
        authorization.setStatus(status);
        authorization = viewAuthorizationService.approveAuthorization(authorization);

        return "redirect:/merchant/request?successAction=true";
    }

    /** Streams the merchant's account statement as a PDF attachment. */
    @RequestMapping("/merchant/downloadPDF")
    public void downloadPDF(HttpServletRequest request, HttpServletResponse response) throws IOException {
        final ServletContext servletContext = request.getSession().getServletContext();
        final File tempDirectory = (File) servletContext.getAttribute("javax.servlet.context.tempdir");
        final String temperotyFilePath = tempDirectory.getAbsolutePath();
        User user = userService.getCurrentUser();
        String fileName = "account_statement.pdf";
        response.setContentType("application/pdf");
        response.setHeader("Content-disposition", "attachment; filename=" + fileName);
        try {
            // NOTE(review): "\\" is a Windows-only separator — this path will be wrong
            // on Unix hosts; File.separator would be portable. TODO confirm deployment OS.
            pdfService.createStatementPDF(temperotyFilePath + "\\" + fileName, user);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            baos =
pdfService.convertPDFToByteArrayOutputStream(temperotyFilePath + "\\" + fileName); OutputStream os = response.getOutputStream(); baos.writeTo(os); os.flush(); } catch (Exception e1) { e1.printStackTrace(); } } @GetMapping("/merchant/credit-card/create") public String createCreditCard(Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { throw new Exceptions("404", "User Not Found"); } if (creditCardService.getCreditCardDetails(user) != null) { return "redirect:/merchant/credit-card/details"; } logger.info("GET request: create credit card"); return "merchant/creditcard_create"; } @PostMapping("/merchant/credit-card/create") public String createCreditCard(@ModelAttribute CreditCard cc, BindingResult bindingResult)throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { throw new Exceptions("404", "User Not Found"); } if (creditCardService.getCreditCardDetails(user) != null) { return "redirect:/merchant/credit-card/details"; } logger.info("POST request: create credit card"); creditCardService.createCreditCard(user); return "redirect:/merchant/credit-card/details"; } @GetMapping("/merchant/credit-card/details") public String detailCreditCard(Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { throw new Exceptions("404", "User Not Found"); } CreditCard creditCard = creditCardService.getCreditCardDetails(user); if (creditCard == null) { return "redirect:/merchant/credit-card/create"; } model.addAttribute("creditCard", creditCard); logger.info("GET request: credit card detail"); return "merchant/creditcard_detail"; } @GetMapping("/merchant/credit-card/transaction/create") public String createCreditCardTransacttion(Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { throw new Exceptions("404", "User Not Found"); } if (creditCardService.getCreditCardDetails(user) == null) { return "redirect:/merchant/credit-card/create"; } 
model.addAttribute("transaction", new Transaction()); logger.info("GET request: create credit card transaction"); return "merchant/creditcard_transaction_create"; } @PostMapping("/merchant/credit-card/transaction/create") public String createCreditCardTransaction(@ModelAttribute Transaction transaction, BindingResult bindingResult) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { throw new Exceptions("404", "User Not Found"); } CreditCard cc = creditCardService.getCreditCardDetails(user); if (creditCardService.getCreditCardDetails(user) == null) { return "redirect:/merchant/credit-card/create"; } transaction.setType("DEBIT"); transactionFormValidator.validate(transaction, bindingResult); logger.info("POST request: make a payment for credit card"); creditCardService.createCreditCardTransaction(transaction, cc); return "redirect:/merchant/credit-card/details"; } @GetMapping("/merchant/credit-card/transaction") public String getCreditCardTransacttions(Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { throw new Exceptions("404", "User Not Found"); } CreditCard cc = creditCardService.getCreditCardDetails(user); if (cc == null) { return "redirect:/merchant/credit-card/create"; } List<Transaction> transactions = transactionService.getTransactionsByAccount(cc.getAccount()); model.addAttribute("transactions", transactions); logger.info("GET request: get credit card all transactions"); return "merchant/creditcard_transactions"; } @GetMapping("/merchant/credit-card/makepayment") public String createCreditCardMakePayment(Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { return "redirect:/login"; } CreditCard cc = creditCardService.getCreditCardDetails(user); if (cc == null) { return "redirect:/merchant/credit-card/create"; } cc = creditCardService.getDueAmount(cc); model.addAttribute("creditcard", cc); logger.info("GET request: make a payment for credit 
card"); return "merchant/creditcard_transaction_makepayment"; } @PostMapping("/merchant/credit-card/makepayment") public String createCreditCardMakePayment(@ModelAttribute Transaction transaction, BindingResult bindingResult) throws Exceptions { // TODO validate transaction User user = userService.getCurrentUser(); if (user == null) { return "redirect:/login"; } CreditCard cc = creditCardService.getCreditCardDetails(user); if (creditCardService.getCreditCardDetails(user) == null) { return "redirect:/merchant/credit-card/create"; } logger.info("POST request: make a payment for credit card"); transaction = creditCardService.creditCardMakePayment(cc); if (transaction == null) { throw new Exceptions("400", "Bad Request"); } return "redirect:/merchant/credit-card/details"; } @GetMapping("/merchant/credit-card/statement") public String getCreditCardStatements(Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { return "redirect:/login"; } CreditCard cc = creditCardService.getCreditCardDetails(user); if (cc == null) { return "redirect:/merchant/credit-card/create"; } logger.info("GET request: get statements for credit card"); model.addAttribute("statements", cc.getStatements()); return "merchant/creditcard_statements"; } @GetMapping("/merchant/credit-card/statement/{id}") public String getCreditCardStatements(@PathVariable UUID id, Model model) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { return "redirect:/login"; } CreditCard cc = creditCardService.getCreditCardDetails(user); if (creditCardService.getCreditCardDetails(user) == null) { return "redirect:/merchant/credit-card/create"; } // TODO: adds validation of transaction logger.info("GET request: get statements for credit card"); CreditCardStatement statement = creditCardService.getStatementById(cc, id); if (statement == null) { throw new Exceptions("400", "Bad Request"); } model.addAttribute("statement", statement); return 
"merchant/creditcard_statementdetail"; } @GetMapping("/merchant/credit-card/statement/{id}/pdf") public void getCreditCardStatementPdf(@PathVariable UUID id, HttpServletRequest request, HttpServletResponse response) throws Exceptions { User user = userService.getCurrentUser(); if (user == null) { throw new Exceptions("404", "User not found"); } CreditCard cc = creditCardService.getCreditCardDetails(user); if (creditCardService.getCreditCardDetails(user) == null) { throw new Exceptions("404", "Credit Card not found"); } CreditCardStatement statement = creditCardService.getStatementById(cc, id); if (statement == null) { throw new Exceptions("400", "Bad Request"); } final ServletContext servletContext = request.getSession().getServletContext(); final File tempDirectory = (File) servletContext.getAttribute("javax.servlet.context.tempdir"); final String temperotyFilePath = tempDirectory.getAbsolutePath(); String fileName = "statement.pdf"; response.setContentType("application/pdf"); response.setHeader("Content-disposition", "attachment; filename=" + fileName); try { pdfService.createCreditCardStatementPDF(temperotyFilePath + "\\" + fileName, statement); ByteArrayOutputStream baos = new ByteArrayOutputStream(); baos = pdfService.convertPDFToByteArrayOutputStream(temperotyFilePath + "\\" + fileName); OutputStream os = response.getOutputStream(); baos.writeTo(os); os.flush(); } catch (Exception e1) { e1.printStackTrace(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.query.internal;

import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.geode.cache.EntryDestroyedException;
import org.apache.geode.cache.query.NameNotFoundException;
import org.apache.geode.cache.query.QueryInvocationTargetException;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.pdx.JSONFormatter;
import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxSerializationException;
import org.apache.geode.pdx.internal.FieldNotFoundInPdxVersion;
import org.apache.geode.pdx.internal.PdxInstanceImpl;

/**
 * Utility for managing an attribute: resolves a named attribute of a query target object to a
 * public field, a JavaBean getter ({@code getName()}), or a plain method ({@code name()}), and
 * reads its value — either via reflection or directly from a {@link PdxInstance} without
 * deserializing it.
 *
 * @version $Revision: 1.1 $
 */
public class AttributeDescriptor {
  // The attribute name this descriptor resolves (e.g. "price" for getPrice()/price).
  private final String _name;
  // Authorizes implicit method invocations before reflection calls them (query security).
  private final MethodInvocationAuthorizer _methodInvocationAuthorizer;
  /** cache for remembering the correct Member for a class and attribute */
  // Keyed by [targetClass, attributeName]; shared across all descriptors (static).
  private static final ConcurrentMap<List, Member> _localCache = new ConcurrentHashMap();

  public AttributeDescriptor(MethodInvocationAuthorizer methodInvocationAuthorizer, String name) {
    _methodInvocationAuthorizer = methodInvocationAuthorizer;
    _name = name;
  }

  /** Validate whether this attribute <i>can</i> be evaluated for target type */
  public boolean validateReadType(Class targetType) {
    try {
      getReadMember(targetType);
      return true;
    } catch (NameNotFoundException e) {
      // No matching public field or method: the attribute cannot be read on this type.
      return false;
    }
  }

  /**
   * Reads the attribute value from {@code target}.
   *
   * @param target the object to read from; null or UNDEFINED yields UNDEFINED
   * @return the attribute value, or {@link QueryService#UNDEFINED} when unavailable
   * @throws NameNotFoundException when no public field/method matches the attribute
   * @throws QueryInvocationTargetException when the underlying accessor throws
   */
  public Object read(Object target) throws NameNotFoundException, QueryInvocationTargetException {
    if (target == null || target == QueryService.UNDEFINED) {
      return QueryService.UNDEFINED;
    }
    if (target instanceof PdxInstance) {
      // PDX path avoids full deserialization where possible.
      return readPdx((PdxInstance) target);
    }
    // for non pdx objects
    return readReflection(target);
  }

  // used when the resolution of an attribute must be on a superclass
  // instead of the runtime class
  private Object readReflection(Object target)
      throws NameNotFoundException, QueryInvocationTargetException {
    Support.Assert(target != null);
    Support.Assert(target != QueryService.UNDEFINED);
    if (target instanceof Token) {
      // Region tombstones/invalid tokens are treated as undefined, not an error.
      return QueryService.UNDEFINED;
    }

    Class resolutionClass = target.getClass();
    Member m = getReadMember(resolutionClass);
    try {
      if (m instanceof Method) {
        try {
          // Security check precedes every implicit method invocation.
          _methodInvocationAuthorizer.authorizeMethodInvocation((Method) m, target);
          return ((Method) m).invoke(target, (Object[]) null);
        } catch (EntryDestroyedException e) {
          // eat the Exception
          return QueryService.UNDEFINED;
        } catch (IllegalAccessException e) {
          throw new NameNotFoundException(
              String.format(
                  "Method ' %s ' in class ' %s ' is not accessible to the query processor",
                  new Object[] {m.getName(), target.getClass().getName()}),
              e);
        } catch (InvocationTargetException e) {
          // if the target exception is Exception, wrap that,
          // otherwise wrap the InvocationTargetException itself
          Throwable t = e.getTargetException();
          if ((t instanceof EntryDestroyedException)) {
            // eat the exception
            return QueryService.UNDEFINED;
          }
          if (t instanceof Exception)
            throw new QueryInvocationTargetException(t);
          throw new QueryInvocationTargetException(e);
        }
      } else {
        try {
          // Field access needs no method-invocation authorization.
          return ((Field) m).get(target);
        } catch (IllegalAccessException e) {
          throw new NameNotFoundException(
              String.format(
                  "Field ' %s ' in class ' %s ' is not accessible to the query processor",
                  new Object[] {m.getName(), target.getClass().getName()}),
              e);
        } catch (EntryDestroyedException e) {
          return QueryService.UNDEFINED;
        }
      }
    } catch (EntryDestroyedException e) {
      // eat the exception
      return QueryService.UNDEFINED;
    }
  }

  /**
   * Resolves (and caches) the {@link Member} used to read this attribute on {@code targetClass}.
   * Resolution order: public field of the same name, then getter method, then same-name method.
   *
   * @throws NameNotFoundException when nothing public matches
   */
  Member getReadMember(Class targetClass) throws NameNotFoundException {
    // mapping: public field (same name), method (getAttribute()),
    // method (attribute())
    List key = new ArrayList();
    key.add(targetClass);
    key.add(_name);
    Member m = _localCache.computeIfAbsent(key, k -> {
      Member member = getReadField(targetClass);
      return member == null ? getReadMethod(targetClass) : member;
    });

    if (m == null) {
      throw new NameNotFoundException(
          String.format("No public attribute named ' %s ' was found in class %s",
              new Object[] {_name, targetClass.getName()}));
    }
    // override security for nonpublic derived classes with public members
    ((AccessibleObject) m).setAccessible(true);
    return m;
  }

  // Returns the public field named _name, or null when absent.
  private Field getReadField(Class targetType) {
    try {
      return targetType.getField(_name);
    } catch (NoSuchFieldException e) {
      return null;
    }
  }

  // Tries the JavaBean getter first, then a method with the raw attribute name.
  private Method getReadMethod(Class targetType) {
    Method m;
    // Check for a getter method for this _name
    String beanMethod = "get" + _name.substring(0, 1).toUpperCase() + _name.substring(1);
    m = getReadMethod(targetType, beanMethod);

    if (m != null)
      return m;

    return getReadMethod(targetType, _name);
  }

  // Looks up a public no-arg method; on a miss, records the attribute as method-not-found
  // for the class so later PDX reads skip the expensive probe.
  // NOTE(review): the miss is recorded under _name even when only the "getX" probe failed and
  // the plain-name probe may still succeed — confirm this is intentional.
  private Method getReadMethod(Class targetType, String methodName) {
    try {
      return targetType.getMethod(methodName, (Class[]) null);
    } catch (NoSuchMethodException e) {
      updateClassToMethodsMap(targetType.getCanonicalName(), _name);
      return null;
    }
  }

  /**
   * reads field value from a PdxInstance
   *
   * @return the value of the field from PdxInstance
   */
  private Object readPdx(PdxInstance target)
      throws NameNotFoundException, QueryInvocationTargetException {
    if (target instanceof PdxInstanceImpl) {
      PdxInstanceImpl pdxInstance = (PdxInstanceImpl) target;
      // if the field is present in the pdxinstance
      if (pdxInstance.hasField(_name)) {
        // return PdxString if field is a String otherwise invoke readField
        return pdxInstance.getRawField(_name);
      } else {
        // field not found in the pdx instance, look for the field in any of the
        // PdxTypes (versions of the pdxinstance) in the type registry
        String className = pdxInstance.getClassName();

        // don't look further for field or method or reflect on GemFire JSON data
        if (className.equals(JSONFormatter.JSON_CLASSNAME)) {
          return QueryService.UNDEFINED;
        }

        // check if the field was not found previously
        if (!isFieldAlreadySearchedAndNotFound(className, _name)) {
          try {
            return pdxInstance.getDefaultValueIfFieldExistsInAnyPdxVersions(_name, className);
          } catch (FieldNotFoundInPdxVersion e1) {
            // remember the field that is not present in any version to avoid
            // trips to the registry next time
            updateClassToFieldsMap(className, _name);
          }
        }
        // if the field is not present in any of the versions try to
        // invoke implicit method call
        if (!this.isMethodAlreadySearchedAndNotFound(className, _name)) {
          try {
            return readFieldFromDeserializedObject(pdxInstance, target);
          } catch (NameNotFoundException ex) {
            updateClassToMethodsMap(pdxInstance.getClassName(), _name);
            throw ex;
          }
        } else
          return QueryService.UNDEFINED;
      }
    } else {
      // target could be another implementation of PdxInstance like
      // PdxInstanceEnum, in this case getRawField and getCachedOjects methods are
      // not available
      if (((PdxInstance) target).hasField(_name)) {
        return ((PdxInstance) target).getField(_name);
      }
      throw new NameNotFoundException(
          String.format("Field ' %s ' in class ' %s ' is not accessible to the query processor",
              new Object[] {_name, target.getClass().getName()}));
    }
  }

  // Falls back to deserializing the PDX object and reading via reflection; a serialization
  // failure is surfaced as NameNotFoundException (the domain object is not available).
  private Object readFieldFromDeserializedObject(PdxInstanceImpl pdxInstance, Object target)
      throws NameNotFoundException, QueryInvocationTargetException {
    try {
      Object obj = pdxInstance.getCachedObject();
      return readReflection(obj);
    } catch (PdxSerializationException e) {
      throw new NameNotFoundException( // the domain object is not available
          String.format("Field ' %s ' in class ' %s ' is not accessible to the query processor",
              new Object[] {_name, target.getClass().getName()}),
          e);
    }
  }

  // Records a field known to be absent from every PDX version of className.
  // NOTE(review): check-then-put on the shared map is not atomic — confirm concurrent
  // updates are tolerated here (worst case appears to be a lost negative-cache entry).
  private void updateClassToFieldsMap(String className, String field) {
    Map<String, Set<String>> map = DefaultQuery.getPdxClasstofieldsmap();
    Set<String> fields = map.get(className);
    if (fields == null) {
      fields = new HashSet<String>();
      map.put(className, fields);
    }
    fields.add(field);
  }

  // True when a previous lookup already proved this field absent for className.
  private boolean isFieldAlreadySearchedAndNotFound(String className, String field) {
    Set<String> fields = DefaultQuery.getPdxClasstofieldsmap().get(className);
    if (fields != null) {
      return fields.contains(field);
    }
    return false;
  }

  // Records a method known to be absent for className (same non-atomic pattern as above).
  private void updateClassToMethodsMap(String className, String field) {
    Map<String, Set<String>> map = DefaultQuery.getPdxClasstoMethodsmap();
    Set<String> fields = map.get(className);
    if (fields == null) {
      fields = new HashSet<String>();
      map.put(className, fields);
    }
    fields.add(field);
  }

  // True when a previous lookup already proved this method absent for className.
  private boolean isMethodAlreadySearchedAndNotFound(String className, String field) {
    Set<String> fields = DefaultQuery.getPdxClasstoMethodsmap().get(className);
    if (fields != null) {
      return fields.contains(field);
    }
    return false;
  }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package git4idea.config; import com.intellij.dvcs.branch.DvcsSyncSettings; import com.intellij.dvcs.ui.DvcsBundle; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory; import com.intellij.openapi.options.ConfigurableUi; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.ComboBox; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.TextFieldWithBrowseButton; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.VcsBundle; import com.intellij.ui.EnumComboBoxModel; import com.intellij.ui.ListCellRendererWrapper; import com.intellij.ui.components.JBCheckBox; import com.intellij.ui.components.JBLabel; import com.intellij.ui.components.JBTextField; import com.intellij.ui.components.fields.ExpandableTextField; import com.intellij.util.ObjectUtils; import com.intellij.util.execution.ParametersListUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import git4idea.branch.GitBranchIncomingOutgoingManager; import git4idea.i18n.GitBundle; import git4idea.repo.GitRepositoryManager; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.text.NumberFormatter; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.text.NumberFormat; import java.util.List; import java.util.Objects; import static com.intellij.util.containers.ContainerUtil.sorted; /** * Git VCS configuration panel */ public class GitVcsPanel implements 
ConfigurableUi<GitVcsConfigurable.GitVcsSettingsHolder> { private static final String IDEA_SSH_NAME = GitBundle.getString("git.vcs.config.ssh.mode.idea"); // IDEA ssh value private static final String NATIVE_SSH_NAME = GitBundle.getString("git.vcs.config.ssh.mode.native"); // Native SSH value @NotNull private final Project myProject; @NotNull private final GitExecutableManager myExecutableManager; private String myApplicationGitPath; private volatile boolean versionCheckRequested = false; private JButton myTestButton; // Test git executable private JComponent myRootPanel; private TextFieldWithBrowseButton myGitField; private JBCheckBox myProjectGitPathCheckBox; private JComboBox<String> mySSHExecutableComboBox; // Type of SSH executable to use private JCheckBox myAutoUpdateIfPushRejected; private JBCheckBox mySyncControl; private JCheckBox myAutoCommitOnCherryPick; private JCheckBox myAddCherryPickSuffix; private JBCheckBox myWarnAboutCrlf; private JCheckBox myWarnAboutDetachedHead; private JTextField myProtectedBranchesField; private JBLabel myProtectedBranchesLabel; private JComboBox myUpdateMethodComboBox; private JCheckBox myUpdateBranchInfoCheckBox; private JFormattedTextField myBranchUpdateTimeField; private JPanel myBranchTimePanel; private JBLabel mySupportedBranchUpLabel; private JPanel myIncomingOutgoingSettingPanel; private JBCheckBox myPreviewPushOnCommitAndPush; private JBCheckBox myPreviewPushProtectedOnly; private JPanel myPreviewPushProtectedOnlyBorder; public GitVcsPanel(@NotNull Project project, @NotNull GitExecutableManager executableManager) { myProject = project; myExecutableManager = executableManager; mySSHExecutableComboBox.addItem(IDEA_SSH_NAME); mySSHExecutableComboBox.addItem(NATIVE_SSH_NAME); mySSHExecutableComboBox.setSelectedItem(IDEA_SSH_NAME); myTestButton.addActionListener(e -> testExecutable()); myGitField.addBrowseFolderListener(GitBundle.getString("find.git.title"), GitBundle.getString("find.git.description"), project, 
FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor()); myProjectGitPathCheckBox.addActionListener(e -> handleProjectOverrideStateChanged()); if (!project.isDefault()) { final GitRepositoryManager repositoryManager = GitRepositoryManager.getInstance(project); mySyncControl.setVisible(repositoryManager.moreThanOneRoot()); } else { mySyncControl.setVisible(true); } mySyncControl.setToolTipText(DvcsBundle.message("sync.setting.description", "Git")); myProtectedBranchesLabel.setLabelFor(myProtectedBranchesField); myUpdateBranchInfoCheckBox.addItemListener(e -> UIUtil.setEnabled(myBranchTimePanel, myUpdateBranchInfoCheckBox.isSelected(), true)); myPreviewPushOnCommitAndPush.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { updateEnabled(); } }); Insets insets = myPreviewPushProtectedOnly.getBorder().getBorderInsets(myPreviewPushProtectedOnly); myPreviewPushProtectedOnlyBorder.setBorder(JBUI.Borders.emptyLeft( UIUtil.getCheckBoxTextHorizontalOffset(myPreviewPushOnCommitAndPush) - insets.left)); } private void updateEnabled() { myPreviewPushProtectedOnly.setEnabled(myPreviewPushOnCommitAndPush.isSelected()); } private void testExecutable() { String pathToGit = ObjectUtils.notNull(getCurrentExecutablePath(), myExecutableManager.getDetectedExecutable()); new Task.Modal(myProject, GitBundle.getString("git.executable.version.progress.title"), true) { private GitVersion myVersion; @Override public void run(@NotNull ProgressIndicator indicator) { myVersion = myExecutableManager.identifyVersion(pathToGit); } @Override public void onThrowable(@NotNull Throwable error) { GitExecutableProblemsNotifier.showExecutionErrorDialog(error, myProject); } @Override public void onSuccess() { if (myVersion.isSupported()) { Messages .showInfoMessage(myRootPanel, GitBundle.message("git.executable.version.is", myVersion.getPresentation()), GitBundle.getString("git.executable.version.success.title")); } else { 
GitExecutableProblemsNotifier.showUnsupportedVersionDialog(myVersion, myProject); } } }.queue(); } private void handleProjectOverrideStateChanged() { if (!myProjectGitPathCheckBox.isSelected() && !Objects.equals(getCurrentExecutablePath(), myApplicationGitPath)) { switch (Messages.showYesNoCancelDialog(myRootPanel, VcsBundle.getString("executable.project.override.reset.message"), VcsBundle.getString("executable.project.override.reset.title"), VcsBundle.getString("executable.project.override.reset.globalize"), VcsBundle.getString("executable.project.override.reset.revert"), Messages.CANCEL_BUTTON, null)) { case Messages.NO: myGitField.setText(myApplicationGitPath); break; case Messages.CANCEL: myProjectGitPathCheckBox.setSelected(true); break; } } } @Nullable private String getCurrentExecutablePath() { return StringUtil.nullize(myGitField.getText().trim()); } @NotNull @Override public JComponent getComponent() { return myRootPanel; } @Override public void reset(@NotNull GitVcsConfigurable.GitVcsSettingsHolder settings) { GitVcsApplicationSettings applicationSettings = settings.getApplicationSettings(); GitVcsSettings projectSettings = settings.getProjectSettings(); GitSharedSettings sharedSettings = settings.getSharedSettings(); myApplicationGitPath = applicationSettings.getSavedPathToGit(); String projectSettingsPathToGit = projectSettings.getPathToGit(); myGitField.setText(ObjectUtils.coalesce(projectSettingsPathToGit, myApplicationGitPath)); myProjectGitPathCheckBox.setSelected(projectSettingsPathToGit != null); mySSHExecutableComboBox.setSelectedItem(applicationSettings.isUseIdeaSsh() ? 
// NOTE(review): this chunk begins mid-method. The line below is the tail of a ternary
// started above this view (selecting the SSH-executable combo-box entry from the
// application settings); the rest of the method copies persisted settings into the UI.
IDEA_SSH_NAME : NATIVE_SSH_NAME);
    myAutoUpdateIfPushRejected.setSelected(projectSettings.autoUpdateIfPushRejected());
    mySyncControl.setSelected(projectSettings.getSyncSetting() == DvcsSyncSettings.Value.SYNC);
    myAutoCommitOnCherryPick.setSelected(projectSettings.isAutoCommitOnCherryPick());
    myAddCherryPickSuffix.setSelected(projectSettings.shouldAddSuffixToCherryPicksOfPublishedCommits());
    myWarnAboutCrlf.setSelected(projectSettings.warnAboutCrlf());
    myWarnAboutDetachedHead.setSelected(projectSettings.warnAboutDetachedHead());
    myUpdateMethodComboBox.setSelectedItem(projectSettings.getUpdateType());
    // Protected-branch patterns are stored as a list and shown as one colon-joined line.
    myProtectedBranchesField.setText(ParametersListUtil.COLON_LINE_JOINER.fun(sharedSettings.getForcePushProhibitedPatterns()));
    boolean branchInfoSupported = isBranchInfoSupported();
    // Incoming/outgoing branch info needs a recent-enough git; when unsupported the
    // checkbox is cleared AND disabled so the user cannot enable a dead feature.
    myUpdateBranchInfoCheckBox.setSelected(branchInfoSupported && projectSettings.shouldUpdateBranchInfo());
    myUpdateBranchInfoCheckBox.setEnabled(branchInfoSupported);
    myBranchUpdateTimeField.setValue(projectSettings.getBranchInfoUpdateTime());
    myPreviewPushOnCommitAndPush.setSelected(projectSettings.shouldPreviewPushOnCommitAndPush());
    myPreviewPushProtectedOnly.setSelected(projectSettings.isPreviewPushProtectedOnly());
    updateEnabled();
  }

  /**
   * @return true if the git version in use supports incoming/outgoing branch info
   * (delegated to {@code GitVersionSpecialty.INCOMING_OUTGOING_BRANCH_INFO}).
   */
  private boolean isBranchInfoSupported() {
    return GitVersionSpecialty.INCOMING_OUTGOING_BRANCH_INFO.existsIn(myProject);
  }

  /**
   * Compares every persisted setting against the current state of its UI control;
   * true if any of them differ, i.e. the panel has unsaved changes.
   */
  @Override
  public boolean isModified(@NotNull GitVcsConfigurable.GitVcsSettingsHolder settings) {
    GitVcsApplicationSettings applicationSettings = settings.getApplicationSettings();
    GitVcsSettings projectSettings = settings.getProjectSettings();
    GitSharedSettings sharedSettings = settings.getSharedSettings();
    // NOTE: "!a == b" parses as "(!a) == b", which is equivalent to "a != b" for booleans —
    // unidiomatic but correct. The extra "((" grouping around the sync-setting clause is
    // redundant (pure disjunction) and does not change the result.
    return isGitPathModified(applicationSettings, projectSettings) ||
           applicationSettings.isUseIdeaSsh() != IDEA_SSH_NAME.equals(mySSHExecutableComboBox.getSelectedItem()) ||
           !projectSettings.autoUpdateIfPushRejected() == myAutoUpdateIfPushRejected.isSelected() ||
           ((projectSettings.getSyncSetting() == DvcsSyncSettings.Value.SYNC) != mySyncControl.isSelected() ||
            projectSettings.isAutoCommitOnCherryPick() != myAutoCommitOnCherryPick.isSelected() ||
            projectSettings.shouldAddSuffixToCherryPicksOfPublishedCommits() != myAddCherryPickSuffix.isSelected() ||
            projectSettings.warnAboutCrlf() != myWarnAboutCrlf.isSelected() ||
            projectSettings.warnAboutDetachedHead() != myWarnAboutDetachedHead.isSelected() ||
            projectSettings.shouldPreviewPushOnCommitAndPush() != myPreviewPushOnCommitAndPush.isSelected() ||
            projectSettings.isPreviewPushProtectedOnly() != myPreviewPushProtectedOnly.isSelected() ||
            projectSettings.getUpdateType() != myUpdateMethodComboBox.getModel().getSelectedItem() ||
            isUpdateBranchSettingsModified(projectSettings) ||
            // order-insensitive comparison of the protected-branch pattern lists
            !sorted(sharedSettings.getForcePushProhibitedPatterns()).equals(sorted(getProtectedBranchesPatterns())));
  }

  /**
   * True when the git executable path shown in the UI differs from the persisted one.
   * When a project-level override is active, only the project setting matters; otherwise
   * the application-level path is compared AND any stale project override counts as modified.
   */
  private boolean isGitPathModified(@NotNull GitVcsApplicationSettings applicationSettings,
                                    @NotNull GitVcsSettings projectSettings) {
    return myProjectGitPathCheckBox.isSelected()
           ? !Objects.equals(getCurrentExecutablePath(), projectSettings.getPathToGit())
           : !Objects.equals(getCurrentExecutablePath(), applicationSettings.getSavedPathToGit()) ||
             projectSettings.getPathToGit() != null;
  }

  /** Writes every UI control's state back into the settings objects, then re-validates git. */
  @Override
  public void apply(@NotNull GitVcsConfigurable.GitVcsSettingsHolder settings) {
    GitVcsApplicationSettings applicationSettings = settings.getApplicationSettings();
    GitVcsSettings projectSettings = settings.getProjectSettings();
    GitSharedSettings sharedSettings = settings.getSharedSettings();
    if (myProjectGitPathCheckBox.isSelected()) {
      // project-level override: keep the application-level path untouched
      projectSettings.setPathToGit(getCurrentExecutablePath());
    }
    else {
      // application-level path: also clear any project override so it wins everywhere
      myApplicationGitPath = getCurrentExecutablePath();
      applicationSettings.setPathToGit(getCurrentExecutablePath());
      projectSettings.setPathToGit(null);
    }
    applicationSettings.setIdeaSsh(IDEA_SSH_NAME.equals(mySSHExecutableComboBox.getSelectedItem())
                                   ? GitVcsApplicationSettings.SshExecutable.IDEA_SSH
                                   : GitVcsApplicationSettings.SshExecutable.NATIVE_SSH);
    projectSettings.setAutoUpdateIfPushRejected(myAutoUpdateIfPushRejected.isSelected());
    projectSettings.setSyncSetting(mySyncControl.isSelected() ? DvcsSyncSettings.Value.SYNC : DvcsSyncSettings.Value.DONT_SYNC);
    projectSettings.setAutoCommitOnCherryPick(myAutoCommitOnCherryPick.isSelected());
    projectSettings.setAddSuffixToCherryPicks(myAddCherryPickSuffix.isSelected());
    projectSettings.setWarnAboutCrlf(myWarnAboutCrlf.isSelected());
    projectSettings.setWarnAboutDetachedHead(myWarnAboutDetachedHead.isSelected());
    projectSettings.setUpdateType((UpdateMethod)myUpdateMethodComboBox.getSelectedItem());
    projectSettings.setPreviewPushOnCommitAndPush(myPreviewPushOnCommitAndPush.isSelected());
    projectSettings.setPreviewPushProtectedOnly(myPreviewPushProtectedOnly.isSelected());
    // NOTE(review): "setForcePushProhibitedPatters" (sic) is the external API's spelling —
    // cannot be renamed here.
    sharedSettings.setForcePushProhibitedPatters(getProtectedBranchesPatterns());
    applyBranchUpdateInfo(projectSettings);
    validateExecutableOnceAfterClose();
  }

  /**
   * Special method to check executable after it has been changed through settings.
   * The {@code versionCheckRequested} flag debounces repeated apply() calls: it is raised
   * immediately and lowered again on the EDT right after the background check is queued,
   * so at most one check is scheduled per settings-dialog session.
   */
  public void validateExecutableOnceAfterClose() {
    if (!versionCheckRequested) {
      ApplicationManager.getApplication().invokeLater(() -> {
        new Task.Backgroundable(myProject, GitBundle.getString("git.executable.version.progress.title"), true) {
          @Override
          public void run(@NotNull ProgressIndicator indicator) {
            myExecutableManager.testGitExecutableVersionValid(myProject);
          }
        }.queue();
        versionCheckRequested = false;
      }, ModalityState.NON_MODAL);
      versionCheckRequested = true;
    }
  }

  /**
   * Persists the branch-info settings and restarts the incoming/outgoing scheduler
   * if (and only if) those settings actually changed.
   */
  private void applyBranchUpdateInfo(@NotNull GitVcsSettings projectSettings) {
    boolean branchInfoSupported = isBranchInfoSupported();
    myUpdateBranchInfoCheckBox.setEnabled(branchInfoSupported);
    if (!branchInfoSupported) {
      // force the persisted value off when the feature cannot work
      myUpdateBranchInfoCheckBox.setSelected(false);
    }
    if (isUpdateBranchSettingsModified(projectSettings)) {
      projectSettings.setBranchInfoUpdateTime((Integer)myBranchUpdateTimeField.getValue());
      projectSettings.setUpdateBranchInfo(myUpdateBranchInfoCheckBox.isSelected());
      GitBranchIncomingOutgoingManager incomingOutgoingManager = GitBranchIncomingOutgoingManager.getInstance(myProject);
      // stop first, then restart only if the feature is (still) enabled
      incomingOutgoingManager.stopScheduling();
      if (projectSettings.shouldUpdateBranchInfo()) {
        incomingOutgoingManager.startScheduling();
      }
    }
  }

  /** True if either the update interval or the enabled-flag differs from the UI state. */
  private boolean isUpdateBranchSettingsModified(@NotNull GitVcsSettings projectSettings) {
    // the (Integer) cast unboxes the JFormattedTextField value for the int comparison
    return projectSettings.getBranchInfoUpdateTime() != (Integer)myBranchUpdateTimeField.getValue() ||
           projectSettings.shouldUpdateBranchInfo() != myUpdateBranchInfoCheckBox.isSelected();
  }

  /** Parses the colon-separated text field back into a list of branch patterns. */
  @NotNull
  private List<String> getProtectedBranchesPatterns() {
    return ParametersListUtil.COLON_LINE_PARSER.fun(myProtectedBranchesField.getText());
  }

  /** Called by the UI-designer-generated code to build the custom-constructed controls. */
  private void createUIComponents() {
    JBTextField textField = new JBTextField();
    // placeholder shown while the field is empty
    textField.getEmptyText().setText("Auto-detected: " + myExecutableManager.getDetectedExecutable());
    myGitField = new TextFieldWithBrowseButton(textField);
    myProtectedBranchesField = new ExpandableTextField(ParametersListUtil.COLON_LINE_PARSER, ParametersListUtil.COLON_LINE_JOINER);
    myUpdateMethodComboBox = new ComboBox(new EnumComboBoxModel<>(UpdateMethod.class));
    myUpdateMethodComboBox.setRenderer(new ListCellRendererWrapper<UpdateMethod>() {
      @Override
      public void customize(JList list, UpdateMethod value, int index, boolean selected, boolean hasFocus) {
        // e.g. BRANCH_DEFAULT -> "Branch default"
        setText(StringUtil.capitalize(StringUtil.toLowerCase(value.name().replace('_', ' '))));
      }
    });
    myIncomingOutgoingSettingPanel = new JPanel(new BorderLayout());
    myIncomingOutgoingSettingPanel.setVisible(false);
    NumberFormatter numberFormatter = new NumberFormatter(NumberFormat.getIntegerInstance());
    numberFormatter.setMinimum(1);
    numberFormatter.setAllowsInvalid(true);
    myBranchUpdateTimeField = new JFormattedTextField(numberFormatter);
    mySupportedBranchUpLabel = new JBLabel("Supported from Git 2.9+");
    mySupportedBranchUpLabel.setBorder(JBUI.Borders.emptyLeft(2));
  }
}
/*****************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 ****************************************************************************/

package org.apache.xmpbox.schema;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Random;

import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.type.AbstractSimpleProperty;
import org.apache.xmpbox.type.ArrayProperty;
import org.apache.xmpbox.type.Cardinality;
import org.apache.xmpbox.type.TypeMapping;
import org.apache.xmpbox.type.AbstractTypeTester;
import org.apache.xmpbox.type.Types;

/**
 * Reflection-driven helper that exercises a single property of an XMP schema:
 * it verifies default null-ness, value round-trips through the generated
 * getters/setters, and that touching one property never disturbs its siblings.
 */
class SchemaTester extends AbstractTypeTester
{
    private final XMPMetadata metadata;

    private final Class<?> schemaClass;

    private final String fieldName;

    private final Types type;

    private final Cardinality cardinality;

    private final TypeMapping typeMapping;

    /**
     * Creates (and registers on the metadata) a fresh schema instance matching
     * {@code schemaClass}. Note: every call adds a new schema to the metadata.
     */
    XMPSchema getSchema()
    {
        String simpleName = schemaClass.getSimpleName();
        if ("DublinCoreSchema".equals(simpleName))
        {
            return metadata.createAndAddDublinCoreSchema();
        }
        if ("PhotoshopSchema".equals(simpleName))
        {
            return metadata.createAndAddPhotoshopSchema();
        }
        return metadata.createAndAddXMPBasicSchema();
    }

    public SchemaTester(XMPMetadata metadata, Class<?> schemaClass, String fieldName, Types type, Cardinality card)
    {
        this.metadata = metadata;
        this.schemaClass = schemaClass;
        this.typeMapping = metadata.getTypeMapping();
        this.fieldName = fieldName;
        this.type = type;
        this.cardinality = card;
    }

    /** A freshly created schema must report null for the property, both generically and via its accessor. */
    public void testInitializedToNull() throws Exception
    {
        XMPSchema schema = getSchema();
        // generic lookup
        assertNull(schema.getProperty(fieldName));
        // typed accessor — name depends on whether the property is simple or an array
        String getterName = cardinality == Cardinality.Simple
                ? calculateSimpleGetter(fieldName)
                : calculateArrayGetter(fieldName);
        Method getter = schemaClass.getMethod(getterName);
        assertNull(getter.invoke(schema));
    }

    public void testSettingValue() throws Exception
    {
        internalTestSettingValue();
    }

    /** Repeats the simple-value round-trip with randomized sample values. */
    public void testRandomSettingValue() throws Exception
    {
        initializeSeed(new Random());
        for (int iteration = 0; iteration < RAND_LOOP_COUNT; iteration++)
        {
            internalTestSettingValue();
        }
    }

    private void internalTestSettingValue() throws Exception
    {
        // simple properties only
        if (cardinality != Cardinality.Simple)
        {
            return;
        }
        XMPSchema schema = getSchema();
        Object sampleValue = getJavaValue(type);
        AbstractSimpleProperty property = schema.instanciateSimple(fieldName, sampleValue);
        schema.addProperty(property);
        String qualifiedName = getPropertyQualifiedName(fieldName);
        assertNotNull(schema.getProperty(fieldName));
        // every other declared field must remain unset
        for (Field declaredField : getXmpFields(schemaClass))
        {
            String otherQualifiedName = getPropertyQualifiedName(declaredField.get(null).toString());
            if (!otherQualifiedName.equals(qualifiedName))
            {
                assertNull(schema.getProperty(otherQualifiedName));
            }
        }
    }

    public void testSettingValueInArray() throws Exception
    {
        internalTestSettingValueInArray();
    }

    /** Repeats the array-value round-trip with randomized sample values. */
    public void testRandomSettingValueInArray() throws Exception
    {
        initializeSeed(new Random());
        for (int iteration = 0; iteration < RAND_LOOP_COUNT; iteration++)
        {
            internalTestSettingValueInArray();
        }
    }

    private void internalTestSettingValueInArray() throws Exception
    {
        // array properties only
        if (cardinality == Cardinality.Simple)
        {
            return;
        }
        XMPSchema schema = getSchema();
        Object sampleValue = getJavaValue(type);
        AbstractSimpleProperty property = schema.instanciateSimple(fieldName, sampleValue);
        if (cardinality == Cardinality.Seq)
        {
            schema.addUnqualifiedSequenceValue(property.getPropertyName(), property);
        }
        else if (cardinality == Cardinality.Bag)
        {
            schema.addBagValue(property.getPropertyName(), property);
        }
        else
        {
            throw new Exception("Unexpected case in test : " + cardinality.name());
        }
        String qualifiedName = getPropertyQualifiedName(fieldName);
        assertNotNull(schema.getProperty(fieldName));
        // every other declared field must remain unset
        for (Field declaredField : getXmpFields(schemaClass))
        {
            String otherQualifiedName = getPropertyQualifiedName(declaredField.get(null).toString());
            if (!otherQualifiedName.equals(qualifiedName))
            {
                assertNull(schema.getProperty(otherQualifiedName));
            }
        }
    }

    public void testPropertySetterSimple() throws Exception
    {
        internalTestPropertySetterSimple();
    }

    /** Repeats the typed-setter round-trip with randomized sample values. */
    public void testRandomPropertySetterSimple() throws Exception
    {
        initializeSeed(new Random());
        for (int iteration = 0; iteration < RAND_LOOP_COUNT; iteration++)
        {
            internalTestPropertySetterSimple();
        }
    }

    private void internalTestPropertySetterSimple() throws Exception
    {
        // simple properties only
        if (cardinality != Cardinality.Simple)
        {
            return;
        }
        XMPSchema schema = getSchema();
        String setterName = calculateSimpleSetter(fieldName) + "Property";
        Object sampleValue = getJavaValue(type);
        AbstractSimpleProperty asp = typeMapping.instanciateSimpleProperty(schema.getNamespace(),
                schema.getPrefix(), fieldName, sampleValue, type);
        Method setter = schemaClass.getMethod(setterName, type.getImplementingClass());
        setter.invoke(schema, asp);
        // the stored property must carry the value we set
        AbstractSimpleProperty stored = (AbstractSimpleProperty) schema.getProperty(fieldName);
        assertEquals(sampleValue, stored.getValue());
        // and the typed getter must hand back the very same property object
        String getterName = calculateSimpleGetter(fieldName) + "Property";
        Method getter = schemaClass.getMethod(getterName);
        Object result = getter.invoke(schema);
        assertTrue(type.getImplementingClass().isAssignableFrom(result.getClass()));
        assertEquals(asp, result);
    }

    public void testPropertySetterInArray() throws Exception
    {
        internalTestPropertySetterInArray();
    }

    /** Repeats the array add/remove round-trip with randomized sample values. */
    public void testRandomPropertySetterInArray() throws Exception
    {
        initializeSeed(new Random());
        for (int iteration = 0; iteration < RAND_LOOP_COUNT; iteration++)
        {
            internalTestPropertySetterInArray();
        }
    }

    private void internalTestPropertySetterInArray() throws Exception
    {
        // array properties only
        if (cardinality == Cardinality.Simple)
        {
            return;
        }
        XMPSchema schema = getSchema();
        // add a first element through the generated "addXxx" method
        String adderName = "add" + calculateFieldNameForMethod(fieldName);
        Object firstValue = getJavaValue(type);
        Method adder = schemaClass.getMethod(adderName, getJavaType(type));
        adder.invoke(schema, firstValue);
        // retrieve the backing array property
        String getterName = calculateArrayGetter(fieldName) + "Property";
        Method arrayGetter = schemaClass.getMethod(getterName);
        Object retrieved = arrayGetter.invoke(schema);
        assertTrue(retrieved instanceof ArrayProperty);
        ArrayProperty arrayProperty = (ArrayProperty) retrieved;
        // exactly one element so far
        assertEquals(1, arrayProperty.getContainer().getAllProperties().size());
        // a second element grows the container
        Object secondValue = getJavaValue(type);
        adder.invoke(schema, secondValue);
        assertEquals(2, arrayProperty.getContainer().getAllProperties().size());
        // removing the first element shrinks it back
        String removerName = "remove" + calculateFieldNameForMethod(fieldName);
        Method remover = schemaClass.getMethod(removerName, getJavaType(type));
        remover.invoke(schema, firstValue);
        assertEquals(1, arrayProperty.getContainer().getAllProperties().size());
    }

    /**
     * Builds "prefix:name" for the given property name. Note: delegates to
     * {@link #getSchema()}, so each call registers a fresh schema (same side
     * effect as the original implementation).
     */
    protected String getPropertyQualifiedName(String name)
    {
        return getSchema().getPrefix() + ":" + name;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cassandra.utils;

import java.util.Random;

/**
 * Base class for hashtables that use open addressing to resolve collisions.
 * Tracks size, free-slot count, load factor and the auto-compaction schedule;
 * concrete subclasses supply the actual table storage via {@link #capacity()}
 * and {@link #rehash(int)}.
 */
abstract public class FastHash implements Cloneable
{
    /** the current number of occupied slots in the hash. */
    protected transient int size_;

    /** the current number of free slots in the hash. */
    protected transient int free_;

    /** the load above which rehashing occurs. */
    protected static final float DEFAULT_LOAD_FACTOR = 0.5f;

    /**
     * the default initial capacity for the hash table. This is one less than a
     * prime value because one is added to it when searching for a prime
     * capacity to account for the free slot required by open addressing. Thus,
     * the real default capacity is 11.
     */
    protected static final int DEFAULT_INITIAL_CAPACITY = 10;

    /**
     * Determines how full the internal table can become before rehashing is
     * required. This must be a value in the range: 0.0 < loadFactor < 1.0. The
     * default value is 0.5, which is about as large as you can get in open
     * addressing without hurting performance. Cf. Knuth, Volume 3., Chapter 6.
     */
    protected float loadFactor_;

    /**
     * The maximum number of elements allowed without allocating more space.
     */
    protected int maxSize_;

    /**
     * The number of removes that should be performed before an auto-compaction
     * occurs.
     */
    protected int autoCompactRemovesRemaining_;

    /**
     * The auto-compaction factor for the table.
     *
     * @see #setAutoCompactionFactor
     */
    protected float autoCompactionFactor_;

    /**
     * Temporarily suppresses auto-compaction during bulk operations.
     *
     * @see #tempDisableAutoCompaction()
     */
    private boolean autoCompactTemporaryDisable_ = false;

    /**
     * Creates a new <code>THash</code> instance with the default capacity and
     * load factor.
     */
    public FastHash()
    {
        this(DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new <code>THash</code> instance with a prime capacity at or
     * near the specified capacity and with the default load factor.
     *
     * @param initialCapacity
     *            an <code>int</code> value
     */
    public FastHash(int initialCapacity)
    {
        this(initialCapacity, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new <code>THash</code> instance with a prime capacity at or
     * near the minimum needed to hold <tt>initialCapacity</tt> elements with
     * load factor <tt>loadFactor</tt> without triggering a rehash.
     *
     * @param initialCapacity
     *            an <code>int</code> value
     * @param loadFactor
     *            a <code>float</code> value
     */
    public FastHash(int initialCapacity, float loadFactor)
    {
        super();
        loadFactor_ = loadFactor;
        // Through testing, the load factor (especially the default load factor)
        // has been found to be a pretty good starting auto-compaction factor.
        autoCompactionFactor_ = loadFactor;
        setUp((int) Math.ceil(initialCapacity / loadFactor));
    }

    public Object clone()
    {
        try
        {
            return super.clone();
        }
        catch (CloneNotSupportedException cnse)
        {
            // Cloneable is implemented, so this path is unreachable; fail loudly
            // instead of returning null if it ever happens.
            throw new AssertionError(cnse);
        }
    }

    /**
     * Tells whether this set is currently holding any elements.
     *
     * @return a <code>boolean</code> value
     */
    public boolean isEmpty()
    {
        return 0 == size_;
    }

    /**
     * Returns the number of distinct elements in this collection.
     *
     * @return an <code>int</code> value
     */
    public int size()
    {
        return size_;
    }

    /**
     * @return the current physical capacity of the hash table.
     */
    abstract protected int capacity();

    /**
     * Ensure that this hashtable has sufficient capacity to hold
     * <tt>desiredCapacity</tt> <b>additional</b> elements without
     * requiring a rehash. This is a tuning method you can call
     * before doing a large insert.
     *
     * @param desiredCapacity an <code>int</code> value
     */
    public void ensureCapacity(int desiredCapacity)
    {
        if (desiredCapacity > (maxSize_ - size()))
        {
            // FIX: the total element count (desiredCapacity + size()) must be divided
            // by the load factor; previously only size() was divided, which
            // under-estimated the required capacity (cf. compact(), which uses
            // size() / loadFactor_ for size() elements).
            rehash(PrimeFinder.nextPrime((int) Math.ceil((desiredCapacity + size()) / loadFactor_) + 1));
            computeMaxSize(capacity());
        }
    }

    /**
     * Compresses the hashtable to the minimum prime size (as defined by
     * PrimeFinder) that will hold all of the elements currently in the table.
     * If you have done a lot of <tt>remove</tt> operations and plan to do a
     * lot of queries or insertions or iteration, it is a good idea to invoke
     * this method. Doing so will accomplish two things:
     *
     * <ol>
     * <li> You'll free memory allocated to the table but no longer needed
     * because of the remove()s.</li>
     *
     * <li> You'll get better query/insert/iterator performance because there
     * won't be any <tt>REMOVED</tt> slots to skip over when probing for
     * indices in the table.</li>
     * </ol>
     */
    public void compact()
    {
        // need at least one free spot for open addressing
        rehash(PrimeFinder.nextPrime((int) Math.ceil(size() / loadFactor_) + 1));
        computeMaxSize(capacity());

        // If auto-compaction is enabled, re-determine the compaction interval
        if (autoCompactionFactor_ != 0)
        {
            computeNextAutoCompactionAmount(size());
        }
    }

    /**
     * The auto-compaction factor controls whether and when a table performs a
     * {@link #compact} automatically after a certain number of remove
     * operations. If the value is non-zero, the number of removes that need to
     * occur for auto-compaction is the size of table at the time of the
     * previous compaction (or the initial capacity) multiplied by this factor.
     * <p>
     * Setting this value to zero will disable auto-compaction.
     *
     * @param factor the new auto-compaction factor; must be &gt;= 0
     * @throws IllegalArgumentException if {@code factor} is negative
     */
    public void setAutoCompactionFactor(float factor)
    {
        if (factor < 0)
        {
            throw new IllegalArgumentException("Factor must be >= 0: " + factor);
        }
        autoCompactionFactor_ = factor;
    }

    /**
     * @see #setAutoCompactionFactor
     */
    public float getAutoCompactionFactor()
    {
        return autoCompactionFactor_;
    }

    /**
     * This simply calls {@link #compact compact}. It is included for symmetry
     * with other collection classes. Note that the name of this method is
     * somewhat misleading (which is why we prefer <tt>compact</tt>) as the
     * load factor may require capacity above and beyond the size of this
     * collection.
     *
     * @see #compact
     */
    public final void trimToSize()
    {
        compact();
    }

    /**
     * Delete the record at <tt>index</tt>. Reduces the size of the
     * collection by one.
     *
     * @param index
     *            an <code>int</code> value
     */
    protected void removeAt(int index)
    {
        size_--;

        // If auto-compaction is enabled, see if we need to compact
        if (autoCompactionFactor_ != 0)
        {
            autoCompactRemovesRemaining_--;

            if (!autoCompactTemporaryDisable_ && autoCompactRemovesRemaining_ <= 0)
            {
                // Do the compact
                // NOTE: this will cause the next compaction interval to be
                // calculated
                compact();
            }
        }
    }

    /**
     * Empties the collection. O(1): only resets the counters; slot contents are
     * left for the subclass to reclaim.
     */
    public void clear()
    {
        size_ = 0;
        free_ = capacity();
    }

    /**
     * initializes the hashtable to a prime capacity which is at least
     * <tt>initialCapacity + 1</tt>.
     *
     * @param initialCapacity
     *            an <code>int</code> value
     * @return the actual capacity chosen
     */
    protected int setUp(int initialCapacity)
    {
        int capacity;

        capacity = PrimeFinder.nextPrime(initialCapacity);
        computeMaxSize(capacity);
        computeNextAutoCompactionAmount(initialCapacity);
        return capacity;
    }

    /**
     * Rehashes the set.
     *
     * @param newCapacity
     *            an <code>int</code> value
     */
    protected abstract void rehash(int newCapacity);

    /**
     * Temporarily disables auto-compaction. MUST be followed by calling
     * {@link #reenableAutoCompaction}.
     */
    protected void tempDisableAutoCompaction()
    {
        autoCompactTemporaryDisable_ = true;
    }

    /**
     * Re-enable auto-compaction after it was disabled via
     * {@link #tempDisableAutoCompaction()}.
     *
     * @param check_for_compaction
     *            True if compaction should be performed if needed before
     *            returning. If false, no compaction will be performed.
     */
    protected void reenableAutoCompaction(boolean check_for_compaction)
    {
        autoCompactTemporaryDisable_ = false;

        if (check_for_compaction && autoCompactRemovesRemaining_ <= 0 && autoCompactionFactor_ != 0)
        {
            // Do the compact
            // NOTE: this will cause the next compaction interval to be
            // calculated
            compact();
        }
    }

    /**
     * Computes the value of maxSize. There will always be at least one free
     * slot required.
     *
     * @param capacity
     *            an <code>int</code> value
     */
    private void computeMaxSize(int capacity)
    {
        // need at least one free slot for open addressing
        maxSize_ = Math.min(capacity - 1, (int) Math.floor(capacity * loadFactor_));
        free_ = capacity - size_; // reset the free element count
    }

    /**
     * Computes the number of removes that need to happen before the next
     * auto-compaction will occur.
     */
    private void computeNextAutoCompactionAmount(int size)
    {
        if (autoCompactionFactor_ != 0)
        {
            autoCompactRemovesRemaining_ = Math.round(size * autoCompactionFactor_);
        }
    }

    /**
     * After an insert, this hook is called to adjust the size/free values of
     * the set and to perform rehashing if necessary.
     *
     * @param usedFreeSlot true if the insert consumed a FREE slot (as opposed
     *            to reusing a REMOVED slot)
     */
    protected final void postInsertHook(boolean usedFreeSlot)
    {
        if (usedFreeSlot)
        {
            free_--;
        }

        // rehash whenever we exhaust the available space in the table
        if (++size_ > maxSize_ || free_ == 0)
        {
            // choose a new capacity suited to the new state of the table
            // if we've grown beyond our maximum size, double capacity;
            // if we've exhausted the free spots, rehash to the same capacity,
            // which will free up any stale removed slots for reuse.
            int newCapacity = size_ > maxSize_ ? PrimeFinder.nextPrime(capacity() << 1) : capacity();
            rehash(newCapacity);
            computeMaxSize(capacity());
        }
    }

    /** @return double the current physical capacity (the growth target). */
    protected int calculateGrownCapacity()
    {
        return capacity() << 1;
    }
}// THash
// Copyright 2012 Digipom Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//  http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.digipom.calculator.logic;

import static com.digipom.calculator.logic.Calculator.ExpressionState.DISPLAY;
import static com.digipom.calculator.logic.Calculator.ExpressionState.EDIT;
import static com.digipom.calculator.logic.Calculator.ExpressionState.ERROR;

import java.math.BigDecimal;
import java.util.EnumMap;

import android.content.Context;
import android.util.Log;

import com.digipom.android.library.evaluator.BigDecimalPostfixEvaluator;
import com.digipom.android.library.evaluator.builder.ExpressionBuilder;
import com.digipom.android.library.evaluator.exception.ParseException;
import com.digipom.android.library.evaluator.lexer.Operator;
import com.digipom.android.library.evaluator.lexer.Parenthesis;
import com.digipom.android.library.evaluator.lexer.PredefinedFunction;
import com.digipom.calculator.R;
import com.digipom.calculator.config.LoggerConfig;

/**
 * Core calculator state machine. Each key press is dispatched to a {@link Command};
 * commands mutate the shared {@link InputBuffer}, the {@link Memory}, and three pieces
 * of mode state: {@code digitState} (whether the next digit is input, a recall slot, or
 * a store slot), {@code inputMode} (normal vs. memory browsing), and
 * {@code modeDataModified} (whether the buffer diverged from the selected memory slot).
 */
public class Calculator {
	private static final String TAG = "Calculator";
	private final Context context;

	/** Calculator internals. */
	private final InputBuffer inputBuffer = new InputBuffer();
	private final Memory memory = new Memory();

	/** Command definitions. */
	// index == digit value, so digitCommands[7] handles the "7" key
	private final Command[] digitCommands = new Command[] { new DigitCommand(0), new DigitCommand(1),
			new DigitCommand(2), new DigitCommand(3), new DigitCommand(4), new DigitCommand(5), new DigitCommand(6),
			new DigitCommand(7), new DigitCommand(8), new DigitCommand(9) };
	private final Command addCommand = new OperatorCommand(Operator.ADD);
	private final Command subtractCommand = new OperatorCommand(Operator.SUBTRACT);
	private final Command multiplyCommand = new OperatorCommand(Operator.MULTIPLY);
	private final Command divideCommand = new OperatorCommand(Operator.DIVIDE);
	private final Command powerCommand = new OperatorCommand(Operator.POWER);
	private final Command openParenthesisCommand = new ParenthesisCommand(Parenthesis.OPEN);
	private final Command closeParenthesisCommand = new ParenthesisCommand(Parenthesis.CLOSE);
	private final Command equalsCommand = new EqualsCommand();
	private final Command sqrtCommand = new FunctionCommand(PredefinedFunction.SQRT);
	private final Command xSquaredCommand = new XSquaredCommand();
	private final Command lnCommand = new FunctionCommand(PredefinedFunction.LN);
	private final Command dotCommand = new DotCommand();
	private final Command plusMinusCommand = new PlusMinusCommand();
	private final Command deleteCommand = new DeleteCommand();
	private final Command selectMemModeCommand = new SwitchModeCommand(InputMode.MEMORY);
	private final Command acCommand = new AcCommand();
	private final Command ceCommand = new CeCommand();
	private final Command stoCommand = new StoCommand();
	private final Command rclCommand = new RclCommand();

	/** States and Input Modes. */
	// RECALL/STORE mean the NEXT digit key picks a memory slot instead of typing a digit.
	enum DigitState {
		DEFAULT, RECALL, STORE
	}

	enum InputMode {
		NORMAL, MEMORY
	}

	private final EnumMap<InputMode, AbstractInputMode> inputModes = new EnumMap<InputMode, AbstractInputMode>(
			InputMode.class);
	private DigitState digitState = DigitState.DEFAULT;
	private InputMode inputMode = InputMode.NORMAL;

	// TODO I'm getting there, but this thing is jacked. Fix it.
	// Basically anything that changed the input buffer until enter, up, down, or CE is pressed should set this to true.
	private boolean modeDataModified;

	/** Constructor. */
	public Calculator(Context context) {
		this.context = context;
		inputModes.put(InputMode.NORMAL, new NormalInputMode());
		inputModes.put(InputMode.MEMORY, new MemoryInputMode());
	}

	/** Input buffer methods. */
	// " = " is appended only when browsing an unmodified memory slot (shows stored value).
	public String getModeHeader() {
		return inputModes.get(inputMode).getHeader() + (inputMode != InputMode.NORMAL && !modeDataModified ? " = " : "");
	}

	public String getExpression() {
		return inputBuffer.getExpression();
	}

	/** Arithmetic commands. */
	public void selectAdd() {
		addCommand.execute();
	}

	public void selectSubtract() {
		subtractCommand.execute();
	}

	public void selectMultiply() {
		multiplyCommand.execute();
	}

	public void selectDivide() {
		divideCommand.execute();
	}

	public void selectYPowX() {
		powerCommand.execute();
	}

	public void selectLeftParenthesis() {
		openParenthesisCommand.execute();
	}

	public void selectRightParenthesis() {
		closeParenthesisCommand.execute();
	}

	public void selectEquals() {
		equalsCommand.execute();
	}

	/** Functions. */
	public void selectSqrtX() {
		sqrtCommand.execute();
	}

	public void selectXSquared() {
		xSquaredCommand.execute();
	}

	public void selectLn() {
		lnCommand.execute();
	}

	/** Digit commands. */
	public void selectDigit(int digit) {
		digitCommands[digit].execute();
	}

	public void selectDecimal() {
		dotCommand.execute();
	}

	public void selectPlusMinus() {
		plusMinusCommand.execute();
	}

	/** Input commands. */
	public void selectRightArrow() {
		deleteCommand.execute();
	}

	/** InputMode commands. */
	// Enter stores the (validated) buffer into the current mode's slot.
	public void selectEnter() {
		inputModes.get(inputMode).store(inputBuffer.validateExpressionAndGet());
		modeDataModified = false;
	}

	// Up/down browse the mode's slots and load the selected slot into the buffer.
	public void selectUpArrow() {
		inputModes.get(inputMode).selectNext();
		inputBuffer.setExpression(inputModes.get(inputMode).get());
		modeDataModified = false;
	}

	public void selectDownArrow() {
		inputModes.get(inputMode).selectPrevious();
		inputBuffer.setExpression(inputModes.get(inputMode).get());
		modeDataModified = false;
	}

	public void selectMemMode() {
		selectMemModeCommand.execute();
	}

	/** State commands. */
	public void selectAc() {
		acCommand.execute();
	}

	public void selectCe() {
		ceCommand.execute();
	}

	/** Memory commands. */
	public void selectSto() {
		stoCommand.execute();
	}

	public void selectRcl() {
		rclCommand.execute();
	}

	/** Commands */
	// Template: subclasses implement doCommand(); execute() is a no-op in the ERROR
	// state unless canRunInErrorState, and by default resets digitState afterwards
	// (STO/RCL opt out so the NEXT key press is interpreted as a slot number).
	abstract class Command {
		boolean resetToDefaultDigitStateAfterExecute = true;
		boolean canRunInErrorState = false;

		final void execute() {
			if (canRunInErrorState || !(inputBuffer.state == ERROR)) {
				doCommand();

				if (resetToDefaultDigitStateAfterExecute) {
					digitState = DigitState.DEFAULT;
				}
			}
		}

		protected abstract void doCommand();
	}

	// NOTE(review): appears unused in this file — possibly a leftover placeholder.
	abstract class ExpressionModifyingCommand {

	}

	// A digit key: types a digit, or picks a memory slot when in RECALL/STORE state.
	class DigitCommand extends Command {
		final int digit;

		DigitCommand(int digit) {
			this.digit = digit;
		}

		@Override
		protected void doCommand() {
			switch (digitState) {
			case DEFAULT:
				inputBuffer.appendDigit(digit);
				break;
			case RECALL:
				inputBuffer.setExpression(memory.readExpressionFromStore(digit));
				break;
			case STORE:
				memory.addExpressionToStore(digit, inputBuffer.validateExpressionAndGet());
				break;
			}
		}
	}

	class OperatorCommand extends Command {
		final Operator operator;

		OperatorCommand(Operator operator) {
			this.operator = operator;
		}

		@Override
		protected void doCommand() {
			inputBuffer.appendOperator(operator);
		}
	}

	class ParenthesisCommand extends Command {
		final Parenthesis parenthesis;

		ParenthesisCommand(Parenthesis parenthesis) {
			this.parenthesis = parenthesis;
		}

		@Override
		protected void doCommand() {
			inputBuffer.appendParenthesis(parenthesis);
		}
	}

	// "=": evaluates the buffer. Skips evaluation when validation altered the input
	// (i.e. the expression was incomplete); any parse/arithmetic failure -> ERROR state.
	class EqualsCommand extends Command {
		@Override
		protected void doCommand() {
			try {
				String input = inputBuffer.getExpression();

				if (input.length() > 0) {
					String validatedInput = inputBuffer.validateExpressionAndGet();

					if (input.equals(validatedInput)) {
						final BigDecimal result = new BigDecimalPostfixEvaluator(input).evaluate();
						memory.addAnswer(result);
						inputBuffer.setExpression(result.toPlainString());
					}
				}
			} catch (ParseException pe) {
				if (LoggerConfig.ON) {
					Log.v(TAG, pe.toString(), pe);
				}

				inputBuffer.enterErrorState();
			} catch (RuntimeException re) {
				if (LoggerConfig.ON) {
					Log.w(TAG, re);
				}

				inputBuffer.enterErrorState();
			}
		}
	}

	class FunctionCommand extends Command {
		final PredefinedFunction function;

		FunctionCommand(PredefinedFunction function) {
			this.function = function;
		}

		@Override
		protected void doCommand() {
			inputBuffer.appendFunction(function);
		}
	}

	class XSquaredCommand extends Command {
		@Override
		protected void doCommand() {
			inputBuffer.appendXSquared();
		}
	}

	class DotCommand extends Command {
		@Override
		protected void doCommand() {
			inputBuffer.appendDecimal();
		}
	}

	class PlusMinusCommand extends Command {
		@Override
		protected void doCommand() {
			inputBuffer.togglePlusMinus();
		}
	}

	class DeleteCommand extends Command {
		@Override
		protected void doCommand() {
			inputBuffer.deleteElement();
		}
	}

	// Switches input mode and loads that mode's current slot into the buffer.
	class SwitchModeCommand extends Command {
		final InputMode mode;

		SwitchModeCommand(InputMode mode) {
			this.mode = mode;
		}

		@Override
		protected void doCommand() {
			Calculator.this.inputMode = mode;
			inputBuffer.setExpression(inputModes.get(mode).get());
			modeDataModified = false;
		}
	}

	// AC: full reset — answers, buffer, mode, and digit state. Works even in ERROR.
	class AcCommand extends Command {
		AcCommand() {
			canRunInErrorState = true;
		}

		@Override
		protected void doCommand() {
			memory.clearAnswers();
			inputBuffer.clear();
			inputMode = InputMode.NORMAL;
			digitState = DigitState.DEFAULT;
		}
	}

	// CE: clears an error, OR reverts unsaved memory-mode edits, OR leaves memory
	// mode, OR clears the buffer — in that priority order. Works even in ERROR.
	class CeCommand extends Command {
		CeCommand() {
			canRunInErrorState = true;
		}

		@Override
		protected void doCommand() {
			if (inputBuffer.state == ERROR) {
				inputBuffer.clear();
			} else if (inputMode != InputMode.NORMAL) {
				if (modeDataModified) {
					// discard the edit: reload the stored slot value
					inputBuffer.setExpression(inputModes.get(inputMode).get());
					modeDataModified = false;
				} else {
					inputMode = InputMode.NORMAL;
				}
			} else {
				inputBuffer.clear();
			}

			digitState = DigitState.DEFAULT;
		}
	}

	// STO arms STORE mode: the next digit key picks the destination slot.
	class StoCommand extends Command {
		StoCommand() {
			resetToDefaultDigitStateAfterExecute = false;
		}

		@Override
		protected void doCommand() {
			digitState = DigitState.STORE;
		}
	}

	// RCL arms RECALL mode: the next digit key picks the source slot.
	class RclCommand extends Command {
		RclCommand() {
			resetToDefaultDigitStateAfterExecute = false;
		}

		@Override
		protected void doCommand() {
			digitState = DigitState.RECALL;
		}
	}

	/** Modes. */
	// Strategy per InputMode: header text, slot storage, and slot navigation.
	abstract class AbstractInputMode {
		abstract String getHeader();

		abstract void store(String toStore);

		abstract String get();

		abstract void selectNext();

		abstract void selectPrevious();
	}

	class NormalInputMode extends AbstractInputMode {
		@Override
		String getHeader() {
			return "";
		}

		@Override
		void store(String toStore) {
			// No-op
		}

		@Override
		String get() {
			return "";
		}

		@Override
		void selectNext() {
			// No-op
		}

		@Override
		void selectPrevious() {
			// No-op
		}
	}

	// Browses the memory store; selection wraps around at both ends.
	class MemoryInputMode extends AbstractInputMode {
		int selectedSlot = 0;

		@Override
		String getHeader() {
			return "M" + selectedSlot;
		}

		@Override
		void store(String toStore) {
			memory.addExpressionToStore(selectedSlot, toStore);
		}

		@Override
		String get() {
			return memory.readExpressionFromStore(selectedSlot);
		}

		@Override
		void selectNext() {
			selectedSlot++;

			if (selectedSlot > memory.getStoreSize() - 1) {
				selectedSlot = 0;
			}
		}

		@Override
		void selectPrevious() {
			selectedSlot--;

			if (selectedSlot < 0) {
				selectedSlot = memory.getStoreSize() - 1;
			}
		}
	}

	// DISPLAY: buffer shows a result (next digit replaces it); EDIT: user is typing;
	// ERROR: latched until cleared by AC/CE.
	enum ExpressionState {
		DISPLAY, EDIT, ERROR
	}

	/** Input buffer helper */
	// TODO Move to separate class to clean up the way that the error states are being used a bit.
	class InputBuffer {
		private final ExpressionBuilder expressionBuilder = new ExpressionBuilder();
		private final StringBuilder builder = new StringBuilder();
		ExpressionState state = DISPLAY;

		void enterErrorState() {
			state = ERROR;
		}

		String getExpression() {
			if (state == ERROR) {
				return context.getString(R.string.error);
			} else {
				return expressionBuilder.toString();
			}
		}

		// Returns a syntactically completed expression (builder may auto-close it);
		// also drops back to DISPLAY state.
		String validateExpressionAndGet() {
			if (state == ERROR) {
				return context.getString(R.string.error);
			} else {
				state = DISPLAY;
				return expressionBuilder.build();
			}
		}

		void setExpression(String expression) {
			state = DISPLAY;
			expressionBuilder.setExpression(expression);
			modeDataModified = true;
		}

		void clear() {
			state = DISPLAY;
			expressionBuilder.clear();
			modeDataModified = false;
		}

		// Enter EDIT keeping the displayed expression (used by operators/functions
		// so they can chain onto a previous result). Returns false in ERROR state.
		private boolean enterEdit() {
			if (state == ERROR) {
				return false;
			} else {
				if (state == DISPLAY) {
					state = EDIT;
					modeDataModified = true;
				}

				return true;
			}
		}

		// Enter EDIT discarding the displayed expression (used by digits/decimal/
		// parens so typing after "=" starts fresh). Returns false in ERROR state.
		private boolean enterEditAndClearIfNecessary() {
			if (state == ERROR) {
				return false;
			} else {
				if (state == DISPLAY) {
					expressionBuilder.clear();
					state = EDIT;
					modeDataModified = true;
				}

				return true;
			}
		}

		void appendOperator(Operator operator) {
			if (enterEdit()) {
				expressionBuilder.appendOperator(operator);
			}
		}

		void appendXSquared() {
			if (enterEdit()) {
				expressionBuilder.appendXSquared();
			}
		}

		// Applying a function to a DISPLAYed result wraps the whole expression:
		// "x" -> "fn(x)". While editing, the function is just appended.
		void appendFunction(PredefinedFunction function) {
			boolean shouldWrapOldExpression = state == DISPLAY && !expressionBuilder.isEmpty();

			if (enterEdit()) {
				if (shouldWrapOldExpression) {
					builder.setLength(0);
					builder.append(function.toString() + Parenthesis.OPEN.toString());
					builder.append(expressionBuilder.toString());
					builder.append(Parenthesis.CLOSE);
					expressionBuilder.setExpression(builder.toString());
				} else {
					expressionBuilder.appendFunction(function);
				}
			}
		}

		void togglePlusMinus() {
			if (enterEdit()) {
				expressionBuilder.togglePlusMinus();
			}
		}

		void appendDigit(int digit) {
			if (enterEditAndClearIfNecessary()) {
				expressionBuilder.appendDigit(digit);
			}
		}

		void appendDecimal() {
			if (enterEditAndClearIfNecessary()) {
				expressionBuilder.appendDecimal();
			}
		}

		void appendParenthesis(Parenthesis parenthesis) {
			if (enterEditAndClearIfNecessary()) {
				expressionBuilder.appendParenthesis(parenthesis);
			}
		}

		void deleteElement() {
			if (enterEditAndClearIfNecessary()) {
				expressionBuilder.deleteElement();
			}
		}
	}
}
package gov.va.cpe.vpr.web; import gov.va.cpe.vpr.Observation; import gov.va.cpe.vpr.PatientAlert; import gov.va.cpe.vpr.UidUtils; import gov.va.cpe.vpr.frameeng.*; import gov.va.cpe.vpr.pom.*; import gov.va.cpe.vpr.pom.jds.JdsOperations; import gov.va.cpe.vpr.termeng.ITermEng; import gov.va.hmp.healthtime.PointInTime; import gov.va.hmp.vista.rpc.RpcOperations; import gov.va.hmp.web.WebUtils; import gov.va.hmp.web.servlet.mvc.ParameterMap; import gov.va.hmp.web.servlet.view.ModelAndViewFactory; import org.apache.commons.lang.StringEscapeUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.ModelAndView; import javax.servlet.http.HttpServletRequest; import java.util.*; @Controller public class FrameController { private IFrameRegistry registry; private IFrameRunner runner; private IGenericPatientObjectDAO dao; private IPatientDAO patdao; private JdsOperations tpl; private RpcOperations rpcTemplate; private ITermEng eng; @Autowired public void setRegistry(IFrameRegistry registry) { this.registry = registry; } @Autowired public void setRunner(IFrameRunner runner) { this.runner = runner; } @Autowired public void setDao(IGenericPatientObjectDAO dao) { this.dao = dao; } @Autowired public void setPatdao(IPatientDAO patdao) { this.patdao = patdao; } @Autowired public void setTpl(JdsOperations tpl) { this.tpl = tpl; } @Autowired public void setRpcTemplate(RpcOperations rpcTemplate) { this.rpcTemplate = rpcTemplate; } @Autowired public void setEng(ITermEng eng) { this.eng = eng; } @RequestMapping(value = {"/frame/invoke/{entryPoint}"}) public ModelAndView invoke(@PathVariable String entryPoint, HttpServletRequest req, @RequestParam(value = "uid", required = false) String uid) throws Frame.FrameInitException, Frame.FrameExecException { return exec(uid, null, entryPoint, null, 
req); } @RequestMapping(value = {"/frame/call/{frameID}"}) public ModelAndView call(@PathVariable String frameID, HttpServletRequest req, @RequestParam(value = "uid", required = false) String uid) throws Frame.FrameInitException, Frame.FrameExecException { return exec(uid, frameID, null, null, req); } /** * Primary frame invoke/exec/call method indentded to be used as a web service */ @RequestMapping(value = "/frame/exec", method = RequestMethod.GET) public ModelAndView exec(@RequestParam(required = false) String uid, @RequestParam(required = false) String frameID, @RequestParam(required = false) String entryPoint, @RequestParam(required = false) String mode, HttpServletRequest req) throws Frame.FrameExecException, Frame.FrameInitException { Map params = WebUtils.extractGroupAndSortParams(req); Class clazz = UidUtils.getDomainClassByUid(uid); IPOMObject obj = (StringUtils.hasText(uid) && clazz != null) ? dao.findByUID(clazz, uid) : patdao.findByPid(uid); if (!StringUtils.hasText((String) params.get("pid")) && obj != null && obj instanceof IPatientObject) params.put("pid", ((IPatientObject) obj).getPid()); if (frameID == null && entryPoint == null) { throw new BadRequestException("You must specify either 'frameID' or 'entryPoint'"); } // Create and run the event/job IFrameEvent evt = StringUtils.hasText(frameID) ? 
new CallEvent(frameID, obj, params) : new IFrameEvent.InvokeEvent(entryPoint, obj, params); FrameJob job = runner.exec(evt); // return appropriate view if (mode == null || mode.equalsIgnoreCase("json")) { LinkedHashMap<String, Object> model = new LinkedHashMap<>(); model.put("actions", job.getActions()); LinkedHashMap<Object, Object> frames = new LinkedHashMap<>(); for (IFrame f : job.getFrames()) { frames.put(f.getID(), f.getName()); } model.put("frames", frames); return ModelAndViewFactory.contentNegotiatingModelAndView(model); } else { LinkedHashMap<String, Object> map = new LinkedHashMap<>(); map.put("dao", dao); map.put("patdao", patdao); map.put("job", job); return new ModelAndView(mode, map); } } /** * There are 2 ways an alert might want to be rendered. * <p/> * 1) a stored alert (exists in the cache under the specified UID) 2) a generated alert (does not exist, but should * have the same fields) * * @param request * @return */ @RequestMapping(value = "/frame/alert") public ModelAndView renderAlert(HttpServletRequest request, @RequestBody(required = false) String alertBody) { ParameterMap params = new ParameterMap(request); String uid = (String) params.get("uid"); PatientAlert alert = StringUtils.hasText(uid) ? 
dao.findByUID(PatientAlert.class, uid) : null; String frameID = (String) params.get("frameID"); IFrame frame = registry.findByID(frameID); ArrayList<Object> links = new ArrayList<Object>(); if (alert == null && StringUtils.hasText(alertBody)) { alert = POMUtils.newInstance(PatientAlert.class, alertBody); } if (alert != null) { for (Map m : alert.getLinks()) { String linkUid = (String) m.get("uid"); if (StringUtils.hasText(linkUid)) links.add(dao.findByUID(linkUid)); } } LinkedHashMap<String, Object> map = new LinkedHashMap<>(); map.put("params", params); map.put("alert", alert); map.put("frame", frame); map.put("links", links); return new ModelAndView("/frame/alert", map); } @RequestMapping(value = "/frame/info/{uid}") public ModelAndView renderInfo(@PathVariable(value = "uid") String uid) { IFrame frame = registry.findByID(uid); if (frame == null) { throw new BadRequestException("unknown frame uid: " + uid); } LinkedHashMap<String, IFrame> map = new LinkedHashMap<String, IFrame>(); map.put("frame", frame); return new ModelAndView("/frame/info", map); } @RequestMapping(value = "/frame/info") public ModelAndView renderInfo2(@RequestParam(value = "uid") String uid) { IFrame frame = registry.findByID(uid); if (frame == null) { throw new BadRequestException("unknown frame uid: " + uid); } LinkedHashMap<String, IFrame> map = new LinkedHashMap<>(); map.put("frame", frame); return new ModelAndView("/frame/info", map); } @RequestMapping(value = "/frame/goal/{id}/{pid}") public ModelAndView renderGoal(@PathVariable(value = "id") String id, @PathVariable(value = "pid") String pid) { LinkedHashMap<String, Object> map = new LinkedHashMap<>(); map.put("pid", pid); map.put("dao", dao); map.put("patdao", patdao); map.put("rpc", rpcTemplate); return new ModelAndView("/frame/" + id, map); } @ResponseBody @RequestMapping(value = "/frame/param/delete/{frame}") public String delParam(@PathVariable String frame, @RequestParam(value = "pid") String pid) { String uid = "urn:va:::frame:" 
+ frame; tpl.delete("/vpr/" + pid + "/" + uid); return "Deleted"; } @ResponseBody @RequestMapping(value = "/frame/param/set/{frame}") public String setParam(@PathVariable String frame, @RequestParam(value = "pid") String pid, @RequestParam Map params) { // fetch the current values String uid = "urn:va:::frame:" + frame; Map data = null; try { data = tpl.getForMap("/vpr/" + pid + "/" + uid); } catch (Exception ex) { // TODO: not found!?! } if (data != null) { LinkedHashMap<String, String> map = new LinkedHashMap<>(); map.put("uid", uid); map.put("pid", pid); data = map; } // add the specified values data.putAll(params); // update the VPR results tpl.postForLocation("/vpr/" + pid, data); return "Saved"; } @ResponseBody @RequestMapping(value = "/frame/param/get/{frame}") public String getParam(@PathVariable String frame, @RequestParam(value = "pid") String pid, @RequestParam(value = "key") String key) { String uid = "urn:va:::frame:" + frame; try { Map data = tpl.getForMap("/vpr/" + pid + "/" + uid); data = (Map) ((List)((Map) data.get("data")).get("items")).get(0); if (key != null) { Object paramVal = data.get(key); if (paramVal != null) return StringEscapeUtils.escapeHtml(paramVal.toString()); else return ""; } return StringEscapeUtils.escapeHtml(data.toString()); } catch (Exception ex) { // TODO: not found!?! 
return ""; } } @ResponseBody @RequestMapping(value = "/frame/obs/set/{pid}/{key}") public String addObservation(@PathVariable Object pid, @PathVariable String key, @RequestParam Object value, @RequestParam(required = false) Object observed) { String uid = "urn:va:::obs:" + key; LinkedHashMap<String, Object> data = new LinkedHashMap<String, Object>(); data.put("uid", uid); data.put("pid", pid); data.put("entered", PointInTime.now()); data.put("kind", "Clinical Observation"); data.put("typeCode", key); data.put("typeName", eng.getDescription(key)); data.put("result", value); data.put("observed", observed); Observation item = new Observation(); item.setData(data); dao.save(item); runner.pushEvents(item.getEvents());// TODO: This should not be here... return null; } @ResponseBody @RequestMapping(value = "/frame/obs/delete/{pid}/{key}") public String delObservation(@PathVariable Object pid, @PathVariable String key) { String uid = "urn:va:::obs:" + key; dao.deleteByUID(Observation.class, uid); return null; } @RequestMapping(value = "/frame/event/push") public ModelAndView createEvent(HttpServletRequest request) throws Frame.FrameExecException, Frame.FrameInitException { List<String> msgs = new ArrayList<>(); Set<String> paramNames = new HashSet<>(Arrays.asList("eventClass", "frameID", "uid", "_ACTION_", "_NEW_KEY_", "_NEW_VAL_")); Map<String, Object> eventParams = new LinkedHashMap<>(); Enumeration<String> i = request.getParameterNames(); while (i.hasMoreElements()) { String key = i.nextElement(); if (!paramNames.contains(key)) eventParams.put(key, request.getParameter(key)); } if (StringUtils.hasText(request.getParameter("_NEW_KEY_"))) { eventParams.put(request.getParameter("_NEW_KEY_"), request.getParameter("_NEW_VAL_")); } // run the event if action=Execute String action = request.getParameter("_ACTION_"); String uid = request.getParameter("uid"); if (StringUtils.hasText(action)) { IFrameEvent evt = null; if 
(request.getParameter("eventClass").equals("gov.va.cpe.vpr.frameeng.CallEvent")) { evt = new CallEvent(request.getParameter("frameID"), null, eventParams); } else if (request.getParameter("eventClass").equals("gov.va.cpe.vpr.pom.PatientEvent")) { IPatientObject obj = dao.findByUID(uid); if (obj != null) { evt = new PatientEvent(obj); ((PatientEvent) evt).setParams(eventParams); } else { msgs.add("Unable to find that UID"); } } if (action.equals("Enqueue") && evt instanceof PatientEvent) { runner.pushEvent((PatientEvent) evt); } else if (evt != null) { FrameJob job = runner.exec(evt); LinkedHashMap<String, Object> model = new LinkedHashMap<>(); model.put("event", evt.getClass().getName()); model.put("eventParams", eventParams); model.put("actions", job.getActions()); Map<String, Object> frames = new LinkedHashMap<>(); for (IFrame f : job.getFrames()) { frames.put(f.getID(), f.getName()); } model.put("frames", frames); return ModelAndViewFactory.contentNegotiatingModelAndView(model); } } Map params = WebUtils.extractGroupAndSortParams(request); LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>(3); map.put("msgs", msgs); map.put("params", params); map.put("eventParams", eventParams); return new ModelAndView("/event/createEvent", map); } }
package wolfboyft.magicalmages.entity.mob.enemy.actual;

import java.util.Iterator;
import java.util.List;

import net.minecraft.block.material.Material;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.EntityAIBase;
import net.minecraft.entity.ai.EntityAILookIdle;
import net.minecraft.entity.ai.EntityAIMoveTowardsRestriction;
import net.minecraft.entity.ai.EntityAINearestAttackableTarget;
import net.minecraft.entity.ai.EntityAIWander;
import net.minecraft.entity.ai.EntityAIWatchClosest;
import net.minecraft.entity.ai.EntityLookHelper;
import net.minecraft.entity.ai.EntityMoveHelper;
import net.minecraft.entity.monster.EntityMob;
import net.minecraft.entity.passive.EntitySquid;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.entity.projectile.EntityFishHook;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemFishFood;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.play.server.S2BPacketChangeGameState;
import net.minecraft.pathfinding.PathNavigate;
import net.minecraft.pathfinding.PathNavigateSwimmer;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import net.minecraft.util.BlockPos;
import net.minecraft.util.DamageSource;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.MathHelper;
import net.minecraft.util.Vec3;
import net.minecraft.util.WeightedRandom;
import net.minecraft.util.WeightedRandomFishable;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

import com.google.common.base.Predicate;

/**
 * Swimming mob with a charged beam attack, thorns retaliation and an "elder"
 * variant that is larger, tougher and periodically inflicts Mining Fatigue on
 * nearby players.
 *
 * NOTE(review): this class is clearly adapted from decompiled, obfuscated
 * vanilla code (func_*/field_* names, __OBFID constants, decompiler artifacts
 * such as unused locals below). Field/method semantics described in comments
 * are inferred and should be confirmed against MCP mappings for this version.
 */
public class WitherGuardian extends EntityMob {
    private float field_175482_b;             // presumably tail-animation phase (current) — TODO confirm
    private float field_175484_c;             // presumably tail-animation phase (previous tick)
    private float field_175483_bk;            // presumably tail-animation speed
    private float field_175485_bl;            // presumably spike-extension amount (current)
    private float field_175486_bm;            // presumably spike-extension amount (previous tick)
    private EntityLivingBase field_175478_bn; // client-side cache of the beam-targeted entity
    private int field_175479_bo;              // ticks the beam attack has been charging
    private boolean field_175480_bp;          // on-land "flopping" latch used to gate the flop sound
    private EntityAIWander wander;
    private static final String __OBFID = "CL_00002213";

    public WitherGuardian(World worldIn) {
        super(worldIn);
        this.experienceValue = 10;
        this.setSize(0.85F, 0.85F);
        this.tasks.addTask(4, new WitherGuardian.AIGuardianAttack());
        EntityAIMoveTowardsRestriction entityaimovetowardsrestriction;
        this.tasks.addTask(5, entityaimovetowardsrestriction = new EntityAIMoveTowardsRestriction(this, 1.0D));
        this.tasks.addTask(7, this.wander = new EntityAIWander(this, 1.0D, 80));
        this.tasks.addTask(8, new EntityAIWatchClosest(this, EntityPlayer.class, 8.0F));
        this.tasks.addTask(8, new EntityAIWatchClosest(this, WitherGuardian.class, 12.0F, 0.01F));
        this.tasks.addTask(9, new EntityAILookIdle(this));
        // Mutex bits 3: wander and move-towards-restriction exclude each other.
        this.wander.setMutexBits(3);
        entityaimovetowardsrestriction.setMutexBits(3);
        this.targetTasks.addTask(1, new EntityAINearestAttackableTarget(this, EntityLivingBase.class, 10, true, false, new WitherGuardian.GuardianTargetSelector()));
        this.moveHelper = new WitherGuardian.GuardianMoveHelper();
        this.field_175484_c = this.field_175482_b = this.rand.nextFloat();
    }

    protected void applyEntityAttributes() {
        super.applyEntityAttributes();
        this.getEntityAttribute(SharedMonsterAttributes.attackDamage).setBaseValue(6.0D);
        this.getEntityAttribute(SharedMonsterAttributes.movementSpeed).setBaseValue(0.5D);
        this.getEntityAttribute(SharedMonsterAttributes.followRange).setBaseValue(16.0D);
        this.getEntityAttribute(SharedMonsterAttributes.maxHealth).setBaseValue(30.0D);
    }

    public void readEntityFromNBT(NBTTagCompound tagCompund) {
        super.readEntityFromNBT(tagCompund);
        // Restores the elder variant (also re-applies its attributes/size).
        this.func_175467_a(tagCompund.getBoolean("Elder"));
    }

    public void writeEntityToNBT(NBTTagCompound tagCompound) {
        super.writeEntityToNBT(tagCompound);
        tagCompound.setBoolean("Elder", this.isElder());
    }

    // Uses the swimming navigator rather than the default land pathing.
    protected PathNavigate func_175447_b(World worldIn) {
        return new PathNavigateSwimmer(this, worldIn);
    }

    protected void entityInit() {
        super.entityInit();
        // DataWatcher slot 16: bit flags (bit 2 presumably "moving/spikes out",
        // bit 4 = elder); slot 17: entity id of the current beam target (0 = none).
        this.dataWatcher.addObject(16, Integer.valueOf(0));
        this.dataWatcher.addObject(17, Integer.valueOf(0));
    }

    // Tests a bit of the slot-16 flag field.
    private boolean func_175468_a(int p_175468_1_) {
        return (this.dataWatcher.getWatchableObjectInt(16) & p_175468_1_) != 0;
    }

    // Sets or clears a bit of the slot-16 flag field.
    private void func_175473_a(int p_175473_1_, boolean p_175473_2_) {
        int j = this.dataWatcher.getWatchableObjectInt(16);
        if (p_175473_2_) {
            this.dataWatcher.updateObject(16, Integer.valueOf(j | p_175473_1_));
        } else {
            this.dataWatcher.updateObject(16, Integer.valueOf(j & ~p_175473_1_));
        }
    }

    public boolean func_175472_n() {
        return this.func_175468_a(2);
    }

    private void func_175476_l(boolean p_175476_1_) {
        this.func_175473_a(2, p_175476_1_);
    }

    // Beam charge-up duration in ticks; elders charge faster.
    public int func_175464_ck() {
        return this.isElder() ? 60 : 80;
    }

    public boolean isElder() {
        return this.func_175468_a(4);
    }

    // Marks this guardian as elder and, when enabling, upgrades size/attributes.
    public void func_175467_a(boolean p_175467_1_) {
        this.func_175473_a(4, p_175467_1_);
        if (p_175467_1_) {
            this.setSize(1.9975F, 1.9975F);
            this.getEntityAttribute(SharedMonsterAttributes.movementSpeed).setBaseValue(0.30000001192092896D);
            this.getEntityAttribute(SharedMonsterAttributes.attackDamage).setBaseValue(8.0D);
            this.getEntityAttribute(SharedMonsterAttributes.maxHealth).setBaseValue(80.0D);
            this.enablePersistence();
            this.wander.func_179479_b(400);
        }
    }

    @SideOnly(Side.CLIENT)
    public void func_175465_cm() {
        this.func_175467_a(true);
        this.field_175486_bm = this.field_175485_bl = 1.0F;
    }

    // Publishes the beam target's entity id through the DataWatcher (0 clears it).
    private void func_175463_b(int p_175463_1_) {
        this.dataWatcher.updateObject(17, Integer.valueOf(p_175463_1_));
    }

    public boolean func_175474_cn() {
        return this.dataWatcher.getWatchableObjectInt(17) != 0;
    }

    /**
     * Returns the entity currently targeted by the beam; on the client this is
     * resolved (and cached) from the synced entity id, on the server it is the
     * regular attack target.
     */
    public EntityLivingBase getTargetedEntity() {
        if (!this.func_175474_cn()) {
            return null;
        } else if (this.worldObj.isRemote) {
            if (this.field_175478_bn != null) {
                return this.field_175478_bn;
            } else {
                Entity entity = this.worldObj.getEntityByID(this.dataWatcher.getWatchableObjectInt(17));
                if (entity instanceof EntityLivingBase) {
                    this.field_175478_bn = (EntityLivingBase) entity;
                    return this.field_175478_bn;
                } else {
                    return null;
                }
            }
        } else {
            return this.getAttackTarget();
        }
    }

    // DataWatcher change callback: resize on elder flag; reset beam state on new target.
    public void func_145781_i(int p_145781_1_) {
        super.func_145781_i(p_145781_1_);
        if (p_145781_1_ == 16) {
            if (this.isElder() && this.width < 1.0F) {
                this.setSize(1.9975F, 1.9975F);
            }
        } else if (p_145781_1_ == 17) {
            this.field_175479_bo = 0;
            this.field_175478_bn = null;
        }
    }

    public int getTalkInterval() {
        return 160;
    }

    protected String getLivingSound() {
        return !this.isInWater() ? "mob.guardian.land.idle" : (this.isElder() ? "mob.guardian.elder.idle" : "mob.guardian.idle");
    }

    protected String getHurtSound() {
        return !this.isInWater() ? "mob.guardian.land.hit" : (this.isElder() ? "mob.guardian.elder.hit" : "mob.guardian.hit");
    }

    protected String getDeathSound() {
        return !this.isInWater() ? "mob.guardian.land.death" : (this.isElder() ? "mob.guardian.elder.death" : "mob.guardian.death");
    }

    protected boolean canTriggerWalking() {
        return false;
    }

    public float getEyeHeight() {
        return this.height * 0.5F;
    }

    // Path weight: strongly prefers (bright) water blocks.
    public float func_180484_a(BlockPos p_180484_1_) {
        return this.worldObj.getBlockState(p_180484_1_).getBlock().getMaterial() == Material.water ? 10.0F + this.worldObj.getLightBrightness(p_180484_1_) - 0.5F : super.func_180484_a(p_180484_1_);
    }

    /**
     * Per-tick update: client-side animation and beam particles, plus the
     * out-of-water flopping behavior and air refill while submerged.
     */
    public void onLivingUpdate() {
        if (this.worldObj.isRemote) {
            this.field_175484_c = this.field_175482_b;
            if (!this.isInWater()) {
                this.field_175483_bk = 2.0F;
                if (this.motionY > 0.0D && this.field_175480_bp && !this.isSilent()) {
                    this.worldObj.playSound(this.posX, this.posY, this.posZ, "mob.guardian.flop", 1.0F, 1.0F, false);
                }
                this.field_175480_bp = this.motionY < 0.0D && this.worldObj.isBlockNormalCube((new BlockPos(this)).down(), false);
            } else if (this.func_175472_n()) {
                if (this.field_175483_bk < 0.5F) {
                    this.field_175483_bk = 4.0F;
                } else {
                    this.field_175483_bk += (0.5F - this.field_175483_bk) * 0.1F;
                }
            } else {
                this.field_175483_bk += (0.125F - this.field_175483_bk) * 0.2F;
            }
            this.field_175482_b += this.field_175483_bk;
            this.field_175486_bm = this.field_175485_bl;
            if (!this.isInWater()) {
                this.field_175485_bl = this.rand.nextFloat();
            } else if (this.func_175472_n()) {
                this.field_175485_bl += (0.0F - this.field_175485_bl) * 0.25F;
            } else {
                this.field_175485_bl += (1.0F - this.field_175485_bl) * 0.06F;
            }
            // Bubble trail behind a moving, submerged guardian.
            if (this.func_175472_n() && this.isInWater()) {
                Vec3 vec3 = this.getLook(0.0F);
                for (int i = 0; i < 2; ++i) {
                    this.worldObj.spawnParticle(EnumParticleTypes.WATER_BUBBLE,
                            this.posX + (this.rand.nextDouble() - 0.5D) * (double) this.width - vec3.xCoord * 1.5D,
                            this.posY + this.rand.nextDouble() * (double) this.height - vec3.yCoord * 1.5D,
                            this.posZ + (this.rand.nextDouble() - 0.5D) * (double) this.width - vec3.zCoord * 1.5D,
                            0.0D, 0.0D, 0.0D, new int[0]);
                }
            }
            // While a beam target is set: advance the charge counter and draw
            // a bubble line toward the target.
            if (this.func_175474_cn()) {
                if (this.field_175479_bo < this.func_175464_ck()) {
                    ++this.field_175479_bo;
                }
                EntityLivingBase entitylivingbase = this.getTargetedEntity();
                if (entitylivingbase != null) {
                    this.getLookHelper().setLookPositionWithEntity(entitylivingbase, 90.0F, 90.0F);
                    this.getLookHelper().onUpdateLook();
                    double d5 = (double) this.func_175477_p(0.0F);
                    double d0 = entitylivingbase.posX - this.posX;
                    double d1 = entitylivingbase.posY + (double) (entitylivingbase.height * 0.5F) - (this.posY + (double) this.getEyeHeight());
                    double d2 = entitylivingbase.posZ - this.posZ;
                    double d3 = Math.sqrt(d0 * d0 + d1 * d1 + d2 * d2);
                    d0 /= d3;
                    d1 /= d3;
                    d2 /= d3;
                    double d4 = this.rand.nextDouble();
                    // Denser particle spacing as the charge (d5) approaches 1.
                    while (d4 < d3) {
                        d4 += 1.8D - d5 + this.rand.nextDouble() * (1.7D - d5);
                        this.worldObj.spawnParticle(EnumParticleTypes.WATER_BUBBLE, this.posX + d0 * d4, this.posY + d1 * d4 + (double) this.getEyeHeight(), this.posZ + d2 * d4, 0.0D, 0.0D, 0.0D, new int[0]);
                    }
                }
            }
        }
        if (this.inWater) {
            this.setAir(300);
        } else if (this.onGround) {
            // Beached: hop in a random direction ("flop").
            this.motionY += 0.5D;
            this.motionX += (double) ((this.rand.nextFloat() * 2.0F - 1.0F) * 0.4F);
            this.motionZ += (double) ((this.rand.nextFloat() * 2.0F - 1.0F) * 0.4F);
            this.rotationYaw = this.rand.nextFloat() * 360.0F;
            this.onGround = false;
            this.isAirBorne = true;
        }
        if (this.func_175474_cn()) {
            // Face the body the same way as the head while beaming.
            this.rotationYaw = this.rotationYawHead;
        }
        super.onLivingUpdate();
    }

    // Interpolated tail-animation value for rendering.
    @SideOnly(Side.CLIENT)
    public float func_175471_a(float p_175471_1_) {
        return this.field_175484_c + (this.field_175482_b - this.field_175484_c) * p_175471_1_;
    }

    // Interpolated spike-extension value for rendering.
    @SideOnly(Side.CLIENT)
    public float func_175469_o(float p_175469_1_) {
        return this.field_175486_bm + (this.field_175485_bl - this.field_175486_bm) * p_175469_1_;
    }

    // Beam charge progress in [0, 1].
    public float func_175477_p(float p_175477_1_) {
        return ((float) this.field_175479_bo + p_175477_1_) / (float) this.func_175464_ck();
    }

    /**
     * Elder aura: every 1200 ticks, inflicts Mining Fatigue III on nearby
     * players (within 50 blocks) that don't already have it, with the
     * accompanying "elder guardian" client effect packet.
     */
    protected void updateAITasks() {
        super.updateAITasks();
        if (this.isElder()) {
            // NOTE(review): these four locals are unused decompiler artifacts.
            boolean flag = true;
            boolean flag1 = true;
            boolean flag2 = true;
            boolean flag3 = true;
            // Entity id offset staggers the aura ticks between multiple elders.
            if ((this.ticksExisted + this.getEntityId()) % 1200 == 0) {
                Potion potion = Potion.digSlowdown;
                List list = this.worldObj.getPlayers(EntityPlayerMP.class, new Predicate() {
                    private static final String __OBFID = "CL_00002212";

                    public boolean func_179913_a(EntityPlayerMP p_179913_1_) {
                        // 2500 = 50 blocks squared.
                        return WitherGuardian.this.getDistanceSqToEntity(p_179913_1_) < 2500.0D && p_179913_1_.theItemInWorldManager.func_180239_c();
                    }

                    public boolean apply(Object p_apply_1_) {
                        return this.func_179913_a((EntityPlayerMP) p_apply_1_);
                    }
                });
                Iterator iterator = list.iterator();
                while (iterator.hasNext()) {
                    EntityPlayerMP entityplayermp = (EntityPlayerMP) iterator.next();
                    if (!entityplayermp.isPotionActive(potion) || entityplayermp.getActivePotionEffect(potion).getAmplifier() < 2 || entityplayermp.getActivePotionEffect(potion).getDuration() < 1200) {
                        // Game-state 10 triggers the elder guardian jumpscare overlay.
                        entityplayermp.playerNetServerHandler.sendPacket(new S2BPacketChangeGameState(10, 0.0F));
                        entityplayermp.addPotionEffect(new PotionEffect(potion.id, 6000, 2));
                    }
                }
            }
            if (!this.hasHome()) {
                this.func_175449_a(new BlockPos(this), 16);
            }
        }
    }

    // Drops prismarine shards, occasionally fish or crystals, and (elder, with
    // player kill credit) a wet sponge.
    protected void dropFewItems(boolean p_70628_1_, int p_70628_2_) {
        int j = this.rand.nextInt(3) + this.rand.nextInt(p_70628_2_ + 1);
        if (j > 0) {
            this.entityDropItem(new ItemStack(Items.prismarine_shard, j, 0), 1.0F);
        }
        if (this.rand.nextInt(3 + p_70628_2_) > 1) {
            this.entityDropItem(new ItemStack(Items.fish, 1, ItemFishFood.FishType.COD.getMetadata()), 1.0F);
        } else if (this.rand.nextInt(3 + p_70628_2_) > 1) {
            this.entityDropItem(new ItemStack(Items.prismarine_crystals, 1, 0), 1.0F);
        }
        if (p_70628_1_ && this.isElder()) {
            this.entityDropItem(new ItemStack(Blocks.sponge, 1, 1), 1.0F);
        }
    }

    // Rare drop: a random item from the fishing loot table.
    protected void addRandomArmor() {
        ItemStack itemstack = ((WeightedRandomFishable) WeightedRandom.getRandomItem(this.rand, EntityFishHook.func_174855_j())).getItemStack(this.rand);
        this.entityDropItem(itemstack, 1.0F);
    }

    protected boolean isValidLightLevel() {
        return true;
    }

    // NOTE(review): despite its name, this override performs the vanilla
    // "is not colliding" spawn-placement check, not lava handling.
    public boolean handleLavaMovement() {
        return this.worldObj.checkNoEntityCollision(this.getEntityBoundingBox(), this) && this.worldObj.getCollidingBoundingBoxes(this, this.getEntityBoundingBox()).isEmpty();
    }

    public boolean getCanSpawnHere() {
        return (this.rand.nextInt(20) == 0 || !this.worldObj.canBlockSeeSky(new BlockPos(this))) && super.getCanSpawnHere();
    }

    /**
     * Thorns-style retaliation: non-magic, non-explosion melee attackers take
     * 2 damage back while the spikes are out (flag 2 clear). Also interrupts
     * wandering before applying normal damage handling.
     */
    public boolean attackEntityFrom(DamageSource source, float amount) {
        if (!this.func_175472_n() && !source.isMagicDamage() && source.getSourceOfDamage() instanceof EntityLivingBase) {
            EntityLivingBase entitylivingbase = (EntityLivingBase) source.getSourceOfDamage();
            if (!source.isExplosion()) {
                entitylivingbase.attackEntityFrom(DamageSource.causeThornsDamage(this), 2.0F);
                entitylivingbase.playSound("damage.thorns", 0.5F, 1.0F);
            }
        }
        this.wander.func_179480_f();
        return super.attackEntityFrom(source, amount);
    }

    public int getVerticalFaceSpeed() {
        return 180;
    }

    // In water: free 3D swimming with drag; sinks slowly when idle.
    // Out of water: normal land movement.
    public void moveEntityWithHeading(float p_70612_1_, float p_70612_2_) {
        if (this.isServerWorld()) {
            if (this.isInWater()) {
                this.moveFlying(p_70612_1_, p_70612_2_, 0.1F);
                this.moveEntity(this.motionX, this.motionY, this.motionZ);
                this.motionX *= 0.8999999761581421D;
                this.motionY *= 0.8999999761581421D;
                this.motionZ *= 0.8999999761581421D;
                if (!this.func_175472_n() && this.getAttackTarget() == null) {
                    this.motionY -= 0.005D;
                }
            } else {
                super.moveEntityWithHeading(p_70612_1_, p_70612_2_);
            }
        } else {
            super.moveEntityWithHeading(p_70612_1_, p_70612_2_);
        }
    }

    /**
     * Beam attack AI: stands still, charges for func_175464_ck() ticks while
     * keeping line of sight, then deals magic + attack damage to the target.
     */
    class AIGuardianAttack extends EntityAIBase {
        private WitherGuardian field_179456_a = WitherGuardian.this;
        private int field_179455_b; // charge tick counter; starts at -10 (grace period)
        private static final String __OBFID = "CL_00002211";

        public AIGuardianAttack() {
            this.setMutexBits(3);
        }

        public boolean shouldExecute() {
            EntityLivingBase entitylivingbase = this.field_179456_a.getAttackTarget();
            return entitylivingbase != null && entitylivingbase.isEntityAlive();
        }

        public boolean continueExecuting() {
            // Non-elders break off once the target gets within 3 blocks.
            return super.continueExecuting() && (this.field_179456_a.isElder() || this.field_179456_a.getDistanceSqToEntity(this.field_179456_a.getAttackTarget()) > 9.0D);
        }

        public void startExecuting() {
            this.field_179455_b = -10;
            this.field_179456_a.getNavigator().clearPathEntity();
            this.field_179456_a.getLookHelper().setLookPositionWithEntity(this.field_179456_a.getAttackTarget(), 90.0F, 90.0F);
            this.field_179456_a.isAirBorne = true;
        }

        public void resetTask() {
            // Clear the synced beam target and the attack target.
            this.field_179456_a.func_175463_b(0);
            this.field_179456_a.setAttackTarget((EntityLivingBase) null);
            this.field_179456_a.wander.func_179480_f();
        }

        public void updateTask() {
            EntityLivingBase entitylivingbase = this.field_179456_a.getAttackTarget();
            this.field_179456_a.getNavigator().clearPathEntity();
            this.field_179456_a.getLookHelper().setLookPositionWithEntity(entitylivingbase, 90.0F, 90.0F);
            if (!this.field_179456_a.canEntityBeSeen(entitylivingbase)) {
                // Losing line of sight aborts the attack.
                this.field_179456_a.setAttackTarget((EntityLivingBase) null);
            } else {
                ++this.field_179455_b;
                if (this.field_179455_b == 0) {
                    // Grace period over: publish the target and fire the attack-sound event.
                    this.field_179456_a.func_175463_b(this.field_179456_a.getAttackTarget().getEntityId());
                    this.field_179456_a.worldObj.setEntityState(this.field_179456_a, (byte) 21);
                } else if (this.field_179455_b >= this.field_179456_a.func_175464_ck()) {
                    // Fully charged: scale damage with difficulty and elder status.
                    float f = 1.0F;
                    if (this.field_179456_a.worldObj.getDifficulty() == EnumDifficulty.HARD) {
                        f += 2.0F;
                    }
                    if (this.field_179456_a.isElder()) {
                        f += 2.0F;
                    }
                    entitylivingbase.attackEntityFrom(DamageSource.causeIndirectMagicDamage(this.field_179456_a, this.field_179456_a), f);
                    entitylivingbase.attackEntityFrom(DamageSource.causeMobDamage(this.field_179456_a), (float) this.field_179456_a.getEntityAttribute(SharedMonsterAttributes.attackDamage).getAttributeValue());
                    this.field_179456_a.setAttackTarget((EntityLivingBase) null);
                } else if (this.field_179455_b >= 60 && this.field_179455_b % 20 == 0) {
                    // NOTE(review): deliberately empty branch — vanilla plays a
                    // charge-warning sound here; presumably lost in decompilation.
                    ;
                }
                super.updateTask();
            }
        }
    }

    /**
     * Custom move helper: steers directly toward the current waypoint with a
     * sinusoidal bobbing motion, keeps the look direction roughly on course,
     * and raises/lowers the "moving" flag (bit 2) accordingly.
     */
    class GuardianMoveHelper extends EntityMoveHelper {
        private WitherGuardian field_179930_g = WitherGuardian.this;
        private static final String __OBFID = "CL_00002209";

        public GuardianMoveHelper() {
            super(WitherGuardian.this);
        }

        public void onUpdateMoveHelper() {
            if (this.update && !this.field_179930_g.getNavigator().noPath()) {
                // Vector from the guardian to the current waypoint.
                double d0 = this.posX - this.field_179930_g.posX;
                double d1 = this.posY - this.field_179930_g.posY;
                double d2 = this.posZ - this.field_179930_g.posZ;
                double d3 = d0 * d0 + d1 * d1 + d2 * d2;
                d3 = (double) MathHelper.sqrt_double(d3);
                d1 /= d3;
                float f = (float) (Math.atan2(d2, d0) * 180.0D / Math.PI) - 90.0F;
                this.field_179930_g.rotationYaw = this.limitAngle(this.field_179930_g.rotationYaw, f, 30.0F);
                this.field_179930_g.renderYawOffset = this.field_179930_g.rotationYaw;
                float f1 = (float) (this.speed * this.field_179930_g.getEntityAttribute(SharedMonsterAttributes.movementSpeed).getAttributeValue());
                // Smoothly approach the desired speed.
                this.field_179930_g.setAIMoveSpeed(this.field_179930_g.getAIMoveSpeed() + (f1 - this.field_179930_g.getAIMoveSpeed()) * 0.125F);
                // Sinusoidal sway perpendicular-ish to the heading, offset per entity.
                double d4 = Math.sin((double) (this.field_179930_g.ticksExisted + this.field_179930_g.getEntityId()) * 0.5D) * 0.05D;
                double d5 = Math.cos((double) (this.field_179930_g.rotationYaw * (float) Math.PI / 180.0F));
                double d6 = Math.sin((double) (this.field_179930_g.rotationYaw * (float) Math.PI / 180.0F));
                this.field_179930_g.motionX += d4 * d5;
                this.field_179930_g.motionZ += d4 * d6;
                d4 = Math.sin((double) (this.field_179930_g.ticksExisted + this.field_179930_g.getEntityId()) * 0.75D) * 0.05D;
                this.field_179930_g.motionY += d4 * (d6 + d5) * 0.25D;
                this.field_179930_g.motionY += (double) this.field_179930_g.getAIMoveSpeed() * d1 * 0.1D;
                // Ease the look position toward a point ahead on the travel direction.
                EntityLookHelper entitylookhelper = this.field_179930_g.getLookHelper();
                double d7 = this.field_179930_g.posX + d0 / d3 * 2.0D;
                double d8 = (double) this.field_179930_g.getEyeHeight() + this.field_179930_g.posY + d1 / d3 * 1.0D;
                double d9 = this.field_179930_g.posZ + d2 / d3 * 2.0D;
                double d10 = entitylookhelper.func_180423_e();
                double d11 = entitylookhelper.func_180422_f();
                double d12 = entitylookhelper.func_180421_g();
                if (!entitylookhelper.func_180424_b()) {
                    d10 = d7;
                    d11 = d8;
                    d12 = d9;
                }
                this.field_179930_g.getLookHelper().setLookPosition(d10 + (d7 - d10) * 0.125D, d11 + (d8 - d11) * 0.125D, d12 + (d9 - d12) * 0.125D, 10.0F, 40.0F);
                this.field_179930_g.func_175476_l(true);
            } else {
                this.field_179930_g.setAIMoveSpeed(0.0F);
                this.field_179930_g.func_175476_l(false);
            }
        }
    }

    /** Targets players and squid that are farther than 3 blocks away. */
    class GuardianTargetSelector implements Predicate {
        private WitherGuardian field_179916_a = WitherGuardian.this;
        private static final String __OBFID = "CL_00002210";

        public boolean func_179915_a(EntityLivingBase p_179915_1_) {
            return (p_179915_1_ instanceof EntityPlayer || p_179915_1_ instanceof EntitySquid) && p_179915_1_.getDistanceSqToEntity(this.field_179916_a) > 9.0D;
        }

        public boolean apply(Object p_apply_1_) {
            return this.func_179915_a((EntityLivingBase) p_apply_1_);
        }
    }
}
package com.enigmabridge;

import com.enigmabridge.comm.EBCorruptedException;
import com.enigmabridge.create.*;
import com.enigmabridge.create.misc.EBRSAPrivateCrtKey;
import com.enigmabridge.create.misc.EBRSAPrivateCrtKeyWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.security.interfaces.RSAPrivateCrtKey;
import java.security.interfaces.RSAPrivateKey;
import java.security.spec.RSAPrivateCrtKeySpec;
import java.security.spec.RSAPrivateKeySpec;

/**
 * Creates UserObject keys in the EB.
 *
 * Usage: configure via {@link Builder} (an {@link EBEngine} is mandatory, plus at least one
 * of uoType / getTemplateRequest), optionally attach communication keys and an application
 * key (symmetric secret or RSA private key), then call {@link #create()} to execute the
 * createUO call against the EB enrollment endpoint.
 *
 * Created by dusanklinec on 13.07.16.
 */
public class UserObjectKeyCreator {
    private static final Logger LOG = LoggerFactory.getLogger(UserObjectKeyCreator.class);

    // Randomness source; used by create() to generate communication keys when none were set.
    protected SecureRandom random;

    // EB engine — supplies the enrollment endpoint and a default SecureRandom; required.
    protected EBEngine engine;

    // Requested user-object type; mirrored into getTemplateRequest by setUoType().
    protected UserObjectType uoType;

    // Template request sent to the EB; built from uoType by Builder.build() when not given explicitly.
    protected EBUOGetTemplateRequest getTemplateRequest;

    // Communication keys (ENC + MAC) embedded into the created object; generated on demand in create().
    protected EBCommKeys commKeys;

    // Optional application key to import into the user object (e.g., AES secret, RSA private key).
    protected EBUOTemplateKey appKey;

    // Response of the last successful create() call, retained for inspection via getLastResponse().
    protected EBCreateUOResponse lastResponse;

    /**
     * Fluent builder base, parameterized so subclass builders can return their own type.
     * Delegates every setter to the object under construction ({@link #getObj()}).
     */
    public static abstract class AbstractBuilder<T extends UserObjectKeyCreator, B extends AbstractBuilder> {
        public B setRandom(SecureRandom random) {
            getObj().setRandom(random);
            return getThisBuilder();
        }

        public B setEngine(EBEngine engine) {
            getObj().setEngine(engine);
            return getThisBuilder();
        }

        public B setGetTemplateRequest(EBUOGetTemplateRequest getTemplateRequest) {
            getObj().setGetTemplateRequest(getTemplateRequest);
            return getThisBuilder();
        }

        public B setUoType(UserObjectType uoType) {
            getObj().setUoType(uoType);
            return getThisBuilder();
        }

        public abstract T build();
        public abstract B getThisBuilder();
        public abstract T getObj();
    }

    /**
     * Concrete builder. {@link #build()} validates mandatory fields and fills in defaults
     * (template request derived from uoType; SecureRandom taken from the engine, else a new one).
     */
    public static class Builder extends AbstractBuilder<UserObjectKeyCreator, UserObjectKeyCreator.Builder> {
        private final UserObjectKeyCreator parent = new UserObjectKeyCreator();

        @Override
        public UserObjectKeyCreator.Builder getThisBuilder() {
            return this;
        }

        @Override
        public UserObjectKeyCreator getObj() {
            return parent;
        }

        @Override
        public UserObjectKeyCreator build() {
            final UserObjectKeyCreator obj = getObj();

            // Engine has to be set.
            if (obj.engine == null){
                throw new NullPointerException("Engine has to be set");
            }

            // At least one type has to be set
            if (obj.uoType == null && obj.getTemplateRequest == null){
                throw new NullPointerException("At least one of the uoType, getTemplateRequest has to be set");
            }

            // Empty request -> create a new one
            if (obj.getTemplateRequest == null){
                obj.getTemplateRequest = new EBUOGetTemplateRequest().setType(obj.uoType);
            }

            // SecureRandom: prefer the engine's RNG; fall back to a fresh SecureRandom.
            if (obj.random == null){
                obj.setRandom(obj.engine.getRnd());
            }
            if (obj.random == null){
                obj.setRandom(new SecureRandom());
            }

            return parent;
        }
    }

    // Build logic

    /** Sets the communication keys from raw ENC and MAC key bytes. */
    public UserObjectKeyCreator setCommKeys(byte[] encKey, byte[] macKey){
        this.commKeys = new EBCommKeys(encKey, macKey);
        return this;
    }

    /** Sets the communication keys object directly (stored by reference). */
    public UserObjectKeyCreator setCommKeys(EBCommKeys commKeys){
        this.commKeys = commKeys;
        return this;
    }

    /**
     * Sets the app-key generation mode on the template request.
     * NOTE(review): getTemplateRequest is non-null only after Builder.build() or an explicit
     * setGetTemplateRequest() — calling this earlier would NPE; confirm intended call order.
     */
    public UserObjectKeyCreator setAppKeyGeneration(int appKeyGeneration){
        this.getTemplateRequest.setGenerationAppKey(appKeyGeneration);
        return this;
    }

    /** Sets the user-object type function on the template request (same NPE caveat as above). */
    public UserObjectKeyCreator setUoTypeFunction(int typeFunction){
        this.getTemplateRequest.setTypeFunction(typeFunction);
        return this;
    }

    /** Sets the application key from raw key bytes, wrapped as a KEY_APP template key. */
    public UserObjectKeyCreator setAppKey(byte[] appKey){
        this.appKey = new EBUOTemplateKey(Constants.KEY_APP, appKey);
        return this;
    }

    // Sugar for easy setting of objects
    // Later maybe move to anoher builder class, after more objects emerge.

    /**
     * Adds the symmetric key from the specifications (e.g., AES).
     * For secret key it is needed to set setAppKeyGeneration, setUoTypeFunction.
     * We don't know which mode user wants to use (e.g., derive, client = import).
     *
     * @param keySpec secret key spec to import
     * @return this
     */
    public UserObjectKeyCreator setAppKey(SecretKeySpec keySpec){
        this.appKey = new EBUOTemplateKey(keySpec);
        return this;
    }

    /**
     * Sets RSA private CRT key wrapper for import.
     * Serializes it to the appKey for import, sets setAppKeyGeneration(client), calls
     * appropriate setUoTypeFunction.
     *
     * @param wrapper RSA private key wrapper.
     * @return this
     */
    public UserObjectKeyCreator setAppKey(EBRSAPrivateCrtKeyWrapper wrapper){
        this.appKey = new EBUOTemplateKeyRSA(wrapper);
        setAppKeyGeneration(Constants.GENKEY_CLIENT);
        setRSADecryptFunctionFromModulus(wrapper.getModulus());
        return this;
    }

    /**
     * Sets RSA private CRT key spec for import.
     * Serializes it to the appKey for import, sets setAppKeyGeneration(client), calls
     * appropriate setUoTypeFunction.
     *
     * @param spec RSA private key spec.
     * @return this
     */
    public UserObjectKeyCreator setAppKey(RSAPrivateCrtKeySpec spec){
        return setAppKey(spec, null);
    }

    /**
     * Sets RSA private CRT key spec for import.
     * Serializes it to the appKey for import, sets setAppKeyGeneration(client), calls
     * appropriate setUoTypeFunction.
     *
     * @param spec RSA private key spec.
     * @param e public exponent, unused. for easier calling if user is unsure whether CRT or not.
     * @return this
     */
    public UserObjectKeyCreator setAppKey(RSAPrivateCrtKeySpec spec, BigInteger e){
        return setAppKey(new EBRSAPrivateCrtKeyWrapper(spec));
    }

    /**
     * Sets RSA private CRT key for import.
     * Serializes it to the appKey for import, sets setAppKeyGeneration(client), calls
     * appropriate setUoTypeFunction.
     *
     * @param key RSA private key spec.
     * @return this
     */
    public UserObjectKeyCreator setAppKey(RSAPrivateCrtKey key){
        return setAppKey(key, null);
    }

    /**
     * Sets RSA private CRT key for import.
     * Serializes it to the appKey for import, sets setAppKeyGeneration(client), calls
     * appropriate setUoTypeFunction.
     *
     * @param key RSA private key spec.
     * @param e public exponent, unused. for easier calling if user is unsure whether CRT or not.
     * @return this
     */
    public UserObjectKeyCreator setAppKey(RSAPrivateCrtKey key, BigInteger e){
        return setAppKey(new EBRSAPrivateCrtKeyWrapper(key));
    }

    /**
     * Sets RSA private key spec for import.
     * Only CRT keys are allowed to import thus this one needs to be converted to CRT key.
     * Such conversion may take some time and may not succeed. If conversion fails, RuntimeException is thrown.
     *
     * Key is then serialized to the appKey for import, setAppKeyGeneration(client), calls
     * appropriate setUoTypeFunction.
     *
     * @param spec RSA private key spec.
     * @param e public exponent, required.
     * @return this
     */
    public UserObjectKeyCreator setAppKey(RSAPrivateKeySpec spec, BigInteger e){
        return setAppKey(new EBRSAPrivateCrtKeyWrapper(new EBRSAPrivateCrtKey(spec, e)));
    }

    /**
     * Sets RSA private key for import.
     * Only CRT keys are allowed to import thus this one needs to be converted to CRT key.
     * Such conversion may take some time and may not succeed. If conversion fails, RuntimeException is thrown.
     *
     * Key is then serialized to the appKey for import, setAppKeyGeneration(client), calls
     * appropriate setUoTypeFunction.
     *
     * @param key RSA private key.
     * @param e public exponent, required.
     * @return this
     */
    public UserObjectKeyCreator setAppKey(RSAPrivateKey key, BigInteger e){
        return setAppKey(new EBRSAPrivateCrtKeyWrapper(new EBRSAPrivateCrtKey(key, e)));
    }

    /**
     * Derives the RSA-decrypt type function from the modulus (presumably by key size —
     * TODO confirm against UserObjectType) and applies it to the template request.
     */
    protected void setRSADecryptFunctionFromModulus(BigInteger modulus){
        if (modulus == null){
            throw new NullPointerException("Empty modulus");
        }

        setUoTypeFunction(UserObjectType.getRSADecryptFunctionFromModulus(modulus));
    }

    /**
     * Creates a new Use Object Key from the input values.
     *
     * Generates communication keys if missing, forces billing-key generation / format /
     * protocol defaults on the template request, performs the createUO call, and on a
     * successful response returns a pre-populated key builder. The raw response is kept
     * in {@link #lastResponse}.
     *
     * @return builder for new user object key
     * @throws IOException if the underlying call fails with an I/O error (rewrapped with context)
     */
    public UserObjectKeyBase.Builder create() throws IOException {
        final EBEndpointInfo endpoint = engine.getEndpointEnrollment();
        final EBUOGetTemplateRequest req = this.getTemplateRequest;

        if (commKeys == null){
            commKeys = EBCommKeys.generate(random);
        }

        // Force remaining parameters to default values.
        req.setGenerationBillingKey(Constants.GENKEY_LEGACY_ENROLL_RANDOM);
        req.setFormat(1);
        req.setProtocol(1);

        // Assemble the create call: engine + endpoint + request + comm keys (+ optional app key).
        EBCreateUOSimpleCall.Builder callBld = new EBCreateUOSimpleCall.Builder()
                .setEngine(engine)
                .setEndpoint(endpoint)
                .setRequest(req)
                .addKey(new EBUOTemplateKey(Constants.KEY_COMM_ENC, commKeys.getEncKey()))
                .addKey(new EBUOTemplateKey(Constants.KEY_COMM_MAC, commKeys.getMacKey()));

        if (appKey != null){
            callBld.addKey(appKey);
        }

        final EBCreateUOSimpleCall createCall = callBld.build();

        try {
            final EBCreateUOResponse response = createCall.create();
            if (!response.isCodeOk()){
                // Request dump is gated behind a dev flag — it may contain key material.
                if (EBDevSettings.shouldLogFailedCreateUO()){
                    LOG.debug("Failed createUO: " + createCall.getCreateRequest());
                }

                throw new EBEngineException("Could not create UO - response: " + response.toString());
            }

            // Create UOKey — copy commKeys defensively so later mutation does not leak into the key.
            final UserObjectKeyBase.Builder keyBld = new UserObjectKeyBase.Builder()
                    .setUoid(response.getHandle().getUoId())
                    .setUserObjectType(response.getHandle().getUoType().getValue())
                    .setCommKeys(new EBCommKeys(commKeys))
                    .setKeyLength(response.getHandle().getUoType().keyLength());

            lastResponse = response;
            return keyBld;

        } catch (IOException e) {
            // Rewrap to add context; original exception is preserved as the cause.
            throw new IOException("Could not create UO", e);
        } catch (EBCorruptedException e) {
            throw new EBEngineException("Could not create UO", e);
        }
    }

    // Setters

    protected void setRandom(SecureRandom random) {
        this.random = random;
    }

    protected void setEngine(EBEngine engine) {
        this.engine = engine;
    }

    /** Sets the template request; rejects null to keep create() safe. */
    public void setGetTemplateRequest(EBUOGetTemplateRequest getTemplateRequest) {
        if (getTemplateRequest == null){
            throw new NullPointerException("Template request cannot be null");
        }
        this.getTemplateRequest = getTemplateRequest;
    }

    /** Sets the UO type, mirroring it into the template request when one already exists. */
    public void setUoType(UserObjectType uoType) {
        if (getTemplateRequest != null) {
            this.getTemplateRequest.setType(uoType);
        }
        this.uoType = uoType;
    }

    // Getters

    public SecureRandom getRandom() {
        return random;
    }

    public EBEngine getEngine() {
        return engine;
    }

    public EBUOGetTemplateRequest getGetTemplateRequest() {
        return getTemplateRequest;
    }

    /**
     * Returns the type as stored in the template request (not the {@link #uoType} field) —
     * NPEs if the template request was never set.
     */
    public UserObjectType getUoType() {
        return UserObjectType.valueOf(getTemplateRequest.getType());
    }

    /** Response of the most recent successful {@link #create()}, or null before any call. */
    public EBCreateUOResponse getLastResponse() {
        return lastResponse;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.ml.inference.trainedmodel;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig.NUM_TOP_CLASSES;
import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig.NUM_TOP_FEATURE_IMPORTANCE_VALUES;
import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig.PREDICTION_FIELD_TYPE;
import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig.RESULTS_FIELD;
import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig.TOP_CLASSES_RESULTS_FIELD;

/**
 * Partial update for a {@link ClassificationConfig}: every field is nullable, and
 * {@link #apply(ClassificationConfig)} overlays only the non-null fields onto an
 * existing config. Serializable both over the wire (Writeable) and as XContent.
 */
public class ClassificationConfigUpdate implements InferenceConfigUpdate<ClassificationConfig> {

    public static final ParseField NAME = new ParseField("classification");

    // An update with no overrides; apply() on it is a no-op.
    // NOTE(review): public static and non-final — mutable in principle; confirm intended.
    public static ClassificationConfigUpdate EMPTY_PARAMS =
        new ClassificationConfigUpdate(null, null, null, null, null);

    // All fields nullable: null means "leave the original config's value unchanged".
    private final Integer numTopClasses;
    private final String topClassesResultsField;
    private final String resultsField;
    private final Integer numTopFeatureImportanceValues;
    private final PredictionFieldType predictionFieldType;

    /**
     * Builds an update from a user-supplied options map. Recognized keys are removed
     * from a copy of the map; any leftovers are rejected with a 400-style exception.
     */
    public static ClassificationConfigUpdate fromMap(Map<String, Object> map) {
        Map<String, Object> options = new HashMap<>(map);
        Integer numTopClasses = (Integer)options.remove(NUM_TOP_CLASSES.getPreferredName());
        String topClassesResultsField = (String)options.remove(TOP_CLASSES_RESULTS_FIELD.getPreferredName());
        String resultsField = (String)options.remove(RESULTS_FIELD.getPreferredName());
        Integer featureImportance = (Integer)options.remove(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName());
        String predictionFieldTypeStr = (String)options.remove(PREDICTION_FIELD_TYPE.getPreferredName());

        if (options.isEmpty() == false) {
            throw ExceptionsHelper.badRequestException("Unrecognized fields {}.", options.keySet());
        }
        return new ClassificationConfigUpdate(numTopClasses,
            resultsField,
            topClassesResultsField,
            featureImportance,
            predictionFieldTypeStr == null ? null : PredictionFieldType.fromString(predictionFieldTypeStr));
    }

    /** Builds an update that mirrors every field of an existing config. */
    public static ClassificationConfigUpdate fromConfig(ClassificationConfig config) {
        return new ClassificationConfigUpdate(config.getNumTopClasses(),
            config.getResultsField(),
            config.getTopClassesResultsField(),
            config.getNumTopFeatureImportanceValues(),
            config.getPredictionFieldType());
    }

    // Strict XContent parser (unknown fields are errors); built once, reused.
    private static final ObjectParser<ClassificationConfigUpdate.Builder, Void> STRICT_PARSER = createParser(false);

    private static ObjectParser<ClassificationConfigUpdate.Builder, Void> createParser(boolean lenient) {
        ObjectParser<ClassificationConfigUpdate.Builder, Void> parser = new ObjectParser<>(
            NAME.getPreferredName(),
            lenient,
            ClassificationConfigUpdate.Builder::new);
        parser.declareInt(ClassificationConfigUpdate.Builder::setNumTopClasses, NUM_TOP_CLASSES);
        parser.declareString(ClassificationConfigUpdate.Builder::setResultsField, RESULTS_FIELD);
        parser.declareString(ClassificationConfigUpdate.Builder::setTopClassesResultsField, TOP_CLASSES_RESULTS_FIELD);
        parser.declareInt(ClassificationConfigUpdate.Builder::setNumTopFeatureImportanceValues,
            NUM_TOP_FEATURE_IMPORTANCE_VALUES);
        parser.declareString(ClassificationConfigUpdate.Builder::setPredictionFieldType, PREDICTION_FIELD_TYPE);
        return parser;
    }

    public static ClassificationConfigUpdate fromXContentStrict(XContentParser parser) {
        return STRICT_PARSER.apply(parser, null).build();
    }

    /**
     * @param numTopClasses          number of top classes to return, or null to keep the original
     * @param resultsField           destination field for the prediction, or null
     * @param topClassesResultsField destination field for top-class results, or null
     * @param featureImportance      number of feature-importance values; must be &gt;= 0 when set
     * @param predictionFieldType    output type for the prediction field, or null
     */
    public ClassificationConfigUpdate(Integer numTopClasses,
                                      String resultsField,
                                      String topClassesResultsField,
                                      Integer featureImportance,
                                      PredictionFieldType predictionFieldType) {
        this.numTopClasses = numTopClasses;
        this.topClassesResultsField = topClassesResultsField;
        this.resultsField = resultsField;
        if (featureImportance != null && featureImportance < 0) {
            throw new IllegalArgumentException("[" + NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName() +
                "] must be greater than or equal to 0");
        }
        this.numTopFeatureImportanceValues = featureImportance;
        this.predictionFieldType = predictionFieldType;
    }

    // Wire deserialization. Field order here MUST match writeTo() exactly.
    public ClassificationConfigUpdate(StreamInput in) throws IOException {
        this.numTopClasses = in.readOptionalInt();
        this.topClassesResultsField = in.readOptionalString();
        this.resultsField = in.readOptionalString();
        this.numTopFeatureImportanceValues = in.readOptionalVInt();
        this.predictionFieldType = in.readOptionalWriteable(PredictionFieldType::fromStream);
    }

    public Integer getNumTopClasses() {
        return numTopClasses;
    }

    public String getTopClassesResultsField() {
        return topClassesResultsField;
    }

    public String getResultsField() {
        return resultsField;
    }

    public Integer getNumTopFeatureImportanceValues() {
        return numTopFeatureImportanceValues;
    }

    public PredictionFieldType getPredictionFieldType() {
        return predictionFieldType;
    }

    // Wire serialization. Field order here MUST match the StreamInput constructor exactly.
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalInt(numTopClasses);
        out.writeOptionalString(topClassesResultsField);
        out.writeOptionalString(resultsField);
        out.writeOptionalVInt(numTopFeatureImportanceValues);
        out.writeOptionalWriteable(predictionFieldType);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ClassificationConfigUpdate that = (ClassificationConfigUpdate) o;
        return Objects.equals(numTopClasses, that.numTopClasses)
            && Objects.equals(topClassesResultsField, that.topClassesResultsField)
            && Objects.equals(resultsField, that.resultsField)
            && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues)
            && Objects.equals(predictionFieldType, that.predictionFieldType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(numTopClasses, topClassesResultsField, resultsField, numTopFeatureImportanceValues,
            predictionFieldType);
    }

    // XContent rendering: only the fields that are actually set are emitted.
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (numTopClasses != null) {
            builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses);
        }
        if (topClassesResultsField != null) {
            builder.field(TOP_CLASSES_RESULTS_FIELD.getPreferredName(), topClassesResultsField);
        }
        if (resultsField != null) {
            builder.field(RESULTS_FIELD.getPreferredName(), resultsField);
        }
        if (numTopFeatureImportanceValues != null) {
            builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues);
        }
        if (predictionFieldType != null) {
            builder.field(PREDICTION_FIELD_TYPE.getPreferredName(), predictionFieldType.toString());
        }
        builder.endObject();
        return builder;
    }

    @Override
    public String getWriteableName() {
        return NAME.getPreferredName();
    }

    @Override
    public String getName() {
        return NAME.getPreferredName();
    }

    /**
     * Overlays this update onto an existing config, returning the original instance
     * unchanged when the update would have no effect (see {@link #isNoop}).
     */
    @Override
    public ClassificationConfig apply(ClassificationConfig originalConfig) {
        if (isNoop(originalConfig)) {
            return originalConfig;
        }
        ClassificationConfig.Builder builder = new ClassificationConfig.Builder(originalConfig);
        if (resultsField != null) {
            builder.setResultsField(resultsField);
        }
        if (numTopFeatureImportanceValues != null) {
            builder.setNumTopFeatureImportanceValues(numTopFeatureImportanceValues);
        }
        if (topClassesResultsField != null) {
            builder.setTopClassesResultsField(topClassesResultsField);
        }
        if (numTopClasses != null) {
            builder.setNumTopClasses(numTopClasses);
        }
        if (predictionFieldType != null) {
            builder.setPredictionFieldType(predictionFieldType);
        }
        return builder.build();
    }

    /** Materializes this update as a standalone config, starting from empty defaults. */
    @Override
    public InferenceConfig toConfig() {
        return apply(ClassificationConfig.EMPTY_PARAMS);
    }

    @Override
    public boolean isSupported(InferenceConfig inferenceConfig) {
        return inferenceConfig instanceof ClassificationConfig;
    }

    // True when every set field already matches the original config (null fields never mismatch).
    boolean isNoop(ClassificationConfig originalConfig) {
        return (resultsField == null || resultsField.equals(originalConfig.getResultsField()))
            && (numTopFeatureImportanceValues == null
                || originalConfig.getNumTopFeatureImportanceValues() == numTopFeatureImportanceValues)
            && (topClassesResultsField == null || topClassesResultsField.equals(originalConfig.getTopClassesResultsField()))
            && (numTopClasses == null || originalConfig.getNumTopClasses() == numTopClasses)
            && (predictionFieldType == null || predictionFieldType.equals(originalConfig.getPredictionFieldType()));
    }

    /** Mutable builder used as the ObjectParser target; all fields default to null (= keep). */
    public static class Builder {
        private Integer numTopClasses;
        private String topClassesResultsField;
        private String resultsField;
        private Integer numTopFeatureImportanceValues;
        private PredictionFieldType predictionFieldType;

        public Builder setNumTopClasses(int numTopClasses) {
            this.numTopClasses = numTopClasses;
            return this;
        }

        public Builder setTopClassesResultsField(String topClassesResultsField) {
            this.topClassesResultsField = topClassesResultsField;
            return this;
        }

        public Builder setResultsField(String resultsField) {
            this.resultsField = resultsField;
            return this;
        }

        public Builder setNumTopFeatureImportanceValues(int numTopFeatureImportanceValues) {
            this.numTopFeatureImportanceValues = numTopFeatureImportanceValues;
            return this;
        }

        // Private: only the parser sets this, from its string form.
        private Builder setPredictionFieldType(String predictionFieldType) {
            this.predictionFieldType = PredictionFieldType.fromString(predictionFieldType);
            return this;
        }

        public ClassificationConfigUpdate build() {
            return new ClassificationConfigUpdate(numTopClasses,
                resultsField,
                topClassesResultsField,
                numTopFeatureImportanceValues,
                predictionFieldType);
        }
    }
}
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.components.media_router.caf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyBoolean; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import android.content.Context; import androidx.mediarouter.media.MediaRouter; import com.google.android.gms.cast.framework.CastContext; import com.google.android.gms.cast.framework.CastSession; import com.google.android.gms.cast.framework.SessionManager; import com.google.android.gms.cast.framework.media.RemoteMediaClient; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InOrder; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.robolectric.RuntimeEnvironment; import org.robolectric.annotation.Config; import org.robolectric.shadows.ShadowLooper; import org.chromium.base.test.BaseRobolectricTestRunner; import org.chromium.components.media_router.ClientRecord; import org.chromium.components.media_router.MediaRoute; import org.chromium.components.media_router.MediaRouteManager; import org.chromium.components.media_router.MediaRouterClient; import org.chromium.components.media_router.MediaSink; import 
org.chromium.components.media_router.TestMediaRouterClient; /** * Robolectric tests for CafMediaRouteProvider. */ @RunWith(BaseRobolectricTestRunner.class) @Config(manifest = Config.NONE, shadows = {ShadowMediaRouter.class, ShadowCastContext.class, ShadowLooper.class, ShadowCastMediaSource.class}) public class CafMediaRouteProviderTest { private static final String SUPPORTED_SOURCE = "cast:DEADBEEF"; private static final String SUPPORTED_AUTOJOIN_SOURCE = "cast:DEADBEEF" + "?clientId=12345&autoJoinPolicy=" + CastMediaSource.AUTOJOIN_TAB_AND_ORIGIN_SCOPED; private Context mContext; private CafMediaRouteProvider mProvider; private MediaRouterTestHelper mMediaRouterHelper; private MediaRouter mMediaRouter; private MediaRoute mRoute1; private MediaRoute mRoute2; @Mock private MediaRouteManager mManager; @Mock private CastContext mCastContext; @Mock private CastSession mCastSession; @Mock private SessionManager mSessionManager; @Mock private RemoteMediaClient mRemoteMediaClient; @Mock private BaseSessionController mSessionController; @Mock private ShadowCastMediaSource.ShadowImplementation mShadowCastMediaSource; @Mock private CafMessageHandler mMessageHandler; @Mock private CastMediaSource mSource1; @Mock private CastMediaSource mSource2; @Mock private MediaSink mSink; @Before public void setUp() { MockitoAnnotations.initMocks(this); MediaRouterClient.setInstance(new TestMediaRouterClient()); mContext = RuntimeEnvironment.application; ShadowCastContext.setInstance(mCastContext); ShadowCastMediaSource.setImplementation(mShadowCastMediaSource); mMediaRouterHelper = new MediaRouterTestHelper(); mMediaRouter = MediaRouter.getInstance(mContext); mProvider = spy(CafMediaRouteProvider.create(mManager)); mProvider.mMessageHandler = mMessageHandler; mRoute1 = new MediaRoute("sink-id", "source-id-1", "presentation-id-1"); mRoute2 = new MediaRoute("sink-id", "source-id-2", "presentation-id-2"); doReturn(mSource1).when(mShadowCastMediaSource).from("source-id-1"); 
doReturn(mSource2).when(mShadowCastMediaSource).from("source-id-2"); doReturn("client-id-1").when(mSource1).getClientId(); doReturn("client-id-2").when(mSource2).getClientId(); doReturn("app-id-1").when(mSource1).getApplicationId(); doReturn("app-id-2").when(mSource2).getApplicationId(); doReturn("sink-id").when(mSink).getId(); doReturn(mSessionController).when(mProvider).sessionController(); doReturn(mSessionManager).when(mCastContext).getSessionManager(); doReturn(mCastSession).when(mSessionController).getSession(); doReturn(mRemoteMediaClient).when(mCastSession).getRemoteMediaClient(); } @After public void tearDown() { MediaRouterClient.setInstance(null); } @Test public void testJoinRoute() { InOrder inOrder = inOrder(mManager); doReturn(mSource1).when(mShadowCastMediaSource).from("source-id-1"); doReturn(mSink).when(mSessionController).getSink(); doReturn(true).when(mSessionController).isConnected(); doReturn(true).when(mProvider).canJoinExistingSession( anyString(), anyString(), anyInt(), any(CastMediaSource.class)); // Regular case. mProvider.joinRoute("source-id-1", "presentation-id-1", "origin", 1, 1); inOrder.verify(mManager, never()).onJoinRouteRequestError(anyString(), anyInt()); inOrder.verify(mManager).onRouteCreated( anyString(), eq("sink-id"), eq(1), eq(mProvider), eq(false)); assertEquals(mProvider.mRoutes.size(), 1); MediaRoute route = (MediaRoute) (mProvider.mRoutes.values().toArray()[0]); assertEquals(route.sinkId, "sink-id"); assertEquals(route.sourceId, "source-id-1"); assertEquals(route.presentationId, "presentation-id-1"); // No source. mProvider.mRoutes.clear(); doReturn(null).when(mShadowCastMediaSource).from("source-id-1"); mProvider.joinRoute("source-id-1", "presentation-id-1", "origin", 1, 1); verifyJoinRouteRequestError(inOrder, "Unsupported presentation URL", 1); assertTrue(mProvider.mRoutes.isEmpty()); // No client ID. 
doReturn(mSource1).when(mShadowCastMediaSource).from("source-id-1"); doReturn(null).when(mSource1).getClientId(); mProvider.joinRoute("source-id-1", "presentation-id-1", "origin", 1, 1); verifyJoinRouteRequestError(inOrder, "Unsupported presentation URL", 1); assertTrue(mProvider.mRoutes.isEmpty()); // No session. doReturn("client-id-1").when(mSource1).getClientId(); doReturn(false).when(mSessionController).isConnected(); mProvider.joinRoute("source-id-1", "presentation-id-1", "origin", 1, 1); verifyJoinRouteRequestError(inOrder, "No presentation", 1); assertTrue(mProvider.mRoutes.isEmpty()); // No matching route. doReturn(true).when(mSessionController).isConnected(); doReturn(false).when(mProvider).canJoinExistingSession( anyString(), anyString(), anyInt(), any(CastMediaSource.class)); mProvider.joinRoute("source-id-1", "presentation-id-1", "origin", 1, 1); verifyJoinRouteRequestError(inOrder, "No matching route", 1); assertTrue(mProvider.mRoutes.isEmpty()); } @Test public void testCloseRoute() { InOrder inOrder = inOrder(mMessageHandler); doReturn(mSink).when(mSessionController).getSink(); // Regular case when there is active session. mProvider.addRoute(mRoute1, "origin", 1, 1, false); doReturn(true).when(mSessionController).isConnected(); mProvider.closeRoute(mRoute1.id); inOrder.verify(mMessageHandler) .sendReceiverActionToClient(mRoute1.id, mSink, "client-id-1", "stop"); assertEquals(mProvider.mRoutes.size(), 1); assertEquals(mProvider.getClientIdToRecords().size(), 1); // Abnormal case when the session controller doesn't have a sink. doReturn(null).when(mSessionController).getSink(); mProvider.closeRoute(mRoute1.id); inOrder.verify(mMessageHandler, never()) .sendReceiverActionToClient( anyString(), any(MediaSink.class), anyString(), anyString()); assertEquals(mProvider.mRoutes.size(), 1); assertEquals(mProvider.getClientIdToRecords().size(), 1); // Abnormal case when there is no session. 
doReturn(mSink).when(mSessionController).getSink(); doReturn(false).when(mSessionController).isConnected(); mProvider.closeRoute(mRoute1.id); inOrder.verify(mMessageHandler, never()) .sendReceiverActionToClient( anyString(), any(MediaSink.class), anyString(), anyString()); assertTrue(mProvider.mRoutes.isEmpty()); assertTrue(mProvider.getClientIdToRecords().isEmpty()); } @Test public void testSendStringMessage() { InOrder inOrder = inOrder(mMessageHandler); mProvider.addRoute(mRoute1, "origin", 1, 1, false); // A client in record sends a message. mProvider.sendStringMessage(mRoute1.id, "message"); inOrder.verify(mMessageHandler).handleMessageFromClient("message"); // An unknown client sends a mesasge. mProvider.sendStringMessage("other-route-id", "message"); inOrder.verify(mMessageHandler, never()).handleMessageFromClient(anyString()); } @Test public void testSendMessageToClient() { InOrder inOrder = inOrder(mManager); mProvider.addRoute(mRoute1, "origin", 1, 1, false); mProvider.getClientIdToRecords().get("client-id-1").isConnected = true; // Normal case. mProvider.sendMessageToClient("client-id-1", "message"); inOrder.verify(mManager).onMessage(mRoute1.id, "message"); // Client is not in record. mProvider.sendMessageToClient("client-id-unkonwn", "message"); inOrder.verify(mManager, never()).onMessage(anyString(), anyString()); // Message enqueued while client is not connected. 
mProvider.getClientIdToRecords().get("client-id-1").isConnected = false; mProvider.sendMessageToClient("client-id-1", "message"); inOrder.verify(mManager, never()).onMessage(anyString(), anyString()); // Flush message mProvider.flushPendingMessagesToClient(mProvider.getClientIdToRecords().get("client-id-1")); inOrder.verify(mManager).onMessage(mRoute1.id, "message"); } @Test public void testOnSessionStarted() { InOrder inOrder = inOrder(mSessionController, mRemoteMediaClient); doReturn(mSink).when(mSessionController).getSink(); doReturn(mCastSession).when(mSessionManager).getCurrentCastSession(); doReturn(null).when(mSessionController).getSession(); doAnswer(new Answer<Void>() { @Override public Void answer(InvocationOnMock invocation) { doReturn(invocation.getArguments()[0]).when(mSessionController).getSession(); return null; } }) .when(mSessionController) .attachToCastSession(any(CastSession.class)); // Prepare the pending create route request so super.onSessionStarted() behaves correctly. mProvider.createRoute( "source-id-1", "cast-route", "presentation-id", "origin", 1, false, 1); mProvider.addRoute(mRoute1, "origin", 1, 1, false); mProvider.addRoute(mRoute2, "origin", 1, 1, false); // Skip adding route when the super.onSessionStarted() is called. doNothing().when(mProvider).addRoute( any(MediaRoute.class), anyString(), anyInt(), anyInt(), anyBoolean()); mProvider.onSessionStarted(mCastSession, "session-id"); // Verify super.onSessionStarted() is called. inOrder.verify(mSessionController).attachToCastSession(mCastSession); inOrder.verify(mRemoteMediaClient).requestStatus(); verify(mMessageHandler) .sendReceiverActionToClient(mRoute1.id, mSink, "client-id-1", "cast"); verify(mMessageHandler) .sendReceiverActionToClient(mRoute2.id, mSink, "client-id-2", "cast"); } @Test public void testRouteManagement() { // Add the first route. 
mProvider.addRoute(mRoute1, "origin-1", 1, 1, false); assertEquals(mProvider.mRoutes.size(), 1); assertEquals(mProvider.getClientIdToRecords().size(), 1); ClientRecord record = mProvider.getClientIdToRecords().get("client-id-1"); verifyClientRecord(record, mRoute1.id, "client-id-1", "app-id-1", "origin-1", 1, false); // Add the second route. mProvider.addRoute(mRoute2, "origin-2", 2, 2, false); assertEquals(mProvider.mRoutes.size(), 2); assertEquals(mProvider.getClientIdToRecords().size(), 2); record = mProvider.getClientIdToRecords().get("client-id-2"); verifyClientRecord(record, mRoute2.id, "client-id-2", "app-id-2", "origin-2", 2, false); // Add a duplicate route. This addition will be ignored as `mRoute2` is already in record. // This should never happen in production. mProvider.addRoute(mRoute2, "origin-3", 3, 3, false); assertEquals(mProvider.mRoutes.size(), 2); assertEquals(mProvider.getClientIdToRecords().size(), 2); record = mProvider.getClientIdToRecords().get("client-id-2"); verifyClientRecord(record, mRoute2.id, "client-id-2", "app-id-2", "origin-2", 2, false); // Remove a route. ClientRecord lastRecord = mProvider.getClientIdToRecords().get("client-id-1"); mProvider.removeRoute(mRoute1.id, null); assertEquals(mProvider.mRoutes.size(), 1); assertEquals(mProvider.getClientIdToRecords().size(), 1); record = mProvider.getClientIdToRecords().get("client-id-2"); verifyClientRecord(record, mRoute2.id, "client-id-2", "app-id-2", "origin-2", 2, false); assertEquals(mProvider.mLastRemovedRouteRecord, lastRecord); // Remove a non-existing route. mProvider.removeRoute(mRoute1.id, null); assertEquals(mProvider.mRoutes.size(), 1); assertEquals(mProvider.getClientIdToRecords().size(), 1); record = mProvider.getClientIdToRecords().get("client-id-2"); verifyClientRecord(record, mRoute2.id, "client-id-2", "app-id-2", "origin-2", 2, false); lastRecord = record; // Remove the last route. 
mProvider.removeRoute(mRoute2.id, null); assertTrue(mProvider.mRoutes.isEmpty()); assertTrue(mProvider.getClientIdToRecords().isEmpty()); assertEquals(mProvider.mLastRemovedRouteRecord, lastRecord); } @Test public void testCanJoin_matchingSessionId() { // Regular case. doReturn("session-id").when(mSessionController).getSessionId(); assertTrue(mProvider.canJoinExistingSession( "cast-session_session-id", "origin", 1, mock(CastMediaSource.class))); // The current session ID is null. doReturn(null).when(mSessionController).getSessionId(); assertFalse(mProvider.canJoinExistingSession( "cast-session_session-id", "origin", 1, mock(CastMediaSource.class))); // Mismatching session ID. doReturn("session-id").when(mSessionController).getSessionId(); assertFalse(mProvider.canJoinExistingSession( "cast-session_other-session-id", "origin", 1, mock(CastMediaSource.class))); } @Test public void testAutoJoin_usingLastRemovedRouteRecord() { doReturn("app-id-1").when(mSource1).getApplicationId(); doReturn("app-id-1").when(mSource2).getApplicationId(); doReturn("tab_and_origin_scoped").when(mSource2).getAutoJoinPolicy(); doReturn(mSource1).when(mSessionController).getSource(); mProvider.addRoute(mRoute1, "origin-1", 1, 1, false); mProvider.removeRoute(mRoute1.id, null); // Regular case. assertTrue(mProvider.canJoinExistingSession("auto-join", "origin-1", 1, mSource2)); // Mismatching origin. assertFalse(mProvider.canJoinExistingSession("auto-join", "origin-2", 1, mSource2)); // Mismatching tab id. 
assertFalse(mProvider.canJoinExistingSession("auto-join", "origin-1", 2, mSource2)); } @Test public void testAutoJoin_mismatchingSources() { doReturn("app-id-1").when(mSource1).getApplicationId(); doReturn("app-id-1").when(mSource2).getApplicationId(); doReturn("tab_and_origin_scoped").when(mSource2).getAutoJoinPolicy(); doReturn(mSource1).when(mSessionController).getSource(); mProvider.addRoute(mRoute1, "origin-1", 1, 1, false); mProvider.removeRoute(mRoute1.id, null); // Page scoped auto-join policy. doReturn("page_scoped").when(mSource2).getAutoJoinPolicy(); assertFalse(mProvider.canJoinExistingSession("auto-join", "origin-1", 1, mSource2)); // Mismatching app ID. doReturn("tab_and_origin_scoped").when(mSource2).getAutoJoinPolicy(); doReturn("app-id-2").when(mSource2).getApplicationId(); assertFalse(mProvider.canJoinExistingSession("auto-join", "origin-1", 1, mSource2)); } @Test public void testAutoJoin_originScoped() { doReturn("app-id-1").when(mSource1).getApplicationId(); doReturn("app-id-1").when(mSource2).getApplicationId(); doReturn("origin_scoped").when(mSource2).getAutoJoinPolicy(); doReturn(mSource1).when(mSessionController).getSource(); mProvider.addRoute(mRoute1, "origin-1", 1, 1, false); // Normal case. assertTrue(mProvider.canJoinExistingSession("auto-join", "origin-1", 1, mSource2)); // Mismatching tab ID is allowed. assertTrue(mProvider.canJoinExistingSession("auto-join", "origin-1", 2, mSource2)); // Mismatching origin is not allowed. assertFalse(mProvider.canJoinExistingSession("auto-join", "origin-2", 1, mSource2)); } @Test public void testAutoJoin_tabAndOriginScoped() { doReturn("app-id-1").when(mSource1).getApplicationId(); doReturn("app-id-1").when(mSource2).getApplicationId(); doReturn("tab_and_origin_scoped").when(mSource2).getAutoJoinPolicy(); doReturn(mSource1).when(mSessionController).getSource(); mProvider.addRoute(mRoute1, "origin-1", 1, 1, false); // Normal case. 
assertTrue(mProvider.canJoinExistingSession("auto-join", "origin-1", 1, mSource2)); // Mismatching tab ID is not allowed. assertFalse(mProvider.canJoinExistingSession("auto-join", "origin-1", 2, mSource2)); // Mismatching origin is not allowed. assertFalse(mProvider.canJoinExistingSession("auto-join", "origin-2", 1, mSource2)); } private void verifyJoinRouteRequestError(InOrder inOrder, String error, int nativeRequestId) { inOrder.verify(mManager).onJoinRouteRequestError(error, nativeRequestId); inOrder.verify(mManager, never()) .onRouteCreated(anyString(), anyString(), anyInt(), any(CafBaseMediaRouteProvider.class), anyBoolean()); } private void verifyClientRecord(ClientRecord record, String routeId, String clientId, String appId, String origin, int tabId, boolean isConnected) { assertEquals(record.routeId, routeId); assertEquals(record.clientId, clientId); assertEquals(record.appId, appId); assertEquals(record.origin, origin); assertEquals(record.tabId, tabId); assertEquals(record.isConnected, isConnected); } }
/*
 * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
 *
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.java.sip.communicator.impl.protocol.jabber;

import java.util.*;

import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.jabber.*;

import org.jivesoftware.smack.provider.*;
import org.jivesoftware.smack.util.*;
import org.osgi.framework.*;

/**
 * The Jabber implementation of the ProtocolProviderFactory. Creates, stores,
 * loads and modifies XMPP accounts and registers the resulting protocol
 * providers with the OSGi framework.
 *
 * @author Damian Minkov
 */
public class ProtocolProviderFactoryJabberImpl
    extends ProtocolProviderFactory
{
    /**
     * Account property name indicating whether Jingle Nodes relaying should
     * be used (the value is the {@code JINGLE_NODES_ENABLED} flag).
     */
    public static final String IS_USE_JINGLE_NODES = "JINGLE_NODES_ENABLED";

    /**
     * The default XMPP client-to-server port, used whenever the account
     * properties do not specify a port explicitly.
     */
    private static final String DEFAULT_SERVER_PORT = "5222";

    /**
     * Our provider manager instance. Initialized once in the static block
     * below and expected to be our {@link ProviderManagerExt} subclass.
     */
    static ProviderManager providerManager = null;

    static
    {
        try
        {
            // Set the extension provider manager for classes that use
            // it directly.
            ProviderManager.setInstance(new ProviderManagerExt());

            // Set the Smack interop implementation for the classes that need
            // to support Smack v4 interoperation.
            AbstractSmackInteroperabilityLayer.setImplementationClass(
                SmackV3InteroperabilityLayer.class);
        }
        catch(Throwable t)
        {
            // Once loaded, if we try to set the instance a second time an
            // IllegalStateException is thrown; intentionally ignored.
        }
        finally
        {
            providerManager = ProviderManager.getInstance();
        }

        // Compare class names rather than using instanceof: tests unload and
        // reload the protocol bundle, so ProviderManagerExt may be loaded
        // twice from different classloaders and instanceof would fail.
        if (!(providerManager.getClass().getName()
                .equals(ProviderManagerExt.class.getName())))
        {
            throw new RuntimeException(
                "ProviderManager set to the default one");
        }
    }

    /**
     * Creates an instance of the ProtocolProviderFactoryJabberImpl.
     */
    protected ProtocolProviderFactoryJabberImpl()
    {
        super(JabberActivator.getBundleContext(), ProtocolNames.JABBER);
    }

    /**
     * Overrides the original in order to give the protocol implementation
     * access to account storage.
     *
     * @param accountID the account identifier.
     */
    @Override
    protected void storeAccount(AccountID accountID)
    {
        super.storeAccount(accountID);
    }

    /**
     * Initializes and creates an account corresponding to the specified
     * accountProperties and registers the resulting ProtocolProvider in the
     * <tt>context</tt> BundleContext parameter. This method has a persistent
     * effect. Once created the resulting account will remain installed until
     * removed through the uninstall account method.
     *
     * @param userIDStr the user identifier for the new account
     * @param accountProperties a set of protocol (or implementation)
     * specific properties defining the new account.
     * @return the AccountID of the newly created account
     * @throws NullPointerException if the bundle context, user ID or the
     * property map is <tt>null</tt>
     * @throws IllegalArgumentException if no server address is given and none
     * can be parsed from the user ID
     * @throws IllegalStateException if an account with the same ID is already
     * installed
     */
    @Override
    public AccountID installAccount(
        String userIDStr,
        Map<String, String> accountProperties)
    {
        BundleContext context = JabberActivator.getBundleContext();

        if (context == null)
            throw new NullPointerException(
                "The specified BundleContext was null");
        if (userIDStr == null)
            throw new NullPointerException("The specified AccountID was null");
        if (accountProperties == null)
            throw new NullPointerException(
                "The specified property map was null");

        accountProperties.put(USER_ID, userIDStr);

        // If the server address is missing we must extract it from the
        // userID (the domain part of the JID).
        if(accountProperties.get(SERVER_ADDRESS) == null)
        {
            String serverAddress = StringUtils.parseServer(userIDStr);

            if (serverAddress != null)
                // Reuse the already-parsed value instead of parsing again.
                accountProperties.put(SERVER_ADDRESS, serverAddress);
            else
                throw new IllegalArgumentException(
                    "Should specify a server for user name "
                        + userIDStr + ".");
        }

        // If the server port is missing, fall back to the default.
        if(accountProperties.get(SERVER_PORT) == null)
        {
            accountProperties.put(SERVER_PORT, DEFAULT_SERVER_PORT);
        }

        AccountID accountID
            = new JabberAccountIDImpl(userIDStr, accountProperties);

        // Make sure we haven't seen this account id before.
        if( registeredAccounts.containsKey(accountID) )
            throw new IllegalStateException(
                "An account for id " + userIDStr + " was already installed!");

        // First store the account and only then load it, as the load
        // generates an OSGi event; the OSGi event triggers (through the UI) a
        // call to the register() method which needs to access the
        // configuration service and check for a password.
        this.storeAccount(accountID, false);

        accountID = loadAccount(accountProperties);

        return accountID;
    }

    /**
     * Create an account.
     *
     * @param userID the user ID
     * @param accountProperties the properties associated with the user ID
     * @return new <tt>AccountID</tt>
     */
    @Override
    protected AccountID createAccountID(String userID,
        Map<String, String> accountProperties)
    {
        return new JabberAccountIDImpl(userID, accountProperties);
    }

    /**
     * Creates and initializes the Jabber protocol provider service for the
     * given account.
     *
     * @param userID the user ID of the account
     * @param accountID the account to create a service for
     * @return the initialized <tt>ProtocolProviderService</tt>
     */
    @Override
    protected ProtocolProviderService createService(String userID,
        AccountID accountID)
    {
        ProtocolProviderServiceJabberImpl service
            = new ProtocolProviderServiceJabberImpl();

        service.initialize(userID, accountID);
        return service;
    }

    /**
     * Modify an existing account. Unregisters the current OSGi service
     * registration, re-initializes the provider with the new properties and
     * registers it again.
     *
     * @param protocolProvider the <tt>ProtocolProviderService</tt> responsible
     * of the account
     * @param accountProperties modified properties to be set
     * @throws NullPointerException if the bundle context, provider, property
     * map or server address is <tt>null</tt>
     */
    @Override
    public void modifyAccount(
        ProtocolProviderService protocolProvider,
        Map<String, String> accountProperties)
        throws NullPointerException
    {
        BundleContext context = JabberActivator.getBundleContext();

        if (context == null)
            throw new NullPointerException(
                "The specified BundleContext was null");
        if (protocolProvider == null)
            throw new NullPointerException(
                "The specified Protocol Provider was null");

        JabberAccountIDImpl accountID
            = (JabberAccountIDImpl) protocolProvider.getAccountID();

        // If the given accountID doesn't correspond to an existing account
        // we return.
        if(!registeredAccounts.containsKey(accountID))
            return;

        ServiceRegistration registration
            = registeredAccounts.get(accountID);

        // Kill the service.
        if (registration != null)
        {
            // Unregister the provider before removing it.
            try
            {
                if(protocolProvider.isRegistered())
                {
                    protocolProvider.unregister();
                    protocolProvider.shutdown();
                }
            }
            catch (Throwable e)
            {
                // We don't care about failures here because we are modifying:
                // the service will be unregistered and registered again below.
            }

            registration.unregister();
        }

        if (accountProperties == null)
            throw new NullPointerException(
                "The specified property map was null");

        accountProperties.put(USER_ID, accountID.getUserID());

        String serverAddress = accountProperties.get(SERVER_ADDRESS);

        if(serverAddress == null)
            throw new NullPointerException("null is not a valid ServerAddress");

        // If the server port is missing, fall back to the default.
        if(accountProperties.get(SERVER_PORT) == null)
        {
            accountProperties.put(SERVER_PORT, DEFAULT_SERVER_PORT);
        }

        if (!accountProperties.containsKey(PROTOCOL))
            accountProperties.put(PROTOCOL, ProtocolNames.JABBER);

        accountID.setAccountProperties(accountProperties);

        // First store the account and only then load it, as the load
        // generates an OSGi event; the OSGi event triggers (through the UI) a
        // call to the register() method which needs to access the
        // configuration service and check for a password.
        this.storeAccount(accountID);

        Hashtable<String, String> properties = new Hashtable<String, String>();

        properties.put(PROTOCOL, ProtocolNames.JABBER);
        properties.put(USER_ID, accountID.getUserID());

        ((ProtocolProviderServiceJabberImpl) protocolProvider)
            .initialize(accountID.getUserID(), accountID);

        // We store the account again in order to persist all properties added
        // during the protocol provider initialization.
        this.storeAccount(accountID);

        registration
            = context.registerService(
                ProtocolProviderService.class.getName(),
                protocolProvider,
                properties);

        registeredAccounts.put(accountID, registration);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.falcon.state.service.store;

import java.util.Map;

import org.apache.commons.lang.RandomStringUtils;
import org.apache.falcon.FalconException;
import org.apache.falcon.cluster.util.EmbeddedCluster;
import org.apache.falcon.entity.v0.Entity;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.exception.StateStoreException;
import org.apache.falcon.execution.ExecutionInstance;
import org.apache.falcon.execution.FalconExecutionService;
import org.apache.falcon.execution.MockDAGEngine;
import org.apache.falcon.execution.NotificationHandler;
import org.apache.falcon.notification.service.impl.AlarmService;
import org.apache.falcon.notification.service.impl.DataAvailabilityService;
import org.apache.falcon.notification.service.impl.JobCompletionService;
import org.apache.falcon.notification.service.impl.SchedulerService;
import org.apache.falcon.predicate.Predicate;
import org.apache.falcon.service.Services;
import org.apache.falcon.state.AbstractSchedulerTestBase;
import org.apache.falcon.state.EntityClusterID;
import org.apache.falcon.state.EntityID;
import org.apache.falcon.state.EntityState;
import org.apache.falcon.state.ID;
import org.apache.falcon.state.InstanceID;
import org.apache.falcon.state.InstanceState;
import org.apache.falcon.state.store.jdbc.BeanMapperUtil;
import org.apache.falcon.state.store.jdbc.JDBCStateStore;
import org.apache.falcon.state.store.StateStore;
import org.apache.falcon.service.FalconJPAService;
import org.apache.falcon.util.StartupProperties;
import org.apache.falcon.workflow.engine.DAGEngine;
import org.apache.falcon.workflow.engine.DAGEngineFactory;
import org.apache.falcon.workflow.engine.OozieDAGEngine;
import org.joda.time.DateTime;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;

/**
 * Test cases for JDBCStateStore.
 *
 * <p>These are integration-style tests: a real embedded database is created in
 * {@link #setup()} and all test methods operate against the shared singleton
 * {@link JDBCStateStore}. State accumulated by one method is removed by the
 * {@code @AfterTest} hook, so the exact put/update/delete ordering inside each
 * test is significant.
 */
public class TestJDBCStateStore extends AbstractSchedulerTestBase {
    // Shared singleton under test; all test methods hit the same DB-backed store.
    private static StateStore stateStore = JDBCStateStore.get();
    // Used to generate arbitrary instance sequence numbers in initInstanceState().
    private static Random randomValGenerator = new Random();
    private static FalconJPAService falconJPAService = FalconJPAService.get();
    // Mocked notification services registered with the global Services registry.
    private AlarmService mockTimeService;
    private DataAvailabilityService mockDataService;
    private SchedulerService mockSchedulerService;
    private JobCompletionService mockCompletionService;
    private DAGEngine dagEngine;

    /**
     * Creates the embedded DB, boots the JPA service and HDFS mini-cluster,
     * then wires the mocked notification services.
     */
    @BeforeClass
    public void setup() throws Exception {
        super.setup();
        createDB(DB_SQL_FILE);
        falconJPAService.init();
        this.dfsCluster = EmbeddedCluster.newCluster("testCluster");
        this.conf = dfsCluster.getConf();
        registerServices();
    }

    /**
     * Registers mocked AlarmService, DataAvailabilityService, SchedulerService
     * and JobCompletionService with the global {@link Services} registry.
     * Each mock keeps the real createRequestBuilder() behavior.
     */
    private void registerServices() throws FalconException {
        mockTimeService = Mockito.mock(AlarmService.class);
        Mockito.when(mockTimeService.getName()).thenReturn("AlarmService");
        Mockito.when(mockTimeService.createRequestBuilder(Mockito.any(NotificationHandler.class),
                Mockito.any(ID.class))).thenCallRealMethod();

        mockDataService = Mockito.mock(DataAvailabilityService.class);
        Mockito.when(mockDataService.getName()).thenReturn("DataAvailabilityService");
        Mockito.when(mockDataService.createRequestBuilder(Mockito.any(NotificationHandler.class),
                Mockito.any(ID.class))).thenCallRealMethod();
        // NOTE(review): this mock assignment appears to be overwritten by the
        // spy below before being used anywhere — confirm whether the stub on
        // the OozieDAGEngine mock is still needed.
        dagEngine = Mockito.mock(OozieDAGEngine.class);
        Mockito.doNothing().when(dagEngine).resume(Mockito.any(ExecutionInstance.class));

        mockSchedulerService = Mockito.mock(SchedulerService.class);
        Mockito.when(mockSchedulerService.getName()).thenReturn("JobSchedulerService");
        // Point the factory at the mock DAG engine implementation before
        // asking it to build one for "testCluster".
        StartupProperties.get().setProperty("dag.engine.impl", MockDAGEngine.class.getName());
        StartupProperties.get().setProperty("execution.service.impl", FalconExecutionService.class.getName());
        dagEngine = Mockito.spy(DAGEngineFactory.getDAGEngine("testCluster"));
        Mockito.when(mockSchedulerService.createRequestBuilder(Mockito.any(NotificationHandler.class),
                Mockito.any(ID.class))).thenCallRealMethod();

        mockCompletionService = Mockito.mock(JobCompletionService.class);
        Mockito.when(mockCompletionService.getName()).thenReturn("JobCompletionService");
        Mockito.when(mockCompletionService.createRequestBuilder(Mockito.any(NotificationHandler.class),
                Mockito.any(ID.class))).thenCallRealMethod();

        Services.get().register(mockTimeService);
        Services.get().register(mockDataService);
        Services.get().register(mockSchedulerService);
        Services.get().register(mockCompletionService);
    }

    /**
     * Round-trips an entity through put/get/update/delete and verifies that
     * duplicate puts and operations on deleted entities raise
     * {@link StateStoreException}.
     */
    @Test
    public void testInsertRetrieveAndUpdate() throws Exception {
        EntityState entityState = getEntityState(EntityType.PROCESS, "process");
        stateStore.putEntity(entityState);
        EntityID entityID = new EntityID(entityState.getEntity());
        EntityState actualEntityState = stateStore.getEntity(entityID);
        Assert.assertEquals(actualEntityState.getEntity(), entityState.getEntity());
        Assert.assertEquals(actualEntityState.getCurrentState(), entityState.getCurrentState());
        // Inserting the same entity twice must fail.
        try {
            stateStore.putEntity(entityState);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e) {
            //no op
        }

        entityState.setCurrentState(EntityState.STATE.SCHEDULED);
        stateStore.updateEntity(entityState);
        actualEntityState = stateStore.getEntity(entityID);
        Assert.assertEquals(actualEntityState.getEntity(), entityState.getEntity());
        Assert.assertEquals(actualEntityState.getCurrentState(), entityState.getCurrentState());
        stateStore.deleteEntity(entityID);
        boolean entityExists = stateStore.entityExists(entityID);
        Assert.assertEquals(entityExists, false);
        // get/update/delete on a deleted entity must all fail.
        try {
            stateStore.getEntity(entityID);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e){
            // no op
        }
        try {
            stateStore.updateEntity(entityState);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e) {
            // no op
        }
        try {
            stateStore.deleteEntity(entityID);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e){
            // no op
        }
    }

    /**
     * Verifies bulk retrieval of entities, both all entities and those
     * filtered by state (all three start as SUBMITTED).
     */
    @Test
    public void testGetEntities() throws Exception {
        EntityState entityState1 = getEntityState(EntityType.PROCESS, "process1");
        EntityState entityState2 = getEntityState(EntityType.PROCESS, "process2");
        EntityState entityState3 = getEntityState(EntityType.FEED, "feed1");

        Collection<EntityState> result = stateStore.getAllEntities();
        Assert.assertEquals(result.size(), 0);

        stateStore.putEntity(entityState1);
        stateStore.putEntity(entityState2);
        stateStore.putEntity(entityState3);

        result = stateStore.getAllEntities();
        Assert.assertEquals(result.size(), 3);

        Collection<Entity> entities = stateStore.getEntities(EntityState.STATE.SUBMITTED);
        Assert.assertEquals(entities.size(), 3);
    }

    /**
     * Round-trips an execution instance through put/get/update/delete and
     * verifies failure cases mirror those of entity operations.
     */
    @Test
    public void testInstanceInsertionAndUpdate() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        storeEntity(EntityType.FEED, "clicksFeed");
        storeEntity(EntityType.FEED, "clicksSummary");
        EntityState entityState = getEntityState(EntityType.PROCESS, "process");
        ExecutionInstance executionInstance = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                System.currentTimeMillis(), "cluster", System.currentTimeMillis());
        InstanceState instanceState = new InstanceState(executionInstance);
        initInstanceState(instanceState);
        stateStore.putExecutionInstance(instanceState);
        InstanceID instanceID = new InstanceID(instanceState.getInstance());

        InstanceState actualInstanceState = stateStore.getExecutionInstance(instanceID);
        Assert.assertEquals(actualInstanceState, instanceState);

        instanceState.setCurrentState(InstanceState.STATE.RUNNING);
        Predicate predicate = new Predicate(Predicate.TYPE.DATA);
        instanceState.getInstance().getAwaitingPredicates().add(predicate);

        stateStore.updateExecutionInstance(instanceState);
        actualInstanceState = stateStore.getExecutionInstance(instanceID);
        Assert.assertEquals(actualInstanceState, instanceState);

        // Duplicate insert must fail.
        try {
            stateStore.putExecutionInstance(instanceState);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e) {
            // no op
        }

        stateStore.deleteExecutionInstance(instanceID);

        // get/delete/update on a deleted instance must all fail.
        try {
            stateStore.getExecutionInstance(instanceID);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e) {
            // no op
        }

        try {
            stateStore.deleteExecutionInstance(instanceID);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e) {
            // no op
        }

        try {
            stateStore.updateExecutionInstance(instanceState);
            Assert.fail("Exception must have been thrown");
        } catch (StateStoreException e) {
            // no op
        }
    }

    /**
     * Exercises the bulk instance queries: per-cluster retrieval, filtering
     * by state, retrieval by entity-cluster ID, last-instance lookup and bulk
     * deletion by entity.
     */
    @Test
    public void testBulkInstanceOperations() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        storeEntity(EntityType.FEED, "clicksFeed");
        storeEntity(EntityType.FEED, "clicksSummary");
        EntityState entityState = getEntityState(EntityType.PROCESS, "process1");
        // instance1 is a minute older than instance2 so ordering is deterministic.
        ExecutionInstance processExecutionInstance1 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                System.currentTimeMillis() - 60000, "cluster1", System.currentTimeMillis() - 60000);
        InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
        instanceState1.setCurrentState(InstanceState.STATE.READY);

        ExecutionInstance processExecutionInstance2 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                System.currentTimeMillis(), "cluster1", System.currentTimeMillis());
        InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
        instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

        ExecutionInstance processExecutionInstance3 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                System.currentTimeMillis(), "cluster2", System.currentTimeMillis());
        InstanceState instanceState3 = new InstanceState(processExecutionInstance3);
        instanceState3.setCurrentState(InstanceState.STATE.READY);

        stateStore.putExecutionInstance(instanceState1);
        stateStore.putExecutionInstance(instanceState2);
        stateStore.putExecutionInstance(instanceState3);

        Collection<InstanceState> actualInstances =
                stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
        Assert.assertEquals(actualInstances.size(), 2);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState1);
        Assert.assertEquals(actualInstances.toArray()[1], instanceState2);

        actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
        Assert.assertEquals(actualInstances.size(), 1);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState3);

        List<InstanceState.STATE> states = new ArrayList<>();
        states.add(InstanceState.STATE.READY);

        actualInstances = stateStore.getExecutionInstances(entityState.getEntity(), "cluster1", states);
        Assert.assertEquals(actualInstances.size(), 1);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState1);

        // Query by entity-cluster ID spans both clusters for READY instances.
        EntityClusterID entityClusterID = new EntityClusterID(entityState.getEntity(), "testCluster");
        actualInstances = stateStore.getExecutionInstances(entityClusterID, states);
        Assert.assertEquals(actualInstances.size(), 2);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState1);
        Assert.assertEquals(actualInstances.toArray()[1], instanceState3);

        states.add(InstanceState.STATE.RUNNING);
        actualInstances = stateStore.getExecutionInstances(entityState.getEntity(), "cluster1", states);
        Assert.assertEquals(actualInstances.size(), 2);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState1);
        Assert.assertEquals(actualInstances.toArray()[1], instanceState2);

        InstanceState lastInstanceState =
                stateStore.getLastExecutionInstance(entityState.getEntity(), "cluster1");
        Assert.assertEquals(lastInstanceState, instanceState2);

        InstanceID instanceKey = new InstanceID(instanceState3.getInstance());
        stateStore.deleteExecutionInstance(instanceKey);

        actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
        Assert.assertEquals(actualInstances.size(), 0);

        actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
        Assert.assertEquals(actualInstances.size(), 2);

        stateStore.putExecutionInstance(instanceState3);
        actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
        Assert.assertEquals(actualInstances.size(), 1);

        // Bulk delete by entity removes instances on every cluster.
        stateStore.deleteExecutionInstances(entityClusterID.getEntityID());
        actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
        Assert.assertEquals(actualInstances.size(), 0);
        actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
        Assert.assertEquals(actualInstances.size(), 0);
    }

    /**
     * Verifies instance retrieval filtered by state and a [start, end) time
     * window, including isolation across clusters.
     */
    @Test
    public void testGetExecutionInstancesWithRange() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        storeEntity(EntityType.FEED, "clicksFeed");
        storeEntity(EntityType.FEED, "clicksSummary");

        long instance1Time = System.currentTimeMillis() - 180000;
        long instance2Time = System.currentTimeMillis();
        EntityState entityState = getEntityState(EntityType.PROCESS, "process1");
        ExecutionInstance processExecutionInstance1 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                instance1Time, "cluster1", instance1Time);
        InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
        instanceState1.setCurrentState(InstanceState.STATE.RUNNING);

        ExecutionInstance processExecutionInstance2 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                instance2Time, "cluster1", instance2Time);
        InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
        instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

        ExecutionInstance processExecutionInstance3 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                instance2Time, "cluster2", instance2Time);
        InstanceState instanceState3 = new InstanceState(processExecutionInstance3);
        instanceState3.setCurrentState(InstanceState.STATE.RUNNING);

        stateStore.putExecutionInstance(instanceState1);
        stateStore.putExecutionInstance(instanceState2);
        stateStore.putExecutionInstance(instanceState3);

        List<InstanceState.STATE> states = new ArrayList<>();
        states.add(InstanceState.STATE.RUNNING);

        Collection<InstanceState> actualInstances = stateStore.getExecutionInstances(
                entityState.getEntity(), "cluster1", states, new DateTime(instance1Time),
                new DateTime(instance1Time + 60000));
        Assert.assertEquals(actualInstances.size(), 1);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState1);

        actualInstances = stateStore.getExecutionInstances(
                entityState.getEntity(), "cluster1", states, new DateTime(instance2Time),
                new DateTime(instance2Time + 60000));
        Assert.assertEquals(actualInstances.size(), 1);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState2);

        // Ensure we can get instances for a different cluster
        actualInstances = stateStore.getExecutionInstances(
                entityState.getEntity(), "cluster2", states, new DateTime(instance2Time),
                new DateTime(instance2Time + 60000));
        Assert.assertEquals(actualInstances.size(), 1);
        Assert.assertEquals(actualInstances.toArray()[0], instanceState3);
    }

    /**
     * Verifies lookup of an execution instance by its workflow-engine
     * external ID.
     */
    @Test
    public void testGetInstanceFromExternalID() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        storeEntity(EntityType.FEED, "clicksFeed");
        storeEntity(EntityType.FEED, "clicksSummary");

        long instance1Time = System.currentTimeMillis() - 180000;
        long instance2Time = System.currentTimeMillis();
        EntityState entityState = getEntityState(EntityType.PROCESS, "processext");
        ExecutionInstance processExecutionInstance1 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                instance1Time, "cluster1", instance1Time);
        processExecutionInstance1.setExternalID("external_id_1");
        InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
        instanceState1.setCurrentState(InstanceState.STATE.RUNNING);

        ExecutionInstance processExecutionInstance2 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                instance2Time, "cluster1", instance2Time);
        processExecutionInstance2.setExternalID("external_id_2");
        InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
        instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

        stateStore.putExecutionInstance(instanceState1);
        stateStore.putExecutionInstance(instanceState2);

        InstanceState actualInstanceState = stateStore.getExecutionInstance("external_id_1");
        Assert.assertEquals(actualInstanceState.getInstance(), processExecutionInstance1);

        actualInstanceState = stateStore.getExecutionInstance("external_id_2");
        Assert.assertEquals(actualInstanceState.getInstance(), processExecutionInstance2);
    }

    /**
     * Verifies that deleting an entity cascades to its execution instances.
     */
    @Test
    public void testCascadingDelete() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        storeEntity(EntityType.FEED, "clicksFeed");
        storeEntity(EntityType.FEED, "clicksSummary");
        EntityState entityState = getEntityState(EntityType.PROCESS, "process1");
        stateStore.putEntity(entityState);
        ExecutionInstance processExecutionInstance1 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                System.currentTimeMillis() - 60000, "cluster1", System.currentTimeMillis() - 60000);
        InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
        instanceState1.setCurrentState(InstanceState.STATE.READY);

        ExecutionInstance processExecutionInstance2 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                System.currentTimeMillis(), "cluster1", System.currentTimeMillis());
        InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
        instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

        stateStore.putExecutionInstance(instanceState1);
        stateStore.putExecutionInstance(instanceState2);

        Collection<InstanceState> instances =
                stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
        Assert.assertEquals(instances.size(), 2);

        // Deleting the entity must also delete its instances.
        stateStore.deleteEntity(new EntityID(entityState.getEntity()));
        deleteEntity(EntityType.PROCESS, "process1");
        instances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
        Assert.assertEquals(instances.size(), 0);
    }

    /**
     * Verifies the per-state instance count summary over a time window.
     */
    @Test
    public void testGetExecutionSummaryWithRange() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        storeEntity(EntityType.FEED, "clicksFeed");
        storeEntity(EntityType.FEED, "clicksSummary");

        long instance1Time = System.currentTimeMillis() - 180000;
        long instance2Time = System.currentTimeMillis();
        EntityState entityState = getEntityState(EntityType.PROCESS, "clicksProcess");
        ExecutionInstance processExecutionInstance1 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                instance1Time, "cluster1", instance1Time);
        InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
        instanceState1.setCurrentState(InstanceState.STATE.RUNNING);

        ExecutionInstance processExecutionInstance2 = BeanMapperUtil.getExecutionInstance(
                entityState.getEntity().getEntityType(), entityState.getEntity(),
                instance2Time, "cluster1", instance2Time);
        InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
        instanceState2.setCurrentState(InstanceState.STATE.SUCCEEDED);

        stateStore.putExecutionInstance(instanceState1);
        stateStore.putExecutionInstance(instanceState2);

        // Each time window should see exactly one instance in its state.
        Map<InstanceState.STATE, Long> summary = stateStore.getExecutionInstanceSummary(
                entityState.getEntity(), "cluster1",
                new DateTime(instance1Time), new DateTime(instance1Time + 60000));
        Assert.assertEquals(summary.size(), 1);
        Assert.assertEquals(summary.get(InstanceState.STATE.RUNNING).longValue(), 1L);

        summary = stateStore.getExecutionInstanceSummary(
                entityState.getEntity(), "cluster1",
                new DateTime(instance2Time), new DateTime(instance2Time + 60000));
        Assert.assertEquals(summary.size(), 1);
        Assert.assertEquals(summary.get(InstanceState.STATE.SUCCEEDED).longValue(), 1L);
    }

    /**
     * Populates an instance state with random but fully-initialized fields
     * (external ID, sequence, start/end times, one awaiting predicate) so it
     * can be persisted.
     */
    private void initInstanceState(InstanceState instanceState) {
        instanceState.setCurrentState(InstanceState.STATE.READY);
        instanceState.getInstance().setExternalID(RandomStringUtils.randomNumeric(6));
        instanceState.getInstance().setInstanceSequence(randomValGenerator.nextInt());
        instanceState.getInstance().setActualStart(new DateTime(System.currentTimeMillis()));
        instanceState.getInstance().setActualEnd(new DateTime(System.currentTimeMillis()));
        List<Predicate> predicates = new ArrayList<>();
        Predicate predicate = new Predicate(Predicate.TYPE.JOB_COMPLETION);
        predicates.add(predicate);
        instanceState.getInstance().setAwaitingPredicates(predicates);
    }

    /**
     * Stores the named entity in the config store and wraps it in a fresh
     * {@link EntityState}.
     *
     * @param entityType the type of entity to create
     * @param name the entity name
     * @return the entity wrapped in a new EntityState
     */
    private EntityState getEntityState(EntityType entityType, String name) throws Exception {
        storeEntity(entityType, name);
        Entity entity = getStore().get(entityType, name);
        Assert.assertNotNull(entity);
        return new EntityState(entity);
    }

    /**
     * Clears all entities (and, via cascade, instances) from the shared store
     * after each test so tests stay independent.
     */
    @AfterTest
    public void cleanUpTables() throws StateStoreException {
        try {
            stateStore.deleteEntities();
        } catch (Exception e) {
            // ignore: nothing to clean up or store already torn down
        }
    }

    @AfterClass
    public void cleanup() throws IOException {
        super.cleanup();
    }
}
package net.contargo.validation.bigdecimal; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import java.math.BigDecimal; import javax.validation.ConstraintValidatorContext; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.text.IsEmptyString.isEmptyOrNullString; /** * Unittest of {@link net.contargo.validation.bigdecimal.BigDecimalConstraintValidator BigDecimalConstraintValidator}. * * @author Tobias Schneider - schneider@synyx.de */ @RunWith(MockitoJUnitRunner.class) public class BigDecimalValidatorUnitTest { private BigDecimalValidator sut; @Mock private ConstraintValidatorContext.ConstraintViolationBuilder constraintViolationBuilderMock; private BigDecimal bigDecimal; private BigDecimalValidationRules bigDecimalValidationRules; @Before public void setUp() { sut = new BigDecimalValidator(); } @Test public void validateMaxDecimalPlacesBorder() { bigDecimal = new BigDecimal("0"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxDecimalPlaces(1).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMaxDecimalPlacesOver() { bigDecimal = new BigDecimal("0"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxDecimalPlaces(2).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMaxDecimalPlacesUnder() { bigDecimal = new BigDecimal("10"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxDecimalPlaces(1).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.decimaloutofrange}"); } @Test public void validateMinDecimalPlacesBorder() { bigDecimal = new 
BigDecimal("0"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minDecimalPlaces(1).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMinDecimalPlacesOver() { bigDecimal = new BigDecimal("100"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minDecimalPlaces(4).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.decimaloutofrange}"); } @Test public void validateMinDecimalPlacesUnder() { bigDecimal = new BigDecimal("100"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minDecimalPlaces(2).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMaxFractionsBorder() { bigDecimal = new BigDecimal("0.00"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxFractionalPlaces(2).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMaxFractionsOver() { bigDecimal = new BigDecimal("0.00"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxFractionalPlaces(3).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMaxFractionsUnder() { bigDecimal = new BigDecimal("0.00"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxFractionalPlaces(1).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.fractionaloutofrange}"); } @Test public void validateMinValueZeroBorder() { bigDecimal = new BigDecimal("0.0"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minValue(0).build(); 
BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMinValueZeroBorder2() { bigDecimal = new BigDecimal("0.00000000000000"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minValue(0).maxFractionalPlaces(100) .build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMinValueBorder() { bigDecimal = new BigDecimal("0.01"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minValue(0.01).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMinValueOver() { bigDecimal = new BigDecimal("0.01"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minValue(1).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.toosmall}"); } @Test public void validateMinValueUnder() { bigDecimal = new BigDecimal("0.01"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minValue(0.00).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMaxValueBorder() { bigDecimal = new BigDecimal("123.02"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxValue(123.02).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validateMaxValueUnder() { bigDecimal = new BigDecimal("1.00"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxValue(0.00).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.toobig}"); } @Test public void validateMaxValueOver() { bigDecimal = new 
BigDecimal("100.00"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxValue(124.12).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validatePowerOfMaxDecimalAndValueBorder() { bigDecimal = new BigDecimal("1E8"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxValue(100000000).maxDecimalPlaces(9) .build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validatePowerOfMaxDecimalUnder() { bigDecimal = new BigDecimal("1E8"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxDecimalPlaces(8).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.decimaloutofrange}"); } @Test public void validatePowerOfMaxDecimalOver() { bigDecimal = new BigDecimal("1E8"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxDecimalPlaces(10).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validatePowerOfMinDecimalAndValueBorder() { bigDecimal = new BigDecimal("1E2"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minValue(100).maxDecimalPlaces(3).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validatePowerOfMinDecimalUnder() { bigDecimal = new BigDecimal("1E3"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minDecimalPlaces(2).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validatePowerOfMinDecimalOver() { bigDecimal = new BigDecimal("1E2"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().minDecimalPlaces(4).build(); 
BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.decimaloutofrange}"); } @Test public void validatePowerNegativeMaxFractionsBorder() { bigDecimal = new BigDecimal("1E-88"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxFractionalPlaces(88).build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void validatePowerNegativeMaxFractionsOver() { bigDecimal = new BigDecimal("1E-88"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isNotValid(result, "{net.contargo.validation.bigdecimal.fractionaloutofrange}"); } @Test public void validatePowerNegativeMaxFractionsUnder() { bigDecimal = new BigDecimal("1E-1"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); isValid(result); } @Test public void isNullValue() { bigDecimal = null; bigDecimalValidationRules = new BigDecimalValidationRules.Builder().build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); assertThat(result.isValid(), is(false)); assertThat(result.getFailMessage(), is("{net.contargo.validation.bigdecimal.null}")); } @Test public void isFractionalDisabled() { sut = new BigDecimalValidator(false); bigDecimal = new BigDecimal("100.03"); bigDecimalValidationRules = new BigDecimalValidationRules.Builder().maxFractionalPlaces(0).maxValue(100) .build(); BigDecimalValidationResult result = sut.validate(bigDecimal, bigDecimalValidationRules); assertThat(result.isValid(), is(true)); } private void isValid(BigDecimalValidationResult result) { assertThat(result.isValid(), is(true)); assertThat(result.getFailMessage(), isEmptyOrNullString()); } private void 
isNotValid(BigDecimalValidationResult result, String expectedFailureMessage) { assertThat(result.isValid(), is(false)); assertThat(result.getFailMessage(), is(expectedFailureMessage)); } }
package com.google.sampling.experiential.server; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.codec.digest.DigestUtils; import org.joda.time.DateTimeZone; import com.google.appengine.api.ThreadManager; import com.google.common.base.Strings; import com.google.sampling.experiential.gcs.GCSFetcher; import com.google.sampling.experiential.model.Event; import com.google.sampling.experiential.server.stats.usage.UsageStatsBlobWriter; import com.google.sampling.experiential.shared.EventDAO; import com.google.sampling.experiential.shared.WhatDAO; import com.pacoapp.paco.shared.model2.ExperimentDAO; /** * Setup a job as a background thread to run a report. * This runs in a backend instance. * Also, update the ReportJobStatus. * * The reportservlet will ask this to kick off a job. * It will also ask it for the status, which will include pending, completed, failed. * If completed, the client can then access the report that was generated at a location. 
* * @author bobevans * */ public class ReportJobExecutor { private static final Logger log = Logger.getLogger(ReportJobExecutor.class.getName()); private static ReportJobExecutor instance; public static ReportJobExecutor getInstance() { if (instance == null) { instance = new ReportJobExecutor(); } return instance; } private ReportJobStatusManager statusMgr; public ReportJobExecutor() { super(); statusMgr = new ReportJobStatusManager(); } public String runReportJob(final String requestorEmail, final DateTimeZone timeZoneForClient, final List<Query> query, final boolean anon, final String reportFormat, final String originalQuery, final int limit, final String cursor, final boolean includePhotos,final Float pacoProtocol, final boolean fullBlobAddress) { // TODO get a real id function for jobs final String jobId = DigestUtils.md5Hex(requestorEmail + Long.toString(System.currentTimeMillis())); log.info("In runReportJob for job: " + jobId); statusMgr.startReport(requestorEmail, jobId); final ClassLoader cl = getClass().getClassLoader(); final Thread thread2 = ThreadManager.createBackgroundThread(new Runnable() { @Override public void run() { log.info("ReportJobExecutor running"); Thread.currentThread().setContextClassLoader(cl); try { String location = doJob(requestorEmail, timeZoneForClient, query, anon, jobId, reportFormat, originalQuery, limit, cursor, includePhotos, pacoProtocol, fullBlobAddress); statusMgr.completeReport(requestorEmail, jobId, location); } catch (Throwable e) { statusMgr.failReport(requestorEmail, jobId, e.getClass() + "." 
+ e.getMessage()); log.severe("Could not run job: " + e.getMessage()); e.printStackTrace(); } } }); thread2.start(); log.info("Leaving runReportJob"); return jobId; } protected String doJob(String requestorEmail, DateTimeZone timeZoneForClient, List<Query> query, boolean anon, String jobId, String reportFormat, String originalQuery, int limit, String cursor, boolean includePhotos, Float pacoProtocol, boolean fullBlobAddress) throws IOException { log.info("starting doJob"); if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("stats")) { log.info("Running stats report for job: " + jobId); return runStatsReport(jobId, timeZoneForClient, requestorEmail); } String experimentId = null; for (Query query2 : query) { if (query2.getKey().equals("experimentId")) { experimentId = query2.getValue(); } } if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("csv")) { // TODO - get rid of the offset and limit params and rewrite the eventretriever call to loop until all results are retrieved. log.info("Getting events for job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsInBatchesOneBatch(query, requestorEmail, timeZoneForClient, limit, cursor); //EventRetriever.sortEvents(events); log.info("Got events for job: " + jobId); return generateCSVReport(anon, jobId, experimentId, eventQueryResultPair, pacoProtocol, includePhotos); } else if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("json")) { // TODO - get rid of the offset and limit params and rewrite the eventretriever call to loop until all results are retrieved. 
log.info("Getting events for job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsInBatchesOneBatch(query, requestorEmail, timeZoneForClient, limit, cursor); //EventRetriever.sortEvents(events); log.info("Got events for job: " + jobId); return generateJsonReport(anon, jobId, experimentId, eventQueryResultPair, timeZoneForClient, includePhotos, pacoProtocol, fullBlobAddress); } else if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("photozip")) { // TODO - get rid of the offset and limit params and rewrite the eventretriever call to loop until all results are retrieved. log.info("Getting events for job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsInBatches(query, requestorEmail, timeZoneForClient, limit, cursor); //EventRetriever.sortEvents(events); log.info("Got events for job: " + jobId); return generatePhotoZip(jobId, experimentId, eventQueryResultPair, anon, timeZoneForClient); } else { // TODO - get rid of the offset and limit params and rewrite the eventretriever call to loop until all results are retrieved. 
log.info("Getting events for job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsInBatches(query, requestorEmail, timeZoneForClient, limit, cursor); //EventRetriever.sortEvents(events); log.info("Got events for job: " + jobId); return generateHtmlReport(timeZoneForClient, anon, jobId, experimentId, eventQueryResultPair, originalQuery, requestorEmail, pacoProtocol, includePhotos, fullBlobAddress); } } public String runReportJobExperimental(final String requestorEmail, final DateTimeZone timeZoneForClient, final List<Query> query, final boolean anon, final String reportFormat, final String originalQuery, final boolean includePhotos, final Float pacoProtocol, final boolean fullBlobAddress) { // TODO get a real id function for jobs final String jobId = DigestUtils.md5Hex(requestorEmail + Long.toString(System.currentTimeMillis())); log.info("In runReportJobExperimental for job: " + jobId); statusMgr.startReport(requestorEmail, jobId); final ClassLoader cl = getClass().getClassLoader(); final Thread thread2 = ThreadManager.createBackgroundThread(new Runnable() { @Override public void run() { log.info("ReportJobExecutor Experimental running"); Thread.currentThread().setContextClassLoader(cl); try { String location = doJobExperimental(requestorEmail, timeZoneForClient, query, anon, jobId, reportFormat, originalQuery, includePhotos, pacoProtocol, fullBlobAddress); statusMgr.completeReport(requestorEmail, jobId, location); } catch (Throwable e) { statusMgr.failReport(requestorEmail, jobId, e.getClass() + "." 
+ e.getMessage()); log.severe("Could not run job: " + e.getMessage()); log.log(Level.SEVERE, "Could not run job", e); e.printStackTrace(); } } }); thread2.start(); log.info("Leaving runReportJob"); return jobId; } protected String doJobExperimental(String requestorEmail, DateTimeZone timeZoneForClient, List<Query> query, boolean anon, String jobId, String reportFormat, String originalQuery, boolean includePhotos, Float pacoProtocol, boolean fullBlobAddress) throws IOException { log.info("starting doJob experimental"); String experimentId = null; for (Query query2 : query) { if (query2.getKey().equals("experimentId")) { experimentId = query2.getValue(); } } log.info("Getting events for job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsFromLowLevelDS(query, requestorEmail, timeZoneForClient); // EventRetriever.sortEvents(events); log.info("Got events for job: " + jobId); if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("csv2")) { return generateCSVReport(anon, jobId, experimentId, eventQueryResultPair, pacoProtocol, includePhotos); } else if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("json2")) { return generateJsonReport(anon, jobId, experimentId, eventQueryResultPair, timeZoneForClient, includePhotos, pacoProtocol, fullBlobAddress); } else if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("html2")) { return generateHtmlReport(timeZoneForClient, anon, jobId, experimentId, eventQueryResultPair, originalQuery, requestorEmail, pacoProtocol, includePhotos, fullBlobAddress); } return null; } protected String doJobExperimentalSplitLargeFilesAndCompose(String requestorEmail, DateTimeZone timeZoneForClient, List<Query> query, boolean anon, String jobId, String reportFormat, String originalQuery, boolean includePhotos, Float pacoProtocol, boolean fullBlobAddress) throws IOException { log.info("starting doJob split large files"); String experimentId = null; for (Query query2 : 
query) { if (query2.getKey().equals("experimentId")) { experimentId = query2.getValue(); } } if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("csv2")) { log.info("Getting events for csv job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsFromLowLevelDS(query, requestorEmail, timeZoneForClient); log.info("Got events for job: " + jobId); return generateCSVReport(anon, jobId, experimentId, eventQueryResultPair, pacoProtocol, includePhotos); } else if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("json2")) { log.info("Getting events for json job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsFromLowLevelDS(query, requestorEmail, timeZoneForClient); log.info("Got events for job: " + jobId); return generateJsonReport(anon, jobId, experimentId, eventQueryResultPair, timeZoneForClient, includePhotos, pacoProtocol, fullBlobAddress); } else if (!Strings.isNullOrEmpty(reportFormat) && reportFormat.equals("html2")) { return generateHtmlReportSplitLargeFiles(requestorEmail, timeZoneForClient, query, anon, jobId, originalQuery, experimentId, pacoProtocol, includePhotos, fullBlobAddress); } return null; } private String generateHtmlReportSplitLargeFiles(String requestorEmail, DateTimeZone timeZoneForClient, List<Query> query, boolean anon, String jobId, String originalQuery, String experimentId, Float pacoProtocol, boolean inlineBlobs, boolean fullBlobAddress) throws IOException { log.info("Getting events for html job: " + jobId); EventQueryResultPair eventQueryResultPair = EventRetriever.getInstance().getEventsFromLowLevelDS(query, requestorEmail, timeZoneForClient); log.info("Got events for job: " + jobId); // if (!Strings.isNullOrEmpty(experimentId)) { // String eodFile = generateEODHtml(anon, jobId, experimentId, eventQueryResultPair, timeZoneForClient.getID()); // if (eodFile != null) { // return eodFile; // } // } return new 
HtmlBlobWriter().writeNormalExperimentEventsAsHtml(anon, eventQueryResultPair, jobId, experimentId, timeZoneForClient.getID(), originalQuery, requestorEmail, pacoProtocol, inlineBlobs, fullBlobAddress); } private String generateJsonReport(boolean anon, String jobId, String experimentId, EventQueryResultPair eventQueryResultPair, DateTimeZone timeZoneForClient, boolean includePhotos, Float pacoProtocol, boolean fullBlobAddress) throws IOException { return new JSONBlobWriter().writeEventsAsJSON(anon, eventQueryResultPair, jobId, timeZoneForClient, includePhotos, pacoProtocol, fullBlobAddress); } // for json query - dup of frontend version // private EventQueryResultPair getEventsWithQuery(HttpServletRequest req, // List<com.google.sampling.experiential.server.Query> queries, // int limit, String cursor) { // User whoFromLogin = AuthUtil.getWhoFromLogin(); // return EventRetriever.getInstance().getEventsInBatches(queries, whoFromLogin.getEmail().toLowerCase(), // TimeUtil.getTimeZoneForClient(req), limit, cursor); // } private String runStatsReport(String jobId, DateTimeZone timeZoneForClient, String requestorEmail) throws IOException { String tz = timeZoneForClient != null ? 
timeZoneForClient.getID() : null; return new UsageStatsBlobWriter().writeStatsAsJson(jobId, tz, requestorEmail); } private String generatePhotoZip(String jobId, String experimentId, EventQueryResultPair eventQueryResultPair, boolean anon, DateTimeZone timeZoneForClient) { return new PhotoZipBlobWriter().writePhotoZipFile(anon, experimentId, eventQueryResultPair, jobId, timeZoneForClient.getID()); } private String generateHtmlReport(DateTimeZone timeZoneForClient, boolean anon, String jobId, String experimentId, EventQueryResultPair eventQueryResultPair, String originalQuery, String requestorEmail, Float pacoProtocol, boolean inlineBlobs, boolean fullBlobAddress) throws IOException { if (!Strings.isNullOrEmpty(experimentId)) { String eodFile = generateEODHtml(anon, jobId, experimentId, eventQueryResultPair, timeZoneForClient.getID(), pacoProtocol); if (eodFile != null) { return eodFile; } } return new HtmlBlobWriter().writeNormalExperimentEventsAsHtml(anon, eventQueryResultPair, jobId, experimentId, timeZoneForClient.getID(), originalQuery, requestorEmail, pacoProtocol, inlineBlobs, fullBlobAddress); } private String generateEODHtml(boolean anon, String jobId, String experimentId, EventQueryResultPair eventQueryResultPair, String timeZoneForClient, Float pacoProtocol) throws IOException { log.info("Checking referred experiment for job: " + jobId); ExperimentDAO referredExperiment = getReferredExperiment(experimentId); if (referredExperiment != null) { List<EventDAO> eodEventDAOs = EventRetriever.convertEventsToDAOs(eventQueryResultPair.getEvents()); List<EventDAO> dailyPingEodEventDAOs = new EndOfDayEventProcessor().breakEodResponsesIntoIndividualDailyEventResponses(eodEventDAOs); return new HtmlBlobWriter().writeEndOfDayExperimentEventsAsHtml(anon, jobId, experimentId, dailyPingEodEventDAOs, timeZoneForClient, pacoProtocol); } return null; } private ExperimentDAO getReferredExperiment(String experimentId) { return 
ExperimentServiceFactory.getExperimentService().getReferredExperiment(Long.parseLong(experimentId)); } private String generateCSVReport(boolean anon, String jobId, String experimentId, EventQueryResultPair eventQueryResultPair, Float pacoProtocol, boolean inlineBlobs) throws IOException { List<Event> events = eventQueryResultPair.getEvents(); for (Iterator iterator = events.iterator(); iterator.hasNext();) { Event event2 = (Event) iterator.next(); if (inlineBlobs) { // legacy GAE DS blob storage List<WhatDAO> whatMap = EventRetriever.convertToWhatDAOs(event2.getWhat()); EventJsonDownloader.fillInResponsesWithEncodedBlobData(event2, whatMap); // new GCS blob storage GCSFetcher.fillInResponsesWithEncodedBlobDataFromGCS(whatMap); } } if (!Strings.isNullOrEmpty(experimentId)) { String eodFile = generateEODCSV(anon, jobId, experimentId, events, pacoProtocol); if (eodFile != null) { return eodFile; } } List<EventDAO> eodEventDAOs = EventRetriever.convertEventsToDAOs(events); try { Long experimentIdLong = Long.parseLong(experimentId); ExperimentService es = ExperimentServiceFactory.getExperimentService(); ExperimentDAO experiment = es.getExperiment(experimentIdLong); return new CSVBlobWriter().writeNormalExperimentEventsAsCSV(experiment, eodEventDAOs, jobId, anon, pacoProtocol); } catch (NumberFormatException e) { log.warning("ExperimentId is not a long: " + experimentId); throw e; } } private String generateEODCSV(boolean anon, String jobId, String experimentId, List<Event> events, Float pacoProtocol) throws IOException { ExperimentDAO referredExperiment = getReferredExperiment(experimentId); if (referredExperiment != null) { List<EventDAO> eodEventDAOs = EventRetriever.convertEventsToDAOs(events); List<EventDAO> dailyPingEodEventDAOs = new EndOfDayEventProcessor().breakEodResponsesIntoIndividualDailyEventResponses(eodEventDAOs); return new CSVBlobWriter().writeEndOfDayExperimentEventsAsCSV(anon, dailyPingEodEventDAOs, jobId, pacoProtocol); } return null; } }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.test; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.flowable.bpmn.model.Activity; import org.flowable.bpmn.model.BoundaryEvent; import org.flowable.bpmn.model.BpmnModel; import org.flowable.bpmn.model.BusinessRuleTask; import org.flowable.bpmn.model.CallActivity; import org.flowable.bpmn.model.CancelEventDefinition; import org.flowable.bpmn.model.CompensateEventDefinition; import org.flowable.bpmn.model.EndEvent; import org.flowable.bpmn.model.ErrorEventDefinition; import org.flowable.bpmn.model.EventGateway; import org.flowable.bpmn.model.EventSubProcess; import org.flowable.bpmn.model.ExclusiveGateway; import org.flowable.bpmn.model.InclusiveGateway; import org.flowable.bpmn.model.IntermediateCatchEvent; import org.flowable.bpmn.model.ManualTask; import org.flowable.bpmn.model.MessageEventDefinition; import org.flowable.bpmn.model.ParallelGateway; import org.flowable.bpmn.model.ReceiveTask; import org.flowable.bpmn.model.ScriptTask; import org.flowable.bpmn.model.SendTask; import org.flowable.bpmn.model.ServiceTask; import org.flowable.bpmn.model.Signal; import org.flowable.bpmn.model.SignalEventDefinition; import org.flowable.bpmn.model.StartEvent; import org.flowable.bpmn.model.SubProcess; import org.flowable.bpmn.model.Task; import org.flowable.bpmn.model.ThrowEvent; import 
org.flowable.bpmn.model.TimerEventDefinition; import org.flowable.bpmn.model.Transaction; import org.flowable.bpmn.model.UserTask; import org.flowable.engine.common.api.delegate.Expression; import org.flowable.engine.impl.bpmn.behavior.AbstractBpmnActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.AdhocSubProcessActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.BoundaryCancelEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.BoundaryCompensateEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.BoundaryEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.BoundaryMessageEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.BoundarySignalEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.BoundaryTimerEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.CallActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.CancelEndEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ErrorEndEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.EventBasedGatewayActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.EventSubProcessActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.EventSubProcessErrorStartEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.EventSubProcessMessageStartEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.EventSubProcessSignalStartEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.EventSubProcessTimerStartEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ExclusiveGatewayActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.InclusiveGatewayActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.IntermediateCatchEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.IntermediateCatchMessageEventActivityBehavior; import 
org.flowable.engine.impl.bpmn.behavior.IntermediateCatchSignalEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.IntermediateCatchTimerEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.IntermediateThrowCompensationEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.IntermediateThrowNoneEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.IntermediateThrowSignalEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.MailActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ManualTaskActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.NoneEndEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.NoneStartEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ParallelGatewayActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ParallelMultiInstanceBehavior; import org.flowable.engine.impl.bpmn.behavior.ReceiveTaskActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ScriptTaskActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.SequentialMultiInstanceBehavior; import org.flowable.engine.impl.bpmn.behavior.ServiceTaskDelegateExpressionActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ServiceTaskExpressionActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.ShellActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.SubProcessActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.TaskActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.TerminateEndEventActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.TransactionActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.UserTaskActivityBehavior; import org.flowable.engine.impl.bpmn.behavior.WebServiceActivityBehavior; import org.flowable.engine.impl.bpmn.helper.ClassDelegate; import org.flowable.engine.impl.bpmn.parser.FieldDeclaration; import 
org.flowable.engine.impl.bpmn.parser.factory.AbstractBehaviorFactory;
import org.flowable.engine.impl.bpmn.parser.factory.ActivityBehaviorFactory;
import org.flowable.engine.impl.delegate.ActivityBehavior;
import org.flowable.engine.impl.el.FixedValue;
import org.flowable.engine.impl.test.NoOpServiceTask;

/**
 * {@link ActivityBehaviorFactory} used in tests that delegates every call to the
 * factory that was installed when the process engine was created, unless a mock
 * has been registered via the "Mock support" methods at the bottom of this class.
 * Mocks can replace class-delegate service tasks with another class or with a
 * no-op implementation ({@link NoOpServiceTask}), either per task id, per
 * implementation class name, or globally.
 *
 * @author Joram Barrez
 */
public class TestActivityBehaviorFactory extends AbstractBehaviorFactory implements ActivityBehaviorFactory {

    /**
     * The ActivityBehaviorFactory that was constructed when the process engine was
     * created. This class delegates to this instance, unless some mocking has been
     * defined.
     */
    protected ActivityBehaviorFactory wrappedActivityBehaviorFactory;

    // When true, every class-delegate service task is replaced by a NoOpServiceTask.
    protected boolean allServiceTasksNoOp;

    // original implementation class name -> mock class name used instead
    protected Map<String, String> mockedClassDelegatesMapping = new HashMap<>();

    // ids of service tasks to replace with NoOpServiceTask
    protected Set<String> noOpServiceTaskIds = new HashSet<>();

    // implementation class names of service tasks to replace with NoOpServiceTask
    protected Set<String> noOpServiceTaskClassNames = new HashSet<>();

    public TestActivityBehaviorFactory() {
    }

    public TestActivityBehaviorFactory(ActivityBehaviorFactory wrappedActivityBehaviorFactory) {
        this.wrappedActivityBehaviorFactory = wrappedActivityBehaviorFactory;
    }

    public ActivityBehaviorFactory getWrappedActivityBehaviorFactory() {
        return wrappedActivityBehaviorFactory;
    }

    public void setWrappedActivityBehaviorFactory(ActivityBehaviorFactory wrappedActivityBehaviorFactory) {
        this.wrappedActivityBehaviorFactory = wrappedActivityBehaviorFactory;
    }

    // Plain delegation: every method below (except createClassDelegateServiceTask)
    // simply forwards to the wrapped factory. //////////////////////////////////

    @Override
    public NoneStartEventActivityBehavior createNoneStartEventActivityBehavior(StartEvent startEvent) {
        return wrappedActivityBehaviorFactory.createNoneStartEventActivityBehavior(startEvent);
    }

    @Override
    public TaskActivityBehavior createTaskActivityBehavior(Task task) {
        return wrappedActivityBehaviorFactory.createTaskActivityBehavior(task);
    }

    @Override
    public ManualTaskActivityBehavior createManualTaskActivityBehavior(ManualTask manualTask) {
        return wrappedActivityBehaviorFactory.createManualTaskActivityBehavior(manualTask);
    }

    @Override
    public ReceiveTaskActivityBehavior createReceiveTaskActivityBehavior(ReceiveTask receiveTask) {
        return wrappedActivityBehaviorFactory.createReceiveTaskActivityBehavior(receiveTask);
    }

    @Override
    public UserTaskActivityBehavior createUserTaskActivityBehavior(UserTask userTask) {
        return wrappedActivityBehaviorFactory.createUserTaskActivityBehavior(userTask);
    }

    /**
     * The one method where mocking is applied. Resolution order:
     * 1) no-op replacement (global flag, task id match, or implementation class match),
     * 2) registered class-delegate mock for this implementation,
     * 3) fall through to the wrapped factory.
     */
    @Override
    public ClassDelegate createClassDelegateServiceTask(ServiceTask serviceTask) {

        if (allServiceTasksNoOp || noOpServiceTaskIds.contains(serviceTask.getId()) || noOpServiceTaskClassNames.contains(serviceTask.getImplementation())) {
            return createNoOpServiceTask(serviceTask);

        } else if (serviceTask.getImplementation() != null && mockedClassDelegatesMapping.containsKey(serviceTask.getImplementation())) {
            return new ClassDelegate(mockedClassDelegatesMapping.get(serviceTask.getImplementation()), createFieldDeclarations(serviceTask.getFieldExtensions()));
        }

        return wrappedActivityBehaviorFactory.createClassDelegateServiceTask(serviceTask);
    }

    // Builds a NoOpServiceTask delegate; the original implementation class name is
    // injected into the "name" field so tests can still see which task ran.
    private ClassDelegate createNoOpServiceTask(ServiceTask serviceTask) {
        List<FieldDeclaration> fieldDeclarations = new ArrayList<>();
        fieldDeclarations.add(new FieldDeclaration("name", Expression.class.getName(), new FixedValue(serviceTask.getImplementation())));
        return new ClassDelegate(NoOpServiceTask.class, fieldDeclarations);
    }

    @Override
    public ServiceTaskDelegateExpressionActivityBehavior createServiceTaskDelegateExpressionActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createServiceTaskDelegateExpressionActivityBehavior(serviceTask);
    }

    @Override
    public ServiceTaskExpressionActivityBehavior createServiceTaskExpressionActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createServiceTaskExpressionActivityBehavior(serviceTask);
    }

    @Override
    public WebServiceActivityBehavior createWebServiceActivityBehavior(ServiceTask serviceTask, BpmnModel bpmnModel) {
        return wrappedActivityBehaviorFactory.createWebServiceActivityBehavior(serviceTask, bpmnModel);
    }

    @Override
    public WebServiceActivityBehavior createWebServiceActivityBehavior(SendTask sendTask, BpmnModel bpmnModel) {
        return wrappedActivityBehaviorFactory.createWebServiceActivityBehavior(sendTask, bpmnModel);
    }

    @Override
    public MailActivityBehavior createMailActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createMailActivityBehavior(serviceTask);
    }

    @Override
    public MailActivityBehavior createMailActivityBehavior(SendTask sendTask) {
        return wrappedActivityBehaviorFactory.createMailActivityBehavior(sendTask);
    }

    @Override
    public ActivityBehavior createDmnActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createDmnActivityBehavior(serviceTask);
    }

    @Override
    public ActivityBehavior createDmnActivityBehavior(SendTask sendTask) {
        return wrappedActivityBehaviorFactory.createDmnActivityBehavior(sendTask);
    }

    @Override
    public ActivityBehavior createMuleActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createMuleActivityBehavior(serviceTask);
    }

    @Override
    public ActivityBehavior createMuleActivityBehavior(SendTask sendTask) {
        return wrappedActivityBehaviorFactory.createMuleActivityBehavior(sendTask);
    }

    @Override
    public ActivityBehavior createCamelActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createCamelActivityBehavior(serviceTask);
    }

    @Override
    public ActivityBehavior createCamelActivityBehavior(SendTask sendTask) {
        return wrappedActivityBehaviorFactory.createCamelActivityBehavior(sendTask);
    }

    @Override
    public ShellActivityBehavior createShellActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createShellActivityBehavior(serviceTask);
    }

    @Override
    public ActivityBehavior createHttpActivityBehavior(ServiceTask serviceTask) {
        return wrappedActivityBehaviorFactory.createHttpActivityBehavior(serviceTask);
    }

    @Override
    public ActivityBehavior createBusinessRuleTaskActivityBehavior(BusinessRuleTask businessRuleTask) {
        return wrappedActivityBehaviorFactory.createBusinessRuleTaskActivityBehavior(businessRuleTask);
    }

    @Override
    public ScriptTaskActivityBehavior createScriptTaskActivityBehavior(ScriptTask scriptTask) {
        return wrappedActivityBehaviorFactory.createScriptTaskActivityBehavior(scriptTask);
    }

    @Override
    public ExclusiveGatewayActivityBehavior createExclusiveGatewayActivityBehavior(ExclusiveGateway exclusiveGateway) {
        return wrappedActivityBehaviorFactory.createExclusiveGatewayActivityBehavior(exclusiveGateway);
    }

    @Override
    public ParallelGatewayActivityBehavior createParallelGatewayActivityBehavior(ParallelGateway parallelGateway) {
        return wrappedActivityBehaviorFactory.createParallelGatewayActivityBehavior(parallelGateway);
    }

    @Override
    public InclusiveGatewayActivityBehavior createInclusiveGatewayActivityBehavior(InclusiveGateway inclusiveGateway) {
        return wrappedActivityBehaviorFactory.createInclusiveGatewayActivityBehavior(inclusiveGateway);
    }

    @Override
    public EventBasedGatewayActivityBehavior createEventBasedGatewayActivityBehavior(EventGateway eventGateway) {
        return wrappedActivityBehaviorFactory.createEventBasedGatewayActivityBehavior(eventGateway);
    }

    @Override
    public SequentialMultiInstanceBehavior createSequentialMultiInstanceBehavior(Activity activity, AbstractBpmnActivityBehavior innerActivityBehavior) {
        return wrappedActivityBehaviorFactory.createSequentialMultiInstanceBehavior(activity, innerActivityBehavior);
    }

    @Override
    public ParallelMultiInstanceBehavior createParallelMultiInstanceBehavior(Activity activity, AbstractBpmnActivityBehavior innerActivityBehavior) {
        return wrappedActivityBehaviorFactory.createParallelMultiInstanceBehavior(activity, innerActivityBehavior);
    }

    @Override
    public SubProcessActivityBehavior createSubprocessActivityBehavior(SubProcess subProcess) {
        return wrappedActivityBehaviorFactory.createSubprocessActivityBehavior(subProcess);
    }

    @Override
    public EventSubProcessActivityBehavior createEventSubprocessActivityBehavior(EventSubProcess eventSubProcess) {
        return wrappedActivityBehaviorFactory.createEventSubprocessActivityBehavior(eventSubProcess);
    }

    @Override
    public EventSubProcessErrorStartEventActivityBehavior createEventSubProcessErrorStartEventActivityBehavior(StartEvent startEvent) {
        return wrappedActivityBehaviorFactory.createEventSubProcessErrorStartEventActivityBehavior(startEvent);
    }

    @Override
    public EventSubProcessMessageStartEventActivityBehavior createEventSubProcessMessageStartEventActivityBehavior(StartEvent startEvent, MessageEventDefinition messageEventDefinition) {
        return wrappedActivityBehaviorFactory.createEventSubProcessMessageStartEventActivityBehavior(startEvent, messageEventDefinition);
    }

    @Override
    public EventSubProcessSignalStartEventActivityBehavior createEventSubProcessSignalStartEventActivityBehavior(StartEvent startEvent, SignalEventDefinition signalEventDefinition, Signal signal) {
        return wrappedActivityBehaviorFactory.createEventSubProcessSignalStartEventActivityBehavior(startEvent, signalEventDefinition, signal);
    }

    @Override
    public EventSubProcessTimerStartEventActivityBehavior createEventSubProcessTimerStartEventActivityBehavior(StartEvent startEvent, TimerEventDefinition timerEventDefinition) {
        return wrappedActivityBehaviorFactory.createEventSubProcessTimerStartEventActivityBehavior(startEvent, timerEventDefinition);
    }

    @Override
    public AdhocSubProcessActivityBehavior createAdhocSubprocessActivityBehavior(SubProcess subProcess) {
        return wrappedActivityBehaviorFactory.createAdhocSubprocessActivityBehavior(subProcess);
    }

    @Override
    public CallActivityBehavior createCallActivityBehavior(CallActivity callActivity) {
        return wrappedActivityBehaviorFactory.createCallActivityBehavior(callActivity);
    }

    @Override
    public TransactionActivityBehavior createTransactionActivityBehavior(Transaction transaction) {
        return wrappedActivityBehaviorFactory.createTransactionActivityBehavior(transaction);
    }

    @Override
    public IntermediateCatchEventActivityBehavior createIntermediateCatchEventActivityBehavior(IntermediateCatchEvent intermediateCatchEvent) {
        return wrappedActivityBehaviorFactory.createIntermediateCatchEventActivityBehavior(intermediateCatchEvent);
    }

    @Override
    public IntermediateCatchMessageEventActivityBehavior createIntermediateCatchMessageEventActivityBehavior(IntermediateCatchEvent intermediateCatchEvent, MessageEventDefinition messageEventDefinition) {
        return wrappedActivityBehaviorFactory.createIntermediateCatchMessageEventActivityBehavior(intermediateCatchEvent, messageEventDefinition);
    }

    @Override
    public IntermediateCatchTimerEventActivityBehavior createIntermediateCatchTimerEventActivityBehavior(IntermediateCatchEvent intermediateCatchEvent, TimerEventDefinition timerEventDefinition) {
        return wrappedActivityBehaviorFactory.createIntermediateCatchTimerEventActivityBehavior(intermediateCatchEvent, timerEventDefinition);
    }

    @Override
    public IntermediateCatchSignalEventActivityBehavior createIntermediateCatchSignalEventActivityBehavior(IntermediateCatchEvent intermediateCatchEvent, SignalEventDefinition signalEventDefinition, Signal signal) {
        return wrappedActivityBehaviorFactory.createIntermediateCatchSignalEventActivityBehavior(intermediateCatchEvent, signalEventDefinition, signal);
    }

    @Override
    public IntermediateThrowNoneEventActivityBehavior createIntermediateThrowNoneEventActivityBehavior(ThrowEvent throwEvent) {
        return wrappedActivityBehaviorFactory.createIntermediateThrowNoneEventActivityBehavior(throwEvent);
    }

    @Override
    public IntermediateThrowSignalEventActivityBehavior createIntermediateThrowSignalEventActivityBehavior(ThrowEvent throwEvent, SignalEventDefinition signalEventDefinition, Signal signal) {
        return wrappedActivityBehaviorFactory.createIntermediateThrowSignalEventActivityBehavior(throwEvent, signalEventDefinition, signal);
    }

    @Override
    public IntermediateThrowCompensationEventActivityBehavior createIntermediateThrowCompensationEventActivityBehavior(ThrowEvent throwEvent, CompensateEventDefinition compensateEventDefinition) {
        return wrappedActivityBehaviorFactory.createIntermediateThrowCompensationEventActivityBehavior(throwEvent, compensateEventDefinition);
    }

    @Override
    public NoneEndEventActivityBehavior createNoneEndEventActivityBehavior(EndEvent endEvent) {
        return wrappedActivityBehaviorFactory.createNoneEndEventActivityBehavior(endEvent);
    }

    @Override
    public ErrorEndEventActivityBehavior createErrorEndEventActivityBehavior(EndEvent endEvent, ErrorEventDefinition errorEventDefinition) {
        return wrappedActivityBehaviorFactory.createErrorEndEventActivityBehavior(endEvent, errorEventDefinition);
    }

    @Override
    public CancelEndEventActivityBehavior createCancelEndEventActivityBehavior(EndEvent endEvent) {
        return wrappedActivityBehaviorFactory.createCancelEndEventActivityBehavior(endEvent);
    }

    @Override
    public TerminateEndEventActivityBehavior createTerminateEndEventActivityBehavior(EndEvent endEvent) {
        return wrappedActivityBehaviorFactory.createTerminateEndEventActivityBehavior(endEvent);
    }

    @Override
    public BoundaryEventActivityBehavior createBoundaryEventActivityBehavior(BoundaryEvent boundaryEvent, boolean interrupting) {
        return wrappedActivityBehaviorFactory.createBoundaryEventActivityBehavior(boundaryEvent, interrupting);
    }

    @Override
    public BoundaryCancelEventActivityBehavior createBoundaryCancelEventActivityBehavior(CancelEventDefinition cancelEventDefinition) {
        return wrappedActivityBehaviorFactory.createBoundaryCancelEventActivityBehavior(cancelEventDefinition);
    }

    @Override
    public BoundaryTimerEventActivityBehavior createBoundaryTimerEventActivityBehavior(BoundaryEvent boundaryEvent, TimerEventDefinition timerEventDefinition, boolean interrupting) {
        return wrappedActivityBehaviorFactory.createBoundaryTimerEventActivityBehavior(boundaryEvent, timerEventDefinition, interrupting);
    }

    @Override
    public BoundarySignalEventActivityBehavior createBoundarySignalEventActivityBehavior(BoundaryEvent boundaryEvent, SignalEventDefinition signalEventDefinition, Signal signal, boolean interrupting) {
        return wrappedActivityBehaviorFactory.createBoundarySignalEventActivityBehavior(boundaryEvent, signalEventDefinition, signal, interrupting);
    }

    @Override
    public BoundaryMessageEventActivityBehavior createBoundaryMessageEventActivityBehavior(BoundaryEvent boundaryEvent, MessageEventDefinition messageEventDefinition, boolean interrupting) {
        return wrappedActivityBehaviorFactory.createBoundaryMessageEventActivityBehavior(boundaryEvent, messageEventDefinition, interrupting);
    }

    @Override
    public BoundaryCompensateEventActivityBehavior createBoundaryCompensateEventActivityBehavior(BoundaryEvent boundaryEvent, CompensateEventDefinition compensateEventDefinition, boolean interrupting) {
        return wrappedActivityBehaviorFactory.createBoundaryCompensateEventActivityBehavior(boundaryEvent, compensateEventDefinition, interrupting);
    }

    // Mock support //////////////////////////////////////////////////////

    // Replace the class delegate for originalClassFqn with the given mock class.
    public void addClassDelegateMock(String originalClassFqn, Class<?> mockClass) {
        mockedClassDelegatesMapping.put(originalClassFqn, mockClass.getName());
    }

    // Replace the class delegate for originalClassFqn with the named mock class.
    public void addClassDelegateMock(String originalClassFqn, String mockedClassFqn) {
        mockedClassDelegatesMapping.put(originalClassFqn, mockedClassFqn);
    }

    // Replace the service task with the given id by a NoOpServiceTask.
    public void addNoOpServiceTaskById(String id) {
        noOpServiceTaskIds.add(id);
    }

    // Replace service tasks with the given implementation class by a NoOpServiceTask.
    public void addNoOpServiceTaskByClassName(String className) {
        noOpServiceTaskClassNames.add(className);
    }

    // Globally replace all class-delegate service tasks by NoOpServiceTasks.
    public void setAllServiceTasksNoOp() {
        allServiceTasksNoOp = true;
    }

    // Clear all registered mocks and no-op replacements, and reset NoOpServiceTask's
    // static call-tracking state, so each test starts from a clean slate.
    public void reset() {
        this.mockedClassDelegatesMapping.clear();

        this.noOpServiceTaskIds.clear();
        this.noOpServiceTaskClassNames.clear();

        allServiceTasksNoOp = false;
        NoOpServiceTask.reset();
    }

}
// HTMLParser Library - A java-based parser for HTML
// http://htmlparser.org
// Copyright (C) 2006 Derrick Oswald
//
// Revision Control Information
//
// $URL: https://svn.sourceforge.net/svnroot/htmlparser/trunk/filterbuilder/src/main/java/org/htmlparser/parserapplications/filterbuilder/wrappers/StringFilterWrapper.java $
// $Author: derrickoswald $
// $Date: 2006-09-16 10:44:17 -0400 (Sat, 16 Sep 2006) $
// $Revision: 4 $
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the Common Public License; either
// version 1.0 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// Common Public License for more details.
//
// You should have received a copy of the Common Public License
// along with this library; if not, the license is available from
// the Open Source Initiative (OSI) website:
//   http://opensource.org/licenses/cpl1.0.php

package org.htmlparser.parserapplications.filterbuilder.wrappers;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Locale;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JTextArea;
import javax.swing.border.BevelBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.BadLocationException;
import javax.swing.text.Document;

import org.htmlparser.Node;
import org.htmlparser.NodeFilter;
import org.htmlparser.Parser;
import org.htmlparser.filters.StringFilter;
import org.htmlparser.parserapplications.filterbuilder.Filter;

/**
 * Wrapper for StringFilters.
 * Presents a text area for the search pattern, a check box for case
 * sensitivity and (when case-insensitive) a combo box of locales. The list
 * of available locales is fetched on a low-priority background thread since
 * {@link Locale#getAvailableLocales} can be slow on first use.
 */
public class StringFilterWrapper
    extends
        Filter
    implements
        ActionListener,
        DocumentListener,
        Runnable
{
    /**
     * The underlying filter.
     */
    protected StringFilter mFilter;

    /**
     * Text to check for.
     */
    protected JTextArea mPattern;

    /**
     * The check box for case sensitivity.
     */
    protected JCheckBox mCaseSensitivity;

    /**
     * Combo box for locale.
     */
    protected JComboBox mLocale;

    /**
     * Cached locales.
     * Populated by the background thread in {@link #run}; may be
     * <code>null</code> until that thread has finished.
     */
    protected static Locale[] mLocales = null;

    /**
     * Create a wrapper over a new StringFilter.
     */
    public StringFilterWrapper ()
    {
        Thread thread;

        mFilter = new StringFilter ();
        mFilter.setCaseSensitive (true);

        // add the text pattern
        mPattern = new JTextArea (2, 20);
        mPattern.setBorder (new BevelBorder (BevelBorder.LOWERED));
        add (mPattern);
        mPattern.getDocument ().addDocumentListener (this);
        mPattern.setText (mFilter.getPattern ());

        // add the case sensitivity flag
        mCaseSensitivity = new JCheckBox ("Case Sensitive");
        add (mCaseSensitivity);
        mCaseSensitivity.addActionListener (this);
        mCaseSensitivity.setSelected (mFilter.getCaseSensitive ());

        // add the locales choice
        mLocale = new JComboBox ();
        synchronized (mLocale)
        {
            mLocale.addItem (mFilter.getLocale ().getDisplayName ());
            thread = new Thread (this);
            thread.setName ("locale_getter");
            thread.setPriority (Thread.MIN_PRIORITY);
            // BUGFIX: was thread.run (), which executed the fetch synchronously
            // on this thread and defeated the whole point of the background
            // thread; start () runs it asynchronously. The thread blocks in
            // run () until this synchronized block is exited.
            thread.start ();
        }
        add (mLocale);
        mLocale.addActionListener (this);
        mLocale.setSelectedIndex (0);
        mLocale.setVisible (!mFilter.getCaseSensitive ());
    }

    //
    // Filter overrides and concrete implementations
    //

    /**
     * Get the name of the filter.
     * @return A descriptive name for the filter.
     */
    public String getDescription ()
    {
        return ("Nodes containing string");
    }

    /**
     * Get the resource name for the icon.
     * @return The icon resource specification.
     */
    public String getIconSpec ()
    {
        return ("images/StringFilter.gif");
    }

    /**
     * Get the underlying node filter object.
     * @return The node filter object suitable for serialization.
     */
    public NodeFilter getNodeFilter ()
    {
        StringFilter ret;

        // return a copy so the serialized filter is decoupled from the GUI
        ret = new StringFilter ();
        ret.setCaseSensitive (mFilter.getCaseSensitive ());
        ret.setLocale (mFilter.getLocale ());
        ret.setPattern (mFilter.getPattern ());

        return (ret);
    }

    /**
     * Assign the underlying node filter for this wrapper.
     * @param filter The filter to wrap.
     * @param context The parser to use for conditioning this filter.
     * Some filters need contextual information to provide to the user,
     * i.e. for tag names or attribute names or values,
     * so the Parser context is provided.
     */
    public void setNodeFilter (NodeFilter filter, Parser context)
    {
        mFilter = (StringFilter)filter;

        // resynchronize the GUI widgets with the new filter's state
        mPattern.setText (mFilter.getPattern ());
        mCaseSensitivity.setSelected (mFilter.getCaseSensitive ());
        mLocale.setVisible (!mFilter.getCaseSensitive ());
        mLocale.setSelectedItem (mFilter.getLocale ().getDisplayName ());
    }

    /**
     * Get the underlying node filter's subordinate filters.
     * @return The node filter object's contained filters.
     */
    public NodeFilter[] getSubNodeFilters ()
    {
        // a StringFilter has no children
        return (new NodeFilter[0]);
    }

    /**
     * Assign the underlying node filter's subordinate filters.
     * @param filters The filters to insert into the underlying node filter.
     */
    public void setSubNodeFilters (NodeFilter[] filters)
    {
        // should we complain?
    }

    /**
     * Convert this filter into Java code.
     * Output whatever text necessary and return the variable name.
     * @param out The output buffer.
     * @param context Three integers as follows:
     * <li>indent level - the number of spaces to insert at the beginning of each line</li>
     * <li>filter number - the next available filter number</li>
     * <li>filter array number - the next available array of filters number</li>
     * @return The variable name to use when referencing this filter (usually "filter" + context[1]++)
     */
    public String toJavaCode (StringBuffer out, int[] context)
    {
        String ret;

        ret = "filter" + context[1]++;
        spaces (out, context[0]);
        out.append ("StringFilter ");
        out.append (ret);
        out.append (" = new StringFilter ();");
        newline (out);
        spaces (out, context[0]);
        out.append (ret);
        out.append (".setCaseSensitive (");
        out.append (mFilter.getCaseSensitive () ? "true" : "false");
        out.append (");");
        newline (out);
        spaces (out, context[0]);
        out.append (ret);
        out.append (".setLocale (new java.util.Locale (\"");
        out.append (mFilter.getLocale ().getLanguage ());
        out.append ("\", \"");
        out.append (mFilter.getLocale ().getCountry ());
        out.append ("\", \"");
        out.append (mFilter.getLocale ().getVariant ());
        out.append ("\"));");
        newline (out);
        spaces (out, context[0]);
        out.append (ret);
        // NOTE(review): the pattern is emitted unescaped; a pattern containing
        // a quote or backslash produces uncompilable code — confirm whether
        // upstream guarantees patterns never contain such characters.
        out.append (".setPattern (\"");
        out.append (mFilter.getPattern ());
        out.append ("\");");
        newline (out);

        return (ret);
    }

    //
    // NodeFilter interface
    //

    /**
     * Predicate to determine whether or not to keep the given node.
     * The behaviour based on this outcome is determined by the context
     * in which it is called. It may lead to the node being added to a list
     * or printed out. See the calling routine for details.
     * @return <code>true</code> if the node is to be kept, <code>false</code>
     * if it is to be discarded.
     * @param node The node to test.
     */
    public boolean accept (Node node)
    {
        return (mFilter.accept (node));
    }

    //
    // ActionListener interface
    //

    /**
     * Invoked when an action occurs on the combo box.
     * @param event Details about the action event.
     */
    public void actionPerformed (ActionEvent event)
    {
        Object source;
        boolean sensitive;
        Object[] selection;
        String locale;

        source = event.getSource ();
        if (source == mCaseSensitivity)
        {
            sensitive = mCaseSensitivity.isSelected ();
            mFilter.setCaseSensitive (sensitive);
            mLocale.setVisible (!sensitive);
            mLocale.setSelectedItem (mFilter.getLocale ().getDisplayName ());
        }
        else if (source == mLocale)
        {
            synchronized (mLocale)
            {
                selection = mLocale.getSelectedObjects ();
                // guard against the background locale fetch not having
                // completed yet (mLocales is null until run () finishes)
                if ((null != mLocales) && (null != selection) && (0 != selection.length))
                {
                    locale = (String)selection[0];
                    for (int i = 0; i < mLocales.length; i++)
                        if (locale.equals (mLocales[i].getDisplayName ()))
                            mFilter.setLocale (mLocales[i]);
                }
            }
        }
    }

    //
    // Runnable interface
    //

    /**
     * Background thread task to get the available locales.
     * Fills the combo box with every available locale except the filter's
     * current one (which was added first by the constructor).
     */
    public void run ()
    {
        String locale;

        synchronized (mLocale)
        {
            mLocales = Locale.getAvailableLocales ();
            locale = mFilter.getLocale ().getDisplayName ();
            for (int i = 0; i < mLocales.length; i++)
                if (!locale.equals (mLocales[i].getDisplayName ()))
                    mLocale.addItem (mLocales[i].getDisplayName ());
            mLocale.invalidate ();
        }
    }

    //
    // DocumentListener interface
    //

    /**
     * Push the current document text into the filter's pattern.
     * Shared by the insert and remove handlers.
     * @param e The document event whose document supplies the text.
     */
    private void updatePattern (DocumentEvent e)
    {
        Document doc;

        doc = e.getDocument ();
        try
        {
            mFilter.setPattern (doc.getText (0, doc.getLength ()));
        }
        catch (BadLocationException ble)
        {
            ble.printStackTrace ();
        }
    }

    /**
     * Handle an insert update event.
     * @param e Details about the insert event.
     */
    public void insertUpdate (DocumentEvent e)
    {
        updatePattern (e);
    }

    /**
     * Handle a remove update event.
     * @param e Details about the remove event.
     */
    public void removeUpdate (DocumentEvent e)
    {
        updatePattern (e);
    }

    /**
     * Handle a change update event.
     * @param e Details about the change event.
     */
    public void changedUpdate (DocumentEvent e)
    {
        // plain text components don't fire these events
    }
}
/** * Copyright 2015 Santhosh Kumar Tekuri * * The JLibs authors license this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package jlibs.xml.sax.dog.expr.func; import jlibs.xml.sax.dog.DataType; import jlibs.xml.sax.dog.expr.Expression; import jlibs.xml.sax.dog.path.LocationPath; import javax.xml.xpath.XPathFunction; import javax.xml.xpath.XPathFunctionException; import java.util.*; /** * @author Santhosh Kumar T */ public class Functions{ public static Expression typeCast(Object current, DataType expected){ if(current instanceof Expression){ Expression expr = (Expression)current; DataType exprResultType = expr.resultType; if(exprResultType==expected || expected==DataType.STRINGS) return expr; if(expected==DataType.NUMBERS){ if(exprResultType==DataType.NUMBER) return expr; else expected = DataType.NUMBER; } if(expected==DataType.PRIMITIVE){ switch(exprResultType){ case STRING: case BOOLEAN: case NUMBER: return expr; default: expected = DataType.STRING; } } FunctionCall function = new FunctionCall(new Functions.TypeCast(expected)); function.addValidMember(expr, 0); return function.simplify(); }else return ((LocationPath)current).typeCast(expected).simplify(); } public static class TypeCast extends Function{ public TypeCast(DataType resultType){ super(resultType.name().toLowerCase(), resultType, false, DataType.PRIMITIVE); } @Override public Object evaluate(Object... 
args){ return resultType.convert(args[0]); } } public static class UserFunction extends Function{ public final XPathFunction xpathFunction; public UserFunction(String namespace, String name, XPathFunction xpathFunction){ super(namespace, name, DataType.PRIMITIVE, true, DataType.PRIMITIVE); this.xpathFunction = xpathFunction; } @Override public Object evaluate(Object... args){ try{ return xpathFunction.evaluate(Arrays.asList(args)); }catch(XPathFunctionException ex){ throw new RuntimeException(ex); } } } /*-------------------------------------------------[ Arithmetic ]---------------------------------------------------*/ private static abstract class ArithmeticFunction extends PeekingFunction{ protected ArithmeticFunction(String name){ super(name, DataType.NUMBER, false, DataType.NUMBER, DataType.NUMBER); } @Override protected final Object onMemberResult(int index, Object result){ Double d = (Double)result; return d.isNaN() || d.isInfinite() ? d : null; } } public static final ArithmeticFunction ADD = new ArithmeticFunction("+"){ @Override public Object evaluate(Object... args){ return (Double)args[0] + (Double)args[1]; } }; public static final ArithmeticFunction SUBSTRACT = new ArithmeticFunction("-"){ @Override public Object evaluate(Object... args){ return (Double)args[0] - (Double)args[1]; } }; public static final ArithmeticFunction MULTIPLY = new ArithmeticFunction("*"){ @Override public Object evaluate(Object... args){ return (Double)args[0] * (Double)args[1]; } }; public static final ArithmeticFunction DIV = new ArithmeticFunction("div"){ @Override public Object evaluate(Object... args){ return (Double)args[0] / (Double)args[1]; } }; public static final ArithmeticFunction MOD = new ArithmeticFunction("mod"){ @Override public Object evaluate(Object... 
args){ return (Double)args[0] % (Double)args[1]; } }; /*-------------------------------------------------[ Numeric ]---------------------------------------------------*/ public static final Function CEIL = new Function("ceiling", DataType.NUMBER, false, DataType.NUMBER, DataType.NUMBER){ @Override public Object evaluate(Object... args){ Double d = (Double)args[0]; if(Double.isNaN(d) || Double.isInfinite(d)) return d; return Math.ceil(d); } }; public static final Function FLOOR = new Function("floor", DataType.NUMBER, false, DataType.NUMBER, DataType.NUMBER){ @Override public Object evaluate(Object... args){ Double d = (Double)args[0]; if(Double.isNaN(d) || Double.isInfinite(d)) return d; return Math.floor(d); } }; public static final Function ROUND = new Function("round", DataType.NUMBER, false, DataType.NUMBER, DataType.NUMBER){ @Override public Object evaluate(Object... args){ Double d = (Double)args[0]; if(Double.isNaN(d) || Double.isInfinite(d)) return d; return (double)Math.round(d); } }; /*-------------------------------------------------[ String ]---------------------------------------------------*/ public static final Function LENGTH = new Function("string-length", DataType.NUMBER, false, DataType.STRING){ @Override public Object evaluate(Object... args){ String str = (String)args[0]; return (double)str.codePointCount(0, str.length()); } }; public static final Function STARTS_WITH = new Function("starts-with", DataType.BOOLEAN, false, DataType.STRING, DataType.STRING){ @Override public Object evaluate(Object... args){ return ((String)args[0]).startsWith((String)args[1]); } }; public static final Function ENDS_WITH = new Function("ends-with", DataType.BOOLEAN, false, DataType.STRING, DataType.STRING){ @Override public Object evaluate(Object... 
args){ return ((String)args[0]).endsWith((String)args[1]); } }; public static final Function CONTAINS = new Function("contains", DataType.BOOLEAN, false, DataType.STRING, DataType.STRING){ @Override public Object evaluate(Object... args){ return ((String)args[0]).contains((String)args[1]); } }; public static final Function CONCAT = new Function("concat", DataType.STRING, true, DataType.STRING, DataType.STRING){ @Override public Object evaluate(Object... args){ StringBuilder buff = new StringBuilder(); for(Object arg: args) buff.append(arg); return buff.toString(); } }; public static final Function LANGUAGE_MATCH = new Function("language-match", DataType.BOOLEAN, false, DataType.STRING, DataType.STRING){ @Override public Object evaluate(Object... args){ String sublang = (String)args[0]; String lang = (String)args[1]; if(sublang.equalsIgnoreCase(lang)) return true; int len = lang.length(); return sublang.length() > len && sublang.charAt(len) == '-' && sublang.substring(0, len).equalsIgnoreCase(lang); } }; public static final Function TRANSLATE = new Function("translate", DataType.STRING, false, DataType.STRING, DataType.STRING, DataType.STRING){ @Override public Object evaluate(Object... 
args){ return translate((String)args[0], (String)args[1], (String)args[2]); } public String translate(String input, String from, String to){ // Initialize the mapping in a HashMap Map<String, String> characterMap = new HashMap<String, String>(); String[] fromCharacters = toUnicodeCharacters(from); String[] toCharacters = toUnicodeCharacters(to); int fromLen = fromCharacters.length; int toLen = toCharacters.length; for(int i=0; i<fromLen; i++){ String cFrom = fromCharacters[i]; if(characterMap.containsKey(cFrom)) // We've seen the character before, ignore continue; if(i<toLen) // Will change characterMap.put(cFrom, toCharacters[i]); else // Will delete characterMap.put(cFrom, null); } // Process the input string thru the map StringBuilder output = new StringBuilder(input.length()); String[] inCharacters = toUnicodeCharacters(input); int inLen = inCharacters.length; for(int i=0; i<inLen; i++){ String cIn = inCharacters[i]; if(characterMap.containsKey(cIn)){ String cTo = characterMap.get(cIn); if(cTo!=null) output.append(cTo); }else output.append(cIn); } return output.toString(); } private String[] toUnicodeCharacters(final String s){ String[] result = new String[s.length()]; int stringLength = 0; int slen = s.length(); for(int i=0; i<slen; i++){ char c1 = s.charAt(i); if(c1>=0xD800 && c1<=0xDBFF){ // isHighSurrogate(c1) try{ char c2 = s.charAt(i+1); if(c2>=0xDC00 && c2<=0xDFFF){ //isLowSurrogate(c2) result[stringLength] = (c1 + "" + c2).intern(); i++; }else throw new IllegalArgumentException("Mismatched surrogate pair in translate function"); }catch (StringIndexOutOfBoundsException ex){ throw new IllegalArgumentException("High surrogate without low surrogate at end of string passed to translate function"); } }else result[stringLength] = String.valueOf(c1).intern(); stringLength++; } if(stringLength==result.length) return result; // trim array String[] trimmed = new String[stringLength]; System.arraycopy(result, 0, trimmed, 0, stringLength); return trimmed; } }; 
// normalize-space(string) -> string: strips leading/trailing XML whitespace
// and collapses internal runs of whitespace to a single space.
public static final Function NORMALIZE_SPACE = new Function("normalize-space", DataType.STRING, false, DataType.STRING){
    @Override
    public Object evaluate(Object... args){
        return normalize((String)args[0]);
    }

    // In-place compaction over the char buffer; `lastWrite` tracks the end of
    // the last non-space run so trailing separators are dropped.
    public String normalize(String str){
        char[] buffer = str.toCharArray();
        int write = 0;
        int lastWrite = 0;
        boolean wroteOne = false;
        int read = 0;
        while (read < buffer.length){
            if (isXMLSpace(buffer[read])){
                if (wroteOne) // only emit a separator after some non-space output
                    buffer[write++] = ' ';
                do{
                    read++;
                }while(read < buffer.length && isXMLSpace(buffer[read]));
            }else{
                buffer[write++] = buffer[read++];
                wroteOne = true;
                lastWrite = write;
            }
        }
        return new String(buffer, 0, lastWrite);
    }

    // XML whitespace: space, newline, carriage return, tab.
    private boolean isXMLSpace(char c) {
        return c==' ' || c=='\n' || c=='\r' || c=='\t';
    }
};

// substring(string, start[, length]) -> string with XPath 1.0 semantics:
// 1-based start, rounded arguments, NaN handling, and surrogate-aware indexing
// when the string contains supplementary characters.
public static final Function SUBSTRING = new Function("", "substring", DataType.STRING, false, 2, DataType.STRING, DataType.NUMBER, DataType.NUMBER){
    @Override
    public Object evaluate(Object... args){
        String str = (String)args[0];
        if(str==null)
            return "";
        // LENGTH counts logical characters (code points), not Java chars.
        int stringLength = ((Double)LENGTH.evaluate(str)).intValue();
        if(stringLength==0)
            return "";
        Double d1 = (Double)args[1];
        if(d1.isNaN())
            return "";
        int start = ((Double)ROUND.evaluate(d1)).intValue() - 1; // subtract 1 as Java strings are zero based
        int substringLength = stringLength;
        if(args.length==3){
            Double d2 = (Double)args[2];
            if(!d2.isNaN())
                substringLength = ((Double)ROUND.evaluate(d2)).intValue();
            else
                substringLength = 0;
        }
        if (substringLength<0)
            return "";
        int end = start + substringLength;
        if(args.length==2)
            end = stringLength;
        if(start<0) // negative start is treated as 0
            start = 0;
        else if(start>stringLength)
            return "";
        if(end>stringLength)
            end = stringLength;
        else if(end<start)
            return "";
        if(stringLength==str.length()) // // easy case; no surrogate pairs
            return str.substring(start, end);
        else
            return unicodeSubstring(str, start, end);
    }

    // Walks the string keeping a Java-char index (jChar) and a code-point
    // index (uChar) in step, so [start, end) is interpreted in code points.
    private String unicodeSubstring(String s, int start, int end){
        StringBuffer result = new StringBuffer(s.length());
        for(int jChar=0, uChar=0; uChar<end; jChar++, uChar++){
            char c = s.charAt(jChar);
            if(uChar>=start)
                result.append(c);
            if(c>=0xD800){ // get the low surrogate
                // ???? we could check here that this is indeed a low surroagte
                // we could also catch StringIndexOutOfBoundsException
                jChar++;
                if(uChar>=start)
                    result.append(s.charAt(jChar));
            }
        }
        return result.toString();
    }
};

// Base class for upper-case/lower-case: optional second argument is an
// RFC 3066 language tag selecting the Locale; defaults to English.
public static abstract class ChangeCase extends Function{
    protected ChangeCase(String name){
        super("", name, DataType.STRING, false, 1, DataType.STRING, DataType.STRING);
    }

    @Override
    public Object evaluate(Object... args){
        Locale locale = Locale.ENGLISH;
        if(args.length>1){
            locale = findLocale((String)args[1]);
            if(locale==null) // unknown tag: fall back to English
                locale = Locale.ENGLISH;
        }
        return evaluate((String)args[0], locale);
    }

    protected abstract String evaluate(String arg, Locale locale);

    /**
     * Tries to find a Locale instance by name using
     * <a href="http://www.ietf.org/rfc/rfc3066.txt" target="_top">RFC 3066</a>
     * language tags such as 'en', 'en-US', 'en-US-Brooklyn'.
     *
     * @param localeText the RFC 3066 language tag
     * @return the locale for the given text or null if one could not
     *     be found
     */
    public static Locale findLocale(String localeText) {
        StringTokenizer tokens = new StringTokenizer( localeText, "-" );
        if(tokens.hasMoreTokens()){
            String language = tokens.nextToken();
            if(!tokens.hasMoreTokens())
                return findLocaleForLanguage(language);
            else{
                String country = tokens.nextToken();
                if(!tokens.hasMoreTokens())
                    return new Locale(language, country);
                else{
                    String variant = tokens.nextToken();
                    return new Locale(language, country, variant);
                }
            }
        }
        return null;
    }

    /**
     * Finds the locale with the given language name with no country
     * or variant, such as Locale.ENGLISH or Locale.FRENCH
     *
     * @param language the language code to look for
     * @return the locale for the given language or null if one could not
     *     be found
     */
    private static Locale findLocaleForLanguage(String language) {
        for(Locale locale: Locale.getAvailableLocales()){
            if(language.equals(locale.getLanguage())){
                // Only accept a locale with no country and no variant, i.e.
                // the bare language locale.
                String country = locale.getCountry();
                if(country==null || country.length()==0){
                    String variant = locale.getVariant();
                    if(variant==null || variant.length()==0)
                        return locale;
                }
            }
        }
        return null;
    }
}

// upper-case(string[, lang]) using the resolved Locale.
public static final ChangeCase UPPER_CASE = new ChangeCase("upper-case"){
    @Override
    protected String evaluate(String arg, Locale locale){
        return arg.toUpperCase(locale);
    }
};

// lower-case(string[, lang]) using the resolved Locale.
public static final ChangeCase LOWER_CASE = new ChangeCase("lower-case"){
    @Override
    protected String evaluate(String arg, Locale locale){
        return arg.toLowerCase(locale);
    }
};

/*-------------------------------------------------[ Boolean ]---------------------------------------------------*/

// Logical AND. onMemberResult short-circuits evaluation of the other operand
// as soon as one member is FALSE (PeekingFunction contract -- a non-null
// return appears to finish the evaluation early; TODO confirm in base class).
public static final Function AND = new PeekingFunction("and", DataType.BOOLEAN, false, DataType.BOOLEAN, DataType.BOOLEAN){
    @Override
    public Object evaluate(Object... args){
        return (Boolean)args[0] && (Boolean)args[1];
    }

    @Override
    protected Object onMemberResult(int index, Object result){
        return result==Boolean.FALSE ? result : null;
    }
};

// Logical OR; short-circuits on the first TRUE member.
public static final Function OR = new PeekingFunction("or", DataType.BOOLEAN, false, DataType.BOOLEAN, DataType.BOOLEAN){
    @Override
    public Object evaluate(Object... args){
        return (Boolean)args[0] || (Boolean)args[1];
    }

    @Override
    protected Object onMemberResult(int index, Object result){
        return result==Boolean.TRUE ? result : null;
    }
};

// Logical negation.
public static final Function NOT = new Function("not", DataType.BOOLEAN, false, DataType.BOOLEAN){
    @Override
    public Object evaluate(Object... args){
        return !(Boolean)args[0];
    }
};

/*-------------------------------------------------[ Equals ]---------------------------------------------------*/

// number = number; a NaN member makes the whole comparison FALSE immediately.
public static final Function NUMBER_EQUALS_NUMBER = new PeekingFunction("=", DataType.BOOLEAN, false, DataType.NUMBER, DataType.NUMBER){
    @Override
    public Object evaluate(Object... args){
        return args[0].equals(args[1]);
    }

    @Override
    protected Object onMemberResult(int index, Object result){
        return Double.isNaN((Double)result) ? Boolean.FALSE : null;
    }
};

// string = string (both args asserted to be Strings).
public static final Function STRING_EQUALS_STRING = new Function("=", DataType.BOOLEAN, false, DataType.STRING, DataType.STRING){
    @Override
    public Object evaluate(Object... args){
        assert args[0] instanceof String;
        assert args[1] instanceof String;
        return args[0].equals(args[1]);
    }
};

// node-set = string: true when any string in the set equals the string.
// An empty set short-circuits to FALSE.
public static final Function STRINGS_EQUALS_STRING = new PeekingFunction("=", DataType.BOOLEAN, false, DataType.STRINGS, DataType.STRING){
    @Override
    public Object evaluate(Object... args){
        assert args[1] instanceof String;
        if(args[0] instanceof Collection)
            return ((Collection)args[0]).contains(args[1]);
        else
            return args[0].equals(args[1]);
    }

    @Override
    protected Object onMemberResult(int index, Object result){
        return result instanceof Collection && ((Collection)result).size()==0 ? Boolean.FALSE : null;
    }
};

// node-set = number: NaN rhs is never equal; empty set / NaN member
// short-circuits to FALSE.
public static final Function NUMBERS_EQUALS_NUMBER = new PeekingFunction("=", DataType.BOOLEAN, false, DataType.NUMBERS, DataType.NUMBER){
    @Override
    public Object evaluate(Object... args){
        double rhs = (Double)args[1];
        if(Double.isNaN(rhs))
            return false;
        if(args[0] instanceof Collection)
            return ((Collection)args[0]).contains(args[1]);
        else
            return (Double)args[0]==rhs;
    }

    @Override
    protected Object onMemberResult(int index, Object result){
        if(result instanceof Double)
            return Double.isNaN((Double)result) ? Boolean.FALSE : null;
        else
            return ((Collection)result).size()==0 ? Boolean.FALSE : null;
    }
};

// node-set = node-set: existential equality -- true when the sets intersect.
public static final Function STRINGS_EQUALS_STRINGS = new PeekingFunction("=", DataType.BOOLEAN, false, DataType.STRINGS, DataType.STRINGS){
    @Override
    public Object evaluate(Object...
            // Existential semantics: a scalar is compared directly, a
            // Collection matches when any element matches.
            args){
        boolean list0 = args[0] instanceof Collection;
        boolean list1 = args[1] instanceof Collection;
        if(list0 && list1){
            Collection rhs = (Collection)args[1];
            for(Object lhs: (Collection)args[0]){
                if(rhs.contains(lhs))
                    return true;
            }
            return false;
        }else if(list0)
            return ((Collection)args[0]).contains(args[1]);
        else if(list1)
            return ((Collection)args[1]).contains(args[0]);
        else
            return args[0].equals(args[1]);
    }

    @Override
    protected Object onMemberResult(int index, Object result){
        // An empty node-set can never satisfy '='; short-circuit to FALSE.
        return result instanceof Collection && ((Collection)result).size()==0 ? Boolean.FALSE : null;
    }
};

/*-------------------------------------------------[ Comparison ]---------------------------------------------------*/

// Base for pairwise comparisons with XPath existential semantics: when either
// side is a node-set, the comparison is true if ANY pair of members satisfies
// the concrete predicate (evaluateObjectObject).
private abstract static class Comparison extends PeekingFunction{
    protected Comparison(String name, DataType memberType){
        super(name, DataType.BOOLEAN, false, memberType, memberType);
    }

    @Override
    public final Object evaluate(Object[] args){
        boolean list1 = args[0] instanceof Collection;
        boolean list2 = args[1] instanceof Collection;
        if(list1 && list2){
            Collection rhsCollection = (Collection)args[1];
            for(Object lhs: (Collection)args[0]){
                for(Object rhs: rhsCollection){
                    if(evaluateObjectObject(lhs, rhs))
                        return true;
                }
            }
        }else if(list1){
            for(Object lhs: (Collection)args[0]){
                if(evaluateObjectObject(lhs, args[1]))
                    return true;
            }
        }else if(list2){
            for(Object rhs: (Collection)args[1]){
                if(evaluateObjectObject(args[0], rhs))
                    return true;
            }
        }else
            return evaluateObjectObject(args[0], args[1]);
        return false;
    }

    @Override
    protected Object onMemberResult(int index, Object result){
        // Empty node-set member: comparison can never hold.
        return result instanceof Collection && ((Collection)result).size()==0 ? Boolean.FALSE : null;
    }

    protected abstract boolean evaluateObjectObject(Object lhs, Object rhs);
}

// Equality/inequality with XPath type coercion: if either operand is a
// Boolean both are compared as booleans, else if either is a Double both as
// numbers, otherwise as strings.
private static abstract class Equality extends Comparison{
    public Equality(String name){
        super(name, DataType.STRINGS);
    }

    @Override
    protected final boolean evaluateObjectObject( Object lhs, Object rhs){
        assert lhs!=null && rhs!=null;

        if(lhs instanceof Boolean || rhs instanceof Boolean)
            return evaluateObjects(DataType.asBoolean(lhs), DataType.asBoolean(rhs));
        else if(lhs instanceof Double || rhs instanceof Double)
            return evaluateObjects(DataType.asNumber(lhs), DataType.asNumber(rhs));
        else
            return evaluateObjects(lhs.toString(), rhs.toString());
    }

    protected abstract boolean evaluateObjects(Object lhs, Object rhs);
}

// '=' : NaN compares unequal to everything (including NaN).
public static final Function EQUALS = new Equality("="){
    @Override
    protected boolean evaluateObjects(Object lhs, Object rhs){
        if(lhs instanceof Double){
            if(Double.isNaN((Double)lhs) || Double.isNaN((Double)rhs))
                return false;
        }
        return lhs.equals(rhs);
    }

    @Override
    protected final Object onMemberResult(int index, Object result){
        // A NaN member makes '=' FALSE regardless of the other operand.
        if(result instanceof Double)
            return Double.isNaN((Double)result) ? Boolean.FALSE : null;
        return super.onMemberResult(index, result);
    }
};

// '!=' : NaN compares unequal to everything, so any NaN operand yields TRUE.
public static final Function NOT_EQUALS = new Equality("!="){
    @Override
    protected boolean evaluateObjects(Object lhs, Object rhs){
        if(lhs instanceof Double){
            if(Double.isNaN((Double)lhs) || Double.isNaN((Double)rhs))
                return true;
        }
        return !lhs.equals(rhs);
    }

    @Override
    protected final Object onMemberResult(int index, Object result){
        if(result instanceof Double)
            return Double.isNaN((Double)result) ?
                    Boolean.TRUE : null; // NaN member: '!=' is TRUE immediately
        return super.onMemberResult(index, result);
    }
};

/*-------------------------------------------------[ Relational ]---------------------------------------------------*/

// Base for <, <=, >, >= over numbers/number-sets with existential semantics;
// NaN operands never satisfy any relational comparison.
private static abstract class Relational extends PeekingFunction{
    public Relational(String name){
        super(name, DataType.BOOLEAN, false, DataType.NUMBERS, DataType.NUMBERS);
    }

    public final Object evaluate(Object[] args){
        boolean list1 = args[0] instanceof Collection;
        boolean list2 = args[1] instanceof Collection;
        if(list1 && list2){
            Collection rhsCollection = (Collection)args[1];
            for(Object lhs: (Collection)args[0]){
                double lhsNum = (Double)lhs;
                if(!Double.isNaN(lhsNum)){
                    for(Object rhs: rhsCollection){
                        double rhsNum = (Double)rhs;
                        if(!Double.isNaN(rhsNum) && evaluateDoubles(lhsNum, rhsNum))
                            return true;
                    }
                }
            }
        }else if(list1){
            double rhsNum = (Double)args[1];
            if(!Double.isNaN(rhsNum)){
                for(Object lhs: (Collection)args[0]){
                    double lhsNum = (Double)lhs;
                    if(!Double.isNaN(lhsNum) && evaluateDoubles(lhsNum, rhsNum))
                        return true;
                }
            }
        }else if(list2){
            double lhsNum = (Double)args[0];
            if(!Double.isNaN(lhsNum)){
                for(Object rhs: (Collection)args[1]){
                    double rhsNum = (Double)rhs;
                    if(!Double.isNaN(rhsNum) && evaluateDoubles(lhsNum, rhsNum))
                        return true;
                }
            }
        }else{
            double lhsNum = (Double)args[0];
            if(Double.isNaN(lhsNum))
                return false;
            double rhsNum = (Double)args[1];
            return !Double.isNaN(rhsNum) && evaluateDoubles(lhsNum, rhsNum);
        }
        return false;
    }

    protected abstract boolean evaluateDoubles(double lhs, double rhs);

    @Override
    protected final Object onMemberResult(int index, Object result){
        // Empty node-set or NaN member: relation can never hold.
        if(result instanceof Collection)
            return ((Collection)result).size()==0 ? Boolean.FALSE : null;
        else
            return ((Double)result).isNaN() ? Boolean.FALSE : null;
    }
}

public static final Function GREATER_THAN = new Relational(">"){
    @Override
    protected boolean evaluateDoubles(double lhs, double rhs){
        return lhs>rhs;
    }
};

public static final Function GREATER_THAN_EQUAL = new Relational(">="){
    @Override
    protected boolean evaluateDoubles(double lhs, double rhs){
        return lhs>=rhs;
    }
};

public static final Function LESS_THAN = new Relational("<"){
    @Override
    protected boolean evaluateDoubles(double lhs, double rhs){
        return lhs<rhs;
    }
};

public static final Function LESS_THAN_EQUAL = new Relational("<="){
    @Override
    protected boolean evaluateDoubles(double lhs, double rhs){
        return lhs<=rhs;
    }
};

/*-------------------------------------------------[ Lookup ]---------------------------------------------------*/

// Registry of all built-in functions, keyed by function name. Note the
// existing identifier "SUBSTRACT" (sic) is declared above this chunk and is
// kept as-is. Functions registered later under the same name (e.g. the
// several "=" variants) overwrite earlier ones in this map.
public static final Map<String, Function> library = new HashMap<String, Function>();
static{
    Function functions[] = {
        ADD, SUBSTRACT, MULTIPLY, DIV, MOD, CEIL, FLOOR, ROUND,
        LENGTH, STARTS_WITH, ENDS_WITH, CONTAINS, CONCAT, LANGUAGE_MATCH, TRANSLATE, NORMALIZE_SPACE, SUBSTRING, UPPER_CASE, LOWER_CASE,
        AND, OR, NOT,
        EQUALS, NOT_EQUALS,
        GREATER_THAN, GREATER_THAN_EQUAL, LESS_THAN, LESS_THAN_EQUAL,
        new TypeCast(DataType.STRING), new TypeCast(DataType.NUMBER), new TypeCast(DataType.BOOLEAN)
    };
    for(Function f: functions)
        library.put(f.name, f);
}
}
package edu.bupt.mms;

import java.util.Locale;

// NOTE(review): this import is shadowed inside the class body by the nested
// DummySectionFragment declared below; references in this file resolve to the
// nested class. Consider removing the import -- confirm no other use.
import edu.bupt.mms.DropDownActivity.DummySectionFragment;
import android.app.ActionBar;
import android.app.FragmentTransaction;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.app.NavUtils;
import android.support.v4.view.ViewPager;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;

// Eclipse-template tabbed/swipe activity ("Seperate" is a pre-existing typo in
// the class name; kept because layout/menu resource ids reference it).
public class SeperateCardActivity extends FragmentActivity implements
        ActionBar.TabListener,ActionBar.OnNavigationListener {

    /**
     * The {@link android.support.v4.view.PagerAdapter} that will provide
     * fragments for each of the sections. We use a
     * {@link android.support.v4.app.FragmentPagerAdapter} derivative, which
     * will keep every loaded fragment in memory. If this becomes too memory
     * intensive, it may be best to switch to a
     * {@link android.support.v4.app.FragmentStatePagerAdapter}.
     */
    SectionsPagerAdapter mSectionsPagerAdapter;

    /**
     * The {@link ViewPager} that will host the section contents.
     */
    ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_seperate_card);

        // NOTE(review): the navigation mode is set to LIST here and then
        // immediately overridden to TABS below, so the dropdown list likely
        // never shows -- confirm which navigation style is intended.
        final ActionBar actionBar2 = getActionBar();
        actionBar2.setDisplayShowTitleEnabled(false);
        actionBar2.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);

        // Set up the dropdown list navigation in the action bar.
        actionBar2.setListNavigationCallbacks(
        // Specify a SpinnerAdapter to populate the dropdown list.
                new ArrayAdapter<String>(actionBar2.getThemedContext(),
                        android.R.layout.simple_list_item_1,
                        android.R.id.text1, new String[] {
                                getString(R.string.title_section1),
                                getString(R.string.title_section2),
                                getString(R.string.title_section3), }), this);

        // Set up the action bar.
        final ActionBar actionBar = getActionBar();
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

        // Create the adapter that will return a fragment for each of the three
        // primary sections of the app.
        mSectionsPagerAdapter = new SectionsPagerAdapter(
                getSupportFragmentManager());

        // Set up the ViewPager with the sections adapter.
        mViewPager = (ViewPager) findViewById(R.id.pager);
        mViewPager.setAdapter(mSectionsPagerAdapter);

        // When swiping between different sections, select the corresponding
        // tab. We can also use ActionBar.Tab#select() to do this if we have
        // a reference to the Tab.
        mViewPager
                .setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
                    @Override
                    public void onPageSelected(int position) {
                        actionBar.setSelectedNavigationItem(position);
                    }
                });

        // For each of the sections in the app, add a tab to the action bar.
        for (int i = 0; i < mSectionsPagerAdapter.getCount(); i++) {
            // Create a tab with text corresponding to the page title defined by
            // the adapter. Also specify this Activity object, which implements
            // the TabListener interface, as the callback (listener) for when
            // this tab is selected.
            actionBar.addTab(actionBar.newTab()
                    .setText(mSectionsPagerAdapter.getPageTitle(i))
                    .setTabListener(this));
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.seperate_card, menu);
        return true;
    }

    @Override
    public void onTabSelected(ActionBar.Tab tab,
            FragmentTransaction fragmentTransaction) {
        // When the given tab is selected, switch to the corresponding page in
        // the ViewPager.
        mViewPager.setCurrentItem(tab.getPosition());
    }

    @Override
    public void onTabUnselected(ActionBar.Tab tab,
            FragmentTransaction fragmentTransaction) {
    }

    @Override
    public void onTabReselected(ActionBar.Tab tab,
            FragmentTransaction fragmentTransaction) {
    }

    /**
     * A {@link FragmentPagerAdapter} that returns a fragment corresponding to
     * one of the sections/tabs/pages.
     */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {

        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            // getItem is called to instantiate the fragment for the given page.
            // Return a DummySectionFragment (defined as a static inner class
            // below) with the page number as its lone argument.
            Fragment fragment = new DummySectionFragment();
            Bundle args = new Bundle();
            args.putInt(DummySectionFragment.ARG_SECTION_NUMBER, position + 1);
            fragment.setArguments(args);
            return fragment;
        }

        @Override
        public int getCount() {
            // Show 3 total pages.
            return 3;
        }

        @Override
        public CharSequence getPageTitle(int position) {
            Locale l = Locale.getDefault();
            switch (position) {
            case 0:
                return getString(R.string.title_section1).toUpperCase(l);
            case 1:
                return getString(R.string.title_section2).toUpperCase(l);
            case 2:
                return getString(R.string.title_section3).toUpperCase(l);
            }
            return null;
        }
    }

    /**
     * A dummy fragment representing a section of the app, but that simply
     * displays dummy text.
     */
    public static class DummySectionFragment extends Fragment {
        /**
         * The fragment argument representing the section number for this
         * fragment.
         */
        public static final String ARG_SECTION_NUMBER = "section_number";

        public DummySectionFragment() {
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            View rootView = inflater.inflate(
                    R.layout.fragment_seperate_card_dummy, container, false);
            TextView dummyTextView = (TextView) rootView
                    .findViewById(R.id.section_label);
            dummyTextView.setText(Integer.toString(getArguments().getInt(
                    ARG_SECTION_NUMBER)));
            return rootView;
        }
    }

    // Dropdown navigation callback: swaps in a DummySectionFragment2 for the
    // selected list item.
    @Override
    public boolean onNavigationItemSelected(int position, long id) {
        // TODO Auto-generated method stub
        Fragment fragment = new DummySectionFragment2();
        Bundle args = new Bundle();
        args.putInt(DummySectionFragment2.ARG_SECTION_NUMBER, position + 1);
        fragment.setArguments(args);
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, fragment).commit();
        return true;
        //return false;
    }

    public static class DummySectionFragment2 extends Fragment {
        /**
         * The fragment argument representing the section number for this
         * fragment.
         */
        public static final String ARG_SECTION_NUMBER = "section_number";

        public DummySectionFragment2() {
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.fragment_drop_down_dummy,
                    container, false);
            TextView dummyTextView = (TextView) rootView
                    .findViewById(R.id.section_label);
            dummyTextView.setText(Integer.toString(getArguments().getInt(
                    ARG_SECTION_NUMBER)));
            return rootView;
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator.aggregation;

import com.facebook.presto.operator.aggregation.state.CentralMomentsState;
import com.facebook.presto.operator.aggregation.state.CorrelationState;
import com.facebook.presto.operator.aggregation.state.CovarianceState;
import com.facebook.presto.operator.aggregation.state.RegressionState;
import com.facebook.presto.operator.aggregation.state.VarianceState;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.type.TypeSignature;
import com.google.common.base.CaseFormat;

import java.util.List;
import java.util.function.Function;

import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Locale.ENGLISH;

// Streaming-statistics helpers shared by Presto's variance/covariance/
// correlation/regression aggregations. Update methods fold one sample into a
// running state; merge methods combine two partial states.
public final class AggregationUtils
{
    private AggregationUtils()
    {
    }

    // Single-pass (Welford-style) variance update: maintains count, running
    // mean, and M2 = sum of squared deviations from the mean.
    public static void updateVarianceState(VarianceState state, double value)
    {
        state.setCount(state.getCount() + 1);
        double delta = value - state.getMean();
        state.setMean(state.getMean() + delta / state.getCount());
        state.setM2(state.getM2() + delta * (value - state.getMean()));
    }

    // Single-pass update of the first four central moments (M1..M4).
    public static void updateCentralMomentsState(CentralMomentsState state, double value)
    {
        long n1 = state.getCount();
        long n = n1 + 1;
        double m1 = state.getM1();
        double m2 = state.getM2();
        double m3 = state.getM3();
        double delta = value - m1;
        double deltaN = delta / n;
        double deltaN2 = deltaN * deltaN;
        double dm2 = delta * deltaN * n1;
        state.setCount(n);
        state.setM1(m1 + deltaN);
        state.setM2(m2 + dm2);
        state.setM3(m3 + dm2 * deltaN * (n - 2) - 3 * deltaN * m2);
        state.setM4(state.getM4() + dm2 * deltaN2 * (n * (double) n - 3 * n + 3) + 6 * deltaN2 * m2 - 4 * deltaN * m3);
    }

    // Accumulates the raw sums needed for covariance: count, sum(x*y), sum(x), sum(y).
    public static void updateCovarianceState(CovarianceState state, double x, double y)
    {
        state.setCount(state.getCount() + 1);
        state.setSumXY(state.getSumXY() + x * y);
        state.setSumX(state.getSumX() + x);
        state.setSumY(state.getSumY() + y);
    }

    // Sample covariance: (sum(xy) - sum(x)sum(y)/n) / (n - 1).
    public static double getCovarianceSample(CovarianceState state)
    {
        return (state.getSumXY() - state.getSumX() * state.getSumY() / state.getCount()) / (state.getCount() - 1);
    }

    // Population covariance: (sum(xy) - sum(x)sum(y)/n) / n.
    public static double getCovariancePopulation(CovarianceState state)
    {
        return (state.getSumXY() - state.getSumX() * state.getSumY() / state.getCount()) / state.getCount();
    }

    // Correlation also needs sum(x^2) and sum(y^2) on top of covariance sums.
    public static void updateCorrelationState(CorrelationState state, double x, double y)
    {
        updateCovarianceState(state, x, y);
        state.setSumXSquare(state.getSumXSquare() + x * x);
        state.setSumYSquare(state.getSumYSquare() + y * y);
    }

    public static double getCorrelation(CorrelationState state)
    {
        // This is defined as covariance(x, y) / (stdev(x) * stdev(y))
        double covariance = state.getCount() * state.getSumXY() - state.getSumX() * state.getSumY();
        double stdevX = Math.sqrt(state.getCount() * state.getSumXSquare() - state.getSumX() * state.getSumX());
        double stdevY = Math.sqrt(state.getCount() * state.getSumYSquare() - state.getSumY() * state.getSumY());

        // stdevX and stdevY deliberately not checked for zero because the result can be Infinity or NaN even
        // if they are both not zero
        return covariance / stdevX / stdevY;
    }

    public static void updateRegressionState(RegressionState state, double x, double y)
    {
        updateCovarianceState(state, x, y);
        state.setSumXSquare(state.getSumXSquare() + x * x);
    }

    public static double getRegressionSlope(RegressionState state)
    {
        // Math comes from ISO9075-2:2011(E) 10.9 General Rules 7 c xii
        double dividend = state.getCount() * state.getSumXY() - state.getSumX() * state.getSumY();
        double divisor = state.getCount() * state.getSumXSquare() - state.getSumX() * state.getSumX();

        // divisor deliberately not checked for zero because the result can be Infty or NaN even if it is not zero
        return dividend / divisor;
    }

    public static double getRegressionIntercept(RegressionState state)
    {
        // Math comes from ISO9075-2:2011(E) 10.9 General Rules 7 c xiii
        double dividend = state.getSumY() * state.getSumXSquare() - state.getSumX() * state.getSumXY();
        double divisor = state.getCount() * state.getSumXSquare() - state.getSumX() * state.getSumX();

        // divisor deliberately not checked for zero because the result can be Infty or NaN even if it is not zero
        return dividend / divisor;
    }

    // Combines two partial variance states (parallel/merge formula for
    // count, mean, and M2); a zero-count other state is a no-op.
    public static void mergeVarianceState(VarianceState state, VarianceState otherState)
    {
        long count = otherState.getCount();
        double mean = otherState.getMean();
        double m2 = otherState.getM2();

        checkArgument(count >= 0, "count is negative");
        if (count == 0) {
            return;
        }
        long newCount = count + state.getCount();
        double newMean = ((count * mean) + (state.getCount() * state.getMean())) / (double) newCount;
        double delta = mean - state.getMean();
        double m2Delta = m2 + delta * delta * count * state.getCount() / (double) newCount;
        state.setM2(state.getM2() + m2Delta);
        state.setCount(newCount);
        state.setMean(newMean);
    }

    // Combines two partial central-moment states (pairwise merge formulas
    // for M1..M4); a zero-count other state is a no-op.
    public static void mergeCentralMomentsState(CentralMomentsState state, CentralMomentsState otherState)
    {
        long na = state.getCount();
        long nb = otherState.getCount();

        checkArgument(nb >= 0, "count is negative");
        if (nb == 0) {
            return;
        }

        double m1a = state.getM1();
        double m2a = state.getM2();
        double m3a = state.getM3();
        double m1b = otherState.getM1();
        double m2b = otherState.getM2();
        double m3b = otherState.getM3();
        double n = na + nb; // Use double as type of n to avoid integer overflow for n*n and n*n*n
        double delta = m1b - m1a;
        double delta2 = delta * delta;
        double delta3 = delta * delta2;
        double delta4 = delta2 * delta2;

        state.setCount((long) n);
        state.setM1((na * m1a + nb * m1b) / n);
        state.setM2(m2a + m2b + delta2 * na * nb / n);
        state.setM3(m3a + m3b + delta3 * na * nb * (na - nb) / (n * n)
                + 3 * delta * (na * m2b - nb * m2a) / n);
        state.setM4(state.getM4() + otherState.getM4()
                + delta4 * na * nb * (na * na - na * nb + nb * nb) / (n * n * n)
                + 6 * delta2 * (na * na * m2b + nb * nb * m2a) / (n * n)
                + 4 * delta * (na * m3b - nb * m3a) / n);
    }

    // Private merge helper (overload of the sample-update method above):
    // adds the other state's raw sums and count into `state`.
    private static void updateCovarianceState(CovarianceState state, CovarianceState otherState)
    {
        state.setSumX(state.getSumX() + otherState.getSumX());
        state.setSumY(state.getSumY() + otherState.getSumY());
        state.setSumXY(state.getSumXY() + otherState.getSumXY());
        state.setCount(state.getCount() + otherState.getCount());
    }

    public static void mergeCovarianceState(CovarianceState state, CovarianceState otherState)
    {
        if (otherState.getCount() == 0) {
            return;
        }

        updateCovarianceState(state, otherState);
    }

    public static void mergeCorrelationState(CorrelationState state, CorrelationState otherState)
    {
        if (otherState.getCount() == 0) {
            return;
        }

        updateCovarianceState(state, otherState);
        state.setSumXSquare(state.getSumXSquare() + otherState.getSumXSquare());
        state.setSumYSquare(state.getSumYSquare() + otherState.getSumYSquare());
    }

    public static void mergeRegressionState(RegressionState state, RegressionState otherState)
    {
        if (otherState.getCount() == 0) {
            return;
        }

        updateCovarianceState(state, otherState);
        state.setSumXSquare(state.getSumXSquare() + otherState.getSumXSquare());
    }

    // Builds a generated-class name like "DoubleDoubleVarPop" from the output
    // type, input types, and lower-cased base name.
    public static String generateAggregationName(String baseName, TypeSignature outputType, List<TypeSignature> inputTypes)
    {
        StringBuilder sb = new StringBuilder();
        sb.append(CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, outputType.toString()));
        for (TypeSignature inputType : inputTypes) {
            sb.append(CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, inputType.toString()));
        }
        sb.append(CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, baseName.toLowerCase(ENGLISH)));

        return sb.toString();
    }

    // used by aggregation compiler
    @SuppressWarnings("UnusedDeclaration")
    public static Function<Integer, Block> pageBlockGetter(final Page page)
    {
        return page::getBlock;
    }
}
/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jitsi.hammer.utils; import net.java.sip.communicator.impl.protocol.jabber.extensions.colibri.*; import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*; import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.CandidateType; import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.ContentPacketExtension.*; import net.java.sip.communicator.service.protocol.media.*; import org.ice4j.socket.*; import org.jitsi.hammer.extension.*; import org.jitsi.service.libjitsi.*; import org.jitsi.service.neomedia.*; import org.jitsi.service.neomedia.device.*; import org.jitsi.service.neomedia.format.*; import org.jitsi.util.Logger; import org.ice4j.*; import org.ice4j.ice.*; import java.net.*; import java.util.*; /** * The class contains a number of utility methods that are meant to facilitate * the handling of a Jingle session and the created ICE stream and media stream. * * @author Thomas Kuntz */ public class HammerUtils { /** * The <tt>Logger</tt> used by the <tt>HammerUtils</tt> class and its * instances for logging output. 
*/ private static final Logger logger = Logger.getLogger(HammerUtils.class); /** * Select the favorite <tt>MediaFormat</tt> of a list of <tt>MediaFormat</tt> * * @param mediaType The type of the <tt>MediaFormat</tt> * in <tt>mediaFormatList</tt> * * @param mediaFormatList a list of <tt>MediaFormat</tt> * (their <tt>MediaType</tt> should be the same as <tt>mediaType</tt> * * * @return the favorite <tt>MediaFormat</tt> * of a list of <tt>MediaFormat</tt> */ public static MediaFormat selectFormat( String mediaType, List<MediaFormat> mediaFormatList) { MediaFormat returnedFormat = null; /* * returnedFormat take the value of the first element in the list, * so that if the favorite MediaFormat isn't found on the list, * then this function return the first MediaFormat of the list. * * For now, this function prefer opus for the audio format, and * vp8 for the video format */ switch(MediaType.parseString(mediaType)) { case AUDIO: for(MediaFormat fmt : mediaFormatList) { if(returnedFormat == null) returnedFormat = fmt; if(fmt.getEncoding().equalsIgnoreCase("opus")) { returnedFormat = fmt; break; } } break; case VIDEO: for(MediaFormat fmt : mediaFormatList) { if(returnedFormat == null) returnedFormat = fmt; if(fmt.getEncoding().equalsIgnoreCase("vp8")) { returnedFormat = fmt; break; } } break; default : break; } return returnedFormat; } /** * Add the remote transport candidates of each * <tt>ContentPacketExtension</tt> in <tt>contentList</tt> to their * associated <tt>IceMediaStream</tt> inside <tt>agent</tt>. * * @param agent the <tt>Agent</tt> containing the IceMediaStream to which * will be added the remote transport candidates. * @param contentList the list of <tt>ContentPacketExtension</tt> containing * the remote transport candidates to add to the <tt>Agent</tt>. 
*/
    public static void addRemoteCandidateToAgent(
        Agent agent,
        Collection<ContentPacketExtension> contentList)
    {
        IceUdpTransportPacketExtension transports = null;
        List<CandidatePacketExtension> candidates = null;
        String contentName = null;
        IceMediaStream stream = null;
        Component component = null;

        RemoteCandidate relatedCandidate = null;
        TransportAddress mainAddr = null, relatedAddr = null;
        RemoteCandidate remoteCandidate;

        for(ContentPacketExtension content : contentList)
        {
            contentName = content.getName();
            stream = agent.getStream(contentName);
            // Skip contents that have no matching ICE stream in the agent.
            if(stream == null)
                continue;

            transports = content.getFirstChildOfType(IceUdpTransportPacketExtension.class);
            // No transport element means there are no candidates to add.
            if(transports == null)
                continue;

            // Remote ICE credentials for this stream.
            stream.setRemotePassword(transports.getPassword());
            stream.setRemoteUfrag(transports.getUfrag());

            candidates = transports.getChildExtensionsOfType(CandidatePacketExtension.class);
            Collections.sort(candidates);

            for(CandidatePacketExtension candidate : candidates)
            {
                component = stream.getComponent(candidate.getComponent());

                // Only accept candidates for a known component and for the
                // agent's current generation.
                if( (component != null)
                    && (candidate.getGeneration() == agent.getGeneration()))
                {
                    // Ignore malformed candidates without address/port.
                    if((candidate.getIP() != null) && (candidate.getPort() > 0))
                    {
                        mainAddr = new TransportAddress(
                            candidate.getIP(),
                            candidate.getPort(),
                            Transport.parse(candidate.getProtocol().toLowerCase()));

                        relatedCandidate = null;
                        // Resolve the related address (e.g. the base of a
                        // reflexive candidate) when one is advertised.
                        if( (candidate.getRelAddr() != null)
                            && (candidate.getRelPort() > 0))
                        {
                            relatedAddr = new TransportAddress(
                                candidate.getRelAddr(),
                                candidate.getRelPort(),
                                Transport.parse(candidate.getProtocol().toLowerCase()));
                            relatedCandidate = component.findRemoteCandidate(relatedAddr);
                        }

                        remoteCandidate = new RemoteCandidate(
                            mainAddr,
                            component,
                            org.ice4j.ice.CandidateType.parse(candidate.getType().toString()),
                            candidate.getFoundation(),
                            candidate.getPriority(),
                            relatedCandidate);

                        component.addRemoteCandidate(remoteCandidate);
                    }
                }
            }
        }
    }

    /**
     * Add the local transport candidates contained in <tt>agent</tt> to
     * their associated (by the stream/content name)
<tt>ContentPacketExtension</tt>.
     *
     * @param agent the <tt>Agent</tt> from which we will get the local
     * transport candidates.
     * @param contentList the list of <tt>ContentPacketExtension</tt> to which
     * will be added the local transport candidates.
     */
    public static void addLocalCandidateToContentList(
        Agent agent,
        Collection<ContentPacketExtension> contentList)
    {
        IceMediaStream iceMediaStream = null;
        IceUdpTransportPacketExtension transport = null;
        DtlsFingerprintPacketExtension fingerprint = null;
        CandidatePacketExtension candidate = null;
        long candidateID = 0;

        for(ContentPacketExtension content : contentList)
        {
            transport = new IceUdpTransportPacketExtension();
            iceMediaStream = agent.getStream(content.getName());

            // Local ICE credentials for this transport.
            transport.setPassword( agent.getLocalPassword() );
            transport.setUfrag( agent.getLocalUfrag() );

            if(iceMediaStream != null)
            {
                // NOTE(review): this fingerprint is created with empty
                // values and never attached to the transport — it looks
                // like dead code; the real fingerprint is presumably added
                // by setDtlsEncryptionOnTransport. Verify before removing.
                fingerprint = new DtlsFingerprintPacketExtension();
                fingerprint.setFingerprint("");
                fingerprint.setHash("");

                for(Component component : iceMediaStream.getComponents())
                {
                    for(LocalCandidate localCandidate : component.getLocalCandidates())
                    {
                        candidate = new CandidatePacketExtension();
                        candidate.setNamespace(IceUdpTransportPacketExtension.NAMESPACE);
                        candidate.setFoundation(localCandidate.getFoundation());
                        candidate.setComponent(localCandidate.getParentComponent().getComponentID());
                        candidate.setProtocol(localCandidate.getTransport().toString());
                        candidate.setPriority(localCandidate.getPriority());
                        candidate.setIP(localCandidate.getTransportAddress().getHostAddress());
                        candidate.setPort(localCandidate.getTransportAddress().getPort());
                        candidate.setType(CandidateType.valueOf(localCandidate.getType().toString()));
                        candidate.setGeneration(agent.getGeneration());
                        candidate.setNetwork(0);
                        // IDs only need to be unique within the session.
                        candidate.setID(String.valueOf(candidateID++));
                        // Advertise the related address (base) when present.
                        if( localCandidate.getRelatedAddress() != null )
                        {
                            candidate.setRelAddr(localCandidate.getRelatedAddress().getHostAddress());
                            candidate.setRelPort(localCandidate.getRelatedAddress().getPort());
                        }

                        transport.addCandidate(candidate);
                    }
                }
            }

            content.addChildExtension(transport);
        }
    }

    /**
     * Create a Map of <tt>MediaStream</tt> containing an AUDIO and VIDEO stream,
     * indexed by the String equivalent of their <tt>MediaType</tt>, with
     * just the <tt>MediaType</tt> set and with a <tt>DtlsControl</tt> for
     * <tt>SrtpControl</tt>. Anything else needs to be set later.
     *
     * @return a Map of newly created <tt>MediaStream</tt>, indexed by
     * the String equivalent of their <tt>MediaType</tt>
     */
    public static Map<String,MediaStream> createMediaStreams()
    {
        MediaService mediaService = LibJitsi.getMediaService();
        Map<String,MediaStream> mediaStreamMap = new HashMap<String,MediaStream>();
        MediaStream stream = null;

        /*
         * AUDIO STREAM
         */
        stream = mediaService.createMediaStream(
            null,
            MediaType.AUDIO,
            mediaService.createSrtpControl(SrtpControlType.DTLS_SRTP));
        mediaStreamMap.put(MediaType.AUDIO.toString(), stream);

        /*
         * VIDEO STREAM
         */
        stream = mediaService.createMediaStream(
            null,
            MediaType.VIDEO,
            mediaService.createSrtpControl(SrtpControlType.DTLS_SRTP));
        mediaStreamMap.put(MediaType.VIDEO.toString(), stream);

        return mediaStreamMap;
    }

    /**
     * Configure the <tt>MediaStream</tt> contained in <tt>mediaStreamMap</tt>
     * with the information the other arguments give.
     *
     * It will set the streams with the <tt>MediaFormat</tt> associated to its
     * name/MediaType, and with the selected <tt>MediaDevice</tt> returned by
     * <tt>mediaDeviceChooser</tt> for the <tt>MediaType</tt> of the
     * <tt>MediaFormat</tt> of the stream.
     *
     * It will also create the streams with a <tt>DtlsControl</tt> that need
     * to be configured later.
     * The stream will be set to SENDONLY.
     *
     * @param mediaFormatMap a <tt>Map</tt> of <tt>MediaFormat</tt> indexed by
     * the name/<tt>MediaType</tt> of the MediaStreams set with this
     * <tt>MediaFormat</tt>.
* @param mediaDeviceChooser used to choose the MediaDevice for each stream
     * @param ptRegistry the <tt>DynamicPayloadTypeRegistry</tt> containing
     * the dynamic payload type of the <tt>MediaFormat</tt> (if necessary).
     * @param rtpExtRegistry the registry used to obtain the extension ID
     * mapped to each <tt>RTPExtension</tt> of the streams.
     */
    public static void configureMediaStream(
        Map<String,MediaStream> mediaStreamMap,
        Map<String,MediaFormat> mediaFormatMap,
        Map<String,List<RTPExtension>> rtpExtensionMap,
        MediaDeviceChooser mediaDeviceChooser,
        DynamicPayloadTypeRegistry ptRegistry,
        DynamicRTPExtensionsRegistry rtpExtRegistry)
    {
        MediaStream stream = null;
        MediaFormat format = null;
        MediaDevice device = null;

        MediaService mediaService = LibJitsi.getMediaService();

        for(String mediaName : mediaFormatMap.keySet())
        {
            format = mediaFormatMap.get(mediaName);
            if(format == null)
                continue;

            stream = mediaStreamMap.get(mediaName);

            device = mediaDeviceChooser.getMediaDevice(format.getMediaType());
            if(device != null)
                stream.setDevice(device);

            stream.setFormat(format);
            stream.setName(mediaName);
            stream.setRTPTranslator(mediaService.createRTPTranslator());

            /* XXX if SENDRECV is set instead of SENDONLY or RECVONLY,
             * the audio stream will take 100% of a core of the CPU
             *
             * It also seems like if I remove the 2 function of the
             * AudioSilenceMediaDevice createPlayer and createSession, that
             * return null for the Player, the bug is also avoided : maybe
             * libjitsi doesn't handle correctly a null player..
             */
            stream.setDirection(MediaDirection.SENDONLY);

            // Register a dynamic payload type when the format has no
            // statically assigned one.
            if(format.getRTPPayloadType()
                == MediaFormat.RTP_PAYLOAD_TYPE_UNKNOWN)
            {
                stream.addDynamicRTPPayloadType(
                    ptRegistry.getPayloadType(format),
                    format);
            }

            /*
             * Add the rtp extensions learned in the session-initiate, and
             * supported by the MediaDevice of this MediaStream
             */
            for(RTPExtension rtpExtension : rtpExtensionMap.get(mediaName))
            {
                byte extensionID
                    = rtpExtRegistry.getExtensionMapping(rtpExtension);
                stream.addRTPExtension(extensionID , rtpExtension);
            }

            // Add the dynamic payload type of RED (116) so that the
            // MediaStream doesn't complain when it receives RED packets
            // from the Jitsi Meet user.
            if(format.getMediaType() == MediaType.VIDEO)
                stream.addDynamicRTPPayloadType(
                    (byte) 116,
                    mediaService.getFormatFactory().createMediaFormat("red"));

            mediaStreamMap.put(mediaName, stream);
        }
    }

    /**
     * Add the <tt>DatagramSocket</tt> created by the IceMediaStreams of an
     * <tt>Agent</tt> (so after ICE was TERMINATED) to their associated
     * <tt>MediaStream</tt> contained in a <tt>Map</tt> and indexed by the
     * name of their associated IceMediaStream.
     *
     * @param agent the <tt>Agent</tt> containing the <tt>IceMediaStream</tt>
     * from which we will get the <tt>DatagramSocket</tt>
     * @param mediaStreamMap the <tt>Map</tt> of <tt>MediaStream</tt> to which
     * will be added the <tt>DatagramSocket</tt> of their corresponding
     * <tt>IceMediaStream</tt> contained in the <tt>Agent</tt>.
*/
    public static void addSocketToMediaStream(
        Agent agent,
        Map<String,MediaStream> mediaStreamMap,
        // when true, incoming RTP is filtered out so that only DTLS
        // packets reach the RTP socket (we never process received RTP).
        boolean dropIncomingRtpPackets)
    {
        IceMediaStream iceMediaStream = null;
        CandidatePair rtpPair = null;
        CandidatePair rtcpPair = null;
        DatagramSocket rtpSocket = null;
        DatagramSocket rtcpSocket = null;

        StreamConnector connector = null;
        MediaStream stream = null;

        String str = "Transport candidates selected for RTP:\n";

        for(String mediaName : agent.getStreamNames())
        {
            iceMediaStream = agent.getStream(mediaName);
            stream = mediaStreamMap.get(mediaName);

            // ICE must be TERMINATED here, so a selected pair exists.
            rtpPair = iceMediaStream.getComponent(Component.RTP)
                .getSelectedPair();
            rtcpPair = iceMediaStream.getComponent(Component.RTCP)
                .getSelectedPair();

            str = str + "-" + mediaName + " stream :\n" + rtpPair + "\n";

            rtpSocket = rtpPair.getIceSocketWrapper().getUDPSocket();

            if (dropIncomingRtpPackets
                && rtpSocket instanceof MultiplexingDatagramSocket)
            {
                try
                {
                    // We are not going to handle any incoming RTP packets
                    // anyway, so we might as well drop them early and not
                    // waste resources processing them further.
                    // This sets up a filtered socket, which receives only
                    // DTLS packets.
                    rtpSocket = ((MultiplexingDatagramSocket) rtpSocket)
                        .getSocket(new DTLSDatagramFilter());
                }
                catch (SocketException se)
                {
                    // Whatever, this is just an optimization, anyway.
                }
            }

            rtcpSocket = rtcpPair.getIceSocketWrapper().getUDPSocket();

            connector = new DefaultStreamConnector(rtpSocket, rtcpSocket);
            stream.setConnector(connector);

            // Point the stream at the remote side of the selected pairs.
            stream.setTarget(
                new MediaStreamTarget(
                    rtpPair.getRemoteCandidate().getTransportAddress(),
                    rtcpPair.getRemoteCandidate().getTransportAddress()) );
        }
        logger.info(str);
    }

    /**
     * Add the remote fingerprint & hash function contained in
     * <tt>remoteContentList</tt> to the <tt>DtlsControl</tt> of the
     * <tt>MediaStream</tt>.
     * Add the local fingerprint & hash function from the <tt>DtlsControl</tt> of
     * the <tt>MediaStream</tt> to the <tt>localContentList</tt>.
*
     * @param mediaStreamMap a Map containing the <tt>MediaStream</tt> to
     * which will be added the remote fingerprints, from which we will get
     * the local fingerprints.
     * @param localContentList The list of <tt>ContentPacketExtension</tt> to
     * which will be added the local fingerprints
     * @param remoteContentList The list of <tt>ContentPacketExtension</tt> from
     * which we will get the remote fingerprints
     */
    public static void setDtlsEncryptionOnTransport(
        Map<String,MediaStream> mediaStreamMap,
        List<ContentPacketExtension> localContentList,
        List<ContentPacketExtension> remoteContentList)
    {
        MediaStream stream = null;
        IceUdpTransportPacketExtension transport = null;
        List<DtlsFingerprintPacketExtension> fingerprints = null;
        SrtpControl srtpControl = null;
        DtlsControl dtlsControl = null;
        DtlsControl.Setup dtlsSetup = null;

        // First pass: feed the remote fingerprints into the DtlsControl of
        // each MediaStream and derive our DTLS setup role from the offer.
        for(ContentPacketExtension remoteContent : remoteContentList)
        {
            transport = remoteContent.getFirstChildOfType(IceUdpTransportPacketExtension.class);
            dtlsSetup = null;

            stream = mediaStreamMap.get(remoteContent.getName());
            if(stream == null)
                continue;
            srtpControl = stream.getSrtpControl();
            if(srtpControl == null)
                continue;

            if( (srtpControl instanceof DtlsControl) && (transport != null) )
            {
                dtlsControl = (DtlsControl)srtpControl;
                fingerprints = transport.getChildExtensionsOfType(
                    DtlsFingerprintPacketExtension.class);

                if (!fingerprints.isEmpty())
                {
                    Map<String,String> remoteFingerprints
                        = new LinkedHashMap<String,String>();

                    //XXX videobridge send a session-initiate with only one
                    //fingerprint, so I'm not sure using a loop here is useful
                    for(DtlsFingerprintPacketExtension fingerprint : fingerprints)
                    {
                        remoteFingerprints.put(
                            fingerprint.getHash(),
                            fingerprint.getFingerprint());

                        //get the setup attribute of the fingerprint
                        //(the first setup found will be taken)
                        if(dtlsSetup == null)
                        {
                            String setup = fingerprint.getAttributeAsString("setup");
                            if(setup != null)
                            {
                                dtlsSetup = DtlsControl.Setup.parseSetup(setup);
                            }
                            else
                            {
                                // Default to ACTPASS (and not ACTIVE, as
                                // RFC4145 defines), because this is what is
                                // expected in a jitsi-videobridge + (jicofo or
                                // jitsi-meet) environment.
                                dtlsSetup = DtlsControl.Setup.ACTPASS;
                            }
                        }
                    }

                    dtlsControl.setRemoteFingerprints(remoteFingerprints);
                    dtlsSetup = getDtlsSetupForAnswer(dtlsSetup);
                    dtlsControl.setSetup(dtlsSetup);
                }
            }
        }

        //This code add the fingerprint of the local MediaStream to the content
        //that will be sent with the session-accept
        for(ContentPacketExtension localContent : localContentList)
        {
            transport = localContent.getFirstChildOfType(
                IceUdpTransportPacketExtension.class);

            stream = mediaStreamMap.get(localContent.getName());
            if(stream == null)
                continue;
            srtpControl = stream.getSrtpControl();

            if( (srtpControl instanceof DtlsControl) && (transport != null))
            {
                DtlsFingerprintPacketExtension fingerprint =
                    new DtlsFingerprintPacketExtension();
                dtlsControl = (DtlsControl) srtpControl;

                fingerprint.setHash(dtlsControl.getLocalFingerprintHashFunction());
                fingerprint.setFingerprint(dtlsControl.getLocalFingerprint());
                // NOTE(review): dtlsSetup here is whatever value the last
                // iteration of the remote loop left behind, applied to every
                // local content — verify this cross-loop reuse is intended.
                fingerprint.setAttribute("setup", dtlsSetup);

                transport.addChildExtension(fingerprint);
            }
        }
    }

    /**
     * Get a correct DTLS Setup SDP attribute for the local DTLS engine from
     * the Setup offered by the remote target.
     * @param setup The DTLS Setup offered by the remote target.
     * @return a correct DTLS Setup SDP attribute for the local DTLS engine from
     * the Setup offered by the remote target.
*/
    public static DtlsControl.Setup getDtlsSetupForAnswer(DtlsControl.Setup setup)
    {
        // RFC 4145-style role negotiation: answer ACTIVE to ACTPASS/PASSIVE,
        // PASSIVE to ACTIVE, keep HOLDCONN; null stays null.
        if(setup == null)
            return null;

        switch(setup)
        {
            case ACTPASS:
            case PASSIVE:
                return DtlsControl.Setup.ACTIVE;
            case ACTIVE:
                return DtlsControl.Setup.PASSIVE;
            case HOLDCONN:
                return DtlsControl.Setup.HOLDCONN;
            default:
                // Unknown/unsupported value: mirror the original behavior
                // of returning null.
                return null;
        }
    }

    /**
     * Set the ssrc attribute of each <tt>MediaStream</tt> to their corresponding
     * <tt>RtpDescriptionPacketExtension</tt>, and also add a 'source' element
     * to it, describing the msid,mslabel,label and cname of the stream.
     *
     * @param contentMap the Map of <tt>ContentPacketExtension</tt> to which
     * will be set the ssrc and added the "source" element.
     * @param mediaStreamMap the Map of <tt>MediaStream</tt> from which will be
     * gotten the ssrc and other information.
     */
    public static void addSSRCToContent(
        Map<String,ContentPacketExtension> contentMap,
        Map<String,MediaStream> mediaStreamMap)
    {
        ContentPacketExtension content = null;
        RtpDescriptionPacketExtension description = null;
        MediaStream mediaStream = null;

        for(String mediaName : contentMap.keySet())
        {
            long ssrc;

            content = contentMap.get(mediaName);
            mediaStream = mediaStreamMap.get(mediaName);
            // Skip media names without both a content and a stream.
            if((content == null) || (mediaStream == null))
                continue;

            ssrc = mediaStream.getLocalSourceID();

            description = content.getFirstChildOfType(
                RtpDescriptionPacketExtension.class);

            description.setSsrc(String.valueOf(ssrc));
            addSourceExtension(description, ssrc);
        }
    }

    /**
     * Adds a <tt>SourcePacketExtension</tt> as a child element of
     * <tt>description</tt>. See XEP-0339.
     *
     * @param description the <tt>RtpDescriptionPacketExtension</tt> to which
     * a child element will be added.
     * @param ssrc the SSRC for the <tt>SourcePacketExtension</tt> to use.
*/
    public static void addSourceExtension(
        RtpDescriptionPacketExtension description,
        long ssrc)
    {
        MediaService mediaService = LibJitsi.getMediaService();
        // Random media-stream/track labels, as WebRTC endpoints expect.
        String msLabel = UUID.randomUUID().toString();
        String label = UUID.randomUUID().toString();

        SourcePacketExtension sourcePacketExtension =
            new SourcePacketExtension();
        SsrcPacketExtension ssrcPacketExtension =
            new SsrcPacketExtension();

        sourcePacketExtension.setSSRC(ssrc);
        sourcePacketExtension.addChildExtension(
            new ParameterPacketExtension("cname",
                mediaService.getRtpCname()));
        sourcePacketExtension.addChildExtension(
            new ParameterPacketExtension("msid", msLabel + " " + label));
        sourcePacketExtension.addChildExtension(
            new ParameterPacketExtension("mslabel", msLabel));
        sourcePacketExtension.addChildExtension(
            new ParameterPacketExtension("label", label));

        description.addChildExtension(sourcePacketExtension);

        // Duplicate the same information in the ssrc element form.
        ssrcPacketExtension.setSsrc(String.valueOf(ssrc));
        ssrcPacketExtension.setCname(mediaService.getRtpCname());
        ssrcPacketExtension.setMsid(msLabel + " " + label);
        ssrcPacketExtension.setMslabel(msLabel);
        ssrcPacketExtension.setLabel(label);

        description.addChildExtension(ssrcPacketExtension);
    }

    /**
     * Create a relatively empty <tt>ContentPacketExtension</tt> for 'data'
     * (<tt>MediaType.DATA</tt>) rtp content type, because
     * <tt>JingleUtils.createDescription</tt> doesn't handle this type for now.
     *
     * @param creator indicates whether the person who originally created this
     * content was the initiator or the responder of the jingle session.
     * @param senders indicates the direction of the media in this stream.
     * @return a <tt>ContentPacketExtension</tt> for 'data' content.
     */
    public static ContentPacketExtension createDescriptionForDataContent(
        CreatorEnum creator,
        SendersEnum senders)
    {
        ContentPacketExtension content = new ContentPacketExtension();
        RtpDescriptionPacketExtension description
            = new RtpDescriptionPacketExtension();

        content.setCreator(creator);
        content.setName("data");

        //senders - only if we have them and if they are different from default
        if(senders != null && senders != SendersEnum.both)
            content.setSenders(senders);

        description.setMedia("data");

        //RTP description
        content.addChildExtension(description);

        return content;
    }
}
/* * The MIT License * * Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, * Yahoo! Inc., Erik Ramfelt, Tom Huybrechts * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package hudson;

import hudson.PluginManager.PluginInstanceStore;
import jenkins.YesNoMaybe;
import jenkins.model.Jenkins;
import hudson.model.UpdateCenter;
import hudson.model.UpdateSite;
import hudson.util.VersionNumber;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Closeable;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.jar.Manifest;
import java.util.logging.Logger;

import static java.util.logging.Level.WARNING;

import org.apache.commons.logging.LogFactory;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.HttpResponses;

import java.util.Enumeration;
import java.util.jar.JarFile;

/**
 * Represents a Jenkins plug-in and associated control information
 * for Jenkins to control {@link Plugin}.
 *
 * <p>
 * A plug-in is packaged into a jar file whose extension is <tt>".jpi"</tt> (or <tt>".hpi"</tt> for backward compatibility),
 * A plugin needs to have a special manifest entry to identify what it is.
 *
 * <p>
 * At the runtime, a plugin has two distinct state axis.
 * <ol>
 *  <li>Enabled/Disabled. If enabled, Jenkins is going to use it
 *      next time Jenkins runs. Otherwise the next run will ignore it.
 *  <li>Activated/Deactivated. If activated, that means Jenkins is using
 *      the plugin in this session. Otherwise it's not.
 * </ol>
 * <p>
 * For example, an activated but disabled plugin is still running but the next
 * time it won't.
 *
 * @author Kohsuke Kawaguchi
 */
public class PluginWrapper implements Comparable<PluginWrapper> {
    /**
     * {@link PluginManager} to which this belongs to.
     */
    public final PluginManager parent;

    /**
     * Plugin manifest.
     * Contains description of the plugin.
     */
    private final Manifest manifest;

    /**
     * {@link ClassLoader} for loading classes from this plugin.
     * Null if disabled.
     */
    public final ClassLoader classLoader;

    /**
     * Base URL for loading static resources from this plugin.
     * Null if disabled. The static resources are mapped under
     * <tt>CONTEXTPATH/plugin/SHORTNAME/</tt>.
     */
    public final URL baseResourceURL;

    /**
     * Used to control enable/disable setting of the plugin.
     * If this file exists, plugin will be disabled.
     */
    private final File disableFile;

    /**
     * Used to control the unpacking of the bundled plugin.
     * If a pin file exists, Jenkins assumes that the user wants to pin down a particular version
     * of a plugin, and will not try to overwrite it. Otherwise, it'll be overwritten
     * by a bundled copy, to ensure consistency across upgrade/downgrade.
     * @since 1.325
     */
    private final File pinFile;

    /**
     * Short name of the plugin. The artifact Id of the plugin.
     * This is also used in the URL within Jenkins, so it needs
     * to remain stable even when the *.jpi file name is changed
     * (like Maven does.)
     */
    private final String shortName;

    /**
     * True if this plugin is activated for this session.
     * The snapshot of <tt>disableFile.exists()</tt> as of the start up.
     */
    private final boolean active;

    // Mandatory dependencies; resolvePluginDependencies() may later move
    // satisfied optional dependencies into this list.
    private final List<Dependency> dependencies;
    private final List<Dependency> optionalDependencies;

    /**
     * Is this plugin bundled in jenkins.war?
     */
    /*package*/ boolean isBundled;

    /**
     * A single plugin dependency parsed from the manifest's
     * "shortName:version[;resolution:=optional]" specifier.
     */
    static final class Dependency {
        public final String shortName;
        public final String version;
        public final boolean optional;

        public Dependency(String s) {
            int idx = s.indexOf(':');
            if(idx==-1)
                throw new IllegalArgumentException("Illegal dependency specifier "+s);
            this.shortName = s.substring(0,idx);
            this.version = s.substring(idx+1);

            // OSGi-style properties after ';' may mark the dependency optional.
            boolean isOptional = false;
            String[] osgiProperties = s.split(";");
            for (int i = 1; i < osgiProperties.length; i++) {
                String osgiProperty = osgiProperties[i].trim();
                if (osgiProperty.equalsIgnoreCase("resolution:=optional")) {
                    isOptional = true;
                }
            }
            this.optional = isOptional;
        }

        @Override
        public String toString() {
            return shortName + " (" + version + ")";
        }
    }

    /**
     * @param archive
     *      A .jpi archive file jar file, or a .jpl linked plugin.
* @param manifest
     *      The manifest for the plugin
     * @param baseResourceURL
     *      A URL pointing to the resources for this plugin
     * @param classLoader
     *      a classloader that loads classes from this plugin and its dependencies
     * @param disableFile
     *      if this file exists on startup, the plugin will not be activated
     * @param dependencies a list of mandatory dependencies
     * @param optionalDependencies a list of optional dependencies
     */
    public PluginWrapper(PluginManager parent, File archive, Manifest manifest,
            URL baseResourceURL, ClassLoader classLoader, File disableFile,
            List<Dependency> dependencies, List<Dependency> optionalDependencies) {
        this.parent = parent;
        this.manifest = manifest;
        this.shortName = computeShortName(manifest, archive);
        this.baseResourceURL = baseResourceURL;
        this.classLoader = classLoader;
        this.disableFile = disableFile;
        // Pin marker sits next to the archive: "<archive>.pinned".
        this.pinFile = new File(archive.getPath() + ".pinned");
        // Snapshot the enabled state at construction time.
        this.active = !disableFile.exists();
        this.dependencies = dependencies;
        this.optionalDependencies = optionalDependencies;
    }

    /**
     * Returns the URL of the index page jelly script.
     */
    public URL getIndexPage() {
        // In the current impl dependencies are checked first, so the plugin itself
        // will add the last entry in the getResources result.
        URL idx = null;
        try {
            Enumeration<URL> en = classLoader.getResources("index.jelly");
            while (en.hasMoreElements())
                idx = en.nextElement();
        } catch (IOException ignore) { }
        // In case plugin has dependencies but is missing its own index.jelly,
        // check that result has this plugin's artifactId in it:
        return idx != null && idx.toString().contains(shortName) ? idx : null;
    }

    private String computeShortName(Manifest manifest, File archive) {
        // use the name captured in the manifest, as often plugins
        // depend on the specific short name in its URLs.
        String n = manifest.getMainAttributes().getValue("Short-Name");
        if(n!=null)     return n;

        // maven seems to put this automatically, so good fallback to check.
        n = manifest.getMainAttributes().getValue("Extension-Name");
        if(n!=null)     return n;

        // otherwise infer from the file name, since older plugins don't have
        // this entry.
        return getBaseName(archive);
    }

    /**
     * Gets the "abc" portion from "abc.ext".
     */
    static String getBaseName(File archive) {
        String n = archive.getName();
        int idx = n.lastIndexOf('.');
        if(idx>=0)
            n = n.substring(0,idx);
        return n;
    }

    public List<Dependency> getDependencies() {
        return dependencies;
    }

    public List<Dependency> getOptionalDependencies() {
        return optionalDependencies;
    }

    /**
     * Returns the short name suitable for URL.
     */
    public String getShortName() {
        return shortName;
    }

    /**
     * Gets the instance of {@link Plugin} contributed by this plugin.
     */
    public Plugin getPlugin() {
        return Jenkins.lookup(PluginInstanceStore.class).store.get(this);
    }

    /**
     * Gets the URL that shows more information about this plugin.
     * @return
     *      null if this information is unavailable.
     * @since 1.283
     */
    public String getUrl() {
        // first look for the manifest entry. This is new in maven-hpi-plugin 1.30
        String url = manifest.getMainAttributes().getValue("Url");
        if(url!=null)      return url;

        // fallback to update center metadata
        UpdateSite.Plugin ui = getInfo();
        if(ui!=null)    return ui.wiki;

        return null;
    }

    @Override
    public String toString() {
        return "Plugin:" + getShortName();
    }

    /**
     * Returns a one-line descriptive name of this plugin.
     */
    public String getLongName() {
        String name = manifest.getMainAttributes().getValue("Long-Name");
        if(name!=null)      return name;
        return shortName;
    }

    /**
     * Does this plugin supports dynamic loading?
     */
    public YesNoMaybe supportsDynamicLoad() {
        String v = manifest.getMainAttributes().getValue("Support-Dynamic-Loading");
        // Absent attribute means the plugin didn't declare either way.
        if (v==null)    return YesNoMaybe.MAYBE;
        return Boolean.parseBoolean(v) ? YesNoMaybe.YES : YesNoMaybe.NO;
    }

    /**
     * Returns the version number of this plugin
     */
    public String getVersion() {
        String v = manifest.getMainAttributes().getValue("Plugin-Version");
        if(v!=null)      return v;

        // plugins generated before maven-hpi-plugin 1.3 should still have this attribute
        v = manifest.getMainAttributes().getValue("Implementation-Version");
        if(v!=null)      return v;

        return "???";
    }

    /**
     * Returns the version number of this plugin
     */
    public VersionNumber getVersionNumber() {
        return new VersionNumber(getVersion());
    }

    /**
     * Returns true if the version of this plugin is older than the given version.
     */
    public boolean isOlderThan(VersionNumber v) {
        try {
            return getVersionNumber().compareTo(v) < 0;
        } catch (IllegalArgumentException e) {
            // if we can't figure out our current version, it probably means it's very old,
            // since the version information is missing only from the very old plugins
            return true;
        }
    }

    /**
     * Terminates the plugin.
     */
    public void stop() {
        LOGGER.info("Stopping "+shortName);
        try {
            getPlugin().stop();
        } catch(Throwable t) {
            LOGGER.log(WARNING, "Failed to shut down "+shortName, t);
        }
        // Work around a bug in commons-logging.
        // See http://www.szegedi.org/articles/memleak.html
        LogFactory.release(classLoader);
    }

    public void releaseClassLoader() {
        if (classLoader instanceof Closeable)
            try {
                ((Closeable) classLoader).close();
            } catch (IOException e) {
                LOGGER.log(WARNING, "Failed to shut down classloader",e);
            }
    }

    /**
     * Enables this plugin next time Jenkins runs.
     */
    public void enable() throws IOException {
        if(!disableFile.delete())
            throw new IOException("Failed to delete "+disableFile);
    }

    /**
     * Disables this plugin next time Jenkins runs.
     */
    public void disable() throws IOException {
        // creates an empty file
        OutputStream os = new FileOutputStream(disableFile);
        os.close();
    }

    /**
     * Returns true if this plugin is enabled for this session.
     */
    public boolean isActive() {
        return active;
    }

    public boolean isBundled() {
        return isBundled;
    }

    /**
     * If true, the plugin is going to be activated next time
     * Jenkins runs.
     */
    public boolean isEnabled() {
        return !disableFile.exists();
    }

    public Manifest getManifest() {
        return manifest;
    }

    public void setPlugin(Plugin plugin) {
        Jenkins.lookup(PluginInstanceStore.class).store.put(this,plugin);
        plugin.wrapper = this;
    }

    public String getPluginClass() {
        return manifest.getMainAttributes().getValue("Plugin-Class");
    }

    public boolean hasLicensesXml() {
        try {
            // Probe for the resource; existence is all we care about.
            new URL(baseResourceURL,"WEB-INF/licenses.xml").openStream().close();
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Makes sure that all the dependencies exist, and then accept optional dependencies
     * as real dependencies.
     *
     * @throws IOException
     *             thrown if one or several mandatory dependencies doesn't exists.
     */
    /*package*/ void resolvePluginDependencies() throws IOException {
        List<String> missingDependencies = new ArrayList<String>();
        // make sure dependencies exist
        for (Dependency d : dependencies) {
            if (parent.getPlugin(d.shortName) == null)
                missingDependencies.add(d.toString());
        }
        if (!missingDependencies.isEmpty())
            throw new IOException("Dependency "+Util.join(missingDependencies, ", ")+" doesn't exist");

        // add the optional dependencies that exists
        for (Dependency d : optionalDependencies) {
            if (parent.getPlugin(d.shortName) != null)
                dependencies.add(d);
        }
    }

    /**
     * If the plugin has {@link #getUpdateInfo() an update},
     * returns the {@link UpdateSite.Plugin} object.
     *
     * @return
     *      This method may return null &mdash; for example,
     *      the user may have installed a plugin locally developed.
     */
    public UpdateSite.Plugin getUpdateInfo() {
        UpdateCenter uc = Jenkins.getInstance().getUpdateCenter();
        UpdateSite.Plugin p = uc.getPlugin(getShortName());
        if(p!=null && p.isNewerThan(getVersion())) return p;
        return null;
    }

    /**
     * returns the {@link UpdateSite.Plugin} object, or null.
*/
    public UpdateSite.Plugin getInfo() {
        UpdateCenter uc = Jenkins.getInstance().getUpdateCenter();
        return uc.getPlugin(getShortName());
    }

    /**
     * Returns true if this plugin has update in the update center.
     *
     * <p>
     * This method is conservative in the sense that if the version number is incomprehensible,
     * it always returns false.
     */
    public boolean hasUpdate() {
        return getUpdateInfo()!=null;
    }

    public boolean isPinned() {
        return pinFile.exists();
    }

    /**
     * Sort by short name.
     */
    public int compareTo(PluginWrapper pw) {
        return shortName.compareToIgnoreCase(pw.shortName);
    }

    /**
     * returns true if backup of previous version of plugin exists
     */
    public boolean isDowngradable() {
        return getBackupFile().exists();
    }

    /**
     * Where is the backup file?
     */
    public File getBackupFile() {
        return new File(Jenkins.getInstance().getRootDir(),"plugins/"+getShortName() + ".bak");
    }

    /**
     * returns the version of the backed up plugin,
     * or null if there's no back up.
     */
    public String getBackupVersion() {
        File backup = getBackupFile();
        if (backup.exists()) {
            try {
                JarFile backupPlugin = new JarFile(backup);
                try {
                    return backupPlugin.getManifest().getMainAttributes().getValue("Plugin-Version");
                } finally {
                    // FIX: the JarFile was previously never closed, leaking
                    // a file handle on every call.
                    backupPlugin.close();
                }
            } catch (IOException e) {
                LOGGER.log(WARNING, "Failed to get backup version ", e);
                return null;
            }
        } else {
            return null;
        }
    }

//
//
// Action methods
//
//
    public HttpResponse doMakeEnabled() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        enable();
        return HttpResponses.ok();
    }

    public HttpResponse doMakeDisabled() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        disable();
        return HttpResponses.ok();
    }

    public HttpResponse doPin() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        // creates an empty pin marker file
        new FileOutputStream(pinFile).close();
        return HttpResponses.ok();
    }

    public HttpResponse doUnpin() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        // NOTE(review): the delete() result is ignored; a failure silently
        // leaves the plugin pinned.
        pinFile.delete();
        return HttpResponses.ok();
    }

    private static final Logger LOGGER = Logger.getLogger(PluginWrapper.class.getName());
}
// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Copyright (c) 2014 Intel Corporation. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.xwalk.core.xwview.test; import android.test.suitebuilder.annotation.SmallTest; import android.util.Pair; import android.webkit.WebResourceResponse; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; import org.chromium.base.test.util.DisabledTest; import org.chromium.base.test.util.Feature; import org.chromium.base.test.util.TestFileUtil; import org.chromium.content.browser.test.util.CallbackHelper; import org.chromium.content.browser.test.util.TestCallbackHelperContainer.OnReceivedErrorHelper; import org.chromium.net.test.util.TestWebServer; import org.xwalk.core.xwview.test.util.CommonResources; import org.xwalk.core.XWalkView; import org.xwalk.core.XWalkResourceClient; /** * Test case for XWalkResourceClient.shouldInterceptRequest callback * * Note the major part of this file is migrated from android_webview/. 
*/ public class ShouldInterceptLoadRequestTest extends XWalkViewTestBase { /** * Customized XWalkResourceClient implementation for shouldInterceptRequest */ private class TestXWalkResourceClient1 extends XWalkViewTestBase.TestXWalkResourceClient { @Override public WebResourceResponse shouldInterceptLoadRequest(XWalkView view, String url) { return mTestHelperBridge.shouldInterceptLoadRequest(url); } } private String addPageToTestServer(TestWebServer webServer, String httpPath, String html) { List<Pair<String, String>> headers = new ArrayList<Pair<String, String>>(); headers.add(Pair.create("Content-Type", "text/html")); headers.add(Pair.create("Cache-Control", "no-store")); return webServer.setResponse(httpPath, html, headers); } private String addAboutPageToTestServer(TestWebServer webServer) { return addPageToTestServer(webServer, "/" + CommonResources.ABOUT_FILENAME, CommonResources.ABOUT_HTML); } private WebResourceResponse stringToWebResourceResponse(String input) throws Throwable { final String mimeType = "text/html"; final String encoding = "UTF-8"; return new WebResourceResponse( mimeType, encoding, new ByteArrayInputStream(input.getBytes(encoding))); } private TestWebServer mWebServer; private TestXWalkResourceClient1 mTestXWalkResourceClient; private TestHelperBridge.ShouldInterceptLoadRequestHelper mShouldInterceptLoadRequestHelper; private TestHelperBridge.OnLoadStartedHelper mOnLoadStartedHelper; @Override protected void setUp() throws Exception { super.setUp(); setXWalkClient(new XWalkViewTestBase.TestXWalkClient()); getInstrumentation().runOnMainSync(new Runnable() { @Override public void run() { mTestXWalkResourceClient = new TestXWalkResourceClient1(); getXWalkView().setResourceClient(mTestXWalkResourceClient); mShouldInterceptLoadRequestHelper = mTestHelperBridge.getShouldInterceptLoadRequestHelper(); mOnLoadStartedHelper = mTestHelperBridge.getOnLoadStartedHelper(); } }); mWebServer = new TestWebServer(false); } @Override protected void tearDown() 
throws Exception { mWebServer.shutdown(); super.tearDown(); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledWithCorrectUrl() throws Throwable { final String aboutPageUrl = addAboutPageToTestServer(mWebServer); int callCount = mShouldInterceptLoadRequestHelper.getCallCount(); int onPageFinishedCallCount = mTestHelperBridge.getOnPageFinishedHelper().getCallCount(); loadUrlAsync(aboutPageUrl); mShouldInterceptLoadRequestHelper.waitForCallback(callCount); assertEquals(1, mShouldInterceptLoadRequestHelper.getUrls().size()); assertEquals(aboutPageUrl, mShouldInterceptLoadRequestHelper.getUrls().get(0)); mTestHelperBridge.getOnPageFinishedHelper().waitForCallback(onPageFinishedCallCount); assertEquals(CommonResources.ABOUT_TITLE, getTitleOnUiThread()); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testOnLoadResourceCalledWithCorrectUrl() throws Throwable { final String aboutPageUrl = addAboutPageToTestServer(mWebServer); int callCount = mOnLoadStartedHelper.getCallCount(); loadUrlAsync(aboutPageUrl); mOnLoadStartedHelper.waitForCallback(callCount); assertEquals(aboutPageUrl, mOnLoadStartedHelper.getUrl()); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testDoesNotCrashOnInvalidData() throws Throwable { final String aboutPageUrl = addAboutPageToTestServer(mWebServer); mShouldInterceptLoadRequestHelper.setReturnValue( new WebResourceResponse("text/html", "UTF-8", null)); int callCount = mShouldInterceptLoadRequestHelper.getCallCount(); loadUrlAsync(aboutPageUrl); mShouldInterceptLoadRequestHelper.waitForCallback(callCount); mShouldInterceptLoadRequestHelper.setReturnValue( new WebResourceResponse(null, null, new ByteArrayInputStream(new byte[0]))); callCount = mShouldInterceptLoadRequestHelper.getCallCount(); loadUrlAsync(aboutPageUrl); mShouldInterceptLoadRequestHelper.waitForCallback(callCount); mShouldInterceptLoadRequestHelper.setReturnValue( new WebResourceResponse(null, null, null)); callCount = 
mShouldInterceptLoadRequestHelper.getCallCount(); loadUrlAsync(aboutPageUrl); mShouldInterceptLoadRequestHelper.waitForCallback(callCount); } private static class EmptyInputStream extends InputStream { @Override public int available() { return 0; } @Override public int read() throws IOException { return -1; } @Override public int read(byte b[]) throws IOException { return -1; } @Override public int read(byte b[], int off, int len) throws IOException { return -1; } @Override public long skip(long n) throws IOException { if (n < 0) throw new IOException("skipping negative number of bytes"); return 0; } } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testDoesNotCrashOnEmptyStream() throws Throwable { final String aboutPageUrl = addAboutPageToTestServer(mWebServer); mShouldInterceptLoadRequestHelper.setReturnValue( new WebResourceResponse("text/html", "UTF-8", new EmptyInputStream())); int shouldInterceptRequestCallCount = mShouldInterceptLoadRequestHelper.getCallCount(); int onPageFinishedCallCount = mTestHelperBridge.getOnPageFinishedHelper().getCallCount(); loadUrlAsync(aboutPageUrl); mShouldInterceptLoadRequestHelper.waitForCallback(shouldInterceptRequestCallCount); mTestHelperBridge.getOnPageFinishedHelper().waitForCallback(onPageFinishedCallCount); } private static class SlowWebResourceResponse extends WebResourceResponse { private CallbackHelper mReadStartedCallbackHelper = new CallbackHelper(); private CountDownLatch mLatch = new CountDownLatch(1); public SlowWebResourceResponse(String mimeType, String encoding, InputStream data) { super(mimeType, encoding, data); } @Override public InputStream getData() { mReadStartedCallbackHelper.notifyCalled(); try { mLatch.await(); } catch (InterruptedException e) { // ignore } return super.getData(); } public void unblockReads() { mLatch.countDown(); } public CallbackHelper getReadStartedCallbackHelper() { return mReadStartedCallbackHelper; } } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public 
void testHttpStatusField() throws Throwable { final String syncGetUrl = mWebServer.getResponseUrl("/intercept_me"); final String syncGetJs = "(function() {" + " var xhr = new XMLHttpRequest();" + " xhr.open('GET', '" + syncGetUrl + "', false);" + " xhr.send(null);" + " console.info('xhr.status = ' + xhr.status);" + " return xhr.status;" + "})();"; getInstrumentation().runOnMainSync(new Runnable() { @Override public void run() { getXWalkView().getSettings().setJavaScriptEnabled(true); } }); final String aboutPageUrl = addAboutPageToTestServer(mWebServer); loadUrlSync(aboutPageUrl); mShouldInterceptLoadRequestHelper.setReturnValue( new WebResourceResponse("text/html", "UTF-8", null)); assertEquals("404", executeJavaScriptAndWaitForResult(syncGetJs)); mShouldInterceptLoadRequestHelper.setReturnValue( new WebResourceResponse("text/html", "UTF-8", new EmptyInputStream())); assertEquals("200", executeJavaScriptAndWaitForResult(syncGetJs)); } private String makePageWithTitle(String title) { return CommonResources.makeHtmlPageFrom("<title>" + title + "</title>", "<div> The title is: " + title + " </div>"); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCanInterceptMainFrame() throws Throwable { final String expectedTitle = "testShouldInterceptLoadRequestCanInterceptMainFrame"; final String expectedPage = makePageWithTitle(expectedTitle); mShouldInterceptLoadRequestHelper.setReturnValue( stringToWebResourceResponse(expectedPage)); final String aboutPageUrl = addAboutPageToTestServer(mWebServer); loadUrlSync(aboutPageUrl); assertEquals(expectedTitle, getTitleOnUiThread()); assertEquals(0, mWebServer.getRequestCount("/" + CommonResources.ABOUT_FILENAME)); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testDoesNotChangeReportedUrl() throws Throwable { mShouldInterceptLoadRequestHelper.setReturnValue( stringToWebResourceResponse(makePageWithTitle("some title"))); final String aboutPageUrl = addAboutPageToTestServer(mWebServer); 
loadUrlSync(aboutPageUrl); assertEquals(aboutPageUrl, mTestHelperBridge.getOnPageFinishedHelper().getUrl()); assertEquals(aboutPageUrl, mTestHelperBridge.getOnPageStartedHelper().getUrl()); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testNullInputStreamCausesErrorForMainFrame() throws Throwable { final OnReceivedErrorHelper onReceivedErrorHelper = mTestHelperBridge.getOnReceivedErrorHelper(); mShouldInterceptLoadRequestHelper.setReturnValue( new WebResourceResponse("text/html", "UTF-8", null)); final String aboutPageUrl = addAboutPageToTestServer(mWebServer); final int callCount = onReceivedErrorHelper.getCallCount(); loadUrlAsync(aboutPageUrl); onReceivedErrorHelper.waitForCallback(callCount); assertEquals(0, mWebServer.getRequestCount("/" + CommonResources.ABOUT_FILENAME)); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForImage() throws Throwable { final String imagePath = "/" + CommonResources.FAVICON_FILENAME; mWebServer.setResponseBase64(imagePath, CommonResources.FAVICON_DATA_BASE64, CommonResources.getImagePngHeaders(true)); final String pageWithImage = addPageToTestServer(mWebServer, "/page_with_image.html", CommonResources.getOnImageLoadedHtml(CommonResources.FAVICON_FILENAME)); int callCount = mShouldInterceptLoadRequestHelper.getCallCount(); loadUrlSync(pageWithImage); mShouldInterceptLoadRequestHelper.waitForCallback(callCount, 2); assertEquals(2, mShouldInterceptLoadRequestHelper.getUrls().size()); assertTrue(mShouldInterceptLoadRequestHelper.getUrls().get(1).endsWith( CommonResources.FAVICON_FILENAME)); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testOnReceivedErrorCallback() throws Throwable { final OnReceivedErrorHelper onReceivedErrorHelper = mTestHelperBridge.getOnReceivedErrorHelper(); mShouldInterceptLoadRequestHelper.setReturnValue(new WebResourceResponse(null, null, null)); int onReceivedErrorHelperCallCount = onReceivedErrorHelper.getCallCount(); 
loadUrlSync("foo://bar"); onReceivedErrorHelper.waitForCallback(onReceivedErrorHelperCallCount, 1); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testNoOnReceivedErrorCallback() throws Throwable { final String imagePath = "/" + CommonResources.FAVICON_FILENAME; final String imageUrl = mWebServer.setResponseBase64(imagePath, CommonResources.FAVICON_DATA_BASE64, CommonResources.getImagePngHeaders(true)); final String pageWithImage = addPageToTestServer(mWebServer, "/page_with_image.html", CommonResources.getOnImageLoadedHtml(CommonResources.FAVICON_FILENAME)); final OnReceivedErrorHelper onReceivedErrorHelper = mTestHelperBridge.getOnReceivedErrorHelper(); mShouldInterceptLoadRequestHelper.setReturnValueForUrl( imageUrl, new WebResourceResponse(null, null, null)); int onReceivedErrorHelperCallCount = onReceivedErrorHelper.getCallCount(); loadUrlSync(pageWithImage); assertEquals(onReceivedErrorHelperCallCount, onReceivedErrorHelper.getCallCount()); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForIframe() throws Throwable { final String aboutPageUrl = addAboutPageToTestServer(mWebServer); final String pageWithIframe = addPageToTestServer(mWebServer, "/page_with_iframe.html", CommonResources.makeHtmlPageFrom("", "<iframe src=\"" + aboutPageUrl + "\"/>")); int callCount = mShouldInterceptLoadRequestHelper.getCallCount(); // These callbacks can race with favicon.ico callback. 
mShouldInterceptLoadRequestHelper.setUrlToWaitFor(aboutPageUrl); loadUrlSync(pageWithIframe); mShouldInterceptLoadRequestHelper.waitForCallback(callCount, 1); assertEquals(1, mShouldInterceptLoadRequestHelper.getUrls().size()); assertEquals(aboutPageUrl, mShouldInterceptLoadRequestHelper.getUrls().get(0)); } private void calledForUrlTemplate(final String url) throws Exception { int callCount = mShouldInterceptLoadRequestHelper.getCallCount(); int onPageStartedCallCount = mTestHelperBridge.getOnPageStartedHelper().getCallCount(); loadUrlAsync(url); mShouldInterceptLoadRequestHelper.waitForCallback(callCount); assertEquals(url, mShouldInterceptLoadRequestHelper.getUrls().get(0)); mTestHelperBridge.getOnPageStartedHelper().waitForCallback(onPageStartedCallCount); assertEquals(onPageStartedCallCount + 1, mTestHelperBridge.getOnPageStartedHelper().getCallCount()); } private void notCalledForUrlTemplate(final String url) throws Exception { int callCount = mShouldInterceptLoadRequestHelper.getCallCount(); loadUrlSync(url); // The intercepting must happen before onPageFinished. Since the IPC messages from the // renderer should be delivered in order waiting for onPageFinished is sufficient to // 'flush' any pending interception messages. 
assertEquals(callCount, mShouldInterceptLoadRequestHelper.getCallCount()); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForUnsupportedSchemes() throws Throwable { calledForUrlTemplate("foobar://resource/1"); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForNonexistentFiles() throws Throwable { calledForUrlTemplate("file:///somewhere/something"); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForExistingFiles() throws Throwable { final String tmpDir = getInstrumentation().getTargetContext().getCacheDir().getPath(); final String fileName = tmpDir + "/testfile.html"; final String title = "existing file title"; TestFileUtil.deleteFile(fileName); // Remove leftover file if any. TestFileUtil.createNewHtmlFile(fileName, title, ""); final String existingFileUrl = "file://" + fileName; int callCount = mShouldInterceptLoadRequestHelper.getCallCount(); int onPageFinishedCallCount = mTestHelperBridge.getOnPageFinishedHelper().getCallCount(); loadUrlAsync(existingFileUrl); mShouldInterceptLoadRequestHelper.waitForCallback(callCount); assertEquals(existingFileUrl, mShouldInterceptLoadRequestHelper.getUrls().get(0)); mTestHelperBridge.getOnPageFinishedHelper().waitForCallback(onPageFinishedCallCount); assertEquals(title, getTitleOnUiThread()); assertEquals(onPageFinishedCallCount + 1, mTestHelperBridge.getOnPageFinishedHelper().getCallCount()); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testNotCalledForExistingResource() throws Throwable { notCalledForUrlTemplate("file:///android_res/raw/resource_file.html"); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForNonexistentResource() throws Throwable { calledForUrlTemplate("file:///android_res/raw/no_file.html"); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testNotCalledForExistingAsset() throws Throwable { notCalledForUrlTemplate("file:///android_asset/www/index.html"); } 
@SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForNonexistentAsset() throws Throwable { calledForUrlTemplate("file:///android_res/raw/no_file.html"); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testNotCalledForExistingContentUrl() throws Throwable { final String contentResourceName = "target"; final String existingContentUrl = TestContentProvider.createContentUrl(contentResourceName); TestContentProvider.resetResourceRequestCount( getInstrumentation().getTargetContext(), contentResourceName); notCalledForUrlTemplate(existingContentUrl); int contentRequestCount = TestContentProvider.getResourceRequestCount( getInstrumentation().getTargetContext(), contentResourceName); assertEquals(1, contentRequestCount); } @SmallTest @Feature({"ShouldInterceptLoadRequest"}) public void testCalledForNonexistentContentUrl() throws Throwable { calledForUrlTemplate("content://org.xwalk.core.test.NoSuchProvider/foo"); } }
package org.hisp.dhis.dxf2.webmessage; /* * Copyright (c) 2004-2018, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

import org.hisp.dhis.dxf2.importsummary.ImportStatus;
import org.hisp.dhis.dxf2.importsummary.ImportSummaries;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.hisp.dhis.dxf2.metadata.feedback.ImportReport;
import org.hisp.dhis.dxf2.scheduling.JobConfigurationWebMessageResponse;
import org.hisp.dhis.dxf2.webmessage.responses.ErrorReportsWebMessageResponse;
import org.hisp.dhis.dxf2.webmessage.responses.ImportReportWebMessageResponse;
import org.hisp.dhis.dxf2.webmessage.responses.ObjectReportWebMessageResponse;
import org.hisp.dhis.dxf2.webmessage.responses.TypeReportWebMessageResponse;
import org.hisp.dhis.feedback.ErrorReport;
import org.hisp.dhis.feedback.ObjectReport;
import org.hisp.dhis.feedback.Status;
import org.hisp.dhis.feedback.TypeReport;
import org.hisp.dhis.scheduling.JobConfiguration;
import org.springframework.http.HttpStatus;

import java.util.List;

/**
 * Static factory methods for building {@link WebMessage} instances with
 * consistent status / HTTP-status combinations, plus helpers that convert
 * import and job reports into web messages.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public final class WebMessageUtils
{
    /**
     * Creates a web message with the given user message, status and HTTP status.
     */
    public static WebMessage createWebMessage( String message, Status status, HttpStatus httpStatus )
    {
        WebMessage webMessage = new WebMessage( status, httpStatus );
        webMessage.setMessage( message );
        return webMessage;
    }

    /**
     * Creates a web message with an additional developer-oriented message.
     */
    public static WebMessage createWebMessage( String message, String devMessage, Status status, HttpStatus httpStatus )
    {
        WebMessage webMessage = new WebMessage( status, httpStatus );
        webMessage.setMessage( message );
        webMessage.setDevMessage( devMessage );
        return webMessage;
    }

    public static WebMessage ok( String message )
    {
        return createWebMessage( message, Status.OK, HttpStatus.OK );
    }

    public static WebMessage ok( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.OK, HttpStatus.OK );
    }

    public static WebMessage created( String message )
    {
        return createWebMessage( message, Status.OK, HttpStatus.CREATED );
    }

    public static WebMessage created( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.OK, HttpStatus.CREATED );
    }

    public static WebMessage notFound( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.NOT_FOUND );
    }

    public static WebMessage notFound( Class<?> klass, String id )
    {
        String message = klass.getSimpleName() + " with id " + id + " could not be found.";
        return createWebMessage( message, Status.ERROR, HttpStatus.NOT_FOUND );
    }

    public static WebMessage notFound( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.NOT_FOUND );
    }

    public static WebMessage conflict( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.CONFLICT );
    }

    public static WebMessage conflict( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.CONFLICT );
    }

    public static WebMessage error( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.INTERNAL_SERVER_ERROR );
    }

    public static WebMessage error( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.INTERNAL_SERVER_ERROR );
    }

    public static WebMessage badRequest( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.BAD_REQUEST );
    }

    public static WebMessage badRequest( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.BAD_REQUEST );
    }

    public static WebMessage forbidden( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.FORBIDDEN );
    }

    public static WebMessage forbidden( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.FORBIDDEN );
    }

    public static WebMessage serviceUnavailable( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.SERVICE_UNAVAILABLE );
    }

    public static WebMessage serviceUnavailable( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.SERVICE_UNAVAILABLE );
    }

    public static WebMessage unprocessableEntity( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.UNPROCESSABLE_ENTITY );
    }

    public static WebMessage unprocessableEntity( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.UNPROCESSABLE_ENTITY );
    }

    /**
     * Correctly spelled replacement for {@link #unathorized(String)}.
     */
    public static WebMessage unauthorized( String message )
    {
        return createWebMessage( message, Status.ERROR, HttpStatus.UNAUTHORIZED );
    }

    /**
     * Correctly spelled replacement for {@link #unathorized(String, String)}.
     */
    public static WebMessage unauthorized( String message, String devMessage )
    {
        return createWebMessage( message, devMessage, Status.ERROR, HttpStatus.UNAUTHORIZED );
    }

    /**
     * @deprecated Misspelled; kept for backward compatibility. Use
     *             {@link #unauthorized(String)}.
     */
    @Deprecated
    public static WebMessage unathorized( String message )
    {
        return unauthorized( message );
    }

    /**
     * @deprecated Misspelled; kept for backward compatibility. Use
     *             {@link #unauthorized(String, String)}.
     */
    @Deprecated
    public static WebMessage unathorized( String message, String devMessage )
    {
        return unauthorized( message, devMessage );
    }

    /**
     * Converts an import summary into a web message: ERROR and WARNING map to
     * HTTP 409 Conflict, anything else to 200 OK.
     */
    public static WebMessage importSummary( ImportSummary importSummary )
    {
        WebMessage webMessage = new WebMessage();

        if ( importSummary.isStatus( ImportStatus.ERROR ) )
        {
            webMessage.setMessage( "An error occurred, please check import summary." );
            webMessage.setStatus( Status.ERROR );
            webMessage.setHttpStatus( HttpStatus.CONFLICT );
        }
        else if ( importSummary.isStatus( ImportStatus.WARNING ) )
        {
            // Fixed message typo ("One more conflicts") to match importSummaries().
            webMessage.setMessage( "One or more conflicts encountered, please check import summary." );
            webMessage.setStatus( Status.WARNING );
            webMessage.setHttpStatus( HttpStatus.CONFLICT );
        }
        else
        {
            webMessage.setMessage( "Import was successful." );
            webMessage.setStatus( Status.OK );
            webMessage.setHttpStatus( HttpStatus.OK );
        }

        webMessage.setResponse( importSummary );

        return webMessage;
    }

    /**
     * Converts a collection of import summaries into a web message, with the
     * same status mapping as {@link #importSummary(ImportSummary)}.
     */
    public static WebMessage importSummaries( ImportSummaries importSummaries )
    {
        WebMessage webMessage = new WebMessage();

        if ( importSummaries.isStatus( ImportStatus.ERROR ) )
        {
            webMessage.setMessage( "An error occurred, please check import summary." );
            webMessage.setStatus( Status.ERROR );
            webMessage.setHttpStatus( HttpStatus.CONFLICT );
        }
        else if ( importSummaries.isStatus( ImportStatus.WARNING ) )
        {
            webMessage.setMessage( "One or more conflicts encountered, please check import summary." );
            webMessage.setStatus( Status.WARNING );
            webMessage.setHttpStatus( HttpStatus.CONFLICT );
        }
        else
        {
            webMessage.setMessage( "Import was successful." );
            webMessage.setStatus( Status.OK );
            webMessage.setHttpStatus( HttpStatus.OK );
        }

        webMessage.setResponse( importSummaries );

        return webMessage;
    }

    /**
     * Converts a metadata import report into a web message. Any non-OK report
     * status is downgraded to WARNING with HTTP 409 Conflict.
     */
    public static WebMessage importReport( ImportReport importReport )
    {
        WebMessage webMessage = new WebMessage();
        webMessage.setResponse( new ImportReportWebMessageResponse( importReport ) );
        webMessage.setStatus( importReport.getStatus() );

        if ( webMessage.getStatus() != Status.OK )
        {
            // Fixed message typo ("One more more errors").
            webMessage.setMessage( "One or more errors occurred, please see full details in import report." );
            webMessage.setStatus( Status.WARNING );
            webMessage.setHttpStatus( HttpStatus.CONFLICT );
        }

        return webMessage;
    }

    /**
     * Converts a type report into a web message: OK when it carries no error
     * reports, otherwise ERROR with HTTP 409 Conflict.
     */
    public static WebMessage typeReport( TypeReport typeReport )
    {
        WebMessage webMessage = new WebMessage();
        webMessage.setResponse( new TypeReportWebMessageResponse( typeReport ) );

        if ( typeReport.getErrorReports().isEmpty() )
        {
            webMessage.setStatus( Status.OK );
            webMessage.setHttpStatus( HttpStatus.OK );
        }
        else
        {
            // Fixed message typo ("One more more errors").
            webMessage.setMessage( "One or more errors occurred, please see full details in import report." );
            webMessage.setStatus( Status.ERROR );
            webMessage.setHttpStatus( HttpStatus.CONFLICT );
        }

        return webMessage;
    }

    /**
     * Converts the first object report of the first type report, if any, into
     * a web message; otherwise returns a plain OK message.
     */
    public static WebMessage objectReport( ImportReport importReport )
    {
        WebMessage webMessage = new WebMessage( Status.OK, HttpStatus.OK );

        if ( !importReport.getTypeReports().isEmpty() )
        {
            TypeReport typeReport = importReport.getTypeReports().get( 0 );

            if ( !typeReport.getObjectReports().isEmpty() )
            {
                return objectReport( typeReport.getObjectReports().get( 0 ) );
            }
        }

        return webMessage;
    }

    /**
     * Converts an object report into a web message: OK when empty, otherwise
     * WARNING with HTTP 409 Conflict.
     */
    public static WebMessage objectReport( ObjectReport objectReport )
    {
        WebMessage webMessage = new WebMessage();
        webMessage.setResponse( new ObjectReportWebMessageResponse( objectReport ) );

        if ( objectReport.isEmpty() )
        {
            webMessage.setStatus( Status.OK );
            webMessage.setHttpStatus( HttpStatus.OK );
        }
        else
        {
            // Fixed message typo ("One more more errors").
            webMessage.setMessage( "One or more errors occurred, please see full details in import report." );
            webMessage.setStatus( Status.WARNING );
            webMessage.setHttpStatus( HttpStatus.CONFLICT );
        }

        return webMessage;
    }

    /**
     * Builds an OK web message announcing that the given job was initiated.
     */
    public static WebMessage jobConfigurationReport( JobConfiguration jobConfiguration )
    {
        WebMessage webMessage = WebMessageUtils.ok( "Initiated " + jobConfiguration.getName() );
        webMessage.setResponse( new JobConfigurationWebMessageResponse( jobConfiguration ) );
        return webMessage;
    }

    /**
     * Wraps error reports into a web message; non-empty reports yield ERROR
     * with HTTP 400 Bad Request.
     */
    public static WebMessage errorReports( List<ErrorReport> errorReports )
    {
        WebMessage webMessage = new WebMessage();
        webMessage.setResponse( new ErrorReportsWebMessageResponse( errorReports ) );

        if ( !errorReports.isEmpty() )
        {
            webMessage.setStatus( Status.ERROR );
            webMessage.setHttpStatus( HttpStatus.BAD_REQUEST );
        }

        return webMessage;
    }

    private WebMessageUtils()
    {
        // Utility class; not instantiable.
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hyracks.hdfs.scheduler; import java.io.FileReader; import java.io.IOException; import java.util.Map; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputSplit; import org.apache.hyracks.api.client.NodeControllerInfo; import org.apache.hyracks.api.client.NodeStatus; import org.apache.hyracks.api.comm.NetworkAddress; import org.apache.hyracks.api.topology.ClusterTopology; import org.apache.hyracks.api.topology.TopologyDefinitionParser; import org.apache.hyracks.test.support.TestUtils; import org.junit.Assert; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import junit.framework.TestCase; @SuppressWarnings("deprecation") public class SchedulerTest extends TestCase { private static String TOPOLOGY_PATH = "src/test/resources/topology.xml"; private ClusterTopology parseTopology() throws IOException, SAXException { FileReader fr = new FileReader(TOPOLOGY_PATH); InputSource in = new InputSource(fr); try { return TopologyDefinitionParser.parse(in); } finally { fr.close(); } } /** * Test the scheduler for the case when the Hyracks cluster is the HDFS cluster * * @throws Exception */ public void 
testSchedulerSimple() throws Exception { Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097); InputSplit[] fileSplits = new InputSplit[6]; fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }); fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }); fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" }); fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" }); fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }); fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }); String[] expectedResults = new String[] { "nc1", "nc4", "nc6", "nc2", "nc3", "nc5" }; Scheduler scheduler = new Scheduler(ncNameToNcInfos); String[] locationConstraints = scheduler.getLocationConstraints(fileSplits); for (int i = 0; i < locationConstraints.length; i++) { Assert.assertEquals(locationConstraints[i], expectedResults[i]); } ClusterTopology topology = parseTopology(); scheduler = new Scheduler(ncNameToNcInfos, topology); locationConstraints = scheduler.getLocationConstraints(fileSplits); for (int i = 0; i < locationConstraints.length; i++) { Assert.assertEquals(locationConstraints[i], expectedResults[i]); } } /** * Test the case where the HDFS cluster is a larger than the Hyracks cluster * * @throws Exception */ public void testSchedulerLargerHDFS() throws Exception { int dataPort = 5099; int resultPort = 5098; int messagingPort = 5097; Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(4, "nc", "10.0.0.", dataPort, resultPort, messagingPort); ncNameToNcInfos.put("nc7", new NodeControllerInfo("nc7", NodeStatus.ACTIVE, new NetworkAddress("10.0.0.7", dataPort), new 
// NOTE(review): this chunk opens mid-statement — the enclosing test method and the
// class header (ncNameToNcInfos, dataPort/resultPort/messagingPort, parseTopology())
// begin before this view. Code below is unchanged; only comments were added/fixed.
NetworkAddress("10.0.0.5", resultPort), new NetworkAddress("10.0.0.5", messagingPort), 2));
ncNameToNcInfos.put("nc12", new NodeControllerInfo("nc12", NodeStatus.ACTIVE,
        new NetworkAddress("10.0.0.12", dataPort), new NetworkAddress("10.0.0.5", resultPort),
        new NetworkAddress("10.0.0.5", messagingPort), 2));
// Twelve splits; each String[] names the HDFS hosts holding that block's replicas.
// Splits 8-10 reference hosts (10.0.0.11-14) outside the 1-7 range used by the others.
InputSplit[] fileSplits = new InputSplit[12];
fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
fileSplits[8] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.14", "10.0.0.11", "10.0.0.13" });
fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" });
fileSplits[11] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc12", "nc7",
        "nc7", "nc12" };
for (int i = 0; i < locationConstraints.length; i++) {
    // NOTE(review): JUnit convention is assertEquals(expected, actual); the arguments are
    // swapped here (and in every loop below), which only garbles failure messages,
    // not the check itself.
    Assert.assertEquals(locationConstraints[i], expectedResults[i]);
}
// Re-run with an explicit cluster topology; the topology-aware scheduler places the
// off-cluster splits (indices 8-11) differently, hence the different expectation.
expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc7", "nc12", "nc7",
        "nc12" };
ClusterTopology topology = parseTopology();
scheduler = new Scheduler(ncNameToNcInfos, topology);
locationConstraints = scheduler.getLocationConstraints(fileSplits);
for (int i = 0; i < locationConstraints.length; i++) {
    Assert.assertEquals(locationConstraints[i], expectedResults[i]);
}
}

/**
 * Test the case where the HDFS cluster is smaller than the Hyracks cluster.
 * (The original javadoc said "larger" — presumably a copy/paste slip: six NCs are
 * generated while the splits only reference hosts 10.0.0.1-5.)
 *
 * @throws Exception
 */
public void testSchedulerSmallerHDFS() throws Exception {
    Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.",
            5099, 5098, 5097);
    InputSplit[] fileSplits = new InputSplit[12];
    fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
    fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" });
    fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" });
    fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
    fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
    fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[8] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" });
    fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" });
    fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
    String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6",
            "nc5", "nc6" };
    Scheduler scheduler = new Scheduler(ncNameToNcInfos);
    String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
    // With topology information the expected placement is identical here.
    ClusterTopology topology = parseTopology();
    scheduler = new Scheduler(ncNameToNcInfos, topology);
    locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
}

/**
 * Test the case where the HDFS cluster is smaller than the Hyracks cluster and the
 * number of splits is odd (13). (The original javadoc said "larger" — see note above.)
 *
 * @throws Exception
 */
public void testSchedulerSmallerHDFSOdd() throws Exception {
    Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.",
            5099, 5098, 5097);
    InputSplit[] fileSplits = new InputSplit[13];
    fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
    fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" });
    fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" });
    fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
    fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
    fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[8] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" });
    fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" });
    fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
    fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
    fileSplits[12] = new FileSplit(new Path("part-13"), 0, 0, new String[] { "10.0.0.2", "10.0.0.4", "10.0.0.5" });
    String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc1",
            "nc5", "nc2", "nc4" };
    Scheduler scheduler = new Scheduler(ncNameToNcInfos);
    String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
    ClusterTopology topology = parseTopology();
    scheduler = new Scheduler(ncNameToNcInfos, topology);
    locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
}

/**
 * Test boundary cases where splits array is empty or null
 *
 * @throws Exception
 */
public void testSchedulercBoundary() throws Exception {
    Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.",
            5099, 5098, 5097);
    /** test empty file splits */
    InputSplit[] fileSplits = new InputSplit[0];
    String[] expectedResults = new String[] {};
    Scheduler scheduler = new Scheduler(ncNameToNcInfos);
    String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
    ClusterTopology topology = parseTopology();
    scheduler = new Scheduler(ncNameToNcInfos, topology);
    locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
    // Null splits: the scheduler must tolerate this and return an empty constraint set.
    fileSplits = null;
    expectedResults = new String[] {};
    scheduler = new Scheduler(ncNameToNcInfos);
    locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
    scheduler = new Scheduler(ncNameToNcInfos, topology);
    locationConstraints = scheduler.getLocationConstraints(fileSplits);
    for (int i = 0; i < locationConstraints.length; i++) {
        Assert.assertEquals(locationConstraints[i], expectedResults[i]);
    }
}
}
package org.web3j.protocol.rx; import java.io.IOException; import java.math.BigInteger; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import java.util.stream.Collectors; import rx.Observable; import rx.Scheduler; import rx.Subscriber; import rx.schedulers.Schedulers; import rx.subscriptions.Subscriptions; import org.web3j.protocol.Web3j; import org.web3j.protocol.core.DefaultBlockParameter; import org.web3j.protocol.core.DefaultBlockParameterName; import org.web3j.protocol.core.DefaultBlockParameterNumber; import org.web3j.protocol.core.filters.BlockFilter; import org.web3j.protocol.core.filters.LogFilter; import org.web3j.protocol.core.filters.PendingTransactionFilter; import org.web3j.protocol.core.methods.response.EthBlock; import org.web3j.protocol.core.methods.response.Log; import org.web3j.protocol.core.methods.response.Transaction; import org.web3j.utils.Observables; /** * web3j reactive API implementation. */ public class JsonRpc2_0Rx { private final Web3j web3j; private final ScheduledExecutorService scheduledExecutorService; private final Scheduler scheduler; public JsonRpc2_0Rx(Web3j web3j, ScheduledExecutorService scheduledExecutorService) { this.web3j = web3j; this.scheduledExecutorService = scheduledExecutorService; this.scheduler = Schedulers.from(scheduledExecutorService); } public Observable<String> ethBlockHashObservable(long pollingInterval) { return Observable.create(subscriber -> { BlockFilter blockFilter = new BlockFilter( web3j, subscriber::onNext); run(blockFilter, subscriber, pollingInterval); }); } public Observable<String> ethPendingTransactionHashObservable(long pollingInterval) { return Observable.create(subscriber -> { PendingTransactionFilter pendingTransactionFilter = new PendingTransactionFilter( web3j, subscriber::onNext); run(pendingTransactionFilter, subscriber, pollingInterval); }); } public Observable<Log> ethLogObservable( org.web3j.protocol.core.methods.request.EthFilter ethFilter, long 
pollingInterval) { return Observable.create((Subscriber<? super Log> subscriber) -> { LogFilter logFilter = new LogFilter( web3j, subscriber::onNext, ethFilter); run(logFilter, subscriber, pollingInterval); }); } private <T> void run( org.web3j.protocol.core.filters.Filter<T> filter, Subscriber<? super T> subscriber, long pollingInterval) { filter.run(scheduledExecutorService, pollingInterval); subscriber.add(Subscriptions.create(filter::cancel)); } public Observable<Transaction> transactionObservable(long pollingInterval) { return blockObservable(true, pollingInterval) .flatMapIterable(JsonRpc2_0Rx::toTransactions); } public Observable<Transaction> pendingTransactionObservable(long pollingInterval) { return ethPendingTransactionHashObservable(pollingInterval) .flatMap(transactionHash -> web3j.ethGetTransactionByHash(transactionHash).observable()) .map(ethTransaction -> ethTransaction.getTransaction().get()); } public Observable<EthBlock> blockObservable( boolean fullTransactionObjects, long pollingInterval) { return ethBlockHashObservable(pollingInterval) .flatMap(blockHash -> web3j.ethGetBlockByHash(blockHash, fullTransactionObjects).observable()); } public Observable<EthBlock> replayBlocksObservable( DefaultBlockParameter startBlock, DefaultBlockParameter endBlock, boolean fullTransactionObjects) { // We use a scheduler to ensure this Observable runs asynchronously for users to be // consistent with the other Observables return replayBlocksObservableSync(startBlock, endBlock, fullTransactionObjects) .subscribeOn(scheduler); } private Observable<EthBlock> replayBlocksObservableSync( DefaultBlockParameter startBlock, DefaultBlockParameter endBlock, boolean fullTransactionObjects) { BigInteger startBlockNumber = null; BigInteger endBlockNumber = null; try { startBlockNumber = getBlockNumber(startBlock); endBlockNumber = getBlockNumber(endBlock); } catch (IOException e) { Observable.error(e); } return Observables.range(startBlockNumber, endBlockNumber) .flatMap(i -> 
web3j.ethGetBlockByNumber( new DefaultBlockParameterNumber(i), fullTransactionObjects).observable()); } public Observable<Transaction> replayTransactionsObservable( DefaultBlockParameter startBlock, DefaultBlockParameter endBlock) { return replayBlocksObservable(startBlock, endBlock, true) .flatMapIterable(JsonRpc2_0Rx::toTransactions); } public Observable<EthBlock> catchUpToLatestBlockObservable( DefaultBlockParameter startBlock, boolean fullTransactionObjects, Observable<EthBlock> onCompleteObservable) { // We use a scheduler to ensure this Observable runs asynchronously for users to be // consistent with the other Observables return catchUpToLatestBlockObservableSync( startBlock, fullTransactionObjects, onCompleteObservable) .subscribeOn(scheduler); } public Observable<EthBlock> catchUpToLatestBlockObservable( DefaultBlockParameter startBlock, boolean fullTransactionObjects) { return catchUpToLatestBlockObservable( startBlock, fullTransactionObjects, Observable.empty()); } private Observable<EthBlock> catchUpToLatestBlockObservableSync( DefaultBlockParameter startBlock, boolean fullTransactionObjects, Observable<EthBlock> onCompleteObservable) { BigInteger startBlockNumber; BigInteger latestBlockNumber; try { startBlockNumber = getBlockNumber(startBlock); latestBlockNumber = getLatestBlockNumber(); } catch (IOException e) { return Observable.error(e); } if (startBlockNumber.compareTo(latestBlockNumber) > -1) { return onCompleteObservable; } else { return Observable.concat( replayBlocksObservableSync( new DefaultBlockParameterNumber(startBlockNumber), new DefaultBlockParameterNumber(latestBlockNumber), fullTransactionObjects), Observable.defer(() -> catchUpToLatestBlockObservableSync( new DefaultBlockParameterNumber(latestBlockNumber.add(BigInteger.ONE)), fullTransactionObjects, onCompleteObservable))); } } public Observable<Transaction> catchUpToLatestTransactionObservable( DefaultBlockParameter startBlock) { return catchUpToLatestBlockObservable( startBlock, 
true, Observable.empty()) .flatMapIterable(JsonRpc2_0Rx::toTransactions); } public Observable<EthBlock> catchUpToLatestAndSubscribeToNewBlocksObservable( DefaultBlockParameter startBlock, boolean fullTransactionObjects, long pollingInterval) { return catchUpToLatestBlockObservable( startBlock, fullTransactionObjects, blockObservable(fullTransactionObjects, pollingInterval)); } public Observable<Transaction> catchUpToLatestAndSubscribeToNewTransactionsObservable( DefaultBlockParameter startBlock, long pollingInterval) { return catchUpToLatestAndSubscribeToNewBlocksObservable( startBlock, true, pollingInterval) .flatMapIterable(JsonRpc2_0Rx::toTransactions); } private BigInteger getLatestBlockNumber() throws IOException { return getBlockNumber(DefaultBlockParameterName.LATEST); } private BigInteger getBlockNumber( DefaultBlockParameter defaultBlockParameter) throws IOException { if (defaultBlockParameter instanceof DefaultBlockParameterNumber) { return ((DefaultBlockParameterNumber) defaultBlockParameter).getBlockNumber(); } else { EthBlock latestEthBlock = web3j.ethGetBlockByNumber( defaultBlockParameter, false).send(); return latestEthBlock.getBlock().getNumber(); } } private static List<Transaction> toTransactions(EthBlock ethBlock) { // If you ever see an exception thrown here, it's probably due to an incomplete chain in // Geth/Parity. You should resync to solve. return ethBlock.getBlock().getTransactions().stream() .map(transactionResult -> (Transaction) transactionResult.get()) .collect(Collectors.toList()); } }
package guiOptions; import gui.GridLayout2; import java.awt.Canvas; import java.awt.Point; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import javax.swing.*; import nTupleTD.TDParams; import nTupleTD.TDParams.GameInterval; import com.sun.java.swing.plaf.windows.WindowsLookAndFeel; public class OptionsIntervals extends JDialog { private static final long serialVersionUID = -563934103210279835L; // Only add those components as class variables, which are also needed in // other classes // ---------------------------------------------------------------------- // Labels // Buttons JButton bApply; JButton bDeleteEntry; // Textfields final JTextField tStartInterval; final JTextField tStepBy; final JTextField tEndInterval; // Combo-Boxes final JComboBox<TDParams.GameInterval> cboxIntervals; /** * Constructor: */ OptionsIntervals(JFrame parent, TDParams.GameInterval[] iList) { // Set title of this frame super(parent, "Measurement-Intervals"); // ----------------------------------------------------------------- // Generate Combobox, that contains all intervals // ----------------------------------------------------------------- cboxIntervals = new JComboBox<TDParams.GameInterval>(iList); // ----------------------------------------------------------------- // Generate "Create new Interval" Button // ----------------------------------------------------------------- JButton bCreateNew = new JButton("Create new"); bCreateNew.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // Add new Element to ComboBox cboxIntervals.addItem(new TDParams.GameInterval(0, 10000, 0)); cboxIntervals.setSelectedIndex(cboxIntervals.getItemCount() - 1); } }); // ----------------------------------------------------------------- // Create "Start of Interval" Label and Textfield // ----------------------------------------------------------------- JLabel lStartInterval = 
new JLabel("Start of Interval:", JLabel.RIGHT); tStartInterval = new JTextField("0"); // ----------------------------------------------------------------- // Create "Step-Size" Label and Textfield // ----------------------------------------------------------------- JLabel lStepBy = new JLabel("Step by:", JLabel.RIGHT); tStepBy = new JTextField("10000"); // ----------------------------------------------------------------- // Create "End of Interval" Label and Textfield // ----------------------------------------------------------------- JLabel lEndInterval = new JLabel("End of Interval:", JLabel.RIGHT); tEndInterval = new JTextField("0"); // ----------------------------------------------------------------- // Generate "Delete Interval" Button // ----------------------------------------------------------------- bDeleteEntry = new JButton("Delete"); bDeleteEntry.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // Remove selected Item from ComboBox if (cboxIntervals.getItemCount() > 0) { int index = cboxIntervals.getSelectedIndex(); cboxIntervals.removeItemAt(index); } } }); // ----------------------------------------------------------------- // Generate "Apply" Button // ----------------------------------------------------------------- bApply = new JButton("Apply"); bApply.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // Save the values of the Textfields in the item of the combobox TDParams.GameInterval gi = (TDParams.GameInterval) cboxIntervals .getSelectedItem(); // Get values from the Textfields int startInterval = getStartInterval(); int stepBy = getStepBy(); int endInterval = getEndInterval(); // Save values in the corresponding item gi.startInterval = startInterval; gi.stepBy = stepBy; gi.endInterval = endInterval; // Refresh Combobox cboxIntervals.repaint(); } private int getEndInterval() { double end = Double.valueOf(tEndInterval.getText()) .doubleValue(); int endInterval = (int) (end 
* TDParams.SCALE_GAME_BY_MILLION); return endInterval; } private int getStepBy() { double stepBy = Double.valueOf(tStepBy.getText()).doubleValue(); return (int) (stepBy * TDParams.SCALE_GAME_BY_THOUSEND); } private int getStartInterval() { double start = Double.valueOf(tStartInterval.getText()) .doubleValue(); int startInterval = (int) (start * TDParams.SCALE_GAME_BY_MILLION); return startInterval; } }); // ----------------------------------------------------------------- // Create OK-Button // ----------------------------------------------------------------- JButton bOK = new JButton("OK"); bOK.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { setVisible(false); } }); cboxIntervals.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { refresh(); } }); // ----------------------------------------------------------------- // Create Panel and add components to it JPanel p = new JPanel(new GridLayout2(0, 4, 10, 4)); // ----------------------------------------------------------------- p.add(cboxIntervals); p.add(bCreateNew); p.add(new Canvas()); p.add(new Canvas()); p.add(new Canvas()); p.add(new Canvas()); p.add(new Canvas()); p.add(new Canvas()); p.add(lStartInterval); p.add(tStartInterval); p.add(new JLabel("Games (in millions)", JLabel.LEFT)); p.add(new Canvas()); p.add(lStepBy); p.add(tStepBy); p.add(new JLabel("Games (in thousends)", JLabel.LEFT)); p.add(new Canvas()); p.add(lEndInterval); p.add(tEndInterval); p.add(new JLabel("Games (in millions)", JLabel.LEFT)); p.add(new Canvas()); p.add(new Canvas()); p.add(new Canvas()); p.add(bApply); p.add(bDeleteEntry); p.add(bOK); // ----------------------------------------------------------------- // Add JScrollPane sp = new JScrollPane(p, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); add(sp); refresh(); Point pos = parent.getLocation(); setLocation(pos.x + 40, pos.y +40); setModal(true); pack(); 
setVisible(false); setResizable(false); } private void refresh() { // Write the values of the selected item into the textfields TDParams.GameInterval gi = (TDParams.GameInterval) cboxIntervals .getSelectedItem(); if (gi != null) { enableComponents(true); tStartInterval.setText(gi.startInterval / TDParams.SCALE_GAME_BY_MILLION + ""); tStepBy.setText(gi.stepBy / TDParams.SCALE_GAME_BY_THOUSEND + ""); tEndInterval.setText(gi.endInterval / TDParams.SCALE_GAME_BY_MILLION + ""); } else { // Deactivate all Textfields enableComponents(false); } } private void enableComponents(boolean enabled) { tStartInterval.setEnabled(enabled); tStepBy.setEnabled(enabled); tEndInterval.setEnabled(enabled); bApply.setEnabled(enabled); bDeleteEntry.setEnabled(enabled); } public GameInterval[] getIntervalList() { ComboBoxModel<GameInterval> model = cboxIntervals.getModel(); int size = model.getSize(); GameInterval[] gi = new GameInterval[size]; for(int i=0;i<size;i++) gi[i] = model.getElementAt(i); return gi; } private static class WindowClosingAdapter extends WindowAdapter { public WindowClosingAdapter() { } public void windowClosing(WindowEvent event) { event.getWindow().setVisible(false); event.getWindow().dispose(); System.exit(0); } } public static void main(String args[]) { try { UIManager.setLookAndFeel(new WindowsLookAndFeel()); } catch (Exception e) { } TDParams.GameInterval[] iList = new TDParams.GameInterval[1]; iList[0] = new TDParams.GameInterval(0, 10000, 2000000); OptionsIntervals op = new OptionsIntervals(null, iList); op.addWindowListener(new WindowClosingAdapter()); op.setVisible(true); } }
/*
 * Copyright (C) 2015 The Google Cloud Dataflow Hadoop Library Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.cloud.dataflow.contrib.hadoop;

import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.KvCoder;
import com.google.cloud.dataflow.sdk.coders.VoidCoder;
import com.google.cloud.dataflow.sdk.io.BoundedSource;
import com.google.cloud.dataflow.sdk.io.Read;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;

import javax.annotation.Nullable;

/**
 * A {@code BoundedSource} for reading files resident in a Hadoop filesystem using a
 * Hadoop file-based input format.
 *
 * <p>To read a {@link com.google.cloud.dataflow.sdk.values.PCollection} of
 * {@link com.google.cloud.dataflow.sdk.values.KV} key-value pairs from one or more
 * Hadoop files, use {@link HadoopFileSource#from} to specify the path(s) of the files to
 * read, the Hadoop {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}, the
 * key class and the value class.
 *
 * <p>A {@code HadoopFileSource} can be read from using the
 * {@link com.google.cloud.dataflow.sdk.io.Read} transform. For example:
 *
 * <pre>
 * {@code
 * HadoopFileSource<K, V> source = HadoopFileSource.from(path, MyInputFormat.class,
 *   MyKey.class, MyValue.class);
 * PCollection<KV<MyKey, MyValue>> records = Read.from(source);
 * }
 * </pre>
 *
 * <p>The {@link HadoopFileSource#readFrom} method is a convenience method
 * that returns a read transform. For example:
 *
 * <pre>
 * {@code
 * PCollection<KV<MyKey, MyValue>> records = HadoopFileSource.readFrom(path,
 *   MyInputFormat.class, MyKey.class, MyValue.class);
 * }
 * </pre>
 *
 * Implementation note: Since Hadoop's {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}
 * determines the input splits, this class extends {@link BoundedSource} rather than
 * {@link com.google.cloud.dataflow.sdk.io.OffsetBasedSource}, since the latter
 * dictates input splits.

 * @param <K> The type of keys to be read from the source.
 * @param <V> The type of values to be read from the source.
 */
public class HadoopFileSource<K, V> extends BoundedSource<KV<K, V>> {
  private static final long serialVersionUID = 0L;

  private final String filepattern;
  private final Class<? extends FileInputFormat<?, ?>> formatClass;
  private final Class<K> keyClass;
  private final Class<V> valueClass;
  // Non-null only for sub-sources produced by splitIntoBundles(); pins the source
  // to a single Hadoop InputSplit so it is not split up further.
  private final SerializableSplit serializableSplit;

  /**
   * Creates a {@code Read} transform that will read from an {@code HadoopFileSource}
   * with the given file name or pattern ("glob") using the given Hadoop
   * {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat},
   * with key-value types specified by the given key class and value class.
   */
  public static <K, V, T extends FileInputFormat<K, V>> Read.Bounded<KV<K, V>> readFrom(
      String filepattern, Class<T> formatClass, Class<K> keyClass, Class<V> valueClass) {
    return Read.from(from(filepattern, formatClass, keyClass, valueClass));
  }

  /**
   * Creates a {@code HadoopFileSource} that reads from the given file name or pattern ("glob")
   * using the given Hadoop {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat},
   * with key-value types specified by the given key class and value class.
   */
  public static <K, V, T extends FileInputFormat<K, V>> HadoopFileSource<K, V> from(
      String filepattern, Class<T> formatClass, Class<K> keyClass, Class<V> valueClass) {
    // Raw-typed construction followed by an unchecked cast; safe because the bound
    // T extends FileInputFormat<K, V> ties formatClass to keyClass/valueClass.
    @SuppressWarnings("unchecked")
    HadoopFileSource<K, V> source = (HadoopFileSource<K, V>)
        new HadoopFileSource(filepattern, formatClass, keyClass, valueClass);
    return source;
  }

  /**
   * Create a {@code HadoopFileSource} based on a file or a file pattern specification.
   */
  private HadoopFileSource(String filepattern,
      Class<? extends FileInputFormat<?, ?>> formatClass, Class<K> keyClass,
      Class<V> valueClass) {
    this(filepattern, formatClass, keyClass, valueClass, null);
  }

  /**
   * Create a {@code HadoopFileSource} based on a single Hadoop input split, which won't be
   * split up further.
   */
  private HadoopFileSource(String filepattern,
      Class<? extends FileInputFormat<?, ?>> formatClass, Class<K> keyClass,
      Class<V> valueClass, SerializableSplit serializableSplit) {
    this.filepattern = filepattern;
    this.formatClass = formatClass;
    this.keyClass = keyClass;
    this.valueClass = valueClass;
    this.serializableSplit = serializableSplit;
  }

  public String getFilepattern() {
    return filepattern;
  }

  public Class<? extends FileInputFormat<?, ?>> getFormatClass() {
    return formatClass;
  }

  public Class<K> getKeyClass() {
    return keyClass;
  }

  public Class<V> getValueClass() {
    return valueClass;
  }

  @Override
  public void validate() {
    Preconditions.checkNotNull(filepattern,
        "need to set the filepattern of a HadoopFileSource");
    Preconditions.checkNotNull(formatClass,
        "need to set the format class of a HadoopFileSource");
    Preconditions.checkNotNull(keyClass,
        "need to set the key class of a HadoopFileSource");
    Preconditions.checkNotNull(valueClass,
        "need to set the value class of a HadoopFileSource");
  }

  // Delegates bundling to Hadoop's own split computation; an already-split source
  // (serializableSplit != null) is returned as a singleton list unchanged.
  @Override
  public List<? extends BoundedSource<KV<K, V>>> splitIntoBundles(long desiredBundleSizeBytes,
      PipelineOptions options) throws Exception {
    if (serializableSplit == null) {
      return Lists.transform(computeSplits(desiredBundleSizeBytes),
          new Function<InputSplit, BoundedSource<KV<K, V>>>() {
        @Nullable @Override
        public BoundedSource<KV<K, V>> apply(@Nullable InputSplit inputSplit) {
          return new HadoopFileSource<K, V>(filepattern, formatClass, keyClass,
              valueClass, new SerializableSplit(inputSplit));
        }
      });
    } else {
      return ImmutableList.of(this);
    }
  }

  // Registers the file pattern on the job and instantiates the input format.
  private FileInputFormat<?, ?> createFormat(Job job) throws IOException, IllegalAccessException,
      InstantiationException {
    Path path = new Path(filepattern);
    FileInputFormat.addInputPath(job, path);
    return formatClass.newInstance();
  }

  // Asks Hadoop for splits with min == max == desiredBundleSizeBytes.
  private List<InputSplit> computeSplits(long desiredBundleSizeBytes) throws IOException,
      IllegalAccessException, InstantiationException {
    Job job = Job.getInstance();
    FileInputFormat.setMinInputSplitSize(job, desiredBundleSizeBytes);
    FileInputFormat.setMaxInputSplitSize(job, desiredBundleSizeBytes);
    return createFormat(job).getSplits(job);
  }

  @Override
  public BoundedReader<KV<K, V>> createReader(PipelineOptions options) throws IOException {
    this.validate();

    if (serializableSplit == null) {
      return new HadoopFileReader<>(this, filepattern, formatClass);
    } else {
      return new HadoopFileReader<>(this, filepattern, formatClass,
          serializableSplit.getSplit());
    }
  }

  @Override
  public Coder<KV<K, V>> getDefaultOutputCoder() {
    return KvCoder.of(getDefaultCoder(keyClass), getDefaultCoder(valueClass));
  }

  // Only Writable subtypes and Void get a coder; everything else is rejected.
  @SuppressWarnings("unchecked")
  private <T> Coder<T> getDefaultCoder(Class<T> c) {
    if (Writable.class.isAssignableFrom(c)) {
      Class<? extends Writable> writableClass = (Class<? extends Writable>) c;
      return (Coder<T>) WritableCoder.of(writableClass);
    } else if (Void.class.equals(c)) {
      return (Coder<T>) VoidCoder.of();
    }
    // TODO: how to use registered coders here?
    throw new IllegalStateException("Cannot find coder for " + c);
  }

  // BoundedSource

  // Sums the file sizes matched by the pattern; any failure is deliberately
  // swallowed and reported as size 0 (this is only an estimate).
  @Override
  public long getEstimatedSizeBytes(PipelineOptions options) {
    long size = 0;
    try {
      Job job = Job.getInstance(); // new instance
      for (FileStatus st : listStatus(createFormat(job), job)) {
        size += st.getLen();
      }
    } catch (IOException | NoSuchMethodException | InvocationTargetException
        | IllegalAccessException | InstantiationException e) {
      // ignore, and return 0
    }
    return size;
  }

  // NOTE(review): the <K, V> parameters here shadow the class's type parameters;
  // harmless, but easy to misread.
  private <K, V> List<FileStatus> listStatus(FileInputFormat<K, V> format,
      JobContext jobContext) throws NoSuchMethodException, InvocationTargetException,
      IllegalAccessException {
    // FileInputFormat#listStatus is protected, so call using reflection
    Method listStatus = FileInputFormat.class.getDeclaredMethod("listStatus", JobContext.class);
    listStatus.setAccessible(true);
    @SuppressWarnings("unchecked")
    List<FileStatus> stat = (List<FileStatus>) listStatus.invoke(format, jobContext);
    return stat;
  }

  @Override
  public boolean producesSortedKeys(PipelineOptions options) throws Exception {
    return false;
  }

  // Reads KV pairs by iterating over the format's splits, one RecordReader at a time.
  static class HadoopFileReader<K, V> extends BoundedSource.BoundedReader<KV<K, V>> {

    private final BoundedSource<KV<K, V>> source;
    private final String filepattern;
    // NOTE(review): raw Class; declaring Class<? extends FileInputFormat<?, ?>>
    // would avoid the unchecked instantiation in start().
    private final Class formatClass;

    private FileInputFormat<?, ?> format;
    private TaskAttemptContext attemptContext;
    private List<InputSplit> splits;
    private ListIterator<InputSplit> splitsIterator;
    private Configuration conf;
    private RecordReader<K, V> currentReader;
    private KV<K, V> currentPair;
    private volatile boolean done = false;

    /**
     * Create a {@code HadoopFileReader} based on a file or a file pattern specification.
     */
    public HadoopFileReader(BoundedSource<KV<K, V>> source, String filepattern,
        Class<? extends FileInputFormat<?, ?>> formatClass) {
      this(source, filepattern, formatClass, null);
    }

    /**
     * Create a {@code HadoopFileReader} based on a single Hadoop input split.
     */
    public HadoopFileReader(BoundedSource<KV<K, V>> source, String filepattern,
        Class<? extends FileInputFormat<?, ?>> formatClass, InputSplit split) {
      this.source = source;
      this.filepattern = filepattern;
      this.formatClass = formatClass;
      if (split != null) {
        this.splits = ImmutableList.of(split);
        this.splitsIterator = splits.listIterator();
      }
    }

    // Instantiates the format, computes splits if not pinned to one, and advances
    // to the first record.
    @Override
    public boolean start() throws IOException {
      Job job = Job.getInstance(); // new instance
      Path path = new Path(filepattern);
      FileInputFormat.addInputPath(job, path);

      try {
        @SuppressWarnings("unchecked")
        FileInputFormat<K, V> f = (FileInputFormat<K, V>) formatClass.newInstance();
        this.format = f;
      } catch (InstantiationException | IllegalAccessException e) {
        throw new IOException("Cannot instantiate file input format " + formatClass, e);
      }
      this.attemptContext = new TaskAttemptContextImpl(job.getConfiguration(),
          new TaskAttemptID());

      if (splitsIterator == null) {
        this.splits = format.getSplits(job);
        this.splitsIterator = splits.listIterator();
      }
      this.conf = job.getConfiguration();
      return advance();
    }

    // Returns the next record from the current reader, or rolls over to the next
    // non-empty split; sets done=true when all splits are exhausted.
    @Override
    public boolean advance() throws IOException {
      try {
        if (currentReader != null && currentReader.nextKeyValue()) {
          currentPair = nextPair();
          return true;
        } else {
          while (splitsIterator.hasNext()) {
            // advance the reader and see if it has records
            InputSplit nextSplit = splitsIterator.next();
            @SuppressWarnings("unchecked")
            RecordReader<K, V> reader =
                (RecordReader<K, V>) format.createRecordReader(nextSplit, attemptContext);
            if (currentReader != null) {
              currentReader.close();
            }
            currentReader = reader;
            currentReader.initialize(nextSplit, attemptContext);
            if (currentReader.nextKeyValue()) {
              currentPair = nextPair();
              return true;
            }
            currentReader.close();
            currentReader = null;
          }
          // either no next split or all readers were empty
          currentPair = null;
          done = true;
          return false;
        }
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new IOException(e);
      }
    }

    @SuppressWarnings("unchecked")
    private KV<K, V> nextPair() throws IOException, InterruptedException {
      K key = currentReader.getCurrentKey();
      V value = currentReader.getCurrentValue();
      // clone Writable objects since they are reused between calls to RecordReader#nextKeyValue
      if (key instanceof Writable) {
        key = (K) WritableUtils.clone((Writable) key, conf);
      }
      if (value instanceof Writable) {
        value = (V) WritableUtils.clone((Writable) value, conf);
      }
      return KV.of(key, value);
    }

    @Override
    public KV<K, V> getCurrent() throws NoSuchElementException {
      if (currentPair == null) {
        throw new NoSuchElementException();
      }
      return currentPair;
    }

    @Override
    public void close() throws IOException {
      if (currentReader != null) {
        currentReader.close();
        currentReader = null;
      }
      currentPair = null;
    }

    @Override
    public BoundedSource<KV<K, V>> getCurrentSource() {
      return source;
    }

    // BoundedReader

    // Fraction consumed is interpolated: completed splits count fully, and the
    // current split contributes its own reported progress within [before, after].
    @Override
    public Double getFractionConsumed() {
      if (currentReader == null) {
        return 0.0;
      }
      if (splits.isEmpty()) {
        return 1.0;
      }
      int index = splitsIterator.previousIndex();
      int numReaders = splits.size();
      if (index == numReaders) {
        return 1.0;
      }
      double before = 1.0 * index / numReaders;
      double after = 1.0 * (index + 1) / numReaders;
      Double fractionOfCurrentReader = getProgress();
      if (fractionOfCurrentReader == null) {
        return before;
      }
      return before + fractionOfCurrentReader * (after - before);
    }

    // Null signals "progress unknown"; callers fall back to the split boundary.
    private Double getProgress() {
      try {
        return (double) currentReader.getProgress();
      } catch (IOException | InterruptedException e) {
        return null;
      }
    }

    @Override
    public final long getSplitPointsRemaining() {
      if (done) {
        return 0;
      }
      // This source does not currently support dynamic work rebalancing, so remaining
      // parallelism is always 1.
      return 1;
    }

    @Override
    public BoundedSource<KV<K, V>> splitAtFraction(double fraction) {
      // Not yet supported. To implement this, the sizes of the splits should be used to
      // calculate the remaining splits that constitute the given fraction, then a
      // new source backed by those splits should be returned.
      return null;
    }
  }

  /**
   * A wrapper to allow Hadoop {@link org.apache.hadoop.mapreduce.InputSplit}s to be
   * serialized using Java's standard serialization mechanisms. Note that the InputSplit
   * has to be Writable (which most are).
   */
  public static class SerializableSplit implements Externalizable {
    private static final long serialVersionUID = 0L;

    private InputSplit split;

    public SerializableSplit() {
    }

    public SerializableSplit(InputSplit split) {
      Preconditions.checkArgument(split instanceof Writable, "Split is not writable: "
          + split);
      this.split = split;
    }

    public InputSplit getSplit() {
      return split;
    }

    // Serializes as: class name (UTF) followed by the split's Writable form.
    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
      out.writeUTF(split.getClass().getCanonicalName());
      ((Writable) split).write(out);
    }

    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
      String className = in.readUTF();
      try {
        split = (InputSplit) Class.forName(className).newInstance();
        ((Writable) split).readFields(in);
      } catch (InstantiationException | IllegalAccessException e) {
        throw new IOException(e);
      }
    }
  }
}
/* * Copyright (C) 2009 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package br.com.cams7.siscom.member; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.ListView; import android.widget.TextView; import android.widget.Toast; import br.com.cams7.siscom.member.vo.Member; import br.com.cams7.siscom.util.RestUtil; public final class MemberList extends Activity { private static final String TAG = "MemberList"; private static final String URL = "http://192.168.0.160:8080/siscom/rest/member/list"; private Button bAddMember; private ListView lvMembers; private CheckBox cbShowInvisible; private boolean 
showInvisible; private TextView tvIsConnected; private TextView tvResponse; private List<Member> members; /** * Called when the activity is first created. Responsible for initializing * the UI. */ @Override public void onCreate(Bundle savedInstanceState) { Log.v(TAG, "Activity State: onCreate()"); super.onCreate(savedInstanceState); setContentView(R.layout.member_list); // Obtain handles to UI objects bAddMember = (Button) findViewById(R.id.bAddMember); lvMembers = (ListView) findViewById(R.id.lvMembers); cbShowInvisible = (CheckBox) findViewById(R.id.cbShowInvisible); tvIsConnected = (TextView) findViewById(R.id.tvIsConnected); tvResponse = (TextView) findViewById(R.id.tvResponse); // Initialize class properties showInvisible = false; cbShowInvisible.setChecked(showInvisible); // Register handler for UI elements bAddMember.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { Log.d(TAG, "bAddMember clicked"); launchMemberEdit(); } }); cbShowInvisible .setOnCheckedChangeListener(new OnCheckedChangeListener() { public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { Log.d(TAG, "cbShowInvisible changed: " + isChecked); showInvisible = isChecked; populateMembers(); } }); // check if you are connected or not if (RestUtil.isConnected(this)) { tvIsConnected.setBackgroundColor(0xFF00CC00); tvIsConnected.setText(R.string.msg_connected); } else { tvIsConnected.setText(R.string.msg_not_connected); } // call AsynTask to perform network operation on separate thread new HttpAsyncTask().execute(URL); } /** * Populate the contact list based on account currently selected in the * account spinner. 
*/ private void populateMembers() { final MemberArrayAdapter adapter; if (showInvisible) adapter = new MemberArrayAdapter(this, members); else { List<Member> membersActive = new ArrayList<Member>(); for (Member member : members) if (member.isActive()) membersActive.add(member); adapter = new MemberArrayAdapter(this, membersActive); } lvMembers.setAdapter(adapter); } /** * Launches the ContactAdder activity to add a new contact to the selected * accont. */ private void launchMemberEdit() { Intent intent = new Intent(this, MemberEdit.class); startActivity(intent); } private static String GET(String url, String errorMsg) { // create HttpClient HttpClient httpclient = new DefaultHttpClient(); try { // make GET request to the given URL HttpResponse httpResponse = httpclient.execute(new HttpGet(url)); // receive response as inputStream InputStream inputStream = httpResponse.getEntity().getContent(); if (inputStream == null) { Log.d(RestUtil.TAG_INPUT_STREAM, "InputStream is null"); return errorMsg; } return RestUtil.convertInputStreamToString(inputStream); } catch (ClientProtocolException e) { Log.d(RestUtil.TAG_INPUT_STREAM, e.getLocalizedMessage(), e.getCause()); } catch (IOException e) { Log.d(RestUtil.TAG_INPUT_STREAM, e.getLocalizedMessage(), e.getCause()); } return errorMsg; } private class MemberArrayAdapter extends ArrayAdapter<Member> { private Context context; private List<Member> members; public MemberArrayAdapter(Context context, List<Member> members) { super(context, R.layout.member_entry, members); this.context = context; this.members = members; } /* * (non-Javadoc) * * @see android.widget.ArrayAdapter#getView(int, android.view.View, * android.view.ViewGroup) */ @Override public View getView(int position, View convertView, ViewGroup parent) { LayoutInflater inflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); View rowView = inflater.inflate(R.layout.member_entry, parent, false); TextView tvName = (TextView) 
rowView.findViewById(R.id.rowTvName); tvName.setText(members.get(position).getName()); // TextView tvEmail = (TextView) rowView // .findViewById(R.id.emailEntryText); // tvEmail.setText(members.get(position).getEmail()); return rowView; } /* * (non-Javadoc) * * @see android.widget.ArrayAdapter#getItemId(int) */ @Override public long getItemId(int position) { Member item = getItem(position); return item.getId(); } /* * (non-Javadoc) * * @see android.widget.BaseAdapter#hasStableIds() */ @Override public boolean hasStableIds() { return true; } } private class HttpAsyncTask extends AsyncTask<String, Void, String> { /* * (non-Javadoc) * * @see android.os.AsyncTask#doInBackground(java.lang.Object[]) */ protected String doInBackground(String... urls) { return GET(urls[0], getString(R.string.msg_error)); } // onPostExecute displays the results of the AsyncTask. @Override protected void onPostExecute(String result) { Toast.makeText(getBaseContext(), getString(R.string.msg_received), Toast.LENGTH_LONG).show(); String errorMsg = getString(R.string.msg_error); members = new ArrayList<Member>(); if (!errorMsg.equals(result)) { try { JSONArray array = new JSONArray(result); if (array.length() > 0) { for (int i = 0; i < array.length(); i++) { JSONObject object = array.getJSONObject(i); Member member = new Member(); member.setId(object.getLong("id")); member.setName(object.getString("name")); member.setEmail(object.getString("email")); member.setPhoneNumber(object .getString("phoneNumber")); member.setActive(true); members.add(member); } lvMembers.setVisibility(View.VISIBLE); } else { tvResponse.setVisibility(View.VISIBLE); tvResponse.setText(R.string.msg_list_empty); } } catch (JSONException e) { Log.d(TAG, e.getLocalizedMessage(), e.getCause()); } } else { tvResponse.setVisibility(View.VISIBLE); tvResponse.setText(errorMsg); } // Populate the contact list populateMembers(); } } }
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.grid.internal.utils; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHttpEntityEnclosingRequest; import org.apache.http.message.BasicHttpRequest; import org.openqa.grid.common.RegistrationRequest; import org.openqa.grid.common.exception.GridConfigurationException; import org.openqa.grid.common.exception.GridException; import org.openqa.grid.internal.utils.configuration.GridHubConfiguration; import org.openqa.grid.internal.utils.configuration.GridNodeConfiguration; import org.openqa.grid.shared.GridNodeServer; import org.openqa.selenium.Platform; import org.openqa.selenium.remote.DesiredCapabilities; import org.openqa.selenium.remote.internal.HttpClientFactory; import org.openqa.selenium.remote.server.log.LoggingManager; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.URL; import java.security.InvalidParameterException; import java.util.logging.Logger; 
public class SelfRegisteringRemote { private static final Logger LOG = Logger.getLogger(SelfRegisteringRemote.class.getName()); private RegistrationRequest nodeConfig; private final HttpClientFactory httpClientFactory; public SelfRegisteringRemote(RegistrationRequest config) { this.nodeConfig = config; this.httpClientFactory = new HttpClientFactory(); nodeConfig.validate(); try { GridHubConfiguration hubConfiguration = getHubConfiguration(); if (hubConfiguration.timeout != null) { nodeConfig.getConfiguration().timeout = hubConfiguration.timeout; } if (hubConfiguration.browserTimeout != null) { nodeConfig.getConfiguration().browserTimeout = hubConfiguration.browserTimeout; } } catch (Exception e) { LOG.warning( "error getting the parameters from the hub. The node may end up with wrong timeouts." + e .getMessage()); } } public URL getRemoteURL() { String host = nodeConfig.getConfiguration().host; Integer port = nodeConfig.getConfiguration().port; String url = "http://" + host + ":" + port; try { return new URL(url); } catch (MalformedURLException e) { throw new GridConfigurationException("error building the node url " + e.getMessage(), e); } } private GridNodeServer server; public void setRemoteServer(GridNodeServer server) { this.server = server; } public void startRemoteServer() throws Exception { if (server == null) { throw new GridConfigurationException("no server set to register to the hub"); } server.boot(); } public void stopRemoteServer() { if (server != null) { server.stop(); } } public void deleteAllBrowsers() { nodeConfig.getCapabilities().clear(); } /** * Adding the browser described by the capability, automatically finding out what platform the * node is launched from * * @param cap describing the browser * @param instances number of times this browser can be started on the node. 
*/ public void addBrowser(DesiredCapabilities cap, int instances) { String s = cap.getBrowserName(); if (s == null || "".equals(s)) { throw new InvalidParameterException(cap + " does seems to be a valid browser."); } if (cap.getPlatform() == null) { cap.setPlatform(Platform.getCurrent()); } cap.setCapability(RegistrationRequest.MAX_INSTANCES, instances); nodeConfig.getCapabilities().add(cap); } /** * sends 1 registration request, bypassing the retry logic and the proxy already registered check. * Use only for testing. */ public void sendRegistrationRequest() { registerToHub(false); } /** * register the hub following the configuration : * <p> * - check if the proxy is already registered before sending a reg request. * <p> * - register again every X ms is specified in the config of the node. */ public void startRegistrationProcess() { LOG.fine("Using the json request : " + nodeConfig.toJSON()); Boolean register = nodeConfig.getConfiguration().register; if (!register) { LOG.info("No registration sent ( register = false )"); } else { final int registerCycleInterval = nodeConfig.getConfiguration().registerCycle; if (registerCycleInterval > 0) { new Thread(new Runnable() { // Thread safety reviewed public void run() { boolean first = true; LOG.info("Starting auto registration thread. Will try to register every " + registerCycleInterval + " ms."); while (true) { try { boolean checkForPresence = true; if (first) { first = false; checkForPresence = false; } registerToHub(checkForPresence); } catch (GridException e) { LOG.info("Couldn't register this node: " + e.getMessage()); } try { Thread.sleep(registerCycleInterval); } catch (InterruptedException e) { e.printStackTrace(); } // While we wait for someone to rewrite server logging. 
LoggingManager.perSessionLogHandler().clearThreadTempLogs(); } } }).start(); } else { registerToHub(false); } } LoggingManager.perSessionLogHandler().clearThreadTempLogs(); } public void setTimeout(int timeout, int cycle) { nodeConfig.getConfiguration().timeout = timeout; nodeConfig.getConfiguration().cleanUpCycle = cycle; } public void setMaxConcurrent(int max) { nodeConfig.getConfiguration().maxSession = max; } public GridNodeConfiguration getConfiguration() { return nodeConfig.getConfiguration(); } private void registerToHub(boolean checkPresenceFirst) { if (!checkPresenceFirst || !isAlreadyRegistered(nodeConfig)) { String tmp = "http://" + nodeConfig.getConfiguration().getHubHost() + ":" + nodeConfig.getConfiguration().getHubPort() + "/grid/register"; HttpClient client = httpClientFactory.getHttpClient(); try { URL registration = new URL(tmp); LOG.info("Registering the node to the hub: " + registration); BasicHttpEntityEnclosingRequest r = new BasicHttpEntityEnclosingRequest("POST", registration.toExternalForm()); updateConfigWithRealPort(); String json = nodeConfig.toJSON(); r.setEntity(new StringEntity(json,"UTF-8")); HttpHost host = new HttpHost(registration.getHost(), registration.getPort()); HttpResponse response = client.execute(host, r); if (response.getStatusLine().getStatusCode() != 200) { throw new GridException(String.format("The hub responded with %s:%s", response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase())); } LOG.info("The node is registered to the hub and ready to use"); } catch (Exception e) { throw new GridException("Error sending the registration request: " + e.getMessage()); } } else { LOG.fine("The node is already present on the hub. Skipping registration."); } } void updateConfigWithRealPort() throws MalformedURLException { if (nodeConfig.getConfiguration().port != 0) { return; } nodeConfig.getConfiguration().port = server.getRealPort(); } /** * uses the hub API to get some of its configuration. 
* @return json object of the current hub configuration * @throws Exception */ private GridHubConfiguration getHubConfiguration() throws Exception { String hubApi = "http://" + nodeConfig.getConfiguration().getHubHost() + ":" + nodeConfig.getConfiguration().getHubPort() + "/grid/api/hub"; HttpClient client = httpClientFactory.getHttpClient(); URL api = new URL(hubApi); HttpHost host = new HttpHost(api.getHost(), api.getPort()); String url = api.toExternalForm(); BasicHttpRequest r = new BasicHttpRequest("GET", url); HttpResponse response = client.execute(host, r); return GridHubConfiguration.loadFromJSON(extractObject(response)); } private boolean isAlreadyRegistered(RegistrationRequest node) { HttpClient client = httpClientFactory.getHttpClient(); try { String tmp = "http://" + node.getConfiguration().getHubHost() + ":" + node.getConfiguration().getHubPort() + "/grid/api/proxy"; URL api = new URL(tmp); HttpHost host = new HttpHost(api.getHost(), api.getPort()); String id = node.getConfiguration().id; if (id == null) { id = node.getConfiguration().getRemoteHost(); } BasicHttpRequest r = new BasicHttpRequest("GET", api.toExternalForm() + "?id=" + id); HttpResponse response = client.execute(host, r); if (response.getStatusLine().getStatusCode() != 200) { throw new GridException(String.format("The hub responded with %s:%s", response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase())); } JsonObject o = extractObject(response); return o.get("success").getAsBoolean(); } catch (Exception e) { throw new GridException("The hub is down or not responding: " + e.getMessage()); } } private static JsonObject extractObject(HttpResponse resp) throws IOException { BufferedReader rd = new BufferedReader(new InputStreamReader(resp.getEntity().getContent())); StringBuilder s = new StringBuilder(); String line; while ((line = rd.readLine()) != null) { s.append(line); } rd.close(); return new JsonParser().parse(s.toString()).getAsJsonObject(); } }
/*L * Copyright (c) 2006 SAIC, SAIC-F. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/rembrandt/LICENSE.txt for details. */ // Generated by OJB SchemeGenerator package gov.nih.nci.rembrandt.dbbean; /** * caIntegrator License * * Copyright 2001-2005 Science Applications International Corporation ("SAIC"). * The software subject to this notice and license includes both human readable source code form and machine readable, * binary, object code form ("the caIntegrator Software"). The caIntegrator Software was developed in conjunction with * the National Cancer Institute ("NCI") by NCI employees and employees of SAIC. * To the extent government employees are authors, any rights in such works shall be subject to Title 17 of the United States * Code, section 105. * This caIntegrator Software License (the "License") is between NCI and You. "You (or "Your") shall mean a person or an * entity, and all other entities that control, are controlled by, or are under common control with the entity. "Control" * for purposes of this definition means (i) the direct or indirect power to cause the direction or management of such entity, * whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) * beneficial ownership of such entity. * This License is granted provided that You agree to the conditions described below. 
NCI grants You a non-exclusive, * worldwide, perpetual, fully-paid-up, no-charge, irrevocable, transferable and royalty-free right and license in its rights * in the caIntegrator Software to (i) use, install, access, operate, execute, copy, modify, translate, market, publicly * display, publicly perform, and prepare derivative works of the caIntegrator Software; (ii) distribute and have distributed * to and by third parties the caIntegrator Software and any modifications and derivative works thereof; * and (iii) sublicense the foregoing rights set out in (i) and (ii) to third parties, including the right to license such * rights to further third parties. For sake of clarity, and not by way of limitation, NCI shall have no right of accounting * or right of payment from You or Your sublicensees for the rights granted under this License. This License is granted at no * charge to You. * 1. Your redistributions of the source code for the Software must retain the above copyright notice, this list of conditions * and the disclaimer and limitation of liability of Article 6, below. Your redistributions in object code form must reproduce * the above copyright notice, this list of conditions and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * 2. Your end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This * product includes software developed by SAIC and the National Cancer Institute." If You do not include such end-user * documentation, You shall include this acknowledgment in the Software itself, wherever such third-party acknowledgments * normally appear. * 3. You may not use the names "The National Cancer Institute", "NCI" "Science Applications International Corporation" and * "SAIC" to endorse or promote products derived from this Software. 
This License does not authorize You to use any * trademarks, service marks, trade names, logos or product names of either NCI or SAIC, except as required to comply with * the terms of this License. * 4. For sake of clarity, and not by way of limitation, You may incorporate this Software into Your proprietary programs and * into any third party proprietary programs. However, if You incorporate the Software into third party proprietary * programs, You agree that You are solely responsible for obtaining any permission from such third parties required to * incorporate the Software into such third party proprietary programs and for informing Your sublicensees, including * without limitation Your end-users, of their obligation to secure any required permissions from such third parties * before incorporating the Software into such third party proprietary software programs. In the event that You fail * to obtain such permissions, You agree to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such permissions. * 5. For sake of clarity, and not by way of limitation, You may add Your own copyright statement to Your modifications and * to the derivative works, and You may provide additional or different license terms and conditions in Your sublicenses * of modifications of the Software, or any derivative works of the Software as a whole, provided Your use, reproduction, * and distribution of the Work otherwise complies with the conditions stated in this License. * 6. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
 * IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
 * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

/**
 * OJB-generated persistence bean for a legacy gene-dimension table: a plain
 * mutable holder with one field and getter/setter pair per column, plus raw
 * {@link java.util.Vector} fields for each related collection.
 *
 * NOTE(review): the Vector fields are raw types as emitted by the generator;
 * their element types are not visible here — confirm against the OJB mapping
 * before relying on them.
 */
public class GeneDimOld
{
    // Scalar columns of the gene-dimension record.
    private String alignments;
    private String chromosome;
    private String cytogeneticMap;
    private String ec;
    private Long geneId;
    private String geneSymbol;
    private String geneTitle;
    private String genomeVersion;
    private Long kbEnd;
    private Long kbStart;
    private String llId;
    private String ominId;
    private String unigeneId;

    // Related-collection fields (raw Vectors, populated by the O/R layer).
    private java.util.Vector collRefseqProteinId;
    private java.util.Vector collGeneOntology;
    private java.util.Vector collGeneClone;
    private java.util.Vector collArrayGenoAbnFact;
    private java.util.Vector collSwissprot;
    private java.util.Vector collRefseqMrnaId;
    private java.util.Vector collDifferentialExpressionGfact;
    private java.util.Vector collGeneProbeset;
    private java.util.Vector collDifferentialExpressionSfact;
    private java.util.Vector collProteinFamily;
    private java.util.Vector collGenePathway;

    // Plain accessors below: no validation or defensive copying is performed.

    public String getAlignments() { return this.alignments; }
    public void setAlignments(String param) { this.alignments = param; }

    public String getChromosome() { return this.chromosome; }
    public void setChromosome(String param) { this.chromosome = param; }

    public String getCytogeneticMap() { return this.cytogeneticMap; }
    public void setCytogeneticMap(String param) { this.cytogeneticMap = param; }

    public String getEc() { return this.ec; }
    public void setEc(String param) { this.ec = param; }

    public Long getGeneId() { return this.geneId; }
    public void setGeneId(Long param) { this.geneId = param; }

    public String getGeneSymbol() { return this.geneSymbol; }
    public void setGeneSymbol(String param) { this.geneSymbol = param; }

    public String getGeneTitle() { return this.geneTitle; }
    public void setGeneTitle(String param) { this.geneTitle = param; }

    public String getGenomeVersion() { return this.genomeVersion; }
    public void setGenomeVersion(String param) { this.genomeVersion = param; }

    public Long getKbEnd() { return this.kbEnd; }
    public void setKbEnd(Long param) { this.kbEnd = param; }

    public Long getKbStart() { return this.kbStart; }
    public void setKbStart(Long param) { this.kbStart = param; }

    public String getLlId() { return this.llId; }
    public void setLlId(String param) { this.llId = param; }

    public String getOminId() { return this.ominId; }
    public void setOminId(String param) { this.ominId = param; }

    public String getUnigeneId() { return this.unigeneId; }
    public void setUnigeneId(String param) { this.unigeneId = param; }

    public java.util.Vector getCollRefseqProteinId() { return this.collRefseqProteinId; }
    public void setCollRefseqProteinId(java.util.Vector param) { this.collRefseqProteinId = param; }

    public java.util.Vector getCollGeneOntology() { return this.collGeneOntology; }
    public void setCollGeneOntology(java.util.Vector param) { this.collGeneOntology = param; }

    public java.util.Vector getCollGeneClone() { return this.collGeneClone; }
    public void setCollGeneClone(java.util.Vector param) { this.collGeneClone = param; }

    public java.util.Vector getCollArrayGenoAbnFact() { return this.collArrayGenoAbnFact; }
    public void setCollArrayGenoAbnFact(java.util.Vector param) { this.collArrayGenoAbnFact = param; }

    public java.util.Vector getCollSwissprot() { return this.collSwissprot; }
    public void setCollSwissprot(java.util.Vector param) { this.collSwissprot = param; }

    public java.util.Vector getCollRefseqMrnaId() { return this.collRefseqMrnaId; }
    public void setCollRefseqMrnaId(java.util.Vector param) { this.collRefseqMrnaId = param; }

    public java.util.Vector getCollDifferentialExpressionGfact() { return this.collDifferentialExpressionGfact; }
    public void setCollDifferentialExpressionGfact(java.util.Vector param) { this.collDifferentialExpressionGfact = param; }

    public java.util.Vector getCollGeneProbeset() { return this.collGeneProbeset; }
    public void setCollGeneProbeset(java.util.Vector param) { this.collGeneProbeset = param; }

    public java.util.Vector getCollDifferentialExpressionSfact() { return this.collDifferentialExpressionSfact; }
    public void setCollDifferentialExpressionSfact(java.util.Vector param) { this.collDifferentialExpressionSfact = param; }

    public java.util.Vector getCollProteinFamily() { return this.collProteinFamily; }
    public void setCollProteinFamily(java.util.Vector param) { this.collProteinFamily = param; }

    public java.util.Vector getCollGenePathway() { return this.collGenePathway; }
    public void setCollGenePathway(java.util.Vector param) { this.collGenePathway = param; }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.support.replication;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.index.shard.ShardId;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
import static org.elasticsearch.test.ESTestCase.randomFrom;
import static org.elasticsearch.test.ESTestCase.randomInt;
import static org.elasticsearch.test.ESTestCase.randomIntBetween;

/**
 * Helper methods for generating synthetic cluster states for tests.
 *
 * <p>All factory methods build a {@link ClusterState} named "test" whose local
 * node is {@code node_0} and whose (non-local) master is {@code node_1}, so
 * that shard-failure paths involving a remote master can be exercised.
 */
public class ClusterStateCreationUtils {

    /**
     * Creates cluster state with an index that has one shard and #(replicaStates) replicas.
     *
     * @param index              name of the index
     * @param activePrimaryLocal if active primary should coincide with the local node in the cluster state
     * @param primaryState       state of primary
     * @param replicaStates      states of the replicas. length of this array determines also the number of replicas
     */
    public static ClusterState state(String index, boolean activePrimaryLocal, ShardRoutingState primaryState,
                                     ShardRoutingState... replicaStates) {
        final int numberOfReplicas = replicaStates.length;

        // Each assigned copy needs a node, and each relocating copy needs a
        // second (target) node.
        int numberOfNodes = numberOfReplicas + 1;
        if (primaryState == ShardRoutingState.RELOCATING) {
            numberOfNodes++;
        }
        for (ShardRoutingState state : replicaStates) {
            if (state == ShardRoutingState.RELOCATING) {
                numberOfNodes++;
            }
        }
        numberOfNodes = Math.max(2, numberOfNodes); // we need a non-local master to test shard failures
        final ShardId shardId = new ShardId(index, "_na_", 0);
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        // Tracks node ids that have not yet been given a shard copy.
        Set<String> unassignedNodes = new HashSet<>();
        for (int i = 0; i < numberOfNodes + 1; i++) {
            final DiscoveryNode node = newNode(i);
            discoBuilder = discoBuilder.put(node);
            unassignedNodes.add(node.getId());
        }
        discoBuilder.localNodeId(newNode(0).getId());
        discoBuilder.masterNodeId(newNode(1).getId()); // we need a non-local master to test shard failures
        final int primaryTerm = randomInt(200);
        IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(Settings.builder()
                .put(SETTING_VERSION_CREATED, Version.CURRENT)
                .put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
                .put(SETTING_CREATION_DATE, System.currentTimeMillis())).primaryTerm(0, primaryTerm).build();

        RoutingTable.Builder routing = new RoutingTable.Builder();
        routing.addAsNew(indexMetaData);

        IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId);

        // Place the primary first: either pinned to the local node or on a
        // random node other than the local one.
        String primaryNode = null;
        String relocatingNode = null;
        UnassignedInfo unassignedInfo = null;
        if (primaryState != ShardRoutingState.UNASSIGNED) {
            if (activePrimaryLocal) {
                primaryNode = newNode(0).getId();
                unassignedNodes.remove(primaryNode);
            } else {
                Set<String> unassignedNodesExecludingPrimary = new HashSet<>(unassignedNodes);
                unassignedNodesExecludingPrimary.remove(newNode(0).getId());
                primaryNode = selectAndRemove(unassignedNodesExecludingPrimary);
            }
            if (primaryState == ShardRoutingState.RELOCATING) {
                relocatingNode = selectAndRemove(unassignedNodes);
            }
        } else {
            unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null);
        }
        indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, null, true,
                primaryState, unassignedInfo));

        // Place each replica on a node that does not yet host a copy.
        for (ShardRoutingState replicaState : replicaStates) {
            String replicaNode = null;
            relocatingNode = null;
            unassignedInfo = null;
            if (replicaState != ShardRoutingState.UNASSIGNED) {
                assert primaryNode != null : "a replica is assigned but the primary isn't";
                replicaNode = selectAndRemove(unassignedNodes);
                if (replicaState == ShardRoutingState.RELOCATING) {
                    relocatingNode = selectAndRemove(unassignedNodes);
                }
            } else {
                unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null);
            }
            indexShardRoutingBuilder.addShard(
                    TestShardRouting.newShardRouting(index, shardId.id(), replicaNode, relocatingNode, null, false,
                            replicaState, unassignedInfo));
        }

        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded());
        state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(indexMetaData.getIndex())
                .addIndexShard(indexShardRoutingBuilder.build())).build());
        return state.build();
    }

    /**
     * Creates cluster state with several shards and one replica and all shards STARTED.
     * The primaries are placed on {@code node_0}, the replicas on {@code node_1}.
     */
    public static ClusterState stateWithAssignedPrimariesAndOneReplica(String index, int numberOfShards) {
        int numberOfNodes = 2; // we need a non-local master to test shard failures
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        for (int i = 0; i < numberOfNodes + 1; i++) {
            final DiscoveryNode node = newNode(i);
            discoBuilder = discoBuilder.put(node);
        }
        discoBuilder.localNodeId(newNode(0).getId());
        discoBuilder.masterNodeId(newNode(1).getId()); // we need a non-local master to test shard failures
        IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(Settings.builder()
                .put(SETTING_VERSION_CREATED, Version.CURRENT)
                .put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1)
                .put(SETTING_CREATION_DATE, System.currentTimeMillis())).build();
        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded());
        IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetaData.getIndex());
        for (int i = 0; i < numberOfShards; i++) {
            RoutingTable.Builder routing = new RoutingTable.Builder();
            routing.addAsNew(indexMetaData);
            final ShardId shardId = new ShardId(index, "_na_", i);
            IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId);
            indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(0).getId(), null, null, true,
                    ShardRoutingState.STARTED, null));
            indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(1).getId(), null, null, false,
                    ShardRoutingState.STARTED, null));
            indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder.build());
        }
        state.routingTable(RoutingTable.builder().add(indexRoutingTableBuilder.build()).build());
        return state.build();
    }

    /**
     * Creates cluster state with an index that has one shard and as many replicas as numberOfReplicas.
     * Primary will be STARTED in cluster state but replicas will be one of UNASSIGNED, INITIALIZING, STARTED or RELOCATING.
     *
     * @param index              name of the index
     * @param activePrimaryLocal if active primary should coincide with the local node in the cluster state
     * @param numberOfReplicas   number of replicas
     */
    public static ClusterState stateWithActivePrimary(String index, boolean activePrimaryLocal, int numberOfReplicas) {
        int assignedReplicas = randomIntBetween(0, numberOfReplicas);
        return stateWithActivePrimary(index, activePrimaryLocal, assignedReplicas, numberOfReplicas - assignedReplicas);
    }

    /**
     * Creates cluster state with an index that has one shard and as many replicas as numberOfReplicas.
     * Primary will be STARTED in cluster state. Some (unassignedReplicas) will be UNASSIGNED and
     * some (assignedReplicas) will be one of INITIALIZING, STARTED or RELOCATING.
     *
     * @param index              name of the index
     * @param activePrimaryLocal if active primary should coincide with the local node in the cluster state
     * @param assignedReplicas   number of replicas that should have INITIALIZING, STARTED or RELOCATING state
     * @param unassignedReplicas number of replicas that should be unassigned
     */
    public static ClusterState stateWithActivePrimary(String index, boolean activePrimaryLocal,
                                                      int assignedReplicas, int unassignedReplicas) {
        ShardRoutingState[] replicaStates = new ShardRoutingState[assignedReplicas + unassignedReplicas];
        // no point in randomizing - node assignment later on does it too.
        for (int i = 0; i < assignedReplicas; i++) {
            replicaStates[i] = randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
        }
        for (int i = assignedReplicas; i < replicaStates.length; i++) {
            replicaStates[i] = ShardRoutingState.UNASSIGNED;
        }
        return state(index, activePrimaryLocal, randomFrom(ShardRoutingState.STARTED, ShardRoutingState.RELOCATING),
                replicaStates);
    }

    /**
     * Creates a cluster state with no index.
     */
    public static ClusterState stateWithNoShard() {
        int numberOfNodes = 2;
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        discoBuilder.localNodeId(newNode(0).getId());
        discoBuilder.masterNodeId(newNode(1).getId());
        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().generateClusterUuidIfNeeded());
        state.routingTable(RoutingTable.builder().build());
        return state.build();
    }

    /**
     * Creates a cluster state where local node and master node can be specified
     *
     * @param localNode  node in allNodes that is the local node
     * @param masterNode node in allNodes that is the master node. Can be null if no master exists
     * @param allNodes   all nodes in the cluster
     * @return cluster state
     */
    public static ClusterState state(DiscoveryNode localNode, DiscoveryNode masterNode, DiscoveryNode... allNodes) {
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        for (DiscoveryNode node : allNodes) {
            discoBuilder.put(node);
        }
        if (masterNode != null) {
            discoBuilder.masterNodeId(masterNode.getId());
        }
        discoBuilder.localNodeId(localNode.getId());

        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().generateClusterUuidIfNeeded());
        return state.build();
    }

    /** Creates a test node named {@code node_<nodeId>} that carries every node role. */
    private static DiscoveryNode newNode(int nodeId) {
        return new DiscoveryNode("node_" + nodeId, DummyTransportAddress.INSTANCE, Collections.emptyMap(),
                new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT);
    }

    /** Picks a random element from {@code strings}, removes it from the set, and returns it. */
    static private String selectAndRemove(Set<String> strings) {
        String selection = randomFrom(strings.toArray(new String[strings.size()]));
        strings.remove(selection);
        return selection;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.zookeeper;

import java.io.File;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.zookeeper.AsyncCallback.MultiCallback;
import org.apache.zookeeper.AsyncCallback.StringCallback;
import org.apache.zookeeper.AsyncCallback.VoidCallback;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.common.PathUtils;
import org.apache.zookeeper.data.ACL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Static utility methods for working with a {@link ZooKeeper} handle:
 * recursive deletion, subtree traversal, and ACL/permission formatting.
 */
public class ZKUtil {

    private static final Logger LOG = LoggerFactory.getLogger(ZKUtil.class);

    // Cache of permission-bit masks to their string form ("cdrwa" subsets),
    // since the same masks recur constantly when printing ACLs.
    private static final Map<Integer, String> permCache = new ConcurrentHashMap<Integer, String>();

    /**
     * Recursively delete the node with the given path.
     * <p>
     * Important: All versions, of all nodes, under the given node are deleted.
     * <p>
     * If there is an error with deleting one of the sub-nodes in the tree,
     * this operation would abort and would be the responsibility of the app to handle the same.
     *
     * @param zk        the zookeeper handle
     * @param pathRoot  root of the subtree to delete (inclusive)
     * @param batchSize number of deletes grouped into a single multi() transaction
     * @return true if every batch succeeded, false otherwise
     * @throws IllegalArgumentException if an invalid path is specified
     */
    public static boolean deleteRecursive(
        ZooKeeper zk,
        final String pathRoot,
        final int batchSize) throws InterruptedException, KeeperException {
        PathUtils.validatePath(pathRoot);

        List<String> tree = listSubTreeBFS(zk, pathRoot);
        LOG.debug("Deleting tree: {}", tree);
        return deleteInBatch(zk, tree, batchSize);
    }

    /**
     * Same as {@link #deleteRecursive(org.apache.zookeeper.ZooKeeper, java.lang.String, int)}
     * kept here for compatibility with 3.5 clients.
     *
     * @since 3.6.1
     */
    public static void deleteRecursive(
        ZooKeeper zk,
        final String pathRoot) throws InterruptedException, KeeperException {
        deleteRecursive(zk, pathRoot, 1000);
    }

    /**
     * Shared state passed as the async context to each multi() callback:
     * a semaphore bounding the number of in-flight batches and a flag
     * recording whether any batch has failed.
     */
    private static class BatchedDeleteCbContext {

        public Semaphore sem;
        public AtomicBoolean success;

        public BatchedDeleteCbContext(int rateLimit) {
            sem = new Semaphore(rateLimit);
            success = new AtomicBoolean(true);
        }

    }

    /**
     * Deletes the given (BFS-ordered) node list leaves-first, issuing async
     * multi() transactions of up to batchSize deletes with at most
     * rateLimit batches outstanding at once.
     */
    private static boolean deleteInBatch(ZooKeeper zk, List<String> tree, int batchSize) throws InterruptedException {
        int rateLimit = 10;
        List<Op> ops = new ArrayList<>();
        BatchedDeleteCbContext context = new BatchedDeleteCbContext(rateLimit);
        MultiCallback cb = (rc, path, ctx, opResults) -> {
            ((BatchedDeleteCbContext) ctx).sem.release();
            if (rc != Code.OK.intValue()) {
                ((BatchedDeleteCbContext) ctx).success.set(false);
            }
        };

        // Delete the leaves first and eventually get rid of the root
        for (int i = tree.size() - 1; i >= 0; --i) {
            // Create Op to delete all versions of the node with -1.
            ops.add(Op.delete(tree.get(i), -1));

            // Flush a full batch, or whatever remains when the root is reached.
            if (ops.size() == batchSize || i == 0) {
                if (!context.success.get()) {
                    // fail fast
                    break;
                }

                context.sem.acquire();
                zk.multi(ops, cb, context);
                ops = new ArrayList<>();
            }
        }

        // wait for all callbacks to finish by draining every permit
        context.sem.acquire(rateLimit);
        return context.success.get();
    }

    /**
     * Recursively delete the node with the given path. (async version).
     *
     * <p>
     * Important: All versions, of all nodes, under the given node are deleted.
     * <p>
     * If there is an error with deleting one of the sub-nodes in the tree,
     * this operation would abort and would be the responsibility of the app to handle the same.
     * <p>
     * @param zk the zookeeper handle
     * @param pathRoot the path to be deleted
     * @param cb call back method
     * @param ctx the context the callback method is called with
     * @throws IllegalArgumentException if an invalid path is specified
     */
    public static void deleteRecursive(
        ZooKeeper zk,
        final String pathRoot,
        VoidCallback cb,
        Object ctx) throws InterruptedException, KeeperException {
        PathUtils.validatePath(pathRoot);

        List<String> tree = listSubTreeBFS(zk, pathRoot);
        LOG.debug("Deleting tree: {}", tree);
        for (int i = tree.size() - 1; i >= 0; --i) {
            //Delete the leaves first and eventually get rid of the root
            zk.delete(tree.get(i), -1, cb, ctx); //Delete all versions of the node with -1.
        }
    }

    /**
     * @param filePath the file path to be validated
     * @return Returns null if valid otherwise error message
     */
    public static String validateFileInput(String filePath) {
        File file = new File(filePath);
        if (!file.exists()) {
            return "File '" + file.getAbsolutePath() + "' does not exist.";
        }
        if (!file.canRead()) {
            return "Read permission is denied on the file '" + file.getAbsolutePath() + "'";
        }
        if (file.isDirectory()) {
            return "'" + file.getAbsolutePath() + "' is a directory. it must be a file.";
        }
        return null;
    }

    /**
     * BFS Traversal of the system under pathRoot, with the entries in the list, in the
     * same order as that of the traversal.
     * <p>
     * <b>Important:</b> This is <i>not an atomic snapshot</i> of the tree ever, but the
     * state as it exists across multiple RPCs from zkClient to the ensemble.
     * For practical purposes, it is suggested to bring the clients to the ensemble
     * down (i.e. prevent writes to pathRoot) to 'simulate' a snapshot behavior.
     *
     * @param zk the zookeeper handle
     * @param pathRoot The znode path, for which the entire subtree needs to be listed.
     * @return pathRoot followed by every descendant path, parents before children
     * @throws InterruptedException
     * @throws KeeperException
     */
    public static List<String> listSubTreeBFS(
        ZooKeeper zk,
        final String pathRoot) throws KeeperException, InterruptedException {
        Queue<String> queue = new ArrayDeque<>();
        List<String> tree = new ArrayList<String>();
        queue.add(pathRoot);
        tree.add(pathRoot);
        while (!queue.isEmpty()) {
            String node = queue.poll();
            List<String> children = zk.getChildren(node, false);
            for (final String child : children) {
                // Fix IllegalArgumentException when list "/".
                final String childPath = (node.equals("/") ? "" : node) + "/" + child;
                queue.add(childPath);
                tree.add(childPath);
            }
        }
        return tree;
    }

    /**
     * Visits the subtree with root as given path and calls the passed callback with each znode
     * found during the search. It performs a depth-first, pre-order traversal of the tree.
     * <p>
     * <b>Important:</b> This is <i>not an atomic snapshot</i> of the tree ever, but the
     * state as it exists across multiple RPCs from zkClient to the ensemble.
     * For practical purposes, it is suggested to bring the clients to the ensemble
     * down (i.e. prevent writes to pathRoot) to 'simulate' a snapshot behavior.
     */
    public static void visitSubTreeDFS(
        ZooKeeper zk,
        final String path,
        boolean watch,
        StringCallback cb) throws KeeperException, InterruptedException {
        PathUtils.validatePath(path);

        // Touch the root first so the callback sees it before any children.
        zk.getData(path, watch, null);
        cb.processResult(Code.OK.intValue(), path, null, path);
        visitSubTreeDFSHelper(zk, path, watch, cb);
    }

    /** Recursive worker for {@link #visitSubTreeDFS}: visits children of path in sorted order. */
    @SuppressWarnings("unchecked")
    private static void visitSubTreeDFSHelper(
        ZooKeeper zk,
        final String path,
        boolean watch,
        StringCallback cb) throws KeeperException, InterruptedException {
        // we've already validated, therefore if the path is of length 1 it's the root
        final boolean isRoot = path.length() == 1;
        try {
            List<String> children = zk.getChildren(path, watch, null);
            Collections.sort(children);

            for (String child : children) {
                String childPath = (isRoot ? path : path + "/") + child;
                cb.processResult(Code.OK.intValue(), childPath, null, child);
            }

            for (String child : children) {
                String childPath = (isRoot ? path : path + "/") + child;
                visitSubTreeDFSHelper(zk, childPath, watch, cb);
            }
        } catch (KeeperException.NoNodeException e) {
            // Handle race condition where a node is listed
            // but gets deleted before it can be queried
            return; // ignore
        }
    }

    /**
     * @param perms
     *            ACL permissions
     * @return string representation of permissions
     */
    public static String getPermString(int perms) {
        return permCache.computeIfAbsent(perms, k -> constructPermString(k));
    }

    /** Builds the "cdrwa"-style string for a permission bit mask (cache miss path). */
    private static String constructPermString(int perms) {
        StringBuilder p = new StringBuilder();
        if ((perms & ZooDefs.Perms.CREATE) != 0) {
            p.append('c');
        }
        if ((perms & ZooDefs.Perms.DELETE) != 0) {
            p.append('d');
        }
        if ((perms & ZooDefs.Perms.READ) != 0) {
            p.append('r');
        }
        if ((perms & ZooDefs.Perms.WRITE) != 0) {
            p.append('w');
        }
        if ((perms & ZooDefs.Perms.ADMIN) != 0) {
            p.append('a');
        }
        return p.toString();
    }

    /** Renders each ACL as scheme:id:perms, concatenated with no separator between entries. */
    public static String aclToString(List<ACL> acls) {
        StringBuilder sb = new StringBuilder();
        for (ACL acl : acls) {
            sb.append(acl.getId().getScheme());
            sb.append(":");
            sb.append(acl.getId().getId());
            sb.append(":");
            sb.append(getPermString(acl.getPerms()));
        }
        return sb.toString();
    }

}
/*
Copyright 2011-2016 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes;

import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.Gui.Actions.CActionProxy;
import com.google.security.zynamics.binnavi.Gui.CTableSearcherHelper;
import com.google.security.zynamics.binnavi.Gui.FilterPanel.CFilteredTable;
import com.google.security.zynamics.binnavi.Gui.HotKeys;
import com.google.security.zynamics.binnavi.Help.IHelpInformation;

import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;

import javax.swing.AbstractAction;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JPopupMenu;
import javax.swing.JTree;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;

/**
 * Base class for all tables to be displayed on the right side of the main window.
 *
 * <p>Wires up search (Ctrl-F style hotkey) and delete hotkeys, a context-menu
 * mouse listener, and double-click handling; subclasses supply the concrete
 * popup menu and double-click behavior.
 *
 * @param <T> Type of the elements shown in the table.
 */
public abstract class CAbstractTreeTable<T> extends CFilteredTable<T> {
  /**
   * The project tree of the main window.
   */
  private final JTree tree;

  /**
   * The raw table model of the table. Sorting is done in the super class.
   */
  private final CAbstractTreeTableModel<T> treeTableModel;

  /**
   * Mouse listener that handles clicks on the table.
   */
  private final InternalMouseListener mouseListener = new InternalMouseListener();

  /**
   * Creates a new abstract tree table object.
   *
   * @param projectTree The project tree shown in the main window.
   * @param model The raw model that is responsible for the table layout.
   * @param helpInfo Provides context-sensitive information for the table.
   */
  public CAbstractTreeTable(final JTree projectTree, final CAbstractTreeTableModel<T> model,
      final IHelpInformation helpInfo) {
    super(model, helpInfo);
    treeTableModel = Preconditions.checkNotNull(model, "IE01939: Model argument can't be null");
    tree = Preconditions.checkNotNull(projectTree,
        "IE02343: Project tree argument can not be null");

    addMouseListener(mouseListener);

    setDefaultRenderer(String.class, new CProjectTreeTableRenderer());

    // Bind the search and delete hotkeys to the corresponding actions.
    final InputMap windowImap = getInputMap(JComponent.WHEN_FOCUSED);

    windowImap.put(HotKeys.SEARCH_HK.getKeyStroke(), "SEARCH");
    getActionMap().put("SEARCH", CActionProxy.proxy(new SearchAction()));

    windowImap.put(HotKeys.DELETE_HK.getKeyStroke(), "DELETE");
    getActionMap().put("DELETE", CActionProxy.proxy(new DeleteAction()));

    updateUI();
  }

  /**
   * Creates a popup menu depending on where the user clicked and shows that context menu in the
   * table.
   *
   * @param event The mouse event that was created when the user clicked.
   */
  private void displayPopupMenu(final MouseEvent event) {
    final int selectedIndex = getSelectionIndex(event);

    if (selectedIndex != -1) {
      final JPopupMenu popupMenu = getPopupMenu(event.getX(), event.getY(), selectedIndex);

      if (popupMenu != null) {
        popupMenu.show(this, event.getX(), event.getY());
      }
    }
  }

  /**
   * Uses information from a mouse event to determine what row was clicked.
   *
   * @param event The mouse event.
   *
   * @return The index of the row that was clicked or -1 if the row could not be determined.
   */
  private int getSelectionIndex(final MouseEvent event) {
    // NOTE(review): rowAtPoint() returns -1 for a click outside all rows, and
    // convertRowIndexToModel(-1) would throw — presumably clicks only arrive on
    // valid rows here. TODO confirm against JTable behavior.
    return convertRowIndexToModel(rowAtPoint(event.getPoint()));
  }

  /**
   * Deletes the selected rows. Default implementation does nothing; subclasses
   * override to support deletion.
   */
  protected void deleteRows() {
  }

  /**
   * Returns the parent window of the tree table.
   *
   * @return The parent window of the tree table.
   */
  protected JFrame getParentWindow() {
    return (JFrame) SwingUtilities.getWindowAncestor(tree);
  }

  /**
   * Creates a table-specific popup menu.
   *
   * @param x The x coordinate where the user clicked.
   * @param y The y coordinate where the user clicked.
   * @param selectedIndex The index of the row where the user clicked.
   *
   * @return The popup menu to be shown or null if no popup menu should be shown.
   */
  protected abstract JPopupMenu getPopupMenu(int x, int y, int selectedIndex);

  /**
   * Returns the project tree of the main window.
   *
   * @return The project tree of the main window.
   */
  protected JTree getProjectTree() {
    return tree;
  }

  /**
   * Returns the normalized indices of the selected rows.
   *
   * @return The normalized indices of the selected rows.
   */
  protected int[] getSortSelectedRows() {
    final int[] rows = getSelectedRows();

    // Convert view indices to model indices so sorting does not skew them.
    for (int i = 0; i < rows.length; i++) {
      rows[i] = convertRowIndexToModel(rows[i]);
    }

    return rows;
  }

  /**
   * Handles double-clicks on table rows.
   *
   * @param row The index of the row that was clicked.
   */
  protected abstract void handleDoubleClick(int row);

  @Override
  protected boolean processKeyBinding(final KeyStroke keyStroke, final KeyEvent event,
      final int condition, final boolean pressed) {
    // turn off edit but still can cause actions
    if (event.getKeyCode() == KeyEvent.VK_DELETE) {
      putClientProperty("JTable.autoStartsEdit", Boolean.FALSE);
      final boolean retvalue = super.processKeyBinding(keyStroke, event, condition, pressed);
      putClientProperty("JTable.autoStartsEdit", Boolean.TRUE);
      return retvalue;
    }
    return super.processKeyBinding(keyStroke, event, condition, pressed);
  }

  /**
   * Clean-up function. Detaches the mouse listener and deletes the model.
   */
  @Override
  public void dispose() {
    removeMouseListener(mouseListener);

    treeTableModel.delete();
  }

  @Override
  public CAbstractTreeTableModel<T> getTreeTableModel() {
    return treeTableModel;
  }

  /**
   * Action class that handles the deletion of rows.
   */
  private class DeleteAction extends AbstractAction {
    @Override
    public void actionPerformed(final ActionEvent event) {
      deleteRows();
    }
  }

  /**
   * Listens on mouse events and handles right-clicks and double-clicks.
   */
  private class InternalMouseListener extends MouseAdapter {
    @Override
    public void mouseClicked(final MouseEvent event) {
      if ((event.getButton() == MouseEvent.BUTTON1) && (event.getClickCount() == 2)) {
        handleDoubleClick(getSelectionIndex(event));
      }
    }

    // Popup trigger is checked on both press and release because the trigger
    // event differs between platforms.
    @Override
    public void mousePressed(final MouseEvent event) {
      if (event.isPopupTrigger()) {
        displayPopupMenu(event);
      }
    }

    @Override
    public void mouseReleased(final MouseEvent event) {
      if (event.isPopupTrigger()) {
        displayPopupMenu(event);
      }
    }
  }

  /**
   * Action class that handles table searching.
   */
  private class SearchAction extends AbstractAction {
    @Override
    public void actionPerformed(final ActionEvent event) {
      CTableSearcherHelper.search(SwingUtilities.getWindowAncestor(CAbstractTreeTable.this),
          CAbstractTreeTable.this);
    }
  }
}
package org.kie.internal.query.data;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

import org.kie.internal.jaxb.StringKeyObjectValueMapXmlAdapter;

/**
 * Container for query criteria, bucketed six ways:
 * {union, intersect} x {plain (equals), regex (like), range}.
 * <p>
 * The three transient mode flags ({@code union}, {@code like}, {@code range})
 * select which bucket {@link #getAppropriateParamList(String, Object, int)}
 * reads from and writes to; callers flip them via the {@code setTo*} methods
 * before adding parameters.
 */
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
@Deprecated // see org.jbpm.query.jpa.data.QueryWhere and org.jbpm.query.jpa.data.QueryCriteria
public class QueryParameters {

    // Mode flags are not serialized: they only steer where new parameters go.
    @XmlTransient
    private boolean union = true;

    @XmlTransient
    private boolean like = false;

    @XmlTransient
    private boolean range = false;

    @XmlElement
    @XmlJavaTypeAdapter(StringKeyObjectValueMapXmlAdapter.class)
    private Map<String, List<? extends Object>> unionParameters = null;

    @XmlElement
    @XmlJavaTypeAdapter(StringKeyObjectValueMapXmlAdapter.class)
    private Map<String, List<? extends Object>> intersectParameters = null;

    @XmlElement
    @XmlJavaTypeAdapter(StringKeyObjectValueMapXmlAdapter.class)
    private Map<String, List<String>> unionRegexParameters = null;

    @XmlElement
    @XmlJavaTypeAdapter(StringKeyObjectValueMapXmlAdapter.class)
    private Map<String, List<String>> intersectRegexParameters = null;

    @XmlElement
    @XmlJavaTypeAdapter(StringKeyObjectValueMapXmlAdapter.class)
    private Map<String, List<? extends Object>> unionRangeParameters = null;

    @XmlElement
    @XmlJavaTypeAdapter(StringKeyObjectValueMapXmlAdapter.class)
    private Map<String, List<? extends Object>> intersectRangeParameters = null;

    public QueryParameters() {
        // JAXB constructor
    }

    /**
     * Copy constructor: mode flags are copied; parameter maps are shallow-copied
     * (the map instances are new, the contained lists are shared).
     *
     * @param queryParameters The instance to copy; must not be null.
     */
    public QueryParameters(QueryParameters queryParameters) {
        this.union = queryParameters.union;
        this.like = queryParameters.like;
        this.range = queryParameters.range;
        this.intersectParameters = queryParameters.intersectParameters == null ? null
                : new HashMap<String, List<? extends Object>>(queryParameters.intersectParameters);
        this.unionParameters = queryParameters.unionParameters == null ? null
                : new HashMap<String, List<? extends Object>>(queryParameters.unionParameters);
        this.intersectRangeParameters = queryParameters.intersectRangeParameters == null ? null
                : new HashMap<String, List<? extends Object>>(queryParameters.intersectRangeParameters);
        this.unionRangeParameters = queryParameters.unionRangeParameters == null ? null
                : new HashMap<String, List<? extends Object>>(queryParameters.unionRangeParameters);
        this.intersectRegexParameters = queryParameters.intersectRegexParameters == null ? null
                : new HashMap<String, List<String>>(queryParameters.intersectRegexParameters);
        this.unionRegexParameters = queryParameters.unionRegexParameters == null ? null
                : new HashMap<String, List<String>>(queryParameters.unionRegexParameters);
    }

    // getters (all lazily initialize their backing map; never return null)

    public Map<String, List<? extends Object>> getUnionParameters() {
        if( unionParameters == null ) {
            unionParameters = new HashMap<String, List<? extends Object>>();
        }
        return unionParameters;
    }

    /** @return true if no plain union parameters have been added. */
    public boolean unionParametersAreEmpty() {
        return unionParameters == null || unionParameters.isEmpty();
    }

    public Map<String, List<? extends Object>> getIntersectParameters() {
        if( intersectParameters == null ) {
            intersectParameters = new HashMap<String, List<? extends Object>>();
        }
        return intersectParameters;
    }

    /** @return true if no plain intersect parameters have been added. */
    public boolean intersectParametersAreEmpty() {
        return intersectParameters == null || intersectParameters.isEmpty();
    }

    public Map<String, List<String>> getUnionRegexParameters() {
        if( unionRegexParameters == null ) {
            unionRegexParameters = new HashMap<String, List<String>>();
        }
        return unionRegexParameters;
    }

    /** @return true if no regex union parameters have been added. */
    public boolean unionRegexParametersAreEmpty() {
        return unionRegexParameters == null || unionRegexParameters.isEmpty();
    }

    public Map<String, List<String>> getIntersectRegexParameters() {
        if( intersectRegexParameters == null ) {
            intersectRegexParameters = new HashMap<String, List<String>>();
        }
        return intersectRegexParameters;
    }

    /** @return true if no regex intersect parameters have been added. */
    public boolean intersectRegexParametersAreEmpty() {
        return intersectRegexParameters == null || intersectRegexParameters.isEmpty();
    }

    public Map<String, List<? extends Object>> getUnionRangeParameters() {
        if( unionRangeParameters == null ) {
            unionRangeParameters = new HashMap<String, List<? extends Object>>();
        }
        return unionRangeParameters;
    }

    /** @return true if no range union parameters have been added. */
    public boolean unionRangeParametersAreEmpty() {
        return unionRangeParameters == null || unionRangeParameters.isEmpty();
    }

    public Map<String, List<? extends Object>> getIntersectRangeParameters() {
        if( intersectRangeParameters == null ) {
            intersectRangeParameters = new HashMap<String, List<? extends Object>>();
        }
        return intersectRangeParameters;
    }

    /** @return true if no range intersect parameters have been added. */
    public boolean intersectRangeParametersAreEmpty() {
        return intersectRangeParameters == null || intersectRangeParameters.isEmpty();
    }

    // other logic

    /**
     * Appends the given values to the parameter list identified by listId,
     * choosing the bucket based on the current union/like/range flags.
     * No-op when no values are passed.
     *
     * @param listId Identifier of the parameter list.
     * @param param  Values to add; must be Strings when in like (regex) mode.
     */
    public <T> void addAppropriateParam( String listId, T... param ) {
        if( param.length == 0 ) {
            return;
        }
        List<T> listParams = getAppropriateParamList(listId, param[0], param.length);
        listParams.addAll(Arrays.asList(param));
    }

    /**
     * Stores one end of a range parameter: index 0 is the range start,
     * index 1 is the range end.
     * <p>
     * NOTE: temporarily forces range mode and always resets it to false
     * afterwards, even if range mode was enabled before the call
     * (legacy behavior, deliberately preserved).
     */
    public <T> void addRangeParameter( String listId, T param, boolean start ) {
        this.range = true;
        List<T> listParams = getAppropriateParamList(listId, param, 2);
        int index = start ? 0 : 1;
        listParams.set(index, param);
        this.range = false;
    }

    /**
     * Returns (creating if necessary) the parameter list for listId in the
     * bucket selected by the current union/like/range flags.
     *
     * @param listId Identifier of the parameter list.
     * @param param  A sample value, used only to type-check regex mode.
     * @param size   Initial capacity hint for a newly created list.
     * @return The (possibly freshly created) parameter list.
     * @throws IllegalArgumentException if in like mode and param is not a String.
     */
    @SuppressWarnings("unchecked")
    public <T> List<T> getAppropriateParamList(String listId, T param, int size) {
        if( like ) {
            if( ! (param instanceof String) ) {
                throw new IllegalArgumentException("Only String parameters may be used in regular expressions.");
            }
            // BUG FIX: the intersect branch previously read the list from
            // getIntersectParameters() (the plain-parameter map) while writing
            // new lists to getIntersectRegexParameters(), so an existing
            // plain-parameter list could be returned and polluted with regex
            // strings. Both the read and the write now target the regex map.
            Map<String, List<String>> regexParams =
                    union ? getUnionRegexParameters() : getIntersectRegexParameters();
            List<String> stringListParams = regexParams.get(listId);
            if( stringListParams == null ) {
                stringListParams = new ArrayList<String>(size);
                regexParams.put(listId, stringListParams);
            }
            return (List<T>) stringListParams;
        } else if( range ) {
            Map<String, List<? extends Object>> rangeParams =
                    union ? getUnionRangeParameters() : getIntersectRangeParameters();
            List<T> listParams = (List<T>) rangeParams.get(listId);
            if( listParams == null ) {
                // Fixed-size two-slot list: [start, end]; filled via List.set()
                // in addRangeParameter (Arrays.asList does not support add()).
                listParams = Arrays.asList(null, null);
                rangeParams.put(listId, listParams);
            }
            return listParams;
        } else {
            Map<String, List<? extends Object>> params =
                    union ? getUnionParameters() : getIntersectParameters();
            List<T> listParams = (List<T>) params.get(listId);
            if( listParams == null ) {
                listParams = new ArrayList<T>(size);
                params.put(listId, listParams);
            }
            return listParams;
        }
    }

    public void setToUnion() {
        this.union = true;
    }

    public void setToIntersection() {
        this.union = false;
    }

    public boolean isUnion() {
        return this.union;
    }

    public void setToLike() {
        this.like = true;
    }

    public boolean isLike() {
        return this.like;
    }

    public void setToEquals() {
        this.like = false;
    }

    public void setToRange() {
        this.range = true;
    }

    public void setToPrecise() {
        this.range = false;
    }

    public boolean isRange() {
        return this.range;
    }

    /**
     * Resets the mode flags to their defaults (union, equals, precise) and
     * empties every parameter map that has been created so far.
     */
    public void clear() {
        union = true;
        like = false;
        range = false;
        Map<?, ?>[] maps = {
                unionParameters, intersectParameters,
                unionRegexParameters, intersectRegexParameters,
                unionRangeParameters, intersectRangeParameters };
        for( Map<?, ?> paramMap : maps ) {
            if( paramMap != null ) {
                paramMap.clear();
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.test;

import static org.apache.pig.ExecType.MAPREDUCE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.Job;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.ResourceSchema;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POProject;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.util.Utils;
import org.apache.pig.test.utils.TypeCheckingTestUtil;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for {@link PigStorage}: record/block-boundary handling, column
 * pruning, and the '-schema'/'-noschema' options including discovery of
 * .pig_schema / .pig_header side files.
 */
public class TestPigStorage {

    protected final Log log = LogFactory.getLog(getClass());

    private static MiniCluster cluster = MiniCluster.buildCluster();

    static PigServer pig;
    static final String datadir = "build/test/tmpdata/";

    PigContext pigContext = new PigContext(ExecType.LOCAL, new Properties());
    Map<String, String> fileNameMap = new HashMap<String, String>();

    @Before
    public void setup() throws IOException {
        // some tests are in map-reduce mode and some in local - so before
        // each test, we will de-initialize FileLocalizer so that temp files
        // are created correctly depending on the ExecType in the test.
        FileLocalizer.setInitialized(false);

        // If needed, a test can change that. Most tests are local so we save a bit
        // of typing here.
        pig = new PigServer(ExecType.LOCAL, cluster.getProperties());
        Util.deleteDirectory(new File(datadir));
        try {
            pig.mkdirs(datadir);
        } catch (IOException ignored) {
            // best effort - the directory may already exist
        }
        Util.createLocalInputFile(datadir + "originput",
                new String[] {"A,1", "B,2", "C,3", "D,2",
                              "A,5", "B,5", "C,8", "A,8",
                              "D,8", "A,9"});
    }

    @After
    public void tearDown() throws Exception {
        Util.deleteDirectory(new File(datadir));
        pig.shutdown();
    }

    @AfterClass
    public static void shutdown() {
        cluster.shutDown();
    }

    /** Creates a Job wrapping the current PigContext properties. */
    private Job newJob() throws IOException {
        return new Job(ConfigurationUtil.toConfiguration(pigContext.getProperties()));
    }

    /** Asserts that iterating the given alias yields exactly the expected tuples, in order. */
    private void assertQueryResults(String alias, List<Tuple> expectedResults) throws IOException {
        Iterator<Tuple> iter = pig.openIterator(alias);
        int counter = 0;
        while (iter.hasNext()) {
            Assert.assertEquals(expectedResults.get(counter++).toString(), iter.next().toString());
        }
        Assert.assertEquals(expectedResults.size(), counter);
    }

    @Test
    public void testBlockBoundary() throws ExecException {
        // This tests PigStorage loader with records exactly
        // on the boundary of the file blocks.
        Properties props = new Properties();
        for (Entry<Object, Object> entry : cluster.getProperties().entrySet()) {
            props.put(entry.getKey(), entry.getValue());
        }
        props.setProperty("mapred.max.split.size", "20");
        PigServer pigServer = new PigServer(MAPREDUCE, props);
        String[] inputs = {
                "abcdefgh1", "abcdefgh2", "abcdefgh3",
                "abcdefgh4", "abcdefgh5", "abcdefgh6",
                "abcdefgh7", "abcdefgh8", "abcdefgh9"
        };
        String[] expected = {
                "(abcdefgh1)", "(abcdefgh2)", "(abcdefgh3)",
                "(abcdefgh4)", "(abcdefgh5)", "(abcdefgh6)",
                "(abcdefgh7)", "(abcdefgh8)", "(abcdefgh9)"
        };
        System.setProperty("pig.overrideBlockSize", "20");
        String INPUT_FILE = "tmp.txt";
        try {
            // Close the writer in a finally so the file handle is not leaked
            // if writing fails part-way.
            PrintWriter w = new PrintWriter(new FileWriter(INPUT_FILE));
            try {
                for (String s : inputs) {
                    w.println(s);
                }
            } finally {
                w.close();
            }
            Util.copyFromLocalToCluster(cluster, INPUT_FILE, INPUT_FILE);
            pigServer.registerQuery("a = load '" + INPUT_FILE + "';");
            Iterator<Tuple> iter = pigServer.openIterator("a");
            int counter = 0;
            while (iter.hasNext()) {
                // expected[] already holds Strings; no .toString() needed
                assertEquals(expected[counter++], iter.next().toString());
            }
            assertEquals(expected.length, counter);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail();
        } finally {
            new File(INPUT_FILE).delete();
            try {
                Util.deleteFile(cluster, INPUT_FILE);
            } catch (IOException e) {
                e.printStackTrace();
                Assert.fail();
            }
        }
    }

    /**
     * Test to verify that PigStorage works fine in the following scenario:
     * The column prune optimization determines only columns 2 and 3 are needed
     * and there are records in the data which have only 1 column (malformed data).
     * In this case, PigStorage should return an empty tuple to represent columns
     * 2 and 3 and {@link POProject} would handle catching any
     * {@link IndexOutOfBoundsException} resulting from accessing a field in the
     * tuple and substitute a null.
     */
    @Test
    public void testPruneColumnsWithMissingFields() throws IOException {
        String inputFileName = "TestPigStorage-testPruneColumnsWithMissingFields-input.txt";
        Util.createLocalInputFile(
                inputFileName,
                new String[] {"1\t2\t3", "4", "5\t6\t7"});
        String script = "a = load '" + inputFileName + "' as (i:int, j:int, k:int);" +
                "b = foreach a generate j, k;";
        Util.registerMultiLineQuery(pig, script);
        Iterator<Tuple> it = pig.openIterator("b");
        assertEquals(Util.createTuple(new Integer[] { 2, 3}), it.next());
        assertEquals(Util.createTuple(new Integer[] { null, null}), it.next());
        assertEquals(Util.createTuple(new Integer[] { 6, 7}), it.next());
        assertFalse(it.hasNext());
    }

    @Test
    public void testPigStorageNoSchema() throws Exception {
        // if the schema file does not exist, and '-schema' option is used
        // it should result in an error
        pigContext.connect();
        String query = "a = LOAD '" + datadir + "originput' using PigStorage('\\t', '-schema') " +
                "as (f1:chararray, f2:int);";
        pig.registerQuery(query);
        try {
            pig.dumpSchema("a");
        } catch (FrontendException ex) {
            // expected: '-schema' with no schema file must fail
            return;
        }
        fail("no exception caught");
    }

    @Test
    public void testPigStorageSchema() throws Exception {
        pigContext.connect();
        String query = "a = LOAD '" + datadir + "originput' using PigStorage('\\t') " +
                "as (f1:chararray, f2:int);";
        pig.registerQuery(query);
        Schema origSchema = pig.dumpSchema("a");
        pig.store("a", datadir + "aout", "PigStorage('\\t', '-schema')");

        // aout now has a schema.

        // Verify that loading a-out with no given schema produces
        // the original schema.
        pig.registerQuery("b = LOAD '" + datadir + "aout' using PigStorage('\\t');");
        Schema genSchema = pig.dumpSchema("b");
        Assert.assertTrue("generated schema equals original",
                Schema.equals(genSchema, origSchema, true, false));

        // Verify that giving our own schema works
        String[] aliases = {"foo", "bar"};
        byte[] types = {DataType.INTEGER, DataType.LONG};
        Schema newSchema = TypeCheckingTestUtil.genFlatSchema(aliases, types);
        pig.registerQuery("c = LOAD '" + datadir + "aout' using PigStorage('\\t', '-schema') " +
                "as (foo:int, bar:long);");
        Schema newGenSchema = pig.dumpSchema("c");
        Assert.assertTrue("explicit schema overrides metadata",
                Schema.equals(newSchema, newGenSchema, true, false));

        // Verify that explicitly requesting no schema works
        pig.registerQuery("d = LOAD '" + datadir + "aout' using PigStorage('\\t', '-noschema');");
        genSchema = pig.dumpSchema("d");
        assertNull(genSchema);
    }

    @Test
    public void testSchemaConversion() throws Exception {
        Util.createLocalInputFile(datadir + "originput2",
                new String[] {"1", "2", "3", "2", "5", "5", "8", "8", "8", "9"});
        pig.registerQuery("A = LOAD '" + datadir + "originput2' using PigStorage('\\t') " +
                "as (f:int);");
        pig.registerQuery("B = group A by f;");
        Schema origSchema = pig.dumpSchema("B");
        ResourceSchema rs1 = new ResourceSchema(origSchema);
        pig.registerQuery("STORE B into '" + datadir + "bout' using PigStorage('\\t', '-schema');");

        pig.registerQuery("C = LOAD '" + datadir + "bout' using PigStorage('\\t', '-schema');");
        Schema genSchema = pig.dumpSchema("C");
        ResourceSchema rs2 = new ResourceSchema(genSchema);
        Assert.assertTrue("generated schema equals original",
                ResourceSchema.equals(rs1, rs2));

        pig.registerQuery("C1 = LOAD '" + datadir + "bout' as (a0:int, A: {t: (f:int) } );");
        pig.registerQuery("D = foreach C1 generate a0, SUM(A);");
        List<Tuple> expectedResults = Util.getTuplesFromConstantTupleStrings(
                new String[] {
                        "(1,1L)", "(2,4L)", "(3,3L)", "(5,10L)", "(8,24L)", "(9,9L)"
                });
        assertQueryResults("D", expectedResults);
    }

    @Test
    public void testSchemaConversion2() throws Exception {
        pig.registerQuery("A = LOAD '" + datadir + "originput' using PigStorage(',') " +
                "as (f1:chararray, f2:int);");
        pig.registerQuery("B = group A by f1;");
        Schema origSchema = pig.dumpSchema("B");
        ResourceSchema rs1 = new ResourceSchema(origSchema);
        pig.registerQuery("STORE B into '" + datadir + "cout' using PigStorage('\\t', '-schema');");

        pig.registerQuery("C = LOAD '" + datadir + "cout' using PigStorage('\\t', '-schema');");
        Schema genSchema = pig.dumpSchema("C");
        ResourceSchema rs2 = new ResourceSchema(genSchema);
        Assert.assertTrue("generated schema equals original",
                ResourceSchema.equals(rs1, rs2));

        pig.registerQuery("C1 = LOAD '" + datadir + "cout' as (a0:chararray, A: {t: (f1:chararray, f2:int) } );");
        pig.registerQuery("D = foreach C1 generate a0, SUM(A.f2);");
        List<Tuple> expectedResults = Util.getTuplesFromConstantTupleStrings(
                new String[] {
                        "('A',23L)", "('B',7L)", "('C',11L)", "('D',10L)"
                });
        assertQueryResults("D", expectedResults);
    }

    /**
     * See PIG-1830
     * @throws IOException
     */
    @Test
    public void testByteArrayConversion() throws IOException {
        Util.createLocalInputFile(datadir + "originput2",
                new String[] {"peter\t1", "samir\t2", "michael\t4",
                              "peter\t2", "peter\t4", "samir\t1", "john\t" });
        Util.createLocalInputFile(datadir + ".pig_schema",
                new String[] {
                        "{\"fields\":[{\"name\":\"name\",\"type\":55,\"schema\":null," +
                        "\"description\":\"autogenerated from Pig Field Schema\"}," +
                        "{\"name\":\"val\",\"type\":10,\"schema\":null,\"description\":"+
                        "\"autogenerated from Pig Field Schema\"}],\"version\":0," +
                        "\"sortKeys\":[],\"sortKeyOrders\":[]}"
                });
        pig.registerQuery("Events = LOAD '" + datadir + "originput2' USING PigStorage('\\t', '-schema');");
        pig.registerQuery("Sessions = GROUP Events BY name;");
        // Smoke test: just verify the pipeline can be iterated without a
        // conversion error (PIG-1830); no value assertions are made here.
        Iterator<Tuple> sessions = pig.openIterator("Sessions");
        while (sessions.hasNext()) {
            System.out.println(sessions.next());
        }
    }

    // See PIG-1993
    @Test
    public void testColumnPrune() throws IOException {
        Util.createLocalInputFile(datadir + "originput2",
                new String[] {"peter\t1", "samir\t2", "michael\t4",
                              "peter\t2", "peter\t4", "samir\t1", "john\t" });
        Util.createLocalInputFile(datadir + ".pig_schema",
                new String[] {
                        "{\"fields\":[{\"name\":\"name\",\"type\":55,\"schema\":null," +
                        "\"description\":\"autogenerated from Pig Field Schema\"}," +
                        "{\"name\":\"val\",\"type\":10,\"schema\":null,\"description\":"+
                        "\"autogenerated from Pig Field Schema\"}],\"version\":0," +
                        "\"sortKeys\":[],\"sortKeyOrders\":[]}"
                });
        pig.registerQuery("Events = LOAD '" + datadir + "originput2' USING PigStorage('\\t', '-schema');");
        pig.registerQuery("EventsName = foreach Events generate name;");
        Iterator<Tuple> sessions = pig.openIterator("EventsName");
        // BUG FIX: the original called sessions.next().toString().equals(...)
        // and discarded every boolean result, so the test asserted nothing.
        // The discarded literals ("(1)", "(2)", ...) also matched the pruned
        // 'val' column rather than the projected 'name' column. Assert the
        // 'name' values actually produced by "generate name".
        Assert.assertEquals("(peter)", sessions.next().toString());
        Assert.assertEquals("(samir)", sessions.next().toString());
        Assert.assertEquals("(michael)", sessions.next().toString());
        Assert.assertEquals("(peter)", sessions.next().toString());
        Assert.assertEquals("(peter)", sessions.next().toString());
        Assert.assertEquals("(samir)", sessions.next().toString());
        Assert.assertEquals("(john)", sessions.next().toString());
        Assert.assertFalse(sessions.hasNext());
    }

    @Test
    public void testPigStorageSchemaHeaderDelimiter() throws Exception {
        pigContext.connect();
        String query = "a = LOAD '" + datadir + "originput' using PigStorage(',') " +
                "as (foo:chararray, bar:int);";
        pig.registerQuery(query);
        pig.registerQuery("STORE a into '" + datadir + "dout' using PigStorage('#', '-schema');");
        pig.registerQuery("STORE a into '" + datadir + "eout' using PigStorage('\\t', '-schema');");

        // The .pig_header file must use the same field delimiter as the data.
        String outPath = FileLocalizer.fullPath(datadir + "dout/.pig_header",
                pig.getPigContext());
        Assert.assertTrue(FileLocalizer.fileExists(outPath, pig.getPigContext()));
        String[] header = Util.readOutput(pig.getPigContext(), outPath);
        Assert.assertArrayEquals("Headers are not the same.",
                new String[] {"foo#bar"}, header);

        outPath = FileLocalizer.fullPath(datadir + "eout/.pig_header",
                pig.getPigContext());
        Assert.assertTrue(FileLocalizer.fileExists(outPath, pig.getPigContext()));
        header = Util.readOutput(pig.getPigContext(), outPath);
        Assert.assertArrayEquals("Headers are not the same.",
                new String[] {"foo\tbar"}, header);
    }

    /** Creates an empty local input file at the given path. */
    private void putInputFile(String filename) throws IOException {
        Util.createLocalInputFile(filename, new String[] {});
    }

    /** Serializes the given schema as JSON into the given schema file. */
    private void putSchemaFile(String schemaFilename, ResourceSchema testSchema)
            throws JsonGenerationException, JsonMappingException, IOException {
        new ObjectMapper().writeValue(new File(schemaFilename), testSchema);
    }

    @Test
    public void testPigStorageSchemaSearch() throws Exception {
        String globtestdir = "build/test/tmpglobbingdata/";
        ResourceSchema testSchema = new ResourceSchema(Utils.parseSchema("a0:chararray"));
        PigStorage pigStorage = new PigStorage();
        pigContext.connect();
        try {
            Util.deleteDirectory(new File(datadir));
            pig.mkdirs(globtestdir + "a");
            pig.mkdirs(globtestdir + "a/a0");
            putInputFile(globtestdir + "a/a0/input");
            pig.mkdirs(globtestdir + "a/b0");
            putInputFile(globtestdir + "a/b0/input");
            pig.mkdirs(globtestdir + "b");
        } catch (IOException ignored) {
            // best effort - directories may already exist
        }

        // if schema file is not found, schema is null
        ResourceSchema schema = pigStorage.getSchema(globtestdir, newJob());
        assertNull(schema);

        // .pig_schema.input is along with the input file
        putSchemaFile(globtestdir + "a/a0/.pig_schema.input", testSchema);
        schema = pigStorage.getSchema(globtestdir + "a/a0/*", newJob());
        Assert.assertTrue(ResourceSchema.equals(schema, testSchema));
        new File(globtestdir + "a/a0/.pig_schema.input").delete();

        // if .pig_schema is in the input directory
        putSchemaFile(globtestdir + "a/a0/.pig_schema", testSchema);
        schema = pigStorage.getSchema(globtestdir + "a/a0", newJob());
        Assert.assertTrue(ResourceSchema.equals(schema, testSchema));
        new File(globtestdir + "a/a0/.pig_schema").delete();

        // .pig_schema in one of globStatus returned directory
        putSchemaFile(globtestdir + "a/.pig_schema", testSchema);
        schema = pigStorage.getSchema(globtestdir + "*", newJob());
        Assert.assertTrue(ResourceSchema.equals(schema, testSchema));
        new File(globtestdir + "a/.pig_schema").delete();

        putSchemaFile(globtestdir + "b/.pig_schema", testSchema);
        schema = pigStorage.getSchema(globtestdir + "*", newJob());
        Assert.assertTrue(ResourceSchema.equals(schema, testSchema));
        new File(globtestdir + "b/.pig_schema").delete();

        // if .pig_schema is deep in the globbing, it will not get used
        putSchemaFile(globtestdir + "a/a0/.pig_schema", testSchema);
        schema = pigStorage.getSchema(globtestdir + "*", newJob());
        assertNull(schema);
        putSchemaFile(globtestdir + "a/.pig_schema", testSchema);
        schema = pigStorage.getSchema(globtestdir + "*", newJob());
        Assert.assertTrue(ResourceSchema.equals(schema, testSchema));
        new File(globtestdir + "a/a0/.pig_schema").delete();
        new File(globtestdir + "a/.pig_schema").delete();
    }
}
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.server.dreams; import static android.Manifest.permission.BIND_DREAM_SERVICE; import com.android.internal.util.DumpUtils; import com.android.server.FgThread; import com.android.server.SystemService; import android.Manifest; import android.app.ActivityManager; import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.content.pm.ServiceInfo; import android.os.Binder; import android.os.Build; import android.os.Handler; import android.os.IBinder; import android.os.Looper; import android.os.PowerManager; import android.os.PowerManagerInternal; import android.os.SystemClock; import android.os.SystemProperties; import android.os.UserHandle; import android.provider.Settings; import android.service.dreams.DreamManagerInternal; import android.service.dreams.DreamService; import android.service.dreams.IDreamManager; import android.text.TextUtils; import android.util.Slog; import android.view.Display; import java.io.FileDescriptor; import java.io.PrintWriter; import java.util.ArrayList; import java.util.List; import libcore.util.Objects; /** * Service api for managing dreams. 
* * @hide */ public final class DreamManagerService extends SystemService { private static final boolean DEBUG = false; private static final String TAG = "DreamManagerService"; private final Object mLock = new Object(); private final Context mContext; private final DreamHandler mHandler; private final DreamController mController; private final PowerManager mPowerManager; private final PowerManagerInternal mPowerManagerInternal; private final PowerManager.WakeLock mDozeWakeLock; private Binder mCurrentDreamToken; private ComponentName mCurrentDreamName; private int mCurrentDreamUserId; private boolean mCurrentDreamIsTest; private boolean mCurrentDreamCanDoze; private boolean mCurrentDreamIsDozing; private boolean mCurrentDreamIsWaking; private int mCurrentDreamDozeScreenState = Display.STATE_UNKNOWN; private int mCurrentDreamDozeScreenBrightness = PowerManager.BRIGHTNESS_DEFAULT; public DreamManagerService(Context context) { super(context); mContext = context; mHandler = new DreamHandler(FgThread.get().getLooper()); mController = new DreamController(context, mHandler, mControllerListener); mPowerManager = (PowerManager)context.getSystemService(Context.POWER_SERVICE); mPowerManagerInternal = getLocalService(PowerManagerInternal.class); mDozeWakeLock = mPowerManager.newWakeLock(PowerManager.DOZE_WAKE_LOCK, TAG); } @Override public void onStart() { publishBinderService(DreamService.DREAM_SERVICE, new BinderService()); publishLocalService(DreamManagerInternal.class, new LocalService()); } @Override public void onBootPhase(int phase) { if (phase == SystemService.PHASE_THIRD_PARTY_APPS_CAN_START) { if (Build.IS_DEBUGGABLE) { SystemProperties.addChangeCallback(mSystemPropertiesChanged); } mContext.registerReceiver(new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { synchronized (mLock) { stopDreamLocked(false /*immediate*/); } } }, new IntentFilter(Intent.ACTION_USER_SWITCHED), null, mHandler); } } private void 
dumpInternal(PrintWriter pw) {
    // Emits the "dumpsys dreams" report: current dream bookkeeping first, then
    // the controller state.
    pw.println("DREAM MANAGER (dumpsys dreams)");
    pw.println();
    // NOTE(review): these fields are read without holding mLock — presumably
    // acceptable for a best-effort diagnostic dump; confirm.
    pw.println("mCurrentDreamToken=" + mCurrentDreamToken);
    pw.println("mCurrentDreamName=" + mCurrentDreamName);
    pw.println("mCurrentDreamUserId=" + mCurrentDreamUserId);
    pw.println("mCurrentDreamIsTest=" + mCurrentDreamIsTest);
    pw.println("mCurrentDreamCanDoze=" + mCurrentDreamCanDoze);
    pw.println("mCurrentDreamIsDozing=" + mCurrentDreamIsDozing);
    pw.println("mCurrentDreamIsWaking=" + mCurrentDreamIsWaking);
    pw.println("mCurrentDreamDozeScreenState="
            + Display.stateToString(mCurrentDreamDozeScreenState));
    pw.println("mCurrentDreamDozeScreenBrightness=" + mCurrentDreamDozeScreenBrightness);
    pw.println("getDozeComponent()=" + getDozeComponent());
    pw.println();
    // Controller state is owned by the handler thread, so dump it there
    // asynchronously. The trailing 200 is presumably a timeout in ms so a
    // wedged handler cannot hang dumpsys — confirm against DumpUtils.dumpAsync.
    DumpUtils.dumpAsync(mHandler, new DumpUtils.Dump() {
        @Override
        public void dump(PrintWriter pw, String prefix) {
            mController.dump(pw);
        }
    }, pw, "", 200);
}

/** True while a real (non-test) dream is running and has not begun waking. */
private boolean isDreamingInternal() {
    synchronized (mLock) {
        return mCurrentDreamToken != null && !mCurrentDreamIsTest
                && !mCurrentDreamIsWaking;
    }
}

/** True while the current dream is in the dozing state. */
private boolean isDozingInternal() {
    synchronized (mLock) {
        return mCurrentDreamIsDozing;
    }
}

/** Asks the power manager to nap, which may call back into startDream(). */
private void requestDreamInternal() {
    // Ask the power manager to nap. It will eventually call back into
    // startDream() if/when it is appropriate to start dreaming.
    // Because napping could cause the screen to turn off immediately if the dream
    // cannot be started, we keep one eye open and gently poke user activity.
    long time = SystemClock.uptimeMillis();
    mPowerManager.userActivity(time, true /*noChangeLights*/);
    mPowerManager.nap(time);
}

/** Ends the current dream gently (non-immediate) and counts as user activity. */
private void requestAwakenInternal() {
    // Treat an explicit request to awaken as user activity so that the
    // device doesn't immediately go to sleep if the timeout expired,
    // for example when being undocked.
    long time = SystemClock.uptimeMillis();
    mPowerManager.userActivity(time, false /*noChangeLights*/);
    stopDreamInternal(false /*immediate*/);
}

/**
 * Called when a dream reports that it finished on its own; only stops the dream
 * if the supplied token still identifies the current dream.
 */
private void finishSelfInternal(IBinder token, boolean immediate) {
    if (DEBUG) {
        Slog.d(TAG, "Dream finished: " + token + ", immediate=" + immediate);
    }

    // Note that a dream finishing and self-terminating is not
    // itself considered user activity.  If the dream is ending because
    // the user interacted with the device then user activity will already
    // have been poked so the device will stay awake a bit longer.
    // If the dream is ending on its own for other reasons and no wake
    // locks are held and the user activity timeout has expired then the
    // device may simply go to sleep.
    synchronized (mLock) {
        if (mCurrentDreamToken == token) {
            stopDreamLocked(immediate);
        }
    }
}

/** Starts the given component as a test dream (no dozing) for the given user. */
private void testDreamInternal(ComponentName dream, int userId) {
    synchronized (mLock) {
        startDreamLocked(dream, true /*isTest*/, false /*canDoze*/, userId);
    }
}

/** Starts the configured dream (or doze component) for the current user, if any. */
private void startDreamInternal(boolean doze) {
    // Resolve the component outside the lock; only the state change is locked.
    final int userId = ActivityManager.getCurrentUser();
    final ComponentName dream = chooseDreamForUser(doze, userId);
    if (dream != null) {
        synchronized (mLock) {
            startDreamLocked(dream, false /*isTest*/, doze, userId);
        }
    }
}

/** Stops the current dream, immediately or gently. */
private void stopDreamInternal(boolean immediate) {
    synchronized (mLock) {
        stopDreamLocked(immediate);
    }
}

/**
 * Transitions the current dream into dozing, recording the requested screen
 * state/brightness and handing them to the power manager. Ignored unless the
 * token matches the current dream and that dream was started with canDoze.
 */
private void startDozingInternal(IBinder token, int screenState,
        int screenBrightness) {
    if (DEBUG) {
        Slog.d(TAG, "Dream requested to start dozing: " + token
                + ", screenState=" + screenState
                + ", screenBrightness=" + screenBrightness);
    }

    synchronized (mLock) {
        if (mCurrentDreamToken == token && mCurrentDreamCanDoze) {
            mCurrentDreamDozeScreenState = screenState;
            mCurrentDreamDozeScreenBrightness = screenBrightness;
            mPowerManagerInternal.setDozeOverrideFromDreamManager(
                    screenState, screenBrightness);
            // Acquire the doze wake lock only on the first transition in.
            if (!mCurrentDreamIsDozing) {
                mCurrentDreamIsDozing = true;
                mDozeWakeLock.acquire();
            }
        }
    }
}

/** Leaves dozing: releases the doze wake lock and clears the power override. */
private void stopDozingInternal(IBinder token) {
    if (DEBUG) {
        Slog.d(TAG, "Dream requested to stop dozing: " + token);
    }

    synchronized (mLock) {
        if (mCurrentDreamToken == token && mCurrentDreamIsDozing) {
            mCurrentDreamIsDozing = false;
            mDozeWakeLock.release();
            mPowerManagerInternal.setDozeOverrideFromDreamManager(
                    Display.STATE_UNKNOWN, PowerManager.BRIGHTNESS_DEFAULT);
        }
    }
}

/**
 * Picks the dream component to launch: the (validated) doze component when
 * dozing, otherwise the first configured screensaver component, or null.
 */
private ComponentName chooseDreamForUser(boolean doze, int userId) {
    if (doze) {
        ComponentName dozeComponent = getDozeComponent(userId);
        return validateDream(dozeComponent) ? dozeComponent : null;
    }
    ComponentName[] dreams = getDreamComponentsForUser(userId);
    return dreams != null && dreams.length != 0 ? dreams[0] : null;
}

/**
 * A dream component is valid if it resolves to a service and, for L+ apps,
 * declares the BIND_DREAM_SERVICE permission on that service.
 */
private boolean validateDream(ComponentName component) {
    if (component == null) return false;
    final ServiceInfo serviceInfo = getServiceInfo(component);
    if (serviceInfo == null) {
        Slog.w(TAG, "Dream " + component + " does not exist");
        return false;
    } else if (serviceInfo.applicationInfo.targetSdkVersion >= Build.VERSION_CODES.LOLLIPOP
            && !BIND_DREAM_SERVICE.equals(serviceInfo.permission)) {
        Slog.w(TAG, "Dream " + component
                + " is not available because its manifest is missing the "
                + BIND_DREAM_SERVICE
                + " permission on the dream service declaration.");
        return false;
    }
    return true;
}

/**
 * Reads the user's configured screensaver components from secure settings,
 * dropping invalid entries and falling back to the default dream if none
 * survive. Never returns null (may return an empty array).
 */
private ComponentName[] getDreamComponentsForUser(int userId) {
    String names = Settings.Secure.getStringForUser(mContext.getContentResolver(),
            Settings.Secure.SCREENSAVER_COMPONENTS, userId);
    ComponentName[] components = componentsFromString(names);

    // first, ensure components point to valid services
    List<ComponentName> validComponents = new ArrayList<ComponentName>();
    if (components != null) {
        for (ComponentName component : components) {
            if (validateDream(component)) {
                validComponents.add(component);
            }
        }
    }

    // fallback to the default dream component if necessary
    if (validComponents.isEmpty()) {
        ComponentName defaultDream = getDefaultDreamComponentForUser(userId);
        if (defaultDream != null) {
            Slog.w(TAG, "Falling back to default dream " + defaultDream);
            validComponents.add(defaultDream);
        }
    }
    return validComponents.toArray(new ComponentName[validComponents.size()]);
}

/** Persists the user's screensaver component list to secure settings. */
private void setDreamComponentsForUser(int userId, ComponentName[] componentNames) {
    Settings.Secure.putStringForUser(mContext.getContentResolver(),
            Settings.Secure.SCREENSAVER_COMPONENTS,
            componentsToString(componentNames),
            userId);
}

/** Reads the default dream component from secure settings, or null if unset. */
private ComponentName getDefaultDreamComponentForUser(int userId) {
    String name = Settings.Secure.getStringForUser(mContext.getContentResolver(),
            Settings.Secure.SCREENSAVER_DEFAULT_COMPONENT,
            userId);
    return name == null ? null : ComponentName.unflattenFromString(name);
}

/** Doze component for the current foreground user. */
private ComponentName getDozeComponent() {
    return getDozeComponent(ActivityManager.getCurrentUser());
}

/**
 * Resolves the doze component for a user, honoring the debug system property
 * on debuggable builds and the DOZE_ENABLED secure setting. Returns null when
 * no component is configured or dozing is disabled.
 */
private ComponentName getDozeComponent(int userId) {
    // Read the component from a system property to facilitate debugging.
    // Note that for production devices, the dream should actually be declared in
    // a config.xml resource.
    String name = Build.IS_DEBUGGABLE
            ? SystemProperties.get("debug.doze.component") : null;
    if (TextUtils.isEmpty(name)) {
        // Read the component from a config.xml resource.
        // The value should be specified in a resource overlay for the product.
        name = mContext.getResources().getString(
                com.android.internal.R.string.config_dozeComponent);
    }
    boolean enabled = Settings.Secure.getIntForUser(mContext.getContentResolver(),
            Settings.Secure.DOZE_ENABLED,
            mContext.getResources().getBoolean(
                    com.android.internal.R.bool.config_doze_enabled_by_default) ? 1 : 0,
            userId) != 0;
    return TextUtils.isEmpty(name) || !enabled
            ? null : ComponentName.unflattenFromString(name);
}

/** Looks up ServiceInfo for the component, or null if it cannot be resolved. */
private ServiceInfo getServiceInfo(ComponentName name) {
    try {
        return name != null ? mContext.getPackageManager().getServiceInfo(name, 0) : null;
    } catch (NameNotFoundException e) {
        return null;
    }
}

/**
 * Starts a dream, replacing any current one. No-op if the exact same dream
 * configuration is already active. Must be called with mLock held; the actual
 * controller work is posted to the handler thread.
 */
private void startDreamLocked(final ComponentName name,
        final boolean isTest, final boolean canDoze, final int userId) {
    if (Objects.equal(mCurrentDreamName, name)
            && mCurrentDreamIsTest == isTest
            && mCurrentDreamCanDoze == canDoze
            && mCurrentDreamUserId == userId) {
        return;
    }

    // Tear down any previous dream synchronously before recording new state.
    stopDreamLocked(true /*immediate*/);

    Slog.i(TAG, "Entering dreamland.");

    final Binder newToken = new Binder();
    mCurrentDreamToken = newToken;
    mCurrentDreamName = name;
    mCurrentDreamIsTest = isTest;
    mCurrentDreamCanDoze = canDoze;
    mCurrentDreamUserId = userId;

    mHandler.post(new Runnable() {
        @Override
        public void run() {
            mController.startDream(newToken, name, isTest, canDoze, userId);
        }
    });
}

/**
 * Stops the current dream. Immediate stops clean up state right away; gentle
 * stops just flag mCurrentDreamIsWaking and let the controller finish. Must be
 * called with mLock held.
 */
private void stopDreamLocked(final boolean immediate) {
    if (mCurrentDreamToken != null) {
        if (immediate) {
            Slog.i(TAG, "Leaving dreamland.");
            cleanupDreamLocked();
        } else if (mCurrentDreamIsWaking) {
            return; // already waking
        } else {
            Slog.i(TAG, "Gently waking up from dream.");
            mCurrentDreamIsWaking = true;
        }

        mHandler.post(new Runnable() {
            @Override
            public void run() {
                mController.stopDream(immediate);
            }
        });
    }
}

/** Resets all per-dream state and releases the doze wake lock if held. */
private void cleanupDreamLocked() {
    mCurrentDreamToken = null;
    mCurrentDreamName = null;
    mCurrentDreamIsTest = false;
    mCurrentDreamCanDoze = false;
    mCurrentDreamUserId = 0;
    mCurrentDreamIsWaking = false;
    if (mCurrentDreamIsDozing) {
        mCurrentDreamIsDozing = false;
        mDozeWakeLock.release();
    }
    mCurrentDreamDozeScreenState = Display.STATE_UNKNOWN;
    mCurrentDreamDozeScreenBrightness = PowerManager.BRIGHTNESS_DEFAULT;
}

/** Throws SecurityException unless the caller holds the given permission. */
private void checkPermission(String permission) {
    if (mContext.checkCallingOrSelfPermission(permission)
            != PackageManager.PERMISSION_GRANTED) {
        throw new SecurityException("Access denied to process: " + Binder.getCallingPid()
                + ", must have permission " + permission);
    }
}

/** Serializes components into the comma-separated settings format. */
private static String componentsToString(ComponentName[] componentNames) {
    StringBuilder names = new StringBuilder();
    if (componentNames != null) {
        for (ComponentName componentName : componentNames) {
            if (names.length() > 0) {
                names.append(',');
            }
            names.append(componentName.flattenToString());
        }
    }
    return names.toString();
}

/**
 * Parses the comma-separated settings format back into components. Returns
 * null for null input; individual entries that fail to parse become null
 * elements in the result.
 */
private static ComponentName[] componentsFromString(String names) {
    if (names == null) {
        return null;
    }
    String[] namesArray = names.split(",");
    ComponentName[] componentNames = new ComponentName[namesArray.length];
    for (int i = 0; i < namesArray.length; i++) {
        componentNames[i] = ComponentName.unflattenFromString(namesArray[i]);
    }
    return componentNames;
}

// Cleans up local state when the controller reports that the dream stopped
// (only if the token still identifies the current dream).
private final DreamController.Listener mControllerListener = new DreamController.Listener() {
    @Override
    public void onDreamStopped(Binder token) {
        synchronized (mLock) {
            if (mCurrentDreamToken == token) {
                cleanupDreamLocked();
            }
        }
    }
};

/**
 * Handler for asynchronous operations performed by the dream manager.
 * Ensures operations to {@link DreamController} are single-threaded.
 */
private final class DreamHandler extends Handler {
    public DreamHandler(Looper looper) {
        super(looper, null, true /*async*/);
    }
}

/**
 * Binder facade exposed to apps. Every entry point checks the appropriate
 * dream permission (or documents why none is needed) and clears the calling
 * identity before delegating to the *Internal methods.
 */
private final class BinderService extends IDreamManager.Stub {
    @Override // Binder call
    protected void dump(FileDescriptor fd, PrintWriter pw, String[] args) {
        if (mContext.checkCallingOrSelfPermission(Manifest.permission.DUMP)
                != PackageManager.PERMISSION_GRANTED) {
            // NOTE(review): duplicated "from from" in this user-visible message —
            // left as-is here since a doc-only pass must not change runtime strings.
            pw.println("Permission Denial: can't dump DreamManager from from pid="
                    + Binder.getCallingPid()
                    + ", uid=" + Binder.getCallingUid());
            return;
        }
        final long ident = Binder.clearCallingIdentity();
        try {
            dumpInternal(pw);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public ComponentName[] getDreamComponents() {
        checkPermission(android.Manifest.permission.READ_DREAM_STATE);

        final int userId = UserHandle.getCallingUserId();
        final long ident = Binder.clearCallingIdentity();
        try {
            return getDreamComponentsForUser(userId);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public void setDreamComponents(ComponentName[] componentNames) {
        checkPermission(android.Manifest.permission.WRITE_DREAM_STATE);

        final int userId = UserHandle.getCallingUserId();
        final long ident = Binder.clearCallingIdentity();
        try {
            setDreamComponentsForUser(userId, componentNames);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public ComponentName getDefaultDreamComponent() {
        checkPermission(android.Manifest.permission.READ_DREAM_STATE);

        final int userId = UserHandle.getCallingUserId();
        final long ident = Binder.clearCallingIdentity();
        try {
            return getDefaultDreamComponentForUser(userId);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public boolean isDreaming() {
        checkPermission(android.Manifest.permission.READ_DREAM_STATE);

        final long ident = Binder.clearCallingIdentity();
        try {
            return isDreamingInternal();
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public boolean isDozing() {
        checkPermission(android.Manifest.permission.READ_DREAM_STATE);

        final long ident = Binder.clearCallingIdentity();
        try {
            return isDozingInternal();
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public void dream() {
        checkPermission(android.Manifest.permission.WRITE_DREAM_STATE);

        final long ident = Binder.clearCallingIdentity();
        try {
            requestDreamInternal();
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public void testDream(ComponentName dream) {
        if (dream == null) {
            throw new IllegalArgumentException("dream must not be null");
        }
        checkPermission(android.Manifest.permission.WRITE_DREAM_STATE);

        final int callingUserId = UserHandle.getCallingUserId();
        final int currentUserId = ActivityManager.getCurrentUser();
        if (callingUserId != currentUserId) {
            // This check is inherently prone to races but at least it's something.
            // NOTE(review): "different " + " user" yields a double space in the
            // log message — left unchanged by this doc-only pass.
            Slog.w(TAG, "Aborted attempt to start a test dream while a different "
                    + " user is active: callingUserId=" + callingUserId
                    + ", currentUserId=" + currentUserId);
            return;
        }
        final long ident = Binder.clearCallingIdentity();
        try {
            testDreamInternal(dream, callingUserId);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public void awaken() {
        checkPermission(android.Manifest.permission.WRITE_DREAM_STATE);

        final long ident = Binder.clearCallingIdentity();
        try {
            requestAwakenInternal();
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public void finishSelf(IBinder token, boolean immediate) {
        // Requires no permission, called by Dream from an arbitrary process.
        if (token == null) {
            throw new IllegalArgumentException("token must not be null");
        }
        final long ident = Binder.clearCallingIdentity();
        try {
            finishSelfInternal(token, immediate);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public void startDozing(IBinder token, int screenState, int screenBrightness) {
        // Requires no permission, called by Dream from an arbitrary process.
        if (token == null) {
            throw new IllegalArgumentException("token must not be null");
        }
        final long ident = Binder.clearCallingIdentity();
        try {
            startDozingInternal(token, screenState, screenBrightness);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }

    @Override // Binder call
    public void stopDozing(IBinder token) {
        // Requires no permission, called by Dream from an arbitrary process.
        if (token == null) {
            throw new IllegalArgumentException("token must not be null");
        }
        final long ident = Binder.clearCallingIdentity();
        try {
            stopDozingInternal(token);
        } finally {
            Binder.restoreCallingIdentity(ident);
        }
    }
}

/**
 * In-process facade for other system services; no permission checks because
 * callers are trusted system code.
 */
private final class LocalService extends DreamManagerInternal {
    @Override
    public void startDream(boolean doze) {
        startDreamInternal(doze);
    }

    @Override
    public void stopDream(boolean immediate) {
        stopDreamInternal(immediate);
    }

    @Override
    public boolean isDreaming() {
        return isDreamingInternal();
    }

    @Override
    public boolean isDozing() {
        return isDozingInternal();
    }
}

// Wakes the device if the debug doze-component property changed out from
// under an active doze dream.
private final Runnable mSystemPropertiesChanged = new Runnable() {
    @Override
    public void run() {
        if (DEBUG) Slog.d(TAG, "System properties changed");
        synchronized (mLock) {
            if (mCurrentDreamName != null && mCurrentDreamCanDoze
                    && !mCurrentDreamName.equals(getDozeComponent())) {
                // May have updated the doze component, wake up
                mPowerManager.wakeUp(SystemClock.uptimeMillis(),
                        "android.server.dreams:SYSPROP");
            }
        }
    }
};
}
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.tools.idea.wizard;

import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.intellij.openapi.util.io.FileUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.TreeModelListener;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.io.File;
import java.util.List;

/**
 * In-memory tree representation of a file tree. Must be created with a file tree,
 * and then additional files (which may or may not exist) may be added to the representation.
 * Can be rendered to a {@link JTree} using the {@link FileTreeCellRenderer}.
 *
 * When "hide irrelevant files" is enabled, the {@link TreeModel} accessors only
 * surface nodes that lie on a path added via {@link #addFile}/{@link #forceAddFile}
 * (i.e. nodes with isProposedFile set).
 */
public class FileTreeModel implements TreeModel {
  /**
   * Root file that this model was created with.
   */
  private File myRoot;
  /**
   * Root of the data structure representation.
   */
  private Node myRootNode;

  // When true, only nodes on proposed-file paths are exposed by the model.
  private boolean myHideIrrelevantFiles;

  public FileTreeModel(@NotNull File root, boolean hideIrrelevantFiles) {
    this(root);
    myHideIrrelevantFiles = hideIrrelevantFiles;
  }

  public FileTreeModel(@NotNull File root) {
    myRoot = root;
    // Eagerly snapshots the on-disk hierarchy; later changes on disk are not reflected.
    myRootNode = makeTree(root);
  }

  /**
   * Return the root {@link Node} of this representation.
   * Returns null when irrelevant files are hidden and no file has been proposed yet.
   */
  @Override
  public Object getRoot() {
    if (myHideIrrelevantFiles && !myRootNode.isProposedFile) {
      return null;
    }
    return myRootNode;
  }

  /**
   * Get the Nth child {@link Node} of the given parent.
   * When hiding irrelevant files, the index counts only proposed-file children.
   */
  @Override
  public Object getChild(Object parent, int index) {
    Node n = (Node)parent;
    if (!myHideIrrelevantFiles) {
      return n.children.get(index);
    }
    // Walk the children, decrementing index on each visible (proposed) child
    // until the requested visible position is reached.
    for (int i = 0; i < n.children.size(); i++) {
      Node child = n.children.get(i);
      if (child.isProposedFile && index == 0) {
        return child;
      }
      else if (child.isProposedFile) {
        index--;
      }
    }
    return null;
  }

  /**
   * Get the number of children that the given parent {@link Node} has.
   * When hiding irrelevant files, only proposed-file children are counted.
   */
  @Override
  public int getChildCount(Object parent) {
    if (!myHideIrrelevantFiles) {
      return ((Node)parent).children.size();
    }
    int count = 0;
    for (Node n : ((Node)parent).children) {
      if (n.isProposedFile) {
        count++;
      }
    }
    return count;
  }

  /**
   * Returns true iff the given {@link Node} has no children (is a leaf).
   * When hiding irrelevant files, a node with only non-proposed children is a leaf.
   */
  @Override
  public boolean isLeaf(Object node) {
    if (!myHideIrrelevantFiles) {
      return ((Node)node).children.isEmpty();
    }
    for (Node n : ((Node)node).children) {
      if (n.isProposedFile) {
        return false;
      }
    }
    return true;
  }

  @Override
  public void valueForPathChanged(TreePath path, Object newValue) {
    // Not implemented
  }

  /**
   * Returns the index of the given child inside the given parent or -1 if given node is not a child of the parent.
   * When hiding irrelevant files, the returned index is the child's position among visible children only.
   */
  @Override
  public int getIndexOfChild(Object parent, Object child) {
    if (!myHideIrrelevantFiles) {
      //noinspection SuspiciousMethodCalls
      return ((Node)parent).children.indexOf(child);
    }
    Node n = (Node)parent;
    int index = 0;
    for (int i = 0; i < n.children.size(); i++) {
      Node candidate = n.children.get(i);
      if (candidate.equals(child)) {
        return index;
      }
      // Only visible (proposed) children advance the visible index.
      if (candidate.isProposedFile) {
        index++;
      }
    }
    return -1;
  }

  @Override
  public void addTreeModelListener(TreeModelListener l) {
    // Not implemented
  }

  @Override
  public void removeTreeModelListener(TreeModelListener l) {
    // Not implemented
  }

  /**
   * Check to see if there are any conflicts (multiple files added to the same location) in the tree.
   */
  public boolean hasConflicts() {
    if (myRootNode == null) {
      return false;
    }
    return treeHasConflicts(myRootNode);
  }

  /**
   * DFS through the tree looking for conflicted nodes.
   */
  private static boolean treeHasConflicts(Node root) {
    if (root.isConflicted) {
      return true;
    }
    for (Node n : root.children) {
      if (treeHasConflicts(n)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Add the given file to the representation.
   * This is a no-op if the given path already exists within the tree.
   */
  public void addFile(@NotNull File f) {
    addFile(f, null);
  }

  /**
   * Add the given file to the representation and mark it with the given icon.
   * This is a no-op if the given path already exists within the tree.
   */
  public void addFile(@NotNull File f, @Nullable Icon ic) {
    // Absolute paths are relativized against myRoot; paths outside the root
    // yield null and are silently ignored.
    String s = f.isAbsolute() ? FileUtil.getRelativePath(myRoot, f) : f.getPath();
    if (s != null) {
      List<String> parts = Lists.newLinkedList(Splitter.on(File.separatorChar).split(s));
      makeNode(myRootNode, parts, ic, false);
    }
  }

  /**
   * Add the given file to the representation and mark it with the given icon.
   * If the path already exists within the tree it will be marked as a conflicting path.
   */
  public void forceAddFile(@NotNull File f, @Nullable Icon ic) {
    String s = f.isAbsolute() ? FileUtil.getRelativePath(myRoot, f) : f.getPath();
    if (s != null) {
      List<String> parts = Lists.newLinkedList(Splitter.on(File.separatorChar).split(s));
      makeNode(myRootNode, parts, ic, true);
    }
  }

  /**
   * Representation of a node within the tree
   */
  protected static class Node {
    // Simple file or directory name (no path separators).
    public String name;
    public List<Node> children = Lists.newLinkedList();
    // True when the node was created from an actual file during makeTree.
    public boolean existsOnDisk;
    // True when forceAddFile targeted an already-existing path.
    public boolean isConflicted;
    // True when the node lies on a path registered via addFile/forceAddFile.
    public boolean isProposedFile;
    public Icon icon;

    @Override
    public String toString() {
      return name;
    }

    /**
     * Returns true iff this node has a child with the given name.
     */
    public boolean hasChild(String name) {
      for (Node child : children) {
        if (child.name.equals(name)) {
          return true;
        }
      }
      return false;
    }

    /**
     * Returns the child with the given name or null
     */
    @Nullable
    public Node getChild(String name) {
      for (Node child : children) {
        if (child.name.equals(name)) {
          return child;
        }
      }
      return null;
    }
  }

  /**
   * Recursively build the node(s) specified in the given path hierarchy starting at the given root.
   * Mark the last node in the path with the given icon. If markConflict is set, mark the final node
   * as conflicted if it already exists.
   */
  private static void makeNode(@NotNull Node root, @NotNull List<String> path, @Nullable Icon ic, boolean markConflict) {
    // Every node touched along the way becomes part of a proposed path.
    root.isProposedFile = true;
    if (path.isEmpty()) {
      return;
    }
    String name = path.get(0);
    if (markConflict) {
      if (path.size() == 1 && root.name.equals(name)) {
        root.isConflicted = true;
        return;
      }
    }
    if (root.name.equals(name)) {
      // Continue down along already-created paths
      makeNode(root, rest(path), ic, markConflict);
    }
    else if (root.hasChild(name)) {
      // Allow paths relative to root (rather than including root explicitly)
      if (markConflict && path.size() == 1) {
        Node targetNode = root.getChild(name);
        targetNode.isConflicted = true;
        targetNode.icon = ic;
        targetNode.isProposedFile = true;
        return;
      }
      //noinspection ConstantConditions
      makeNode(root.getChild(name), rest(path), ic, markConflict);
    }
    else {
      // If this node in the path doesn't exist, then create it.
      Node n = new Node();
      n.name = name;
      root.children.add(n);
      if (path.size() == 1) {
        // If this is the end of the path, mark with the given icon
        n.icon = ic;
        n.isProposedFile = true;
      }
      else {
        // Continue down to create the rest of the path
        makeNode(n, rest(path), ic, markConflict);
      }
    }
  }

  /**
   * Populate a tree from the file hierarchy rooted at the given file.
   * Hidden files are skipped.
   */
  private static Node makeTree(@NotNull File root) {
    Node n = new Node();
    n.name = root.getName();
    n.existsOnDisk = root.exists();
    if (root.isDirectory()) {
      File[] children = root.listFiles();
      if (children != null) {
        for (File f : children) {
          if (!f.isHidden()) {
            n.children.add(makeTree(f));
          }
        }
      }
    }
    return n;
  }

  /**
   * Convenience function. Operates on a list and returns a list containing all elements but the first.
   */
  private static <T> List<T> rest(List<T> list) {
    return list.subList(1, list.size());
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    toString(sb, myRootNode);
    return sb.toString();
  }

  /**
   * DFS over the tree to build a string representation e.g. (root (child (grandchild) (grandchild)) (child))
   */
  private void toString(StringBuilder sb, Node root) {
    sb.append('(');
    sb.append(root.name);
    if (!isLeaf(root)) {
      sb.append(' ');
    }
    for (Node child : root.children) {
      toString(sb, child);
    }
    sb.append(')');
  }
}
package com.getcapacitor.plugin;

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.os.Environment;
import android.util.Base64;
import android.util.Log;

import com.getcapacitor.JSArray;
import com.getcapacitor.JSObject;
import com.getcapacitor.NativePlugin;
import com.getcapacitor.Plugin;
import com.getcapacitor.PluginCall;
import com.getcapacitor.PluginMethod;
import com.getcapacitor.PluginRequestCodes;

import org.json.JSONException;

import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

/**
 * Capacitor Filesystem plugin: exposes read/write/delete/mkdir/copy/rename
 * operations over well-known Android directories to the JS side.
 */
@NativePlugin(requestCodes = {
    PluginRequestCodes.FILESYSTEM_REQUEST_WRITE_FILE_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_WRITE_FOLDER_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_READ_FILE_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_READ_FOLDER_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_DELETE_FILE_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_DELETE_FOLDER_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_URI_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_STAT_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_RENAME_PERMISSIONS,
    PluginRequestCodes.FILESYSTEM_REQUEST_COPY_PERMISSIONS,
})
public class Filesystem extends Plugin {
  // NOTE(review): not referenced anywhere in this visible portion of the
  // class — presumably used by permission-callback code further down; verify.
  private static final String PERMISSION_DENIED_ERROR = "Unable to do file operation, user denied permission request";

  /** Maps the JS-side encoding name ("utf8"/"utf16"/"ascii") to a Charset, or null if unsupported. */
  private Charset getEncoding(String encoding) {
    if (encoding == null) {
      return null;
    }
    switch(encoding) {
      case "utf8":
        return StandardCharsets.UTF_8;
      case "utf16":
        return StandardCharsets.UTF_16;
      case "ascii":
        return StandardCharsets.US_ASCII;
    }
    return null;
  }

  /** Resolves a JS-side directory ID to the matching Android directory, or null if unknown. */
  private File getDirectory(String directory) {
    Context c = bridge.getContext();
    switch(directory) {
      case "APPLICATION":
        return c.getFilesDir();
      case "DOCUMENTS":
        return Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS);
      case "DATA":
        return c.getFilesDir();
      case "CACHE":
        return c.getCacheDir();
      case "EXTERNAL":
        return c.getExternalFilesDir(null);
      case "EXTERNAL_STORAGE":
        return Environment.getExternalStorageDirectory();
    }
    return null;
  }

  /**
   * Builds a File for the given path. With no directory ID, the path is treated
   * as a plain/file:// URI; otherwise it is resolved inside the named directory
   * (which is created if missing). Returns null for an unknown directory ID.
   */
  private File getFileObject(String path, String directory) {
    if (directory == null) {
      Uri u = Uri.parse(path);
      if (u.getScheme() == null || u.getScheme().equals("file")) {
        return new File(u.getPath());
      }
    }

    File androidDirectory = this.getDirectory(directory);

    if (androidDirectory == null) {
      return null;
    } else {
      if(!androidDirectory.exists()) {
        // NOTE(review): mkdir() only creates the last segment; mkdirs() would be
        // safer if the parent chain can be missing — confirm intent.
        androidDirectory.mkdir();
      }
    }

    return new File(androidDirectory, path);
  }

  /**
   * Opens an InputStream for the path. With no directory ID, content:// URIs go
   * through the ContentResolver and everything else is treated as a local file.
   */
  private InputStream getInputStream(String path, String directory) throws IOException {
    if (directory == null) {
      Uri u = Uri.parse(path);
      // NOTE(review): u.getScheme() may be null for bare paths, which would NPE
      // here (unlike getFileObject, which null-checks the scheme) — verify.
      if (u.getScheme().equals("content")) {
        return getContext().getContentResolver().openInputStream(u);
      } else {
        return new FileInputStream(new File(u.getPath()));
      }
    }

    File androidDirectory = this.getDirectory(directory);
    if (androidDirectory == null) {
      throw new IOException("Directory not found");
    }

    return new FileInputStream(new File(androidDirectory, path));
  }

  /** Drains the stream and decodes the bytes with the given charset name. */
  private String readFileAsString(InputStream is, String encoding) throws IOException {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    byte[] buffer = new byte[1024];
    int length = 0;
    while ((length = is.read(buffer)) != -1) {
      outputStream.write(buffer, 0, length);
    };
    return outputStream.toString(encoding);
  }

  /** Drains the stream and returns its contents Base64-encoded. */
  private String readFileAsBase64EncodedData(InputStream is) throws IOException {
    // NOTE(review): this unchecked cast will throw ClassCastException for the
    // content:// streams produced by getInputStream, which are not
    // FileInputStreams — reading from the InputStream directly would avoid it.
    FileInputStream fileInputStreamReader = (FileInputStream) is;
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
    byte[] buffer = new byte[1024];

    int c;
    while ((c = fileInputStreamReader.read(buffer)) != -1) {
      byteStream.write(buffer, 0, c);
    }
    fileInputStreamReader.close();
    // NOTE(review): the new String(...) wrapper is redundant —
    // Base64.encodeToString already returns a String.
    return new String(Base64.encodeToString(byteStream.toByteArray(), Base64.DEFAULT));
  }

  /**
   * Reads a file and resolves the call with its contents: decoded text when an
   * encoding is given, Base64 data otherwise. Requests the external-storage
   * read permission first when targeting a public directory.
   */
  @PluginMethod()
  public void readFile(PluginCall call) {
    saveCall(call);
    String file = call.getString("path");
    // NOTE(review): "data" is read but never used in this method.
    String data = call.getString("data");
    String directory = getDirectoryParameter(call);
    String encoding = call.getString("encoding");

    Charset charset = this.getEncoding(encoding);
    if(encoding != null && charset == null) {
      call.error("Unsupported encoding provided: " + encoding);
      return;
    }

    if (!isPublicDirectory(directory)
        || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_READ_FILE_PERMISSIONS, Manifest.permission.READ_EXTERNAL_STORAGE)) {
      try {
        InputStream is = getInputStream(file, directory);
        String dataStr;
        if (charset != null) {
          dataStr = readFileAsString(is, charset.name());
        } else {
          dataStr = readFileAsBase64EncodedData(is);
        }

        JSObject ret = new JSObject();
        ret.putOpt("data", dataStr);
        call.success(ret);
      } catch (FileNotFoundException ex) {
        call.error("File does not exist", ex);
      } catch (IOException ex) {
        call.error("Unable to read file", ex);
      } catch(JSONException ex) {
        call.error("Unable to return value for reading file", ex);
      }
    }
  }

  /**
   * Writes (or, via the "append" option, appends) data to a file, creating any
   * missing parent directories. With a directory ID the path is resolved inside
   * it; otherwise only file:// URIs are accepted.
   */
  @PluginMethod()
  public void writeFile(PluginCall call) {
    saveCall(call);
    String path = call.getString("path");
    String data = call.getString("data");

    if (path == null) {
      Log.e(getLogTag(), "No path or filename retrieved from call");
      call.error("NO_PATH");
      return;
    }

    if (data == null) {
      Log.e(getLogTag(), "No data retrieved from call");
      call.error("NO_DATA");
      return;
    }

    String directory = getDirectoryParameter(call);
    if (directory != null) {
      if (!isPublicDirectory(directory)
          || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_WRITE_FILE_PERMISSIONS, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
        // create directory because it might not exist
        File androidDir = getDirectory(directory);
        if (androidDir != null) {
          if (androidDir.exists() || androidDir.mkdirs()) {
            // path might include directories as well
            File fileObject = new File(androidDir, path);
            if (fileObject.getParentFile().exists() || fileObject.getParentFile().mkdirs()) {
              saveFile(call, fileObject, data);
            }
          } else {
            Log.e(getLogTag(), "Not able to create '" + directory + "'!");
            call.error("NOT_CREATED_DIR");
          }
        } else {
          Log.e(getLogTag(), "Directory ID '" + directory + "' is not supported by plugin");
          call.error("INVALID_DIR");
        }
      }
    } else {
      // check if file://
      Uri u = Uri.parse(path);
      if ("file".equals(u.getScheme())) {
        File fileObject = new File(u.getPath());
        // do not know where the file is being store so checking the permission to be secure
        // TODO to prevent permission checking we need a property from the call
        if (isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_WRITE_FILE_PERMISSIONS, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
          if (fileObject.getParentFile().exists() || fileObject.getParentFile().mkdirs()) {
            saveFile(call, fileObject, data);
          }
        }
      }
    }
  }

  /**
   * Writes the call's data into the given file: as text when an encoding is
   * supplied, otherwise as Base64-decoded binary (stripping any data-URL
   * header). Triggers a media scan for files written to public storage.
   */
  private void saveFile(PluginCall call, File file, String data) {
    String encoding = call.getString("encoding");
    boolean append = call.getBoolean("append", false);

    Charset charset = this.getEncoding(encoding);
    if (encoding != null && charset == null) {
      call.error("Unsupported encoding provided: " + encoding);
      return;
    }

    // if charset is not null assume its a plain text file the user wants to save
    boolean success = false;
    if (charset != null) {
      try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
              new FileOutputStream(file, append), charset))) {
        writer.write(data);
        success = true;
      } catch (IOException e) {
        Log.e(getLogTag(), "Creating text file '" + file.getPath() + "' with charset '" + charset + "' failed. Error: " + e.getMessage(), e);
      }
    } else {
      //remove header from dataURL
      if(data.indexOf(",") != -1) {
        data = data.split(",")[1];
      }
      try (FileOutputStream fos = new FileOutputStream(file, append)) {
        fos.write(Base64.decode(data, Base64.NO_WRAP));
        success = true;
      } catch (IOException e) {
        Log.e(getLogTag(), "Creating binary file '" + file.getPath() + "' failed. Error: " + e.getMessage(), e);
      }
    }

    if (success) {
      // update mediaStore index only if file was written to external storage
      if (isPublicDirectory(getDirectoryParameter(call))) {
        MediaScannerConnection.scanFile(getContext(), new String[] {file.getAbsolutePath()}, null, null);
      }
      Log.d(getLogTag(), "File '" + file.getAbsolutePath() + "' saved!");
      call.success();
    } else {
      call.error("FILE_NOTCREATED");
    }
  }

  /** Convenience wrapper: forces the "append" option and delegates to writeFile. */
  @PluginMethod()
  public void appendFile(PluginCall call) {
    try {
      // NOTE(review): a JSONException here is silently swallowed, which would
      // turn the append into an overwrite — worth at least logging.
      call.getData().putOpt("append", true);
    } catch(JSONException ex) {}

    this.writeFile(call);
  }

  /** Deletes a single file, erroring if it does not exist or cannot be removed. */
  @PluginMethod()
  public void deleteFile(PluginCall call) {
    saveCall(call);
    String file = call.getString("path");
    String directory = getDirectoryParameter(call);
    File fileObject = getFileObject(file, directory);

    if (!isPublicDirectory(directory)
        || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_DELETE_FILE_PERMISSIONS, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
      if (!fileObject.exists()) {
        call.error("File does not exist");
        return;
      }
      boolean deleted = fileObject.delete();
      if(deleted == false) {
        call.error("Unable to delete file");
      } else {
        call.success();
      }
    }
  }

  /** Creates a directory, optionally creating intermediate parents. */
  @PluginMethod()
  public void mkdir(PluginCall call) {
    saveCall(call);
    String path = call.getString("path");
    String directory = getDirectoryParameter(call);
    boolean intermediate = call.getBoolean("createIntermediateDirectories", false).booleanValue();

    File fileObject = getFileObject(path, directory);

    if (fileObject.exists()) {
      call.error("Directory exists");
      return;
    }

    if (!isPublicDirectory(directory)
        || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_WRITE_FOLDER_PERMISSIONS, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
      boolean created = false;
      if (intermediate) {
        created = fileObject.mkdirs();
      } else {
        created = fileObject.mkdir();
      }
      if(created == false) {
        call.error("Unable to create directory, unknown reason");
      } else {
        call.success();
      }
    }
  }

  /**
   * Removes a directory. Refuses to remove a non-empty directory unless the
   * "recursive" option is set.
   */
  @PluginMethod()
  public void rmdir(PluginCall call) {
    saveCall(call);
    String path = call.getString("path");
    String directory = getDirectoryParameter(call);
    Boolean recursive = call.getBoolean("recursive", false);

    File fileObject = getFileObject(path, directory);

    if (!isPublicDirectory(directory)
        || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_DELETE_FOLDER_PERMISSIONS, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
      if (!fileObject.exists()) {
        call.error("Directory does not exist");
        return;
      }

      if (fileObject.isDirectory() && fileObject.listFiles().length != 0 && !recursive) {
        call.error("Directory is not empty");
        return;
      }

      boolean deleted = false;

      try {
        deleteRecursively(fileObject);
        deleted = true;
      } catch (IOException ignored) {
      }

      if(deleted == false) {
        call.error("Unable to delete directory, unknown reason");
      } else {
        call.success();
      }
    }
  }

  /** Lists the entries of a directory and resolves the call with their names. */
  @PluginMethod()
  public void readdir(PluginCall call) {
    saveCall(call);
    String path = call.getString("path");
    String directory = getDirectoryParameter(call);

    File fileObject = getFileObject(path, directory);

    if (!isPublicDirectory(directory)
        || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_READ_FOLDER_PERMISSIONS, Manifest.permission.READ_EXTERNAL_STORAGE)) {
      if (fileObject != null && fileObject.exists()) {
        String[] files = fileObject.list();

        JSObject ret = new JSObject();
        ret.put("files", JSArray.from(files));
        call.success(ret);
      } else {
        call.error("Directory does not exist");
      }
    }
  }

  /** Resolves the call with the file:// URI for the given path. */
  @PluginMethod()
  public void getUri(PluginCall call) {
    saveCall(call);
    String path = call.getString("path");
    String directory = getDirectoryParameter(call);

    File fileObject = getFileObject(path, directory);

    if (!isPublicDirectory(directory)
        || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_URI_PERMISSIONS, Manifest.permission.READ_EXTERNAL_STORAGE)) {
      JSObject data = new JSObject();
      data.put("uri", Uri.fromFile(fileObject).toString());
      call.success(data);
    }
  }

  /** Resolves the call with type/size/mtime/uri metadata for the given path (ctime is always null on Android). */
  @PluginMethod()
  public void stat(PluginCall call) {
    saveCall(call);
    String path = call.getString("path");
    String directory = getDirectoryParameter(call);

    File fileObject = getFileObject(path, directory);

    if (!isPublicDirectory(directory)
        || isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_STAT_PERMISSIONS, Manifest.permission.READ_EXTERNAL_STORAGE)) {
      if (!fileObject.exists()) {
        call.error("File does not exist");
        return;
      }

      JSObject data = new JSObject();
      data.put("type", fileObject.isDirectory() ? "directory" : "file");
      data.put("size", fileObject.length());
      data.put("ctime", null);
      data.put("mtime", fileObject.lastModified());
      data.put("uri", Uri.fromFile(fileObject).toString());
      call.success(data);
    }
  }

  /**
   * Helper function to recursively delete a directory
   *
   * @param file The file or directory to recursively delete
   * @throws IOException
   */
  private static void deleteRecursively(File file) throws IOException {
    if (file.isFile()) {
      file.delete();
      return;
    }

    for (File f : file.listFiles()) {
      deleteRecursively(f);
    }

    file.delete();
  }

  /**
   * Helper function to recursively copy a directory structure (or just a file)
   *
   * @param src The source location
   * @param dst The destination location
   * @throws IOException
   */
  private static void copyRecursively(File src, File dst) throws IOException {
    if (src.isDirectory()) {
      dst.mkdir();

      for (String file : src.list()) {
        copyRecursively(new File(src, file), new File(dst, file));
      }

      return;
    }

    if (!dst.getParentFile().exists()) {
      dst.getParentFile().mkdirs();
    }

    if (!dst.exists()) {
      dst.createNewFile();
    }

    try (FileChannel source = new FileInputStream(src).getChannel();
         FileChannel destination = new FileOutputStream(dst).getChannel()) {
      destination.transferFrom(source, 0, source.size());
    }
  }

  /**
   * Shared implementation for rename/copy: validates source and destination,
   * requests the storage permission when a public directory is involved, then
   * either renames in place or copies recursively.
   */
  private void _copy(PluginCall call, boolean doRename) {
    saveCall(call);
    String from = call.getString("from");
    String to = call.getString("to");
    String directory = call.getString("directory");
    String toDirectory = call.getString("toDirectory");

    // The destination directory defaults to the source directory.
    if (toDirectory == null) {
      toDirectory = directory;
    }

    if (from == null || from.isEmpty() || to == null || to.isEmpty()) {
      call.error("Both to and from must be provided");
      return;
    }

    File fromObject = getFileObject(from, directory);
    File toObject = getFileObject(to, toDirectory);

    assert fromObject != null;
    assert toObject != null;

    // Copying a file onto itself is a successful no-op.
    if (toObject.equals(fromObject)) {
      call.success();
      return;
    }

    if (!fromObject.exists()) {
      call.error("The source object does not exist");
      return;
    }

    if (toObject.getParentFile().isFile()) {
      call.error("The parent object of the destination is a file");
      return;
    }

    if (!toObject.getParentFile().exists()) {
      call.error("The parent object of the destination does not exist");
      return;
    }

    if (isPublicDirectory(directory) || isPublicDirectory(toDirectory)) {
      if (doRename) {
        if (!isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_RENAME_PERMISSIONS, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
          return;
        }
      } else {
        if (!isStoragePermissionGranted(PluginRequestCodes.FILESYSTEM_REQUEST_COPY_PERMISSIONS, Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
          return;
        }
      }
    }

    if (toObject.isDirectory()) {
      call.error("Cannot overwrite a directory");
      return;
    }

    // Remove any existing destination file before the move/copy.
    toObject.delete();

    assert fromObject != null;
    boolean modified = false;

    if (doRename) {
      modified = fromObject.renameTo(toObject);
    } else {
      try {
        copyRecursively(fromObject, toObject);
        modified = true;
      } catch (IOException ignored) {
      }
    }

    if (!modified) {
      call.error("Unable to perform action, unknown reason");
      return;
    }

    call.success();
  }

  /** Renames (moves) a file or directory. */
  @PluginMethod()
  public void rename(PluginCall call) {
    this._copy(call, true);
  }

  @PluginMethod()
  public void
copy(PluginCall call) { this._copy(call, false); } /** * Checks the the given permission and requests them if they are not already granted. * @param permissionRequestCode the request code see {@link PluginRequestCodes} * @param permission the permission string * @return Returns true if the permission is granted and false if it is denied. */ private boolean isStoragePermissionGranted(int permissionRequestCode, String permission) { if (hasPermission(permission)) { Log.v(getLogTag(),"Permission '" + permission + "' is granted"); return true; } else { Log.v(getLogTag(),"Permission '" + permission + "' denied. Asking user for it."); pluginRequestPermissions(new String[] {permission}, permissionRequestCode); return false; } } /** * Reads the directory parameter from the plugin call * @param call the plugin call */ private String getDirectoryParameter(PluginCall call) { return call.getString("directory"); } /** * True if the given directory string is a public storage directory, which is accessible by the user or other apps. * @param directory the directory string. 
*/ private boolean isPublicDirectory(String directory) { return "DOCUMENTS".equals(directory) || "EXTERNAL_STORAGE".equals(directory); } @Override protected void handleRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { super.handleRequestPermissionsResult(requestCode, permissions, grantResults); Log.d(getLogTag(),"handling request perms result"); if (getSavedCall() == null) { Log.d(getLogTag(),"No stored plugin call for permissions request result"); return; } PluginCall savedCall = getSavedCall(); for (int i = 0; i < grantResults.length; i++) { int result = grantResults[i]; String perm = permissions[i]; if(result == PackageManager.PERMISSION_DENIED) { Log.d(getLogTag(), "User denied storage permission: " + perm); savedCall.error(PERMISSION_DENIED_ERROR); this.freeSavedCall(); return; } } if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_WRITE_FILE_PERMISSIONS) { this.writeFile(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_WRITE_FOLDER_PERMISSIONS) { this.mkdir(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_READ_FILE_PERMISSIONS) { this.readFile(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_READ_FOLDER_PERMISSIONS) { this.readdir(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_DELETE_FILE_PERMISSIONS) { this.deleteFile(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_DELETE_FOLDER_PERMISSIONS) { this.rmdir(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_URI_PERMISSIONS) { this.getUri(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_STAT_PERMISSIONS) { this.stat(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_RENAME_PERMISSIONS) { this.rename(savedCall); } else if (requestCode == PluginRequestCodes.FILESYSTEM_REQUEST_COPY_PERMISSIONS) { this.copy(savedCall); } this.freeSavedCall(); } }
/*
 * Copyright 2017 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.domain.materials.mercurial;

import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.Revision;
import com.thoughtworks.go.util.command.CommandLine;
import com.thoughtworks.go.util.command.InMemoryStreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import com.thoughtworks.go.util.command.UrlArgument;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.List;

import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * Integration-style tests for {@link HgCommand}. Each test restores a real
 * Mercurial repository from a bundle fixture into a temporary "server" folder,
 * clones it into a "client" working copy, and drives {@link HgCommand}
 * against it by shelling out to the {@code hg} executable.
 */
public class HgCommandTest {
    @Rule
    public final TemporaryFolder temporaryFolder = new TemporaryFolder();

    private File serverRepo;
    private File clientRepo;
    private HgCommand hgCommand;
    private InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer();
    private File workingDirectory;

    // Changeset hashes known to exist in hgrepo.hgbundle, oldest first.
    private static final String REVISION_0 = "b61d12de515d82d3a377ae3aae6e8abe516a2651";
    private static final String REVISION_1 = "35ff2159f303ecf986b3650fc4299a6ffe5a14e1";
    private static final String REVISION_2 = "ca3ebb67f527c0ad7ed26b789056823d8b9af23f";
    private File secondBranchWorkingCopy;

    /**
     * Restores the server repo from the bundle fixture and clones the default
     * branch into the client working copy used by most tests.
     */
    @Before
    public void setUp() throws IOException {
        serverRepo = temporaryFolder.newFolder("testHgServerRepo");
        clientRepo = temporaryFolder.newFolder("testHgClientRepo");
        secondBranchWorkingCopy = temporaryFolder.newFolder("second");
        setUpServerRepoFromHgBundle(serverRepo, new File("../common/src/test/resources/data/hgrepo.hgbundle"));
        workingDirectory = new File(clientRepo.getPath());
        hgCommand = new HgCommand(null, workingDirectory, "default", serverRepo.getAbsolutePath(), null);
        hgCommand.clone(outputStreamConsumer, new UrlArgument(serverRepo.getAbsolutePath()));
    }

    @Test
    public void shouldCloneFromRemoteRepo() {
        assertThat(clientRepo.listFiles().length > 0, is(true));
    }

    @Test
    public void shouldGetLatestModifications() throws Exception {
        List<Modification> actual = hgCommand.latestOneModificationAsModifications();
        assertThat(actual.size(), is(1));
        final Modification modification = actual.get(0);
        assertThat(modification.getComment(), is("test"));
        assertThat(modification.getUserName(), is("cruise"));
        assertThat(modification.getModifiedFiles().size(), is(1));
    }

    @Test
    public void shouldNotIncludeCommitFromAnotherBranchInGetLatestModifications() throws Exception {
        Modification lastCommit = hgCommand.latestOneModificationAsModifications().get(0);
        makeACommitToSecondBranch();
        hg(workingDirectory, "pull").runOrBomb(null);
        Modification actual = hgCommand.latestOneModificationAsModifications().get(0);
        assertThat(actual, is(lastCommit));
        assertThat(actual.getComment(), is(lastCommit.getComment()));
    }

    @Test
    public void shouldGetModifications() throws Exception {
        List<Modification> actual = hgCommand.modificationsSince(new StringRevision(REVISION_0));
        assertThat(actual.size(), is(2));
        // newest first
        assertThat(actual.get(0).getRevision(), is(REVISION_2));
        assertThat(actual.get(1).getRevision(), is(REVISION_1));
    }

    @Test
    public void shouldNotGetModificationsFromOtherBranches() throws Exception {
        makeACommitToSecondBranch();
        hg(workingDirectory, "pull").runOrBomb(null);
        List<Modification> actual = hgCommand.modificationsSince(new StringRevision(REVISION_0));
        assertThat(actual.size(), is(2));
        assertThat(actual.get(0).getRevision(), is(REVISION_2));
        assertThat(actual.get(1).getRevision(), is(REVISION_1));
    }

    @Test
    public void shouldUpdateToSpecificRevision() {
        InMemoryStreamConsumer output = ProcessOutputStreamConsumer.inMemoryConsumer();
        assertThat(output.getStdOut(), is(""));
        File newFile = new File(clientRepo, "test.txt");
        assertThat(newFile.exists(), is(false));
        Revision revision = createNewFileAndCheckIn(serverRepo);
        hgCommand.updateTo(revision, output);
        assertThat(output.getStdOut(), is(not("")));
        assertThat(newFile.exists(), is(true));
    }

    @Test
    public void shouldUpdateToSpecificRevisionOnGivenBranch() {
        makeACommitToSecondBranch();
        InMemoryStreamConsumer output = ProcessOutputStreamConsumer.inMemoryConsumer();
        File newFile = new File(workingDirectory, "test.txt");
        hgCommand.updateTo(new StringRevision("tip"), output);
        // "tip" of the default branch must not pick up the second-branch commit
        assertThat(newFile.exists(), is(false));
    }

    @Test(expected = RuntimeException.class)
    public void shouldThrowExceptionIfUpdateFails() throws Exception {
        InMemoryStreamConsumer output = ProcessOutputStreamConsumer.inMemoryConsumer();
        // delete repository in order to fail the hg pull command
        assertThat(FileUtils.deleteQuietly(serverRepo), is(true));
        // now hg pull will fail and throw an exception
        hgCommand.updateTo(new StringRevision("tip"), output);
    }

    @Test
    public void shouldGetWorkingUrl() {
        String workingUrl = hgCommand.workingRepositoryUrl().outputAsString();
        assertThat(workingUrl, is(serverRepo.getAbsolutePath()));
    }

    @Test(expected = RuntimeException.class)
    public void shouldThrowExceptionForBadConnection() throws Exception {
        String url = "http://not-exists";
        HgCommand hgCommand = new HgCommand(null, null, null, null, null);
        hgCommand.checkConnection(new UrlArgument(url));
    }

    @Test
    public void shouldCloneOnlyTheSpecifiedBranchAndPointToIt() {
        String branchName = "second";
        HgCommand hg = new HgCommand(null, secondBranchWorkingCopy, branchName, serverRepo.getAbsolutePath(), null);
        hg.clone(outputStreamConsumer, new UrlArgument(serverRepo.getAbsolutePath() + "#" + branchName));
        String currentBranch = hg(secondBranchWorkingCopy, "branch").runOrBomb(null).outputAsString();
        assertThat(currentBranch, is(branchName));
        List<String> branches = hg(secondBranchWorkingCopy, "branches").runOrBomb(null).output();
        ArrayList<String> branchNames = new ArrayList<>();
        for (String branchDetails : branches) {
            // "hg branches" output is "<name> <rev>:<hash>"; keep the name only
            branchNames.add(StringUtils.split(branchDetails, " ")[0]);
        }
        assertThat(branchNames.size(), is(2));
        assertThat(branchNames.contains(branchName), is(true));
        assertThat(branchNames.contains("default"), is(true));
    }

    /**
     * Places a Mercurial store lock file in the given repository root.
     * NOTE(review): not referenced by any test in this class — candidate for removal.
     */
    private void addLockTo(File hgRepoRootDir) throws IOException {
        File lock = new File(hgRepoRootDir, ".hg/store/lock");
        FileUtils.touch(lock);
    }

    /** Builds an "hg" command line with the given arguments and working dir. */
    private CommandLine hg(File workingDir, String... arguments) {
        CommandLine hg = CommandLine.createCommandLine("hg").withArgs(arguments).withEncoding("utf-8");
        hg.setWorkingDir(workingDir);
        return hg;
    }

    /** Commits all staged changes in workingDir as user "cruise-test". */
    private void commit(String message, File workingDir) {
        CommandLine hg = hg(workingDir, "ci", "-u", "cruise-test", "-m", message);
        String[] input = new String[]{};
        hg.runOrBomb(null, input);
    }

    /** Returns the hash of the newest changeset in the server repository. */
    private Revision latestRevisionOf() {
        CommandLine hg = hg(serverRepo, "log", "--limit", "1", "--template", "{node}");
        String[] input = new String[]{};
        return new StringRevision(hg.runOrBomb(null, input).outputAsString());
    }

    /** Stages all added/removed files in workingDir ("hg addremove"). */
    private void addremove(File workingDir) {
        CommandLine hg = hg(workingDir, "addremove");
        String[] input = new String[]{};
        hg.runOrBomb(null, input);
    }

    /** Creates and commits test.txt, then pushes the current branch upstream. */
    private void createNewFileAndPushUpstream(File workingDir) {
        createNewFileAndCheckIn(workingDir);
        String branchName = hg(workingDir, "branch").runOrBomb(null).outputAsString();
        hg(workingDir, "push", "--rev", branchName).runOrBomb(null);
    }

    /**
     * Creates an empty test.txt in the given repository, commits it, and
     * returns the resulting revision.
     *
     * @throws UncheckedIOException if the file cannot be created — failing
     *         fast here beats the old behavior of printing the stack trace and
     *         letting the calling test fail later with a confusing message.
     */
    private Revision createNewFileAndCheckIn(File directory) {
        try {
            // open-and-close is enough to create an empty file
            new FileOutputStream(new File(directory, "test.txt")).close();
            addremove(directory);
            commit("created test.txt", directory);
        } catch (IOException e) {
            throw new UncheckedIOException("Could not create test.txt in " + directory, e);
        }
        return latestRevisionOf();
    }

    /** Restores a full repository from an hg bundle file via "hg clone". */
    private void setUpServerRepoFromHgBundle(File serverRepo, File hgBundleFile) {
        String[] input = new String[]{};
        CommandLine.createCommandLine("hg")
                .withArgs("clone", hgBundleFile.getAbsolutePath(), serverRepo.getAbsolutePath()).withEncoding("utf-8").runOrBomb(null, input);
    }

    /** Clones the "second" branch into its working copy and pushes a new commit to it. */
    private void makeACommitToSecondBranch() {
        HgCommand hg = new HgCommand(null, secondBranchWorkingCopy, "second", serverRepo.getAbsolutePath(), null);
        hg.clone(outputStreamConsumer, new UrlArgument(serverRepo.getAbsolutePath()));
        createNewFileAndPushUpstream(secondBranchWorkingCopy);
    }
}
/*
 * Copyright 2017 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information.
 */
package com.linkedin.kafka.cruisecontrol.analyzer;

import com.linkedin.kafka.cruisecontrol.KafkaCruiseControlUnitTestUtils;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.CpuCapacityGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.CpuUsageDistributionGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.DiskCapacityGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.DiskUsageDistributionGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.NetworkInboundCapacityGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.NetworkInboundUsageDistributionGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.NetworkOutboundCapacityGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.NetworkOutboundUsageDistributionGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.PotentialNwOutGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.RackAwareGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.ReplicaCapacityGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.ReplicaDistributionGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.goals.TopicReplicaDistributionGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.kafkaassigner.KafkaAssignerDiskUsageDistributionGoal;
import com.linkedin.kafka.cruisecontrol.analyzer.kafkaassigner.KafkaAssignerEvenRackAwareGoal;
import com.linkedin.kafka.cruisecontrol.config.KafkaCruiseControlConfig;
import com.linkedin.kafka.cruisecontrol.common.ClusterProperty;
import com.linkedin.kafka.cruisecontrol.common.RandomCluster;
import com.linkedin.kafka.cruisecontrol.common.TestConstants;
import com.linkedin.kafka.cruisecontrol.model.ClusterModel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.linkedin.kafka.cruisecontrol.analyzer.OptimizationVerifier.Verification.*;
import static org.junit.Assert.assertTrue;

/**
 * Parameterized self-healing test: generates random clusters containing dead
 * brokers and verifies that executing the configured goals improves the state.
 */
@RunWith(Parameterized.class)
public class RandomSelfHealingTest {
    private static final Logger LOG = LoggerFactory.getLogger(RandomSelfHealingTest.class);

    /**
     * Populate parameters for the {@link OptimizationVerifier}. All brokers are alive.
     *
     * @return Parameters for the {@link OptimizationVerifier}.
     */
    @Parameters(name = "{1}-{0}")
    public static Collection<Object[]> data() {
        Collection<Object[]> p = new ArrayList<>();

        // Goals ordered by priority (lower key = higher priority).
        Map<Integer, String> goalNameByPriority = new HashMap<>();
        goalNameByPriority.put(1, RackAwareGoal.class.getName());
        goalNameByPriority.put(2, ReplicaCapacityGoal.class.getName());
        goalNameByPriority.put(3, DiskCapacityGoal.class.getName());
        goalNameByPriority.put(4, NetworkInboundCapacityGoal.class.getName());
        goalNameByPriority.put(5, NetworkOutboundCapacityGoal.class.getName());
        goalNameByPriority.put(6, CpuCapacityGoal.class.getName());
        goalNameByPriority.put(7, ReplicaDistributionGoal.class.getName());
        goalNameByPriority.put(8, PotentialNwOutGoal.class.getName());
        goalNameByPriority.put(9, DiskUsageDistributionGoal.class.getName());
        goalNameByPriority.put(10, NetworkInboundUsageDistributionGoal.class.getName());
        goalNameByPriority.put(11, NetworkOutboundUsageDistributionGoal.class.getName());
        goalNameByPriority.put(12, CpuUsageDistributionGoal.class.getName());
        goalNameByPriority.put(13, TopicReplicaDistributionGoal.class.getName());

        // Separate goal set for the Kafka-assigner mode.
        Map<Integer, String> kafkaAssignerGoals = new HashMap<>();
        kafkaAssignerGoals.put(0, KafkaAssignerEvenRackAwareGoal.class.getName());
        kafkaAssignerGoals.put(1, KafkaAssignerDiskUsageDistributionGoal.class.getName());

        Properties props = KafkaCruiseControlUnitTestUtils.getKafkaCruiseControlProperties();
        props.setProperty(KafkaCruiseControlConfig.MAX_REPLICAS_PER_BROKER_CONFIG, Long.toString(2000L));
        BalancingConstraint balancingConstraint = new BalancingConstraint(new KafkaCruiseControlConfig(props));
        balancingConstraint.setResourceBalancePercentage(TestConstants.LOW_BALANCE_PERCENTAGE);
        balancingConstraint.setCapacityThreshold(TestConstants.MEDIUM_CAPACITY_THRESHOLD);

        List<OptimizationVerifier.Verification> verifications = Arrays.asList(NEW_BROKERS, DEAD_BROKERS, REGRESSION);
        List<OptimizationVerifier.Verification> kafkaAssignerVerifications = Arrays.asList(DEAD_BROKERS, REGRESSION, GOAL_VIOLATION);

        // -- TEST DECK #1: SINGLE DEAD BROKER.
        // Test: Single Goal.
        Map<ClusterProperty, Number> singleDeadBroker = new HashMap<>();
        singleDeadBroker.put(ClusterProperty.NUM_DEAD_BROKERS, 1);
        int testId = 0;
        // Each goal is exercised with/without excluded topic "T0" and with the
        // leader in/not in first position (the boolean parameter).
        for (Map.Entry<Integer, String> entry : goalNameByPriority.entrySet()) {
            p.add(params(testId++, singleDeadBroker, Collections.singletonMap(entry.getKey(), entry.getValue()),
                    balancingConstraint, Collections.emptySet(), verifications, true));
            p.add(params(testId++, singleDeadBroker, Collections.singletonMap(entry.getKey(), entry.getValue()),
                    balancingConstraint, Collections.emptySet(), verifications, false));
            p.add(params(testId++, singleDeadBroker, Collections.singletonMap(entry.getKey(), entry.getValue()),
                    balancingConstraint, Collections.singleton("T0"), verifications, true));
            p.add(params(testId++, singleDeadBroker, Collections.singletonMap(entry.getKey(), entry.getValue()),
                    balancingConstraint, Collections.singleton("T0"), verifications, false));
        }
        p.add(params(testId++, singleDeadBroker, Collections.singletonMap(0, KafkaAssignerEvenRackAwareGoal.class.getName()),
                balancingConstraint, Collections.emptySet(), kafkaAssignerVerifications, true));
        p.add(params(testId++, singleDeadBroker, Collections.singletonMap(0, KafkaAssignerEvenRackAwareGoal.class.getName()),
                balancingConstraint, Collections.emptySet(), kafkaAssignerVerifications, false));
        p.add(params(testId++, singleDeadBroker, Collections.singletonMap(0, KafkaAssignerEvenRackAwareGoal.class.getName()),
                balancingConstraint, Collections.singleton("T0"), kafkaAssignerVerifications, true));
        p.add(params(testId++, singleDeadBroker, Collections.singletonMap(0, KafkaAssignerEvenRackAwareGoal.class.getName()),
                balancingConstraint, Collections.singleton("T0"), kafkaAssignerVerifications, false));

        // A fresh constraint with a higher replica-per-broker cap for the
        // multi-goal decks; the earlier params keep the old constraint object.
        props.setProperty(KafkaCruiseControlConfig.MAX_REPLICAS_PER_BROKER_CONFIG, Long.toString(5100L));
        balancingConstraint = new BalancingConstraint(new KafkaCruiseControlConfig(props));
        balancingConstraint.setResourceBalancePercentage(TestConstants.LOW_BALANCE_PERCENTAGE);
        balancingConstraint.setCapacityThreshold(TestConstants.MEDIUM_CAPACITY_THRESHOLD);

        // Test: All Goals.
        p.add(params(testId++, singleDeadBroker, goalNameByPriority, balancingConstraint,
                Collections.emptySet(), verifications, true));
        p.add(params(testId++, singleDeadBroker, goalNameByPriority, balancingConstraint,
                Collections.singleton("T0"), verifications, true));
        p.add(params(testId++, singleDeadBroker, kafkaAssignerGoals, balancingConstraint,
                Collections.emptySet(), kafkaAssignerVerifications, true));
        p.add(params(testId++, singleDeadBroker, kafkaAssignerGoals, balancingConstraint,
                Collections.singleton("T0"), kafkaAssignerVerifications, true));

        // -- TEST DECK #2: MULTIPLE DEAD BROKERS.
        // Test: Single Goal.
        Map<ClusterProperty, Number> multipleDeadBrokers = new HashMap<>();
        multipleDeadBrokers.put(ClusterProperty.NUM_DEAD_BROKERS, 5);
        for (Map.Entry<Integer, String> entry : goalNameByPriority.entrySet()) {
            p.add(params(testId++, multipleDeadBrokers, Collections.singletonMap(entry.getKey(), entry.getValue()),
                    balancingConstraint, Collections.emptySet(), verifications, true));
            p.add(params(testId++, multipleDeadBrokers, Collections.singletonMap(entry.getKey(), entry.getValue()),
                    balancingConstraint, Collections.singleton("T0"), verifications, true));
        }
        p.add(params(testId++, multipleDeadBrokers, Collections.singletonMap(0, KafkaAssignerEvenRackAwareGoal.class.getName()),
                balancingConstraint, Collections.emptySet(), kafkaAssignerVerifications, true));
        p.add(params(testId++, multipleDeadBrokers, Collections.singletonMap(0, KafkaAssignerEvenRackAwareGoal.class.getName()),
                balancingConstraint, Collections.singleton("T0"), kafkaAssignerVerifications, true));

        // Test: All Goals.
        p.add(params(testId++, multipleDeadBrokers, goalNameByPriority, balancingConstraint,
                Collections.emptySet(), verifications, true));
        p.add(params(testId++, multipleDeadBrokers, goalNameByPriority, balancingConstraint,
                Collections.singleton("T0"), verifications, true));
        p.add(params(testId++, multipleDeadBrokers, kafkaAssignerGoals, balancingConstraint,
                Collections.emptySet(), kafkaAssignerVerifications, true));
        p.add(params(testId++, multipleDeadBrokers, kafkaAssignerGoals, balancingConstraint,
                Collections.singleton("T0"), kafkaAssignerVerifications, true));

        return p;
    }

    /** Packs one parameter row for the constructor below, in declaration order. */
    private static Object[] params(int testId,
                                   Map<ClusterProperty, Number> modifiedProperties,
                                   Map<Integer, String> goalNameByPriority,
                                   BalancingConstraint balancingConstraint,
                                   Collection<String> excludedTopics,
                                   List<OptimizationVerifier.Verification> verifications,
                                   boolean leaderInFirstPosition) {
        return new Object[]{
                testId, modifiedProperties, goalNameByPriority, balancingConstraint, excludedTopics, verifications, leaderInFirstPosition
        };
    }

    private int _testId;
    private Map<ClusterProperty, Number> _modifiedProperties;
    private Map<Integer, String> _goalNameByPriority;
    private BalancingConstraint _balancingConstraint;
    private Set<String> _excludedTopics;
    private List<OptimizationVerifier.Verification> _verifications;
    private boolean _leaderInFirstPosition;

    /**
     * Constructor of Self Healing Test.
     *
     * @param testId Test id.
     * @param modifiedProperties Modified cluster properties over the {@link TestConstants#BASE_PROPERTIES}.
     * @param goalNameByPriority Goal name by priority.
     * @param balancingConstraint Balancing constraint.
     * @param excludedTopics Excluded topics.
     * @param verifications the verifications to make.
     * @param leaderInFirstPosition whether the leader replica is placed in first position when populating the cluster.
     */
    public RandomSelfHealingTest(int testId, Map<ClusterProperty, Number> modifiedProperties,
                                 Map<Integer, String> goalNameByPriority,
                                 BalancingConstraint balancingConstraint,
                                 Collection<String> excludedTopics,
                                 List<OptimizationVerifier.Verification> verifications,
                                 boolean leaderInFirstPosition) {
        _testId = testId;
        _modifiedProperties = modifiedProperties;
        _goalNameByPriority = goalNameByPriority;
        _balancingConstraint = balancingConstraint;
        _excludedTopics = new HashSet<>(excludedTopics);
        _verifications = verifications;
        _leaderInFirstPosition = leaderInFirstPosition;
    }

    @Test
    public void test() throws Exception {
        // Create cluster properties by applying modified properties to base properties.
        Map<ClusterProperty, Number> clusterProperties = new HashMap<>(TestConstants.BASE_PROPERTIES);
        clusterProperties.putAll(_modifiedProperties);

        LOG.debug("Replica distribution: {}.", TestConstants.Distribution.UNIFORM);
        ClusterModel clusterModel = RandomCluster.generate(clusterProperties);
        RandomCluster.populate(clusterModel, clusterProperties, TestConstants.Distribution.UNIFORM,
                true, _leaderInFirstPosition, _excludedTopics);

        assertTrue("Self Healing Test failed to improve the existing state.",
                OptimizationVerifier.executeGoalsFor(_balancingConstraint, clusterModel, _goalNameByPriority,
                        _excludedTopics, _verifications));
    }
}
/*
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.cloud.dataflow.examples.complete.game;

import com.google.cloud.dataflow.examples.complete.game.utils.WriteToBigQuery;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.coders.AvroCoder;
import com.google.cloud.dataflow.sdk.coders.DefaultCoder;
import com.google.cloud.dataflow.sdk.io.TextIO;
import com.google.cloud.dataflow.sdk.options.Default;
import com.google.cloud.dataflow.sdk.options.Description;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.options.Validation;
import com.google.cloud.dataflow.sdk.transforms.Aggregator;
import com.google.cloud.dataflow.sdk.transforms.DoFn;
import com.google.cloud.dataflow.sdk.transforms.MapElements;
import com.google.cloud.dataflow.sdk.transforms.PTransform;
import com.google.cloud.dataflow.sdk.transforms.ParDo;
import com.google.cloud.dataflow.sdk.transforms.Sum;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.PCollection;
import com.google.cloud.dataflow.sdk.values.TypeDescriptor;

import org.apache.avro.reflect.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * This class is the first in a series of four pipelines that tell a story in a 'gaming' domain.
 * Concepts: batch processing; reading input from Google Cloud Storage and writing output to
 * BigQuery; using standalone DoFns; use of the sum by key transform; examples of
 * Java 8 lambda syntax.
 *
 * <p> In this gaming scenario, many users play, as members of different teams, over the course of a
 * day, and their actions are logged for processing. Some of the logged game events may be late-
 * arriving, if users play on mobile devices and go transiently offline for a period.
 *
 * <p> This pipeline does batch processing of data collected from gaming events. It calculates the
 * sum of scores per user, over an entire batch of gaming data (collected, say, for each day). The
 * batch processing will not include any late data that arrives after the day's cutoff point.
 *
 * <p> To execute this pipeline using the Dataflow service and static example input data, specify
 * the pipeline configuration like this:
 * <pre>{@code
 *   --project=YOUR_PROJECT_ID
 *   --stagingLocation=gs://YOUR_STAGING_DIRECTORY
 *   --runner=BlockingDataflowPipelineRunner
 *   --dataset=YOUR-DATASET
 * }
 * </pre>
 * where the BigQuery dataset you specify must already exist.
 *
 * <p> Optionally include the --input argument to specify a batch input file.
 * See the --input default value for example batch data file, or use {@link injector.Injector} to
 * generate your own batch data.
 */
public class UserScore {

    /**
     * Class to hold info about a game event.
     */
    @DefaultCoder(AvroCoder.class)
    static class GameActionInfo {
        // Fields are @Nullable so AvroCoder can encode absent values.
        @Nullable String user;
        @Nullable String team;
        @Nullable Integer score;
        @Nullable Long timestamp;

        // No-arg constructor required for Avro reflection-based coding.
        public GameActionInfo() {}

        public GameActionInfo(String user, String team, Integer score, Long timestamp) {
            this.user = user;
            this.team = team;
            this.score = score;
            this.timestamp = timestamp;
        }

        public String getUser() {
            return this.user;
        }
        public String getTeam() {
            return this.team;
        }
        public Integer getScore() {
            return this.score;
        }
        /**
         * Returns the team name when keyname is "team", otherwise the user name.
         */
        public String getKey(String keyname) {
            if (keyname.equals("team")) {
                return this.team;
            } else {  // return username as default
                return this.user;
            }
        }
        public Long getTimestamp() {
            return this.timestamp;
        }
    }

    /**
     * Parses the raw game event info into GameActionInfo objects. Each event line has the following
     * format: username,teamname,score,timestamp_in_ms,readable_time
     * e.g.:
     * user2_AsparagusPig,AsparagusPig,10,1445230923951,2015-11-02 09:09:28.224
     * The human-readable time string is not used here.
     */
    static class ParseEventFn extends DoFn<String, GameActionInfo> {

        // Log and count parse errors.
        private static final Logger LOG = LoggerFactory.getLogger(ParseEventFn.class);
        private final Aggregator<Long, Long> numParseErrors =
                createAggregator("ParseErrors", new Sum.SumLongFn());

        @Override
        public void processElement(ProcessContext c) {
            String[] components = c.element().split(",");
            try {
                String user = components[0].trim();
                String team = components[1].trim();
                Integer score = Integer.parseInt(components[2].trim());
                Long timestamp = Long.parseLong(components[3].trim());
                GameActionInfo gInfo = new GameActionInfo(user, team, score, timestamp);
                c.output(gInfo);
            } catch (ArrayIndexOutOfBoundsException | NumberFormatException e) {
                // Malformed lines are counted and skipped rather than failing the pipeline.
                numParseErrors.addValue(1L);
                LOG.info("Parse error on " + c.element() + ", " + e.getMessage());
            }
        }
    }

    /**
     * A transform to extract key/score information from GameActionInfo, and sum the scores. The
     * constructor arg determines whether 'team' or 'user' info is extracted.
     */
    // [START DocInclude_USExtractXform]
    public static class ExtractAndSumScore
            extends PTransform<PCollection<GameActionInfo>, PCollection<KV<String, Integer>>> {

        // "team" or "user" — the GameActionInfo field used as the aggregation key.
        private final String field;

        ExtractAndSumScore(String field) {
            this.field = field;
        }

        @Override
        public PCollection<KV<String, Integer>> apply(
                PCollection<GameActionInfo> gameInfo) {
            return gameInfo
                    .apply(MapElements
                            .via((GameActionInfo gInfo) -> KV.of(gInfo.getKey(field), gInfo.getScore()))
                            .withOutputType(new TypeDescriptor<KV<String, Integer>>() {}))
                    .apply(Sum.<String>integersPerKey());
        }
    }
    // [END DocInclude_USExtractXform]

    /**
     * Options supported by {@link UserScore}.
     */
    public static interface Options extends PipelineOptions {

        @Description("Path to the data file(s) containing game data.")
        // The default maps to two large Google Cloud Storage files (each ~12GB) holding two subsequent
        // day's worth (roughly) of data.
        @Default.String("gs://dataflow-samples/game/gaming_data*.csv")
        String getInput();
        void setInput(String value);

        @Description("BigQuery Dataset to write tables to. Must already exist.")
        @Validation.Required
        String getDataset();
        void setDataset(String value);

        @Description("The BigQuery table name. Should not already exist.")
        @Default.String("user_score")
        String getTableName();
        void setTableName(String value);
    }

    /**
     * Create a map of information that describes how to write pipeline output to BigQuery. This map
     * is passed to the {@link WriteToBigQuery} constructor to write user score sums.
     */
    protected static Map<String, WriteToBigQuery.FieldInfo<KV<String, Integer>>>
            configureBigQueryWrite() {
        Map<String, WriteToBigQuery.FieldInfo<KV<String, Integer>>> tableConfigure =
                new HashMap<String, WriteToBigQuery.FieldInfo<KV<String, Integer>>>();
        tableConfigure.put("user",
                new WriteToBigQuery.FieldInfo<KV<String, Integer>>("STRING", c -> c.element().getKey()));
        tableConfigure.put("total_score",
                new WriteToBigQuery.FieldInfo<KV<String, Integer>>("INTEGER", c -> c.element().getValue()));
        return tableConfigure;
    }

    /**
     * Run a batch pipeline.
     */
    // [START DocInclude_USMain]
    public static void main(String[] args) throws Exception {
        // Begin constructing a pipeline configured by commandline flags.
        Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
        Pipeline pipeline = Pipeline.create(options);

        // Read events from a text file and parse them.
        pipeline.apply(TextIO.Read.from(options.getInput()))
                .apply(ParDo.named("ParseGameEvent").of(new ParseEventFn()))
                // Extract and sum username/score pairs from the event data.
                .apply("ExtractUserScore", new ExtractAndSumScore("user"))
                .apply("WriteUserScoreSums",
                        new WriteToBigQuery<KV<String, Integer>>(options.getTableName(),
                                configureBigQueryWrite()));

        // Run the batch pipeline.
        pipeline.run();
    }
    // [END DocInclude_USMain]
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ecs.model;

import java.io.Serializable;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request parameters for the Amazon ECS <code>UpdateService</code> operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ecs-2014-11-13/UpdateService" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateServiceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Short name or full ARN of the cluster the service runs on; the default cluster is assumed when unset. */
    private String cluster;

    /** Name of the service to update. */
    private String service;

    /** Number of instantiations of the task to place and keep running in the service. */
    private Integer desiredCount;

    /**
     * The <code>family</code> and <code>revision</code> (<code>family:revision</code>) or full ARN of the task
     * definition to run. When <code>revision</code> is omitted, the latest <code>ACTIVE</code> revision is used.
     * Updating the task definition makes ECS spawn a task with the new version and then stop an old task once the
     * new version is running.
     */
    private String taskDefinition;

    /** Optional deployment parameters controlling task counts and stop/start ordering during a deployment. */
    private DeploymentConfiguration deploymentConfiguration;

    /**
     * Sets the short name or full Amazon Resource Name (ARN) of the cluster that the service is running on.
     *
     * @param cluster cluster short name or ARN; {@code null} means the default cluster is assumed
     */
    public void setCluster(String cluster) {
        this.cluster = cluster;
    }

    /**
     * @return the short name or full ARN of the cluster that the service is running on, or {@code null} for the
     *         default cluster
     */
    public String getCluster() {
        return this.cluster;
    }

    /**
     * Fluent variant of {@link #setCluster(String)}.
     *
     * @param cluster cluster short name or ARN
     * @return this request, so that method calls can be chained together
     */
    public UpdateServiceRequest withCluster(String cluster) {
        setCluster(cluster);
        return this;
    }

    /**
     * Sets the name of the service to update.
     *
     * @param service the service name
     */
    public void setService(String service) {
        this.service = service;
    }

    /**
     * @return the name of the service to update
     */
    public String getService() {
        return this.service;
    }

    /**
     * Fluent variant of {@link #setService(String)}.
     *
     * @param service the service name
     * @return this request, so that method calls can be chained together
     */
    public UpdateServiceRequest withService(String service) {
        setService(service);
        return this;
    }

    /**
     * Sets the number of instantiations of the task to place and keep running in the service.
     *
     * @param desiredCount the desired task count
     */
    public void setDesiredCount(Integer desiredCount) {
        this.desiredCount = desiredCount;
    }

    /**
     * @return the number of instantiations of the task to place and keep running in the service
     */
    public Integer getDesiredCount() {
        return this.desiredCount;
    }

    /**
     * Fluent variant of {@link #setDesiredCount(Integer)}.
     *
     * @param desiredCount the desired task count
     * @return this request, so that method calls can be chained together
     */
    public UpdateServiceRequest withDesiredCount(Integer desiredCount) {
        setDesiredCount(desiredCount);
        return this;
    }

    /**
     * Sets the <code>family</code> and <code>revision</code> (<code>family:revision</code>) or full ARN of the task
     * definition to run in the service.
     *
     * @param taskDefinition task definition identifier; omit <code>revision</code> to use the latest
     *        <code>ACTIVE</code> revision
     */
    public void setTaskDefinition(String taskDefinition) {
        this.taskDefinition = taskDefinition;
    }

    /**
     * @return the <code>family:revision</code> or full ARN of the task definition to run in the service
     */
    public String getTaskDefinition() {
        return this.taskDefinition;
    }

    /**
     * Fluent variant of {@link #setTaskDefinition(String)}.
     *
     * @param taskDefinition task definition identifier
     * @return this request, so that method calls can be chained together
     */
    public UpdateServiceRequest withTaskDefinition(String taskDefinition) {
        setTaskDefinition(taskDefinition);
        return this;
    }

    /**
     * Sets the optional deployment parameters that control how many tasks run during the deployment and the
     * ordering of stopping and starting tasks.
     *
     * @param deploymentConfiguration the deployment parameters
     */
    public void setDeploymentConfiguration(DeploymentConfiguration deploymentConfiguration) {
        this.deploymentConfiguration = deploymentConfiguration;
    }

    /**
     * @return the optional deployment parameters for this update
     */
    public DeploymentConfiguration getDeploymentConfiguration() {
        return this.deploymentConfiguration;
    }

    /**
     * Fluent variant of {@link #setDeploymentConfiguration(DeploymentConfiguration)}.
     *
     * @param deploymentConfiguration the deployment parameters
     * @return this request, so that method calls can be chained together
     */
    public UpdateServiceRequest withDeploymentConfiguration(DeploymentConfiguration deploymentConfiguration) {
        setDeploymentConfiguration(deploymentConfiguration);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging. Only fields that have been
     * set are included.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getCluster() != null) {
            buf.append("Cluster: ").append(getCluster()).append(",");
        }
        if (getService() != null) {
            buf.append("Service: ").append(getService()).append(",");
        }
        if (getDesiredCount() != null) {
            buf.append("DesiredCount: ").append(getDesiredCount()).append(",");
        }
        if (getTaskDefinition() != null) {
            buf.append("TaskDefinition: ").append(getTaskDefinition()).append(",");
        }
        if (getDeploymentConfiguration() != null) {
            buf.append("DeploymentConfiguration: ").append(getDeploymentConfiguration());
        }
        return buf.append("}").toString();
    }

    /** Null-safe equality: true when both are null or {@code a.equals(b)}. Used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof UpdateServiceRequest)) {
            return false;
        }
        UpdateServiceRequest other = (UpdateServiceRequest) obj;
        return fieldEquals(other.getCluster(), getCluster())
                && fieldEquals(other.getService(), getService())
                && fieldEquals(other.getDesiredCount(), getDesiredCount())
                && fieldEquals(other.getTaskDefinition(), getTaskDefinition())
                && fieldEquals(other.getDeploymentConfiguration(), getDeploymentConfiguration());
    }

    @Override
    public int hashCode() {
        // Same accumulation as the generated per-field version: 31-based, null hashes as 0.
        final int prime = 31;
        int hashCode = 1;
        for (Object value : new Object[] {
                getCluster(), getService(), getDesiredCount(), getTaskDefinition(),
                getDeploymentConfiguration()}) {
            hashCode = prime * hashCode + ((value == null) ? 0 : value.hashCode());
        }
        return hashCode;
    }

    @Override
    public UpdateServiceRequest clone() {
        return (UpdateServiceRequest) super.clone();
    }
}
/* * Copyright 2012-2016 Bart Verhoeven * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package be.nepherte.commons.cli; import be.nepherte.commons.cli.Option.Template; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * A test that covers templates and their builders. */ public class TemplateTest { @Test public void shortName() { Template.Builder builder = Option.newTemplate().shortName("-b"); assertEquals("b", new Template(builder).getShortName().get()); } @Test public void nullShortName() { Template.Builder builder = Option.newTemplate().shortName(null); assertFalse(new Template(builder).getShortName().isPresent()); } @Test public void blankShortName() { Template.Builder builder = Option.newTemplate().shortName(""); assertFalse(new Template(builder).getShortName().isPresent()); } @Test(expected = IllegalArgumentException.class) public void shortNameWithSpaces() { Option.newTemplate().shortName("s p a c e s"); } @Test public void longName() { Template.Builder builder = Option.newTemplate().longName("--blocks"); assertEquals("blocks", new Template(builder).getLongName().get()); } @Test public void nullLongName() { Template.Builder builder = Option.newTemplate().longName(null); assertFalse(new Template(builder).getLongName().isPresent()); } @Test public void blankLongName() { Template.Builder builder = Option.newTemplate().longName(""); assertFalse(new 
Template(builder).getLongName().isPresent()); } @Test(expected = IllegalArgumentException.class) public void longNameWithSpaces() { Option.newTemplate().longName("s p a c e s"); } @Test public void name() { // Only short name available. Template.Builder builder = Option.newTemplate().shortName("-b"); assertEquals("b", new Template(builder).getName()); // Only long name available. builder = Option.newTemplate().longName("--block-size"); assertEquals("block-size", new Template(builder).getName()); // Short name takes precedence over long name. builder = Option.newTemplate().shortName("-b").longName("--block-size"); assertEquals("b", new Template(builder).getName()); } @Test public void sortByName() { Template t1 = Option.newTemplate().shortName("a").build(); Template t2 = Option.newTemplate().longName("b").build(); Template t3 = Option.newTemplate().shortName("c").build(); assertEquals(0, Template.byName(t1, t1)); assertTrue(Template.byName(t2, t1) > 0); assertTrue(Template.byName(t1, t2) < 0); assertTrue(Template.byName(t2, t1) > 0 ); assertTrue(Template.byName(t3, t2) > 0 ); assertTrue(Template.byName(t3, t1) > 0); } @Test public void description() { Template.Builder builder = Option.newTemplate().description("bytes"); assertEquals("bytes", new Template(builder).getDescription().get()); } @Test public void nullDescription() { Template.Builder builder = Option.newTemplate().description(null); assertFalse(new Template(builder).getDescription().isPresent()); } @Test public void blankDescription() { Template.Builder builder = Option.newTemplate().description(""); assertFalse(new Template(builder).getDescription().isPresent()); } @Test public void optional() { Template.Builder builder = Option.newTemplate(); assertFalse(new Template(builder).isRequired()); } @Test public void required() { Template.Builder builder = Option.newTemplate().required(); assertTrue(new Template(builder).isRequired()); } @Test public void minValues() { Template.Builder builder = 
Option.newTemplate().minValues(8); assertEquals(8, new Template(builder).getMinValues()); } @Test(expected = IllegalArgumentException.class) public void negativeMinValues() { Option.newTemplate().minValues(-1); } @Test public void maxValues() { Template.Builder builder = Option.newTemplate().maxValues(1); assertEquals(1, new Template(builder).getMaxValues()); } @Test(expected = IllegalArgumentException.class) public void negativeMaxValues() { Option.newTemplate().maxValues(-1); } @Test public void requiresValues() { Template.Builder builder = Option.newTemplate().minValues(1); assertTrue(new Template(builder).requiresValues()); builder = Option.newTemplate().minValues(0); assertFalse(new Template(builder).requiresValues()); } @Test public void canHaveValues() { Template.Builder builder = Option.newTemplate().maxValues(1); assertTrue(new Template(builder).canHaveValues()); assertTrue(new Template(builder).canHaveValues(0)); assertTrue(new Template(builder).canHaveValues(1)); assertFalse(new Template(builder).canHaveValues(2)); } @Test public void valueName() { Template.Builder builder = Option.newTemplate().valueName("SIZE"); assertEquals("SIZE", new Template(builder).getValueName().get()); } @Test public void nullValueName() { Template.Builder builder = Option.newTemplate().valueName(null); assertFalse(new Template(builder).getValueName().isPresent()); } @Test public void blankValueName() { Template.Builder builder = Option.newTemplate().valueName(""); assertFalse(new Template(builder).getValueName().isPresent()); } @Test(expected = IllegalStateException.class) public void nameMissing() { Option.newTemplate().build(); } @Test(expected = IllegalStateException.class) public void tooFewValues() { Option.newTemplate().shortName("-b").minValues(2).maxValues(1).build(); } @Test public void builderReUsage() { Template.Builder builder = Option.newTemplate(); Template templateA = builder.shortName("a").build(); Template templateB = builder.shortName("b").build(); 
assertEquals("a", templateA.getShortName().get()); assertEquals("b", templateB.getShortName().get()); } @Test(expected = NullPointerException.class) public void builderIsNull() { new Template(null); } @Test public void stringValue() { // Builder with no name. Template.Builder builder = Option.newTemplate(); assertEquals("-<undefined>", builder.toString()); // Template with short name. builder = Option.newTemplate().shortName("a"); assertEquals("-a", builder.toString()); assertEquals("-a", new Template(builder).toString()); // Template with long name. builder = Option.newTemplate().longName("b"); assertEquals("--b", builder.toString()); assertEquals("--b", new Template(builder).toString()); // Template with optional values. builder = Option.newTemplate().valueName("value"); builder.shortName("-a").maxValues(1); assertEquals("-a=[<value>]", builder.toString()); assertEquals("-a=[<value>]", new Template(builder).toString()); // Template with required values. builder = Option.newTemplate().valueName("value"); builder.shortName("-a").minValues(1).maxValues(1); assertEquals("-a=<value>", builder.toString()); assertEquals("-a=<value>", new Template(builder).toString()); } }
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.entitlement.endpoint.util; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import com.google.gson.JsonPrimitive; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.wso2.balana.Balana; import org.wso2.balana.UnknownIdentifierException; import org.wso2.balana.XACMLConstants; import org.wso2.balana.attr.AttributeValue; import org.wso2.balana.ctx.Attribute; import org.wso2.balana.ctx.xacml3.RequestCtx; import org.wso2.balana.xacml3.Attributes; import org.wso2.balana.xacml3.AttributesReference; import org.wso2.balana.xacml3.MultiRequests; import org.wso2.balana.xacml3.RequestDefaults; import org.wso2.balana.xacml3.RequestReference; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.entitlement.endpoint.exception.RequestParseException; import javax.xml.bind.DatatypeConverter; import javax.xml.parsers.DocumentBuilderFactory; import java.io.ByteArrayInputStream; import java.net.URI; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * This class will deal with parsing a given JSON String to a * RequestCtx object, so that it can be evaluated by the 
engine.
 */
public class JSONRequestParser {
    // Shared Gson instance; Gson is stateless for this usage.
    private static Gson gson = new Gson();

    /**
     * Static method that will convert a XACML JSON Request to a <code>{@link RequestCtx}</code> instance.
     * Top-level primitive members ("ReturnPolicyIdList", "CombinedDecision", "XPathVersion") are read
     * into request-level flags; every other non-null member is treated as an attribute category.
     *
     * @param jsonRequest <code>String</code> with JSON request
     * @return <code>{@link RequestCtx}</code> instance that can be used to evaluate on Balana
     * @throws JsonParseException <code>{@link JsonParseException}</code>
     * @throws RequestParseException <code>{@link RequestParseException}</code>
     * @throws UnknownIdentifierException <code>{@link UnknownIdentifierException}</code>
     */
    public static RequestCtx parse(String jsonRequest) throws JsonParseException, RequestParseException,
            UnknownIdentifierException {
        JsonObject requestObject = null;
        Set<Attributes> categories = new HashSet<>();
        boolean returnPolicyIdList = false;
        boolean combinedDecision = false;
        MultiRequests multiRequests = null;
        RequestDefaults requestDefaults = null;

        try {
            // The payload must be a JSON object with a top-level "Request" member.
            requestObject = gson.fromJson(jsonRequest, JsonObject.class);
            requestObject = requestObject.get("Request").getAsJsonObject();
        } catch (Exception e) {
            throw new JsonParseException("Error in JSON Request String");
        }

        Set<Map.Entry<String, JsonElement>> jsonAttributes = requestObject.entrySet();

        for (Map.Entry<String, JsonElement> jsonAttribute : jsonAttributes) {
            if (jsonAttribute.getValue().isJsonPrimitive()) {
                // Request-level scalar options.
                switch (jsonAttribute.getKey()) {
                    case XACMLConstants.RETURN_POLICY_LIST:
                        if (jsonAttribute.getValue().getAsBoolean() == true) {
                            returnPolicyIdList = true;
                        }
                        break;
                    case XACMLConstants.COMBINE_DECISION:
                        if (jsonAttribute.getValue().getAsBoolean() == true) {
                            combinedDecision = true;
                        }
                        break;
                    case EntitlementEndpointConstants.XPATH_VERSION:
                        String xPathVersion = jsonAttribute.getValue().getAsString();
                        requestDefaults = new RequestDefaults(xPathVersion);
                        break;
                }
            } else if (!jsonAttribute.getValue().isJsonNull()) {
                JsonObject jsonCategory = null;
                if (jsonAttribute.getValue().isJsonObject()) {
                    jsonCategory = jsonAttribute.getValue().getAsJsonObject();
                    jsonAttributeSeperator(jsonAttribute, jsonCategory, categories);
                } else if (jsonAttribute.getValue().isJsonArray()) {
                    // A category may hold several attribute objects.
                    for (JsonElement jsonElement : jsonAttribute.getValue().getAsJsonArray()) {
                        jsonCategory = jsonElement.getAsJsonObject();
                        jsonAttributeSeperator(jsonAttribute, jsonCategory, categories);
                    }
                } else if (EntitlementEndpointConstants.MULTI_REQUESTS.equals(jsonAttribute.getKey())) {
                    // NOTE(review): this branch looks unreachable — a non-null JsonElement is always a
                    // primitive, object, or array, and jsonCategory is still null here (would NPE on
                    // entrySet()). MultiRequests handling appears broken; confirm against callers/tests.
                    Set<Map.Entry<String, JsonElement>> jsonRequestReferences = jsonCategory.entrySet();
                    Set<RequestReference> requestReferences = new HashSet<>();

                    if (jsonRequestReferences.isEmpty()) {
                        throw new RequestParseException("MultiRequest should contain at least one Reference Request");
                    }
                    for (Map.Entry<String, JsonElement> jsonRequstReference : jsonRequestReferences) {
                        requestReferences.add(jsonObjectToRequestReference(jsonRequstReference.getValue()
                                .getAsJsonObject()));
                    }
                    multiRequests = new MultiRequests(requestReferences);
                }
            }
        }

        return new RequestCtx(null, categories, returnPolicyIdList, combinedDecision, multiRequests,
                requestDefaults);
    }

    /**
     * This is to separate JSON into attribute categories and collect them into {@code categories}.
     *
     * @param jsonAttribute - the map of category string and the JSON Element
     * @param jsonCategory  - the main object category
     * @param categories    - the set of categories (mutated: one Attributes object is added per call)
     * @throws RequestParseException
     * @throws UnknownIdentifierException
     */
    private static void jsonAttributeSeperator(Map.Entry<String, JsonElement> jsonAttribute, JsonObject jsonCategory,
                                               Set<Attributes> categories)
            throws RequestParseException, UnknownIdentifierException {
        Node content = null;
        URI category = null;
        Set<Attribute> attributes = null;
        String id = null;

        if (EntitlementEndpointConstants.CATEGORY_DEFAULT.equals(jsonAttribute.getKey())) {
            // Default category: the category URI comes from the object's "CategoryId" member.
            // NOTE(review): content/id/attributes are NOT read in this branch — confirm intended.
            if (jsonCategory.has(EntitlementEndpointConstants.CATEGORY_ID)) {
                category = stringCateogryToURI(jsonCategory
                        .get(EntitlementEndpointConstants.CATEGORY_ID)
                        .getAsString());
            }
        } else {
            // Named category: the member key itself (possibly a shorthand) is the category.
            if (category == null) {
                category = stringCateogryToURI(jsonAttribute.getKey());
            }
            if (jsonCategory.has(EntitlementEndpointConstants.ID)) {
                id = jsonCategory.get(EntitlementEndpointConstants.ID).getAsString();
            }
            if (jsonCategory.has(EntitlementEndpointConstants.CONTENT)) {
                // XML <Content> payload: decode then parse with a hardened (XXE-safe) builder factory.
                DocumentBuilderFactory dbf;
                Document doc = null;
                String xmlContent = stringContentToXMLContent(jsonCategory
                        .get(EntitlementEndpointConstants.CONTENT)
                        .getAsString());
                dbf = IdentityUtil.getSecuredDocumentBuilderFactory();
                dbf.setNamespaceAware(true);
                try (ByteArrayInputStream inputStream = new ByteArrayInputStream(xmlContent.getBytes())) {
                    doc = dbf.newDocumentBuilder().parse(inputStream);
                } catch (Exception e) {
                    throw new JsonParseException("DOM of request element can not be created from String.", e);
                }
                if (doc != null) {
                    content = doc.getDocumentElement();
                }
            }
            // Add all category attributes
            if (jsonCategory.has(EntitlementEndpointConstants.ATTRIBUTE)) {
                if (jsonCategory.get(EntitlementEndpointConstants.ATTRIBUTE).isJsonArray()) {
                    attributes = new HashSet<>();
                    for (JsonElement jsonElement : jsonCategory.get(EntitlementEndpointConstants.ATTRIBUTE)
                            .getAsJsonArray()) {
                        attributes.add(jsonObjectToAttribute(jsonElement.getAsJsonObject()));
                    }
                }
            }
        }

        //Build the Attributes object using above values
        Attributes attributesObj = new Attributes(category, content, attributes, id);
        categories.add(attributesObj);
    }

    /**
     * Private methods used by the parser to convert a given <code>{@link JsonObject}</code>
     * to a Balana <code>{@link Attribute}</code>. The data type defaults to string and is
     * refined from an explicit "DataType" member or inferred from the value's JSON type.
     *
     * @param jsonObject <code>{@link JsonObject}</code> representing the Attributes
     * @return <code>{@link Attribute}</code>
     * @throws RequestParseException      when AttributeId is missing or no value is present
     * @throws UnknownIdentifierException
     */
    private static Attribute jsonObjectToAttribute(JsonObject jsonObject) throws RequestParseException,
            UnknownIdentifierException {
        URI id = null;
        // Default per the JSON profile: string, unless overridden below.
        URI type = stringAttributeToURI(EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_STRING);
        boolean includeInResult = false;
        String issuer = null;
        List<AttributeValue> attributeValues = new ArrayList<>();

        Set<Map.Entry<String, JsonElement>> properties = jsonObject.entrySet();

        for (Map.Entry<String, JsonElement> property : properties) {
            if (property.getValue().isJsonPrimitive()) {
                switch (property.getKey()) {
                    case EntitlementEndpointConstants.ATTRIBUTE_ID:
                        id = stringAttributeToURI(property.getValue().getAsString());
                        break;
                    case EntitlementEndpointConstants.ATTRIBUTE_ISSUER:
                        issuer = property.getValue().getAsString();
                        break;
                    case EntitlementEndpointConstants.ATTRIBUTE_INCLUDE_IN_RESULT:
                        includeInResult = property.getValue().getAsBoolean();
                        break;
                    case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE:
                        type = stringAttributeToURI(property.getValue().getAsString());
                        break;
                    case EntitlementEndpointConstants.ATTRIBUTE_VALUE:
                        URI dataType = stringAttributeToURI(
                                jsonElementToDataType(property.getValue().getAsJsonPrimitive()));

                        //If a recognizable data type is given, it should replace the above
                        if (type.equals(stringAttributeToURI(EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_STRING))
                                && dataType != null) {
                            type = dataType;
                        }
                        attributeValues.add(getAttributeValue(property.getValue().getAsString(), dataType, type));
                }
            } else if (property.getValue().isJsonArray()) {
                if (property.getKey().equals(EntitlementEndpointConstants.ATTRIBUTE_VALUE)) {
                    JsonArray valueArray = property.getValue().getAsJsonArray();
                    for (JsonElement value : valueArray) {
                        if (value.isJsonPrimitive()) {
                            //check if each value's data type can be determined
                            URI dataType = stringAttributeToURI(
                                    jsonElementToDataType(value.getAsJsonPrimitive()));
                            attributeValues.add(getAttributeValue(value.getAsString(), dataType, type));
                        }
                    }
                }
                /* Todo: Spec mentions resolve the type by checking all elements at the end */
            }
        }

        if (id == null) {
            throw new RequestParseException("Attribute Id should be set");
        }
        if (attributeValues.isEmpty()) {
            throw new RequestParseException("Attribute should have at least one value");
        }

        return new Attribute(id, type, issuer, null, attributeValues, includeInResult,
                XACMLConstants.XACML_VERSION_3_0);
    }

    /**
     * Private methods constructing a Balana <code>{@link AttributeValue}</code> from given parameters.
     *
     * @param value          <code>String</code> with the actual value of the Attribute
     * @param dataType       <code>URI</code> of the DataType of the value (may be null)
     * @param parentDataType <code>URI</code> of the DataType of <code>{@link Attribute}</code> this belongs to;
     *                       used as the fallback when {@code dataType} is null
     * @return <code>{@link AttributeValue}</code>
     * @throws UnknownIdentifierException when Balana cannot create a value for the resolved type
     */
    private static AttributeValue getAttributeValue(String value, URI dataType, URI parentDataType)
            throws UnknownIdentifierException {
        URI type = dataType;
        AttributeValue attributeValue = null;

        //check if dataType attribute is set, if not use the parent data type
        if (dataType == null) {
            type = parentDataType;
        }
        try {
            attributeValue = Balana.getInstance().getAttributeFactory().createValue(type, value);
        } catch (Exception e) {
            // NOTE(review): original cause is dropped here — the factory error detail is lost.
            throw new UnknownIdentifierException();
        }

        return attributeValue;
    }

    /**
     * Private method to convert a given <code>{@link JsonObject}</code> to a Balana
     * <code>{@link RequestReference}</code>. Only the "ReferenceId" array is read.
     *
     * @param jsonRequestReference <code>{@link JsonObject}</code>
     * @return <code>{@link RequestReference}</code>
     */
    private static RequestReference jsonObjectToRequestReference(JsonObject jsonRequestReference) {
        RequestReference requestReference = new RequestReference();
        Set<AttributesReference> attributesReferences = new HashSet<>();

        if (jsonRequestReference.has(EntitlementEndpointConstants.REFERENCE_ID)) {
            JsonArray referenceIds =
                    jsonRequestReference.get(EntitlementEndpointConstants.REFERENCE_ID).getAsJsonArray();
            for (JsonElement reference : referenceIds) {
                AttributesReference attributesReference = new AttributesReference();
                attributesReference.setId(reference.getAsString());
                attributesReferences.add(attributesReference);
            }
            requestReference.setReferences(attributesReferences);
        }

        return requestReference;
    }

    /**
     * Convert a given String category to its full name URI
     *
     * @param
category <code>String</code> with shorthand or fullname URI * @return <code>URI</code> */ private static URI stringCateogryToURI(String category) { URI uri = null; String uriName = category; switch (category) { case EntitlementEndpointConstants.CATEGORY_RESOURCE: uriName = EntitlementEndpointConstants.CATEGORY_RESOURCE_URI; break; case EntitlementEndpointConstants.CATEGORY_ACTION: uriName = EntitlementEndpointConstants.CATEGORY_ACTION_URI; break; case EntitlementEndpointConstants.CATEGORY_ENVIRONMENT: uriName = EntitlementEndpointConstants.CATEGORY_ENVIRONMENT_URI; break; case EntitlementEndpointConstants.CATEGORY_ACCESS_SUBJECT: uriName = EntitlementEndpointConstants.CATEGORY_ACCESS_SUBJECT_URI; break; case EntitlementEndpointConstants.CATEGORY_RECIPIENT_SUBJECT: uriName = EntitlementEndpointConstants.CATEGORY_RECIPIENT_SUBJECT_URI; break; case EntitlementEndpointConstants.CATEGORY_INTERMEDIARY_SUBJECT: uriName = EntitlementEndpointConstants.CATEGORY_INTERMEDIARY_SUBJECT_URI; break; case EntitlementEndpointConstants.CATEGORY_CODEBASE: uriName = EntitlementEndpointConstants.CATEGORY_CODEBASE_URI; break; case EntitlementEndpointConstants.CATEGORY_REQUESTING_MACHINE: uriName = EntitlementEndpointConstants.CATEGORY_REQUESTING_MACHINE_URI; break; } uri = URI.create(uriName); return uri; } /** * Converts a given <code>{@link JsonElement}</code> to a <code>String</code> DataType * Predicted based on XACML 3.0 JSON profile * * @param element * @return */ private static String jsonElementToDataType(JsonPrimitive element) { if (element.isString()) { return EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_STRING; } else if (element.isBoolean()) { return EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_BOOLEAN; } else if (element.isNumber()) { double n1 = element.getAsDouble(); int n2 = element.getAsInt(); if (Math.ceil(n1) == n2) { return EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_INTEGER; } else { return EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DOUBLE; } } 
return null; } /** * Converts a given String attribute to the corresponding <code>URI</code> * * @param attribute <code>String</code> * @return <code>URI</code> */ private static URI stringAttributeToURI(String attribute) { String uriName = attribute; switch (attribute) { case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_STRING_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_STRING; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_BOOLEAN_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_BOOLEAN; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_INTEGER_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_INTEGER; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DOUBLE_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DOUBLE; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_TIME_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_TIME; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DATE_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DATE; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DATE_TIME_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DATE_TIME; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DATE_TIME_DURATION_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DATE_TIME_DURATION; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_YEAR_MONTH_DURATION_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_YEAR_MONTH_DURATION; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_ANY_URI_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_ANY_URI; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_HEX_BINARY_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_HEX_BINARY; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_BASE64_BINARY_SHORT: uriName = 
EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_BASE64_BINARY; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_RFC_822_NAME_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_RFC_822_NAME; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_X_500_NAME_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_X_500_NAME; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_IP_ADDRESS_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_IP_ADDRESS; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DNS_NAME_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_DNS_NAME; break; case EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_XPATH_EXPRESSION_SHORT: uriName = EntitlementEndpointConstants.ATTRIBUTE_DATA_TYPE_XPATH_EXPRESSION; break; case EntitlementEndpointConstants.ATTRIBUTE_RESOURCE_ID_SHORTEN: uriName = EntitlementEndpointConstants.ATTRIBUTE_RESOURCE_ID; break; case EntitlementEndpointConstants.ATTRIBUTE_ACTION_ID__SHORTEN: uriName = EntitlementEndpointConstants.ATTRIBUTE_ACTION_ID; break; case EntitlementEndpointConstants.ATTRIBUTE_ENVIRONMENT_ID_SHORTEN: uriName = EntitlementEndpointConstants.ATTRIBUTE_ENVIRONMENT_ID; break; case EntitlementEndpointConstants.ATTRIBUTE_SUBJECT_ID_SHORTEN: uriName = EntitlementEndpointConstants.ATTRIBUTE_SUBJECT_ID; break; case EntitlementEndpointConstants.ATTRIBUTE_RECIPIENT_SUBJECT_ID_SHORTEN: uriName = EntitlementEndpointConstants.ATTRIBUTE_RECIPIENT_SUBJECT_ID; break; case EntitlementEndpointConstants.ATTRIBUTE_INTERMEDIARY_SUBJECT_ID_SHORTEN: uriName = EntitlementEndpointConstants.ATTRIBUTE_INTERMEDIARY_SUBJECT_ID; break; case EntitlementEndpointConstants.ATTRBUTE_REQUESTING_MACHINE_ID_SHORTEN: uriName = EntitlementEndpointConstants.ATTRBUTE_REQUESTING_MACHINE_ID; break; case EntitlementEndpointConstants.ATTRIBUTE_CODEBASE_ID_SHORTEN: uriName = EntitlementEndpointConstants.ATTRIBUTE_CODEBASE_ID; break; } return URI.create(uriName); } 
/** * Converts a given XML / Base64 encoded XML content to String XML content * * @param content XML or Base64 encoded XML * @return <code>String</code> with only XML * @throws RequestParseException */ private static String stringContentToXMLContent(String content) throws RequestParseException { if (content.startsWith("<")) { //todo : check if GSON automatically unescape the string return content; } else { //do base64 decoding return new String(DatatypeConverter.parseBase64Binary(content)); } } }
/* * Copyright (C) 2014-2016 LinkedIn Corp. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of the * License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. */ package gobblin.runtime; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import gobblin.annotation.Alpha; import gobblin.commit.CommitStep; import gobblin.configuration.ConfigurationKeys; import gobblin.configuration.WorkUnitState; import gobblin.metastore.StateStore; import gobblin.source.workunit.WorkUnit; import gobblin.util.Either; import gobblin.util.ExecutorsUtils; import gobblin.util.executors.IteratorExecutor; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; /** * Attempt of running multiple {@link Task}s generated from a list of{@link WorkUnit}s. * A {@link GobblinMultiTaskAttempt} is usually a unit of workunits that are assigned to one container. 
*/ @Slf4j @RequiredArgsConstructor @Alpha public class GobblinMultiTaskAttempt { private final List<WorkUnit> workUnits; private final String jobId; private final JobState jobState; private final TaskStateTracker taskStateTracker; private final TaskExecutor taskExecutor; private final Optional<String> containerIdOptional; private final Optional<StateStore<TaskState>> taskStateStoreOptional; private List<Task> tasks; /** * Additional commit steps that may be added by different launcher, and can be environment specific. * Usually it should be clean-up steps, which are always executed at the end of {@link #commit()}. */ private List<CommitStep> cleanupCommitSteps; /** * Run {@link #workUnits} assigned in this attempt. * @throws IOException * @throws InterruptedException */ public void run() throws IOException, InterruptedException { if (workUnits.isEmpty()) { log.warn("No work units to run in container " + containerIdOptional.or("")); return; } CountDownLatch countDownLatch = new CountDownLatch(workUnits.size()); this.tasks = AbstractJobLauncher .runWorkUnits(jobId, jobState, workUnits, containerIdOptional, taskStateTracker, taskExecutor, countDownLatch); log.info("Waiting for submitted tasks of job {} to complete in container {}...", jobId, containerIdOptional.or("")); while (countDownLatch.getCount() > 0) { log.info(String.format("%d out of %d tasks of job %s are running in container %s", countDownLatch.getCount(), workUnits.size(), jobId, containerIdOptional.or(""))); if (countDownLatch.await(10, TimeUnit.SECONDS)) { break; } } log.info("All assigned tasks of job {} have completed in container {}", jobId, containerIdOptional.or("")); } /** * Commit {@link #tasks} by 1. calling {@link Task#commit()} in parallel; 2. executing any additional {@link CommitStep}; * 3. persist task statestore. 
* @throws IOException */ public void commit() throws IOException { if (this.tasks == null) { log.warn("No tasks to be committed in container " + containerIdOptional.or("")); return; } Iterator<Callable<Void>> callableIterator = Iterators.transform(this.tasks.iterator(), new Function<Task, Callable<Void>>() { @Override public Callable<Void> apply(final Task task) { return new Callable<Void>() { @Nullable @Override public Void call() throws Exception { task.commit(); return null; } }; } }); try { List<Either<Void, ExecutionException>> executionResults = new IteratorExecutor<>(callableIterator, this.getTaskCommitThreadPoolSize(), ExecutorsUtils.newDaemonThreadFactory(Optional.of(log), Optional.of("Task-committing-pool-%d"))) .executeAndGetResults(); IteratorExecutor.logFailures(executionResults, log, 10); } catch (InterruptedException ie) { log.error("Committing of tasks interrupted. Aborting."); throw new RuntimeException(ie); } finally { persistTaskStateStore(); if (this.cleanupCommitSteps != null) { for (CommitStep cleanupCommitStep : this.cleanupCommitSteps) { log.info("Executing additional commit step."); cleanupCommitStep.execute(); } } } } private void persistTaskStateStore() throws IOException { if (!this.taskStateStoreOptional.isPresent()) { log.info("Task state store does not exist."); return; } StateStore<TaskState> taskStateStore = this.taskStateStoreOptional.get(); for (WorkUnit workUnit : workUnits) { String taskId = workUnit.getProp(ConfigurationKeys.TASK_ID_KEY); // Delete the task state file for the task if it already exists. // This usually happens if the task is retried upon failure. 
if (taskStateStore.exists(jobId, taskId + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX)) { taskStateStore.delete(jobId, taskId + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX); } } boolean hasTaskFailure = false; for (Task task : tasks) { log.info("Writing task state for task " + task.getTaskId()); taskStateStore.put(task.getJobId(), task.getTaskId() + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX, task.getTaskState()); if (task.getTaskState().getWorkingState() == WorkUnitState.WorkingState.FAILED) { hasTaskFailure = true; } } if (hasTaskFailure) { for (Task task : tasks) { if (task.getTaskState().contains(ConfigurationKeys.TASK_FAILURE_EXCEPTION_KEY)) { log.error(String.format("Task %s failed due to exception: %s", task.getTaskId(), task.getTaskState().getProp(ConfigurationKeys.TASK_FAILURE_EXCEPTION_KEY))); } } throw new IOException( String.format("Not all tasks running in container %s completed successfully", containerIdOptional.or(""))); } } public boolean isSpeculativeExecutionSafe() { for (Task task : tasks) { if (!task.isSpeculativeExecutionSafe()) { log.info("One task is not safe for speculative execution."); return false; } } log.info("All tasks are safe for speculative execution."); return true; } private final int getTaskCommitThreadPoolSize() { return Integer.parseInt(this.jobState.getProp(ConfigurationKeys.TASK_EXECUTOR_THREADPOOL_SIZE_KEY, Integer.toString(ConfigurationKeys.DEFAULT_TASK_EXECUTOR_THREADPOOL_SIZE))); } public void addCleanupCommitStep(CommitStep commitStep) { if (this.cleanupCommitSteps == null) { this.cleanupCommitSteps = Lists.newArrayList(commitStep); } else { this.cleanupCommitSteps.add(commitStep); } } }
/* * The MIT License * Copyright (c) 2012 Microsoft Corporation * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package microsoft.exchange.webservices.data.property.complex.time; import microsoft.exchange.webservices.data.core.EwsServiceXmlReader; import microsoft.exchange.webservices.data.core.EwsServiceXmlWriter; import microsoft.exchange.webservices.data.core.XmlAttributeNames; import microsoft.exchange.webservices.data.core.XmlElementNames; import microsoft.exchange.webservices.data.enumeration.ExchangeVersion; import microsoft.exchange.webservices.data.enumeration.XmlNamespace; import microsoft.exchange.webservices.data.exception.InvalidOrUnsupportedTimeZoneDefinitionException; import microsoft.exchange.webservices.data.exception.ServiceLocalException; import microsoft.exchange.webservices.data.exception.ServiceXmlSerializationException; import microsoft.exchange.webservices.data.property.complex.ComplexProperty; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; /** * Represents a time zone as defined by the EWS schema. */ public class TimeZoneDefinition extends ComplexProperty implements Comparator<TimeZoneTransition> { /** * Prefix for generated ids. */ private static String NoIdPrefix = "NoId_"; /** * The Standard period id. */ protected final String StandardPeriodId = "Std"; /** * The Standard period name. */ protected final String StandardPeriodName = "Standard"; /** * The Daylight period id. */ protected final String DaylightPeriodId = "Dlt"; /** * The Daylight period name. */ protected final String DaylightPeriodName = "Daylight"; /** * The name. */ public String name; /** * The id. */ public String id; /** * The periods. */ private Map<String, TimeZonePeriod> periods = new HashMap<String, TimeZonePeriod>(); /** * The transition groups. */ private Map<String, TimeZoneTransitionGroup> transitionGroups = new HashMap<String, TimeZoneTransitionGroup>(); /** * The transitions. 
*/ private List<TimeZoneTransition> transitions = new ArrayList<TimeZoneTransition>(); /** * Compares the transitions. * * @param x The first transition. * @param y The second transition. * @return A negative number if x is less than y, 0 if x and y are equal, a * positive number if x is greater than y. */ @Override public int compare(TimeZoneTransition x, TimeZoneTransition y) { if (x == y) { return 0; } else if (x instanceof TimeZoneTransition) { return -1; } else if (y instanceof TimeZoneTransition) { return 1; } else { AbsoluteDateTransition firstTransition = (AbsoluteDateTransition) x; AbsoluteDateTransition secondTransition = (AbsoluteDateTransition) y; return firstTransition.getDateTime().compareTo( secondTransition.getDateTime()); } } /** * Initializes a new instance of the TimeZoneDefinition class. */ public TimeZoneDefinition() { super(); } /** * Adds a transition group with a single transition to the specified period. * * @param timeZonePeriod the time zone period * @return A TimeZoneTransitionGroup. */ private TimeZoneTransitionGroup createTransitionGroupToPeriod( TimeZonePeriod timeZonePeriod) { TimeZoneTransition transitionToPeriod = new TimeZoneTransition(this, timeZonePeriod); TimeZoneTransitionGroup transitionGroup = new TimeZoneTransitionGroup( this, String.valueOf(this.transitionGroups.size())); transitionGroup.getTransitions().add(transitionToPeriod); this.transitionGroups.put(transitionGroup.getId(), transitionGroup); return transitionGroup; } /** * Reads the attribute from XML. * * @param reader the reader * @throws Exception the exception */ @Override public void readAttributesFromXml(EwsServiceXmlReader reader) throws Exception { this.name = reader.readAttributeValue(XmlAttributeNames.Name); this.id = reader.readAttributeValue(XmlAttributeNames.Id); // E14:319057 -- EWS can return a TimeZone definition with no Id. Generate a new Id in this case. if (this.id == null || this.id.isEmpty()) { String nameValue = (this.getName() == null || this. 
getName().isEmpty()) ? "" : this.getName(); this.setId(NoIdPrefix + Math.abs(nameValue.hashCode())); } } /** * Writes the attribute to XML. * * @param writer the writer * @throws ServiceXmlSerializationException the service xml serialization exception */ @Override public void writeAttributesToXml(EwsServiceXmlWriter writer) throws ServiceXmlSerializationException { // The Name attribute is only supported in Exchange 2010 and above. if (writer.getService().getRequestedServerVersion() != ExchangeVersion.Exchange2007_SP1) { writer.writeAttributeValue(XmlAttributeNames.Name, this.name); } writer.writeAttributeValue(XmlAttributeNames.Id, this.id); } /** * Tries to read element from XML. * * @param reader the reader * @return True if element was read. * @throws Exception the exception */ @Override public boolean tryReadElementFromXml(EwsServiceXmlReader reader) throws Exception { if (reader.getLocalName().equals(XmlElementNames.Periods)) { do { reader.read(); if (reader.isStartElement(XmlNamespace.Types, XmlElementNames.Period)) { TimeZonePeriod period = new TimeZonePeriod(); period.loadFromXml(reader); this.periods.put(period.getId(), period); } } while (!reader.isEndElement(XmlNamespace.Types, XmlElementNames.Periods)); return true; } else if (reader.getLocalName().equals( XmlElementNames.TransitionsGroups)) { do { reader.read(); if (reader.isStartElement(XmlNamespace.Types, XmlElementNames.TransitionsGroup)) { TimeZoneTransitionGroup transitionGroup = new TimeZoneTransitionGroup( this); transitionGroup.loadFromXml(reader); this.transitionGroups.put(transitionGroup.getId(), transitionGroup); } } while (!reader.isEndElement(XmlNamespace.Types, XmlElementNames.TransitionsGroups)); return true; } else if (reader.getLocalName().equals(XmlElementNames.Transitions)) { do { reader.read(); if (reader.isStartElement()) { TimeZoneTransition transition = TimeZoneTransition.create( this, reader.getLocalName()); transition.loadFromXml(reader); this.transitions.add(transition); } } 
while (!reader.isEndElement(XmlNamespace.Types, XmlElementNames.Transitions)); return true; } else { return false; } } /** * Loads from XML. * * @param reader the reader * @throws Exception the exception */ public void loadFromXml(EwsServiceXmlReader reader) throws Exception { this.loadFromXml(reader, XmlElementNames.TimeZoneDefinition); Collections.sort(this.transitions, new TimeZoneDefinition()); } /** * Writes elements to XML. * * @param writer the writer * @throws Exception the exception */ @Override public void writeElementsToXml(EwsServiceXmlWriter writer) throws Exception { // We only emit the full time zone definition against Exchange 2010 // servers and above. if (writer.getService().getRequestedServerVersion() != ExchangeVersion.Exchange2007_SP1) { if (this.periods.size() > 0) { writer.writeStartElement(XmlNamespace.Types, XmlElementNames.Periods); Iterator<TimeZonePeriod> it = this.periods.values().iterator(); while (it.hasNext()) { it.next().writeToXml(writer); } writer.writeEndElement(); // Periods } if (this.transitionGroups.size() > 0) { writer.writeStartElement(XmlNamespace.Types, XmlElementNames.TransitionsGroups); for (int i = 0; i < this.transitionGroups.size(); i++) { Object key[] = this.transitionGroups.keySet().toArray(); this.transitionGroups.get(key[i]).writeToXml(writer); } writer.writeEndElement(); // TransitionGroups } if (this.transitions.size() > 0) { writer.writeStartElement(XmlNamespace.Types, XmlElementNames.Transitions); for (TimeZoneTransition transition : this.transitions) { transition.writeToXml(writer); } writer.writeEndElement(); // Transitions } } } /** * Writes to XML. * * @param writer The writer. * @throws Exception the exception */ protected void writeToXml(EwsServiceXmlWriter writer) throws Exception { this.writeToXml(writer, XmlElementNames.TimeZoneDefinition); } /** * Validates this time zone definition. * * @throws InvalidOrUnsupportedTimeZoneDefinitionException thrown when time zone definition is not valid. 
*/ public void validate() throws ServiceLocalException { // The definition must have at least one period, one transition group // and one transition, // and there must be as many transitions as there are transition groups. if (this.periods.size() < 1 || this.transitions.size() < 1 || this.transitionGroups.size() < 1 || this.transitionGroups.size() != this.transitions.size()) { throw new InvalidOrUnsupportedTimeZoneDefinitionException(); } // The first transition must be of type TimeZoneTransition. if (this.transitions.get(0).getClass() != TimeZoneTransition.class) { throw new InvalidOrUnsupportedTimeZoneDefinitionException(); } // All transitions must be to transition groups and be either // TimeZoneTransition or // AbsoluteDateTransition instances. for (TimeZoneTransition transition : this.transitions) { Class<?> transitionType = transition.getClass(); if (transitionType != TimeZoneTransition.class && transitionType != AbsoluteDateTransition.class) { throw new InvalidOrUnsupportedTimeZoneDefinitionException(); } if (transition.getTargetGroup() == null) { throw new InvalidOrUnsupportedTimeZoneDefinitionException(); } } // All transition groups must be valid. for (TimeZoneTransitionGroup transitionGroup : this.transitionGroups .values()) { transitionGroup.validate(); } } /** * Gets the name of this time zone definition. * * @return the name */ public String getName() { return this.name; } /** * Sets the name. * * @param name the new name */ protected void setName(String name) { this.name = name; } /** * Gets the Id of this time zone definition. * * @return the id */ public String getId() { return this.id; } /** * Sets the id. * * @param id the new id */ public void setId(String id) { this.id = id; } /** * Adds a transition group with a single transition to the specified period. * * @return A TimeZoneTransitionGroup. 
*/ public Map<String, TimeZonePeriod> getPeriods() { return this.periods; } /** * Gets the transition groups associated with this time zone definition, * indexed by Id. * * @return the transition groups */ public Map<String, TimeZoneTransitionGroup> getTransitionGroups() { return this.transitionGroups; } /** * Writes to XML. * * @param writer accepts EwsServiceXmlWriter * @param xmlElementName accepts String * @throws Exception throws Exception */ public void writeToXml(EwsServiceXmlWriter writer, String xmlElementName) throws Exception { this.writeToXml(writer, this.getNamespace(), xmlElementName); } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver15; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import io.netty.buffer.ByteBuf; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFTableStatusVer15 implements OFTableStatus { private static final Logger logger = LoggerFactory.getLogger(OFTableStatusVer15.class); // version: 1.5 final static byte WIRE_VERSION = 6; final static int MINIMUM_LENGTH = 24; // maximum OF message length: 16 bit, unsigned final static int MAXIMUM_LENGTH = 0xFFFF; private final static long DEFAULT_XID = 0x0L; // OF message fields private final long xid; private final OFTableReason 
reason; private final OFTableDesc table; // // package private constructor - used by readers, builders, and factory OFTableStatusVer15(long xid, OFTableReason reason, OFTableDesc table) { if(reason == null) { throw new NullPointerException("OFTableStatusVer15: property reason cannot be null"); } if(table == null) { throw new NullPointerException("OFTableStatusVer15: property table cannot be null"); } this.xid = U32.normalize(xid); this.reason = reason; this.table = table; } // Accessors for OF message fields @Override public OFVersion getVersion() { return OFVersion.OF_15; } @Override public OFType getType() { return OFType.TABLE_STATUS; } @Override public long getXid() { return xid; } @Override public long getRole()throws UnsupportedOperationException { throw new UnsupportedOperationException("Property role not supported in version 1.5"); } @Override public OFTableReason getReason() { return reason; } @Override public OFTableDesc getTable() { return table; } public OFTableStatus.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFTableStatus.Builder { final OFTableStatusVer15 parentMessage; // OF message fields private boolean xidSet; private long xid; private boolean reasonSet; private OFTableReason reason; private boolean tableSet; private OFTableDesc table; BuilderWithParent(OFTableStatusVer15 parentMessage) { this.parentMessage = parentMessage; } @Override public OFVersion getVersion() { return OFVersion.OF_15; } @Override public OFType getType() { return OFType.TABLE_STATUS; } @Override public long getXid() { return xid; } @Override public OFTableStatus.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } @Override public long getRole()throws UnsupportedOperationException { throw new UnsupportedOperationException("Property role not supported in version 1.5"); } @Override public OFTableStatus.Builder setRole(long role) throws UnsupportedOperationException { throw new 
UnsupportedOperationException("Property role not supported in version 1.5"); } @Override public OFTableReason getReason() { return reason; } @Override public OFTableStatus.Builder setReason(OFTableReason reason) { this.reason = reason; this.reasonSet = true; return this; } @Override public OFTableDesc getTable() { return table; } @Override public OFTableStatus.Builder setTable(OFTableDesc table) { this.table = table; this.tableSet = true; return this; } @Override public OFTableStatus build() { long xid = this.xidSet ? this.xid : parentMessage.xid; OFTableReason reason = this.reasonSet ? this.reason : parentMessage.reason; if(reason == null) throw new NullPointerException("Property reason must not be null"); OFTableDesc table = this.tableSet ? this.table : parentMessage.table; if(table == null) throw new NullPointerException("Property table must not be null"); // return new OFTableStatusVer15( xid, reason, table ); } } static class Builder implements OFTableStatus.Builder { // OF message fields private boolean xidSet; private long xid; private boolean reasonSet; private OFTableReason reason; private boolean tableSet; private OFTableDesc table; @Override public OFVersion getVersion() { return OFVersion.OF_15; } @Override public OFType getType() { return OFType.TABLE_STATUS; } @Override public long getXid() { return xid; } @Override public OFTableStatus.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } @Override public long getRole()throws UnsupportedOperationException { throw new UnsupportedOperationException("Property role not supported in version 1.5"); } @Override public OFTableStatus.Builder setRole(long role) throws UnsupportedOperationException { throw new UnsupportedOperationException("Property role not supported in version 1.5"); } @Override public OFTableReason getReason() { return reason; } @Override public OFTableStatus.Builder setReason(OFTableReason reason) { this.reason = reason; this.reasonSet = true; return this; } 
@Override
        public OFTableDesc getTable() {
            return table;
        }

        @Override
        public OFTableStatus.Builder setTable(OFTableDesc table) {
            this.table = table;
            this.tableSet = true;
            return this;
        }

        // Builds the message; reason and table have no defaults and must have been set.
        @Override
        public OFTableStatus build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            if (!this.reasonSet)
                throw new IllegalStateException("Property reason doesn't have default value -- must be set");
            if (reason == null)
                throw new NullPointerException("Property reason must not be null");
            if (!this.tableSet)
                throw new IllegalStateException("Property table doesn't have default value -- must be set");
            if (table == null)
                throw new NullPointerException("Property table must not be null");

            return new OFTableStatusVer15(
                    xid,
                    reason,
                    table
                );
        }
    }

    final static Reader READER = new Reader();

    /** Deserializes an OF 1.5 TABLE_STATUS message from the wire format. */
    static class Reader implements OFMessageReader<OFTableStatus> {
        @Override
        public OFTableStatus readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 6
            byte version = bb.readByte();
            if (version != (byte) 0x6)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_15(6), got=" + version);
            // fixed value property type == 31
            byte type = bb.readByte();
            if (type != (byte) 0x1f)
                throw new OFParseError("Wrong type: Expected=OFType.TABLE_STATUS(31), got=" + type);
            int length = U16.f(bb.readShort());
            if (length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if (bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet; rewind so the caller can retry.
                bb.readerIndex(start);
                return null;
            }
            if (logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            OFTableReason reason = OFTableReasonSerializerVer15.readFrom(bb);
            // pad: 7 bytes
            bb.skipBytes(7);
            OFTableDesc table = OFTableDescVer15.READER.readFrom(bb);

            OFTableStatusVer15 tableStatusVer15 = new OFTableStatusVer15(
                    xid,
                    reason,
                    table
                );
            if (logger.isTraceEnabled())
                logger.trace("readFrom - read={}", tableStatusVer15);
            return tableStatusVer15;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFTableStatusVer15Funnel FUNNEL = new OFTableStatusVer15Funnel();

    /** Feeds the message's identity (excluding the length field) into a hash sink. */
    static class OFTableStatusVer15Funnel implements Funnel<OFTableStatusVer15> {
        private static final long serialVersionUID = 1L;

        @Override
        public void funnel(OFTableStatusVer15 message, PrimitiveSink sink) {
            // fixed value property version = 6
            sink.putByte((byte) 0x6);
            // fixed value property type = 31
            sink.putByte((byte) 0x1f);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            OFTableReasonSerializerVer15.putTo(message.reason, sink);
            // skip pad (7 bytes)
            message.table.putTo(sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Serializes the message, back-patching the 16-bit length field once the body is written. */
    static class Writer implements OFMessageWriter<OFTableStatusVer15> {
        @Override
        public void write(ByteBuf bb, OFTableStatusVer15 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 6
            bb.writeByte((byte) 0x6);
            // fixed value property type = 31
            bb.writeByte((byte) 0x1f);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            OFTableReasonSerializerVer15.writeTo(bb, message.reason);
            // pad: 7 bytes
            bb.writeZero(7);
            message.table.writeTo(bb);

            // update length field
            int length = bb.writerIndex() - startIndex;
            if (length > MAXIMUM_LENGTH) {
                throw new IllegalArgumentException("OFTableStatusVer15: message length (" + length + ") exceeds maximum (0xFFFF)");
            }
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFTableStatusVer15(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("reason=").append(reason);
        b.append(", ");
        b.append("table=").append(table);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFTableStatusVer15 other = (OFTableStatusVer15) obj;

        if (xid != other.xid)
            return false;
        if (reason == null) {
            if (other.reason != null)
                return false;
        } else if (!reason.equals(other.reason))
            return false;
        if (table == null) {
            if (other.table != null)
                return false;
        } else if (!table.equals(other.table))
            return false;
        return true;
    }

    @Override
    public boolean equalsIgnoreXid(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFTableStatusVer15 other = (OFTableStatusVer15) obj;

        // ignore XID
        if (reason == null) {
            if (other.reason != null)
                return false;
        } else if (!reason.equals(other.reason))
            return false;
        if (table == null) {
            if (other.table != null)
                return false;
        } else if (!table.equals(other.table))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // BUGFIX: was `result = prime * (int) (xid ^ (xid >>> 32));` — the running
        // accumulator (`result +`) was dropped for the xid term, breaking the standard
        // 31*result+field pattern used by the other terms below and by hashCodeIgnoreXid().
        result = prime * result + (int) (xid ^ (xid >>> 32));
        result = prime * result + ((reason == null) ? 0 : reason.hashCode());
        result = prime * result + ((table == null) ? 0 : table.hashCode());
        return result;
    }

    @Override
    public int hashCodeIgnoreXid() {
        final int prime = 31;
        int result = 1;

        // ignore XID
        result = prime * result + ((reason == null) ? 0 : reason.hashCode());
        result = prime * result + ((table == null) ? 0 : table.hashCode());
        return result;
    }
}
package de.davherrmann.immutable;

import static com.google.common.collect.Lists.newArrayList;
import static de.davherrmann.immutable.NextImmutable.IMMUTABLE_NODE;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

import java.util.List;
import java.util.Map;

import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

/** Tests for {@code Immutable}: setting, updating, diffing and merging immutable state trees. */
public class ImmutableTest
{
    @Rule
    public final ExpectedException thrown = ExpectedException.none();

    private final Immutable<POJO> immutable = new Immutable<>(POJO.class);
    private final POJO path = immutable.path();
    private final POJO pojo = immutable.asObject();

    @Ignore("some room for ideas")
    @Test
    public void someRoomForIdeas() throws Exception
    {
        immutable //
            .in(path::title).set("Test") //
            .in(path::wantToClose).set(true) //
            .in(path.pojo()::wantToClose).update(wantToClose -> !wantToClose) //
            .in(path::title).update(title -> title + "!");

        immutable.in(path::currentPage).update(page -> page + 1);

        // TODO offer a map set function?
        // immutable.in(path.myMap(), "key").set("value");

        // TODO offer a merge function?

        // TODO set and update without in?
        // immutable.set(path::currentPage, 0);
        // immutable.update(path::currentPage, page -> page + 1);
        // immutable.setList(path::list, list);
        // immutable.updateList(path::list, list -> list.add(""));

        fail("please add @ignore and write another test - this is just some room for ideas!");
    }

    @Test
    public void asObject_returnsObject() throws Exception
    {
        // given / then
        assertNotNull(pojo);
    }

    @Test
    public void get_returnsDefaultBooleanValue() throws Exception
    {
        // given / then
        assertThat(immutable.asObject().wantToClose(), is(false));
    }

    @Test
    public void set_changesBooleanValue() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.in(path::wantToClose).set(true);

        // then
        assertThat(newImmutable.asObject().wantToClose(), is(true));
    }

    @Test
    public void update_changesBooleanValue() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.in(path::wantToClose).update(value -> !value);

        // then
        assertThat(newImmutable.asObject().wantToClose(), is(true));
    }

    @Test
    public void set_doesNotChangeCurrentObject() throws Exception
    {
        // given / when
        immutable.in(path::wantToClose).set(true);

        // then
        assertThat(pojo.wantToClose(), is(false));
    }

    @Test
    public void set_returnsNewImmutable2() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.in(path::wantToClose).set(false);

        // then
        assertThat(newImmutable, is(not(immutable)));
    }

    @Test
    public void set_returnsImmutable2_withSamePath() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.in(path::wantToClose).set(false);

        // then
        assertThat(immutable.path(), is(newImmutable.path()));
    }

    @Test
    public void set_inNestedObject_changesBooleanValue() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.in(path.pojo()::wantToClose).set(true);

        // then
        assertThat(newImmutable.asObject().wantToClose(), is(false));
        assertThat(newImmutable.asObject().pojo().wantToClose(), is(true));
    }

    @Test
    public void equals_returnsTrue_forEqualImmutables() throws Exception
    {
        // given / then
        assertThat(new Immutable<>(POJO.class), is(new Immutable<>(POJO.class)));
    }

    @Test
    public void diff_returnsChanges() throws Exception
    {
        // given
        final Immutable<POJO> newImmutable = immutable.in(path.pojo()::wantToClose).set(true);
        // TODO should you be able to pass a PathRecorder into constructor?
        // TODO -> one "unnecessary" variable: initialDiffImmutable
        final Immutable<POJO> initialDiffImmutable = new Immutable<>(POJO.class);
        final POJO diffPath = initialDiffImmutable.path();
        final Immutable<POJO> diffImmutable = initialDiffImmutable.in(diffPath.pojo()::wantToClose).set(true);

        // when
        final Immutable<POJO> diff = immutable.diff(newImmutable);

        // then
        assertThat(diff, is(diffImmutable));
    }

    @Test
    public void changeWithOwnUnusedPath_usingPathFromOtherImmutableInSameThread_changesImmutable() throws Exception
    {
        // when
        final Immutable<POJO> newImmutable = new Immutable<>(POJO.class).in(path::wantToClose).set(true);

        // then
        assertThat(newImmutable.get(path::wantToClose), is(true));
    }

    @Test
    public void clear_returnsEmptyState() throws Exception
    {
        // when
        final Immutable<POJO> clearedImmutable = immutable //
            .in(path::wantToClose).set(true) //
            .clear();

        // then
        assertThat(clearedImmutable.values(), is(IMMUTABLE_NODE));
    }

    @Test
    public void setIn_overwriteCustomType() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable //
            .in(path.name()::firstname).set("Foo") //
            .in(path.name()::lastname).set("Bar") //
            .in(path::name).set(name("F", "B").asObject());

        // then
        assertThat(newImmutable.asObject().name().firstname(), is("F"));
        assertThat(newImmutable.asObject().name().lastname(), is("B"));
    }

    @Test
    public void setIn_inCustomTypeObject() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable //
            .in(path::name).set(name("F", "B").asObject()) //
            .in(path.name()::firstname).set("Foo");

        // then
        assertThat(newImmutable.asObject().name().firstname(), is("Foo"));
        assertThat(newImmutable.asObject().name().lastname(), is("B"));
    }

    @Test
    public void get_returnsCorrectList_whenImmutableListWasSet() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.inList(path::titles).set(new ImmutableList<String>() //
            .add("foo").add("bar"));

        // then
        assertThat(newImmutable.asObject().titles(), is(newArrayList("foo", "bar")));
    }

    @Test
    public void get_returnsCorrectList_whenListWasSet() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.inList(path::titles).set(newArrayList("foo", "bar"));

        // then
        assertThat(newImmutable.asObject().titles(), is(newArrayList("foo", "bar")));
    }

    @Test
    public void get_returnsUpdatedList_whenListWasUpdated() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.inList(path::titles).updateList(
            list -> newArrayList("foo", "bar"));

        // then
        assertThat(newImmutable.asObject().titles(), is(newArrayList("foo", "bar")));
    }

    @Test
    public void get_returnsUpdatedList_whenImmutableListWasUpdated() throws Exception
    {
        // given / when
        final Immutable<POJO> newImmutable = immutable.inList(path::titles).update(list -> list.add("foo").add("bar"));

        // then
        assertThat(newImmutable.asObject().titles(), is(newArrayList("foo", "bar")));
    }

    @Test
    public void get_returnsClonedArray_whenMutableArrayWasSet() throws Exception
    {
        // given
        final Immutable<POJO> newImmutable = immutable.in(path::titleArray).set(new String[]{"foo", "bar"});

        // when
        newImmutable.asObject().titleArray()[1] = "baz";

        // then
        assertThat(newImmutable.asObject().titleArray(), is(new String[]{"foo", "bar"}));
    }

    @Test
    public void changingASetMutableArray_doesNotChangeImmutable() throws Exception
    {
        // given
        final String[] array = {"foo", "bar"};
        final Immutable<POJO> newImmutable = immutable.in(path::titleArray).set(array);

        // when
        array[1] = "baz";

        // then
        assertThat(newImmutable.asObject().titleArray(), is(new String[]{"foo", "bar"}));
    }

    @Test
    public void diff_ofImmutablesWithEqualStringSet_shouldBeEmpty() throws Exception
    {
        // given
        final Immutable<POJO> immutable1 = immutable.in(path::title).set("foo");
        final Immutable<POJO> immutable2 = immutable.in(path::title).set("foo");

        // when
        final Immutable<POJO> diff = immutable1.diff(immutable2);

        // then
        assertThat(diff.values(), is(IMMUTABLE_NODE));
    }

    @Test
    public void diff_ofImmutablesWithEqualArrays_shouldBeEmpty() throws Exception
    {
        // given
        final Immutable<POJO> immutable1 = immutable.in(path::titleArray).set(new String[]{"foo", "bar"});
        final Immutable<POJO> immutable2 = immutable.in(path::titleArray).set(new String[]{"foo", "bar"});

        // when
        final Immutable<POJO> diff = immutable1.diff(immutable2);

        // then
        assertThat(diff.values(), is(IMMUTABLE_NODE));
    }

    @Test
    public void setIn_worksWithPathMapping() throws Exception
    {
        // given / when
        final Immutable<POJO> initialisedImmutable = immutable //
            .in(path -> path.pojo()::title) //
            .set("Foo");

        // then
        assertThat(initialisedImmutable.asObject().pojo().title(), is("Foo"));
    }

    @Test
    public void setInList_worksWithPathMapping() throws Exception
    {
        // given / when
        final Immutable<POJO> initialisedImmutable = immutable //
            .inList(path -> path::titles) //
            .set(newArrayList("Foo"));

        // then
        assertThat(initialisedImmutable.asObject().titles(), is(newArrayList("Foo")));
    }

    @Test
    public void get_returnsSetValue() throws Exception
    {
        // given
        final Immutable<POJO> newImmutable = immutable.in(p -> p::title).set("Foo");

        // when / then
        assertThat(newImmutable.get(path::title), is("Foo"));
    }

    @Test
    public void get_returnsSetValue_withPathMapping() throws Exception
    {
        // given
        final Immutable<POJO> newImmutable = immutable.inList(p -> p::titles).update(l -> l.add("Foo"));

        // when / then
        assertThat(newImmutable.get(path -> path::titles), is(newArrayList("Foo")));
    }

    @Test
    public void merge_combinesTwoImmutables() throws Exception
    {
        // given
        final Immutable<POJO> immutable0 = immutable.in(path::wantToClose).set(true);
        final Immutable<POJO> immutable1 = immutable.in(path.pojo()::title).set("Foo");

        // when
        final Immutable<POJO> mergedImmutable = immutable0.merge(immutable1);

        // then
        final Immutable<POJO> manuallyMergedImmutable = immutable0.in(path.pojo()::title).set("Foo");
        assertEquals(manuallyMergedImmutable, mergedImmutable);
    }

    @Test
    public void type_returnsCorrectType() throws Exception
    {
        // when / then
        assertThat(immutable.type(), equalTo(POJO.class));
    }

    // TODO write test for this commit! Immutable accessed from another thread, wrong PathRecorder!

    private Immutable<POJO.Name> name(String firstname, String lastname)
    {
        return new Immutable<>(POJO.Name.class) //
            .in(p -> p::firstname).set(firstname) //
            .in(p -> p::lastname).set(lastname);
    }

    private interface POJO
    {
        String title();

        List<String> titles();

        String[] titleArray();

        boolean wantToClose();

        int currentPage();

        // TODO add map support
        Map<String, String> myMap();

        POJO pojo();

        Name name();

        interface Name
        {
            String firstname();

            String lastname();
        }
    }
}
package net.bytebuddy.description.type;

import net.bytebuddy.description.annotation.AnnotationDescription;
import net.bytebuddy.description.annotation.AnnotationList;
import net.bytebuddy.test.utility.JavaVersionRule;
import net.bytebuddy.test.utility.MockitoRule;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.MethodRule;
import org.junit.rules.TestRule;
import org.mockito.Mock;

import java.lang.annotation.ElementType;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.*;

/**
 * Tests {@code ForTypeAnnotations}: a type is rejected when it carries an annotation that is
 * not a type annotation ("illegal"), or the same annotation type twice ("duplicate"), and the
 * validator must recurse into component types, owner types, type arguments and bounds.
 */
public class TypeDescriptionGenericVisitorValidatorForTypeAnnotations {

    @Rule
    public MethodRule javaVersionRule = new JavaVersionRule();

    @Rule
    public TestRule mockitoRule = new MockitoRule(this);

    @Mock
    private AnnotationDescription legalAnnotation, illegalAnnotation, duplicateAnnotation;

    @Mock
    private TypeDescription legalType, illegalType;

    @Mock
    private TypeDescription.Generic legal, illegal, duplicate, otherLegal, otherIllegal;

    @Before
    @SuppressWarnings({"unchecked", "rawtypes"})
    public void setUp() throws Exception {
        when(otherLegal.accept(any(TypeDescription.Generic.Visitor.class))).thenReturn(true);
        when(otherIllegal.accept(any(TypeDescription.Generic.Visitor.class))).thenReturn(false);
        when(illegal.getDeclaredAnnotations()).thenReturn(new AnnotationList.Explicit(illegalAnnotation));
        when(illegalAnnotation.getElementTypes()).thenReturn(new HashSet<ElementType>());
        when(illegalAnnotation.getAnnotationType()).thenReturn(illegalType);
        when(otherLegal.asGenericType()).thenReturn(otherLegal);
        when(otherIllegal.asGenericType()).thenReturn(otherIllegal);
        try {
            // TYPE_USE / TYPE_PARAMETER only exist from Java 8 on; fall back to empty sets otherwise.
            Enum<?> typeUse = Enum.valueOf(ElementType.class, "TYPE_USE");
            Enum<?> typeParameter = Enum.valueOf(ElementType.class, "TYPE_PARAMETER");
            when(legalAnnotation.getElementTypes()).thenReturn(new HashSet(Arrays.asList(typeUse, typeParameter)));
            when(duplicateAnnotation.getElementTypes()).thenReturn(new HashSet(Arrays.asList(typeUse, typeParameter)));
        } catch (IllegalArgumentException ignored) {
            when(legalAnnotation.getElementTypes()).thenReturn(Collections.<ElementType>emptySet());
            when(duplicateAnnotation.getElementTypes()).thenReturn(Collections.<ElementType>emptySet());
        }
        when(legal.getDeclaredAnnotations()).thenReturn(new AnnotationList.Explicit(legalAnnotation));
        when(duplicate.getDeclaredAnnotations()).thenReturn(new AnnotationList.Explicit(legalAnnotation, duplicateAnnotation));
        when(legalAnnotation.getAnnotationType()).thenReturn(legalType);
        when(duplicateAnnotation.getAnnotationType()).thenReturn(legalType);
    }

    @Test
    public void testIllegalGenericArray() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onGenericArray(illegal), is(false));
    }

    @Test
    public void testDuplicateGenericArray() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onGenericArray(duplicate), is(false));
    }

    @Test
    public void testIllegalDelegatedGenericArray() throws Exception {
        when(legal.getComponentType()).thenReturn(otherIllegal);
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onGenericArray(legal), is(false));
    }

    @Test
    @JavaVersionRule.Enforce(8)
    public void testLegalGenericArray() throws Exception {
        when(legal.getComponentType()).thenReturn(otherLegal);
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onGenericArray(legal), is(true));
        verify(otherLegal).accept(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE);
    }

    @Test
    public void testIllegalNonGenericArray() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onNonGenericType(illegal), is(false));
    }

    @Test
    public void testDuplicateNonGenericArray() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onNonGenericType(duplicate), is(false));
    }

    @Test
    public void testIllegalDelegatedNonGenericArray() throws Exception {
        when(legal.isArray()).thenReturn(true);
        when(legal.getComponentType()).thenReturn(otherIllegal);
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onNonGenericType(legal), is(false));
    }

    @Test
    @JavaVersionRule.Enforce(8)
    public void testLegalNonGenericArray() throws Exception {
        when(legal.isArray()).thenReturn(true);
        when(legal.getComponentType()).thenReturn(otherLegal);
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onNonGenericType(legal), is(true));
        verify(otherLegal).accept(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE);
    }

    @Test
    public void testIllegalNonGeneric() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onNonGenericType(illegal), is(false));
    }

    @Test
    public void testDuplicateNonGeneric() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onNonGenericType(duplicate), is(false));
    }

    @Test
    @JavaVersionRule.Enforce(8)
    public void testLegalNonGeneric() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onNonGenericType(legal), is(true));
    }

    @Test
    public void testIllegalTypeVariable() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onTypeVariable(illegal), is(false));
    }

    @Test
    public void testDuplicateTypeVariable() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onTypeVariable(duplicate), is(false));
    }

    @Test
    @JavaVersionRule.Enforce(8)
    public void testLegalTypeVariable() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onTypeVariable(legal), is(true));
    }

    @Test
    public void testIllegalParameterized() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onParameterizedType(illegal), is(false));
    }

    @Test
    public void testDuplicateParameterized() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onParameterizedType(duplicate), is(false));
    }

    @Test
    public void testIllegalDelegatedOwnerTypeParameterized() throws Exception {
        when(legal.getOwnerType()).thenReturn(otherIllegal);
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onParameterizedType(legal), is(false));
    }

    @Test
    public void testIllegalDelegatedTypeArgumentParameterized() throws Exception {
        when(legal.getTypeArguments()).thenReturn(new TypeList.Generic.Explicit(otherIllegal));
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onParameterizedType(legal), is(false));
    }

    @Test
    public void testIllegalDuplicateParameterized() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onParameterizedType(duplicate), is(false));
    }

    @Test
    @JavaVersionRule.Enforce(8)
    public void testLegalParameterized() throws Exception {
        when(legal.isArray()).thenReturn(true);
        when(legal.getTypeArguments()).thenReturn(new TypeList.Generic.Explicit(otherLegal));
        when(legal.getOwnerType()).thenReturn(otherLegal);
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onParameterizedType(legal), is(true));
        verify(otherLegal, times(2)).accept(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE);
    }

    @Test
    public void testWildcardIllegal() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onWildcard(illegal), is(false));
    }

    @Test
    public void testWildcardDuplicate() throws Exception {
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onWildcard(duplicate), is(false));
    }

    @Test
    public void testWildcardIllegalUpperBounds() throws Exception {
        when(legal.getUpperBounds()).thenReturn(new TypeList.Generic.Explicit(otherIllegal));
        when(legal.getLowerBounds()).thenReturn(new TypeList.Generic.Empty());
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onWildcard(legal), is(false));
    }

    @Test
    public void testWildcardIllegalLowerBounds() throws Exception {
        when(legal.getUpperBounds()).thenReturn(new TypeList.Generic.Explicit(TypeDescription.Generic.OBJECT));
        when(legal.getLowerBounds()).thenReturn(new TypeList.Generic.Explicit(otherIllegal));
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onWildcard(legal), is(false));
    }

    @Test
    @JavaVersionRule.Enforce(8)
    public void testWildcardLegal() throws Exception {
        when(legal.getUpperBounds()).thenReturn(new TypeList.Generic.Explicit(TypeDescription.Generic.OBJECT));
        when(legal.getLowerBounds()).thenReturn(new TypeList.Generic.Explicit(otherLegal));
        assertThat(TypeDescription.Generic.Visitor.Validator.ForTypeAnnotations.INSTANCE.onWildcard(legal), is(true));
    }
}
package butterknife.compiler; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import com.squareup.javapoet.TypeVariableName; import com.squareup.javapoet.WildcardTypeName; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.lang.model.element.Modifier; import butterknife.internal.ListenerClass; import butterknife.internal.ListenerMethod; import static butterknife.compiler.ButterKnifeProcessor.VIEW_TYPE; import static java.util.Collections.singletonList; import static javax.lang.model.element.Modifier.FINAL; import static javax.lang.model.element.Modifier.PRIVATE; import static javax.lang.model.element.Modifier.PROTECTED; import static javax.lang.model.element.Modifier.PUBLIC; import static javax.lang.model.element.Modifier.STATIC; final class BindingClass { private static final ClassName FINDER = ClassName.get("butterknife.internal", "Finder"); private static final ClassName VIEW_BINDER = ClassName.get("butterknife.internal", "ViewBinder"); private static final ClassName UTILS = ClassName.get("butterknife.internal", "Utils"); private static final ClassName VIEW = ClassName.get("android.view", "View"); private static final ClassName CONTEXT = ClassName.get("android.content", "Context"); private static final ClassName RESOURCES = ClassName.get("android.content.res", "Resources"); private static final ClassName THEME = RESOURCES.nestedClass("Theme"); private static final ClassName UNBINDER = ClassName.get("butterknife", "Unbinder"); private static final ClassName BITMAP_FACTORY = 
ClassName.get("android.graphics", "BitmapFactory"); private static final String UNBINDER_SIMPLE_NAME = "InnerUnbinder"; private static final String BIND_TO_TARGET = "bindToTarget"; private final Map<String, ViewBindings> viewIdMap = new LinkedHashMap<>(); private final Map<FieldCollectionViewBinding, String[]> collectionBindings = new LinkedHashMap<>(); private final List<FieldBitmapBinding> bitmapBindings = new ArrayList<>(); private final List<FieldDrawableBinding> drawableBindings = new ArrayList<>(); private final List<FieldResourceBinding> resourceBindings = new ArrayList<>(); private final boolean isFinal; private final TypeName targetTypeName; private final ClassName generatedClassName; private final ClassName unbinderClassName; private BindingClass parentBinding; BindingClass(TypeName targetTypeName, ClassName generatedClassName, boolean isFinal) { this.isFinal = isFinal; this.targetTypeName = targetTypeName; this.generatedClassName = generatedClassName; this.unbinderClassName = generatedClassName.nestedClass(UNBINDER_SIMPLE_NAME); } void addBitmap(FieldBitmapBinding binding) { bitmapBindings.add(binding); } void addDrawable(FieldDrawableBinding binding) { drawableBindings.add(binding); } void addField(String id, FieldViewBinding binding) { getOrCreateViewBindings(id).setFieldBinding(binding); } void addFieldCollection(String[] ids, FieldCollectionViewBinding binding) { collectionBindings.put(binding, ids); } boolean addMethod( String id, ListenerClass listener, ListenerMethod method, MethodViewBinding binding) { ViewBindings viewBindings = getOrCreateViewBindings(id); if (viewBindings.hasMethodBinding(listener, method) && !"void".equals(method.returnType())) { return false; } viewBindings.addMethodBinding(listener, method, binding); return true; } void addResource(FieldResourceBinding binding) { resourceBindings.add(binding); } void setParent(BindingClass parent) { this.parentBinding = parent; } ViewBindings getViewBinding(String id) { return 
viewIdMap.get(id);
  }

  // Returns the bindings for the given view id, creating and registering an empty
  // ViewBindings on first use.
  private ViewBindings getOrCreateViewBindings(String id) {
    ViewBindings viewId = viewIdMap.get(id);
    if (viewId == null) {
      viewId = new ViewBindings(id);
      viewIdMap.put(id, viewId);
    }
    return viewId;
  }

  // Builds the complete generated ViewBinder source file for this target type.
  // Final targets get a concrete class; non-final targets get a type variable T so
  // subclasses of the target can reuse the binder.
  JavaFile brewJava() {
    TypeSpec.Builder result = TypeSpec.classBuilder(generatedClassName)
        .addModifiers(PUBLIC);
    if (isFinal) {
      result.addModifiers(Modifier.FINAL);
    } else {
      result.addTypeVariable(TypeVariableName.get("T", targetTypeName));
    }
    TypeName targetType = isFinal ? targetTypeName : TypeVariableName.get("T");
    if (hasParentBinding()) {
      // Inherit the parent's generated binder so parent bindings run too.
      result.superclass(ParameterizedTypeName.get(parentBinding.generatedClassName, targetType));
    } else {
      result.addSuperinterface(ParameterizedTypeName.get(VIEW_BINDER, targetType));
    }
    result.addMethod(createBindMethod(targetType));
    if (isGeneratingUnbinder()) {
      result.addType(createUnbinderClass(targetType));
    } else if (!isFinal) {
      // Non-final targets expose a static bindToTarget so subclass binders can delegate.
      result.addMethod(createBindToTargetMethod());
    }
    return JavaFile.builder(generatedClassName.packageName(), result.build())
        .addFileComment("Generated code from Butter Knife. Do not modify!")
        .build();
  }

  // Emits the nested Unbinder implementation that clears field references and
  // unregisters listeners when unbind() is called.
  private TypeSpec createUnbinderClass(TypeName targetType) {
    TypeSpec.Builder result = TypeSpec.classBuilder(unbinderClassName.simpleName())
        .addModifiers(isFinal ? PRIVATE : PROTECTED, STATIC);
    if (isFinal) {
      result.addModifiers(Modifier.FINAL);
    } else {
      result.addTypeVariable(TypeVariableName.get("T", targetTypeName));
    }
    if (hasInheritedUnbinder()) {
      // A parent already declares the target field and implements Unbinder.
      result.superclass(ParameterizedTypeName.get(getInheritedUnbinder(), targetType));
    } else {
      result.addSuperinterface(UNBINDER);
      result.addField(targetType, "target", isFinal ? PRIVATE : PROTECTED);
    }
    result.addMethod(createUnbinderConstructor(targetType));
    if (hasViewBindings()) {
      result.addMethod(createUnbindInterfaceMethod(result, targetType));
    }
    return result.build();
  }

  // Emits the unbinder's constructor, which performs the actual view binding work.
  // Parameters are added only if the generated body needs them (finder/res/theme).
  private MethodSpec createUnbinderConstructor(TypeName targetType) {
    MethodSpec.Builder constructor = MethodSpec.constructorBuilder();
    if (!isFinal) {
      constructor.addModifiers(PROTECTED);
    }
    if (hasMethodBindings()) {
      // 'target' is captured by anonymous listener classes, so it must be final.
      constructor.addParameter(targetType, "target", FINAL);
    } else {
      constructor.addParameter(targetType, "target");
    }
    if (bindNeedsFinder()) {
      if (methodBindingsNeedFinder()) {
        constructor.addParameter(FINDER, "finder", FINAL);
      } else {
        constructor.addParameter(FINDER, "finder");
      }
      constructor.addParameter(Object.class, "source");
    }
    if (bindNeedsResources()) {
      constructor.addParameter(RESOURCES, "res");
    }
    if (bindNeedsTheme()) {
      constructor.addParameter(THEME, "theme");
    }
    if (hasInheritedUnbinder()) {
      // Delegate to the parent unbinder's constructor with only the args it needs.
      CodeBlock.Builder invoke = CodeBlock.builder();
      invoke.add("super(target");
      if (parentBinding.bindNeedsFinder()) invoke.add(", finder, source");
      if (parentBinding.bindNeedsResources()) invoke.add(", res");
      if (parentBinding.bindNeedsTheme()) invoke.add(", theme");
      constructor.addStatement("$L", invoke.add(")").build());
    } else {
      constructor.addStatement("this.target = target");
    }
    constructor.addCode("\n");
    generateBindViewBody(constructor);
    return constructor.build();
  }

  // Emits the Unbinder.unbind() override: null-guards double unbinds, clears bound
  // fields, removes listeners, then releases the target reference.
  private MethodSpec createUnbindInterfaceMethod(TypeSpec.Builder unbinderClass,
      TypeName targetType) {
    MethodSpec.Builder result = MethodSpec.methodBuilder("unbind")
        .addAnnotation(Override.class)
        .addModifiers(PUBLIC);
    boolean rootUnbinderWithFields = !hasInheritedUnbinder() && hasFieldBindings();
    if (hasFieldBindings() || rootUnbinderWithFields) {
      result.addStatement("$T target = this.target", targetType);
    }
    if (!hasInheritedUnbinder()) {
      // NOTE(review): when the local 'target' copy exists, check it instead of the field.
      String target = rootUnbinderWithFields ? "target" : "this.target";
      result.addStatement("if ($N == null) throw new $T($S)", target, IllegalStateException.class,
          "Bindings already cleared.");
    } else {
      // The parent unbinder performs the double-unbind check.
      result.addStatement("super.unbind()");
    }
    if (hasFieldBindings()) {
      result.addCode("\n");
      for (ViewBindings bindings : viewIdMap.values()) {
        if (bindings.getFieldBinding() != null) {
          result.addStatement("target.$L = null", bindings.getFieldBinding().getName());
        }
      }
      for (FieldCollectionViewBinding fieldCollectionBinding : collectionBindings.keySet()) {
        result.addStatement("target.$L = null", fieldCollectionBinding.getName());
      }
    }
    if (hasMethodBindings()) {
      result.addCode("\n");
      for (ViewBindings bindings : viewIdMap.values()) {
        addFieldAndUnbindStatement(unbinderClass, result, bindings);
      }
    }
    if (!hasInheritedUnbinder()) {
      result.addCode("\n");
      result.addStatement("this.target = null");
    }
    return result.build();
  }

  // For one view's listener bindings: adds the view (and listener) fields to the
  // unbinder class and emits the statements that detach those listeners in unbind().
  private void addFieldAndUnbindStatement(TypeSpec.Builder result, MethodSpec.Builder unbindMethod,
      ViewBindings bindings) {
    // Only add fields to the unbinder if there are method bindings.
    Map<ListenerClass, Map<ListenerMethod, Set<MethodViewBinding>>> classMethodBindings =
        bindings.getMethodBindings();
    if (classMethodBindings.isEmpty()) {
      return;
    }
    String fieldName = "target";
    if (!bindings.isBoundToRoot()) {
      // e.g. "android.R.id.button1" -> "androidRidbutton1"; keeps the name a legal identifier.
      fieldName = bindings.getId().replace(".", "");
      result.addField(VIEW, fieldName, PRIVATE);
    }
    // We only need to emit the null check if there are zero required bindings.
    boolean needsNullChecked = bindings.getRequiredBindings().isEmpty();
    if (needsNullChecked) {
      unbindMethod.beginControlFlow("if ($N != null)", fieldName);
    }
    for (ListenerClass listenerClass : classMethodBindings.keySet()) {
      // We need to keep a reference to the listener
      // in case we need to unbind it via a remove method.
      boolean requiresRemoval = !"".equals(listenerClass.remover());
      String listenerField = "null";
      if (requiresRemoval) {
        TypeName listenerClassName = bestGuess(listenerClass.type());
        listenerField = fieldName + ((ClassName) listenerClassName).simpleName();
        result.addField(listenerClassName, listenerField, PRIVATE);
      }
      if (!VIEW_TYPE.equals(listenerClass.targetType())) {
        // The setter/remover lives on a subtype of View; cast before calling.
        unbindMethod.addStatement("(($T) $N).$N($N)", bestGuess(listenerClass.targetType()),
            fieldName, removerOrSetter(listenerClass, requiresRemoval), listenerField);
      } else {
        unbindMethod.addStatement("$N.$N($N)", fieldName,
            removerOrSetter(listenerClass, requiresRemoval), listenerField);
      }
      if (requiresRemoval) {
        unbindMethod.addStatement("$N = null", listenerField);
      }
    }
    if (!bindings.isBoundToRoot()) {
      unbindMethod.addStatement("$N = null", fieldName);
    }
    if (needsNullChecked) {
      unbindMethod.endControlFlow();
    }
  }

  // Chooses the method used to detach a listener: a dedicated remover when one
  // exists, otherwise the setter (called with null).
  private String removerOrSetter(ListenerClass listenerClass, boolean requiresRemoval) {
    return requiresRemoval ? listenerClass.remover() : listenerClass.setter();
  }

  // Emits the ViewBinder.bind() override. Depending on configuration it either
  // inlines the binding body (final target, no unbinder), constructs the nested
  // unbinder, or delegates to the static bindToTarget helper.
  private MethodSpec createBindMethod(TypeName targetType) {
    MethodSpec.Builder result = MethodSpec.methodBuilder("bind")
        .addAnnotation(Override.class)
        .addModifiers(PUBLIC)
        .returns(UNBINDER)
        .addParameter(FINDER, "finder");
    if (isFinal && hasMethodBindings()) {
      // 'target' is captured by anonymous listener classes when inlined here.
      result.addParameter(targetType, "target", FINAL);
    } else {
      result.addParameter(targetType, "target");
    }
    result.addParameter(Object.class, "source");
    boolean needsFinder = bindNeedsFinder();
    boolean needsResources = bindNeedsResources();
    boolean needsTheme = bindNeedsTheme();
    if (needsResources) {
      if (needsTheme) {
        result.addStatement("$T context = finder.getContext(source)", CONTEXT);
        result.addStatement("$T res = context.getResources()", RESOURCES);
        result.addStatement("$T theme = context.getTheme()", THEME);
      } else {
        result.addStatement("$T res = finder.getContext(source).getResources()", RESOURCES);
      }
    }
    if (isFinal && !isGeneratingUnbinder()) {
      // Resource-only bindings on a final target: inline the body directly.
      if (needsResources) {
        result.addCode("\n");
      }
      generateBindViewBody(result);
      result.addCode("\n");
    }
    CodeBlock.Builder invoke = CodeBlock.builder();
    if (isGeneratingUnbinder()) {
      invoke.add("return new $T", unbinderClassName);
    } else if (!isFinal) {
      invoke.add("$N", BIND_TO_TARGET);
    }
    if (isGeneratingUnbinder() || !isFinal) {
      invoke.add("(target");
      if (needsFinder) invoke.add(", finder, source");
      if (needsResources) invoke.add(", res");
      if (needsTheme) invoke.add(", theme");
      result.addStatement("$L", invoke.add(")").build());
    }
    if (!isGeneratingUnbinder()) {
      // Nothing to undo later; hand back the shared no-op unbinder.
      result.addStatement("return $T.EMPTY", UNBINDER);
    }
    return result.build();
  }

  // Emits the static bindToTarget helper used by subclass binders of a non-final
  // target that has no unbinder of its own.
  private MethodSpec createBindToTargetMethod() {
    MethodSpec.Builder result = MethodSpec.methodBuilder(BIND_TO_TARGET)
        .addModifiers(PROTECTED, STATIC);
    if (hasMethodBindings()) {
      result.addParameter(targetTypeName, "target", FINAL);
    } else {
      result.addParameter(targetTypeName, "target");
    }
    if (bindNeedsResources()) {
      result.addParameter(RESOURCES, "res");
    }
    if (bindNeedsTheme()) {
      result.addParameter(THEME, "theme");
    }
    generateBindViewBody(result);
    return result.build();
  }

  // Emits the shared binding body: parent delegation, per-view bindings, collection
  // bindings, then bitmap/drawable/resource bindings. Used by both the unbinder
  // constructor and bindToTarget/bind.
  private void generateBindViewBody(MethodSpec.Builder result) {
    if (hasResourceBindings()) {
      // Aapt can change IDs out from underneath us, just suppress since all will work at runtime.
      result.addAnnotation(AnnotationSpec.builder(SuppressWarnings.class)
          .addMember("value", "$S", "ResourceType")
          .build());
    }
    if (!hasInheritedUnbinder() && hasParentBinding()) {
      // Run the parent's static bindToTarget first so parent fields are bound.
      CodeBlock.Builder invoke = CodeBlock.builder() //
          .add("$T.$N(target", parentBinding.generatedClassName, BIND_TO_TARGET);
      if (parentBinding.bindNeedsFinder()) invoke.add(", finder, source");
      if (parentBinding.bindNeedsResources()) invoke.add(", res");
      if (parentBinding.bindNeedsTheme()) invoke.add(", theme");
      result.addStatement("$L", invoke.add(")").build());
      result.addCode("\n");
    }
    if (hasViewBindings()) {
      if (bindNeedsViewLocal()) {
        // Local variable in which all views will be temporarily stored.
        result.addStatement("$T view", VIEW);
      }
      // Loop over each view bindings and emit it.
      for (ViewBindings bindings : viewIdMap.values()) {
        addViewBindings(result, bindings);
      }
      // Loop over each collection binding and emit it.
      for (Map.Entry<FieldCollectionViewBinding, String[]> entry : collectionBindings.entrySet()) {
        emitCollectionBinding(result, entry.getKey(), entry.getValue());
      }
      if (hasResourceBindings()) {
        result.addCode("\n");
      }
    }
    if (hasResourceBindings()) {
      for (FieldBitmapBinding binding : bitmapBindings) {
        result.addStatement("target.$L = $T.decodeResource(res, $L)", binding.getName(),
            BITMAP_FACTORY, binding.getId());
      }
      for (FieldDrawableBinding binding : drawableBindings) {
        String tintAttributeId = binding.getTintAttributeId();
        if (tintAttributeId != null && !tintAttributeId.isEmpty()) {
          result.addStatement("target.$L = $T.getTintedDrawable(res, theme, $L, $L)",
              binding.getName(), UTILS, binding.getId(), tintAttributeId);
        } else {
          result.addStatement("target.$L = $T.getDrawable(res, theme, $L)", binding.getName(),
              UTILS, binding.getId());
        }
      }
      for (FieldResourceBinding binding : resourceBindings) {
        // TODO being themeable is poor correlation to the need to use Utils.
        if (binding.isThemeable()) {
          result.addStatement("target.$L = $T.$L(res, theme, $L)", binding.getName(), UTILS,
              binding.getMethod(), binding.getId());
        } else {
          result.addStatement("target.$L = res.$L($L)", binding.getName(), binding.getMethod(),
              binding.getId());
        }
      }
    }
  }

  // Emits a single array/list field binding, e.g.
  // target.views = Utils.listOf(finder.findRequiredView(...), ...).
  private void emitCollectionBinding(
      MethodSpec.Builder result, FieldCollectionViewBinding binding, String[] ids) {
    String ofName;
    switch (binding.getKind()) {
      case ARRAY:
        ofName = "arrayOf";
        break;
      case LIST:
        ofName = "listOf";
        break;
      default:
        throw new IllegalStateException("Unknown kind: " + binding.getKind());
    }
    CodeBlock.Builder builder = CodeBlock.builder();
    for (int i = 0; i < ids.length; i++) {
      if (i > 0) {
        builder.add(", ");
      }
      builder.add("\n");
      if (requiresCast(binding.getType())) {
        builder.add("($T) ", binding.getType());
      }
      if (binding.isRequired()) {
        builder.add("finder.findRequiredView(source, $L, $S)", ids[i],
            asHumanDescription(singletonList(binding)));
      } else {
        builder.add("finder.findOptionalView(source, $L)", ids[i]);
      }
    }
    result.addStatement("target.$L = $T.$L($L)", binding.getName(), UTILS, ofName,
        builder.build());
  }

  // Emits the binding statements for a single view id: either a direct field
  // assignment (fast path) or a 'view = find...' lookup followed by field and
  // listener bindings.
  private void addViewBindings(MethodSpec.Builder result, ViewBindings bindings) {
    if (bindings.isSingleFieldBinding()) {
      // Optimize the common case where there's a single binding directly to a field.
      FieldViewBinding fieldBinding = bindings.getFieldBinding();
      CodeBlock.Builder invoke = CodeBlock.builder()
          .add("target.$L = finder.find", fieldBinding.getName());
      invoke.add(fieldBinding.isRequired() ? "RequiredView" : "OptionalView");
      if (requiresCast(fieldBinding.getType())) {
        invoke.add("AsType");
      }
      invoke.add("(source, $L", bindings.getId());
      if (fieldBinding.isRequired() || requiresCast(fieldBinding.getType())) {
        invoke.add(", $S", asHumanDescription(singletonList(fieldBinding)));
      }
      if (requiresCast(fieldBinding.getType())) {
        invoke.add(", $T.class", fieldBinding.getRawType());
      }
      result.addStatement("$L)", invoke.build());
      return;
    }
    List<ViewBinding> requiredViewBindings = bindings.getRequiredBindings();
    if (requiredViewBindings.isEmpty()) {
      result.addStatement("view = finder.findOptionalView(source, $L)", bindings.getId());
    } else if (!bindings.isBoundToRoot()) {
      result.addStatement("view = finder.findRequiredView(source, $L, $S)", bindings.getId(),
          asHumanDescription(requiredViewBindings));
    }
    addFieldBindings(result, bindings);
    addMethodBindings(result, bindings);
  }

  // Assigns the looked-up 'view' local to the target field, casting when the field
  // type is more specific than View.
  private void addFieldBindings(MethodSpec.Builder result, ViewBindings bindings) {
    FieldViewBinding fieldBinding = bindings.getFieldBinding();
    if (fieldBinding != null) {
      if (requiresCast(fieldBinding.getType())) {
        result.addStatement("target.$L = finder.castView(view, $L, $S)", fieldBinding.getName(),
            bindings.getId(), asHumanDescription(singletonList(fieldBinding)));
      } else {
        result.addStatement("target.$L = view", fieldBinding.getName());
      }
    }
  }

  // Emits the anonymous listener classes for a view's method bindings and the
  // setter calls that install them, recording references needed for later removal.
  private void addMethodBindings(MethodSpec.Builder result, ViewBindings bindings) {
    Map<ListenerClass, Map<ListenerMethod, Set<MethodViewBinding>>> classMethodBindings =
        bindings.getMethodBindings();
    if (classMethodBindings.isEmpty()) {
      return;
    }
    // We only need to emit the null check if there are zero required bindings.
    boolean needsNullChecked = bindings.getRequiredBindings().isEmpty();
    if (needsNullChecked) {
      result.beginControlFlow("if (view != null)");
    }
    // Add the view reference to the unbinder.
    String fieldName = "target";
    String bindName = "target";
    if (!bindings.isBoundToRoot()) {
      fieldName = bindings.getId().replace(".", "");
      bindName = "view";
      if (isGeneratingUnbinder()) {
        result.addStatement("$L = view", fieldName);
      }
    }
    for (Map.Entry<ListenerClass, Map<ListenerMethod, Set<MethodViewBinding>>> e
        : classMethodBindings.entrySet()) {
      ListenerClass listener = e.getKey();
      Map<ListenerMethod, Set<MethodViewBinding>> methodBindings = e.getValue();
      TypeSpec.Builder callback = TypeSpec.anonymousClassBuilder("")
          .superclass(ClassName.bestGuess(listener.type()));
      // Implement every callback of the listener type, not just the bound ones, so
      // the anonymous class is complete.
      for (ListenerMethod method : getListenerMethods(listener)) {
        MethodSpec.Builder callbackMethod = MethodSpec.methodBuilder(method.name())
            .addAnnotation(Override.class)
            .addModifiers(PUBLIC)
            .returns(bestGuess(method.returnType()));
        String[] parameterTypes = method.parameters();
        for (int i = 0, count = parameterTypes.length; i < count; i++) {
          callbackMethod.addParameter(bestGuess(parameterTypes[i]), "p" + i);
        }
        boolean hasReturnType = !"void".equals(method.returnType());
        CodeBlock.Builder builder = CodeBlock.builder();
        if (hasReturnType) {
          builder.add("return ");
        }
        if (methodBindings.containsKey(method)) {
          for (MethodViewBinding binding : methodBindings.get(method)) {
            builder.add("target.$L(", binding.getName());
            List<Parameter> parameters = binding.getParameters();
            String[] listenerParameters = method.parameters();
            for (int i = 0, count = parameters.size(); i < count; i++) {
              if (i > 0) {
                builder.add(", ");
              }
              Parameter parameter = parameters.get(i);
              int listenerPosition = parameter.getListenerPosition();
              if (parameter.requiresCast(listenerParameters[listenerPosition])) {
                builder.add("finder.<$T>castParam(p$L, $S, $L, $S, $L)", parameter.getType(),
                    listenerPosition, method.name(), listenerPosition, binding.getName(), i);
              } else {
                builder.add("p$L", listenerPosition);
              }
            }
            builder.add(");\n");
          }
        } else if (hasReturnType) {
          // Unbound callback with a return value: emit the declared default.
          builder.add("$L;\n", method.defaultReturn());
        }
        callbackMethod.addCode(builder.build());
        callback.addMethod(callbackMethod.build());
      }
      boolean requiresRemoval = isGeneratingUnbinder() && listener.remover().length() != 0;
      String listenerField = null;
      if (requiresRemoval) {
        TypeName listenerClassName = bestGuess(listener.type());
        listenerField = fieldName + ((ClassName) listenerClassName).simpleName();
        result.addStatement("this.$L = $L", listenerField, callback.build());
      }
      if (!VIEW_TYPE.equals(listener.targetType())) {
        result.addStatement("(($T) $N).$L($L)", bestGuess(listener.targetType()), bindName,
            listener.setter(), requiresRemoval ? listenerField : callback.build());
      } else {
        result.addStatement("$N.$L($L)", bindName, listener.setter(),
            requiresRemoval ? listenerField : callback.build());
      }
    }
    if (needsNullChecked) {
      result.endControlFlow();
    }
  }

  // Resolves the callback methods of a listener type: either the single declared
  // method or, for multi-method listeners, the @ListenerMethod-annotated enum
  // constants of the callbacks() enum.
  static List<ListenerMethod> getListenerMethods(ListenerClass listener) {
    if (listener.method().length == 1) {
      return Arrays.asList(listener.method());
    }
    try {
      List<ListenerMethod> methods = new ArrayList<>();
      Class<? extends Enum<?>> callbacks = listener.callbacks();
      for (Enum<?> callbackMethod : callbacks.getEnumConstants()) {
        Field callbackField = callbacks.getField(callbackMethod.name());
        ListenerMethod method = callbackField.getAnnotation(ListenerMethod.class);
        if (method == null) {
          throw new IllegalStateException(String.format("@%s's %s.%s missing @%s annotation.",
              callbacks.getEnclosingClass().getSimpleName(), callbacks.getSimpleName(),
              callbackMethod.name(), ListenerMethod.class.getSimpleName()));
        }
        methods.add(method);
      }
      return methods;
    } catch (NoSuchFieldException e) {
      // Enum constants always have a matching field; reaching here is a JDK bug.
      throw new AssertionError(e);
    }
  }

  // Joins binding descriptions for error messages: "a", "a and b", "a, b, and c".
  static String asHumanDescription(Collection<? extends ViewBinding> bindings) {
    Iterator<? extends ViewBinding> iterator = bindings.iterator();
    switch (bindings.size()) {
      case 1:
        return iterator.next().getDescription();
      case 2:
        return iterator.next().getDescription() + " and " + iterator.next().getDescription();
      default:
        StringBuilder builder = new StringBuilder();
        for (int i = 0, count = bindings.size(); i < count; i++) {
          if (i != 0) {
            builder.append(", ");
          }
          if (i == count - 1) {
            builder.append("and ");
          }
          builder.append(iterator.next().getDescription());
        }
        return builder.toString();
    }
  }

  // Maps a type name string to a JavaPoet TypeName, handling primitives and
  // erasing any generic arguments to unbounded wildcards.
  static TypeName bestGuess(String type) {
    switch (type) {
      case "void": return TypeName.VOID;
      case "boolean": return TypeName.BOOLEAN;
      case "byte": return TypeName.BYTE;
      case "char": return TypeName.CHAR;
      case "double": return TypeName.DOUBLE;
      case "float": return TypeName.FLOAT;
      case "int": return TypeName.INT;
      case "long": return TypeName.LONG;
      case "short": return TypeName.SHORT;
      default:
        int left = type.indexOf('<');
        if (left != -1) {
          ClassName typeClassName = ClassName.bestGuess(type.substring(0, left));
          List<TypeName> typeArguments = new ArrayList<>();
          // One wildcard per '<' — assumes no comma-separated type args; matches the
          // listener types this processor handles.
          do {
            typeArguments.add(WildcardTypeName.subtypeOf(Object.class));
            left = type.indexOf('<', left + 1);
          } while (left != -1);
          return ParameterizedTypeName.get(typeClassName,
              typeArguments.toArray(new TypeName[typeArguments.size()]));
        }
        return ClassName.bestGuess(type);
    }
  }

  /** True when this type has a parent view binder type. */
  private boolean hasParentBinding() {
    return parentBinding != null;
  }

  /** True when this type contains an unbinder subclass. */
  private boolean isGeneratingUnbinder() {
    return hasViewBindings() || hasInheritedUnbinder();
  }

  /** True when any of this type's parents contain an unbinder subclass. */
  private boolean hasInheritedUnbinder() {
    return hasParentBinding() && parentBinding.isGeneratingUnbinder();
  }

  /** Return the nearest unbinder subclass from this type's parents. */
  private ClassName getInheritedUnbinder() {
    return parentBinding.unbinderClassName;
  }

  /** True when this type's bindings require a view hierarchy. */
  private boolean hasViewBindings() {
    return !viewIdMap.isEmpty() || !collectionBindings.isEmpty();
  }

  /** True when this type's bindings require Android's {@code Resources}. */
  private boolean hasResourceBindings() {
    return !(bitmapBindings.isEmpty() && drawableBindings.isEmpty() && resourceBindings.isEmpty());
  }

  /** True when this type's resource bindings require Android's {@code Theme}. */
  private boolean hasResourceBindingsNeedingTheme() {
    if (!drawableBindings.isEmpty()) {
      return true;
    }
    for (FieldResourceBinding resourceBinding : resourceBindings) {
      if (resourceBinding.isThemeable()) {
        return true;
      }
    }
    return false;
  }

  /** True when any view id has at least one listener-method binding. */
  private boolean hasMethodBindings() {
    for (ViewBindings viewBindings : viewIdMap.values()) {
      if (!viewBindings.getMethodBindings().isEmpty()) {
        return true;
      }
    }
    return false;
  }

  /**
   * True when some listener callback parameter needs {@code finder.castParam},
   * which forces the generated constructor to keep {@code finder} as a final
   * parameter capturable by the anonymous listener.
   */
  private boolean methodBindingsNeedFinder() {
    for (ViewBindings viewBindings : viewIdMap.values()) {
      for (Map.Entry<ListenerClass, Map<ListenerMethod, Set<MethodViewBinding>>> entry
          : viewBindings.getMethodBindings().entrySet()) {
        Map<ListenerMethod, Set<MethodViewBinding>> methodBindings = entry.getValue();
        for (ListenerMethod method : getListenerMethods(entry.getKey())) {
          if (methodBindings.containsKey(method)) {
            String[] parameterTypes = method.parameters();
            for (MethodViewBinding methodViewBinding : methodBindings.get(method)) {
              for (Parameter parameter : methodViewBinding.getParameters()) {
                if (parameter.requiresCast(parameterTypes[parameter.getListenerPosition()])) {
                  return true;
                }
              }
            }
          }
        }
      }
    }
    return false;
  }

  /** True when any field (single view or collection) is bound. */
  private boolean hasFieldBindings() {
    for (ViewBindings viewBindings : viewIdMap.values()) {
      if (viewBindings.getFieldBinding() != null) {
        return true;
      }
    }
    return !collectionBindings.isEmpty();
  }

  /** True when bind() needs the Finder, here or anywhere up the parent chain. */
  private boolean bindNeedsFinder() {
    return hasViewBindings() //
        || hasParentBinding() && parentBinding.bindNeedsFinder();
  }

  /** True when bind() needs Resources, here or anywhere up the parent chain. */
  private boolean bindNeedsResources() {
    return hasResourceBindings() //
        || hasParentBinding() && parentBinding.bindNeedsResources();
  }

  /** True when bind() needs the Theme, here or anywhere up the parent chain. */
  private boolean bindNeedsTheme() {
    return hasResourceBindings() && hasResourceBindingsNeedingTheme() //
        || hasParentBinding() && parentBinding.bindNeedsTheme();
  }

  /** True when any binding needs the temporary 'view' local in the generated body. */
  private boolean bindNeedsViewLocal() {
    for (ViewBindings viewBindings : viewIdMap.values()) {
      if (viewBindings.requiresLocal()) {
        return true;
      }
    }
    return false;
  }

  /** True when the bound type is more specific than android.view.View. */
  private static boolean requiresCast(TypeName type) {
    return !VIEW_TYPE.equals(type.toString());
  }

  @Override public String toString() {
    return generatedClassName.toString();
  }
}
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.http.impl.execchain; import java.io.IOException; import java.io.InterruptedIOException; import java.net.URI; import java.net.URISyntaxException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.ConnectionReuseStrategy; import org.apache.http.HttpClientConnection; import org.apache.http.HttpEntity; import org.apache.http.HttpException; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.ProtocolException; import org.apache.http.annotation.Immutable; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpExecutionAware; import org.apache.http.client.methods.HttpRequestWrapper; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.client.protocol.RequestClientConnControl; import org.apache.http.client.utils.URIUtils; import org.apache.http.conn.ConnectionKeepAliveStrategy; import org.apache.http.conn.ConnectionRequest; import org.apache.http.conn.HttpClientConnectionManager; import org.apache.http.conn.routing.HttpRoute; import org.apache.http.impl.conn.ConnectionShutdownException; import org.apache.http.protocol.HttpCoreContext; import org.apache.http.protocol.HttpProcessor; import org.apache.http.protocol.HttpRequestExecutor; import org.apache.http.protocol.ImmutableHttpProcessor; import org.apache.http.protocol.RequestContent; import org.apache.http.protocol.RequestTargetHost; import org.apache.http.protocol.RequestUserAgent; import org.apache.http.util.Args; import org.apache.http.util.VersionInfo; /** * Request executor that implements the most fundamental aspects of * the HTTP specification and the most straight-forward request / response * 
exchange with the target server. This executor does not support
 * execution via proxy and will make no attempts to retry the request
 * in case of a redirect, authentication challenge or I/O error.
 *
 * @since 4.3
 */
@Immutable
public class MinimalClientExec implements ClientExecChain {

    private final Log log = LogFactory.getLog(getClass());

    private final HttpRequestExecutor requestExecutor;
    private final HttpClientConnectionManager connManager;
    private final ConnectionReuseStrategy reuseStrategy;
    private final ConnectionKeepAliveStrategy keepAliveStrategy;
    // Minimal fixed protocol-interceptor chain; intentionally omits cookie,
    // auth and redirect processing.
    private final HttpProcessor httpProcessor;

    public MinimalClientExec(
            final HttpRequestExecutor requestExecutor,
            final HttpClientConnectionManager connManager,
            final ConnectionReuseStrategy reuseStrategy,
            final ConnectionKeepAliveStrategy keepAliveStrategy) {
        Args.notNull(requestExecutor, "HTTP request executor");
        Args.notNull(connManager, "Client connection manager");
        Args.notNull(reuseStrategy, "Connection reuse strategy");
        Args.notNull(keepAliveStrategy, "Connection keep alive strategy");
        this.httpProcessor = new ImmutableHttpProcessor(
                new RequestContent(),
                new RequestTargetHost(),
                new RequestClientConnControl(),
                new RequestUserAgent(VersionInfo.getUserAgent(
                        "Apache-HttpClient", "org.apache.http.client", getClass())));
        this.requestExecutor = requestExecutor;
        this.connManager = connManager;
        this.reuseStrategy = reuseStrategy;
        this.keepAliveStrategy = keepAliveStrategy;
    }

    /**
     * Makes the request URI relative (origin-form), as expected for a direct
     * request to the origin server; the Host header carries the authority.
     *
     * @throws ProtocolException if the current request URI cannot be parsed.
     */
    static void rewriteRequestURI(
            final HttpRequestWrapper request,
            final HttpRoute route) throws ProtocolException {
        try {
            URI uri = request.getURI();
            if (uri != null) {
                // Make sure the request URI is relative
                if (uri.isAbsolute()) {
                    uri = URIUtils.rewriteURI(uri, null, true);
                } else {
                    uri = URIUtils.rewriteURI(uri);
                }
                request.setURI(uri);
            }
        } catch (final URISyntaxException ex) {
            throw new ProtocolException("Invalid URI: " + request.getRequestLine().getUri(), ex);
        }
    }

    /**
     * Leases a connection for the route, opens it if necessary, executes the
     * request and returns a response proxy that releases the connection when
     * the entity is fully consumed (or immediately if there is no streaming
     * entity). On any failure the connection is aborted, never leaked.
     */
    @Override
    public CloseableHttpResponse execute(
            final HttpRoute route,
            final HttpRequestWrapper request,
            final HttpClientContext context,
            final HttpExecutionAware execAware) throws IOException, HttpException {
        Args.notNull(route, "HTTP route");
        Args.notNull(request, "HTTP request");
        Args.notNull(context, "HTTP context");

        rewriteRequestURI(request, route);

        final ConnectionRequest connRequest = connManager.requestConnection(route, null);
        if (execAware != null) {
            if (execAware.isAborted()) {
                connRequest.cancel();
                throw new RequestAbortedException("Request aborted");
            } else {
                // Allow an external abort to cancel the pending lease.
                execAware.setCancellable(connRequest);
            }
        }

        final RequestConfig config = context.getRequestConfig();

        final HttpClientConnection managedConn;
        try {
            final int timeout = config.getConnectionRequestTimeout();
            managedConn = connRequest.get(timeout > 0 ? timeout : 0, TimeUnit.MILLISECONDS);
        } catch(final InterruptedException interrupted) {
            // Restore the interrupt status before surfacing the abort.
            Thread.currentThread().interrupt();
            throw new RequestAbortedException("Request aborted", interrupted);
        } catch(final ExecutionException ex) {
            Throwable cause = ex.getCause();
            if (cause == null) {
                cause = ex;
            }
            throw new RequestAbortedException("Request execution failed", cause);
        }

        // Owns the leased connection from here on; all failure paths below go
        // through releaseTrigger so the connection is returned or aborted.
        final ConnectionHolder releaseTrigger = new ConnectionHolder(log, connManager, managedConn);
        try {
            if (execAware != null) {
                if (execAware.isAborted()) {
                    releaseTrigger.close();
                    throw new RequestAbortedException("Request aborted");
                } else {
                    execAware.setCancellable(releaseTrigger);
                }
            }

            if (!managedConn.isOpen()) {
                final int timeout = config.getConnectTimeout();
                this.connManager.connect(
                    managedConn,
                    route,
                    timeout > 0 ? timeout : 0,
                    context);
                this.connManager.routeComplete(managedConn, route, context);
            }
            final int timeout = config.getSocketTimeout();
            if (timeout >= 0) {
                managedConn.setSocketTimeout(timeout);
            }

            // Prefer the authority from an absolute original request URI; fall
            // back to the route's target host.
            HttpHost target = null;
            final HttpRequest original = request.getOriginal();
            if (original instanceof HttpUriRequest) {
                final URI uri = ((HttpUriRequest) original).getURI();
                if (uri.isAbsolute()) {
                    target = new HttpHost(uri.getHost(), uri.getPort(), uri.getScheme());
                }
            }
            if (target == null) {
                target = route.getTargetHost();
            }

            context.setAttribute(HttpCoreContext.HTTP_TARGET_HOST, target);
            context.setAttribute(HttpCoreContext.HTTP_REQUEST, request);
            context.setAttribute(HttpCoreContext.HTTP_CONNECTION, managedConn);
            context.setAttribute(HttpClientContext.HTTP_ROUTE, route);

            httpProcessor.process(request, context);
            final HttpResponse response = requestExecutor.execute(request, managedConn, context);
            httpProcessor.process(response, context);

            // The connection is in or can be brought to a re-usable state.
            if (reuseStrategy.keepAlive(response, context)) {
                // Set the idle duration of this connection
                final long duration = keepAliveStrategy.getKeepAliveDuration(response, context);
                releaseTrigger.setValidFor(duration, TimeUnit.MILLISECONDS);
                releaseTrigger.markReusable();
            } else {
                releaseTrigger.markNonReusable();
            }

            // check for entity, release connection if possible
            final HttpEntity entity = response.getEntity();
            if (entity == null || !entity.isStreaming()) {
                // connection not needed and (assumed to be) in re-usable state
                releaseTrigger.releaseConnection();
                return new HttpResponseProxy(response, null);
            } else {
                // Defer release until the caller consumes/closes the entity.
                return new HttpResponseProxy(response, releaseTrigger);
            }
        } catch (final ConnectionShutdownException ex) {
            final InterruptedIOException ioex = new InterruptedIOException(
                    "Connection has been shut down");
            ioex.initCause(ex);
            throw ioex;
        } catch (final HttpException ex) {
            releaseTrigger.abortConnection();
            throw ex;
        } catch (final IOException ex) {
            releaseTrigger.abortConnection();
            throw ex;
        } catch (final RuntimeException ex) {
            releaseTrigger.abortConnection();
            throw ex;
        }
    }

}
/* * Copyright 2012-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.autoconfigure; import java.io.IOException; import java.util.concurrent.CountDownLatch; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.boot.actuate.metrics.CounterService; import org.springframework.boot.actuate.metrics.GaugeService; import org.springframework.boot.test.EnvironmentTestUtils; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.annotation.Order; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.stereotype.Component; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.MvcResult; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.bind.annotation.PathVariable; import 
org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.context.request.async.DeferredResult; import org.springframework.web.filter.OncePerRequestFilter; import org.springframework.web.util.NestedServletException; import static org.assertj.core.api.Assertions.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.fail; import static org.mockito.BDDMockito.willAnswer; import static org.mockito.BDDMockito.willThrow; import static org.mockito.Matchers.anyDouble; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.asyncDispatch; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.request; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Tests for {@link MetricFilterAutoConfiguration}. 
 *
 * @author Phillip Webb
 * @author Andy Wilkinson
 * @author Stephane Nicoll
 */
public class MetricFilterAutoConfigurationTests {

    // Verifies that a plain servlet interaction increments the status counter and
    // submits a response-time gauge keyed by the request path.
    @Test
    public void recordsHttpInteractions() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        Filter filter = context.getBean(Filter.class);
        final MockHttpServletRequest request = new MockHttpServletRequest("GET",
                "/test/path");
        final MockHttpServletResponse response = new MockHttpServletResponse();
        FilterChain chain = mock(FilterChain.class);
        // Simulate the downstream chain setting a 200 status.
        willAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                response.setStatus(200);
                return null;
            }
        }).given(chain).doFilter(request, response);
        filter.doFilter(request, response, chain);
        verify(context.getBean(CounterService.class)).increment("status.200.test.path");
        verify(context.getBean(GaugeService.class)).submit(eq("response.test.path"),
                anyDouble());
        context.close();
    }

    // Template variables in the mapping ("/{someVariable}") must appear in the metric
    // name instead of the concrete path segment ("foo").
    @Test
    public void recordsHttpInteractionsWithTemplateVariable() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        Filter filter = context.getBean(Filter.class);
        MockMvc mvc = MockMvcBuilders.standaloneSetup(new MetricFilterTestController())
                .addFilter(filter).build();
        mvc.perform(get("/templateVarTest/foo")).andExpect(status().isOk());
        verify(context.getBean(CounterService.class))
                .increment("status.200.templateVarTest.someVariable");
        verify(context.getBean(GaugeService.class))
                .submit(eq("response.templateVarTest.someVariable"), anyDouble());
        context.close();
    }

    // A mapped handler that deliberately returns 404 still gets a per-template metric
    // (not the collapsed "unmapped" bucket used for truly unknown paths).
    @Test
    public void recordsKnown404HttpInteractionsAsSingleMetricWithPathAndTemplateVariable()
            throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        Filter filter = context.getBean(Filter.class);
        MockMvc mvc = MockMvcBuilders.standaloneSetup(new MetricFilterTestController())
                .addFilter(filter).build();
        mvc.perform(get("/knownPath/foo")).andExpect(status().isNotFound());
        verify(context.getBean(CounterService.class))
                .increment("status.404.knownPath.someVariable");
        verify(context.getBean(GaugeService.class))
                .submit(eq("response.knownPath.someVariable"), anyDouble());
        context.close();
    }

    // Requests to unknown paths must all collapse into a single "unmapped" metric to
    // avoid unbounded metric cardinality; hence times(2) on the same metric name.
    @Test
    public void records404HttpInteractionsAsSingleMetric() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        Filter filter = context.getBean(Filter.class);
        MockMvc mvc = MockMvcBuilders.standaloneSetup(new MetricFilterTestController())
                .addFilter(filter).build();
        mvc.perform(get("/unknownPath/1")).andExpect(status().isNotFound());
        mvc.perform(get("/unknownPath/2")).andExpect(status().isNotFound());
        verify(context.getBean(CounterService.class), times(2))
                .increment("status.404.unmapped");
        verify(context.getBean(GaugeService.class), times(2))
                .submit(eq("response.unmapped"), anyDouble());
        context.close();
    }

    // Redirects issued before the handler runs (e.g. a security filter) are likewise
    // collapsed into a single "unmapped" metric, keyed by the 302 status.
    @Test
    public void records302HttpInteractionsAsSingleMetric() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class, RedirectFilter.class);
        MetricsFilter filter = context.getBean(MetricsFilter.class);
        MockMvc mvc = MockMvcBuilders.standaloneSetup(new MetricFilterTestController())
                .addFilter(filter).addFilter(context.getBean(RedirectFilter.class))
                .build();
        mvc.perform(get("/unknownPath/1")).andExpect(status().is3xxRedirection());
        mvc.perform(get("/unknownPath/2")).andExpect(status().is3xxRedirection());
        verify(context.getBean(CounterService.class), times(2))
                .increment("status.302.unmapped");
        verify(context.getBean(GaugeService.class), times(2))
                .submit(eq("response.unmapped"), anyDouble());
        context.close();
    }

    // Without CounterService/GaugeService beans the auto-configuration must back off
    // and register no filter at all.
    @Test
    public void skipsFilterIfMissingServices() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                MetricFilterAutoConfiguration.class);
        assertThat(context.getBeansOfType(Filter.class).size()).isEqualTo(0);
        context.close();
    }

    // The filter can also be disabled explicitly via configuration property.
    @Test
    public void skipsFilterIfPropertyDisabled() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
        EnvironmentTestUtils.addEnvironment(context,
                "endpoints.metrics.filter.enabled:false");
        context.register(Config.class, MetricFilterAutoConfiguration.class);
        context.refresh();
        assertThat(context.getBeansOfType(Filter.class).size()).isEqualTo(0);
        context.close();
    }

    // An unhandled controller exception should be recorded as a 500 for the mapped
    // path, even though MockMvc surfaces it as a NestedServletException.
    @Test
    public void controllerMethodThatThrowsUnhandledException() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        Filter filter = context.getBean(Filter.class);
        MockMvc mvc = MockMvcBuilders.standaloneSetup(new MetricFilterTestController())
                .addFilter(filter).build();
        try {
            mvc.perform(get("/unhandledException"))
                    .andExpect(status().isInternalServerError());
        }
        catch (NestedServletException ex) {
            // Expected
        }
        verify(context.getBean(CounterService.class))
                .increment("status.500.unhandledException");
        verify(context.getBean(GaugeService.class))
                .submit(eq("response.unhandledException"), anyDouble());
        context.close();
    }

    // A GaugeService that throws must not break request processing; the counter is
    // still incremented and the gauge submit is still attempted.
    @Test
    public void gaugeServiceThatThrows() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        GaugeService gaugeService = context.getBean(GaugeService.class);
        willThrow(new IllegalStateException()).given(gaugeService).submit(anyString(),
                anyDouble());
        Filter filter = context.getBean(Filter.class);
        MockMvc mvc = MockMvcBuilders.standaloneSetup(new MetricFilterTestController())
                .addFilter(filter).build();
        mvc.perform(get("/templateVarTest/foo")).andExpect(status().isOk());
        verify(context.getBean(CounterService.class))
                .increment("status.200.templateVarTest.someVariable");
        verify(context.getBean(GaugeService.class))
                .submit(eq("response.templateVarTest.someVariable"), anyDouble());
        context.close();
    }

    // For async (DeferredResult) responses the stopwatch request attribute must stay
    // set across the initial dispatch and be cleared after the async dispatch, with
    // the metric recorded using the final (201) status.
    @Test
    public void correctlyRecordsMetricsForDeferredResultResponse() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        MetricsFilter filter = context.getBean(MetricsFilter.class);
        CountDownLatch latch = new CountDownLatch(1);
        MockMvc mvc = MockMvcBuilders
                .standaloneSetup(new MetricFilterTestController(latch)).addFilter(filter)
                .build();
        String attributeName = MetricsFilter.class.getName() + ".StopWatch";
        MvcResult result = mvc.perform(post("/create")).andExpect(status().isOk())
                .andExpect(request().asyncStarted())
                .andExpect(request().attribute(attributeName, is(notNullValue())))
                .andReturn();
        latch.countDown();
        mvc.perform(asyncDispatch(result)).andExpect(status().isCreated())
                .andExpect(request().attribute(attributeName, is(nullValue())));
        verify(context.getBean(CounterService.class)).increment("status.201.create");
        context.close();
    }

    // Same as above but for an async response that completes exceptionally: the
    // stopwatch attribute is cleared and a 500 metric is recorded.
    @Test
    public void correctlyRecordsMetricsForFailedDeferredResultResponse()
            throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class);
        MetricsFilter filter = context.getBean(MetricsFilter.class);
        CountDownLatch latch = new CountDownLatch(1);
        MockMvc mvc = MockMvcBuilders
                .standaloneSetup(new MetricFilterTestController(latch)).addFilter(filter)
                .build();
        String attributeName = MetricsFilter.class.getName() + ".StopWatch";
        MvcResult result = mvc.perform(post("/createFailure")).andExpect(status().isOk())
                .andExpect(request().asyncStarted())
                .andExpect(request().attribute(attributeName, is(notNullValue())))
                .andReturn();
        latch.countDown();
        try {
            mvc.perform(asyncDispatch(result));
            fail();
        }
        catch (Exception ex) {
            assertThat(result.getRequest().getAttribute(attributeName)).isNull();
            verify(context.getBean(CounterService.class))
                    .increment("status.500.createFailure");
        }
        finally {
            context.close();
        }
    }

    // 5xx errors produced by an early filter (before any handler mapping) collapse
    // into the single "unmapped" metric, keyed by the 503 status.
    @Test
    public void records5xxxHttpInteractionsAsSingleMetric() throws Exception {
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
                Config.class, MetricFilterAutoConfiguration.class,
                ServiceUnavailableFilter.class);
        MetricsFilter filter = context.getBean(MetricsFilter.class);
        MockMvc mvc = MockMvcBuilders.standaloneSetup(new MetricFilterTestController())
                .addFilter(filter)
                .addFilter(context.getBean(ServiceUnavailableFilter.class)).build();
        mvc.perform(get("/unknownPath/1")).andExpect(status().isServiceUnavailable());
        mvc.perform(get("/unknownPath/2")).andExpect(status().isServiceUnavailable());
        verify(context.getBean(CounterService.class), times(2))
                .increment("status.503.unmapped");
        verify(context.getBean(GaugeService.class), times(2))
                .submit(eq("response.unmapped"), anyDouble());
        context.close();
    }

    // Supplies mock metric services so the auto-configured filter has collaborators
    // whose interactions the tests can verify.
    @Configuration
    public static class Config {

        @Bean
        public CounterService counterService() {
            return mock(CounterService.class);
        }

        @Bean
        public GaugeService gaugeService() {
            return mock(GaugeService.class);
        }

    }

    // Test controller exercising template variables, deliberate error statuses and
    // async DeferredResult endpoints. The latch (may be null for sync-only tests)
    // lets tests control when the async result completes.
    @RestController
    class MetricFilterTestController {

        private final CountDownLatch latch;

        MetricFilterTestController() {
            this(null);
        }

        MetricFilterTestController(CountDownLatch latch) {
            this.latch = latch;
        }

        @RequestMapping("templateVarTest/{someVariable}")
        public String testTemplateVariableResolution(@PathVariable String someVariable) {
            return someVariable;
        }

        @RequestMapping("knownPath/{someVariable}")
        @ResponseStatus(HttpStatus.NOT_FOUND)
        @ResponseBody
        public String testKnownPathWith404Response(@PathVariable String someVariable) {
            return someVariable;
        }

        @ResponseBody
        @RequestMapping("unhandledException")
        public String testException() {
            throw new RuntimeException();
        }

        // Completes asynchronously with 201 once the test counts the latch down.
        @RequestMapping("create")
        public DeferredResult<ResponseEntity<String>> create() {
            final DeferredResult<ResponseEntity<String>> result = new DeferredResult<ResponseEntity<String>>();
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        MetricFilterTestController.this.latch.await();
                        result.setResult(
                                new ResponseEntity<String>("Done", HttpStatus.CREATED));
                    }
                    catch (InterruptedException ex) {
                    }
                }
            }).start();
            return result;
        }

        // Completes asynchronously with an error once the test counts the latch down.
        @RequestMapping("createFailure")
        public DeferredResult<ResponseEntity<String>> createFailure() {
            final DeferredResult<ResponseEntity<String>> result = new DeferredResult<ResponseEntity<String>>();
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        MetricFilterTestController.this.latch.await();
                        result.setErrorResult(new Exception("It failed"));
                    }
                    catch (InterruptedException ex) {
                    }
                }
            }).start();
            return result;
        }

    }

    @Component
    @Order(0)
    public static class RedirectFilter extends OncePerRequestFilter {

        @Override
        protected void doFilterInternal(HttpServletRequest request,
                HttpServletResponse response, FilterChain chain)
                throws ServletException, IOException {
            // send redirect before filter chain is executed, like Spring Security sending
            // us back to a login page
            response.sendRedirect("http://example.com");
        }

    }

    @Component
    @Order(0)
    public static class ServiceUnavailableFilter extends OncePerRequestFilter {

        @Override
        protected void doFilterInternal(HttpServletRequest request,
                HttpServletResponse response, FilterChain chain)
                throws ServletException, IOException {
            response.sendError(HttpStatus.SERVICE_UNAVAILABLE.value());
        }

    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.execute; import static org.apache.phoenix.query.QueryConstants.*; import java.io.IOException; import java.sql.SQLException; import java.util.Collections; import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.phoenix.compile.ExplainPlan; import org.apache.phoenix.compile.GroupByCompiler.GroupBy; import org.apache.phoenix.compile.OrderByCompiler.OrderBy; import org.apache.phoenix.compile.QueryPlan; import org.apache.phoenix.compile.RowProjector; import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.coprocessor.BaseScannerRegionObserver; import org.apache.phoenix.expression.Expression; import org.apache.phoenix.expression.OrderByExpression; import org.apache.phoenix.expression.aggregator.Aggregators; import org.apache.phoenix.expression.aggregator.ServerAggregators; import org.apache.phoenix.iterate.AggregatingResultIterator; import org.apache.phoenix.iterate.BaseGroupedAggregatingResultIterator; import org.apache.phoenix.iterate.DistinctAggregatingResultIterator; import org.apache.phoenix.iterate.FilterAggregatingResultIterator; 
import org.apache.phoenix.iterate.FilterResultIterator;
import org.apache.phoenix.iterate.GroupedAggregatingResultIterator;
import org.apache.phoenix.iterate.LimitingResultIterator;
import org.apache.phoenix.iterate.LookAheadResultIterator;
import org.apache.phoenix.iterate.OrderedAggregatingResultIterator;
import org.apache.phoenix.iterate.OrderedResultIterator;
import org.apache.phoenix.iterate.ParallelScanGrouper;
import org.apache.phoenix.iterate.PeekingResultIterator;
import org.apache.phoenix.iterate.ResultIterator;
import org.apache.phoenix.iterate.SequenceResultIterator;
import org.apache.phoenix.iterate.UngroupedAggregatingResultIterator;
import org.apache.phoenix.parse.FilterableStatement;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.TableRef;
import org.apache.phoenix.schema.tuple.MultiKeyValueTuple;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.util.TupleUtil;

import com.google.common.collect.Lists;

/**
 * Query plan that performs aggregation on the client side, on top of the rows
 * produced by a delegate plan. Used when aggregation cannot be pushed to the
 * server; "server" aggregators here re-evaluate the serialized aggregator set
 * from the scan attributes, and "client" aggregators finish the aggregation.
 */
public class ClientAggregatePlan extends ClientProcessingPlan {
    // GROUP BY specification; empty means a single ungrouped aggregate row.
    private final GroupBy groupBy;
    // HAVING expression applied after aggregation, or null.
    private final Expression having;
    // Aggregators deserialized from the scan attribute (partial/server-side step).
    private final Aggregators serverAggregators;
    // Aggregators from the statement context (final/client-side step).
    private final Aggregators clientAggregators;

    public ClientAggregatePlan(StatementContext context, FilterableStatement statement, TableRef table,
            RowProjector projector, Integer limit, Expression where, OrderBy orderBy, GroupBy groupBy,
            Expression having, QueryPlan delegate) {
        super(context, statement, table, projector, limit, where, orderBy, delegate);
        this.groupBy = groupBy;
        this.having = having;
        this.serverAggregators = ServerAggregators.deserialize(context.getScan()
                .getAttribute(BaseScannerRegionObserver.AGGREGATORS),
                QueryServicesOptions.withDefaults().getConfiguration());
        this.clientAggregators = context.getAggregationManager().getAggregators();
    }

    /**
     * Builds the client-side iterator pipeline. Order matters:
     * filter -> (sort for grouping) -> aggregate (server then client step) ->
     * HAVING filter -> DISTINCT dedup -> limit/order -> sequences.
     */
    @Override
    public ResultIterator iterator(ParallelScanGrouper scanGrouper) throws SQLException {
        ResultIterator iterator = delegate.iterator(scanGrouper);
        if (where != null) {
            iterator = new FilterResultIterator(iterator, where);
        }

        AggregatingResultIterator aggResultIterator;
        if (groupBy.isEmpty()) {
            // Ungrouped: a single aggregate row. First apply the server-step
            // aggregators, then finish with the client-step aggregators.
            aggResultIterator = new ClientUngroupedAggregatingResultIterator(
                    LookAheadResultIterator.wrap(iterator), serverAggregators);
            aggResultIterator = new UngroupedAggregatingResultIterator(
                    LookAheadResultIterator.wrap(aggResultIterator), clientAggregators);
        } else {
            if (!groupBy.isOrderPreserving()) {
                // Rows are not already ordered by the group-by key: sort them
                // (spooling to disk past the threshold) so grouping can be done
                // with a single streaming pass.
                int thresholdBytes = context.getConnection().getQueryServices().getProps().getInt(
                        QueryServices.SPOOL_THRESHOLD_BYTES_ATTRIB,
                        QueryServicesOptions.DEFAULT_SPOOL_THRESHOLD_BYTES);
                List<Expression> keyExpressions = groupBy.getKeyExpressions();
                List<OrderByExpression> keyExpressionOrderBy =
                        Lists.newArrayListWithExpectedSize(keyExpressions.size());
                for (Expression keyExpression : keyExpressions) {
                    keyExpressionOrderBy.add(new OrderByExpression(keyExpression, false, true));
                }
                iterator = new OrderedResultIterator(iterator, keyExpressionOrderBy,
                        thresholdBytes, null, projector.getEstimatedRowByteSize());
            }
            aggResultIterator = new ClientGroupedAggregatingResultIterator(
                    LookAheadResultIterator.wrap(iterator), serverAggregators, groupBy.getKeyExpressions());
            aggResultIterator = new GroupedAggregatingResultIterator(
                    LookAheadResultIterator.wrap(aggResultIterator), clientAggregators);
        }

        if (having != null) {
            aggResultIterator = new FilterAggregatingResultIterator(aggResultIterator, having);
        }

        if (statement.isDistinct() && statement.isAggregate()) { // Dedup on client if select distinct and aggregation
            aggResultIterator = new DistinctAggregatingResultIterator(aggResultIterator, getProjector());
        }

        ResultIterator resultScanner = aggResultIterator;
        if (orderBy.getOrderByExpressions().isEmpty()) {
            if (limit != null) {
                resultScanner = new LimitingResultIterator(aggResultIterator, limit);
            }
        } else {
            // ORDER BY over aggregated rows: the ordered iterator also applies the
            // limit, so no separate LimitingResultIterator is needed here.
            int thresholdBytes = context.getConnection().getQueryServices().getProps().getInt(
                    QueryServices.SPOOL_THRESHOLD_BYTES_ATTRIB,
                    QueryServicesOptions.DEFAULT_SPOOL_THRESHOLD_BYTES);
            resultScanner = new OrderedAggregatingResultIterator(aggResultIterator,
                    orderBy.getOrderByExpressions(), thresholdBytes, limit);
        }
        if (context.getSequenceManager().getSequenceCount() > 0) {
            resultScanner = new SequenceResultIterator(resultScanner, context.getSequenceManager());
        }
        return resultScanner;
    }

    /**
     * Mirrors the pipeline built by {@link #iterator(ParallelScanGrouper)} as
     * human-readable EXPLAIN steps, in the same order.
     */
    @Override
    public ExplainPlan getExplainPlan() throws SQLException {
        List<String> planSteps = Lists.newArrayList(delegate.getExplainPlan().getPlanSteps());
        if (where != null) {
            planSteps.add("CLIENT FILTER BY " + where.toString());
        }
        if (!groupBy.isEmpty()) {
            if (!groupBy.isOrderPreserving()) {
                planSteps.add("CLIENT SORTED BY " + groupBy.getKeyExpressions().toString());
            }
            planSteps.add("CLIENT AGGREGATE INTO DISTINCT ROWS BY " + groupBy.getExpressions().toString());
        } else {
            planSteps.add("CLIENT AGGREGATE INTO SINGLE ROW");
        }
        if (having != null) {
            planSteps.add("CLIENT AFTER-AGGREGATION FILTER BY " + having.toString());
        }
        if (statement.isDistinct() && statement.isAggregate()) {
            planSteps.add("CLIENT DISTINCT ON " + projector.toString());
        }
        if (orderBy.getOrderByExpressions().isEmpty()) {
            if (limit != null) {
                planSteps.add("CLIENT " + limit + " ROW LIMIT");
            }
        } else {
            planSteps.add("CLIENT" + (limit == null ? "" : " TOP " + limit + " ROW" + (limit == 1 ? "" : "S"))
                    + " SORTED BY " + orderBy.getOrderByExpressions().toString());
        }
        if (context.getSequenceManager().getSequenceCount() > 0) {
            int nSequences = context.getSequenceManager().getSequenceCount();
            planSteps.add("CLIENT RESERVE VALUES FROM " + nSequences + " SEQUENCE" + (nSequences == 1 ? "" : "S"));
        }
        return new ExplainPlan(planSteps);
    }

    @Override
    public GroupBy getGroupBy() {
        return groupBy;
    }

    /**
     * Groups rows by the concatenated value of the group-by expressions;
     * assumes the underlying iterator is ordered by that key.
     */
    private static class ClientGroupedAggregatingResultIterator extends BaseGroupedAggregatingResultIterator {
        private final List<Expression> groupByExpressions;

        public ClientGroupedAggregatingResultIterator(PeekingResultIterator iterator,
                Aggregators aggregators, List<Expression> groupByExpressions) {
            super(iterator, aggregators);
            this.groupByExpressions = groupByExpressions;
        }

        @Override
        protected ImmutableBytesWritable getGroupingKey(Tuple tuple, ImmutableBytesWritable ptr)
                throws SQLException {
            try {
                ImmutableBytesWritable key = TupleUtil.getConcatenatedValue(tuple, groupByExpressions);
                ptr.set(key.get(), key.getOffset(), key.getLength());
                return ptr;
            } catch (IOException e) {
                throw new SQLException(e);
            }
        }

        @Override
        protected Tuple wrapKeyValueAsResult(KeyValue keyValue) {
            return new MultiKeyValueTuple(Collections.<Cell> singletonList(keyValue));
        }

        @Override
        public String toString() {
            return "ClientGroupedAggregatingResultIterator [resultIterator="
                    + resultIterator + ", aggregators=" + aggregators
                    + ", groupByExpressions=" + groupByExpressions + "]";
        }
    }

    /**
     * Collapses all rows into one group by using the fixed ungrouped-aggregate
     * row key as the grouping key for every tuple.
     */
    private static class ClientUngroupedAggregatingResultIterator extends BaseGroupedAggregatingResultIterator {

        public ClientUngroupedAggregatingResultIterator(PeekingResultIterator iterator, Aggregators aggregators) {
            super(iterator, aggregators);
        }

        @Override
        protected ImmutableBytesWritable getGroupingKey(Tuple tuple, ImmutableBytesWritable ptr)
                throws SQLException {
            // Constant key: every row belongs to the single ungrouped group.
            ptr.set(UNGROUPED_AGG_ROW_KEY);
            return ptr;
        }

        @Override
        protected Tuple wrapKeyValueAsResult(KeyValue keyValue) throws SQLException {
            return new MultiKeyValueTuple(Collections.<Cell> singletonList(keyValue));
        }

        @Override
        public String toString() {
            return "ClientUngroupedAggregatingResultIterator [resultIterator="
                    + resultIterator + ", aggregators=" + aggregators + "]";
        }
    }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.ql; import org.apache.http.HttpHost; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.rest.yaml.ObjectPath; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.predicate.Range; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NullEquals; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.session.Configuration; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; 
import org.elasticsearch.xpack.ql.type.EsField;
import org.elasticsearch.xpack.ql.util.StringUtils;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeDiagnosingMatcher;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.jar.JarInputStream;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.test.ESTestCase.between;
import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength;
import static org.elasticsearch.test.ESTestCase.randomBoolean;
import static org.elasticsearch.test.ESTestCase.randomFrom;
import static org.elasticsearch.test.ESTestCase.randomZone;
import static org.elasticsearch.xpack.ql.TestUtils.StringContainsRegex.containsRegex;
import static org.elasticsearch.xpack.ql.tree.Source.EMPTY;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;

/**
 * Shared test helpers for x-pack QL: factories for expressions/configurations,
 * REST-based search-context assertions, classpath resource discovery, and a
 * parser for query-spec test files.
 */
public final class TestUtils {

    public static final ZoneId UTC = ZoneId.of("Z");
    public static final Configuration TEST_CFG = new Configuration(UTC, null, null);

    // Matcher-type keywords recognized by readSpec() expectation lines.
    private static final String MATCHER_TYPE_CONTAINS = "CONTAINS";
    private static final String MATCHER_TYPE_REGEX = "REGEX";

    // Utility class: no instances.
    private TestUtils() {}

    // Configuration with a random zone and random username/cluster-name strings.
    public static Configuration randomConfiguration() {
        return new Configuration(randomZone(), randomAlphaOfLength(10), randomAlphaOfLength(10));
    }

    public static Configuration randomConfiguration(ZoneId zoneId) {
        return new Configuration(zoneId, randomAlphaOfLength(10), randomAlphaOfLength(10));
    }

    public static Literal of(Object value) {
        return of(Source.EMPTY, value);
    }

    /**
     * Utility method for creating 'in-line' Literals (out of values instead of expressions).
     */
    public static Literal of(Source source, Object value) {
        if (value instanceof Literal) {
            return (Literal) value;
        }
        return new Literal(source, value, DataTypes.fromJava(value));
    }

    // Comparison factories below all use an empty source and a random zone.
    public static Equals equalsOf(Expression left, Expression right) {
        return new Equals(EMPTY, left, right, randomZone());
    }

    public static NotEquals notEqualsOf(Expression left, Expression right) {
        return new NotEquals(EMPTY, left, right, randomZone());
    }

    public static NullEquals nullEqualsOf(Expression left, Expression right) {
        return new NullEquals(EMPTY, left, right, randomZone());
    }

    public static LessThan lessThanOf(Expression left, Expression right) {
        return new LessThan(EMPTY, left, right, randomZone());
    }

    public static LessThanOrEqual lessThanOrEqualOf(Expression left, Expression right) {
        return new LessThanOrEqual(EMPTY, left, right, randomZone());
    }

    public static GreaterThan greaterThanOf(Expression left, Expression right) {
        return new GreaterThan(EMPTY, left, right, randomZone());
    }

    public static GreaterThanOrEqual greaterThanOrEqualOf(Expression left, Expression right) {
        return new GreaterThanOrEqual(EMPTY, left, right, randomZone());
    }

    public static Range rangeOf(Expression value, Expression lower, boolean includeLower,
            Expression upper, boolean includeUpper) {
        return new Range(EMPTY, value, lower, includeLower, upper, includeUpper, randomZone());
    }

    public static FieldAttribute fieldAttribute() {
        return fieldAttribute(randomAlphaOfLength(10), randomFrom(DataTypes.types()));
    }

    public static FieldAttribute fieldAttribute(String name, DataType type) {
        return new FieldAttribute(EMPTY, name, new EsField(name, type, emptyMap(), randomBoolean()));
    }

    public static EsRelation relation() {
        return new EsRelation(EMPTY, new EsIndex(randomAlphaOfLength(8), emptyMap()), randomBoolean());
    }

    //
    // Common methods / assertions
    //

    // Asserts that no non-internal index has open search contexts (i.e. no leaked
    // scrolls/cursors) according to the _stats/search endpoint.
    public static void assertNoSearchContexts(RestClient client) throws IOException {
        Map<String, Object> stats = searchStats(client);
        @SuppressWarnings("unchecked")
        Map<String, Object> indicesStats = (Map<String, Object>) stats.get("indices");
        for (String index : indicesStats.keySet()) {
            if (index.startsWith(".") == false) {
                // We are not interested in internal indices
                assertEquals(index + " should have no search contexts", 0, getOpenContexts(stats, index));
            }
        }
    }

    public static int getNumberOfSearchContexts(RestClient client, String index) throws IOException {
        return getOpenContexts(searchStats(client), index);
    }

    // Fetches the cluster-wide search stats as a nested map.
    private static Map<String, Object> searchStats(RestClient client) throws IOException {
        Response response = client.performRequest(new Request("GET", "/_stats/search"));
        try (InputStream content = response.getEntity().getContent()) {
            return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
        }
    }

    // Navigates indices.<index>.total.search.open_contexts in the stats map.
    // NOTE(review): throws NPE if the index is missing from the stats — acceptable
    // for tests, but callers should ensure the index exists.
    @SuppressWarnings("unchecked")
    private static int getOpenContexts(Map<String, Object> stats, String index) {
        stats = (Map<String, Object>) stats.get("indices");
        stats = (Map<String, Object>) stats.get(index);
        stats = (Map<String, Object>) stats.get("total");
        stats = (Map<String, Object>) stats.get("search");
        return (Integer) stats.get("open_contexts");
    }

    //
    // Classpath
    //
    /**
     * Returns the classpath resources matching a simple pattern ("*.csv").
     * It supports folders separated by "/" (e.g. "/some/folder/*.txt").
     *
     * Currently able to resolve resources inside the classpath either from:
     * folders in the file-system (typically IDEs) or
     * inside jars (gradle).
     */
    @SuppressForbidden(reason = "classpath discovery")
    public static List<URL> classpathResources(String pattern) throws IOException {
        while (pattern.startsWith("/")) {
            pattern = pattern.substring(1);
        }

        Tuple<String, String> split = pathAndName(pattern);

        // the root folder searched inside the classpath - default is the root classpath
        // default file match
        final String root = split.v1();
        final String filePattern = split.v2();

        String[] resources = System.getProperty("java.class.path").split(System.getProperty("path.separator"));

        List<URL> matches = new ArrayList<>();

        for (String resource : resources) {
            Path path = PathUtils.get(resource);

            // check whether we're dealing with a jar
            // Java 7 java.nio.fileFileSystem can be used on top of ZIPs/JARs but consumes more memory
            // hence the use of the JAR API
            if (path.toString().endsWith(".jar")) {
                try (JarInputStream jar = jarInputStream(path.toUri().toURL())) {
                    ZipEntry entry = null;
                    while ((entry = jar.getNextEntry()) != null) {
                        String name = entry.getName();
                        Tuple<String, String> entrySplit = pathAndName(name);
                        if (root.equals(entrySplit.v1()) && Regex.simpleMatch(filePattern, entrySplit.v2())) {
                            matches.add(new URL("jar:" + path.toUri() + "!/" + name));
                        }
                    }
                }
            }
            // normal file access
            else if (Files.isDirectory(path)) {
                // depth 1: only direct children of the classpath directory are scanned
                Files.walkFileTree(path, EnumSet.allOf(FileVisitOption.class), 1, new SimpleFileVisitor<>() {
                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        if (Regex.simpleMatch(filePattern, file.toString())) {
                            matches.add(file.toUri().toURL());
                        }
                        return FileVisitResult.CONTINUE;
                    }
                });
            }
        }
        return matches;
    }

    @SuppressForbidden(reason = "need to open stream")
    public static InputStream inputStream(URL resource) throws IOException {
        URLConnection con = resource.openConnection();
        // do not to cache files (to avoid keeping file handles around)
        con.setUseCaches(false);
        return con.getInputStream();
    }

    @SuppressForbidden(reason = "need to open jar")
    public static JarInputStream jarInputStream(URL resource) throws IOException {
        return new JarInputStream(inputStream(resource));
    }

    public static BufferedReader reader(URL resource) throws IOException {
        return new BufferedReader(new InputStreamReader(inputStream(resource), StandardCharsets.UTF_8));
    }

    /**
     * Splits a path into (folder, file name) around the last '/'.
     * NOTE(review): the folder is computed with {@code substring(0, lastIndexOf - 1)},
     * which drops the character before the last '/'. Callers are internally
     * consistent (both the search root and each entry go through this method, so
     * matching still works) — confirm intent before "fixing".
     */
    public static Tuple<String, String> pathAndName(String string) {
        String folder = StringUtils.EMPTY;
        String file = string;
        int lastIndexOf = string.lastIndexOf("/");
        if (lastIndexOf > 0) {
            folder = string.substring(0, lastIndexOf - 1);
            if (lastIndexOf + 1 < string.length()) {
                file = string.substring(lastIndexOf + 1);
            }
        }
        return new Tuple<>(folder, file);
    }

    // Queries _nodes and builds a TestNodes collection of (id, version, address).
    public static TestNodes buildNodeAndVersions(RestClient client) throws IOException {
        Response response = client.performRequest(new Request("GET", "_nodes"));
        ObjectPath objectPath = ObjectPath.createFromResponse(response);
        Map<String, Object> nodesAsMap = objectPath.evaluate("nodes");
        TestNodes nodes = new TestNodes();
        for (String id : nodesAsMap.keySet()) {
            nodes.add(
                new TestNode(
                    id,
                    Version.fromString(objectPath.evaluate("nodes." + id + ".version")),
                    HttpHost.create(objectPath.evaluate("nodes." + id + ".http.publish_address"))
                )
            );
        }
        return nodes;
    }

    // Reads a UTF-8 resource to a string, skipping lines that start with "//".
    public static String readResource(InputStream input) throws IOException {
        StringBuilder builder = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) {
            String line = reader.readLine();
            while (line != null) {
                if (line.trim().startsWith("//") == false) {
                    builder.append(line);
                    builder.append('\n');
                }
                line = reader.readLine();
            }
            return builder.toString();
        }
    }

    // Random runtime-mappings definition: 1-100 fields, each with 1-10 random
    // config entries. Loops guard against random key collisions.
    public static Map<String, Object> randomRuntimeMappings() {
        int count = between(1, 100);
        Map<String, Object> runtimeFields = new HashMap<>(count);
        while (runtimeFields.size() < count) {
            int size = between(1, 10);
            Map<String, Object> config = new HashMap<>(size);
            while (config.size() < size) {
                config.put(randomAlphaOfLength(5), randomAlphaOfLength(5));
            }
            runtimeFields.put(randomAlphaOfLength(5), config);
        }
        return runtimeFields;
    }

    /**
     * Parses a query-spec resource into parameterized test cases of
     * {fileName, testName, query, expectation matchers}. Format per case:
     * a name line, then query lines up to a ';', then expectation lines
     * (optionally prefixed CONTAINS/REGEX) up to a terminating ';'.
     * Lines that are blank or start with "//" are ignored.
     */
    public static Collection<Object[]> readSpec(Class<?> clazz, String testFileName) throws Exception {
        ArrayList<Object[]> arr = new ArrayList<>();
        Map<String, Integer> testNames = new LinkedHashMap<>();

        try (
            InputStream is = clazz.getResourceAsStream(testFileName);
            BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))
        ) {
            int lineNumber = 0;
            String line;
            boolean done = false;
            String name = null;
            String query = null;
            ArrayList<Matcher<String>> matchers = new ArrayList<>(8);
            StringBuilder sb = new StringBuilder();

            while ((line = reader.readLine()) != null) {
                lineNumber++;
                line = line.trim();
                if (line.isEmpty() || line.startsWith("//")) {
                    continue;
                }

                if (name == null) {
                    name = line;
                    // Duplicate names would silently shadow earlier cases; fail loudly.
                    Integer previousName = testNames.put(name, lineNumber);
                    if (previousName != null) {
                        throw new IllegalArgumentException(
                            "Duplicate test name '" + line + "' at line " + lineNumber
                                + " (previously seen at line " + previousName + ")"
                        );
                    }
                } else if (query == null) {
                    sb.append(line).append(' ');
                    if (line.endsWith(";")) {
                        // Drop the trailing "; " (the ';' plus the space appended above).
                        sb.setLength(sb.length() - 2);
                        query = sb.toString();
                        sb.setLength(0);
                    }
                } else {
                    if (line.endsWith(";")) {
                        line = line.substring(0, line.length() - 1);
                        done = true;
                    }

                    if (line.isEmpty() == false) {
                        // At most one split: matcher type keyword, then the expectation text.
                        String[] matcherAndExpectation = line.split("[ \\t]+", 2);
                        if (matcherAndExpectation.length == 1) {
                            matchers.add(containsString(matcherAndExpectation[0]));
                        } else if (matcherAndExpectation.length == 2) {
                            String matcherType = matcherAndExpectation[0];
                            String expectation = matcherAndExpectation[1];
                            switch (matcherType.toUpperCase(Locale.ROOT)) {
                                case MATCHER_TYPE_CONTAINS:
                                    matchers.add(containsString(expectation));
                                    break;
                                case MATCHER_TYPE_REGEX:
                                    matchers.add(containsRegex(expectation));
                                    break;
                                default:
                                    throw new IllegalArgumentException(
                                        "unsupported matcher on line " + testFileName + ":" + lineNumber + ": " + matcherType
                                    );
                            }
                        }
                    }

                    if (done) {
                        // Add and zero out for the next spec
                        arr.add(new Object[] { testFileName, name, query, matchers });
                        name = null;
                        query = null;
                        matchers = new ArrayList<>(8);
                        done = false;
                    }
                }
            }
            if (name != null) {
                throw new IllegalStateException("Read a test [" + name + "] without a body at the end of [" + testFileName + "]");
            }
        }
        return arr;
    }

    // Matcher which extends the functionality of org.hamcrest.Matchers.matchesPattern(String)}
    // by allowing to match detected regex groups later on in the pattern, e.g.:
    // "(?<id>.+?)"....... \k<id>....."}
    public static class StringContainsRegex extends TypeSafeDiagnosingMatcher<String> {

        private final Pattern pattern;

        protected StringContainsRegex(Pattern pattern) {
            this.pattern = pattern;
        }

        @Override
        public void describeTo(Description description) {
            description.appendText("a string containing the pattern ").appendValue(pattern);
        }

        @Override
        protected boolean matchesSafely(String actual, Description mismatchDescription) {
            // find() (not matches()) — the pattern need only occur somewhere in the string.
            if (pattern.matcher(actual).find() == false) {
                mismatchDescription.appendText("the string was ").appendValue(actual);
                return false;
            }
            return true;
        }

        public static Matcher<String> containsRegex(String regex) {
            return new StringContainsRegex(Pattern.compile(regex));
        }
    }
}
package org.cfr.capsicum.propertyset;

import java.util.Collection;
import java.util.Map;

import org.apache.cayenne.access.dbsync.CreateIfNoSchemaStrategy;
import org.cfr.capsicum.propertyset.CayenneRuntimeContextProvider;
import org.cfr.capsicum.propertyset.ICayenneConfigurationProvider;
import org.cfr.capsicum.propertyset.ICayennePropertySetDAO;
import org.cfr.capsicum.propertyset.PropertySetItem;
import org.cfr.capsicum.test.AbstractSimpleCayenneJUnitTests;
import org.junit.BeforeClass;
import org.junit.Test;

import com.google.common.collect.ImmutableMap;
import com.opensymphony.module.propertyset.PropertySet;

/**
 * Integration tests for {@link ICayennePropertySetDAO}: create/save of
 * {@link PropertySetItem} rows, key listing (optionally filtered by key prefix
 * and/or property type) and removal, all through a shared Cayenne runtime.
 */
public class CayennePropertySetDAOImplTest extends AbstractSimpleCayenneJUnitTests {

    /** Shared runtime provider, configured once for the whole test class. */
    private static CayenneRuntimeContextProvider provider;

    /**
     * Boots a Cayenne runtime against the test datasource; the schema is
     * created on demand via {@link CreateIfNoSchemaStrategy}.
     */
    @BeforeClass
    public static void init() {
        provider = new CayenneRuntimeContextProvider();
        Map<String, Object> config = new ImmutableMap.Builder<String, Object>()
                .put(ICayenneConfigurationProvider.DATASOURCE_PROPERTY_KEY, createDatasource())
                .put(ICayenneConfigurationProvider.ADAPTER_PROPERTY_KEY, getDbAdapter().getCanonicalName())
                .put(ICayenneConfigurationProvider.SCHEMA_UPDATE_STRATEGY_PROPERTY_KEY, CreateIfNoSchemaStrategy.class)
                .build();
        // The runtime context must only exist after setup() has run.
        assertNull(provider.getCayenneRuntimeContext());
        provider.setup(config);
        assertNotNull(provider.getCayenneRuntimeContext());
    }

    @Test
    public void createPropertySetItemAndSave() {
        ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
        propertySetDAO.removeAll();
        PropertySetItem expected = propertySetDAO.create("Foo", 1, "foo.name");
        propertySetDAO.save(expected);
        // The stored item is found only under its exact key.
        PropertySetItem actual = propertySetDAO.findByKey("Foo", 1L, "foo.name");
        assertNotNull(actual);
        assertEquals(expected, actual);
        actual = propertySetDAO.findByKey("Foo", 1L, "foo");
        assertNull(actual);
    }

    @Test
    public void getAllKeys() {
        ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
        propertySetDAO.removeAll();
        String entityName = "Foo";
        Long entityId = 1L;
        Creator.create(entityName, entityId, "foo.name");
        Creator.create(entityName, entityId, "foo.login");
        Creator.create(entityName, entityId, "foo.firstname");
        // null prefix + type -1 means "no filtering": every key comes back.
        Collection<String> keys = propertySetDAO.getKeys(entityName, entityId, null, -1);
        assertEquals(3, keys.size());
    }

    @Test
    public void getAllKeysWithType() {
        ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
        propertySetDAO.removeAll();
        String entityName = "Foo";
        Long entityId = 1L;
        Creator.create(entityName, entityId, "foo.name", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.login", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.firstname", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.activated", PropertySet.BOOLEAN);
        // Filtering by type partitions the four stored keys 3 / 1.
        Collection<String> keys = propertySetDAO.getKeys(entityName, entityId, null, PropertySet.STRING);
        assertEquals(3, keys.size());
        keys = propertySetDAO.getKeys(entityName, entityId, null, PropertySet.BOOLEAN);
        assertEquals(1, keys.size());
    }

    @Test
    public void getAllKeysWithTypeLike() {
        ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
        propertySetDAO.removeAll();
        String entityName = "Foo";
        Long entityId = 1L;
        Creator.create(entityName, entityId, "foo.name", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.login", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.firstname", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.activated", PropertySet.BOOLEAN);
        // Different entity id: must not be matched below.
        Creator.create(entityName, 2L, "foo.activated", PropertySet.BOOLEAN);
        // Prefix "foo" + STRING excludes the BOOLEAN key and the other entity.
        Collection<String> keys = propertySetDAO.getKeys(entityName, entityId, "foo", PropertySet.STRING);
        assertEquals(3, keys.size());
    }

    @Test
    public void getAllKeysLike() {
        ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
        propertySetDAO.removeAll();
        String entityName = "Foo";
        Long entityId = 1L;
        Creator.create(entityName, entityId, "foo.name", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.login", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.firstname", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.activated", PropertySet.BOOLEAN);
        // Different entity id: must not be matched below.
        Creator.create(entityName, 2L, "foo.activated", PropertySet.BOOLEAN);
        // Prefix-only filter (type -1): all four keys of entity 1 match.
        Collection<String> keys = propertySetDAO.getKeys(entityName, entityId, "foo", -1);
        assertEquals(4, keys.size());
    }

    @Test
    public void removeAllOfEntity() {
        ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
        propertySetDAO.removeAll();
        String entityName = "Foo";
        Long entityId = 1L;
        Creator.create(entityName, entityId, "foo.name", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.login", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.firstname", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo.activated", PropertySet.BOOLEAN);
        // Entity-wide removal deletes every key of (entityName, entityId).
        propertySetDAO.remove(entityName, entityId);
        Collection<String> keys = propertySetDAO.getKeys(entityName, entityId, null, -1);
        assertEquals(0, keys.size());
    }

    @Test
    public void removeAllOfEntityByKey() {
        ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
        propertySetDAO.removeAll();
        String entityName = "Foo";
        Long entityId = 1L;
        Creator.create(entityName, entityId, "foo.name", PropertySet.STRING);
        Creator.create(entityName, entityId, "foo1.activated", PropertySet.BOOLEAN);
        // Key-targeted removal leaves the unrelated key in place.
        propertySetDAO.remove(entityName, entityId, "foo.name");
        Collection<String> keys = propertySetDAO.getKeys(entityName, entityId, null, -1);
        assertEquals(1, keys.size());
    }

    /**
     * Fluent builder assembling a detached {@link PropertySetItem} (not persisted).
     */
    public static class Builder {

        protected String entityName;

        protected Long entityId;

        protected int type;

        protected String key;

        public Builder entityId(Long entityId) {
            this.entityId = entityId;
            return this;
        }

        public Builder key(String key) {
            this.key = key;
            return this;
        }

        public Builder entityName(String entityName) {
            this.entityName = entityName;
            return this;
        }

        public Builder type(int type) {
            this.type = type;
            return this;
        }

        /**
         * @return a new {@link PropertySetItem} populated with the builder's state.
         */
        public PropertySetItem build() {
            PropertySetItem item = new PropertySetItem();
            item.setEntityId(entityId);
            item.setEntityName(entityName);
            item.setPropertyType(type);
            item.setPropertyKey(key);
            // BUG FIX: previously returned null, silently discarding the
            // freshly populated item.
            return item;
        }
    }

    /**
     * Test helper that creates a {@link PropertySetItem} through the DAO
     * (note: {@link #create()} persists via the DAO and does not use
     * {@link Builder#build()}).
     */
    public static class Creator extends Builder {

        public static PropertySetItem create(String entityName, long entityId, String key) {
            return ((Creator) new Creator().entityName(entityName).entityId(entityId).key(key)).create();
        }

        public static PropertySetItem create(String entityName, long entityId, String key, int type) {
            return ((Creator) new Creator().entityName(entityName).entityId(entityId).key(key).type(type)).create();
        }

        /** Creates, types and saves the item through the shared DAO. */
        public PropertySetItem create() {
            ICayennePropertySetDAO propertySetDAO = provider.getPropertySetDAO();
            PropertySetItem item = propertySetDAO.create(entityName, entityId, key);
            item.setPropertyType(type);
            propertySetDAO.save(item);
            return item;
        }
    }
}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.castpresentation; import android.app.Activity; import android.app.MediaRouteActionProvider; import android.app.Presentation; import android.content.Context; import android.content.DialogInterface; import android.content.res.Resources; import android.media.MediaRouter; import android.media.MediaRouter.RouteInfo; import android.opengl.GLSurfaceView; import android.os.Bundle; import android.util.Log; import android.view.Display; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.WindowManager; import android.widget.TextView; /** * <h3>PresentationWithMediaRouterActivity Activity</h3> * <p> * This demonstrates how to create an activity that shows some content on a * secondary display using a {@link Presentation}. * </p> * <p> * The activity uses the {@link MediaRouter} API to automatically detect when a * presentation display is available and to allow the user to control the media * routes using a menu item. When a presentation display is available, we stop * showing content in the main activity and instead open up a * {@link Presentation} on the preferred presentation display. When a * presentation display is removed, we revert to showing content in the main * activity. We also write information about displays and display-related events * to the Android log which you can read using <code>adb logcat</code>. 
* </p> * <p> * You can try this out using an HDMI or Wifi display or by using the * "Simulate secondary displays" feature in Development Settings to create a few * simulated secondary displays. Each display will appear in the list along with * a checkbox to show a presentation on that display. * </p> */ public class PresentationWithMediaRouterActivity extends Activity { private final String TAG = "PresentationWithMediaRouterActivity"; private MediaRouter mMediaRouter; private DemoPresentation mPresentation; private GLSurfaceView mSurfaceView; private TextView mInfoTextView; private boolean mPaused; /** * Initialization of the Activity after it is first created. Must at least * call {@link android.app.Activity#setContentView setContentView()} to * describe what is to be displayed in the screen. */ @Override protected void onCreate(Bundle savedInstanceState) { // Be sure to call the super class. super.onCreate(savedInstanceState); // Get the media router service. mMediaRouter = (MediaRouter) getSystemService(Context.MEDIA_ROUTER_SERVICE); // See assets/res/any/layout/presentation_with_media_router_activity.xml // for this // view layout definition, which is being set here as // the content of our screen. setContentView(R.layout.presentation_with_media_router_activity); // Set up the surface view for visual interest. mSurfaceView = (GLSurfaceView) findViewById(R.id.surface_view); mSurfaceView.setRenderer(new CubeRenderer(false)); // Get a text view where we will show information about what's // happening. mInfoTextView = (TextView) findViewById(R.id.info); } @Override protected void onResume() { // Be sure to call the super class. super.onResume(); // Listen for changes to media routes. mMediaRouter.addCallback(MediaRouter.ROUTE_TYPE_LIVE_VIDEO, mMediaRouterCallback); // Update the presentation based on the currently selected route. mPaused = false; updatePresentation(); } @Override protected void onPause() { // Be sure to call the super class. 
super.onPause(); // Stop listening for changes to media routes. mMediaRouter.removeCallback(mMediaRouterCallback); // Pause rendering. mPaused = true; updateContents(); } @Override protected void onStop() { // Be sure to call the super class. super.onStop(); // Dismiss the presentation when the activity is not visible. if (mPresentation != null) { Log.i(TAG, "Dismissing presentation because the activity is no longer visible."); mPresentation.dismiss(); mPresentation = null; } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Be sure to call the super class. super.onCreateOptionsMenu(menu); // Inflate the menu and configure the media router action provider. getMenuInflater().inflate(R.menu.presentation_with_media_router_menu, menu); MenuItem mediaRouteMenuItem = menu.findItem(R.id.menu_media_route); MediaRouteActionProvider mediaRouteActionProvider = (MediaRouteActionProvider) mediaRouteMenuItem.getActionProvider(); mediaRouteActionProvider.setRouteTypes(MediaRouter.ROUTE_TYPE_LIVE_VIDEO); // Return true to show the menu. return true; } private void updatePresentation() { // Get the current route and its presentation display. MediaRouter.RouteInfo route = mMediaRouter.getSelectedRoute( MediaRouter.ROUTE_TYPE_LIVE_VIDEO); Display presentationDisplay = route != null ? route.getPresentationDisplay() : null; // Dismiss the current presentation if the display has changed. if (mPresentation != null && mPresentation.getDisplay() != presentationDisplay) { Log.i(TAG, "Dismissing presentation because the current route no longer " + "has a presentation display."); mPresentation.dismiss(); mPresentation = null; } // Show a new presentation if needed. 
if (mPresentation == null && presentationDisplay != null) { Log.i(TAG, "Showing presentation on display: " + presentationDisplay); mPresentation = new DemoPresentation(this, presentationDisplay); mPresentation.setOnDismissListener(mOnDismissListener); try { mPresentation.show(); } catch (WindowManager.InvalidDisplayException ex) { Log.w(TAG, "Couldn't show presentation! Display was removed in " + "the meantime.", ex); mPresentation = null; } } // Update the contents playing in this activity. updateContents(); } /** * Show either the content in the main activity or the content in the * presentation along with some descriptive text about what is happening. */ private void updateContents() { if (mPresentation != null) { mInfoTextView.setText(getResources().getString( R.string.presentation_with_media_router_now_playing_remotely, mPresentation.getDisplay().getName())); mSurfaceView.setVisibility(View.INVISIBLE); mSurfaceView.onPause(); if (mPaused) { mPresentation.getSurfaceView().onPause(); } else { mPresentation.getSurfaceView().onResume(); } } else { mInfoTextView.setText(getResources().getString( R.string.presentation_with_media_router_now_playing_locally, getWindowManager().getDefaultDisplay().getName())); mSurfaceView.setVisibility(View.VISIBLE); if (mPaused) { mSurfaceView.onPause(); } else { mSurfaceView.onResume(); } } } private final MediaRouter.SimpleCallback mMediaRouterCallback = new MediaRouter.SimpleCallback() { @Override public void onRouteSelected(MediaRouter router, int type, RouteInfo info) { Log.d(TAG, "onRouteSelected: type=" + type + ", info=" + info); updatePresentation(); } @Override public void onRouteUnselected(MediaRouter router, int type, RouteInfo info) { Log.d(TAG, "onRouteUnselected: type=" + type + ", info=" + info); updatePresentation(); } @Override public void onRoutePresentationDisplayChanged(MediaRouter router, RouteInfo info) { Log.d(TAG, "onRoutePresentationDisplayChanged: info=" + info); updatePresentation(); } }; /** * Listens for 
when presentations are dismissed. */ private final DialogInterface.OnDismissListener mOnDismissListener = new DialogInterface.OnDismissListener() { @Override public void onDismiss(DialogInterface dialog) { if (dialog == mPresentation) { Log.i(TAG, "Presentation was dismissed."); mPresentation = null; updateContents(); } } }; /** * The presentation to show on the secondary display. * <p> * Note that this display may have different metrics from the display on * which the main activity is showing so we must be careful to use the * presentation's own {@link Context} whenever we load resources. * </p> */ private final static class DemoPresentation extends Presentation { private GLSurfaceView mSurfaceView; public DemoPresentation(Context context, Display display) { super(context, display); } @Override protected void onCreate(Bundle savedInstanceState) { // Be sure to call the super class. super.onCreate(savedInstanceState); // Get the resources for the context of the presentation. // Notice that we are getting the resources from the context of the // presentation. Resources r = getContext().getResources(); // Inflate the layout. setContentView(R.layout.presentation_with_media_router_content); // Set up the surface view for visual interest. mSurfaceView = (GLSurfaceView) findViewById(R.id.surface_view); mSurfaceView.setRenderer(new CubeRenderer(false)); } public GLSurfaceView getSurfaceView() { return mSurfaceView; } } }