gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright 2020 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.worker; import com.google.common.annotations.VisibleForTesting; import com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest; import com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse; import com.sun.management.OperatingSystemMXBean; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.management.ManagementFactory; import java.time.Duration; import java.util.ArrayDeque; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Queue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; /** * A helper class that handles WorkRequests * (https://docs.bazel.build/versions/master/persistent-workers.html), including multiplex workers * (https://docs.bazel.build/versions/master/multiplex-worker.html). */ public class WorkRequestHandler implements AutoCloseable { /** Contains the logic for reading {@link WorkRequest}s and writing {@link WorkResponse}s. */ public interface WorkerMessageProcessor { /** Reads the next incoming request from this worker's stdin. */ WorkRequest readWorkRequest() throws IOException; /** * Writes the provided {@link WorkResponse} to this worker's stdout. 
This function is also * responsible for flushing the stdout. */ void writeWorkResponse(WorkResponse workResponse) throws IOException; /** Clean up. */ void close() throws IOException; } /** Holds information necessary to properly handle a request, especially for cancellation. */ static class RequestInfo { /** * The builder for the response to this request. Since only one response must be sent per * request, this builder must be accessed through takeBuilder(), which zeroes this field and * returns the builder. */ private WorkResponse.Builder responseBuilder = WorkResponse.newBuilder(); /** * Returns the response builder. If called more than once on the same instance, subsequent calls * will return {@code null}. */ synchronized Optional<WorkResponse.Builder> takeBuilder() { WorkResponse.Builder b = responseBuilder; responseBuilder = null; return Optional.ofNullable(b); } } /** Requests that are currently being processed. Visible for testing. */ final Map<Integer, RequestInfo> activeRequests = new ConcurrentHashMap<>(); /** WorkRequests that have been received but could not be processed yet. */ private final Queue<WorkRequest> availableRequests = new ArrayDeque<>(); /** The function to be called after each {@link WorkRequest} is read. */ private final BiFunction<List<String>, PrintWriter, Integer> callback; /** This worker's stderr. */ private final PrintStream stderr; final WorkerMessageProcessor messageProcessor; private final CpuTimeBasedGcScheduler gcScheduler; /** * Creates a {@code WorkRequestHandler} that will call {@code callback} for each WorkRequest * received. * * @param callback Callback method for executing a single WorkRequest in a thread. The first * argument to {@code callback} is the set of command-line arguments, the second is where all * error messages and other user-oriented messages should be written to. The callback must * return an exit code indicating success (zero) or failure (nonzero). 
* @param stderr Stream that log messages should be written to, typically the process' stderr. * @param messageProcessor Object responsible for parsing {@code WorkRequest}s from the server and * writing {@code WorkResponses} to the server. */ public WorkRequestHandler( BiFunction<List<String>, PrintWriter, Integer> callback, PrintStream stderr, WorkerMessageProcessor messageProcessor) { this(callback, stderr, messageProcessor, Duration.ZERO); } /** * Creates a {@code WorkRequestHandler} that will call {@code callback} for each WorkRequest * received. * * @param callback Callback method for executing a single WorkRequest in a thread. The first * argument to {@code callback} is the set of command-line arguments, the second is where all * error messages and other user-oriented messages should be written to. The callback must * return an exit code indicating success (zero) or failure (nonzero). * @param stderr Stream that log messages should be written to, typically the process' stderr. * @param messageProcessor Object responsible for parsing {@code WorkRequest}s from the server and * writing {@code WorkResponses} to the server. * @param cpuUsageBeforeGc The minimum amount of CPU time between explicit garbage collection * calls. Pass Duration.ZERO to not do explicit garbage collection. * @deprecated Use WorkRequestHandlerBuilder instead. */ @Deprecated() public WorkRequestHandler( BiFunction<List<String>, PrintWriter, Integer> callback, PrintStream stderr, WorkerMessageProcessor messageProcessor, Duration cpuUsageBeforeGc) { this.callback = callback; this.stderr = stderr; this.messageProcessor = messageProcessor; this.gcScheduler = new CpuTimeBasedGcScheduler(cpuUsageBeforeGc); } /** Builder class for WorkRequestHandler. Required parameters are passed to the constructor. 
*/ public static class WorkRequestHandlerBuilder { private final BiFunction<List<String>, PrintWriter, Integer> callback; private final PrintStream stderr; private final WorkerMessageProcessor messageProcessor; private Duration cpuUsageBeforeGc = Duration.ZERO; /** * Creates a {@code WorkRequestHandlerBuilder}. * * @param callback Callback method for executing a single WorkRequest in a thread. The first * argument to {@code callback} is the set of command-line arguments, the second is where * all error messages and other user-oriented messages should be written to. The callback * must return an exit code indicating success (zero) or failure (nonzero). * @param stderr Stream that log messages should be written to, typically the process' stderr. * @param messageProcessor Object responsible for parsing {@code WorkRequest}s from the server * and writing {@code WorkResponses} to the server. */ public WorkRequestHandlerBuilder( BiFunction<List<String>, PrintWriter, Integer> callback, PrintStream stderr, WorkerMessageProcessor messageProcessor) { this.callback = callback; this.stderr = stderr; this.messageProcessor = messageProcessor; } /** * Sets the minimum amount of CPU time between explicit garbage collection calls. Pass * Duration.ZERO to not do explicit garbage collection (the default). */ public WorkRequestHandlerBuilder setCpuUsageBeforeGc(Duration cpuUsageBeforeGc) { this.cpuUsageBeforeGc = cpuUsageBeforeGc; return this; } /** Returns a WorkRequestHandler instance with the values in this Builder. */ public WorkRequestHandler build() { return new WorkRequestHandler(callback, stderr, messageProcessor, cpuUsageBeforeGc); } } /** * Runs an infinite loop of reading {@link WorkRequest} from {@code in}, running the callback, * then writing the corresponding {@link WorkResponse} to {@code out}. If there is an error * reading or writing the requests or responses, it writes an error message on {@code err} and * returns. If {@code in} reaches EOF, it also returns. 
*/ public void processRequests() throws IOException { while (true) { WorkRequest request = messageProcessor.readWorkRequest(); if (request == null) { break; } availableRequests.add(request); startRequestThreads(); } } /** * Starts threads for as many outstanding requests as possible. This is the only method that adds * to {@code activeRequests}. */ private synchronized void startRequestThreads() { while (!availableRequests.isEmpty()) { // If there's a singleplex request in process, don't start more processes. if (activeRequests.containsKey(0)) { return; } WorkRequest request = availableRequests.peek(); // Don't start new singleplex requests if there are other requests running. if (request.getRequestId() == 0 && !activeRequests.isEmpty()) { return; } availableRequests.remove(); Thread t = createResponseThread(request); activeRequests.put(request.getRequestId(), new RequestInfo()); t.start(); } } /** Creates a new {@link Thread} to process a multiplex request. */ Thread createResponseThread(WorkRequest request) { Thread currentThread = Thread.currentThread(); String threadName = request.getRequestId() > 0 ? "multiplex-request-" + request.getRequestId() : "singleplex-request"; return new Thread( () -> { RequestInfo requestInfo = activeRequests.get(request.getRequestId()); try { respondToRequest(request, requestInfo); } catch (IOException e) { e.printStackTrace(stderr); // In case of error, shut down the entire worker. currentThread.interrupt(); } finally { activeRequests.remove(request.getRequestId()); // A good time to start more requests, especially if we finished a singleplex request startRequestThreads(); } }, threadName); } /** Handles and responds to the given {@link WorkRequest}. 
*/ @VisibleForTesting void respondToRequest(WorkRequest request, RequestInfo requestInfo) throws IOException { try (StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw)) { int exitCode; try { exitCode = callback.apply(request.getArgumentsList(), pw); } catch (RuntimeException e) { e.printStackTrace(pw); exitCode = 1; } pw.flush(); Optional<WorkResponse.Builder> optBuilder = requestInfo.takeBuilder(); if (optBuilder.isPresent()) { WorkResponse.Builder builder = optBuilder.get(); builder.setRequestId(request.getRequestId()); builder.setOutput(builder.getOutput() + sw.toString()).setExitCode(exitCode); WorkResponse response = builder.build(); synchronized (this) { messageProcessor.writeWorkResponse(response); } } gcScheduler.maybePerformGc(); } } @Override public void close() throws IOException { messageProcessor.close(); } /** * Class that performs GC occasionally, based on how much CPU time has passed. This strikes a * compromise between blindly doing GC after e.g. every request, which takes too much CPU, and not * doing explicit GC at all, which causes poor garbage collection in some cases. */ private static class CpuTimeBasedGcScheduler { /** * After this much CPU time has elapsed, we may force a GC run. Set to {@link Duration#ZERO} to * disable. */ private final Duration cpuUsageBeforeGc; /** The total process CPU time at the last GC run (or from the start of the worker). */ private final AtomicReference<Duration> cpuTimeAtLastGc; /** Used to get the CPU time used by this process. */ private static final OperatingSystemMXBean bean = (OperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean(); /** * Creates a new {@link CpuTimeBasedGcScheduler} that may perform GC after {@code * cpuUsageBeforeGc} amount of CPU time has been used. 
*/ public CpuTimeBasedGcScheduler(Duration cpuUsageBeforeGc) { this.cpuUsageBeforeGc = cpuUsageBeforeGc; this.cpuTimeAtLastGc = new AtomicReference<>(getCpuTime()); } private Duration getCpuTime() { return !cpuUsageBeforeGc.isZero() ? Duration.ofNanos(bean.getProcessCpuTime()) : Duration.ZERO; } /** Call occasionally to perform a GC if enough CPU time has been used. */ private void maybePerformGc() { if (!cpuUsageBeforeGc.isZero()) { Duration currentCpuTime = getCpuTime(); Duration lastCpuTime = cpuTimeAtLastGc.get(); // Do GC when enough CPU time has been used, but only if nobody else beat us to it. if (currentCpuTime.minus(lastCpuTime).compareTo(cpuUsageBeforeGc) > 0 && cpuTimeAtLastGc.compareAndSet(lastCpuTime, currentCpuTime)) { System.gc(); // Avoid counting GC CPU time against CPU time before next GC. cpuTimeAtLastGc.compareAndSet(currentCpuTime, getCpuTime()); } } } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.daemon.supervisor; import static org.junit.Assert.*; import static org.mockito.Mockito.*; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.storm.Config; import org.apache.storm.DaemonConfig; import org.apache.storm.container.ResourceIsolationInterface; import org.apache.storm.daemon.supervisor.Container.ContainerType; import org.apache.storm.generated.LocalAssignment; import org.apache.storm.generated.ProfileAction; import org.apache.storm.generated.ProfileRequest; import org.apache.storm.generated.StormTopology; import org.apache.storm.utils.SimpleVersion; import org.apache.storm.utils.Utils; import org.apache.storm.utils.LocalState; import org.junit.Test; public class BasicContainerTest { public static class CommandRun { final List<String> cmd; final Map<String, String> env; final File pwd; public CommandRun(List<String> cmd, Map<String, String> env, File pwd) { this.cmd = cmd; this.env = env; this.pwd = pwd; } } public static class MockBasicContainer extends BasicContainer { public 
MockBasicContainer(ContainerType type, Map<String, Object> conf, String supervisorId, int port, LocalAssignment assignment, ResourceIsolationInterface resourceIsolationManager, LocalState localState, String workerId, Map<String, Object> topoConf, AdvancedFSOps ops, String profileCmd) throws IOException { super(type, conf, supervisorId, port, assignment, resourceIsolationManager, localState, workerId, topoConf, ops, profileCmd); } public final List<CommandRun> profileCmds = new ArrayList<>(); public final List<CommandRun> workerCmds = new ArrayList<>(); @Override protected Map<String, Object> readTopoConf() throws IOException { return new HashMap<>(); } @Override public void createNewWorkerId() { super.createNewWorkerId(); } @Override public List<String> substituteChildopts(Object value, int memOnheap) { return super.substituteChildopts(value, memOnheap); } @Override protected boolean runProfilingCommand(List<String> command, Map<String, String> env, String logPrefix, File targetDir) throws IOException, InterruptedException { profileCmds.add(new CommandRun(command, env, targetDir)); return true; } @Override protected void launchWorkerProcess(List<String> command, Map<String, String> env, String logPrefix, ExitCodeCallback processExitCallback, File targetDir) throws IOException { workerCmds.add(new CommandRun(command, env, targetDir)); } @Override protected String javaCmd(String cmd) { //avoid system dependent things return cmd; } @Override protected List<String> frameworkClasspath(SimpleVersion version) { //We are not really running anything so make this // simple to check for return Arrays.asList("FRAMEWORK_CP"); } @Override protected String javaLibraryPath(String stormRoot, Map<String, Object> conf) { return "JLP"; } } @Test public void testCreateNewWorkerId() throws Exception { final String topoId = "test_topology"; final int port = 8080; LocalAssignment la = new LocalAssignment(); la.set_topology_id(topoId); Map<String, Object> superConf = new HashMap<>(); 
AdvancedFSOps ops = mock(AdvancedFSOps.class); when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true); LocalState ls = mock(LocalState.class); MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", port, la, null, ls, null, new HashMap<>(), ops, "profile"); //null worker id means generate one... assertNotNull(mc._workerId); verify(ls).getApprovedWorkers(); Map<String, Integer> expectedNewState = new HashMap<String, Integer>(); expectedNewState.put(mc._workerId, port); verify(ls).setApprovedWorkers(expectedNewState); } @Test public void testRecovery() throws Exception { final String topoId = "test_topology"; final String workerId = "myWorker"; final int port = 8080; LocalAssignment la = new LocalAssignment(); la.set_topology_id(topoId); Map<String, Integer> workerState = new HashMap<String, Integer>(); workerState.put(workerId, port); LocalState ls = mock(LocalState.class); when(ls.getApprovedWorkers()).thenReturn(workerState); Map<String, Object> superConf = new HashMap<>(); AdvancedFSOps ops = mock(AdvancedFSOps.class); when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true); MockBasicContainer mc = new MockBasicContainer(ContainerType.RECOVER_FULL, superConf, "SUPERVISOR", port, la, null, ls, null, new HashMap<>(), ops, "profile"); assertEquals(workerId, mc._workerId); } @Test public void testRecoveryMiss() throws Exception { final String topoId = "test_topology"; final int port = 8080; LocalAssignment la = new LocalAssignment(); la.set_topology_id(topoId); Map<String, Integer> workerState = new HashMap<String, Integer>(); workerState.put("somethingelse", port+1); LocalState ls = mock(LocalState.class); when(ls.getApprovedWorkers()).thenReturn(workerState); try { new MockBasicContainer(ContainerType.RECOVER_FULL, new HashMap<String, Object>(), "SUPERVISOR", port, la, null, ls, null, new HashMap<>(), null, "profile"); fail("Container recovered worker incorrectly"); } catch 
(ContainerRecoveryException e) { //Expected } } @Test public void testCleanUp() throws Exception { final String topoId = "test_topology"; final int port = 8080; final String workerId = "worker-id"; LocalAssignment la = new LocalAssignment(); la.set_topology_id(topoId); Map<String, Object> superConf = new HashMap<>(); AdvancedFSOps ops = mock(AdvancedFSOps.class); when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true); Map<String, Integer> workerState = new HashMap<String, Integer>(); workerState.put(workerId, port); LocalState ls = mock(LocalState.class); when(ls.getApprovedWorkers()).thenReturn(new HashMap<>(workerState)); MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", port, la, null, ls, workerId, new HashMap<>(), ops, "profile"); mc.cleanUp(); assertNull(mc._workerId); verify(ls).getApprovedWorkers(); Map<String, Integer> expectedNewState = new HashMap<String, Integer>(); verify(ls).setApprovedWorkers(expectedNewState); } @Test public void testRunProfiling() throws Exception { final long pid = 100; final String topoId = "test_topology"; final int port = 8080; final String workerId = "worker-id"; final String stormLocal = ContainerTest.asAbsPath("tmp", "testing"); final String topoRoot = ContainerTest.asAbsPath(stormLocal, topoId, String.valueOf(port)); final File workerArtifactsPid = ContainerTest.asAbsFile(topoRoot, "worker.pid"); final Map<String, Object> superConf = new HashMap<>(); superConf.put(Config.STORM_LOCAL_DIR, stormLocal); superConf.put(Config.STORM_WORKERS_ARTIFACTS_DIR, stormLocal); LocalAssignment la = new LocalAssignment(); la.set_topology_id(topoId); AdvancedFSOps ops = mock(AdvancedFSOps.class); when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true); when(ops.slurpString(workerArtifactsPid)).thenReturn(String.valueOf(pid)); LocalState ls = mock(LocalState.class); MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", port, la, 
null, ls, workerId, new HashMap<>(), ops, "profile"); //HEAP DUMP ProfileRequest req = new ProfileRequest(); req.set_action(ProfileAction.JMAP_DUMP); mc.runProfiling(req, false); assertEquals(1, mc.profileCmds.size()); CommandRun cmd = mc.profileCmds.get(0); mc.profileCmds.clear(); assertEquals(Arrays.asList("profile", String.valueOf(pid), "jmap", topoRoot), cmd.cmd); assertEquals(new File(topoRoot), cmd.pwd); //JSTACK DUMP req.set_action(ProfileAction.JSTACK_DUMP); mc.runProfiling(req, false); assertEquals(1, mc.profileCmds.size()); cmd = mc.profileCmds.get(0); mc.profileCmds.clear(); assertEquals(Arrays.asList("profile", String.valueOf(pid), "jstack", topoRoot), cmd.cmd); assertEquals(new File(topoRoot), cmd.pwd); //RESTART req.set_action(ProfileAction.JVM_RESTART); mc.runProfiling(req, false); assertEquals(1, mc.profileCmds.size()); cmd = mc.profileCmds.get(0); mc.profileCmds.clear(); assertEquals(Arrays.asList("profile", String.valueOf(pid), "kill"), cmd.cmd); assertEquals(new File(topoRoot), cmd.pwd); //JPROFILE DUMP req.set_action(ProfileAction.JPROFILE_DUMP); mc.runProfiling(req, false); assertEquals(1, mc.profileCmds.size()); cmd = mc.profileCmds.get(0); mc.profileCmds.clear(); assertEquals(Arrays.asList("profile", String.valueOf(pid), "dump", topoRoot), cmd.cmd); assertEquals(new File(topoRoot), cmd.pwd); //JPROFILE START req.set_action(ProfileAction.JPROFILE_STOP); mc.runProfiling(req, false); assertEquals(1, mc.profileCmds.size()); cmd = mc.profileCmds.get(0); mc.profileCmds.clear(); assertEquals(Arrays.asList("profile", String.valueOf(pid), "start"), cmd.cmd); assertEquals(new File(topoRoot), cmd.pwd); //JPROFILE STOP req.set_action(ProfileAction.JPROFILE_STOP); mc.runProfiling(req, true); assertEquals(1, mc.profileCmds.size()); cmd = mc.profileCmds.get(0); mc.profileCmds.clear(); assertEquals(Arrays.asList("profile", String.valueOf(pid), "stop", topoRoot), cmd.cmd); assertEquals(new File(topoRoot), cmd.pwd); } private static void setSystemProp(String 
key, String value) { if (value == null) { System.clearProperty(key); } else { System.setProperty(key, value); } } private static interface Run { public void run() throws Exception; } private static void checkpoint(Run r, String ... newValues) throws Exception { if (newValues.length % 2 != 0) { throw new IllegalArgumentException("Parameters are of the form system property name, new value"); } Map<String, String> orig = new HashMap<>(); try { for (int index = 0; index < newValues.length; index += 2) { String key = newValues[index]; String value = newValues[index + 1]; orig.put(key, System.getProperty(key)); setSystemProp(key, value); } r.run(); } finally { for (Map.Entry<String, String> entry: orig.entrySet()) { setSystemProp(entry.getKey(), entry.getValue()); } } } private static <T> void assertListEquals(List<T> a, List<T> b) { if (a == null) { assertNull(b); } if (b == null) { assertNull(a); } int commonLen = Math.min(a.size(), b.size()); for (int i = 0; i < commonLen; i++) { assertEquals("at index "+i+"\n"+a+" !=\n"+b+"\n", a.get(i), b.get(i)); } assertEquals("size of lists don't match \n"+a+" !=\n"+b, a.size(), b.size()); } @Test public void testLaunch() throws Exception { final String topoId = "test_topology"; final int port = 8080; final String stormHome = ContainerTest.asAbsPath("tmp", "storm-home"); final String stormLogDir = ContainerTest.asFile(".", "target").getCanonicalPath(); final String workerId = "worker-id"; final String stormLocal = ContainerTest.asAbsPath("tmp", "storm-local"); final String distRoot = ContainerTest.asAbsPath(stormLocal, "supervisor", "stormdist", topoId); final File stormcode = new File(distRoot, "stormcode.ser"); final File stormjar = new File(distRoot, "stormjar.jar"); final String log4jdir = ContainerTest.asAbsPath(stormHome, "conf"); final String workerConf = ContainerTest.asAbsPath(log4jdir, "worker.xml"); final String workerRoot = ContainerTest.asAbsPath(stormLocal, "workers", workerId); final String workerTmpDir = 
ContainerTest.asAbsPath(workerRoot, "tmp"); final StormTopology st = new StormTopology(); st.set_spouts(new HashMap<>()); st.set_bolts(new HashMap<>()); st.set_state_spouts(new HashMap<>()); byte [] serializedState = Utils.gzip(Utils.thriftSerialize(st)); final Map<String, Object> superConf = new HashMap<>(); superConf.put(Config.STORM_LOCAL_DIR, stormLocal); superConf.put(Config.STORM_WORKERS_ARTIFACTS_DIR, stormLocal); superConf.put(DaemonConfig.STORM_LOG4J2_CONF_DIR, log4jdir); superConf.put(Config.WORKER_CHILDOPTS, " -Dtesting=true"); LocalAssignment la = new LocalAssignment(); la.set_topology_id(topoId); AdvancedFSOps ops = mock(AdvancedFSOps.class); when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true); when(ops.slurp(stormcode)).thenReturn(serializedState); LocalState ls = mock(LocalState.class); checkpoint(() -> { MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", port, la, null, ls, workerId, new HashMap<>(), ops, "profile"); mc.launch(); assertEquals(1, mc.workerCmds.size()); CommandRun cmd = mc.workerCmds.get(0); mc.workerCmds.clear(); assertListEquals(Arrays.asList( "java", "-cp", "FRAMEWORK_CP:" + stormjar.getAbsolutePath(), "-Dlogging.sensitivity=S3", "-Dlogfile.name=worker.log", "-Dstorm.home=" + stormHome, "-Dworkers.artifacts=" + stormLocal, "-Dstorm.id=" + topoId, "-Dworker.id=" + workerId, "-Dworker.port=" + port, "-Dstorm.log.dir=" + stormLogDir, "-Dlog4j.configurationFile=" + workerConf, "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector", "-Dstorm.local.dir=" + stormLocal, "org.apache.storm.LogWriter", "java", "-server", "-Dlogging.sensitivity=S3", "-Dlogfile.name=worker.log", "-Dstorm.home=" + stormHome, "-Dworkers.artifacts=" + stormLocal, "-Dstorm.id=" + topoId, "-Dworker.id=" + workerId, "-Dworker.port=" + port, "-Dstorm.log.dir=" + stormLogDir, "-Dlog4j.configurationFile=" + workerConf, 
"-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector", "-Dstorm.local.dir=" + stormLocal, "-Dtesting=true", "-Djava.library.path=JLP", "-Dstorm.conf.file=", "-Dstorm.options=", "-Djava.io.tmpdir="+workerTmpDir, "-cp", "FRAMEWORK_CP:" + stormjar.getAbsolutePath(), "org.apache.storm.daemon.worker.Worker", topoId, "SUPERVISOR", String.valueOf(port), workerId ), cmd.cmd); assertEquals(new File(workerRoot), cmd.pwd); }, "storm.home", stormHome, "storm.log.dir", stormLogDir); } @Test public void testSubstChildOpts() throws Exception { String workerId = "w-01"; String topoId = "s-01"; int port = 9999; int memOnheap = 512; LocalAssignment la = new LocalAssignment(); la.set_topology_id(topoId); Map<String, Object> superConf = new HashMap<>(); AdvancedFSOps ops = mock(AdvancedFSOps.class); when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true); LocalState ls = mock(LocalState.class); MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", port, la, null, ls, workerId, new HashMap<>(), ops, "profile"); assertListEquals(Arrays.asList( "-Xloggc:/tmp/storm/logs/gc.worker-9999-s-01-w-01-9999.log", "-Xms256m", "-Xmx512m"), mc.substituteChildopts("-Xloggc:/tmp/storm/logs/gc.worker-%ID%-%TOPOLOGY-ID%-%WORKER-ID%-%WORKER-PORT%.log -Xms256m -Xmx%HEAP-MEM%m", memOnheap)); assertListEquals(Arrays.asList( "-Xloggc:/tmp/storm/logs/gc.worker-9999-s-01-w-01-9999.log", "-Xms256m", "-Xmx512m"), mc.substituteChildopts(Arrays.asList("-Xloggc:/tmp/storm/logs/gc.worker-%ID%-%TOPOLOGY-ID%-%WORKER-ID%-%WORKER-PORT%.log","-Xms256m","-Xmx%HEAP-MEM%m"), memOnheap)); assertListEquals(Collections.emptyList(), mc.substituteChildopts(null)); } }
package org.codehaus.mojo.webstart; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.artifact.Artifact; /** * Represents a resolved jarResource. * <p> * Created on 10/29/13. * * @author Tony Chemit - dev@tchemit.fr * @since 1.0-beta-4 */ public class ResolvedJarResource { /** * The underlying jar resource configuration (from pom). */ private final JarResource jarResource; /** * The resolved artifact. */ private final Artifact artifact; /** * The hrefValue to fill in JarResource file. 
*/ private String hrefValue; public ResolvedJarResource( Artifact artifact ) { this( new JarResource(), artifact ); } public ResolvedJarResource( JarResource jarResource, Artifact artifact ) { if ( artifact == null ) { throw new IllegalArgumentException( "artifact must not be null" ); } if ( jarResource == null ) { throw new IllegalArgumentException( "jarResource must not be null" ); } this.jarResource = jarResource; this.artifact = artifact; setHrefValue( jarResource.getHrefValue() ); } public String getArtifactId() { return artifact.getArtifactId(); } public String getType() { return artifact.getType(); } public String getClassifier() { return artifact.getClassifier(); } public String getGroupId() { return artifact.getGroupId(); } public String getVersion() { return artifact.getVersion(); } public String getMainClass() { return jarResource.getMainClass(); } public boolean isOutputJarVersion() { return jarResource.isOutputJarVersion(); } public boolean isIncludeInJnlp() { return jarResource.isIncludeInJnlp(); } /** * Returns the underlying artifact that this instance represents. * * @return Returns the value of the artifact field. */ public Artifact getArtifact() { return artifact; } /** * Returns the value that should be output for this jar in the href attribute of the * jar resource element in the generated JNLP file. If not set explicitly, this defaults * to the file name of the underlying artifact. * * @return The href attribute to be output for this jar resource in the generated JNLP file. */ public String getHrefValue() { String result; if ( hrefValue == null && getArtifact() != null ) { // use default value result = getArtifact().getFile().getName(); } else { // use customized value result = hrefValue; } return result; } /** * Sets the value that should be output for this jar in the href attribute of the * jar resource element in the generated JNLP file. If not set explicitly, this defaults * to the file name of the underlying artifact. 
* * @param hrefValue new value for field {@link #hrefValue} */ public void setHrefValue( String hrefValue ) { this.hrefValue = hrefValue; } /** * Returns true if the given object is a JarResource and has the same * combination of <code>groupId</code>, <code>artifactId</code>, * <code>version</code> and <code>classifier</code>. * * @return {@code true} if equals to given other object. */ @Override public boolean equals( Object obj ) { if ( obj == this ) { return true; } if ( !( obj instanceof ResolvedJarResource ) ) { return false; } ResolvedJarResource other = (ResolvedJarResource) obj; if ( fieldsAreNotEqual( getGroupId(), other.getGroupId() ) ) { return false; } if ( fieldsAreNotEqual( getArtifactId(), other.getArtifactId() ) ) { return false; } if ( fieldsAreNotEqual( getVersion(), other.getVersion() ) ) { return false; } if ( fieldsAreNotEqual( getType(), other.getType() ) ) { return false; } if ( fieldsAreNotEqual( getClassifier(), other.getClassifier() ) ) { return false; } return true; } private boolean fieldsAreNotEqual( Object field1, Object field2 ) { if ( field1 == null ) { return field2 != null; } else { return !field1.equals( field2 ); } } /** * {@inheritDoc} */ public int hashCode() { final int offset = 17; final int multiplier = 37; int result = offset; result += multiplier * fieldHashCode( getGroupId() ); result += multiplier * fieldHashCode( getArtifactId() ); result += multiplier * fieldHashCode( getVersion() ); result += multiplier * fieldHashCode( getType() ); result += multiplier * fieldHashCode( getClassifier() ); return result; } private int fieldHashCode( Object field ) { return field == null ? 0 : field.hashCode(); } }
/*******************************************************************************
 * Copyright 2016 MediaMath
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package com.mediamath.terminalone.models;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.ws.rs.core.Form;

/**
 * Entity model for a T1 Strategy: a plain mutable data holder with trivial
 * accessors for every field. Field names mirror the T1 API's snake_case
 * attribute names. {@link #getForm()} and {@link #getUri()} return {@code null}
 * here; form/URI construction is handled elsewhere for this entity.
 */
public class Strategy implements T1Entity, Cloneable {

  private static final String entityName = "Strategy";

  /** Boolean operator joining excluded audience segments. */
  public enum audSegExc {
    AND, OR
  }

  /** Boolean operator joining included audience segments. */
  public enum audSegInc {
    AND, OR
  }

  /** Boolean operator joining excluded targeting segments. */
  public enum tgtSegExc {
    AND, OR
  }

  /** Boolean operator joining included targeting segments. */
  public enum tgtSegInc {
    AND, OR
  }

  /** Frequency-cap interval; {@code getValue()} yields the API wire string. */
  public enum freqInt {
    hour("hour"), day("day"), week("week"), month("month"), campaign("campaign");

    String value;

    freqInt(String s) {
      value = s;
    }

    public String getValue() {
      return value;
    }
  }

  // NOTE(review): an original stray comment here read "should be not-applicable" —
  // possibly meaning freqType should also expose a "not-applicable" constant;
  // confirm against the T1 API before adding one.
  /** Frequency-cap type; note {@code no_limit} serializes as "no-limit". */
  public enum freqType {
    even("even"), asap("asap"), no_limit("no-limit");

    String value;

    freqType(String s) {
      value = s;
    }

    public String getValue() {
      return value;
    }
  }

  // NOTE(review): an original stray comment here read "should be no-limit 'ctr',
  // 'vcpm', 'vcr' and 'viewability_rate'" — likely a changelog remnant about the
  // newer goal types below; kept for provenance.
  /** Optimization goal type; {@code getValue()} yields the API wire string. */
  public enum goalType {
    spend("spend"), reach("reach"), cpc("cpc"), cpa("cpa"), roi("roi"), ctr("ctr"),
    vcpm("vcpm"), vcr("vcr"), viewability_rate("viewability_rate");

    String value;

    goalType(String s) {
      value = s;
    }

    public String getValue() {
      return value;
    }
  }

  /** Media channel for the strategy. */
  public enum mediaType {
    DISPLAY, VIDEO
  }

  /** Pacing interval; {@code getValue()} yields the API wire string. */
  public enum pacInt {
    hour("hour"), day("day");

    String value;

    pacInt(String s) {
      value = s;
    }

    public String getValue() {
      return value;
    }
  }

  /** Pacing type; {@code getValue()} yields the API wire string. */
  public enum pacType {
    even("even"), asap("asap");

    String value;

    pacType(String s) {
      value = s;
    }

    public String getValue() {
      return value;
    }
  }

  /** Site selectiveness level. */
  public enum siteSelect {
    MATHSELECT_250, EXCLUDE_UGC, ALL, REDUCED
  }

  /** Supply source type. */
  public enum supplyType {
    RTB, RMX_API, T1_RMX
  }

  /** Strategy type. */
  public enum type {
    REM, GBO, AUD
  }

  // --- scalar / enum attributes (names mirror T1 API snake_case) ---
  private audSegExc audience_segment_exclude_op;
  private audSegInc audience_segment_include_op;
  private float bid_aggressiveness;
  private boolean bid_price_is_media_only;
  private ArrayList<Currency> budget = new ArrayList<Currency>();
  private int campaign_id;
  private String created_on;
  private String currency_code;
  private String description;
  private ArrayList<Currency> effective_goal_value = new ArrayList<Currency>();
  private Date end_date;
  private String feature_compatibility;
  private int frequency_amount;
  private freqInt frequency_interval;
  private boolean frequency_optimization;
  private freqType frequency_type;
  private goalType goal_type;
  private ArrayList<Currency> goal_value = new ArrayList<Currency>();
  private int id;
  private int impression_cap;
  private freqType impression_cap_type;
  private int impression_pacing_amount;
  private freqType impression_pacing_type;
  private freqInt impression_pacing_interval;
  private ArrayList<Currency> max_bid = new ArrayList<Currency>();
  private mediaType media_type;
  private ArrayList<Currency> min_bid = new ArrayList<Currency>();
  private String name;
  private ArrayList<Currency> pacing_amount = new ArrayList<Currency>();
  private pacInt pacing_interval;
  private pacType pacing_type;
  private ArrayList<Currency> roi_target = new ArrayList<Currency>();
  private String pixel_target_expr;
  private ArrayList<Integer> includePixels = new ArrayList<Integer>();
  private ArrayList<Integer> excludePixels = new ArrayList<Integer>();
  private boolean run_on_all_exchanges;
  private boolean run_on_all_pmp;
  private boolean run_on_display;
  private boolean run_on_mobile;
  private boolean run_on_streaming;
  private boolean site_restriction_transparent_urls;
  private siteSelect site_selectiveness;
  private Date start_date;
  private boolean status;
  private supplyType supply_type;
  private tgtSegExc targeting_segment_exclude_op;
  private tgtSegInc targeting_segment_include_op;
  private type type;
  private String updated_on;
  private boolean use_campaign_end;
  private boolean use_campaign_start;
  private boolean use_mm_freq;
  private boolean use_optimization;
  private int version;
  private String zone_name;
  private Aggregate aggregate;
  // --- client-side helpers for the "copy strategy" operation ---
  private boolean copyStrategy = false;
  private int from_campaign_id;
  private int to_campaign_id;
  // --- related entities and collections ---
  private Campaign campaign;
  private TargetDimensions targetDimensions;
  private StrategyTargetValues strategyTargetValues;
  private List<StrategyDomain> strategy_domain_restrictions = new ArrayList<StrategyDomain>();
  private List<Segments> audience_segments = new ArrayList<Segments>();
  private List<Segments> targeting_segments = new ArrayList<Segments>();
  private List<TargetValues> target_values = new ArrayList<TargetValues>();
  private List<Concept> concepts = new ArrayList<Concept>();
  private List<StrategyAudienceSegment> strategyAudienceSegments = new ArrayList<StrategyAudienceSegment>();
  private List<StrategyTargetingSegment> strategyTargetingSegments = new ArrayList<StrategyTargetingSegment>();
  private List<StrategyDayPart> strategyDayParts = new ArrayList<StrategyDayPart>();
  private List<StrategyTarget> strategyTarget = new ArrayList<StrategyTarget>();
  private List<BulkStrategy> bulkStrategy = new ArrayList<BulkStrategy>();
  private List<Deal> deals = new ArrayList<>();
  private List<Integer> dealIds = new ArrayList<>();
  private List<SiteList> site_lists = new ArrayList<SiteList>();

  // --- trivial accessors: plain field get/set unless otherwise noted ---

  public audSegExc getAudienceSegmentExcludeOp() { return audience_segment_exclude_op; }

  public void setAudienceSegmentExcludeOp(audSegExc audience_segment_exclude_op) { this.audience_segment_exclude_op = audience_segment_exclude_op; }

  public audSegInc getAudienceSegmentIncludeOp() { return audience_segment_include_op; }

  public void setAudienceSegmentIncludeOp(audSegInc audience_segment_include_op) { this.audience_segment_include_op = audience_segment_include_op; }

  // NOTE(review): "Aggresiveness" is misspelled in this accessor pair, but the
  // names are part of the public API of this class — do not rename.
  public float getBidAggresiveness() { return bid_aggressiveness; }

  public void setBidAggresiveness(float bid_aggresiveness) { this.bid_aggressiveness = bid_aggresiveness; }

  public boolean isBidPriceIsMediaOnly() { return bid_price_is_media_only; }

  public void setBidPriceIsMediaOnly(boolean bid_price_is_media_only) { this.bid_price_is_media_only = bid_price_is_media_only; }

  public int getCampaignId() { return campaign_id; }

  public void setCampaignId(int campaign_id) { this.campaign_id = campaign_id; }

  public String getCreatedOn() { return created_on; }

  public void setCreatedOn(String created_on) { this.created_on = created_on; }

  public String getCurrencyCode() { return currency_code; }

  public void setCurrencyCode(String currency_code) { this.currency_code = currency_code; }

  public String getDescription() { return description; }

  public void setDescription(String description) { this.description = description; }

  public String getFeatureCompatibility() { return feature_compatibility; }

  public void setFeatureCompatibility(String feature_compatibility) { this.feature_compatibility = feature_compatibility; }

  public freqInt getFrequencyInterval() { return frequency_interval; }

  public void setFrequencyInterval(freqInt frequency_interval) { this.frequency_interval = frequency_interval; }

  public boolean isFrequencyOptimization() { return frequency_optimization; }

  public void setFrequencyOptimization(boolean frequency_optimization) { this.frequency_optimization = frequency_optimization; }

  public freqType getFrequencyType() { return frequency_type; }

  public void setFrequencyType(freqType frequency_type) { this.frequency_type = frequency_type; }

  public goalType getGoalType() { return goal_type; }

  public void setGoalType(goalType goal_type) { this.goal_type = goal_type; }

  public int getId() { return id; }

  public void setId(int id) { this.id = id; }

  public int getImpressionCap() { return impression_cap; }

  public void setImpressionCap(int impression_cap) { this.impression_cap = impression_cap; }

  public freqType getImpressionCapType() { return impression_cap_type; }

  public void setImpressionCapType(freqType impression_cap_type) { this.impression_cap_type = impression_cap_type; }

  public int getImpressionPacingAmount() { return impression_pacing_amount; }

  public void setImpressionPacingAmount(int impression_pacing_amount) { this.impression_pacing_amount = impression_pacing_amount; }

  public freqType getImpressionPacingType() { return impression_pacing_type; }

  public void setImpressionPacingType(freqType impression_pacing_type) { this.impression_pacing_type = impression_pacing_type; }

  public freqInt getImpressionPacingInterval() { return impression_pacing_interval; }

  public void setImpressionPacingInterval(freqInt impression_pacing_interval) { this.impression_pacing_interval = impression_pacing_interval; }

  public mediaType getMediaType() { return media_type; }

  public void setMediaType(mediaType media_type) { this.media_type = media_type; }

  public String getName() { return name; }

  public void setName(String name) { this.name = name; }

  public pacInt getPacingInterval() { return pacing_interval; }

  public void setPacingInterval(pacInt pacing_interval) { this.pacing_interval = pacing_interval; }

  public pacType getPacingType() { return pacing_type; }

  public void setPacingType(pacType pacing_type) { this.pacing_type = pacing_type; }

  public String getPixelTargetExpr() { return pixel_target_expr; }

  public void setPixelTargetExpr(String pixel_target_expr) { this.pixel_target_expr = pixel_target_expr; }

  public boolean isRunOnAllExchanges() { return run_on_all_exchanges; }

  public void setRunOnAllExchanges(boolean run_on_all_exchanges) { this.run_on_all_exchanges = run_on_all_exchanges; }

  public boolean isRunOnAllPmp() { return run_on_all_pmp; }

  public void setRunOnAllPmp(boolean run_on_all_pmp) { this.run_on_all_pmp = run_on_all_pmp; }

  public boolean isRunOnDisplay() { return run_on_display; }

  public void setRunOnDisplay(boolean run_on_display) { this.run_on_display = run_on_display; }

  public boolean isRunOnMobile() { return run_on_mobile; }

  public void setRunOnMobile(boolean run_on_mobile) { this.run_on_mobile = run_on_mobile; }

  public boolean isRunOnStreaming() { return run_on_streaming; }

  public void setRunOnStreaming(boolean run_on_streaming) { this.run_on_streaming = run_on_streaming; }

  public boolean isSiteRestrictionTransparentUrls() { return site_restriction_transparent_urls; }

  public void setSiteRestrictionTransparentUrls(boolean site_restriction_transparent_urls) { this.site_restriction_transparent_urls = site_restriction_transparent_urls; }

  public siteSelect getSiteSelectiveness() { return site_selectiveness; }

  public void setSiteSelectiveness(siteSelect site_selectiveness) { this.site_selectiveness = site_selectiveness; }

  public boolean isStatus() { return status; }

  public void setStatus(boolean status) { this.status = status; }

  public supplyType getSupplyType() { return supply_type; }

  public void setSupplyType(supplyType supply_type) { this.supply_type = supply_type; }

  public tgtSegExc getTargetingSegmentExcludeOp() { return targeting_segment_exclude_op; }

  public void setTargetingSegmentExcludeOp(tgtSegExc targeting_segment_exclude_op) { this.targeting_segment_exclude_op = targeting_segment_exclude_op; }

  public tgtSegInc getTargetingSegmentIncludeOp() { return targeting_segment_include_op; }

  public void setTargetingSegmentIncludeOp(tgtSegInc targeting_segment_include_op) { this.targeting_segment_include_op = targeting_segment_include_op; }

  public type getType() { return type; }

  public void setType(type type) { this.type = type; }

  public String getUpdatedOn() { return updated_on; }

  public void setUpdatedOn(String updated_on) { this.updated_on = updated_on; }

  public boolean isUseCampaignEnd() { return use_campaign_end; }

  public void setUseCampaignEnd(boolean use_campaign_end) { this.use_campaign_end = use_campaign_end; }

  public boolean isUseCampaignStart() { return use_campaign_start; }

  public void setUseCampaignStart(boolean use_campaign_start) { this.use_campaign_start = use_campaign_start; }

  public boolean isUseMmFreq() { return use_mm_freq; }

  public void setUseMmFreq(boolean use_mm_freq) { this.use_mm_freq = use_mm_freq; }

  public boolean isUseOptimization() { return use_optimization; }

  public void setUseOptimization(boolean use_optimization) { this.use_optimization = use_optimization; }

  public int getVersion() { return version; }

  public void setVersion(int version) { this.version = version; }

  /** {@inheritDoc} Constant entity name used by the T1 service layer. */
  @Override
  public String getEntityname() { return entityName; }

  public String getZoneName() { return zone_name; }

  public void setZoneName(String zone_name) { this.zone_name = zone_name; }

  public List<StrategyDomain> getStrategyDomainRestrictions() { return strategy_domain_restrictions; }

  public void setStrategyDomainRestrictions(List<StrategyDomain> strategy_domain_restrictions) { this.strategy_domain_restrictions = strategy_domain_restrictions; }

  public List<Segments> getAudienceSegments() { return audience_segments; }

  public void setAudienceSegments(List<Segments> audience_segments) { this.audience_segments = audience_segments; }

  public List<TargetValues> getTargetValues() { return target_values; }

  public void setTargetValues(List<TargetValues> target_values) { this.target_values = target_values; }

  public Campaign getCampaign() { return campaign; }

  public void setCampaign(Campaign campaign) { this.campaign = campaign; }

  public List<Concept> getConcepts() { return concepts; }

  public void setConcepts(List<Concept> concepts) { this.concepts = concepts; }

  public Date getEndDate() { return end_date; }

  public void setEndDate(Date end_date) { this.end_date = end_date; }

  public Date getStartDate() { return start_date; }

  public void setStartDate(Date start_date) { this.start_date = start_date; }

  public int getFrequencyAmount() { return frequency_amount; }

  public void setFrequencyAmount(int frequency_amount) { this.frequency_amount = frequency_amount; }

  public ArrayList<Currency> getBudget() { return budget; }

  public void setBudget(ArrayList<Currency> budget) { this.budget = budget; }

  /** Convenience overload: appends (does not replace) a Currency entry holding the value. */
  public void setBudget(float budget) {
    Currency currency = new Currency();
    currency.setValue(budget);
    this.budget.add(currency);
  }

  public ArrayList<Currency> getEffectiveGoalValue() { return effective_goal_value; }

  public void setEffectiveGoalValue(ArrayList<Currency> effective_goal_value) { this.effective_goal_value = effective_goal_value; }

  /** Convenience overload: appends (does not replace) a Currency entry holding the value. */
  public void setEffectiveGoalValue(float effective_goal_value) {
    Currency currency = new Currency();
    currency.setValue(effective_goal_value);
    this.effective_goal_value.add(currency);
  }

  public ArrayList<Currency> getGoalValue() { return goal_value; }

  public void setGoalValue(ArrayList<Currency> goal_value) { this.goal_value = goal_value; }

  /** Convenience overload: appends (does not replace) a Currency entry holding the value. */
  public void setGoalValue(float goal_value) {
    Currency currency = new Currency();
    currency.setValue(goal_value);
    this.goal_value.add(currency);
  }

  /** Convenience overload: parses the string as a float; throws NumberFormatException if malformed. */
  public void setGoalValue(String goal_value) {
    Currency currency = new Currency();
    currency.setValue(Float.valueOf(goal_value));
    this.goal_value.add(currency);
  }

  public ArrayList<Currency> getMaxBid() { return max_bid; }

  public void setMaxBid(ArrayList<Currency> max_bid) { this.max_bid = max_bid; }

  /** Convenience overload: appends (does not replace) a Currency entry holding the value. */
  public void setMaxBid(float max_bid) {
    Currency currency = new Currency();
    currency.setValue(max_bid);
    this.max_bid.add(currency);
  }

  public ArrayList<Currency> getMinBid() { return min_bid; }

  public void setMinBid(ArrayList<Currency> min_bid) { this.min_bid = min_bid; }

  /** Convenience overload: appends (does not replace) a Currency entry holding the value. */
  public void setMinBid(float min_bid) {
    Currency currency = new Currency();
    currency.setValue(min_bid);
    this.min_bid.add(currency);
  }

  public ArrayList<Currency> getPacingAmount() { return pacing_amount; }

  public void setPacingAmount(ArrayList<Currency> pacing_amount) { this.pacing_amount = pacing_amount; }

  /** Convenience overload: appends (does not replace) a Currency entry holding the value. */
  public void setPacingAmount(float pacing_amount) {
    Currency currency = new Currency();
    currency.setValue(pacing_amount);
    this.pacing_amount.add(currency);
  }

  public ArrayList<Currency> getRoiTarget() { return roi_target; }

  public void setRoiTarget(ArrayList<Currency> roi_target) { this.roi_target = roi_target; }

  /** Convenience overload: appends (does not replace) a Currency entry holding the value. */
  public void setRoiTarget(float roi_target) {
    Currency currency = new Currency();
    currency.setValue(roi_target);
    this.roi_target.add(currency);
  }

  public Aggregate getAggregate() { return aggregate; }

  public void setAggregate(Aggregate aggregate) { this.aggregate = aggregate; }

  public boolean isCopyStrategy() { return copyStrategy; }

  public void setCopyStrategy(boolean copyStrategy) { this.copyStrategy = copyStrategy; }

  public int getFromCampaignId() { return from_campaign_id; }

  public void setFromCampaignId(int from_campaign_id) { this.from_campaign_id = from_campaign_id; }

  public int getToCampaignId() { return to_campaign_id; }

  public void setToCampaignId(int to_campaign_id) { this.to_campaign_id = to_campaign_id; }

  public List<BulkStrategy> getBulkStrategy() { return bulkStrategy; }

  public void setBulkStrategy(List<BulkStrategy> bulkStrategy) { this.bulkStrategy = bulkStrategy; }

  public List<SiteList> getSiteLists() { return site_lists; }

  public void setSiteLists(List<SiteList> siteLists) { this.site_lists = siteLists; }

  public ArrayList<Integer> getIncludePixels() { return includePixels; }

  public void setIncludePixels(ArrayList<Integer> includePixels) { this.includePixels = includePixels; }

  /** Convenience overload: appends a single pixel id. */
  public void setIncludePixels(Integer pixelId) { this.includePixels.add(pixelId); }

  public ArrayList<Integer> getExcludePixels() { return excludePixels; }

  public void setExcludePixels(ArrayList<Integer> excludePixels) { this.excludePixels = excludePixels; }

  /** Convenience overload: appends a single pixel id. */
  public void setExcludePixels(Integer pixelId) { this.excludePixels.add(pixelId); }

  public List<StrategyAudienceSegment> getStrategyAudienceSegments() { return strategyAudienceSegments; }

  public void setStrategyAudienceSegments(List<StrategyAudienceSegment> strategyAudienceSegments) { this.strategyAudienceSegments = strategyAudienceSegments; }

  public List<Deal> getDeals() { return deals; }

  public void setDeals(List<Deal> deals) { this.deals = deals; }

  public List<Integer> getDealIds() { return dealIds; }

  public void setDealIds(List<Integer> dealIds) { this.dealIds = dealIds; }

  public List<Segments> getTargetingSegments() { return targeting_segments; }

  public void setTargetingSegments(List<Segments> targeting_segments) { this.targeting_segments = targeting_segments; }

  public List<StrategyTargetingSegment> getStrategyTargetingSegments() { return strategyTargetingSegments; }

  public void setStrategyTargetingSegments(List<StrategyTargetingSegment> strategyTargetingSegments) { this.strategyTargetingSegments = strategyTargetingSegments; }

  public List<StrategyDayPart> getStrategyDayParts() { return strategyDayParts; }

  public void setStrategyDayParts(List<StrategyDayPart> strategyDayParts) { this.strategyDayParts = strategyDayParts; }

  public List<StrategyTarget> getStrategyTarget() { return strategyTarget; }

  public void setStrategyTarget(List<StrategyTarget> strategyTarget) { this.strategyTarget = strategyTarget; }

  public TargetDimensions getTargetDimensions() { return targetDimensions; }

  public void setTargetDimensions(TargetDimensions targetDimensions) { this.targetDimensions = targetDimensions; }

  public StrategyTargetValues getStrategyTargetValues() { return strategyTargetValues; }

  public void setStrategyTargetValues(StrategyTargetValues strategyTargetValues) { this.strategyTargetValues = strategyTargetValues; }

  /** {@inheritDoc} Not used for Strategy; form building happens elsewhere. */
  @Override
  public Form getForm() {
    return null;
  }

  /** {@inheritDoc} Not used for Strategy; URI building happens elsewhere. */
  @Override
  public String getUri() {
    return null;
  }

  /** Shallow copy via {@link Object#clone()}: collections and related entities are shared. */
  @Override
  public Strategy clone() throws CloneNotSupportedException {
    return (Strategy) super.clone();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.orm.entities; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.actionmanager.RequestType; import javax.persistence.Basic; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.Lob; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.OneToOne; import javax.persistence.Table; import java.util.Collection; @Table(name = "request") @Entity public class RequestEntity { @Column(name = "request_id") @Id private Long requestId; @Column(name = "cluster_id", updatable = false, nullable = false) @Basic private Long clusterId; @Column(name = "request_schedule_id", updatable = false, insertable = false, nullable = true) @Basic private Long requestScheduleId; @Column(name = "request_context") @Basic private String requestContext; @Column(name = "command_name") @Basic private String commandName; @Column(name = "inputs") @Lob private byte[] inputs = new byte[0]; @Column(name = 
"request_type") @Enumerated(value = EnumType.STRING) private RequestType requestType; @Column(name = "status") @Enumerated(value = EnumType.STRING) private HostRoleStatus status; @Basic @Column(name = "create_time", nullable = false) private Long createTime = System.currentTimeMillis(); @Basic @Column(name = "start_time", nullable = false) private Long startTime = -1L; @Basic @Column(name = "end_time", nullable = false) private Long endTime = -1L; @Basic @Column(name = "exclusive_execution", insertable = true, updatable = true, nullable = false) private Integer exclusive = 0; @OneToMany(mappedBy = "request") private Collection<StageEntity> stages; @OneToMany(mappedBy = "requestEntity", cascade = CascadeType.ALL) private Collection<RequestResourceFilterEntity> resourceFilterEntities; @OneToOne(mappedBy = "requestEntity", cascade = {CascadeType.ALL}) private RequestOperationLevelEntity requestOperationLevel; @ManyToOne(cascade = {CascadeType.MERGE}) @JoinColumn(name = "request_schedule_id", referencedColumnName = "schedule_id") private RequestScheduleEntity requestScheduleEntity; public Long getRequestId() { return requestId; } public void setRequestId(Long id) { this.requestId = id; } public String getRequestContext() { return requestContext; } public void setRequestContext(String request_context) { this.requestContext = request_context; } public Collection<StageEntity> getStages() { return stages; } public void setStages(Collection<StageEntity> stages) { this.stages = stages; } public Long getCreateTime() { return createTime; } public void setCreateTime(Long createTime) { this.createTime = createTime; } public Long getStartTime() { return startTime; } public void setStartTime(Long startTime) { this.startTime = startTime; } public Long getEndTime() { return endTime; } public void setEndTime(Long endTime) { this.endTime = endTime; } public Boolean isExclusive() { return exclusive == 0 ? 
false : true; } public void setExclusive(Boolean exclusive) { this.exclusive = (exclusive == false ? 0 : 1); } public String getInputs() { return inputs != null ? new String(inputs) : null; } public void setInputs(String inputs) { this.inputs = inputs != null ? inputs.getBytes() : null; } public RequestType getRequestType() { return requestType; } public void setRequestType(RequestType requestType) { this.requestType = requestType; } public Long getClusterId() { return clusterId; } public Collection<RequestResourceFilterEntity> getResourceFilterEntities() { return resourceFilterEntities; } public void setResourceFilterEntities(Collection<RequestResourceFilterEntity> resourceFilterEntities) { this.resourceFilterEntities = resourceFilterEntities; } public RequestOperationLevelEntity getRequestOperationLevel() { return requestOperationLevel; } public void setRequestOperationLevel(RequestOperationLevelEntity operationLevel) { this.requestOperationLevel = operationLevel; } public void setClusterId(Long clusterId) { this.clusterId = clusterId; } public String getCommandName() { return commandName; } public void setCommandName(String commandName) { this.commandName = commandName; } public HostRoleStatus getStatus() { return status; } public void setStatus(HostRoleStatus status) { this.status = status; } public RequestScheduleEntity getRequestScheduleEntity() { return requestScheduleEntity; } public void setRequestScheduleEntity(RequestScheduleEntity requestScheduleEntity) { this.requestScheduleEntity = requestScheduleEntity; } public Long getRequestScheduleId() { return requestScheduleId; } public void setRequestScheduleId(Long scheduleId) { this.requestScheduleId = scheduleId; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RequestEntity that = (RequestEntity) o; if (requestId != null ? 
!requestId.equals(that.requestId) : that.requestId != null) return false; return true; } @Override public int hashCode() { return requestId != null ? requestId.hashCode() : 0; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.tools.ant.taskdefs.optional.image; import com.sun.media.jai.codec.FileSeekableStream; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.DirectoryScanner; import org.apache.tools.ant.Project; import org.apache.tools.ant.taskdefs.MatchingTask; import org.apache.tools.ant.types.FileSet; import org.apache.tools.ant.types.Mapper; import org.apache.tools.ant.types.optional.image.Draw; import org.apache.tools.ant.types.optional.image.ImageOperation; import org.apache.tools.ant.types.optional.image.Rotate; import org.apache.tools.ant.types.optional.image.Scale; import org.apache.tools.ant.types.optional.image.TransformOperation; import org.apache.tools.ant.util.FileNameMapper; import org.apache.tools.ant.util.FileUtils; import org.apache.tools.ant.util.IdentityMapper; import javax.media.jai.JAI; import javax.media.jai.PlanarImage; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.Locale; import java.util.Vector; /** * A MatchingTask which relies on <a * href="http://java.sun.com/products/java-media/jai">JAI (Java * Advanced Imaging)</a> to perform image 
manipulation operations on * existing images. The operations are represented as ImageOperation * DataType objects. The operations are arranged to conform to the * Chaining Model of JAI. Check out the <a * href="http://java.sun.com/products/java-media/jai/forDevelopers/jai1_0_1guide-unc/"> * JAI Programming Guide</a>. * * @see org.apache.tools.ant.types.optional.image.ImageOperation * @see org.apache.tools.ant.types.DataType */ public class Image extends MatchingTask { // CheckStyle:VisibilityModifier OFF - bc protected Vector instructions = new Vector(); protected boolean overwrite = false; protected Vector filesets = new Vector(); protected File srcDir = null; protected File destDir = null; // CheckStyle:MemberNameCheck OFF - bc //cannot remove underscores due to protected visibility >:( protected String str_encoding = "JPEG"; protected boolean garbage_collect = false; private boolean failonerror = true; // CheckStyle:MemberNameCheck ON // CheckStyle:VisibilityModifier ON private Mapper mapperElement = null; /** * Add a set of files to be deleted. * @param set the FileSet to add. */ public void addFileset(FileSet set) { filesets.addElement(set); } /** * Set whether to fail on error. * If false, note errors to the output but keep going. * @param failonerror true or false. */ public void setFailOnError(boolean failonerror) { this.failonerror = failonerror; } /** * Set the source dir to find the image files. * @param srcDir the directory in which the image files reside. */ public void setSrcdir(File srcDir) { this.srcDir = srcDir; } /** * Set the image encoding type. <a * href="http://java.sun.com/products/java-media/jai/forDevelopers/jai1_0_1guide-unc/Encode.doc.html#56610"> * See this table in the JAI Programming Guide</a>. * @param encoding the String image encoding. */ public void setEncoding(String encoding) { str_encoding = encoding; } /** * Set whether to overwrite a file if there is a naming conflict. * @param overwrite whether to overwrite. 
*/
public void setOverwrite(boolean overwrite) {
    this.overwrite = overwrite;
}

/**
 * Set whether to invoke Garbage Collection after each image processed.
 * Defaults to false.
 * @param gc whether to invoke the garbage collector.
 */
public void setGc(boolean gc) {
    garbage_collect = gc;
}

/**
 * Set the destination directory for manipulated images.
 * @param destDir The destination directory.
 */
public void setDestDir(File destDir) {
    this.destDir = destDir;
}

/**
 * Add an ImageOperation to chain.
 * @param instr The ImageOperation to append to the chain.
 */
public void addImageOperation(ImageOperation instr) {
    instructions.add(instr);
}

/**
 * Add a Rotate ImageOperation to the chain.
 * @param instr The Rotate operation to add to the chain.
 * @see org.apache.tools.ant.types.optional.image.Rotate
 */
public void addRotate(Rotate instr) {
    instructions.add(instr);
}

/**
 * Add a Scale ImageOperation to the chain.
 * @param instr The Scale operation to add to the chain.
 * @see org.apache.tools.ant.types.optional.image.Scale
 */
public void addScale(Scale instr) {
    instructions.add(instr);
}

/**
 * Add a Draw ImageOperation to the chain. DrawOperation
 * DataType objects can be nested inside the Draw object.
 * @param instr The Draw operation to add to the chain.
 * @see org.apache.tools.ant.types.optional.image.Draw
 * @see org.apache.tools.ant.types.optional.image.DrawOperation
 */
public void addDraw(Draw instr) {
    instructions.add(instr);
}

/**
 * Add an ImageOperation to chain.
 * @param instr The ImageOperation to append to the chain.
 * @since Ant 1.7
 */
public void add(ImageOperation instr) {
    addImageOperation(instr);
}

/**
 * Defines the mapper to map source to destination files.
 * @return a mapper to be configured
 * @exception BuildException if more than one mapper is defined
 * @since Ant 1.8.0
 */
public Mapper createMapper() throws BuildException {
    // only a single mapper element may be configured on this task
    if (mapperElement != null) {
        throw new BuildException("Cannot define more than one mapper",
            getLocation());
    }
    mapperElement = new Mapper(getProject());
    return mapperElement;
}

/**
 * Add a nested filenamemapper.
 * @param fileNameMapper the mapper to add.
 * @since Ant 1.8.0
 */
public void add(FileNameMapper fileNameMapper) {
    createMapper().add(fileNameMapper);
}

/**
 * Executes all the chained ImageOperations on the files inside
 * the directory.
 * @param srcDir directory the source files live in.
 * @param srcNames file names (relative to srcDir) to process.
 * @param dstDir directory the mapped output files are written to.
 * @param mapper maps each source name to zero or more destination names.
 * @return the number of destination files actually written.
 * @since Ant 1.8.0
 */
public int processDir(final File srcDir, final String[] srcNames,
                      final File dstDir, final FileNameMapper mapper) {
    int writeCount = 0;

    for (int i = 0; i < srcNames.length; ++i) {
        final String srcName = srcNames[i];
        final File srcFile = new File(srcDir, srcName).getAbsoluteFile();

        // a null mapping means the mapper doesn't handle this file at all
        final String[] dstNames = mapper.mapFileName(srcName);
        if (dstNames == null) {
            log(srcFile + " skipped, don't know how to handle it",
                Project.MSG_VERBOSE);
            continue;
        }

        // one source file may map to several destination files
        for (int j = 0; j < dstNames.length; ++j) {
            final String dstName = dstNames[j];
            final File dstFile = new File(dstDir, dstName).getAbsoluteFile();

            if (dstFile.exists()) {
                // avoid overwriting unless necessary: skip when the
                // destination is at least as new as the source
                if (!overwrite
                    && srcFile.lastModified() <= dstFile.lastModified()) {
                    log(srcFile + " omitted as " + dstFile
                        + " is up to date.", Project.MSG_VERBOSE);
                    // don't overwrite the file
                    continue;
                }

                // avoid extra work while overwriting; only delete when
                // source and destination are distinct files
                if (!srcFile.equals(dstFile)) {
                    dstFile.delete();
                }
            }
            processFile(srcFile, dstFile);
            ++writeCount;
        }
    }

    // run the garbage collector if wanted
    if (garbage_collect) {
        System.gc();
    }

    return writeCount;
}

/**
 * Executes all the chained ImageOperations on the file
 * specified.
 * <p>Maps the file onto itself inside destDir (or srcDir when no
 * destDir was set).</p>
 * @param file The file to be processed.
 * @deprecated this method isn't used anymore
 */
public void processFile(File file) {
    processFile(file, new File(destDir == null
        ? srcDir : destDir, file.getName()));
}

/**
 * Executes all the chained ImageOperations on the file
 * specified.
 * <p>Loads the image through JAI, applies every nested
 * TransformOperation in order, then encodes the result using the
 * configured encoding. On failure the (possibly partial) output file
 * is removed unless it is the input file itself.</p>
 * @param file The file to be processed.
 * @param newFile The file to write to.
 * @since Ant 1.8.0
 */
public void processFile(File file, File newFile) {
    try {
        log("Processing File: " + file.getAbsolutePath());

        FileSeekableStream input = null;
        PlanarImage image = null;
        try {
            input = new FileSeekableStream(file);
            // decode the source image from the stream
            image = JAI.create("stream", input);
            final int size = instructions.size();
            for (int i = 0; i < size; i++) {
                Object instr = instructions.elementAt(i);
                if (instr instanceof TransformOperation) {
                    // each operation produces a new image that feeds
                    // the next one in the chain
                    image = ((TransformOperation) instr)
                        .executeTransformOperation(image);
                } else {
                    log("Not a TransformOperation: " + instr);
                }
            }
        } finally {
            FileUtils.close(input);
        }

        // make sure the target directory exists before encoding
        File dstParent = newFile.getParentFile();
        if (!dstParent.isDirectory() && !dstParent.mkdirs()) {
            throw new BuildException("Failed to create parent directory "
                + dstParent);
        }

        if ((overwrite && newFile.exists()) && (!newFile.equals(file))) {
            newFile.delete();
        }

        FileOutputStream stream = null;
        try {
            stream = new FileOutputStream(newFile);
            // JAI expects the encoding name in upper case (e.g. "JPEG")
            JAI.create("encode", image, stream,
                str_encoding.toUpperCase(Locale.ENGLISH), null);
            stream.flush();
        } finally {
            FileUtils.close(stream);
        }
    } catch (IOException err) {
        // clean up a possibly half-written destination file
        if (!file.equals(newFile)) {
            newFile.delete();
        }
        if (!failonerror) {
            log("Error processing file: " + err);
        } else {
            throw new BuildException(err);
        }
    } catch (java.lang.RuntimeException rerr) {
        // JAI reports many decode/encode problems as RuntimeExceptions
        if (!file.equals(newFile)) {
            newFile.delete();
        }
        if (!failonerror) {
            log("Error processing file: " + rerr);
        } else {
            throw new BuildException(rerr);
        }
    }
}

/**
 * Executes the Task.
 * <p>Processes the configured srcDir (if any) plus all nested
 * filesets, writing results into destDir (or back into srcDir when
 * destDir is unset).</p>
 * @throws BuildException on error.
 */
public void execute() throws BuildException {
    validateAttributes();

    try {
        File dest = destDir != null ? destDir : srcDir;

        int writeCount = 0;

        // build mapper; without an explicit mapper files keep their names
        final FileNameMapper mapper;
        if (mapperElement == null) {
            mapper = new IdentityMapper();
        } else {
            mapper = mapperElement.getImplementation();
        }

        // deal with specified srcDir
        if (srcDir != null) {
            final DirectoryScanner ds = super.getDirectoryScanner(srcDir);

            final String[] files = ds.getIncludedFiles();
            writeCount += processDir(srcDir, files, dest, mapper);
        }

        // deal with the filesets
        final int size = filesets.size();
        for (int i = 0; i < size; i++) {
            final FileSet fs = (FileSet) filesets.elementAt(i);
            final DirectoryScanner ds =
                fs.getDirectoryScanner(getProject());
            final String[] files = ds.getIncludedFiles();
            final File fromDir = fs.getDir(getProject());
            writeCount += processDir(fromDir, files, dest, mapper);
        }

        if (writeCount > 0) {
            log("Processed " + writeCount
                + (writeCount == 1 ? " image." : " images."));
        }
    } catch (Exception err) {
        err.printStackTrace();
        throw new BuildException(err.getMessage());
    }
}

/**
 * Ensure we have a consistent and legal set of attributes, and set
 * any internal flags necessary based on different combinations
 * of attributes.
 * <p>Also normalizes the encoding aliases "jpg" and "tif" to the
 * names JAI expects ("JPEG"/"TIFF").</p>
 * @throws BuildException on error.
 */
protected void validateAttributes() throws BuildException {
    if (srcDir == null && filesets.size() == 0) {
        // note: the two literals concatenate to
        // "Specify at least one source--a srcDir or a fileset."
        throw new BuildException("Specify at least one source"
            + "--a srcDir or a fileset.");
    }
    if (srcDir == null && destDir == null) {
        throw new BuildException("Specify the destDir, or the srcDir.");
    }
    if (str_encoding.equalsIgnoreCase("jpg")) {
        str_encoding = "JPEG";
    } else if (str_encoding.equalsIgnoreCase("tif")) {
        str_encoding = "TIFF";
    }
}
}
/**
 * Copyright 2015 Otto (GmbH &amp; Co KG)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ottogroup.bi.spqr.pipeline.statistics;

import java.io.Serializable;
import java.nio.ByteBuffer;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Holds statistical information about a single pipeline. Values tracked are:
 * <ul>
 *   <li>pipeline identifier</li>
 *   <li>number of messages processed for specified time frame</li>
 *   <li>start time</li>
 *   <li>end time</li>
 *   <li>min. duration required for processing a single message (provided in milliseconds)</li>
 *   <li>max. duration required for processing a single message (provided in milliseconds)</li>
 *   <li>avg. duration required for processing a single message (provided in milliseconds)</li>
 *   <li>min. message size found in specified time frame</li>
 *   <li>max. message size found in specified time frame</li>
 *   <li>avg. message size found in specified time frame</li>
 *   <li>errors</li>
 * </ul>
 * @author mnxfst
 * @since Apr 14, 2015
 */
public class MicroPipelineStatistics implements Serializable {

    private static final long serialVersionUID = -2458412374912750561L;

    // byte widths used when sizing the serialization buffer
    public static final int SIZE_OF_INT = Integer.SIZE / Byte.SIZE;
    public static final int SIZE_OF_LONG = Long.SIZE / Byte.SIZE;

    /** identifier of host running the pipeline the stats belong to */
    @JsonProperty(value="hid", required=true)
    private String processingNodeId = null;
    /** identifier of pipeline which generated the stats */
    @JsonProperty(value="pid", required=true)
    private String pipelineId = null;
    /** identifier of pipeline component which generated the stats */
    @JsonProperty(value="cid", required=true)
    private String componentId = null;
    /** number of messages processed since last event */
    @JsonProperty(value="numMsg", required=true)
    private int numOfMessages = 0;
    /** start time */
    @JsonProperty(value="st", required=true)
    private long startTime = 0;
    /** end time */
    @JsonProperty(value="et", required=true)
    private long endTime = 0;
    /** min. duration required for processing a single message */
    @JsonProperty(value="minDur")
    private int minDuration = 0;
    /** max. duration required for processing a single message */
    @JsonProperty(value="maxDur")
    private int maxDuration = 0;
    /** avg. duration required for processing a single message */
    @JsonProperty(value="avgDur")
    private int avgDuration = 0;
    /** min. message size */
    @JsonProperty(value="minSize")
    private int minSize = 0;
    /** max. message size */
    @JsonProperty(value="maxSize")
    private int maxSize = 0;
    /** avg. message size */
    @JsonProperty(value="avgSize")
    private int avgSize = 0;
    /** error rate */
    @JsonProperty(value="err", required=true)
    private int errors = 0;

    /** No-arg constructor, required by Jackson and {@link #fromByteArray(byte[])}. */
    public MicroPipelineStatistics() {
    }

    /**
     * Initializes all statistics fields except {@code endTime} and
     * {@code errors}, which must be set via their setters.
     */
    public MicroPipelineStatistics(final String processingNodeId, final String pipelineId, final String componentId,
            final long startTime, final int numOfMessages, final int minDuration, final int maxDuration,
            final int avgDuration, final int minSize, final int maxSize, final int avgSize) {
        this.startTime = startTime;
        this.componentId = componentId;
        this.processingNodeId = processingNodeId;
        this.pipelineId = pipelineId;
        this.numOfMessages = numOfMessages;
        this.minDuration = minDuration;
        this.maxDuration = maxDuration;
        this.avgDuration = avgDuration;
        this.minSize = minSize;
        this.maxSize = maxSize;
        this.avgSize = avgSize;
    }

    /**
     * Converts the provided byte array into a {@link MicroPipelineStatistics} representation.
     * The field order must mirror {@link #toByteArray()} exactly.
     * @param statsContent byte array previously produced by {@link #toByteArray()}
     * @return deserialized statistics instance
     */
    public static MicroPipelineStatistics fromByteArray(final byte[] statsContent) {

        MicroPipelineStatistics stats = new MicroPipelineStatistics();
        ByteBuffer buf = ByteBuffer.wrap(statsContent);

        // ensure that the order is the same as when populating the array
        stats.setNumOfMessages(buf.getInt());
        stats.setStartTime(buf.getLong());
        stats.setEndTime(buf.getLong());
        stats.setMinDuration(buf.getInt());
        stats.setMaxDuration(buf.getInt());
        stats.setAvgDuration(buf.getInt());
        stats.setMinSize(buf.getInt());
        stats.setMaxSize(buf.getInt());
        stats.setAvgSize(buf.getInt());
        stats.setErrors(buf.getInt());

        // each string is stored as a length prefix followed by its bytes
        byte[] procNodeId = new byte[buf.getInt()];
        buf.get(procNodeId);

        byte[] pipelineId = new byte[buf.getInt()];
        buf.get(pipelineId);

        byte[] componentId = new byte[buf.getInt()];
        buf.get(componentId);

        // NOTE(review): new String(byte[]) uses the platform default charset,
        // matching String#getBytes() in toByteArray(); round-trips are only
        // safe between hosts with the same default charset — TODO confirm
        stats.setProcessingNodeId(new String(procNodeId));
        stats.setPipelineId(new String(pipelineId));
        stats.setComponentId(new String(componentId));

        return stats;
    }

    /**
     * Convert this {@link MicroPipelineStatistics} instance into its byte array representation.
     * @return serialized form, readable by {@link #fromByteArray(byte[])}
     */
    public byte[] toByteArray() {

        // buffer size breakdown:
        //   8 int stat fields (numOfMessages, min/max/avg duration,
        //     min/max/avg size, errors)
        // + 3 int length prefixes for processingNodeId, pipelineId and
        //     componentId (needed when extracting content from the array)
        //   => 11 x SIZE_OF_INT
        // + 2 long fields (startTime, endTime) => 2 x SIZE_OF_LONG
        // + the raw bytes of the three identifier strings
        byte[] procNodeId = (this.processingNodeId != null ? this.processingNodeId.getBytes() : new byte[0]);
        byte[] pid = (this.pipelineId != null ? this.pipelineId.getBytes() : new byte[0]);
        byte[] cid = (this.componentId != null ? this.componentId.getBytes() : new byte[0]);

        ByteBuffer buffer = ByteBuffer.allocate(11 * SIZE_OF_INT + 2 * SIZE_OF_LONG
                + procNodeId.length + pid.length + cid.length);

        buffer.putInt(this.numOfMessages);
        buffer.putLong(this.startTime);
        buffer.putLong(this.endTime);
        buffer.putInt(this.minDuration);
        buffer.putInt(this.maxDuration);
        buffer.putInt(this.avgDuration);
        buffer.putInt(this.minSize);
        buffer.putInt(this.maxSize);
        buffer.putInt(this.avgSize);
        buffer.putInt(this.errors);
        buffer.putInt(procNodeId.length);
        buffer.put(procNodeId);
        buffer.putInt(pid.length);
        buffer.put(pid);
        buffer.putInt(cid.length);
        buffer.put(cid);

        return buffer.array();
    }

    /** Ad-hoc manual round-trip / timing check for the byte-array codec. */
    public static void main(String[] args) {
        MicroPipelineStatistics stats = new MicroPipelineStatistics("procNodeId-1", "--", "component-123",
                System.currentTimeMillis(), 1234, 2, 432, 56, 67890, 98765, 45678);
        stats.setErrors(9383);
        stats.setEndTime(System.currentTimeMillis() + 1000);

        long n1 = System.nanoTime();
        byte[] converted = stats.toByteArray();
        long n2 = System.nanoTime();
        System.out.println("length: " + converted.length + ", conversion: " + (n2 - n1) + "ns");

        long s1 = System.nanoTime();
        MicroPipelineStatistics reStats = MicroPipelineStatistics.fromByteArray(converted);
        long s2 = System.nanoTime();
        System.out.println("length: " + converted.length + ", conversion: " + (s2 - s1) + "ns");

        System.out.println(stats);
        System.out.println(reStats);
    }

    public String getComponentId() {
        return componentId;
    }

    public void setComponentId(String componentId) {
        this.componentId = componentId;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public long getEndTime() {
        return endTime;
    }

    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }

    public int getErrors() {
        return errors;
    }

    public void setErrors(int errors) {
        this.errors = errors;
    }

    /** Increments the processed-message counter by one. */
    public void incNumOfMessages() {
        this.numOfMessages++;
    }

    /** Increments the processed-message counter by {@code v}. */
    public void incNumOfMessages(int v) {
        this.numOfMessages = this.numOfMessages + v;
    }

    public String getProcessingNodeId() {
        return processingNodeId;
    }

    public void setProcessingNodeId(String processingNodeId) {
        this.processingNodeId = processingNodeId;
    }

    public String getPipelineId() {
        return pipelineId;
    }

    public void setPipelineId(String pipelineId) {
        this.pipelineId = pipelineId;
    }

    public int getNumOfMessages() {
        return numOfMessages;
    }

    public void setNumOfMessages(int numOfMessages) {
        this.numOfMessages = numOfMessages;
    }

    public int getMinDuration() {
        return minDuration;
    }

    public void setMinDuration(int minDuration) {
        this.minDuration = minDuration;
    }

    public int getMaxDuration() {
        return maxDuration;
    }

    public void setMaxDuration(int maxDuration) {
        this.maxDuration = maxDuration;
    }

    public int getAvgDuration() {
        return avgDuration;
    }

    public void setAvgDuration(int avgDuration) {
        this.avgDuration = avgDuration;
    }

    public int getMinSize() {
        return minSize;
    }

    public void setMinSize(int minSize) {
        this.minSize = minSize;
    }

    public int getMaxSize() {
        return maxSize;
    }

    public void setMaxSize(int maxSize) {
        this.maxSize = maxSize;
    }

    public int getAvgSize() {
        return avgSize;
    }

    public void setAvgSize(int avgSize) {
        this.avgSize = avgSize;
    }

    /**
     * @see java.lang.Object#toString()
     */
    public String toString() {
        return "MicroPipelineStatistics [processingNodeId=" + processingNodeId
                + ", pipelineId=" + pipelineId + ", componentId=" + componentId
                + ", numOfMessages=" + numOfMessages + ", startTime="
                + startTime + ", endTime=" + endTime + ", minDuration="
                + minDuration + ", maxDuration=" + maxDuration
                + ", avgDuration=" + avgDuration + ", minSize=" + minSize
                + ", maxSize=" + maxSize + ", avgSize=" + avgSize
                + ", errors=" + errors + "]";
    }

}
/*
 * Copyright 2013 Thomas Bocek
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package net.tomp2p.rpc;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import net.tomp2p.peers.Number160;

import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;

import java.util.Random;

/**
 * Test the counting bloom filter and the regular bloom filter.
 *
 * @author Thomas Bocek
 *
 */
public class TestBloomFilter {

    private final int bfSize = 40;
    private final int bfSizeLarge = 200;

    /** Prints the name of each test before it runs. */
    @Rule
    public TestRule watcher = new TestWatcher() {
        protected void starting(Description description) {
            System.out.println("Starting test: " + description.getMethodName());
        }
    };

    /** An empty (void) filter contains nothing and survives encode/decode. */
    @Test
    public void testEmptyBloomfilter() {
        SimpleBloomFilter<Number160> bloomFilter = new SimpleBloomFilter<Number160>(0, 0);
        Number160 hash = new Number160("0x41844ffc9aeea30aefd1e0aff687dacdf1c6f36e");
        Assert.assertTrue(bloomFilter.isVoid());
        Assert.assertFalse(bloomFilter.contains(hash));

        // convert and back
        ByteBuf buf = Unpooled.buffer(SimpleBloomFilter.SIZE_HEADER);
        bloomFilter.encode(buf);
        SimpleBloomFilter<Number160> bloomFilter2 = new SimpleBloomFilter<Number160>(buf);
        Assert.assertTrue(bloomFilter2.isVoid());
    }

    /** A saturated filter reports everything as contained, before and after encoding. */
    @Test
    public void testFullBloomfilter() {
        SimpleBloomFilter<Number160> bloomFilter =
                new SimpleBloomFilter<Number160>(8, Integer.MAX_VALUE).setAll();
        Number160 hash = new Number160("0x41844ffc9aeea30aefd1e0aff687dacdf1c6f36e");
        Assert.assertTrue(bloomFilter.isFull());
        Assert.assertTrue(bloomFilter.contains(hash));

        // convert and back, minimum encoded payload is one long
        ByteBuf buf = Unpooled.buffer(8 + SimpleBloomFilter.SIZE_HEADER);
        bloomFilter.encode(buf);
        SimpleBloomFilter<Number160> bloomFilter2 = new SimpleBloomFilter<Number160>(buf);
        Assert.assertTrue(bloomFilter2.isFull());
    }

    /** One inserted element survives an encode/decode round trip (size-based ctor). */
    @Test
    public void testOneBloomfilter1() {
        SimpleBloomFilter<Number160> bloomFilter = new SimpleBloomFilter<Number160>(1024, 1000);
        Number160 hash = new Number160("0x41844ffc9aeea30aefd1e0aff687dacdf1c6f36e");
        bloomFilter.add(hash);

        // convert and back, minimum encoded payload is one long
        ByteBuf buf = Unpooled.buffer();
        bloomFilter.encode(buf);
        SimpleBloomFilter<Number160> bloomFilter2 = new SimpleBloomFilter<Number160>(buf);
        Assert.assertTrue(bloomFilter2.contains(hash));
    }

    /** Same as above, but the filter is sized via a false-positive probability. */
    @Test
    public void testOneBloomfilter2() {
        SimpleBloomFilter<Number160> bloomFilter = new SimpleBloomFilter<Number160>(0.01d, 1000);
        Number160 hash = new Number160("0x41844ffc9aeea30aefd1e0aff687dacdf1c6f36e");
        bloomFilter.add(hash);

        // convert and back, minimum encoded payload is one long
        ByteBuf buf = Unpooled.buffer();
        bloomFilter.encode(buf);
        SimpleBloomFilter<Number160> bloomFilter2 = new SimpleBloomFilter<Number160>(buf);
        Assert.assertTrue(bloomFilter2.contains(hash));
    }

    /**
     * Test the serialization and if the bloomfilter works as expected.
     */
    @Test
    public void testBloomfilter() {
        Random rnd = new Random(0);
        final int filterSize = 100;
        final int expected = 1000;
        SimpleBloomFilter<Number160> bloomFilter =
                new SimpleBloomFilter<Number160>(filterSize, expected);
        for (int i = 0; i < expected; i++) {
            bloomFilter.add(new Number160(rnd));
        }
        bloomFilter.add(Number160.MAX_VALUE);

        // convert and back
        ByteBuf buf = Unpooled.buffer(filterSize + SimpleBloomFilter.SIZE_HEADER);
        bloomFilter.encode(buf);
        SimpleBloomFilter<Number160> bloomFilter2 = new SimpleBloomFilter<Number160>(buf);
        Assert.assertTrue(bloomFilter2.contains(Number160.MAX_VALUE));
        Assert.assertFalse(bloomFilter2.contains(Number160.ONE));
        Assert.assertEquals(bloomFilter, bloomFilter2);
    }

    /**
     * Test with a small set of additions.
     */
    @Test
    public void testCountingBloomFilter() {
        final int countingSize = 10;
        int[] counting = new int[countingSize];
        CountingBloomFilter<String> cbs = new CountingBloomFilter<String>(bfSize, counting);
        cbs.add("abc");
        cbs.add("abc");
        cbs.add("abc");
        cbs.add("abd");
        cbs.add("abe");
        Assert.assertEquals(3, cbs.approximateCount("abc"));
        Assert.assertEquals(1, cbs.approximateCount("abd"));
        Assert.assertEquals(0, cbs.approximateCount("abg"));
    }

    /**
     * Test with a large set of additions.
     */
    @Test
    public void testCountingBloomFilter2() {
        final int countingSize = 20;
        final int items = 100;
        final int error = 8;
        int[] counting = new int[countingSize];
        CountingBloomFilter<String> cbs = new CountingBloomFilter<String>(bfSizeLarge, counting);
        System.out.println(cbs.expectedFalsePositiveProbability());
        for (int i = 0; i < items; i++) {
            cbs.add("abc");
        }
        for (int i = 0; i < items; i++) {
            cbs.add("abc" + i);
        }
        // here we show the overcount caused by false positives / counter
        // collisions. Actually, we should get 100, but since we inserted also
        // other stuff, we get more.
        Assert.assertEquals(items + error, cbs.approximateCount("abc"));
    }

    /**
     * Test with a small set of additions.
     */
    @Test
    public void testCountingBloomFilter3() {
        final int countingSize = 10;
        int[] counting = new int[countingSize];
        CountingBloomFilter<String> cbs = new CountingBloomFilter<String>(bfSize, counting);
        cbs.add("abc");
        cbs.add("abc");
        cbs.add("abc");
        cbs.add("abd");
        cbs.add("abe");
        Assert.assertTrue(cbs.contains("abc"));
    }

    /** Sweeps the false-positive rate from 0.1% to ~10%. */
    @Test
    public void intersect() {
        for (int i = 1; i < 100; i++) {
            intersect(0.001 * i);
        }
    }

    @Test
    public void intersect1() {
        intersect(0.01);
    }

    /**
     * Inserts keys 0..nrPeers-1, then probes keys range1..range2-1; at least
     * the true members (nrPeers - range1 of them) must be reported contained.
     */
    private void intersect(double falsePositive) {
        final int nrPeers = 1000;
        final int range1 = 800;
        final int range2 = 1800;
        SimpleBloomFilter<Number160> sbf = new SimpleBloomFilter<Number160>(falsePositive, nrPeers);
        for (int i = 0; i < nrPeers; i++) {
            sbf.add(new Number160(i));
        }
        int counter = 0;
        for (int i = range1; i < range2; i++) {
            Number160 test = new Number160(i);
            if (sbf.contains(test)) {
                counter++;
            }
        }
        // System.err.println("counter "+counter);
        Assert.assertTrue(counter >= nrPeers - range1);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pulsar.functions.instance.state;

import static org.apache.bookkeeper.common.concurrent.FutureUtils.result;
import static org.apache.bookkeeper.stream.protocol.ProtocolConstants.DEFAULT_STREAM_CONF;

import com.google.common.base.Stopwatch;
import io.netty.buffer.ByteBuf;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import lombok.extern.slf4j.Slf4j;
import org.apache.bookkeeper.api.StorageClient;
import org.apache.bookkeeper.api.kv.Table;
import org.apache.bookkeeper.clients.StorageClientBuilder;
import org.apache.bookkeeper.clients.admin.SimpleStorageAdminClientImpl;
import org.apache.bookkeeper.clients.admin.StorageAdminClient;
import org.apache.bookkeeper.clients.config.StorageClientSettings;
import org.apache.bookkeeper.clients.exceptions.ClientException;
import org.apache.bookkeeper.clients.exceptions.InternalServerException;
import org.apache.bookkeeper.clients.exceptions.NamespaceNotFoundException;
import org.apache.bookkeeper.clients.exceptions.StreamNotFoundException;
import org.apache.bookkeeper.clients.utils.ClientResources;
import org.apache.bookkeeper.common.util.Backoff.Jitter;
import org.apache.bookkeeper.common.util.Backoff.Jitter.Type;
import org.apache.bookkeeper.stream.proto.NamespaceConfiguration;
import org.apache.bookkeeper.stream.proto.StorageType;
import org.apache.bookkeeper.stream.proto.StreamConfiguration;
import org.apache.pulsar.functions.api.StateStore;
import org.apache.pulsar.functions.proto.Function.FunctionDetails;
import org.apache.pulsar.functions.utils.FunctionCommon;

/**
 * The state store provider that provides bookkeeper table backed state stores.
 */
@Slf4j
public class BKStateStoreProviderImpl implements StateStoreProvider {

    /** Service URI of the bookkeeper table (state storage) service. */
    private String stateStorageServiceUrl;
    /** Cache of storage clients, keyed by state namespace (tenant+namespace). */
    private Map<String, StorageClient> clients;

    @Override
    public void init(Map<String, Object> config, FunctionDetails functionDetails) throws Exception {
        stateStorageServiceUrl = (String) config.get(STATE_STORAGE_SERVICE_URL);
        clients = new HashMap<>();
    }

    /**
     * Returns a cached storage client for the given tenant/namespace,
     * creating and caching one on first use.
     * NOTE(review): the backing HashMap is not synchronized — assumes callers
     * are single-threaded per provider instance; TODO confirm.
     */
    private StorageClient getStorageClient(String tenant, String namespace) {
        final String tableNs = FunctionCommon.getStateNamespace(tenant, namespace);

        StorageClient client = clients.get(tableNs);
        if (null != client) {
            return client;
        }

        StorageClientSettings settings = StorageClientSettings.newBuilder()
            .serviceUri(stateStorageServiceUrl)
            .enableServerSideRouting(true)
            .clientName("function-" + tableNs)
            // configure a maximum 2 minutes jitter backoff for accessing table service
            .backoffPolicy(Jitter.of(
                Type.EXPONENTIAL,
                100,
                2000,
                60
            ))
            .build();

        StorageClient storageClient = StorageClientBuilder.newBuilder()
            .withSettings(settings)
            .withNamespace(tableNs)
            .build();

        clients.put(tableNs, storageClient);

        return storageClient;
    }

    /**
     * Ensures the state table for the given function exists, creating the
     * namespace and/or stream as needed. Retries for up to one minute before
     * failing with an {@link IOException} carrying the last creation error.
     *
     * @throws Exception if the table cannot be verified/created within the timeout
     */
    private void createStateTable(String stateStorageServiceUrl,
                                  String tenant,
                                  String namespace,
                                  String name) throws Exception {
        final String tableNs = FunctionCommon.getStateNamespace(tenant, namespace);
        final String tableName = name;
        try (StorageAdminClient storageAdminClient = new SimpleStorageAdminClientImpl(
                 StorageClientSettings.newBuilder().serviceUri(stateStorageServiceUrl).build(),
                 ClientResources.create().scheduler())) {
            StreamConfiguration streamConf = StreamConfiguration.newBuilder(DEFAULT_STREAM_CONF)
                .setInitialNumRanges(4)
                .setMinNumRanges(4)
                .setStorageType(StorageType.TABLE)
                .build();
            Stopwatch elapsedWatch = Stopwatch.createStarted();
            Exception lastException = null;
            while (elapsedWatch.elapsed(TimeUnit.MINUTES) < 1) {
                try {
                    result(storageAdminClient.getStream(tableNs, tableName));
                    return;
                } catch (NamespaceNotFoundException nnfe) {
                    try {
                        result(storageAdminClient.createNamespace(tableNs, NamespaceConfiguration.newBuilder()
                            .setDefaultStreamConf(streamConf)
                            .build()));
                    } catch (Exception e) {
                        // there might be two clients conflicting at creating table, so let's retrieve the table again
                        // to make sure the table is created.
                        lastException = e;
                        // fix: log the namespace being created (tableNs), not the table name
                        log.warn("Encountered exception when creating namespace {} for state table", tableNs, e);
                    }
                    try {
                        result(storageAdminClient.createStream(tableNs, tableName, streamConf));
                    } catch (Exception e) {
                        // there might be two clients conflicting at creating table, so let's retrieve the table again
                        // to make sure the table is created.
                        lastException = e;
                        log.warn("Encountered exception when creating table {}/{}", tableNs, tableName, e);
                    }
                } catch (StreamNotFoundException snfe) {
                    try {
                        result(storageAdminClient.createStream(tableNs, tableName, streamConf));
                    } catch (Exception e) {
                        // there might be two client conflicting at creating table, so let's retrieve it to make
                        // sure the table is created.
                        lastException = e;
                        log.warn("Encountered exception when creating table {}/{}", tableNs, tableName, e);
                    }
                } catch (ClientException ce) {
                    log.warn("Encountered issue {} on fetching state stable metadata, re-attempting in 100 milliseconds",
                        ce.getMessage());
                    TimeUnit.MILLISECONDS.sleep(100);
                }
            }
            // fix: format args were (tenant, name, name) — namespace was dropped
            throw new IOException(
                String.format("Failed to setup / verify state table for function %s/%s/%s within timeout",
                    tenant, namespace, name),
                lastException);
        }
    }

    /**
     * Opens the state table for the given function, retrying for up to one
     * minute while the (possibly just-created) table becomes servable.
     */
    private Table<ByteBuf, ByteBuf> openStateTable(String tenant,
                                                   String namespace,
                                                   String name) throws Exception {
        StorageClient client = getStorageClient(tenant, namespace);

        log.info("Opening state table for function {}/{}/{}", tenant, namespace, name);
        // NOTE: this is a workaround until we bump bk version to 4.9.0
        // table might just be created above, so it might not be ready for serving traffic
        Stopwatch openSw = Stopwatch.createStarted();
        while (openSw.elapsed(TimeUnit.MINUTES) < 1) {
            try {
                return result(client.openTable(name), 1, TimeUnit.MINUTES);
            } catch (InternalServerException ise) {
                log.warn("Encountered internal server on opening state table '{}/{}/{}', "
                        + " re-attempt in 100 milliseconds : {}",
                    tenant, namespace, name, ise.getMessage());
                TimeUnit.MILLISECONDS.sleep(100);
            } catch (TimeoutException e) {
                throw new RuntimeException("Failed to open state table for function "
                    + tenant + "/" + namespace + "/" + name + " within timeout period", e);
            }
        }
        throw new IOException("Failed to open state table for function "
            + tenant + "/" + namespace + "/" + name);
    }

    /**
     * Creates (if needed) and opens the bookkeeper-backed state store for the
     * given function.
     */
    @Override
    @SuppressWarnings("unchecked") // caller chooses T; BKStateStoreImpl is the only implementation here
    public <T extends StateStore> T getStateStore(String tenant, String namespace, String name) throws Exception {
        // we defer creation of the state table until a java instance is running here.
        createStateTable(stateStorageServiceUrl, tenant, namespace, name);
        Table<ByteBuf, ByteBuf> table = openStateTable(tenant, namespace, name);
        return (T) new BKStateStoreImpl(tenant, namespace, name, table);
    }

    @Override
    public void close() {
        // best-effort async close of every cached client; failures are logged, not thrown
        clients.forEach((name, client) -> client.closeAsync()
            .exceptionally(cause -> {
                log.warn("Failed to close state storage client", cause);
                return null;
            })
        );
        clients.clear();
    }
}
/* * Copyright (c) 2015, Nordic Semiconductor * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package no.nordicsemi.android.nrftoolbox.hts;

import android.graphics.Point;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.view.ViewGroup;
import android.widget.TextView;

import org.achartengine.GraphicalView;

import java.util.UUID;

import no.nordicsemi.android.nrftoolbox.R;
import no.nordicsemi.android.nrftoolbox.profile.BleManager;
import no.nordicsemi.android.nrftoolbox.profile.BleProfileActivity;

/**
 * The main HTS activity. It implements HTSManagerCallbacks to receive callbacks from the
 * HTSManager class. The activity supports portrait and landscape orientations and uses the
 * external library AChartEngine to show a real-time graph of the received values.
 * <p>
 * NOTE(review): much of this class (TAG, string keys, "HR" naming, the filter UUID constant
 * name) appears adapted from a heart-rate (BPM/HRS) activity — verify the intended service.
 */
// TODO The BPMActivity should be rewritten to use the service approach, like other do.
public class HTSActivity extends BleProfileActivity implements HTSManagerCallbacks {
    @SuppressWarnings("unused")
    // NOTE(review): TAG retained from the activity this was copied from
    private final String TAG = "BPMActivity";

    // keys used to persist graph state across configuration changes
    private final static String GRAPH_STATUS = "graph_status";
    private final static String GRAPH_COUNTER = "graph_counter";
    private final static String HR_VALUE = "hr_value";

    private final static int MAX_HR_VALUE = 65535;
    private final static int MIN_POSITIVE_VALUE = 0;
    private final static int REFRESH_INTERVAL = 1000; // 1 second interval

    private Handler mHandler = new Handler();

    private boolean isGraphInProgress = false;

    private GraphicalView mGraphView;
    private LineGraphView mLineGraph;
    private TextView mHRSValue, mHRSPosition;

    // last received sensor value and the x-axis counter of the graph
    private int mHrmValue = 0;
    private int mCounter = 0;

    @Override
    protected void onCreateView(final Bundle savedInstanceState) {
        setContentView(R.layout.activity_feature_hts);
        setGUI();
    }

    /** Binds the value/position text views and attaches the graph view. */
    private void setGUI() {
        mLineGraph = LineGraphView.getLineGraphView();
        mHRSValue = (TextView) findViewById(R.id.text_hrs_value);
        mHRSPosition = (TextView) findViewById(R.id.text_hrs_position);
        showGraph();
    }

    /** Adds the AChartEngine graph view to its container layout. */
    private void showGraph() {
        mGraphView = mLineGraph.getView(this);
        ViewGroup layout = (ViewGroup) findViewById(R.id.graph_hrs);
        layout.addView(mGraphView);
    }

    @Override
    protected void onRestoreInstanceState(@NonNull final Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);

        // restore graph state saved in onSaveInstanceState and resume plotting
        // if it was running before the configuration change
        if (savedInstanceState != null) {
            isGraphInProgress = savedInstanceState.getBoolean(GRAPH_STATUS);
            mCounter = savedInstanceState.getInt(GRAPH_COUNTER);
            mHrmValue = savedInstanceState.getInt(HR_VALUE);

            if (isGraphInProgress)
                startShowGraph();
        }
    }

    @Override
    protected void onSaveInstanceState(final Bundle outState) {
        super.onSaveInstanceState(outState);

        outState.putBoolean(GRAPH_STATUS, isGraphInProgress);
        outState.putInt(GRAPH_COUNTER, mCounter);
        outState.putInt(HR_VALUE, mHrmValue);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // stop the periodic refresh task so the Handler doesn't leak the activity
        stopShowGraph();
    }

    @Override
    protected int getLoggerProfileTitle() {
        return R.string.hrs_feature_title;
    }

    @Override
    protected int getAboutTextId() {
        return R.string.hrs_about_text;
    }

    @Override
    protected int getDefaultDeviceName() {
        return R.string.hrs_default_name;
    }

    @Override
    protected UUID getFilterUUID() {
        return HTSManager.HR_SERVICE_UUID;
    }

    /** Appends the given value to the line graph and repaints it. */
    private void updateGraph(final int hrmValue) {
        mCounter++;
        mLineGraph.addValue(new Point(mCounter, hrmValue));
        mGraphView.repaint();
    }

    // periodic task: plots the latest value once per REFRESH_INTERVAL while
    // the graph is in progress, rescheduling itself via the Handler
    private Runnable mRepeatTask = new Runnable() {
        @Override
        public void run() {
            if (mHrmValue > 0)
                updateGraph(mHrmValue);
            if (isGraphInProgress)
                mHandler.postDelayed(mRepeatTask, REFRESH_INTERVAL);
        }
    };

    /** Starts the periodic graph refresh. */
    void startShowGraph() {
        isGraphInProgress = true;
        mRepeatTask.run();
    }

    /** Stops the periodic graph refresh and cancels any pending callbacks. */
    void stopShowGraph() {
        isGraphInProgress = false;
        mHandler.removeCallbacks(mRepeatTask);
    }

    @Override
    protected BleManager<HTSManagerCallbacks> initializeManager() {
        final HTSManager manager = HTSManager.getInstance(getApplicationContext());
        manager.setGattCallbacks(this);
        return manager;
    }

    /** Shows the value on the UI thread, or a "not available" label when out of range. */
    private void setHRSValueOnView(final int value) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (value >= MIN_POSITIVE_VALUE && value <= MAX_HR_VALUE) {
                    mHRSValue.setText(Integer.toString(value));
                } else {
                    mHRSValue.setText(R.string.not_available_value);
                }
            }
        });
    }

    /** Shows the sensor position on the UI thread, or "not available" when null. */
    private void setHRSPositionOnView(final String position) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (position != null) {
                    mHRSPosition.setText(position);
                } else {
                    mHRSPosition.setText(R.string.not_available);
                }
            }
        });
    }

    @Override
    public void onServicesDiscovered(final boolean optionalServicesFound) {
        // this may notify user or show some views
    }

    @Override
    public void onDeviceReady() {
        startShowGraph();
    }

    @Override
    public void onHRSensorPositionFound(final String position) {
        setHRSPositionOnView(position);
    }

    @Override
    public void onHRValueReceived(int value) {
        mHrmValue = value;
        setHRSValueOnView(mHrmValue);
    }

    @Override
    public void onDeviceDisconnected() {
        super.onDeviceDisconnected();
        // reset the UI and stop plotting once the device goes away
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mHRSValue.setText(R.string.not_available_value);
                mHRSPosition.setText(R.string.not_available);
                stopShowGraph();
            }
        });
    }

    @Override
    protected void setDefaultUI() {
        mHRSValue.setText(R.string.not_available_value);
        mHRSPosition.setText(R.string.not_available);
        clearGraph();
    }

    /** Clears the graph and resets the counter and last value. */
    private void clearGraph() {
        mLineGraph.clearGraph();
        mGraphView.repaint();
        mCounter = 0;
        mHrmValue = 0;
    }
}
/*
 * Copyright 2014-2021 JKOOL, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jkoolcloud.jesl.tnt4j.sink;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.lang3.StringUtils;

import com.jkoolcloud.jesl.net.JKClient;
import com.jkoolcloud.tnt4j.core.KeyValueStats;
import com.jkoolcloud.tnt4j.core.OpLevel;
import com.jkoolcloud.tnt4j.format.EventFormatter;
import com.jkoolcloud.tnt4j.sink.DefaultEventSinkFactory;
import com.jkoolcloud.tnt4j.sink.EventSink;
import com.jkoolcloud.tnt4j.sink.LoggedEventSink;
import com.jkoolcloud.tnt4j.tracker.TrackingActivity;
import com.jkoolcloud.tnt4j.tracker.TrackingEvent;
import com.jkoolcloud.tnt4j.utils.Utils;

/**
 * <p>
 * This class implements {@link EventSink} with HTTP/S as the underlying sink implementation.
 * </p>
 *
 * @version $Revision: 8 $
 *
 * @see TrackingActivity
 * @see TrackingEvent
 * @see OpLevel
 * @see EventSink
 * @see EventFormatter
 */
public class JKCloudEventSink extends LoggedEventSink {
	private static final EventSink logger = DefaultEventSinkFactory.defaultEventSink(JKCloudEventSink.class);

	// Keys under which this sink publishes its statistics (see getStats()).
	public static final String KEY_IDLE_COUNT = "sink-idle-count";
	public static final String KEY_LAST_BYTES = "sink-last-bytes";
	public static final String KEY_SENT_MSGS = "sink-sent-messages";
	public static final String KEY_SERVICE_URL = "sink-service-url";
	public static final String KEY_PROXY_URL = "sink-proxy-url";
	public static final String KEY_LAST_WAGE = "sink-last-write-age-ms";
	public static final String KEY_ACK_COUNT = "sink-ack-count";
	public static final String KEY_LAST_ACK_MSG = "sink-ack-last-msg";
	public static final String KEY_LAST_ACK_ELAPSED = "sink-ack-last-elapsed-ms";

	public static final long DEFAULT_IDLE_TIMEOUT = TimeUnit.MINUTES.toMillis(4);
	public static final long DEFAULT_CONN_TIMEOUT = TimeUnit.SECONDS.toMillis(10);

	private JKClient jkHandle;

	private String url = "localhost";
	private String accessToken;
	private String lastAckMsg = "";
	private String proxyScheme = "http";
	private String proxyHost;
	private int proxyPort = 0;
	private String proxyUser;
	private String proxyPass;
	private long connTimeout = DEFAULT_CONN_TIMEOUT;
	private long idleTimeout = DEFAULT_IDLE_TIMEOUT;
	private boolean ackSends = false;

	// Counters for the statistics reported by getStats(); all monotonically updated on write/ack.
	private AtomicLong idleCount = new AtomicLong(0);
	private AtomicLong lastWrite = new AtomicLong(0);
	private AtomicLong lastBytes = new AtomicLong(0);
	private AtomicLong sentMsgs = new AtomicLong(0);
	private AtomicLong ackCount = new AtomicLong(0);
	private AtomicLong ackElapsed = new AtomicLong(0);

	/**
	 * Create a socket event sink based on a given URL and formatter. Another sink can be associated with this sink
	 * where all events are routed.
	 *
	 * @param name
	 *            sink name
	 * @param url
	 *            http/https URL to jKoolCloud service
	 * @param frm
	 *            event formatter associated with this sink
	 * @param sink
	 *            piped sink where all events are piped
	 */
	public JKCloudEventSink(String name, String url, EventFormatter frm, EventSink sink) {
		this(name, url, null, frm, sink);
	}

	/**
	 * Create a socket event sink based on a given URL and formatter. Another sink can be associated with this sink
	 * where all events are routed.
	 *
	 * @param name
	 *            sink name
	 * @param url
	 *            http/https URL to jKoolCloud service
	 * @param token
	 *            api access token
	 * @param frm
	 *            event formatter associated with this sink
	 * @param sink
	 *            piped sink where all events are piped
	 */
	public JKCloudEventSink(String name, String url, String token, EventFormatter frm, EventSink sink) {
		super(name, frm, sink);
		this.url = url;
		this.accessToken = token;
	}

	/**
	 * Sets proxy communication parameters.
	 *
	 * @param scheme
	 *            proxy communication scheme
	 * @param host
	 *            proxy host name if any, null if none
	 * @param port
	 *            proxy port number if any, 0 of none
	 * @return itself
	 */
	public JKCloudEventSink setProxyParms(String scheme, String host, int port) {
		this.proxyScheme = scheme;
		this.proxyHost = host;
		this.proxyPort = port;
		return this;
	}

	/**
	 * Sets proxy basic authentication credentials.
	 *
	 * @param user
	 *            proxy authentication user name
	 * @param pass
	 *            proxy authentication password
	 * @return itself
	 */
	public JKCloudEventSink setProxyCredentials(String user, String pass) {
		this.proxyUser = user;
		this.proxyPass = pass;
		return this;
	}

	/**
	 * Sets connection timeout.
	 *
	 * @param timeout
	 *            connection timeout
	 * @param tunit
	 *            time out time units
	 * @return itself
	 */
	public JKCloudEventSink setConnectionTimeout(long timeout, TimeUnit tunit) {
		this.connTimeout = tunit.toMillis(timeout);
		return this;
	}

	/**
	 * Acknowledge every send (much slower if set to true).
	 *
	 * @param ackSends
	 *            true to acknowledge every sends, false -- send and forget
	 * @return itself
	 */
	public JKCloudEventSink ackSends(boolean ackSends) {
		this.ackSends = ackSends;
		return this;
	}

	/**
	 * Gets the access token used to establish authenticated connections to Analyzer.
	 *
	 * @return access token
	 */
	public String getAccessToken() {
		return accessToken;
	}

	/**
	 * Sets the access token to use when establishing authenticated connections to Analyzer.
	 *
	 * @param accessToken
	 *            the access token
	 * @return itself
	 */
	public JKCloudEventSink setAccessToken(String accessToken) {
		this.accessToken = accessToken;
		return this;
	}

	/**
	 * Sets idle timeout for the sink. Connection is dropped on next write after timeout.
	 *
	 * @param timeout
	 *            idle timeout
	 * @param tunit
	 *            time out time units
	 * @return itself
	 */
	public JKCloudEventSink setIdleTimeout(long timeout, TimeUnit tunit) {
		this.idleTimeout = tunit.toMillis(timeout);
		return this;
	}

	/**
	 * Gets idle timeout in milliseconds.
	 *
	 * @return idle timeout in ms.
	 */
	public long getIdleTimeout() {
		return idleTimeout;
	}

	/**
	 * Gets last write time stamp.
	 *
	 * @return last write time stamp
	 */
	public long getLastWriteTime() {
		return lastWrite.get();
	}

	/**
	 * Gets last write age in ms.
	 *
	 * @return last write age in ms (0 when nothing has been written yet)
	 */
	public long getLastWriteAge() {
		return lastWrite.get() > 0 ? System.currentTimeMillis() - lastWrite.get() : 0;
	}

	/**
	 * Handle connection idle timeout, attempt to close and reopen to avoid data loss.
	 *
	 * @throws IOException
	 *             if IO error occurs while closing/opening sink handle
	 */
	protected void handleIdleReconnect() throws IOException {
		if ((idleTimeout > 0) && (getLastWriteAge() > idleTimeout)) {
			idleCount.incrementAndGet();
			reopen();
		}
	}

	@Override
	public void resetStats() {
		idleCount.set(0);
		lastBytes.set(0);
		sentMsgs.set(0);
		ackCount.set(0);
		ackElapsed.set(0);
		lastAckMsg = "";
		super.resetStats();
	}

	@Override
	public KeyValueStats getStats(Map<String, Object> stats) {
		super.getStats(stats);
		stats.put(Utils.qualify(this, KEY_LAST_BYTES), lastBytes.get());
		stats.put(Utils.qualify(this, KEY_SENT_MSGS), sentMsgs.get());
		stats.put(Utils.qualify(this, KEY_LAST_WAGE), getLastWriteAge());
		stats.put(Utils.qualify(this, KEY_IDLE_COUNT), idleCount.get());
		// Ack statistics are only meaningful when every send is acknowledged.
		if (ackSends) {
			stats.put(Utils.qualify(this, KEY_ACK_COUNT), ackCount.get());
			stats.put(Utils.qualify(this, KEY_LAST_ACK_ELAPSED), ackElapsed.get());
			stats.put(Utils.qualify(this, KEY_LAST_ACK_MSG), lastAckMsg);
		}
		stats.put(Utils.qualify(this, KEY_SERVICE_URL), url);
		if (!Utils.isEmpty(proxyHost)) {
			// FIX: was (proxyScheme + "//" + ...) which produced a malformed URL like "http//host:port"
			stats.put(Utils.qualify(this, KEY_PROXY_URL), (proxyScheme + "://" + proxyHost + ":" + proxyPort));
		}
		return this;
	}

	@Override
	public Object getSinkHandle() {
		return jkHandle;
	}

	@Override
	public synchronized boolean isOpen() {
		return (jkHandle != null && jkHandle.isConnected());
	}

	@Override
	protected synchronized void _open() throws IOException {
		try {
			if (isOpen()) {
				_close();
			}
			setErrorState(null);
			// FIX: "timeout" previously used placeholder {5} (the masked proxy password); connTimeout is
			// passed as argument index 7 and was never printed.
			logger.log(OpLevel.DEBUG,
					"Open name={6}, url={0}, timeout={7}, proxy.host={1}, proxy.port={2}, proxy.scheme={3}, proxy.user={4}, proxy.pass={5}",
					url, proxyHost, proxyPort, proxyScheme, proxyUser, proxyPass == null ? null : "xxxxxx", getName(),
					connTimeout);
			jkHandle = new JKClient(url, connTimeout, proxyHost, proxyPort, proxyScheme, proxyUser, proxyPass, logger);
			if (!StringUtils.isEmpty(accessToken)) {
				jkHandle.connect(accessToken);
			} else {
				jkHandle.connect();
			}
			lastWrite.set(System.currentTimeMillis());
			super._open();
		} catch (Throwable e) {
			logger.log(OpLevel.ERROR,
					"Failed to open sink name={6}, url={0}, proxy.host={1}, proxy.port={2}, proxy.scheme={3}, proxy.user={4}, proxy.pass={5}",
					url, proxyHost, proxyPort, proxyScheme, proxyUser, proxyPass == null ? null : "xxxxxx", getName(),
					e);
			_close();
			// Preserve the original cause; rewrap non-IO failures as IOException for the sink contract.
			if (e instanceof IOException) {
				throw (IOException) e;
			} else {
				throw new IOException(e.getMessage(), e);
			}
		}
	}

	@Override
	protected synchronized void _close() throws IOException {
		logger.log(OpLevel.DEBUG, "Closing sink name={4}, url={0}, proxy.host={1}, proxy.port={2}, proxy.scheme={3}",
				url, proxyHost, proxyPort, proxyScheme, getName());
		Utils.close(jkHandle);
		super._close();
	}

	@Override
	public String toString() {
		return super.toString() //
				+ "{url: " + url //
				+ ", token: " + Utils.hide(accessToken, "x", 4) //
				+ ", jk.handle: " + jkHandle //
				+ "}";
	}

	@Override
	protected synchronized void writeLine(String msg) throws IOException {
		if (StringUtils.isEmpty(msg)) {
			return;
		}
		_checkState();
		handleIdleReconnect();

		String lineMsg = msg.endsWith("\n") ? msg : msg + "\n";
		// NOTE(review): length() counts chars, not encoded bytes — counts may differ for non-ASCII payloads.
		incrementBytesSent(lineMsg.length());
		jkHandle.send(accessToken, lineMsg, ackSends);
		long timestamp = System.currentTimeMillis();
		lastWrite.set(timestamp);
		sentMsgs.incrementAndGet();
		lastBytes.set(lineMsg.length());
		if (ackSends) {
			_handleAcks(timestamp);
		}
	}

	/** Reads the server acknowledgment and updates ack statistics (even if the read fails). */
	private void _handleAcks(long timestamp) throws IOException {
		try {
			lastAckMsg = jkHandle.read();
		} finally {
			ackCount.incrementAndGet();
			ackElapsed.set(System.currentTimeMillis() - timestamp);
		}
	}
}
/**
 * Copyright 2005-2015 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.kim.impl.jaxb;

import java.io.Serializable;

import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.NormalizedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

import org.kuali.rice.core.util.jaxb.NameAndNamespacePair;
import org.kuali.rice.core.util.jaxb.NameAndNamespacePairValidatingAdapter;
import org.kuali.rice.kim.api.permission.PermissionContract;
import org.kuali.rice.kim.api.role.RoleContract;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;

/**
 * Base class representing an unmarshalled &lt;rolePermission&gt; element.
 * Refer to the static inner classes for more information about the specific contexts.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
@XmlTransient
public abstract class RolePermissionXmlDTO implements Serializable {

    private static final long serialVersionUID = 1L;

    @XmlElement(name="permissionId")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    private String permissionId;

    @XmlElement(name="permissionName")
    @XmlJavaTypeAdapter(NameAndNamespacePairValidatingAdapter.class)
    private NameAndNamespacePair permissionNameAndNamespace;

    /**
     * Constructs an empty RolePermissionXmlDTO instance.
     */
    public RolePermissionXmlDTO() {}

    /**
     * Constructs a RolePermissionXmlDTO that gets populated from the given KIM permission.
     *
     * @param permission The permission that this DTO should obtain its data from.
     * @param populateIds If true, the permission ID will get populated; otherwise, it will remain null.
     * @throws IllegalArgumentException if {@code permission} is null.
     */
    public RolePermissionXmlDTO(PermissionContract permission, boolean populateIds) {
        if (permission == null) {
            throw new IllegalArgumentException("Cannot construct a role permission with a null permission");
        }
        if (populateIds) {
            this.permissionId = permission.getId();
        }
        this.permissionNameAndNamespace = new NameAndNamespacePair(
                permission.getNamespaceCode(), permission.getName());
    }

    /**
     * @return the permissionId
     */
    public String getPermissionId() {
        return this.permissionId;
    }

    /**
     * @param permissionId the permissionId to set
     */
    public void setPermissionId(String permissionId) {
        this.permissionId = permissionId;
    }

    /**
     * @return the permissionNameAndNamespace
     */
    public NameAndNamespacePair getPermissionNameAndNamespace() {
        return this.permissionNameAndNamespace;
    }

    /**
     * @param permissionNameAndNamespace the permissionNameAndNamespace to set
     */
    public void setPermissionNameAndNamespace(NameAndNamespacePair permissionNameAndNamespace) {
        this.permissionNameAndNamespace = permissionNameAndNamespace;
    }

    /**
     * Retrieves the permission name from the permission-name-and-namespace combo.
     *
     * @return The name of the permission assigned to the role, or null if the permission-name-and-namespace combo is null.
     */
    public String getPermissionName() {
        return (permissionNameAndNamespace != null) ? permissionNameAndNamespace.getName() : null;
    }

    /**
     * Retrieves the permission namespace code from the permission-name-and-namespace combo.
     *
     * @return The namespace code of the permission assigned to the role, or null if the permission-name-and-namespace combo is null.
     */
    public String getPermissionNamespaceCode() {
        return (permissionNameAndNamespace != null) ? permissionNameAndNamespace.getNamespaceCode() : null;
    }

    /**
     * Retrieves the ID of the role that the permission is assigned to.
     * Subclasses are responsible for implementing this method so that it does so.
     *
     * @return The role ID of the role that the permission is assigned to.
     */
    public abstract String getRoleId();

    // =======================================================================================================

    /**
     * This class represents a &lt;rolePermission&gt; element that is not a descendant of a &lt;role&gt; element.
     *
     * @author Kuali Rice Team (rice.collab@kuali.org)
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name="StandaloneRolePermissionType", propOrder={
            "roleId", "roleNameAndNamespace", "permissionId", "permissionNameAndNamespace"
    })
    public static class OutsideOfRole extends RolePermissionXmlDTO {

        private static final long serialVersionUID = 1L;

        @XmlElement(name="roleId")
        @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
        private String roleId;

        @XmlElement(name="roleName")
        @XmlJavaTypeAdapter(NameAndNamespacePairValidatingAdapter.class)
        private NameAndNamespacePair roleNameAndNamespace;

        public OutsideOfRole() {
            super();
        }

        /**
         * Constructs an OutsideOfRole DTO populated from the given permission and role ID.
         *
         * @param permission The permission that this DTO should obtain its data from.
         * @param roleId The ID of the role the permission is assigned to.
         * @param populateIds If true, the role and permission IDs will get populated.
         * @throws IllegalArgumentException if no role with the given ID can be found.
         */
        public OutsideOfRole(PermissionContract permission, String roleId, boolean populateIds) {
            super(permission, populateIds);
            if (populateIds) {
                this.roleId = roleId;
            }
            RoleContract tempRole = KimApiServiceLocator.getRoleService().getRole(roleId);
            if (tempRole == null) {
                throw new IllegalArgumentException("Cannot find role with ID \"" + roleId + "\"");
            }
            this.roleNameAndNamespace = new NameAndNamespacePair(tempRole.getNamespaceCode(), tempRole.getName());
        }

        /**
         * @see org.kuali.rice.kim.impl.jaxb.RolePermissionXmlDTO#getRoleId()
         */
        @Override
        public String getRoleId() {
            return this.roleId;
        }

        /**
         * @param roleId the roleId to set
         */
        public void setRoleId(String roleId) {
            this.roleId = roleId;
        }

        /**
         * @return the roleNameAndNamespace
         */
        public NameAndNamespacePair getRoleNameAndNamespace() {
            return this.roleNameAndNamespace;
        }

        /**
         * @param roleNameAndNamespace the roleNameAndNamespace to set
         */
        public void setRoleNameAndNamespace(NameAndNamespacePair roleNameAndNamespace) {
            this.roleNameAndNamespace = roleNameAndNamespace;
        }

        /**
         * Retrieves the role name from the role-name-and-namespace combo.
         *
         * @return The name of the role that is assigned to the permission, or null if the role-name-and-namespace combo is null.
         */
        public String getRoleName() {
            return (roleNameAndNamespace != null) ? roleNameAndNamespace.getName() : null;
        }

        /**
         * Retrieves the role namespace code from the role-name-and-namespace combo.
         *
         * @return The namespace code of the role that is assigned to the permission, or null if the role-name-and-namespace combo is null.
         */
        public String getRoleNamespaceCode() {
            return (roleNameAndNamespace != null) ? roleNameAndNamespace.getNamespaceCode() : null;
        }
    }

    // =======================================================================================================

    /**
     * This class represents a &lt;rolePermission&gt; element that is a descendant of a &lt;role&gt; element.
     *
     * @author Kuali Rice Team (rice.collab@kuali.org)
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name="RolePermissionType", propOrder={
            "permissionId", "permissionNameAndNamespace"
    })
    public static class WithinRole extends RolePermissionXmlDTO {

        private static final long serialVersionUID = 1L;

        // Not marshalled; inherited from the enclosing <role> element during unmarshalling.
        @XmlTransient
        private String roleId;

        public WithinRole() {
            super();
        }

        public WithinRole(PermissionContract permission, boolean populateIds) {
            super(permission, populateIds);
        }

        /**
         * JAXB callback invoked before this element is unmarshalled; captures the role ID from the
         * parent &lt;rolePermissions&gt; wrapper element.
         */
        void beforeUnmarshal(Unmarshaller unmarshaller, Object parent) {
            if (parent instanceof RolePermissionsXmlDTO) {
                // FIX: previously cast to RolePermissionXmlDTO (singular), which can only succeed if the
                // container type extends this element type; cast must match the instanceof guard.
                this.roleId = ((RolePermissionsXmlDTO) parent).getRoleId();
            }
        }

        /**
         * @see org.kuali.rice.kim.impl.jaxb.RolePermissionXmlDTO#getRoleId()
         */
        @Override
        public String getRoleId() {
            return this.roleId;
        }
    }
}
/*
 * Copyright 1999-2018 Alibaba Group Holding Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.csp.sentinel.demo.flow;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import com.alibaba.csp.sentinel.util.TimeUtil;
import com.alibaba.csp.sentinel.Entry;
import com.alibaba.csp.sentinel.SphU;
import com.alibaba.csp.sentinel.slots.block.BlockException;
import com.alibaba.csp.sentinel.slots.block.RuleConstant;
import com.alibaba.csp.sentinel.slots.block.flow.FlowRule;
import com.alibaba.csp.sentinel.slots.block.flow.FlowRuleManager;

/**
 * When {@link FlowRule#controlBehavior} set to {@link RuleConstant#CONTROL_BEHAVIOR_WARM_UP}, real passed qps will
 * gradually increase to {@link FlowRule#count}, other than burst increasing.
 * <p/>
 * Run this demo, results are as follows:
 * <pre>
 * ...
 * 1530497805902, total:1, pass:1, block:0 // run in slow qps
 * 1530497806905, total:3, pass:3, block:0
 * 1530497807909, total:2, pass:2, block:0
 * 1530497808913, total:3, pass:3, block:0
 * 1530497809917, total:269, pass:6, block:263 // request qps burst increase, warm up behavior triggered.
 * 1530497810917, total:3676, pass:7, block:3669
 * 1530497811919, total:3734, pass:9, block:3725
 * 1530497812920, total:3692, pass:9, block:3683
 * 1530497813923, total:3642, pass:10, block:3632
 * 1530497814926, total:3685, pass:10, block:3675
 * 1530497815930, total:3671, pass:11, block:3660
 * 1530497816933, total:3660, pass:15, block:3645
 * 1530497817936, total:3681, pass:21, block:3661 // warm up process end, pass qps increased to {@link FlowRule#count}
 * 1530497818940, total:3737, pass:20, block:3716
 * 1530497819945, total:3663, pass:20, block:3643
 * 1530497820950, total:3723, pass:21, block:3702
 * 1530497821954, total:3680, pass:20, block:3660
 * ...
 * </pre>
 *
 * @author jialiang.linjl
 */
public class WarmUpFlowDemo {

    private static final String KEY = "abc";

    // Per-second counters; sampled and diffed by the timer thread.
    private static AtomicInteger pass = new AtomicInteger();
    private static AtomicInteger block = new AtomicInteger();
    private static AtomicInteger total = new AtomicInteger();

    private static volatile boolean stop = false;
    private static final int threadCount = 100;

    private static int seconds = 60 + 40;

    public static void main(String[] args) throws Exception {
        initFlowRule();
        // trigger Sentinel internal init
        Entry entry = null;
        try {
            entry = SphU.entry(KEY);
        } catch (Exception e) {
            // best-effort warm-up call; failures here are irrelevant to the demo
        } finally {
            if (entry != null) {
                entry.exit();
            }
        }
        Thread timer = new Thread(new TimerTask());
        timer.setName("sentinel-timer-task");
        timer.start();

        // first make the system run on a very low condition
        for (int i = 0; i < 3; i++) {
            Thread t = new Thread(new WarmUpTask());
            t.setName("sentinel-warmup-task");
            t.start();
        }
        Thread.sleep(20000);

        /*
         * Start more thread to simulate more qps. Since we use {@link RuleConstant.CONTROL_BEHAVIOR_WARM_UP} as
         * {@link FlowRule#controlBehavior}, real passed qps will increase to {@link FlowRule#count} in
         * {@link FlowRule#warmUpPeriodSec} seconds.
         */
        for (int i = 0; i < threadCount; i++) {
            Thread t = new Thread(new RunTask());
            t.setName("sentinel-run-task");
            t.start();
        }
    }

    /** Loads a single warm-up QPS rule (count=20, warm-up period 10s) for the demo resource. */
    private static void initFlowRule() {
        List<FlowRule> rules = new ArrayList<FlowRule>();
        FlowRule rule1 = new FlowRule();
        rule1.setResource(KEY);
        rule1.setCount(20);
        rule1.setGrade(RuleConstant.FLOW_GRADE_QPS);
        rule1.setLimitApp("default");
        rule1.setControlBehavior(RuleConstant.CONTROL_BEHAVIOR_WARM_UP);
        rule1.setWarmUpPeriodSec(10);
        rules.add(rule1);
        FlowRuleManager.loadRules(rules);
    }

    /** Low-rate task: one request roughly every 0-2 seconds. */
    static class WarmUpTask implements Runnable {
        @Override
        public void run() {
            // FIX: Random is created once per task instead of once per loop iteration.
            Random random = new Random();
            while (!stop) {
                Entry entry = null;
                try {
                    entry = SphU.entry(KEY);
                    // token acquired, means pass
                    pass.addAndGet(1);
                } catch (BlockException e1) {
                    block.incrementAndGet();
                } catch (Exception e2) {
                    // biz exception
                } finally {
                    total.incrementAndGet();
                    if (entry != null) {
                        entry.exit();
                    }
                }
                try {
                    TimeUnit.MILLISECONDS.sleep(random.nextInt(2000));
                } catch (InterruptedException e) {
                    // restore the interrupt flag; the loop exits via the stop flag
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    /** High-rate task: one request roughly every 0-50 ms, used to trigger the warm-up behavior. */
    static class RunTask implements Runnable {
        @Override
        public void run() {
            // FIX: Random is created once per task instead of once per loop iteration.
            Random random = new Random();
            while (!stop) {
                Entry entry = null;
                try {
                    entry = SphU.entry(KEY);
                    pass.addAndGet(1);
                } catch (BlockException e1) {
                    block.incrementAndGet();
                } catch (Exception e2) {
                    // biz exception
                } finally {
                    total.incrementAndGet();
                    if (entry != null) {
                        entry.exit();
                    }
                }
                try {
                    TimeUnit.MILLISECONDS.sleep(random.nextInt(50));
                } catch (InterruptedException e) {
                    // restore the interrupt flag; the loop exits via the stop flag
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    /** Prints per-second pass/block/total counts and stops the demo after {@code seconds} ticks. */
    static class TimerTask implements Runnable {
        @Override
        public void run() {
            long start = System.currentTimeMillis();
            System.out.println("begin to statistic!!!");
            long oldTotal = 0;
            long oldPass = 0;
            long oldBlock = 0;
            while (!stop) {
                try {
                    TimeUnit.SECONDS.sleep(1);
                } catch (InterruptedException e) {
                    // restore the interrupt flag; the loop exits via the stop flag
                    Thread.currentThread().interrupt();
                }
                // Diff against the previous sample to get per-second rates.
                long globalTotal = total.get();
                long oneSecondTotal = globalTotal - oldTotal;
                oldTotal = globalTotal;

                long globalPass = pass.get();
                long oneSecondPass = globalPass - oldPass;
                oldPass = globalPass;

                long globalBlock = block.get();
                long oneSecondBlock = globalBlock - oldBlock;
                oldBlock = globalBlock;

                System.out.println(TimeUtil.currentTimeMillis() + ", total:" + oneSecondTotal
                    + ", pass:" + oneSecondPass
                    + ", block:" + oneSecondBlock);
                if (seconds-- <= 0) {
                    stop = true;
                }
            }

            long cost = System.currentTimeMillis() - start;
            System.out.println("time cost: " + cost + " ms");
            System.out.println("total:" + total.get() + ", pass:" + pass.get()
                + ", block:" + block.get());
            System.exit(0);
        }
    }
}
/*
 * Copyright 2015 Karl Bennett
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package shiver.me.timbers.data.random;

import org.junit.Test;

import shiver.me.timbers.data.random.test.DoubleValues;

import static shiver.me.timbers.data.random.RandomDoubles.someDouble;
import static shiver.me.timbers.data.random.RandomDoubles.someDoubleBetween;
import static shiver.me.timbers.data.random.RandomDoubles.someDoubleGreaterThan;
import static shiver.me.timbers.data.random.RandomDoubles.someDoubleLessThan;
import static shiver.me.timbers.data.random.RandomDoubles.someDoubles;
import static shiver.me.timbers.data.random.RandomDoubles.someNegativeDouble;
import static shiver.me.timbers.data.random.RandomDoubles.somePositiveDouble;
import static shiver.me.timbers.data.random.test.IsANumber.isADouble;
import static shiver.me.timbers.data.random.test.TestUtils.TEST_RETRY_AMOUNT;

/**
 * Tests for {@link RandomDoubles}. Every test delegates to the shared, type-parameterised test
 * suites ({@link BoundSingleNumbersTests} and {@link MultipleNumbersTests}); the
 * {@link DoublesMethods} adapter maps the generic {@code Numbers} interface onto the static
 * double-specific factory methods.
 */
public class RandomDoublesTest {

    private static final DoubleValues VALUES = new DoubleValues();
    // Shared suites: single-value tests (SNT) and multi-value/iterable tests (MNT).
    private static final BoundSingleNumbersTests<Double> SNT = new BoundSingleNumbersTests<>(VALUES, isADouble());
    private static final MultipleNumbersTests<Double> MNT = new MultipleNumbersTests<>(VALUES, isADouble());
    private static final Numbers<Double> DOUBLES_METHODS = new DoublesMethods();

    static {
        // Bound the retry loop so boundary tests fail fast instead of spinning.
        RandomDoubles.setRetryAmount(TEST_RETRY_AMOUNT);
    }

    @Test
    public void Instantiation_to_get_full_coverage() {
        new RandomDoubles();
    }

    @Test
    public void A_random_double_can_be_generated() {
        SNT.A_random_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_positive_double_can_be_generated() {
        SNT.A_random_positive_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_negative_double_can_be_generated() {
        SNT.A_random_negative_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_greater_than_a_positive_number_can_be_generated() {
        SNT.A_random_number_greater_than_a_positive_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_greater_than_a_negative_number_can_be_generated() {
        SNT.A_random_number_greater_than_a_negative_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test(expected = IllegalStateException.class)
    public void A_random_double_greater_than_the_max_double_value_can_not_be_generated() {
        SNT.A_random_number_greater_than_the_max_number_value_can_not_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_less_than_a_positive_number_can_be_generated() {
        SNT.A_random_number_less_than_a_positive_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_less_than_a_negative_number_can_be_generated() {
        SNT.A_random_number_less_than_a_negative_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test(expected = IllegalStateException.class)
    public void A_random_double_less_than_the_min_double_value_can_not_be_generated() {
        SNT.A_random_number_less_than_the_min_number_value_can_not_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_between_two_positive_numbers_can_be_generated() {
        SNT.A_random_number_between_two_positive_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_between_two_negative_numbers_can_be_generated() {
        SNT.A_random_number_between_two_negative_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_between_a_negative_number_and_a_positive_number_can_be_generated() {
        SNT.A_random_number_between_a_negative_number_and_a_positive_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_between_a_range_the_size_of_the_max_double_value_can_be_generated() {
        SNT.A_random_number_between_a_range_the_size_of_the_max_number_value_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_double_between_the_max_ranges_can_be_generated() {
        SNT.A_random_number_between_the_max_ranges_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_length_iterable_of_random_doubles_can_be_generated() {
        MNT.A_random_length_iterable_of_random_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_fixed_length_iterable_of_random_doubles_can_be_generated() {
        MNT.A_fixed_length_iterable_of_random_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_list_of_doubles_can_be_generated() {
        MNT.A_random_list_of_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_array_of_doubles_can_be_generated() {
        MNT.A_random_array_of_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_set_of_doubles_can_be_generated() {
        MNT.A_random_set_of_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_iterable_of_positive_doubles_can_be_generated() {
        MNT.A_random_iterable_of_positive_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_iterable_of_negative_doubles_can_be_generated() {
        MNT.A_random_iterable_of_negative_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_iterable_of_doubles_greater_than_a_number_can_be_generated() {
        MNT.A_random_iterable_of_numbers_greater_than_a_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_iterable_of_doubles_less_than_a_number_can_be_generated() {
        MNT.A_random_iterable_of_numbers_less_than_a_number_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_iterable_of_doubles_between_two_numbers_can_be_generated() {
        MNT.A_random_iterable_of_numbers_between_two_numbers_can_be_generated(DOUBLES_METHODS);
    }

    @Test
    public void A_random_iterable_of_doubles_with_multiple_restrictions_can_be_generated() {
        MNT.A_random_iterable_of_numbers_with_multiple_restrictions_can_be_generated(DOUBLES_METHODS);
    }

    /** Adapter that exposes the static {@link RandomDoubles} factory methods as a {@code Numbers<Double>}. */
    private static class DoublesMethods implements Numbers<Double> {

        @Override
        public Double someNumber() {
            return someDouble();
        }

        @Override
        public Double somePositiveNumber() {
            return somePositiveDouble();
        }

        @Override
        public Double someNegativeNumber() {
            return someNegativeDouble();
        }

        @Override
        public Double someNumberGreaterThan(Double size) {
            return someDoubleGreaterThan(size);
        }

        @Override
        public Double someNumberLessThan(Double size) {
            return someDoubleLessThan(size);
        }

        @Override
        public Double someNumberBetween(Double min, Double max) {
            return someDoubleBetween(min, max);
        }

        @Override
        public NumbersIterable<Double> someNumbers() {
            return someDoubles();
        }

        @Override
        public NumbersIterable<Double> someNumbers(int length) {
            return someDoubles(length);
        }
    }
}
/*
 * Artificial Intelligence for Humans
 * Volume 3: Deep Learning and Neural Networks
 * Java Version
 * http://www.aifh.org
 * http://www.jeffheaton.com
 *
 * Code repository:
 * https://github.com/jeffheaton/aifh
 *
 * Copyright 2014-2015 by Jeff Heaton
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * For more information on Heaton Research copyrights, licenses
 * and trademarks visit:
 * http://www.heatonresearch.com/copyright
 */
package com.heatonresearch.aifh.examples.rbf;

import com.heatonresearch.aifh.general.data.BasicData;
import com.heatonresearch.aifh.learning.RBFNetwork;
import com.heatonresearch.aifh.learning.TrainAnneal;
import com.heatonresearch.aifh.learning.score.ScoreFunction;
import com.heatonresearch.aifh.learning.score.ScoreRegressionData;
import com.heatonresearch.aifh.normalize.DataSet;
import com.heatonresearch.aifh.randomize.MersenneTwisterGenerateRandom;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * Interactive Swing demo: trains an RBF network on the Iris data set with
 * simulated annealing, treating the problem as a one-vs-rest binary
 * classification ("is Iris-versicolor?"), and plots the resulting ROC curve
 * (True Positive Rate vs. False Positive Rate) after each training burst.
 */
public class LearnIrisAnnealROC extends JFrame implements ActionListener {
    /** Runs a burst of annealing iterations and refreshes the chart. */
    private final JButton buttonAnneal;
    /** Re-randomizes the network weights and refreshes the chart. */
    private final JButton buttonReset;
    /** Simulated-annealing trainer wrapping {@link #network}. */
    private final TrainAnneal trainer;
    /** RBF network: 4 inputs (iris measurements), 4 RBF centers, 1 output. */
    private final RBFNetwork network;
    /** Normalized supervised training data loaded from iris.csv. */
    private final List<BasicData> training;
    /** Chart data container holding the single ROC series. */
    private final XYSeriesCollection dataset;
    /** The ROC curve series, rebuilt on every chart update. */
    private final XYSeries dataSeries1;

    public static final String TPR = "True Positive Rate";
    public static final String FPR = "False Positive Rate";
    // NOTE(review): THRESHOLD is currently unreferenced; kept because it is a
    // public constant that external code may use.
    public static final String THRESHOLD = "threshold";

    /**
     * One (FPR, TPR) point of the ROC curve. Points are ordered — and, when
     * stored in a {@link TreeSet}, de-duplicated — solely by their x (FPR)
     * coordinate; the y value does not participate in ordering or equality.
     */
    class RocPair implements Comparable<RocPair> {
        private final double x;
        private final double y;

        public RocPair(double x, double y) {
            this.x = x;
            this.y = y;
        }

        public double getX() {
            return this.x;
        }

        public double getY() {
            return this.y;
        }

        /**
         * Legacy x-only comparison helper; prefer {@link #equals(Object)}.
         *
         * @deprecated use {@link #equals(Object)} instead.
         */
        @Deprecated
        public boolean equal(RocPair other) {
            return Double.compare(getX(), other.getX()) == 0;
        }

        @Override
        public int compareTo(RocPair other) {
            return Double.compare(getX(), other.getX());
        }

        // equals/hashCode added so the class honors the recommended
        // "compareTo consistent with equals" contract of Comparable:
        // both compare by x only, matching compareTo above.
        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (!(other instanceof RocPair)) {
                return false;
            }
            return Double.compare(this.x, ((RocPair) other).x) == 0;
        }

        @Override
        public int hashCode() {
            return Double.hashCode(this.x);
        }

        @Override
        public String toString() {
            return "[" + this.x + "," + this.y + "]";
        }
    }

    /**
     * Builds the UI (chart + Anneal/Reset buttons), loads the Iris data and
     * constructs the network and annealing trainer.
     */
    public LearnIrisAnnealROC() {
        this.setSize(640, 480);
        this.setDefaultCloseOperation(EXIT_ON_CLOSE);
        this.setTitle("Iris Classify ROC");
        Container content = this.getContentPane();
        content.setLayout(new BorderLayout());

        this.dataset = new XYSeriesCollection();
        this.dataSeries1 = new XYSeries("Threshold");
        this.dataset.addSeries(this.dataSeries1);

        // BUG FIX: chart title was the copy-paste leftover "Line Chart Demo 6"
        // from the JFreeChart example code; use the application's real title.
        final JFreeChart lineChart = ChartFactory.createXYLineChart(
                "Iris Classify ROC",       // chart title
                FPR,                       // x axis label
                TPR,                       // y axis label
                this.dataset,              // data
                PlotOrientation.VERTICAL,
                true,                      // include legend
                true,                      // tooltips
                false                      // urls
        );

        ChartPanel chartPanel = new ChartPanel(lineChart);
        chartPanel.setPreferredSize(new java.awt.Dimension(560, 367));
        content.add(chartPanel, BorderLayout.CENTER);

        JPanel buttonPanel = new JPanel();
        buttonPanel.add(this.buttonAnneal = new JButton("Anneal"));
        buttonPanel.add(this.buttonReset = new JButton("Reset"));
        this.buttonAnneal.addActionListener(this);
        this.buttonReset.addActionListener(this);
        content.add(buttonPanel, BorderLayout.SOUTH);

        this.training = loadIrisData();
        this.network = new RBFNetwork(4, 4, 1);
        this.network.reset(new MersenneTwisterGenerateRandom());
        final ScoreFunction score = new ScoreRegressionData(this.training);
        this.trainer = new TrainAnneal(this.network, score);
    }

    /**
     * Loads and normalizes iris.csv from the classpath, recoding the ideal
     * output to a binary "is Iris-versicolor" flag (1 = versicolor, 0 = other).
     *
     * <p>Terminates the JVM (non-zero exit) if the resource is missing or
     * cannot be read — acceptable for a standalone demo.
     *
     * @return the supervised training data; never returns {@code null} in
     *         practice (the error paths exit the JVM first).
     */
    private List<BasicData> loadIrisData() {
        try {
            final InputStream istream = this.getClass().getResourceAsStream("/iris.csv");
            if (istream == null) {
                System.out.println("Cannot access data set, make sure the resources are available.");
                System.exit(1);
            }
            final DataSet ds = DataSet.load(istream);
            // The following ranges are setup for the Iris data set. If you wish to
            // normalize other files you will need to modify the below function calls
            // for those files.
            ds.normalizeRange(0, 0, 1);
            ds.normalizeRange(1, 0, 1);
            ds.normalizeRange(2, 0, 1);
            ds.normalizeRange(3, 0, 1);
            final Map<String, Integer> species = ds.encodeNumeric(4);
            istream.close();

            int irisVersicolor = species.get("Iris-versicolor");

            final java.util.List<BasicData> trainingData = ds.extractSupervised(0, 4, 4, 1);
            for (BasicData aTrainingData : trainingData) {
                if (aTrainingData.getIdeal()[0] == irisVersicolor) {
                    aTrainingData.getIdeal()[0] = 1; // True, is versicolor
                } else {
                    aTrainingData.getIdeal()[0] = 0; // False, is not versicolor
                }
            }
            return trainingData;
        } catch (IOException ex) {
            ex.printStackTrace();
            // BUG FIX: this failure path previously called System.exit(0),
            // signalling success to the OS on an I/O error; use a non-zero code.
            System.exit(1);
            return null; // unreachable, required by the compiler
        }
    }

    /**
     * Sweeps the training set at the given decision threshold and computes the
     * (FPR, TPR) pair for the ROC curve.
     *
     * <p>NOTE(review): if the data set contained no positives (tp+fn == 0) or
     * no negatives (fp+tn == 0) the corresponding rate would be NaN; with the
     * Iris data both classes are always present, so no guard is added here.
     *
     * @param thresh decision threshold applied to the network's output.
     * @return a two-element array: {@code [0]} = false positive rate,
     *         {@code [1]} = true positive rate.
     */
    private double[] calculateTruePositiveFalsePositive(double thresh) {
        int tp = 0;
        int fp = 0;
        int tn = 0;
        int fn = 0;

        for (BasicData item : this.training) {
            double x = this.network.computeRegression(item.getInput())[0];
            double y = item.getIdeal()[0];
            if (x > thresh) {
                if (y > 0.5) {
                    tp++;
                } else {
                    fp++;
                }
            } else {
                if (y < 0.5) {
                    tn++;
                } else {
                    fn++;
                }
            }
        }

        double tpr = ((double) tp) / (tp + fn);
        double fpr = ((double) fp) / (fp + tn);

        double[] result = new double[2];
        result[0] = fpr;
        result[1] = tpr;
        return result;
    }

    /**
     * Runs five annealing iterations, then rebuilds the ROC series by sampling
     * thresholds 0.0, 0.1, ..., 1.0 plus the (0,0) and (1,1) anchor points.
     * Points with a duplicate FPR are collapsed by the TreeSet ordering.
     */
    private void updateChart() {
        for (int i = 0; i < 5; i++) {
            this.trainer.iteration();
        }
        System.out.println(this.trainer.getLastError());

        Set<RocPair> list = new TreeSet<>();
        list.add(new RocPair(0, 0));
        for (int i = 0; i <= 10; i++) {
            double[] tpfp = calculateTruePositiveFalsePositive(i / 10.0);
            list.add(new RocPair(tpfp[0], tpfp[1]));
        }
        list.add(new RocPair(1, 1));

        this.dataSeries1.clear();
        for (RocPair pair : list) {
            this.dataSeries1.add(pair.getX(), pair.getY());
        }
    }

    public static void main(String[] args) {
        JFrame prg = new LearnIrisAnnealROC();
        prg.setVisible(true);
    }

    /**
     * Invoked when an action occurs: Anneal trains further and redraws;
     * Reset re-randomizes the network weights first.
     *
     * @param e the button event.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        if (e.getSource() == this.buttonAnneal) {
            updateChart();
        } else if (e.getSource() == this.buttonReset) {
            this.network.reset(new MersenneTwisterGenerateRandom());
            updateChart();
        }
    }
}
/** * Copyright (c) 2016-present, RxJava Contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the specific language governing permissions and limitations under the License. */ package io.reactivex.internal.operators.flowable; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; import java.io.IOException; import java.lang.management.ManagementFactory; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import org.junit.Test; import org.mockito.InOrder; import org.reactivestreams.*; import io.reactivex.*; import io.reactivex.disposables.*; import io.reactivex.exceptions.*; import io.reactivex.flowables.ConnectableFlowable; import io.reactivex.functions.*; import io.reactivex.internal.functions.Functions; import io.reactivex.internal.operators.flowable.FlowableRefCount.RefConnection; import io.reactivex.internal.subscriptions.BooleanSubscription; import io.reactivex.internal.util.ExceptionHelper; import io.reactivex.plugins.RxJavaPlugins; import io.reactivex.processors.*; import io.reactivex.schedulers.*; import io.reactivex.subscribers.TestSubscriber; public class FlowableRefCountTest { @Test public void testRefCountAsync() { final AtomicInteger subscribeCount = new AtomicInteger(); final AtomicInteger nextCount = new AtomicInteger(); Flowable<Long> r = Flowable.interval(0, 20, TimeUnit.MILLISECONDS) .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) { subscribeCount.incrementAndGet(); } }) .doOnNext(new 
Consumer<Long>() { @Override public void accept(Long l) { nextCount.incrementAndGet(); } }) .publish().refCount(); final AtomicInteger receivedCount = new AtomicInteger(); Disposable d1 = r.subscribe(new Consumer<Long>() { @Override public void accept(Long l) { receivedCount.incrementAndGet(); } }); Disposable d2 = r.subscribe(); try { Thread.sleep(10); } catch (InterruptedException e) { } for (;;) { int a = nextCount.get(); int b = receivedCount.get(); if (a > 10 && a < 20 && a == b) { break; } if (a >= 20) { break; } try { Thread.sleep(20); } catch (InterruptedException e) { } } // give time to emit // now unsubscribe d2.dispose(); // unsubscribe s2 first as we're counting in 1 and there can be a race between unsubscribe and one subscriber getting a value but not the other d1.dispose(); System.out.println("onNext: " + nextCount.get()); // should emit once for both subscribers assertEquals(nextCount.get(), receivedCount.get()); // only 1 subscribe assertEquals(1, subscribeCount.get()); } @Test public void testRefCountSynchronous() { final AtomicInteger subscribeCount = new AtomicInteger(); final AtomicInteger nextCount = new AtomicInteger(); Flowable<Integer> r = Flowable.just(1, 2, 3, 4, 5, 6, 7, 8, 9) .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) { subscribeCount.incrementAndGet(); } }) .doOnNext(new Consumer<Integer>() { @Override public void accept(Integer l) { nextCount.incrementAndGet(); } }) .publish().refCount(); final AtomicInteger receivedCount = new AtomicInteger(); Disposable d1 = r.subscribe(new Consumer<Integer>() { @Override public void accept(Integer l) { receivedCount.incrementAndGet(); } }); Disposable d2 = r.subscribe(); // give time to emit try { Thread.sleep(50); } catch (InterruptedException e) { } // now unsubscribe d2.dispose(); // unsubscribe s2 first as we're counting in 1 and there can be a race between unsubscribe and one subscriber getting a value but not the other d1.dispose(); 
System.out.println("onNext Count: " + nextCount.get()); // it will emit twice because it is synchronous assertEquals(nextCount.get(), receivedCount.get() * 2); // it will subscribe twice because it is synchronous assertEquals(2, subscribeCount.get()); } @Test public void testRefCountSynchronousTake() { final AtomicInteger nextCount = new AtomicInteger(); Flowable<Integer> r = Flowable.just(1, 2, 3, 4, 5, 6, 7, 8, 9) .doOnNext(new Consumer<Integer>() { @Override public void accept(Integer l) { System.out.println("onNext --------> " + l); nextCount.incrementAndGet(); } }) .take(4) .publish().refCount(); final AtomicInteger receivedCount = new AtomicInteger(); r.subscribe(new Consumer<Integer>() { @Override public void accept(Integer l) { receivedCount.incrementAndGet(); } }); System.out.println("onNext: " + nextCount.get()); assertEquals(4, receivedCount.get()); assertEquals(4, receivedCount.get()); } @Test public void testRepeat() { final AtomicInteger subscribeCount = new AtomicInteger(); final AtomicInteger unsubscribeCount = new AtomicInteger(); Flowable<Long> r = Flowable.interval(0, 1, TimeUnit.MILLISECONDS) .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) { System.out.println("******************************* Subscribe received"); // when we are subscribed subscribeCount.incrementAndGet(); } }) .doOnCancel(new Action() { @Override public void run() { System.out.println("******************************* Unsubscribe received"); // when we are unsubscribed unsubscribeCount.incrementAndGet(); } }) .publish().refCount(); for (int i = 0; i < 10; i++) { TestSubscriber<Long> ts1 = new TestSubscriber<Long>(); TestSubscriber<Long> ts2 = new TestSubscriber<Long>(); r.subscribe(ts1); r.subscribe(ts2); try { Thread.sleep(50); } catch (InterruptedException e) { } ts1.dispose(); ts2.dispose(); ts1.assertNoErrors(); ts2.assertNoErrors(); assertTrue(ts1.valueCount() > 0); assertTrue(ts2.valueCount() > 0); } assertEquals(10, 
subscribeCount.get()); assertEquals(10, unsubscribeCount.get()); } @Test public void testConnectUnsubscribe() throws InterruptedException { final CountDownLatch unsubscribeLatch = new CountDownLatch(1); final CountDownLatch subscribeLatch = new CountDownLatch(1); Flowable<Long> f = synchronousInterval() .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) { System.out.println("******************************* Subscribe received"); // when we are subscribed subscribeLatch.countDown(); } }) .doOnCancel(new Action() { @Override public void run() { System.out.println("******************************* Unsubscribe received"); // when we are unsubscribed unsubscribeLatch.countDown(); } }); TestSubscriber<Long> s = new TestSubscriber<Long>(); f.publish().refCount().subscribeOn(Schedulers.newThread()).subscribe(s); System.out.println("send unsubscribe"); // wait until connected subscribeLatch.await(); // now unsubscribe s.dispose(); System.out.println("DONE sending unsubscribe ... 
now waiting"); if (!unsubscribeLatch.await(3000, TimeUnit.MILLISECONDS)) { System.out.println("Errors: " + s.errors()); if (s.errors().size() > 0) { s.errors().get(0).printStackTrace(); } fail("timed out waiting for unsubscribe"); } s.assertNoErrors(); } @Test public void testConnectUnsubscribeRaceConditionLoop() throws InterruptedException { for (int i = 0; i < 100; i++) { testConnectUnsubscribeRaceCondition(); } } @Test public void testConnectUnsubscribeRaceCondition() throws InterruptedException { final AtomicInteger subUnsubCount = new AtomicInteger(); Flowable<Long> f = synchronousInterval() .doOnCancel(new Action() { @Override public void run() { System.out.println("******************************* Unsubscribe received"); // when we are unsubscribed subUnsubCount.decrementAndGet(); } }) .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) { System.out.println("******************************* SUBSCRIBE received"); subUnsubCount.incrementAndGet(); } }); TestSubscriber<Long> s = new TestSubscriber<Long>(); f.publish().refCount().subscribeOn(Schedulers.computation()).subscribe(s); System.out.println("send unsubscribe"); // now immediately unsubscribe while subscribeOn is racing to subscribe s.dispose(); // this generally will mean it won't even subscribe as it is already unsubscribed by the time connect() gets scheduled // give time to the counter to update Thread.sleep(10); // either we subscribed and then unsubscribed, or we didn't ever even subscribe assertEquals(0, subUnsubCount.get()); System.out.println("DONE sending unsubscribe ... now waiting"); System.out.println("Errors: " + s.errors()); if (s.errors().size() > 0) { s.errors().get(0).printStackTrace(); } s.assertNoErrors(); } private Flowable<Long> synchronousInterval() { return Flowable.unsafeCreate(new Publisher<Long>() { @Override public void subscribe(Subscriber<? 
super Long> subscriber) { final AtomicBoolean cancel = new AtomicBoolean(); subscriber.onSubscribe(new Subscription() { @Override public void request(long n) { } @Override public void cancel() { cancel.set(true); } }); for (;;) { if (cancel.get()) { break; } try { Thread.sleep(100); } catch (InterruptedException e) { } subscriber.onNext(1L); } } }); } @Test public void onlyFirstShouldSubscribeAndLastUnsubscribe() { final AtomicInteger subscriptionCount = new AtomicInteger(); final AtomicInteger unsubscriptionCount = new AtomicInteger(); Flowable<Integer> flowable = Flowable.unsafeCreate(new Publisher<Integer>() { @Override public void subscribe(Subscriber<? super Integer> subscriber) { subscriptionCount.incrementAndGet(); subscriber.onSubscribe(new Subscription() { @Override public void request(long n) { } @Override public void cancel() { unsubscriptionCount.incrementAndGet(); } }); } }); Flowable<Integer> refCounted = flowable.publish().refCount(); Disposable first = refCounted.subscribe(); assertEquals(1, subscriptionCount.get()); Disposable second = refCounted.subscribe(); assertEquals(1, subscriptionCount.get()); first.dispose(); assertEquals(0, unsubscriptionCount.get()); second.dispose(); assertEquals(1, unsubscriptionCount.get()); } @Test public void testRefCount() { TestScheduler s = new TestScheduler(); Flowable<Long> interval = Flowable.interval(100, TimeUnit.MILLISECONDS, s).publish().refCount(); // subscribe list1 final List<Long> list1 = new ArrayList<Long>(); Disposable d1 = interval.subscribe(new Consumer<Long>() { @Override public void accept(Long t1) { list1.add(t1); } }); s.advanceTimeBy(200, TimeUnit.MILLISECONDS); assertEquals(2, list1.size()); assertEquals(0L, list1.get(0).longValue()); assertEquals(1L, list1.get(1).longValue()); // subscribe list2 final List<Long> list2 = new ArrayList<Long>(); Disposable d2 = interval.subscribe(new Consumer<Long>() { @Override public void accept(Long t1) { list2.add(t1); } }); s.advanceTimeBy(300, 
TimeUnit.MILLISECONDS); // list 1 should have 5 items assertEquals(5, list1.size()); assertEquals(2L, list1.get(2).longValue()); assertEquals(3L, list1.get(3).longValue()); assertEquals(4L, list1.get(4).longValue()); // list 2 should only have 3 items assertEquals(3, list2.size()); assertEquals(2L, list2.get(0).longValue()); assertEquals(3L, list2.get(1).longValue()); assertEquals(4L, list2.get(2).longValue()); // unsubscribe list1 d1.dispose(); // advance further s.advanceTimeBy(300, TimeUnit.MILLISECONDS); // list 1 should still have 5 items assertEquals(5, list1.size()); // list 2 should have 6 items assertEquals(6, list2.size()); assertEquals(5L, list2.get(3).longValue()); assertEquals(6L, list2.get(4).longValue()); assertEquals(7L, list2.get(5).longValue()); // unsubscribe list2 d2.dispose(); // advance further s.advanceTimeBy(1000, TimeUnit.MILLISECONDS); // subscribing a new one should start over because the source should have been unsubscribed // subscribe list3 final List<Long> list3 = new ArrayList<Long>(); interval.subscribe(new Consumer<Long>() { @Override public void accept(Long t1) { list3.add(t1); } }); s.advanceTimeBy(200, TimeUnit.MILLISECONDS); assertEquals(2, list3.size()); assertEquals(0L, list3.get(0).longValue()); assertEquals(1L, list3.get(1).longValue()); } @Test public void testAlreadyUnsubscribedClient() { Subscriber<Integer> done = CancelledSubscriber.INSTANCE; Subscriber<Integer> subscriber = TestHelper.mockSubscriber(); Flowable<Integer> result = Flowable.just(1).publish().refCount(); result.subscribe(done); result.subscribe(subscriber); verify(subscriber).onNext(1); verify(subscriber).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); } @Test public void testAlreadyUnsubscribedInterleavesWithClient() { ReplayProcessor<Integer> source = ReplayProcessor.create(); Subscriber<Integer> done = CancelledSubscriber.INSTANCE; Subscriber<Integer> subscriber = TestHelper.mockSubscriber(); InOrder inOrder = 
inOrder(subscriber); Flowable<Integer> result = source.publish().refCount(); result.subscribe(subscriber); source.onNext(1); result.subscribe(done); source.onNext(2); source.onComplete(); inOrder.verify(subscriber).onNext(1); inOrder.verify(subscriber).onNext(2); inOrder.verify(subscriber).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); } @Test public void testConnectDisconnectConnectAndSubjectState() { Flowable<Integer> f1 = Flowable.just(10); Flowable<Integer> f2 = Flowable.just(20); Flowable<Integer> combined = Flowable.combineLatest(f1, f2, new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer t1, Integer t2) { return t1 + t2; } }) .publish().refCount(); TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>(); TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>(); combined.subscribe(ts1); combined.subscribe(ts2); ts1.assertTerminated(); ts1.assertNoErrors(); ts1.assertValue(30); ts2.assertTerminated(); ts2.assertNoErrors(); ts2.assertValue(30); } @Test(timeout = 10000) public void testUpstreamErrorAllowsRetry() throws InterruptedException { List<Throwable> errors = TestHelper.trackPluginErrors(); try { final AtomicInteger intervalSubscribed = new AtomicInteger(); Flowable<String> interval = Flowable.interval(200, TimeUnit.MILLISECONDS) .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) { System.out.println("Subscribing to interval " + intervalSubscribed.incrementAndGet()); } } ) .flatMap(new Function<Long, Publisher<String>>() { @Override public Publisher<String> apply(Long t1) { return Flowable.defer(new Callable<Publisher<String>>() { @Override public Publisher<String> call() { return Flowable.<String>error(new TestException("Some exception")); } }); } }) .onErrorResumeNext(new Function<Throwable, Publisher<String>>() { @Override public Publisher<String> apply(Throwable t1) { return Flowable.error(t1); } }) .publish() .refCount(); interval .doOnError(new 
Consumer<Throwable>() { @Override public void accept(Throwable t1) { System.out.println("Subscriber 1 onError: " + t1); } }) .retry(5) .subscribe(new Consumer<String>() { @Override public void accept(String t1) { System.out.println("Subscriber 1: " + t1); } }); Thread.sleep(100); interval .doOnError(new Consumer<Throwable>() { @Override public void accept(Throwable t1) { System.out.println("Subscriber 2 onError: " + t1); } }) .retry(5) .subscribe(new Consumer<String>() { @Override public void accept(String t1) { System.out.println("Subscriber 2: " + t1); } }); Thread.sleep(1300); System.out.println(intervalSubscribed.get()); assertEquals(6, intervalSubscribed.get()); TestHelper.assertError(errors, 0, OnErrorNotImplementedException.class); } finally { RxJavaPlugins.reset(); } } private enum CancelledSubscriber implements FlowableSubscriber<Integer> { INSTANCE; @Override public void onSubscribe(Subscription s) { s.cancel(); } @Override public void onNext(Integer o) { } @Override public void onError(Throwable t) { } @Override public void onComplete() { } } @Test public void disposed() { TestHelper.checkDisposed(Flowable.just(1).publish().refCount()); } @Test public void noOpConnect() { final int[] calls = { 0 }; Flowable<Integer> f = new ConnectableFlowable<Integer>() { @Override public void connect(Consumer<? super Disposable> connection) { calls[0]++; } @Override protected void subscribeActual(Subscriber<? 
super Integer> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); } }.refCount(); f.test(); f.test(); assertEquals(1, calls[0]); } Flowable<Object> source; @Test public void replayNoLeak() throws Exception { System.gc(); Thread.sleep(100); long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = Flowable.fromCallable(new Callable<Object>() { @Override public Object call() throws Exception { return new byte[100 * 1000 * 1000]; } }) .replay(1) .refCount(); source.subscribe(); System.gc(); Thread.sleep(100); long after = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = null; assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after); } @Test public void replayNoLeak2() throws Exception { System.gc(); Thread.sleep(100); long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = Flowable.fromCallable(new Callable<Object>() { @Override public Object call() throws Exception { return new byte[100 * 1000 * 1000]; } }).concatWith(Flowable.never()) .replay(1) .refCount(); Disposable d1 = source.subscribe(); Disposable d2 = source.subscribe(); d1.dispose(); d2.dispose(); d1 = null; d2 = null; System.gc(); Thread.sleep(100); long after = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = null; assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after); } static final class ExceptionData extends Exception { private static final long serialVersionUID = -6763898015338136119L; public final Object data; ExceptionData(Object data) { this.data = data; } } @Test public void publishNoLeak() throws Exception { System.gc(); Thread.sleep(100); long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = Flowable.fromCallable(new Callable<Object>() { @Override public Object call() throws Exception { throw new ExceptionData(new byte[100 * 1000 * 1000]); } }) .publish() 
.refCount(); source.subscribe(Functions.emptyConsumer(), Functions.emptyConsumer()); System.gc(); Thread.sleep(100); long after = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = null; assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after); } @Test public void publishNoLeak2() throws Exception { System.gc(); Thread.sleep(100); long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = Flowable.fromCallable(new Callable<Object>() { @Override public Object call() throws Exception { return new byte[100 * 1000 * 1000]; } }).concatWith(Flowable.never()) .publish() .refCount(); Disposable d1 = source.test(); Disposable d2 = source.test(); d1.dispose(); d2.dispose(); d1 = null; d2 = null; System.gc(); Thread.sleep(100); long after = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); source = null; assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after); } @Test public void replayIsUnsubscribed() { ConnectableFlowable<Integer> cf = Flowable.just(1) .replay(); if (cf instanceof Disposable) { assertTrue(((Disposable)cf).isDisposed()); Disposable connection = cf.connect(); assertFalse(((Disposable)cf).isDisposed()); connection.dispose(); assertTrue(((Disposable)cf).isDisposed()); } } static final class BadFlowableSubscribe extends ConnectableFlowable<Object> { @Override public void connect(Consumer<? super Disposable> connection) { try { connection.accept(Disposables.empty()); } catch (Throwable ex) { throw ExceptionHelper.wrapOrThrow(ex); } } @Override protected void subscribeActual(Subscriber<? 
super Object> subscriber) { throw new TestException("subscribeActual"); } } static final class BadFlowableDispose extends ConnectableFlowable<Object> implements Disposable { @Override public void dispose() { throw new TestException("dispose"); } @Override public boolean isDisposed() { return false; } @Override public void connect(Consumer<? super Disposable> connection) { try { connection.accept(Disposables.empty()); } catch (Throwable ex) { throw ExceptionHelper.wrapOrThrow(ex); } } @Override protected void subscribeActual(Subscriber<? super Object> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); } } static final class BadFlowableConnect extends ConnectableFlowable<Object> { @Override public void connect(Consumer<? super Disposable> connection) { throw new TestException("connect"); } @Override protected void subscribeActual(Subscriber<? super Object> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); } } @Test public void badSourceSubscribe() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { BadFlowableSubscribe bo = new BadFlowableSubscribe(); try { bo.refCount() .test(); fail("Should have thrown"); } catch (NullPointerException ex) { assertTrue(ex.getCause() instanceof TestException); } TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void badSourceDispose() { BadFlowableDispose bf = new BadFlowableDispose(); try { bf.refCount() .test() .cancel(); fail("Should have thrown"); } catch (TestException expected) { } } @Test public void badSourceConnect() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { BadFlowableConnect bf = new BadFlowableConnect(); try { bf.refCount() .test(); fail("Should have thrown"); } catch (NullPointerException ex) { assertTrue(ex.getCause() instanceof TestException); } TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } static final class BadFlowableSubscribe2 
extends ConnectableFlowable<Object> { int count; @Override public void connect(Consumer<? super Disposable> connection) { try { connection.accept(Disposables.empty()); } catch (Throwable ex) { throw ExceptionHelper.wrapOrThrow(ex); } } @Override protected void subscribeActual(Subscriber<? super Object> subscriber) { if (++count == 1) { subscriber.onSubscribe(new BooleanSubscription()); } else { throw new TestException("subscribeActual"); } } } @Test public void badSourceSubscribe2() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { BadFlowableSubscribe2 bf = new BadFlowableSubscribe2(); Flowable<Object> f = bf.refCount(); f.test(); try { f.test(); fail("Should have thrown"); } catch (NullPointerException ex) { assertTrue(ex.getCause() instanceof TestException); } TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } static final class BadFlowableConnect2 extends ConnectableFlowable<Object> implements Disposable { @Override public void connect(Consumer<? super Disposable> connection) { try { connection.accept(Disposables.empty()); } catch (Throwable ex) { throw ExceptionHelper.wrapOrThrow(ex); } } @Override protected void subscribeActual(Subscriber<? 
super Object> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); subscriber.onComplete(); } @Override public void dispose() { throw new TestException("dispose"); } @Override public boolean isDisposed() { return false; } } @Test public void badSourceCompleteDisconnect() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { BadFlowableConnect2 bf = new BadFlowableConnect2(); try { bf.refCount() .test(); fail("Should have thrown"); } catch (NullPointerException ex) { assertTrue(ex.getCause() instanceof TestException); } TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test(timeout = 7500) public void blockingSourceAsnycCancel() throws Exception { BehaviorProcessor<Integer> bp = BehaviorProcessor.createDefault(1); Flowable<Integer> f = bp .replay(1) .refCount(); f.subscribe(); final AtomicBoolean interrupted = new AtomicBoolean(); f.switchMap(new Function<Integer, Publisher<? extends Object>>() { @Override public Publisher<? 
extends Object> apply(Integer v) throws Exception { return Flowable.create(new FlowableOnSubscribe<Object>() { @Override public void subscribe(FlowableEmitter<Object> emitter) throws Exception { while (!emitter.isCancelled()) { Thread.sleep(100); } interrupted.set(true); } }, BackpressureStrategy.MISSING); } }) .takeUntil(Flowable.timer(500, TimeUnit.MILLISECONDS)) .test() .awaitDone(5, TimeUnit.SECONDS) .assertResult(); assertTrue(interrupted.get()); } @Test public void byCount() { final int[] subscriptions = { 0 }; Flowable<Integer> source = Flowable.range(1, 5) .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) throws Exception { subscriptions[0]++; } }) .publish() .refCount(2); for (int i = 0; i < 3; i++) { TestSubscriber<Integer> ts1 = source.test(); ts1.assertEmpty(); TestSubscriber<Integer> ts2 = source.test(); ts1.assertResult(1, 2, 3, 4, 5); ts2.assertResult(1, 2, 3, 4, 5); } assertEquals(3, subscriptions[0]); } @Test public void resubscribeBeforeTimeout() throws Exception { final int[] subscriptions = { 0 }; PublishProcessor<Integer> pp = PublishProcessor.create(); Flowable<Integer> source = pp .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) throws Exception { subscriptions[0]++; } }) .publish() .refCount(500, TimeUnit.MILLISECONDS); TestSubscriber<Integer> ts1 = source.test(0); assertEquals(1, subscriptions[0]); ts1.cancel(); Thread.sleep(100); ts1 = source.test(0); assertEquals(1, subscriptions[0]); Thread.sleep(500); assertEquals(1, subscriptions[0]); pp.onNext(1); pp.onNext(2); pp.onNext(3); pp.onNext(4); pp.onNext(5); pp.onComplete(); ts1.requestMore(5) .assertResult(1, 2, 3, 4, 5); } @Test public void letitTimeout() throws Exception { final int[] subscriptions = { 0 }; PublishProcessor<Integer> pp = PublishProcessor.create(); Flowable<Integer> source = pp .doOnSubscribe(new Consumer<Subscription>() { @Override public void accept(Subscription s) throws Exception { 
subscriptions[0]++; } }) .publish() .refCount(1, 100, TimeUnit.MILLISECONDS); TestSubscriber<Integer> ts1 = source.test(0); assertEquals(1, subscriptions[0]); ts1.cancel(); assertTrue(pp.hasSubscribers()); Thread.sleep(200); assertFalse(pp.hasSubscribers()); } @Test public void error() { Flowable.<Integer>error(new IOException()) .publish() .refCount(500, TimeUnit.MILLISECONDS) .test() .assertFailure(IOException.class); } @Test public void comeAndGo() { PublishProcessor<Integer> pp = PublishProcessor.create(); Flowable<Integer> source = pp .publish() .refCount(1); TestSubscriber<Integer> ts1 = source.test(0); assertTrue(pp.hasSubscribers()); for (int i = 0; i < 3; i++) { TestSubscriber<Integer> ts2 = source.test(); ts1.cancel(); ts1 = ts2; } ts1.cancel(); assertFalse(pp.hasSubscribers()); } @Test public void unsubscribeSubscribeRace() { for (int i = 0; i < 1000; i++) { final Flowable<Integer> source = Flowable.range(1, 5) .replay() .refCount(1) ; final TestSubscriber<Integer> ts1 = source.test(0); final TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>(0); Runnable r1 = new Runnable() { @Override public void run() { ts1.cancel(); } }; Runnable r2 = new Runnable() { @Override public void run() { source.subscribe(ts2); } }; TestHelper.race(r1, r2, Schedulers.single()); ts2.requestMore(6) // FIXME RxJava replay() doesn't issue onComplete without request .withTag("Round: " + i) .assertResult(1, 2, 3, 4, 5); } } static final class BadFlowableDoubleOnX extends ConnectableFlowable<Object> implements Disposable { @Override public void connect(Consumer<? super Disposable> connection) { try { connection.accept(Disposables.empty()); } catch (Throwable ex) { throw ExceptionHelper.wrapOrThrow(ex); } } @Override protected void subscribeActual(Subscriber<? 
super Object> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); subscriber.onSubscribe(new BooleanSubscription()); subscriber.onComplete(); subscriber.onComplete(); subscriber.onError(new TestException()); } @Override public void dispose() { } @Override public boolean isDisposed() { return false; } } @Test public void doubleOnX() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new BadFlowableDoubleOnX() .refCount() .test() .assertResult(); TestHelper.assertError(errors, 0, ProtocolViolationException.class); TestHelper.assertUndeliverable(errors, 1, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void doubleOnXCount() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new BadFlowableDoubleOnX() .refCount(1) .test() .assertResult(); TestHelper.assertError(errors, 0, ProtocolViolationException.class); TestHelper.assertUndeliverable(errors, 1, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void doubleOnXTime() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new BadFlowableDoubleOnX() .refCount(5, TimeUnit.SECONDS, Schedulers.single()) .test() .assertResult(); TestHelper.assertError(errors, 0, ProtocolViolationException.class); TestHelper.assertUndeliverable(errors, 1, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void cancelTerminateStateExclusion() { FlowableRefCount<Object> o = (FlowableRefCount<Object>)PublishProcessor.create() .publish() .refCount(); o.cancel(null); RefConnection rc = new RefConnection(o); o.connection = null; rc.subscriberCount = 0; o.timeout(rc); rc.subscriberCount = 1; o.timeout(rc); o.connection = rc; o.timeout(rc); rc.subscriberCount = 0; o.timeout(rc); // ------------------- rc.subscriberCount = 2; rc.connected = false; o.connection = rc; o.cancel(rc); rc.subscriberCount = 1; rc.connected = false; o.connection = rc; o.cancel(rc); rc.subscriberCount = 2; rc.connected = true; o.connection = rc; 
o.cancel(rc); rc.subscriberCount = 1; rc.connected = true; o.connection = rc; o.cancel(rc); o.connection = rc; o.cancel(new RefConnection(o)); } @Test public void replayRefCountShallBeThreadSafe() { for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) { Flowable<Integer> flowable = Flowable.just(1).replay(1).refCount(); TestSubscriber<Integer> ts1 = flowable .subscribeOn(Schedulers.io()) .test(); TestSubscriber<Integer> ts2 = flowable .subscribeOn(Schedulers.io()) .test(); ts1 .withTag("" + i) .awaitDone(5, TimeUnit.SECONDS) .assertResult(1); ts2 .withTag("" + i) .awaitDone(5, TimeUnit.SECONDS) .assertResult(1); } } static final class TestConnectableFlowable<T> extends ConnectableFlowable<T> implements Disposable { volatile boolean disposed; @Override public void dispose() { disposed = true; } @Override public boolean isDisposed() { return disposed; } @Override public void connect(Consumer<? super Disposable> connection) { // not relevant } @Override protected void subscribeActual(Subscriber<? super T> subscriber) { // not relevant } } @Test public void timeoutDisposesSource() { FlowableRefCount<Object> o = (FlowableRefCount<Object>)new TestConnectableFlowable<Object>().refCount(); RefConnection rc = new RefConnection(o); o.connection = rc; o.timeout(rc); assertTrue(((Disposable)o.source).isDisposed()); } @Test public void disconnectBeforeConnect() { BehaviorProcessor<Integer> processor = BehaviorProcessor.create(); Flowable<Integer> flowable = processor .replay(1) .refCount(); flowable.takeUntil(Flowable.just(1)).test(); processor.onNext(2); flowable.take(1).test().assertResult(2); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution; import com.google.common.annotations.VisibleForTesting; import org.apache.spark.memory.TaskMemoryManager; import org.apache.spark.sql.types.StructType; import org.apache.spark.unsafe.KVIterator; import org.apache.spark.unsafe.Platform; import org.apache.spark.unsafe.map.BytesToBytesMap; import org.apache.spark.unsafe.memory.MemoryBlock; import org.apache.spark.util.collection.unsafe.sort.PrefixComparator; import org.apache.spark.util.collection.unsafe.sort.RecordComparator; import org.apache.spark.util.collection.unsafe.sort.UnsafeExternalSorter; import org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter; import org.apache.spark.util.collection.unsafe.sort.UnsafeSorterIterator; import java.io.IOException; import javax.annotation.Nullable; import io.indexr.query.TaskContext; import io.indexr.query.row.UnsafeRow; import io.indexr.query.util.BaseOrdering; import io.indexr.query.util.SortPrefixUtils; /** * A class for performing external sorting on key-value records. Both key and value are UnsafeRows. * * Note that this class allows optionally passing in a {@link BytesToBytesMap} directly in order * to perform in-place sorting of records in the map. 
*/
public final class UnsafeKVExternalSorter {

  // Schema of the key rows; drives prefix generation, comparators and key UnsafeRow sizing.
  private final StructType keySchema;
  // Schema of the value rows; only needed to size the value UnsafeRow in KVSorterIterator.
  private final StructType valueSchema;
  // Computes a 64-bit sort prefix from a key row so most comparisons avoid full key decoding.
  private final UnsafeExternalRowSorter.PrefixComputer prefixComputer;
  // Underlying sorter that owns the record pages and any spill files.
  private final UnsafeExternalSorter sorter;

  /**
   * Creates an empty sorter.
   *
   * @param keySchema schema of the keys to be sorted on
   * @param valueSchema schema of the values carried along with each key
   * @param pageSizeBytes size of the memory pages the sorter allocates
   * @throws IOException if the underlying sorter fails to initialize
   */
  public UnsafeKVExternalSorter(
      StructType keySchema, StructType valueSchema,
      //BlockManager blockManager,
      long pageSizeBytes) throws IOException {
    this(keySchema, valueSchema, pageSizeBytes, null);
  }

  /**
   * Creates a sorter, optionally seeded with the records already stored in {@code map}.
   * When {@code map} is non-null the map's existing pages are sorted in place (only the
   * pointer/prefix array is newly allocated) and the map is reset afterwards for reuse.
   *
   * @param map an optional BytesToBytesMap whose records are absorbed into this sorter
   * @throws IOException if the underlying sorter fails to initialize
   */
  public UnsafeKVExternalSorter(
      StructType keySchema, StructType valueSchema,
      //BlockManager blockManager,
      long pageSizeBytes, @Nullable BytesToBytesMap map) throws IOException {
    this.keySchema = keySchema;
    this.valueSchema = valueSchema;
    final TaskContext taskContext = TaskContext.get();

    prefixComputer = SortPrefixUtils.createPrefixGenerator(keySchema);
    PrefixComparator prefixComparator = SortPrefixUtils.getPrefixComparator(keySchema);
    BaseOrdering ordering = BaseOrdering.create(keySchema);
    KVComparator recordComparator = new KVComparator(ordering, keySchema.size());

    TaskMemoryManager taskMemoryManager = taskContext.taskMemoryManager();

    if (map == null) {
      // Fresh, empty sorter: records will be added through insertKV().
      sorter = UnsafeExternalSorter.create(
        taskMemoryManager,
        //blockManager,
        taskContext,
        recordComparator,
        prefixComparator,
        /* initialSize */ 4096,
        pageSizeBytes);
    } else {
      // During spilling, the array in map will not be used, so we can borrow that and use it
      // as the underline array for in-memory sorter (it's always large enough).
      // Since we will not grow the array, it's fine to pass `null` as consumer.
      final UnsafeInMemorySorter inMemSorter = new UnsafeInMemorySorter(
        null, taskMemoryManager, recordComparator, prefixComparator, map.getArray());

      // We cannot use the destructive iterator here because we are reusing the existing memory
      // pages in BytesToBytesMap to hold records during sorting.
      // The only new memory we are allocating is the pointer/prefix array.
      BytesToBytesMap.MapIterator iter = map.iterator();
      final int numKeyFields = keySchema.size();
      UnsafeRow row = new UnsafeRow(numKeyFields);
      while (iter.hasNext()) {
        final BytesToBytesMap.Location loc = iter.next();
        final Object baseObject = loc.getKeyAddress().getBaseObject();
        final long baseOffset = loc.getKeyAddress().getBaseOffset();

        // Get encoded memory address
        // baseObject + baseOffset point to the beginning of the key data in the map, but that
        // the KV-pair's length data is stored in the word immediately before that address
        MemoryBlock page = loc.getMemoryPage();
        long address = taskMemoryManager.encodePageNumberAndOffset(page, baseOffset - 8);

        // Compute prefix
        row.pointTo(baseObject, baseOffset, loc.getKeyLength());
        final long prefix = prefixComputer.computePrefix(row);

        inMemSorter.insertRecord(address, prefix);
      }

      sorter = UnsafeExternalSorter.createWithExistingInMemorySorter(
        taskMemoryManager,
        //blockManager,
        taskContext,
        new KVComparator(ordering, keySchema.size()),
        prefixComparator,
        /* initialSize */ 4096,
        pageSizeBytes,
        inMemSorter);

      // reset the map, so we can re-use it to insert new records. the inMemSorter will not used
      // anymore, so the underline array could be used by map again.
      map.reset();
    }
  }

  /**
   * Inserts a key-value record into the sorter. If the sorter no longer has enough memory to hold
   * the record, the sorter sorts the existing records in-memory, writes them out as partially
   * sorted runs, and then reallocates memory to hold the new record.
   */
  public void insertKV(UnsafeRow key, UnsafeRow value) throws IOException {
    final long prefix = prefixComputer.computePrefix(key);
    sorter.insertKVRecord(
      key.getBaseObject(), key.getBaseOffset(), key.getSizeInBytes(),
      value.getBaseObject(), value.getBaseOffset(), value.getSizeInBytes(), prefix);
  }

  /**
   * Merges another UnsafeKVExternalSorter into `this`, the other one will be emptied.
   */
  public void merge(UnsafeKVExternalSorter other) throws IOException {
    sorter.merge(other.sorter);
  }

  /**
   * Returns a sorted iterator. It is the caller's responsibility to call `cleanupResources()`
   * after consuming this iterator.
   */
  public KVSorterIterator sortedIterator() throws IOException {
    try {
      final UnsafeSorterIterator underlying = sorter.getSortedIterator();
      if (!underlying.hasNext()) {
        // Since we won't ever call next() on an empty iterator, we need to clean up resources
        // here in order to prevent memory leaks.
        cleanupResources();
      }
      return new KVSorterIterator(underlying);
    } catch (IOException e) {
      // Free memory/spill files before propagating, otherwise they would leak with the task.
      cleanupResources();
      throw e;
    }
  }

  /**
   * Return the peak memory used so far, in bytes.
   */
  public long getPeakMemoryUsedBytes() {
    return sorter.getPeakMemoryUsedBytes();
  }

  /**
   * Marks the current page as no-more-space-available, and as a result, either allocate a
   * new page or spill when we see the next record.
   */
  @VisibleForTesting
  void closeCurrentPage() {
    sorter.closeCurrentPage();
  }

  /**
   * Frees this sorter's in-memory data structures and cleans up its spill files.
   */
  public void cleanupResources() {
    sorter.cleanupResources();
  }

  /**
   * Compares two records by decoding their key portions into reusable UnsafeRows and
   * delegating to the key ordering. Not thread-safe: row1/row2 are shared scratch objects.
   */
  private static final class KVComparator extends RecordComparator {
    private final BaseOrdering ordering;
    private final UnsafeRow row1;
    private final UnsafeRow row2;
    private final int numKeyFields;

    public KVComparator(BaseOrdering ordering, int numKeyFields) {
      this.numKeyFields = numKeyFields;
      this.row1 = new UnsafeRow(numKeyFields);
      this.row2 = new UnsafeRow(numKeyFields);
      this.ordering = ordering;
    }

    @Override
    public int compare(Object baseObj1, long baseOff1, Object baseObj2, long baseOff2) {
      // Note that since ordering doesn't need the total length of the record, we just pass -1
      // into the row.
      // The +4 skips the 4-byte key-length word stored at the start of each record.
      row1.pointTo(baseObj1, baseOff1 + 4, -1);
      row2.pointTo(baseObj2, baseOff2 + 4, -1);
      return ordering.compare(row1, row2);
    }
  }

  /**
   * Iterates the sorted records, exposing each as a (key, value) pair of UnsafeRows.
   * The returned rows are reused between calls to next(); copy them if they must outlive
   * the iteration step. Cleans up sorter resources when exhausted or on I/O failure.
   */
  public class KVSorterIterator extends KVIterator<UnsafeRow, UnsafeRow> {
    private UnsafeRow key = new UnsafeRow(keySchema.size());
    private UnsafeRow value = new UnsafeRow(valueSchema.size());
    private final UnsafeSorterIterator underlying;

    private KVSorterIterator(UnsafeSorterIterator underlying) {
      this.underlying = underlying;
    }

    @Override
    public boolean next() throws IOException {
      try {
        if (underlying.hasNext()) {
          underlying.loadNext();

          Object baseObj = underlying.getBaseObject();
          long recordOffset = underlying.getBaseOffset();
          int recordLen = underlying.getRecordLength();

          // Note that recordLen = keyLen + valueLen + 4 bytes (for the keyLen itself)
          int keyLen = Platform.getInt(baseObj, recordOffset);
          int valueLen = recordLen - keyLen - 4;
          key.pointTo(baseObj, recordOffset + 4, keyLen);
          value.pointTo(baseObj, recordOffset + 4 + keyLen, valueLen);

          return true;
        } else {
          // Null out the reused rows so stale data cannot be read after exhaustion.
          key = null;
          value = null;
          cleanupResources();
          return false;
        }
      } catch (IOException e) {
        cleanupResources();
        throw e;
      }
    }

    @Override
    public UnsafeRow getKey() {
      return key;
    }

    @Override
    public UnsafeRow getValue() {
      return value;
    }

    @Override
    public void close() {
      cleanupResources();
    }
  };
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; /** * */ public class 
TypeFieldMapper extends MetadataFieldMapper {

    /** Name of the metadata field holding each document's mapping type. */
    public static final String NAME = "_type";

    public static final String CONTENT_TYPE = "_type";

    /** Default field type for {@code _type}: indexed, untokenized keyword, not stored. */
    public static class Defaults {
        public static final String NAME = TypeFieldMapper.NAME;

        public static final MappedFieldType FIELD_TYPE = new TypeFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setStored(false);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setName(NAME);
            // Freeze so the shared default instance cannot be mutated after class init.
            FIELD_TYPE.freeze();
        }
    }

    public static class Builder extends MetadataFieldMapper.Builder<Builder, TypeFieldMapper> {

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            indexName = Defaults.NAME;
        }

        @Override
        public TypeFieldMapper build(BuilderContext context) {
            fieldType.setName(buildFullName(context));
            return new TypeFieldMapper(fieldType, context.indexSettings());
        }
    }

    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            // _type takes no user-supplied settings; reject any attempt to configure it.
            throw new MapperParsingException(NAME + " is not configurable");
        }

        @Override
        public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
            return new TypeFieldMapper(indexSettings, fieldType);
        }
    }

    static final class TypeFieldType extends MappedFieldType {

        public TypeFieldType() {
        }

        protected TypeFieldType(TypeFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new TypeFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Query termQuery(Object value, @Nullable QueryShardContext context) {
            if (indexOptions() == IndexOptions.NONE) {
                // _type is always indexed; reaching here indicates an internal invariant break.
                throw new AssertionError();
            }
            return new TypeQuery(indexedValueForSearch(value));
        }
    }

    /**
     * Query matching documents of a single type. Rewrites to match_all when every
     * document in the reader has that type, enabling downstream Lucene optimizations.
     */
    public static class TypeQuery extends Query {

        private final BytesRef type;

        public TypeQuery(BytesRef type) {
            this.type = Objects.requireNonNull(type);
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            Term term = new Term(CONTENT_TYPE, type);
            TermContext context = TermContext.build(reader.getContext(), term);
            if (context.docFreq() == reader.maxDoc()) {
                // All docs have the same type.
                // Using a match_all query will help Lucene perform some optimizations
                // For instance, match_all queries as filter clauses are automatically removed
                return new MatchAllDocsQuery();
            } else {
                return new ConstantScoreQuery(new TermQuery(term, context));
            }
        }

        @Override
        public boolean equals(Object obj) {
            if (super.equals(obj) == false) {
                return false;
            }
            TypeQuery that = (TypeQuery) obj;
            return type.equals(that.type);
        }

        @Override
        public int hashCode() {
            return 31 * super.hashCode() + type.hashCode();
        }

        @Override
        public String toString(String field) {
            return "_type:" + type;
        }

    }

    private TypeFieldMapper(Settings indexSettings, MappedFieldType existing) {
        // Clone any pre-existing field type so this mapper owns a mutable copy.
        this(existing == null ? defaultFieldType(indexSettings) : existing.clone(),
                indexSettings);
    }

    private TypeFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
        super(NAME, fieldType, defaultFieldType(indexSettings), indexSettings);
    }

    private static MappedFieldType defaultFieldType(Settings indexSettings) {
        MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone();
        Version indexCreated = Version.indexCreated(indexSettings);
        if (indexCreated.onOrAfter(Version.V_2_1_0)) {
            // Indices created on 2.1+ also store _type as doc values.
            defaultFieldType.setHasDocValues(true);
        }
        return defaultFieldType;
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
        // The _type field is emitted before the document body is parsed.
        super.parse(context);
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
    }

    @Override
    public Mapper parse(ParseContext context) throws IOException {
        // we parse in pre parse
        return null;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
            return;
        }
        fields.add(new Field(fieldType().name(), context.type(), fieldType()));
        if (fieldType().hasDocValues()) {
            fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(context.type())));
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // _type is not configurable, so nothing is rendered into the mapping output.
        return builder;
    }

    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        // do nothing here, no merging, but also no exception
    }
}
/* * Copyright 2012 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.heuristic.selector.move.generic; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.optaplanner.core.impl.testdata.util.PlannerAssert.assertAllCodesOfMoveSelector; import static org.optaplanner.core.impl.testdata.util.PlannerAssert.verifyPhaseLifecycle; import org.junit.jupiter.api.Test; import org.optaplanner.core.impl.domain.entity.descriptor.EntityDescriptor; import org.optaplanner.core.impl.heuristic.selector.SelectorTestUtils; import org.optaplanner.core.impl.heuristic.selector.entity.EntitySelector; import org.optaplanner.core.impl.phase.scope.AbstractPhaseScope; import org.optaplanner.core.impl.phase.scope.AbstractStepScope; import org.optaplanner.core.impl.solver.scope.DefaultSolverScope; import org.optaplanner.core.impl.testdata.domain.TestdataEntity; public class SwapMoveSelectorTest { @Test public void originalLeftEqualsRight() { EntitySelector entitySelector = SelectorTestUtils.mockEntitySelector(TestdataEntity.buildEntityDescriptor(), new TestdataEntity("a"), new TestdataEntity("b"), new TestdataEntity("c"), new TestdataEntity("d")); SwapMoveSelector moveSelector = new SwapMoveSelector(entitySelector, entitySelector, entitySelector.getEntityDescriptor().getGenuineVariableDescriptorList(), false); DefaultSolverScope solverScope = mock(DefaultSolverScope.class); 
moveSelector.solvingStarted(solverScope); AbstractPhaseScope phaseScopeA = mock(AbstractPhaseScope.class); when(phaseScopeA.getSolverScope()).thenReturn(solverScope); moveSelector.phaseStarted(phaseScopeA); AbstractStepScope stepScopeA1 = mock(AbstractStepScope.class); when(stepScopeA1.getPhaseScope()).thenReturn(phaseScopeA); moveSelector.stepStarted(stepScopeA1); assertAllCodesOfMoveSelector(moveSelector, "a<->b", "a<->c", "a<->d", "b<->c", "b<->d", "c<->d"); moveSelector.stepEnded(stepScopeA1); AbstractStepScope stepScopeA2 = mock(AbstractStepScope.class); when(stepScopeA2.getPhaseScope()).thenReturn(phaseScopeA); moveSelector.stepStarted(stepScopeA2); assertAllCodesOfMoveSelector(moveSelector, "a<->b", "a<->c", "a<->d", "b<->c", "b<->d", "c<->d"); moveSelector.stepEnded(stepScopeA2); moveSelector.phaseEnded(phaseScopeA); AbstractPhaseScope phaseScopeB = mock(AbstractPhaseScope.class); when(phaseScopeB.getSolverScope()).thenReturn(solverScope); moveSelector.phaseStarted(phaseScopeB); AbstractStepScope stepScopeB1 = mock(AbstractStepScope.class); when(stepScopeB1.getPhaseScope()).thenReturn(phaseScopeB); moveSelector.stepStarted(stepScopeB1); assertAllCodesOfMoveSelector(moveSelector, "a<->b", "a<->c", "a<->d", "b<->c", "b<->d", "c<->d"); moveSelector.stepEnded(stepScopeB1); AbstractStepScope stepScopeB2 = mock(AbstractStepScope.class); when(stepScopeB2.getPhaseScope()).thenReturn(phaseScopeB); moveSelector.stepStarted(stepScopeB2); assertAllCodesOfMoveSelector(moveSelector, "a<->b", "a<->c", "a<->d", "b<->c", "b<->d", "c<->d"); moveSelector.stepEnded(stepScopeB2); AbstractStepScope stepScopeB3 = mock(AbstractStepScope.class); when(stepScopeB3.getPhaseScope()).thenReturn(phaseScopeB); moveSelector.stepStarted(stepScopeB3); assertAllCodesOfMoveSelector(moveSelector, "a<->b", "a<->c", "a<->d", "b<->c", "b<->d", "c<->d"); moveSelector.stepEnded(stepScopeB3); moveSelector.phaseEnded(phaseScopeB); moveSelector.solvingEnded(solverScope); 
verifyPhaseLifecycle(entitySelector, 1, 2, 5); } @Test public void emptyOriginalLeftEqualsRight() { EntitySelector entitySelector = SelectorTestUtils.mockEntitySelector(TestdataEntity.buildEntityDescriptor()); SwapMoveSelector moveSelector = new SwapMoveSelector(entitySelector, entitySelector, entitySelector.getEntityDescriptor().getGenuineVariableDescriptorList(), false); DefaultSolverScope solverScope = mock(DefaultSolverScope.class); moveSelector.solvingStarted(solverScope); AbstractPhaseScope phaseScopeA = mock(AbstractPhaseScope.class); when(phaseScopeA.getSolverScope()).thenReturn(solverScope); moveSelector.phaseStarted(phaseScopeA); AbstractStepScope stepScopeA1 = mock(AbstractStepScope.class); when(stepScopeA1.getPhaseScope()).thenReturn(phaseScopeA); moveSelector.stepStarted(stepScopeA1); assertAllCodesOfMoveSelector(moveSelector); moveSelector.stepEnded(stepScopeA1); AbstractStepScope stepScopeA2 = mock(AbstractStepScope.class); when(stepScopeA2.getPhaseScope()).thenReturn(phaseScopeA); moveSelector.stepStarted(stepScopeA2); assertAllCodesOfMoveSelector(moveSelector); moveSelector.stepEnded(stepScopeA2); moveSelector.phaseEnded(phaseScopeA); AbstractPhaseScope phaseScopeB = mock(AbstractPhaseScope.class); when(phaseScopeB.getSolverScope()).thenReturn(solverScope); moveSelector.phaseStarted(phaseScopeB); AbstractStepScope stepScopeB1 = mock(AbstractStepScope.class); when(stepScopeB1.getPhaseScope()).thenReturn(phaseScopeB); moveSelector.stepStarted(stepScopeB1); assertAllCodesOfMoveSelector(moveSelector); moveSelector.stepEnded(stepScopeB1); AbstractStepScope stepScopeB2 = mock(AbstractStepScope.class); when(stepScopeB2.getPhaseScope()).thenReturn(phaseScopeB); moveSelector.stepStarted(stepScopeB2); assertAllCodesOfMoveSelector(moveSelector); moveSelector.stepEnded(stepScopeB2); AbstractStepScope stepScopeB3 = mock(AbstractStepScope.class); when(stepScopeB3.getPhaseScope()).thenReturn(phaseScopeB); moveSelector.stepStarted(stepScopeB3); 
// NOTE(review): tail of a test method that begins before this chunk. At this point the
// selector is expected to emit no moves (assertAllCodesOfMoveSelector with no codes),
// presumably because one side of the swap has no entities — confirm against the method header.
assertAllCodesOfMoveSelector(moveSelector);
moveSelector.stepEnded(stepScopeB3);
moveSelector.phaseEnded(phaseScopeB);
moveSelector.solvingEnded(solverScope);
// Expect 1 solvingStarted, 2 phaseStarted and 5 stepStarted events on the mocked selector.
verifyPhaseLifecycle(entitySelector, 1, 2, 5); }

/**
 * Left and right entity selectors draw from disjoint entity sets (a-d vs x-z): every step
 * must emit the full cartesian product of left x right as swap moves, and the move stream
 * must be identical on every step (original order, no shuffling).
 */
@Test public void originalLeftUnequalsRight() {
    EntityDescriptor entityDescriptor = TestdataEntity.buildEntityDescriptor();
    EntitySelector leftEntitySelector = SelectorTestUtils.mockEntitySelector(entityDescriptor,
            new TestdataEntity("a"), new TestdataEntity("b"), new TestdataEntity("c"), new TestdataEntity("d"));
    EntitySelector rightEntitySelector = SelectorTestUtils.mockEntitySelector(entityDescriptor,
            new TestdataEntity("x"), new TestdataEntity("y"), new TestdataEntity("z"));
    // false = original (non-random) selection order.
    SwapMoveSelector moveSelector = new SwapMoveSelector(leftEntitySelector, rightEntitySelector,
            leftEntitySelector.getEntityDescriptor().getGenuineVariableDescriptorList(), false);

    DefaultSolverScope solverScope = mock(DefaultSolverScope.class);
    moveSelector.solvingStarted(solverScope);

    // Phase A: two steps, each yielding the same 4x3 product.
    AbstractPhaseScope phaseScopeA = mock(AbstractPhaseScope.class);
    when(phaseScopeA.getSolverScope()).thenReturn(solverScope);
    moveSelector.phaseStarted(phaseScopeA);

    AbstractStepScope stepScopeA1 = mock(AbstractStepScope.class);
    when(stepScopeA1.getPhaseScope()).thenReturn(phaseScopeA);
    moveSelector.stepStarted(stepScopeA1);
    assertAllCodesOfMoveSelector(moveSelector,
            "a<->x", "a<->y", "a<->z",
            "b<->x", "b<->y", "b<->z",
            "c<->x", "c<->y", "c<->z",
            "d<->x", "d<->y", "d<->z");
    moveSelector.stepEnded(stepScopeA1);

    AbstractStepScope stepScopeA2 = mock(AbstractStepScope.class);
    when(stepScopeA2.getPhaseScope()).thenReturn(phaseScopeA);
    moveSelector.stepStarted(stepScopeA2);
    assertAllCodesOfMoveSelector(moveSelector,
            "a<->x", "a<->y", "a<->z",
            "b<->x", "b<->y", "b<->z",
            "c<->x", "c<->y", "c<->z",
            "d<->x", "d<->y", "d<->z");
    moveSelector.stepEnded(stepScopeA2);

    moveSelector.phaseEnded(phaseScopeA);

    // Phase B: three steps, same expectation — the selector must be reusable across phases.
    AbstractPhaseScope phaseScopeB = mock(AbstractPhaseScope.class);
    when(phaseScopeB.getSolverScope()).thenReturn(solverScope);
    moveSelector.phaseStarted(phaseScopeB);

    AbstractStepScope stepScopeB1 = mock(AbstractStepScope.class);
    when(stepScopeB1.getPhaseScope()).thenReturn(phaseScopeB);
    moveSelector.stepStarted(stepScopeB1);
    assertAllCodesOfMoveSelector(moveSelector,
            "a<->x", "a<->y", "a<->z",
            "b<->x", "b<->y", "b<->z",
            "c<->x", "c<->y", "c<->z",
            "d<->x", "d<->y", "d<->z");
    moveSelector.stepEnded(stepScopeB1);

    AbstractStepScope stepScopeB2 = mock(AbstractStepScope.class);
    when(stepScopeB2.getPhaseScope()).thenReturn(phaseScopeB);
    moveSelector.stepStarted(stepScopeB2);
    assertAllCodesOfMoveSelector(moveSelector,
            "a<->x", "a<->y", "a<->z",
            "b<->x", "b<->y", "b<->z",
            "c<->x", "c<->y", "c<->z",
            "d<->x", "d<->y", "d<->z");
    moveSelector.stepEnded(stepScopeB2);

    AbstractStepScope stepScopeB3 = mock(AbstractStepScope.class);
    when(stepScopeB3.getPhaseScope()).thenReturn(phaseScopeB);
    moveSelector.stepStarted(stepScopeB3);
    assertAllCodesOfMoveSelector(moveSelector,
            "a<->x", "a<->y", "a<->z",
            "b<->x", "b<->y", "b<->z",
            "c<->x", "c<->y", "c<->z",
            "d<->x", "d<->y", "d<->z");
    moveSelector.stepEnded(stepScopeB3);

    moveSelector.phaseEnded(phaseScopeB);
    moveSelector.solvingEnded(solverScope);

    // Both child selectors must see the full lifecycle: 1 solving, 2 phases, 5 steps.
    verifyPhaseLifecycle(leftEntitySelector, 1, 2, 5);
    verifyPhaseLifecycle(rightEntitySelector, 1, 2, 5);
}

/**
 * Same setup as {@link #originalLeftUnequalsRight()} but the right selector is empty:
 * the cartesian product is empty, so no swap moves may be emitted on any step, while the
 * lifecycle events must still propagate to both child selectors.
 */
@Test public void emptyRightOriginalLeftUnequalsRight() {
    EntityDescriptor entityDescriptor = TestdataEntity.buildEntityDescriptor();
    EntitySelector leftEntitySelector = SelectorTestUtils.mockEntitySelector(entityDescriptor,
            new TestdataEntity("a"), new TestdataEntity("b"), new TestdataEntity("c"), new TestdataEntity("d"));
    // No entities on the right side.
    EntitySelector rightEntitySelector = SelectorTestUtils.mockEntitySelector(entityDescriptor);
    SwapMoveSelector moveSelector = new SwapMoveSelector(leftEntitySelector, rightEntitySelector,
            leftEntitySelector.getEntityDescriptor().getGenuineVariableDescriptorList(), false);

    DefaultSolverScope solverScope = mock(DefaultSolverScope.class);
    moveSelector.solvingStarted(solverScope);

    AbstractPhaseScope phaseScopeA = mock(AbstractPhaseScope.class);
    when(phaseScopeA.getSolverScope()).thenReturn(solverScope);
    moveSelector.phaseStarted(phaseScopeA);

    AbstractStepScope stepScopeA1 = mock(AbstractStepScope.class);
    when(stepScopeA1.getPhaseScope()).thenReturn(phaseScopeA);
    moveSelector.stepStarted(stepScopeA1);
    assertAllCodesOfMoveSelector(moveSelector);
    moveSelector.stepEnded(stepScopeA1);

    AbstractStepScope stepScopeA2 = mock(AbstractStepScope.class);
    when(stepScopeA2.getPhaseScope()).thenReturn(phaseScopeA);
    moveSelector.stepStarted(stepScopeA2);
    assertAllCodesOfMoveSelector(moveSelector);
    moveSelector.stepEnded(stepScopeA2);

    moveSelector.phaseEnded(phaseScopeA);

    AbstractPhaseScope phaseScopeB = mock(AbstractPhaseScope.class);
    when(phaseScopeB.getSolverScope()).thenReturn(solverScope);
    moveSelector.phaseStarted(phaseScopeB);

    AbstractStepScope stepScopeB1 = mock(AbstractStepScope.class);
    when(stepScopeB1.getPhaseScope()).thenReturn(phaseScopeB);
    moveSelector.stepStarted(stepScopeB1);
    assertAllCodesOfMoveSelector(moveSelector);
    moveSelector.stepEnded(stepScopeB1);

    AbstractStepScope stepScopeB2 = mock(AbstractStepScope.class);
    when(stepScopeB2.getPhaseScope()).thenReturn(phaseScopeB);
    moveSelector.stepStarted(stepScopeB2);
    assertAllCodesOfMoveSelector(moveSelector);
    moveSelector.stepEnded(stepScopeB2);

    AbstractStepScope stepScopeB3 = mock(AbstractStepScope.class);
    when(stepScopeB3.getPhaseScope()).thenReturn(phaseScopeB);
    moveSelector.stepStarted(stepScopeB3);
    assertAllCodesOfMoveSelector(moveSelector);
    moveSelector.stepEnded(stepScopeB3);

    moveSelector.phaseEnded(phaseScopeB);
    moveSelector.solvingEnded(solverScope);

    verifyPhaseLifecycle(leftEntitySelector, 1, 2, 5);
    verifyPhaseLifecycle(rightEntitySelector, 1, 2, 5);
}

}
/* * Copyright 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.media3.session; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_ADVERTISEMENT; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_DISPLAY_DESCRIPTION; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_DISPLAY_ICON_URI; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_DISPLAY_SUBTITLE; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_DISPLAY_TITLE; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_DURATION; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_MEDIA_ID; import static android.support.v4.media.MediaMetadataCompat.METADATA_KEY_MEDIA_URI; import static androidx.media3.common.PlaybackException.ERROR_CODE_REMOTE_ERROR; import static androidx.media3.common.Player.MEDIA_ITEM_TRANSITION_REASON_AUTO; import static androidx.media3.common.Player.STATE_BUFFERING; import static androidx.media3.common.Player.STATE_READY; import static androidx.media3.session.MediaConstants.ARGUMENT_CAPTIONING_ENABLED; import static androidx.media3.session.MediaConstants.SESSION_COMMAND_ON_CAPTIONING_ENABLED_CHANGED; import static androidx.media3.session.SessionResult.RESULT_INFO_SKIPPED; import static androidx.media3.session.SessionResult.RESULT_SUCCESS; import static 
androidx.media3.test.session.common.CommonConstants.DEFAULT_TEST_NAME; import static androidx.media3.test.session.common.CommonConstants.METADATA_ALBUM_TITLE; import static androidx.media3.test.session.common.CommonConstants.METADATA_ARTIST; import static androidx.media3.test.session.common.CommonConstants.METADATA_DESCRIPTION; import static androidx.media3.test.session.common.CommonConstants.METADATA_TITLE; import static androidx.media3.test.session.common.CommonConstants.SUPPORT_APP_PACKAGE_NAME; import static androidx.media3.test.session.common.TestUtils.NO_RESPONSE_TIMEOUT_MS; import static androidx.media3.test.session.common.TestUtils.TIMEOUT_MS; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static java.util.concurrent.TimeUnit.MILLISECONDS; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.media.AudioManager; import android.net.Uri; import android.os.Bundle; import android.support.v4.media.MediaDescriptionCompat; import android.support.v4.media.MediaMetadataCompat; import android.support.v4.media.session.MediaSessionCompat; import android.support.v4.media.session.MediaSessionCompat.QueueItem; import android.support.v4.media.session.PlaybackStateCompat; import android.support.v4.media.session.PlaybackStateCompat.CustomAction; import android.text.TextUtils; import androidx.annotation.Nullable; import androidx.media.VolumeProviderCompat; import androidx.media3.common.DeviceInfo; import androidx.media3.common.MediaItem; import androidx.media3.common.MediaMetadata; import androidx.media3.common.PlaybackException; import androidx.media3.common.PlaybackParameters; import androidx.media3.common.Player; import androidx.media3.common.Player.DiscontinuityReason; import androidx.media3.common.Player.PositionInfo; import androidx.media3.common.Player.RepeatMode; import androidx.media3.common.Player.State; import 
androidx.media3.common.Timeline; import androidx.media3.common.util.Util; import androidx.media3.test.session.common.HandlerThreadTestRule; import androidx.media3.test.session.common.MainLooperTestRule; import androidx.media3.test.session.common.MockActivity; import androidx.media3.test.session.common.TestUtils; import androidx.test.core.app.ApplicationProvider; import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.truth.os.BundleSubject; import androidx.test.filters.MediumTest; import com.google.common.collect.ImmutableList; import com.google.common.collect.Range; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; import org.junit.runner.RunWith; /** Tests for {@link MediaController} interacting with {@link MediaSessionCompat}. 
*/
@RunWith(AndroidJUnit4.class)
@MediumTest
public class MediaControllerWithMediaSessionCompatTest {

  private static final String TAG = "MCwMSCTest";

  @ClassRule public static MainLooperTestRule mainLooperTestRule = new MainLooperTestRule();

  private final HandlerThreadTestRule threadTestRule = new HandlerThreadTestRule(TAG);
  private final MediaControllerTestRule controllerTestRule =
      new MediaControllerTestRule(threadTestRule);

  // The handler-thread rule wraps the controller rule so controllers are created and
  // released while the test thread is still alive.
  @Rule public final TestRule chain = RuleChain.outerRule(threadTestRule).around(controllerTestRule);

  private Context context;
  // Remote (separate-process) MediaSessionCompat that the controller under test connects to.
  private RemoteMediaSessionCompat session;

  @Before
  public void setUp() throws Exception {
    context = ApplicationProvider.getApplicationContext();
    session = new RemoteMediaSessionCompat(DEFAULT_TEST_NAME, context);
  }

  @After
  public void cleanUp() throws Exception {
    session.cleanUp();
  }

  /** A freshly created controller is connected to the legacy session. */
  @Test
  public void connected() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    assertThat(controller.isConnected()).isTrue();
  }

  /** Releasing the session disconnects the controller and fires Listener.onDisconnected. */
  @Test
  public void disconnected_bySessionRelease() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);
    MediaController controller =
        controllerTestRule.createController(
            session.getSessionToken(),
            new MediaController.Listener() {
              @Override
              public void onDisconnected(MediaController controller) {
                latch.countDown();
              }
            });
    session.release();
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(controller.isConnected()).isFalse();
  }

  /** Releasing the controller itself also fires Listener.onDisconnected. */
  @Test
  public void disconnected_byControllerRelease() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);
    MediaController controller =
        controllerTestRule.createController(
            session.getSessionToken(),
            new MediaController.Listener() {
              @Override
              public void onDisconnected(MediaController controller) {
                latch.countDown();
              }
            });
    // Release must happen on the controller's application thread.
    threadTestRule.getHandler().postAndSync(controller::release);
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(controller.isConnected()).isFalse();
  }
@Test public void close_twice_doesNotCrash() throws Exception { MediaController controller = controllerTestRule.createController(session.getSessionToken()); threadTestRule.getHandler().postAndSync(controller::release); threadTestRule.getHandler().postAndSync(controller::release); } @Test public void gettersAfterConnected() throws Exception { long position = 150_000; long bufferedPosition = 900_000; long duration = 1_000_000; float speed = 1.5f; CharSequence queueTitle = "queueTitle"; @PlaybackStateCompat.ShuffleMode int shuffleMode = PlaybackStateCompat.SHUFFLE_MODE_GROUP; @PlaybackStateCompat.RepeatMode int repeatMode = PlaybackStateCompat.REPEAT_MODE_GROUP; boolean isPlayingAd = true; MediaMetadataCompat metadata = new MediaMetadataCompat.Builder() .putString(METADATA_KEY_MEDIA_ID, "gettersAfterConnected") .putLong(METADATA_KEY_DURATION, duration) .putLong(METADATA_KEY_ADVERTISEMENT, isPlayingAd ? 1 : 0) .build(); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_PLAYING, position, speed) .setBufferedPosition(bufferedPosition) .build()); session.setMetadata(metadata); session.setQueueTitle(queueTitle); session.setShuffleMode(shuffleMode); session.setRepeatMode(repeatMode); MediaController controller = controllerTestRule.createController(session.getSessionToken()); AtomicLong positionRef = new AtomicLong(); AtomicLong bufferedPositionRef = new AtomicLong(); AtomicReference<Float> speedRef = new AtomicReference<>(); AtomicReference<MediaItem> mediaItemRef = new AtomicReference<>(); AtomicBoolean playWhenReadyRef = new AtomicBoolean(); AtomicLong playbackStateRef = new AtomicLong(); AtomicBoolean shuffleModeEnabledRef = new AtomicBoolean(); AtomicLong repeatModeRef = new AtomicLong(); AtomicReference<MediaMetadata> playlistMetadataRef = new AtomicReference<>(); AtomicBoolean isPlayingAdRef = new AtomicBoolean(); threadTestRule .getHandler() .postAndSync( () -> { positionRef.set(controller.getCurrentPosition()); 
bufferedPositionRef.set(controller.getBufferedPosition()); speedRef.set(controller.getPlaybackParameters().speed); mediaItemRef.set(controller.getCurrentMediaItem()); playWhenReadyRef.set(controller.getPlayWhenReady()); playbackStateRef.set(controller.getPlaybackState()); repeatModeRef.set(controller.getRepeatMode()); shuffleModeEnabledRef.set(controller.getShuffleModeEnabled()); playlistMetadataRef.set(controller.getPlaylistMetadata()); isPlayingAdRef.set(controller.isPlayingAd()); }); assertThat(positionRef.get()) .isIn(Range.closedOpen(position, position + (long) (speed * TIMEOUT_MS))); assertThat(bufferedPositionRef.get()).isEqualTo(bufferedPosition); assertThat(speedRef.get()).isEqualTo(speed); assertThat(mediaItemRef.get().mediaId).isEqualTo(metadata.getDescription().getMediaId()); assertThat(playWhenReadyRef.get()).isTrue(); assertThat(playbackStateRef.get()).isEqualTo(STATE_READY); assertThat(shuffleModeEnabledRef.get()).isTrue(); assertThat(repeatModeRef.get()).isEqualTo(Player.REPEAT_MODE_ALL); assertThat(playlistMetadataRef.get().title.toString()).isEqualTo(queueTitle.toString()); assertThat(isPlayingAdRef.get()).isEqualTo(isPlayingAd); } @Test public void getPackageName() throws Exception { MediaController controller = controllerTestRule.createController(session.getSessionToken()); assertThat(controller.getConnectedToken().getPackageName()).isEqualTo(SUPPORT_APP_PACKAGE_NAME); } @Test public void getSessionVersion() throws Exception { MediaController controller = controllerTestRule.createController(session.getSessionToken()); assertThat(controller.getConnectedToken().getSessionVersion()).isLessThan(1_000_000); } @Test public void getSessionActivity() throws Exception { Intent sessionActivity = new Intent(context, MockActivity.class); PendingIntent pi = PendingIntent.getActivity( context, 0, sessionActivity, Util.SDK_INT >= 23 ? 
PendingIntent.FLAG_IMMUTABLE : 0); session.setSessionActivity(pi); MediaController controller = controllerTestRule.createController(session.getSessionToken()); PendingIntent sessionActivityOut = controller.getSessionActivity(); assertThat(sessionActivityOut).isNotNull(); if (Util.SDK_INT >= 17) { // PendingIntent#getCreatorPackage() is added in API 17. assertThat(sessionActivityOut.getCreatorPackage()).isEqualTo(context.getPackageName()); } } @Test public void setRepeatMode_updatesAndNotifiesRepeatMode() throws Exception { @Player.RepeatMode int testRepeatMode = Player.REPEAT_MODE_ALL; MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicInteger repeatModeFromParamRef = new AtomicInteger(); AtomicInteger repeatModeFromGetterRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onRepeatModeChanged(@RepeatMode int repeatMode) { repeatModeFromParamRef.set(repeatMode); repeatModeFromGetterRef.set(controller.getRepeatMode()); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setRepeatMode(PlaybackStateCompat.REPEAT_MODE_GROUP); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(repeatModeFromParamRef.get()).isEqualTo(testRepeatMode); assertThat(repeatModeFromGetterRef.get()).isEqualTo(testRepeatMode); } @Test public void setShuffleModeEnabled_updatesAndNotifiesShuffleModeEnabled() throws Exception { boolean testShuffleModeEnabled = true; MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicBoolean shuffleModeFromParamRef = new AtomicBoolean(); AtomicBoolean shuffleModeFromGetterRef = new AtomicBoolean(); Player.Listener listener = new Player.Listener() { @Override public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) { 
shuffleModeFromParamRef.set(shuffleModeEnabled); shuffleModeFromGetterRef.set(controller.getShuffleModeEnabled()); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setShuffleMode(PlaybackStateCompat.SHUFFLE_MODE_ALL); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(shuffleModeFromParamRef.get()).isEqualTo(testShuffleModeEnabled); assertThat(shuffleModeFromGetterRef.get()).isEqualTo(testShuffleModeEnabled); } @Test public void setQueue_updatesAndNotifiesTimeline() throws Exception { MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicReference<Timeline> timelineFromParamRef = new AtomicReference<>(); AtomicReference<Timeline> timelineFromGetterRef = new AtomicReference<>(); AtomicInteger reasonRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onTimelineChanged( Timeline timeline, @Player.TimelineChangeReason int reason) { timelineFromParamRef.set(timeline); timelineFromGetterRef.set(controller.getCurrentTimeline()); reasonRef.set(reason); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); Timeline testTimeline = MediaTestUtils.createTimeline(/* windowCount= */ 2); List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(MediaUtils.convertToMediaItemList(testTimeline)); session.setQueue(testQueue); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); MediaTestUtils.assertMediaIdEquals(testTimeline, timelineFromParamRef.get()); MediaTestUtils.assertMediaIdEquals(testTimeline, timelineFromParamRef.get()); assertThat(reasonRef.get()).isEqualTo(Player.TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED); } @Test public void setQueue_withNull_notifiesEmptyTimeline() throws Exception { Timeline timeline = MediaTestUtils.createTimeline(/* windowCount= */ 2); List<QueueItem> queue = 
MediaUtils.convertToQueueItemList(MediaUtils.convertToMediaItemList(timeline));
    session.setQueue(queue);

    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<Timeline> timelineRef = new AtomicReference<>();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onTimelineChanged(
              Timeline timeline, @Player.TimelineChangeReason int reason) {
            timelineRef.set(timeline);
            latch.countDown();
          }
        };
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));

    // Clearing the legacy queue must surface as an empty (0-window, 0-period) timeline.
    session.setQueue(null);
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(timelineRef.get().getWindowCount()).isEqualTo(0);
    assertThat(timelineRef.get().getPeriodCount()).isEqualTo(0);
  }

  /**
   * A queue built from a MediaDescriptionCompat must surface its title, subtitle, description,
   * icon URI, media URI and extras through the converted MediaMetadata of the timeline window.
   */
  @Test
  public void setQueue_withDescription_notifiesTimelineWithMetadata() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<Timeline> timelineRef = new AtomicReference<>();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onTimelineChanged(
              Timeline timeline, @Player.TimelineChangeReason int reason) {
            timelineRef.set(timeline);
            latch.countDown();
          }
        };
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));

    String testMediaId = "testMediaId";
    CharSequence testTitle = "testTitle";
    CharSequence testSubtitle = "testSubtitle";
    CharSequence testDescription = "testDescription";
    Uri testIconUri = Uri.parse("androidx://media3-session/icon");
    Uri testMediaUri = Uri.parse("androidx://media3-session/media");
    Bundle testExtras = TestUtils.createTestBundle();
    MediaDescriptionCompat description =
        new MediaDescriptionCompat.Builder()
            .setMediaId(testMediaId)
            .setTitle(testTitle)
            .setSubtitle(testSubtitle)
            .setDescription(testDescription)
            .setIconUri(testIconUri)
            .setMediaUri(testMediaUri)
            .setExtras(testExtras)
            .build();
    QueueItem queueItem = new QueueItem(description, /* id= */ 0);
    session.setQueue(ImmutableList.of(queueItem));
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(timelineRef.get().getWindowCount()).isEqualTo(1);

    MediaMetadata metadata =
        timelineRef
            .get()
            .getWindow(/* windowIndex= */ 0, new Timeline.Window())
            .mediaItem
            .mediaMetadata;
    assertThat(TextUtils.equals(metadata.title, testTitle)).isTrue();
    assertThat(TextUtils.equals(metadata.subtitle, testSubtitle)).isTrue();
    assertThat(TextUtils.equals(metadata.description, testDescription)).isTrue();
    assertThat(metadata.artworkUri).isEqualTo(testIconUri);
    if (Util.SDK_INT < 21 || Util.SDK_INT >= 23) {
      // TODO(b/199055952): Test mediaUri for all API levels once the bug is fixed.
      assertThat(metadata.mediaUri).isEqualTo(testMediaUri);
    }
    assertThat(TestUtils.equals(metadata.extras, testExtras)).isTrue();
  }

  /** The legacy queue title maps to the playlist metadata title. */
  @Test
  public void setQueueTitle_updatesAndNotifiesPlaylistMetadata() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<MediaMetadata> metadataFromParamRef = new AtomicReference<>();
    AtomicReference<MediaMetadata> metadataFromGetterRef = new AtomicReference<>();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPlaylistMetadataChanged(MediaMetadata playlistMetadata) {
            metadataFromParamRef.set(playlistMetadata);
            metadataFromGetterRef.set(controller.getPlaylistMetadata());
            latch.countDown();
          }
        };
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));

    CharSequence queueTitle = "queueTitle";
    session.setQueueTitle(queueTitle);
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(metadataFromParamRef.get().title.toString()).isEqualTo(queueTitle.toString());
    assertThat(metadataFromGetterRef.get().title.toString()).isEqualTo(queueTitle.toString());
  }

  /** Without any metadata set on the session, the current media item is null. */
  @Test
  public void getCurrentMediaItem_byDefault_returnsNull() throws
Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    MediaItem mediaItem = threadTestRule.getHandler().postAndSync(controller::getCurrentMediaItem);
    assertThat(mediaItem).isNull();
  }

  /** Metadata set before connecting surfaces as the current media item's mediaId. */
  @Test
  public void getCurrentMediaItem_withSetMetadata_returnsMediaItemWithMediaId() throws Exception {
    String testMediaId = "testMediaId";
    MediaMetadataCompat metadata =
        new MediaMetadataCompat.Builder().putText(METADATA_KEY_MEDIA_ID, testMediaId).build();
    session.setMetadata(metadata);
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    MediaItem mediaItem = threadTestRule.getHandler().postAndSync(controller::getCurrentMediaItem);
    assertThat(mediaItem.mediaId).isEqualTo(testMediaId);
  }

  /**
   * Setting legacy metadata after connecting fires onMediaItemTransition with a MediaItem whose
   * MediaMetadata carries the converted display fields and URIs.
   */
  @Test
  public void setMetadata_notifiesCurrentMediaItem() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<MediaItem> itemRef = new AtomicReference<>();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onMediaItemTransition(
              @Nullable MediaItem item, @Player.MediaItemTransitionReason int reason) {
            itemRef.set(item);
            latch.countDown();
          }
        };
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));

    String testMediaId = "testMediaId";
    CharSequence testTitle = "testTitle";
    CharSequence testSubtitle = "testSubtitle";
    CharSequence testDescription = "testDescription";
    String testIconUri = "androidx://media3-session/icon";
    String testMediaUri = "androidx://media3-session/media";
    MediaMetadataCompat metadataCompat =
        new MediaMetadataCompat.Builder()
            .putText(METADATA_KEY_MEDIA_ID, testMediaId)
            .putText(METADATA_KEY_DISPLAY_TITLE, testTitle)
            .putText(METADATA_KEY_DISPLAY_SUBTITLE, testSubtitle)
            .putText(METADATA_KEY_DISPLAY_DESCRIPTION, testDescription)
            .putString(METADATA_KEY_DISPLAY_ICON_URI, testIconUri)
            .putString(METADATA_KEY_MEDIA_URI, testMediaUri)
            .build();
    session.setMetadata(metadataCompat);
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(itemRef.get().mediaId).isEqualTo(testMediaId);
    MediaMetadata metadata = itemRef.get().mediaMetadata;
    assertThat(TextUtils.equals(metadata.title, testTitle)).isTrue();
    assertThat(TextUtils.equals(metadata.subtitle, testSubtitle)).isTrue();
    assertThat(TextUtils.equals(metadata.description, testDescription)).isTrue();
    assertThat(metadata.artworkUri).isEqualTo(Uri.parse(testIconUri));
    assertThat(metadata.mediaUri).isEqualTo(Uri.parse(testMediaUri));
  }

  /** METADATA_KEY_ADVERTISEMENT=1 in legacy metadata maps to isPlayingAd()=true. */
  @Test
  public void isPlayingAd_withMetadataWithAd_returnsTrue() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicBoolean isPlayingAdRef = new AtomicBoolean();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPositionDiscontinuity(
              PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) {
            isPlayingAdRef.set(controller.isPlayingAd());
            latch.countDown();
          }
        };
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));

    MediaMetadataCompat metadata =
        new MediaMetadataCompat.Builder().putLong(METADATA_KEY_ADVERTISEMENT, 1).build();
    session.setMetadata(metadata);
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(isPlayingAdRef.get()).isTrue();
  }

  /** setMediaUri's future stays pending until prepare() is called, then completes with SUCCESS. */
  @Test
  public void setMediaUri_resultSetAfterPrepare() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    Uri testUri = Uri.parse("androidx://test");
    ListenableFuture<SessionResult> future =
        threadTestRule
            .getHandler()
            .postAndSync(() -> controller.setMediaUri(testUri, /* extras= */ Bundle.EMPTY));
    SessionResult result;
    try {
      // The future must NOT complete before prepare() is called.
      result = future.get(NO_RESPONSE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
      assertWithMessage("TimeoutException is expected").fail();
    } catch (TimeoutException e) { //
expected.
    }
    threadTestRule.getHandler().postAndSync(controller::prepare);
    result = future.get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
    assertThat(result.resultCode).isEqualTo(RESULT_SUCCESS);
  }

  /** Same as above, but play() (instead of prepare()) also completes the pending future. */
  @Test
  public void setMediaUri_resultSetAfterPlay() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    Uri testUri = Uri.parse("androidx://test");
    ListenableFuture<SessionResult> future =
        threadTestRule
            .getHandler()
            .postAndSync(() -> controller.setMediaUri(testUri, /* extras= */ Bundle.EMPTY));
    SessionResult result;
    try {
      result = future.get(NO_RESPONSE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
      assertWithMessage("TimeoutException is expected").fail();
    } catch (TimeoutException e) {
      // expected.
    }
    threadTestRule.getHandler().postAndSync(controller::play);
    result = future.get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
    assertThat(result.resultCode).isEqualTo(RESULT_SUCCESS);
  }

  /** A second setMediaUri supersedes the first: the first resolves with RESULT_INFO_SKIPPED. */
  @Test
  public void setMediaUris_multipleCalls_previousCallReturnsResultInfoSkipped() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    Uri testUri1 = Uri.parse("androidx://test1");
    Uri testUri2 = Uri.parse("androidx://test2");
    ListenableFuture<SessionResult> future1 =
        threadTestRule
            .getHandler()
            .postAndSync(() -> controller.setMediaUri(testUri1, /* extras= */ Bundle.EMPTY));
    ListenableFuture<SessionResult> future2 =
        threadTestRule
            .getHandler()
            .postAndSync(() -> controller.setMediaUri(testUri2, /* extras= */ Bundle.EMPTY));
    threadTestRule.getHandler().postAndSync(controller::prepare);
    SessionResult result1 = future1.get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
    SessionResult result2 = future2.get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
    assertThat(result1.resultCode).isEqualTo(RESULT_INFO_SKIPPED);
    assertThat(result2.resultCode).isEqualTo(RESULT_SUCCESS);
  }

  /**
   * seekToDefaultPosition(index) plus the session activating the matching queue item must land
   * the controller on that media item index.
   */
  @Test
  public void seekToDefaultPosition_withMediaItemIndex_updatesExpectedMediaItemIndex()
      throws Exception {
    List<MediaItem> testList = MediaTestUtils.createMediaItems(3);
    List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList);
    session.setQueue(testQueue);
    session.setPlaybackState(/* state= */ null);
    int testMediaItemIndex = 2;
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicInteger currentMediaItemIndexRef = new AtomicInteger();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPositionDiscontinuity(
              PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) {
            currentMediaItemIndexRef.set(controller.getCurrentMediaItemIndex());
            latch.countDown();
          }
        };
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));
    threadTestRule
        .getHandler()
        .postAndSync(() -> controller.seekToDefaultPosition(testMediaItemIndex));

    // The legacy session confirms the seek by activating the corresponding queue item.
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setActiveQueueItemId(testQueue.get(testMediaItemIndex).getQueueId())
            .build());
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(currentMediaItemIndexRef.get()).isEqualTo(testMediaItemIndex);
  }

  /** seekTo(index, position) must report the expected media item index via param and getter. */
  @Test
  public void seekTo_withMediaItemIndex_updatesExpectedMediaItemIndex() throws Exception {
    List<MediaItem> testList = MediaTestUtils.createMediaItems(3);
    List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList);
    session.setQueue(testQueue);
    session.setPlaybackState(/* state= */ null);
    long testPositionMs = 23L;
    int testMediaItemIndex = 2;
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicInteger mediaItemIndexFromParamRef = new AtomicInteger();
    AtomicInteger mediaItemIndexFromGetterRef = new AtomicInteger();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPositionDiscontinuity(
              PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) {
mediaItemIndexFromParamRef.set(newPosition.mediaItemIndex);
            mediaItemIndexFromGetterRef.set(controller.getCurrentMediaItemIndex());
            latch.countDown();
          }
        };
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));
    threadTestRule
        .getHandler()
        .postAndSync(() -> controller.seekTo(testMediaItemIndex, testPositionMs));

    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setActiveQueueItemId(testQueue.get(testMediaItemIndex).getQueueId())
            .build());
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(mediaItemIndexFromParamRef.get()).isEqualTo(testMediaItemIndex);
    assertThat(mediaItemIndexFromGetterRef.get()).isEqualTo(testMediaItemIndex);
  }

  /** With a queue and a valid active queue id, the item count equals the queue size. */
  @Test
  public void getMediaItemCount_withValidQueueAndQueueId_returnsQueueSize() throws Exception {
    List<MediaItem> testList = MediaTestUtils.createMediaItems(3);
    List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList);
    session.setQueue(testQueue);
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setActiveQueueItemId(testQueue.get(0).getQueueId())
            .build());
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    int mediaItemCount = threadTestRule.getHandler().postAndSync(controller::getMediaItemCount);
    assertThat(mediaItemCount).isEqualTo(testList.size());
  }

  /** No queue and no metadata yields an empty timeline. */
  @Test
  public void getMediaItemCount_withoutQueue_returnsZero() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    int mediaItemCount = threadTestRule.getHandler().postAndSync(controller::getMediaItemCount);
    assertThat(mediaItemCount).isEqualTo(0);
  }

  /** Even empty metadata (no queue) produces a single-item timeline. */
  @Test
  public void getMediaItemCount_withoutQueueButEmptyMetadata_returnsOne() throws Exception {
    session.setMetadata(new MediaMetadataCompat.Builder().build());

    MediaController controller = controllerTestRule.createController(session.getSessionToken());

    int mediaItemCount = threadTestRule.getHandler().postAndSync(controller::getMediaItemCount);
    assertThat(mediaItemCount).isEqualTo(1);
  }

  /** A queue with no active queue id and no metadata still reports the queue size. */
  @Test
  public void getMediaItemCount_withInvalidQueueIdWithoutMetadata_returnsAdjustedCount()
      throws Exception {
    List<MediaItem> testList = MediaTestUtils.createMediaItems(3);
    List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList);
    session.setQueue(testQueue);
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    int mediaItemCount = threadTestRule.getHandler().postAndSync(controller::getMediaItemCount);
    assertThat(mediaItemCount).isEqualTo(testList.size());
  }

  /**
   * Metadata that does not match any queue item is appended as an extra (fake) item,
   * so the count is queue size + 1.
   */
  @Test
  public void getMediaItemCount_withInvalidQueueIdWithMetadata_returnsAdjustedCount()
      throws Exception {
    List<MediaItem> testList = MediaTestUtils.createMediaItems(3);
    List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList);
    MediaItem testRemoveMediaItem = MediaTestUtils.createMediaItem("removed");
    MediaMetadataCompat testMetadataCompat =
        MediaUtils.convertToMediaMetadataCompat(testRemoveMediaItem, /* durationMs= */ 100L);
    session.setQueue(testQueue);
    session.setMetadata(testMetadataCompat);

    MediaController controller = controllerTestRule.createController(session.getSessionToken());

    int mediaItemCount = threadTestRule.getHandler().postAndSync(controller::getMediaItemCount);
    assertThat(mediaItemCount).isEqualTo(testList.size() + 1);
  }

  /** When the active queue id becomes valid again, the extra fake item disappears. */
  @Test
  public void getMediaItemCount_whenQueueIdIsChangedFromInvalidToValid_returnOriginalCount()
      throws Exception {
    List<MediaItem> testList = MediaTestUtils.createMediaItems(3);
    List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList);
    MediaItem testRemoveMediaItem = MediaTestUtils.createMediaItem("removed");
    MediaMetadataCompat testMetadataCompat =
        MediaUtils.convertToMediaMetadataCompat(testRemoveMediaItem, /* durationMs= */ 100L);
    session.setQueue(testQueue);
    session.setMetadata(testMetadataCompat);
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
Player.Listener listener = new Player.Listener() { @Override public void onTimelineChanged( Timeline timeline, @Player.TimelineChangeReason int reason) { latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackState( new PlaybackStateCompat.Builder() .setActiveQueueItemId(testQueue.get(0).getQueueId()) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); int mediaItemCount = threadTestRule.getHandler().postAndSync(controller::getMediaItemCount); assertThat(mediaItemCount).isEqualTo(testList.size()); } @Test public void getCurrentMediaItemIndex_withInvalidQueueIdWithMetadata_returnsEndOfList() throws Exception { List<MediaItem> testList = MediaTestUtils.createMediaItems(3); List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList); MediaItem testRemoveMediaItem = MediaTestUtils.createMediaItem("removed"); MediaMetadataCompat testMetadataCompat = MediaUtils.convertToMediaMetadataCompat(testRemoveMediaItem, /* durationMs= */ 100L); session.setQueue(testQueue); session.setMetadata(testMetadataCompat); MediaController controller = controllerTestRule.createController(session.getSessionToken()); int mediaItemIndex = threadTestRule.getHandler().postAndSync(controller::getCurrentMediaItemIndex); assertThat(mediaItemIndex).isEqualTo(testList.size()); } @Test public void getMediaMetadata_withMediaMetadataCompat_returnsConvertedMediaMetadata() throws Exception { MediaItem testMediaItem = MediaTestUtils.createMediaItem("test"); MediaMetadata testMediaMetadata = testMediaItem.mediaMetadata; MediaMetadataCompat testMediaMetadataCompat = MediaUtils.convertToMediaMetadataCompat(testMediaItem, /* durationMs= */ 100L); session.setMetadata(testMediaMetadataCompat); MediaController controller = controllerTestRule.createController(session.getSessionToken()); MediaMetadata mediaMetadata = threadTestRule.getHandler().postAndSync(controller::getMediaMetadata); 
assertThat(mediaMetadata).isEqualTo(testMediaMetadata); } @Test public void getMediaMetadata_withoutMediaMetadataCompat_returnsEmptyMediaMetadata() throws Exception { MediaController controller = controllerTestRule.createController(session.getSessionToken()); MediaMetadata mediaMetadata = threadTestRule.getHandler().postAndSync(controller::getMediaMetadata); assertThat(mediaMetadata).isEqualTo(MediaMetadata.EMPTY); } @Test public void getMediaMetadata_withMediaDescriptionWithoutMediaMetadata_returnsMediaDescriptionValues() throws Exception { MediaDescriptionCompat testMediaDescriptionCompat = new MediaDescriptionCompat.Builder() .setTitle(METADATA_TITLE) .setDescription(METADATA_DESCRIPTION) .build(); long testActiveQueueId = 0; List<QueueItem> testQueueItemList = ImmutableList.of( new MediaSessionCompat.QueueItem(testMediaDescriptionCompat, testActiveQueueId)); session.setQueue(testQueueItemList); session.setPlaybackState( new PlaybackStateCompat.Builder().setActiveQueueItemId(testActiveQueueId).build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); MediaMetadata mediaMetadata = threadTestRule.getHandler().postAndSync(controller::getMediaMetadata); assertThat(mediaMetadata.title).isEqualTo(testMediaDescriptionCompat.getTitle()); assertThat(mediaMetadata.description).isEqualTo(testMediaDescriptionCompat.getDescription()); } @Test public void getMediaMetadata_withMediaMetadataCompatWithQueue_returnsMediaMetadataCompatValues() throws Exception { String testMediaDescriptionCompatDescription = "testMediaDescriptionCompatDescription"; String testMediaMetadataCompatDescription = "testMediaMetadataCompatDescription"; MediaDescriptionCompat testMediaDescriptionCompat = new MediaDescriptionCompat.Builder() .setDescription(testMediaDescriptionCompatDescription) .build(); MediaMetadataCompat testMediaMetadataCompat = new MediaMetadataCompat.Builder() .putText( MediaMetadataCompat.METADATA_KEY_DISPLAY_DESCRIPTION, 
testMediaMetadataCompatDescription) .putText(MediaMetadataCompat.METADATA_KEY_ARTIST, METADATA_ARTIST) .putText(MediaMetadataCompat.METADATA_KEY_ALBUM, METADATA_ALBUM_TITLE) .build(); long testActiveQueueId = 0; List<QueueItem> testQueueItemList = ImmutableList.of( new MediaSessionCompat.QueueItem(testMediaDescriptionCompat, testActiveQueueId)); session.setQueue(testQueueItemList); session.setMetadata(testMediaMetadataCompat); session.setPlaybackState( new PlaybackStateCompat.Builder().setActiveQueueItemId(testActiveQueueId).build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); MediaMetadata mediaMetadata = threadTestRule.getHandler().postAndSync(controller::getMediaMetadata); assertThat(TextUtils.equals(mediaMetadata.description, testMediaMetadataCompatDescription)) .isTrue(); assertThat(TextUtils.equals(mediaMetadata.artist, METADATA_ARTIST)).isTrue(); assertThat(TextUtils.equals(mediaMetadata.albumTitle, METADATA_ALBUM_TITLE)).isTrue(); } @Test public void getMediaMetadata_withoutMediaMetadataCompatWithQueue_returnsEmptyMediaMetadata() throws Exception { List<MediaItem> testList = MediaTestUtils.createMediaItems(3); List<QueueItem> testQueue = MediaUtils.convertToQueueItemList(testList); int testIndex = 1; long testActiveQueueId = testQueue.get(testIndex).getQueueId(); session.setQueue(testQueue); session.setPlaybackState( new PlaybackStateCompat.Builder().setActiveQueueItemId(testActiveQueueId).build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); MediaMetadata mediaMetadata = threadTestRule.getHandler().postAndSync(controller::getMediaMetadata); assertThat(mediaMetadata).isEqualTo(testList.get(testIndex).mediaMetadata); } @Test public void setPlaybackState_withActiveQueueItemId_notifiesCurrentMediaItem() throws Exception { List<MediaItem> testList = MediaTestUtils.createMediaItems(/* size= */ 2); List<QueueItem> testQueue = 
MediaUtils.convertToQueueItemList(testList); session.setQueue(testQueue); PlaybackStateCompat.Builder builder = new PlaybackStateCompat.Builder(); // Set the current active queue item to index 'oldItemIndex'. int oldItemIndex = 0; builder.setActiveQueueItemId(testQueue.get(oldItemIndex).getQueueId()); session.setPlaybackState(builder.build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicReference<MediaItem> itemRef = new AtomicReference<>(); AtomicInteger mediaItemTransitionReasonRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onMediaItemTransition( @Nullable MediaItem item, @Player.MediaItemTransitionReason int reason) { itemRef.set(item); mediaItemTransitionReasonRef.set(reason); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); // The new playbackState will tell the controller that the active queue item is changed to // 'newItemIndex'. 
int newItemIndex = 1; builder.setActiveQueueItemId(testQueue.get(newItemIndex).getQueueId()); session.setPlaybackState(builder.build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); int currentIndex = threadTestRule.getHandler().postAndSync(controller::getCurrentMediaItemIndex); assertThat(currentIndex).isEqualTo(newItemIndex); MediaTestUtils.assertMediaIdEquals(testList.get(newItemIndex), itemRef.get()); assertThat(mediaItemTransitionReasonRef.get()).isEqualTo(MEDIA_ITEM_TRANSITION_REASON_AUTO); } @Test public void setPlaybackState_withAdjacentQueueItemWhilePlaying_notifiesPositionDiscontinuityByAutoTransition() throws Exception { long testDuration = 3_000; List<QueueItem> testQueue = MediaTestUtils.createQueueItems(/* size= */ 2); session.setQueue(testQueue); session.setMetadata( new MediaMetadataCompat.Builder().putLong(METADATA_KEY_DURATION, testDuration).build()); PlaybackStateCompat.Builder builder = new PlaybackStateCompat.Builder(); // Set the current active queue item to index 'oldItemIndex'. 
    // Active queue item starts at 'oldItemIndex', playing from position 0.
    int oldItemIndex = 0;
    builder.setActiveQueueItemId(testQueue.get(oldItemIndex).getQueueId());
    builder.setState(
        PlaybackStateCompat.STATE_PLAYING, /* position= */ 0L, /* playbackSpeed= */ 1.0f);
    session.setPlaybackState(builder.build());
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<PositionInfo> oldPositionRef = new AtomicReference<>();
    AtomicReference<PositionInfo> newPositionRef = new AtomicReference<>();
    AtomicInteger positionDiscontinuityReasonRef = new AtomicInteger();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPositionDiscontinuity(
              PositionInfo oldPosition,
              PositionInfo newPosition,
              @DiscontinuityReason int reason) {
            // Capture both position infos and the reason before releasing the latch so the
            // assertions below see a consistent snapshot.
            oldPositionRef.set(oldPosition);
            newPositionRef.set(newPosition);
            positionDiscontinuityReasonRef.set(reason);
            latch.countDown();
          }
        };
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));

    // Advance the active queue item to the adjacent index while still playing.
    int newItemIndex = 1;
    builder.setActiveQueueItemId(testQueue.get(newItemIndex).getQueueId());
    session.setPlaybackState(builder.build());

    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(positionDiscontinuityReasonRef.get())
        .isEqualTo(Player.DISCONTINUITY_REASON_AUTO_TRANSITION);
    assertThat(oldPositionRef.get().mediaItemIndex).isEqualTo(oldItemIndex);
    assertThat(newPositionRef.get().mediaItemIndex).isEqualTo(newItemIndex);
  }

  // Verifies that moving the active queue item to the adjacent index after the reported position
  // reached the item's duration is surfaced as a position discontinuity.
  @Test
  public void
      setPlaybackState_withAdjacentQueueItemAfterPlaybackDone_notifiesPositionDiscontinuityByTransition()
          throws Exception {
    long testDuration = 3000;
    List<QueueItem> testQueue = MediaTestUtils.createQueueItems(/* size= */ 2);
    session.setQueue(testQueue);
    session.setMetadata(
        new MediaMetadataCompat.Builder().putLong(METADATA_KEY_DURATION, testDuration).build());
    PlaybackStateCompat.Builder builder = new PlaybackStateCompat.Builder();
    // Set the current active queue item to index 'oldItemIndex'.
int oldItemIndex = 0; builder.setActiveQueueItemId(testQueue.get(oldItemIndex).getQueueId()); builder.setState( PlaybackStateCompat.STATE_PLAYING, /* position= */ testDuration, /* playbackSpeed= */ 1.0f); session.setPlaybackState(builder.build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicReference<PositionInfo> oldPositionRef = new AtomicReference<>(); AtomicReference<PositionInfo> newPositionRef = new AtomicReference<>(); AtomicInteger positionDiscontinuityReasonRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onPositionDiscontinuity( PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) { oldPositionRef.set(oldPosition); newPositionRef.set(newPosition); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); int newItemIndex = 1; builder.setActiveQueueItemId(testQueue.get(newItemIndex).getQueueId()); builder.setState( PlaybackStateCompat.STATE_PLAYING, /* position= */ 0, /* playbackSpeed= */ 1.0f); session.setPlaybackState(builder.build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(positionDiscontinuityReasonRef.get()) .isEqualTo(Player.DISCONTINUITY_REASON_AUTO_TRANSITION); assertThat(oldPositionRef.get().mediaItemIndex).isEqualTo(oldItemIndex); assertThat(newPositionRef.get().mediaItemIndex).isEqualTo(newItemIndex); } @Test public void setPlaybackState_withDistantQueueItem_notifiesPositionDiscontinuityByAutoTransition() throws Exception { List<QueueItem> testQueue = MediaTestUtils.createQueueItems(/* size= */ 3); session.setQueue(testQueue); session.setMetadata(new MediaMetadataCompat.Builder().build()); PlaybackStateCompat.Builder builder = new PlaybackStateCompat.Builder(); // Set the current active queue item to index 'oldItemIndex'. 
int oldItemIndex = 0; builder.setActiveQueueItemId(testQueue.get(oldItemIndex).getQueueId()); session.setPlaybackState(builder.build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicReference<PositionInfo> oldPositionRef = new AtomicReference<>(); AtomicReference<PositionInfo> newPositionRef = new AtomicReference<>(); AtomicInteger positionDiscontinuityReasonRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onPositionDiscontinuity( PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) { oldPositionRef.set(oldPosition); newPositionRef.set(newPosition); positionDiscontinuityReasonRef.set(reason); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); int newItemIndex = 2; builder.setActiveQueueItemId(testQueue.get(newItemIndex).getQueueId()); session.setPlaybackState(builder.build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(positionDiscontinuityReasonRef.get()) .isEqualTo(Player.DISCONTINUITY_REASON_AUTO_TRANSITION); assertThat(oldPositionRef.get().mediaItemIndex).isEqualTo(oldItemIndex); assertThat(newPositionRef.get().mediaItemIndex).isEqualTo(newItemIndex); } @Test public void setPlaybackState_withNewPosition_notifiesOnPositionDiscontinuity() throws Exception { long testOldCurrentPositionMs = 300L; session.setPlaybackState( new PlaybackStateCompat.Builder() .setState( PlaybackStateCompat.STATE_PAUSED, testOldCurrentPositionMs, /* playbackSpeed= */ 1f) .build()); session.setMetadata(new MediaMetadataCompat.Builder().build()); AtomicReference<PositionInfo> oldPositionRef = new AtomicReference<>(); AtomicReference<PositionInfo> newPositionRef = new AtomicReference<>(); AtomicInteger positionDiscontinuityReasonRef = new AtomicInteger(); CountDownLatch latch = new CountDownLatch(1); Player.Listener listener = 
new Player.Listener() { @Override public void onPositionDiscontinuity( PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) { oldPositionRef.set(oldPosition); newPositionRef.set(newPosition); positionDiscontinuityReasonRef.set(reason); latch.countDown(); } }; MediaController controller = controllerTestRule.createController(session.getSessionToken()); threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); long testNewCurrentPositionMs = 900L; session.setPlaybackState( new PlaybackStateCompat.Builder() .setState( PlaybackStateCompat.STATE_PAUSED, testNewCurrentPositionMs, /* playbackSpeed= */ 1f) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(positionDiscontinuityReasonRef.get()) .isEqualTo(Player.DISCONTINUITY_REASON_INTERNAL); assertThat(oldPositionRef.get().positionMs).isEqualTo(testOldCurrentPositionMs); assertThat(newPositionRef.get().positionMs).isEqualTo(testNewCurrentPositionMs); } @Test public void setPlaybackState_fromStateBufferingToPlaying_notifiesReadyState() throws Exception { List<MediaItem> testPlaylist = MediaTestUtils.createMediaItems(/* size= */ 1); MediaMetadataCompat metadata = MediaUtils.convertToMediaMetadataCompat(testPlaylist.get(0), /* durationMs= */ 50_000); long testBufferedPosition = 5_000; session.setMetadata(metadata); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState( PlaybackStateCompat.STATE_BUFFERING, /* position= */ 0, /* playbackSpeed= */ 1f) .setBufferedPosition(0) .build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicInteger stateFromParamRef = new AtomicInteger(); AtomicInteger stateFromGetterRef = new AtomicInteger(); AtomicLong bufferedPositionFromGetterRef = new AtomicLong(); Player.Listener listener = new Player.Listener() { @Override public void onPlaybackStateChanged(@State int playbackState) { 
stateFromParamRef.set(playbackState); stateFromGetterRef.set(controller.getPlaybackState()); bufferedPositionFromGetterRef.set(controller.getBufferedPosition()); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_PLAYING, /* position= */ 0, /* playbackSpeed= */ 1f) .setBufferedPosition(testBufferedPosition) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(stateFromParamRef.get()).isEqualTo(STATE_READY); assertThat(stateFromGetterRef.get()).isEqualTo(STATE_READY); assertThat(bufferedPositionFromGetterRef.get()).isEqualTo(testBufferedPosition); } @Test public void setPlaybackState_fromStatePlayingToBuffering_notifiesBufferingState() throws Exception { List<MediaItem> testPlaylist = MediaTestUtils.createMediaItems(1); MediaMetadataCompat metadata = MediaUtils.convertToMediaMetadataCompat(testPlaylist.get(0), /* durationMs= */ 1_000); long testBufferingPosition = 0; session.setMetadata(metadata); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState( PlaybackStateCompat.STATE_PLAYING, /* position= */ 100, /* playbackSpeed= */ 1f) .setBufferedPosition(500) .build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicInteger stateFromParamRef = new AtomicInteger(); AtomicInteger stateFromGetterRef = new AtomicInteger(); AtomicLong bufferedPositionFromGetterRef = new AtomicLong(); Player.Listener listener = new Player.Listener() { @Override public void onPlaybackStateChanged(int playbackState) { stateFromParamRef.set(playbackState); stateFromGetterRef.set(controller.getPlaybackState()); bufferedPositionFromGetterRef.set(controller.getBufferedPosition()); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackState( 
new PlaybackStateCompat.Builder() .setState( PlaybackStateCompat.STATE_BUFFERING, /* position= */ 0, /* playbackSpeed= */ 1f) .setBufferedPosition(testBufferingPosition) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(stateFromParamRef.get()).isEqualTo(STATE_BUFFERING); assertThat(stateFromGetterRef.get()).isEqualTo(STATE_BUFFERING); assertThat(bufferedPositionFromGetterRef.get()).isEqualTo(testBufferingPosition); } @Test public void setPlaybackState_fromStateNoneToPlaying_notifiesReadyState() throws Exception { session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_NONE, /* position= */ 0, /* playbackSpeed= */ 1f) .build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicInteger stateFromParamRef = new AtomicInteger(); AtomicInteger stateFromGetterRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onPlaybackStateChanged(int playbackState) { stateFromParamRef.set(playbackState); stateFromGetterRef.set(controller.getPlaybackState()); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_PLAYING, /* position= */ 0, /* playbackSpeed= */ 1f) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(stateFromParamRef.get()).isEqualTo(STATE_READY); assertThat(stateFromGetterRef.get()).isEqualTo(STATE_READY); } @Test public void setPlaybackState_fromStatePausedToPlaying_notifiesPlayWhenReady() throws Exception { boolean testPlayWhenReady = true; session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_PAUSED, /* position= */ 0, /* playbackSpeed= */ 1f) .build()); MediaController controller = 
controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicBoolean playWhenReadyFromParamRef = new AtomicBoolean(); AtomicBoolean playWhenReadyFromGetterRef = new AtomicBoolean(); AtomicInteger playWhenReadyChangedReasonFromParamRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onPlayWhenReadyChanged( boolean playWhenReady, @Player.PlayWhenReadyChangeReason int reason) { playWhenReadyFromParamRef.set(playWhenReady); playWhenReadyFromGetterRef.set(controller.getPlayWhenReady()); playWhenReadyChangedReasonFromParamRef.set(reason); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_PLAYING, /* position= */ 0, /* playbackSpeed= */ 1f) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(playWhenReadyFromParamRef.get()).isEqualTo(testPlayWhenReady); assertThat(playWhenReadyFromGetterRef.get()).isEqualTo(testPlayWhenReady); assertThat(playWhenReadyChangedReasonFromParamRef.get()) .isEqualTo(Player.PLAY_WHEN_READY_CHANGE_REASON_REMOTE); } @Test public void setPlaybackState_toBuffering_notifiesPlaybackStateBuffering() throws Exception { session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_PAUSED, /* position= */ 0, /* playbackSpeed= */ 1f) .build()); session.setMetadata( new MediaMetadataCompat.Builder().putLong(METADATA_KEY_DURATION, 1_000).build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicInteger playbackStateFromParamRef = new AtomicInteger(); AtomicInteger playbackStateFromGetterRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onPlaybackStateChanged(@Player.State int playbackState) { 
playbackStateFromParamRef.set(playbackState); playbackStateFromGetterRef.set(controller.getPlaybackState()); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState( PlaybackStateCompat.STATE_BUFFERING, /* position= */ 0, /* playbackSpeed= */ 1f) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(playbackStateFromParamRef.get()).isEqualTo(Player.STATE_BUFFERING); assertThat(playbackStateFromGetterRef.get()).isEqualTo(Player.STATE_BUFFERING); } @Test public void setPlaybackState_toPausedWithEndPosition_notifiesPlaybackStateEnded() throws Exception { long testDuration = 1_000; session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_PLAYING, /* position= */ 0, /* playbackSpeed= */ 1f) .build()); session.setMetadata( new MediaMetadataCompat.Builder().putLong(METADATA_KEY_DURATION, testDuration).build()); MediaController controller = controllerTestRule.createController(session.getSessionToken()); CountDownLatch latch = new CountDownLatch(1); AtomicInteger playbackStateFromParamRef = new AtomicInteger(); AtomicInteger playbackStateFromGetterRef = new AtomicInteger(); Player.Listener listener = new Player.Listener() { @Override public void onPlaybackStateChanged(@Player.State int playbackState) { playbackStateFromParamRef.set(playbackState); playbackStateFromGetterRef.set(controller.getPlaybackState()); latch.countDown(); } }; threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState( PlaybackStateCompat.STATE_PAUSED, /* position= */ testDuration, /* playbackSpeed= */ 1f) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(playbackStateFromParamRef.get()).isEqualTo(Player.STATE_ENDED); assertThat(playbackStateFromGetterRef.get()).isEqualTo(Player.STATE_ENDED); } 
  @Test
  public void setPlaybackState_withSpeed_notifiesOnPlaybackParametersChanged() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<PlaybackParameters> playbackParametersFromParamRef = new AtomicReference<>();
    AtomicReference<PlaybackParameters> playbackParametersFromGetterRef = new AtomicReference<>();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
            // Record both the callback parameter and the getter value so the test can verify
            // they agree at notification time.
            playbackParametersFromParamRef.set(playbackParameters);
            playbackParametersFromGetterRef.set(controller.getPlaybackParameters());
            latch.countDown();
          }
        };
    threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener));

    // The legacy playback speed is expected to surface as PlaybackParameters.speed.
    float testSpeed = 3.0f;
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setState(
                PlaybackStateCompat.STATE_PLAYING,
                /* position= */ 0,
                /* playbackSpeed= */ testSpeed)
            .build());

    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(playbackParametersFromParamRef.get().speed).isEqualTo(testSpeed);
    assertThat(playbackParametersFromGetterRef.get().speed).isEqualTo(testSpeed);
  }

  @Test
  public void setPlaybackState_withError_notifiesOnPlayerErrorChanged() throws Exception {
    String testErrorMessage = "testErrorMessage";
    int testErrorCode = PlaybackStateCompat.ERROR_CODE_UNKNOWN_ERROR; // 0
    // The legacy error code is expected to be appended to the converted message.
    String testConvertedErrorMessage = "testErrorMessage, code=0";
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<PlaybackException> errorFromParamRef = new AtomicReference<>();
    AtomicReference<PlaybackException> errorFromGetterRef = new AtomicReference<>();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPlayerErrorChanged(@Nullable PlaybackException error) {
            errorFromParamRef.set(error);
errorFromGetterRef.set(controller.getPlayerError()); latch.countDown(); } }; controller.addListener(listener); session.setPlaybackState( new PlaybackStateCompat.Builder() .setState(PlaybackStateCompat.STATE_ERROR, /* position= */ 0, /* playbackSpeed= */ 1.0f) .setErrorMessage(testErrorCode, testErrorMessage) .build()); assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(errorFromParamRef.get().errorCode).isEqualTo(ERROR_CODE_REMOTE_ERROR); assertThat(errorFromParamRef.get().getMessage()).isEqualTo(testConvertedErrorMessage); assertThat(errorFromGetterRef.get().errorCode).isEqualTo(ERROR_CODE_REMOTE_ERROR); assertThat(errorFromGetterRef.get().getMessage()).isEqualTo(testConvertedErrorMessage); } @Test public void setPlaybackToRemote_notifiesDeviceInfoAndVolume() throws Exception { int volumeControlType = VolumeProviderCompat.VOLUME_CONTROL_ABSOLUTE; int maxVolume = 100; int currentVolume = 45; AtomicReference<DeviceInfo> deviceInfoRef = new AtomicReference<>(); CountDownLatch latchForDeviceInfo = new CountDownLatch(1); CountDownLatch latchForDeviceVolume = new CountDownLatch(1); Player.Listener listener = new Player.Listener() { @Override public void onDeviceInfoChanged(DeviceInfo deviceInfo) { if (deviceInfo.playbackType == DeviceInfo.PLAYBACK_TYPE_REMOTE) { deviceInfoRef.set(deviceInfo); latchForDeviceInfo.countDown(); } } @Override public void onDeviceVolumeChanged(int volume, boolean muted) { if (volume == currentVolume) { latchForDeviceVolume.countDown(); } } }; MediaController controller = controllerTestRule.createController(session.getSessionToken()); threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackToRemote(volumeControlType, maxVolume, currentVolume); assertThat(latchForDeviceInfo.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(latchForDeviceVolume.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(deviceInfoRef.get().maxVolume).isEqualTo(maxVolume); } @Test public void 
setPlaybackToLocal_notifiesDeviceInfoAndVolume() throws Exception { if (Util.SDK_INT == 21 || Util.SDK_INT == 22) { // In API 21 and 22, onAudioInfoChanged is not called. return; } session.setPlaybackToRemote( VolumeProviderCompat.VOLUME_CONTROL_ABSOLUTE, /* maxVolume= */ 100, /* currentVolume= */ 45); int testLocalStreamType = AudioManager.STREAM_ALARM; AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); int maxVolume = audioManager.getStreamMaxVolume(testLocalStreamType); int currentVolume = audioManager.getStreamVolume(testLocalStreamType); AtomicReference<DeviceInfo> deviceInfoRef = new AtomicReference<>(); CountDownLatch latchForDeviceInfo = new CountDownLatch(1); CountDownLatch latchForDeviceVolume = new CountDownLatch(1); Player.Listener listener = new Player.Listener() { @Override public void onDeviceInfoChanged(DeviceInfo deviceInfo) { if (deviceInfo.playbackType == DeviceInfo.PLAYBACK_TYPE_LOCAL) { deviceInfoRef.set(deviceInfo); latchForDeviceInfo.countDown(); } } @Override public void onDeviceVolumeChanged(int volume, boolean muted) { if (volume == currentVolume) { latchForDeviceVolume.countDown(); } } }; MediaController controller = controllerTestRule.createController(session.getSessionToken()); threadTestRule.getHandler().postAndSync(() -> controller.addListener(listener)); session.setPlaybackToLocal(testLocalStreamType); assertThat(latchForDeviceInfo.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(latchForDeviceVolume.await(TIMEOUT_MS, MILLISECONDS)).isTrue(); assertThat(deviceInfoRef.get().maxVolume).isEqualTo(maxVolume); } @Test public void sendSessionEvent_notifiesCustomCommand() throws Exception { CountDownLatch latch = new CountDownLatch(1); AtomicReference<SessionCommand> commandRef = new AtomicReference<>(); AtomicReference<Bundle> argsRef = new AtomicReference<>(); MediaController.Listener listener = new MediaController.Listener() { @Override public ListenableFuture<SessionResult> onCustomCommand( 
// NOTE(review): this span begins inside an anonymous MediaController.Listener whose
// enclosing test method starts before this chunk; tokens are reproduced as-is.
              MediaController controller, SessionCommand command, Bundle args) {
            commandRef.set(command);
            argsRef.set(args);
            latch.countDown();
            return Futures.immediateFuture(new SessionResult(RESULT_SUCCESS));
          }
        };
    controllerTestRule.createController(session.getSessionToken(), listener);

    String event = "customCommand";
    Bundle extras = TestUtils.createTestBundle();
    session.sendSessionEvent(event, extras);

    // The session event must surface on the controller as a custom command carrying
    // the event name and an equal extras bundle.
    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(commandRef.get().customAction).isEqualTo(event);
    assertThat(TestUtils.equals(extras, argsRef.get())).isTrue();
  }

  /** A custom action in the playback state should be reported as a custom-layout button. */
  @Test
  public void setPlaybackState_withCustomAction_notifiesCustomLayout() throws Exception {
    CustomAction testCustomAction =
        new CustomAction.Builder("testCustomAction", "testName", 1).build();
    CountDownLatch latch = new CountDownLatch(1);
    List<CommandButton> layoutOut = new ArrayList<>();
    MediaController.Listener listener =
        new MediaController.Listener() {
          @Override
          public ListenableFuture<SessionResult> onSetCustomLayout(
              MediaController controller, List<CommandButton> layout) {
            layoutOut.addAll(layout);
            latch.countDown();
            return Futures.immediateFuture(new SessionResult(RESULT_SUCCESS));
          }
        };
    controllerTestRule.createController(session.getSessionToken(), listener);

    session.setPlaybackState(
        new PlaybackStateCompat.Builder().addCustomAction(testCustomAction).build());

    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    // Exactly one button, mirroring the custom action's action/name/icon.
    assertThat(layoutOut).hasSize(1);
    CommandButton button = layoutOut.get(0);
    assertThat(button.sessionCommand.customAction).isEqualTo(testCustomAction.getAction());
    assertThat(button.displayName.toString()).isEqualTo(testCustomAction.getName().toString());
    assertThat(button.iconResId).isEqualTo(testCustomAction.getIcon());
  }

  /** A custom action should also appear in the controller's available session commands. */
  @Test
  public void setPlaybackState_withCustomAction_notifiesAvailableCommands() throws Exception {
    CustomAction testCustomAction =
        new CustomAction.Builder("testCustomAction", "testName1", 1).build();
    SessionCommand testSessionCommand =
        new SessionCommand(testCustomAction.getAction(), /* extras= */ Bundle.EMPTY);
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<SessionCommands> commandsRef = new AtomicReference<>();
    MediaController.Listener listener =
        new MediaController.Listener() {
          @Override
          public void onAvailableSessionCommandsChanged(
              MediaController controller, SessionCommands commands) {
            commandsRef.set(commands);
            latch.countDown();
          }
        };
    controllerTestRule.createController(session.getSessionToken(), listener);

    session.setPlaybackState(
        new PlaybackStateCompat.Builder().addCustomAction(testCustomAction).build());

    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    SessionCommands commands = commandsRef.get();
    assertThat(commands.contains(testSessionCommand)).isTrue();
  }

  /** Toggling captioning on the legacy session maps to a dedicated custom command. */
  @Test
  public void setCaptioningEnabled_notifiesCustomCommand() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<SessionCommand> commandRef = new AtomicReference<>();
    AtomicReference<Bundle> argsRef = new AtomicReference<>();
    MediaController.Listener listener =
        new MediaController.Listener() {
          @Override
          public ListenableFuture<SessionResult> onCustomCommand(
              MediaController controller, SessionCommand command, Bundle args) {
            commandRef.set(command);
            argsRef.set(args);
            latch.countDown();
            return Futures.immediateFuture(new SessionResult(RESULT_SUCCESS));
          }
        };
    controllerTestRule.createController(session.getSessionToken(), listener);

    session.setCaptioningEnabled(true);

    assertThat(latch.await(TIMEOUT_MS, MILLISECONDS)).isTrue();
    assertThat(commandRef.get().customAction)
        .isEqualTo(SESSION_COMMAND_ON_CAPTIONING_ENABLED_CHANGED);
    BundleSubject.assertThat(argsRef.get()).bool(ARGUMENT_CAPTIONING_ENABLED).isTrue();
  }

  /** With no playback state set, the position defaults to zero. */
  @Test
  public void getCurrentPosition_byDefault_returnsZero() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long currentPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getCurrentPosition);
    assertThat(currentPositionMs).isEqualTo(0);
  }

  /** Negative legacy positions are clamped up to zero. */
  @Test
  public void getCurrentPosition_withNegativePosition_adjustsToZero() throws Exception {
    long testPositionMs = -100L;
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setState(PlaybackStateCompat.STATE_PAUSED, testPositionMs, /* playbackSpeed= */ 1.0f)
            .build());

    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long currentPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getCurrentPosition);

    assertThat(currentPositionMs).isEqualTo(0);
  }

  /** Positions past the advertised duration are clamped down to the duration. */
  @Test
  public void getCurrentPosition_withGreaterThanDuration_adjustsToDuration() throws Exception {
    long testDurationMs = 100L;
    long testPositionMs = 200L;
    session.setMetadata(
        new MediaMetadataCompat.Builder().putLong(METADATA_KEY_DURATION, testDurationMs).build());
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setState(PlaybackStateCompat.STATE_PAUSED, testPositionMs, /* playbackSpeed= */ 1.0f)
            .build());

    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long currentPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getCurrentPosition);

    assertThat(currentPositionMs).isEqualTo(testDurationMs);
  }

  /** With no playback state set, the content position defaults to zero. */
  @Test
  public void getContentPosition_byDefault_returnsZero() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long contentPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getContentPosition);
    assertThat(contentPositionMs).isEqualTo(0);
  }

  /** With no playback state set, the content buffered position defaults to zero. */
  @Test
  public void getContentBufferedPosition_byDefault_returnsZero() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long contentBufferedPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getContentBufferedPosition);
    assertThat(contentBufferedPositionMs).isEqualTo(0);
  }

  /** With no playback state set, the buffered position defaults to zero. */
  @Test
  public void getBufferedPosition_byDefault_returnsZero() throws Exception {
    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long bufferedPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getBufferedPosition);
    assertThat(bufferedPositionMs).isEqualTo(0);
  }

  /** A buffered position behind the playback position is raised to the position. */
  @Test
  public void getBufferedPosition_withLessThanPosition_adjustsToPosition() throws Exception {
    long testPositionMs = 300L;
    long testBufferedPositionMs = 100L;
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setState(PlaybackStateCompat.STATE_PAUSED, testPositionMs, /* playbackSpeed= */ 1.0f)
            .setBufferedPosition(testBufferedPositionMs)
            .build());

    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long bufferedPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getBufferedPosition);

    assertThat(bufferedPositionMs).isEqualTo(testPositionMs);
  }

  /** A buffered position past the duration is clamped down to the duration. */
  @Test
  public void getBufferedPosition_withGreaterThanDuration_adjustsToDuration() throws Exception {
    long testDurationMs = 100L;
    long testBufferedPositionMs = 200L;
    session.setMetadata(
        new MediaMetadataCompat.Builder().putLong(METADATA_KEY_DURATION, testDurationMs).build());
    session.setPlaybackState(
        new PlaybackStateCompat.Builder().setBufferedPosition(testBufferedPositionMs).build());

    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long bufferedPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getBufferedPosition);

    assertThat(bufferedPositionMs).isEqualTo(testDurationMs);
  }

  /** An in-range buffered position is passed through unchanged. */
  @Test
  public void getBufferedPosition() throws Exception {
    long testPositionMs = 300L;
    long testBufferedPositionMs = 331L;
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setState(PlaybackStateCompat.STATE_PAUSED, testPositionMs, /* playbackSpeed= */ 1.0f)
            .setBufferedPosition(testBufferedPositionMs)
            .build());

    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long bufferedPositionMs =
        threadTestRule.getHandler().postAndSync(controller::getBufferedPosition);

    assertThat(bufferedPositionMs).isEqualTo(testBufferedPositionMs);
  }

  /** Total buffered duration is the gap between buffered position and current position. */
  @Test
  public void getTotalBufferedDuration() throws Exception {
    long testCurrentPositionMs = 224L;
    long testBufferedPositionMs = 331L;
    long testTotalBufferedDurationMs = testBufferedPositionMs - testCurrentPositionMs;
    session.setPlaybackState(
        new PlaybackStateCompat.Builder()
            .setState(
                PlaybackStateCompat.STATE_PAUSED, testCurrentPositionMs, /* playbackSpeed= */ 1.0f)
            .setBufferedPosition(testBufferedPositionMs)
            .build());

    MediaController controller = controllerTestRule.createController(session.getSessionToken());
    long totalBufferedDurationMs =
        threadTestRule.getHandler().postAndSync(controller::getTotalBufferedDuration);

    assertThat(totalBufferedDurationMs).isEqualTo(testTotalBufferedDurationMs);
  }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.repository.kdr.delegates;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleDependencyException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.LongObjectId;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.RepositoryOperation;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;

/**
 * Delegate that reads, writes and deletes database connection definitions
 * (rows of R_DATABASE and their R_DATABASE_ATTRIBUTE properties) in a
 * Kettle database repository.
 */
public class KettleDatabaseRepositoryDatabaseDelegate extends KettleDatabaseRepositoryBaseDelegate {

  private static final Class<?> PKG = DatabaseMeta.class; // for i18n purposes, needed by Translator2!!

  public KettleDatabaseRepositoryDatabaseDelegate( KettleDatabaseRepository repository ) {
    super( repository );
  }

  /** Looks up the R_DATABASE id for a connection by its name. */
  public synchronized ObjectId getDatabaseID( String name ) throws KettleException {
    return repository.connectionDelegate.getIDWithValue(
      quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_NAME ), name );
  }

  /** Resolves a database type id (R_DATABASE_TYPE) to its type code. */
  public synchronized String getDatabaseTypeCode( ObjectId id_database_type ) throws KettleException {
    return repository.connectionDelegate.getStringWithID(
      quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_TYPE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE ), id_database_type,
      quote( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE ) );
  }

  /** Resolves a connection type id (R_DATABASE_CONTYPE) to its access-type code. */
  public synchronized String getDatabaseConTypeCode( ObjectId id_database_contype ) throws KettleException {
    return repository.connectionDelegate.getStringWithID(
      quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ), id_database_contype,
      quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) );
  }

  /** Fetches the single R_DATABASE row for the given connection id. */
  public RowMetaAndData getDatabase( ObjectId id_database ) throws KettleException {
    return repository.connectionDelegate.getOneRow(
      quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE ), id_database );
  }

  /** Fetches the single R_DATABASE_ATTRIBUTE row for the given attribute id. */
  public RowMetaAndData getDatabaseAttribute( ObjectId id_database_attribute ) throws KettleException {
    return repository.connectionDelegate.getOneRow(
      quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE ), id_database_attribute );
  }

  /**
   * Reads all rows of R_DATABASE_ATTRIBUTE (for every connection) and returns
   * those with a positive attribute id.
   */
  public Collection<RowMetaAndData> getDatabaseAttributes() throws KettleDatabaseException, KettleValueException {
    List<RowMetaAndData> attrs = new ArrayList<RowMetaAndData>();
    List<Object[]> rows =
      repository.connectionDelegate.getRows(
        "SELECT * FROM " + quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE ), 0 );
    for ( Object[] row : rows ) {
      RowMetaAndData rowWithMeta = new RowMetaAndData( repository.connectionDelegate.getReturnRowMeta(), row );
      // NOTE(review): the lookup name is wrapped in quote(); the row-meta field name is
      // normally the bare column name — confirm the quoted form actually matches.
      long id =
        rowWithMeta.getInteger(
          quote( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE ), 0 );
      if ( id > 0 ) {
        attrs.add( rowWithMeta );
      }
    }
    return attrs;
  }

  /**
   * Load the database connection metadata with the given id from the repository,
   * including its extra attributes.
   */
  public DatabaseMeta loadDatabaseMeta( ObjectId id_database ) throws KettleException {
    DatabaseMeta databaseMeta = new DatabaseMeta();
    try {
      RowMetaAndData r = getDatabase( id_database );

      if ( r != null ) {
        ObjectId id_database_type =
          new LongObjectId( r.getInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, 0 ) ); // con_type
        String dbTypeDesc = getDatabaseTypeCode( id_database_type );
        if ( dbTypeDesc != null ) {
          databaseMeta.setDatabaseInterface( DatabaseMeta.getDatabaseInterface( dbTypeDesc ) );
          databaseMeta.setAttributes( new Properties() ); // new attributes
        }

        databaseMeta.setObjectId( id_database );
        databaseMeta.setName( r.getString( KettleDatabaseRepository.FIELD_DATABASE_NAME, "" ) );

        ObjectId id_database_contype =
          new LongObjectId( r.getInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, 0 ) ); // con_access
        databaseMeta.setAccessType( DatabaseMeta.getAccessType( getDatabaseConTypeCode( id_database_contype ) ) );

        databaseMeta.setHostname( r.getString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, "" ) );
        databaseMeta.setDBName( r.getString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, "" ) );
        databaseMeta.setDBPort( r.getString( KettleDatabaseRepository.FIELD_DATABASE_PORT, "" ) );
        databaseMeta.setUsername( r.getString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, "" ) );
        // Passwords are stored (optionally) encrypted; decrypt on the way out.
        databaseMeta.setPassword( Encr.decryptPasswordOptionallyEncrypted( r.getString(
          KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, "" ) ) );
        databaseMeta.setServername( r.getString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, "" ) );
        databaseMeta.setDataTablespace( r.getString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, "" ) );
        databaseMeta.setIndexTablespace( r.getString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, "" ) );

        // Also, load all the properties we can find...
        final Collection<RowMetaAndData> attrs =
          repository.connectionDelegate.getDatabaseAttributes( id_database );
        for ( RowMetaAndData row : attrs ) {
          String code = row.getString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, "" );
          String attribute = row.getString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, "" );
          databaseMeta.getAttributes().put( code, Const.NVL( attribute, "" ) );
        }
      }

      return databaseMeta;
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( "Error loading database connection from repository (id_database="
        + id_database + ")", dbe );
    }
  }

  /**
   * Saves the database information into a given repository.
   *
   * @param databaseMeta
   *          The database metadata object to store
   *
   * @throws KettleException
   *           if an error occurs.
   */
  public void saveDatabaseMeta( DatabaseMeta databaseMeta ) throws KettleException {
    try {
      // If we don't have an ID, we don't know which entry in the database we need to update.
      // See if a database with the same name is already available...
      if ( databaseMeta.getObjectId() == null ) {
        databaseMeta.setObjectId( getDatabaseID( databaseMeta.getName() ) );
      }

      // Still not found? --> Insert
      if ( databaseMeta.getObjectId() == null ) {
        // Insert new Note in repository
        //
        databaseMeta.setObjectId( insertDatabase(
          databaseMeta.getName(), databaseMeta.getPluginId(),
          DatabaseMeta.getAccessTypeDesc( databaseMeta.getAccessType() ), databaseMeta.getHostname(),
          databaseMeta.getDatabaseName(), databaseMeta.getDatabasePortNumberString(),
          databaseMeta.getUsername(), databaseMeta.getPassword(), databaseMeta.getServername(),
          databaseMeta.getDataTablespace(), databaseMeta.getIndexTablespace() ) );
      } else {
        // --> found entry with the same name...
        // Update the note...
        updateDatabase(
          databaseMeta.getObjectId(), databaseMeta.getName(), databaseMeta.getPluginId(),
          DatabaseMeta.getAccessTypeDesc( databaseMeta.getAccessType() ), databaseMeta.getHostname(),
          databaseMeta.getDatabaseName(), databaseMeta.getDatabasePortNumberString(),
          databaseMeta.getUsername(), databaseMeta.getPassword(), databaseMeta.getServername(),
          databaseMeta.getDataTablespace(), databaseMeta.getIndexTablespace() );
      }

      // For the extra attributes, just delete them and re-add them.
      delDatabaseAttributes( databaseMeta.getObjectId() );

      // OK, now get a list of all the attributes set on the database connection...
      insertDatabaseAttributes( databaseMeta.getObjectId(), databaseMeta.getAttributes() );
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( "Error saving database connection or one of its attributes to the repository.",
        dbe );
    }
  }

  /**
   * Inserts a new R_DATABASE row (registering an unknown database type on the fly)
   * and returns the new connection id.
   */
  public synchronized ObjectId insertDatabase( String name, String type, String access, String host, String dbname,
    String port, String user, String pass, String servername, String data_tablespace, String index_tablespace ) throws KettleException {

    ObjectId id = repository.connectionDelegate.getNextDatabaseID();

    ObjectId id_database_type = getDatabaseTypeID( type );
    if ( id_database_type == null ) {
      // New support database type: add it!
      id_database_type = repository.connectionDelegate.getNextDatabaseTypeID();

      String tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE;
      RowMetaInterface tableMeta = new RowMeta();

      tableMeta.addValueMeta( new ValueMeta(
        KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER, 5, 0 ) );
      tableMeta.addValueMeta( new ValueMeta(
        KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, ValueMetaInterface.TYPE_STRING,
        KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
      tableMeta.addValueMeta( new ValueMeta(
        KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
        KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );

      repository.connectionDelegate.getDatabase().prepareInsert( tableMeta, tablename );

      Object[] tableData = new Object[3];
      int tableIndex = 0;

      tableData[tableIndex++] = new LongObjectId( id_database_type ).longValue();
      tableData[tableIndex++] = type;
      // No description is available for an ad-hoc type; the code doubles as description.
      tableData[tableIndex++] = type;

      repository.connectionDelegate.getDatabase().setValuesInsert( tableMeta, tableData );
      repository.connectionDelegate.getDatabase().insertRow();
      repository.connectionDelegate.getDatabase().closeInsert();
    }

    ObjectId id_database_contype = getDatabaseConTypeID( access );

    RowMetaAndData table = new RowMetaAndData();
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER ), id );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_NAME, ValueMetaInterface.TYPE_STRING ), name );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER ), id_database_type );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, ValueMetaInterface.TYPE_INTEGER ),
      id_database_contype );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, ValueMetaInterface.TYPE_STRING ), host );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, ValueMetaInterface.TYPE_STRING ), dbname );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_PORT, ValueMetaInterface.TYPE_INTEGER ),
      Long.valueOf( Const.toLong( port, -1 ) ) );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_USERNAME, ValueMetaInterface.TYPE_STRING ), user );
    // Store the password encrypted unless it uses variables.
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, ValueMetaInterface.TYPE_STRING ),
      Encr.encryptPasswordIfNotUsingVariables( pass ) );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, ValueMetaInterface.TYPE_STRING ), servername );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, ValueMetaInterface.TYPE_STRING ), data_tablespace );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, ValueMetaInterface.TYPE_STRING ), index_tablespace );

    repository.connectionDelegate.getDatabase().prepareInsert(
      table.getRowMeta(), KettleDatabaseRepository.TABLE_R_DATABASE );
    repository.connectionDelegate.getDatabase().setValuesInsert( table );
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();

    return id;
  }

  /** Updates an existing R_DATABASE row in place. */
  public synchronized void updateDatabase( ObjectId id_database, String name, String type, String access,
    String host, String dbname, String port, String user, String pass, String servername, String data_tablespace,
    String index_tablespace ) throws KettleException {
    ObjectId id_database_type = getDatabaseTypeID( type );
    ObjectId id_database_contype = getDatabaseConTypeID( access );

    RowMetaAndData table = new RowMetaAndData();
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_NAME, ValueMetaInterface.TYPE_STRING ), name );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER ), id_database_type );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, ValueMetaInterface.TYPE_INTEGER ),
      id_database_contype );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, ValueMetaInterface.TYPE_STRING ), host );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, ValueMetaInterface.TYPE_STRING ), dbname );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_PORT, ValueMetaInterface.TYPE_INTEGER ),
      Long.valueOf( Const.toLong( port, -1 ) ) );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_USERNAME, ValueMetaInterface.TYPE_STRING ), user );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, ValueMetaInterface.TYPE_STRING ),
      Encr.encryptPasswordIfNotUsingVariables( pass ) );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, ValueMetaInterface.TYPE_STRING ), servername );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, ValueMetaInterface.TYPE_STRING ), data_tablespace );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, ValueMetaInterface.TYPE_STRING ), index_tablespace );

    repository.connectionDelegate.updateTableRow(
      KettleDatabaseRepository.TABLE_R_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, table,
      id_database );
  }

  /** Looks up a database type id by its type code. */
  public synchronized ObjectId getDatabaseTypeID( String code ) throws KettleException {
    return repository.connectionDelegate.getIDWithValue(
      quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_TYPE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE ), code );
  }

  /** Looks up a connection (access) type id by its code. */
  public synchronized ObjectId getDatabaseConTypeID( String code ) throws KettleException {
    return repository.connectionDelegate.getIDWithValue(
      quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ),
      quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ), code );
  }

  /**
   * Remove a database connection from the repository
   *
   * @param databaseName
   *          The name of the connection to remove
   * @throws KettleException
   *           In case something went wrong: database error, insufficient permissions, depending objects, etc.
   */
  public void deleteDatabaseMeta( String databaseName ) throws KettleException {
    repository.getSecurityProvider().validateAction( RepositoryOperation.DELETE_DATABASE );
    try {
      ObjectId id_database = getDatabaseID( databaseName );
      delDatabase( id_database );
    } catch ( KettleException dbe ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "KettleDatabaseRepository.Exception.ErrorDeletingConnection.Message", databaseName ), dbe );
    }
  }

  /**
   * Deletes the R_DATABASE row for the given id, but only when no job or
   * transformation still references the connection; otherwise a
   * {@link KettleDependencyException} listing the dependents is thrown.
   */
  public synchronized void delDatabase( ObjectId id_database ) throws KettleException {
    repository.getSecurityProvider().validateAction( RepositoryOperation.DELETE_DATABASE );

    // First, see if the database connection is still used by other connections...
    // If so, generate an error!!
    // We look in table R_STEP_DATABASE to see if there are any steps using this database.
    //
    String[] transList = repository.getTransformationsUsingDatabase( id_database );
    String[] jobList = repository.getJobsUsingDatabase( id_database );

    if ( jobList.length == 0 && transList.length == 0 ) {
      repository.connectionDelegate.performDelete( "DELETE FROM "
        + quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE ) + " WHERE "
        + quote( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE ) + " = ? ", id_database );
    } else {
      String message = " Database used by the following " + Const.CR;
      if ( jobList.length > 0 ) {
        // FIX: was "message =", which discarded the " Database used by the following " prefix.
        message += "jobs :" + Const.CR;
        for ( String job : jobList ) {
          message += "\t " + job + Const.CR;
        }
      }

      message += "transformations:" + Const.CR;
      for ( String trans : transList ) {
        message += "\t " + trans + Const.CR;
      }
      KettleDependencyException e = new KettleDependencyException( message );
      throw new KettleDependencyException( "This database is still in use by "
        + jobList.length + " jobs and " + transList.length + " transformations references", e );
    }
  }

  /** Deletes all extra attributes (R_DATABASE_ATTRIBUTE rows) of a connection. */
  public synchronized void delDatabaseAttributes( ObjectId id_database ) throws KettleException {
    repository.connectionDelegate.performDelete( "DELETE FROM "
      + quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE ) + " WHERE "
      + quote( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE ) + " = ? ", id_database );
  }

  /** Counts all database connections stored in the repository. */
  public synchronized int getNrDatabases() throws KettleException {
    int retval = 0;

    String sql = "SELECT COUNT(*) FROM " + quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE );
    RowMetaAndData r = repository.connectionDelegate.getOneRow( sql );
    if ( r != null ) {
      retval = (int) r.getInteger( 0, 0L );
    }

    return retval;
  }

  /** Counts the step-database references of one transformation. */
  public synchronized int getNrDatabases( ObjectId id_transformation ) throws KettleException {
    int retval = 0;

    RowMetaAndData transIdRow = repository.connectionDelegate.getParameterMetaData( id_transformation );
    String sql = "SELECT COUNT(*) FROM " + quoteTable( KettleDatabaseRepository.TABLE_R_STEP_DATABASE )
      + " WHERE " + quote( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION ) + " = ? ";
    RowMetaAndData r =
      repository.connectionDelegate.getOneRow( sql, transIdRow.getRowMeta(), transIdRow.getData() );
    if ( r != null ) {
      retval = (int) r.getInteger( 0, 0L );
    }

    return retval;
  }

  /** Counts the extra attributes of one connection. */
  public synchronized int getNrDatabaseAttributes( ObjectId id_database ) throws KettleException {
    int retval = 0;

    // NOTE(review): the id is concatenated into the SQL rather than bound as a
    // parameter; acceptable only because ObjectId renders as a numeric repository id.
    String sql = "SELECT COUNT(*) FROM " + quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE )
      + " WHERE " + quote( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE ) + " = " + id_database;
    RowMetaAndData r = repository.connectionDelegate.getOneRow( sql );
    if ( r != null ) {
      retval = (int) r.getInteger( 0, 0L );
    }

    return retval;
  }

  /** Builds one R_DATABASE_ATTRIBUTE row (with a freshly allocated attribute id). */
  private RowMetaAndData createAttributeRow( ObjectId idDatabase, String code, String strValue ) throws KettleException {
    ObjectId id = repository.connectionDelegate.getNextDatabaseAttributeID();

    RowMetaAndData table = new RowMetaAndData();
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER ), id );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER ), idDatabase );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING ), code );
    table.addValue( new ValueMeta(
      KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING ), strValue );

    return table;
  }

  /** Batch-inserts all given properties as R_DATABASE_ATTRIBUTE rows of one connection. */
  private void insertDatabaseAttributes( ObjectId idDatabase, Properties properties ) throws KettleException {
    if ( properties.isEmpty() ) {
      return;
    }
    Database db = repository.connectionDelegate.getDatabase();
    boolean firstAttribute = true;
    Enumeration<Object> keys = properties.keys();
    while ( keys.hasMoreElements() ) {
      String code = (String) keys.nextElement();
      String attribute = (String) properties.get( code );

      RowMetaAndData attributeData = createAttributeRow( idDatabase, code, attribute );
      if ( firstAttribute ) {
        // The statement is prepared once; subsequent rows are added to the batch.
        db.prepareInsert( attributeData.getRowMeta(), KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE );
        firstAttribute = false;
      }
      db.setValuesInsert( attributeData );
      db.insertRow( db.getPrepStatementInsert(), true, false );
    }
    db.executeAndClearBatch( db.getPrepStatementInsert() );
    db.closeInsert();
  }
}
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2011 Eric Lafortune (eric@graphics.cornell.edu) * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.optimize; import proguard.classfile.*; import proguard.classfile.attribute.visitor.AttributeVisitor; import proguard.classfile.attribute.*; import proguard.classfile.attribute.annotation.*; import proguard.classfile.editor.ConstantPoolEditor; import proguard.classfile.util.*; import proguard.classfile.visitor.MemberVisitor; /** * This MemberVisitor adds an additional parameter to the duplicate * initialization methods that it visits. */ public class DuplicateInitializerFixer extends SimplifiedVisitor implements MemberVisitor, AttributeVisitor { private static final boolean DEBUG = false; private static final char[] TYPES = new char[] { ClassConstants.INTERNAL_TYPE_BYTE, ClassConstants.INTERNAL_TYPE_CHAR, ClassConstants.INTERNAL_TYPE_SHORT, ClassConstants.INTERNAL_TYPE_INT, ClassConstants.INTERNAL_TYPE_BOOLEAN }; private final MemberVisitor extraFixedInitializerVisitor; /** * Creates a new DuplicateInitializerFixer. */ public DuplicateInitializerFixer() { this(null); } /** * Creates a new DuplicateInitializerFixer with an extra visitor. 
 * @param extraFixedInitializerVisitor an optional extra visitor for all
 *                                     initializers that have been fixed.
 */
public DuplicateInitializerFixer(MemberVisitor extraFixedInitializerVisitor)
{
    this.extraFixedInitializerVisitor = extraFixedInitializerVisitor;
}


// Implementations for MemberVisitor.

/**
 * Checks whether the given method is an instance initializer whose
 * descriptor collides with another initializer in the same class, and if
 * so rewrites its descriptor by appending an extra dummy argument type
 * until the descriptor is unique within the class.
 */
public void visitProgramMethod(ProgramClass programClass, ProgramMethod programMethod)
{
    // Is it a class instance initializer?
    String name = programMethod.getName(programClass);
    if (name.equals(ClassConstants.INTERNAL_METHOD_NAME_INIT))
    {
        // Is there already another initializer with the same descriptor?
        // findMethod returns the first match, so a different object here
        // means this method is a duplicate.
        String descriptor    = programMethod.getDescriptor(programClass);
        Method similarMethod = programClass.findMethod(name, descriptor);
        if (!programMethod.equals(similarMethod))
        {
            // Should this initializer be preserved?
            if (KeepMarker.isKept(programMethod))
            {
                // Fix the other initializer instead, so the kept one keeps
                // its original descriptor.
                programMethod = (ProgramMethod)similarMethod;
            }

            // Position of the ')' in the method descriptor; the dummy
            // argument is inserted right before it.
            int index = descriptor.indexOf(ClassConstants.INTERNAL_METHOD_ARGUMENTS_CLOSE);

            // Try to find a new, unique descriptor.
            int typeCounter = 0;
            while (true)
            {
                // Construct the new descriptor by inserting a new type
                // as an additional last argument. Cycles through TYPES,
                // adding array dimensions once all plain types are used.
                StringBuffer newDescriptorBuffer =
                    new StringBuffer(descriptor.substring(0, index));

                for (int arrayDimension = 0; arrayDimension < typeCounter / TYPES.length; arrayDimension++)
                {
                    newDescriptorBuffer.append(ClassConstants.INTERNAL_TYPE_ARRAY);
                }

                newDescriptorBuffer.append(TYPES[typeCounter % TYPES.length]);
                newDescriptorBuffer.append(descriptor.substring(index));

                String newDescriptor = newDescriptorBuffer.toString();

                // Is the new initializer descriptor unique?
                if (programClass.findMethod(name, newDescriptor) == null)
                {
                    if (DEBUG)
                    {
                        System.out.println("DuplicateInitializerFixer:");
                        System.out.println("  ["+programClass.getName()+"."+name+descriptor+"] ("+ClassUtil.externalClassAccessFlags(programMethod.getAccessFlags())+") -> ["+newDescriptor+"]");
                    }

                    // Update the descriptor.
                    programMethod.u2descriptorIndex =
                        new ConstantPoolEditor(programClass).addUtf8Constant(newDescriptor);

                    // Fix the local variable frame size, the method
                    // signature, and the parameter annotations, if
                    // necessary (handled by the AttributeVisitor
                    // implementations below).
                    programMethod.attributesAccept(programClass, this);

                    // Visit the initializer, if required.
                    if (extraFixedInitializerVisitor != null)
                    {
                        extraFixedInitializerVisitor.visitProgramMethod(programClass, programMethod);
                    }

                    // We're done with this constructor.
                    return;
                }

                typeCounter++;
            }
        }
    }
}


// Implementations for AttributeVisitor.

public void visitAnyAttribute(Clazz clazz, Attribute attribute) {}


/**
 * Grows the local variable frame if the widened descriptor now requires
 * more parameter slots than the code attribute declares.
 */
public void visitCodeAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute)
{
    // The minimum variable size is determined by the arguments.
    int maxLocals =
        ClassUtil.internalMethodParameterSize(method.getDescriptor(clazz),
                                              method.getAccessFlags());

    if (codeAttribute.u2maxLocals < maxLocals)
    {
        codeAttribute.u2maxLocals = maxLocals;
    }
}


/**
 * Inserts the added dummy argument type into the generic signature, so the
 * signature stays consistent with the rewritten descriptor.
 */
public void visitSignatureAttribute(Clazz clazz, Method method, SignatureAttribute signatureAttribute)
{
    String descriptor      = method.getDescriptor(clazz);
    int    descriptorIndex = descriptor.indexOf(ClassConstants.INTERNAL_METHOD_ARGUMENTS_CLOSE);

    String signature      = clazz.getString(signatureAttribute.u2signatureIndex);
    int    signatureIndex = signature.indexOf(ClassConstants.INTERNAL_METHOD_ARGUMENTS_CLOSE);

    // Copy the last argument type of the descriptor (the inserted dummy,
    // always a single-character primitive type here) in front of the ')'
    // of the signature.
    String newSignature = signature.substring(0, signatureIndex) +
                          descriptor.charAt(descriptorIndex - 1) +
                          signature.substring(signatureIndex);

    // Update the signature.
    signatureAttribute.u2signatureIndex =
        new ConstantPoolEditor((ProgramClass)clazz).addUtf8Constant(newSignature);
}


/**
 * Bumps the parameter count of the annotations attribute and, when needed,
 * grows the per-parameter annotation arrays to match.
 */
public void visitAnyParameterAnnotationsAttribute(Clazz clazz, Method method, ParameterAnnotationsAttribute parameterAnnotationsAttribute)
{
    // Update the number of parameters.
    int oldParametersCount = parameterAnnotationsAttribute.u2parametersCount++;

    // NOTE(review): if u2parameterAnnotationsCount is null the arraycopy
    // below would throw a NullPointerException — presumably the attribute
    // always carries non-null arrays in practice; confirm upstream.
    if (parameterAnnotationsAttribute.u2parameterAnnotationsCount == null ||
        parameterAnnotationsAttribute.u2parameterAnnotationsCount.length < parameterAnnotationsAttribute.u2parametersCount)
    {
        int[]          annotationsCounts = new int[parameterAnnotationsAttribute.u2parametersCount];
        Annotation[][] annotations       = new Annotation[parameterAnnotationsAttribute.u2parametersCount][];

        // Copy the existing entries; the new last parameter's slot stays
        // at its default (0 annotations / null).
        System.arraycopy(parameterAnnotationsAttribute.u2parameterAnnotationsCount,
                         0,
                         annotationsCounts,
                         0,
                         oldParametersCount);

        System.arraycopy(parameterAnnotationsAttribute.parameterAnnotations,
                         0,
                         annotations,
                         0,
                         oldParametersCount);

        parameterAnnotationsAttribute.u2parameterAnnotationsCount = annotationsCounts;
        parameterAnnotationsAttribute.parameterAnnotations        = annotations;
    }
}
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.source.xml;

import com.intellij.ide.highlighter.DTDFileType;
import com.intellij.openapi.util.Key;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.reference.ReferenceProvidersRegistry;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.*;
import com.intellij.util.ArrayUtil;
import com.intellij.xml.XmlElementDescriptor;
import com.intellij.xml.impl.schema.AnyXmlElementDescriptor;
import com.intellij.xml.util.XmlUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * PSI implementation of an XML entity reference (e.g. {@code &name;}).
 * Resolves references to their {@link XmlEntityDecl}, caching results per
 * target element.
 *
 * @author mike
 */
public class XmlEntityRefImpl extends XmlElementImpl implements XmlEntityRef {
  // Built-in entities that never resolve to a declaration.
  @NonNls private static final String GT_ENTITY = "&gt;";
  @NonNls private static final String QUOT_ENTITY = "&quot;";

  public XmlEntityRefImpl() {
    super(XmlElementType.XML_ENTITY_REF);
  }

  // Marker key used as a re-entrancy guard while a resolve is in progress.
  private static final Key<String> EVALUATION_IN_PROCESS = Key.create("EvalKey");

  /**
   * Resolves this entity reference against the given file. Returns null for
   * the built-in &gt; and &quot; entities.
   */
  public XmlEntityDecl resolve(PsiFile targetFile) {
    String text = getText();
    if (text.equals(GT_ENTITY) || text.equals(QUOT_ENTITY)) return null;
    return resolveEntity(this, text, targetFile);
  }

  /**
   * Resolves the entity named by {@code text} (including the surrounding
   * '&amp;' and ';', which are stripped here). Results are cached per target
   * element under a lock; the cache is bypassed when no PsiManager is
   * available.
   */
  public static XmlEntityDecl resolveEntity(final XmlElement element, final String text, PsiFile targetFile) {
    // Entity expansion can be disabled per document; honor that first.
    if (targetFile instanceof XmlFile) {
      XmlDocument document = ((XmlFile)targetFile).getDocument();
      if (document != null && document.getUserData(DISABLE_ENTITY_EXPAND) != null) return null;
    }

    // Strip the leading '&' and trailing ';'.
    final String entityName = text.substring(1, text.length() - 1);
    final PsiElement targetElement = targetFile != null ? targetFile : element;

    CachedValue<XmlEntityDecl> value;
    synchronized(PsiLock.LOCK) {
      Map<String, CachedValue<XmlEntityDecl>> map = XmlEntityCache.getCachingMap(targetElement);

      value = map.get(entityName);
      final PsiFile containingFile = element.getContainingFile();

      if (value == null) {
        final PsiManager manager = element.getManager();
        if(manager == null){
          // No manager: compute eagerly without caching.
          return resolveEntity(targetElement, entityName, containingFile).getValue();
        }
        value = CachedValuesManager.getManager(manager.getProject()).createCachedValue(new CachedValueProvider<XmlEntityDecl>() {
          public Result<XmlEntityDecl> compute() {
            return resolveEntity(targetElement, entityName, containingFile);
          }
        });

        map.put(entityName, value);
      }
    }

    return value.getValue();
  }

  private static final Key<Boolean> DISABLE_ENTITY_EXPAND = Key.create("disable.entity.expand");

  /**
   * Does the actual lookup: walks the target element (and, if needed, the
   * context file and the root tag's descriptor file) looking for an
   * XmlEntityDecl with the requested name. Returns the declaration plus the
   * dependency elements the cached value should be invalidated on.
   */
  private static CachedValueProvider.Result<XmlEntityDecl> resolveEntity(final PsiElement targetElement,
                                                                         final String entityName,
                                                                         PsiFile contextFile) {
    // Re-entrancy guard: a recursive resolve of the same target yields null.
    if (targetElement.getUserData(EVALUATION_IN_PROCESS) != null) {
      return new CachedValueProvider.Result<XmlEntityDecl>(null,targetElement);
    }
    try {
      targetElement.putUserData(EVALUATION_IN_PROCESS, "");

      final List<PsiElement> deps = new ArrayList<PsiElement>();
      final XmlEntityDecl[] result = new XmlEntityDecl[]{null};

      // Visitor that follows doctype/DTD references and stops as soon as a
      // matching entity declaration is found (returns false to stop).
      PsiElementProcessor processor = new PsiElementProcessor() {
        public boolean execute(@NotNull PsiElement element) {
          if (element instanceof XmlDoctype) {
            XmlDoctype xmlDoctype = (XmlDoctype)element;
            final String dtdUri = XmlUtil.getDtdUri(xmlDoctype);
            if (dtdUri != null) {
              XmlFile file = XmlUtil.getContainingFile(element);
              if (file == null) return true;
              final XmlFile xmlFile = XmlUtil.findNamespace(file, dtdUri);
              if (xmlFile != null) {
                if (xmlFile != targetElement) {
                  deps.add(xmlFile);
                  if(!XmlUtil.processXmlElements(xmlFile, this,true)) return false;
                }
              }
            }
            final XmlMarkupDecl markupDecl = xmlDoctype.getMarkupDecl();
            if (markupDecl != null) {
              if (!XmlUtil.processXmlElements(markupDecl, this, true)) return false;
            }
          }
          else if (element instanceof XmlEntityDecl) {
            XmlEntityDecl entityDecl = (XmlEntityDecl)element;
            final String declName = entityDecl.getName();
            if (declName.equals(entityName)) {
              result[0] = entityDecl;
              return false;
            }
          }

          return true;
        }
      };

      FileViewProvider provider = targetElement.getContainingFile().getViewProvider();
      deps.add(provider.getPsi(provider.getBaseLanguage()));

      // First pass: the target element itself. "notfound" stays true while
      // the processor ran to completion without locating the declaration.
      boolean notfound = PsiTreeUtil.processElements(targetElement, processor);
      if (notfound) {
        if (contextFile != targetElement && contextFile != null && contextFile.isValid()) {
          notfound = PsiTreeUtil.processElements(contextFile, processor);
        }
      }

      if (notfound &&       // no dtd ref at all
          targetElement instanceof XmlFile &&
          deps.size() == 1 &&
          ((XmlFile)targetElement).getFileType() != DTDFileType.INSTANCE
         ) {
        // Last resort: look in the file that declares the root tag's
        // descriptor, unless it is the file's own implicit ".dtd".
        XmlDocument document = ((XmlFile)targetElement).getDocument();
        final XmlTag rootTag = document.getRootTag();

        if (rootTag != null && document.getUserData(DISABLE_ENTITY_EXPAND) == null) {
          final XmlElementDescriptor descriptor = rootTag.getDescriptor();

            if (descriptor != null && !(descriptor instanceof AnyXmlElementDescriptor)) {
              PsiElement element = descriptor.getDeclaration();
              final PsiFile containingFile = element != null ? element.getContainingFile():null;
              final XmlFile descriptorFile = containingFile instanceof XmlFile ? (XmlFile)containingFile:null;

              if (descriptorFile != null &&
                  !descriptorFile.getName().equals(((XmlFile)targetElement).getName()+".dtd")) {
                deps.add(descriptorFile);
                XmlUtil.processXmlElements(
                  descriptorFile,
                  processor,
                  true
                );
              }
            }
        }
      }

      return new CachedValueProvider.Result<XmlEntityDecl>(result[0], ArrayUtil.toObjectArray(deps));
    }
    finally {
      // Always clear the re-entrancy marker.
      targetElement.putUserData(EVALUATION_IN_PROCESS, null);
    }
  }

  /** Returns the parent tag, or null if the parent is not an XmlTag. */
  public XmlTag getParentTag() {
    final XmlElement parent = (XmlElement)getParent();
    if(parent instanceof XmlTag) return (XmlTag)parent;
    return null;
  }

  /** Returns the next sibling if it is a tag child, otherwise null. */
  public XmlTagChild getNextSiblingInTag() {
    PsiElement nextSibling = getNextSibling();
    if(nextSibling instanceof XmlTagChild) return (XmlTagChild)nextSibling;
    return null;
  }

  /** Returns the previous sibling if it is a tag child, otherwise null. */
  public XmlTagChild getPrevSiblingInTag() {
    final PsiElement prevSibling = getPrevSibling();
    if(prevSibling instanceof XmlTagChild) return (XmlTagChild)prevSibling;
    return null;
  }

  @NotNull
  public PsiReference[] getReferences() {
    return ReferenceProvidersRegistry.getReferencesFromProviders(this,XmlEntityRef.class);
  }

  public void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof XmlElementVisitor) {
      ((XmlElementVisitor)visitor).visitXmlElement(this);
    }
    else {
      visitor.visitElement(this);
    }
  }

  /**
   * Enables/disables entity expansion for the given document via the
   * DISABLE_ENTITY_EXPAND user-data key checked by resolveEntity.
   */
  public static void setNoEntityExpandOutOfDocument(XmlDocument doc, boolean b) {
    if (b) doc.putUserData(DISABLE_ENTITY_EXPAND, Boolean.TRUE);
    else doc.putUserData(DISABLE_ENTITY_EXPAND, null);
  }
}
package org.apache.commons.jcs.auxiliary.remote;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import junit.framework.TestCase;
import org.apache.commons.jcs.auxiliary.MockCacheEventLogger;
import org.apache.commons.jcs.auxiliary.remote.behavior.IRemoteCacheAttributes;
import org.apache.commons.jcs.engine.CacheElement;
import org.apache.commons.jcs.engine.ZombieCacheServiceNonLocal;
import org.apache.commons.jcs.engine.behavior.ICacheElement;
import org.apache.commons.jcs.engine.behavior.ICacheElementSerialized;
import org.apache.commons.jcs.utils.serialization.SerializationConversionUtil;

import java.util.HashSet;
import java.util.Map;

/**
 * Unit Tests for the Remote Cache.
 */
public class RemoteCacheUnitTest
    extends TestCase
{
    /**
     * Verify that the remote service update method is called. The remote cache serializes the object
     * first.
* <p> * @throws Exception */ public void testUpdate() throws Exception { // SETUP long listenerId = 123; IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); listener.setListenerId( listenerId ); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); String cacheName = "testUpdate"; // DO WORK ICacheElement<String, String> element = new CacheElement<String, String>( cacheName, "key", "value" ); remoteCache.update( element ); // VERIFY assertTrue( "The element should be in the serialized wrapper.", service.lastUpdate instanceof ICacheElementSerialized ); ICacheElement<String, String> result = SerializationConversionUtil .getDeSerializedCacheElement( (ICacheElementSerialized<String, String>) service.lastUpdate, remoteCache .getElementSerializer() ); assertEquals( "Wrong element updated.", element.getVal(), result.getVal() ); assertEquals( "Wrong listener id.", Long.valueOf( listenerId ), service.updateRequestIdList.get( 0 ) ); } /** * Verify that when we call fix events queued in the zombie are propagated to the new service. 
* <p> * @throws Exception */ public void testUpdateZombieThenFix() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); ZombieCacheServiceNonLocal<String, String> zombie = new ZombieCacheServiceNonLocal<String, String>( 10 ); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); // set the zombie RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, zombie, listener ); String cacheName = "testUpdate"; // DO WORK ICacheElement<String, String> element = new CacheElement<String, String>( cacheName, "key", "value" ); remoteCache.update( element ); // set the new service, this should call propagate remoteCache.fixCache( service ); // VERIFY assertTrue( "The element should be in the serialized warapper.", service.lastUpdate instanceof ICacheElementSerialized ); ICacheElement<String, String> result = SerializationConversionUtil .getDeSerializedCacheElement( (ICacheElementSerialized<String, String>) service.lastUpdate, remoteCache .getElementSerializer() ); assertEquals( "Wrong element updated.", element.getVal(), result.getVal() ); } /** * Verify event log calls. 
* <p> * @throws Exception */ public void testUpdate_simple() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); ICacheElement<String, String> item = new CacheElement<String, String>( "region", "key", "value" ); // DO WORK remoteCache.update( item ); // VERIFY assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } /** * Verify event log calls. * <p> * @throws Exception */ public void testGet_simple() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); // DO WORK remoteCache.get( "key" ); // VERIFY assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } /** * Verify event log calls. 
* <p> * @throws Exception */ public void testGetMultiple_simple() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); // DO WORK remoteCache.getMultiple( new HashSet<String>() ); // VERIFY assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } /** * Verify event log calls. * <p> * @throws Exception */ public void testRemove_simple() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); // DO WORK remoteCache.remove( "key" ); // VERIFY assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } /** * Verify event log calls. 
* <p> * @throws Exception */ public void testRemoveAll_simple() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); // DO WORK remoteCache.remove( "key" ); // VERIFY assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } /** * Verify event log calls. * <p> * @throws Exception */ public void testGetMatching_simple() throws Exception { // SETUP String pattern = "adsfasdfasd.?"; IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); // DO WORK Map<String, ICacheElement<String, String>> result = remoteCache.getMatching( pattern ); // VERIFY assertNotNull( "Should have a map", result ); assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } /** * Verify event log calls. 
* <p> * @throws Exception */ public void testDispose_simple() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = new MockRemoteCacheListener<String, String>(); RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); // DO WORK remoteCache.dispose( ); // VERIFY assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } /** * Verify that there is no problem if there is no listener. * <p> * @throws Exception */ public void testDispose_nullListener() throws Exception { // SETUP IRemoteCacheAttributes cattr = new RemoteCacheAttributes(); MockRemoteCacheService<String, String> service = new MockRemoteCacheService<String, String>(); MockRemoteCacheListener<String, String> listener = null; RemoteCache<String, String> remoteCache = new RemoteCache<String, String>( cattr, service, listener ); MockCacheEventLogger cacheEventLogger = new MockCacheEventLogger(); remoteCache.setCacheEventLogger( cacheEventLogger ); // DO WORK remoteCache.dispose( ); // VERIFY assertEquals( "Start should have been called.", 1, cacheEventLogger.startICacheEventCalls ); assertEquals( "End should have been called.", 1, cacheEventLogger.endICacheEventCalls ); } }
package com.compomics.util.experiment.identification.spectrum_annotation.simple_annotators;

import com.compomics.util.experiment.biology.aminoacids.AminoAcid;
import com.compomics.util.experiment.biology.ions.Ion;
import com.compomics.util.experiment.biology.ions.NeutralLoss;
import com.compomics.util.experiment.biology.modifications.Modification;
import com.compomics.util.experiment.biology.modifications.ModificationFactory;
import com.compomics.util.experiment.biology.proteins.Peptide;
import com.compomics.util.experiment.mass_spectrometry.utils.StandardMasses;
import com.compomics.util.experiment.biology.ions.impl.ElementaryIon;
import com.compomics.util.experiment.biology.ions.impl.PeptideFragmentIon;
import com.compomics.util.experiment.identification.matches.IonMatch;
import com.compomics.util.experiment.identification.matches.ModificationMatch;
import com.compomics.util.experiment.identification.spectrum_annotation.spectrum_annotators.SimplePeptideAnnotator.IonSeries;
import com.compomics.util.experiment.identification.utils.ModificationUtils;
import com.compomics.util.experiment.mass_spectrometry.spectra.Peak;
import com.compomics.util.experiment.mass_spectrometry.indexes.SpectrumIndex;
import java.util.ArrayList;
import java.util.HashMap;

/**
 * A fragment annotator for peptide fragment ions with neutral losses.
 *
 * @author Marc Vaudel
 */
public class FragmentAnnotatorNL {

    /**
     * Empty default constructor. Leaves all final members null/zero; an
     * instance built this way is not usable for annotation.
     */
    public FragmentAnnotatorNL() {
        forwardIonMz1 = null;
        complementaryIonMz1 = null;
        peptideLength = 0;
        forwardIonType = 0;
        complementaryIonType = 0;
        forwardNeutralLossesMasses = null;
        forwardNeutralLosses = null;
        complementaryNeutralLossesMasses = null;
        complementaryNeutralLosses = null;
    }

    /**
     * The modifications factory.
     */
    private final ModificationFactory modificationFactory = ModificationFactory.getInstance();

    /**
     * Array of the forward ion m/z with charge 1.
*/ private final double[] forwardIonMz1; /** * Array of the complementary ion m/z with charge 1. */ private final double[] complementaryIonMz1; /** * Length of the peptide sequence. */ private final int peptideLength; /** * The type of forward ion annotated. */ private final int forwardIonType; /** * The type of forward ion annotated. */ private final int complementaryIonType; /** * Array of the masses that can be lost by every forward fragment ion. */ private final ArrayList<double[]> forwardNeutralLossesMasses; /** * List of the neutral losses that can be lost by every forward fragment * ion. */ private final ArrayList<ArrayList<NeutralLoss>> forwardNeutralLosses; /** * Array of the masses that can be lost by every complementary fragment ion. */ private final ArrayList<double[]> complementaryNeutralLossesMasses; /** * List of the neutral losses that can be lost by every complementary * fragment ion. */ private final ArrayList<ArrayList<NeutralLoss>> complementaryNeutralLosses; /** * Constructor. Fixed modifications must be indexed as provided by the * peptide class. * * @param peptide the peptide * @param fixedModifications the fixed modifications of the peptide * @param ionSeries the ion series to annotate * @param sequenceDependent boolean indicating whether the H2O and NH3 * losses should be adapted to the sequence */ public FragmentAnnotatorNL( Peptide peptide, String[] fixedModifications, IonSeries ionSeries, boolean sequenceDependent ) { char[] aas = peptide.getSequence().toCharArray(); peptideLength = aas.length; forwardIonMz1 = new double[peptideLength]; complementaryIonMz1 = new double[peptideLength]; // Get the sequence neutral losses to inspect NeutralLoss[] losses = new NeutralLoss[]{NeutralLoss.H2O, NeutralLoss.NH3}; int[][] lossesIndexes = new int[2][2]; int[] sequenceLossesIndexes = sequenceDependent ? 
getLossesIndexes(aas) : new int[]{0, 0, peptideLength, peptideLength}; lossesIndexes[0][0] = sequenceLossesIndexes[0]; lossesIndexes[0][1] = sequenceLossesIndexes[2]; lossesIndexes[1][0] = sequenceLossesIndexes[1]; lossesIndexes[1][1] = sequenceLossesIndexes[3]; // See if the peptide is modified double[] modificationsMasses = new double[peptideLength]; // Keep track of the modified amino acids and possible losses HashMap<String, int[]> modificationLossesSites = new HashMap<>(1); for (int i = 0; i < fixedModifications.length; i++) { String modName = fixedModifications[i]; if (modName != null) { Modification modification = modificationFactory.getModification(modName); int site = ModificationUtils.getSite(i, peptideLength) - 1; modificationsMasses[site] += modification.getMass(); for (NeutralLoss neutralLoss : modification.getNeutralLosses()) { int[] sites = modificationLossesSites.get(neutralLoss.name); if (sites == null) { sites = new int[]{site, site}; modificationLossesSites.put(neutralLoss.name, sites); } else { if (site < sites[0]) { sites[0] = site; } else if (site > sites[1]) { sites[1] = site; } } } } } ModificationMatch[] modificationMatches = peptide.getVariableModifications(); for (ModificationMatch modificationMatch : modificationMatches) { String modificationName = modificationMatch.getModification(); Modification modification = modificationFactory.getModification(modificationName); double modificationMass = modification.getMass(); int i = modificationMatch.getSite(); int site; if (i > 0 && i < peptideLength + 1) { site = i - 1; } else if (i == 0) { site = i; } else { site = i - 2; } modificationsMasses[site] += modificationMass; for (NeutralLoss neutralLoss : modification.getNeutralLosses()) { int[] sites = modificationLossesSites.get(neutralLoss.name); if (sites == null) { sites = new int[]{site, site}; modificationLossesSites.put(neutralLoss.name, sites); } else { if (site < sites[0]) { sites[0] = site; } else if (site > sites[1]) { sites[1] = site; } 
} } } if (!modificationLossesSites.isEmpty()) { int[][] newIndexes = new int[lossesIndexes.length + modificationLossesSites.size()][2]; System.arraycopy(lossesIndexes, 0, newIndexes, 0, lossesIndexes.length); NeutralLoss[] newLosses = new NeutralLoss[losses.length + modificationLossesSites.size()]; System.arraycopy(losses, 0, newLosses, 0, losses.length); int index = lossesIndexes.length; for (String lossName : modificationLossesSites.keySet()) { int[] sites = modificationLossesSites.get(lossName); newIndexes[index][1] = sites[0]; newIndexes[index][1] = sites[1]; newLosses[index] = NeutralLoss.getNeutralLoss(lossName); index++; } lossesIndexes = newIndexes; losses = newLosses; } double forwardMass; double complementaryMass; if (ionSeries == IonSeries.by) { forwardMass = ElementaryIon.proton.getTheoreticMass(); complementaryMass = peptide.getMass() + ElementaryIon.protonMassMultiples[2]; forwardIonType = PeptideFragmentIon.B_ION; complementaryIonType = PeptideFragmentIon.Y_ION; } else if (ionSeries == IonSeries.cz) { forwardMass = ElementaryIon.proton.getTheoreticMass() + StandardMasses.nh3.mass; complementaryMass = peptide.getMass() + ElementaryIon.protonMassMultiples[2] - StandardMasses.nh3.mass; forwardIonType = PeptideFragmentIon.C_ION; complementaryIonType = PeptideFragmentIon.Z_ION; } else if (ionSeries == IonSeries.ax) { forwardMass = ElementaryIon.proton.getTheoreticMass() - StandardMasses.co.mass; complementaryMass = peptide.getMass() + ElementaryIon.protonMassMultiples[2] + StandardMasses.co.mass; forwardIonType = PeptideFragmentIon.A_ION; complementaryIonType = PeptideFragmentIon.X_ION; } else { throw new UnsupportedOperationException("Ion series " + ionSeries + " not supported."); } forwardNeutralLossesMasses = new ArrayList<>(peptideLength); forwardNeutralLosses = new ArrayList<>(peptideLength); complementaryNeutralLossesMasses = new ArrayList<>(peptideLength); complementaryNeutralLosses = new ArrayList<>(peptideLength); for (int i = 0; i < 
peptideLength; i++) { char aa = aas[i]; AminoAcid aminoAcid = AminoAcid.getAminoAcid(aa); forwardMass += aminoAcid.getMonoisotopicMass(); forwardMass += modificationsMasses[i]; forwardIonMz1[i] = forwardMass; complementaryIonMz1[i] = complementaryMass - forwardMass; ArrayList<NeutralLoss> forwardIonlosses = new ArrayList<>(lossesIndexes.length); ArrayList<NeutralLoss> complementaryIonlosses = new ArrayList<>(lossesIndexes.length); double[] forwardMasses = new double[lossesIndexes.length]; double[] complementaryMasses = new double[lossesIndexes.length]; int indexForward = 0; int indexComplementary = 0; for (int j = 0; j < lossesIndexes.length; j++) { int[] indexes = lossesIndexes[j]; NeutralLoss neutralLoss = losses[j]; if (i >= indexes[0]) { forwardIonlosses.add(neutralLoss); forwardMasses[indexForward++] = neutralLoss.getMass(); } if (i <= indexes[j]) { complementaryIonlosses.add(neutralLoss); complementaryMasses[indexComplementary++] = neutralLoss.getMass(); } } forwardNeutralLossesMasses.add(forwardMasses); forwardNeutralLosses.add(forwardIonlosses); complementaryNeutralLossesMasses.add(complementaryMasses); complementaryNeutralLosses.add(complementaryIonlosses); } } /** * Constructor. Fixed modifications must be indexed as provided by the * peptide class. 
* * @param peptide the peptide * @param fixedModifications the fixed modifications * @param ionSeries the ion series to annotate * @param sequenceDependent boolean indicating whether the H2O and NH3 * losses should be adapted to the sequence * @param forward boolean indicating whether forward ions should be * annotated * @param complementary boolean indicating whether complementary ions should * be annotated */ public FragmentAnnotatorNL( Peptide peptide, String[] fixedModifications, IonSeries ionSeries, boolean sequenceDependent, boolean forward, boolean complementary ) { char[] aas = peptide.getSequence().toCharArray(); peptideLength = aas.length; forwardIonMz1 = new double[peptideLength]; complementaryIonMz1 = new double[peptideLength]; // Get the sequence neutral losses to inspect NeutralLoss[] losses = new NeutralLoss[]{NeutralLoss.H2O, NeutralLoss.NH3}; int[][] lossesIndexes = new int[2][2]; int[] sequenceLossesIndexes = sequenceDependent ? getLossesIndexes(aas) : new int[]{0, 0, peptideLength, peptideLength}; lossesIndexes[0][0] = sequenceLossesIndexes[0]; lossesIndexes[0][1] = sequenceLossesIndexes[2]; lossesIndexes[1][0] = sequenceLossesIndexes[1]; lossesIndexes[1][1] = sequenceLossesIndexes[3]; // See if the peptide is modified double[] modificationsMasses = new double[peptideLength]; // Keep track of the modified amino acids and possible losses HashMap<String, int[]> modificationLossesSites = new HashMap<>(1); for (int i = 0; i < fixedModifications.length; i++) { String modName = fixedModifications[i]; if (modName != null) { Modification modification = modificationFactory.getModification(modName); int site = ModificationUtils.getSite(i, peptideLength) - 1; modificationsMasses[site] += modification.getMass(); for (NeutralLoss neutralLoss : modification.getNeutralLosses()) { int[] sites = modificationLossesSites.get(neutralLoss.name); if (sites == null) { sites = new int[]{site, site}; modificationLossesSites.put(neutralLoss.name, sites); } else { if (site 
< sites[0]) { sites[0] = site; } else if (site > sites[1]) { sites[1] = site; } } } } } ModificationMatch[] modificationMatches = peptide.getVariableModifications(); for (ModificationMatch modificationMatch : modificationMatches) { String modificationName = modificationMatch.getModification(); Modification modification = modificationFactory.getModification(modificationName); double modificationMass = modification.getMass(); int i = modificationMatch.getSite(); int site; if (i > 0 && i < peptideLength + 1) { site = i - 1; } else if (i == 0) { site = i; } else { site = i - 2; } modificationsMasses[site] += modificationMass; for (NeutralLoss neutralLoss : modification.getNeutralLosses()) { int[] sites = modificationLossesSites.get(neutralLoss.name); if (sites == null) { sites = new int[]{site, site}; modificationLossesSites.put(neutralLoss.name, sites); } else { if (site < sites[0]) { sites[0] = site; } else if (site > sites[1]) { sites[1] = site; } } } } if (!modificationLossesSites.isEmpty()) { int[][] newIndexes = new int[lossesIndexes.length + modificationLossesSites.size()][2]; System.arraycopy(lossesIndexes, 0, newIndexes, 0, lossesIndexes.length); NeutralLoss[] newLosses = new NeutralLoss[losses.length + modificationLossesSites.size()]; System.arraycopy(losses, 0, newLosses, 0, losses.length); int index = lossesIndexes.length; for (String lossName : modificationLossesSites.keySet()) { int[] sites = modificationLossesSites.get(lossName); newIndexes[index][1] = sites[0]; newIndexes[index][1] = sites[1]; newLosses[index] = NeutralLoss.getNeutralLoss(lossName); index++; } lossesIndexes = newIndexes; losses = newLosses; } double forwardMass; double complementaryMass; if (ionSeries == IonSeries.by) { forwardMass = ElementaryIon.proton.getTheoreticMass(); complementaryMass = peptide.getMass() + ElementaryIon.protonMassMultiples[2]; forwardIonType = PeptideFragmentIon.B_ION; complementaryIonType = PeptideFragmentIon.Y_ION; } else if (ionSeries == IonSeries.cz) { 
forwardMass = ElementaryIon.proton.getTheoreticMass() + StandardMasses.nh3.mass; complementaryMass = peptide.getMass() + ElementaryIon.protonMassMultiples[2] - StandardMasses.nh3.mass; forwardIonType = PeptideFragmentIon.C_ION; complementaryIonType = PeptideFragmentIon.Z_ION; } else if (ionSeries == IonSeries.ax) { forwardMass = ElementaryIon.proton.getTheoreticMass() - StandardMasses.co.mass; complementaryMass = peptide.getMass() + ElementaryIon.protonMassMultiples[2] + StandardMasses.co.mass; forwardIonType = PeptideFragmentIon.A_ION; complementaryIonType = PeptideFragmentIon.X_ION; } else { throw new UnsupportedOperationException("Ion series " + ionSeries + " not supported."); } if (forward) { forwardNeutralLossesMasses = new ArrayList<>(peptideLength); forwardNeutralLosses = new ArrayList<>(peptideLength); } else { forwardNeutralLossesMasses = null; forwardNeutralLosses = null; } if (complementary) { complementaryNeutralLossesMasses = new ArrayList<>(peptideLength); complementaryNeutralLosses = new ArrayList<>(peptideLength); } else { complementaryNeutralLossesMasses = null; complementaryNeutralLosses = null; } for (int i = 0; i < peptideLength; i++) { char aa = aas[i]; AminoAcid aminoAcid = AminoAcid.getAminoAcid(aa); forwardMass += aminoAcid.getMonoisotopicMass(); forwardMass += modificationsMasses[i]; if (forward) { forwardIonMz1[i] = forwardMass; ArrayList<NeutralLoss> forwardIonlosses = new ArrayList<>(lossesIndexes.length); double[] forwardMasses = new double[lossesIndexes.length]; int indexForward = 0; for (int j = 0; j < lossesIndexes.length; j++) { int[] indexes = lossesIndexes[j]; NeutralLoss neutralLoss = losses[j]; if (i >= indexes[0]) { forwardIonlosses.add(neutralLoss); forwardMasses[indexForward++] = neutralLoss.getMass(); } } forwardNeutralLossesMasses.add(forwardMasses); forwardNeutralLosses.add(forwardIonlosses); } if (complementary) { complementaryIonMz1[i] = complementaryMass - forwardMass; ArrayList<NeutralLoss> complementaryIonlosses = new 
ArrayList<>(lossesIndexes.length); double[] complementaryMasses = new double[lossesIndexes.length]; int indexComplementary = 0; for (int j = 0; j < lossesIndexes.length; j++) { int[] indexes = lossesIndexes[j]; NeutralLoss neutralLoss = losses[j]; if (i <= indexes[j]) { complementaryIonlosses.add(neutralLoss); complementaryMasses[indexComplementary++] = neutralLoss.getMass(); } } complementaryNeutralLossesMasses.add(complementaryMasses); complementaryNeutralLosses.add(complementaryIonlosses); } } } /** * Returns the index at which the water and ammonia losses can occur in an * array. * * @param aas the amino acid sequence as char array * * @return the index at which the water and ammonia losses can occur in an * array */ private int[] getLossesIndexes(char[] aas) { int[] lossesIndexes = {aas.length, aas.length, 0, 0}; boolean waterForward = false; boolean ammoniaForward = false; boolean waterComplementary = false; boolean ammoniaComplementary = false; for (int i = 0; i < aas.length; i++) { char aa = aas[i]; if (!waterForward) { for (char lossAa : NeutralLoss.H2O.aminoAcids) { if (aa == lossAa) { lossesIndexes[0] = i; waterForward = true; break; } } } if (!ammoniaForward) { for (char lossAa : NeutralLoss.NH3.aminoAcids) { if (aa == lossAa) { lossesIndexes[1] = i; ammoniaForward = true; break; } } } int rewindI = aas.length - i - 1; aa = aas[rewindI]; if (!waterComplementary) { for (char lossAa : NeutralLoss.H2O.aminoAcids) { if (aa == lossAa) { lossesIndexes[2] = rewindI; waterComplementary = true; break; } } } if (!ammoniaComplementary) { for (char lossAa : NeutralLoss.NH3.aminoAcids) { if (aa == lossAa) { lossesIndexes[3] = rewindI; ammoniaComplementary = true; break; } } } if (waterForward && ammoniaForward && waterComplementary && ammoniaComplementary) { break; } } return lossesIndexes; } /** * Returns the ions matched in the given spectrum at the given charge. 
* * @param spectrumIndex the index of the spectrum * @param peptideCharge the charge of the peptide * * @return the ions matched in the given spectrum at the given charge */ public ArrayList<IonMatch> getIonMatches( SpectrumIndex spectrumIndex, int peptideCharge ) { ArrayList<IonMatch> results = new ArrayList<>(0); for (int i = 0; i < peptideLength; i++) { double ionMz1 = forwardIonMz1[i]; int ionNumber = i + 1; ArrayList<NeutralLoss> neutralLosses = forwardNeutralLosses.get(i); double[] neutralLossesMasses = forwardNeutralLossesMasses.get(i); for (int j = 0; j < neutralLosses.size(); j++) { double lossMass1 = neutralLossesMasses[j]; double mz1WithLoss = ionMz1 - lossMass1; int[] indexes = spectrumIndex.getMatchingPeaks(mz1WithLoss); if (indexes.length > 0) { NeutralLoss[] ionLosses = {neutralLosses.get(j)}; double ionMass = mz1WithLoss - ElementaryIon.proton.getTheoreticMass(); for (int index : indexes) { Ion ion = new PeptideFragmentIon( forwardIonType, ionNumber, ionMass, ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, 1 ) ); } } for (int k = j + 1; k < neutralLosses.size(); k++) { double lossMass2 = neutralLossesMasses[k]; double mz1WithLoss2 = mz1WithLoss - lossMass2; indexes = spectrumIndex.getMatchingPeaks(mz1WithLoss2); if (indexes.length > 0) { NeutralLoss[] ionLosses = {neutralLosses.get(j), neutralLosses.get(k)}; double ionMass = mz1WithLoss2 - ElementaryIon.proton.getTheoreticMass(); for (int index : indexes) { Ion ion = new PeptideFragmentIon( forwardIonType, ionNumber, ionMass, ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, 1 ) ); } } } } ionMz1 = complementaryIonMz1[i]; ionNumber = peptideLength - ionNumber; neutralLosses = complementaryNeutralLosses.get(i); neutralLossesMasses = complementaryNeutralLossesMasses.get(i); for (int j = 0; j < neutralLosses.size(); j++) { double lossMass1 = neutralLossesMasses[j]; double 
mz1WithLoss = ionMz1 - lossMass1; int[] indexes = spectrumIndex.getMatchingPeaks(mz1WithLoss); if (indexes.length > 0) { NeutralLoss[] ionLosses = {neutralLosses.get(j)}; double ionMass = mz1WithLoss - ElementaryIon.proton.getTheoreticMass(); for (int index : indexes) { Ion ion = new PeptideFragmentIon( complementaryIonType, ionNumber, ionMass, ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, 1 ) ); } } for (int k = j + 1; k < neutralLosses.size(); k++) { double lossMass2 = neutralLossesMasses[k]; double mz1WithLoss2 = mz1WithLoss - lossMass2; indexes = spectrumIndex.getMatchingPeaks(mz1WithLoss2); double ionMass = mz1WithLoss2 - ElementaryIon.proton.getTheoreticMass(); if (indexes.length > 0) { NeutralLoss[] ionLosses = {neutralLosses.get(j), neutralLosses.get(k)}; for (int index : indexes) { Ion ion = new PeptideFragmentIon( complementaryIonType, ionNumber, ionMass, ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, 1 ) ); } } } } } for (int ionCharge = 2; ionCharge < peptideCharge; ionCharge++) { int extraProtons = ionCharge - 1; double protonContribution = ElementaryIon.getProtonMassMultiple(extraProtons); for (int i = 0; i < peptideLength; i++) { double ionMz1 = forwardIonMz1[i]; int ionNumber = i + 1; ArrayList<NeutralLoss> neutralLosses = forwardNeutralLosses.get(i); double[] neutralLossesMasses = forwardNeutralLossesMasses.get(i); for (int j = 0; j < neutralLosses.size(); j++) { double lossMass1 = neutralLossesMasses[j]; double mz1WithLoss = ionMz1 - lossMass1; double mzWithLoss = (mz1WithLoss + protonContribution) / ionCharge; int[] indexes = spectrumIndex.getMatchingPeaks(mzWithLoss); if (indexes.length > 0) { NeutralLoss[] ionLosses = {neutralLosses.get(j)}; double ionMass = mz1WithLoss - ElementaryIon.proton.getTheoreticMass(); for (int index : indexes) { Ion ion = new PeptideFragmentIon( forwardIonType, ionNumber, ionMass, 
ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, ionCharge ) ); } } for (int k = j + 1; k < neutralLosses.size(); k++) { double lossMass2 = neutralLossesMasses[k]; double mz1WithLoss2 = mz1WithLoss - lossMass2; double mzWithLoss2 = (mz1WithLoss2 + protonContribution) / ionCharge; indexes = spectrumIndex.getMatchingPeaks(mzWithLoss2); if (indexes.length > 0) { NeutralLoss[] ionLosses = {neutralLosses.get(j), neutralLosses.get(k)}; double ionMass = mz1WithLoss2 - ElementaryIon.proton.getTheoreticMass(); for (int index : indexes) { Ion ion = new PeptideFragmentIon( forwardIonType, ionNumber, ionMass, ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, ionCharge ) ); } } } } ionMz1 = complementaryIonMz1[i]; ionNumber = peptideLength - ionNumber; neutralLosses = complementaryNeutralLosses.get(i); neutralLossesMasses = complementaryNeutralLossesMasses.get(i); for (int j = 0; j < neutralLosses.size(); j++) { double lossMass1 = neutralLossesMasses[j]; double mz1WithLoss = ionMz1 - lossMass1; double mzWithLoss = (mz1WithLoss + protonContribution) / ionCharge; int[] indexes = spectrumIndex.getMatchingPeaks(mzWithLoss); if (indexes.length > 0) { NeutralLoss[] ionLosses = {neutralLosses.get(j)}; double ionMass = mz1WithLoss - ElementaryIon.proton.getTheoreticMass(); for (int index : indexes) { Ion ion = new PeptideFragmentIon( complementaryIonType, ionNumber, ionMass, ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, ionCharge ) ); } } for (int k = j + 1; k < neutralLosses.size(); k++) { double lossMass2 = neutralLossesMasses[k]; double mz1WithLoss2 = mz1WithLoss - lossMass2; double mzWithLoss2 = (mz1WithLoss2 + protonContribution) / ionCharge; indexes = spectrumIndex.getMatchingPeaks(mzWithLoss2); double ionMass = mz1WithLoss2 - ElementaryIon.proton.getTheoreticMass(); if (indexes.length > 0) { 
NeutralLoss[] ionLosses = {neutralLosses.get(j), neutralLosses.get(k)}; for (int index : indexes) { Ion ion = new PeptideFragmentIon( complementaryIonType, ionNumber, ionMass, ionLosses ); results.add( new IonMatch( spectrumIndex.mzArray[index], spectrumIndex.intensityArray[index], ion, ionCharge ) ); } } } } } } return results; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.clients.consumer.internals; import org.apache.kafka.clients.ClientResponse; import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.Node; import org.apache.kafka.common.errors.AuthenticationException; import org.apache.kafka.common.errors.DisconnectException; import org.apache.kafka.common.errors.GroupAuthorizationException; import org.apache.kafka.common.errors.IllegalGenerationException; import org.apache.kafka.common.errors.InterruptException; import org.apache.kafka.common.errors.RebalanceInProgressException; import org.apache.kafka.common.errors.RetriableException; import org.apache.kafka.common.errors.UnknownMemberIdException; import org.apache.kafka.common.metrics.Measurable; import org.apache.kafka.common.metrics.MetricConfig; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.metrics.Sensor; import org.apache.kafka.common.metrics.stats.Avg; import org.apache.kafka.common.metrics.stats.Count; import org.apache.kafka.common.metrics.stats.Max; import org.apache.kafka.common.metrics.stats.Meter; import org.apache.kafka.common.protocol.Errors; import org.apache.kafka.common.requests.FindCoordinatorRequest; import 
org.apache.kafka.common.requests.FindCoordinatorResponse; import org.apache.kafka.common.requests.HeartbeatRequest; import org.apache.kafka.common.requests.HeartbeatResponse; import org.apache.kafka.common.requests.JoinGroupRequest; import org.apache.kafka.common.requests.JoinGroupRequest.ProtocolMetadata; import org.apache.kafka.common.requests.JoinGroupResponse; import org.apache.kafka.common.requests.LeaveGroupRequest; import org.apache.kafka.common.requests.LeaveGroupResponse; import org.apache.kafka.common.requests.OffsetCommitRequest; import org.apache.kafka.common.requests.SyncGroupRequest; import org.apache.kafka.common.requests.SyncGroupResponse; import org.apache.kafka.common.utils.KafkaThread; import org.apache.kafka.common.utils.LogContext; import org.apache.kafka.common.utils.Time; import org.apache.kafka.common.utils.Timer; import org.slf4j.Logger; import java.io.Closeable; import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; /** * AbstractCoordinator implements group management for a single group member by interacting with * a designated Kafka broker (the coordinator). Group semantics are provided by extending this class. * See {@link ConsumerCoordinator} for example usage. 
 *
 * From a high level, Kafka's group management protocol consists of the following sequence of actions:
 *
 * <ol>
 * <li>Group Registration: Group members register with the coordinator providing their own metadata
 * (such as the set of topics they are interested in).</li>
 * <li>Group/Leader Selection: The coordinator selects the members of the group and chooses one member
 * as the leader.</li>
 * <li>State Assignment: The leader collects the metadata from all the members of the group and
 * assigns state.</li>
 * <li>Group Stabilization: Each member receives the state assigned by the leader and begins
 * processing.</li>
 * </ol>
 *
 * To leverage this protocol, an implementation must define the format of metadata provided by each
 * member for group registration in {@link #metadata()} and the format of the state assignment provided
 * by the leader in {@link #performAssignment(String, String, Map)}, which becomes available to members in
 * {@link #onJoinComplete(int, String, String, ByteBuffer)}.
 *
 * Note on locking: this class shares state between the caller and a background thread which is
 * used for sending heartbeats after the client has joined the group. All mutable state as well as
 * state transitions are protected with the class's monitor. Generally this means acquiring the lock
 * before reading or writing the state of the group (e.g. generation, memberId) and holding the lock
 * when sending a request that affects the state of the group (e.g. JoinGroup, LeaveGroup).
*/ public abstract class AbstractCoordinator implements Closeable { public static final String HEARTBEAT_THREAD_PREFIX = "kafka-coordinator-heartbeat-thread"; private enum MemberState { UNJOINED, // the client is not part of a group REBALANCING, // the client has begun rebalancing STABLE, // the client has joined and is sending heartbeats } private final Logger log; private final int sessionTimeoutMs; private final boolean leaveGroupOnClose; private final GroupCoordinatorMetrics sensors; private final Heartbeat heartbeat; protected final int rebalanceTimeoutMs; protected final String groupId; protected final ConsumerNetworkClient client; protected final Time time; protected final long retryBackoffMs; private HeartbeatThread heartbeatThread = null; private boolean rejoinNeeded = true; private boolean needsJoinPrepare = true; private MemberState state = MemberState.UNJOINED; private RequestFuture<ByteBuffer> joinFuture = null; private Node coordinator = null; private Generation generation = Generation.NO_GENERATION; private RequestFuture<Void> findCoordinatorFuture = null; /** * Initialize the coordination manager. 
     */
    public AbstractCoordinator(LogContext logContext,
                               ConsumerNetworkClient client,
                               String groupId,
                               int rebalanceTimeoutMs,
                               int sessionTimeoutMs,
                               Heartbeat heartbeat,
                               Metrics metrics,
                               String metricGrpPrefix,
                               Time time,
                               long retryBackoffMs,
                               boolean leaveGroupOnClose) {
        this.log = logContext.logger(AbstractCoordinator.class);
        this.client = client;
        this.time = time;
        this.groupId = groupId;
        this.rebalanceTimeoutMs = rebalanceTimeoutMs;
        this.sessionTimeoutMs = sessionTimeoutMs;
        this.leaveGroupOnClose = leaveGroupOnClose;
        this.heartbeat = heartbeat;
        this.sensors = new GroupCoordinatorMetrics(metrics, metricGrpPrefix);
        this.retryBackoffMs = retryBackoffMs;
    }

    /**
     * Convenience constructor which builds the {@link Heartbeat} from the given
     * session/heartbeat/rebalance timing parameters.
     */
    public AbstractCoordinator(LogContext logContext,
                               ConsumerNetworkClient client,
                               String groupId,
                               int rebalanceTimeoutMs,
                               int sessionTimeoutMs,
                               int heartbeatIntervalMs,
                               Metrics metrics,
                               String metricGrpPrefix,
                               Time time,
                               long retryBackoffMs,
                               boolean leaveGroupOnClose) {
        this(logContext, client, groupId, rebalanceTimeoutMs, sessionTimeoutMs,
                new Heartbeat(time, sessionTimeoutMs, heartbeatIntervalMs, rebalanceTimeoutMs, retryBackoffMs),
                metrics, metricGrpPrefix, time, retryBackoffMs, leaveGroupOnClose);
    }

    /**
     * Unique identifier for the class of supported protocols (e.g. "consumer" or "connect").
     * @return Non-null protocol type name
     */
    protected abstract String protocolType();

    /**
     * Get the current list of protocols and their associated metadata supported
     * by the local member. The order of the protocols in the list indicates the preference
     * of the protocol (the first entry is the most preferred). The coordinator takes this
     * preference into account when selecting the generation protocol (generally more preferred
     * protocols will be selected as long as all members support them and there is no disagreement
     * on the preference).
     * @return Non-empty map of supported protocols and metadata
     */
    protected abstract List<ProtocolMetadata> metadata();

    /**
     * Invoked prior to each group join or rejoin. This is typically used to perform any
     * cleanup from the previous generation (such as committing offsets for the consumer)
     * @param generation The previous generation or -1 if there was none
     * @param memberId The identifier of this member in the previous group or "" if there was none
     */
    protected abstract void onJoinPrepare(int generation, String memberId);

    /**
     * Perform assignment for the group. This is used by the leader to push state to all the members
     * of the group (e.g. to push partition assignments in the case of the new consumer)
     * @param leaderId The id of the leader (which is this member)
     * @param protocol The group protocol selected by the coordinator
     * @param allMemberMetadata Metadata from all members of the group
     * @return A map from each member to their state assignment
     */
    protected abstract Map<String, ByteBuffer> performAssignment(String leaderId,
                                                                 String protocol,
                                                                 Map<String, ByteBuffer> allMemberMetadata);

    /**
     * Invoked when a group member has successfully joined a group. If this call fails with an exception,
     * then it will be retried using the same assignment state on the next call to {@link #ensureActiveGroup()}.
     *
     * @param generation The generation that was joined
     * @param memberId The identifier for the local member in the group
     * @param protocol The protocol selected by the coordinator
     * @param memberAssignment The assignment propagated from the group leader
     */
    protected abstract void onJoinComplete(int generation,
                                           String memberId,
                                           String protocol,
                                           ByteBuffer memberAssignment);

    /**
     * Visible for testing.
     *
     * Ensure that the coordinator is ready to receive requests.
     *
     * @param timer Timer bounding how long this method can block
     * @return true If coordinator discovery and initial connection succeeded, false otherwise
     */
    protected synchronized boolean ensureCoordinatorReady(final Timer timer) {
        // Fast path: we already have a known, connected coordinator.
        if (!coordinatorUnknown())
            return true;

        do {
            final RequestFuture<Void> future = lookupCoordinator();
            client.poll(future, timer);

            if (!future.isDone()) {
                // ran out of time
                break;
            }

            if (future.failed()) {
                if (future.isRetriable()) {
                    log.debug("Coordinator discovery failed, refreshing metadata");
                    client.awaitMetadataUpdate(timer);
                } else
                    throw future.exception();
            } else if (coordinator != null && client.isUnavailable(coordinator)) {
                // we found the coordinator, but the connection has failed, so mark
                // it dead and backoff before retrying discovery
                markCoordinatorUnknown();
                timer.sleep(retryBackoffMs);
            }
        } while (coordinatorUnknown() && timer.notExpired());

        return !coordinatorUnknown();
    }

    protected synchronized RequestFuture<Void> lookupCoordinator() {
        // Reuse the in-flight FindCoordinator request if there is one.
        if (findCoordinatorFuture == null) {
            // find a node to ask about the coordinator
            Node node = this.client.leastLoadedNode();
            if (node == null) {
                log.debug("No broker available to send FindCoordinator request");
                return RequestFuture.noBrokersAvailable();
            } else
                findCoordinatorFuture = sendFindCoordinatorRequest(node);
        }
        return findCoordinatorFuture;
    }

    private synchronized void clearFindCoordinatorFuture() {
        findCoordinatorFuture = null;
    }

    /**
     * Check whether the group should be rejoined (e.g. if metadata changes) or whether a
     * rejoin request is already in flight and needs to be completed.
     *
     * @return true if it should, false otherwise
     */
    protected synchronized boolean rejoinNeededOrPending() {
        // if there's a pending joinFuture, we should try to complete handling it.
        return rejoinNeeded || joinFuture != null;
    }

    /**
     * Check the status of the heartbeat thread (if it is active) and indicate the liveness
     * of the client. This must be called periodically after joining with {@link #ensureActiveGroup()}
     * to ensure that the member stays in the group. If an interval of time longer than the
     * provided rebalance timeout expires without calling this method, then the client will proactively
     * leave the group.
     *
     * @param now current time in milliseconds
     * @throws RuntimeException for unexpected errors raised from the heartbeat thread
     */
    protected synchronized void pollHeartbeat(long now) {
        if (heartbeatThread != null) {
            if (heartbeatThread.hasFailed()) {
                // set the heartbeat thread to null and raise an exception. If the user catches it,
                // the next call to ensureActiveGroup() will spawn a new heartbeat thread.
                RuntimeException cause = heartbeatThread.failureCause();
                heartbeatThread = null;
                throw cause;
            }
            // Awake the heartbeat thread if needed
            if (heartbeat.shouldHeartbeat(now)) {
                notify();
            }
            heartbeat.poll(now);
        }
    }

    protected synchronized long timeToNextHeartbeat(long now) {
        // if we have not joined the group, we don't need to send heartbeats
        if (state == MemberState.UNJOINED)
            return Long.MAX_VALUE;
        return heartbeat.timeToNextHeartbeat(now);
    }

    /**
     * Ensure that the group is active (i.e. joined and synced)
     */
    public void ensureActiveGroup() {
        // Retry indefinitely; each attempt is bounded only by an effectively
        // infinite timer.
        while (!ensureActiveGroup(time.timer(Long.MAX_VALUE))) {
            log.warn("still waiting to ensure active group");
        }
    }

    /**
     * Ensure the group is active (i.e., joined and synced)
     *
     * @param timer Timer bounding how long this method can block
     * @return true iff the group is active
     */
    boolean ensureActiveGroup(final Timer timer) {
        // always ensure that the coordinator is ready because we may have been disconnected
        // when sending heartbeats and does not necessarily require us to rejoin the group.
        if (!ensureCoordinatorReady(timer)) {
            return false;
        }

        startHeartbeatThreadIfNeeded();
        return joinGroupIfNeeded(timer);
    }

    /** Lazily create and start the background heartbeat thread. */
    private synchronized void startHeartbeatThreadIfNeeded() {
        if (heartbeatThread == null) {
            heartbeatThread = new HeartbeatThread();
            heartbeatThread.start();
        }
    }

    private synchronized void disableHeartbeatThread() {
        if (heartbeatThread != null)
            heartbeatThread.disable();
    }

    private void closeHeartbeatThread() {
        HeartbeatThread thread = null;
        // Detach the thread while holding the monitor, but join() outside of it
        // so the heartbeat thread is not blocked from finishing.
        synchronized (this) {
            if (heartbeatThread == null)
                return;
            heartbeatThread.close();
            thread = heartbeatThread;
            heartbeatThread = null;
        }
        try {
            thread.join();
        } catch (InterruptedException e) {
            log.warn("Interrupted while waiting for consumer heartbeat thread to close");
            throw new InterruptException(e);
        }
    }

    /**
     * Joins the group without starting the heartbeat thread.
     *
     * Visible for testing.
     *
     * @param timer Timer bounding how long this method can block
     * @return true iff the operation succeeded
     */
    boolean joinGroupIfNeeded(final Timer timer) {
        while (rejoinNeededOrPending()) {
            if (!ensureCoordinatorReady(timer)) {
                return false;
            }

            // call onJoinPrepare if needed. We set a flag to make sure that we do not call it a second
            // time if the client is woken up before a pending rebalance completes. This must be called
            // on each iteration of the loop because an event requiring a rebalance (such as a metadata
            // refresh which changes the matched subscription set) can occur while another rebalance is
            // still in progress.
            if (needsJoinPrepare) {
                onJoinPrepare(generation.generationId, generation.memberId);
                needsJoinPrepare = false;
            }

            final RequestFuture<ByteBuffer> future = initiateJoinGroup();
            client.poll(future, timer);
            if (!future.isDone()) {
                // we ran out of time
                return false;
            }

            if (future.succeeded()) {
                // Duplicate the buffer in case `onJoinComplete` does not complete and needs to be retried.
                ByteBuffer memberAssignment = future.value().duplicate();
                onJoinComplete(generation.generationId, generation.memberId, generation.protocol, memberAssignment);

                // We reset the join group future only after the completion callback returns. This ensures
                // that if the callback is woken up, we will retry it on the next joinGroupIfNeeded.
                resetJoinGroupFuture();
                needsJoinPrepare = true;
            } else {
                resetJoinGroupFuture();
                final RuntimeException exception = future.exception();
                // Transient membership errors: immediately retry the join.
                if (exception instanceof UnknownMemberIdException ||
                        exception instanceof RebalanceInProgressException ||
                        exception instanceof IllegalGenerationException)
                    continue;
                else if (!future.isRetriable())
                    throw exception;
                timer.sleep(retryBackoffMs);
            }
        }
        return true;
    }

    private synchronized void resetJoinGroupFuture() {
        this.joinFuture = null;
    }

    private synchronized RequestFuture<ByteBuffer> initiateJoinGroup() {
        // we store the join future in case we are woken up by the user after beginning the
        // rebalance in the call to poll below. This ensures that we do not mistakenly attempt
        // to rejoin before the pending rebalance has completed.
        if (joinFuture == null) {
            // fence off the heartbeat thread explicitly so that it cannot interfere with the join group.
            // Note that this must come after the call to onJoinPrepare since we must be able to continue
            // sending heartbeats if that callback takes some time.
            disableHeartbeatThread();

            state = MemberState.REBALANCING;
            joinFuture = sendJoinGroupRequest();
            joinFuture.addListener(new RequestFutureListener<ByteBuffer>() {
                @Override
                public void onSuccess(ByteBuffer value) {
                    // handle join completion in the callback so that the callback will be invoked
                    // even if the consumer is woken up before finishing the rebalance
                    synchronized (AbstractCoordinator.this) {
                        log.info("Successfully joined group with generation {}", generation.generationId);
                        state = MemberState.STABLE;
                        rejoinNeeded = false;

                        if (heartbeatThread != null)
                            heartbeatThread.enable();
                    }
                }

                @Override
                public void onFailure(RuntimeException e) {
                    // we handle failures below after the request finishes. if the join completes
                    // after having been woken up, the exception is ignored and we will rejoin
                    synchronized (AbstractCoordinator.this) {
                        state = MemberState.UNJOINED;
                    }
                }
            });
        }
        return joinFuture;
    }

    /**
     * Join the group and return the assignment for the next generation. This function handles both
     * JoinGroup and SyncGroup, delegating to {@link #performAssignment(String, String, Map)} if
     * elected leader by the coordinator.
     *
     * NOTE: This is visible only for testing
     *
     * @return A request future which wraps the assignment returned from the group leader
     */
    RequestFuture<ByteBuffer> sendJoinGroupRequest() {
        if (coordinatorUnknown())
            return RequestFuture.coordinatorNotAvailable();

        // send a join group request to the coordinator
        log.info("(Re-)joining group");
        JoinGroupRequest.Builder requestBuilder = new JoinGroupRequest.Builder(
                groupId,
                this.sessionTimeoutMs,
                this.generation.memberId,
                protocolType(),
                metadata()).setRebalanceTimeout(this.rebalanceTimeoutMs);

        log.debug("Sending JoinGroup ({}) to coordinator {}", requestBuilder, this.coordinator);

        // Note that we override the request timeout using the rebalance timeout since that is the
        // maximum time that it may block on the coordinator. We add an extra 5 seconds for small delays.
        // Math.max guards against int overflow of rebalanceTimeoutMs + 5000.
        int joinGroupTimeoutMs = Math.max(rebalanceTimeoutMs, rebalanceTimeoutMs + 5000);
        return client.send(coordinator, requestBuilder, joinGroupTimeoutMs)
                .compose(new JoinGroupResponseHandler());
    }

    private class JoinGroupResponseHandler extends CoordinatorResponseHandler<JoinGroupResponse, ByteBuffer> {
        @Override
        public void handle(JoinGroupResponse joinResponse, RequestFuture<ByteBuffer> future) {
            Errors error = joinResponse.error();
            if (error == Errors.NONE) {
                log.debug("Received successful JoinGroup response: {}", joinResponse);
                sensors.joinLatency.record(response.requestLatencyMs());

                synchronized (AbstractCoordinator.this) {
                    if (state != MemberState.REBALANCING) {
                        // if the consumer was woken up before a rebalance completes, we may have already left
                        // the group. In this case, we do not want to continue with the sync group.
                        future.raise(new UnjoinedGroupException());
                    } else {
                        AbstractCoordinator.this.generation = new Generation(joinResponse.generationId(),
                                joinResponse.memberId(), joinResponse.groupProtocol());
                        if (joinResponse.isLeader()) {
                            onJoinLeader(joinResponse).chain(future);
                        } else {
                            onJoinFollower().chain(future);
                        }
                    }
                }
            } else if (error == Errors.COORDINATOR_LOAD_IN_PROGRESS) {
                log.debug("Attempt to join group rejected since coordinator {} is loading the group.", coordinator());
                // backoff and retry
                future.raise(error);
            } else if (error == Errors.UNKNOWN_MEMBER_ID) {
                // reset the member id and retry immediately
                resetGeneration();
                log.debug("Attempt to join group failed due to unknown member id.");
                future.raise(Errors.UNKNOWN_MEMBER_ID);
            } else if (error == Errors.COORDINATOR_NOT_AVAILABLE
                    || error == Errors.NOT_COORDINATOR) {
                // re-discover the coordinator and retry with backoff
                markCoordinatorUnknown();
                log.debug("Attempt to join group failed due to obsolete coordinator information: {}", error.message());
                future.raise(error);
            } else if (error == Errors.INCONSISTENT_GROUP_PROTOCOL
                    || error == Errors.INVALID_SESSION_TIMEOUT
                    || error == 
Errors.INVALID_GROUP_ID) { // log the error and re-throw the exception log.error("Attempt to join group failed due to fatal error: {}", error.message()); future.raise(error); } else if (error == Errors.GROUP_AUTHORIZATION_FAILED) { future.raise(new GroupAuthorizationException(groupId)); } else { // unexpected error, throw the exception future.raise(new KafkaException("Unexpected error in join group response: " + error.message())); } } } private RequestFuture<ByteBuffer> onJoinFollower() { // send follower's sync group with an empty assignment SyncGroupRequest.Builder requestBuilder = new SyncGroupRequest.Builder(groupId, generation.generationId, generation.memberId, Collections.<String, ByteBuffer>emptyMap()); log.debug("Sending follower SyncGroup to coordinator {}: {}", this.coordinator, requestBuilder); return sendSyncGroupRequest(requestBuilder); } private RequestFuture<ByteBuffer> onJoinLeader(JoinGroupResponse joinResponse) { try { // perform the leader synchronization and send back the assignment for the group Map<String, ByteBuffer> groupAssignment = performAssignment(joinResponse.leaderId(), joinResponse.groupProtocol(), joinResponse.members()); SyncGroupRequest.Builder requestBuilder = new SyncGroupRequest.Builder(groupId, generation.generationId, generation.memberId, groupAssignment); log.debug("Sending leader SyncGroup to coordinator {}: {}", this.coordinator, requestBuilder); return sendSyncGroupRequest(requestBuilder); } catch (RuntimeException e) { return RequestFuture.failure(e); } } private RequestFuture<ByteBuffer> sendSyncGroupRequest(SyncGroupRequest.Builder requestBuilder) { if (coordinatorUnknown()) return RequestFuture.coordinatorNotAvailable(); return client.send(coordinator, requestBuilder) .compose(new SyncGroupResponseHandler()); } private class SyncGroupResponseHandler extends CoordinatorResponseHandler<SyncGroupResponse, ByteBuffer> { @Override public void handle(SyncGroupResponse syncResponse, RequestFuture<ByteBuffer> future) { Errors 
error = syncResponse.error(); if (error == Errors.NONE) { sensors.syncLatency.record(response.requestLatencyMs()); future.complete(syncResponse.memberAssignment()); } else { requestRejoin(); if (error == Errors.GROUP_AUTHORIZATION_FAILED) { future.raise(new GroupAuthorizationException(groupId)); } else if (error == Errors.REBALANCE_IN_PROGRESS) { log.debug("SyncGroup failed because the group began another rebalance"); future.raise(error); } else if (error == Errors.UNKNOWN_MEMBER_ID || error == Errors.ILLEGAL_GENERATION) { log.debug("SyncGroup failed: {}", error.message()); resetGeneration(); future.raise(error); } else if (error == Errors.COORDINATOR_NOT_AVAILABLE || error == Errors.NOT_COORDINATOR) { log.debug("SyncGroup failed: {}", error.message()); markCoordinatorUnknown(); future.raise(error); } else { future.raise(new KafkaException("Unexpected error from SyncGroup: " + error.message())); } } } } /** * Discover the current coordinator for the group. Sends a GroupMetadata request to * one of the brokers. The returned future should be polled to get the result of the request. 
* @return A request future which indicates the completion of the metadata request */ private RequestFuture<Void> sendFindCoordinatorRequest(Node node) { // initiate the group metadata request log.debug("Sending FindCoordinator request to broker {}", node); FindCoordinatorRequest.Builder requestBuilder = new FindCoordinatorRequest.Builder(FindCoordinatorRequest.CoordinatorType.GROUP, this.groupId); return client.send(node, requestBuilder) .compose(new FindCoordinatorResponseHandler()); } private class FindCoordinatorResponseHandler extends RequestFutureAdapter<ClientResponse, Void> { @Override public void onSuccess(ClientResponse resp, RequestFuture<Void> future) { log.debug("Received FindCoordinator response {}", resp); clearFindCoordinatorFuture(); FindCoordinatorResponse findCoordinatorResponse = (FindCoordinatorResponse) resp.responseBody(); Errors error = findCoordinatorResponse.error(); if (error == Errors.NONE) { synchronized (AbstractCoordinator.this) { // use MAX_VALUE - node.id as the coordinator id to allow separate connections // for the coordinator in the underlying network client layer int coordinatorConnectionId = Integer.MAX_VALUE - findCoordinatorResponse.node().id(); AbstractCoordinator.this.coordinator = new Node( coordinatorConnectionId, findCoordinatorResponse.node().host(), findCoordinatorResponse.node().port()); log.info("Discovered group coordinator {}", coordinator); client.tryConnect(coordinator); heartbeat.resetSessionTimeout(); } future.complete(null); } else if (error == Errors.GROUP_AUTHORIZATION_FAILED) { future.raise(new GroupAuthorizationException(groupId)); } else { log.debug("Group coordinator lookup failed: {}", error.message()); future.raise(error); } } @Override public void onFailure(RuntimeException e, RequestFuture<Void> future) { clearFindCoordinatorFuture(); super.onFailure(e, future); } } /** * Check if we know who the coordinator is and we have an active connection * @return true if the coordinator is unknown */ public 
boolean coordinatorUnknown() { return checkAndGetCoordinator() == null; } /** * Get the coordinator if its connection is still active. Otherwise mark it unknown and * return null. * * @return the current coordinator or null if it is unknown */ protected synchronized Node checkAndGetCoordinator() { if (coordinator != null && client.isUnavailable(coordinator)) { markCoordinatorUnknown(true); return null; } return this.coordinator; } private synchronized Node coordinator() { return this.coordinator; } protected synchronized void markCoordinatorUnknown() { markCoordinatorUnknown(false); } protected synchronized void markCoordinatorUnknown(boolean isDisconnected) { if (this.coordinator != null) { log.info("Group coordinator {} is unavailable or invalid, will attempt rediscovery", this.coordinator); Node oldCoordinator = this.coordinator; // Mark the coordinator dead before disconnecting requests since the callbacks for any pending // requests may attempt to do likewise. This also prevents new requests from being sent to the // coordinator while the disconnect is in progress. this.coordinator = null; // Disconnect from the coordinator to ensure that there are no in-flight requests remaining. // Pending callbacks will be invoked with a DisconnectException on the next call to poll. if (!isDisconnected) client.disconnectAsync(oldCoordinator); } } /** * Get the current generation state if the group is stable. * @return the current generation or null if the group is unjoined/rebalancing */ protected synchronized Generation generation() { if (this.state != MemberState.STABLE) return null; return generation; } /** * Reset the generation and memberId because we have fallen out of the group. 
*/ protected synchronized void resetGeneration() { this.generation = Generation.NO_GENERATION; this.rejoinNeeded = true; this.state = MemberState.UNJOINED; } protected synchronized void requestRejoin() { this.rejoinNeeded = true; } /** * Close the coordinator, waiting if needed to send LeaveGroup. */ @Override public final void close() { close(time.timer(0)); } protected void close(Timer timer) { try { closeHeartbeatThread(); } finally { // Synchronize after closing the heartbeat thread since heartbeat thread // needs this lock to complete and terminate after close flag is set. synchronized (this) { if (leaveGroupOnClose) { maybeLeaveGroup(); } // At this point, there may be pending commits (async commits or sync commits that were // interrupted using wakeup) and the leave group request which have been queued, but not // yet sent to the broker. Wait up to close timeout for these pending requests to be processed. // If coordinator is not known, requests are aborted. Node coordinator = checkAndGetCoordinator(); if (coordinator != null && !client.awaitPendingRequests(coordinator, timer)) log.warn("Close timed out with {} pending requests to coordinator, terminating client connections", client.pendingRequestCount(coordinator)); } } } /** * Leave the current group and reset local generation/memberId. */ public synchronized void maybeLeaveGroup() { if (!coordinatorUnknown() && state != MemberState.UNJOINED && generation != Generation.NO_GENERATION) { // this is a minimal effort attempt to leave the group. we do not // attempt any resending if the request fails or times out. 
log.info("Sending LeaveGroup request to coordinator {}", coordinator); LeaveGroupRequest.Builder request = new LeaveGroupRequest.Builder(groupId, generation.memberId); client.send(coordinator, request) .compose(new LeaveGroupResponseHandler()); client.pollNoWakeup(); } resetGeneration(); } private class LeaveGroupResponseHandler extends CoordinatorResponseHandler<LeaveGroupResponse, Void> { @Override public void handle(LeaveGroupResponse leaveResponse, RequestFuture<Void> future) { Errors error = leaveResponse.error(); if (error == Errors.NONE) { log.debug("LeaveGroup request returned successfully"); future.complete(null); } else { log.debug("LeaveGroup request failed with error: {}", error.message()); future.raise(error); } } } // visible for testing synchronized RequestFuture<Void> sendHeartbeatRequest() { log.debug("Sending Heartbeat request to coordinator {}", coordinator); HeartbeatRequest.Builder requestBuilder = new HeartbeatRequest.Builder(this.groupId, this.generation.generationId, this.generation.memberId); return client.send(coordinator, requestBuilder) .compose(new HeartbeatResponseHandler()); } private class HeartbeatResponseHandler extends CoordinatorResponseHandler<HeartbeatResponse, Void> { @Override public void handle(HeartbeatResponse heartbeatResponse, RequestFuture<Void> future) { sensors.heartbeatLatency.record(response.requestLatencyMs()); Errors error = heartbeatResponse.error(); if (error == Errors.NONE) { log.debug("Received successful Heartbeat response"); future.complete(null); } else if (error == Errors.COORDINATOR_NOT_AVAILABLE || error == Errors.NOT_COORDINATOR) { log.info("Attempt to heartbeat failed since coordinator {} is either not started or not valid.", coordinator()); markCoordinatorUnknown(); future.raise(error); } else if (error == Errors.REBALANCE_IN_PROGRESS) { log.info("Attempt to heartbeat failed since group is rebalancing"); requestRejoin(); future.raise(Errors.REBALANCE_IN_PROGRESS); } else if (error == 
Errors.ILLEGAL_GENERATION) { log.info("Attempt to heartbeat failed since generation {} is not current", generation.generationId); resetGeneration(); future.raise(Errors.ILLEGAL_GENERATION); } else if (error == Errors.UNKNOWN_MEMBER_ID) { log.info("Attempt to heartbeat failed for since member id {} is not valid.", generation.memberId); resetGeneration(); future.raise(Errors.UNKNOWN_MEMBER_ID); } else if (error == Errors.GROUP_AUTHORIZATION_FAILED) { future.raise(new GroupAuthorizationException(groupId)); } else { future.raise(new KafkaException("Unexpected error in heartbeat response: " + error.message())); } } } protected abstract class CoordinatorResponseHandler<R, T> extends RequestFutureAdapter<ClientResponse, T> { protected ClientResponse response; public abstract void handle(R response, RequestFuture<T> future); @Override public void onFailure(RuntimeException e, RequestFuture<T> future) { // mark the coordinator as dead if (e instanceof DisconnectException) { markCoordinatorUnknown(true); } future.raise(e); } @Override @SuppressWarnings("unchecked") public void onSuccess(ClientResponse clientResponse, RequestFuture<T> future) { try { this.response = clientResponse; R responseObj = (R) clientResponse.responseBody(); handle(responseObj, future); } catch (RuntimeException e) { if (!future.isDone()) future.raise(e); } } } protected Meter createMeter(Metrics metrics, String groupName, String baseName, String descriptiveName) { return new Meter(new Count(), metrics.metricName(baseName + "-rate", groupName, String.format("The number of %s per second", descriptiveName)), metrics.metricName(baseName + "-total", groupName, String.format("The total number of %s", descriptiveName))); } private class GroupCoordinatorMetrics { public final String metricGrpName; public final Sensor heartbeatLatency; public final Sensor joinLatency; public final Sensor syncLatency; public GroupCoordinatorMetrics(Metrics metrics, String metricGrpPrefix) { this.metricGrpName = metricGrpPrefix 
+ "-coordinator-metrics"; this.heartbeatLatency = metrics.sensor("heartbeat-latency"); this.heartbeatLatency.add(metrics.metricName("heartbeat-response-time-max", this.metricGrpName, "The max time taken to receive a response to a heartbeat request"), new Max()); this.heartbeatLatency.add(createMeter(metrics, metricGrpName, "heartbeat", "heartbeats")); this.joinLatency = metrics.sensor("join-latency"); this.joinLatency.add(metrics.metricName("join-time-avg", this.metricGrpName, "The average time taken for a group rejoin"), new Avg()); this.joinLatency.add(metrics.metricName("join-time-max", this.metricGrpName, "The max time taken for a group rejoin"), new Max()); this.joinLatency.add(createMeter(metrics, metricGrpName, "join", "group joins")); this.syncLatency = metrics.sensor("sync-latency"); this.syncLatency.add(metrics.metricName("sync-time-avg", this.metricGrpName, "The average time taken for a group sync"), new Avg()); this.syncLatency.add(metrics.metricName("sync-time-max", this.metricGrpName, "The max time taken for a group sync"), new Max()); this.syncLatency.add(createMeter(metrics, metricGrpName, "sync", "group syncs")); Measurable lastHeartbeat = new Measurable() { public double measure(MetricConfig config, long now) { return TimeUnit.SECONDS.convert(now - heartbeat.lastHeartbeatSend(), TimeUnit.MILLISECONDS); } }; metrics.addMetric(metrics.metricName("last-heartbeat-seconds-ago", this.metricGrpName, "The number of seconds since the last coordinator heartbeat was sent"), lastHeartbeat); } } private class HeartbeatThread extends KafkaThread { private boolean enabled = false; private boolean closed = false; private AtomicReference<RuntimeException> failed = new AtomicReference<>(null); private HeartbeatThread() { super(HEARTBEAT_THREAD_PREFIX + (groupId.isEmpty() ? 
"" : " | " + groupId), true); } public void enable() { synchronized (AbstractCoordinator.this) { log.debug("Enabling heartbeat thread"); this.enabled = true; heartbeat.resetTimeouts(); AbstractCoordinator.this.notify(); } } public void disable() { synchronized (AbstractCoordinator.this) { log.debug("Disabling heartbeat thread"); this.enabled = false; } } public void close() { synchronized (AbstractCoordinator.this) { this.closed = true; AbstractCoordinator.this.notify(); } } private boolean hasFailed() { return failed.get() != null; } private RuntimeException failureCause() { return failed.get(); } @Override public void run() { try { log.debug("Heartbeat thread started"); while (true) { synchronized (AbstractCoordinator.this) { if (closed) return; if (!enabled) { AbstractCoordinator.this.wait(); continue; } if (state != MemberState.STABLE) { // the group is not stable (perhaps because we left the group or because the coordinator // kicked us out), so disable heartbeats and wait for the main thread to rejoin. disable(); continue; } client.pollNoWakeup(); long now = time.milliseconds(); if (coordinatorUnknown()) { if (findCoordinatorFuture != null || lookupCoordinator().failed()) // the immediate future check ensures that we backoff properly in the case that no // brokers are available to connect to. AbstractCoordinator.this.wait(retryBackoffMs); } else if (heartbeat.sessionTimeoutExpired(now)) { // the session timeout has expired without seeing a successful heartbeat, so we should // probably make sure the coordinator is still healthy. markCoordinatorUnknown(); } else if (heartbeat.pollTimeoutExpired(now)) { // the poll timeout has expired, which means that the foreground thread has stalled // in between calls to poll(), so we explicitly leave the group. log.warn("This member will leave the group because consumer poll timeout has expired. 
This " + "means the time between subsequent calls to poll() was longer than the configured " + "max.poll.interval.ms, which typically implies that the poll loop is spending too " + "much time processing messages. You can address this either by increasing " + "max.poll.interval.ms or by reducing the maximum size of batches returned in poll() " + "with max.poll.records."); maybeLeaveGroup(); } else if (!heartbeat.shouldHeartbeat(now)) { // poll again after waiting for the retry backoff in case the heartbeat failed or the // coordinator disconnected AbstractCoordinator.this.wait(retryBackoffMs); } else { heartbeat.sentHeartbeat(now); sendHeartbeatRequest().addListener(new RequestFutureListener<Void>() { @Override public void onSuccess(Void value) { synchronized (AbstractCoordinator.this) { heartbeat.receiveHeartbeat(); } } @Override public void onFailure(RuntimeException e) { synchronized (AbstractCoordinator.this) { if (e instanceof RebalanceInProgressException) { // it is valid to continue heartbeating while the group is rebalancing. This // ensures that the coordinator keeps the member in the group for as long // as the duration of the rebalance timeout. If we stop sending heartbeats, // however, then the session timeout may expire before we can rejoin. 
heartbeat.receiveHeartbeat(); } else { heartbeat.failHeartbeat(); // wake up the thread if it's sleeping to reschedule the heartbeat AbstractCoordinator.this.notify(); } } } }); } } } } catch (AuthenticationException e) { log.error("An authentication error occurred in the heartbeat thread", e); this.failed.set(e); } catch (GroupAuthorizationException e) { log.error("A group authorization error occurred in the heartbeat thread", e); this.failed.set(e); } catch (InterruptedException | InterruptException e) { Thread.interrupted(); log.error("Unexpected interrupt received in heartbeat thread", e); this.failed.set(new RuntimeException(e)); } catch (Throwable e) { log.error("Heartbeat thread failed due to unexpected error", e); if (e instanceof RuntimeException) this.failed.set((RuntimeException) e); else this.failed.set(new RuntimeException(e)); } finally { log.debug("Heartbeat thread has closed"); } } } protected static class Generation { public static final Generation NO_GENERATION = new Generation( OffsetCommitRequest.DEFAULT_GENERATION_ID, JoinGroupRequest.UNKNOWN_MEMBER_ID, null); public final int generationId; public final String memberId; public final String protocol; public Generation(int generationId, String memberId, String protocol) { this.generationId = generationId; this.memberId = memberId; this.protocol = protocol; } @Override public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final Generation that = (Generation) o; return generationId == that.generationId && Objects.equals(memberId, that.memberId) && Objects.equals(protocol, that.protocol); } @Override public int hashCode() { return Objects.hash(generationId, memberId, protocol); } } private static class UnjoinedGroupException extends RetriableException { } }
package be.idoneus.hipchat.buildbot.stepdefs; import be.idoneus.hipchat.buildbot.bot.BotOrchestrationService; import be.idoneus.hipchat.buildbot.data.CannotStartPlugin; import be.idoneus.hipchat.buildbot.data.TestCommandDispatcher; import be.idoneus.hipchat.buildbot.data.TestPlugin; import be.idoneus.hipchat.buildbot.data.TestPluginProvider; import be.idoneus.hipchat.buildbot.hipchat.installations.InstallationContext; import be.idoneus.hipchat.buildbot.hipchat.installations.InstalledPlugin; import be.idoneus.hipchat.buildbot.hipchat.server.install.InstallationHandler; import be.idoneus.hipchat.buildbot.hipchat.server.install.dto.InstalledInformation; import be.idoneus.hipchat.buildbot.hipchat.server.validator.HipChatRequestValidator; import be.idoneus.hipchat.buildbot.hipchat.server.webhooks.RoomMessageHandler; import be.idoneus.hipchat.buildbot.utils.JacksonUtils; import com.google.common.collect.ImmutableMap; import cucumber.api.java.en.And; import cucumber.api.java.en.Given; import cucumber.api.java.en.Then; import cucumber.api.java.en.When; import lombok.Data; import be.idoneus.hipchat.buildbot.api.domain.RoomMessage; import be.idoneus.hipchat.buildbot.api.domain.RoomMessageItem; import be.idoneus.hipchat.buildbot.api.domain.RoomMessageItemData; import be.idoneus.hipchat.buildbot.api.domain.Installation; import be.idoneus.hipchat.buildbot.testing.TestPluginContextBuilder; import be.idoneus.hipchat.buildbot.testing.TestStore; import ratpack.http.Request; import ratpack.registry.Registry; import ratpack.service.StartEvent; import ratpack.service.StopEvent; import ratpack.test.handling.RequestFixture; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Optional; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; public class BotOrchestrationStepdefs { private TestPluginContextBuilder pluginContextBuilder = new TestPluginContextBuilder(); private TestPluginProvider pluginProvider = 
new TestPluginProvider(() -> new TestPlugin("foo"), () -> new TestPlugin("bar")); private TestStore<Installation> store = new TestStore<>(); private TestCommandDispatcher commandDispatcher = new TestCommandDispatcher(); private BotOrchestrationService service = createService(); private BotOrchestrationService createService() { return new BotOrchestrationService( (variableCombiner) -> pluginContextBuilder, store, pluginProvider, commandDispatcher ); } private List<Installation> givenInstallations; @Given("^the following installations are registered$") public void theFollowingInstallationsAreRegistered(List<Installation> installations) throws IOException { givenInstallations = installations; for (Installation installation : installations) { service.registerInstallation(installation); } } @Given("^the registered installations are unregistered$") public void theRegisteredInstallationsAreUnregistered() throws IOException { for (Installation installation : givenInstallations) { service.unregisterInstallation(installation); } } @Then("^the installation store should contain (\\d+) installations$") public void theInstallationStoreShouldContainInstallations(int numInstallations) { assertThat(store.findAll().size(), is(numInstallations)); } @Given("^the following \"installed information\" coming in from HipChat$") public void theFollowingInstalledInformationComingInFromHipChat(List<InstalledInformation> installedInformations) throws Exception { for (InstalledInformation installedInformation : installedInformations) { RequestFixture.requestFixture() .registry(r -> r.add(service)) .body(JacksonUtils.serialize(installedInformation), "application/json") .method("POST") .handle(new InstallationHandler()); } } @Then("^The following installations should be registered$") public void theFollowingInstallationsShouldBeRegistered(List<Installation> installations) throws Throwable { assertThat(store.findAll().size(), is(installations.size())); for (Installation expected : installations) { 
final Optional<Installation> actual = store.get(expected.getOauthId()); assertThat(actual.isPresent(), is(true)); assertThat(actual.get(), is(expected)); } } @When("^there is an unregistration for oauthId (\\S+) coming in from HipChat$") public void thereIsAnUnregistrationForOauthIdComingInFromHipChat(String oauthId) throws Exception { RequestFixture.requestFixture() .registry(r -> r.add(service)) .pathBinding(ImmutableMap.of("oauthid", oauthId)) .method("DELETE") .handle(new InstallationHandler()); } @And("^plugin \"foo\" is broken$") public void pluginIsBroken() { // Replace plugin provider pluginProvider = new TestPluginProvider(() -> new CannotStartPlugin("foo"), () -> new TestPlugin("bar")); service = createService(); } @And("^every installation should contain (\\d+) plugin$") public void everyInstallationShouldContainPlugin(int pluginCount) { for (Installation installation : store.findAll()) { assertThat(service.getContext(installation).getPlugins().size(), is(pluginCount)); } } @Data private class CucumberRoomMessage { private String oauthId; private String message; } @When("^the following messages come in from HipChat$") public void theFollowingMessagesComeInFromHipChat(List<CucumberRoomMessage> messages) throws Throwable { for (CucumberRoomMessage message : messages) { final RoomMessage roomMessage = new RoomMessage(); final RoomMessageItem roomMessageItem = new RoomMessageItem(); final RoomMessageItemData roomMessageItemData = new RoomMessageItemData(); roomMessage.setOauthId(message.getOauthId()); roomMessageItemData.setMessage(message.getMessage()); roomMessageItem.setMessage(roomMessageItemData); roomMessage.setItem(roomMessageItem); RequestFixture.requestFixture() .registry(r -> r .add(service) .add(new HipChatRequestValidator() { @Override public void validate(Request request, String oauthSecret) { } })) .method("POST") .body(JacksonUtils.serialize(roomMessage), "application/json") .handle(new RoomMessageHandler()); } } @Data private class 
CucumberMessage { private String roomId; private String message; } @Then("^the following messages are dispatched$") public void theFollowingMessagesAreDispatched(List<CucumberMessage> commands) { for (CucumberMessage expected : commands) { final List<RoomMessage> messagesForRoom = commandDispatcher.getDispatchedMessages().get(expected.getRoomId()); final String actual = messagesForRoom.get(0).getItem().getMessage().getMessage(); assertThat(actual, is(expected.getMessage())); } } private class FakeStartEvent implements StartEvent { @Override public Registry getRegistry() { return null; } @Override public boolean isReload() { return false; } } @When("^the \"start\" event is received$") public void theStartEventIsReceived() throws IOException { service.onStart(new FakeStartEvent()); } private class FakeStopEvent implements StopEvent { @Override public Registry getRegistry() { return null; } @Override public boolean isReload() { return false; } } @When("^the \"stop\" event is received$") public void theStopEventIsReceived() throws IOException { service.onStop(new FakeStopEvent()); } @Then("^all registered installations should be started with the right context$") public void allExistingInstallationsShouldBeStartedWithTheRightContext() { final List<TestPlugin> createdPlugins = new ArrayList<>(pluginProvider.getCreatedPlugins()); assertThat(createdPlugins.size(), is(givenInstallations.size() * 2)); // 2 plugins per installation for (Installation givenInstallation : givenInstallations) { final Optional<Installation> installation = service.getActiveInstallationByOauthId(givenInstallation.getOauthId()); assertThat(installation.isPresent(), is(true)); installation.ifPresent(i -> { final InstallationContext context = service.getContext(i); assertThat(context.getPlugins().size(), is(2)); for (InstalledPlugin plugin : context.getPlugins()) { assertThat(plugin.getContext().getRoomId(), is(givenInstallation.getRoomId())); final TestPlugin createdPlugin = (TestPlugin) 
plugin.getPlugin(); assertThat(createdPlugin.getContext(), is(plugin.getContext())); assertThat(createdPlugins.contains(createdPlugin), is(true)); createdPlugins.remove(createdPlugin); } }); } // All created plugins should have been part of an installation assertThat(createdPlugins.size(), is(0)); } @Then("^all registered installations should be stopped$") public void allRegisteredInstallationsShouldBeStopped() { for (Installation givenInstallation : givenInstallations) { final Optional<Installation> installation = service.getActiveInstallationByOauthId(givenInstallation.getOauthId()); assertThat(installation.isPresent(), is(false)); } // All plugins should have been stopped for (TestPlugin plugin : pluginProvider.getCreatedPlugins()) { assertThat(plugin.isStopped(), is(true)); assertThat(plugin.isUnregistered(), is(false)); } } @Then("^the installation store should be empty$") public void theInstallationStoreShouldBeEmpty() { assertThat(store.findAll().size(), is(0)); } @Then("^the plugins should have received an unregister call$") public void thePluginsShouldHaveReceivedAnUnregisterCall() throws Throwable { for (TestPlugin plugin : pluginProvider.getCreatedPlugins()) { assertThat(plugin.isStopped(), is(true)); assertThat(plugin.isUnregistered(), is(true)); final TestStore roomStore = (TestStore) pluginContextBuilder.getContextFor(plugin).getRoomStore(); assertThat(roomStore.getTimesClearCalled(), is(1)); } } }
/** *============================================================================ * The Ohio State University Research Foundation, Emory University, * the University of Minnesota Supercomputing Institute * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cagrid-grid-incubation/LICENSE.txt for details. *============================================================================ **/ /** *============================================================================ *============================================================================ **/ package org.cagrid.i2b2.ontomapper.processor; import gov.nih.nci.cagrid.cqlquery.LogicalOperator; import gov.nih.nci.cagrid.data.QueryProcessingException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.cagrid.i2b2.ontomapper.utils.AttributeNotFoundInModelException; import org.cagrid.i2b2.ontomapper.utils.CdeIdMapper; import org.cagrid.i2b2.ontomapper.utils.ClassNotFoundInModelException; /** * I2B2DataAccessManager * Handles data access requests for the i2b2QueryProcessor * * @author David */ public class I2B2DataAccessManager { private static final Log LOG = LogFactory.getLog(I2B2DataAccessManager.class); private DatabaseConnectionSource connectionSource = null; private CdeIdMapper cdeIdMapper = null; private String encodingServiceUrl = null; private I2B2QueryFactory queryFactory = null; public I2B2DataAccessManager(DatabaseConnectionSource connectionSource, CdeIdMapper cdeIdMapper, String encodingServiceUrl, I2B2QueryFactory queryFactory) { this.connectionSource = connectionSource; this.cdeIdMapper = cdeIdMapper; 
this.encodingServiceUrl = encodingServiceUrl; this.queryFactory = queryFactory; } /** * Gets all attribute values from the various fact data tables for a given class * * @param className * The name of the class for which to obtain fact data * @return * A Map from Attribute name to a list of fact data entries. The order of * these entries corresponds to the order in which objects are found in * the database. All lists are of the same length. * @throws QueryProcessingException */ public Map<String, List<FactDataEntry>> getAttributeValues(String className) throws QueryProcessingException { // get all the attribute names and their associated CDEs Map<String, Long> attributeCdes = null; try { attributeCdes = cdeIdMapper.getCdeIdsForClass(className); } catch (ClassNotFoundInModelException ex) { LOG.error(ex.getMessage(), ex); throw new QueryProcessingException(ex); } // query for fact data entries for each attribute Map<String, List<FactDataEntry>> attributeEntries = new HashMap<String, List<FactDataEntry>>(); for (String attributeName : attributeCdes.keySet()) { Long cde = attributeCdes.get(attributeName); List<String> paths = getPathsForCde(cde); LinkedList<FactDataEntry> entries = new LinkedList<FactDataEntry>(); if (paths.size() != 0) { entries.addAll(getFactEntriesByPaths(DatabaseFactTable.OBSERVATION, paths)); entries.addAll(getFactEntriesByPaths(DatabaseFactTable.MAP_DATA, paths)); // TODO: this won't have encounter_num or patient_num in it, so we probably have to omit it // entries.addAll(getFactEntriesByPaths(DatabaseFactTable.MAP_AGGREGATE, paths)); } attributeEntries.put(attributeName, entries); } return attributeEntries; } /** * Gets attribute values of a given data type restricted by a map * of required attribute values * * @param className * The name of the data type * @param restrictAttributes * A map from attribute name to a list of values which must be satisfied * @param operator * The logical operation to perform on the attribute values' presence * 
@return * @throws QueryProcessingException */ public Map<String, List<FactDataEntry>> getAttributeValues(String className, Map<String, List<String>> restrictAttributes, LogicalOperator operator) throws QueryProcessingException { // TODO: this return null; } /** * Gets a list of values of an attribute of a data type * * @param className * The class name of the data type * @param attributeName * The name of the attribute of that data type * @return * @throws QueryProcessingException */ public List<Object> getAttributeValues(String className, String attributeName) throws QueryProcessingException { // get fact table entries List<FactDataEntry> entries = getAttribteEntries(className, attributeName); // convert entries to object values List<Object> values = new ArrayList<Object>(entries.size()); try { for (FactDataEntry entry : entries) { values.add(entry.getTypedValue()); } } catch (ParseException ex) { String message = "Error getting typed value from a fact table entry: " + ex.getMessage(); LOG.error(message, ex); throw new QueryProcessingException(message, ex); } return values; } public List<String> getAttributeStringValues(String className, String attributeName) throws QueryProcessingException{ // get fact table entries List<FactDataEntry> entries = getAttribteEntries(className, attributeName); // convert entries to string values List<String> values = new ArrayList<String>(entries.size()); for (FactDataEntry entry : entries) { values.add(entry.getActualValueAsString()); } return values; } private List<FactDataEntry> getAttribteEntries(String className, String attributeName) throws QueryProcessingException { // figure out the CDE Long cde = getCdeForAttribute(className, attributeName); LOG.debug("CDE for " + className + "." + attributeName + " is " + String.valueOf(cde)); if (cde == null) { throw new QueryProcessingException("No CDE found for " + className + "." 
+ attributeName); } // get query paths for the CDE List<String> paths = getPathsForCde(cde); if (LOG.isDebugEnabled()) { LOG.debug("Paths for CDE " + cde + ":"); for (String path : paths) { LOG.debug("\t" + path); } } // get a list of all entries List<FactDataEntry> entries = new LinkedList<FactDataEntry>(); if (paths.size() != 0) { // get observation instances from the DB based on each query path entries.addAll(getFactEntriesByPaths(DatabaseFactTable.OBSERVATION, paths)); entries.addAll(getFactEntriesByPaths(DatabaseFactTable.MAP_DATA, paths)); entries.addAll(getFactEntriesByPaths(DatabaseFactTable.MAP_AGGREGATE, paths)); } return entries; } private Long getCdeForAttribute(String className, String attributeName) throws QueryProcessingException { // get the CDE of the attribute LOG.debug("Looking up CDE in id mapper"); Long cde = null; try { cde = cdeIdMapper.getCdeIdForAttribute(className, attributeName); } catch (ClassNotFoundInModelException ex) { LOG.error(ex); throw new QueryProcessingException(ex.getMessage(), ex); } catch (AttributeNotFoundInModelException ex) { LOG.error(ex); throw new QueryProcessingException(ex.getMessage(), ex); } // CDE has to exist if (cde == null) { throw new QueryProcessingException("No CDE found for attribute " + className + "." 
+ attributeName); } return cde; } private List<FactDataEntry> getFactEntriesByPaths(DatabaseFactTable table, List<String> paths) throws QueryProcessingException { List<FactDataEntry> entries = null; String parameterisedSql = null; switch (table) { case OBSERVATION: parameterisedSql = queryFactory.getObservationsByPathQuery(paths.size()); break; case MAP_DATA: parameterisedSql = queryFactory.getMapDataByPathQuery(paths.size()); break; case MAP_AGGREGATE: parameterisedSql = queryFactory.getMapAggrByPathQuery(paths.size()); break; } Connection dbConnection = null; PreparedStatement statement = null; ResultSet results = null; try { dbConnection = connectionSource.getConnection(); statement = dbConnection.prepareStatement(parameterisedSql); int index = 1; for (String path : paths) { statement.setString(index, path); index++; } results = statement.executeQuery(); entries = convertResultsToEntries(results); } catch (SQLException ex) { String message = "Error querying for fact data by CDE paths: " + ex.getMessage(); LOG.error(message, ex); LOG.error("SQL:\n" + parameterisedSql); throw new QueryProcessingException(message, ex); } finally { if (results != null) { try { results.close(); } catch (SQLException ex) { LOG.error("Error closing result set: " + ex.getMessage(), ex); } } if (statement != null) { try { statement.close(); } catch (SQLException ex) { LOG.error("Error closing statement: " + ex.getMessage(), ex); } } if (dbConnection != null) { try { dbConnection.close(); } catch (SQLException ex) { LOG.error("Error releasing DB connection: " + ex.getMessage(), ex); } } } return entries; } /** * Gets the concept paths in the I2B2 database * which are mapped to a caBIG CDE * * @param cde * The CDE id * @return * @throws QueryProcessingException */ private List<String> getPathsForCde(Long cde) throws QueryProcessingException { LOG.debug("Looking up paths for CDE " + cde); List<String> paths = new LinkedList<String>(); Connection dbConnection = null; PreparedStatement 
pathsStatement = null; ResultSet pathsResult = null; String projectName = cdeIdMapper.getProjectShortName(); String projectVersion = cdeIdMapper.getProjectVersion(); try { dbConnection = connectionSource.getConnection(); pathsStatement = dbConnection.prepareStatement(queryFactory.getCdePathsQuery()); pathsStatement.setInt(1, cde.intValue()); pathsStatement.setString(2, encodingServiceUrl); pathsStatement.setString(3, projectName); pathsStatement.setString(4, projectVersion); pathsResult = pathsStatement.executeQuery(); while (pathsResult.next()) { paths.add(pathsResult.getString(1)); } } catch (SQLException ex) { String message = "Error querying for CDE paths: " + ex.getMessage(); LOG.error(message, ex); throw new QueryProcessingException(message, ex); } finally { if (pathsResult != null) { try { pathsResult.close(); } catch (SQLException ex) { LOG.error("Error closing result set: " + ex.getMessage(), ex); } } if (pathsStatement != null) { try { pathsStatement.close(); } catch (SQLException ex) { LOG.error("Error closing statement: " + ex.getMessage(), ex); } } if (dbConnection != null) { try { dbConnection.close(); } catch (SQLException ex) { LOG.error("Error releasing DB connection: " + ex.getMessage(), ex); } } } return paths; } private List<FactDataEntry> convertResultsToEntries(ResultSet results) throws SQLException, QueryProcessingException { List<FactDataEntry> entries = new LinkedList<FactDataEntry>(); ResultSetMetaData metadata = results.getMetaData(); int columnCount = metadata.getColumnCount(); if (!(columnCount == 3 || columnCount == 5)) { String message = "Unexpected number of columns in result set: " + metadata.getColumnCount(); LOG.error(message); throw new QueryProcessingException(message); } while (results.next()) { FactDataEntry entry = null; String valueType = results.getString(I2B2QueryFactory.VALUE_TYPE_FIELD); String textValue = results.getString(I2B2QueryFactory.TEXT_VALUE_FIELD); String numValueString = 
results.getString(I2B2QueryFactory.NUMERIC_VALUE_FIELD); Double numValue = numValueString != null ? Double.valueOf(numValueString) : null; if (columnCount == 5) { String encounterNumString = results.getString(I2B2QueryFactory.ENCOUNTER_NUMBER_FIELD); String patientNumString = results.getString(I2B2QueryFactory.PATIENT_NUMBER_FIELD); Integer encounterNum = encounterNumString != null ? Integer.valueOf(encounterNumString) : null; Integer patientNum = patientNumString != null ? Integer.valueOf(patientNumString) : null; entry = new FactDataEntry(valueType, textValue, numValue, encounterNum, patientNum); } else { entry = new FactDataEntry(valueType, textValue, numValue); } entries.add(entry); } return entries; } }
/* * Copyright 2013-2014 Richard M. Hightower * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * __________ _____ __ .__ * \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____ * | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\ * | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ > * |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ / * \/ \/ \/ \/ \/ \//_____/ * ____. ___________ _____ ______________.___. * | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | | * | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | | * /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ | * \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______| * \/ \/ \/ \/ \/ \/ */ package org.boon; import org.boon.concurrent.Timer; import org.junit.Test; import java.util.ArrayList; import java.util.List; import static org.boon.Boon.puts; import static org.boon.primitive.Arry.idx; import static org.boon.primitive.Arry.len; import static org.boon.Exceptions.die; import static org.junit.Assert.assertEquals; /** * Created by rick on 12/8/13. 
*/ public class StringScannerTest { private static final String TEST_STRING = "[199984,1384795052823,\"/127.0.0.1:51706\",[\"abc123\",\"rickHigh\"," + "\"217.0.0.1\",\"start\",1234567,12345678,\"abcsesson123\",\"asdfasdf\"]]"; boolean ok = true; //@Test public void speedTestParseInt() { int numIter = 50_000_000; List<String> numbers = new ArrayList<>(numIter); for (int index =0; index < numIter; index++) { numbers.add("" + index); } long total = 0; puts("Number generated", numbers.size()); for (String num : numbers) { final int i = StringScanner.parseInt(num); total += i; } puts (total); total = 0; long start = Timer.timer().now(); for (String num : numbers) { final int i = StringScanner.parseInt(num); total += i; } long stop = Timer.timer().now(); long duration = stop - start; puts("new parse", duration, total); start = Timer.timer().now(); total = 0; for (String num : numbers) { final int i = Integer.parseInt(num); total += i; } stop = Timer.timer().now(); duration = stop - start; puts("old parse", duration, total); } //@Test public void speedTestParseLong() { int numIter = 10_000_000; long BIG_NUM = Integer.MAX_VALUE; List<String> numbers = new ArrayList<>(numIter); for (int index =0; index < numIter; index++) { numbers.add("" + (BIG_NUM+ index)); } long total = 0; puts("Number generated", numbers.size(), BIG_NUM); for (String num : numbers) { final long i = StringScanner.parseLong(num); total += i; } puts (total); total = 0; long start = Timer.timer().now(); for (String num : numbers) { final long i = StringScanner.parseLong(num); total += i; } long stop = Timer.timer().now(); long duration = stop - start; puts("new parse", duration, total); start = Timer.timer().now(); total = 0; for (String num : numbers) { final long i = Long.parseLong(num); total += i; } stop = Timer.timer().now(); duration = stop - start; puts("old parse", duration, total); } //@Test //JDK wins this one but it is close public void speedTestDoubleLong() { int numIter = 10_000_000; long 
BIG_NUM = Integer.MAX_VALUE; List<String> numbers = new ArrayList<>(numIter); for (int index =0; index < numIter; index++) { numbers.add("" + ((BIG_NUM+ index) * 1.33d)); } long total = 0; puts("Number generated", numbers.size(), BIG_NUM); for (String num : numbers) { final double i = StringScanner.parseDouble(num); total += i; } puts (total); total = 0; long start = Timer.timer().now(); for (String num : numbers) { final double i = StringScanner.parseDouble(num); total += i; } long stop = Timer.timer().now(); long duration = stop - start; puts("new parse", duration, total); start = Timer.timer().now(); total = 0; for (String num : numbers) { final double i = Double.parseDouble(num); total += i; } stop = Timer.timer().now(); duration = stop - start; puts("old parse", duration, total); } //@Test //We win this one public void speedTestDoubleLong2() { int numIter = 10_000_000; long BIG_NUM = 1_000_000; List<String> numbers = new ArrayList<>(numIter); for (int index =0; index < numIter; index++) { numbers.add("" + ((BIG_NUM+ index) * 0.1d)); } long total = 0; puts("Number generated", numbers.size(), BIG_NUM); for (String num : numbers) { final double i = StringScanner.parseDouble(num); total += i; } puts (total); total = 0; long start = Timer.timer().now(); for (String num : numbers) { final double i = StringScanner.parseDouble(num); total += i; } long stop = Timer.timer().now(); long duration = stop - start; puts("new parse", duration, total); start = Timer.timer().now(); total = 0; for (String num : numbers) { final double i = Double.parseDouble(num); total += i; } stop = Timer.timer().now(); duration = stop - start; puts("old parse", duration, total); } @Test public void parseFloatIssue179() { String testString = "-0.0"; float value = StringScanner.parseFloat(testString); String str = ""+value; ok |= str.equals("-0.0") || die(); } @Test public void testRemoveChars() { String testString = "1_2 345 6 _____\t\t7\t890"; String after = StringScanner.removeChars ( 
testString, '_', ' ', '\t' ); boolean ok = "1234567890".equals (after ) || die( "$"+ after + "$"); } @Test public void testCreateFromString() { String[] split = StringScanner.splitByCharsNoneEmpty( TEST_STRING, '[', ',', '"', '\\', ':', ']', '/' ); String first = idx( split, 0 ); String second = idx( split, 1 ); String third = idx( split, 2 ); String fourth = idx( split, 3 ); String fifth = idx( split, 4 ); String sixth = idx( split, 5 ); String seventh = idx( split, 6 ); String last = idx( split, -1 ); assertEquals( "199984", first ); assertEquals( "1384795052823", second ); assertEquals( "127.0.0.1", third ); assertEquals( "51706", fourth ); assertEquals( "abc123", fifth ); assertEquals( "rickHigh", sixth ); assertEquals( "217.0.0.1", seventh ); assertEquals( "asdfasdf", last ); assertEquals( 12, len( split ) ); } @Test public void testSimple() { String[] split = StringScanner.splitByCharsNoneEmpty( "1,2,3", ',' ); String firstArg = idx( split, 0 ); assertEquals( "1", firstArg ); } @Test public void testSimple2() { String[] split = StringScanner.splitByCharsNoneEmpty( "1,2,,4", ',' ); String firstArg = idx( split, 0 ); String second = idx( split, 1 ); String third = idx( split, 2 ); assertEquals( "1", firstArg ); assertEquals( "2", second ); assertEquals( "4", third ); } @Test public void testSubStringAfter() { final String substringAfter = StringScanner.substringAfter("love :: rocket", " :: "); ok |= substringAfter.equals("rocket") || die(substringAfter); } @Test public void testSubStringBefore() { final String substringBefore = StringScanner.substringBefore("love :: rocket", " :: "); ok |= substringBefore.equals("love") || die(substringBefore); } }
/* Copyright 2013 Nationale-Nederlanden Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package nl.nn.adapterframework.receivers; import java.util.HashMap; import java.util.Map; import nl.nn.adapterframework.core.IPullingListener; import nl.nn.adapterframework.core.IThreadCountControllable; import nl.nn.adapterframework.core.ListenerException; import nl.nn.adapterframework.util.Counter; import nl.nn.adapterframework.util.LogUtil; import nl.nn.adapterframework.util.RunStateEnum; import nl.nn.adapterframework.util.Semaphore; import nl.nn.adapterframework.util.TracingUtil; import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.log4j.Logger; import org.apache.log4j.NDC; import org.springframework.core.task.TaskExecutor; import org.springframework.scheduling.SchedulingAwareRunnable; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.DefaultTransactionDefinition; /** * Container that provides threads to exectue pulling listeners. 
* * @author Tim van der Leeuw * @since 4.8 * @version $Id$ */ public class PullingListenerContainer implements IThreadCountControllable { protected Logger log = LogUtil.getLogger(this); private TransactionDefinition txNew=null; private ReceiverBase receiver; private PlatformTransactionManager txManager; private Counter threadsRunning = new Counter(0); private Counter tasksStarted = new Counter(0); private Semaphore processToken = null; // guard against to many messages being processed at the same time private Semaphore pollToken = null; // guard against to many threads polling at the same time private boolean idle=false; // true if the last messages received was null, will cause wait loop private int retryInterval=1; private int maxThreadCount=1; /** * The thread-pool for spawning threads, injected by Spring */ private TaskExecutor taskExecutor; private PullingListenerContainer() { super(); } public void configure() { if (receiver.getNumThreadsPolling()>0 && receiver.getNumThreadsPolling()<receiver.getNumThreads()) { pollToken = new Semaphore(receiver.getNumThreadsPolling()); } processToken = new Semaphore(receiver.getNumThreads()); maxThreadCount=receiver.getNumThreads(); if (receiver.isTransacted()) { DefaultTransactionDefinition txDef = new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRES_NEW); if (receiver.getTransactionTimeout()>0) { txDef.setTimeout(receiver.getTransactionTimeout()); } txNew=txDef; } } public void start() { taskExecutor.execute(new ControllerTask()); } public void stop() { } public boolean isThreadCountReadable() { return true; } public boolean isThreadCountControllable() { return true; } public int getCurrentThreadCount() { return (int)threadsRunning.getValue(); } public int getMaxThreadCount() { return maxThreadCount; } public void increaseThreadCount() { maxThreadCount++; processToken.release(); } public void decreaseThreadCount() { if (maxThreadCount>1) { maxThreadCount--; processToken.tighten(); } } private class 
ControllerTask implements SchedulingAwareRunnable { public boolean isLongLived() { return true; } public void run() { log.debug(receiver.getLogPrefix()+" taskExecutor ["+ToStringBuilder.reflectionToString(taskExecutor)+"]"); receiver.setRunState(RunStateEnum.STARTED); log.debug(receiver.getLogPrefix()+"started ControllerTask"); try { while (receiver.isInRunState(RunStateEnum.STARTED) && !Thread.currentThread().isInterrupted()) { processToken.acquire(); if (pollToken != null) { pollToken.acquire(); } if (isIdle() && receiver.getPollInterval()>0) { if (log.isDebugEnabled() && receiver.getPollInterval()>600)log.debug(receiver.getLogPrefix()+"is idle, sleeping for ["+receiver.getPollInterval()+"] seconds"); for (int i=0; i<receiver.getPollInterval() && receiver.isInRunState(RunStateEnum.STARTED); i++) { Thread.sleep(1000); } } taskExecutor.execute(new ListenTask()); } } catch (InterruptedException e) { log.warn("polling interrupted", e); } log.debug(receiver.getLogPrefix()+"closing down ControllerTask"); receiver.stopRunning(); receiver.closeAllResources(); NDC.remove(); } } private class ListenTask implements SchedulingAwareRunnable { public boolean isLongLived() { return false; } public void run() { IPullingListener listener = null; Map threadContext = null; boolean pollTokenReleased=false; try { threadsRunning.increase(); if (receiver.isInRunState(RunStateEnum.STARTED)) { listener = (IPullingListener) receiver.getListener(); threadContext = listener.openThread(); if (threadContext == null) { threadContext = new HashMap(); } long startProcessingTimestamp; Object rawMessage = null; TransactionStatus txStatus = null; try { try { if (receiver.isTransacted()) { txStatus = txManager.getTransaction(txNew); } rawMessage = listener.getRawMessage(threadContext); resetRetryInterval(); setIdle(rawMessage==null); } catch (Exception e) { if (txStatus!=null) { txManager.rollback(txStatus); } if (receiver.isOnErrorContinue()) { increaseRetryIntervalAndWait(e); } else { 
receiver.error("stopping receiver after exception in retrieving message", e); receiver.stopRunning(); return; } } finally { pollTokenReleased=true; if (pollToken != null) { pollToken.release(); } } if (rawMessage != null) { tasksStarted.increase(); log.debug(receiver.getLogPrefix()+"started ListenTask ["+tasksStarted.getValue()+"]"); Thread.currentThread().setName(receiver.getName()+"-listener["+tasksStarted.getValue()+"]"); // found a message, process it TracingUtil.beforeEvent(this); startProcessingTimestamp = System.currentTimeMillis(); try { receiver.processRawMessage(listener, rawMessage, threadContext); if (txStatus != null) { if (txStatus.isRollbackOnly()) { receiver.warn(receiver.getLogPrefix()+"pipeline processing ended with status RollbackOnly, so rolling back transaction"); txManager.rollback(txStatus); } else { txManager.commit(txStatus); } } } catch (Exception e) { TracingUtil.exceptionEvent(this); if (txStatus != null && !txStatus.isCompleted()) { txManager.rollback(txStatus); } if (receiver.isOnErrorContinue()) { receiver.error(receiver.getLogPrefix()+"caught Exception processing message, will continue processing next message", e); } else { receiver.error(receiver.getLogPrefix()+"stopping receiver after exception in processing message", e); receiver.stopRunning(); } } finally { TracingUtil.afterEvent(this); } } } finally { if (txStatus != null && !txStatus.isCompleted()) { txManager.rollback(txStatus); } } } } catch (Throwable e) { receiver.error("error occured in receiver [" + receiver.getName() + "]", e); } finally { processToken.release(); if (!pollTokenReleased && pollToken != null) { pollToken.release(); } threadsRunning.decrease(); if (listener != null) { try { listener.closeThread(threadContext); } catch (ListenerException e) { receiver.error("Exception closing listener of Receiver [" + receiver.getName() + "]", e); } } NDC.remove(); } } } /** * Starts the receiver. 
This method is called by the startRunning method.<br/> * Basically: * <ul> * <li>it calls the getRawMessage method to get a message<li> * <li> it performs the onMessage method, resulting a PipeLineResult</li> * <li>it calls the afterMessageProcessed() method of the listener<li> * <li> it optionally sends the result using the sender</li> * </ul> */ // public void run() { // threadsRunning.increase(); // Thread.currentThread().setName(receiver.getName()+"-listener["+threadsRunning.getValue()+"]"); // IPullingListener listener = null; // Map threadContext = null; // try { // listener = (IPullingListener) receiver.getListener(); // threadContext = listener.openThread(); // if (threadContext == null) { // threadContext = new HashMap(); // } // long startProcessingTimestamp; // long finishProcessingTimestamp = System.currentTimeMillis(); // receiver.setRunState(RunStateEnum.STARTED); // while (receiver.isInRunState(RunStateEnum.STARTED)) { // boolean permissionToGo = true; // if (pollToken != null) { // try { // permissionToGo = false; // pollToken.acquire(); // permissionToGo = true; // } catch (Exception e) { // receiver.error("acquisition of polltoken interupted", e); // receiver.stopRunning(); // } // } // Object rawMessage = null; // TransactionStatus txStatus = null; // try { // try { // if (permissionToGo && receiver.isInRunState(RunStateEnum.STARTED)) { // try { // if (receiver.isTransacted()) { // txStatus = txManager.getTransaction(txNew); // } // rawMessage = listener.getRawMessage(threadContext); // resetRetryInterval(); // } catch (Exception e) { // if (txStatus!=null) { // txManager.rollback(txStatus); // } // if (receiver.isOnErrorContinue()) { // increaseRetryIntervalAndWait(e); // } else { // receiver.error("stopping receiver after exception in retrieving message", e); // receiver.stopRunning(); // } // } // } // } finally { // if (pollToken != null) { // pollToken.release(); // } // } // if (rawMessage != null) { // // found a message, process it // try 
{ // TracingUtil.beforeEvent(this); // startProcessingTimestamp = System.currentTimeMillis(); // try { // receiver.processRawMessage(listener, rawMessage, threadContext, finishProcessingTimestamp - startProcessingTimestamp); // if (txStatus != null) { // if (txStatus.isRollbackOnly()) { // receiver.warn(receiver.getLogPrefix()+"pipeline processing ended with status RollbackOnly, so rolling back transaction"); // txManager.rollback(txStatus); // } else { // txManager.commit(txStatus); // } // } // } catch (Exception e) { // TracingUtil.exceptionEvent(this); // if (txStatus != null && !txStatus.isCompleted()) { // txManager.rollback(txStatus); // } // if (receiver.isOnErrorContinue()) { // receiver.error(receiver.getLogPrefix()+"caught Exception processing message, will continue processing next message", e); // } else { // receiver.error(receiver.getLogPrefix()+"stopping receiver after exception in processing message", e); // receiver.stopRunning(); // } // } // } finally { // finishProcessingTimestamp = System.currentTimeMillis(); // TracingUtil.afterEvent(this); // } // } else { // // no message found, cleanup // if (txStatus != null && !txStatus.isCompleted()) { // txManager.rollback(txStatus); // } // if (receiver.getPollInterval()>0) { // for (int i=0; i<receiver.getPollInterval() && receiver.isInRunState(RunStateEnum.STARTED); i++) { // Thread.sleep(1000); // } // } // } // } finally { // if (txStatus != null && !txStatus.isCompleted()) { // txManager.rollback(txStatus); // } // } // } // } catch (Throwable e) { // receiver.error("error occured in receiver [" + receiver.getName() + "]", e); // } finally { // if (listener != null) { // try { // listener.closeThread(threadContext); // } catch (ListenerException e) { // receiver.error("Exception closing listener of Receiver [" + receiver.getName() + "]", e); // } // } // long stillRunning = threadsRunning.decrease(); // if (stillRunning > 0) { // receiver.info("a thread of Receiver [" + receiver.getName() + "] 
exited, [" + stillRunning + "] are still running"); // receiver.throwEvent(ReceiverBase.RCV_THREAD_EXIT_MONITOR_EVENT); // return; // } // receiver.info("the last thread of Receiver [" + receiver.getName() + "] exited, cleaning up"); // receiver.closeAllResources(); // NDC.remove(); // } // } private void resetRetryInterval() { synchronized (receiver) { if (retryInterval > ReceiverBase.RCV_SUSPENSION_MESSAGE_THRESHOLD) { receiver.throwEvent(ReceiverBase.RCV_SUSPENDED_MONITOR_EVENT); } retryInterval = 1; } } private void increaseRetryIntervalAndWait(Throwable t) { long currentInterval; synchronized (receiver) { currentInterval = retryInterval; retryInterval = retryInterval * 2; if (retryInterval > 3600) { retryInterval = 3600; } } receiver.error("caught Exception retrieving message, will continue retrieving messages in [" + currentInterval + "] seconds", t); if (currentInterval*2 > ReceiverBase.RCV_SUSPENSION_MESSAGE_THRESHOLD) { receiver.throwEvent(ReceiverBase.RCV_SUSPENDED_MONITOR_EVENT); } while (receiver.isInRunState(RunStateEnum.STARTED) && currentInterval-- > 0) { try { Thread.sleep(1000); } catch (Exception e2) { receiver.error("sleep interupted", e2); receiver.stopRunning(); } } } public void setReceiver(ReceiverBase receiver) { this.receiver = receiver; } public ReceiverBase getReceiver() { return receiver; } public void setTxManager(PlatformTransactionManager manager) { txManager = manager; } public PlatformTransactionManager getTxManager() { return txManager; } public void setTaskExecutor(TaskExecutor executor) { taskExecutor = executor; } public TaskExecutor getTaskExecutor() { return taskExecutor; } public synchronized void setIdle(boolean b) { idle = b; } public synchronized boolean isIdle() { return idle; } }
/* * Copyright 2007 - 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sf.jailer.ui; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Toolkit; import java.awt.Window; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.PrintStream; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JFileChooser; import javax.swing.JMenu; import javax.swing.JOptionPane; import javax.swing.JPopupMenu; import javax.swing.SwingUtilities; import javax.swing.WindowConstants; import net.sf.jailer.CommandLineParser; import net.sf.jailer.Jailer; import net.sf.jailer.database.SqlException; import net.sf.jailer.progress.ProgressListener; import 
net.sf.jailer.util.CancellationException; import net.sf.jailer.util.CancellationHandler; import org.apache.log4j.Logger; /** * Some utility methods. * * @author Ralf Wisser */ public class UIUtil { /** * The logger. */ private static final Logger _log = Logger.getLogger(UIUtil.class); /** * Opens file chooser. * * @param selectedFile * if not <code>null</code> this file will be selected initially * @param startDir * directory to start with * @param description * description of file to chose */ public static String choseFile(File selectedFile, String startDir, final String description, final String extension, Component parent, boolean addExtension, boolean forLoad) { return choseFile(selectedFile, startDir, description, extension, parent, addExtension, forLoad, true); } /** * Opens file chooser. * * @param selectedFile * if not <code>null</code> this file will be selected initially * @param startDir * directory to start with * @param description * description of file to chose */ public static String choseFile(File selectedFile, String startDir, final String description, final String extension, Component parent, boolean addExtension, boolean forLoad, final boolean allowZip) { String newStartDir = restoreCurrentDir(extension); if (newStartDir != null) { startDir = newStartDir; } JFileChooser fileChooser = new JFileChooser(startDir); javax.swing.filechooser.FileFilter filter = new javax.swing.filechooser.FileFilter() { public boolean accept(File pathname) { return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith(extension) || pathname.getName().toLowerCase() .endsWith(extension + ".gz") || pathname.getName().toLowerCase() .endsWith(extension + ".zip"); } public String getDescription() { if (extension.endsWith(".sql") || extension.endsWith(".xml")) { if (allowZip) { return "*" + extension + " *" + extension + ".zip"; } } return "*" + extension; } }; fileChooser.setFileFilter(filter); fileChooser.setDialogTitle(description); if (selectedFile != null) { 
fileChooser.setSelectedFile(selectedFile);
        }
        fileChooser.setDialogType(forLoad ? JFileChooser.OPEN_DIALOG
                : JFileChooser.SAVE_DIALOG);
        int returnVal = forLoad ? fileChooser.showOpenDialog(parent)
                : fileChooser.showSaveDialog(parent);
        if (returnVal == JFileChooser.APPROVE_OPTION) {
            String fn = "";
            try {
                // Build a file name relative to the current working directory
                // by walking up the parent chain until the working directory
                // is reached.
                File f = fileChooser.getSelectedFile();
                String work = new File(".").getCanonicalPath();
                fn = f.getName();
                f = f.getParentFile();
                while (f != null && !f.getCanonicalPath().equals(work)) {
                    fn = f.getName() + File.separator + fn;
                    f = f.getParentFile();
                }
                if (addExtension && !fn.endsWith(extension)) {
                    fn += extension;
                }
                try {
                    // Remember the directory for the next chooser opened with
                    // the same extension (best effort).
                    storeCurrentDir(extension, fileChooser.getSelectedFile()
                            .getParent());
                } catch (Exception e) {
                    // ignore
                }
                return fn;
            } catch (IOException e1) {
                // Relativization failed; fall back to the absolute path.
                try {
                    fn = fileChooser.getSelectedFile().getCanonicalPath();
                    if (addExtension && !fn.endsWith(extension)) {
                        fn += extension;
                    }
                    return fn;
                } catch (IOException e) {
                    throw new RuntimeException(e.getMessage(), e);
                }
            }
        }
        return null;
    }

    /**
     * File to store current directory of file chooser.
     */
    private final static File cdSettings = new File(".cdsettings");

    /**
     * Stores current directory of file chooser.
     *
     * @param key
     *            the key under which to store current directory
     * @param currentDir
     *            the current directory
     */
    @SuppressWarnings("unchecked")
    private static void storeCurrentDir(String key, String currentDir) {
        // Best effort: the setting is merely a convenience, so any I/O or
        // deserialization problem is silently ignored.
        try {
            Map<String, String> cd = new HashMap<String, String>();
            if (cdSettings.exists()) {
                try {
                    ObjectInputStream in = new ObjectInputStream(
                            new FileInputStream(cdSettings));
                    cd = (Map<String, String>) in.readObject();
                    in.close();
                } catch (Exception e) {
                    // ignore; start with an empty map
                }
            }
            cdSettings.delete();
            cd.put(key, currentDir);
            ObjectOutputStream out = new ObjectOutputStream(
                    new FileOutputStream(cdSettings));
            out.writeObject(cd);
            out.close();
        } catch (Exception e) {
            // ignore
        }
    }

    /**
     * Restores current directory of file chooser.
* @param key
     *            the key of the current directory to restore
     * @return the current directory, or <code>null</code> if no directory has
     *         been stored under the key
     */
    @SuppressWarnings("unchecked")
    private static String restoreCurrentDir(String key) {
        if (cdSettings.exists()) {
            try {
                ObjectInputStream in = new ObjectInputStream(
                        new FileInputStream(cdSettings));
                String cd = ((Map<String, String>) in.readObject()).get(key);
                in.close();
                return cd;
            } catch (Exception e) {
                // ignore; worst case the chooser starts in the default dir
            }
        }
        return null;
    }

    /**
     * Calls the Jailer export engine via CLI. Convenience overload that never
     * closes the output window on error (delegates with
     * <code>closeOutputWindowOnError == false</code>).
     *
     * @param ownerOfConsole
     *            owner component of jailer console
     * @param cliArgs
     *            CLI arguments
     * @param showLogfileButton
     *            console property
     * @param printCommandLine
     *            if true, print CLI command line
     * @param showExplainLogButton
     *            console property
     * @param closeOutputWindow
     *            if <code>true</code>, close console immediately after call
     * @param continueOnErrorQuestion
     *            to ask when call fails
     * @param password
     *            CLI argument to mask in the printed command line
     * @return <code>true</code> iff call succeeded
     */
    public static boolean runJailer(Window ownerOfConsole,
            List<String> cliArgs, boolean showLogfileButton,
            final boolean printCommandLine, boolean showExplainLogButton,
            final boolean closeOutputWindow, String continueOnErrorQuestion,
            String password, final ProgressListener progressListener,
            final ProgressPanel progressPanel, final boolean showExeptions,
            boolean fullSize) {
        return runJailer(ownerOfConsole, cliArgs, showLogfileButton,
                printCommandLine, showExplainLogButton, closeOutputWindow,
                continueOnErrorQuestion, password, progressListener,
                progressPanel, showExeptions, fullSize, false);
    }

    /**
     * Calls the Jailer export engine via CLI.
* @param ownerOfConsole
     *            owner component of jailer console
     * @param cliArgs
     *            CLI arguments
     * @param showLogfileButton
     *            console property
     * @param printCommandLine
     *            if true, print CLI command line
     * @param showExplainLogButton
     *            console property
     * @param closeOutputWindow
     *            if <code>true</code>, close console immediately after call
     * @param continueOnErrorQuestion
     *            to ask when call fails
     * @param password
     *            CLI argument to mask in the printed command line
     * @return <code>true</code> iff call succeeded
     */
    public static boolean runJailer(Window ownerOfConsole,
            List<String> cliArgs, boolean showLogfileButton,
            final boolean printCommandLine, boolean showExplainLogButton,
            final boolean closeOutputWindow, String continueOnErrorQuestion,
            String password, final ProgressListener progressListener,
            final ProgressPanel progressPanel, final boolean showExeptions,
            boolean fullSize, final boolean closeOutputWindowOnError) {
        JDialog dialog = new JDialog(ownerOfConsole);
        List<String> args = new ArrayList<String>(cliArgs);
        final StringBuffer arglist = createCLIArgumentString(password, args);
        final String[] argsarray = new String[args.size()];
        int i = 0;
        for (String arg : args) {
            argsarray[i++] = arg.trim();
        }
        final JailerConsole outputView = new JailerConsole(ownerOfConsole,
                dialog, showLogfileButton, showExplainLogButton,
                progressPanel, fullSize);
        final PrintStream originalOut = System.out;
        final boolean[] ready = new boolean[] { true };
        // Redirect System.out into the console window. Bytes are buffered and
        // flushed to the UI every 60 lines, or whenever a '@' marker byte is
        // written ('@' itself is never echoed or displayed).
        System.setOut(new PrintStream(new OutputStream() {
            private int lineNr = 0;
            StringBuffer buffer = new StringBuffer();

            public synchronized void write(byte[] arg0, int arg1, int arg2)
                    throws IOException {
                super.write(arg0, arg1, arg2);
            }

            public void write(int b) throws IOException {
                if (b != '@') {
                    originalOut.write(b);
                }
                boolean wasReady;
                synchronized (buffer) {
                    wasReady = ready[0];
                    if (b != '@') {
                        buffer.append((char) b);
                    }
                }
                if ((char) b == '\n') {
                    ++lineNr;
                }
                if ((char) b == '\n' && lineNr % 60 == 0 || (char) b == '@') {
                    // Only schedule a UI flush if the previous one finished
                    // (ready[0] guards against piling up EDT jobs).
                    if (wasReady) {
                        synchronized (buffer)
                        {
                            ready[0] = false;
                        }
                        try {
                            SwingUtilities.invokeAndWait(new Runnable() {
                                public void run() {
                                    synchronized (buffer) {
                                        if (buffer.length() > 0) {
                                            outputView.appendText(buffer
                                                    .toString());
                                            buffer.setLength(0);
                                        }
                                    }
                                    ready[0] = true;
                                }
                            });
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } catch (InvocationTargetException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        }));
        final boolean[] exceptionShown = new boolean[1];
        try {
            // Truncate old log files so the console's log buttons show only
            // output of this run.
            try {
                File exportLog = new File("export.log");
                File sqlLog = new File("sql.log");
                if (exportLog.exists()) {
                    FileOutputStream out = new FileOutputStream(exportLog);
                    out.close();
                }
                if (sqlLog.exists()) {
                    FileOutputStream out = new FileOutputStream(sqlLog);
                    out.close();
                }
            } catch (Exception e) {
                UIUtil.showException(null, "Error", e);
            }
            final boolean[] result = new boolean[] { false };
            final Throwable[] exp = new Throwable[1];
            final StringBuffer warnings = new StringBuffer();
            final boolean[] fin = new boolean[] { false };
            outputView.dialog.addWindowListener(new WindowAdapter() {
                boolean cancelled = false;

                @Override
                public void windowClosing(WindowEvent e) {
                    boolean f;
                    synchronized (UIUtil.class) {
                        f = exp[0] == null;
                    }
                    if (cancelled && f) {
                        JOptionPane.showMessageDialog(outputView.dialog,
                                "Cancellation in progress...", "Cancellation",
                                JOptionPane.INFORMATION_MESSAGE);
                    }
                    if (exp[0] == null && !fin[0] && !cancelled) {
                        if (JOptionPane.showConfirmDialog(outputView.dialog,
                                "Cancel operation?", "Cancellation",
                                JOptionPane.YES_NO_OPTION,
                                JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION) {
                            // Cancel on a separate thread; must not block the
                            // event-dispatch thread.
                            new Thread(new Runnable() {
                                @Override
                                public void run() {
                                    CancellationHandler.cancel(null);
                                }
                            }).start();
                            outputView.dialog
                                    .setTitle("Jailer Console - cancelling...");
                            if (progressListener != null) {
                                progressListener.newStage("cancelling", true,
                                        true);
                            }
                            cancelled = true;
                        }
                    }
                }
            });
            // Watchdog thread: periodically writes the '@' marker so buffered
            // console output is flushed to the UI while the engine runs.
            new Thread(new Runnable() {
                public void run() {
                    for (int i = 0;; ++i) {
                        try {
                            Thread.sleep(i == 0 ? 500 : 1000);
                        } catch (InterruptedException e) {
                        }
                        synchronized (fin) {
                            if (fin[0]) {
                                break;
                            }
                            System.out.print("@");
                        }
                    }
                }
            }).start();
            // Worker thread: runs the export engine and reports the outcome
            // back to the EDT via result[0]/exp[0]/fin[0].
            new Thread(new Runnable() {
                public void run() {
                    try {
                        if (printCommandLine) {
                            _log.info("arguments: " + arglist.toString().trim());
                        }
                        result[0] = Jailer
                                .jailerMain(argsarray,
                                        disableWarnings ? new StringBuffer()
                                                : warnings, progressListener);
                    } catch (Throwable t) {
                        synchronized (UIUtil.class) {
                            exp[0] = t;
                        }
                    } finally {
                        // flush
                        System.out.println("@");
                        synchronized (UIUtil.class) {
                            fin[0] = true;
                        }
                    }
                    SwingUtilities.invokeLater(new Runnable() {
                        public void run() {
                            synchronized (UIUtil.class) {
                                outputView.dialog
                                        .setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
                                if (progressListener != null) {
                                    progressListener.newStage(
                                            exp[0] == null ? "finished"
                                                    : "failed",
                                            exp[0] != null, true);
                                }
                                if ((exp[0] instanceof CancellationException)
                                        || closeOutputWindowOnError
                                        || (closeOutputWindow && result[0]
                                                && exp[0] == null && warnings
                                                .length() == 0)) {
                                    outputView.dialog.setVisible(false);
                                } else {
                                    outputView.finish(result[0]
                                            && exp[0] == null);
                                    if (result[0] && warnings.length() > 0) {
                                        // Truncate very long warning texts.
                                        JOptionPane.showMessageDialog(
                                                outputView.dialog,
                                                warnings.length() > 800 ? warnings
                                                        .substring(0, 800)
                                                        + "..."
                                                        : warnings.toString(),
                                                "Warning",
                                                JOptionPane.INFORMATION_MESSAGE);
                                        outputView.dialog.setVisible(false);
                                    } else if (showExeptions
                                            && exp[0] != null
                                            && !(exp[0] instanceof CancellationException)) {
                                        UIUtil.showException(outputView.dialog,
                                                "Error", exp[0]);
                                        exceptionShown[0] = true;
                                    }
                                    if (result[0] && progressPanel != null) {
                                        progressPanel.confirm();
                                    }
                                }
                            }
                        }
                    });
                }
            }, "jailer-main").start();
            outputView.dialog.setVisible(true);
            synchronized (UIUtil.class) {
                if (exp[0] != null) {
                    throw exp[0];
                }
            }
            if (!result[0] && continueOnErrorQuestion != null) {
                result[0] = JOptionPane.showConfirmDialog(outputView.dialog,
                        continueOnErrorQuestion, "Error",
                        JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION;
            }
            return result[0];
        } catch (Throwable t) {
            if (t instanceof CancellationException) {
                CancellationHandler.reset(null);
            } else {
                boolean shown = false;
                synchronized (UIUtil.class) {
                    shown = exceptionShown[0];
                }
                if (!shown) {
                    UIUtil.showException(null, "Error", t);
                }
            }
            return false;
        } finally {
            // Always restore the original stdout.
            System.setOut(originalOut);
        }
    }

    /**
     * Appends the global data-model/enhancer/working-folder arguments to
     * <code>args</code> and returns a printable, shell-quoted command line in
     * which the password is masked.
     */
    public static StringBuffer createCLIArgumentString(String password,
            List<String> args) {
        args.add("-datamodel");
        args.add(CommandLineParser.getInstance().getDataModelFolder());
        args.add("-script-enhancer");
        args.add(CommandLineParser.getInstance().enhancerFolder);
        if (CommandLineParser.getInstance().workingFolder != null) {
            args.add("-working-folder");
            args.add(CommandLineParser.getInstance().workingFolder);
        }
        final StringBuffer arglist = new StringBuffer();
        for (String arg : args) {
            if (arg != null && arg.equals(password) && password.length() > 0) {
                arglist.append(" \"<password>\"");
            } else {
                // Quote arguments containing shell metacharacters.
                if ("".equals(arg) || arg.contains(" ") || arg.contains("<")
                        || arg.contains(">") || arg.contains("*")
                        || arg.contains("?") || arg.contains("|")
                        || arg.contains("$") || arg.contains("\"")
                        || arg.contains("'") || arg.contains("\\")
                        || arg.contains(";") || arg.contains("&")) {
                    arglist.append(" \"");
                    for (int j = 0; j < arg.length(); ++j) {
                        char c = arg.charAt(j);
                        if (c ==
'\"' || c == '$') {
                            // Escape quotes and dollar signs inside a quoted
                            // argument.
                            arglist.append("\\");
                        }
                        arglist.append(c);
                    }
                    arglist.append("\"");
                } else {
                    arglist.append(" " + arg);
                }
            }
        }
        return arglist;
    }

    /**
     * Shows an exception.
     *
     * @param parent
     *            parent component of option pane
     * @param title
     *            title of option pane
     * @param t
     *            the exception
     */
    public static void showException(Component parent, String title,
            Throwable t) {
        t.printStackTrace();
        // Unwrap the cause chain, stopping early at an SqlException so its
        // SQL statement can be displayed.
        if (!(t instanceof ClassNotFoundException)) {
            while (t.getCause() != null && t != t.getCause()
                    && !(t instanceof SqlException)) {
                t = t.getCause();
            }
        }
        if (t instanceof SqlException) {
            String message = ((SqlException) t).message;
            String sql = ((SqlException) t).sqlStatement;
            new SqlErrorDialog(parent == null ? null
                    : SwingUtilities.getWindowAncestor(parent), lineWrap(
                    message, 120).toString(), lineWrap(sql, 140).toString());
            return;
        }
        StringBuilder msg = lineWrap(t.getMessage(), 80);
        JOptionPane.showMessageDialog(parent, msg.toString().trim(), title
                + " - " + t.getClass().getName(), JOptionPane.ERROR_MESSAGE);
    }

    /**
     * Wraps a message into lines of at most <code>maxwidth</code> characters
     * (overlong unbreakable runs are hard-wrapped).
     *
     * @param message
     *            the text to wrap; <code>null</code> is treated as empty
     * @param maxwidth
     *            preferred maximum line width
     */
    private static StringBuilder lineWrap(String message, int maxwidth) {
        StringBuilder msg = new StringBuilder();
        Pattern wrapRE = Pattern.compile("(\\S\\S{" + maxwidth + ",}|.{1,"
                + maxwidth + "})(\\s+|$)");
        Matcher m = wrapRE.matcher(message == null ? "" : message);
        while (m.find()) {
            String line = m.group();
            while (line.length() > maxwidth + 10) {
                msg.append(line.substring(0, maxwidth) + "\n");
                line = line.substring(maxwidth);
            }
            msg.append(line + (line.contains("\n") ? "" : "\n"));
        }
        return msg;
    }

    /**
     * Loads table list file and fill a list.
* * @param list * to fill * @param fileName * name of file */ public static void loadTableList(List<String> list, String fileName) throws IOException { File file = new File(fileName); if (file.exists()) { BufferedReader in = new BufferedReader(new FileReader(file)); String line; while ((line = in.readLine()) != null) { line = line.trim(); if (line.length() > 0) { list.add(line); } } in.close(); } } /** * Initializes peer of newly created window. * * Should not be neccassary, but there is a strange bug in AWT of jre 6 on * multi-core/processor systems. Sleeping a little after creating peer and * before making peer visible seems to help. */ public static void initPeer() { try { Thread.sleep(200); } catch (InterruptedException e) { } } public static void fit(JDialog d) { try { // Get the size of the screen Dimension dim = Toolkit.getDefaultToolkit().getScreenSize(); int hd = d.getY() + d.getHeight() - (dim.height - 80); if (hd > 0) { d.setSize(d.getWidth(), Math.max(d.getHeight() - hd, 150)); } } catch (Throwable t) { // ignore } } public static boolean disableWarnings = false; /** * Tries to get as much memory as possible and shows it's size. 
*/
    public static void showMaxMemory() {
        long memSize = 0;
        try {
            final int MB = 1024 * 1024;
            List<byte[]> mem = new ArrayList<byte[]>();
            // Allocate 1 MB chunks until the VM throws OutOfMemoryError.
            while (true) {
                mem.add(new byte[MB]);
                memSize += MB;
            }
        } catch (OutOfMemoryError e) {
            JOptionPane.showConfirmDialog(null, "MaxMem=" + memSize, "",
                    JOptionPane.INFORMATION_MESSAGE);
        }
    }

    /**
     * Replaces a component in its parent's GridBagLayout by another one,
     * keeping the original layout constraints. If <code>replacement</code> is
     * <code>null</code>, the component is just removed.
     */
    public static void replace(JComponent component, JComponent replacement) {
        Container parent = component.getParent();
        GridBagConstraints c = ((GridBagLayout) parent.getLayout())
                .getConstraints(component);
        parent.remove(component);
        if (replacement != null) {
            parent.add(replacement, c);
        }
    }

    /**
     * Lets left-button double-clicks and the Enter key on
     * <code>component</code> trigger the given button (if it is enabled).
     */
    public static void wireComponentWithButton(JComponent component,
            final JButton button) {
        component.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                if (e.getButton() == MouseEvent.BUTTON1
                        && e.getClickCount() > 1) {
                    if (button.isEnabled()) {
                        button.doClick();
                    }
                }
            }
        });
        component.addKeyListener(new KeyListener() {
            @Override
            public void keyTyped(KeyEvent e) {
                if (e.getKeyChar() == '\n') {
                    if (button.isEnabled()) {
                        button.doClick();
                    }
                }
            }

            @Override
            public void keyReleased(KeyEvent arg0) {
            }

            @Override
            public void keyPressed(KeyEvent arg0) {
            }
        });
    }

    /**
     * Splits an overlong popup menu into nested "more..." sub-menus holding
     * at most 40 items each (the last few items stay on the current level).
     */
    public static void fit(JPopupMenu popup) {
        final int MAX_ITEMS = 40;
        Component[] comps = popup.getComponents();
        popup.removeAll();
        JMenu current = null;
        int ci = 1;
        for (int i = 0; i < comps.length; ++i) {
            if (ci > MAX_ITEMS && i < comps.length - 5) {
                ci = 1;
                JMenu newMenu = new JMenu("more...");
                if (current == null) {
                    popup.add(newMenu);
                } else {
                    current.add(newMenu);
                }
                current = newMenu;
            }
            if (current == null) {
                popup.add(comps[i]);
            } else {
                current.add(comps[i]);
            }
            ++ci;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.platform; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.PlatformConfiguration; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteComputeImpl; import org.apache.ignite.internal.binary.*; import org.apache.ignite.internal.cluster.ClusterGroupAdapter; import org.apache.ignite.internal.processors.GridProcessorAdapter; import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; import org.apache.ignite.internal.processors.datastreamer.DataStreamerImpl; import org.apache.ignite.internal.processors.datastructures.GridCacheAtomicLongImpl; import org.apache.ignite.internal.processors.platform.cache.PlatformCache; import org.apache.ignite.internal.processors.platform.cache.affinity.PlatformAffinity; import org.apache.ignite.internal.processors.platform.cache.store.PlatformCacheStore; import 
org.apache.ignite.internal.processors.platform.cluster.PlatformClusterGroup; import org.apache.ignite.internal.processors.platform.compute.PlatformCompute; import org.apache.ignite.internal.processors.platform.datastreamer.PlatformDataStreamer; import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicLong; import org.apache.ignite.internal.processors.platform.dotnet.PlatformDotNetCacheStore; import org.apache.ignite.internal.processors.platform.events.PlatformEvents; import org.apache.ignite.internal.processors.platform.memory.PlatformMemory; import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream; import org.apache.ignite.internal.processors.platform.messaging.PlatformMessaging; import org.apache.ignite.internal.processors.platform.services.PlatformServices; import org.apache.ignite.internal.processors.platform.transactions.PlatformTransactions; import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils; import org.apache.ignite.internal.processors.platform.utils.PlatformUtils; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; import java.util.Collection; import java.util.Collections; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * GridGain platform processor. */ public class PlatformProcessorImpl extends GridProcessorAdapter implements PlatformProcessor { /** Start latch. */ private final CountDownLatch startLatch = new CountDownLatch(1); /** Stores pending initialization. */ private final Collection<StoreInfo> pendingStores = Collections.newSetFromMap(new ConcurrentHashMap<StoreInfo, Boolean>()); /** Started stores. 
*/ private final Collection<PlatformCacheStore> stores = Collections.newSetFromMap(new ConcurrentHashMap<PlatformCacheStore, Boolean>()); /** Lock for store lifecycle operations. */ private final ReadWriteLock storeLock = new ReentrantReadWriteLock(); /** Logger. */ private final IgniteLogger log; /** Context. */ private final PlatformContext platformCtx; /** Interop configuration. */ private final PlatformConfigurationEx interopCfg; /** Whether processor is started. */ private boolean started; /** Whether processor if stopped (or stopping). */ private boolean stopped; /** * Constructor. * * @param ctx Kernal context. */ public PlatformProcessorImpl(GridKernalContext ctx) { super(ctx); log = ctx.log(PlatformProcessorImpl.class); PlatformConfiguration interopCfg0 = ctx.config().getPlatformConfiguration(); assert interopCfg0 != null : "Must be checked earlier during component creation."; if (!(interopCfg0 instanceof PlatformConfigurationEx)) throw new IgniteException("Unsupported platform configuration: " + interopCfg0.getClass().getName()); interopCfg = (PlatformConfigurationEx)interopCfg0; if (!F.isEmpty(interopCfg.warnings())) { for (String w : interopCfg.warnings()) U.warn(log, w); } platformCtx = new PlatformContextImpl(ctx, interopCfg.gate(), interopCfg.memory()); } /** {@inheritDoc} */ @Override public void start() throws IgniteCheckedException { try (PlatformMemory mem = platformCtx.memory().allocate()) { PlatformOutputStream out = mem.output(); BinaryRawWriterEx writer = platformCtx.writer(out); writer.writeString(ctx.gridName()); out.synchronize(); platformCtx.gateway().onStart(this, mem.pointer()); } // At this moment all necessary native libraries must be loaded, so we can process with store creation. storeLock.writeLock().lock(); try { for (StoreInfo store : pendingStores) registerStore0(store.store, store.convertBinary); pendingStores.clear(); started = true; } finally { storeLock.writeLock().unlock(); } // Add Interop node attributes. 
ctx.addNodeAttribute(PlatformUtils.ATTR_PLATFORM, interopCfg.platform()); } /** {@inheritDoc} */ @Override public void onKernalStop(boolean cancel) { startLatch.countDown(); } /** {@inheritDoc} */ @Override public void stop(boolean cancel) throws IgniteCheckedException { if (platformCtx != null) { // Destroy cache stores. storeLock.writeLock().lock(); try { for (PlatformCacheStore store : stores) { if (store != null) { if (store instanceof PlatformDotNetCacheStore) { PlatformDotNetCacheStore store0 = (PlatformDotNetCacheStore)store; try { store0.destroy(platformCtx.kernalContext()); } catch (Exception e) { U.error(log, "Failed to destroy .Net cache store [store=" + store0 + ", err=" + e.getMessage() + ']'); } } else assert false : "Invalid interop cache store type: " + store; } } } finally { stopped = true; storeLock.writeLock().unlock(); } platformCtx.gateway().onStop(); } } /** {@inheritDoc} */ @Override public Ignite ignite() { return ctx.grid(); } /** {@inheritDoc} */ @Override public long environmentPointer() { return platformCtx.gateway().environmentPointer(); } /** {@inheritDoc} */ public void releaseStart() { startLatch.countDown(); } /** {@inheritDoc} */ public void awaitStart() throws IgniteCheckedException { U.await(startLatch); } /** {@inheritDoc} */ @Override public PlatformContext context() { return platformCtx; } /** {@inheritDoc} */ @Override public PlatformTarget cache(@Nullable String name) throws IgniteCheckedException { IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().cache(name); if (cache == null) throw new IllegalArgumentException("Cache doesn't exist: " + name); return new PlatformCache(platformCtx, cache.keepBinary(), false); } /** {@inheritDoc} */ @Override public PlatformTarget createCache(@Nullable String name) throws IgniteCheckedException { IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createCache(name); assert cache != null; return new PlatformCache(platformCtx, cache.keepBinary(), false); } /** {@inheritDoc} */ @Override 
public PlatformTarget getOrCreateCache(@Nullable String name) throws IgniteCheckedException { IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateCache(name); assert cache != null; return new PlatformCache(platformCtx, cache.keepBinary(), false); } /** {@inheritDoc} */ @Override public PlatformTarget createCacheFromConfig(long memPtr) throws IgniteCheckedException { BinaryRawReaderEx reader = platformCtx.reader(platformCtx.memory().get(memPtr)); CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader); IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createCache(cfg); return new PlatformCache(platformCtx, cache.keepBinary(), false); } /** {@inheritDoc} */ @Override public PlatformTarget getOrCreateCacheFromConfig(long memPtr) throws IgniteCheckedException { BinaryRawReaderEx reader = platformCtx.reader(platformCtx.memory().get(memPtr)); CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader); IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg); return new PlatformCache(platformCtx, cache.keepBinary(), false); } /** {@inheritDoc} */ @Override public void destroyCache(@Nullable String name) throws IgniteCheckedException { ctx.grid().destroyCache(name); } /** {@inheritDoc} */ @Override public PlatformTarget affinity(@Nullable String name) throws IgniteCheckedException { return new PlatformAffinity(platformCtx, ctx, name); } /** {@inheritDoc} */ @Override public PlatformTarget dataStreamer(@Nullable String cacheName, boolean keepBinary) throws IgniteCheckedException { IgniteDataStreamer ldr = ctx.dataStream().dataStreamer(cacheName); ldr.keepBinary(true); return new PlatformDataStreamer(platformCtx, cacheName, (DataStreamerImpl)ldr, keepBinary); } /** {@inheritDoc} */ @Override public PlatformTarget transactions() { return new PlatformTransactions(platformCtx); } /** {@inheritDoc} */ @Override public PlatformTarget projection() throws IgniteCheckedException { return new 
PlatformClusterGroup(platformCtx, ctx.grid().cluster());
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget compute(PlatformTarget grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp;

        assert grp0.projection() instanceof ClusterGroupAdapter; // Safety for very complex ClusterGroup hierarchy.

        return new PlatformCompute(platformCtx, (IgniteComputeImpl)((ClusterGroupAdapter)grp0.projection()).compute());
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget message(PlatformTarget grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp;

        return new PlatformMessaging(platformCtx, grp0.projection().ignite().message(grp0.projection()));
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget events(PlatformTarget grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp;

        return new PlatformEvents(platformCtx, grp0.projection().ignite().events(grp0.projection()));
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget services(PlatformTarget grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp;

        return new PlatformServices(platformCtx, grp0.projection().ignite().services(grp0.projection()), false);
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget extensions() {
        // No platform extensions are provided by this processor.
        return null;
    }

    /** {@inheritDoc} */
    @Override public void registerStore(PlatformCacheStore store, boolean convertBinary)
        throws IgniteCheckedException {
        // Read lock: concurrent registrations may proceed in parallel, but
        // must not interleave with start()/stop(), which take the write lock.
        storeLock.readLock().lock();

        try {
            if (stopped)
                // Fix: corrected message typo ("becuase" -> "because").
                throw new IgniteCheckedException("Failed to initialize interop store because node is stopping: " +
                    store);

            if (started)
                // Node fully started: initialize the store right away.
                registerStore0(store, convertBinary);
            else
                // Defer initialization until start() drains pendingStores.
                pendingStores.add(new StoreInfo(store, convertBinary));
        }
        finally {
            storeLock.readLock().unlock();
        }
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget atomicLong(String name, long initVal, boolean create) throws IgniteException {
        GridCacheAtomicLongImpl atomicLong = (GridCacheAtomicLongImpl)ignite().atomicLong(name, initVal, create);

        if (atomicLong == null)
            return null;

        return new PlatformAtomicLong(platformCtx,
atomicLong);
    }

    /** {@inheritDoc} */
    @Override public void getIgniteConfiguration(long memPtr) {
        // Serialize the node configuration into the platform memory chunk
        // identified by memPtr.
        PlatformOutputStream stream = platformCtx.memory().get(memPtr).output();
        BinaryRawWriterEx writer = platformCtx.writer(stream);

        PlatformConfigurationUtils.writeIgniteConfiguration(writer, ignite().configuration());

        stream.synchronize();
    }

    /**
     * Internal store initialization routine.
     *
     * @param store Store.
     * @param convertBinary Convert binary flag.
     * @throws IgniteCheckedException If failed (only .Net stores are supported).
     */
    private void registerStore0(PlatformCacheStore store, boolean convertBinary) throws IgniteCheckedException {
        if (store instanceof PlatformDotNetCacheStore) {
            PlatformDotNetCacheStore store0 = (PlatformDotNetCacheStore)store;

            store0.initialize(ctx, convertBinary);
        }
        else
            throw new IgniteCheckedException("Unsupported interop store: " + store);
    }

    /**
     * Pending-store descriptor: a store together with its binary-conversion
     * flag, queued until the processor has started.
     */
    private static class StoreInfo {
        /** Store. */
        private final PlatformCacheStore store;

        /** Convert binary flag. */
        private final boolean convertBinary;

        /**
         * Constructor.
         *
         * @param store Store.
         * @param convertBinary Convert binary flag.
         */
        private StoreInfo(PlatformCacheStore store, boolean convertBinary) {
            this.store = store;
            this.convertBinary = convertBinary;
        }
    }
}
/*
 * Copyright (c) 2009-2014, Peter Abeles. All Rights Reserved.
 *
 * This file is part of Efficient Java Matrix Library (EJML).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ejml.alg.dense.decompose.qr;

import org.ejml.data.CDenseMatrix64F;
import org.ejml.data.Complex64F;
import org.ejml.interfaces.decomposition.QRDecomposition;
import org.ejml.ops.CCommonOps;

/**
 * <p>
 * Householder QR decomposition is rich in operations along the columns of the matrix.  This can be
 * taken advantage of by solving for the Q matrix in a column major format to reduce the number
 * of CPU cache misses and the number of copies that are performed.
 * </p>
 *
 * <p>
 * Complex elements are stored interleaved in {@code QR.data}: element i occupies
 * {@code data[2*i]} (real) and {@code data[2*i+1]} (imaginary).
 * </p>
 *
 * @see org.ejml.alg.dense.decomposition.qr.QRDecompositionHouseholder_D64
 *
 * @author Peter Abeles
 */
// TODO figure out why this is significantly slower than col
public class QRDecompositionHouseholderTran_CD64 implements QRDecomposition<CDenseMatrix64F> {

    /**
     * Where the Q and R matrices are stored.  For speed reasons
     * this is transposed, i.e. allocated as (numCols x numRows).
     */
    protected CDenseMatrix64F QR;

    // used internally to store temporary data (2 doubles per complex element)
    protected double v[];

    // dimension of the decomposed matrices
    protected int numCols; // this is 'n'
    protected int numRows; // this is 'm'
    protected int minLength;

    // the computed gamma for Q_k matrix
    protected double gammas[];
    // local variables
    protected double gamma;
    protected Complex64F tau = new Complex64F();

    // did it encounter an error?
    protected boolean error;

    /**
     * Resizes internal storage for matrices up to the specified dimensions,
     * allocating (or growing) the transposed QR matrix, the temporary vector
     * 'v', and the gamma array.
     *
     * @param numRows Number of rows in the matrix being decomposed.
     * @param numCols Number of columns in the matrix being decomposed.
     */
    public void setExpectedMaxSize( int numRows , int numCols ) {
        this.numCols = numCols;
        this.numRows = numRows;
        minLength = Math.min(numCols,numRows);
        int maxLength = Math.max(numCols,numRows);

        if( QR == null ) {
            // note the transposed allocation: columns of A become rows of QR
            QR = new CDenseMatrix64F(numCols,numRows);
            // *2 because each complex element is two doubles
            v = new double[ maxLength*2 ];
            gammas = new double[ minLength ];
        } else {
            QR.reshape(numCols,numRows);
        }

        if( v.length < maxLength*2 ) {
            v = new double[ maxLength*2 ];
        }
        if( gammas.length < minLength ) {
            gammas = new double[ minLength ];
        }
    }

    /**
     * Inner matrix that stores the decomposition.  Note that it is stored
     * transposed relative to the input matrix.
     */
    public CDenseMatrix64F getQR() {
        return QR;
    }

    /**
     * Computes the Q matrix from the information stored in the QR matrix.  This
     * operation requires about 4(m<sup>2</sup>n-mn<sup>2</sup>+n<sup>3</sup>/3) flops.
     *
     * @param Q The orthogonal Q matrix.  If null a new matrix is allocated.
     * @param compact If true a compact (m x min(m,n)) Q is returned, otherwise a full (m x m) Q.
     */
    @Override
    public CDenseMatrix64F getQ( CDenseMatrix64F Q , boolean compact ) {
        if( compact ) {
            if( Q == null ) {
                Q = CCommonOps.identity(numRows, minLength);
            } else {
                if( Q.numRows != numRows || Q.numCols != minLength ) {
                    throw new IllegalArgumentException("Unexpected matrix dimension.");
                } else {
                    CCommonOps.setIdentity(Q);
                }
            }
        } else {
            if( Q == null ) {
                Q = CCommonOps.identity(numRows);
            } else {
                if( Q.numRows != numRows || Q.numCols != numRows ) {
                    throw new IllegalArgumentException("Unexpected matrix dimension.");
                } else {
                    CCommonOps.setIdentity(Q);
                }
            }
        }

        // Unlike applyQ() this takes advantage of zeros in the identity matrix
        // by not multiplying across all rows.
        for( int j = minLength-1; j >= 0; j-- ) {
            // diagonal element of column j in the (transposed, interleaved) storage
            int diagIndex = (j*numRows+j)*2;
            double realBefore = QR.data[diagIndex];
            double imagBefore = QR.data[diagIndex+1];

            // temporarily set u_0 = 1 (implicit leading element of the householder vector)
            QR.data[diagIndex] = 1;
            QR.data[diagIndex+1] = 0;

            QrHelperFunctions_CD64.rank1UpdateMultR(Q, QR.data, j * numRows, gammas[j], j, j, numRows, v);

            // restore the stored value (it holds part of R / tau)
            QR.data[diagIndex] = realBefore;
            QR.data[diagIndex+1] = imagBefore;
        }

        return Q;
    }

    /**
     * A = Q*A
     *
     * @param A Matrix that is being multiplied by Q.  Is modified.
     */
    public void applyQ( CDenseMatrix64F A ) {
        // NOTE(review): the message says "at least" but the check requires exactly numRows rows.
        if( A.numRows != numRows )
            throw new IllegalArgumentException("A must have at least "+numRows+" rows.");

        // apply the householder reflectors in reverse order: Q = H_0 H_1 ... H_{k-1}
        for( int j = minLength-1; j >= 0; j-- ) {
            int diagIndex = (j*numRows+j)*2;
            double realBefore = QR.data[diagIndex];
            double imagBefore = QR.data[diagIndex+1];

            QR.data[diagIndex] = 1;
            QR.data[diagIndex+1] = 0;

            QrHelperFunctions_CD64.rank1UpdateMultR(A, QR.data, j * numRows, gammas[j], 0, j, numRows, v);

            QR.data[diagIndex] = realBefore;
            QR.data[diagIndex+1] = imagBefore;
        }
    }

    /**
     * A = Q<sup>H</sup>*A
     *
     * @param A Matrix that is being multiplied by Q<sup>T</sup>.  Is modified.
     */
    public void applyTranQ( CDenseMatrix64F A ) {
        // NOTE(review): unlike applyQ(), A's dimensions are not validated here — confirm callers
        // always pass a compatible matrix.
        for( int j = 0; j < minLength; j++ ) {
            int diagIndex = (j*numRows+j)*2;
            double realBefore = QR.data[diagIndex];
            double imagBefore = QR.data[diagIndex+1];

            QR.data[diagIndex] = 1;
            QR.data[diagIndex+1] = 0;

            QrHelperFunctions_CD64.rank1UpdateMultR(A, QR.data, j * numRows, gammas[j], 0, j, numRows, v);

            QR.data[diagIndex] = realBefore;
            QR.data[diagIndex+1] = imagBefore;
        }
    }

    /**
     * Returns an upper triangular matrix which is the R in the QR decomposition.
     *
     * @param R An upper triangular matrix.  If null a new matrix is allocated.
     * @param compact If true a compact (min(m,n) x n) R is returned, otherwise a full (m x n) R.
     */
    @Override
    public CDenseMatrix64F getR(CDenseMatrix64F R, boolean compact) {
        if( R == null ) {
            if( compact ) {
                R = new CDenseMatrix64F(minLength,numCols);
            } else
                R = new CDenseMatrix64F(numRows,numCols);
        } else {
            if( compact ) {
                if( R.numCols != numCols || R.numRows != minLength )
                    throw new IllegalArgumentException("Unexpected dimensions");
            } else {
                if( R.numCols != numCols || R.numRows != numRows )
                    throw new IllegalArgumentException("Unexpected dimensions");
            }

            // zero out the strictly lower triangle of a caller-supplied R
            for( int i = 0; i < R.numRows; i++ ) {
                int min = Math.min(i,R.numCols);
                for( int j = 0; j < min; j++ ) {
                    R.set(i, j, 0, 0);
                }
            }
        }

        // copy the upper triangle out of the transposed QR storage:
        // R(i,j) lives at QR(j,i) because QR is stored transposed
        for( int i = 0; i < R.numRows; i++ ) {
            for( int j = i; j < R.numCols; j++ ) {
                int index = QR.getIndex(j,i);
                R.set(i,j,QR.data[index],QR.data[index+1]);
            }
        }

        return R;
    }

    /**
     * <p>
     * To decompose the matrix 'A' it must have full rank.  'A' is a 'm' by 'n' matrix.
     * It requires about 2n*m<sup>2</sup>-2m<sup>2</sup>/3 flops.
     * </p>
     *
     * <p>
     * The matrix provided here can be of different
     * dimension than the one specified in the constructor.  It just has to be smaller than or equal
     * to it.
     * </p>
     */
    @Override
    public boolean decompose( CDenseMatrix64F A ) {
        setExpectedMaxSize(A.numRows, A.numCols);

        // work on A^T so column operations become contiguous row operations
        CCommonOps.transpose(A, QR);

        error = false;

        for( int j = 0; j < minLength; j++ ) {
            householder(j);
            updateA(j);
        }

        return !error;
    }

    /** {@inheritDoc} */
    @Override
    public boolean inputModified() {
        return false;
    }

    /**
     * <p>
     * Computes the householder vector "u" for the first column of submatrix j.  Note this is
     * a specialized householder for this problem.  There is some protection against
     * overflow and underflow.
     * </p>
     * <p>
     * Q = I - &gamma;uu<sup>H</sup>
     * </p>
     * <p>
     * This function finds the values of 'u' and '&gamma;'.
     * </p>
     *
     * @param j Which submatrix to work off of.
     */
    protected void householder( final int j )
    {
        // row j of the transposed QR holds column j of the original matrix
        int startQR = j*numRows;
        int endQR = startQR+numRows;
        startQR += j;

        // scale by the largest magnitude to protect against overflow/underflow
        final double max = QrHelperFunctions_CD64.findMax(QR.data, startQR, numRows - j);

        if( max == 0.0 ) {
            // column is all zeros => matrix is rank deficient
            gamma = 0;
            error = true;
        } else {
            // computes tau and normalizes u by max
            gamma = QrHelperFunctions_CD64.computeTauGammaAndDivide(startQR, endQR, QR.data, max, tau);

            // divide u by u_0
            double realU0 = QR.data[startQR*2] + tau.real;
            double imagU0 = QR.data[startQR*2+1] + tau.imaginary;

            QrHelperFunctions_CD64.divideElements(startQR + 1, endQR, QR.data, 0, realU0, imagU0);

            // undo the earlier normalization of tau
            tau.real *= max;
            tau.imaginary *= max;

            // the diagonal of R is -tau
            QR.data[startQR*2] = -tau.real;
            QR.data[startQR*2+1] = -tau.imaginary;
        }

        gammas[j] = gamma;
    }

    /**
     * <p>
     * Takes the results from the householder computation and updates the 'A' matrix.<br>
     * <br>
     * A = (I - &gamma;*u*u<sup>H</sup>)A
     * </p>
     *
     * @param w The submatrix.
     */
    protected void updateA( final int w )
    {
        // Real-valued reference implementation kept for clarity:
//        int rowW = w*numRows;
//        int rowJ = rowW + numRows;
//
//        for( int j = w+1; j < numCols; j++ , rowJ += numRows) {
//            double val = QR.data[rowJ + w];
//
//            // val = gamma*u^T * A
//            for( int k = w+1; k < numRows; k++ ) {
//                val += QR.data[rowW + k]*QR.data[rowJ + k];
//            }
//            val *= gamma;
//
//            // A - val*u
//            QR.data[rowJ + w] -= val;
//            for( int i = w+1; i < numRows; i++ ) {
//                QR.data[rowJ + i] -= QR.data[rowW + i]*val;
//            }
//        }

        final double data[] = QR.data;

        // offsets are computed in complex-element units first, then doubled to
        // index the interleaved (real, imag) double array
        int rowW = w*numRows + w + 1;
        int rowJ = rowW + numRows;
        // end of iteration over columns w+1 .. numCols-1 of the original matrix
        final int rowJEnd = 2*(rowJ + (numCols-w-1)*numRows);

        final int indexWEnd = 2*(rowW + numRows - w - 1);
        rowJ = 2*rowJ;
        rowW = 2*rowW;

        for( ; rowJEnd != rowJ; rowJ += numRows*2) {
            // assume the first element in u is 1:
            // val = conj(u)^T * A_j, seeded with the element multiplied by the implicit 1
            double realVal = data[rowJ - 2];
            double imagVal = data[rowJ - 1];

            int indexW = rowW;
            int indexJ = rowJ;

            while( indexW != indexWEnd ) {
                // conjugate of u (negated imaginary part)
                double realW = data[indexW++];
                double imagW = -data[indexW++];

                double realJ = data[indexJ++];
                double imagJ = data[indexJ++];

                realVal += realW*realJ - imagW*imagJ;
                imagVal += realW*imagJ + imagW*realJ;
            }

            realVal *= gamma;
            imagVal *= gamma;

            // A_j = A_j - val*u (again exploiting the implicit leading 1 in u)
            data[rowJ - 2] -= realVal;
            data[rowJ - 1] -= imagVal;

            indexW = rowW;
            indexJ = rowJ;
            while( indexW != indexWEnd ) {
                double realW = data[indexW++];
                double imagW = data[indexW++];

                data[indexJ++] -= realW*realVal - imagW*imagVal;
                data[indexJ++] -= realW*imagVal + imagW*realVal;
            }
        }
    }

    /**
     * Gammas from the householder reflectors, one per computed column.
     */
    public double[] getGammas() {
        return gammas;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed.dht.preloader;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState;
import org.apache.ignite.internal.util.GridPartitionStateMap;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;

import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.MOVING;

/**
 * Partition map from single node.
 */
public class GridDhtPartitionMap implements Comparable<GridDhtPartitionMap>, Externalizable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Node ID. */
    protected UUID nodeId;

    /** Update sequence number. */
    protected long updateSeq;

    /** Topology version. */
    protected AffinityTopologyVersion top;

    /** Per-partition state map. */
    protected GridPartitionStateMap map;

    /** Count of partitions currently in {@code MOVING} state (kept in sync by {@link #put}). */
    private volatile int moving;

    /** Atomic updater for {@link #moving} so the counter stays consistent under concurrent puts. */
    private static final AtomicIntegerFieldUpdater<GridDhtPartitionMap> MOVING_FIELD_UPDATER =
        AtomicIntegerFieldUpdater.newUpdater(GridDhtPartitionMap.class, "moving");

    /**
     * Empty constructor required for {@link Externalizable}.
     */
    public GridDhtPartitionMap() {
        // No-op.
    }

    /**
     * @param nodeId Node ID.
     * @param updateSeq Update sequence number.
     * @param top Topology version.
     * @param m Map to copy.
     * @param onlyActive If {@code true}, then only active states will be included.
     */
    public GridDhtPartitionMap(UUID nodeId,
        long updateSeq,
        AffinityTopologyVersion top,
        GridPartitionStateMap m,
        boolean onlyActive) {
        assert nodeId != null;
        assert updateSeq > 0;

        this.nodeId = nodeId;
        this.updateSeq = updateSeq;
        this.top = top;

        map = new GridPartitionStateMap(m, onlyActive);

        // Initialize the MOVING counter from the copied map.
        int moving0 = 0;

        for (GridDhtPartitionState state : map.values()) {
            if (state == MOVING)
                moving0++;
        }

        if (moving0 > 0)
            MOVING_FIELD_UPDATER.set(this, moving0);
    }

    /**
     * @param nodeId Node ID.
     * @param updateSeq Update sequence number.
     * @param top Topology version.
     * @param map Map.
     * @param moving Number of moving partitions.
     */
    private GridDhtPartitionMap(UUID nodeId,
        long updateSeq,
        AffinityTopologyVersion top,
        GridPartitionStateMap map,
        int moving) {
        this.nodeId = nodeId;
        this.updateSeq = updateSeq;
        this.top = top;
        this.map = map;
        this.moving = moving;
    }

    /**
     * @return Copy with empty partition state map.
     */
    public GridDhtPartitionMap emptyCopy() {
        return new GridDhtPartitionMap(nodeId,
            updateSeq,
            top,
            new GridPartitionStateMap(0),
            0);
    }

    /**
     * Puts partition state and maintains the {@code MOVING} counter.
     *
     * @param part Partition.
     * @param state Partition state.
     */
    public void put(Integer part, GridDhtPartitionState state) {
        GridDhtPartitionState old = map.put(part, state);

        // Adjust the counter only on transitions into/out of MOVING.
        if (old == MOVING && state != MOVING)
            MOVING_FIELD_UPDATER.decrementAndGet(this);
        else if (old != MOVING && state == MOVING)
            MOVING_FIELD_UPDATER.incrementAndGet(this);

        assert moving >= 0 : moving;
    }

    /**
     * @return {@code true} If partition map contains moving partitions.
     */
    public boolean hasMovingPartitions() {
        assert moving >= 0 : moving;

        return moving != 0;
    }

    /**
     * @param part Partition.
     * @return Partition state.
     */
    public GridDhtPartitionState get(int part) {
        return map.state(part);
    }

    /**
     * @param part Partition.
     * @return {@code True} if contains given partition.
     */
    public boolean containsKey(Integer part) {
        return map.containsKey(part);
    }

    /**
     * @return Entries.
     */
    public Set<Map.Entry<Integer, GridDhtPartitionState>> entrySet() {
        return map.entrySet();
    }

    /**
     * @return Map size.
     */
    public int size() {
        return map.size();
    }

    /**
     * @return Partitions.
     */
    public Set<Integer> keySet() {
        return map.keySet();
    }

    /**
     * @return Underlying map.
     */
    public GridPartitionStateMap map() {
        return map;
    }

    /**
     * @return Node ID.
     */
    public UUID nodeId() {
        return nodeId;
    }

    /**
     * @return Update sequence.
     */
    public long updateSequence() {
        return updateSeq;
    }

    /**
     * @param updateSeq New update sequence value.
     * @param topVer Current topology version.
     * @return Old update sequence value.
     */
    public long updateSequence(long updateSeq, AffinityTopologyVersion topVer) {
        assert topVer.compareTo(top) >= 0 : "Invalid topology version [cur=" + top + ", new=" + topVer + "]";

        long old = this.updateSeq;

        // Overwrite update sequence without checking in case of greater topology version
        if (topVer.compareTo(top) == 0)
            assert updateSeq >= old : "Invalid update sequence [cur=" + old + ", new=" + updateSeq + ']';

        this.updateSeq = updateSeq;

        top = topVer;

        return old;
    }

    /**
     * @return Topology version.
     */
    public AffinityTopologyVersion topologyVersion() {
        return top;
    }

    /** {@inheritDoc} */
    @Override public int compareTo(GridDhtPartitionMap o) {
        assert nodeId.equals(o.nodeId);

        // Order by topology version first, then by update sequence.
        int topVerCompare = top.compareTo(o.top);

        if (topVerCompare != 0)
            return topVerCompare;

        return Long.compare(updateSeq, o.updateSeq);
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        U.writeUuid(out, nodeId);

        out.writeLong(updateSeq);

        int size = map.size();

        out.writeInt(size);

        int i = 0;

        for (Map.Entry<Integer, GridDhtPartitionState> entry : map.entrySet()) {
            int ordinal = entry.getValue().ordinal();

            // State ordinal must fit in 3 bits and partition ID in an unsigned short.
            assert ordinal == (ordinal & 0x7);
            assert entry.getKey() < CacheConfiguration.MAX_PARTITIONS_COUNT : entry.getKey();

            out.writeByte(ordinal);
            out.writeShort(entry.getKey());

            i++;
        }

        assert i == size : "Invalid size [size1=" + size + ", size2=" + i + ']';

        if (top != null) {
            out.writeLong(topologyVersion().topologyVersion());
            out.writeInt(topologyVersion().minorTopologyVersion());
        }
        else {
            // Zero major version is the "no topology version" marker (see readExternal).
            out.writeLong(0);
            out.writeInt(0);
        }
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        nodeId = U.readUuid(in);

        updateSeq = in.readLong();

        int size = in.readInt();

        map = new GridPartitionStateMap();

        for (int i = 0; i < size; i++) {
            int ordinal = in.readUnsignedByte();
            int part = in.readUnsignedShort();

            // put() also rebuilds the MOVING counter.
            put(part, GridDhtPartitionState.fromOrdinal(ordinal));
        }

        long ver = in.readLong();
        int minorVer = in.readInt();

        // Zero major version means topology version was not written.
        if (ver != 0)
            top = new AffinityTopologyVersion(ver, minorVer);
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (this == o)
            return true;

        // Reject null and foreign types instead of failing with NPE/ClassCastException,
        // as required by the Object.equals() contract.
        if (!(o instanceof GridDhtPartitionMap))
            return false;

        GridDhtPartitionMap other = (GridDhtPartitionMap)o;

        return other.nodeId.equals(nodeId) && other.updateSeq == updateSeq;
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        return 31 * nodeId.hashCode() + (int)(updateSeq ^ (updateSeq >>> 32));
    }

    /**
     * @return Full string representation.
     */
    public String toFullString() {
        return S.toString(GridDhtPartitionMap.class, this, "top", top, "updateSeq", updateSeq,
            "size", size(), "map", map.toString());
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDhtPartitionMap.class, this, "top", top, "updateSeq", updateSeq,
            "size", size());
    }
}
/*
 * Copyright (C) 2011 the original author or authors.
 * See the notice.md file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trivium.dep.org.iq80.leveldb.impl;

import io.trivium.dep.com.google.common.base.Preconditions;
import io.trivium.dep.org.iq80.leveldb.util.Closeables;
import io.trivium.dep.org.iq80.leveldb.util.Slice;
import io.trivium.dep.org.iq80.leveldb.util.SliceInput;
import io.trivium.dep.org.iq80.leveldb.util.SliceOutput;
import io.trivium.dep.org.iq80.leveldb.util.Slices;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.atomic.AtomicBoolean;

import static io.trivium.dep.org.iq80.leveldb.impl.LogConstants.BLOCK_SIZE;
import static io.trivium.dep.org.iq80.leveldb.impl.LogConstants.HEADER_SIZE;

/**
 * Log writer that appends records to a file through a {@link FileChannel},
 * fragmenting each record into chunks so that no chunk ever spans a
 * {@code BLOCK_SIZE} boundary.
 */
public class FileChannelLogWriter
        implements LogWriter
{
    private final File file;
    private final long fileNumber;
    private final FileChannel fileChannel;
    // flips to true on close()/delete(); further addRecord() calls are rejected
    private final AtomicBoolean closed = new AtomicBoolean();

    /**
     * Current offset in the current block
     */
    private int blockOffset;

    /**
     * Opens (and truncates) the given file for writing.
     *
     * @param file Destination log file.  Must not be null.
     * @param fileNumber Non-negative identifier of this log file.
     * @throws FileNotFoundException If the file cannot be opened for writing.
     */
    public FileChannelLogWriter(File file, long fileNumber)
            throws FileNotFoundException
    {
        Preconditions.checkNotNull(file, "file is null");
        Preconditions.checkArgument(fileNumber >= 0, "fileNumber is negative");

        this.file = file;
        this.fileNumber = fileNumber;
        this.fileChannel = new FileOutputStream(file).getChannel();
    }

    /** @return {@code true} once {@link #close()} or {@link #delete()} has been called. */
    @Override
    public boolean isClosed()
    {
        return closed.get();
    }

    /**
     * Marks the writer closed, best-effort flushes the channel to disk, then
     * closes it.  A failed flush is deliberately ignored.
     */
    @Override
    public synchronized void close()
    {
        closed.set(true);

        // try to force the log to disk
        try {
            fileChannel.force(true);
        }
        catch (IOException ignored) {
        }

        // close the channel
        Closeables.closeQuietly(fileChannel);
    }

    /**
     * Marks the writer closed, closes the channel, and attempts to delete the
     * underlying file.
     */
    @Override
    public synchronized void delete()
    {
        closed.set(true);

        // close the channel
        Closeables.closeQuietly(fileChannel);

        // try to delete the file
        file.delete();
    }

    @Override
    public File getFile()
    {
        return file;
    }

    @Override
    public long getFileNumber()
    {
        return fileNumber;
    }

    // Writes a stream of chunks such that no chunk is split across a block boundary
    @Override
    public synchronized void addRecord(Slice record, boolean force)
            throws IOException
    {
        Preconditions.checkState(!closed.get(), "Log has been closed");

        SliceInput sliceInput = record.input();

        // used to track first, middle and last blocks
        boolean begin = true;

        // Fragment the record into chunks as necessary and write it.  Note that if record
        // is empty, we still want to iterate once to write a single
        // zero-length chunk.
        do {
            int bytesRemainingInBlock = BLOCK_SIZE - blockOffset;
            Preconditions.checkState(bytesRemainingInBlock >= 0);

            // Switch to a new block if necessary
            if (bytesRemainingInBlock < HEADER_SIZE) {
                if (bytesRemainingInBlock > 0) {
                    // Fill the rest of the block with zeros
                    // todo lame... need a better way to write zeros
                    fileChannel.write(ByteBuffer.allocate(bytesRemainingInBlock));
                }
                blockOffset = 0;
                bytesRemainingInBlock = BLOCK_SIZE - blockOffset;
            }

            // Invariant: we never leave less than HEADER_SIZE bytes available in a block
            int bytesAvailableInBlock = bytesRemainingInBlock - HEADER_SIZE;
            Preconditions.checkState(bytesAvailableInBlock >= 0);

            // if there are more bytes in the record then there are available in the block,
            // fragment the record; otherwise write to the end of the record
            boolean end;
            int fragmentLength;
            if (sliceInput.available() > bytesAvailableInBlock) {
                end = false;
                fragmentLength = bytesAvailableInBlock;
            }
            else {
                end = true;
                fragmentLength = sliceInput.available();
            }

            // determine block type
            LogChunkType type;
            if (begin && end) {
                type = LogChunkType.FULL;
            }
            else if (begin) {
                type = LogChunkType.FIRST;
            }
            else if (end) {
                type = LogChunkType.LAST;
            }
            else {
                type = LogChunkType.MIDDLE;
            }

            // write the chunk
            writeChunk(type, sliceInput.readSlice(fragmentLength));

            // we are no longer on the first chunk
            begin = false;
        } while (sliceInput.isReadable());

        if (force) {
            fileChannel.force(false);
        }
    }

    /**
     * Writes one chunk: a HEADER_SIZE header followed by the payload.  The
     * payload must fit in two length bytes and the chunk must fit in the
     * current block.
     */
    private void writeChunk(LogChunkType type, Slice slice)
            throws IOException
    {
        Preconditions.checkArgument(slice.length() <= 0xffff, "length %s is larger than two bytes", slice.length());
        Preconditions.checkArgument(blockOffset + HEADER_SIZE <= BLOCK_SIZE);

        // create header
        Slice header = newLogRecordHeader(type, slice, slice.length());

        // write the header and the payload
        header.getBytes(0, fileChannel, header.length());
        slice.getBytes(0, fileChannel, slice.length());

        blockOffset += HEADER_SIZE + slice.length();
    }

    /**
     * Builds a chunk header: 4-byte checksum, then the payload length as two
     * little-endian bytes, then the chunk-type byte.
     */
    private Slice newLogRecordHeader(LogChunkType type, Slice slice, int length)
    {
        // checksum covers the chunk type and the payload bytes
        int crc = Logs.getChunkChecksum(type.getPersistentId(), slice.getRawArray(), slice.getRawOffset(), length);

        // Format the header
        SliceOutput header = Slices.allocate(HEADER_SIZE).output();
        header.writeInt(crc);
        header.writeByte((byte) (length & 0xff));
        header.writeByte((byte) (length >>> 8));
        header.writeByte((byte) (type.getPersistentId()));

        return header.slice();
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.test.api.form; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.flowable.engine.common.api.FlowableException; import org.flowable.engine.common.api.FlowableIllegalArgumentException; import org.flowable.engine.common.api.FlowableObjectNotFoundException; import org.flowable.engine.common.impl.util.CollectionUtil; import org.flowable.engine.form.FormProperty; import org.flowable.engine.form.StartFormData; import org.flowable.engine.form.TaskFormData; import org.flowable.engine.impl.test.PluggableFlowableTestCase; import org.flowable.engine.repository.ProcessDefinition; import org.flowable.engine.runtime.ProcessInstance; import org.flowable.engine.test.Deployment; /** * @author Joram Barrez * @author Frederik Heremans * @author Tom Baeyens * @author Falko Menge (camunda) */ public class FormServiceTest extends PluggableFlowableTestCase { @Deployment(resources = { "org/flowable/examples/taskforms/VacationRequest_deprecated_forms.bpmn20.xml", "org/flowable/examples/taskforms/approve.form", "org/flowable/examples/taskforms/request.form", "org/flowable/examples/taskforms/adjustRequest.form" }) public void testGetStartFormByProcessDefinitionId() { List<ProcessDefinition> processDefinitions = repositoryService.createProcessDefinitionQuery().list(); assertEquals(1, 
processDefinitions.size()); ProcessDefinition processDefinition = processDefinitions.get(0); Object startForm = formService.getRenderedStartForm(processDefinition.getId()); assertNotNull(startForm); } @Deployment(resources = { "org/flowable/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testGetStartFormByProcessDefinitionIdWithoutStartform() { List<ProcessDefinition> processDefinitions = repositoryService.createProcessDefinitionQuery().list(); assertEquals(1, processDefinitions.size()); ProcessDefinition processDefinition = processDefinitions.get(0); Object startForm = formService.getRenderedStartForm(processDefinition.getId()); assertNull(startForm); } public void testGetStartFormByKeyNullKey() { try { formService.getRenderedStartForm(null); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { // Exception expected } } public void testGetStartFormByIdNullId() { try { formService.getRenderedStartForm(null); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { // Exception expected } } public void testGetStartFormByIdUnexistingProcessDefinitionId() { try { formService.getRenderedStartForm("unexistingId"); fail("ActivitiException expected"); } catch (FlowableObjectNotFoundException ae) { assertTextPresent("no deployed process definition found with id", ae.getMessage()); assertEquals(ProcessDefinition.class, ae.getObjectClass()); } } public void testGetTaskFormNullTaskId() { try { formService.getRenderedTaskForm(null); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException e) { // Expected Exception } } public void testGetTaskFormUnexistingTaskId() { try { formService.getRenderedTaskForm("unexistingtask"); fail("ActivitiException expected"); } catch (FlowableObjectNotFoundException ae) { assertTextPresent("Task 'unexistingtask' not found", ae.getMessage()); assertEquals(org.flowable.task.api.Task.class, ae.getObjectClass()); } } @Deployment(resources = { 
"org/flowable/engine/test/api/form/FormsProcess.bpmn20.xml", "org/flowable/engine/test/api/form/start.form", "org/flowable/engine/test/api/form/task.form" }) public void testTaskFormPropertyDefaultsAndFormRendering() { String procDefId = repositoryService.createProcessDefinitionQuery().singleResult().getId(); StartFormData startForm = formService.getStartFormData(procDefId); assertNotNull(startForm); assertEquals(deploymentIdFromDeploymentAnnotation, startForm.getDeploymentId()); assertEquals("org/flowable/engine/test/api/form/start.form", startForm.getFormKey()); assertEquals(new ArrayList<FormProperty>(), startForm.getFormProperties()); assertEquals(procDefId, startForm.getProcessDefinition().getId()); Object renderedStartForm = formService.getRenderedStartForm(procDefId); assertEquals("start form content", renderedStartForm); Map<String, String> properties = new HashMap<>(); properties.put("room", "5b"); properties.put("speaker", "Mike"); String processInstanceId = formService.submitStartFormData(procDefId, properties).getId(); Map<String, Object> expectedVariables = new HashMap<>(); expectedVariables.put("room", "5b"); expectedVariables.put("speaker", "Mike"); Map<String, Object> variables = runtimeService.getVariables(processInstanceId); assertEquals(expectedVariables, variables); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); String taskId = task.getId(); TaskFormData taskForm = formService.getTaskFormData(taskId); assertEquals(deploymentIdFromDeploymentAnnotation, taskForm.getDeploymentId()); assertEquals("org/flowable/engine/test/api/form/task.form", taskForm.getFormKey()); assertEquals(new ArrayList<FormProperty>(), taskForm.getFormProperties()); assertEquals(taskId, taskForm.getTask().getId()); assertEquals("Mike is speaking in room 5b", formService.getRenderedTaskForm(taskId)); properties = new HashMap<>(); properties.put("room", "3f"); formService.submitTaskFormData(taskId, properties); expectedVariables = new 
HashMap<>(); expectedVariables.put("room", "3f"); expectedVariables.put("speaker", "Mike"); variables = runtimeService.getVariables(processInstanceId); assertEquals(expectedVariables, variables); } @Deployment public void testFormPropertyHandling() { Map<String, String> properties = new HashMap<>(); properties.put("room", "5b"); // default properties.put("speaker", "Mike"); // variable name mapping properties.put("duration", "45"); // type conversion properties.put("free", "true"); // type conversion properties.put("double", "45.5"); // type conversion String procDefId = repositoryService.createProcessDefinitionQuery().singleResult().getId(); String processInstanceId = formService.submitStartFormData(procDefId, properties).getId(); Map<String, Object> expectedVariables = new HashMap<>(); expectedVariables.put("room", "5b"); expectedVariables.put("SpeakerName", "Mike"); expectedVariables.put("duration", 45l); expectedVariables.put("free", Boolean.TRUE); expectedVariables.put("double", 45.5d); Map<String, Object> variables = runtimeService.getVariables(processInstanceId); assertEquals(expectedVariables, variables); Address address = new Address(); address.setStreet("broadway"); runtimeService.setVariable(processInstanceId, "address", address); runtimeService.trigger(runtimeService.createExecutionQuery().processInstanceId(processInstanceId).onlyChildExecutions().singleResult().getId()); String taskId = taskService.createTaskQuery().singleResult().getId(); TaskFormData taskFormData = formService.getTaskFormData(taskId); List<FormProperty> formProperties = taskFormData.getFormProperties(); FormProperty propertyRoom = formProperties.get(0); assertEquals("room", propertyRoom.getId()); assertEquals("5b", propertyRoom.getValue()); FormProperty propertyDuration = formProperties.get(1); assertEquals("duration", propertyDuration.getId()); assertEquals("45", propertyDuration.getValue()); FormProperty propertySpeaker = formProperties.get(2); assertEquals("speaker", 
propertySpeaker.getId()); assertEquals("Mike", propertySpeaker.getValue()); FormProperty propertyStreet = formProperties.get(3); assertEquals("street", propertyStreet.getId()); assertEquals("broadway", propertyStreet.getValue()); FormProperty propertyFree = formProperties.get(4); assertEquals("free", propertyFree.getId()); assertEquals("true", propertyFree.getValue()); FormProperty propertyDouble = formProperties.get(5); assertEquals("double", propertyDouble.getId()); assertEquals("45.5", propertyDouble.getValue()); assertEquals(6, formProperties.size()); try { formService.submitTaskFormData(taskId, new HashMap<String, String>()); fail("expected exception about required form property 'street'"); } catch (FlowableException e) { // OK } try { properties = new HashMap<>(); properties.put("speaker", "its not allowed to update speaker!"); formService.submitTaskFormData(taskId, properties); fail("expected exception about a non writable form property 'speaker'"); } catch (FlowableException e) { // OK } properties = new HashMap<>(); properties.put("street", "rubensstraat"); formService.submitTaskFormData(taskId, properties); expectedVariables = new HashMap<>(); expectedVariables.put("room", "5b"); expectedVariables.put("SpeakerName", "Mike"); expectedVariables.put("duration", 45l); expectedVariables.put("free", Boolean.TRUE); expectedVariables.put("double", 45.5d); variables = runtimeService.getVariables(processInstanceId); address = (Address) variables.remove("address"); assertEquals("rubensstraat", address.getStreet()); assertEquals(expectedVariables, variables); } @Deployment public void testFormPropertyExpression() { Map<String, Object> varMap = new HashMap<>(); varMap.put("speaker", "Mike"); // variable name mapping Address address = new Address(); varMap.put("address", address); String procDefId = repositoryService.createProcessDefinitionQuery().singleResult().getId(); ProcessInstance processInstance = runtimeService.startProcessInstanceById(procDefId, varMap); 
String taskId = taskService.createTaskQuery().singleResult().getId(); TaskFormData taskFormData = formService.getTaskFormData(taskId); List<FormProperty> formProperties = taskFormData.getFormProperties(); FormProperty propertySpeaker = formProperties.get(0); assertEquals("speaker", propertySpeaker.getId()); assertEquals("Mike", propertySpeaker.getValue()); assertEquals(2, formProperties.size()); Map<String, String> properties = new HashMap<>(); properties.put("street", "Broadway"); formService.submitTaskFormData(taskId, properties); address = (Address) runtimeService.getVariable(processInstance.getId(), "address"); assertEquals("Broadway", address.getStreet()); } @SuppressWarnings("unchecked") @Deployment public void testFormPropertyDetails() { String procDefId = repositoryService.createProcessDefinitionQuery().singleResult().getId(); StartFormData startFormData = formService.getStartFormData(procDefId); FormProperty property = startFormData.getFormProperties().get(0); assertEquals("speaker", property.getId()); assertNull(property.getValue()); assertTrue(property.isReadable()); assertTrue(property.isWritable()); assertFalse(property.isRequired()); assertEquals("string", property.getType().getName()); property = startFormData.getFormProperties().get(1); assertEquals("start", property.getId()); assertNull(property.getValue()); assertTrue(property.isReadable()); assertTrue(property.isWritable()); assertFalse(property.isRequired()); assertEquals("date", property.getType().getName()); assertEquals("dd-MMM-yyyy", property.getType().getInformation("datePattern")); property = startFormData.getFormProperties().get(2); assertEquals("direction", property.getId()); assertNull(property.getValue()); assertTrue(property.isReadable()); assertTrue(property.isWritable()); assertFalse(property.isRequired()); assertEquals("enum", property.getType().getName()); Map<String, String> values = (Map<String, String>) property.getType().getInformation("values"); Map<String, String> 
expectedValues = new LinkedHashMap<>(); expectedValues.put("left", "Go Left"); expectedValues.put("right", "Go Right"); expectedValues.put("up", "Go Up"); expectedValues.put("down", "Go Down"); // ACT-1023: check if ordering is retained Iterator<Entry<String, String>> expectedValuesIterator = expectedValues.entrySet().iterator(); for (Entry<String, String> entry : values.entrySet()) { Entry<String, String> expectedEntryAtLocation = expectedValuesIterator.next(); assertEquals(expectedEntryAtLocation.getKey(), entry.getKey()); assertEquals(expectedEntryAtLocation.getValue(), entry.getValue()); } assertEquals(expectedValues, values); } @Deployment public void testInvalidFormKeyReference() { try { formService.getRenderedStartForm(repositoryService.createProcessDefinitionQuery().singleResult().getId()); fail(); } catch (FlowableException e) { assertTextPresent("Form with formKey 'IDoNotExist' does not exist", e.getMessage()); } } @Deployment public void testSubmitStartFormDataWithBusinessKey() { Map<String, String> properties = new HashMap<>(); properties.put("duration", "45"); properties.put("speaker", "Mike"); String procDefId = repositoryService.createProcessDefinitionQuery().singleResult().getId(); ProcessInstance processInstance = formService.submitStartFormData(procDefId, "123", properties); assertEquals("123", processInstance.getBusinessKey()); assertEquals(processInstance.getId(), runtimeService.createProcessInstanceQuery().processInstanceBusinessKey("123").singleResult().getId()); } public void testGetStartFormKeyEmptyArgument() { try { formService.getStartFormKey(null); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { assertTextPresent("The process definition id is mandatory, but 'null' has been provided.", ae.getMessage()); } try { formService.getStartFormKey(""); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { assertTextPresent("The process definition id is mandatory, but '' has been 
provided.", ae.getMessage()); } } @Deployment(resources = "org/flowable/engine/test/api/form/FormsProcess.bpmn20.xml") public void testGetStartFormKey() { String processDefinitionId = repositoryService.createProcessDefinitionQuery().singleResult().getId(); String expectedFormKey = formService.getStartFormData(processDefinitionId).getFormKey(); String actualFormKey = formService.getStartFormKey(processDefinitionId); assertEquals(expectedFormKey, actualFormKey); } public void testGetTaskFormKeyEmptyArguments() { try { formService.getTaskFormKey(null, "23"); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { assertTextPresent("The process definition id is mandatory, but 'null' has been provided.", ae.getMessage()); } try { formService.getTaskFormKey("", "23"); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { assertTextPresent("The process definition id is mandatory, but '' has been provided.", ae.getMessage()); } try { formService.getTaskFormKey("42", null); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { assertTextPresent("The task definition key is mandatory, but 'null' has been provided.", ae.getMessage()); } try { formService.getTaskFormKey("42", ""); fail("ActivitiException expected"); } catch (FlowableIllegalArgumentException ae) { assertTextPresent("The task definition key is mandatory, but '' has been provided.", ae.getMessage()); } } @Deployment(resources = "org/flowable/engine/test/api/form/FormsProcess.bpmn20.xml") public void testGetTaskFormKey() { String processDefinitionId = repositoryService.createProcessDefinitionQuery().singleResult().getId(); runtimeService.startProcessInstanceById(processDefinitionId); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertNotNull(task); String expectedFormKey = formService.getTaskFormData(task.getId()).getFormKey(); String actualFormKey = 
formService.getTaskFormKey(task.getProcessDefinitionId(), task.getTaskDefinitionKey()); assertEquals(expectedFormKey, actualFormKey); } @Deployment public void testGetTaskFormKeyWithExpression() { runtimeService.startProcessInstanceByKey("FormsProcess", CollectionUtil.singletonMap("dynamicKey", "test")); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertNotNull(task); assertEquals("test", formService.getTaskFormData(task.getId()).getFormKey()); } @Deployment(resources = { "org/flowable/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testSubmitTaskFormData() { List<ProcessDefinition> processDefinitions = repositoryService.createProcessDefinitionQuery().list(); assertEquals(1, processDefinitions.size()); ProcessDefinition processDefinition = processDefinitions.get(0); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey(processDefinition.getKey()); assertNotNull(processInstance); org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); assertNotNull(task); Map<String, String> properties = new HashMap<>(); properties.put("room", "5b"); formService.submitTaskFormData(task.getId(), properties); task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); assertNull(task); } @Deployment(resources = { "org/flowable/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testSaveFormData() { List<ProcessDefinition> processDefinitions = repositoryService.createProcessDefinitionQuery().list(); assertEquals(1, processDefinitions.size()); ProcessDefinition processDefinition = processDefinitions.get(0); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey(processDefinition.getKey()); assertNotNull(processInstance); org.flowable.task.api.Task task = null; task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); assertNotNull(task); String taskId = 
task.getId(); Map<String, String> properties = new HashMap<>(); properties.put("room", "5b"); Map<String, String> expectedVariables = new HashMap<>(); expectedVariables.put("room", "5b"); formService.saveFormData(task.getId(), properties); task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); assertEquals(taskId, task.getId()); Map<String, Object> variables = taskService.getVariables(taskId); assertEquals(expectedVariables, variables); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.cyclefinder; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.io.Files; import com.google.common.io.Resources; import com.google.devtools.j2objc.util.ErrorUtil; import com.google.devtools.j2objc.util.ExternalAnnotations; import com.google.devtools.j2objc.util.SourceVersion; import com.google.devtools.j2objc.util.Version; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.net.URL; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Properties; class Options { private static final String XBOOTCLASSPATH = "-Xbootclasspath:"; private static String usageMessage; private static String helpMessage; static { // Load string resources. 
URL propertiesUrl = Resources.getResource(CycleFinder.class, "CycleFinder.properties"); Properties properties = new Properties(); try { properties.load(propertiesUrl.openStream()); } catch (IOException e) { System.err.println("unable to access tool properties: " + e); System.exit(1); } usageMessage = properties.getProperty("usage-message"); Preconditions.checkNotNull(usageMessage); helpMessage = properties.getProperty("help-message"); Preconditions.checkNotNull(helpMessage); } private String sourcepath; private String classpath; private String bootclasspath; private List<String> whitelistFiles = Lists.newArrayList(); private List<String> blacklistFiles = Lists.newArrayList(); private List<String> sourceFiles = Lists.newArrayList(); private String fileEncoding = System.getProperty("file.encoding", "UTF-8"); private boolean printReferenceGraph = false; private SourceVersion sourceVersion = null; private final ExternalAnnotations externalAnnotations = new ExternalAnnotations(); // Flags that are directly forwarded to the javac parser. private static final ImmutableSet<String> PLATFORM_MODULE_SYSTEM_OPTIONS = ImmutableSet.of("--patch-module", "--system", "--add-reads"); private final List<String> platformModuleSystemOptions = new ArrayList<>(); public List<String> getSourceFiles() { return sourceFiles; } public void setSourceFiles(List<String> files) { this.sourceFiles = files; } public String getSourcepath() { return sourcepath; } public String getClasspath() { return classpath; } public void setClasspath(String classpath) { this.classpath = classpath; } public String getBootclasspath() { return bootclasspath != null ? 
bootclasspath : System.getProperty("sun.boot.class.path"); } public List<String> getWhitelistFiles() { return whitelistFiles; } public void addWhitelistFile(String fileName) { whitelistFiles.add(fileName); } public List<String> getBlacklistFiles() { return blacklistFiles; } public void addBlacklistFile(String fileName) { blacklistFiles.add(fileName); } private void addManifest(String manifestFile) throws IOException { BufferedReader in = Files.newReader(new File(manifestFile), Charset.forName(fileEncoding)); try { for (String line = in.readLine(); line != null; line = in.readLine()) { if (!Strings.isNullOrEmpty(line)) { sourceFiles.add(line.trim()); } } } finally { in.close(); } } public String fileEncoding() { return fileEncoding; } public SourceVersion sourceVersion() { if (sourceVersion == null) { sourceVersion = SourceVersion.defaultVersion(); } return sourceVersion; } @VisibleForTesting void setSourceVersion(SourceVersion sv) { sourceVersion = sv; } public boolean printReferenceGraph() { return printReferenceGraph; } @VisibleForTesting public void setPrintReferenceGraph() { printReferenceGraph = true; } public ExternalAnnotations externalAnnotations() { return externalAnnotations; } @VisibleForTesting public void addExternalAnnotationFile(String file) throws IOException { externalAnnotations.addExternalAnnotationFile(file); } public void addPlatformModuleSystemOptions(String... flags) { Collections.addAll(platformModuleSystemOptions, flags); } public List<String> getPlatformModuleSystemOptions() { return platformModuleSystemOptions; } public static void usage(String invalidUseMsg) { System.err.println("cycle_finder: " + invalidUseMsg); System.err.println(usageMessage); System.exit(1); } public static void help(boolean errorExit) { System.err.println(helpMessage); // javac exits with 2, but any non-zero value works. System.exit(errorExit ? 
2 : 0); } public static void version() { System.err.println("cycle_finder " + Version.jarVersion(Options.class)); System.exit(0); } public static Options parse(String[] args) throws IOException { Options options = new Options(); int nArg = 0; while (nArg < args.length) { String arg = args[nArg]; if (arg.equals("-sourcepath")) { if (++nArg == args.length) { usage("-sourcepath requires an argument"); } options.sourcepath = args[nArg]; } else if (arg.equals("-classpath")) { if (++nArg == args.length) { usage("-classpath requires an argument"); } options.classpath = args[nArg]; } else if (arg.equals("--whitelist") || arg.equals("-w")) { if (++nArg == args.length) { usage("--whitelist requires an argument"); } options.whitelistFiles.add(args[nArg]); } else if (arg.equals("--blacklist")) { if (++nArg == args.length) { usage("--blacklist requires an argument"); } options.blacklistFiles.add(args[nArg]); } else if (arg.equals("--sourcefilelist") || arg.equals("-s")) { if (++nArg == args.length) { usage("--sourcefilelist requires an argument"); } options.addManifest(args[nArg]); } else if (arg.startsWith(XBOOTCLASSPATH)) { options.bootclasspath = arg.substring(XBOOTCLASSPATH.length()); } else if (arg.equals("-encoding")) { if (++nArg == args.length) { usage("-encoding requires an argument"); } options.fileEncoding = args[nArg]; } else if (arg.equals("-source")) { if (++nArg == args.length) { usage("-source requires an argument"); } try { options.sourceVersion = SourceVersion.parse(args[nArg]); SourceVersion maxVersion = SourceVersion.getMaxSupportedVersion(); if (options.sourceVersion.version() > maxVersion.version()) { ErrorUtil.warning("Java " + options.sourceVersion.version() + " source version is not " + "supported, using Java " + maxVersion.version() + "."); options.sourceVersion = maxVersion; } } catch (IllegalArgumentException e) { usage("invalid source release: " + args[nArg]); } } else if (arg.equals("--print-reference-graph")) { options.printReferenceGraph = true; 
} else if (arg.equals("-external-annotation-file")) { if (++nArg == args.length) { usage(arg + " requires an argument"); } options.addExternalAnnotationFile(args[nArg]); } else if (PLATFORM_MODULE_SYSTEM_OPTIONS.contains(arg)) { String option = arg; if (++nArg == args.length) { usage(option + " requires an argument"); } options.addPlatformModuleSystemOptions(option, args[nArg]); } else if (arg.equals("-version")) { version(); } else if (arg.startsWith("-h") || arg.equals("--help")) { help(false); } else if (arg.startsWith("-")) { usage("invalid flag: " + arg); } else { break; } ++nArg; } while (nArg < args.length) { options.sourceFiles.add(args[nArg++]); } if (options.sourceFiles.isEmpty()) { usage("no source files"); } return options; } }
package org.thunlp.learning.lda; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapred.lib.IdentityMapper; import org.thunlp.mapred.MapReduceJobConf; import org.thunlp.misc.AnyDoublePair; import org.thunlp.misc.Flags; import org.thunlp.tool.FolderReader; import org.thunlp.tool.FolderWriter; import org.thunlp.tool.GenericTool; import java.io.IOException; import java.util.*; import java.util.Map.Entry; import java.util.logging.Logger; public class InitModelTool implements GenericTool { private static Logger LOG = Logger.getAnonymousLogger(); public void run(String[] args) throws Exception { Flags flags = new Flags(); flags.add("input"); flags.add("output_docs"); flags.add("output_nwz"); flags.add("num_topics"); flags.add("wordlist"); flags.add("max_num_words"); flags.add("min_df"); flags.parseAndCheck(args); Path input = new Path(flags.getString("input")); Path tfdf = new Path(flags.getString("wordlist") + ".tf_df"); Path wordlist = new Path(flags.getString("wordlist")); int maxNumWords = flags.getInt("max_num_words"); int minDf = flags.getInt("min_df"); makeWordList(input, tfdf); int numWords = selectWords(tfdf, wordlist, maxNumWords, minDf); initModel( input, new Path(flags.getString("output_docs")), new Path(flags.getString("output_nwz")), wordlist, flags.getInt("num_topics"), numWords ); } /** * Load word list, make word to id mapping. * All words are first sorted by their TF*IDF value. The top maxNumWords words * are used for training. TF*IDF is a widely used method for selecting * informative words in Information Retrieval, see Wikipedia for a more * detailed explanation. 
* <p/> * Note: words started with an underscore '_' are always kept, and they are * not count as number of words. This is used for special purpose. * * @param wordFile SequenceFile of "word":"tf df". * @param maxNumWords How many words to keep for training, -1 means all. * @return number of words used. * @throws IOException */ public int selectWords(Path tfdf, Path wordlist, int maxNumWords, int minDf) throws IOException { Map<String, WordFreq> wordCounts = loadWordFreq(tfdf); List<String> specialKeys = new LinkedList<String>(); WordFreq total = wordCounts.get(WordListMapper.NUM_DOCS_STRING); if (total == null) { throw new RuntimeException("No number of docs key in the word list."); } List<AnyDoublePair<String>> weights = new ArrayList<AnyDoublePair<String>>(); for (Entry<String, WordFreq> e : wordCounts.entrySet()) { if (e.getKey().startsWith("_")) { specialKeys.add(e.getKey()); continue; } else if (e.getKey().equals(WordListMapper.NUM_DOCS_STRING)) { continue; } WordFreq wf = e.getValue(); if (wf.df > minDf) { double weight = wf.tf / total.tf * Math.log((total.df / wf.df)); weights.add(new AnyDoublePair<String>(e.getKey(), weight)); } } Collections.sort(weights, new Comparator<AnyDoublePair<String>>() { public int compare(AnyDoublePair<String> o1, AnyDoublePair<String> o2) { return Double.compare(o2.second, o1.second); } }); FolderWriter writer = new FolderWriter(wordlist, Text.class, IntWritable.class); Text key = new Text(); IntWritable value = new IntWritable(); if (maxNumWords == -1) maxNumWords = Integer.MAX_VALUE; int numWords = Math.min(maxNumWords, weights.size()); for (int i = 0; i < numWords; i++) { key.set(weights.get(i).first); value.set(i); writer.append(key, value); } for (String specialKey : specialKeys) { key.set(specialKey); value.set(numWords); writer.append(key, value); numWords++; } writer.close(); LOG.info("Load " + wordCounts.size() + " words, keep " + numWords); return numWords; } public Map<String, WordFreq> loadWordFreq(Path sqfile) 
throws IOException { Hashtable<String, WordFreq> keymap = new Hashtable<String, WordFreq>(); FolderReader reader = new FolderReader(sqfile); Text key = new Text(); Text value = new Text(); while (reader.next(key, value)) { WordFreq wf = new WordFreq(); String str = value.toString(); int split = str.indexOf(' '); wf.tf = (double) Long.parseLong(str.substring(0, split)); wf.df = (double) Long.parseLong(str.substring(split + 1)); keymap.put(key.toString(), wf); } reader.close(); return keymap; } /** * Given a corpus, make a word list. The word list consists of * KV records where the word is the key and the value is "TF DF" * * @param input Input path consisting of labels and text * records as specified in README * @param output Output path target * @throws IOException */ public void makeWordList(Path input, Path output) throws IOException { MapReduceJobConf job = new MapReduceJobConf(this.getClass()); job.setJobName("EstimateWordFreqForLLDA"); job.setMapReduce(WordListMapper.class, WordListReducer.class); job.setCombinerClass(WordListCombiner.class); job.setKeyValueClass(Text.class, Text.class, Text.class, Text.class); SequenceFileInputFormat.addInputPath(job, input); SequenceFileOutputFormat.setOutputPath(job, output); JobClient.runJob(job); } /** * Given a corpus, make a label list. The label list consists of * KV records where the label is the key and its occurrence count * is the value. 
* * @param input Input path consisting of labels and text * records as specified in README * @param output Output path target * @throws IOException */ public void makeLabelList(Path input, Path output) throws IOException { MapReduceJobConf job = new MapReduceJobConf(this.getClass()); job.setJobName("EstimateLabelFreqForLLDA"); job.setMapReduce(LabelListMapper.class, LabelListReducer.class); job.setCombinerClass(LabelListReducer.class); job.setKeyValueClass(Text.class, Text.class, Text.class, LongWritable.class); SequenceFileInputFormat.addInputPath(job, input); SequenceFileOutputFormat.setOutputPath(job, output); JobClient.runJob(job); } public void initModel( Path input, Path outputDocs, Path outputNwz, Path wordlist, int numTopics, int numWords) throws IOException { JobConf envConf = new JobConf(); FileSystem fs = FileSystem.get(envConf); Path tmpNwz = new Path(outputNwz + "_tmp").makeQualified(fs); wordlist = wordlist.makeQualified(fs); MapReduceJobConf job = new MapReduceJobConf(this.getClass()); FileSystem.get(job).mkdirs(tmpNwz); job.setJobName("InitializeModelForLDA"); job.setMapReduce(InitModelMapper.class, InitModelReducer.class); job.setKeyValueClass( Text.class, DocumentWritable.class, Text.class, DocumentWritable.class); SequenceFileInputFormat.addInputPath(job, input); SequenceFileOutputFormat.setOutputPath(job, outputDocs); job.set("wordlist", wordlist.toString()); job.set("output.nwz", tmpNwz.toString()); job.setInt("num.topics", numTopics); job.setInt("num.words", numWords); JobClient.runJob(job); combineModelParam(tmpNwz, outputNwz); fs.delete(tmpNwz); System.out.println("Done"); } private void combineModelParam(Path inputNwz, Path outputNwz) throws IOException { MapReduceJobConf job = new MapReduceJobConf(this.getClass()); job.setJobName("CombineModelParametersForLDA"); SequenceFileInputFormat.addInputPath(job, inputNwz); SequenceFileOutputFormat.setOutputPath(job, outputNwz); job.setMapReduce(IdentityMapper.class, CombineModelParamReducer.class); 
job.setKeyValueClass( IntWritable.class, WordInfoWritable.class, IntWritable.class, WordInfoWritable.class); job.setBoolean("take.mean", false); JobClient.runJob(job); } private static class WordFreq { public double tf; public double df; } }
/** * Copyright 2015 Pavel Khokhlov <pkhokhlov@hotmail.com> * Jack Prescott <jackbprescott@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.optimalCombinations.algo; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; public class Group { ArrayList<Unit> members_ = new ArrayList<Unit>(); int groupSize_; int minimumUnitStrength_; boolean flag_ = false; Group() { } public Group(Unit[] units) { for (Unit u : units) { addUnit(u); } groupSize_ = members_.size(); } public Group(ArrayList<Unit> units) { for(Unit u: units) { addUnit(u); } groupSize_ = members_.size(); } public ArrayList<Unit> getMembers() { return members_; } public int getSize() { return members_.size(); } public void addUnit(Unit u) { members_.add(u); groupSize_ = members_.size(); } public void removeUnit(Unit u) { members_.remove(u); } public boolean containsMembersOf(Group other) { for(Unit u: other.getMembers()) { if(members_.contains(u)) return true; } return false; } /** * This function calculates the group strength. * This function works for any number of positive connections. * This function does not use negConns. 
Refer to getGroupScoreNegConns * @return groupScore */ public int getGroupScore() { int groupScore = 0; for (int i = 0; i < members_.size(); i++) { members_.get(i).getPosConns().remove(null); ArrayList<Unit> memPosConnsi = members_.get(i).getPosConns(); middleLoop: for (int posConnIndex = 0; posConnIndex < memPosConnsi.size(); posConnIndex++) { Unit posConnx = memPosConnsi.get(posConnIndex); for (int j = 0; j < members_.size(); j++) { if(posConnx == members_.get(j)) { if(posConnIndex == 0) groupScore += 3; else if(posConnIndex == 1) groupScore += 2; else groupScore++; continue middleLoop; // does not go further in comparing once match is found } } } } return groupScore; } /** * This function returns the index of a negConn in the group. * @return index of negConn * @return 0 if there is none */ public int getIndexNegConn() { for(int i = 0; i < members_.size(); i++) { for(int j = 0; j < members_.size(); j++) { if(j == i) continue; if(members_.get(i).getNegConn() == null) continue; if(members_.get(i).getNegConn().name_.equals(members_.get(j).name_)) { return j; } } } return -1; } public int getMinUnitScore() { int minUnitScore = Integer.MAX_VALUE; for (int i = 0; i < members_.size(); i++) { members_.get(i).getPosConns().remove(null); ArrayList<Unit> memPosConnsi = members_.get(i).getPosConns(); int tempUnitScore = 0; middleLoop: for (int x = 0; x < memPosConnsi.size(); x++) { Unit posConnx = memPosConnsi.get(x); for (int j = 0; j < members_.size(); j++) { if(posConnx == members_.get(j)) { if(x == 0) tempUnitScore += 3; else if(x == 1) tempUnitScore += 2; else tempUnitScore++; continue middleLoop; // does not go further in comparing once match is found } } } if(tempUnitScore < minUnitScore) minUnitScore = tempUnitScore; } return minUnitScore; } /** * TODO: fix * @return */ public int getMinimumUnitStrength() { minimumUnitStrength_ = Integer.MAX_VALUE; for (int y = 0; y < members_.size(); y++) { for (int x = 0; x < members_.get(0).posConnSize_; x++) { int 
unitStrength=0; if(x == y) // cannot have yourself as a positive connection continue; int tempPoints = members_.get(y).getPosConns().indexOf(members_.get(x)); if (tempPoints != -1) // if members_.get(x) is present in the positive connections of members_(y) { unitStrength += members_.get(0).getPosConnSize() - tempPoints; // adds the priority of the member to the unitStrength // the higher the priority, the lower the index in posConns_ } else if (members_.get(y).getNegConn().getName().equals(members_.get(x).getName())) // else if members_.get(x) is { // a negative connection of members_.get(y) unitStrength -= 3; } if(unitStrength<minimumUnitStrength_) { minimumUnitStrength_=unitStrength; } } } return minimumUnitStrength_; } public int getGroupScorePrint() { int groupScore = 0; System.out.println(groupScore); for (int i = 0; i < members_.size(); i++) { members_.get(i).getPosConns().remove(null); ArrayList<Unit> memPosConnsi = members_.get(i).getPosConns(); middleLoop: for (int x = 0; x < memPosConnsi.size(); x++) { Unit posConnx = memPosConnsi.get(x); for (int j = 0; j < members_.size(); j++) { if(posConnx == members_.get(j)) { if(x == 0) { groupScore += 3; System.out.println(groupScore); } else if(x == 1) { groupScore += 2; System.out.println(groupScore); } else { groupScore++; System.out.println(groupScore); } continue middleLoop; // does not go further in comparing once match is found } } } } outerLoop: for(int i = 0; i < members_.size(); i++) { Unit negConni = members_.get(i).getNegConn(); for(int j = 0; j < members_.size(); j++) { if(negConni == members_.get(j)) { groupScore -= 3; System.out.println("- 3"); continue outerLoop; } } } return groupScore; } /** * This function puts the members of the group in alphabetical order. 
*/ public void alphabetize() { Collections.sort(members_, new Comparator<Unit>() { @Override public int compare(final Unit unit1, final Unit unit2) { return unit1.getName().compareTo(unit2.getName()); } }); } /** * This function returns the string representation of the group. The group score is included */ public String toString() { String result = "{"; for(int i = 0; i < members_.size() - 1; i++) { result = result + members_.get(i) + ", "; } return result + members_.get(members_.size() - 1) + "}" + "(" + getGroupScore() + ")"; } /** * This function calculates the group strength. * This function works for any number of positive connections. * This function works if the negative connection is null. * @return groupScore */ public int getGroupScoreNegConns() { int groupScore = 0; for (int i = 0; i < members_.size(); i++) { members_.get(i).getPosConns().remove(null); ArrayList<Unit> memPosConnsi = members_.get(i).getPosConns(); middleLoop: for (int posConnIndex = 0; posConnIndex < memPosConnsi.size(); posConnIndex++) { Unit posConnx = memPosConnsi.get(posConnIndex); for (int j = 0; j < members_.size(); j++) { if(posConnx == members_.get(j)) { if(posConnIndex == 0) groupScore += 3; else if(posConnIndex == 1) groupScore += 2; else groupScore++; continue middleLoop; // does not go further in comparing once match is found } } } } outerLoop: for(int i = 0; i < members_.size(); i++) { Unit negConni = members_.get(i).getNegConn(); for(int j = 0; j < members_.size(); j++) { if(negConni == members_.get(j)) { groupScore -= 3; continue outerLoop; } } } return groupScore; } /** * non-functioning */ public int getGroupScoreObsolete() { int groupScore = 0; for (int i = 0; i < members_.size(); i++) { for (int x = 0; x < members_.get(i).posConnSize_; x++) { ArrayList<Unit> memPosConns = members_.get(i).getPosConns(); int tempPoints = memPosConns.indexOf(members_.get(x)); if (tempPoints != -1) // if members_.get(x) is present in the positive connections of members_(y) { groupScore += 
members_.get(0).getPosConnSize() - tempPoints; // adds the priority of the member to the groupscore // the higher the priority, the lower the index in posConns_ } else if (members_.get(i).getNegConn().getName().equals(members_.get(x).getName())) // else if members_.get(x) is { // a negative connection of members_.get(y) groupScore -= 3; } } } return groupScore; } }
/**
 * Copyright 2007 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver.wal;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.EOFException;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableUtils;

/**
 * A Key for an entry in the change log.
 *
 * The log intermingles edits to many tables and rows, so each log entry
 * identifies the appropriate table and row. Within a table and row, they're
 * also sorted.
 *
 * <p>Some Transactional edits (START, COMMIT, ABORT) will not have an
 * associated row.
 */
public class HLogKey implements WritableComparable<HLogKey> {
  /**
   * On-disk format version of a serialized key.  Version codes are written
   * as negative vints so a reader can tell them apart from the non-negative
   * array length emitted by old, unversioned keys -- see
   * {@link #readFields(DataInput)} for how that disambiguation works.
   */
  // should be < 0 (@see #readFields(DataInput))
  // version 2 supports HLog compression
  enum Version {
    UNVERSIONED(0),
    // Initial number we put on HLogKey when we introduced versioning.
    INITIAL(-1),
    // Version -2 introduced a dictionary compression facility. Only this
    // dictionary-based compression is available in version -2.
    COMPRESSED(-2);

    // The code actually written to / read from the stream.
    final int code;
    // Lookup table indexed by -code.  The static block validates the
    // invariant fromCode() relies on: codes descend by exactly one.
    static final Version[] byCode;
    static {
      byCode = Version.values();
      for (int i = 0; i < byCode.length; i++) {
        if (byCode[i].code != -1 * i) {
          throw new AssertionError("Values in this enum should be descending by one");
        }
      }
    }

    Version(int code) {
      this.code = code;
    }

    // Codes descend (0, -1, -2, ...), so "at least as new as" means <=.
    boolean atLeast(Version other) {
      return code <= other.code;
    }

    // Maps a (non-positive) wire code back to its enum constant.
    static Version fromCode(int code) {
      return byCode[code * -1];
    }
  }

  // Newest version this code writes; readFields() still accepts older ones.
  private static final Version VERSION = Version.COMPRESSED;

  // The encoded region name.
  private byte [] encodedRegionName;
  private byte [] tablename;
  private long logSeqNum;
  // Time at which this edit was written.
  private long writeTime;
  // Source cluster id; used by replication to identify where an edit
  // originated.  Defaults to HConstants.DEFAULT_CLUSTER_ID.
  private UUID clusterId;

  // When non-null, region/table names are (de)serialized through the
  // dictionaries in this context instead of as raw byte arrays.
  private CompressionContext compressionContext;

  /** Writable Constructor -- Do not use. */
  public HLogKey() {
    this(null, null, 0L, HConstants.LATEST_TIMESTAMP, HConstants.DEFAULT_CLUSTER_ID);
  }

  /**
   * Create the log key!
   * We maintain the tablename mainly for debugging purposes.
   * A regionName is always a sub-table object.
   *
   * @param encodedRegionName Encoded name of the region as returned by
   * <code>HRegionInfo#getEncodedNameAsBytes()</code>.
   * @param tablename - name of table
   * @param logSeqNum - log sequence number
   * @param now Time at which this edit was written.
   * @param clusterId of the cluster (used in Replication)
   */
  public HLogKey(final byte [] encodedRegionName, final byte [] tablename,
      long logSeqNum, final long now, UUID clusterId) {
    this.encodedRegionName = encodedRegionName;
    this.tablename = tablename;
    this.logSeqNum = logSeqNum;
    this.writeTime = now;
    this.clusterId = clusterId;
  }

  /**
   * @param compressionContext Compression context to use
   */
  public void setCompressionContext(CompressionContext compressionContext) {
    this.compressionContext = compressionContext;
  }

  /** @return encoded region name */
  public byte [] getEncodedRegionName() {
    return encodedRegionName;
  }

  /** @return table name */
  public byte [] getTablename() {
    return tablename;
  }

  /** @return log sequence number */
  public long getLogSeqNum() {
    return logSeqNum;
  }

  void setLogSeqNum(long logSeqNum) {
    this.logSeqNum = logSeqNum;
  }

  /**
   * @return the write time
   */
  public long getWriteTime() {
    return this.writeTime;
  }

  /**
   * Get the id of the original cluster
   * @return Cluster id.
   */
  public UUID getClusterId() {
    return clusterId;
  }

  /**
   * Set the cluster id of this key
   * @param clusterId
   */
  public void setClusterId(UUID clusterId) {
    this.clusterId = clusterId;
  }

  @Override
  public String toString() {
    return Bytes.toString(tablename) + "/" + Bytes.toString(encodedRegionName) + "/" +
      logSeqNum;
  }

  /**
   * Produces a string map for this key. Useful for programmatic use and
   * manipulation of the data stored in an HLogKey, for example, printing
   * as JSON.
   *
   * @return a Map containing data from this key
   */
  public Map<String, Object> toStringMap() {
    Map<String, Object> stringMap = new HashMap<String, Object>();
    stringMap.put("table", Bytes.toStringBinary(tablename));
    stringMap.put("region", Bytes.toStringBinary(encodedRegionName));
    stringMap.put("sequence", logSeqNum);
    return stringMap;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    // Equality delegates to compareTo(), which does NOT consider clusterId,
    // while hashCode() below does mix clusterId in.
    // NOTE(review): keys that compare equal but carry different cluster ids
    // would have different hash codes -- confirm this is intended.
    return compareTo((HLogKey)obj) == 0;
  }

  @Override
  public int hashCode() {
    int result = Bytes.hashCode(this.encodedRegionName);
    // Compound ^= implicitly narrows the longs to int.
    result ^= this.logSeqNum;
    result ^= this.writeTime;
    result ^= this.clusterId.hashCode();
    return result;
  }

  // Ordering: region name, then sequence number, then write time.
  public int compareTo(HLogKey o) {
    int result = Bytes.compareTo(this.encodedRegionName, o.encodedRegionName);
    if (result == 0) {
      if (this.logSeqNum < o.logSeqNum) {
        result = -1;
      } else if (this.logSeqNum > o.logSeqNum) {
        result = 1;
      }
      if (result == 0) {
        if (this.writeTime < o.writeTime) {
          result = -1;
        } else if (this.writeTime > o.writeTime) {
          return 1;
        }
      }
    }
    // why isn't cluster id accounted for?
    return result;
  }

  /**
   * Drop this instance's tablename byte array and instead
   * hold a reference to the provided tablename. This is not
   * meant to be a general purpose setter - it's only used
   * to collapse references to conserve memory.
   */
  void internTableName(byte []tablename) {
    // We should not use this as a setter - only to swap
    // in a new reference to the same table name.
    assert Bytes.equals(tablename, this.tablename);
    this.tablename = tablename;
  }

  /**
   * Drop this instance's region name byte array and instead
   * hold a reference to the provided region name. This is not
   * meant to be a general purpose setter - it's only used
   * to collapse references to conserve memory.
   */
  void internEncodedRegionName(byte []encodedRegionName) {
    // We should not use this as a setter - only to swap
    // in a new reference to the same table name.
    assert Bytes.equals(this.encodedRegionName, encodedRegionName);
    this.encodedRegionName = encodedRegionName;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    // Version code first (negative vint) -- see readFields() for why.
    WritableUtils.writeVInt(out, VERSION.code);
    if (compressionContext == null) {
      Bytes.writeByteArray(out, this.encodedRegionName);
      Bytes.writeByteArray(out, this.tablename);
    } else {
      Compressor.writeCompressed(this.encodedRegionName, 0,
          this.encodedRegionName.length, out,
          compressionContext.regionDict);
      Compressor.writeCompressed(this.tablename, 0, this.tablename.length, out,
          compressionContext.tableDict);
    }
    out.writeLong(this.logSeqNum);
    out.writeLong(this.writeTime);
    // avoid storing 16 bytes when replication is not enabled
    // NOTE(review): this is a reference comparison, so only keys holding the
    // HConstants.DEFAULT_CLUSTER_ID instance itself take the short path; a
    // value-equal UUID from another source is still written in full. Confirm
    // that is the intent before "fixing" it to equals().
    if (this.clusterId == HConstants.DEFAULT_CLUSTER_ID) {
      out.writeBoolean(false);
    } else {
      out.writeBoolean(true);
      out.writeLong(this.clusterId.getMostSignificantBits());
      out.writeLong(this.clusterId.getLeastSignificantBits());
    }
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    Version version = Version.UNVERSIONED;
    // HLogKey was not versioned in the beginning.
    // In order to introduce it now, we make use of the fact
    // that encodedRegionName was written with Bytes.writeByteArray,
    // which encodes the array length as a vint which is >= 0.
    // Hence if the vint is >= 0 we have an old version and the vint
    // encodes the length of encodedRegionName.
    // If < 0 we just read the version and the next vint is the length.
    // @see Bytes#readByteArray(DataInput)
    int len = WritableUtils.readVInt(in);
    if (len < 0) {
      // what we just read was the version
      version = Version.fromCode(len);
      // We only compress V2 of HLogkey.
      // If compression is on, the length is handled by the dictionary
      if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
        len = WritableUtils.readVInt(in);
      }
    }
    if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
      this.encodedRegionName = new byte[len];
      in.readFully(this.encodedRegionName);
      this.tablename = Bytes.readByteArray(in);
    } else {
      this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict);
      this.tablename = Compressor.readCompressed(in, compressionContext.tableDict);
    }
    this.logSeqNum = in.readLong();
    this.writeTime = in.readLong();
    this.clusterId = HConstants.DEFAULT_CLUSTER_ID;
    if (version.atLeast(Version.INITIAL)) {
      // Versioned keys carry a presence flag followed by the 128-bit id.
      if (in.readBoolean()) {
        this.clusterId = new UUID(in.readLong(), in.readLong());
      }
    } else {
      try {
        // dummy read (former byte cluster id)
        in.readByte();
      } catch(EOFException e) {
        // Means it's a very old key, just continue
      }
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.io;

import java.io.FileDescriptor;
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.nativeio.NativeIO;

import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * Manages a pool of threads which can issue readahead requests on file descriptors.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ReadaheadPool {
  static final Log LOG = LogFactory.getLog(ReadaheadPool.class);
  // Core threads kept alive in the executor.
  private static final int POOL_SIZE = 4;
  // Upper bound on worker threads once the queue fills.
  private static final int MAX_POOL_SIZE = 16;
  // Bounded work-queue size; on overflow the oldest pending request is
  // discarded (DiscardOldestPolicy below) -- readahead is best-effort.
  private static final int CAPACITY = 1024;
  private final ThreadPoolExecutor pool;

  // Lazily-created singleton; all access is guarded by the class lock
  // taken in getInstance().
  private static ReadaheadPool instance;

  /**
   * Return the singleton instance for the current process.
   * Returns {@code null} when native I/O support is unavailable,
   * since readahead is issued through NativeIO.
   */
  public static ReadaheadPool getInstance() {
    synchronized (ReadaheadPool.class) {
      if (instance == null && NativeIO.isAvailable()) {
        instance = new ReadaheadPool();
      }
      return instance;
    }
  }

  private ReadaheadPool() {
    pool = new ThreadPoolExecutor(POOL_SIZE, MAX_POOL_SIZE, 3L, TimeUnit.SECONDS,
        new ArrayBlockingQueue<Runnable>(CAPACITY));
    // Dropping the oldest request under load is safe: a lost readahead only
    // costs performance, never correctness.
    pool.setRejectedExecutionHandler(new ThreadPoolExecutor.DiscardOldestPolicy());
    // Daemon threads so the pool never prevents JVM shutdown.
    pool.setThreadFactory(new ThreadFactoryBuilder()
      .setDaemon(true)
      .setNameFormat("Readahead Thread #%d")
      .build());
  }

  /**
   * Issue a request to readahead on the given file descriptor.
   *
   * @param identifier a textual identifier that will be used in error
   * messages (e.g. the file name)
   * @param fd the file descriptor to read ahead
   * @param curPos the current offset at which reads are being issued
   * @param readaheadLength the configured length to read ahead
   * @param maxOffsetToRead the maximum offset that will be readahead
   *        (useful if, for example, only some segment of the file is
   *        requested by the user). Pass {@link Long#MAX_VALUE} to allow
   *        readahead to the end of the file.
   * @param lastReadahead the result returned by the previous invocation
   *        of this function on this file descriptor, or null if this is
   *        the first call
   * @return an object representing this outstanding request, or null
   *         if no readahead was performed
   */
  public ReadaheadRequest readaheadStream(
      String identifier,
      FileDescriptor fd,
      long curPos,
      long readaheadLength,
      long maxOffsetToRead,
      ReadaheadRequest lastReadahead) {
    Preconditions.checkArgument(curPos <= maxOffsetToRead,
        "Readahead position %s higher than maxOffsetToRead %s",
        curPos, maxOffsetToRead);

    // Readahead disabled by configuration.
    if (readaheadLength <= 0) {
      return null;
    }

    long lastOffset = Long.MIN_VALUE;

    if (lastReadahead != null) {
      lastOffset = lastReadahead.getOffset();
    }

    // trigger each readahead when we have reached the halfway mark
    // in the previous readahead. This gives the system time
    // to satisfy the readahead before we start reading the data.
    long nextOffset = lastOffset + readaheadLength / 2;
    if (curPos >= nextOffset) {
      // cancel any currently pending readahead, to avoid
      // piling things up in the queue. Each reader should have at most
      // one outstanding request in the queue.
      if (lastReadahead != null) {
        lastReadahead.cancel();
        lastReadahead = null;
      }

      // Clamp to maxOffsetToRead so we never read past the caller's window.
      long length = Math.min(readaheadLength,
          maxOffsetToRead - curPos);

      if (length <= 0) {
        // we've reached the end of the stream
        return null;
      }

      return submitReadahead(identifier, fd, curPos, length);
    } else {
      // Still inside the window covered by the previous request.
      return lastReadahead;
    }
  }

  /**
   * Submit a request to readahead on the given file descriptor.
   * @param identifier a textual identifier used in error messages, etc.
   * @param fd the file descriptor to readahead
   * @param off the offset at which to start the readahead
   * @param len the number of bytes to read
   * @return an object representing this pending request
   */
  public ReadaheadRequest submitReadahead(
      String identifier, FileDescriptor fd, long off, long len) {
    ReadaheadRequestImpl req = new ReadaheadRequestImpl(
        identifier, fd, off, len);
    pool.execute(req);
    if (LOG.isTraceEnabled()) {
      LOG.trace("submit readahead: " + req);
    }
    return req;
  }

  /**
   * An outstanding readahead request that has been submitted to
   * the pool. This request may be pending or may have been
   * completed.
   */
  public interface ReadaheadRequest {
    /**
     * Cancels the request for readahead. This should be used
     * if the reader no longer needs the requested data, <em>before</em>
     * closing the related file descriptor.
     *
     * It is safe to use even if the readahead request has already
     * been fulfilled.
     */
    public void cancel();

    /**
     * @return the requested offset
     */
    public long getOffset();

    /**
     * @return the requested length
     */
    public long getLength();
  }

  private static class ReadaheadRequestImpl implements Runnable, ReadaheadRequest {
    private final String identifier;
    private final FileDescriptor fd;
    private final long off, len;
    // volatile: set by the reader thread, read by the pool thread.
    private volatile boolean canceled = false;

    private ReadaheadRequestImpl(String identifier, FileDescriptor fd, long off, long len) {
      this.identifier = identifier;
      this.fd = fd;
      this.off = off;
      this.len = len;
    }

    public void run() {
      if (canceled) return;
      // There's a very narrow race here that the file will close right at
      // this instant. But if that happens, we'll likely receive an EBADF
      // error below, and see that it's canceled, ignoring the error.
      // It's also possible that we'll end up requesting readahead on some
      // other FD, which may be wasted work, but won't cause a problem.
      try {
        NativeIO.posixFadviseIfPossible(fd, off, len,
            NativeIO.POSIX_FADV_WILLNEED);
      } catch (IOException ioe) {
        if (canceled) {
          // no big deal - the reader canceled the request and closed
          // the file.
          return;
        }
        LOG.warn("Failed readahead on " + identifier,
            ioe);
      }
    }

    @Override
    public void cancel() {
      canceled = true;
      // We could attempt to remove it from the work queue, but that would
      // add complexity. In practice, the work queues remain very short,
      // so removing canceled requests has no gain.
    }

    @Override
    public long getOffset() {
      return off;
    }

    @Override
    public long getLength() {
      return len;
    }

    @Override
    public String toString() {
      return "ReadaheadRequestImpl [identifier='" + identifier + "', fd=" + fd
          + ", off=" + off + ", len=" + len + "]";
    }
  }
}
/*
 *
 */
package net.community.chest.awt.stroke;

import java.awt.BasicStroke;
import java.awt.Stroke;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;

import net.community.chest.BaseTypedValuesContainer;
import net.community.chest.convert.FloatValueStringConstructor;
import net.community.chest.dom.DOMUtils;
import net.community.chest.dom.transform.XmlTranslator;
import net.community.chest.lang.ExceptionUtil;
import net.community.chest.lang.StringUtil;
import net.community.chest.lang.math.NumberTables;
import net.community.chest.util.map.MapEntryImpl;

import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * <P>Copyright 2008 as per GPLv2</P>
 *
 * Translates {@link BasicStroke} instances to/from XML elements: each stroke
 * attribute (line width, end cap, line join, miter limit, dash phase, dash
 * array) maps to one XML attribute on the stroke element.
 *
 * @param <S> The type of {@link BasicStroke} being instantiated
 * @author Lyor G.
 * @since Feb 2, 2009 12:19:41 PM
 */
public abstract class BasicStrokeValueTranslator<S extends BasicStroke>
        extends BaseTypedValuesContainer<S>
        implements XmlTranslator<S> {
    public BasicStrokeValueTranslator (Class<S> objClass) throws IllegalArgumentException
    {
        super(objClass);
    }

    /**
     * Encodes a finite float value as a (string, value) pair.
     * @param f the value to encode - must be finite (not NaN/infinite)
     * @param aName attribute name - used only in error messages
     * @return pair of the string form and the boxed value
     * @throws NumberFormatException if the value is NaN/infinite or could
     * not be converted to a non-empty string
     */
    protected static final Map.Entry<String,Float> getFloatValue (final float f, final String aName)
    {
        if (Float.isInfinite(f) || Float.isNaN(f))
            throw new NumberFormatException("getFloatValue(" + aName + ") bad value: " + f);

        final Float    v=Float.valueOf(f);
        try
        {
            final String    vs=FloatValueStringConstructor.DEFAULT.convertInstance(v);
            if ((null == vs) || (vs.length() <= 0))
                throw new NumberFormatException("getFloatValue(" + aName + ") no string for: " + f);

            return new MapEntryImpl<String,Float>(vs, v);
        }
        catch(Exception e)    // should not happen
        {
            throw ExceptionUtil.toRuntimeException(e);
        }
    }

    /**
     * Parses a string into a finite {@link Float}.
     * @param vs the string form - may be null/empty, in which case null is
     * returned
     * @return the parsed value, or null if no input
     * @throws NumberFormatException if the parsed value is NaN/infinite
     */
    protected static final Float xlateFloatValue (final String vs)
    {
        if ((null == vs) || (vs.length() <= 0))
            return null;

        final Float    vf;
        try
        {
            if (null == (vf=FloatValueStringConstructor.DEFAULT.newInstance(vs)))    // should not happen
                throw new IllegalStateException("xlateFloatValue(" + vs + ") no value converted");
        }
        catch(Exception e)
        {
            throw ExceptionUtil.toRuntimeException(e);
        }

        final float    f=vf.floatValue();
        if (Float.isInfinite(f) || Float.isNaN(f))
            throw new NumberFormatException("xlateFloatValue(" + vs + ") bad value");

        return vf;
    }

    /**
     * Reads a float-valued XML attribute.
     * @param elem element to read from - null returns null
     * @param aName attribute name - must be non-empty
     * @return (string, value) pair, or null if the attribute is absent/empty
     */
    protected static final Map.Entry<String,Float> getFloatValue (final Element elem, final String aName)
    {
        if (null == elem)
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("getFloatValue(" + DOMUtils.toString(elem) + ") no attribute specified");

        final String    vs=elem.getAttribute(aName);
        final Float        vf=xlateFloatValue(vs);
        if (null == vf)
            return null;    // OK if no attribute

        return new MapEntryImpl<String,Float>(vs, vf);
    }

    /**
     * Same as {@link #getFloatValue(Element, String)} but returns only the
     * parsed value, failing if a pair exists with no value (should not happen).
     */
    protected static final Float getPureFloatValue (final Element elem, final String aName)
    {
        final Map.Entry<String,Float>    vp=getFloatValue(elem, aName);
        if (null == vp)
            return null;

        final Float    vf=vp.getValue();
        if (null == vf)
            throw new IllegalStateException("getPureFloatValue(" + DOMUtils.toString(elem) + ")[" + aName + "] no value returned");

        return vf;
    }

    // XML attribute names used for each BasicStroke property.
    public static final String    LINE_WIDTH_ATTR="lineWidth",
                                ENDCAP_ATTR="endCap",
                                LINE_JOIN_ATTR="lineJoin",
                                MITER_LIMIT_ATTR="miterLimit",
                                DASH_PHASE_ATTR="dashPhase",
                                DASH_ARRAY_ATTR="dashArray";

    /**
     * Writes the stroke's line width as the named XML attribute.
     * @return the written string value, or null if stroke/element is null
     */
    public static final String addLineWidth (BasicStroke s, Element elem, String aName)
    {
        if ((null == s) || (null == elem))
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("addLineWidth(" + s + ") no attribute name");

        final float                        fv=s.getLineWidth();
        final Map.Entry<String,Float>    vp=getFloatValue(fv, aName);
        final String                    vs=(null == vp) ? null : vp.getKey();
        if ((null == vs) || (vs.length() <= 0))
            throw new IllegalStateException("addLineWidth(" + aName + ")[" + fv + "] no encoding result");

        elem.setAttribute(aName, vs);
        return vs;
    }

    public static final Float getLineWidth (Element elem)
    {
        return getPureFloatValue(elem, LINE_WIDTH_ATTR);
    }

    public static final String addLineWidth (BasicStroke s, Element elem)
    {
        return addLineWidth(s, elem, LINE_WIDTH_ATTR);
    }

    /**
     * Writes the stroke's miter limit as the named XML attribute.
     * @return the written string value, or null if stroke/element is null
     */
    public static final String addMiterLimit (BasicStroke s, Element elem, String aName)
    {
        if ((null == s) || (null == elem))
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("addMiterLimit(" + s + ") no attribute name");

        final float                        fv=s.getMiterLimit();
        final Map.Entry<String,Float>    vp=getFloatValue(fv, aName);
        final String                    vs=(null == vp) ? null : vp.getKey();
        if ((null == vs) || (vs.length() <= 0))
            throw new IllegalStateException("addMiterLimit(" + aName + ")[" + fv + "] no encoding result");

        elem.setAttribute(aName, vs);
        return vs;
    }

    public static final String addMiterLimit (BasicStroke s, Element elem)
    {
        return addMiterLimit(s, elem, MITER_LIMIT_ATTR);
    }

    public static final Float getMiterLimit (Element elem)
    {
        return getPureFloatValue(elem, MITER_LIMIT_ATTR);
    }

    /**
     * Writes the stroke's dash phase as the named XML attribute.
     * @return the written string value, or null if stroke/element is null
     */
    public static final String addDashPhase (BasicStroke s, Element elem, String aName)
    {
        if ((null == s) || (null == elem))
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("addDashPhase(" + s + ") no attribute name");

        final float                        fv=s.getDashPhase();
        final Map.Entry<String,Float>    vp=getFloatValue(fv, aName);
        final String                    vs=(null == vp) ? null : vp.getKey();
        if ((null == vs) || (vs.length() <= 0))
            throw new IllegalStateException("addDashPhase(" + aName + ")[" + fv + "] no encoding result");

        elem.setAttribute(aName, vs);
        return vs;
    }

    public static final String addDashPhase (BasicStroke s, Element elem)
    {
        return addDashPhase(s, elem, DASH_PHASE_ATTR);
    }

    public static final Float getDashPhase (Element elem)
    {
        return getPureFloatValue(elem, DASH_PHASE_ATTR);
    }

    /**
     * Writes the stroke's end-cap decoration as the named XML attribute.
     * @return the written decoration, or null if stroke/element is null
     * @throws NoSuchElementException if the stroke's end-cap code is unknown
     */
    public static final BasicStrokeDecoration addEndCap (BasicStroke s, Element elem, String aName)
    {
        if ((null == s) || (null == elem))
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("addEndCap(" + s + ") no attribute name");

        final int                        v=s.getEndCap();
        final BasicStrokeDecoration        d=BasicStrokeDecoration.fromDecoration(v);
        if (null == d)
            throw new NoSuchElementException("addEndCap(" + s + ")[" + aName + "] unknown value: " + v);

        elem.setAttribute(aName, d.toString());
        return d;
    }

    public static final BasicStrokeDecoration addEndCap (BasicStroke s, Element elem)
    {
        return addEndCap(s, elem, ENDCAP_ATTR);
    }

    /**
     * Reads the end-cap decoration from the named XML attribute.
     * @return the decoration, or null if element is null or attribute absent
     * @throws NoSuchElementException if the attribute value is unrecognized
     */
    public static final BasicStrokeDecoration getEndCap (Element elem, String aName)
    {
        if (null == elem)
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("getEndCap(" + DOMUtils.toString(elem) + ") no attribute name");

        final String    vs=elem.getAttribute(aName);
        if ((null == vs) || (vs.length() <= 0))
            return null;    // OK if no value

        final BasicStrokeDecoration    d=BasicStrokeDecoration.fromString(vs);
        if (null == d)
            throw new NoSuchElementException("getEndCap(" + DOMUtils.toString(elem) + ")[" + aName + "] unknown value: " + vs);

        return d;
    }

    public static final BasicStrokeDecoration getEndCap (Element elem)
    {
        return getEndCap(elem, ENDCAP_ATTR);
    }

    /**
     * Writes the stroke's line-join style as the named XML attribute.
     * @return the written join, or null if stroke/element is null
     * @throws NoSuchElementException if the stroke's join code is unknown
     */
    public static final BasicStrokeJoin addLineJoin (BasicStroke s, Element elem, String aName)
    {
        if ((null == s) || (null == elem))
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("addLineJoin(" + s + ") no attribute name");

        final int                v=s.getLineJoin();
        final BasicStrokeJoin    j=BasicStrokeJoin.fromJoin(v);
        if (null == j)
            throw new NoSuchElementException("addLineJoin(" + s + ")[" + aName + "] unknown value: " + v);

        elem.setAttribute(aName, j.toString());
        return j;
    }

    public static final BasicStrokeJoin addLineJoin (BasicStroke s, Element elem)
    {
        return addLineJoin(s, elem, LINE_JOIN_ATTR);
    }

    /**
     * Reads the line-join style from the named XML attribute.
     * @return the join, or null if element is null or attribute absent
     * @throws NoSuchElementException if the attribute value is unrecognized
     */
    public static final BasicStrokeJoin getLineJoin (Element elem, String aName)
    {
        if (null == elem)
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("getLineJoin(" + DOMUtils.toString(elem) + ") no attribute name");

        final String    vs=elem.getAttribute(aName);
        if ((null == vs) || (vs.length() <= 0))
            return null;    // OK if no value

        final BasicStrokeJoin    j=BasicStrokeJoin.fromString(vs);
        if (null == j)
            throw new NoSuchElementException("getLineJoin(" + DOMUtils.toString(elem) + ")[" + aName + "] unknown value: " + vs);

        return j;
    }

    public static final BasicStrokeJoin getLineJoin (Element elem)
    {
        return getLineJoin(elem, LINE_JOIN_ATTR);
    }

    /**
     * Writes the stroke's dash array as the named XML attribute, encoded as
     * a comma-separated list of float values.
     * @return the list of encoded values, or null if stroke/element is null
     * or the stroke has no dash array
     */
    public static final List<String> addDashArray (BasicStroke s, Element elem, String aName)
    {
        if ((null == s) || (null == elem))
            return null;
        if ((null == aName) || (aName.length() <= 0))
            // BUGFIX: message previously said "addEndCap" (copy/paste error)
            throw new IllegalArgumentException("addDashArray(" + s + ") no attribute name");

        final float[]    da=s.getDashArray();
        if ((null == da) || (da.length <= 0))
            return null;

        final List<String>        dl=new ArrayList<String>(da.length);
        final StringBuilder        sb=new StringBuilder(da.length * NumberTables.MAX_UNSIGNED_LONG_DIGITS_NUM);
        for (final float fv : da)
        {
            final Map.Entry<String,Float>    vp=getFloatValue(fv, aName);
            final String                    vs=(null == vp) ? null : vp.getKey();
            if ((null == vs) || (vs.length() <= 0))
                throw new IllegalStateException("addDashArray(" + aName + ")[" + fv + "] no encoding result");

            dl.add(vs);

            if (sb.length() > 0)
                sb.append(',');
            sb.append(vs);
        }

        elem.setAttribute(aName, sb.toString());
        return dl;
    }

    public static final List<String> addDashArray (BasicStroke s, Element elem)
    {
        return addDashArray(s, elem, DASH_ARRAY_ATTR);
    }

    /**
     * Reads the dash array from the named XML attribute (comma-separated
     * float values).
     * @return the parsed values, or null if element is null or no values
     */
    public static final List<Float> getDashArray (Element elem, String aName)
    {
        if (null == elem)
            return null;
        if ((null == aName) || (aName.length() <= 0))
            throw new IllegalArgumentException("getDashArray(" + DOMUtils.toString(elem) + ") no attribute name");

        final String                vs=elem.getAttribute(aName);
        final Collection<String>    vl=StringUtil.splitString(vs, ',');
        final int                    numValues=(null == vl) ? 0 : vl.size();
        if (numValues <= 0)
            return null;    // OK if no value

        final List<Float>    fl=new ArrayList<Float>(numValues);
        for (final String fs : vl)
        {
            final Float    vf=xlateFloatValue(fs);
            if (null == vf)    // unlikely, but OK
                continue;
            fl.add(vf);
        }

        return fl;
    }

    public static final List<Float> getDashArray (Element elem)
    {
        return getDashArray(elem, DASH_ARRAY_ATTR);
    }

    // Defaults matching BasicStroke's no-args constructor behavior, used
    // when the corresponding attribute is absent from the element.
    public static final float    DEFAULT_LINE_WIDTH=1.0f,
                                DEFAULT_MITER_LIMIT=10.0f,
                                DEFAULT_DASH_PHASE=0.0f;

    /**
     * Builds a {@link BasicStroke} from an XML element, substituting the
     * class defaults for any absent attribute.
     * @param elem the element - null returns null
     * @return the reconstructed stroke
     */
    public static final BasicStroke fromElement (Element elem)
    {
        if (null == elem)
            return null;

        final Float        lineWidth=getLineWidth(elem),
                        lw=(null == lineWidth) ? Float.valueOf(DEFAULT_LINE_WIDTH) : lineWidth,
                        miterJoin=getMiterLimit(elem),
                        mj=(null == miterJoin) ? Float.valueOf(DEFAULT_MITER_LIMIT) : miterJoin,
                        dashPhase=getDashPhase(elem),
                        dp=(null == dashPhase) ? Float.valueOf(DEFAULT_DASH_PHASE) : dashPhase;
        final BasicStrokeJoin            lineJoin=getLineJoin(elem),
                                        j=(null == lineJoin) ? BasicStrokeJoin.MITER : lineJoin;
        final BasicStrokeDecoration        lineCap=getEndCap(elem),
                                        d=(null == lineCap) ? BasicStrokeDecoration.SQUARE : lineCap;
        final List<Float>    dl=getDashArray(elem);
        final int            numDashes=(null == dl) ? 0 : dl.size();
        final float[]        da=(numDashes <= 0) ? null : new float[numDashes];
        for (int    fIndex=0; fIndex < numDashes; fIndex++)
        {
            final Float    fv=dl.get(fIndex);
            da[fIndex] = fv.floatValue();
        }

        return new BasicStroke(lw.floatValue(), d.getDecoration(), j.getJoin(), mj.floatValue(), da, dp.floatValue());
    }

    /*
     * @see net.community.chest.dom.transform.XmlTranslator#toXml(java.lang.Object, org.w3c.dom.Document, org.w3c.dom.Element)
     */
    @Override
    public Element toXml (S src, Document doc, Element elem) throws Exception
    {
        addLineWidth(src, elem);
        addEndCap(src, elem);
        addLineJoin(src, elem);
        addMiterLimit(src, elem);
        addDashPhase(src, elem);
        addDashArray(src, elem);
        return elem;
    }

    public static final String    STROKE_ELEM_NAME=Stroke.class.getSimpleName().toLowerCase();
    public String getRootElementName ()
    {
        return STROKE_ELEM_NAME;
    }

    /*
     * @see net.community.chest.dom.transform.XmlTranslator#toXml(java.lang.Object, org.w3c.dom.Document)
     */
    @Override
    public Element toXml (S src, Document doc) throws Exception
    {
        return (null == src) ? null : toXml(src, doc, doc.createElement(getRootElementName()));
    }

    public static final BasicStrokeValueTranslator<BasicStroke>    DEFAULT=
        new BasicStrokeValueTranslator<BasicStroke>(BasicStroke.class) {
            /*
             * @see net.community.chest.dom.transform.XmlValueInstantiator#fromXml(org.w3c.dom.Element)
             */
            @Override
            public BasicStroke fromXml (Element elem) throws Exception
            {
                return fromElement(elem);
            }
        };
}
package cgeo.geocaching.connector.trackable; import cgeo.geocaching.CgeoApplication; import cgeo.geocaching.R; import cgeo.geocaching.enumerations.StatusCode; import cgeo.geocaching.location.Geopoint; import cgeo.geocaching.log.AbstractLoggingActivity; import cgeo.geocaching.log.LogTypeTrackable; import cgeo.geocaching.log.TrackableLog; import cgeo.geocaching.models.Geocache; import cgeo.geocaching.models.Trackable; import cgeo.geocaching.network.Network; import cgeo.geocaching.network.Parameters; import cgeo.geocaching.settings.Settings; import cgeo.geocaching.storage.DataStore; import cgeo.geocaching.utils.Log; import cgeo.geocaching.utils.Version; import android.content.Context; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import java.io.InputStream; import java.net.URLEncoder; import java.util.Calendar; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.regex.Pattern; import io.reactivex.Observable; import io.reactivex.functions.Function; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.xml.sax.InputSource; public class GeokretyConnector extends AbstractTrackableConnector { /* 1) tracking code: is generated from the alphabet: "a b c d e f g h i j k l m n p q r s t u v w x y z 1 2 3 4 5 6 7 8 9" (no O and 0) sanity-check for tracking code: if generated code look like reference number (ie GKxxxx): preg_match("#^gk[0-9a-f]{4}$#i", $tc) 2) reference number (GKxxxx): it is just a subsequent number in the database ($id) converted to hex: $gk=sprintf("GK%04X",$id); $id=hexdec(substr($gk, 2, 4)); */ private static final Pattern PATTERN_GK_CODE = Pattern.compile("GK[0-9A-F]{4,}"); private static final Pattern PATTERN_GK_CODE_EXTENDED = Pattern.compile("(GK[0-9A-F]{4,})|([1-9A-NP-Z]{6})"); private static final String HOST = 
"geokrety.org"; public static final String URL = "https://" + HOST; private static final String URLPROXY = "https://api.geokretymap.org"; @Override @NonNull public String getHost() { return HOST; } @Override @NonNull public String getHostUrl() { return URL; } @Override @Nullable public String getProxyUrl() { return URLPROXY; } @Override public int getPreferenceActivity() { return R.string.preference_screen_geokrety; } @Override public boolean canHandleTrackable(@Nullable final String geocode) { return geocode != null && PATTERN_GK_CODE.matcher(geocode).matches(); } @Override public boolean canHandleTrackable(@Nullable final String geocode, @Nullable final TrackableBrand brand) { if (brand != TrackableBrand.GEOKRETY) { return canHandleTrackable(geocode); } return geocode != null && PATTERN_GK_CODE_EXTENDED.matcher(geocode).matches(); } @Override @NonNull public String getServiceTitle() { return CgeoApplication.getInstance().getString(R.string.init_geokrety); } @Override @NonNull public String getUrl(@NonNull final Trackable trackable) { return URL + "/konkret.php?id=" + getId(trackable.getGeocode()); } @Override @Nullable public Trackable searchTrackable(final String geocode, final String guid, final String id) { return searchTrackable(geocode); } private static String getUrlCache() { return Settings.isGeokretyCacheActive() ? URLPROXY : URL; } @Nullable public static Trackable searchTrackable(final String geocode) { final Integer gkid; if (StringUtils.startsWithIgnoreCase(geocode, "GK")) { gkid = getId(geocode); } else { // This probably a Tracking Code Log.d("GeokretyConnector.searchTrackable: geocode=" + geocode); final String geocodeFound = getGeocodeFromTrackingCode(geocode); if (geocodeFound == null) { Log.d("GeokretyConnector.searchTrackable: Unable to retrieve trackable by TrackingCode"); return null; } gkid = getId(geocodeFound); } Log.d("GeokretyConnector.searchTrackable: gkid=" + gkid); try { final String urlDetails = Settings.isGeokretyCacheActive() ? 
// NOTE(review): this span begins mid-statement — the ternary condition and the
// enclosing searchTrackable(...) signature are above this view. The trailing
// branch selects the proxy "details" endpoint vs. the plain export endpoint.
                URLPROXY + "/export-details.php" : URL + "/export2.php";
            // Fetch trackable details by GeoKrety numeric id.
            final InputStream response = Network.getResponseStream(Network.getRequest(urlDetails + "?gkid=" + gkid));
            if (response == null) {
                Log.d("GeokretyConnector.searchTrackable: No data from server");
                return null;
            }
            try {
                final InputSource is = new InputSource(response);
                final List<Trackable> trackables = GeokretyParser.parse(is);
                if (CollectionUtils.isNotEmpty(trackables)) {
                    // Only one result is expected for an id lookup; cache it locally.
                    final Trackable trackable = trackables.get(0);
                    DataStore.saveTrackable(trackable);
                    return trackable;
                }
            } finally {
                IOUtils.closeQuietly(response);
            }
        } catch (final Exception e) {
            Log.w("GeokretyConnector.searchTrackable", e);
        }
        // TODO maybe a fallback to no proxy would be cool?
        return null;
    }

    /**
     * Search all trackables dropped at the given waypoint (cache geocode).
     *
     * @param geocode the cache geocode to query
     * @return parsed trackables; an empty list on network or parse failure (never null)
     */
    @Override
    @NonNull
    public List<Trackable> searchTrackables(final String geocode) {
        Log.d("GeokretyConnector.searchTrackables: wpt=" + geocode);
        try {
            final InputStream response = Network.getResponseStream(Network.getRequest(getUrlCache() + "/export2.php?wpt=" + URLEncoder.encode(geocode, "utf-8")));
            if (response == null) {
                Log.d("GeokretyConnector.searchTrackable: No data from server");
                return Collections.emptyList();
            }
            try {
                final InputSource is = new InputSource(response);
                return GeokretyParser.parse(is);
            } finally {
                IOUtils.closeQuietly(response);
            }
        } catch (final Exception e) {
            Log.w("GeokretyConnector.searchTrackables", e);
            return Collections.emptyList();
        }
    }

    @Override
    @NonNull
    public List<Trackable> loadInventory() {
        // 0 means "the authenticated user's own inventory" — see the overload below.
        return loadInventory(0);
    }

    /**
     * Load a GeoKrety inventory.
     *
     * @param userid user whose inventory to fetch; 0 fetches the authenticated
     *               user's own inventory (requires a stored secid and also
     *               returns the secret tracking codes)
     * @return parsed trackables; an empty list on failure or when no secid is
     *         stored for the "own inventory" case
     */
    @NonNull
    private static List<Trackable> loadInventory(final int userid) {
        Log.d("GeokretyConnector.loadInventory: userid=" + userid);
        try {
            final Parameters params = new Parameters("inventory", "1");
            if (userid > 0) {
                // retrieve someone inventory
                params.put("userid", String.valueOf(userid));
            } else {
                if (StringUtils.isBlank(Settings.getGeokretySecId())) {
                    return Collections.emptyList();
                }
                // Retrieve inventory, with tracking codes
                params.put("secid", Settings.getGeokretySecId());
            }
            final InputStream response = Network.getResponseStream(Network.getRequest(URL + "/export2.php", params));
            if (response == null) {
                Log.d("GeokretyConnector.loadInventory: No data from server");
                return Collections.emptyList();
            }
            try {
                final InputSource is = new InputSource(response);
                return GeokretyParser.parse(is);
            } finally {
                IOUtils.closeQuietly(response);
            }
        } catch (final Exception e) {
            Log.w("GeokretyConnector.loadInventory", e);
            return Collections.emptyList();
        }
    }

    @Override
    @NonNull
    public Observable<TrackableLog> trackableLogInventory() {
        // Wrap the (blocking) inventory load in an Rx stream of log entries.
        return Observable.fromIterable(loadInventory()).map(new TrackableLogFunction());
    }

    /** Rx mapper turning a Trackable into the corresponding TrackableLog entry. */
    private static class TrackableLogFunction implements Function<Trackable, TrackableLog> {
        @Override
        public TrackableLog apply(final Trackable trackable) {
            return new TrackableLog(
                    trackable.getGeocode(),
                    trackable.getTrackingcode(),
                    trackable.getName(),
                    getId(trackable.getGeocode()),
                    0,
                    trackable.getBrand()
            );
        }
    }

    /**
     * Extract the numeric GeoKrety id from a geocode: the part after the
     * two-character prefix is parsed as hexadecimal (see {@code geocode(int)}).
     *
     * @return the numeric id, or -1 when the suffix is not valid hex
     */
    public static int getId(final String geocode) {
        try {
            final String hex = geocode.substring(2);
            return Integer.parseInt(hex, 16);
        } catch (final NumberFormatException e) {
            Log.e("Trackable.getId", e);
        }
        return -1;
    }

    @Override
    @Nullable
    public String getTrackableCodeFromUrl(@NonNull final String url) {
        // http://geokrety.org/konkret.php?id=38545
        final String gkId = StringUtils.substringAfterLast(url, "konkret.php?id=");
        if (StringUtils.isNumeric(gkId)) {
            return geocode(Integer.parseInt(gkId));
        }
        // http://geokretymap.org/38545
        final String gkmapId = StringUtils.substringAfterLast(url, "geokretymap.org/");
        if (StringUtils.isNumeric(gkmapId)) {
            return geocode(Integer.parseInt(gkmapId));
        }
        return null;
    }

    @Override
    @Nullable
    public String getTrackableTrackingCodeFromUrl(@NonNull final String url) {
        // http://geokrety.org/m/qr.php?nr=<TRACKING_CODE>
        final String gkTrackingCode = StringUtils.substringAfterLast(url, "qr.php?nr=");
        if (StringUtils.isAlphanumeric(gkTrackingCode)) {
            return gkTrackingCode;
        }
        return null;
    }

    /**
     * Lookup Trackable Geocode from Tracking Code.
     *
     * @param trackingCode
     *            the Trackable Tracking Code to lookup
     * @return
     *            the Trackable Geocode, or null when unknown or unreachable
     */
    @Nullable
    private static String getGeocodeFromTrackingCode(final String trackingCode) {
        final Parameters params = new Parameters("nr", trackingCode);
        final String response = Network.getResponseData(Network.getRequest(URLPROXY + "/nr2id.php", params));
        // An empty response means "not found"
        if (response == null || StringUtils.equals(response, "0")) {
            return null;
        }
        return geocode(Integer.parseInt(response));
    }

    @Override
    @NonNull
    public TrackableBrand getBrand() {
        return TrackableBrand.GEOKRETY;
    }

    @Override
    public boolean isGenericLoggable() {
        return true;
    }

    @Override
    public boolean isActive() {
        return Settings.isGeokretyConnectorActive();
    }

    @Override
    public boolean isRegistered() {
        // Registered implies the connector is also enabled.
        return Settings.isRegisteredForGeokretyLogging() && isActive();
    }

    @Override
    public boolean recommendLogWithGeocode() {
        return true;
    }

    @Override
    public AbstractTrackableLoggingManager getTrackableLoggingManager(final AbstractLoggingActivity activity) {
        return new GeokretyLoggingManager(activity);
    }

    /**
     * Get geocode from GeoKrety id
     *
     */
    public static String geocode(final int id) {
        // Inverse of getId(): "GK" + zero-padded uppercase hex of the numeric id.
        return String.format("GK%04X", id);
    }

    @Override
    public boolean isLoggable() {
        return true;
    }

    /**
     * Post a log for a trackable to geokrety.org.
     *
     * @param context      Android context used for app name/version strings
     * @param cache        optional cache to attach coordinates/waypoint to the log, may be null
     * @param trackableLog the log entry (must be GEOKRETY brand with a real action)
     * @param date         when the log happened
     * @param log          free-text log message
     * @return status code plus any server-side error messages (empty list on success)
     */
    public static ImmutablePair<StatusCode, List<String>> postLogTrackable(final Context context, final Geocache cache, final TrackableLog trackableLog, final Calendar date, final String log) {
        // See doc: http://geokrety.org/api.php
        Log.d("GeokretyConnector.postLogTrackable: nr=" + trackableLog.trackCode);
        if (trackableLog.brand != TrackableBrand.GEOKRETY) {
            Log.d("GeokretyConnector.postLogTrackable: received invalid brand");
            return new ImmutablePair<>(StatusCode.LOG_POST_ERROR_GK, Collections.<String> emptyList());
        }
        if (trackableLog.action == LogTypeTrackable.DO_NOTHING) {
            Log.d("GeokretyConnector.postLogTrackable: received invalid logtype");
            return new ImmutablePair<>(StatusCode.LOG_POST_ERROR_GK, Collections.<String> emptyList());
        }
        try {
            // SecId is mandatory when using API, anonymous log are only possible via website
            final String secId = Settings.getGeokretySecId();
            if (StringUtils.isEmpty(secId)) {
                Log.d("GeokretyConnector.postLogTrackable: not authenticated");
                return new ImmutablePair<>(StatusCode.NO_LOGIN_INFO_STORED, Collections.<String> emptyList());
            }
            // Construct Post Parameters
            final Parameters params = new Parameters(
                    "secid", secId,
                    "gzip", "0",
                    "nr", trackableLog.trackCode,
                    "formname", "ruchy",
                    "logtype", String.valueOf(trackableLog.action.gkid),
                    "data", String.format(Locale.ENGLISH, "%tY-%tm-%td", date, date, date), // YYYY-MM-DD
                    "godzina", String.format("%tH", date), // HH
                    "minuta", String.format("%tM", date), // MM
                    "comment", log,
                    "app", context.getString(R.string.app_name),
                    "app_ver", Version.getVersionName(context),
                    "mobile_lang", Settings.getApplicationLocale().toString() + ".UTF-8"
            );
            // See doc: http://geokrety.org/help.php#acceptableformats
            if (cache != null) {
                final Geopoint coords = cache.getCoords();
                if (coords != null) {
                    params.add("latlon", coords.toString());
                }
                final String geocode = cache.getGeocode();
                if (StringUtils.isNotEmpty(geocode)) {
                    params.add("wpt", geocode);
                }
            }
            final String page = Network.getResponseData(Network.postRequest(URL + "/ruchy.php", params));
            if (page == null) {
                Log.d("GeokretyConnector.postLogTrackable: No data from server");
                return new ImmutablePair<>(StatusCode.CONNECTION_FAILED_GK, Collections.<String> emptyList());
            }
            final ImmutablePair<Integer, List<String>> response = GeokretyParser.parseResponse(page);
            if (response == null) {
                Log.w("GeokretyConnector.postLogTrackable: Cannot parseResponse GeoKrety");
                return new ImmutablePair<>(StatusCode.LOG_POST_ERROR_GK, Collections.<String> emptyList());
            }
            final List<String> errors = response.getRight();
            if (CollectionUtils.isNotEmpty(errors)) {
                for (final String error: errors) {
                    Log.w("GeokretyConnector.postLogTrackable: " + error);
                }
                return new ImmutablePair<>(StatusCode.LOG_POST_ERROR_GK, errors);
            }
            Log.i("Geokrety Log successfully posted to trackable #" + trackableLog.trackCode);
            return new ImmutablePair<>(StatusCode.NO_ERROR, Collections.<String> emptyList());
        } catch (final RuntimeException e) {
            // NOTE(review): the log tag says "searchTrackable" but this is
            // postLogTrackable — looks like a copy-paste tag; confirm before changing.
            Log.w("GeokretyConnector.searchTrackable", e);
            return new ImmutablePair<>(StatusCode.LOG_POST_ERROR_GK, Collections.<String> emptyList());
        }
    }

    public static String getCreateAccountUrl() {
        return URL + "/adduser.php";
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.tribes.group; import org.apache.catalina.tribes.Channel; import org.apache.catalina.tribes.ChannelException; import org.apache.catalina.tribes.ChannelMessage; import org.apache.catalina.tribes.ChannelReceiver; import org.apache.catalina.tribes.ChannelSender; import org.apache.catalina.tribes.Member; import org.apache.catalina.tribes.MembershipService; import org.apache.catalina.tribes.MessageListener; import org.apache.catalina.tribes.UniqueId; import org.apache.catalina.tribes.membership.McastService; import org.apache.catalina.tribes.transport.ReplicationTransmitter; import org.apache.catalina.tribes.transport.SenderState; import org.apache.catalina.tribes.transport.nio.NioReceiver; import org.apache.catalina.tribes.util.Arrays; import org.apache.catalina.tribes.util.Logs; /** * The channel coordinator object coordinates the membership service, * the sender and the receiver. * This is the last interceptor in the chain. 
*/ public class ChannelCoordinator extends ChannelInterceptorBase implements MessageListener { private ChannelReceiver clusterReceiver; private ChannelSender clusterSender; private MembershipService membershipService; private int startLevel = 0; public ChannelCoordinator() { this(new NioReceiver(), new ReplicationTransmitter(), new McastService()); } public ChannelCoordinator(ChannelReceiver receiver, ChannelSender sender, MembershipService service) { this.optionFlag = Channel.SEND_OPTIONS_BYTE_MESSAGE | Channel.SEND_OPTIONS_USE_ACK | Channel.SEND_OPTIONS_SYNCHRONIZED_ACK; this.setClusterReceiver(receiver); this.setClusterSender(sender); this.setMembershipService(service); } /** * Send a message to one or more members in the cluster * @param destination Member[] - the destinations, null or zero length means all * @param msg ClusterMessage - the message to send * @param payload TBA */ @Override public void sendMessage(Member[] destination, ChannelMessage msg, InterceptorPayload payload) throws ChannelException { if ( destination == null ) destination = membershipService.getMembers(); if ((msg.getOptions()&Channel.SEND_OPTIONS_MULTICAST) == Channel.SEND_OPTIONS_MULTICAST) { membershipService.broadcast(msg); } else { clusterSender.sendMessage(msg,destination); } if ( Logs.MESSAGES.isTraceEnabled() ) { Logs.MESSAGES.trace("ChannelCoordinator - Sent msg:" + new UniqueId(msg.getUniqueId()) + " at " + new java.sql.Timestamp(System.currentTimeMillis()) + " to " + Arrays.toNameString(destination)); } } /** * Starts up the channel. 
This can be called multiple times for individual services to start * The svc parameter can be the logical or value of any constants * @param svc int value of <BR> * DEFAULT - will start all services <BR> * MBR_RX_SEQ - starts the membership receiver <BR> * MBR_TX_SEQ - starts the membership broadcaster <BR> * SND_TX_SEQ - starts the replication transmitter<BR> * SND_RX_SEQ - starts the replication receiver<BR> * @throws ChannelException if a startup error occurs or the service is already started. */ @Override public void start(int svc) throws ChannelException { this.internalStart(svc); } /** * Shuts down the channel. This can be called multiple times for individual services to shutdown * The svc parameter can be the logical or value of any constants * @param svc int value of <BR> * DEFAULT - will shutdown all services <BR> * MBR_RX_SEQ - stops the membership receiver <BR> * MBR_TX_SEQ - stops the membership broadcaster <BR> * SND_TX_SEQ - stops the replication transmitter<BR> * SND_RX_SEQ - stops the replication receiver<BR> * @throws ChannelException if a startup error occurs or the service is already started. */ @Override public void stop(int svc) throws ChannelException { this.internalStop(svc); } /** * Starts up the channel. This can be called multiple times for individual services to start * The svc parameter can be the logical or value of any constants * @param svc int value of <BR> * DEFAULT - will start all services <BR> * MBR_RX_SEQ - starts the membership receiver <BR> * MBR_TX_SEQ - starts the membership broadcaster <BR> * SND_TX_SEQ - starts the replication transmitter<BR> * SND_RX_SEQ - starts the replication receiver<BR> * @throws ChannelException if a startup error occurs or the service is already started. 
*/ protected synchronized void internalStart(int svc) throws ChannelException { try { boolean valid = false; //make sure we don't pass down any flags that are unrelated to the bottom layer svc = svc & Channel.DEFAULT; if (startLevel == Channel.DEFAULT) return; //we have already started up all components if (svc == 0 ) return;//nothing to start if (svc == (svc & startLevel)) { throw new ChannelException("Channel already started for level:"+svc); } //must start the receiver first so that we can coordinate the port it //listens to with the local membership settings if ( Channel.SND_RX_SEQ==(svc & Channel.SND_RX_SEQ) ) { clusterReceiver.setMessageListener(this); clusterReceiver.start(); //synchronize, big time FIXME membershipService.setLocalMemberProperties(getClusterReceiver().getHost(), getClusterReceiver().getPort(), getClusterReceiver().getSecurePort(), getClusterReceiver().getUdpPort()); valid = true; } if ( Channel.SND_TX_SEQ==(svc & Channel.SND_TX_SEQ) ) { clusterSender.start(); valid = true; } if ( Channel.MBR_RX_SEQ==(svc & Channel.MBR_RX_SEQ) ) { membershipService.setMembershipListener(this); if (membershipService instanceof McastService) { ((McastService)membershipService).setMessageListener(this); } membershipService.start(MembershipService.MBR_RX); valid = true; } if ( Channel.MBR_TX_SEQ==(svc & Channel.MBR_TX_SEQ) ) { membershipService.start(MembershipService.MBR_TX); valid = true; } if (!valid) { throw new IllegalArgumentException("Invalid start level, valid levels are:" + "SND_RX_SEQ,SND_TX_SEQ,MBR_TX_SEQ,MBR_RX_SEQ"); } startLevel = (startLevel | svc); }catch ( ChannelException cx ) { throw cx; }catch ( Exception x ) { throw new ChannelException(x); } } /** * Shuts down the channel. 
This can be called multiple times for individual services to shutdown * The svc parameter can be the logical or value of any constants * @param svc int value of <BR> * DEFAULT - will shutdown all services <BR> * MBR_RX_SEQ - starts the membership receiver <BR> * MBR_TX_SEQ - starts the membership broadcaster <BR> * SND_TX_SEQ - starts the replication transmitter<BR> * SND_RX_SEQ - starts the replication receiver<BR> * @throws ChannelException if a startup error occurs or the service is already started. */ protected synchronized void internalStop(int svc) throws ChannelException { try { //make sure we don't pass down any flags that are unrelated to the bottom layer svc = svc & Channel.DEFAULT; if (startLevel == 0) return; //we have already stopped up all components if (svc == 0 ) return;//nothing to stop boolean valid = false; if ( Channel.SND_RX_SEQ==(svc & Channel.SND_RX_SEQ) ) { clusterReceiver.stop(); clusterReceiver.setMessageListener(null); valid = true; } if ( Channel.SND_TX_SEQ==(svc & Channel.SND_TX_SEQ) ) { clusterSender.stop(); valid = true; } if ( Channel.MBR_RX_SEQ==(svc & Channel.MBR_RX_SEQ) ) { membershipService.stop(MembershipService.MBR_RX); membershipService.setMembershipListener(null); valid = true; } if ( Channel.MBR_TX_SEQ==(svc & Channel.MBR_TX_SEQ) ) { valid = true; membershipService.stop(MembershipService.MBR_TX); } if ( !valid) { throw new IllegalArgumentException("Invalid start level, valid levels are:" + "SND_RX_SEQ,SND_TX_SEQ,MBR_TX_SEQ,MBR_RX_SEQ"); } startLevel = (startLevel & (~svc)); } catch (Exception x) { throw new ChannelException(x); } } @Override public void memberAdded(Member member){ SenderState.getSenderState(member); super.memberAdded(member); } @Override public void memberDisappeared(Member member){ SenderState.removeSenderState(member); super.memberDisappeared(member); } @Override public void messageReceived(ChannelMessage msg) { if ( Logs.MESSAGES.isTraceEnabled() ) { Logs.MESSAGES.trace("ChannelCoordinator - Received 
msg:" + new UniqueId(msg.getUniqueId()) + " at " + new java.sql.Timestamp(System.currentTimeMillis()) + " from " + msg.getAddress().getName()); } super.messageReceived(msg); } @Override public boolean accept(ChannelMessage msg) { return true; } public ChannelReceiver getClusterReceiver() { return clusterReceiver; } public ChannelSender getClusterSender() { return clusterSender; } public MembershipService getMembershipService() { return membershipService; } public void setClusterReceiver(ChannelReceiver clusterReceiver) { if ( clusterReceiver != null ) { this.clusterReceiver = clusterReceiver; this.clusterReceiver.setMessageListener(this); } else { if (this.clusterReceiver!=null ) this.clusterReceiver.setMessageListener(null); this.clusterReceiver = null; } } public void setClusterSender(ChannelSender clusterSender) { this.clusterSender = clusterSender; } public void setMembershipService(MembershipService membershipService) { this.membershipService = membershipService; this.membershipService.setMembershipListener(this); } @Override public void heartbeat() { if ( clusterSender!=null ) clusterSender.heartbeat(); super.heartbeat(); } /** * has members */ @Override public boolean hasMembers() { return this.getMembershipService().hasMembers(); } /** * Get all current cluster members * @return all members or empty array */ @Override public Member[] getMembers() { return this.getMembershipService().getMembers(); } /** * * @param mbr Member * @return Member */ @Override public Member getMember(Member mbr){ return this.getMembershipService().getMember(mbr); } /** * Return the member that represents this node. * * @return Member */ @Override public Member getLocalMember(boolean incAlive) { return this.getMembershipService().getLocalMember(incAlive); } }
// Generated by Haxe 3.4.0
// NOTE(review): machine-generated iterator for haxe.root.Array — do not
// hand-edit; regenerate from Array.hx instead. The __hx_* overrides below
// implement Haxe's dynamic field access by switching on the Java
// String.hashCode() of the field name: 105 = "i", 107029 = "len",
// 96865 = "arr", 3377907 = "next", 696759469 = "hasNext". hashCode() alone
// is not unique, hence the field.equals(...) re-check inside each case.
package _Array;
import haxe.root.*;

@SuppressWarnings(value={"rawtypes", "unchecked"})
public final class ArrayIterator<T> extends haxe.lang.HxObject
{
	// Allocation-only constructor used by the Haxe runtime; fields are
	// populated later through __hx_ctor__Array_ArrayIterator.
	public ArrayIterator(haxe.lang.EmptyObject empty)
	{
	}

	public ArrayIterator(haxe.root.Array<T> a)
	{
		//line 472 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		_Array.ArrayIterator.__hx_ctor__Array_ArrayIterator(((_Array.ArrayIterator<T>) (this) ), ((haxe.root.Array<T>) (a) ));
	}

	// Constructor body hoisted into a static helper (Haxe codegen pattern):
	// keeps the array, snapshots its length and starts the cursor at 0.
	public static <T_c> void __hx_ctor__Array_ArrayIterator(_Array.ArrayIterator<T_c> __hx_this, haxe.root.Array<T_c> a)
	{
		//line 474 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		__hx_this.arr = a;
		//line 475 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		__hx_this.len = a.length;
		//line 476 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		__hx_this.i = 0;
	}

	public haxe.root.Array<T> arr; // array being iterated
	public int len; // length snapshot taken at construction
	public int i; // next index to return

	// true while the cursor has not reached the snapshotted length
	public final boolean hasNext()
	{
		//line 479 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		return ( this.i < this.len );
	}

	// returns the element at the cursor and advances it
	public final T next()
	{
		//line 480 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		return this.arr.__get(this.i++);
	}

	// Dynamic write of a numeric field ("i" or "len"); unknown fields are
	// delegated to the superclass. All statements map to Array.hx line 466.
	@Override public double __hx_setField_f(java.lang.String field, double value, boolean handleProperties)
	{
		//line 466 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 105:
				{
					if (field.equals("i"))
					{
						__temp_executeDef1 = false;
						this.i = ((int) (value) );
						return value;
					}
					break;
				}
				case 107029:
				{
					if (field.equals("len"))
					{
						__temp_executeDef1 = false;
						this.len = ((int) (value) );
						return value;
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_setField_f(field, value, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}

	// Dynamic write of any field ("i", "arr", "len"); unknown fields are
	// delegated to the superclass. All statements map to Array.hx line 466.
	@Override public java.lang.Object __hx_setField(java.lang.String field, java.lang.Object value, boolean handleProperties)
	{
		//line 466 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 105:
				{
					if (field.equals("i"))
					{
						__temp_executeDef1 = false;
						this.i = ((int) (haxe.lang.Runtime.toInt(value)) );
						return value;
					}
					break;
				}
				case 96865:
				{
					if (field.equals("arr"))
					{
						__temp_executeDef1 = false;
						this.arr = ((haxe.root.Array<T>) (value) );
						return value;
					}
					break;
				}
				case 107029:
				{
					if (field.equals("len"))
					{
						__temp_executeDef1 = false;
						this.len = ((int) (haxe.lang.Runtime.toInt(value)) );
						return value;
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_setField(field, value, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}

	// Dynamic read of any field; methods are returned as bound closures.
	// All statements map to Array.hx line 466.
	@Override public java.lang.Object __hx_getField(java.lang.String field, boolean throwErrors, boolean isCheck, boolean handleProperties)
	{
		//line 466 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 3377907:
				{
					if (field.equals("next"))
					{
						__temp_executeDef1 = false;
						return ((haxe.lang.Function) (new haxe.lang.Closure(this, "next")) );
					}
					break;
				}
				case 96865:
				{
					if (field.equals("arr"))
					{
						__temp_executeDef1 = false;
						return this.arr;
					}
					break;
				}
				case 696759469:
				{
					if (field.equals("hasNext"))
					{
						__temp_executeDef1 = false;
						return ((haxe.lang.Function) (new haxe.lang.Closure(this, "hasNext")) );
					}
					break;
				}
				case 107029:
				{
					if (field.equals("len"))
					{
						__temp_executeDef1 = false;
						return this.len;
					}
					break;
				}
				case 105:
				{
					if (field.equals("i"))
					{
						__temp_executeDef1 = false;
						return this.i;
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_getField(field, throwErrors, isCheck, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}

	// Dynamic numeric read of "i"/"len"; unknown fields are delegated to the
	// superclass. All statements map to Array.hx line 466.
	@Override public double __hx_getField_f(java.lang.String field, boolean throwErrors, boolean handleProperties)
	{
		//line 466 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 105:
				{
					if (field.equals("i"))
					{
						__temp_executeDef1 = false;
						return ((double) (this.i) );
					}
					break;
				}
				case 107029:
				{
					if (field.equals("len"))
					{
						__temp_executeDef1 = false;
						return ((double) (this.len) );
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_getField_f(field, throwErrors, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}

	// Dynamic method invocation of "next"/"hasNext"; unknown names are
	// delegated to the superclass. All statements map to Array.hx line 466.
	@Override public java.lang.Object __hx_invokeField(java.lang.String field, haxe.root.Array dynargs)
	{
		//line 466 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 3377907:
				{
					if (field.equals("next"))
					{
						__temp_executeDef1 = false;
						return this.next();
					}
					break;
				}
				case 696759469:
				{
					if (field.equals("hasNext"))
					{
						__temp_executeDef1 = false;
						return this.hasNext();
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_invokeField(field, dynargs);
			}
			else
			{
				throw null;
			}
		}
	}

	// Enumerates this class's reflectable fields, then the superclass's.
	@Override public void __hx_getFields(haxe.root.Array<java.lang.String> baseArr)
	{
		//line 466 "/usr/local/lib/haxe/std/java/_std/Array.hx"
		baseArr.push("i");
		baseArr.push("len");
		baseArr.push("arr");
		super.__hx_getFields(baseArr);
	}
}
/* * Copyright (C) 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.appcompat.graphics.drawable; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.LOLLIPOP; import static androidx.core.content.res.TypedArrayUtils.obtainAttributes; import android.animation.ObjectAnimator; import android.animation.TimeInterpolator; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.drawable.Animatable; import android.graphics.drawable.AnimationDrawable; import android.graphics.drawable.Drawable; import android.os.Build; import android.util.AttributeSet; import android.util.Log; import android.util.StateSet; import android.util.Xml; import androidx.annotation.DrawableRes; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.resources.Compatibility; import androidx.appcompat.resources.R; import androidx.appcompat.widget.ResourceManagerInternal; import androidx.collection.LongSparseArray; import androidx.collection.SparseArrayCompat; import androidx.core.graphics.drawable.TintAwareDrawable; import androidx.vectordrawable.graphics.drawable.AnimatedVectorDrawableCompat; import androidx.vectordrawable.graphics.drawable.VectorDrawableCompat; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; import 
java.io.IOException;

/**
 * A {@link Drawable} providing animated transitions between states.
 *
 * <p>A port of {@link android.graphics.drawable.AnimatedStateListDrawable} compatible with older
 * versions of the platform.
 *
 * <p>This drawable can be defined in an XML file with the <code>
 * &lt;animated-selector></code> element. Each keyframe Drawable is defined in a
 * nested <code>&lt;item></code> element. Transitions are defined in a nested
 * <code>&lt;transition></code> element.
 *
 * <p>Notable exceptions not supported by this class:
 *
 * <ul>
 * <li><code>drawable</code>s defined as children of <code>&lt;item></code>s or
 * <code>&lt;transition></code>s (<b>except</b> vectors) ignore theme attributes prior to API level
 * 21</li>
 * <li>Animated vector transitions do not support reversing prior to API level 24</li>
 * </ul>
 *
 * {@link android.R.attr#state_focused}
 * {@link android.R.attr#state_window_focused}
 * {@link android.R.attr#state_enabled}
 * {@link android.R.attr#state_checkable}
 * {@link android.R.attr#state_checked}
 * {@link android.R.attr#state_selected}
 * {@link android.R.attr#state_activated}
 * {@link android.R.attr#state_active}
 * {@link android.R.attr#state_single}
 * {@link android.R.attr#state_first}
 * {@link android.R.attr#state_middle}
 * {@link android.R.attr#state_last}
 * {@link android.R.attr#state_pressed}
 */
@SuppressLint("RestrictedAPI") // Temporary until we have correct restriction scopes for 1.0
public class AnimatedStateListDrawableCompat extends StateListDrawable
        implements TintAwareDrawable {
    private static final String LOGTAG = AnimatedStateListDrawableCompat.class.getSimpleName();
    private static final String ELEMENT_TRANSITION = "transition";
    private static final String ELEMENT_ITEM = "item";
    // Error-message suffixes appended to the parser's position description.
    private static final String TRANSITION_MISSING_DRAWABLE_ERROR =
            ": <transition> tag requires a 'drawable' attribute or child tag defining a drawable";
    private static final String TRANSITION_MISSING_FROM_TO_ID =
            ": <transition> tag requires 'fromId' & 'toId' attributes";
    private static final String ITEM_MISSING_DRAWABLE_ERROR =
            ": <item> tag requires a 'drawable' attribute or child tag defining a drawable";
    // Shared constant state holding keyframes and the transition table.
    private AnimatedStateListState mState;
    /** The currently running transition, if any. */
    private Transition mTransition;
    /** Index to be set after the transition ends. */
    private int mTransitionToIndex = -1;
    /** Index away from which we are transitioning. */
    private int mTransitionFromIndex = -1;
    // Guards against double-mutation; presumably used by mutate() outside this view.
    private boolean mMutated;

    public AnimatedStateListDrawableCompat() {
        this(null, null);
    }

    AnimatedStateListDrawableCompat(@Nullable AnimatedStateListState state, @Nullable Resources res) {
        super(null);
        // Every animated state list drawable has its own constant state.
        final AnimatedStateListState newState = new AnimatedStateListState(state, this, res);
        setConstantState(newState);
        onStateChange(getState());
        jumpToCurrentState();
    }

    /**
     * Creates an AnimatedStateListDrawableCompat.
     *
     * @param context context to inflate against
     * @param resId the resource ID for AnimatedStateListDrawable object.
     * @param theme the theme to apply, may be null.
     * @return a new AnimatedStateListDrawableCompat or null if parsing error is found.
     */
    @Nullable
    public static AnimatedStateListDrawableCompat create(
            @NonNull Context context,
            @DrawableRes int resId,
            @Nullable Resources.Theme theme) {
        try {
            final Resources res = context.getResources();
            @SuppressLint("ResourceType")
            final XmlPullParser parser = res.getXml(resId);
            final AttributeSet attrs = Xml.asAttributeSet(parser);
            // Advance the parser to the document's first start tag.
            int type;
            while ((type = parser.next()) != XmlPullParser.START_TAG
                    && type != XmlPullParser.END_DOCUMENT) {
                // Empty loop
            }
            if (type != XmlPullParser.START_TAG) {
                throw new XmlPullParserException("No start tag found");
            }
            return createFromXmlInner(context, res, parser, attrs, theme);
        } catch (XmlPullParserException e) {
            Log.e(LOGTAG, "parser error", e);
        } catch (IOException e) {
            Log.e(LOGTAG, "parser error", e);
        }
        return null;
    }

    /**
     * Create a AnimatedStateListDrawableCompat from inside an XML document using an optional
     * {@link Resources.Theme}. Called on a parser positioned at a tag in an XML
     * document, tries to create an AnimatedStateListDrawableCompat from that tag.
     */
    public static AnimatedStateListDrawableCompat createFromXmlInner(
            @NonNull Context context,
            @NonNull Resources resources,
            @NonNull XmlPullParser parser,
            @NonNull AttributeSet attrs,
            @Nullable Resources.Theme theme)
            throws IOException, XmlPullParserException {
        final String name = parser.getName();
        if (!name.equals("animated-selector")) {
            throw new XmlPullParserException(
                    parser.getPositionDescription()
                            + ": invalid animated-selector tag " + name);
        }
        AnimatedStateListDrawableCompat asl = new AnimatedStateListDrawableCompat();
        asl.inflate(context, resources, parser, attrs, theme);
        return asl;
    }

    /**
     * Inflate this Drawable from an XML resource optionally styled by a theme.
     * This can't be called more than once for each Drawable.
     *
     * @param context context to inflate against
     * @param resources Resources used to resolve attribute values
     * @param parser XML parser from which to inflate this Drawable
     * @param attrs Base set of attribute values
     * @param theme Theme to apply, may be null
     * @throws XmlPullParserException
     * @throws IOException
     */
    @Override
    public void inflate(@NonNull Context context, @NonNull Resources resources,
            @NonNull XmlPullParser parser, @NonNull AttributeSet attrs,
            @Nullable Resources.Theme theme)
            throws XmlPullParserException, IOException {
        final TypedArray a = obtainAttributes(
                resources, theme, attrs, R.styleable.AnimatedStateListDrawableCompat);
        setVisible(a.getBoolean(R.styleable.AnimatedStateListDrawableCompat_android_visible, true),
                true);
        updateStateFromTypedArray(a);
        updateDensity(resources);
        a.recycle();
        // Parse the nested <item>/<transition> children, then finalize state.
        inflateChildElements(context, resources, parser, attrs, theme);
        init();
    }

    @Override
    public boolean setVisible(boolean visible, boolean restart) {
        final boolean changed = super.setVisible(visible, restart);
        if (mTransition != null && (changed || restart)) {
            if (visible) {
                mTransition.start();
            } else {
                // Ensure we're showing the correct state when visible.
                jumpToCurrentState();
            }
        }
        return changed;
    }

    /**
     * Add a new drawable to the set of keyframes.
     *
     * @param stateSet An array of resource IDs to associate with the keyframe
     * @param drawable The drawable to show when in the specified state, may not be null
     * @param id The unique identifier for the keyframe
     */
    public void addState(@NonNull int[] stateSet, @NonNull Drawable drawable, int id) {
        if (drawable == null) {
            throw new IllegalArgumentException("Drawable must not be null");
        }
        mState.addStateSet(stateSet, drawable, id);
        onStateChange(getState());
    }

    /**
     * Adds a new transition between keyframes.
     *
     * @param fromId Unique identifier of the starting keyframe
     * @param toId Unique identifier of the ending keyframe
     * @param transition An {@link Animatable} drawable to use as a transition, may not be null
     * @param reversible Whether the transition can be reversed
     */
    public <T extends Drawable & Animatable> void addTransition(int fromId, int toId,
            @NonNull T transition, boolean reversible) {
        if (transition == null) {
            throw new IllegalArgumentException("Transition drawable must not be null");
        }
        mState.addTransition(fromId, toId, transition, reversible);
    }

    @Override
    public boolean isStateful() {
        return true;
    }

    @Override
    public void jumpToCurrentState() {
        super.jumpToCurrentState();
        if (mTransition != null) {
            // Abort any running transition and snap to its destination keyframe.
            mTransition.stop();
            mTransition = null;
            selectDrawable(mTransitionToIndex);
            mTransitionToIndex = -1;
            mTransitionFromIndex = -1;
        }
    }

    @Override
    protected boolean onStateChange(int[] stateSet) {
        // If we're not already at the target index, either attempt to find a
        // valid transition to it or jump directly there.
        final int targetIndex = mState.indexOfKeyframe(stateSet);
        boolean changed = targetIndex != getCurrentIndex()
                && (selectTransition(targetIndex) || selectDrawable(targetIndex));
        // We need to propagate the state change to the current drawable, but
        // we can't call StateListDrawable.onStateChange() without changing the
        // current drawable.
        final Drawable current = getCurrent();
        if (current != null) {
            changed |= current.setState(stateSet);
        }
        return changed;
    }

    // Tries to start (or reverse/retarget) an animated transition toward the
    // keyframe at toIndex; returns false when no usable transition exists and
    // the caller should jump directly to the target drawable.
    private boolean selectTransition(int toIndex) {
        final int fromIndex;
        final Transition currentTransition = mTransition;
        if (currentTransition != null) {
            if (toIndex == mTransitionToIndex) {
                // Already animating to that keyframe.
                return true;
            } else if (toIndex == mTransitionFromIndex && currentTransition.canReverse()) {
                // Reverse the current animation.
                currentTransition.reverse();
                mTransitionToIndex = mTransitionFromIndex;
                mTransitionFromIndex = toIndex;
                return true;
            }
            // Start the next transition from the end of the current one.
            fromIndex = mTransitionToIndex;
            // Changing animation, end the current animation.
            currentTransition.stop();
        } else {
            fromIndex = getCurrentIndex();
        }
        // Reset state.
        mTransition = null;
        mTransitionFromIndex = -1;
        mTransitionToIndex = -1;
        final AnimatedStateListState state = mState;
        final int fromId = state.getKeyframeIdAt(fromIndex);
        final int toId = state.getKeyframeIdAt(toIndex);
        if (toId == 0 || fromId == 0) {
            // Missing a keyframe ID.
            return false;
        }
        final int transitionIndex = state.indexOfTransition(fromId, toId);
        if (transitionIndex < 0) {
            // Couldn't select a transition.
            return false;
        }
        boolean hasReversibleFlag = state.transitionHasReversibleFlag(fromId, toId);
        // This may fail if we're already on the transition, but that's okay!
        selectDrawable(transitionIndex);
        // Wrap the transition drawable in the matching Transition adapter.
        final Transition transition;
        final Drawable d = getCurrent();
        if (d instanceof AnimationDrawable) {
            final boolean reversed = state.isTransitionReversed(fromId, toId);
            transition = new AnimationDrawableTransition((AnimationDrawable) d,
                    reversed, hasReversibleFlag);
        } else if (d instanceof AnimatedVectorDrawableCompat) {
            //final boolean reversed = state.isTransitionReversed(fromId, toId);
            transition = new AnimatedVectorDrawableTransition((AnimatedVectorDrawableCompat) d);
        } else if (d instanceof Animatable) {
            transition = new AnimatableTransition((Animatable) d);
        } else {
            // We don't know how to animate this transition.
            return false;
        }
        transition.start();
        mTransition = transition;
        mTransitionFromIndex = fromIndex;
        mTransitionToIndex = toIndex;
        return true;
    }

    // Adapter over the different animatable drawable kinds; subclasses are
    // defined past this view.
    private abstract static class Transition {
        public abstract void start();

        public abstract void stop();

        public void reverse() {
            // Not supported by default.
} public boolean canReverse() { return false; } } private static class AnimatableTransition extends Transition { private final Animatable mA; AnimatableTransition(Animatable a) { mA = a; } @Override public void start() { mA.start(); } @Override public void stop() { mA.stop(); } } private static class AnimationDrawableTransition extends Transition { private final ObjectAnimator mAnim; // Even AnimationDrawable is always reversible technically, but // we should obey the XML's android:reversible flag. private final boolean mHasReversibleFlag; AnimationDrawableTransition(AnimationDrawable ad, boolean reversed, boolean hasReversibleFlag) { final int frameCount = ad.getNumberOfFrames(); final int fromFrame = reversed ? frameCount - 1 : 0; final int toFrame = reversed ? 0 : frameCount - 1; final FrameInterpolator interp = new FrameInterpolator(ad, reversed); @SuppressLint("ObjectAnimatorBinding") final ObjectAnimator anim = ObjectAnimator.ofInt(ad, "currentIndex", fromFrame, toFrame); if (SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) { Compatibility.Api18Impl.setAutoCancel(anim, true); } anim.setDuration(interp.getTotalDuration()); anim.setInterpolator(interp); mHasReversibleFlag = hasReversibleFlag; mAnim = anim; } @Override public boolean canReverse() { return mHasReversibleFlag; } @Override public void start() { mAnim.start(); } @Override public void reverse() { mAnim.reverse(); } @Override public void stop() { mAnim.cancel(); } } private static class AnimatedVectorDrawableTransition extends Transition { private final AnimatedVectorDrawableCompat mAvd; AnimatedVectorDrawableTransition(AnimatedVectorDrawableCompat avd) { mAvd = avd; } @Override public void start() { mAvd.start(); } @Override public void stop() { mAvd.stop(); } } private void updateStateFromTypedArray(TypedArray a) { final AnimatedStateListState state = mState; // Account for any configuration changes. 
if (SDK_INT >= LOLLIPOP) { state.mChangingConfigurations |= Compatibility.Api21Impl.getChangingConfigurations(a); } // Extract the theme attributes, if any. state.setVariablePadding( a.getBoolean(R.styleable.AnimatedStateListDrawableCompat_android_variablePadding, state.mVariablePadding)); state.setConstantSize( a.getBoolean(R.styleable.AnimatedStateListDrawableCompat_android_constantSize, state.mConstantSize)); state.setEnterFadeDuration( a.getInt(R.styleable.AnimatedStateListDrawableCompat_android_enterFadeDuration, state.mEnterFadeDuration)); state.setExitFadeDuration( a.getInt(R.styleable.AnimatedStateListDrawableCompat_android_exitFadeDuration, state.mExitFadeDuration)); setDither(a.getBoolean(R.styleable.AnimatedStateListDrawableCompat_android_dither, state.mDither)); } private void init() { onStateChange(getState()); } private void inflateChildElements( @NonNull Context context, @NonNull Resources resources, @NonNull XmlPullParser parser, @NonNull AttributeSet attrs, @Nullable Resources.Theme theme) throws XmlPullParserException, IOException { int type; final int innerDepth = parser.getDepth() + 1; int depth; while ((type = parser.next()) != XmlPullParser.END_DOCUMENT && ((depth = parser.getDepth()) >= innerDepth || type != XmlPullParser.END_TAG)) { if (type != XmlPullParser.START_TAG) { continue; } if (depth > innerDepth) { continue; } if (parser.getName().equals(ELEMENT_ITEM)) { parseItem(context, resources, parser, attrs, theme); } else if (parser.getName().equals(ELEMENT_TRANSITION)) { parseTransition(context, resources, parser, attrs, theme); } } } private int parseTransition( @NonNull Context context, @NonNull Resources resources, @NonNull XmlPullParser parser, @NonNull AttributeSet attrs, @Nullable Resources.Theme theme) throws XmlPullParserException, IOException { final TypedArray a = obtainAttributes(resources, theme, attrs, R.styleable.AnimatedStateListDrawableTransition); final int fromId = a.getResourceId( 
R.styleable.AnimatedStateListDrawableTransition_android_fromId, -1); final int toId = a.getResourceId( R.styleable.AnimatedStateListDrawableTransition_android_toId, -1); Drawable dr = null; final int drawableId = a.getResourceId( R.styleable.AnimatedStateListDrawableTransition_android_drawable, -1); if (drawableId > 0) { dr = ResourceManagerInternal.get().getDrawable(context, drawableId); } final boolean reversible = a.getBoolean( R.styleable.AnimatedStateListDrawableTransition_android_reversible, false); a.recycle(); // Loading child elements modifies the state of the AttributeSet's underlying parser, so // it needs to happen after obtaining attributes and extracting states. if (dr == null) { int type; while ((type = parser.next()) == XmlPullParser.TEXT) { // no-op } if (type != XmlPullParser.START_TAG) { throw new XmlPullParserException( parser.getPositionDescription() + TRANSITION_MISSING_DRAWABLE_ERROR); } // Attempt to parse child AVDs if (parser.getName().equals("animated-vector")) { dr = AnimatedVectorDrawableCompat.createFromXmlInner(context, resources, parser, attrs, theme); } else if (SDK_INT >= LOLLIPOP) { dr = Compatibility.Api21Impl.createFromXmlInner(resources, parser, attrs, theme); } else { dr = Drawable.createFromXmlInner(resources, parser, attrs); } } if (dr == null) { throw new XmlPullParserException( parser.getPositionDescription() + TRANSITION_MISSING_DRAWABLE_ERROR); } if (fromId == -1 || toId == -1) { throw new XmlPullParserException( parser.getPositionDescription() + TRANSITION_MISSING_FROM_TO_ID); } return mState.addTransition(fromId, toId, dr, reversible); } private int parseItem( @NonNull Context context, @NonNull Resources resources, @NonNull XmlPullParser parser, @NonNull AttributeSet attrs, @Nullable Resources.Theme theme) throws XmlPullParserException, IOException { final TypedArray a = obtainAttributes(resources, theme, attrs, R.styleable.AnimatedStateListDrawableItem); final int keyframeId = 
a.getResourceId(R.styleable.AnimatedStateListDrawableItem_android_id, 0); Drawable dr = null; final int drawableId = a.getResourceId(R.styleable.AnimatedStateListDrawableItem_android_drawable, -1); if (drawableId > 0) { dr = ResourceManagerInternal.get().getDrawable(context, drawableId); } a.recycle(); final int[] states = extractStateSet(attrs); // Loading child elements modifies the state of the AttributeSet's underlying parser, so // it needs to happen after obtaining attributes and extracting states. if (dr == null) { int type; while ((type = parser.next()) == XmlPullParser.TEXT) { // no-op } if (type != XmlPullParser.START_TAG) { throw new XmlPullParserException( parser.getPositionDescription() + ITEM_MISSING_DRAWABLE_ERROR); } // Attempt to parse child VDs if (parser.getName().equals("vector")) { dr = VectorDrawableCompat.createFromXmlInner(resources, parser, attrs, theme); } else if (SDK_INT >= LOLLIPOP) { dr = Compatibility.Api21Impl.createFromXmlInner(resources, parser, attrs, theme); } else { dr = Drawable.createFromXmlInner(resources, parser, attrs); } } if (dr == null) { throw new XmlPullParserException( parser.getPositionDescription() + ITEM_MISSING_DRAWABLE_ERROR); } return mState.addStateSet(states, dr, keyframeId); } @Override public Drawable mutate() { if (!mMutated && super.mutate() == this) { mState.mutate(); mMutated = true; } return this; } @Override AnimatedStateListState cloneConstantState() { return new AnimatedStateListState(mState, this, null); } @Override void clearMutated() { super.clearMutated(); mMutated = false; } static class AnimatedStateListState extends StateListState { // REVERSED_BIT is indicating the current transition's direction. private static final long REVERSED_BIT = 0x100000000L; // REVERSIBLE_FLAG_BIT is indicating whether the whole transition has // reversible flag set to true. 
private static final long REVERSIBLE_FLAG_BIT = 0x200000000L; LongSparseArray<Long> mTransitions; SparseArrayCompat<Integer> mStateIds; AnimatedStateListState(@Nullable AnimatedStateListState orig, @NonNull AnimatedStateListDrawableCompat owner, @Nullable Resources res) { super(orig, owner, res); if (orig != null) { // Perform a shallow copy and rely on mutate() to deep-copy. mTransitions = orig.mTransitions; mStateIds = orig.mStateIds; } else { mTransitions = new LongSparseArray<>(); mStateIds = new SparseArrayCompat<>(); } } @Override void mutate() { mTransitions = mTransitions.clone(); mStateIds = mStateIds.clone(); } int addTransition(int fromId, int toId, @NonNull Drawable anim, boolean reversible) { final int pos = super.addChild(anim); final long keyFromTo = generateTransitionKey(fromId, toId); long reversibleBit = 0; if (reversible) { reversibleBit = REVERSIBLE_FLAG_BIT; } mTransitions.append(keyFromTo, pos | reversibleBit); if (reversible) { final long keyToFrom = generateTransitionKey(toId, fromId); mTransitions.append(keyToFrom, pos | REVERSED_BIT | reversibleBit); } return pos; } int addStateSet(@NonNull int[] stateSet, @NonNull Drawable drawable, int id) { final int index = super.addStateSet(stateSet, drawable); mStateIds.put(index, id); return index; } int indexOfKeyframe(@NonNull int[] stateSet) { final int index = super.indexOfStateSet(stateSet); if (index >= 0) { return index; } return super.indexOfStateSet(StateSet.WILD_CARD); } int getKeyframeIdAt(int index) { return index < 0 ? 
0 : mStateIds.get(index, 0); } int indexOfTransition(int fromId, int toId) { final long keyFromTo = generateTransitionKey(fromId, toId); return (int) mTransitions.get(keyFromTo, -1L).longValue(); } boolean isTransitionReversed(int fromId, int toId) { final long keyFromTo = generateTransitionKey(fromId, toId); return (mTransitions.get(keyFromTo, -1L) & REVERSED_BIT) != 0L; } boolean transitionHasReversibleFlag(int fromId, int toId) { final long keyFromTo = generateTransitionKey(fromId, toId); return (mTransitions.get(keyFromTo, -1L) & REVERSIBLE_FLAG_BIT) != 0L; } @NonNull @Override public Drawable newDrawable() { return new AnimatedStateListDrawableCompat(this, null); } @NonNull @Override public Drawable newDrawable(Resources res) { return new AnimatedStateListDrawableCompat(this, res); } private static long generateTransitionKey(int fromId, int toId) { return (long) fromId << 32 | toId; } } @Override void setConstantState(@NonNull DrawableContainerState state) { super.setConstantState(state); if (state instanceof AnimatedStateListState) { mState = (AnimatedStateListState) state; } } /** * Interpolates between frames with respect to their individual durations. */ private static class FrameInterpolator implements TimeInterpolator { private int[] mFrameTimes; private int mFrames; private int mTotalDuration; FrameInterpolator(AnimationDrawable d, boolean reversed) { updateFrames(d, reversed); } int updateFrames(AnimationDrawable d, boolean reversed) { final int frameCount = d.getNumberOfFrames(); mFrames = frameCount; if (mFrameTimes == null || mFrameTimes.length < frameCount) { mFrameTimes = new int[frameCount]; } final int[] frameTimes = mFrameTimes; int totalDuration = 0; for (int i = 0; i < frameCount; i++) { final int duration = d.getDuration(reversed ? 
frameCount - i - 1 : i); frameTimes[i] = duration; totalDuration += duration; } mTotalDuration = totalDuration; return totalDuration; } int getTotalDuration() { return mTotalDuration; } @Override public float getInterpolation(float input) { final int elapsed = (int) (input * mTotalDuration + 0.5f); final int frameCount = mFrames; final int[] frameTimes = mFrameTimes; // Find the current frame and remaining time within that frame. int remaining = elapsed; int i = 0; while (i < frameCount && remaining >= frameTimes[i]) { remaining -= frameTimes[i]; i++; } // Remaining time is relative of total duration. final float frameElapsed; if (i < frameCount) { frameElapsed = remaining / (float) mTotalDuration; } else { frameElapsed = 0; } return i / (float) frameCount + frameElapsed; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.distributed.internal;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

import org.apache.geode.InternalGemFireException;
import org.apache.geode.annotations.VisibleForTesting;
import org.apache.geode.cache.server.ServerLoad;
import org.apache.geode.cache.wan.GatewayReceiver;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.logging.internal.executors.LoggingExecutors;

/**
 * A data structure used to hold load information for a locator
 *
 * @since GemFire 5.7
 *
 */
public class LocatorLoadSnapshot {

  private static final String LOAD_IMBALANCE_THRESHOLD_PROPERTY_NAME =
      "gemfire.locator-load-imbalance-threshold";

  public static final float DEFAULT_LOAD_IMBALANCE_THRESHOLD = 10;

  // Maps each known server to its configured server groups.
  private final Map<ServerLocation, String[]> serverGroupMap = new HashMap<>();

  // Per-group connection load; the null key holds the "all servers" default group.
  private final Map<String, Map<ServerLocationAndMemberId, LoadHolder>> connectionLoadMap =
      new HashMap<>();

  // Per-group subscription-queue load; the null key holds the default group.
  private final Map<String, Map<ServerLocation, LoadHolder>> queueLoadMap = new HashMap<>();

  // Outstanding per-client load estimates that expire via the scheduled executor.
  private final ConcurrentMap<EstimateMapKey, LoadEstimateTask> estimateMap =
      new ConcurrentHashMap<>();

  /**
   * when replacing a client's current server we do not move a client from a highly loaded server to
   * a less loaded server until imbalance reaches this threshold. Then we aggressively move clients
   * until balance is achieved.
   */
  private final float loadImbalanceThreshold;

  /**
   * when the loadImbalanceThreshold is hit this variable will be true and it will remain true until
   * balance is achieved.
   */
  private boolean rebalancing;

  private final ScheduledExecutorService estimateTimeoutProcessor =
      LoggingExecutors.newScheduledThreadPool(1, "loadEstimateTimeoutProcessor", false);

  public LocatorLoadSnapshot() {
    // Pre-populate the default (null) group so lookups never miss for ungrouped servers.
    connectionLoadMap.put(null, new HashMap<>());
    queueLoadMap.put(null, new HashMap<>());
    String property = System.getProperty(LOAD_IMBALANCE_THRESHOLD_PROPERTY_NAME);
    if (property != null) {
      loadImbalanceThreshold = Float.parseFloat(property);
    } else {
      loadImbalanceThreshold = DEFAULT_LOAD_IMBALANCE_THRESHOLD;
    }
  }

  /**
   * Add a new server to the load snapshot.
   */
  public synchronized void addServer(ServerLocation location, String memberId, String[] groups,
      ServerLoad initialLoad, long loadPollInterval) {
    serverGroupMap.put(location, groups);
    LoadHolder connectionLoad = new LoadHolder(location, initialLoad.getConnectionLoad(),
        initialLoad.getLoadPerConnection(), loadPollInterval);
    addGroups(connectionLoadMap, groups, connectionLoad, memberId);
    LoadHolder queueLoad = new LoadHolder(location, initialLoad.getSubscriptionConnectionLoad(),
        initialLoad.getLoadPerSubscriptionConnection(), loadPollInterval);
    addGroups(queueLoadMap, groups, queueLoad);
    updateLoad(location, memberId, initialLoad);
  }

  /**
   * Remove a server from the load snapshot.
   */
  public synchronized void removeServer(ServerLocation location, String memberId) {
    String[] groups = serverGroupMap.remove(location);
    /*
     * Adding null check for #41522 - we were getting a remove from a BridgeServer that was shutting
     * down and the ServerLocation wasn't in this map. The root cause isn't 100% clear but it might
     * be a race from profile add / remove from different channels.
     */
    if (groups != null) {
      removeFromMap(connectionLoadMap, groups, location, memberId);
      removeFromMap(queueLoadMap, groups, location);
    }
  }

  public void updateLoad(ServerLocation location, String memberId, ServerLoad newLoad) {
    updateLoad(location, memberId, newLoad, null);
  }

  /**
   * Update the load information for a server that was previously added.
   */
  synchronized void updateLoad(ServerLocation location, String memberId, ServerLoad newLoad,
      List<ClientProxyMembershipID> clientIds) {
    String[] groups = serverGroupMap.get(location);
    // the server was asynchronously removed, so don't do anything.
    if (groups == null) {
      return;
    }
    if (clientIds != null) {
      // A real load report supersedes any outstanding per-client estimates.
      for (ClientProxyMembershipID clientId : clientIds) {
        cancelClientEstimate(clientId, location);
      }
    }

    updateMap(connectionLoadMap, location, memberId, newLoad.getConnectionLoad(),
        newLoad.getLoadPerConnection());
    updateMap(queueLoadMap, location, newLoad.getSubscriptionConnectionLoad(),
        newLoad.getLoadPerSubscriptionConnection());
  }

  public synchronized boolean hasBalancedConnections(String group) {
    // An empty group name means "the default group" (stored under the null key).
    if ("".equals(group)) {
      group = null;
    }
    Map<ServerLocationAndMemberId, LoadHolder> groupServers = connectionLoadMap.get(group);

    return isBalanced(groupServers);
  }

  private synchronized boolean isBalanced(Map<ServerLocationAndMemberId, LoadHolder> groupServers) {
    return isBalanced(groupServers, false);
  }

  private synchronized boolean isBalanced(Map<ServerLocationAndMemberId, LoadHolder> groupServers,
      boolean withThresholdCheck) {
    if (groupServers == null || groupServers.isEmpty()) {
      return true;
    }

    float bestLoad = Float.MAX_VALUE;
    float largestLoadPerConnection = Float.MIN_VALUE;
    float worstLoad = Float.MIN_VALUE;

    for (Entry<ServerLocationAndMemberId, LoadHolder> loadHolderEntry : groupServers.entrySet()) {
      LoadHolder nextLoadReference = loadHolderEntry.getValue();
      float nextLoad = nextLoadReference.getLoad();
      float nextLoadPerConnection = nextLoadReference.getLoadPerConnection();

      if (nextLoad < bestLoad) {
        bestLoad = nextLoad;
      }
      if (nextLoad > worstLoad) {
        worstLoad = nextLoad;
      }
      if (nextLoadPerConnection > largestLoadPerConnection) {
        largestLoadPerConnection = nextLoadPerConnection;
      }
    }

    // Within one connection's worth of load counts as balanced.
    boolean balanced = (worstLoad - bestLoad) <= largestLoadPerConnection;

    if (withThresholdCheck) {
      balanced = thresholdCheck(bestLoad, worstLoad, largestLoadPerConnection, balanced);
    }

    return balanced;
  }

  /**
   * In order to keep from ping-ponging clients around the cluster we don't move a client unless
   * imbalance is greater than the loadImbalanceThreshold.
   * <p>
   * When the threshold is reached we report imbalance until proper balance is achieved.
   * </p>
   * <p>
   * This method has the side-effect of setting the <code>rebalancing</code> instance variable
   * which, at the time of this writing, is only used by this method.
   * </p>
   */
  private synchronized boolean thresholdCheck(float bestLoad, float worstLoad,
      float largestLoadPerConnection, boolean balanced) {
    if (rebalancing) {
      if (balanced) {
        rebalancing = false;
      }
      return balanced;
    }

    // see if we're out of balance enough to trigger rebalancing or whether we
    // should tolerate the imbalance
    if (!balanced) {
      float imbalance = worstLoad - bestLoad;
      if (imbalance >= (largestLoadPerConnection * loadImbalanceThreshold)) {
        rebalancing = true;
      } else {
        // we're not in balance but are within the threshold
        balanced = true;
      }
    }
    return balanced;
  }

  synchronized boolean isRebalancing() {
    return rebalancing;
  }

  /**
   * Pick the least loaded server in the given group
   *
   * @param group the group, or null or "" if the client has no server group.
   * @param excludedServers a list of servers to exclude as choices
   * @return the least loaded server, or null if there are no servers that aren't excluded.
   */
  public synchronized ServerLocation getServerForConnection(String group,
      Set<ServerLocation> excludedServers) {
    if ("".equals(group)) {
      group = null;
    }

    Map<ServerLocationAndMemberId, LoadHolder> groupServers = connectionLoadMap.get(group);

    if (groupServers == null || groupServers.isEmpty()) {
      return null;
    }

    {
      List bestLHs = findBestServers(groupServers, excludedServers, 1);
      if (bestLHs.isEmpty()) {
        return null;
      }
      LoadHolder lh = (LoadHolder) bestLHs.get(0);
      // Optimistically count the connection now; the server's next load report corrects it.
      lh.incConnections();
      return lh.getLocation();
    }
  }

  public synchronized ArrayList getServers(String group) {
    if ("".equals(group)) {
      group = null;
    }

    Map<ServerLocationAndMemberId, LoadHolder> groupServers = connectionLoadMap.get(group);

    if (groupServers == null || groupServers.isEmpty()) {
      return null;
    }
    ArrayList result = new ArrayList<>();
    for (ServerLocationAndMemberId locationAndMemberId : groupServers.keySet()) {
      result.add(locationAndMemberId.getServerLocation());
    }
    return result;
  }

  public void shutDown() {
    estimateTimeoutProcessor.shutdown();
  }

  /**
   * Pick the least loaded server in the given group if currentServer is the most loaded server.
   *
   * @param group the group, or null or "" if the client has no server group.
   * @param excludedServers a list of servers to exclude as choices
   * @return currentServer if it is not the most loaded, null if there are no servers that aren't
   *         excluded, otherwise the least loaded server in the group.
   */
  public synchronized ServerLocation getReplacementServerForConnection(ServerLocation currentServer,
      String group, Set<ServerLocation> excludedServers) {
    if ("".equals(group)) {
      group = null;
    }

    Map<ServerLocationAndMemberId, LoadHolder> groupServers = connectionLoadMap.get(group);

    if (groupServers == null || groupServers.isEmpty()) {
      return null;
    }

    // check to see if we are currently balanced
    if (isBalanced(groupServers, true)) {
      // if we are then return currentServer
      return currentServer;
    }

    LoadHolder currentServerLH = isCurrentServerMostLoaded(currentServer, groupServers);
    if (currentServerLH == null) {
      return currentServer;
    }
    {
      List<LoadHolder> bestLHs = findBestServers(groupServers, excludedServers, 1);
      if (bestLHs.isEmpty()) {
        return null;
      }
      LoadHolder bestLH = bestLHs.get(0);
      // Move one connection's worth of load from the current server to the best one.
      currentServerLH.decConnections();
      bestLH.incConnections();
      return bestLH.getLocation();
    }
  }

  /**
   * Pick the least loaded servers in the given group.
   *
   * @param group the group, or null or "" if the client has no server group.
   * @param excludedServers a list of servers to exclude as choices
   * @param count how many distinct servers to pick.
   * @return a list containing the best servers. The size of the list will be less than or equal to
   *         count, depending on if there are enough servers available.
   */
  public List getServersForQueue(String group, Set<ServerLocation> excludedServers, int count) {
    return getServersForQueue(null, group, excludedServers, count);
  }

  /**
   * Pick the least loaded servers in the given group.
   *
   * @param id the id of the client creating the queue
   * @param group the group, or null or "" if the client has no server group.
   * @param excludedServers a list of servers to exclude as choices
   * @param count how many distinct servers to pick.
   * @return a list containing the best servers. The size of the list will be less than or equal to
   *         count, depending on if there are enough servers available.
   */
  synchronized List<ServerLocation> getServersForQueue(ClientProxyMembershipID id, String group,
      Set<ServerLocation> excludedServers, int count) {
    if ("".equals(group)) {
      group = null;
    }

    Map<ServerLocation, LoadHolder> groupServers = queueLoadMap.get(group);

    if (groupServers == null || groupServers.isEmpty()) {
      return Collections.emptyList();
    }
    {
      List<LoadHolder> bestLHs = findBestServers(groupServers, excludedServers, count);
      ArrayList<ServerLocation> result = new ArrayList<>(bestLHs.size());

      if (id != null) {
        // Record a load estimate per client+server that self-expires, instead of
        // bumping the connection count directly.
        ClientProxyMembershipID.Identity actualId = id.getIdentity();
        for (LoadHolder load : bestLHs) {
          EstimateMapKey key = new EstimateMapKey(actualId, load.getLocation());
          LoadEstimateTask task = new LoadEstimateTask(key, load);
          try {
            final long MIN_TIMEOUT = 60000; // 1 minute
            long timeout = load.getLoadPollInterval() * 2;
            if (timeout < MIN_TIMEOUT) {
              timeout = MIN_TIMEOUT;
            }
            task.setFuture(estimateTimeoutProcessor.schedule(task, timeout,
                TimeUnit.MILLISECONDS));
            addEstimate(key, task);
          } catch (RejectedExecutionException e) {
            // ignore, the timer has been cancelled, which means we're shutting
            // down.
          }
          result.add(load.getLocation());
        }
      } else {
        for (LoadHolder load : bestLHs) {
          load.incConnections();
          result.add(load.getLocation());
        }
      }
      return result;
    }
  }

  /**
   * Test hook to get the current load for all servers Returns a map of ServerLocation->Load for
   * each server.
   */
  public synchronized Map<ServerLocation, ServerLoad> getLoadMap() {
    Map<ServerLocationAndMemberId, LoadHolder> connectionMap = connectionLoadMap.get(null);
    Map<ServerLocation, LoadHolder> queueMap = queueLoadMap.get(null);
    Map<ServerLocation, ServerLoad> result = new HashMap<>();

    for (Entry<ServerLocationAndMemberId, LoadHolder> entry : connectionMap
        .entrySet()) {
      ServerLocation location = entry.getKey().getServerLocation();
      LoadHolder connectionLoad = entry.getValue();
      LoadHolder queueLoad = queueMap.get(location);
      // was asynchronously removed
      if (queueLoad == null) {
        continue;
      }
      result.put(location, new ServerLoad(connectionLoad.getLoad(),
          connectionLoad.getLoadPerConnection(), queueLoad.getLoad(),
          queueLoad.getLoadPerConnection()));
    }

    return result;
  }

  /** Registers a queue-load holder under each group, plus the default (null) group. */
  @VisibleForTesting
  void addGroups(Map<String, Map<ServerLocation, LoadHolder>> map, String[] groups,
      LoadHolder holder) {
    for (String group : groups) {
      Map<ServerLocation, LoadHolder> groupMap =
          map.computeIfAbsent(group, k -> new HashMap<>());
      groupMap.put(holder.getLocation(), holder);
    }
    // Special case for GatewayReceiver where we don't put those serverlocation against holder
    if (!(groups.length > 0 && groups[0].equals(GatewayReceiver.RECEIVER_GROUP))) {
      Map<ServerLocation, LoadHolder> groupMap = map.computeIfAbsent(null, k -> new HashMap<>());
      groupMap.put(holder.getLocation(), holder);
    }
  }

  /** Registers a connection-load holder, keyed by location+member, under each group. */
  @VisibleForTesting
  void addGroups(Map<String, Map<ServerLocationAndMemberId, LoadHolder>> map, String[] groups,
      LoadHolder holder, String memberId) {
    for (String group : groups) {
      Map<ServerLocationAndMemberId, LoadHolder> groupMap =
          map.computeIfAbsent(group, k -> new HashMap<>());
      groupMap.put(new ServerLocationAndMemberId(holder.getLocation(), memberId), holder);
    }
    // Special case for GatewayReceiver where we don't put those serverlocation against holder
    if (!(groups.length > 0 && groups[0].equals(GatewayReceiver.RECEIVER_GROUP))) {
      Map<ServerLocationAndMemberId, LoadHolder> groupMap = map.computeIfAbsent(null, k ->
          new HashMap<>());
      groupMap.put(new ServerLocationAndMemberId(holder.getLocation(), memberId), holder);
    }
  }

  /** Removes a server's queue-load holder from each group map and the default group. */
  @VisibleForTesting
  void removeFromMap(Map<String, Map<ServerLocation, LoadHolder>> map, String[] groups,
      ServerLocation location) {
    for (String group : groups) {
      Map<ServerLocation, LoadHolder> groupMap = map.get(group);
      if (groupMap != null) {
        groupMap.remove(location);
        if (groupMap.size() == 0) {
          // Drop empty group maps so stale groups don't accumulate.
          map.remove(group);
        }
      }
    }
    // NOTE(review): the null-group map is assumed to always exist (populated in the
    // constructor); a missing entry here would NPE.
    Map groupMap = map.get(null);
    groupMap.remove(location);
  }

  /** Removes a server's connection-load holder from each group map and the default group. */
  @VisibleForTesting
  void removeFromMap(Map<String, Map<ServerLocationAndMemberId, LoadHolder>> map, String[] groups,
      ServerLocation location, String memberId) {
    ServerLocationAndMemberId locationAndMemberId =
        new ServerLocationAndMemberId(location, memberId);
    for (String group : groups) {
      Map<ServerLocationAndMemberId, LoadHolder> groupMap = map.get(group);
      if (groupMap != null) {
        groupMap.remove(locationAndMemberId);
        if (groupMap.size() == 0) {
          map.remove(group);
        }
      }
    }
    Map groupMap = map.get(null);
    groupMap.remove(locationAndMemberId);
  }

  /** Updates a queue-load entry; "" memberId selects plain ServerLocation keys. */
  @VisibleForTesting
  void updateMap(Map map, ServerLocation location, float load, float loadPerConnection) {
    updateMap(map, location, "", load, loadPerConnection);
  }

  /** Updates the holder for {@code location} in the default (null) group map, if present. */
  @VisibleForTesting
  void updateMap(Map map, ServerLocation location, String memberId, float load,
      float loadPerConnection) {
    // Only the null-group map is consulted: group maps share the same LoadHolder
    // instances, so updating one updates them all.
    Map groupMap = (Map) map.get(null);
    LoadHolder holder;
    if (memberId.equals("")) {
      holder = (LoadHolder) groupMap.get(location);
    } else {
      ServerLocationAndMemberId locationAndMemberId =
          new ServerLocationAndMemberId(location, memberId);
      holder = (LoadHolder) groupMap.get(locationAndMemberId);
    }
    if (holder != null) {
      holder.setLoad(load, loadPerConnection);
    }
  }

  /**
   *
   * @param groupServers the servers to consider
   * @param excludedServers servers to exclude
   * @param count how many you want. a negative number means all of them in order of best to worst
   * @return a list of best...worst server LoadHolders
   */
  @VisibleForTesting
  List<LoadHolder> findBestServers(
      Map<?, LoadHolder> groupServers,
      Set<ServerLocation> excludedServers, int count) {
    if (count == 0) {
      return new ArrayList<>();
    }

    // Ordered by load, then by location, so ties are broken deterministically.
    TreeSet<LoadHolder> bestEntries = new TreeSet<>((l1, l2) -> {
      int difference = Float.compare(l1.getLoad(), l2.getLoad());
      if (difference != 0) {
        return difference;
      }
      ServerLocation sl1 = l1.getLocation();
      ServerLocation sl2 = l2.getLocation();
      return sl1.compareTo(sl2);
    });

    boolean retainAll = (count < 0);
    float lastBestLoad = Float.MAX_VALUE;

    for (Map.Entry<?, LoadHolder> loadEntry : groupServers.entrySet()) {
      ServerLocation location;
      Object key = loadEntry.getKey();
      // Keys differ between the connection map (location+member) and queue map (location).
      if (key instanceof ServerLocationAndMemberId) {
        location = ((ServerLocationAndMemberId) key).getServerLocation();
      } else if (key instanceof ServerLocation) {
        location = ((ServerLocation) key);
      } else {
        throw new InternalGemFireException(
            "findBestServers method was called with incorrect type parameters.");
      }
      if (excludedServers.contains(location)) {
        continue;
      }

      LoadHolder nextLoadReference = loadEntry.getValue();
      float nextLoad = nextLoadReference.getLoad();

      if ((bestEntries.size() < count) || retainAll || (nextLoad < lastBestLoad)) {
        bestEntries.add(nextLoadReference);

        if (!retainAll && (bestEntries.size() > count)) {
          // Keep the set bounded at count entries.
          bestEntries.remove(bestEntries.last());
        }

        LoadHolder lastBestHolder = bestEntries.last();
        lastBestLoad = lastBestHolder.getLoad();
      }
    }

    return new ArrayList<>(bestEntries);
  }

  /**
   * If it is most loaded then return its LoadHolder; otherwise return null;
   */
  @VisibleForTesting
  LoadHolder isCurrentServerMostLoaded(ServerLocation currentServer,
      Map<ServerLocationAndMemberId, LoadHolder> groupServers) {
    // Check if there are keys in the map that contains currentServer.
    LoadHolder currentLH = null;
    for (ServerLocationAndMemberId locationAndMemberId : groupServers.keySet()) {
      if (currentServer.equals(locationAndMemberId.getServerLocation())) {
        currentLH = groupServers.get(locationAndMemberId);
        break;
      }
    }

    if (currentLH == null) {
      return null;
    }

    final float currentLoad = currentLH.getLoad();
    for (Map.Entry<ServerLocationAndMemberId, LoadHolder> loadEntry : groupServers.entrySet()) {
      ServerLocation location = loadEntry.getKey().getServerLocation();
      if (location.equals(currentServer)) {
        continue;
      }
      LoadHolder nextLoadReference = loadEntry.getValue();
      float nextLoad = nextLoadReference.getLoad();
      if (nextLoad > currentLoad) {
        // found a server who has a higher load than us
        return null;
      }
    }

    return currentLH;
  }

  private void cancelClientEstimate(ClientProxyMembershipID id, ServerLocation location) {
    if (id != null) {
      removeAndCancelEstimate(new EstimateMapKey(id.getIdentity(), location));
    }
  }

  /**
   * Add the task to the estimate map at the given key and cancel any old task found
   */
  private void addEstimate(EstimateMapKey key, LoadEstimateTask task) {
    LoadEstimateTask oldTask;
    oldTask = estimateMap.put(key, task);
    if (oldTask != null) {
      oldTask.cancel();
    }
  }

  /**
   * Remove the task from the estimate map at the given key.
   *
   * @return true it task was removed; false if it was not the task mapped to key
   */
  private boolean removeIfPresentEstimate(EstimateMapKey key, LoadEstimateTask task) {
    // no need to cancel task; it already fired
    return estimateMap.remove(key, task);
  }

  /**
   * Remove and cancel any task estimate mapped to the given key.
   */
  private void removeAndCancelEstimate(EstimateMapKey key) {
    LoadEstimateTask oldTask;
    oldTask = estimateMap.remove(key);
    if (oldTask != null) {
      oldTask.cancel();
    }
  }

  /**
   * Used as a key on the estimateMap. These keys are made up of the identity of the client and
   * server that will be connected by the resource (e.g. queue) that we are trying to create.
*/ private static class EstimateMapKey { private final ClientProxyMembershipID.Identity clientId; private final ServerLocation serverId; EstimateMapKey(ClientProxyMembershipID.Identity clientId, ServerLocation serverId) { this.clientId = clientId; this.serverId = serverId; } @Override public int hashCode() { return clientId.hashCode() ^ serverId.hashCode(); } @Override public boolean equals(Object obj) { if (!(obj instanceof EstimateMapKey)) { return false; } EstimateMapKey that = (EstimateMapKey) obj; return clientId.equals(that.clientId) && serverId.equals(that.serverId); } } private class LoadEstimateTask implements Runnable { private final EstimateMapKey key; private final LoadHolder lh; private ScheduledFuture future; LoadEstimateTask(EstimateMapKey key, LoadHolder lh) { this.key = key; this.lh = lh; lh.addEstimate(); } @Override public void run() { if (removeIfPresentEstimate(key, this)) { decEstimate(); } } public void setFuture(ScheduledFuture future) { // Note this is always called once and only once // and always before cancel can be called. 
this.future = future; } public void cancel() { future.cancel(false); decEstimate(); } private void decEstimate() { synchronized (LocatorLoadSnapshot.this) { lh.removeEstimate(); } } } @VisibleForTesting static class LoadHolder { private float load; private float loadPerConnection; private int estimateCount; private final ServerLocation location; private final long loadPollInterval; LoadHolder(ServerLocation location, float load, float loadPerConnection, long loadPollInterval) { this.location = location; this.load = load; this.loadPerConnection = loadPerConnection; this.loadPollInterval = loadPollInterval; } void setLoad(float load, float loadPerConnection) { this.loadPerConnection = loadPerConnection; this.load = load + (estimateCount * loadPerConnection); } void incConnections() { load += loadPerConnection; } void addEstimate() { estimateCount++; incConnections(); } void removeEstimate() { estimateCount--; decConnections(); } void decConnections() { load -= loadPerConnection; } public float getLoad() { return load; } public float getLoadPerConnection() { return loadPerConnection; } public ServerLocation getLocation() { return location; } public long getLoadPollInterval() { return loadPollInterval; } @Override public String toString() { return "LoadHolder[" + getLoad() + ", " + getLocation() + ", loadPollInterval=" + getLoadPollInterval() + ((estimateCount != 0) ? (", estimates=" + estimateCount) : "") + ", " + loadPerConnection + "]"; } } }
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.cassandra.io.util;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.junit.Assert.*;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.compress.BufferType;
import org.apache.cassandra.utils.ByteBufferUtil;

/**
 * Tests for {@code RandomAccessReader}: sequential reads, mark/reset, seeking (single- and
 * multi-threaded), and behavior on files larger than 2GB (via a fake channel).
 */
public class RandomAccessReaderTest
{
    private static final Logger logger = LoggerFactory.getLogger(RandomAccessReaderTest.class);

    @BeforeClass
    public static void setupDD()
    {
        DatabaseDescriptor.daemonInitialization();
    }

    /** Builder-style bundle of the knobs each read test varies (buffer type/size, mmap). */
    private static final class Parameters
    {
        final long fileLength;      // total bytes the test file should contain (at least)
        final int bufferSize;

        BufferType bufferType;
        int maxSegmentSize;
        boolean mmappedRegions;

        public byte[] expected;     // repeating payload written to, then expected from, the file

        Parameters(long fileLength, int bufferSize)
        {
            this.fileLength = fileLength;
            this.bufferSize = bufferSize;
            this.bufferType = BufferType.OFF_HEAP;
            this.maxSegmentSize = MmappedRegions.MAX_SEGMENT_SIZE;
            this.mmappedRegions = false;
            this.expected = "The quick brown fox jumps over the lazy dog".getBytes(FileUtils.CHARSET);
        }

        Parameters mmappedRegions(boolean mmappedRegions)
        {
            this.mmappedRegions = mmappedRegions;
            return this;
        }

        Parameters bufferType(BufferType bufferType)
        {
            this.bufferType = bufferType;
            return this;
        }

        Parameters maxSegmentSize(int maxSegmentSize)
        {
            this.maxSegmentSize = maxSegmentSize;
            return this;
        }
    }

    @Test
    public void testBufferedOffHeap() throws IOException
    {
        testReadFully(new Parameters(8192, 4096).bufferType(BufferType.OFF_HEAP));
    }

    @Test
    public void testBufferedOnHeap() throws IOException
    {
        testReadFully(new Parameters(8192, 4096).bufferType(BufferType.ON_HEAP));
    }

    @Test
    public void testBigBufferSize() throws IOException
    {
        testReadFully(new Parameters(8192, 65536).bufferType(BufferType.ON_HEAP));
    }

    @Test
    public void testTinyBufferSize() throws IOException
    {
        testReadFully(new Parameters(8192, 16).bufferType(BufferType.ON_HEAP));
    }

    @Test
    public void testOneSegment() throws IOException
    {
        testReadFully(new Parameters(8192, 4096).mmappedRegions(true));
    }

    @Test
    public void testMultipleSegments() throws IOException
    {
        // Same file/buffer sizes as testOneSegment, but the 1KB max segment size forces the
        // 8KB file to be mapped across multiple segments.
        testReadFully(new Parameters(8192, 4096).mmappedRegions(true).maxSegmentSize(1024));
    }

    @Test
    public void testVeryLarge() throws IOException
    {
        final long SIZE = 1L << 32; // 4GB — beyond Integer.MAX_VALUE, exercises large-file paths
        Parameters params = new Parameters(SIZE, 1 << 20); // 1MB

        try (ChannelProxy channel = new ChannelProxy("abc", new FakeFileChannel(SIZE));
             FileHandle.Builder builder = new FileHandle.Builder(channel)
                     .bufferType(params.bufferType).bufferSize(params.bufferSize);
             FileHandle fh = builder.complete();
             RandomAccessReader reader = fh.createReader())
        {
            assertEquals(channel.size(), reader.length());
            assertEquals(channel.size(), reader.bytesRemaining());
            // available() is an int API, so it saturates at Integer.MAX_VALUE for >2GB files.
            assertEquals(Integer.MAX_VALUE, reader.available());

            assertEquals(channel.size(), reader.skip(channel.size()));

            assertTrue(reader.isEOF());
            assertEquals(0, reader.bytesRemaining());
        }
    }

    /** A fake file channel that simply increments the position and doesn't
     * actually read anything. We use it to simulate very large files, > 2G.
     */
    private static final class FakeFileChannel extends FileChannel
    {
        private final long size;
        private long position;

        FakeFileChannel(long size)
        {
            this.size = size;
        }

        public int read(ByteBuffer dst)
        {
            // Pretend the buffer was fully filled without touching its contents.
            int ret = dst.remaining();
            position += ret;
            dst.position(dst.limit());
            return ret;
        }

        public long read(ByteBuffer[] dsts, int offset, int length)
        {
            throw new UnsupportedOperationException();
        }

        public int write(ByteBuffer src)
        {
            throw new UnsupportedOperationException();
        }

        public long write(ByteBuffer[] srcs, int offset, int length)
        {
            throw new UnsupportedOperationException();
        }

        public long position()
        {
            return position;
        }

        public FileChannel position(long newPosition)
        {
            position = newPosition;
            return this;
        }

        public long size()
        {
            return size;
        }

        public FileChannel truncate(long size)
        {
            throw new UnsupportedOperationException();
        }

        public void force(boolean metaData)
        {
            throw new UnsupportedOperationException();
        }

        public long transferTo(long position, long count, WritableByteChannel target)
        {
            throw new UnsupportedOperationException();
        }

        public long transferFrom(ReadableByteChannel src, long position, long count)
        {
            throw new UnsupportedOperationException();
        }

        public int read(ByteBuffer dst, long position)
        {
            // Positional read: same no-op fill, but updates position from the argument.
            int ret = dst.remaining();
            this.position = position + ret;
            dst.position(dst.limit());
            return ret;
        }

        public int write(ByteBuffer src, long position)
        {
            throw new UnsupportedOperationException();
        }

        public MappedByteBuffer map(MapMode mode, long position, long size)
        {
            throw new UnsupportedOperationException();
        }

        public FileLock lock(long position, long size, boolean shared)
        {
            throw new UnsupportedOperationException();
        }

        public FileLock tryLock(long position, long size, boolean shared)
        {
            throw new UnsupportedOperationException();
        }

        protected void implCloseChannel()
        {
        }
    }

    /** Writes a temp file of at least {@code params.fileLength} bytes by repeating the
     * expected payload; the file may slightly overshoot the requested length. */
    private static File writeFile(Parameters params) throws IOException
    {
        final File f = File.createTempFile("testReadFully", "1");
        f.deleteOnExit();

        try(SequentialWriter writer = new SequentialWriter(f))
        {
            long numWritten = 0;
            while (numWritten < params.fileLength)
            {
                writer.write(params.expected);
                numWritten += params.expected.length;
            }
            writer.finish();
        }

        assert f.exists();
        assert f.length() >= params.fileLength;
        return f;
    }

    /** Core read test: writes the repeating payload, then reads it back chunk by chunk and
     * verifies length/remaining/available accounting plus EOF. */
    private static void testReadFully(Parameters params) throws IOException
    {
        final File f = writeFile(params);
        try (FileHandle.Builder builder = new FileHandle.Builder(f.getPath())
                .bufferType(params.bufferType).bufferSize(params.bufferSize))
        {
            builder.mmapped(params.mmappedRegions);
            try (FileHandle fh = builder.complete();
                 RandomAccessReader reader = fh.createReader())
            {
                assertEquals(f.getAbsolutePath(), reader.getPath());
                assertEquals(f.length(), reader.length());
                assertEquals(f.length(), reader.bytesRemaining());
                assertEquals(Math.min(Integer.MAX_VALUE, f.length()), reader.available());

                byte[] b = new byte[params.expected.length];
                long numRead = 0;
                while (numRead < params.fileLength)
                {
                    reader.readFully(b);
                    assertTrue(Arrays.equals(params.expected, b));
                    numRead += b.length;
                }

                assertTrue(reader.isEOF());
                assertEquals(0, reader.bytesRemaining());
            }
        }
    }

    @Test
    public void testReadBytes() throws IOException
    {
        File f = File.createTempFile("testReadBytes", "1");
        final String expected = "The quick brown fox jumps over the lazy dog";

        try(SequentialWriter writer = new SequentialWriter(f))
        {
            writer.write(expected.getBytes());
            writer.finish();
        }

        assert f.exists();

        try (FileHandle.Builder builder = new FileHandle.Builder(f.getPath());
             FileHandle fh = builder.complete();
             RandomAccessReader reader = fh.createReader())
        {
            assertEquals(f.getAbsolutePath(), reader.getPath());
            assertEquals(expected.length(), reader.length());

            ByteBuffer b = ByteBufferUtil.read(reader, expected.length());
            assertEquals(expected, new String(b.array(), StandardCharsets.UTF_8));

            assertTrue(reader.isEOF());
            assertEquals(0, reader.bytesRemaining());
        }
    }

    @Test
    public void testReset() throws IOException
    {
        File f = File.createTempFile("testMark", "1");
        final String expected = "The quick brown fox jumps over the lazy dog";
        final int numIterations = 10;

        try(SequentialWriter writer = new SequentialWriter(f))
        {
            for (int i = 0; i < numIterations; i++)
                writer.write(expected.getBytes());
            writer.finish();
        }

        assert f.exists();

        try (FileHandle.Builder builder = new FileHandle.Builder(f.getPath());
             FileHandle fh = builder.complete();
             RandomAccessReader reader = fh.createReader())
        {
            assertEquals(expected.length() * numIterations, reader.length());

            // Consume one payload, then mark so bytesPastMark is measured from here.
            ByteBuffer b = ByteBufferUtil.read(reader, expected.length());
            assertEquals(expected, new String(b.array(), StandardCharsets.UTF_8));
            assertFalse(reader.isEOF());
            assertEquals((numIterations - 1) * expected.length(), reader.bytesRemaining());

            DataPosition mark = reader.mark();
            assertEquals(0, reader.bytesPastMark());
            assertEquals(0, reader.bytesPastMark(mark));

            for (int i = 0; i < (numIterations - 1); i++)
            {
                b = ByteBufferUtil.read(reader, expected.length());
                assertEquals(expected, new String(b.array(), StandardCharsets.UTF_8));
            }
            assertTrue(reader.isEOF());
            assertEquals(expected.length() * (numIterations - 1), reader.bytesPastMark());
            assertEquals(expected.length() * (numIterations - 1), reader.bytesPastMark(mark));

            // Rewind to the explicit mark and re-read.
            reader.reset(mark);
            assertEquals(0, reader.bytesPastMark());
            assertEquals(0, reader.bytesPastMark(mark));
            assertFalse(reader.isEOF());
            for (int i = 0; i < (numIterations - 1); i++)
            {
                b = ByteBufferUtil.read(reader, expected.length());
                assertEquals(expected, new String(b.array(), StandardCharsets.UTF_8));
            }

            // Rewind again via the no-arg reset (uses the last mark) and re-read.
            reader.reset();
            assertEquals(0, reader.bytesPastMark());
            assertEquals(0, reader.bytesPastMark(mark));
            assertFalse(reader.isEOF());
            for (int i = 0; i < (numIterations - 1); i++)
            {
                b = ByteBufferUtil.read(reader, expected.length());
                assertEquals(expected, new String(b.array(), StandardCharsets.UTF_8));
            }

            assertTrue(reader.isEOF());
        }
    }

    @Test
    public void testSeekSingleThread() throws IOException, InterruptedException
    {
        testSeek(1);
    }

    @Test
    public void testSeekMultipleThreads() throws IOException, InterruptedException
    {
        testSeek(10);
    }

    /** Seek test: each of {@code numThreads} workers opens its own reader over the same file,
     * reads it fully, then seeks to random positions and verifies the ints read there. */
    private static void testSeek(int numThreads) throws IOException, InterruptedException
    {
        final File f = File.createTempFile("testMark", "1");
        final byte[] expected = new byte[1 << 16];

        // Log the seed so a failing run can be reproduced deterministically.
        long seed = System.nanoTime();
        //seed = 365238103404423L; // uncomment to replay a previously-logged failing seed
        logger.info("Seed {}", seed);
        Random r = new Random(seed);
        r.nextBytes(expected);

        try(SequentialWriter writer = new SequentialWriter(f))
        {
            writer.write(expected);
            writer.finish();
        }

        assert f.exists();

        try (FileHandle.Builder builder = new FileHandle.Builder(f.getPath()))
        {
            final Runnable worker = () ->
            {
                try (FileHandle fh = builder.complete();
                     RandomAccessReader reader = fh.createReader())
                {
                    assertEquals(expected.length, reader.length());

                    ByteBuffer b = ByteBufferUtil.read(reader, expected.length);
                    assertTrue(Arrays.equals(expected, b.array()));
                    assertTrue(reader.isEOF());
                    assertEquals(0, reader.bytesRemaining());

                    reader.seek(0);
                    b = ByteBufferUtil.read(reader, expected.length);
                    assertTrue(Arrays.equals(expected, b.array()));
                    assertTrue(reader.isEOF());
                    assertEquals(0, reader.bytesRemaining());

                    for (int i = 0; i < 10; i++)
                    {
                        int pos = r.nextInt(expected.length);
                        reader.seek(pos);
                        assertEquals(pos, reader.getPosition());

                        ByteBuffer buf = ByteBuffer.wrap(expected, pos, expected.length - pos)
                                                   .order(ByteOrder.BIG_ENDIAN);

                        while (reader.bytesRemaining() > 4)
                            assertEquals(buf.getInt(), reader.readInt());
                    }

                    // Explicit close before try-with-resources closes it again;
                    // NOTE(review): relies on reader close being idempotent — confirm.
                    reader.close();
                }
                catch (Exception ex)
                {
                    ex.printStackTrace();
                    fail(ex.getMessage());
                }
            };

            if (numThreads == 1)
            {
                worker.run();
            }
            else
            {
                ExecutorService executor = Executors.newFixedThreadPool(numThreads);
                for (int i = 0; i < numThreads; i++)
                    executor.submit(worker);

                executor.shutdown();
                executor.awaitTermination(1, TimeUnit.MINUTES);
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.integration;

import kafka.utils.MockTime;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KafkaStreams.State;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.ValueJoiner;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.MockProcessorSupplier;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

import java.io.IOException;
import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import static java.util.Collections.singletonList;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.waitForApplicationState;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertNotNull;

/**
 * Integration tests for {@link GlobalKTable}: stream-globalTable inner and left joins,
 * queryable global state stores, and restoring an in-memory global store across restarts,
 * all against an embedded single-broker Kafka cluster.
 */
@Category({IntegrationTest.class})
public class GlobalKTableIntegrationTest {
    private static final int NUM_BROKERS = 1;

    public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS);

    @BeforeClass
    public static void startCluster() throws IOException {
        CLUSTER.start();
    }

    @AfterClass
    public static void closeCluster() {
        CLUSTER.stop();
    }

    private final MockTime mockTime = CLUSTER.time;
    // Join key: the stream record's Long value is used to look up the global table.
    private final KeyValueMapper<String, Long, Long> keyMapper = (key, value) -> value;
    private final ValueJoiner<Long, String, String> joiner = (value1, value2) -> value1 + "+" + value2;
    private final String globalStore = "globalStore";
    private StreamsBuilder builder;
    private Properties streamsConfiguration;
    private KafkaStreams kafkaStreams;
    private String globalTableTopic;
    private String streamTopic;
    private GlobalKTable<Long, String> globalTable;
    private KStream<String, Long> stream;
    private MockProcessorSupplier<String, String> supplier;

    @Rule
    public TestName testName = new TestName();

    @Before
    public void before() throws Exception {
        builder = new StreamsBuilder();
        createTopics();
        streamsConfiguration = new Properties();
        final String safeTestName = safeUniqueTestName(getClass(), testName);
        streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "app-" + safeTestName);
        streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
        // Disable caching and commit frequently so records become observable quickly.
        streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
        streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
        globalTable = builder.globalTable(globalTableTopic, Consumed.with(Serdes.Long(), Serdes.String()),
                                          Materialized.<Long, String, KeyValueStore<Bytes, byte[]>>as(globalStore)
                                                  .withKeySerde(Serdes.Long())
                                                  .withValueSerde(Serdes.String()));
        final Consumed<String, Long> stringLongConsumed = Consumed.with(Serdes.String(), Serdes.Long());
        stream = builder.stream(streamTopic, stringLongConsumed);
        supplier = new MockProcessorSupplier<>();
    }

    @After
    public void whenShuttingDown() throws Exception {
        if (kafkaStreams != null) {
            kafkaStreams.close();
        }
        IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
    }

    @Test
    public void shouldKStreamGlobalKTableLeftJoin() throws Exception {
        final KStream<String, String> streamTableJoin = stream.leftJoin(globalTable, keyMapper, joiner);
        streamTableJoin.process(supplier);
        produceInitialGlobalTableValues();
        startStreams();
        long firstTimestamp = mockTime.milliseconds();
        produceTopicValues(streamTopic);

        // Left join: key "e" (value 5) has no match in the initial global table -> "5+null".
        final Map<String, ValueAndTimestamp<String>> expected = new HashMap<>();
        expected.put("a", ValueAndTimestamp.make("1+A", firstTimestamp));
        expected.put("b", ValueAndTimestamp.make("2+B", firstTimestamp + 1L));
        expected.put("c", ValueAndTimestamp.make("3+C", firstTimestamp + 2L));
        expected.put("d", ValueAndTimestamp.make("4+D", firstTimestamp + 3L));
        expected.put("e", ValueAndTimestamp.make("5+null", firstTimestamp + 4L));

        TestUtils.waitForCondition(
            () -> {
                if (supplier.capturedProcessorsCount() < 2) {
                    return false;
                }
                // Two tasks -> two captured processors; merge their observed outputs.
                final Map<String, ValueAndTimestamp<String>> result = new HashMap<>();
                result.putAll(supplier.capturedProcessors(2).get(0).lastValueAndTimestampPerKey());
                result.putAll(supplier.capturedProcessors(2).get(1).lastValueAndTimestampPerKey());
                return result.equals(expected);
            },
            30000L,
            "waiting for initial values");

        firstTimestamp = mockTime.milliseconds();
        produceGlobalTableValues();

        final ReadOnlyKeyValueStore<Long, String> replicatedStore = IntegrationTestUtils
            .getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
        assertNotNull(replicatedStore);

        final Map<Long, String> expectedState = new HashMap<>();
        expectedState.put(1L, "F");
        expectedState.put(2L, "G");
        expectedState.put(3L, "H");
        expectedState.put(4L, "I");
        expectedState.put(5L, "J");

        final Map<Long, String> globalState = new HashMap<>();
        TestUtils.waitForCondition(
            () -> {
                globalState.clear();
                replicatedStore.all().forEachRemaining(pair -> globalState.put(pair.key, pair.value));
                return globalState.equals(expectedState);
            },
            30000,
            () -> "waiting for data in replicated store" +
                "\n  expected: " + expectedState +
                "\n  received: " + globalState);

        final ReadOnlyKeyValueStore<Long, ValueAndTimestamp<String>> replicatedStoreWithTimestamp = IntegrationTestUtils
            .getStore(globalStore, kafkaStreams, QueryableStoreTypes.timestampedKeyValueStore());
        assertNotNull(replicatedStoreWithTimestamp);
        assertThat(replicatedStoreWithTimestamp.get(5L), equalTo(ValueAndTimestamp.make("J", firstTimestamp + 4L)));

        firstTimestamp = mockTime.milliseconds();
        produceTopicValues(streamTopic);

        // After the global table update, every stream record (including "e") joins a value.
        expected.put("a", ValueAndTimestamp.make("1+F", firstTimestamp));
        expected.put("b", ValueAndTimestamp.make("2+G", firstTimestamp + 1L));
        expected.put("c", ValueAndTimestamp.make("3+H", firstTimestamp + 2L));
        expected.put("d", ValueAndTimestamp.make("4+I", firstTimestamp + 3L));
        expected.put("e", ValueAndTimestamp.make("5+J", firstTimestamp + 4L));

        TestUtils.waitForCondition(
            () -> {
                if (supplier.capturedProcessorsCount() < 2) {
                    return false;
                }
                final Map<String, ValueAndTimestamp<String>> result = new HashMap<>();
                result.putAll(supplier.capturedProcessors(2).get(0).lastValueAndTimestampPerKey());
                result.putAll(supplier.capturedProcessors(2).get(1).lastValueAndTimestampPerKey());
                return result.equals(expected);
            },
            30000L,
            "waiting for final values");
    }

    @Test
    public void shouldKStreamGlobalKTableJoin() throws Exception {
        final KStream<String, String> streamTableJoin = stream.join(globalTable, keyMapper, joiner);
        streamTableJoin.process(supplier);
        produceInitialGlobalTableValues();
        startStreams();
        long firstTimestamp = mockTime.milliseconds();
        produceTopicValues(streamTopic);

        // Inner join: key "e" (value 5) has no match in the initial global table and is dropped.
        final Map<String, ValueAndTimestamp<String>> expected = new HashMap<>();
        expected.put("a", ValueAndTimestamp.make("1+A", firstTimestamp));
        expected.put("b", ValueAndTimestamp.make("2+B", firstTimestamp + 1L));
        expected.put("c", ValueAndTimestamp.make("3+C", firstTimestamp + 2L));
        expected.put("d", ValueAndTimestamp.make("4+D", firstTimestamp + 3L));

        TestUtils.waitForCondition(
            () -> {
                if (supplier.capturedProcessorsCount() < 2) {
                    return false;
                }
                final Map<String, ValueAndTimestamp<String>> result = new HashMap<>();
                result.putAll(supplier.capturedProcessors(2).get(0).lastValueAndTimestampPerKey());
                result.putAll(supplier.capturedProcessors(2).get(1).lastValueAndTimestampPerKey());
                return result.equals(expected);
            },
            30000L,
            "waiting for initial values");

        firstTimestamp = mockTime.milliseconds();
        produceGlobalTableValues();

        final ReadOnlyKeyValueStore<Long, String> replicatedStore = IntegrationTestUtils
            .getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
        assertNotNull(replicatedStore);

        final Map<Long, String> expectedState = new HashMap<>();
        expectedState.put(1L, "F");
        expectedState.put(2L, "G");
        expectedState.put(3L, "H");
        expectedState.put(4L, "I");
        expectedState.put(5L, "J");

        final Map<Long, String> globalState = new HashMap<>();
        TestUtils.waitForCondition(
            () -> {
                globalState.clear();
                replicatedStore.all().forEachRemaining(pair -> globalState.put(pair.key, pair.value));
                return globalState.equals(expectedState);
            },
            30000,
            () -> "waiting for data in replicated store" +
                "\n  expected: " + expectedState +
                "\n  received: " + globalState);

        final ReadOnlyKeyValueStore<Long, ValueAndTimestamp<String>> replicatedStoreWithTimestamp = IntegrationTestUtils
            .getStore(globalStore, kafkaStreams, QueryableStoreTypes.timestampedKeyValueStore());
        assertNotNull(replicatedStoreWithTimestamp);
        assertThat(replicatedStoreWithTimestamp.get(5L), equalTo(ValueAndTimestamp.make("J", firstTimestamp + 4L)));

        firstTimestamp = mockTime.milliseconds();
        produceTopicValues(streamTopic);

        // After the global table gains key 5, "e" joins too.
        expected.put("a", ValueAndTimestamp.make("1+F", firstTimestamp));
        expected.put("b", ValueAndTimestamp.make("2+G", firstTimestamp + 1L));
        expected.put("c", ValueAndTimestamp.make("3+H", firstTimestamp + 2L));
        expected.put("d", ValueAndTimestamp.make("4+I", firstTimestamp + 3L));
        expected.put("e", ValueAndTimestamp.make("5+J", firstTimestamp + 4L));

        TestUtils.waitForCondition(
            () -> {
                if (supplier.capturedProcessorsCount() < 2) {
                    return false;
                }
                final Map<String, ValueAndTimestamp<String>> result = new HashMap<>();
                result.putAll(supplier.capturedProcessors(2).get(0).lastValueAndTimestampPerKey());
                result.putAll(supplier.capturedProcessors(2).get(1).lastValueAndTimestampPerKey());
                return result.equals(expected);
            },
            30000L,
            "waiting for final values");
    }

    @Test
    public void shouldRestoreGlobalInMemoryKTableOnRestart() throws Exception {
        builder = new StreamsBuilder();
        globalTable = builder.globalTable(
            globalTableTopic,
            Consumed.with(Serdes.Long(), Serdes.String()),
            Materialized.as(Stores.inMemoryKeyValueStore(globalStore)));
        produceInitialGlobalTableValues();

        startStreams();
        ReadOnlyKeyValueStore<Long, String> store = IntegrationTestUtils
            .getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
        assertNotNull(store);
        assertThat(store.approximateNumEntries(), equalTo(4L));
        ReadOnlyKeyValueStore<Long, ValueAndTimestamp<String>> timestampedStore = IntegrationTestUtils
            .getStore(globalStore, kafkaStreams, QueryableStoreTypes.timestampedKeyValueStore());
        assertNotNull(timestampedStore);
        assertThat(timestampedStore.approximateNumEntries(), equalTo(4L));
        kafkaStreams.close();

        // Restart: the in-memory store must be rebuilt from the topic on startup.
        startStreams();
        store = IntegrationTestUtils.getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
        assertThat(store.approximateNumEntries(), equalTo(4L));
        timestampedStore = IntegrationTestUtils.getStore(globalStore, kafkaStreams, QueryableStoreTypes.timestampedKeyValueStore());
        assertThat(timestampedStore.approximateNumEntries(), equalTo(4L));
    }

    @Test
    public void shouldGetToRunningWithOnlyGlobalTopology() throws Exception {
        builder = new StreamsBuilder();
        globalTable = builder.globalTable(
            globalTableTopic,
            Consumed.with(Serdes.Long(), Serdes.String()),
            Materialized.as(Stores.inMemoryKeyValueStore(globalStore)));
        startStreams();
        waitForApplicationState(singletonList(kafkaStreams), State.RUNNING, Duration.ofSeconds(30));
        kafkaStreams.close();
    }

    private void createTopics() throws Exception {
        final String safeTestName = safeUniqueTestName(getClass(), testName);
        streamTopic = "stream-" + safeTestName;
        globalTableTopic = "globalTable-" + safeTestName;
        CLUSTER.createTopics(streamTopic);
        // Two partitions so the global table topic is consumed across multiple partitions.
        CLUSTER.createTopic(globalTableTopic, 2, 1);
    }

    private void startStreams() {
        kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
        kafkaStreams.start();
    }

    private void produceTopicValues(final String topic) {
        IntegrationTestUtils.produceKeyValuesSynchronously(
                topic,
                Arrays.asList(
                        new KeyValue<>("a", 1L),
                        new KeyValue<>("b", 2L),
                        new KeyValue<>("c", 3L),
                        new KeyValue<>("d", 4L),
                        new KeyValue<>("e", 5L)),
                TestUtils.producerConfig(
                        CLUSTER.bootstrapServers(),
                        StringSerializer.class,
                        LongSerializer.class,
                        new Properties()),
                mockTime);
    }

    private void produceInitialGlobalTableValues() {
        IntegrationTestUtils.produceKeyValuesSynchronously(
                globalTableTopic,
                Arrays.asList(
                        new KeyValue<>(1L, "A"),
                        new KeyValue<>(2L, "B"),
                        new KeyValue<>(3L, "C"),
                        new KeyValue<>(4L, "D")
                        ),
                TestUtils.producerConfig(
                        CLUSTER.bootstrapServers(),
                        LongSerializer.class,
                        StringSerializer.class
                        ),
                mockTime);
    }

    private void produceGlobalTableValues() {
        IntegrationTestUtils.produceKeyValuesSynchronously(
                globalTableTopic,
                Arrays.asList(
                        new KeyValue<>(1L, "F"),
                        new KeyValue<>(2L, "G"),
                        new KeyValue<>(3L, "H"),
                        new KeyValue<>(4L, "I"),
                        new KeyValue<>(5L, "J")),
                TestUtils.producerConfig(
                        CLUSTER.bootstrapServers(),
                        LongSerializer.class,
                        StringSerializer.class,
                        new Properties()),
                mockTime);
    }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.apigateway.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request for retrieving the documentation parts of an API. The result may be filtered by the
 * type, name, or path of the targeted API entities.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetDocumentationPartsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** [Required] The identifier of the API of the to-be-retrieved documentation parts. */
    private String restApiId;

    /** The type of API entities of the to-be-retrieved documentation parts. */
    private String type;

    /** The name of API entities of the to-be-retrieved documentation parts. */
    private String nameQuery;

    /** The path of API entities of the to-be-retrieved documentation parts. */
    private String path;

    /** The current pagination position in the paged result set. */
    private String position;

    /** The maximum number of returned results per page. */
    private Integer limit;

    /** @param restApiId [Required] The identifier of the API of the to-be-retrieved documentation parts. */
    public void setRestApiId(String restApiId) {
        this.restApiId = restApiId;
    }

    /** @return [Required] The identifier of the API of the to-be-retrieved documentation parts. */
    public String getRestApiId() {
        return this.restApiId;
    }

    /**
     * Fluent setter for the REST API identifier.
     *
     * @param restApiId [Required] The identifier of the API of the to-be-retrieved documentation parts.
     * @return this request, for method chaining.
     */
    public GetDocumentationPartsRequest withRestApiId(String restApiId) {
        setRestApiId(restApiId);
        return this;
    }

    /**
     * @param type The type of API entities of the to-be-retrieved documentation parts.
     * @see DocumentationPartType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * @return The type of API entities of the to-be-retrieved documentation parts.
     * @see DocumentationPartType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Fluent setter for the entity type filter.
     *
     * @param type The type of API entities of the to-be-retrieved documentation parts.
     * @return this request, for method chaining.
     * @see DocumentationPartType
     */
    public GetDocumentationPartsRequest withType(String type) {
        setType(type);
        return this;
    }

    /**
     * Enum overload; stores the enum's string form.
     *
     * @param type The type of API entities of the to-be-retrieved documentation parts.
     * @see DocumentationPartType
     */
    public void setType(DocumentationPartType type) {
        this.type = type.toString();
    }

    /**
     * Fluent enum overload for the entity type filter.
     *
     * @param type The type of API entities of the to-be-retrieved documentation parts.
     * @return this request, for method chaining.
     * @see DocumentationPartType
     */
    public GetDocumentationPartsRequest withType(DocumentationPartType type) {
        setType(type);
        return this;
    }

    /** @param nameQuery The name of API entities of the to-be-retrieved documentation parts. */
    public void setNameQuery(String nameQuery) {
        this.nameQuery = nameQuery;
    }

    /** @return The name of API entities of the to-be-retrieved documentation parts. */
    public String getNameQuery() {
        return this.nameQuery;
    }

    /**
     * Fluent setter for the entity name filter.
     *
     * @param nameQuery The name of API entities of the to-be-retrieved documentation parts.
     * @return this request, for method chaining.
     */
    public GetDocumentationPartsRequest withNameQuery(String nameQuery) {
        setNameQuery(nameQuery);
        return this;
    }

    /** @param path The path of API entities of the to-be-retrieved documentation parts. */
    public void setPath(String path) {
        this.path = path;
    }

    /** @return The path of API entities of the to-be-retrieved documentation parts. */
    public String getPath() {
        return this.path;
    }

    /**
     * Fluent setter for the entity path filter.
     *
     * @param path The path of API entities of the to-be-retrieved documentation parts.
     * @return this request, for method chaining.
     */
    public GetDocumentationPartsRequest withPath(String path) {
        setPath(path);
        return this;
    }

    /** @param position The current pagination position in the paged result set. */
    public void setPosition(String position) {
        this.position = position;
    }

    /** @return The current pagination position in the paged result set. */
    public String getPosition() {
        return this.position;
    }

    /**
     * Fluent setter for the pagination position.
     *
     * @param position The current pagination position in the paged result set.
     * @return this request, for method chaining.
     */
    public GetDocumentationPartsRequest withPosition(String position) {
        setPosition(position);
        return this;
    }

    /** @param limit The maximum number of returned results per page. */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /** @return The maximum number of returned results per page. */
    public Integer getLimit() {
        return this.limit;
    }

    /**
     * Fluent setter for the page-size limit.
     *
     * @param limit The maximum number of returned results per page.
     * @return this request, for method chaining.
     */
    public GetDocumentationPartsRequest withLimit(Integer limit) {
        setLimit(limit);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * Only non-null fields are included; the format matches the generated SDK style.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("{");
        if (getRestApiId() != null) {
            buffer.append("RestApiId: ").append(getRestApiId()).append(",");
        }
        if (getType() != null) {
            buffer.append("Type: ").append(getType()).append(",");
        }
        if (getNameQuery() != null) {
            buffer.append("NameQuery: ").append(getNameQuery()).append(",");
        }
        if (getPath() != null) {
            buffer.append("Path: ").append(getPath()).append(",");
        }
        if (getPosition() != null) {
            buffer.append("Position: ").append(getPosition()).append(",");
        }
        if (getLimit() != null) {
            buffer.append("Limit: ").append(getLimit());
        }
        return buffer.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is null-safe, so no separate null check is needed.
        if (!(obj instanceof GetDocumentationPartsRequest)) {
            return false;
        }
        GetDocumentationPartsRequest that = (GetDocumentationPartsRequest) obj;
        // Accessors (rather than fields) are used so subclass overrides are respected,
        // matching the generated implementation.
        return java.util.Objects.equals(getRestApiId(), that.getRestApiId())
                && java.util.Objects.equals(getType(), that.getType())
                && java.util.Objects.equals(getNameQuery(), that.getNameQuery())
                && java.util.Objects.equals(getPath(), that.getPath())
                && java.util.Objects.equals(getPosition(), that.getPosition())
                && java.util.Objects.equals(getLimit(), that.getLimit());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0) as the
        // hand-rolled generated version, so the produced values are identical.
        return java.util.Objects.hash(getRestApiId(), getType(), getNameQuery(), getPath(), getPosition(), getLimit());
    }

    @Override
    public GetDocumentationPartsRequest clone() {
        return (GetDocumentationPartsRequest) super.clone();
    }

}
import static org.junit.Assert.*; import org.junit.Test; /** * A JUnit test case class. * Every method starting with the word "test" will be called when running * the test with JUnit. */ public class BSTNodeTest { // use the constructor -- this should work! @Test public void testConstructor_a(){ BSTNode tree1 = new BSTNode("a", "a-val"); assertEquals("[(a,a-val)[][]]", tree1.toString()); } // test remove_min - nonstatic @Test public void testRemoveMin_ab() { BSTNode tree1 = new BSTNode( "a", "a-val", BSTNode.emptyNode, new BSTNode( "b", "b-val", BSTNode.emptyNode, BSTNode.emptyNode ) ); BSTNode trimmedtree = tree1.remove_min(); assertEquals("[(b,b-val)[][]]", trimmedtree.toString()); } // static version of left @Test public void test_static_left(){ BSTNode tree1 = new BSTNode( "b", "b-val", new BSTNode( "a", "a-val", BSTNode.emptyNode, BSTNode.emptyNode ), new BSTNode( "c", "c-val", BSTNode.emptyNode, BSTNode.emptyNode ) ); BSTNode L = BSTNode.left(tree1); assertEquals("[(a,a-val)[][]]", L.toString()); } // nonstatic version of right @Test public void test_nonstatic_right(){ BSTNode tree1 = new BSTNode( "b", "b-val", new BSTNode( "a", "a-val", BSTNode.emptyNode, BSTNode.emptyNode ), new BSTNode( "c", "c-val", BSTNode.emptyNode, BSTNode.emptyNode ) ); BSTNode R = tree1.right(); assertEquals("[(c,c-val)[][]]", R.toString()); } // static version of min @Test public void test_static_min(){ BSTNode tree1 = new BSTNode( "b", "b-val", new BSTNode( "a", "a-val", BSTNode.emptyNode, BSTNode.emptyNode ), new BSTNode( "c", "c-val", BSTNode.emptyNode, BSTNode.emptyNode ) ); String minstr = BSTNode.min(tree1); assertEquals("a", minstr); } // nonstatic version of find @Test public void test_nonstatic_find(){ BSTNode tree1 = new BSTNode( "b", "b-val", new BSTNode( "a", "a-val", BSTNode.emptyNode, BSTNode.emptyNode ), new BSTNode( "c", "c-val", BSTNode.emptyNode, BSTNode.emptyNode ) ); Object c_val = tree1.find("c"); assertEquals("c-val", c_val); } // static version of remove_min 
@Test public void test_static_remove_min(){ BSTNode tree1 = new BSTNode( "b", "b-val", new BSTNode( "a", "a-val", BSTNode.emptyNode, BSTNode.emptyNode ), new BSTNode( "c", "c-val", BSTNode.emptyNode, BSTNode.emptyNode ) ); BSTNode tree2 = new BSTNode( "b", "b-val", BSTNode.emptyNode , new BSTNode( "c", "c-val", BSTNode.emptyNode, BSTNode.emptyNode ) ); BSTNode smaller = tree1.remove_min(); assertEquals(tree2, // expected smaller); // actual } // insert a node as a right branch from the root @Test public void testInsert_ab(){ BSTNode tree1 = new BSTNode("a", "a-val"); BSTNode tree1_orig = new BSTNode("a", "a-val"); BSTNode tree2 = tree1.insert("b", "b-val"); assertEquals("[(a,a-val)[][(b,b-val)[][]]]", tree2.toString()); assertEquals(tree1_orig, tree1); } // insert a node as a right branch from the root - nonstatic @Test public void testInsert_ab_nonstatic(){ BSTNode tree1 = new BSTNode("a", "a-val"); BSTNode tree1_orig = new BSTNode("a", "a-val"); BSTNode tree2 = BSTNode.insert("b", "b-val",tree1); assertEquals("[(a,a-val)[][(b,b-val)[][]]]", tree2.toString()); assertEquals(tree1_orig, tree1); } // insert a node as a right branch from a node that is not the root @Test public void testInsert_abc(){ BSTNode tree1 = new BSTNode("a", "a-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("c", "c-val"); assertEquals("[(a,a-val)[][(b,b-val)[][(c,c-val)[][]]]]", tree1.toString()); } // insert a node as a left branch from the root @Test public void testInsert_cb(){ BSTNode tree1 = new BSTNode("c", "c-val"); tree1 = tree1.insert("b", "b-val"); assertEquals("[(c,c-val)[(b,b-val)[][]][]]", tree1.toString()); } // insert a node as a left branch from a node that is not the root @Test public void testInsert_cba(){ BSTNode tree1 = new BSTNode("c", "c-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("a", "a-val"); assertEquals("[(c,c-val)[(b,b-val)[(a,a-val)[][]][]][]]", tree1.toString()); } // insert a node on both sides of the root @Test public void 
testInsert_bac(){ BSTNode tree1 = new BSTNode("b", "b-val"); tree1 = tree1.insert("a", "a-val"); tree1 = tree1.insert("c", "c-val"); assertEquals("[(b,b-val)[(a,a-val)[][]][(c,c-val)[][]]]", tree1.toString()); } // delete the root, which has one right child @Test public void testDelete_abc_a(){ BSTNode tree1 = new BSTNode("a", "a-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("c", "c-val"); tree1 = tree1.delete("a"); assertEquals("[(b,b-val)[][(c,c-val)[][]]]", tree1.toString()); } // delete a node not at the root with one right child @Test public void testDelete_abc_b(){ BSTNode tree1 = new BSTNode("a", "a-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("c", "c-val"); tree1 = tree1.delete("b"); assertEquals("[(a,a-val)[][(c,c-val)[][]]]", tree1.toString()); } // delete a right branch leaf @Test public void testDelete_abc_c(){ BSTNode tree1 = new BSTNode("a", "a-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("c", "c-val"); tree1 = tree1.delete("c"); assertEquals("[(a,a-val)[][(b,b-val)[][]]]", tree1.toString()); } // delete a left branch leaf @Test public void testDelete_cba_a(){ BSTNode tree1 = new BSTNode("c", "c-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("a", "a-val"); tree1 = tree1.delete("a"); assertEquals("[(c,c-val)[(b,b-val)[][]][]]", tree1.toString()); } // delete a node not at the root with one left child = static version @Test public void testDelete_cba_b_static(){ BSTNode tree1 = new BSTNode("c", "c-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("a", "a-val"); tree1 = BSTNode.delete("b",tree1); assertEquals("[(c,c-val)[(a,a-val)[][]][]]", tree1.toString()); } // delete the root, which has one left child @Test public void testDelete_cba_c(){ BSTNode tree1 = new BSTNode("c", "c-val"); tree1 = tree1.insert("b", "b-val"); tree1 = tree1.insert("a", "a-val"); tree1 = tree1.delete("c"); assertEquals("[(b,b-val)[(a,a-val)[][]][]]", tree1.toString()); } // delete a root with 
two children @Test public void testDelete_bac_b(){ BSTNode tree1 = new BSTNode("b", "b-val"); tree1 = tree1.insert("a", "a-val"); tree1 = tree1.insert("c", "c-val"); tree1 = tree1.delete("b"); assertEquals("[(c,c-val)[(a,a-val)[][]][]]", tree1.toString()); } // delete a root with two children and the replacement node is not a leaf @Test public void testDelete_bafcd_b(){ BSTNode tree1 = new BSTNode("b", "b-val"); tree1 = tree1.insert("a", "a-val"); tree1 = tree1.insert("f", "f-val"); tree1 = tree1.insert("c", "c-val"); tree1 = tree1.insert("d", "d-val"); tree1 = tree1.delete("b"); assertEquals("[(c,c-val)[(a,a-val)[][]][(f,f-val)[(d,d-val)[][]][]]]", tree1.toString()); } }
package com.p4square.ccbapi;

import com.p4square.ccbapi.exception.CCBErrorResponseException;
import com.p4square.ccbapi.model.*;
import com.p4square.ccbapi.serializer.FormBuilder;
import com.p4square.ccbapi.serializer.IndividualProfileSerializer;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * CCBAPIClient is an implementation of CCBAPI using the Apache HttpClient.
 *
 * This implementation is built against the API documentations found here:
 * https://designccb.s3.amazonaws.com/helpdesk/files/official_docs/api.html
 *
 * This client is thread-safe.
 */
public class CCBAPIClient implements CCBAPI {

    private static final Map<String, String> EMPTY_MAP = Collections.emptyMap();
    private static final IndividualProfileSerializer INDIVIDUAL_PROFILE_SERIALIZER = new IndividualProfileSerializer();

    private final URI apiBaseUri;
    private final HTTPInterface httpClient;
    private final CCBXmlBinder xmlBinder;

    /**
     * Create a new CCB API Client.
     *
     * @param church The church identifier used with CCB.
     * @param username The API username.
     * @param password The API password.
     * @throws URISyntaxException If the church parameter contains unsafe URI characters.
     */
    public CCBAPIClient(final String church, final String username, final String password) throws URISyntaxException {
        this(new URI("https://" + church + ".ccbchurch.com/api.php"), username, password);
    }

    /**
     * Create a new CCB API Client.
     *
     * @param apiUri The base URI to use when contacting CCB.
     * @param username The API username.
     * @param password The API password.
     */
    public CCBAPIClient(final URI apiUri, final String username, final String password) {
        this(apiUri, new ApacheHttpClientImpl(apiUri, username, password));
    }

    /**
     * A private constructor which allows for dependency injection.
     *
     * @param apiUri The base URI to use when contacting CCB.
     * @param httpClient The HTTP client used to send requests.
     */
    protected CCBAPIClient(final URI apiUri, final HTTPInterface httpClient) {
        this.apiBaseUri = apiUri;
        this.httpClient = httpClient;
        this.xmlBinder = new CCBXmlBinder();
    }

    @Override
    public void close() throws IOException {
        httpClient.close();
    }

    /**
     * Retrieve individual profiles. The CCB service used depends on which request
     * fields are set: id, login+password, routing+account number, or none (list all).
     */
    @Override
    public GetIndividualProfilesResponse getIndividualProfiles(GetIndividualProfilesRequest request)
            throws IOException {
        // Prepare the request.
        String serviceName;
        final Map<String, String> params = new HashMap<>();
        String form = null;
        if (request.getId() != 0) {
            // Use individual_profile_from_id (individual_id)
            serviceName = "individual_profile_from_id";
            params.put("individual_id", String.valueOf(request.getId()));

        } else if (request.getLogin() != null && request.getPassword() != null) {
            // Use individual_profile_from_login_password (login, password)
            serviceName = "individual_profile_from_login_password";
            FormBuilder loginform = new FormBuilder();
            loginform.appendField("login", request.getLogin());
            /*
               TODO: Don't convert password char[] to String.

               The whole purpose behind keeping the password in a char[] is so that it
               can be zeroed out in the heap when its no longer needed.

               Originally, Church Community Builder decided to send the user's password,
               among other sensitive fields, as a query parameter. Since the query string
               had to be a String, I converted the password to String here.

               CCB has since switched to POST. But there was no grace period to ease the
               transition. In the interest of fixing the site quickly, I'm leaving this
               TODO incomplete for now.
             */
            loginform.appendField("password", new String(request.getPassword()));
            form = loginform.build();

        } else if (request.getRoutingNumber() != null && request.getAccountNumber() != null) {
            // Use individual_profile_from_micr (account_number, routing_number)
            serviceName = "individual_profile_from_micr";
            params.put("routing_number", request.getRoutingNumber());
            params.put("account_number", request.getAccountNumber());

        } else {
            // Use individual_profiles
            serviceName = "individual_profiles";
            if (request.getModifiedSince() != null) {
                params.put("modified_since", request.getModifiedSince().toString());
            }
            if (request.getIncludeInactive() != null) {
                params.put("include_inactive", request.getIncludeInactive() ? "true" : "false");
            }
            if (request.getPage() != 0) {
                params.put("page", String.valueOf(request.getPage()));
            }
            if (request.getPerPage() != 0) {
                params.put("per_page", String.valueOf(request.getPerPage()));
            }
        }

        // Send the request and parse the response.
        return makeRequest(serviceName, params, form, GetIndividualProfilesResponse.class);
    }

    @Override
    public GetCustomFieldLabelsResponse getCustomFieldLabels() throws IOException {
        return makeRequest("custom_field_labels", EMPTY_MAP, null, GetCustomFieldLabelsResponse.class);
    }

    @Override
    public GetLookupTableResponse getLookupTable(final GetLookupTableRequest request) throws IOException {
        if (request.getType() == null) {
            throw new IllegalArgumentException("LookupTableType must not be null.");
        }
        // Each lookup table is exposed by CCB as its own "<type>_list" service.
        final String service = request.getType().getIdentifier() + "_list";
        return makeRequest(service, EMPTY_MAP, null, GetLookupTableResponse.class);
    }

    @Override
    public GetCampusListResponse getCampusList() throws IOException {
        return makeRequest("campus_list", EMPTY_MAP, null, GetCampusListResponse.class);
    }

    @Override
    public UpdateIndividualProfileResponse updateIndividualProfile(UpdateIndividualProfileRequest request)
            throws IOException {
        if (request.getIndividualId() == 0) {
            throw new IllegalArgumentException("individualId must be set on the request.");
        }
        final Map<String, String> params =
                Collections.singletonMap("individual_id", String.valueOf(request.getIndividualId()));
        final String form = INDIVIDUAL_PROFILE_SERIALIZER.encode(request);
        return makeRequest("update_individual", params, form, UpdateIndividualProfileResponse.class);
    }

    /**
     * Retrieve group profiles, either a single group by id or a paginated listing.
     */
    @Override
    public GetGroupProfilesResponse getGroupProfiles(GetGroupProfilesRequest request) throws IOException {
        // Prepare the request.
        String serviceName;
        final Map<String, String> params = new HashMap<>();
        if (request.getId() != 0) {
            // Use group_profile_from_id (id)
            serviceName = "group_profile_from_id";
            params.put("id", String.valueOf(request.getId()));

        } else {
            // Use group_profiles
            serviceName = "group_profiles";
            if (request.getModifiedSince() != null) {
                params.put("modified_since", request.getModifiedSince().toString());
            }
            if (request.getPage() != 0) {
                params.put("page", String.valueOf(request.getPage()));
            }
            if (request.getPerPage() != 0) {
                params.put("per_page", String.valueOf(request.getPerPage()));
            }
            if (request.getIncludeParticipants() != null) {
                params.put("include_participants", request.getIncludeParticipants() ? "true" : "false");
            }
        }

        // This option applies to all request types.
        if (request.getIncludeImageUrl() != null) {
            params.put("include_image_link", request.getIncludeImageUrl() ? "true" : "false");
        }

        // Send the request and parse the response.
        return makeRequest(serviceName, params, null, GetGroupProfilesResponse.class);
    }

    /**
     * Build the URI for a particular service call.
     *
     * @param service The CCB API service to call (i.e. the srv query parameter).
     * @param parameters A map of query parameters to include on the URI.
     * @return The apiBaseUri with the additional query parameters appended.
     */
    private URI makeURI(final String service, final Map<String, String> parameters) {
        try {
            StringBuilder queryStringBuilder = new StringBuilder();
            if (apiBaseUri.getQuery() != null) {
                queryStringBuilder.append(apiBaseUri.getQuery()).append("&");
            }
            queryStringBuilder.append("srv=").append(service);
            for (Map.Entry<String, String> entry: parameters.entrySet()) {
                queryStringBuilder.append("&").append(entry.getKey()).append("=").append(entry.getValue());
            }
            return new URI(apiBaseUri.getScheme(), apiBaseUri.getAuthority(), apiBaseUri.getPath(),
                    queryStringBuilder.toString(), apiBaseUri.getFragment());

        } catch (URISyntaxException e) {
            // This shouldn't happen, but needs to be caught regardless.
            throw new AssertionError("Could not construct API URI", e);
        }
    }

    /**
     * Send a request to CCB.
     *
     * @param api The CCB service name.
     * @param params The URL query params.
     * @param form The form body parameters, or null for no body.
     * @param clazz The response class.
     * @param <T> The type of response.
     * @return The response.
     * @throws IOException if an error occurs.
     * @throws CCBErrorResponseException if the response contains one or more errors.
     */
    private <T extends CCBAPIResponse> T makeRequest(final String api, final Map<String, String> params,
                                                     final String form, final Class<T> clazz) throws IOException {
        byte[] payload = null;
        if (form != null) {
            payload = form.getBytes(StandardCharsets.UTF_8);
        }

        // try-with-resources guarantees the response stream is closed even when
        // parsing fails, and handles a null stream safely (close is skipped).
        try (InputStream entity = httpClient.sendPostRequest(makeURI(api, params), payload)) {
            T response = xmlBinder.bindResponseXML(entity, clazz);
            if (response.getErrors() != null && !response.getErrors().isEmpty()) {
                throw new CCBErrorResponseException(response.getErrors());
            }
            return response;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.beust.android.translate; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; /** * <p> * Provides HTML and XML entity utilities. 
* </p> * * @see <a href="http://hotwired.lycos.com/webmonkey/reference/special_characters/">ISO Entities</a> * @see <a href="http://www.w3.org/TR/REC-html32#latin1">HTML 3.2 Character Entities for ISO Latin-1</a> * @see <a href="http://www.w3.org/TR/REC-html40/sgml/entities.html">HTML 4.0 Character entity references</a> * @see <a href="http://www.w3.org/TR/html401/charset.html#h-5.3">HTML 4.01 Character References</a> * @see <a href="http://www.w3.org/TR/html401/charset.html#code-position">HTML 4.01 Code positions</a> * * @author <a href="mailto:alex@purpletech.com">Alexander Day Chaffee</a> * @author <a href="mailto:ggregory@seagullsw.com">Gary Gregory</a> * @since 2.0 * @version $Id: Entities.java 636641 2008-03-13 06:11:30Z bayard $ */ class Entities { private static final String[][] BASIC_ARRAY = {{"quot", "34"}, // " - double-quote {"amp", "38"}, // & - ampersand {"lt", "60"}, // < - less-than {"gt", "62"}, // > - greater-than }; private static final String[][] APOS_ARRAY = {{"apos", "39"}, // XML apostrophe }; // package scoped for testing static final String[][] ISO8859_1_ARRAY = {{"nbsp", "160"}, // non-breaking space {"iexcl", "161"}, // inverted exclamation mark {"cent", "162"}, // cent sign {"pound", "163"}, // pound sign {"curren", "164"}, // currency sign {"yen", "165"}, // yen sign = yuan sign {"brvbar", "166"}, // broken bar = broken vertical bar {"sect", "167"}, // section sign {"uml", "168"}, // diaeresis = spacing diaeresis {"copy", "169"}, // - copyright sign {"ordf", "170"}, // feminine ordinal indicator {"laquo", "171"}, // left-pointing double angle quotation mark = left pointing guillemet {"not", "172"}, // not sign {"shy", "173"}, // soft hyphen = discretionary hyphen {"reg", "174"}, // - registered trademark sign {"macr", "175"}, // macron = spacing macron = overline = APL overbar {"deg", "176"}, // degree sign {"plusmn", "177"}, // plus-minus sign = plus-or-minus sign {"sup2", "178"}, // superscript two = superscript digit two = squared 
{"sup3", "179"}, // superscript three = superscript digit three = cubed {"acute", "180"}, // acute accent = spacing acute {"micro", "181"}, // micro sign {"para", "182"}, // pilcrow sign = paragraph sign {"middot", "183"}, // middle dot = Georgian comma = Greek middle dot {"cedil", "184"}, // cedilla = spacing cedilla {"sup1", "185"}, // superscript one = superscript digit one {"ordm", "186"}, // masculine ordinal indicator {"raquo", "187"}, // right-pointing double angle quotation mark = right pointing guillemet {"frac14", "188"}, // vulgar fraction one quarter = fraction one quarter {"frac12", "189"}, // vulgar fraction one half = fraction one half {"frac34", "190"}, // vulgar fraction three quarters = fraction three quarters {"iquest", "191"}, // inverted question mark = turned question mark {"Agrave", "192"}, // - uppercase A, grave accent {"Aacute", "193"}, // - uppercase A, acute accent {"Acirc", "194"}, // - uppercase A, circumflex accent {"Atilde", "195"}, // - uppercase A, tilde {"Auml", "196"}, // - uppercase A, umlaut {"Aring", "197"}, // - uppercase A, ring {"AElig", "198"}, // - uppercase AE {"Ccedil", "199"}, // - uppercase C, cedilla {"Egrave", "200"}, // - uppercase E, grave accent {"Eacute", "201"}, // - uppercase E, acute accent {"Ecirc", "202"}, // - uppercase E, circumflex accent {"Euml", "203"}, // - uppercase E, umlaut {"Igrave", "204"}, // - uppercase I, grave accent {"Iacute", "205"}, // - uppercase I, acute accent {"Icirc", "206"}, // - uppercase I, circumflex accent {"Iuml", "207"}, // - uppercase I, umlaut {"ETH", "208"}, // - uppercase Eth, Icelandic {"Ntilde", "209"}, // - uppercase N, tilde {"Ograve", "210"}, // - uppercase O, grave accent {"Oacute", "211"}, // - uppercase O, acute accent {"Ocirc", "212"}, // - uppercase O, circumflex accent {"Otilde", "213"}, // - uppercase O, tilde {"Ouml", "214"}, // - uppercase O, umlaut {"times", "215"}, // multiplication sign {"Oslash", "216"}, // - uppercase O, slash {"Ugrave", "217"}, // - 
uppercase U, grave accent {"Uacute", "218"}, // - uppercase U, acute accent {"Ucirc", "219"}, // - uppercase U, circumflex accent {"Uuml", "220"}, // - uppercase U, umlaut {"Yacute", "221"}, // - uppercase Y, acute accent {"THORN", "222"}, // - uppercase THORN, Icelandic {"szlig", "223"}, // - lowercase sharps, German {"agrave", "224"}, // - lowercase a, grave accent {"aacute", "225"}, // - lowercase a, acute accent {"acirc", "226"}, // - lowercase a, circumflex accent {"atilde", "227"}, // - lowercase a, tilde {"auml", "228"}, // - lowercase a, umlaut {"aring", "229"}, // - lowercase a, ring {"aelig", "230"}, // - lowercase ae {"ccedil", "231"}, // - lowercase c, cedilla {"egrave", "232"}, // - lowercase e, grave accent {"eacute", "233"}, // - lowercase e, acute accent {"ecirc", "234"}, // - lowercase e, circumflex accent {"euml", "235"}, // - lowercase e, umlaut {"igrave", "236"}, // - lowercase i, grave accent {"iacute", "237"}, // - lowercase i, acute accent {"icirc", "238"}, // - lowercase i, circumflex accent {"iuml", "239"}, // - lowercase i, umlaut {"eth", "240"}, // - lowercase eth, Icelandic {"ntilde", "241"}, // - lowercase n, tilde {"ograve", "242"}, // - lowercase o, grave accent {"oacute", "243"}, // - lowercase o, acute accent {"ocirc", "244"}, // - lowercase o, circumflex accent {"otilde", "245"}, // - lowercase o, tilde {"ouml", "246"}, // - lowercase o, umlaut {"divide", "247"}, // division sign {"oslash", "248"}, // - lowercase o, slash {"ugrave", "249"}, // - lowercase u, grave accent {"uacute", "250"}, // - lowercase u, acute accent {"ucirc", "251"}, // - lowercase u, circumflex accent {"uuml", "252"}, // - lowercase u, umlaut {"yacute", "253"}, // - lowercase y, acute accent {"thorn", "254"}, // - lowercase thorn, Icelandic {"yuml", "255"}, // - lowercase y, umlaut }; // http://www.w3.org/TR/REC-html40/sgml/entities.html // package scoped for testing static final String[][] HTML40_ARRAY = { // <!-- Latin Extended-B --> {"fnof", "402"}, // 
latin small f with hook = function= florin, U+0192 ISOtech --> // <!-- Greek --> {"Alpha", "913"}, // greek capital letter alpha, U+0391 --> {"Beta", "914"}, // greek capital letter beta, U+0392 --> {"Gamma", "915"}, // greek capital letter gamma,U+0393 ISOgrk3 --> {"Delta", "916"}, // greek capital letter delta,U+0394 ISOgrk3 --> {"Epsilon", "917"}, // greek capital letter epsilon, U+0395 --> {"Zeta", "918"}, // greek capital letter zeta, U+0396 --> {"Eta", "919"}, // greek capital letter eta, U+0397 --> {"Theta", "920"}, // greek capital letter theta,U+0398 ISOgrk3 --> {"Iota", "921"}, // greek capital letter iota, U+0399 --> {"Kappa", "922"}, // greek capital letter kappa, U+039A --> {"Lambda", "923"}, // greek capital letter lambda,U+039B ISOgrk3 --> {"Mu", "924"}, // greek capital letter mu, U+039C --> {"Nu", "925"}, // greek capital letter nu, U+039D --> {"Xi", "926"}, // greek capital letter xi, U+039E ISOgrk3 --> {"Omicron", "927"}, // greek capital letter omicron, U+039F --> {"Pi", "928"}, // greek capital letter pi, U+03A0 ISOgrk3 --> {"Rho", "929"}, // greek capital letter rho, U+03A1 --> // <!-- there is no Sigmaf, and no U+03A2 character either --> {"Sigma", "931"}, // greek capital letter sigma,U+03A3 ISOgrk3 --> {"Tau", "932"}, // greek capital letter tau, U+03A4 --> {"Upsilon", "933"}, // greek capital letter upsilon,U+03A5 ISOgrk3 --> {"Phi", "934"}, // greek capital letter phi,U+03A6 ISOgrk3 --> {"Chi", "935"}, // greek capital letter chi, U+03A7 --> {"Psi", "936"}, // greek capital letter psi,U+03A8 ISOgrk3 --> {"Omega", "937"}, // greek capital letter omega,U+03A9 ISOgrk3 --> {"alpha", "945"}, // greek small letter alpha,U+03B1 ISOgrk3 --> {"beta", "946"}, // greek small letter beta, U+03B2 ISOgrk3 --> {"gamma", "947"}, // greek small letter gamma,U+03B3 ISOgrk3 --> {"delta", "948"}, // greek small letter delta,U+03B4 ISOgrk3 --> {"epsilon", "949"}, // greek small letter epsilon,U+03B5 ISOgrk3 --> {"zeta", "950"}, // greek small letter zeta, 
U+03B6 ISOgrk3 --> {"eta", "951"}, // greek small letter eta, U+03B7 ISOgrk3 --> {"theta", "952"}, // greek small letter theta,U+03B8 ISOgrk3 --> {"iota", "953"}, // greek small letter iota, U+03B9 ISOgrk3 --> {"kappa", "954"}, // greek small letter kappa,U+03BA ISOgrk3 --> {"lambda", "955"}, // greek small letter lambda,U+03BB ISOgrk3 --> {"mu", "956"}, // greek small letter mu, U+03BC ISOgrk3 --> {"nu", "957"}, // greek small letter nu, U+03BD ISOgrk3 --> {"xi", "958"}, // greek small letter xi, U+03BE ISOgrk3 --> {"omicron", "959"}, // greek small letter omicron, U+03BF NEW --> {"pi", "960"}, // greek small letter pi, U+03C0 ISOgrk3 --> {"rho", "961"}, // greek small letter rho, U+03C1 ISOgrk3 --> {"sigmaf", "962"}, // greek small letter final sigma,U+03C2 ISOgrk3 --> {"sigma", "963"}, // greek small letter sigma,U+03C3 ISOgrk3 --> {"tau", "964"}, // greek small letter tau, U+03C4 ISOgrk3 --> {"upsilon", "965"}, // greek small letter upsilon,U+03C5 ISOgrk3 --> {"phi", "966"}, // greek small letter phi, U+03C6 ISOgrk3 --> {"chi", "967"}, // greek small letter chi, U+03C7 ISOgrk3 --> {"psi", "968"}, // greek small letter psi, U+03C8 ISOgrk3 --> {"omega", "969"}, // greek small letter omega,U+03C9 ISOgrk3 --> {"thetasym", "977"}, // greek small letter theta symbol,U+03D1 NEW --> {"upsih", "978"}, // greek upsilon with hook symbol,U+03D2 NEW --> {"piv", "982"}, // greek pi symbol, U+03D6 ISOgrk3 --> // <!-- General Punctuation --> {"bull", "8226"}, // bullet = black small circle,U+2022 ISOpub --> // <!-- bullet is NOT the same as bullet operator, U+2219 --> {"hellip", "8230"}, // horizontal ellipsis = three dot leader,U+2026 ISOpub --> {"prime", "8242"}, // prime = minutes = feet, U+2032 ISOtech --> {"Prime", "8243"}, // double prime = seconds = inches,U+2033 ISOtech --> {"oline", "8254"}, // overline = spacing overscore,U+203E NEW --> {"frasl", "8260"}, // fraction slash, U+2044 NEW --> // <!-- Letterlike Symbols --> {"weierp", "8472"}, // script capital P = power 
set= Weierstrass p, U+2118 ISOamso --> {"image", "8465"}, // blackletter capital I = imaginary part,U+2111 ISOamso --> {"real", "8476"}, // blackletter capital R = real part symbol,U+211C ISOamso --> {"trade", "8482"}, // trade mark sign, U+2122 ISOnum --> {"alefsym", "8501"}, // alef symbol = first transfinite cardinal,U+2135 NEW --> // <!-- alef symbol is NOT the same as hebrew letter alef,U+05D0 although the // same glyph could be used to depict both characters --> // <!-- Arrows --> {"larr", "8592"}, // leftwards arrow, U+2190 ISOnum --> {"uarr", "8593"}, // upwards arrow, U+2191 ISOnum--> {"rarr", "8594"}, // rightwards arrow, U+2192 ISOnum --> {"darr", "8595"}, // downwards arrow, U+2193 ISOnum --> {"harr", "8596"}, // left right arrow, U+2194 ISOamsa --> {"crarr", "8629"}, // downwards arrow with corner leftwards= carriage return, U+21B5 NEW --> {"lArr", "8656"}, // leftwards double arrow, U+21D0 ISOtech --> // <!-- ISO 10646 does not say that lArr is the same as the 'is implied by' // arrow but also does not have any other character for that function. // So ? 
lArr canbe used for 'is implied by' as ISOtech suggests --> {"uArr", "8657"}, // upwards double arrow, U+21D1 ISOamsa --> {"rArr", "8658"}, // rightwards double arrow,U+21D2 ISOtech --> // <!-- ISO 10646 does not say this is the 'implies' character but does not // have another character with this function so ?rArr can be used for // 'implies' as ISOtech suggests --> {"dArr", "8659"}, // downwards double arrow, U+21D3 ISOamsa --> {"hArr", "8660"}, // left right double arrow,U+21D4 ISOamsa --> // <!-- Mathematical Operators --> {"forall", "8704"}, // for all, U+2200 ISOtech --> {"part", "8706"}, // partial differential, U+2202 ISOtech --> {"exist", "8707"}, // there exists, U+2203 ISOtech --> {"empty", "8709"}, // empty set = null set = diameter,U+2205 ISOamso --> {"nabla", "8711"}, // nabla = backward difference,U+2207 ISOtech --> {"isin", "8712"}, // element of, U+2208 ISOtech --> {"notin", "8713"}, // not an element of, U+2209 ISOtech --> {"ni", "8715"}, // contains as member, U+220B ISOtech --> // <!-- should there be a more memorable name than 'ni'? 
--> {"prod", "8719"}, // n-ary product = product sign,U+220F ISOamsb --> // <!-- prod is NOT the same character as U+03A0 'greek capital letter pi' // though the same glyph might be used for both --> {"sum", "8721"}, // n-ary summation, U+2211 ISOamsb --> // <!-- sum is NOT the same character as U+03A3 'greek capital letter sigma' // though the same glyph might be used for both --> {"minus", "8722"}, // minus sign, U+2212 ISOtech --> {"lowast", "8727"}, // asterisk operator, U+2217 ISOtech --> {"radic", "8730"}, // square root = radical sign,U+221A ISOtech --> {"prop", "8733"}, // proportional to, U+221D ISOtech --> {"infin", "8734"}, // infinity, U+221E ISOtech --> {"ang", "8736"}, // angle, U+2220 ISOamso --> {"and", "8743"}, // logical and = wedge, U+2227 ISOtech --> {"or", "8744"}, // logical or = vee, U+2228 ISOtech --> {"cap", "8745"}, // intersection = cap, U+2229 ISOtech --> {"cup", "8746"}, // union = cup, U+222A ISOtech --> {"int", "8747"}, // integral, U+222B ISOtech --> {"there4", "8756"}, // therefore, U+2234 ISOtech --> {"sim", "8764"}, // tilde operator = varies with = similar to,U+223C ISOtech --> // <!-- tilde operator is NOT the same character as the tilde, U+007E,although // the same glyph might be used to represent both --> {"cong", "8773"}, // approximately equal to, U+2245 ISOtech --> {"asymp", "8776"}, // almost equal to = asymptotic to,U+2248 ISOamsr --> {"ne", "8800"}, // not equal to, U+2260 ISOtech --> {"equiv", "8801"}, // identical to, U+2261 ISOtech --> {"le", "8804"}, // less-than or equal to, U+2264 ISOtech --> {"ge", "8805"}, // greater-than or equal to,U+2265 ISOtech --> {"sub", "8834"}, // subset of, U+2282 ISOtech --> {"sup", "8835"}, // superset of, U+2283 ISOtech --> // <!-- note that nsup, 'not a superset of, U+2283' is not covered by the // Symbol font encoding and is not included. Should it be, for symmetry? 
// It is in ISOamsn --> <!ENTITY nsub", "8836"}, // not a subset of, U+2284 ISOamsn --> {"sube", "8838"}, // subset of or equal to, U+2286 ISOtech --> {"supe", "8839"}, // superset of or equal to,U+2287 ISOtech --> {"oplus", "8853"}, // circled plus = direct sum,U+2295 ISOamsb --> {"otimes", "8855"}, // circled times = vector product,U+2297 ISOamsb --> {"perp", "8869"}, // up tack = orthogonal to = perpendicular,U+22A5 ISOtech --> {"sdot", "8901"}, // dot operator, U+22C5 ISOamsb --> // <!-- dot operator is NOT the same character as U+00B7 middle dot --> // <!-- Miscellaneous Technical --> {"lceil", "8968"}, // left ceiling = apl upstile,U+2308 ISOamsc --> {"rceil", "8969"}, // right ceiling, U+2309 ISOamsc --> {"lfloor", "8970"}, // left floor = apl downstile,U+230A ISOamsc --> {"rfloor", "8971"}, // right floor, U+230B ISOamsc --> {"lang", "9001"}, // left-pointing angle bracket = bra,U+2329 ISOtech --> // <!-- lang is NOT the same character as U+003C 'less than' or U+2039 'single left-pointing angle quotation // mark' --> {"rang", "9002"}, // right-pointing angle bracket = ket,U+232A ISOtech --> // <!-- rang is NOT the same character as U+003E 'greater than' or U+203A // 'single right-pointing angle quotation mark' --> // <!-- Geometric Shapes --> {"loz", "9674"}, // lozenge, U+25CA ISOpub --> // <!-- Miscellaneous Symbols --> {"spades", "9824"}, // black spade suit, U+2660 ISOpub --> // <!-- black here seems to mean filled as opposed to hollow --> {"clubs", "9827"}, // black club suit = shamrock,U+2663 ISOpub --> {"hearts", "9829"}, // black heart suit = valentine,U+2665 ISOpub --> {"diams", "9830"}, // black diamond suit, U+2666 ISOpub --> // <!-- Latin Extended-A --> {"OElig", "338"}, // -- latin capital ligature OE,U+0152 ISOlat2 --> {"oelig", "339"}, // -- latin small ligature oe, U+0153 ISOlat2 --> // <!-- ligature is a misnomer, this is a separate character in some languages --> {"Scaron", "352"}, // -- latin capital letter S with caron,U+0160 ISOlat2 --> 
{"scaron", "353"}, // -- latin small letter s with caron,U+0161 ISOlat2 --> {"Yuml", "376"}, // -- latin capital letter Y with diaeresis,U+0178 ISOlat2 --> // <!-- Spacing Modifier Letters --> {"circ", "710"}, // -- modifier letter circumflex accent,U+02C6 ISOpub --> {"tilde", "732"}, // small tilde, U+02DC ISOdia --> // <!-- General Punctuation --> {"ensp", "8194"}, // en space, U+2002 ISOpub --> {"emsp", "8195"}, // em space, U+2003 ISOpub --> {"thinsp", "8201"}, // thin space, U+2009 ISOpub --> {"zwnj", "8204"}, // zero width non-joiner,U+200C NEW RFC 2070 --> {"zwj", "8205"}, // zero width joiner, U+200D NEW RFC 2070 --> {"lrm", "8206"}, // left-to-right mark, U+200E NEW RFC 2070 --> {"rlm", "8207"}, // right-to-left mark, U+200F NEW RFC 2070 --> {"ndash", "8211"}, // en dash, U+2013 ISOpub --> {"mdash", "8212"}, // em dash, U+2014 ISOpub --> {"lsquo", "8216"}, // left single quotation mark,U+2018 ISOnum --> {"rsquo", "8217"}, // right single quotation mark,U+2019 ISOnum --> {"sbquo", "8218"}, // single low-9 quotation mark, U+201A NEW --> {"ldquo", "8220"}, // left double quotation mark,U+201C ISOnum --> {"rdquo", "8221"}, // right double quotation mark,U+201D ISOnum --> {"bdquo", "8222"}, // double low-9 quotation mark, U+201E NEW --> {"dagger", "8224"}, // dagger, U+2020 ISOpub --> {"Dagger", "8225"}, // double dagger, U+2021 ISOpub --> {"permil", "8240"}, // per mille sign, U+2030 ISOtech --> {"lsaquo", "8249"}, // single left-pointing angle quotation mark,U+2039 ISO proposed --> // <!-- lsaquo is proposed but not yet ISO standardized --> {"rsaquo", "8250"}, // single right-pointing angle quotation mark,U+203A ISO proposed --> // <!-- rsaquo is proposed but not yet ISO standardized --> {"euro", "8364"}, // -- euro sign, U+20AC NEW --> }; /** * <p> * The set of entities supported by standard XML. * </p> */ public static final Entities XML; /** * <p> * The set of entities supported by HTML 3.2. 
* </p> */ public static final Entities HTML32; /** * <p> * The set of entities supported by HTML 4.0. * </p> */ public static final Entities HTML40; static { XML = new Entities(); XML.addEntities(BASIC_ARRAY); XML.addEntities(APOS_ARRAY); } static { HTML32 = new Entities(); HTML32.addEntities(BASIC_ARRAY); HTML32.addEntities(ISO8859_1_ARRAY); } static { HTML40 = new Entities(); fillWithHtml40Entities(HTML40); } /** * <p> * Fills the specified entities instance with HTML 40 entities. * </p> * * @param entities * the instance to be filled. */ static void fillWithHtml40Entities(Entities entities) { entities.addEntities(BASIC_ARRAY); entities.addEntities(ISO8859_1_ARRAY); entities.addEntities(HTML40_ARRAY); } static interface EntityMap { /** * <p> * Add an entry to this entity map. * </p> * * @param name * the entity name * @param value * the entity value */ void add(String name, int value); /** * <p> * Returns the name of the entity identified by the specified value. * </p> * * @param value * the value to locate * @return entity name associated with the specified value */ String name(int value); /** * <p> * Returns the value of the entity identified by the specified name. 
* </p> * * @param name * the name to locate * @return entity value associated with the specified name */ int value(String name); } static class PrimitiveEntityMap implements EntityMap { private Map mapNameToValue = new HashMap(); private Map<Integer, Object> mapValueToName = Maps.newHashMap(); /** * {@inheritDoc} */ public void add(String name, int value) { mapNameToValue.put(name, new Integer(value)); mapValueToName.put(value, name); } /** * {@inheritDoc} */ public String name(int value) { return (String) mapValueToName.get(value); } /** * {@inheritDoc} */ public int value(String name) { Object value = mapNameToValue.get(name); if (value == null) { return -1; } return ((Integer) value).intValue(); } } static abstract class MapIntMap implements Entities.EntityMap { protected Map mapNameToValue; protected Map mapValueToName; /** * {@inheritDoc} */ public void add(String name, int value) { mapNameToValue.put(name, new Integer(value)); mapValueToName.put(new Integer(value), name); } /** * {@inheritDoc} */ public String name(int value) { return (String) mapValueToName.get(new Integer(value)); } /** * {@inheritDoc} */ public int value(String name) { Object value = mapNameToValue.get(name); if (value == null) { return -1; } return ((Integer) value).intValue(); } } static class HashEntityMap extends MapIntMap { /** * Constructs a new instance of <code>HashEntityMap</code>. */ public HashEntityMap() { mapNameToValue = new HashMap(); mapValueToName = new HashMap(); } } static class TreeEntityMap extends MapIntMap { /** * Constructs a new instance of <code>TreeEntityMap</code>. 
*/ public TreeEntityMap() { mapNameToValue = new TreeMap(); mapValueToName = new TreeMap(); } } static class LookupEntityMap extends PrimitiveEntityMap { private String[] lookupTable; private int LOOKUP_TABLE_SIZE = 256; /** * {@inheritDoc} */ public String name(int value) { if (value < LOOKUP_TABLE_SIZE) { return lookupTable()[value]; } return super.name(value); } /** * <p> * Returns the lookup table for this entity map. The lookup table is created if it has not been previously. * </p> * * @return the lookup table */ private String[] lookupTable() { if (lookupTable == null) { createLookupTable(); } return lookupTable; } /** * <p> * Creates an entity lookup table of LOOKUP_TABLE_SIZE elements, initialized with entity names. * </p> */ private void createLookupTable() { lookupTable = new String[LOOKUP_TABLE_SIZE]; for (int i = 0; i < LOOKUP_TABLE_SIZE; ++i) { lookupTable[i] = super.name(i); } } } static class ArrayEntityMap implements EntityMap { protected int growBy = 100; protected int size = 0; protected String[] names; protected int[] values; /** * Constructs a new instance of <code>ArrayEntityMap</code>. */ public ArrayEntityMap() { names = new String[growBy]; values = new int[growBy]; } /** * Constructs a new instance of <code>ArrayEntityMap</code> specifying the size by which the array should * grow. * * @param growBy * array will be initialized to and will grow by this amount */ public ArrayEntityMap(int growBy) { this.growBy = growBy; names = new String[growBy]; values = new int[growBy]; } /** * {@inheritDoc} */ public void add(String name, int value) { ensureCapacity(size + 1); names[size] = name; values[size] = value; size++; } /** * Verifies the capacity of the entity array, adjusting the size if necessary. 
* * @param capacity * size the array should be */ protected void ensureCapacity(int capacity) { if (capacity > names.length) { int newSize = Math.max(capacity, size + growBy); String[] newNames = new String[newSize]; System.arraycopy(names, 0, newNames, 0, size); names = newNames; int[] newValues = new int[newSize]; System.arraycopy(values, 0, newValues, 0, size); values = newValues; } } /** * {@inheritDoc} */ public String name(int value) { for (int i = 0; i < size; ++i) { if (values[i] == value) { return names[i]; } } return null; } /** * {@inheritDoc} */ public int value(String name) { for (int i = 0; i < size; ++i) { if (names[i].equals(name)) { return values[i]; } } return -1; } } static class BinaryEntityMap extends ArrayEntityMap { /** * Constructs a new instance of <code>BinaryEntityMap</code>. */ public BinaryEntityMap() { super(); } /** * Constructs a new instance of <code>ArrayEntityMap</code> specifying the size by which the underlying array * should grow. * * @param growBy * array will be initialized to and will grow by this amount */ public BinaryEntityMap(int growBy) { super(growBy); } /** * Performs a binary search of the entity array for the specified key. This method is based on code in * {@link java.util.Arrays}. * * @param key * the key to be found * @return the index of the entity array matching the specified key */ private int binarySearch(int key) { int low = 0; int high = size - 1; while (low <= high) { int mid = (low + high) >>> 1; int midVal = values[mid]; if (midVal < key) { low = mid + 1; } else if (midVal > key) { high = mid - 1; } else { return mid; // key found } } return -(low + 1); // key not found. 
} /** * {@inheritDoc} */ public void add(String name, int value) { ensureCapacity(size + 1); int insertAt = binarySearch(value); if (insertAt > 0) { return; // note: this means you can't insert the same value twice } insertAt = -(insertAt + 1); // binarySearch returns it negative and off-by-one System.arraycopy(values, insertAt, values, insertAt + 1, size - insertAt); values[insertAt] = value; System.arraycopy(names, insertAt, names, insertAt + 1, size - insertAt); names[insertAt] = name; size++; } /** * {@inheritDoc} */ public String name(int value) { int index = binarySearch(value); if (index < 0) { return null; } return names[index]; } } // package scoped for testing EntityMap map = new Entities.LookupEntityMap(); /** * <p> * Adds entities to this entity. * </p> * * @param entityArray * array of entities to be added */ public void addEntities(String[][] entityArray) { for (int i = 0; i < entityArray.length; ++i) { addEntity(entityArray[i][0], Integer.parseInt(entityArray[i][1])); } } /** * <p> * Add an entity to this entity. * </p> * * @param name * name of the entity * @param value * vale of the entity */ public void addEntity(String name, int value) { map.add(name, value); } /** * <p> * Returns the name of the entity identified by the specified value. * </p> * * @param value * the value to locate * @return entity name associated with the specified value */ public String entityName(int value) { return map.name(value); } /** * <p> * Returns the value of the entity identified by the specified name. * </p> * * @param name * the name to locate * @return entity value associated with the specified name */ public int entityValue(String name) { return map.value(name); } /** * <p> * Escapes the characters in a <code>String</code>. * </p> * * <p> * For example, if you have called addEntity(&quot;foo&quot;, 0xA1), escape(&quot;\u00A1&quot;) will return * &quot;&amp;foo;&quot; * </p> * * @param str * The <code>String</code> to escape. 
* @return A new escaped <code>String</code>. */ public String escape(String str) { StringWriter stringWriter = createStringWriter(str); try { this.escape(stringWriter, str); } catch (IOException e) { // This should never happen because ALL the StringWriter methods called by #escape(Writer, String) do not // throw IOExceptions. throw new RuntimeException(e); } return stringWriter.toString(); } /** * <p> * Escapes the characters in the <code>String</code> passed and writes the result to the <code>Writer</code> * passed. * </p> * * @param writer * The <code>Writer</code> to write the results of the escaping to. Assumed to be a non-null value. * @param str * The <code>String</code> to escape. Assumed to be a non-null value. * @throws IOException * when <code>Writer</code> passed throws the exception from calls to the {@link Writer#write(int)} * methods. * * @see #escape(String) * @see Writer */ public void escape(Writer writer, String str) throws IOException { int len = str.length(); for (int i = 0; i < len; i++) { char c = str.charAt(i); String entityName = this.entityName(c); if (entityName == null) { if (c > 0x7F) { writer.write("&#"); writer.write(Integer.toString(c, 10)); writer.write(';'); } else { writer.write(c); } } else { writer.write('&'); writer.write(entityName); writer.write(';'); } } } /** * <p> * Unescapes the entities in a <code>String</code>. * </p> * * <p> * For example, if you have called addEntity(&quot;foo&quot;, 0xA1), unescape(&quot;&amp;foo;&quot;) will return * &quot;\u00A1&quot; * </p> * * @param str * The <code>String</code> to escape. * @return A new escaped <code>String</code>. 
*/ public String unescape(String str) { int firstAmp = str.indexOf('&'); if (firstAmp < 0) { return str; } else { StringWriter stringWriter = createStringWriter(str); try { this.doUnescape(stringWriter, str, firstAmp); } catch (IOException e) { // This should never happen because ALL the StringWriter methods called by #escape(Writer, String) // do not throw IOExceptions. throw new RuntimeException(e); } return stringWriter.toString(); } } /** * Make the StringWriter 10% larger than the source String to avoid growing the writer * * @param str The source string * @return A newly created StringWriter */ private StringWriter createStringWriter(String str) { return new StringWriter((int) (str.length() + (str.length() * 0.1))); } /** * <p> * Unescapes the escaped entities in the <code>String</code> passed and writes the result to the * <code>Writer</code> passed. * </p> * * @param writer * The <code>Writer</code> to write the results to; assumed to be non-null. * @param str * The source <code>String</code> to unescape; assumed to be non-null. * @throws IOException * when <code>Writer</code> passed throws the exception from calls to the {@link Writer#write(int)} * methods. * * @see #escape(String) * @see Writer */ public void unescape(Writer writer, String str) throws IOException { int firstAmp = str.indexOf('&'); if (firstAmp < 0) { writer.write(str); return; } else { doUnescape(writer, str, firstAmp); } } /** * Underlying unescape method that allows the optimisation of not starting from the 0 index again. * * @param writer * The <code>Writer</code> to write the results to; assumed to be non-null. * @param str * The source <code>String</code> to unescape; assumed to be non-null. * @param firstAmp * The <code>int</code> index of the first ampersand in the source String. * @throws IOException * when <code>Writer</code> passed throws the exception from calls to the {@link Writer#write(int)} * methods. 
*/
    private void doUnescape(Writer writer, String str, int firstAmp) throws IOException {
        // Everything before the first ampersand is guaranteed entity-free.
        writer.write(str, 0, firstAmp);
        int len = str.length();
        for (int i = firstAmp; i < len; i++) {
            char c = str.charAt(i);
            if (c != '&') {
                writer.write(c);
                continue;
            }
            int entityStart = i + 1;
            int semiIdx = str.indexOf(';', entityStart);
            if (semiIdx == -1) {
                // No terminating semicolon: a bare ampersand, emit it verbatim.
                writer.write(c);
                continue;
            }
            int nextAmpIdx = str.indexOf('&', entityStart);
            if (nextAmpIdx != -1 && nextAmpIdx < semiIdx) {
                // The text looks like "&...&...;" -- this '&' cannot open an entity.
                writer.write(c);
                continue;
            }
            String entityContent = str.substring(entityStart, semiIdx);
            int entityValue = -1;
            int contentLen = entityContent.length();
            if (contentLen > 0) {
                if (entityContent.charAt(0) == '#') {
                    // Numeric character reference, decimal ("#123") or hex ("#x7B").
                    if (contentLen > 1) {
                        char radixChar = entityContent.charAt(1);
                        try {
                            if (radixChar == 'X' || radixChar == 'x') {
                                entityValue = Integer.parseInt(entityContent.substring(2), 16);
                            } else {
                                entityValue = Integer.parseInt(entityContent.substring(1), 10);
                            }
                            if (entityValue > 0xFFFF) {
                                // Outside the BMP: treat as unknown and echo the input.
                                // NOTE(review): negative references such as "&#-5;" parse
                                // successfully and are written as-is -- confirm intended.
                                entityValue = -1;
                            }
                        } catch (NumberFormatException e) {
                            entityValue = -1; // malformed digits: echo the raw text below
                        }
                    }
                } else {
                    // Named entity; unknown names yield -1 from the map.
                    entityValue = this.entityValue(entityContent);
                }
            }
            if (entityValue == -1) {
                // Not a recognised entity: reproduce the original "&...;" text.
                writer.write('&');
                writer.write(entityContent);
                writer.write(';');
            } else {
                writer.write(entityValue);
            }
            i = semiIdx; // skip past the entity just consumed
        }
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.licensemanager; import javax.annotation.Generated; import com.amazonaws.services.licensemanager.model.*; import com.amazonaws.*; /** * Abstract implementation of {@code AWSLicenseManager}. Convenient method forms pass through to the corresponding * overload that takes a request object, which throws an {@code UnsupportedOperationException}. */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AbstractAWSLicenseManager implements AWSLicenseManager { protected AbstractAWSLicenseManager() { } @Override public AcceptGrantResult acceptGrant(AcceptGrantRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CheckInLicenseResult checkInLicense(CheckInLicenseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CheckoutBorrowLicenseResult checkoutBorrowLicense(CheckoutBorrowLicenseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CheckoutLicenseResult checkoutLicense(CheckoutLicenseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateGrantResult createGrant(CreateGrantRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateGrantVersionResult createGrantVersion(CreateGrantVersionRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public 
CreateLicenseResult createLicense(CreateLicenseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateLicenseConfigurationResult createLicenseConfiguration(CreateLicenseConfigurationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateLicenseConversionTaskForResourceResult createLicenseConversionTaskForResource(CreateLicenseConversionTaskForResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateLicenseManagerReportGeneratorResult createLicenseManagerReportGenerator(CreateLicenseManagerReportGeneratorRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateLicenseVersionResult createLicenseVersion(CreateLicenseVersionRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public CreateTokenResult createToken(CreateTokenRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteGrantResult deleteGrant(DeleteGrantRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteLicenseResult deleteLicense(DeleteLicenseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteLicenseConfigurationResult deleteLicenseConfiguration(DeleteLicenseConfigurationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteLicenseManagerReportGeneratorResult deleteLicenseManagerReportGenerator(DeleteLicenseManagerReportGeneratorRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteTokenResult deleteToken(DeleteTokenRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ExtendLicenseConsumptionResult extendLicenseConsumption(ExtendLicenseConsumptionRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetAccessTokenResult 
getAccessToken(GetAccessTokenRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetGrantResult getGrant(GetGrantRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLicenseResult getLicense(GetLicenseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLicenseConfigurationResult getLicenseConfiguration(GetLicenseConfigurationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLicenseConversionTaskResult getLicenseConversionTask(GetLicenseConversionTaskRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLicenseManagerReportGeneratorResult getLicenseManagerReportGenerator(GetLicenseManagerReportGeneratorRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetLicenseUsageResult getLicenseUsage(GetLicenseUsageRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetServiceSettingsResult getServiceSettings(GetServiceSettingsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListAssociationsForLicenseConfigurationResult listAssociationsForLicenseConfiguration(ListAssociationsForLicenseConfigurationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListDistributedGrantsResult listDistributedGrants(ListDistributedGrantsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListFailuresForLicenseConfigurationOperationsResult listFailuresForLicenseConfigurationOperations( ListFailuresForLicenseConfigurationOperationsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListLicenseConfigurationsResult listLicenseConfigurations(ListLicenseConfigurationsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public 
ListLicenseConversionTasksResult listLicenseConversionTasks(ListLicenseConversionTasksRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListLicenseManagerReportGeneratorsResult listLicenseManagerReportGenerators(ListLicenseManagerReportGeneratorsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListLicenseSpecificationsForResourceResult listLicenseSpecificationsForResource(ListLicenseSpecificationsForResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListLicenseVersionsResult listLicenseVersions(ListLicenseVersionsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListLicensesResult listLicenses(ListLicensesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListReceivedGrantsResult listReceivedGrants(ListReceivedGrantsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListReceivedLicensesResult listReceivedLicenses(ListReceivedLicensesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListResourceInventoryResult listResourceInventory(ListResourceInventoryRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListTagsForResourceResult listTagsForResource(ListTagsForResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListTokensResult listTokens(ListTokensRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListUsageForLicenseConfigurationResult listUsageForLicenseConfiguration(ListUsageForLicenseConfigurationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public RejectGrantResult rejectGrant(RejectGrantRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public TagResourceResult 
tagResource(TagResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UntagResourceResult untagResource(UntagResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateLicenseConfigurationResult updateLicenseConfiguration(UpdateLicenseConfigurationRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateLicenseManagerReportGeneratorResult updateLicenseManagerReportGenerator(UpdateLicenseManagerReportGeneratorRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateLicenseSpecificationsForResourceResult updateLicenseSpecificationsForResource(UpdateLicenseSpecificationsForResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UpdateServiceSettingsResult updateServiceSettings(UpdateServiceSettingsRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public void shutdown() { throw new java.lang.UnsupportedOperationException(); } @Override public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) { throw new java.lang.UnsupportedOperationException(); } }
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/redis/v1/cloud_redis.proto
//
// NOTE(review): machine-generated protobuf message class. Only comments and
// formatting were touched here; regenerate from the .proto file rather than
// editing the logic by hand.

package com.google.cloud.redis.v1;

/**
 *
 *
 * <pre>
 * Request for
 * [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance].
 * </pre>
 *
 * Protobuf type {@code google.cloud.redis.v1.DeleteInstanceRequest}
 */
public final class DeleteInstanceRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.redis.v1.DeleteInstanceRequest)
    DeleteInstanceRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use DeleteInstanceRequest.newBuilder() to construct.
  private DeleteInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the shared default instance: empty name.
  private DeleteInstanceRequest() {
    name_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until end of stream
  // (tag 0), storing field 1 (name) and preserving any unrecognized fields.
  private DeleteInstanceRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 10: // field 1, wire type 2 (length-delimited): name
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even if parsing failed part-way.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
        .internal_static_google_cloud_redis_v1_DeleteInstanceRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
        .internal_static_google_cloud_redis_v1_DeleteInstanceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.redis.v1.DeleteInstanceRequest.class,
            com.google.cloud.redis.v1.DeleteInstanceRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below (standard protobuf string-field representation).
  private volatile java.lang.Object name_;

  /**
   *
   *
   * <pre>
   * Required. Redis instance resource name using the form:
   * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
   * where `location_id` refers to a GCP region
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s; // cache the decoded String for subsequent calls
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Redis instance resource name using the form:
   * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
   * where `location_id` refers to a GCP region
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b; // cache the encoded ByteString for subsequent calls
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size; // memoize: messages are immutable once built
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.redis.v1.DeleteInstanceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.redis.v1.DeleteInstanceRequest other =
        (com.google.cloud.redis.v1.DeleteInstanceRequest) obj;
    boolean result = true;
    result = result && getName().equals(other.getName());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.redis.v1.DeleteInstanceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request for
   * [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance].
   * </pre>
   *
   * Protobuf type {@code google.cloud.redis.v1.DeleteInstanceRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.redis.v1.DeleteInstanceRequest)
      com.google.cloud.redis.v1.DeleteInstanceRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
          .internal_static_google_cloud_redis_v1_DeleteInstanceRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
          .internal_static_google_cloud_redis_v1_DeleteInstanceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.redis.v1.DeleteInstanceRequest.class,
              com.google.cloud.redis.v1.DeleteInstanceRequest.Builder.class);
    }

    // Construct using com.google.cloud.redis.v1.DeleteInstanceRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
          .internal_static_google_cloud_redis_v1_DeleteInstanceRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.redis.v1.DeleteInstanceRequest getDefaultInstanceForType() {
      return com.google.cloud.redis.v1.DeleteInstanceRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.redis.v1.DeleteInstanceRequest build() {
      com.google.cloud.redis.v1.DeleteInstanceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.redis.v1.DeleteInstanceRequest buildPartial() {
      com.google.cloud.redis.v1.DeleteInstanceRequest result =
          new com.google.cloud.redis.v1.DeleteInstanceRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.redis.v1.DeleteInstanceRequest) {
        return mergeFrom((com.google.cloud.redis.v1.DeleteInstanceRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.redis.v1.DeleteInstanceRequest other) {
      if (other == com.google.cloud.redis.v1.DeleteInstanceRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.redis.v1.DeleteInstanceRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage = (com.google.cloud.redis.v1.DeleteInstanceRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";

    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s; // cache the decoded String
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b; // cache the encoded ByteString
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.redis.v1.DeleteInstanceRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.redis.v1.DeleteInstanceRequest)
  private static final com.google.cloud.redis.v1.DeleteInstanceRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.redis.v1.DeleteInstanceRequest();
  }

  public static com.google.cloud.redis.v1.DeleteInstanceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<DeleteInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteInstanceRequest>() {
        @java.lang.Override
        public DeleteInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DeleteInstanceRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<DeleteInstanceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteInstanceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.redis.v1.DeleteInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*L
 * Copyright SAIC, SAIC-Frederick.
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/caadapter/LICENSE.txt for details.
 */

package gov.nih.nci.caadapter.ui.mapping.sdtm;

import gov.nih.nci.caadapter.common.ApplicationException;
import gov.nih.nci.caadapter.common.csv.CSVDataResult;
import gov.nih.nci.caadapter.common.csv.SegmentedCSVParserImpl;
import gov.nih.nci.caadapter.common.csv.data.CSVField;
import gov.nih.nci.caadapter.common.csv.data.CSVSegment;
import gov.nih.nci.caadapter.common.csv.data.CSVSegmentedFile;
import gov.nih.nci.caadapter.common.csv.meta.CSVMeta;
import gov.nih.nci.caadapter.common.util.CaadapterUtil;
import gov.nih.nci.caadapter.common.util.UUIDGenerator;
import gov.nih.nci.caadapter.common.util.FileUtil;
import gov.nih.nci.caadapter.sdtm.util.CSVMapFileReader;
import gov.nih.nci.caadapter.ui.common.AbstractMainFrame;
import gov.nih.nci.caadapter.ui.specification.csv.CSVPanel;

import javax.swing.*;
import java.io.*;
import java.util.*;

/**
 * This class implements the fixed length records
 *
 * <p>Transforms a CSV data file into one SDTM-style TXT file per mapped domain,
 * driven by a map file (field-to-domain mappings) and an SCS metadata file.
 * Optionally pads output fields to fixed lengths when the "FIXED_LENGTH_VAR"
 * preference is set to "Fixed".
 *
 * <p>NOTE(review): this class uses raw collection types throughout and keeps
 * per-record state ({@code mappedSegmentRecords}, {@code removeList}) in
 * fields, so it is not thread-safe; one instance per transformation.
 *
 * @author OWNER: Harsha Jayanna
 * @author LAST UPDATE $Author: phadkes $
 * @version Since caAdapter v4.0 revision
 *          $Revision: 1.12 $
 *          $Date: 2008-06-09 19:54:06 $
 */
public class RDSTransformer {
    // Output directory for the generated <domain>.txt files.
    String directoryLocation = null;
    // domain name -> ArrayList of field names (column layout), from define XML.
    Hashtable globaldomainList = null;
    // domain name -> ArrayList of "sourceXPath~target" mapping strings, from the map file.
    Hashtable hashTableTransform = null;
    // Parsed CSV data for the current transformation.
    CSVDataResult csvDataResult = null;
    // Segments mapped to the current domain, keyed by a generated UUID (insertion order kept).
    LinkedHashMap mappedSegmentRecords = null;
    // domain name -> ArrayList of row ArrayLists, accumulated across records.
    Hashtable finalResultList = new Hashtable();
    // UUIDs of segments whose children are also mapped (skipped to avoid duplicate rows).
    ArrayList removeList = null;
    // target key -> user-chosen fixed field width; null when not in fixed-length mode.
    HashMap fixedLengthRecords = null;
    //HashMap prefs;
    // True when the "FIXED_LENGTH_VAR" preference equals "Fixed".
    boolean fixedLengthIndicator = false;

    /** No-arg constructor for callers that drive {@link #transformCSV} directly. */
    public RDSTransformer(){

    }

    /**
     * Headless entry point: runs the transformation without any UI prompts
     * (no fixed-length dialog, no success message).
     *
     * @param mapFile      the map file pairing CSV fields with domain targets
     * @param _csvFileName path of the CSV data file to transform
     * @param directoryLoc directory the per-domain TXT files are written to
     * @throws Exception if reading the map/define/SCS files or parsing fails
     */
    public void transformCSV(File mapFile, String _csvFileName, String directoryLoc) throws Exception {
        directoryLocation = directoryLoc;
        CSVMapFileReader csvMapFileReader = new CSVMapFileReader(mapFile);
        // Domain column layouts come from the define XML referenced by the map file.
        globaldomainList = RDSHelper.getAllFieldsForDomains(new File(RDSHelper.getDefineXMLNameFromMapFile(mapFile.getAbsolutePath())));
        hashTableTransform = csvMapFileReader.getHashTableTransform();
        String scsFileName = RDSHelper.getSCSFileFromMapFile(mapFile);
        prepareCSVDataFromCSVDataFile(_csvFileName, scsFileName);
    }

    /**
     * UI entry point: same as {@link #transformCSV} but may prompt the user for
     * fixed-length field widths and shows a success dialog when done.
     *
     * @param callingFrame       parent frame for the dialogs
     * @param mapFile            the map file pairing CSV fields with domain targets
     * @param _csvFileName       path of the CSV data file to transform
     * @param _directoryLocation directory the per-domain TXT files are written to
     * @throws Exception if reading the map/define/SCS files or parsing fails
     */
    public RDSTransformer(AbstractMainFrame callingFrame, File mapFile, String _csvFileName, String _directoryLocation) throws Exception {
        directoryLocation = _directoryLocation;
        CSVMapFileReader csvMapFileReader = new CSVMapFileReader(mapFile);
        //check for fixed lenght
        try {
            if (((String) CaadapterUtil.getCaAdapterPreferences().get("FIXED_LENGTH_VAR")).equalsIgnoreCase("Fixed")) {
                fixedLengthIndicator = true;
                //Prepare the list here and keep it ready so that number of blanks corresponding to the
                //value set by the user will be applied appropriately
                RDSFixedLenghtInput rdsFixedLenghtInput = new RDSFixedLenghtInput(callingFrame, csvMapFileReader.getTargetKeyList());
                fixedLengthRecords = rdsFixedLenghtInput.getUserValues();
            }
        } catch (Exception e) {
            // Missing preference (or null entry -> NPE) simply means variable-length output.
            System.out.println("the application could not find preference variable for SCS-transformation (RDS module)");
        }
        globaldomainList = RDSHelper.getAllFieldsForDomains(new File(RDSHelper.getDefineXMLNameFromMapFile(mapFile.getAbsolutePath())));
        hashTableTransform = csvMapFileReader.getHashTableTransform();
        String scsFileName = RDSHelper.getSCSFileFromMapFile(mapFile);
        prepareCSVDataFromCSVDataFile(_csvFileName, scsFileName);
        // NOTE(review): dialog title "Transfomation..." is a user-visible typo; left
        // unchanged here because this is a documentation-only pass.
        JOptionPane.showMessageDialog(callingFrame, "Transformation was successful, TXT files were created in \""+directoryLocation+"\" directory.", "Transfomation...", JOptionPane.INFORMATION_MESSAGE);
    }

    /**
     * Parses the CSV data file against the SCS metadata and kicks off record
     * processing. The SCS file is resolved via the classpath first, falling
     * back to the literal path.
     *
     * @throws ApplicationException if the CSV cannot be parsed against the metadata
     */
    private void prepareCSVDataFromCSVDataFile(String _csvFileName, String _scsFileName) throws ApplicationException {
        CSVPanel csvPanel = new CSVPanel();
        File csvFile = new File(_csvFileName);
        String tempFileName=null;
        try {
            tempFileName = FileUtil.fileLocateOnClasspath(_scsFileName);
        } catch (FileNotFoundException e) {
            // Not on the classpath: use the given path as-is.
            tempFileName = _scsFileName;
        }
        csvPanel.setSaveFile(new File(tempFileName), true);
        CSVMeta rootMeta = csvPanel.getCSVMeta(false);
        //SDTMMany2ManyMapping segmentedCSVParser = new SDTMMany2ManyMapping();
        SegmentedCSVParserImpl segmentedCSVParser = new SegmentedCSVParserImpl();
        csvDataResult = segmentedCSVParser.parse(csvFile, rootMeta);
        //List csvDATA = segmentedCSVParser.returnCsvMapData1;
        //csvDATA.add(csvDATA.get(0));
        processDataRecords();
    }

    /** Iterates every logical record of the parsed CSV and processes it. */
    private void processDataRecords() {
        CSVSegmentedFile csvSegmentedFile = csvDataResult.getCsvSegmentedFile();
        List recordsinCSVFile = csvSegmentedFile.getLogicalRecords();
        for (int k = 0; k < recordsinCSVFile.size(); k++) {
            processOneRecord(recordsinCSVFile, k);
        }
    }

    /** @return the user-supplied fixed-length widths, or null when not in fixed-length mode */
    public HashMap getFixedLengthRecords() {
        return fixedLengthRecords;
    }

    /**
     * Processes one logical CSV record against every mapped domain, then writes
     * all accumulated domain tables out.
     *
     * <p>NOTE(review): the write loop runs once per record (it is inside this
     * method, and writeFile overwrites the domain file each call) — presumably
     * only the final record's call matters since finalResultList accumulates;
     * verify against callers before restructuring.
     */
    private void processOneRecord(List records, int recordNumber) {
        CSVSegment csvSegment = (CSVSegment) records.get(recordNumber);
        /*
             a.)Now we have all the possible leaf segments!
             b.)The size of the collection is to the number of rows!
             b.)Process each leaf record and this transforms one row in the resulting txt file!
             c.)The 'searchMappedField' method must return a arrayList
             d.)The arraylist is added to the collection and this is written out to the file.
        */
        Enumeration enumerat = hashTableTransform.keys();
        while (enumerat.hasMoreElements()) {
            String domainName = (String) enumerat.nextElement();
            mappedSegmentRecords = new LinkedHashMap();
            prepareMappedSegments(csvSegment, domainName);
            prepareRemoveList(domainName);
            processResultForDomain(domainName);
        }
        //finally! write the table out
        Enumeration enume = finalResultList.keys();
        while (enume.hasMoreElements()) {
            String domainName = (String) enume.nextElement();
            writeFile(domainName, (ArrayList) finalResultList.get(domainName));
        }
    }

    /**
     * Collects the UUIDs of mapped segments that also have mapped children for
     * this domain; those parents are skipped so each row is emitted only once
     * (from the deepest mapped segment).
     */
    private void prepareRemoveList(String domainName) {
        removeList = new ArrayList();
        Set set = mappedSegmentRecords.keySet();
        Iterator iter = set.iterator();
        while (iter.hasNext()) {
            String uuID = (String) iter.next();
            CSVSegment csvSegment = (CSVSegment) mappedSegmentRecords.get(uuID);
            if (areChildrenSegmentsMapped(csvSegment, domainName)) {
                removeList.add(uuID);
            }
        }
    }

    /**
     * Returns true if any direct child of the segment is mapped to the domain.
     *
     * <p>NOTE(review): the recursive call's result is discarded, so grandchildren
     * never influence the answer — despite the recursion, only direct children
     * count. Looks like a bug (should be {@code retVal = retVal || ...} or an
     * early return); confirm intended behavior before fixing.
     */
    private boolean areChildrenSegmentsMapped(CSVSegment csvSegment, String domainName) {
        boolean retVal = false;
        if (csvSegment.getChildSegments().size() > 0) {
            List list = csvSegment.getChildSegments();
            for (int i = 0; i < list.size(); i++) {
                CSVSegment csvChildSegment = (CSVSegment) list.get(i);
                if (isSegmentMapped(csvChildSegment, domainName)) {
                    return true;
                }
                areChildrenSegmentsMapped(csvChildSegment, domainName);
            }
        }
        return retVal;
    }

    /**
     * Fills {@code mappedSegmentRecords} with this segment (if mapped) plus all
     * mapped ancestors and descendants for the given domain. Keys are fresh
     * UUIDs, so the same segment may appear more than once.
     */
    private void prepareMappedSegments(CSVSegment csvSegment, String domainName) {
        if (isSegmentMapped(csvSegment, domainName)) {
            //mappedSegmentRecords.add(csvSegment);
            mappedSegmentRecords.put(UUIDGenerator.getUniqueString(), csvSegment);
        }
        //parent check for this domain only
        checkForMappedParents(csvSegment, domainName);
        //children check for this domain only
        checkForMappedChildren(csvSegment, domainName);
    }

    /** Walks up the parent chain, recording every ancestor mapped to the domain. */
    private void checkForMappedParents(CSVSegment csvSegment, String domainName) {
        if (csvSegment.getParentSegment() != null) {
            CSVSegment csvParentSegment = csvSegment.getParentSegment();
            if (isSegmentMapped(csvParentSegment, domainName)) {
                mappedSegmentRecords.put(UUIDGenerator.getUniqueString(), csvParentSegment);
            }
            checkForMappedParents(csvParentSegment, domainName);
        }
    }

    /** Walks the subtree depth-first, recording every descendant mapped to the domain. */
    private void checkForMappedChildren(CSVSegment csvSegment, String domainName) {
        if (csvSegment.getChildSegments().size() > 0) {
            List list = csvSegment.getChildSegments();
            for (int i = 0; i < list.size(); i++) {
                CSVSegment csvChildSegment = (CSVSegment) list.get(i);
                if (isSegmentMapped(csvChildSegment, domainName)) {
                    mappedSegmentRecords.put(UUIDGenerator.getUniqueString(), csvChildSegment);
                }
                checkForMappedChildren(csvChildSegment, domainName);
            }
        }
    }

    /** Emits one result row per collected segment not suppressed by the remove list. */
    private void processResultForDomain(String domainName) {
        Set set = mappedSegmentRecords.keySet();
        Iterator iter = set.iterator();
        while (iter.hasNext()) {
            String _UUID = (String) iter.next();
            if (!doesRemoveListHaveUUID(_UUID)) {
                processResultForDomain2((CSVSegment) mappedSegmentRecords.get(_UUID), domainName);
            }
        }
    }

    /** @return true if the UUID is in the current remove list (linear scan). */
    private boolean doesRemoveListHaveUUID(String uuid) {
        boolean retVal = false;
        for (int k = 0; k < removeList.size(); k++) {
            if (uuid.equals(removeList.get(k)))
                return true;
        }
        return retVal;
    }

    /**
     * Builds one output row for the segment: pre-fills every domain column with
     * a single-space placeholder, lets the mapping machinery overwrite mapped
     * columns, then appends the row to {@code finalResultList} under the domain.
     */
    private void processResultForDomain2(CSVSegment csvSegment, String domainName) {
        ArrayList rowData;
        rowData = new ArrayList();
        int sizeOfDomain = new Integer(((ArrayList) globaldomainList.get(domainName)).size());
        for (int l = 0; l < sizeOfDomain; l++) {
            rowData.add(" ");//an empty space buffer pre-added for data assumed to be mapped
        }
        searchMappedField(csvSegment, rowData, domainName);
        //writeFile(domainName, rowData);
        if (finalResultList.containsKey(domainName)) {
            ((ArrayList) finalResultList.get(domainName)).add(rowData);
        } else {
            ArrayList resArray = new ArrayList();
            resArray.add(rowData);
            finalResultList.put(domainName, resArray);
        }
    }

    /**
     * Applies the segment's own mapped fields to the row; if none matched,
     * falls back to searching the ancestor chain.
     */
    private void searchMappedField(CSVSegment csvSegment, ArrayList rowData, String domainName) {
        if (!isSegmentMapped(csvSegment, rowData, domainName)) {
            checkParentSegment(csvSegment, rowData, domainName);
        }
    }

    /** Recursively applies mapped fields from each ancestor segment to the row. */
    private void checkParentSegment(CSVSegment csvSegment, ArrayList rowData, String domainName) {
        if (csvSegment.getParentSegment() != null) {
            csvSegment = csvSegment.getParentSegment();
            searchMappedField(csvSegment, rowData, domainName);
            checkParentSegment(csvSegment, rowData, domainName);
        }
    }

    /**
     * Side-effecting overload: for every field of the segment mapped into this
     * domain (matched by the 2-char domain prefix of the mapped value), writes
     * the field's value into {@code rowData} via {@code RDSMapResult}.
     *
     * @return true if at least one field of this segment was applied to the row
     */
    private boolean isSegmentMapped(CSVSegment csvSegment, ArrayList rowData, String domainName) {
        boolean retVal = false;
        //This Xpath will be used to prefix all the fields
        String xpathPrefix = RDSHelper.getParentasXPath(csvSegment);
        if (csvSegment.getFields().size() > 0) {
            List fieldsForEachSegment = csvSegment.getFields();
            for (int fields = 0; fields < fieldsForEachSegment.size(); fields++) {
                CSVField fieldObject = (CSVField) fieldsForEachSegment.get(fields);
                String mappedValue = isFieldMapped(xpathPrefix + "\\" + fieldObject.getMetaObject().toString());
                if (mappedValue != null)// okay what about parent segments are they mapped; check that in the else clause
                {
                    if (domainName.equalsIgnoreCase(mappedValue.substring(0, 2))) {
                        //A simple check to continue for only matching domains and fields
                        if (!fixedLengthIndicator) {
                            fixedLengthRecords = null;
                        }
                        // RDSMapResult mutates rowData in place; the returned list is ignored.
                        new RDSMapResult(domainName, globaldomainList, mappedValue, xpathPrefix + "\\" + fieldObject.getMetaObject().toString(), fieldObject.getValue(), csvSegment, hashTableTransform, rowData, fixedLengthRecords).getArrayList();
                        retVal = true;
                    }
                }
            }
        }
        return retVal;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////

    /**
     * Read-only overload: true if any field of the segment is mapped into the
     * domain (same prefix match as above) without touching any row data.
     */
    private boolean isSegmentMapped(CSVSegment csvSegment, String domainName) {
        boolean retVal = false;
        //This Xpath will be used to prefix all the fields
        String xpathPrefix = RDSHelper.getParentasXPath(csvSegment);
        if (csvSegment.getFields().size() > 0) {
            List fieldsForEachSegment = csvSegment.getFields();
            for (int fields = 0; fields < fieldsForEachSegment.size(); fields++) {
                CSVField fieldObject = (CSVField) fieldsForEachSegment.get(fields);
                String mappedValue = isFieldMapped(xpathPrefix + "\\" + fieldObject.getMetaObject().toString());
                if (mappedValue != null)// okay what about parent segments are they mapped; check that in the else clause
                {
                    if (domainName.equalsIgnoreCase(mappedValue.substring(0, 2))) {
                        retVal = true;
                    }
                }
            }
        }
        return retVal;
    }

    /**
     * Looks the field XPath up across all domains' mapping entries (each entry
     * is "sourceXPath~target").
     *
     * @return the target (text after '~') for the first matching entry, or null
     */
    private String isFieldMapped(String field) {
        Enumeration enum1 = hashTableTransform.keys();
        while (enum1.hasMoreElements()) {
            String domainName = (String) enum1.nextElement();
            ArrayList array = (ArrayList) hashTableTransform.get(domainName);
            for (int i = 0; i < array.size(); i++) {
                String _tempStr = array.get(i).toString();
                String _temStr2 = _tempStr.substring(0, _tempStr.indexOf("~"));
                if (_temStr2.equals(field)) {
                    return _tempStr.substring(_tempStr.indexOf("~") + 1, _tempStr.length());
                }
            }
        }
        return null;
    }

    /**
     * Writes (overwrites) {@code <directoryLocation>\<domainName>.txt}: the
     * header line of column names, then one line per non-empty result row.
     * Bracket-stripping via substring(1, indexOf(']')) turns the ArrayList
     * toString() form "[a, b, c]" into "a, b, c".
     *
     * <p>NOTE(review): the writer is not closed if an exception escapes before
     * close() (no finally / try-with-resources), and the "\\" separator is
     * Windows-specific — candidates for a follow-up fix.
     */
    private void writeFile(String domainName, ArrayList resultData) {
        try {
            ArrayList domainHeader = (ArrayList) globaldomainList.get(domainName);
            //create a file with domain name
            FileWriter fstream = new FileWriter(directoryLocation + "\\" + domainName + ".txt");
            BufferedWriter out = new BufferedWriter(fstream);
            out.write(domainHeader.toString().substring(1, domainHeader.toString().indexOf(']')));
            for (int i = 0; i < resultData.size(); i++) {
                try {
                    ArrayList writeToList = (ArrayList) resultData.get(i);
                    if (checkIfValueExists(writeToList)) {
                        String tempStr = writeToList.toString();
                        String resultStr = tempStr.substring(1, tempStr.indexOf(']'));
                        // out.write("\n" + resultStr);
                        out.newLine();
                        out.write(resultStr);
                    }
                } catch (IOException e) {
                    e.printStackTrace();//To change body of catch statement use File | Settings | File Templates.
                    System.out.println("continuing to next loop " + e.getMessage());
                }
            }
            out.flush();
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * @return true if any cell holds more than the single-space placeholder
     *         (rows of all-placeholder cells are skipped by writeFile)
     */
    private boolean checkIfValueExists(ArrayList rowData) {
        boolean retVal = false;
        for (int i = 0; i < rowData.size(); i++) {
            if (rowData.get(i).toString().length() > 1)
                return true;
        }
        return retVal;
    }
}

/**
 * Change History
 * $Log: not supported by cvs2svn $
 * Revision 1.11  2007/11/12 15:05:00  wangeug
 * add "newline" at the end of each STDM record
 *
 * Revision 1.10  2007/11/05 15:41:58  jayannah
 * Changed the message/wording
 *
 * Revision 1.9  2007/10/15 21:01:14  jayannah
 * Changed the wat reading files to accomodate the working directory path
 *
 * Revision 1.8  2007/10/15 19:49:32  jayannah
 * Added a public API for the transformation of the CSV and DB in order to be compliant with the caCore.
 *
 * Revision 1.7  2007/09/18 02:52:39  jayannah
 * removed SDTMMany2Many mappings object and changed to use the CVSDataResult instead
 *
 * Revision 1.6  2007/09/18 02:39:29  jayannah
 * Modified the code so that an exception is caught when the preference variable is not found, the other change is for the construction of RDS transformer
 *
 * Revision 1.5  2007/09/07 19:29:51  wangeug
 * relocate readPreference and savePreference methods
 *
 * Revision 1.4  2007/08/17 15:15:02  jayannah
 * added wait window during transformation
 *
 * Revision 1.3  2007/08/16 19:39:45  jayannah
 * Reformatted and added the Comments and the log tags for all the files
 *
 */
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.observables;

import static org.junit.Assert.*;

import java.util.Iterator;
import java.util.NoSuchElementException;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import rx.Observable;
import rx.Observer;
import rx.Subscriber;
import rx.Subscription;
import rx.functions.Action1;
import rx.functions.Func1;
import rx.subscriptions.BooleanSubscription;
import rx.subscriptions.Subscriptions;

/**
 * Unit tests for the blocking operators of {@link BlockingObservable}:
 * first/last/single (with and without predicates and defaults), toIterable
 * and forEach.
 */
public class BlockingObservableTest {

    @Mock
    Subscriber<Integer> w;

    @Before
    public void before() {
        MockitoAnnotations.initMocks(this);
    }

    @Test
    public void testLast() {
        BlockingObservable<String> obs = BlockingObservable.from(Observable.from("one", "two", "three"));
        assertEquals("three", obs.last());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLastEmptyObservable() {
        BlockingObservable<Object> obs = BlockingObservable.from(Observable.empty());
        obs.last();
    }

    @Test
    public void testLastOrDefault() {
        BlockingObservable<Integer> observable = BlockingObservable.from(Observable.from(1, 0, -1));
        int last = observable.lastOrDefault(-100, new Func1<Integer, Boolean>() {
            @Override
            public Boolean call(Integer args) {
                return args >= 0;
            }
        });
        assertEquals(0, last);
    }

    @Test
    public void testLastOrDefault1() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        assertEquals("three", observable.lastOrDefault("default"));
    }

    @Test
    public void testLastOrDefault2() {
        BlockingObservable<Object> observable = BlockingObservable.from(Observable.empty());
        assertEquals("default", observable.lastOrDefault("default"));
    }

    @Test
    public void testLastOrDefaultWithPredicate() {
        BlockingObservable<Integer> observable = BlockingObservable.from(Observable.from(1, 0, -1));
        int last = observable.lastOrDefault(0, new Func1<Integer, Boolean>() {
            @Override
            public Boolean call(Integer args) {
                return args < 0;
            }
        });
        assertEquals(-1, last);
    }

    @Test
    public void testLastOrDefaultWrongPredicate() {
        BlockingObservable<Integer> observable = BlockingObservable.from(Observable.from(-1, -2, -3));
        int last = observable.lastOrDefault(0, new Func1<Integer, Boolean>() {
            @Override
            public Boolean call(Integer args) {
                return args >= 0;
            }
        });
        assertEquals(0, last);
    }

    @Test
    public void testLastWithPredicate() {
        BlockingObservable<String> obs = BlockingObservable.from(Observable.from("one", "two", "three"));
        assertEquals("two", obs.last(new Func1<String, Boolean>() {
            @Override
            public Boolean call(String s) {
                return s.length() == 3;
            }
        }));
    }

    // Fix: this test was missing its @Test annotation, so JUnit silently
    // skipped it and the single() happy path was never exercised.
    @Test
    public void testSingle() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one"));
        assertEquals("one", observable.single());
    }

    @Test
    public void testSingleDefault() {
        BlockingObservable<Object> observable = BlockingObservable.from(Observable.empty());
        assertEquals("default", observable.singleOrDefault("default"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSingleDefaultPredicateMatchesMoreThanOne() {
        BlockingObservable.from(Observable.from("one", "two")).singleOrDefault("default", new Func1<String, Boolean>() {
            @Override
            public Boolean call(String args) {
                return args.length() == 3;
            }
        });
    }

    @Test
    public void testSingleDefaultPredicateMatchesNothing() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two"));
        String result = observable.singleOrDefault("default", new Func1<String, Boolean>() {
            @Override
            public Boolean call(String args) {
                return args.length() == 4;
            }
        });
        assertEquals("default", result);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSingleDefaultWithMoreThanOne() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        observable.singleOrDefault("default");
    }

    @Test
    public void testSingleWithPredicateDefault() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "four"));
        assertEquals("four", observable.single(new Func1<String, Boolean>() {
            @Override
            public Boolean call(String s) {
                return s.length() == 4;
            }
        }));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSingleWrong() {
        BlockingObservable<Integer> observable = BlockingObservable.from(Observable.from(1, 2));
        observable.single();
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSingleWrongPredicate() {
        BlockingObservable<Integer> observable = BlockingObservable.from(Observable.from(-1));
        observable.single(new Func1<Integer, Boolean>() {
            @Override
            public Boolean call(Integer args) {
                return args > 0;
            }
        });
    }

    @Test
    public void testToIterable() {
        BlockingObservable<String> obs = BlockingObservable.from(Observable.from("one", "two", "three"));
        Iterator<String> it = obs.toIterable().iterator();

        assertEquals(true, it.hasNext());
        assertEquals("one", it.next());

        assertEquals(true, it.hasNext());
        assertEquals("two", it.next());

        assertEquals(true, it.hasNext());
        assertEquals("three", it.next());

        assertEquals(false, it.hasNext());
    }

    @Test(expected = NoSuchElementException.class)
    public void testToIterableNextOnly() {
        BlockingObservable<Integer> obs = BlockingObservable.from(Observable.from(1, 2, 3));
        Iterator<Integer> it = obs.toIterable().iterator();

        Assert.assertEquals((Integer) 1, it.next());
        Assert.assertEquals((Integer) 2, it.next());
        Assert.assertEquals((Integer) 3, it.next());

        it.next();
    }

    @Test(expected = NoSuchElementException.class)
    public void testToIterableNextOnlyTwice() {
        BlockingObservable<Integer> obs = BlockingObservable.from(Observable.from(1, 2, 3));
        Iterator<Integer> it = obs.toIterable().iterator();

        Assert.assertEquals((Integer) 1, it.next());
        Assert.assertEquals((Integer) 2, it.next());
        Assert.assertEquals((Integer) 3, it.next());

        // A second next() past the end must throw again, not only the first one.
        boolean exc = false;
        try {
            it.next();
        } catch (NoSuchElementException ex) {
            exc = true;
        }
        Assert.assertEquals(true, exc);

        it.next();
    }

    @Test
    public void testToIterableManyTimes() {
        BlockingObservable<Integer> obs = BlockingObservable.from(Observable.from(1, 2, 3));
        Iterable<Integer> iter = obs.toIterable();
        // The Iterable must be reusable: each iterator() call re-subscribes.
        for (int j = 0; j < 3; j++) {
            Iterator<Integer> it = iter.iterator();

            Assert.assertTrue(it.hasNext());
            Assert.assertEquals((Integer) 1, it.next());
            Assert.assertTrue(it.hasNext());
            Assert.assertEquals((Integer) 2, it.next());
            Assert.assertTrue(it.hasNext());
            Assert.assertEquals((Integer) 3, it.next());
            Assert.assertFalse(it.hasNext());
        }
    }

    @Test(expected = TestException.class)
    public void testToIterableWithException() {
        BlockingObservable<String> obs = BlockingObservable.from(Observable.create(new Observable.OnSubscribeFunc<String>() {
            @Override
            public Subscription onSubscribe(Observer<? super String> observer) {
                observer.onNext("one");
                observer.onError(new TestException());
                return Subscriptions.empty();
            }
        }));

        Iterator<String> it = obs.toIterable().iterator();

        assertEquals(true, it.hasNext());
        assertEquals("one", it.next());

        assertEquals(true, it.hasNext());
        it.next();
    }

    @Test
    public void testForEachWithError() {
        try {
            BlockingObservable.from(Observable.create(new Observable.OnSubscribeFunc<String>() {
                @Override
                public Subscription onSubscribe(final Observer<? super String> observer) {
                    final BooleanSubscription subscription = new BooleanSubscription();
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            observer.onNext("one");
                            observer.onNext("two");
                            observer.onNext("three");
                            observer.onCompleted();
                        }
                    }).start();
                    return subscription;
                }
            })).forEach(new Action1<String>() {
                @Override
                public void call(String t1) {
                    throw new RuntimeException("fail");
                }
            });
            fail("we expect an exception to be thrown");
        } catch (Throwable e) {
            // do nothing as we expect this
        }
    }

    @Test
    public void testFirst() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        assertEquals("one", observable.first());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testFirstWithEmpty() {
        BlockingObservable.from(Observable.<String> empty()).first();
    }

    @Test
    public void testFirstWithPredicate() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        String first = observable.first(new Func1<String, Boolean>() {
            @Override
            public Boolean call(String args) {
                return args.length() > 3;
            }
        });
        assertEquals("three", first);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testFirstWithPredicateAndEmpty() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        observable.first(new Func1<String, Boolean>() {
            @Override
            public Boolean call(String args) {
                return args.length() > 5;
            }
        });
    }

    @Test
    public void testFirstOrDefault() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        assertEquals("one", observable.firstOrDefault("default"));
    }

    @Test
    public void testFirstOrDefaultWithEmpty() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.<String> empty());
        assertEquals("default", observable.firstOrDefault("default"));
    }

    @Test
    public void testFirstOrDefaultWithPredicate() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        String first = observable.firstOrDefault("default", new Func1<String, Boolean>() {
            @Override
            public Boolean call(String args) {
                return args.length() > 3;
            }
        });
        assertEquals("three", first);
    }

    @Test
    public void testFirstOrDefaultWithPredicateAndEmpty() {
        BlockingObservable<String> observable = BlockingObservable.from(Observable.from("one", "two", "three"));
        String first = observable.firstOrDefault("default", new Func1<String, Boolean>() {
            @Override
            public Boolean call(String args) {
                return args.length() > 5;
            }
        });
        assertEquals("default", first);
    }

    private static class TestException extends RuntimeException {
        private static final long serialVersionUID = 1L;
    }
}
/*
 * Copyright 2016 Square Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package flowless;

import android.app.Activity;
import android.app.Application;
import android.app.Fragment;
import android.content.Intent;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.annotation.Nullable;
import java.util.ArrayList;
import java.util.Iterator;
import flowless.preset.FlowLifecycles;

import static flowless.Preconditions.checkArgument;
import static flowless.Preconditions.checkNotNull;

/**
 * Pay no attention to this class. It's only public because it has to be.
 *
 * Retained, view-less Fragment that ties a {@link Flow} instance to the host
 * Activity's lifecycle: it creates/restores the Flow history, forwards
 * lifecycle callbacks to the {@link Dispatcher}, and persists key state into
 * the saved instance state / launch Intent.
 */
public /*final*/ class InternalLifecycleIntegration extends Fragment {
    static final String INTENT_KEY = "Flow_history";
    static final String TAG = "flow-lifecycle-integration";
    static final String PERSISTENCE_KEY = "Flow_state";

    // Looks up the retained fragment previously installed into this Activity,
    // or null if install() has not run yet.
    static InternalLifecycleIntegration find(Activity activity) {
        return (InternalLifecycleIntegration) activity.getFragmentManager().findFragmentByTag(TAG);
    }

    // Installs (or re-binds) the retained fragment for the given Activity. The
    // work is deferred to onActivityCreated via a one-shot lifecycle callback,
    // which unregisters itself once it has run for the target Activity.
    static void install(final Application app, final Activity activity, @Nullable final KeyParceler parceler, final History defaultHistory, final Dispatcher dispatcher, final ServiceProvider serviceProvider, final KeyManager keyManager) {
        app.registerActivityLifecycleCallbacks(new Application.ActivityLifecycleCallbacks() {
            @Override
            public void onActivityCreated(Activity a, Bundle savedInstanceState) {
                if(a == activity) {
                    InternalLifecycleIntegration fragment = find(activity);
                    boolean newFragment = fragment == null;
                    if(newFragment) {
                        fragment = new InternalLifecycleIntegration();
                    }
                    // keyManager == null means the retained fragment survived a
                    // config change but has not been configured in this process yet.
                    if(fragment.keyManager == null) {
                        fragment.defaultHistory = defaultHistory;
                        fragment.parceler = parceler;
                        fragment.keyManager = keyManager;
                        fragment.serviceProvider = serviceProvider;
                    }
                    // We always replace the dispatcher because it frequently references the Activity.
                    fragment.dispatcher = dispatcher;
                    fragment.intent = a.getIntent();
                    if(newFragment) {
                        activity.getFragmentManager() //
                                .beginTransaction() //
                                .add(fragment, TAG) //
                                .commit();
                    }
                    // One-shot: stop listening once this Activity is handled.
                    app.unregisterActivityLifecycleCallbacks(this);
                }
            }

            @Override
            public void onActivityStarted(Activity activity) {
            }

            @Override
            public void onActivityResumed(Activity activity) {
            }

            @Override
            public void onActivityPaused(Activity activity) {
            }

            @Override
            public void onActivityStopped(Activity activity) {
            }

            @Override
            public void onActivitySaveInstanceState(Activity activity, Bundle outState) {
            }

            @Override
            public void onActivityDestroyed(Activity a) {
            }
        });
    }

    Flow flow;
    KeyManager keyManager;
    ServiceProvider serviceProvider;
    @Nullable KeyParceler parceler;
    History defaultHistory;
    Dispatcher dispatcher;
    Intent intent;

    public InternalLifecycleIntegration() {
        super();
        // Survive configuration changes so flow/keyManager state is retained.
        setRetainInstance(true);
    }

    // Serializes the given history (plus global and registered key states, when
    // a KeyManager is available) into the Intent under INTENT_KEY so it can be
    // restored by onNewIntent()/selectHistory().
    static void addHistoryToIntent(Intent intent, History history, KeyParceler parceler, KeyManager keyManager) {
        Bundle bundle = new Bundle();
        Bundle innerBundle = new Bundle();
        ArrayList<Parcelable> parcelables = new ArrayList<>(history.size());
        final Iterator<Object> keys = history.reverseIterator();
        while(keys.hasNext()) {
            Object key = keys.next();
            State keyState;
            if(keyManager != null && keyManager.hasState(key)) {
                keyState = keyManager.getState(key);
            } else {
                // No state recorded for this key yet; persist an empty placeholder.
                keyState = State.empty(key);
            }
            parcelables.add(keyState.toBundle(parceler));
        }
        innerBundle.putParcelableArrayList(KeyManager.HISTORY_KEYS, parcelables);
        if(keyManager != null) {
            innerBundle.putParcelableArrayList(KeyManager.GLOBAL_KEYS, collectStatesFromKeys(keyManager, parceler, keyManager.globalKeys.iterator(), keyManager.globalKeys.size()));
            innerBundle.putParcelableArrayList(KeyManager.REGISTERED_KEYS, collectStatesFromKeys(keyManager, parceler, keyManager.registeredKeys.iterator(), keyManager.registeredKeys.size()));
        }
        bundle.putBundle(PERSISTENCE_KEY, innerBundle);
        intent.putExtra(INTENT_KEY, bundle);
    }

    // Replaces the current history with the one carried by the new Intent, if any.
    void onNewIntent(Intent intent) {
        if(intent.hasExtra(INTENT_KEY)) {
            checkNotNull(parceler, "Intent has a Flow history extra, but Flow was not installed with a KeyParceler");
            History.Builder builder = History.emptyBuilder();
            load(intent.<Bundle>getParcelableExtra(INTENT_KEY), parceler, builder, keyManager);
            flow.setHistory(builder.build(), Direction.REPLACE);
        }
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // flow != null means the retained instance survived; reuse it as-is.
        if(flow == null) {
            History savedHistory = null;
            if(savedInstanceState != null && savedInstanceState.containsKey(INTENT_KEY)) {
                checkNotNull(parceler, "no KeyParceler installed");
                History.Builder builder = History.emptyBuilder();
                Bundle bundle = savedInstanceState.getBundle(INTENT_KEY);
                load(bundle, parceler, builder, keyManager);
                savedHistory = builder.build();
            }
            History history = selectHistory(intent, savedHistory, defaultHistory, parceler, keyManager);
            flow = new Flow(keyManager, serviceProvider, history);
        }
        Flow.get(getActivity().getBaseContext()); // force existence of Flow in InternalContextWrapper
        flow.setDispatcher(dispatcher, true);
        if(dispatcher instanceof FlowLifecycles.CreateDestroyListener) {
            ((FlowLifecycles.CreateDestroyListener) dispatcher).onCreate(savedInstanceState);
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        if(dispatcher instanceof FlowLifecycles.StartStopListener) {
            ((FlowLifecycles.StartStopListener) dispatcher).onStart();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        // Re-attach the dispatcher if it was removed in onPause().
        if(!flow.hasDispatcher()) {
            flow.setDispatcher(dispatcher, false);
        }
        if(dispatcher instanceof FlowLifecycles.ResumePauseListener) {
            ((FlowLifecycles.ResumePauseListener) dispatcher).onResume();
        }
    }

    @Override
    public void onPause() {
        if(dispatcher instanceof FlowLifecycles.ResumePauseListener) {
            ((FlowLifecycles.ResumePauseListener) dispatcher).onPause();
        }
        if(flow.hasDispatcher()) {
            flow.removeDispatcher(dispatcher);
        }
        super.onPause();
    }

    @Override
    public void onStop() {
        if(dispatcher instanceof FlowLifecycles.StartStopListener) {
            ((FlowLifecycles.StartStopListener) dispatcher).onStop();
        }
        super.onStop();
    }

    @Override
    public void onDestroyView() {
        if(dispatcher instanceof FlowLifecycles.CreateDestroyListener) {
            ((FlowLifecycles.CreateDestroyListener) dispatcher).onDestroy();
        }
        if(flow != null) {
            // Flush any traversal queued while no dispatcher was attached.
            flow.executePendingTraversal();
        }
        super.onDestroyView();
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        if(dispatcher instanceof FlowLifecycles.ViewStatePersistenceListener) {
            ((FlowLifecycles.ViewStatePersistenceListener) dispatcher).onSaveInstanceState(outState);
        }
        checkArgument(outState != null, "outState may not be null");
        // Without a KeyParceler we cannot persist key state; skip silently.
        if(parceler == null) {
            return;
        }
        Bundle bundle = new Bundle();
        save(bundle, parceler, flow.getHistory(), keyManager);
        if(!bundle.isEmpty()) {
            outState.putParcelable(INTENT_KEY, bundle);
        }
    }

    // History precedence: saved instance state > launch Intent extra > default.
    private static History selectHistory(Intent intent, History saved, History defaultHistory, @Nullable KeyParceler parceler, KeyManager keyManager) {
        if(saved != null) {
            return saved;
        }
        if(intent != null && intent.hasExtra(INTENT_KEY)) {
            checkNotNull(parceler, "Intent has a Flow history extra, but Flow was not installed with a KeyParceler");
            History.Builder history = History.emptyBuilder();
            load(intent.<Bundle>getParcelableExtra(INTENT_KEY), parceler, history, keyManager);
            return history.build();
        }
        return defaultHistory;
    }

    // Writes history, global, and registered key states into `bundle` under
    // PERSISTENCE_KEY. Mirror image of load().
    private static void save(Bundle bundle, KeyParceler parceler, History history, KeyManager keyManager) {
        ArrayList<Parcelable> historyStates = collectStatesFromKeys(keyManager, parceler, history.reverseIterator(), history.size());
        ArrayList<Parcelable> globalStates = collectStatesFromKeys(keyManager, parceler, keyManager.globalKeys.iterator(), keyManager.globalKeys.size());
        ArrayList<Parcelable> registeredKeyStates = collectStatesFromKeys(keyManager, parceler, keyManager.registeredKeys.iterator(), keyManager.registeredKeys.size());
        Bundle innerBundle = new Bundle();
        innerBundle.putParcelableArrayList(KeyManager.GLOBAL_KEYS, globalStates);
        innerBundle.putParcelableArrayList(KeyManager.HISTORY_KEYS, historyStates);
        innerBundle.putParcelableArrayList(KeyManager.REGISTERED_KEYS, registeredKeyStates);
        bundle.putBundle(PERSISTENCE_KEY, innerBundle);
    }

    // Parcels the state of each key, skipping keys annotated @NotPersistent.
    private static ArrayList<Parcelable> collectStatesFromKeys(KeyManager keyManager, KeyParceler parceler, Iterator<Object> keys, int size) {
        ArrayList<Parcelable> parcelables = new ArrayList<>(size);
        while(keys.hasNext()) {
            Object key = keys.next();
            if(!key.getClass().isAnnotationPresent(NotPersistent.class)) {
                parcelables.add(keyManager.getState(key).toBundle(parceler));
            }
        }
        return parcelables;
    }

    // Rehydrates a list of parceled states into the KeyManager, optionally also
    // pushing each key onto the history builder and/or the registered-key set.
    private static void loadStatesIntoManager(ArrayList<Parcelable> stateBundles, KeyParceler parceler, KeyManager keyManager, History.Builder builder, boolean addToHistory, boolean addToRegisteredKeys) {
        if(stateBundles != null) {
            for(Parcelable stateBundle : stateBundles) {
                State state = State.fromBundle((Bundle) stateBundle, parceler);
                if(addToHistory) {
                    builder.push(state.getKey());
                }
                if(addToRegisteredKeys) {
                    keyManager.registeredKeys.add(state.getKey());
                }
                if(!keyManager.hasState(state.getKey())) {
                    keyManager.addState(state);
                }
            }
        }
    }

    // Mirror image of save(): restores registered, history, and global key
    // states from the bundle written under PERSISTENCE_KEY.
    private static void load(Bundle bundle, KeyParceler parceler, History.Builder builder, KeyManager keyManager) {
        if(!bundle.containsKey(PERSISTENCE_KEY)) {
            return;
        }
        Bundle innerBundle = bundle.getBundle(PERSISTENCE_KEY);
        if(innerBundle != null) {
            //noinspection ConstantConditions
            loadStatesIntoManager(innerBundle.getParcelableArrayList(KeyManager.REGISTERED_KEYS), parceler, keyManager, builder, false, true);
            loadStatesIntoManager(innerBundle.getParcelableArrayList(KeyManager.HISTORY_KEYS), parceler, keyManager, builder, true, false);
            loadStatesIntoManager(innerBundle.getParcelableArrayList(KeyManager.GLOBAL_KEYS), parceler, keyManager, builder, false, false);
        }
    }
}
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rkr.simplekeyboard.inputmethod.latin; import android.inputmethodservice.InputMethodService; import android.os.SystemClock; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.text.style.CharacterStyle; import android.util.Log; import android.view.KeyEvent; import android.view.inputmethod.ExtractedText; import android.view.inputmethod.ExtractedTextRequest; import android.view.inputmethod.InputConnection; import rkr.simplekeyboard.inputmethod.latin.common.Constants; import rkr.simplekeyboard.inputmethod.latin.common.StringUtils; import rkr.simplekeyboard.inputmethod.latin.common.UnicodeSurrogate; import rkr.simplekeyboard.inputmethod.latin.settings.SpacingAndPunctuations; import rkr.simplekeyboard.inputmethod.latin.utils.CapsModeUtils; import rkr.simplekeyboard.inputmethod.latin.utils.DebugLogUtils; /** * Enrichment class for InputConnection to simplify interaction and add functionality. * * This class serves as a wrapper to be able to simply add hooks to any calls to the underlying * InputConnection. It also keeps track of a number of things to avoid having to call upon IPC * all the time to find out what text is in the buffer, when we need it to determine caps mode * for example. 
*/ public final class RichInputConnection { private static final String TAG = "RichInputConnection"; private static final boolean DBG = false; private static final boolean DEBUG_PREVIOUS_TEXT = false; private static final boolean DEBUG_BATCH_NESTING = false; private static final int INVALID_CURSOR_POSITION = -1; /** * The amount of time a {@link #reloadTextCache} call needs to take for the keyboard to enter */ private static final long SLOW_INPUT_CONNECTION_ON_FULL_RELOAD_MS = 1000; /** * The amount of time a {@link #getTextBeforeCursor} call needs */ private static final long SLOW_INPUT_CONNECTION_ON_PARTIAL_RELOAD_MS = 200; private static final int OPERATION_GET_TEXT_BEFORE_CURSOR = 0; private static final int OPERATION_GET_TEXT_AFTER_CURSOR = 1; private static final int OPERATION_RELOAD_TEXT_CACHE = 3; private static final String[] OPERATION_NAMES = new String[] { "GET_TEXT_BEFORE_CURSOR", "GET_TEXT_AFTER_CURSOR", "GET_WORD_RANGE_AT_CURSOR", "RELOAD_TEXT_CACHE"}; /** * This variable contains an expected value for the selection start position. This is where the * cursor or selection start may end up after all the keyboard-triggered updates have passed. We * keep this to compare it to the actual selection start to guess whether the move was caused by * a keyboard command or not. * It's not really the selection start position: the selection start may not be there yet, and * in some cases, it may never arrive there. */ private int mExpectedSelStart = INVALID_CURSOR_POSITION; // in chars, not code points /** * The expected selection end. Only differs from mExpectedSelStart if a non-empty selection is * expected. The same caveats as mExpectedSelStart apply. */ private int mExpectedSelEnd = INVALID_CURSOR_POSITION; // in chars, not code points /** * This contains the committed text immediately preceding the cursor and the composing * text, if any. It is refreshed when the cursor moves by calling upon the TextView. 
*/ private final StringBuilder mCommittedTextBeforeComposingText = new StringBuilder(); /** * This contains the currently composing text, as LatinIME thinks the TextView is seeing it. */ private final StringBuilder mComposingText = new StringBuilder(); /** * This variable is a temporary object used in {@link #commitText(CharSequence,int)} * to avoid object creation. */ private SpannableStringBuilder mTempObjectForCommitText = new SpannableStringBuilder(); private final InputMethodService mParent; private InputConnection mIC; private int mNestLevel; public RichInputConnection(final InputMethodService parent) { mParent = parent; mIC = null; mNestLevel = 0; } public boolean isConnected() { return mIC != null; } private void checkConsistencyForDebug() { final ExtractedTextRequest r = new ExtractedTextRequest(); r.hintMaxChars = 0; r.hintMaxLines = 0; r.token = 1; r.flags = 0; final ExtractedText et = mIC.getExtractedText(r, 0); final CharSequence beforeCursor = getTextBeforeCursor(Constants.EDITOR_CONTENTS_CACHE_SIZE, 0); final StringBuilder internal = new StringBuilder(mCommittedTextBeforeComposingText) .append(mComposingText); if (null == et || null == beforeCursor) return; final int actualLength = Math.min(beforeCursor.length(), internal.length()); if (internal.length() > actualLength) { internal.delete(0, internal.length() - actualLength); } final String reference = (beforeCursor.length() <= actualLength) ? 
beforeCursor.toString() : beforeCursor.subSequence(beforeCursor.length() - actualLength, beforeCursor.length()).toString(); if (et.selectionStart != mExpectedSelStart || !(reference.equals(internal.toString()))) { final String context = "Expected selection start = " + mExpectedSelStart + "\nActual selection start = " + et.selectionStart + "\nExpected text = " + internal.length() + " " + internal + "\nActual text = " + reference.length() + " " + reference; ((LatinIME)mParent).debugDumpStateAndCrashWithException(context); } else { Log.e(TAG, DebugLogUtils.getStackTrace(2)); Log.e(TAG, "Exp <> Actual : " + mExpectedSelStart + " <> " + et.selectionStart); } } public void beginBatchEdit() { if (++mNestLevel == 1) { mIC = mParent.getCurrentInputConnection(); if (isConnected()) { mIC.beginBatchEdit(); } } else { if (DBG) { throw new RuntimeException("Nest level too deep"); } Log.e(TAG, "Nest level too deep : " + mNestLevel); } if (DEBUG_BATCH_NESTING) checkBatchEdit(); if (DEBUG_PREVIOUS_TEXT) checkConsistencyForDebug(); } public void endBatchEdit() { if (mNestLevel <= 0) Log.e(TAG, "Batch edit not in progress!"); // TODO: exception instead if (--mNestLevel == 0 && isConnected()) { mIC.endBatchEdit(); } if (DEBUG_PREVIOUS_TEXT) checkConsistencyForDebug(); } /** * Reset the cached text and retrieve it again from the editor. * * This should be called when the cursor moved. It's possible that we can't connect to * the application when doing this; notably, this happens sometimes during rotation, probably * because of a race condition in the framework. In this case, we just can't retrieve the * data, so we empty the cache and note that we don't know the new cursor position, and we * return false so that the caller knows about this and can retry later. * * @param newSelStart the new position of the selection start, as received from the system. * @param newSelEnd the new position of the selection end, as received from the system. 
* @return true if we were able to connect to the editor successfully, false otherwise. When * this method returns false, the caches could not be correctly refreshed so they were only * reset: the caller should try again later to return to normal operation. */ public boolean resetCachesUponCursorMoveAndReturnSuccess(final int newSelStart, final int newSelEnd) { mExpectedSelStart = newSelStart; mExpectedSelEnd = newSelEnd; mComposingText.setLength(0); final boolean didReloadTextSuccessfully = reloadTextCache(); if (!didReloadTextSuccessfully) { Log.d(TAG, "Will try to retrieve text later."); return false; } return true; } /** * Reload the cached text from the InputConnection. * * @return true if successful */ private boolean reloadTextCache() { mCommittedTextBeforeComposingText.setLength(0); mIC = mParent.getCurrentInputConnection(); // Call upon the inputconnection directly since our own method is using the cache, and // we want to refresh it. final CharSequence textBeforeCursor = getTextBeforeCursorAndDetectLaggyConnection( OPERATION_RELOAD_TEXT_CACHE, SLOW_INPUT_CONNECTION_ON_FULL_RELOAD_MS, Constants.EDITOR_CONTENTS_CACHE_SIZE, 0 /* flags */); if (null == textBeforeCursor) { // For some reason the app thinks we are not connected to it. This looks like a // framework bug... Fall back to ground state and return false. mExpectedSelStart = INVALID_CURSOR_POSITION; mExpectedSelEnd = INVALID_CURSOR_POSITION; Log.e(TAG, "Unable to connect to the editor to retrieve text."); return false; } mCommittedTextBeforeComposingText.append(textBeforeCursor); return true; } private void checkBatchEdit() { if (mNestLevel != 1) { // TODO: exception instead Log.e(TAG, "Batch edit level incorrect : " + mNestLevel); Log.e(TAG, DebugLogUtils.getStackTrace(4)); } } public void finishComposingText() { if (DEBUG_BATCH_NESTING) checkBatchEdit(); if (DEBUG_PREVIOUS_TEXT) checkConsistencyForDebug(); // TODO: this is not correct! The cursor is not necessarily after the composing text. 
// In the practice right now this is only called when input ends so it will be reset so // it works, but it's wrong and should be fixed. mCommittedTextBeforeComposingText.append(mComposingText); mComposingText.setLength(0); if (isConnected()) { mIC.finishComposingText(); } } /** * Calls {@link InputConnection#commitText(CharSequence, int)}. * * @param text The text to commit. This may include styles. * @param newCursorPosition The new cursor position around the text. */ public void commitText(final CharSequence text, final int newCursorPosition) { RichInputMethodManager.getInstance().resetSubtypeCycleOrder(); if (DEBUG_BATCH_NESTING) checkBatchEdit(); if (DEBUG_PREVIOUS_TEXT) checkConsistencyForDebug(); mCommittedTextBeforeComposingText.append(text); // TODO: the following is exceedingly error-prone. Right now when the cursor is in the // middle of the composing word mComposingText only holds the part of the composing text // that is before the cursor, so this actually works, but it's terribly confusing. Fix this. if (hasCursorPosition()) { mExpectedSelStart += text.length() - mComposingText.length(); mExpectedSelEnd = mExpectedSelStart; } mComposingText.setLength(0); if (isConnected()) { mTempObjectForCommitText.clear(); mTempObjectForCommitText.append(text); final CharacterStyle[] spans = mTempObjectForCommitText.getSpans( 0, text.length(), CharacterStyle.class); for (final CharacterStyle span : spans) { final int spanStart = mTempObjectForCommitText.getSpanStart(span); final int spanEnd = mTempObjectForCommitText.getSpanEnd(span); final int spanFlags = mTempObjectForCommitText.getSpanFlags(span); // We have to adjust the end of the span to include an additional character. // This is to avoid splitting a unicode surrogate pair. 
// See rkr.simplekeyboard.inputmethod.latin.common.Constants.UnicodeSurrogate // See https://b.corp.google.com/issues/19255233 if (0 < spanEnd && spanEnd < mTempObjectForCommitText.length()) { final char spanEndChar = mTempObjectForCommitText.charAt(spanEnd - 1); final char nextChar = mTempObjectForCommitText.charAt(spanEnd); if (UnicodeSurrogate.isLowSurrogate(spanEndChar) && UnicodeSurrogate.isHighSurrogate(nextChar)) { mTempObjectForCommitText.setSpan(span, spanStart, spanEnd + 1, spanFlags); } } } mIC.commitText(mTempObjectForCommitText, newCursorPosition); } } public CharSequence getSelectedText(final int flags) { return isConnected() ? mIC.getSelectedText(flags) : null; } public boolean canDeleteCharacters() { return mExpectedSelStart > 0; } /** * Gets the caps modes we should be in after this specific string. * * This returns a bit set of TextUtils#CAP_MODE_*, masked by the inputType argument. * This method also supports faking an additional space after the string passed in argument, * to support cases where a space will be added automatically, like in phantom space * state for example. * Note that for English, we are using American typography rules (which are not specific to * American English, it's just the most common set of rules for English). * * @param inputType a mask of the caps modes to test for. * @param spacingAndPunctuations the values of the settings to use for locale and separators. * @return the caps modes that should be on as a set of bits */ public int getCursorCapsMode(final int inputType, final SpacingAndPunctuations spacingAndPunctuations) { mIC = mParent.getCurrentInputConnection(); if (!isConnected()) { return Constants.TextUtils.CAP_MODE_OFF; } if (!TextUtils.isEmpty(mComposingText)) { // We have some composing text - we should be in MODE_CHARACTERS only. 
return TextUtils.CAP_MODE_CHARACTERS & inputType; } // TODO: this will generally work, but there may be cases where the buffer contains SOME // information but not enough to determine the caps mode accurately. This may happen after // heavy pressing of delete, for example DEFAULT_TEXT_CACHE_SIZE - 5 times or so. // getCapsMode should be updated to be able to return a "not enough info" result so that // we can get more context only when needed. if (TextUtils.isEmpty(mCommittedTextBeforeComposingText) && 0 != mExpectedSelStart) { if (!reloadTextCache()) { Log.w(TAG, "Unable to connect to the editor. " + "Setting caps mode without knowing text."); } } // This never calls InputConnection#getCapsMode - in fact, it's a static method that // never blocks or initiates IPC. // TODO: don't call #toString() here. Instead, all accesses to // mCommittedTextBeforeComposingText should be done on the main thread. return CapsModeUtils.getCapsMode(mCommittedTextBeforeComposingText.toString(), inputType, spacingAndPunctuations); } public int getCodePointBeforeCursor() { final int length = mCommittedTextBeforeComposingText.length(); if (length < 1) return Constants.NOT_A_CODE; return Character.codePointBefore(mCommittedTextBeforeComposingText, length); } public CharSequence getTextBeforeCursor(final int n, final int flags) { final int cachedLength = mCommittedTextBeforeComposingText.length() + mComposingText.length(); // If we have enough characters to satisfy the request, or if we have all characters in // the text field, then we can return the cached version right away. 
    // However, if we don't have an expected cursor position, then we should always
    // go fetch the cache again (as it happens, INVALID_CURSOR_POSITION < 0, so we need to
    // test for this explicitly)
    if (INVALID_CURSOR_POSITION != mExpectedSelStart
            && (cachedLength >= n || cachedLength >= mExpectedSelStart)) {
        final StringBuilder s = new StringBuilder(mCommittedTextBeforeComposingText);
        // We call #toString() here to create a temporary object.
        // In some situations, this method is called on a worker thread, and it's possible
        // the main thread touches the contents of mComposingText while this worker thread
        // is suspended, because mComposingText is a StringBuilder. This may lead to crashes,
        // so we call #toString() on it. That will result in the return value being strictly
        // speaking wrong, but since this is used for basing bigram probability off, and
        // it's only going to matter for one getSuggestions call, it's fine in the practice.
        s.append(mComposingText.toString());
        if (s.length() > n) {
            s.delete(0, s.length() - n);
        }
        return s;
    }
    return getTextBeforeCursorAndDetectLaggyConnection(
            OPERATION_GET_TEXT_BEFORE_CURSOR,
            SLOW_INPUT_CONNECTION_ON_PARTIAL_RELOAD_MS,
            n, flags);
}

/** Returns up to {@code n} characters after the cursor; always goes through the IC. */
public CharSequence getTextAfterCursor(final int n, final int flags) {
    return getTextAfterCursorAndDetectLaggyConnection(
            OPERATION_GET_TEXT_AFTER_CURSOR,
            SLOW_INPUT_CONNECTION_ON_PARTIAL_RELOAD_MS,
            n, flags);
}

/**
 * Fetches text before the cursor from the input connection, logging a warning when the
 * round-trip exceeds {@code timeout} milliseconds. Returns null when not connected.
 */
private CharSequence getTextBeforeCursorAndDetectLaggyConnection(
        final int operation, final long timeout, final int n, final int flags) {
    mIC = mParent.getCurrentInputConnection();
    if (!isConnected()) {
        return null;
    }
    final long startTime = SystemClock.uptimeMillis();
    final CharSequence result = mIC.getTextBeforeCursor(n, flags);
    detectLaggyConnection(operation, timeout, startTime);
    return result;
}

/**
 * Fetches text after the cursor from the input connection, logging a warning when the
 * round-trip exceeds {@code timeout} milliseconds. Returns null when not connected.
 */
private CharSequence getTextAfterCursorAndDetectLaggyConnection(
        final int operation, final long timeout, final int n, final int flags) {
    mIC = mParent.getCurrentInputConnection();
    if (!isConnected()) {
        return null;
    }
    final long startTime = SystemClock.uptimeMillis();
    final CharSequence result = mIC.getTextAfterCursor(n, flags);
    detectLaggyConnection(operation, timeout, startTime);
    return result;
}

/** Logs a warning when the elapsed time for {@code operation} reached {@code timeout} ms. */
private void detectLaggyConnection(final int operation, final long timeout,
        final long startTime) {
    final long duration = SystemClock.uptimeMillis() - startTime;
    if (duration >= timeout) {
        final String operationName = OPERATION_NAMES[operation];
        Log.w(TAG, "Slow InputConnection: " + operationName + " took " + duration + " ms.");
    }
}

/**
 * Deletes {@code beforeLength} characters before the cursor, updating the local text caches
 * and expected selection first, then forwarding to the input connection.
 */
public void deleteTextBeforeCursor(final int beforeLength) {
    if (DEBUG_BATCH_NESTING) checkBatchEdit();
    // TODO: the following is incorrect if the cursor is not immediately after the composition.
    // Right now we never come here in this case because we reset the composing state before we
    // come here in this case, but we need to fix this.
    final int remainingChars = mComposingText.length() - beforeLength;
    if (remainingChars >= 0) {
        mComposingText.setLength(remainingChars);
    } else {
        mComposingText.setLength(0);
        // Never cut under 0
        final int len = Math.max(mCommittedTextBeforeComposingText.length()
                + remainingChars, 0);
        mCommittedTextBeforeComposingText.setLength(len);
    }
    if (mExpectedSelStart > beforeLength) {
        mExpectedSelStart -= beforeLength;
        mExpectedSelEnd -= beforeLength;
    } else {
        // There are fewer characters before the cursor in the buffer than we are being asked to
        // delete. Only delete what is there, and update the end with the amount deleted.
        mExpectedSelEnd -= mExpectedSelStart;
        mExpectedSelStart = 0;
    }
    if (isConnected()) {
        mIC.deleteSurroundingText(beforeLength, 0);
    }
    if (DEBUG_PREVIOUS_TEXT) checkConsistencyForDebug();
}

/** Forwards an editor action (e.g. IME_ACTION_*) to the current input connection. */
public void performEditorAction(final int actionId) {
    mIC = mParent.getCurrentInputConnection();
    if (isConnected()) {
        mIC.performEditorAction(actionId);
    }
}

/**
 * Sends a raw key event to the editor, mirroring its effect (for ACTION_DOWN) into the
 * local text caches and expected selection before forwarding it.
 */
public void sendKeyEvent(final KeyEvent keyEvent) {
    if (DEBUG_BATCH_NESTING) checkBatchEdit();
    if (keyEvent.getAction() == KeyEvent.ACTION_DOWN) {
        if (DEBUG_PREVIOUS_TEXT) checkConsistencyForDebug();
        // This method is only called for enter or backspace when speaking to old applications
        // (target SDK <= 15 (Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)), or for digits.
        // When talking to new applications we never use this method because it's inherently
        // racy and has unpredictable results, but for backward compatibility we continue
        // sending the key events for only Enter and Backspace because some applications
        // mistakenly catch them to do some stuff.
        switch (keyEvent.getKeyCode()) {
            case KeyEvent.KEYCODE_ENTER:
                mCommittedTextBeforeComposingText.append("\n");
                if (hasCursorPosition()) {
                    mExpectedSelStart += 1;
                    mExpectedSelEnd = mExpectedSelStart;
                }
                break;
            case KeyEvent.KEYCODE_DEL:
                // Delete from the composing text first; only touch committed text when
                // there is no composition.
                if (0 == mComposingText.length()) {
                    if (mCommittedTextBeforeComposingText.length() > 0) {
                        mCommittedTextBeforeComposingText.delete(
                                mCommittedTextBeforeComposingText.length() - 1,
                                mCommittedTextBeforeComposingText.length());
                    }
                } else {
                    mComposingText.delete(mComposingText.length() - 1, mComposingText.length());
                }
                if (mExpectedSelStart > 0 && mExpectedSelStart == mExpectedSelEnd) {
                    // TODO: Handle surrogate pairs.
                    mExpectedSelStart -= 1;
                }
                mExpectedSelEnd = mExpectedSelStart;
                break;
            case KeyEvent.KEYCODE_UNKNOWN:
                if (null != keyEvent.getCharacters()) {
                    mCommittedTextBeforeComposingText.append(keyEvent.getCharacters());
                    if (hasCursorPosition()) {
                        mExpectedSelStart += keyEvent.getCharacters().length();
                        mExpectedSelEnd = mExpectedSelStart;
                    }
                }
                break;
            default:
                final String text = StringUtils.newSingleCodePointString(
                        keyEvent.getUnicodeChar());
                mCommittedTextBeforeComposingText.append(text);
                if (hasCursorPosition()) {
                    mExpectedSelStart += text.length();
                    mExpectedSelEnd = mExpectedSelStart;
                }
                break;
        }
    }
    if (isConnected()) {
        mIC.sendKeyEvent(keyEvent);
    }
}

/**
 * Set the selection of the text editor.
 *
 * Calls through to {@link InputConnection#setSelection(int, int)}.
 *
 * Does nothing when the arguments are negative or when the requested selection already
 * matches the expected one; when the input connection rejects the selection, the text
 * cache is not reloaded.
 *
 * @param start the character index where the selection should start.
 * @param end the character index where the selection should end.
 */
public void setSelection(int start, int end) {
    if (DEBUG_BATCH_NESTING) checkBatchEdit();
    if (DEBUG_PREVIOUS_TEXT) checkConsistencyForDebug();
    if (start < 0 || end < 0) {
        return;
    }
    if (mExpectedSelStart == start && mExpectedSelEnd == end) {
        return;
    }
    mExpectedSelStart = start;
    mExpectedSelEnd = end;
    if (isConnected()) {
        final boolean isIcValid = mIC.setSelection(start, end);
        if (!isIcValid) {
            return;
        }
    }
    reloadTextCache();
}

/** Returns the expected selection start index (may be INVALID_CURSOR_POSITION). */
public int getExpectedSelectionStart() {
    return mExpectedSelStart;
}

/** Returns the expected selection end index (may be INVALID_CURSOR_POSITION). */
public int getExpectedSelectionEnd() {
    return mExpectedSelEnd;
}

/**
 * @return whether there is a selection currently active.
*/ public boolean hasSelection() { return mExpectedSelEnd != mExpectedSelStart; } public boolean hasCursorPosition() { return mExpectedSelStart != INVALID_CURSOR_POSITION && mExpectedSelEnd != INVALID_CURSOR_POSITION; } /** * Some chars, such as emoji consist of 2 chars (surrogate pairs). We should treat them as one character. */ public int getUnicodeSteps(int chars, boolean rightSidePointer) { int steps = 0; if (chars < 0) { CharSequence charsBeforeCursor = rightSidePointer && hasSelection() ? getSelectedText(0) : getTextBeforeCursor(-chars * 2, 0); if (charsBeforeCursor != null) { for (int i = charsBeforeCursor.length() - 1; i >= 0 && chars < 0; i--, chars++, steps--) { if (Character.isSurrogate(charsBeforeCursor.charAt(i))) { steps--; i--; } } } } else if (chars > 0) { CharSequence charsAfterCursor = !rightSidePointer && hasSelection() ? getSelectedText(0) : getTextAfterCursor(chars * 2, 0); if (charsAfterCursor != null) { for (int i = 0; i < charsAfterCursor.length() && chars > 0; i++, chars--, steps++) { if (Character.isSurrogate(charsAfterCursor.charAt(i))) { steps++; i++; } } } } return steps; } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.uamp.ui; import android.app.ActivityOptions; import android.app.FragmentManager; import android.content.Intent; import android.content.res.Configuration; import android.graphics.Color; import android.os.Bundle; import android.os.Handler; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarActivity; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.MediaRouteButton; import android.support.v7.media.MediaRouter; import android.support.v7.widget.Toolbar; import android.view.Gravity; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ListView; import android.widget.SimpleAdapter; import com.example.android.uamp.R; import com.example.android.uamp.utils.LogHelper; import com.example.android.uamp.utils.PrefUtils; import com.example.android.uamp.utils.ResourceHelper; import com.github.amlcurran.showcaseview.ShowcaseView; import com.github.amlcurran.showcaseview.targets.ViewTarget; import com.google.android.libraries.cast.companionlibrary.cast.VideoCastManager; import com.google.android.libraries.cast.companionlibrary.cast.callbacks.VideoCastConsumerImpl; /** * Abstract activity with toolbar, navigation drawer and cast support. 
Needs to be extended by
 * any activity that wants to be shown as a top level activity.
 *
 * The requirements for a subclass is to call {@link #initializeToolbar()} on onCreate, after
 * setContentView() is called and have three mandatory layout elements:
 * a {@link android.support.v7.widget.Toolbar} with id 'toolbar',
 * a {@link android.support.v4.widget.DrawerLayout} with id 'drawerLayout' and
 * a {@link android.widget.ListView} with id 'drawerList'.
 */
public abstract class ActionBarCastActivity extends ActionBarActivity {

    private static final String TAG = LogHelper.makeLogTag(ActionBarCastActivity.class);

    // Delay (ms) before checking whether the cast button actually became visible.
    private static final int DELAY_MILLIS = 1000;

    private VideoCastManager mCastManager;

    private MenuItem mMediaRouteMenuItem;
    private Toolbar mToolbar;
    private ActionBarDrawerToggle mDrawerToggle;
    private DrawerLayout mDrawerLayout;
    private ListView mDrawerList;

    private DrawerMenuContents mDrawerMenuContents;

    private boolean mToolbarInitialized;

    // Position of the drawer item the user tapped; navigation happens after the drawer closes.
    private int mItemToOpenWhenDrawerCloses = -1;

    private VideoCastConsumerImpl mCastConsumer = new VideoCastConsumerImpl() {

        @Override
        public void onFailed(int resourceId, int statusCode) {
            LogHelper.d(TAG, "onFailed ", resourceId, " status ", statusCode);
        }

        @Override
        public void onConnectionSuspended(int cause) {
            LogHelper.d(TAG, "onConnectionSuspended() was called with cause: ", cause);
        }

        @Override
        public void onConnectivityRecovered() {
        }

        @Override
        public void onCastDeviceDetected(final MediaRouter.RouteInfo info) {
            // FTU stands for First Time Use:
            if (!PrefUtils.isFtuShown(ActionBarCastActivity.this)) {
                // If user is seeing the cast button for the first time, we will
                // show an overlay that explains what that button means.
                PrefUtils.setFtuShown(ActionBarCastActivity.this, true);

                LogHelper.d(TAG, "Route is visible: ", info);
                new Handler().postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        if (mMediaRouteMenuItem.isVisible()) {
                            LogHelper.d(TAG, "Cast Icon is visible: ", info.getName());
                            showFtu();
                        }
                    }
                }, DELAY_MILLIS);
            }
        }
    };

    private DrawerLayout.DrawerListener mDrawerListener = new DrawerLayout.DrawerListener() {
        @Override
        public void onDrawerClosed(View drawerView) {
            if (mDrawerToggle != null) mDrawerToggle.onDrawerClosed(drawerView);
            // Navigate to the selected drawer item, if any, now that the close animation is done.
            int position = mItemToOpenWhenDrawerCloses;
            if (position >= 0) {
                Bundle extras = ActivityOptions.makeCustomAnimation(
                        ActionBarCastActivity.this, R.anim.fade_in, R.anim.fade_out).toBundle();
                Class activityClass = mDrawerMenuContents.getActivity(position);
                startActivity(new Intent(ActionBarCastActivity.this, activityClass), extras);
            }
        }

        @Override
        public void onDrawerStateChanged(int newState) {
            if (mDrawerToggle != null) mDrawerToggle.onDrawerStateChanged(newState);
        }

        @Override
        public void onDrawerSlide(View drawerView, float slideOffset) {
            if (mDrawerToggle != null) mDrawerToggle.onDrawerSlide(drawerView, slideOffset);
        }

        @Override
        public void onDrawerOpened(View drawerView) {
            if (mDrawerToggle != null) mDrawerToggle.onDrawerOpened(drawerView);
            getSupportActionBar().setTitle(R.string.app_name);
        }
    };

    private FragmentManager.OnBackStackChangedListener mBackStackChangedListener =
            new FragmentManager.OnBackStackChangedListener() {
                @Override
                public void onBackStackChanged() {
                    updateDrawerToggle();
                }
            };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        LogHelper.d(TAG, "Activity onCreate");

        // Ensure that Google Play Service is available.
        VideoCastManager.checkGooglePlayServices(this);

        mCastManager = VideoCastManager.getInstance();
        mCastManager.reconnectSessionIfPossible();
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Fail fast when a subclass forgot the initializeToolbar() contract (see class javadoc).
        if (!mToolbarInitialized) {
            throw new IllegalStateException("You must run super.initializeToolbar at " +
                "the end of your onCreate method");
        }
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        if (mDrawerToggle != null) {
            mDrawerToggle.syncState();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        mCastManager.addVideoCastConsumer(mCastConsumer);
        mCastManager.incrementUiCounter();

        // Whenever the fragment back stack changes, we may need to update the
        // action bar toggle: only top level screens show the hamburger-like icon, inner
        // screens - either Activities or fragments - show the "Up" icon instead.
        getFragmentManager().addOnBackStackChangedListener(mBackStackChangedListener);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        if (mDrawerToggle != null) {
            mDrawerToggle.onConfigurationChanged(newConfig);
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        // Undo everything registered in onResume.
        mCastManager.removeVideoCastConsumer(mCastConsumer);
        mCastManager.decrementUiCounter();
        getFragmentManager().removeOnBackStackChangedListener(mBackStackChangedListener);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        super.onCreateOptionsMenu(menu);
        getMenuInflater().inflate(R.menu.main, menu);
        mMediaRouteMenuItem = mCastManager.addMediaRouterButton(menu, R.id.media_route_menu_item);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (mDrawerToggle != null && mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        // If not handled by drawerToggle, home needs to be handled by returning to previous
        if (item != null && item.getItemId() == android.R.id.home) {
            onBackPressed();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
public void onBackPressed() { // If the drawer is open, back will close it if (mDrawerLayout != null && mDrawerLayout.isDrawerOpen(Gravity.START)) { mDrawerLayout.closeDrawers(); return; } // Otherwise, it may return to the previous fragment stack FragmentManager fragmentManager = getFragmentManager(); if (fragmentManager.getBackStackEntryCount() > 0) { fragmentManager.popBackStack(); } else { // Lastly, it will rely on the system behavior for back super.onBackPressed(); } } @Override public void setTitle(CharSequence title) { super.setTitle(title); mToolbar.setTitle(title); } @Override public void setTitle(int titleId) { super.setTitle(titleId); mToolbar.setTitle(titleId); } protected void initializeToolbar() { mToolbar = (Toolbar) findViewById(R.id.toolbar); if (mToolbar == null) { throw new IllegalStateException("Layout is required to include a Toolbar with id " + "'toolbar'"); } mToolbar.inflateMenu(R.menu.main); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawerLayout); if (mDrawerLayout != null) { mDrawerList = (ListView) findViewById(R.id.drawer_list); if (mDrawerList == null) { throw new IllegalStateException("A layout with a drawerLayout is required to" + "include a ListView with id 'drawerList'"); } // Create an ActionBarDrawerToggle that will handle opening/closing of the drawer: mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, mToolbar, R.string.open_content_drawer, R.string.close_content_drawer); mDrawerLayout.setDrawerListener(mDrawerListener); mDrawerLayout.setStatusBarBackgroundColor( ResourceHelper.getThemeColor(this, R.attr.colorPrimary, android.R.color.black)); populateDrawerItems(); setSupportActionBar(mToolbar); updateDrawerToggle(); } else { setSupportActionBar(mToolbar); } mToolbarInitialized = true; } private void populateDrawerItems() { mDrawerMenuContents = new DrawerMenuContents(this); final int selectedPosition = mDrawerMenuContents.getPosition(this.getClass()); final int unselectedColor = Color.WHITE; final int 
selectedColor = getResources().getColor(R.color.drawer_item_selected_background); SimpleAdapter adapter = new SimpleAdapter(this, mDrawerMenuContents.getItems(), R.layout.drawer_list_item, new String[]{DrawerMenuContents.FIELD_TITLE, DrawerMenuContents.FIELD_ICON}, new int[]{R.id.drawer_item_title, R.id.drawer_item_icon}) { @Override public View getView(int position, View convertView, ViewGroup parent) { View view = super.getView(position, convertView, parent); int color = unselectedColor; if (position == selectedPosition) { color = selectedColor; } view.setBackgroundColor(color); return view; } }; mDrawerList.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { if (position != selectedPosition) { view.setBackgroundColor(getResources().getColor( R.color.drawer_item_selected_background)); mItemToOpenWhenDrawerCloses = position; } mDrawerLayout.closeDrawers(); } }); mDrawerList.setAdapter(adapter); } protected void updateDrawerToggle() { if (mDrawerToggle == null) { return; } boolean isRoot = getFragmentManager().getBackStackEntryCount() == 0; mDrawerToggle.setDrawerIndicatorEnabled(isRoot); getSupportActionBar().setDisplayShowHomeEnabled(!isRoot); getSupportActionBar().setDisplayHomeAsUpEnabled(!isRoot); getSupportActionBar().setHomeButtonEnabled(!isRoot); if (isRoot) { mDrawerToggle.syncState(); } } /** * Shows the Cast First Time User experience to the user (an overlay that explains what is * the Cast icon) */ private void showFtu() { Menu menu = mToolbar.getMenu(); View view = menu.findItem(R.id.media_route_menu_item).getActionView(); if (view != null && view instanceof MediaRouteButton) { new ShowcaseView.Builder(this) .setTarget(new ViewTarget(view)) .setContentTitle(R.string.touch_to_cast) .hideOnTouchOutside() .build(); } } }
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v9/services/conversion_upload_service.proto

package com.google.ads.googleads.v9.services;

/**
 * <pre>
 * Request message for [ConversionUploadService.UploadClickConversions][google.ads.googleads.v9.services.ConversionUploadService.UploadClickConversions].
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v9.services.UploadClickConversionsRequest}
 */
public final class UploadClickConversionsRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.UploadClickConversionsRequest)
    UploadClickConversionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use UploadClickConversionsRequest.newBuilder() to construct.
  private UploadClickConversionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UploadClickConversionsRequest() {
    customerId_ = "";
    conversions_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new UploadClickConversionsRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor (older protoc codegen style): consumes tag/value
  // pairs until end-of-stream, routing unrecognized tags into unknownFields.
  private UploadClickConversionsRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            customerId_ = s;
            break;
          }
          case 18: {
            // First repeated element: lazily allocate the list and flag it in the bit field.
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              conversions_ = new java.util.ArrayList<com.google.ads.googleads.v9.services.ClickConversion>();
              mutable_bitField0_ |= 0x00000001;
            }
            conversions_.add(
                input.readMessage(com.google.ads.googleads.v9.services.ClickConversion.parser(), extensionRegistry));
            break;
          }
          case 24: {

            partialFailure_ = input.readBool();
            break;
          }
          case 32: {

            validateOnly_ = input.readBool();
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Seal the repeated field and unknown-field set even when parsing failed part-way.
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        conversions_ = java.util.Collections.unmodifiableList(conversions_);
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v9_services_UploadClickConversionsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v9_services_UploadClickConversionsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.services.UploadClickConversionsRequest.class, com.google.ads.googleads.v9.services.UploadClickConversionsRequest.Builder.class);
  }

  public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
  private volatile java.lang.Object customerId_;
  /**
   * <pre>
   * Required. The ID of the customer performing the upload.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The customerId.
   */
  @java.lang.Override
  public java.lang.String getCustomerId() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 decode.
      customerId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The ID of the customer performing the upload.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for customerId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCustomerIdBytes() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded ByteString (mirror of getCustomerId's lazy caching).
      customerId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int CONVERSIONS_FIELD_NUMBER = 2;
  private java.util.List<com.google.ads.googleads.v9.services.ClickConversion> conversions_;
  /**
   * <pre>
   * Required. The conversions that are being uploaded.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v9.services.ClickConversion> getConversionsList() {
    return conversions_;
  }
  /**
   * <pre>
   * Required. The conversions that are being uploaded.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v9.services.ClickConversionOrBuilder>
      getConversionsOrBuilderList() {
    return conversions_;
  }
  /**
   * <pre>
   * Required. The conversions that are being uploaded.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public int getConversionsCount() {
    return conversions_.size();
  }
  /**
   * <pre>
   * Required. The conversions that are being uploaded.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v9.services.ClickConversion getConversions(int index) {
    return conversions_.get(index);
  }
  /**
   * <pre>
   * Required. The conversions that are being uploaded.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v9.services.ClickConversionOrBuilder getConversionsOrBuilder(
      int index) {
    return conversions_.get(index);
  }

  public static final int PARTIAL_FAILURE_FIELD_NUMBER = 3;
  private boolean partialFailure_;
  /**
   * <pre>
   * Required. If true, successful operations will be carried out and invalid
   * operations will return errors. If false, all operations will be carried
   * out in one transaction if and only if they are all valid.
   * This should always be set to true.
   * See
   * https://developers.google.com/google-ads/api/docs/best-practices/partial-failures
   * for more information about partial failure.
   * </pre>
   *
   * <code>bool partial_failure = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The partialFailure.
   */
  @java.lang.Override
  public boolean getPartialFailure() {
    return partialFailure_;
  }

  public static final int VALIDATE_ONLY_FIELD_NUMBER = 4;
  private boolean validateOnly_;
  /**
   * <pre>
   * If true, the request is validated but not executed. Only errors are
   * returned, not results.
   * </pre>
   *
   * <code>bool validate_only = 4;</code>
   * @return The validateOnly.
   */
  @java.lang.Override
  public boolean getValidateOnly() {
    return validateOnly_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only fields that differ from their proto3 defaults.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
    }
    for (int i = 0; i < conversions_.size(); i++) {
      output.writeMessage(2, conversions_.get(i));
    }
    if (partialFailure_ != false) {
      output.writeBool(3, partialFailure_);
    }
    if (validateOnly_ != false) {
      output.writeBool(4, validateOnly_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
    }
    for (int i = 0; i < conversions_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, conversions_.get(i));
    }
    if (partialFailure_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(3, partialFailure_);
    }
    if (validateOnly_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(4, validateOnly_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.services.UploadClickConversionsRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.services.UploadClickConversionsRequest other = (com.google.ads.googleads.v9.services.UploadClickConversionsRequest) obj;

    if (!getCustomerId()
        .equals(other.getCustomerId())) return false;
    if (!getConversionsList()
        .equals(other.getConversionsList())) return false;
    if (getPartialFailure()
        != other.getPartialFailure()) return false;
    if (getValidateOnly()
        != other.getValidateOnly()) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 means "not computed yet".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getCustomerId().hashCode();
    if (getConversionsCount() > 0) {
      hash = (37 * hash) + CONVERSIONS_FIELD_NUMBER;
      hash = (53 * hash) + getConversionsList().hashCode();
    }
    hash = (37 * hash) + PARTIAL_FAILURE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getPartialFailure());
    hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getValidateOnly());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite
      extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v9.services.UploadClickConversionsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for [ConversionUploadService.UploadClickConversions][google.ads.googleads.v9.services.ConversionUploadService.UploadClickConversions].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v9.services.UploadClickConversionsRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.UploadClickConversionsRequest)
      com.google.ads.googleads.v9.services.UploadClickConversionsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v9_services_UploadClickConversionsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v9_services_UploadClickConversionsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.services.UploadClickConversionsRequest.class, com.google.ads.googleads.v9.services.UploadClickConversionsRequest.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.services.UploadClickConversionsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getConversionsFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      customerId_ = "";

      if (conversionsBuilder_ == null) {
        conversions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        conversionsBuilder_.clear();
      }
      partialFailure_ = false;

      validateOnly_ = false;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return
com.google.ads.googleads.v9.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v9_services_UploadClickConversionsRequest_descriptor; } @java.lang.Override public com.google.ads.googleads.v9.services.UploadClickConversionsRequest getDefaultInstanceForType() { return com.google.ads.googleads.v9.services.UploadClickConversionsRequest.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v9.services.UploadClickConversionsRequest build() { com.google.ads.googleads.v9.services.UploadClickConversionsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v9.services.UploadClickConversionsRequest buildPartial() { com.google.ads.googleads.v9.services.UploadClickConversionsRequest result = new com.google.ads.googleads.v9.services.UploadClickConversionsRequest(this); int from_bitField0_ = bitField0_; result.customerId_ = customerId_; if (conversionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { conversions_ = java.util.Collections.unmodifiableList(conversions_); bitField0_ = (bitField0_ & ~0x00000001); } result.conversions_ = conversions_; } else { result.conversions_ = conversionsBuilder_.build(); } result.partialFailure_ = partialFailure_; result.validateOnly_ = validateOnly_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v9.services.UploadClickConversionsRequest) { return mergeFrom((com.google.ads.googleads.v9.services.UploadClickConversionsRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v9.services.UploadClickConversionsRequest other) { if (other == com.google.ads.googleads.v9.services.UploadClickConversionsRequest.getDefaultInstance()) return this; if (!other.getCustomerId().isEmpty()) { customerId_ = other.customerId_; onChanged(); } if (conversionsBuilder_ == null) { if (!other.conversions_.isEmpty()) { if (conversions_.isEmpty()) { conversions_ = other.conversions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureConversionsIsMutable(); conversions_.addAll(other.conversions_); } onChanged(); } } else { if (!other.conversions_.isEmpty()) { if (conversionsBuilder_.isEmpty()) { conversionsBuilder_.dispose(); conversionsBuilder_ = null; conversions_ = other.conversions_; bitField0_ = (bitField0_ & ~0x00000001); conversionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getConversionsFieldBuilder() : null; } else { conversionsBuilder_.addAllMessages(other.conversions_); } } } if (other.getPartialFailure() != false) { setPartialFailure(other.getPartialFailure()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v9.services.UploadClickConversionsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.ads.googleads.v9.services.UploadClickConversionsRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object customerId_ = ""; /** * <pre> * Required. The ID of the customer performing the upload. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The customerId. */ public java.lang.String getCustomerId() { java.lang.Object ref = customerId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customerId_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Required. The ID of the customer performing the upload. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The bytes for customerId. 
*/ public com.google.protobuf.ByteString getCustomerIdBytes() { java.lang.Object ref = customerId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); customerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Required. The ID of the customer performing the upload. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The customerId to set. * @return This builder for chaining. */ public Builder setCustomerId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } customerId_ = value; onChanged(); return this; } /** * <pre> * Required. The ID of the customer performing the upload. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return This builder for chaining. */ public Builder clearCustomerId() { customerId_ = getDefaultInstance().getCustomerId(); onChanged(); return this; } /** * <pre> * Required. The ID of the customer performing the upload. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The bytes for customerId to set. * @return This builder for chaining. 
*/ public Builder setCustomerIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); customerId_ = value; onChanged(); return this; } private java.util.List<com.google.ads.googleads.v9.services.ClickConversion> conversions_ = java.util.Collections.emptyList(); private void ensureConversionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { conversions_ = new java.util.ArrayList<com.google.ads.googleads.v9.services.ClickConversion>(conversions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v9.services.ClickConversion, com.google.ads.googleads.v9.services.ClickConversion.Builder, com.google.ads.googleads.v9.services.ClickConversionOrBuilder> conversionsBuilder_; /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public java.util.List<com.google.ads.googleads.v9.services.ClickConversion> getConversionsList() { if (conversionsBuilder_ == null) { return java.util.Collections.unmodifiableList(conversions_); } else { return conversionsBuilder_.getMessageList(); } } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public int getConversionsCount() { if (conversionsBuilder_ == null) { return conversions_.size(); } else { return conversionsBuilder_.getCount(); } } /** * <pre> * Required. The conversions that are being uploaded. 
* </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v9.services.ClickConversion getConversions(int index) { if (conversionsBuilder_ == null) { return conversions_.get(index); } else { return conversionsBuilder_.getMessage(index); } } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setConversions( int index, com.google.ads.googleads.v9.services.ClickConversion value) { if (conversionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConversionsIsMutable(); conversions_.set(index, value); onChanged(); } else { conversionsBuilder_.setMessage(index, value); } return this; } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setConversions( int index, com.google.ads.googleads.v9.services.ClickConversion.Builder builderForValue) { if (conversionsBuilder_ == null) { ensureConversionsIsMutable(); conversions_.set(index, builderForValue.build()); onChanged(); } else { conversionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * Required. The conversions that are being uploaded. 
* </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder addConversions(com.google.ads.googleads.v9.services.ClickConversion value) { if (conversionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConversionsIsMutable(); conversions_.add(value); onChanged(); } else { conversionsBuilder_.addMessage(value); } return this; } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder addConversions( int index, com.google.ads.googleads.v9.services.ClickConversion value) { if (conversionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConversionsIsMutable(); conversions_.add(index, value); onChanged(); } else { conversionsBuilder_.addMessage(index, value); } return this; } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder addConversions( com.google.ads.googleads.v9.services.ClickConversion.Builder builderForValue) { if (conversionsBuilder_ == null) { ensureConversionsIsMutable(); conversions_.add(builderForValue.build()); onChanged(); } else { conversionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * Required. The conversions that are being uploaded. 
* </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder addConversions( int index, com.google.ads.googleads.v9.services.ClickConversion.Builder builderForValue) { if (conversionsBuilder_ == null) { ensureConversionsIsMutable(); conversions_.add(index, builderForValue.build()); onChanged(); } else { conversionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder addAllConversions( java.lang.Iterable<? extends com.google.ads.googleads.v9.services.ClickConversion> values) { if (conversionsBuilder_ == null) { ensureConversionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, conversions_); onChanged(); } else { conversionsBuilder_.addAllMessages(values); } return this; } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearConversions() { if (conversionsBuilder_ == null) { conversions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { conversionsBuilder_.clear(); } return this; } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder removeConversions(int index) { if (conversionsBuilder_ == null) { ensureConversionsIsMutable(); conversions_.remove(index); onChanged(); } else { conversionsBuilder_.remove(index); } return this; } /** * <pre> * Required. The conversions that are being uploaded. 
* </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v9.services.ClickConversion.Builder getConversionsBuilder( int index) { return getConversionsFieldBuilder().getBuilder(index); } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v9.services.ClickConversionOrBuilder getConversionsOrBuilder( int index) { if (conversionsBuilder_ == null) { return conversions_.get(index); } else { return conversionsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public java.util.List<? extends com.google.ads.googleads.v9.services.ClickConversionOrBuilder> getConversionsOrBuilderList() { if (conversionsBuilder_ != null) { return conversionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(conversions_); } } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v9.services.ClickConversion.Builder addConversionsBuilder() { return getConversionsFieldBuilder().addBuilder( com.google.ads.googleads.v9.services.ClickConversion.getDefaultInstance()); } /** * <pre> * Required. The conversions that are being uploaded. 
* </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v9.services.ClickConversion.Builder addConversionsBuilder( int index) { return getConversionsFieldBuilder().addBuilder( index, com.google.ads.googleads.v9.services.ClickConversion.getDefaultInstance()); } /** * <pre> * Required. The conversions that are being uploaded. * </pre> * * <code>repeated .google.ads.googleads.v9.services.ClickConversion conversions = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public java.util.List<com.google.ads.googleads.v9.services.ClickConversion.Builder> getConversionsBuilderList() { return getConversionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v9.services.ClickConversion, com.google.ads.googleads.v9.services.ClickConversion.Builder, com.google.ads.googleads.v9.services.ClickConversionOrBuilder> getConversionsFieldBuilder() { if (conversionsBuilder_ == null) { conversionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v9.services.ClickConversion, com.google.ads.googleads.v9.services.ClickConversion.Builder, com.google.ads.googleads.v9.services.ClickConversionOrBuilder>( conversions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); conversions_ = null; } return conversionsBuilder_; } private boolean partialFailure_ ; /** * <pre> * Required. If true, successful operations will be carried out and invalid * operations will return errors. If false, all operations will be carried * out in one transaction if and only if they are all valid. * This should always be set to true. * See * https://developers.google.com/google-ads/api/docs/best-practices/partial-failures * for more information about partial failure. 
* </pre> * * <code>bool partial_failure = 3 [(.google.api.field_behavior) = REQUIRED];</code> * @return The partialFailure. */ @java.lang.Override public boolean getPartialFailure() { return partialFailure_; } /** * <pre> * Required. If true, successful operations will be carried out and invalid * operations will return errors. If false, all operations will be carried * out in one transaction if and only if they are all valid. * This should always be set to true. * See * https://developers.google.com/google-ads/api/docs/best-practices/partial-failures * for more information about partial failure. * </pre> * * <code>bool partial_failure = 3 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The partialFailure to set. * @return This builder for chaining. */ public Builder setPartialFailure(boolean value) { partialFailure_ = value; onChanged(); return this; } /** * <pre> * Required. If true, successful operations will be carried out and invalid * operations will return errors. If false, all operations will be carried * out in one transaction if and only if they are all valid. * This should always be set to true. * See * https://developers.google.com/google-ads/api/docs/best-practices/partial-failures * for more information about partial failure. * </pre> * * <code>bool partial_failure = 3 [(.google.api.field_behavior) = REQUIRED];</code> * @return This builder for chaining. */ public Builder clearPartialFailure() { partialFailure_ = false; onChanged(); return this; } private boolean validateOnly_ ; /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 4;</code> * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 4;</code> * @param value The validateOnly to set. 
* @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; onChanged(); return this; } /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 4;</code> * @return This builder for chaining. */ public Builder clearValidateOnly() { validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.UploadClickConversionsRequest) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.UploadClickConversionsRequest) private static final com.google.ads.googleads.v9.services.UploadClickConversionsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.UploadClickConversionsRequest(); } public static com.google.ads.googleads.v9.services.UploadClickConversionsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UploadClickConversionsRequest> PARSER = new com.google.protobuf.AbstractParser<UploadClickConversionsRequest>() { @java.lang.Override public UploadClickConversionsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new UploadClickConversionsRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<UploadClickConversionsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UploadClickConversionsRequest> getParserForType() { 
return PARSER; } @java.lang.Override public com.google.ads.googleads.v9.services.UploadClickConversionsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package com.library;

import android.content.Context;
import android.os.Build;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.util.SparseArrayCompat;
import android.support.v4.view.ViewPager;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.AbsListView;
import android.widget.LinearLayout;
import android.widget.ScrollView;

import com.library.scroll.ScrollFragment;
import com.library.scroll.ScrollHolder;

import java.util.ArrayList;
import java.util.List;

/**
 * A {@code StickHeaderLayout} that hosts a two-part sticky header above a
 * {@link ViewPager} of {@link ScrollFragment}s. As the current page's content
 * scrolls, the header is translated upwards until only its second child (the
 * "stick" view, e.g. a tab strip) remains pinned.
 * <p>
 * The header container must end up with exactly 2 children: the collapsible
 * part (index 0) and the pinned part (index 1).
 * <p>
 * Created by sj on 15/11/20.
 */
public class StickHeaderViewPager extends StickHeaderLayout implements ScrollHolder, ViewPager.OnPageChangeListener {

    private static final int ID_VIEWPAGER = 1;

    private Context mContext;

    /** Container for the two header children (collapsible part + pinned part). */
    protected LinearLayout mStickheader;
    protected ViewPager mViewPager;

    // Measured height of the pinned part (second header child).
    private int mStickViewHeight;
    // Total measured height of the header container.
    private int mStickHeaderHeight;
    // Most negative translationY the header may reach (= -(header - pinned part)).
    private int mMinHeaderTranslation;

    private ViewPagerAdapter mAdapter;
    private List<ScrollFragment> mScrollFragmentList = new ArrayList<>();

    public StickHeaderViewPager(Context context) {
        this(context, null);
    }

    public StickHeaderViewPager(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.mContext = context;
        // add viewpager
        mViewPager = new ViewPager(context);
        mViewPager.setId(ID_VIEWPAGER);
        addView(mViewPager, ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        // add stickheader
        mStickheader = new LinearLayout(context);
        mStickheader.setOrientation(LinearLayout.VERTICAL);
        addView(mStickheader, LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
        mViewPager.addOnPageChangeListener(this);
    }

    /**
     * The first two children added (in the constructor) are the pager and the
     * header container; any further children — typically inflated from XML —
     * are routed into the header container, which may host at most 2 elements.
     */
    @Override
    public final void addView(View child, int index, ViewGroup.LayoutParams params) {
        if (getChildCount() < 2) {
            super.addView(child, index, params);
        } else {
            // FIX: was "> 2", which silently accepted a third header child and
            // only threw on the fourth. The header can host exactly 2 elements.
            if (mStickheader.getChildCount() >= 2) {
                throw new IllegalStateException("only can host 2 elements");
            }
            mStickheader.addView(child, params);
        }
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        if (mStickheader.getChildCount() < 2) {
            throw new IllegalStateException("stickHeader must have 2 elements");
        }
        initStickHeaderViewHeight();
    }

    /**
     * Waits for a layout pass in which both header parts have a measured
     * height, captures those heights, then detaches the listener.
     */
    private void initStickHeaderViewHeight() {
        final ViewTreeObserver vto = mStickheader.getViewTreeObserver();
        vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
            @Override
            public void onGlobalLayout() {
                mStickHeaderHeight = mStickheader.getMeasuredHeight();
                mStickViewHeight = mStickheader.getChildAt(1).getMeasuredHeight();
                if (mStickHeaderHeight > 0 && mStickViewHeight > 0) {
                    // removeOnGlobalLayoutListener only exists since JELLY_BEAN.
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                        mStickheader.getViewTreeObserver().removeOnGlobalLayoutListener(this);
                    } else {
                        mStickheader.getViewTreeObserver().removeGlobalOnLayoutListener(this);
                    }
                    updateChildViewHeight();
                }
            }
        });
    }

    /**
     * Propagates the measured header height to every page so each page can
     * size its placeholder, and computes the scroll limit for the header.
     */
    private void updateChildViewHeight() {
        if (mStickHeaderHeight != 0 && mStickViewHeight != 0) {
            mMinHeaderTranslation = -mStickHeaderHeight + mStickViewHeight;
            if (mScrollFragmentList != null) {
                for (ScrollFragment scrollFragment : mScrollFragmentList) {
                    scrollFragment.updatePlaceHolderViewHight(mStickHeaderHeight);
                }
            }
        }
    }

    public ViewPager getViewPager() {
        return mViewPager;
    }

    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        // FIX: guard against scroll callbacks arriving before update() has
        // attached an adapter (previously an NPE).
        if (mAdapter == null) {
            return;
        }
        int currentItem = mViewPager.getCurrentItem();
        if (positionOffsetPixels > 0) {
            SparseArrayCompat<ScrollHolder> scrollTabHolders = mAdapter.getScrollTabHolders();
            // When swiping towards a lower position the incoming page is at
            // "position"; otherwise it is the page after the current one.
            int holderIndex = (position < currentItem) ? position : position + 1;
            // FIX: bounds check before valueAt (onPageSelected already guards;
            // this callback did not).
            if (scrollTabHolders == null || holderIndex >= scrollTabHolders.size()) {
                return;
            }
            ScrollHolder fragmentContent = scrollTabHolders.valueAt(holderIndex);
            fragmentContent.adjustScroll((int) (mStickheader.getHeight() + mStickheader.getTranslationY()),
                    mStickheader.getHeight());
        }
    }

    @Override
    public void onPageSelected(int position) {
        SparseArrayCompat<ScrollHolder> scrollTabHolders = mAdapter.getScrollTabHolders();
        if (scrollTabHolders == null || scrollTabHolders.size() != mAdapter.getCount()) {
            return;
        }
        ScrollHolder currentHolder = scrollTabHolders.valueAt(position);
        currentHolder.adjustScroll((int) (mStickheader.getHeight() + mStickheader.getTranslationY()),
                mStickheader.getHeight());
    }

    @Override
    public void onPageScrollStateChanged(int state) {
    }

    @Override
    public void adjustScroll(int scrollHeight, int headerHeight) {
        // This container itself has nothing to adjust; pages implement this.
    }

    @Override
    public void onListViewScroll(AbsListView view, int firstVisibleItem, int visibleItemCount,
                                 int totalItemCount, int pagePosition) {
        // Only the visible page is allowed to drive the header.
        if (mViewPager.getCurrentItem() == pagePosition) {
            scrollHeader(getScrollY(view));
        }
    }

    @Override
    public void onScrollViewScroll(ScrollView view, int x, int y, int oldX, int oldY, int pagePosition) {
        if (mViewPager.getCurrentItem() == pagePosition) {
            scrollHeader(view.getScrollY());
        }
    }

    @Override
    public void onRecyclerViewScroll(RecyclerView view, int scrollY, int pagePosition) {
        if (mViewPager.getCurrentItem() == pagePosition) {
            scrollHeader(scrollY);
        }
    }

    /** Translates the header upwards, clamped so the pinned part stays visible. */
    private void scrollHeader(int scrollY) {
        float translationY = Math.max(-scrollY, mMinHeaderTranslation);
        mStickheader.setTranslationY(translationY);
    }

    /**
     * Estimates the absolute scroll offset of an AbsListView from its first
     * visible child. NOTE(review): assumes all rows share the same height —
     * the standard approximation for this pattern; verify against the lists
     * actually used.
     */
    private int getScrollY(AbsListView view) {
        View child = view.getChildAt(0);
        if (child == null) {
            return 0;
        }
        int firstVisiblePosition = view.getFirstVisiblePosition();
        int top = child.getTop();
        int headerHeight = 0;
        if (firstVisiblePosition >= 1) {
            // Past the first row the header placeholder has scrolled off.
            headerHeight = mStickHeaderHeight;
        }
        return -top + firstVisiblePosition * child.getHeight() + headerHeight;
    }

    /** Adapter exposing the configured {@link ScrollFragment}s to the pager. */
    private class ViewPagerAdapter extends StickHeaderViewPagerAdapter {

        public ViewPagerAdapter(FragmentManager fm, StickHeaderViewPager stickHeaderViewPager) {
            super(fm, stickHeaderViewPager);
        }

        @Override
        public Fragment getItem(int position) {
            return mScrollFragmentList.get(position);
        }

        @Override
        public int getCount() {
            return mScrollFragmentList == null ? 0 : mScrollFragmentList.size();
        }

        @Override
        public int getItemPosition(Object object) {
            // Force re-creation on notifyDataSetChanged().
            return POSITION_NONE;
        }

        @Override
        public CharSequence getPageTitle(int position) {
            return ((ScrollFragment) getItem(position)).getTitle();
        }
    }

    /**
     * Applies a builder's configuration: validates it, creates the adapter and
     * installs it on the pager.
     *
     * @throws IllegalStateException if the builder has no FragmentManager or
     *                               no fragments.
     */
    private void update(StickHeaderViewPagerBuilder builder) {
        if (builder == null) {
            return;
        }
        if (builder.fm == null) {
            throw new IllegalStateException("FragmentManager is null");
        }
        if (builder.scrollFragmentList == null) {
            throw new IllegalStateException("At least one scrollFragment");
        }
        mAdapter = new ViewPagerAdapter(builder.fm, this);
        mScrollFragmentList = builder.scrollFragmentList;
        mViewPager.setAdapter(mAdapter);
    }

    /** Fluent configuration for a {@link StickHeaderViewPager}; see {@link #stickTo}. */
    public static class StickHeaderViewPagerBuilder {

        private StickHeaderViewPager stickHeaderViewPager;
        private List<ScrollFragment> scrollFragmentList;
        private FragmentManager fm;

        protected StickHeaderViewPagerBuilder(final StickHeaderViewPager stickHeaderViewPager) {
            this.stickHeaderViewPager = stickHeaderViewPager;
        }

        public StickHeaderViewPagerBuilder setFragmentManager(FragmentManager fm) {
            this.fm = fm;
            return this;
        }

        /** Pushes the accumulated configuration into the target pager. */
        public void notifyData() {
            stickHeaderViewPager.update(this);
        }

        public StickHeaderViewPagerBuilder addScrollFragments(ScrollFragment... fragments) {
            if (fragments == null || fragments.length == 0) {
                throw new IllegalStateException("can't add a null fragment");
            }
            if (scrollFragmentList == null) {
                scrollFragmentList = new ArrayList<>();
            }
            for (ScrollFragment scrollFragment : fragments) {
                // Each fragment learns its page index so its scroll callbacks
                // can be attributed to the right page.
                scrollFragment.setPosition(scrollFragmentList.size());
                scrollFragmentList.add(scrollFragment);
            }
            return this;
        }

        public static StickHeaderViewPagerBuilder stickTo(final StickHeaderViewPager stickHeaderViewPager) {
            return new StickHeaderViewPagerBuilder(stickHeaderViewPager);
        }
    }
}
package it.g2.commons;

import java.util.*;

/**
 * Created by gigitsu on 13/01/15.
 *
 * <p>Array-backed {@link List} that also exposes stack-style {@link #push(Object)} /
 * {@link #pop()}. The backing array doubles when it fills up and halves when no more
 * than a quarter of it is in use, keeping push/pop amortized O(1).
 *
 * <p>Several {@link List} operations are intentionally unsupported (see the
 * "Unsupported operations" regions).
 */
public class HalvingDoublingArray<T> implements List<T> {

    /** Backing storage; only indexes 0..s hold live elements. */
    private Object[] buffer;
    private int s = -1; //last useful index

    public HalvingDoublingArray() {
        this(10);
    }

    /** @param size initial capacity of the backing array */
    public HalvingDoublingArray(int size) {
        this.buffer = new Object[size];
    }

    public int size() {
        return s + 1;
    }

    /**
     * Removes and returns the last element, shrinking the backing array when at most a
     * quarter of it remains in use.
     *
     * @throws ArrayIndexOutOfBoundsException if the list is empty
     */
    public T pop() {
        T t = get(s);
        if (buffer.length / 4 >= --s) halving();
        return t;
    }

    /** Appends an element at the end, doubling the backing array when it is full. */
    public void push(T item) {
        try {
            buffer[++s] = item;
        } catch (ArrayIndexOutOfBoundsException e) {
            doubling();
            buffer[s] = item;
        }
    }

    @Override
    public boolean isEmpty() {
        // BUG FIX: the original returned s >= 0, i.e. the exact opposite of "empty".
        return s < 0;
    }

    @Override
    public T get(int index) {
        checkIndex(index);
        @SuppressWarnings("unchecked")
        final T t = (T) buffer[index];
        return t;
    }

    @Override
    public T set(int index, T item) {
        checkIndex(index);
        @SuppressWarnings("unchecked")
        final T previous = (T) buffer[index];
        buffer[index] = item;
        // BUG FIX: List.set must return the element previously at the position;
        // the original returned the new item.
        return previous;
    }

    @Override
    public boolean add(T item) {
        push(item);
        return true;
    }

    @Override
    public void add(int index, T element) {
        // NOTE(review): rejects index == size(), which List.add permits; kept as-is to
        // preserve existing behavior — use add(T)/push(T) to append.
        if (index > s)
            throw new ArrayIndexOutOfBoundsException(index);

        // Re-push the current last element, then shift (index, s-1] right by one slot.
        add((T) buffer[s]);
        for (int i = s - 1; i > index; i--)
            buffer[i] = buffer[i - 1];

        buffer[index] = element;
    }

    @Override
    public T remove(int index) {
        @SuppressWarnings("unchecked")
        T item = (T) buffer[index];

        // Shift everything after the removed slot left by one.
        for (int i = index; i < s; i++)
            buffer[i] = buffer[i + 1];

        pop(); //remove last item and trigger halving if necessary
        return item;
    }

    @Override
    public void clear() {
        // BUG FIX: the original allocated "new Objects[10]" (java.util.Objects — wrong
        // element type, which would throw ArrayStoreException on the next push) and
        // never reset s, so the list still reported its pre-clear size.
        this.buffer = new Object[10];
        this.s = -1;
    }

    @Override
    public Object[] toArray() {
        // BUG FIX: the list holds s + 1 elements; the original copied only s of them,
        // silently dropping the last element.
        return Arrays.copyOf(buffer, s + 1);
    }

    @Override
    public ListIterator<T> listIterator() {
        return listIterator(0);
    }

    @Override
    public ListIterator<T> listIterator(int index) {
        return new HalvingDoublingIterator<>(index);
    }

    @Override
    public Iterator<T> iterator() {
        return listIterator();
    }

    /** Grows the backing array to twice its current length (at least 2). */
    private void doubling() {
        Object[] temp = buffer;
        buffer = new Object[Math.max(temp.length, 1) * 2];
        System.arraycopy(temp, 0, buffer, 0, temp.length);
    }

    /** Shrinks the backing array to half its current length. */
    private void halving() {
        buffer = Arrays.copyOf(buffer, buffer.length / 2);
    }

    /** @throws ArrayIndexOutOfBoundsException when index is outside [0, size() - 1] */
    private void checkIndex(int index) {
        if (index < 0 || index > s)
            throw new ArrayIndexOutOfBoundsException(index);
    }

    //region Unsupported operations
    @Override
    public int lastIndexOf(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean contains(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int indexOf(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<T> subList(int fromIndex, int toIndex) {
        throw new UnsupportedOperationException();
    }

    @Override
    public <T1> T1[] toArray(T1[] a) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean remove(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean addAll(Collection<? extends T> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean addAll(int index, Collection<? extends T> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }
    //endregion

    /**
     * Cursor over the live range [0, s]. Both {@code next()} and {@code previous()}
     * return the element at the current cursor position before moving it, which is
     * not the standard ListIterator cursor model.
     */
    private class HalvingDoublingIterator<E> implements ListIterator<E> {

        private int i;

        public HalvingDoublingIterator(int index) {
            i = index;
        }

        @Override
        public boolean hasNext() {
            return i <= s;
        }

        @Override
        public E next() {
            checkIndex(i);
            @SuppressWarnings("unchecked")
            E item = (E) buffer[i++];
            return item;
        }

        @Override
        public boolean hasPrevious() {
            return i >= 0;
        }

        @Override
        public E previous() {
            checkIndex(i);
            @SuppressWarnings("unchecked")
            E item = (E) buffer[i--];
            return item;
        }

        @Override
        public int nextIndex() {
            // NOTE(review): ListIterator specifies nextIndex() as the index of the element
            // a subsequent next() would return (here that is i, not i + 1). Kept as-is to
            // avoid changing existing callers; confirm intended semantics.
            return i + 1;
        }

        @Override
        public int previousIndex() {
            return i - 1;
        }

        //region Unsupported operations
        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

        @Override
        public void set(E t) {
            throw new UnsupportedOperationException();
        }

        @Override
        public void add(E item) {
            throw new UnsupportedOperationException();
        }
        //endregion
    }
}
package com.betfair.publicapi.types.global.v3;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code UpdatePaymentCardResp} complex type of the Betfair
 * global API (namespace {@code http://www.betfair.com/publicapi/types/global/v3/}).
 *
 * <p>Extends {@code APIResponse} with the outcome of an update-payment-card call:
 * an error code plus the card and billing-address details as stored after the update.
 * Generated-style code: plain field access (FIELD accessor type), no behavior.
 * Fields marked {@code nillable = true} may be transmitted as explicit XML nil.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "UpdatePaymentCardResp", propOrder = {
    "errorCode",
    "minorErrorCode",
    "nickName",
    "billingName",
    "cardType",
    "expiryDate",
    "startDate",
    "address1",
    "address2",
    "address3",
    "address4",
    "zipCode",
    "country"
})
public class UpdatePaymentCardResp
    extends APIResponse
{

    @XmlElement(required = true)
    protected UpdatePaymentCardErrorEnum errorCode;
    @XmlElement(required = true, nillable = true)
    protected String minorErrorCode;
    @XmlElement(required = true)
    protected String nickName;
    @XmlElement(required = true)
    protected String billingName;
    @XmlElement(required = true)
    protected CardTypeEnum cardType;
    @XmlElement(required = true)
    protected String expiryDate;
    @XmlElement(required = true, nillable = true)
    protected String startDate;
    @XmlElement(required = true)
    protected String address1;
    @XmlElement(required = true, nillable = true)
    protected String address2;
    @XmlElement(required = true, nillable = true)
    protected String address3;
    @XmlElement(required = true, nillable = true)
    protected String address4;
    @XmlElement(required = true, nillable = true)
    protected String zipCode;
    @XmlElement(required = true, nillable = true)
    protected String country;

    /** @return the {@link UpdatePaymentCardErrorEnum} reported for the update request */
    public UpdatePaymentCardErrorEnum getErrorCode() {
        return errorCode;
    }

    /** @param value the {@link UpdatePaymentCardErrorEnum} to set */
    public void setErrorCode(UpdatePaymentCardErrorEnum value) {
        this.errorCode = value;
    }

    /** @return the minor error code, possibly null */
    public String getMinorErrorCode() {
        return minorErrorCode;
    }

    /** @param value the minor error code to set */
    public void setMinorErrorCode(String value) {
        this.minorErrorCode = value;
    }

    /** @return the card's nickname */
    public String getNickName() {
        return nickName;
    }

    /** @param value the card nickname to set */
    public void setNickName(String value) {
        this.nickName = value;
    }

    /** @return the billing name on the card */
    public String getBillingName() {
        return billingName;
    }

    /** @param value the billing name to set */
    public void setBillingName(String value) {
        this.billingName = value;
    }

    /** @return the {@link CardTypeEnum} of the card */
    public CardTypeEnum getCardType() {
        return cardType;
    }

    /** @param value the {@link CardTypeEnum} to set */
    public void setCardType(CardTypeEnum value) {
        this.cardType = value;
    }

    /** @return the card expiry date */
    public String getExpiryDate() {
        return expiryDate;
    }

    /** @param value the expiry date to set */
    public void setExpiryDate(String value) {
        this.expiryDate = value;
    }

    /** @return the card start date, possibly null */
    public String getStartDate() {
        return startDate;
    }

    /** @param value the start date to set */
    public void setStartDate(String value) {
        this.startDate = value;
    }

    /** @return the first billing-address line */
    public String getAddress1() {
        return address1;
    }

    /** @param value the first billing-address line to set */
    public void setAddress1(String value) {
        this.address1 = value;
    }

    /** @return the second billing-address line, possibly null */
    public String getAddress2() {
        return address2;
    }

    /** @param value the second billing-address line to set */
    public void setAddress2(String value) {
        this.address2 = value;
    }

    /** @return the third billing-address line, possibly null */
    public String getAddress3() {
        return address3;
    }

    /** @param value the third billing-address line to set */
    public void setAddress3(String value) {
        this.address3 = value;
    }

    /** @return the fourth billing-address line, possibly null */
    public String getAddress4() {
        return address4;
    }

    /** @param value the fourth billing-address line to set */
    public void setAddress4(String value) {
        this.address4 = value;
    }

    /** @return the billing-address postal/zip code, possibly null */
    public String getZipCode() {
        return zipCode;
    }

    /** @param value the postal/zip code to set */
    public void setZipCode(String value) {
        this.zipCode = value;
    }

    /** @return the billing-address country, possibly null */
    public String getCountry() {
        return country;
    }

    /** @param value the country to set */
    public void setCountry(String value) {
        this.country = value;
    }

}
package org.joml.lwjgl;

import org.joml.Matrix4f;
import org.joml.Vector3f;
import org.lwjgl.BufferUtils;
import org.lwjgl.glfw.*;
import org.lwjgl.opengl.*;

import java.nio.FloatBuffer;
import java.nio.IntBuffer;

import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.system.MemoryUtil.*;

/**
 * LWJGL/JOML demo: two cameras looking at a rotating cube. 'C' toggles which camera is
 * active; the LEFT/RIGHT arrow keys orbit the *inactive* camera, whose view frustum is
 * drawn as a wireframe from the active camera's point of view. Rendering uses the
 * legacy fixed-function pipeline (glBegin/glEnd immediate mode, GL matrix stacks).
 */
public class CameraDemo {
    GLFWErrorCallback errorCallback;
    GLFWKeyCallback keyCallback;
    GLFWFramebufferSizeCallback fbCallback;

    long window;
    int width = 300;
    int height = 300;

    // Declare matrices for two cameras
    Matrix4f[] projMatrix = {new Matrix4f(), new Matrix4f()};
    Matrix4f[] viewMatrix = {new Matrix4f(), new Matrix4f()};
    // Indexes into the matrix arrays; always active == 1 - inactive.
    int active = 0;
    int inactive = 1;
    // And a model matrix for a rotating cube
    Matrix4f modelMatrix = new Matrix4f();
    // Temporary vector
    Vector3f tmp = new Vector3f();

    // Rotation of the inactive camera: -1/0/+1 direction flag driven by the arrow keys.
    float rotate = 0.0f;
    // Accumulated orbit angle (radians) for each camera.
    float[] rotation = {0.0f, 0.0f};

    /** Entry point of the demo lifecycle: init, run the render loop, then tear down GLFW. */
    void run() {
        try {
            init();
            loop();

            glfwDestroyWindow(window);
            keyCallback.free();
        } finally {
            glfwTerminate();
            errorCallback.free();
        }
    }

    /** Creates the GLFW window, installs key/framebuffer callbacks and reads the initial size. */
    void init() {
        glfwSetErrorCallback(errorCallback = GLFWErrorCallback.createPrint(System.err));
        if (!glfwInit())
            throw new IllegalStateException("Unable to initialize GLFW");

        // Configure our window
        glfwDefaultWindowHints();
        glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
        glfwWindowHint(GLFW_RESIZABLE, GLFW_TRUE);

        window = glfwCreateWindow(width, height, "Hello Cameras!", NULL, NULL);
        if ( window == NULL )
            throw new RuntimeException("Failed to create the GLFW window");

        System.out.println("Press 'C' to switch between the two cameras");

        glfwSetKeyCallback(window, keyCallback = new GLFWKeyCallback() {
            public void invoke(long window, int key, int scancode, int action, int mods) {
                if (key == GLFW_KEY_ESCAPE && action == GLFW_RELEASE)
                    glfwSetWindowShouldClose(window, true);
                if (key == GLFW_KEY_C && action == GLFW_RELEASE)
                    switchCamera();
                // Arrow keys set the orbit direction while held; releasing stops the orbit.
                if (key == GLFW_KEY_LEFT && (action == GLFW_PRESS || action == GLFW_REPEAT)) {
                    rotate = 1.0f;
                } else if (key == GLFW_KEY_LEFT && (action == GLFW_RELEASE)) {
                    rotate = 0.0f;
                } else if (key == GLFW_KEY_RIGHT && (action == GLFW_PRESS || action == GLFW_REPEAT)) {
                    rotate = -1.0f;
                } else if (key == GLFW_KEY_RIGHT && (action == GLFW_RELEASE)) {
                    rotate = 0.0f;
                }
            }
        });
        glfwSetFramebufferSizeCallback(window, fbCallback = new GLFWFramebufferSizeCallback() {
            public void invoke(long window, int w, int h) {
                if (w > 0 && h > 0) {
                    width = w;
                    height = h;
                }
            }
        });

        // Center the window on the primary monitor.
        GLFWVidMode vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
        glfwSetWindowPos(window, (vidmode.width() - width) / 2, (vidmode.height() - height) / 2);

        glfwMakeContextCurrent(window);
        glfwSwapInterval(0);
        glfwShowWindow(window);

        // Query the actual framebuffer size (may differ from the window size, e.g. on HiDPI).
        IntBuffer framebufferSize = BufferUtils.createIntBuffer(2);
        nglfwGetFramebufferSize(window, memAddress(framebufferSize), memAddress(framebufferSize) + 4);
        width = framebufferSize.get(0);
        height = framebufferSize.get(1);
    }

    /** Draws a unit cube centered at the origin, one flat color per face (immediate mode). */
    void renderCube() {
        glBegin(GL_QUADS);
        glColor3f(   0.0f,  0.0f,  0.2f );
        glVertex3f(  0.5f, -0.5f, -0.5f );
        glVertex3f( -0.5f, -0.5f, -0.5f );
        glVertex3f( -0.5f,  0.5f, -0.5f );
        glVertex3f(  0.5f,  0.5f, -0.5f );
        glColor3f(   0.0f,  0.0f,  1.0f );
        glVertex3f(  0.5f, -0.5f,  0.5f );
        glVertex3f(  0.5f,  0.5f,  0.5f );
        glVertex3f( -0.5f,  0.5f,  0.5f );
        glVertex3f( -0.5f, -0.5f,  0.5f );
        glColor3f(   1.0f,  0.0f,  0.0f );
        glVertex3f(  0.5f, -0.5f, -0.5f );
        glVertex3f(  0.5f,  0.5f, -0.5f );
        glVertex3f(  0.5f,  0.5f,  0.5f );
        glVertex3f(  0.5f, -0.5f,  0.5f );
        glColor3f(   0.2f,  0.0f,  0.0f );
        glVertex3f( -0.5f, -0.5f,  0.5f );
        glVertex3f( -0.5f,  0.5f,  0.5f );
        glVertex3f( -0.5f,  0.5f, -0.5f );
        glVertex3f( -0.5f, -0.5f, -0.5f );
        glColor3f(   0.0f,  1.0f,  0.0f );
        glVertex3f(  0.5f,  0.5f,  0.5f );
        glVertex3f(  0.5f,  0.5f, -0.5f );
        glVertex3f( -0.5f,  0.5f, -0.5f );
        glVertex3f( -0.5f,  0.5f,  0.5f );
        glColor3f(   0.0f,  0.2f,  0.0f );
        glVertex3f(  0.5f, -0.5f, -0.5f );
        glVertex3f(  0.5f, -0.5f,  0.5f );
        glVertex3f( -0.5f, -0.5f,  0.5f );
        glVertex3f( -0.5f, -0.5f, -0.5f );
        glEnd();
    }

    /** Draws a 41x41 line grid on the y=0 plane (int i widens to float implicitly). */
    void renderGrid() {
        glBegin(GL_LINES);
        glColor3f(0.2f, 0.2f, 0.2f);
        for (int i = -20; i <= 20; i++) {
            glVertex3f(-20.0f, 0.0f, i);
            glVertex3f(20.0f, 0.0f, i);
            glVertex3f(i, 0.0f, -20.0f);
            glVertex3f(i, 0.0f, 20.0f);
        }
        glEnd();
    }

    /**
     * Draws the wireframe frustum of the given view-projection matrix, using JOML's
     * perspectiveOrigin/frustumCorner (corners 0-3 = near plane, 4-7 = far plane).
     */
    void renderFrustum(Matrix4f m) {
        // Perspective origin to near plane
        Vector3f v = tmp;
        glBegin(GL_LINES);
        glColor3f(0.2f, 0.2f, 0.2f);
        for (int i = 0; i < 4; i++) {
            m.perspectiveOrigin(v);
            glVertex3f(v.x, v.y, v.z);
            m.frustumCorner(i, v);
            glVertex3f(v.x, v.y, v.z);
        }
        glEnd();
        // Near plane
        glBegin(GL_LINE_STRIP);
        glColor3f(0.8f, 0.2f, 0.2f);
        for (int i = 0; i < 4 + 1; i++) {
            m.frustumCorner(i & 3, v);
            glVertex3f(v.x, v.y, v.z);
        }
        glEnd();
        // Edges
        glBegin(GL_LINES);
        for (int i = 0; i < 4; i++) {
            m.frustumCorner(3 - i, v);
            glVertex3f(v.x, v.y, v.z);
            m.frustumCorner(4 + ((i + 2) & 3), v);
            glVertex3f(v.x, v.y, v.z);
        }
        glEnd();
        // Far plane
        glBegin(GL_LINE_STRIP);
        for (int i = 0; i < 4 + 1; i++) {
            m.frustumCorner(4 + (i & 3), v);
            glVertex3f(v.x, v.y, v.z);
        }
        glEnd();
    }

    /** Swaps which camera renders the scene and which one is shown as a frustum. */
    void switchCamera() {
        active = 1 - active;
        inactive = 1 - inactive;
    }

    /** Main render loop: advances animation, sets up matrices and draws the scene. */
    void loop() {
        GL.createCapabilities();

        // Set the clear color
        glClearColor(0.6f, 0.7f, 0.8f, 1.0f);
        // Enable depth testing
        glEnable(GL_DEPTH_TEST);
        glEnable(GL_CULL_FACE);

        // Remember the current time.
        long firstTime = System.nanoTime();
        long lastTime = firstTime;

        // FloatBuffer for transferring matrices to OpenGL
        FloatBuffer fb = BufferUtils.createFloatBuffer(16);
        // Matrix to build combined model-view
        Matrix4f modelView = new Matrix4f();
        // Matrix to build combined view-projection
        Matrix4f viewProj = new Matrix4f();

        while ( !glfwWindowShouldClose(window) ) {
            long thisTime = System.nanoTime();
            float diff = (thisTime - firstTime) / 1E9f;
            float angle = diff;
            // Frame delta in seconds, used for frame-rate-independent orbiting.
            float delta = (thisTime - lastTime) / 1E9f;
            lastTime = thisTime;

            // Process rotation
            rotation[inactive] += rotate * delta;

            // Setup both camera's projection matrices
            projMatrix[0].setPerspective((float) Math.toRadians(40), (float)width/height, 1.0f, 20.0f);
            projMatrix[1].setPerspective((float) Math.toRadians(30), (float)width/height, 2.0f, 5.0f);
            // Load the active camera's projection
            glMatrixMode(GL_PROJECTION);
            glLoadMatrixf(projMatrix[active].get(fb));

            // Setup both camera's view matrices
            viewMatrix[0].setLookAt(0, 2, 10, 0, 0, 0, 0, 1, 0).rotateY(rotation[0]);
            viewMatrix[1].setLookAt(3, 1, 1, 0, 0, 0, 0, 1, 0).rotateY(rotation[1]);
            // Apply model transformation to active camera's view
            modelMatrix.rotationY(angle * (float) Math.toRadians(10));
            viewMatrix[active].mul(modelMatrix, modelView);
            // And load it
            glMatrixMode(GL_MODELVIEW);
            glLoadMatrixf(modelView.get(fb));

            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
            glViewport(0, 0, width, height);

            // Render a cube
            renderCube();

            // Load the active camera's view again to render the inactive camera's frustum
            glLoadMatrixf(viewMatrix[active].get(fb));
            // Compute and render the inactive camera's frustum
            viewProj.set(projMatrix[inactive]).mul(viewMatrix[inactive]);
            renderFrustum(viewProj);

            glfwSwapBuffers(window);
            glfwPollEvents();
        }
    }

    public static void main(String[] args) {
        new CameraDemo().run();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.server;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.core.volume.Volume;
import org.apache.accumulo.core.volume.VolumeConfiguration;
import org.apache.accumulo.server.fs.VolumeManager;
import org.apache.accumulo.server.fs.VolumeUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

import com.google.common.collect.Sets;

/**
 * Server-side constants and helpers for Accumulo's on-disk layout: well-known
 * directory names, persistent data-version numbers, the set of configured base
 * volume URIs (validated and cached) and the configured volume replacements.
 */
public class ServerConstants {

  public static final String VERSION_DIR = "version";

  public static final String INSTANCE_ID_DIR = "instance_id";

  /**
   * version (10) reflects changes to how root tablet metadata is serialized in zookeeper starting
   * with 2.1
   */
  public static final int ROOT_TABLET_META_CHANGES = 10;

  /**
   * version (9) reflects changes to crypto that resulted in RFiles and WALs being serialized
   * differently in version 2.0.0. Also RFiles in 2.0.0 may have summary data.
   */
  public static final int CRYPTO_CHANGES = 9;

  /**
   * version (8) reflects changes to RFile index (ACCUMULO-1124) AND the change to WAL tracking in
   * ZK in version 1.8.0
   */
  public static final int SHORTEN_RFILE_KEYS = 8;

  /**
   * Historic data versions
   *
   * <ul>
   * <li>version (7) also reflects the addition of a replication table in 1.7.0
   * <li>version (6) reflects the addition of a separate root table (ACCUMULO-1481) in 1.6.0 -
   * <li>version (5) moves delete file markers for the metadata table into the root tablet
   * <li>version (4) moves logging to HDFS in 1.5.0
   * </ul>
   *
   *
   */
  public static final int DATA_VERSION = ROOT_TABLET_META_CHANGES;

  // Persistent versions this code base can start against; anything older must be upgraded
  // through an intermediate release first.
  public static final Set<Integer> CAN_RUN =
      Set.of(SHORTEN_RFILE_KEYS, CRYPTO_CHANGES, DATA_VERSION);

  // Runnable versions that are not current, i.e. will trigger an upgrade on startup.
  public static final Set<Integer> NEEDS_UPGRADE = Sets.difference(CAN_RUN, Set.of(DATA_VERSION));

  // Lazily-initialized caches guarded by the synchronized accessors below.
  private static Set<String> baseUris = null;
  private static List<Pair<Path,Path>> replacementsList = null;

  public static Set<String> getBaseUris(ServerContext context) {
    return getBaseUris(context.getConfiguration(), context.getHadoopConf());
  }

  // these are functions to delay loading the Accumulo configuration unless we must
  /** Returns the validated, cached set of configured base volume URIs. */
  public static synchronized Set<String> getBaseUris(AccumuloConfiguration conf,
      Configuration hadoopConf) {
    if (baseUris == null) {
      baseUris = Collections.unmodifiableSet(
          checkBaseUris(conf, hadoopConf, VolumeConfiguration.getVolumeUris(conf, hadoopConf),
              false));
    }
    return baseUris;
  }

  /**
   * Validates the configured base dirs: each must be initialized and all must share the same
   * instance id and persistent data version. When {@code ignore} is true, uninitialized dirs
   * are silently dropped instead of raising; configuration order is preserved in the result.
   */
  public static Set<String> checkBaseUris(AccumuloConfiguration conf, Configuration hadoopConf,
      Set<String> configuredBaseDirs, boolean ignore) {
    // all base dirs must have same instance id and data version, any dirs that have neither should
    // be ignored
    String firstDir = null;
    String firstIid = null;
    Integer firstVersion = null;
    // preserve order from configuration (to match user expectations a bit when volumes get sent to
    // user-implemented VolumeChoosers)
    LinkedHashSet<String> baseDirsList = new LinkedHashSet<>();
    for (String baseDir : configuredBaseDirs) {
      Path path = new Path(baseDir, INSTANCE_ID_DIR);
      String currentIid;
      int currentVersion;
      try {
        currentIid = VolumeManager.getInstanceIDFromHdfs(path, conf, hadoopConf);
        Path vpath = new Path(baseDir, VERSION_DIR);
        currentVersion =
            ServerUtil.getAccumuloPersistentVersion(vpath.getFileSystem(hadoopConf), vpath);
      } catch (Exception e) {
        if (ignore) {
          continue;
        } else {
          throw new IllegalArgumentException("Accumulo volume " + path + " not initialized", e);
        }
      }
      if (firstIid == null) {
        // First successfully-read volume becomes the reference for the rest.
        firstIid = currentIid;
        firstDir = baseDir;
        firstVersion = currentVersion;
      } else if (!currentIid.equals(firstIid)) {
        throw new IllegalArgumentException("Configuration " + Property.INSTANCE_VOLUMES.getKey()
            + " contains paths that have different instance ids " + baseDir + " has " + currentIid
            + " and " + firstDir + " has " + firstIid);
      } else if (currentVersion != firstVersion) {
        throw new IllegalArgumentException("Configuration " + Property.INSTANCE_VOLUMES.getKey()
            + " contains paths that have different versions " + baseDir + " has " + currentVersion
            + " and " + firstDir + " has " + firstVersion);
      }
      baseDirsList.add(baseDir);
    }
    if (baseDirsList.isEmpty()) {
      throw new RuntimeException("None of the configured paths are initialized.");
    }
    return baseDirsList;
  }

  public static final String TABLE_DIR = "tables";
  public static final String RECOVERY_DIR = "recovery";
  public static final String WAL_DIR = "wal";

  /** Returns each base URI suffixed with the tables directory. */
  public static Set<String> getTablesDirs(ServerContext context) {
    return VolumeConfiguration.prefix(getBaseUris(context), TABLE_DIR);
  }

  /** Returns each base URI suffixed with the recovery directory. */
  public static Set<String> getRecoveryDirs(ServerContext context) {
    return VolumeConfiguration.prefix(getBaseUris(context), RECOVERY_DIR);
  }

  public static Path getInstanceIdLocation(Volume v) {
    // all base dirs should have the same instance id, so can choose any one
    return v.prefixChild(INSTANCE_ID_DIR);
  }

  public static Path getDataVersionLocation(Volume v) {
    // all base dirs should have the same version, so can choose any one
    return v.prefixChild(VERSION_DIR);
  }

  /**
   * Parses, validates and caches {@code instance.volumes.replacements}: a comma-separated list
   * of whitespace-separated "from to" URI pairs. Each URI must be fully qualified and every
   * "to" path must be one of the configured base volumes. Returns an empty list (uncached)
   * when the property is blank.
   */
  public static synchronized List<Pair<Path,Path>> getVolumeReplacements(AccumuloConfiguration conf,
      Configuration hadoopConf) {
    if (replacementsList == null) {
      String replacements = conf.get(Property.INSTANCE_VOLUMES_REPLACEMENTS);

      replacements = replacements.trim();

      if (replacements.isEmpty()) {
        return Collections.emptyList();
      }

      String[] pairs = replacements.split(",");
      List<Pair<Path,Path>> ret = new ArrayList<>();

      for (String pair : pairs) {

        String[] uris = pair.split("\\s+");
        if (uris.length != 2) {
          throw new IllegalArgumentException(
              Property.INSTANCE_VOLUMES_REPLACEMENTS.getKey() + " contains malformed pair " + pair);
        }

        Path p1, p2;
        try {
          // URI constructor handles hex escaping
          p1 = new Path(new URI(VolumeUtil.removeTrailingSlash(uris[0].trim())));
          if (p1.toUri().getScheme() == null) {
            throw new IllegalArgumentException(Property.INSTANCE_VOLUMES_REPLACEMENTS.getKey()
                + " contains " + uris[0] + " which is not fully qualified");
          }
        } catch (URISyntaxException e) {
          throw new IllegalArgumentException(Property.INSTANCE_VOLUMES_REPLACEMENTS.getKey()
              + " contains " + uris[0] + " which has a syntax error", e);
        }

        try {
          p2 = new Path(new URI(VolumeUtil.removeTrailingSlash(uris[1].trim())));
          if (p2.toUri().getScheme() == null) {
            throw new IllegalArgumentException(Property.INSTANCE_VOLUMES_REPLACEMENTS.getKey()
                + " contains " + uris[1] + " which is not fully qualified");
          }
        } catch (URISyntaxException e) {
          throw new IllegalArgumentException(Property.INSTANCE_VOLUMES_REPLACEMENTS.getKey()
              + " contains " + uris[1] + " which has a syntax error", e);
        }

        ret.add(new Pair<>(p1, p2));
      }

      HashSet<Path> baseDirs = new HashSet<>();
      for (String baseDir : getBaseUris(conf, hadoopConf)) {
        // normalize using path
        baseDirs.add(new Path(baseDir));
      }

      for (Pair<Path,Path> pair : ret) {
        if (!baseDirs.contains(pair.getSecond())) {
          throw new IllegalArgumentException(Property.INSTANCE_VOLUMES_REPLACEMENTS.getKey()
              + " contains " + pair.getSecond() + " which is not a configured volume");
        }
      }

      // only set if get here w/o exception
      replacementsList = ret;
    }
    return replacementsList;
  }
}
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.listener.evaluation; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.faces.event.AbortProcessingException; import javax.faces.event.ActionEvent; import javax.faces.event.ActionListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.math3.util.Precision; import org.sakaiproject.component.cover.ComponentManager; import org.sakaiproject.event.api.EventTrackingService; import org.sakaiproject.event.api.NotificationService; import org.sakaiproject.samigo.util.SamigoConstants; import org.sakaiproject.tool.assessment.data.dao.grading.AssessmentGradingData; import org.sakaiproject.tool.assessment.data.dao.grading.ItemGradingAttachment; import org.sakaiproject.tool.assessment.data.dao.grading.ItemGradingData; import org.sakaiproject.tool.assessment.facade.AgentFacade; import 
org.sakaiproject.tool.assessment.services.GradebookServiceException; import org.sakaiproject.tool.assessment.services.GradingService; import org.sakaiproject.tool.assessment.ui.bean.delivery.DeliveryBean; import org.sakaiproject.tool.assessment.ui.bean.delivery.ItemContentsBean; import org.sakaiproject.tool.assessment.ui.bean.delivery.SectionContentsBean; import org.sakaiproject.tool.assessment.ui.bean.evaluation.StudentScoresBean; import org.sakaiproject.tool.assessment.ui.bean.evaluation.TotalScoresBean; import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil; import org.sakaiproject.tool.assessment.util.SamigoLRSStatements; import org.sakaiproject.tool.assessment.util.TextFormat; /** * <p> * This handles the updating of the Student Score page. * </p> * <p>Description: Action Listener Evaluation Updating Student Score page</p> * <p>Copyright: Copyright (c) 2004</p> * <p>Organization: Sakai Project</p> * @author Rachel Gollub * @version $Id$ */ public class StudentScoreUpdateListener implements ActionListener { private static Logger log = LoggerFactory.getLogger(StudentScoreUpdateListener.class); private final EventTrackingService eventTrackingService= ComponentManager.get( EventTrackingService.class ); private static ContextUtil cu; /** * Standard process action method. 
* @param ae ActionEvent * @throws AbortProcessingException */ public void processAction(ActionEvent ae) throws AbortProcessingException { log.debug("Student Score Update LISTENER."); StudentScoresBean bean = (StudentScoresBean) cu.lookupBean("studentScores"); TotalScoresBean tbean = (TotalScoresBean) cu.lookupBean("totalScores"); tbean.setAssessmentGradingHash(tbean.getPublishedAssessment().getPublishedAssessmentId()); DeliveryBean delivery = (DeliveryBean) cu.lookupBean("delivery"); log.debug("Calling saveStudentScores."); try { if (!saveStudentScores(bean, tbean, delivery)) { throw new RuntimeException("failed to call saveStudentScores."); } } catch (GradebookServiceException ge) { FacesContext context = FacesContext.getCurrentInstance(); String err=(String)cu.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages", "gradebook_exception_error"); context.addMessage(null, new FacesMessage(err)); } } /** * Persist the results from the ActionForm in the student page. * @param bean StudentScoresBean bean * @return true if successful */ public boolean saveStudentScores(StudentScoresBean bean, TotalScoresBean tbean, DeliveryBean delivery) { GradingService delegate = new GradingService(); HashSet itemGradingSet = new HashSet(); AssessmentGradingData adata = null; try { ArrayList parts = delivery.getPageContents().getPartsContents(); Iterator iter = parts.iterator(); boolean updateFlag = false; while (iter.hasNext()) { ArrayList items = ((SectionContentsBean) iter.next()).getItemContents(); Iterator iter2 = items.iterator(); while (iter2.hasNext()) { ItemContentsBean question = (ItemContentsBean) iter2.next(); List<ItemGradingData> gradingarray = question.getItemGradingDataArray(); log.debug("****1. pub questionId = " + question.getItemData().getItemId()); log.debug("****2. Gradingarray length = " + gradingarray.size()); // Create a new one if we need it. 
if (gradingarray.isEmpty() && (question.getExactPoints() > 0 || (question.getGradingComment() != null && !question.getGradingComment().trim().equals("")) )) { // this is another mystery, no idea why review is involved here - daiyf question.setReview(false); // This creates an itemgradingdata gradingarray = question.getItemGradingDataArray(); } int fibFinNumCorrect = 0; if (question.getItemData().getTypeId().equals(Long.valueOf(8)) || question.getItemData().getTypeId().equals(Long.valueOf(11))) { Iterator itemGradingIter = gradingarray.iterator(); while (itemGradingIter.hasNext()){ Object obj = itemGradingIter.next(); ItemGradingData data = (ItemGradingData) obj; if (Boolean.TRUE.equals(data.getIsCorrect())) { fibFinNumCorrect++; } } } log.debug("****3a Gradingarray length2 = " + gradingarray.size()); log.debug("****3b set points = " + question.getExactPoints() + ", comments to " + question.getGradingComment()); Iterator iter3 = gradingarray.iterator(); while (iter3.hasNext()) { ItemGradingData data = (ItemGradingData) iter3.next(); if (adata == null && data.getAssessmentGradingId() != null){ adata = delegate.load(data.getAssessmentGradingId().toString()); } if (data.getAgentId() == null) { // this is a skipped question, set submittedDate=null data.setSubmittedDate(null); data.setAgentId(bean.getStudentId()); } double newAutoScore = 0; if ((question.getItemData().getTypeId().equals(Long.valueOf(8)) || question.getItemData().getTypeId().equals(Long.valueOf(11))) && fibFinNumCorrect != 0) { if (Boolean.TRUE.equals(data.getIsCorrect())) { newAutoScore = (question.getExactPoints() / (double) fibFinNumCorrect); } } else { newAutoScore = (question.getExactPoints() / (double) gradingarray.size()); } double oldAutoScore = 0; if (data.getAutoScore() !=null) { oldAutoScore=data.getAutoScore().doubleValue(); } String newComments = TextFormat.convertPlaintextToFormattedTextNoHighUnicode(log, question.getGradingComment()); if (newComments != null) { newComments = 
newComments.trim(); } else { newComments = ""; } String oldComments = data.getComments(); if (oldComments != null) { oldComments = oldComments.trim(); } else { oldComments = ""; } // if newAutoScore != oldAutoScore then updateScore = true boolean updateScore = !(Precision.equalsIncludingNaN(newAutoScore, oldAutoScore, 0.0001)); boolean updateComments = !newComments.equals(oldComments); StringBuffer logString = new StringBuffer(); logString.append("gradedBy="); logString.append(AgentFacade.getAgentString()); logString.append(", itemGradingId="); logString.append(data.getItemGradingId()); if (updateScore) { data.setAutoScore(Double.valueOf(newAutoScore)); logString.append(", newAutoScore="); logString.append(newAutoScore); logString.append(", oldAutoScore="); logString.append(oldAutoScore); } if (updateComments) { data.setComments(newComments); logString.append(", newComments="); logString.append(newComments); logString.append(", oldComments="); logString.append(oldComments); } if (updateScore || updateComments) { updateFlag = true; data.setGradedBy(AgentFacade.getAgentString()); data.setGradedDate(new Date()); String targetString = "siteId=" + AgentFacade.getCurrentSiteId() + ", " + logString.toString(); String safeString = targetString.length() > 255 ? targetString.substring(0, 255) : targetString; eventTrackingService.post(eventTrackingService.newEvent(SamigoConstants.EVENT_ASSESSMENT_STUDENT_SCORE_UPDATE, safeString, true)); log.debug("****4 itemGradingId="+data.getItemGradingId()); log.debug("****5 set points = " + data.getAutoScore() + ", comments to " + data.getComments()); } data.setAnswerText(ContextUtil.stringWYSIWYG(data.getAnswerText())); itemGradingSet.add(data); } } if (adata==null){ // this is for cases when studnet submitted an assessment but skipped all teh questions // when we won't be able to get teh assessmentGrading based on itemGrdaing ('cos there is none). 
String assessmentGradingId = cu.lookupParam("gradingData"); adata = delegate.load(assessmentGradingId); } adata.setItemGradingSet(itemGradingSet); } if (adata == null) return true; // Nothing to save. String newComments = TextFormat.convertPlaintextToFormattedTextNoHighUnicode(log, bean.getComments()); if (newComments != null) { newComments = newComments.trim(); } else { newComments = ""; } String oldComments = adata.getComments(); if (oldComments != null) { oldComments = oldComments.trim(); } else { oldComments = ""; } StringBuffer logString = new StringBuffer(); logString.append("gradedBy="); logString.append(AgentFacade.getAgentString()); logString.append(", assessmentGradingId="); logString.append(adata.getAssessmentGradingId()); if (!newComments.equals(oldComments)) { updateFlag = true; adata.setComments(newComments); adata.setGradedBy(AgentFacade.getAgentString()); adata.setGradedDate(new Date()); logString.append(", newComments="); logString.append(newComments); logString.append(", oldComments="); logString.append(oldComments); } if (updateFlag) { delegate.updateAssessmentGradingScore(adata, tbean.getPublishedAssessment()); eventTrackingService.post(eventTrackingService.newEvent(SamigoConstants.EVENT_ASSESSMENT_STUDENT_SCORE_UPDATE, logString.toString(), AgentFacade.getCurrentSiteId(), true, NotificationService.NOTI_OPTIONAL, SamigoLRSStatements.getStatementForStudentScoreUpdate(adata, tbean.getPublishedAssessment()))); } log.debug("Saved student scores."); updateAttachment(delivery); } catch (GradebookServiceException ge) { FacesContext context = FacesContext.getCurrentInstance(); String err=(String)cu.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AuthorMessages", "gradebook_exception_error"); context.addMessage(null, new FacesMessage(err)); } catch (Exception e) { e.printStackTrace(); return false; } return true; } public void updateAttachment(DeliveryBean delivery){ ArrayList parts = delivery.getPageContents().getPartsContents(); Iterator 
iter = parts.iterator(); List attachmentList = new ArrayList(); while (iter.hasNext()) { ArrayList items = ((SectionContentsBean) iter.next()).getItemContents(); Iterator iter2 = items.iterator(); while (iter2.hasNext()) { ItemContentsBean question = (ItemContentsBean) iter2.next(); List<ItemGradingData> gradingarray = question.getItemGradingDataArray(); log.debug("Gradingarray length2 = " + gradingarray.size()); Iterator<ItemGradingData> iter3 = gradingarray.iterator(); while (iter3.hasNext()) { ItemGradingData itemGradingData = iter3.next(); List oldList = itemGradingData.getItemGradingAttachmentList(); List newList = question.getItemGradingAttachmentList(); if ((oldList == null || oldList.size() == 0 ) && (newList == null || newList.size() == 0)) { continue; } HashMap map = getAttachmentIdHash(oldList); for (int i=0; i<newList.size(); i++){ ItemGradingAttachment itemGradingAttachment = (ItemGradingAttachment) newList.get(i); if (map.get(itemGradingAttachment.getAttachmentId()) != null){ // exist already, remove it from map map.remove(itemGradingAttachment.getAttachmentId()); } else{ // new attachments itemGradingAttachment.setItemGrading(itemGradingData); attachmentList.add(itemGradingAttachment); } } // save new ones GradingService gradingService = new GradingService(); if (attachmentList.size() > 0) { gradingService.saveOrUpdateAttachments(attachmentList); eventTrackingService.post(eventTrackingService.newEvent(SamigoConstants.EVENT_ASSESSMENT_STUDENT_SCORE_UPDATE, "siteId=" + AgentFacade.getCurrentSiteId() + ", Adding " + attachmentList.size() + " attachments for itemGradingData id = " + itemGradingData.getItemGradingId(), true)); } // remove old ones Set set = map.keySet(); Iterator iter4 = set.iterator(); while (iter4.hasNext()){ Long attachmentId = (Long)iter4.next(); gradingService.removeItemGradingAttachment(attachmentId.toString()); eventTrackingService.post(eventTrackingService.newEvent(SamigoConstants.EVENT_ASSESSMENT_STUDENT_SCORE_UPDATE, "siteId=" + 
AgentFacade.getCurrentSiteId() + ", Removing attachmentId = " + attachmentId, true)); } } } } } private HashMap getAttachmentIdHash(List list){ HashMap map = new HashMap(); for (int i=0; i<list.size(); i++){ ItemGradingAttachment a = (ItemGradingAttachment)list.get(i); map.put(a.getAttachmentId(), a); } return map; } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.iotwireless.model; import java.io.Serializable; import javax.annotation.Generated; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotwireless-2020-11-22/ListDeviceProfiles" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListDeviceProfilesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * The token to use to get the next set of results, or <b>null</b> if there are no additional results. * </p> */ private String nextToken; /** * <p> * The list of device profiles. * </p> */ private java.util.List<DeviceProfile> deviceProfileList; /** * <p> * The token to use to get the next set of results, or <b>null</b> if there are no additional results. * </p> * * @param nextToken * The token to use to get the next set of results, or <b>null</b> if there are no additional results. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * The token to use to get the next set of results, or <b>null</b> if there are no additional results. * </p> * * @return The token to use to get the next set of results, or <b>null</b> if there are no additional results. 
*/ public String getNextToken() { return this.nextToken; } /** * <p> * The token to use to get the next set of results, or <b>null</b> if there are no additional results. * </p> * * @param nextToken * The token to use to get the next set of results, or <b>null</b> if there are no additional results. * @return Returns a reference to this object so that method calls can be chained together. */ public ListDeviceProfilesResult withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * <p> * The list of device profiles. * </p> * * @return The list of device profiles. */ public java.util.List<DeviceProfile> getDeviceProfileList() { return deviceProfileList; } /** * <p> * The list of device profiles. * </p> * * @param deviceProfileList * The list of device profiles. */ public void setDeviceProfileList(java.util.Collection<DeviceProfile> deviceProfileList) { if (deviceProfileList == null) { this.deviceProfileList = null; return; } this.deviceProfileList = new java.util.ArrayList<DeviceProfile>(deviceProfileList); } /** * <p> * The list of device profiles. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setDeviceProfileList(java.util.Collection)} or {@link #withDeviceProfileList(java.util.Collection)} if * you want to override the existing values. * </p> * * @param deviceProfileList * The list of device profiles. * @return Returns a reference to this object so that method calls can be chained together. */ public ListDeviceProfilesResult withDeviceProfileList(DeviceProfile... deviceProfileList) { if (this.deviceProfileList == null) { setDeviceProfileList(new java.util.ArrayList<DeviceProfile>(deviceProfileList.length)); } for (DeviceProfile ele : deviceProfileList) { this.deviceProfileList.add(ele); } return this; } /** * <p> * The list of device profiles. * </p> * * @param deviceProfileList * The list of device profiles. 
* @return Returns a reference to this object so that method calls can be chained together. */ public ListDeviceProfilesResult withDeviceProfileList(java.util.Collection<DeviceProfile> deviceProfileList) { setDeviceProfileList(deviceProfileList); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()).append(","); if (getDeviceProfileList() != null) sb.append("DeviceProfileList: ").append(getDeviceProfileList()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListDeviceProfilesResult == false) return false; ListDeviceProfilesResult other = (ListDeviceProfilesResult) obj; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; if (other.getDeviceProfileList() == null ^ this.getDeviceProfileList() == null) return false; if (other.getDeviceProfileList() != null && other.getDeviceProfileList().equals(this.getDeviceProfileList()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); hashCode = prime * hashCode + ((getDeviceProfileList() == null) ? 
0 : getDeviceProfileList().hashCode()); return hashCode; } @Override public ListDeviceProfilesResult clone() { try { return (ListDeviceProfilesResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.auditmanager.model; import java.io.Serializable; import javax.annotation.Generated; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/auditmanager-2017-07-25/GetEvidenceFoldersByAssessment" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class GetEvidenceFoldersByAssessmentResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. * </p> */ private java.util.List<AssessmentEvidenceFolder> evidenceFolders; /** * <p> * The pagination token that's used to fetch the next set of results. * </p> */ private String nextToken; /** * <p> * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. * </p> * * @return The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. */ public java.util.List<AssessmentEvidenceFolder> getEvidenceFolders() { return evidenceFolders; } /** * <p> * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. * </p> * * @param evidenceFolders * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. 
*/ public void setEvidenceFolders(java.util.Collection<AssessmentEvidenceFolder> evidenceFolders) { if (evidenceFolders == null) { this.evidenceFolders = null; return; } this.evidenceFolders = new java.util.ArrayList<AssessmentEvidenceFolder>(evidenceFolders); } /** * <p> * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setEvidenceFolders(java.util.Collection)} or {@link #withEvidenceFolders(java.util.Collection)} if you * want to override the existing values. * </p> * * @param evidenceFolders * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. * @return Returns a reference to this object so that method calls can be chained together. */ public GetEvidenceFoldersByAssessmentResult withEvidenceFolders(AssessmentEvidenceFolder... evidenceFolders) { if (this.evidenceFolders == null) { setEvidenceFolders(new java.util.ArrayList<AssessmentEvidenceFolder>(evidenceFolders.length)); } for (AssessmentEvidenceFolder ele : evidenceFolders) { this.evidenceFolders.add(ele); } return this; } /** * <p> * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. * </p> * * @param evidenceFolders * The list of evidence folders that the <code>GetEvidenceFoldersByAssessment</code> API returned. * @return Returns a reference to this object so that method calls can be chained together. */ public GetEvidenceFoldersByAssessmentResult withEvidenceFolders(java.util.Collection<AssessmentEvidenceFolder> evidenceFolders) { setEvidenceFolders(evidenceFolders); return this; } /** * <p> * The pagination token that's used to fetch the next set of results. * </p> * * @param nextToken * The pagination token that's used to fetch the next set of results. 
*/ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * The pagination token that's used to fetch the next set of results. * </p> * * @return The pagination token that's used to fetch the next set of results. */ public String getNextToken() { return this.nextToken; } /** * <p> * The pagination token that's used to fetch the next set of results. * </p> * * @param nextToken * The pagination token that's used to fetch the next set of results. * @return Returns a reference to this object so that method calls can be chained together. */ public GetEvidenceFoldersByAssessmentResult withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getEvidenceFolders() != null) sb.append("EvidenceFolders: ").append(getEvidenceFolders()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof GetEvidenceFoldersByAssessmentResult == false) return false; GetEvidenceFoldersByAssessmentResult other = (GetEvidenceFoldersByAssessmentResult) obj; if (other.getEvidenceFolders() == null ^ this.getEvidenceFolders() == null) return false; if (other.getEvidenceFolders() != null && other.getEvidenceFolders().equals(this.getEvidenceFolders()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; return true; } @Override public int hashCode() 
{ final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getEvidenceFolders() == null) ? 0 : getEvidenceFolders().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); return hashCode; } @Override public GetEvidenceFoldersByAssessmentResult clone() { try { return (GetEvidenceFoldersByAssessmentResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.jdbc.pool; import java.io.IOException; import java.sql.Connection; import java.util.Hashtable; import java.util.Properties; import javax.management.ObjectName; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.Name; import javax.naming.NamingException; import javax.naming.RefAddr; import javax.naming.Reference; import javax.naming.spi.ObjectFactory; import javax.sql.DataSource; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; /** * <p>JNDI object factory that creates an instance of * <code>BasicDataSource</code> that has been configured based on the * <code>RefAddr</code> values of the specified <code>Reference</code>, * which must match the names and data types of the * <code>BasicDataSource</code> bean properties.</p> * <br/> * Properties available for configuration:<br/> * <a href="http://commons.apache.org/dbcp/configuration.html">Commons DBCP properties</a><br/> *<ol> * <li>initSQL - A query that gets executed once, right after the connection is established.</li> * <li>testOnConnect - run validationQuery after connection has been established.</li> * <li>validationInterval - avoid excess validation, only 
run validation at most at this frequency - time in milliseconds.</li> * <li>jdbcInterceptors - a semicolon separated list of classnames extending {@link JdbcInterceptor} class.</li> * <li>jmxEnabled - true of false, whether to register the pool with JMX.</li> * <li>fairQueue - true of false, whether the pool should sacrifice a little bit of performance for true fairness.</li> *</ol> * @author Craig R. McClanahan * @author Dirk Verbeeck * @author Filip Hanik */ public class DataSourceFactory implements ObjectFactory { private static final Log log = LogFactory.getLog(DataSourceFactory.class); protected final static String PROP_DEFAULTAUTOCOMMIT = "defaultAutoCommit"; protected final static String PROP_DEFAULTREADONLY = "defaultReadOnly"; protected final static String PROP_DEFAULTTRANSACTIONISOLATION = "defaultTransactionIsolation"; protected final static String PROP_DEFAULTCATALOG = "defaultCatalog"; protected final static String PROP_DRIVERCLASSNAME = "driverClassName"; protected final static String PROP_PASSWORD = "password"; protected final static String PROP_URL = "url"; protected final static String PROP_USERNAME = "username"; protected final static String PROP_MAXACTIVE = "maxActive"; protected final static String PROP_MAXIDLE = "maxIdle"; protected final static String PROP_MINIDLE = "minIdle"; protected final static String PROP_INITIALSIZE = "initialSize"; protected final static String PROP_MAXWAIT = "maxWait"; protected final static String PROP_MAXAGE = "maxAge"; protected final static String PROP_TESTONBORROW = "testOnBorrow"; protected final static String PROP_TESTONRETURN = "testOnReturn"; protected final static String PROP_TESTWHILEIDLE = "testWhileIdle"; protected final static String PROP_TESTONCONNECT = "testOnConnect"; protected final static String PROP_VALIDATIONQUERY = "validationQuery"; protected final static String PROP_VALIDATOR_CLASS_NAME = "validatorClassName"; protected final static String PROP_TIMEBETWEENEVICTIONRUNSMILLIS = 
"timeBetweenEvictionRunsMillis"; protected final static String PROP_NUMTESTSPEREVICTIONRUN = "numTestsPerEvictionRun"; protected final static String PROP_MINEVICTABLEIDLETIMEMILLIS = "minEvictableIdleTimeMillis"; protected final static String PROP_ACCESSTOUNDERLYINGCONNECTIONALLOWED = "accessToUnderlyingConnectionAllowed"; protected final static String PROP_REMOVEABANDONED = "removeAbandoned"; protected final static String PROP_REMOVEABANDONEDTIMEOUT = "removeAbandonedTimeout"; protected final static String PROP_LOGABANDONED = "logAbandoned"; protected final static String PROP_ABANDONWHENPERCENTAGEFULL = "abandonWhenPercentageFull"; protected final static String PROP_POOLPREPAREDSTATEMENTS = "poolPreparedStatements"; protected final static String PROP_MAXOPENPREPAREDSTATEMENTS = "maxOpenPreparedStatements"; protected final static String PROP_CONNECTIONPROPERTIES = "connectionProperties"; protected final static String PROP_INITSQL = "initSQL"; protected final static String PROP_INTERCEPTORS = "jdbcInterceptors"; protected final static String PROP_VALIDATIONINTERVAL = "validationInterval"; protected final static String PROP_JMX_ENABLED = "jmxEnabled"; protected final static String PROP_FAIR_QUEUE = "fairQueue"; protected static final String PROP_USE_EQUALS = "useEquals"; protected static final String PROP_USE_CON_LOCK = "useLock"; protected static final String PROP_DATASOURCE= "dataSource"; protected static final String PROP_DATASOURCE_JNDI = "dataSourceJNDI"; protected static final String PROP_SUSPECT_TIMEOUT = "suspectTimeout"; protected static final String PROP_ALTERNATE_USERNAME_ALLOWED = "alternateUsernameAllowed"; public static final int UNKNOWN_TRANSACTIONISOLATION = -1; public static final String OBJECT_NAME = "object_name"; protected final static String[] ALL_PROPERTIES = { PROP_DEFAULTAUTOCOMMIT, PROP_DEFAULTREADONLY, PROP_DEFAULTTRANSACTIONISOLATION, PROP_DEFAULTCATALOG, PROP_DRIVERCLASSNAME, PROP_MAXACTIVE, PROP_MAXIDLE, PROP_MINIDLE, PROP_INITIALSIZE, 
PROP_MAXWAIT, PROP_TESTONBORROW, PROP_TESTONRETURN, PROP_TIMEBETWEENEVICTIONRUNSMILLIS, PROP_NUMTESTSPEREVICTIONRUN, PROP_MINEVICTABLEIDLETIMEMILLIS, PROP_TESTWHILEIDLE, PROP_TESTONCONNECT, PROP_PASSWORD, PROP_URL, PROP_USERNAME, PROP_VALIDATIONQUERY, PROP_VALIDATOR_CLASS_NAME, PROP_VALIDATIONINTERVAL, PROP_ACCESSTOUNDERLYINGCONNECTIONALLOWED, PROP_REMOVEABANDONED, PROP_REMOVEABANDONEDTIMEOUT, PROP_LOGABANDONED, PROP_POOLPREPAREDSTATEMENTS, PROP_MAXOPENPREPAREDSTATEMENTS, PROP_CONNECTIONPROPERTIES, PROP_INITSQL, PROP_INTERCEPTORS, PROP_JMX_ENABLED, PROP_FAIR_QUEUE, PROP_USE_EQUALS, OBJECT_NAME, PROP_ABANDONWHENPERCENTAGEFULL, PROP_MAXAGE, PROP_USE_CON_LOCK, PROP_DATASOURCE, PROP_DATASOURCE_JNDI, PROP_SUSPECT_TIMEOUT, PROP_ALTERNATE_USERNAME_ALLOWED }; // -------------------------------------------------- ObjectFactory Methods /** * <p>Create and return a new <code>BasicDataSource</code> instance. If no * instance can be created, return <code>null</code> instead.</p> * * @param obj The possibly null object containing location or * reference information that can be used in creating an object * @param name The name of this object relative to <code>nameCtx</code> * @param nameCtx The context relative to which the <code>name</code> * parameter is specified, or <code>null</code> if <code>name</code> * is relative to the default initial context * @param environment The possibly null environment that is used in * creating this object * * @exception Exception if an exception occurs creating the instance */ public Object getObjectInstance(Object obj, Name name, Context nameCtx, Hashtable<?,?> environment) throws Exception { // We only know how to deal with <code>javax.naming.Reference</code>s // that specify a class name of "javax.sql.DataSource" if ((obj == null) || !(obj instanceof Reference)) { return null; } Reference ref = (Reference) obj; boolean XA = false; boolean ok = false; if ("javax.sql.DataSource".equals(ref.getClassName())) { ok = true; } if 
("javax.sql.XADataSource".equals(ref.getClassName())) { ok = true; XA = true; } if (org.apache.tomcat.jdbc.pool.DataSource.class.getName().equals(ref.getClassName())) { ok = true; } if (!ok) { log.warn(ref.getClassName()+" is not a valid class name/type for this JNDI factory."); return null; } Properties properties = new Properties(); for (int i = 0; i < ALL_PROPERTIES.length; i++) { String propertyName = ALL_PROPERTIES[i]; RefAddr ra = ref.get(propertyName); if (ra != null) { String propertyValue = ra.getContent().toString(); properties.setProperty(propertyName, propertyValue); } } return createDataSource(properties,nameCtx,XA); } public static PoolConfiguration parsePoolProperties(Properties properties) throws IOException{ PoolConfiguration poolProperties = new PoolProperties(); String value = null; value = properties.getProperty(PROP_DEFAULTAUTOCOMMIT); if (value != null) { poolProperties.setDefaultAutoCommit(Boolean.valueOf(value)); } value = properties.getProperty(PROP_DEFAULTREADONLY); if (value != null) { poolProperties.setDefaultReadOnly(Boolean.valueOf(value)); } value = properties.getProperty(PROP_DEFAULTTRANSACTIONISOLATION); if (value != null) { int level = UNKNOWN_TRANSACTIONISOLATION; if ("NONE".equalsIgnoreCase(value)) { level = Connection.TRANSACTION_NONE; } else if ("READ_COMMITTED".equalsIgnoreCase(value)) { level = Connection.TRANSACTION_READ_COMMITTED; } else if ("READ_UNCOMMITTED".equalsIgnoreCase(value)) { level = Connection.TRANSACTION_READ_UNCOMMITTED; } else if ("REPEATABLE_READ".equalsIgnoreCase(value)) { level = Connection.TRANSACTION_REPEATABLE_READ; } else if ("SERIALIZABLE".equalsIgnoreCase(value)) { level = Connection.TRANSACTION_SERIALIZABLE; } else { try { level = Integer.parseInt(value); } catch (NumberFormatException e) { System.err.println("Could not parse defaultTransactionIsolation: " + value); System.err.println("WARNING: defaultTransactionIsolation not set"); System.err.println("using default value of database driver"); level 
= UNKNOWN_TRANSACTIONISOLATION; } } poolProperties.setDefaultTransactionIsolation(level); } value = properties.getProperty(PROP_DEFAULTCATALOG); if (value != null) { poolProperties.setDefaultCatalog(value); } value = properties.getProperty(PROP_DRIVERCLASSNAME); if (value != null) { poolProperties.setDriverClassName(value); } value = properties.getProperty(PROP_MAXACTIVE); if (value != null) { poolProperties.setMaxActive(Integer.parseInt(value)); } value = properties.getProperty(PROP_MAXIDLE); if (value != null) { poolProperties.setMaxIdle(Integer.parseInt(value)); } value = properties.getProperty(PROP_MINIDLE); if (value != null) { poolProperties.setMinIdle(Integer.parseInt(value)); } value = properties.getProperty(PROP_INITIALSIZE); if (value != null) { poolProperties.setInitialSize(Integer.parseInt(value)); } value = properties.getProperty(PROP_MAXWAIT); if (value != null) { poolProperties.setMaxWait(Integer.parseInt(value)); } value = properties.getProperty(PROP_TESTONBORROW); if (value != null) { poolProperties.setTestOnBorrow(Boolean.valueOf(value).booleanValue()); } value = properties.getProperty(PROP_TESTONRETURN); if (value != null) { poolProperties.setTestOnReturn(Boolean.valueOf(value).booleanValue()); } value = properties.getProperty(PROP_TESTONCONNECT); if (value != null) { poolProperties.setTestOnConnect(Boolean.valueOf(value).booleanValue()); } value = properties.getProperty(PROP_TIMEBETWEENEVICTIONRUNSMILLIS); if (value != null) { poolProperties.setTimeBetweenEvictionRunsMillis(Integer.parseInt(value)); } value = properties.getProperty(PROP_NUMTESTSPEREVICTIONRUN); if (value != null) { poolProperties.setNumTestsPerEvictionRun(Integer.parseInt(value)); } value = properties.getProperty(PROP_MINEVICTABLEIDLETIMEMILLIS); if (value != null) { poolProperties.setMinEvictableIdleTimeMillis(Integer.parseInt(value)); } value = properties.getProperty(PROP_TESTWHILEIDLE); if (value != null) { 
poolProperties.setTestWhileIdle(Boolean.valueOf(value).booleanValue()); } value = properties.getProperty(PROP_PASSWORD); if (value != null) { poolProperties.setPassword(value); } value = properties.getProperty(PROP_URL); if (value != null) { poolProperties.setUrl(value); } value = properties.getProperty(PROP_USERNAME); if (value != null) { poolProperties.setUsername(value); } value = properties.getProperty(PROP_VALIDATIONQUERY); if (value != null) { poolProperties.setValidationQuery(value); } value = properties.getProperty(PROP_VALIDATOR_CLASS_NAME); if (value != null) { poolProperties.setValidatorClassName(value); } value = properties.getProperty(PROP_VALIDATIONINTERVAL); if (value != null) { poolProperties.setValidationInterval(Long.parseLong(value)); } value = properties.getProperty(PROP_ACCESSTOUNDERLYINGCONNECTIONALLOWED); if (value != null) { poolProperties.setAccessToUnderlyingConnectionAllowed(Boolean.valueOf(value).booleanValue()); } value = properties.getProperty(PROP_REMOVEABANDONED); if (value != null) { poolProperties.setRemoveAbandoned(Boolean.valueOf(value).booleanValue()); } value = properties.getProperty(PROP_REMOVEABANDONEDTIMEOUT); if (value != null) { poolProperties.setRemoveAbandonedTimeout(Integer.parseInt(value)); } value = properties.getProperty(PROP_LOGABANDONED); if (value != null) { poolProperties.setLogAbandoned(Boolean.valueOf(value).booleanValue()); } value = properties.getProperty(PROP_POOLPREPAREDSTATEMENTS); if (value != null) { log.warn(PROP_POOLPREPAREDSTATEMENTS + " is not a valid setting, it will have no effect."); } value = properties.getProperty(PROP_MAXOPENPREPAREDSTATEMENTS); if (value != null) { log.warn(PROP_MAXOPENPREPAREDSTATEMENTS + " is not a valid setting, it will have no effect."); } value = properties.getProperty(PROP_CONNECTIONPROPERTIES); if (value != null) { Properties p = getProperties(value); poolProperties.setDbProperties(p); } else { poolProperties.setDbProperties(new Properties()); } if 
(poolProperties.getUsername()!=null) { poolProperties.getDbProperties().setProperty("user",poolProperties.getUsername()); } if (poolProperties.getPassword()!=null) { poolProperties.getDbProperties().setProperty("password",poolProperties.getPassword()); } value = properties.getProperty(PROP_INITSQL); if (value != null) { poolProperties.setInitSQL(value); } value = properties.getProperty(PROP_INTERCEPTORS); if (value != null) { poolProperties.setJdbcInterceptors(value); } value = properties.getProperty(PROP_JMX_ENABLED); if (value != null) { poolProperties.setJmxEnabled(Boolean.parseBoolean(value)); } value = properties.getProperty(PROP_FAIR_QUEUE); if (value != null) { poolProperties.setFairQueue(Boolean.parseBoolean(value)); } value = properties.getProperty(PROP_USE_EQUALS); if (value != null) { poolProperties.setUseEquals(Boolean.parseBoolean(value)); } value = properties.getProperty(OBJECT_NAME); if (value != null) { poolProperties.setName(ObjectName.quote(value)); } value = properties.getProperty(PROP_ABANDONWHENPERCENTAGEFULL); if (value != null) { poolProperties.setAbandonWhenPercentageFull(Integer.parseInt(value)); } value = properties.getProperty(PROP_MAXAGE); if (value != null) { poolProperties.setMaxAge(Long.parseLong(value)); } value = properties.getProperty(PROP_USE_CON_LOCK); if (value != null) { poolProperties.setUseLock(Boolean.parseBoolean(value)); } value = properties.getProperty(PROP_DATASOURCE); if (value != null) { //this should never happen throw new IllegalArgumentException("Can't set dataSource property as a string, this must be a javax.sql.DataSource object."); } value = properties.getProperty(PROP_DATASOURCE_JNDI); if (value != null) { poolProperties.setDataSourceJNDI(value); } value = properties.getProperty(PROP_SUSPECT_TIMEOUT); if (value != null) { poolProperties.setSuspectTimeout(Integer.parseInt(value)); } value = properties.getProperty(PROP_ALTERNATE_USERNAME_ALLOWED); if (value != null) { 
// --- tail of parsePoolProperties(Properties): last recognized property ---
poolProperties.setAlternateUsernameAllowed(Boolean.parseBoolean(value));
        }
        // All recognized properties have been copied into the pool configuration.
        return poolProperties;
    }

    /**
     * Creates and configures a {@link DataSource} instance based on the
     * given properties.
     *
     * @param properties the datasource configuration properties
     * @return the configured, pooled data source
     * @throws Exception if an error occurs creating the data source
     */
    public DataSource createDataSource(Properties properties) throws Exception {
        // Convenience overload: no local JNDI context, plain (non-XA) pool.
        return createDataSource(properties,null,false);
    }

    /**
     * Creates and configures a pooled {@link DataSource}.
     *
     * @param properties the datasource configuration properties
     * @param context    local JNDI context used to resolve {@code dataSourceJNDI};
     *                   may be {@code null}
     * @param XA         if {@code true} an XA-capable data source is created
     * @return the configured data source with its pool already initialised
     * @throws Exception if an error occurs creating the data source
     */
    public DataSource createDataSource(Properties properties,Context context, boolean XA) throws Exception {
        PoolConfiguration poolProperties = DataSourceFactory.parsePoolProperties(properties);
        // Only resolve the JNDI-referenced underlying DataSource when none was set directly.
        if (poolProperties.getDataSourceJNDI()!=null && poolProperties.getDataSource()==null) {
            performJNDILookup(context, poolProperties);
        }
        org.apache.tomcat.jdbc.pool.DataSource dataSource = XA?
                new org.apache.tomcat.jdbc.pool.XADataSource(poolProperties) :
                new org.apache.tomcat.jdbc.pool.DataSource(poolProperties);
        //initialise the pool itself
        dataSource.createPool();
        // Return the configured DataSource instance
        return dataSource;
    }

    /**
     * Looks up the object named by {@code dataSourceJNDI}, first in the supplied
     * local context and, failing that, in a fresh {@link InitialContext}. On
     * success the resolved object is stored on the pool configuration; lookup
     * failures are logged but never thrown.
     *
     * @param context        local JNDI context; may be {@code null}
     * @param poolProperties pool configuration holding the JNDI name to resolve
     */
    public void performJNDILookup(Context context, PoolConfiguration poolProperties) {
        Object jndiDS = null;
        try {
            if (context!=null) {
                jndiDS = context.lookup(poolProperties.getDataSourceJNDI());
            } else {
                log.warn("dataSourceJNDI property is configued, but local JNDI context is null.");
            }
        } catch (NamingException e) {
            log.debug("The name \""+poolProperties.getDataSourceJNDI()+"\" can not be found in the local context.");
        }
        if (jndiDS==null) {
            // Fall back to the default initial context when the local lookup failed.
            try {
                context = (Context) (new InitialContext());
                jndiDS = context.lookup(poolProperties.getDataSourceJNDI());
            } catch (NamingException e) {
                log.warn("The name \""+poolProperties.getDataSourceJNDI()+"\" can not be found in the InitialContext.");
            }
        }
        if (jndiDS!=null) {
            poolProperties.setDataSource(jndiDS);
        }
    }

    /**
     * <p>Parse properties from the string. Format of the string must be
     * [propertyName=property;]*<p>
     * @param propText semicolon-separated {@code name=value} pairs
     * @return Properties parsed from the text
     * @throws IOException if the property text cannot be read
     */
    static protected Properties getProperties(String propText) throws IOException {
        return PoolProperties.getProperties(propText,null);
    }
}
package pipe.actions.gui;

import pipe.controllers.application.PipeApplicationController;
import pipe.gui.PetriNetTab;

import javax.swing.*;
import javax.swing.plaf.LayerUI;
import java.awt.*;
import java.awt.event.*;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;

/**
 * Zoom UI which intercepts mouse presses on a zoomed panel and transforms them to their
 * correct location
 */
@SuppressWarnings("serial")
public class ZoomUI extends LayerUI<JComponent> implements ZoomManager {

    /**
     * Message fired on a zoom out
     */
    public static final String ZOOM_OUT_CHANGE_MESSAGE = "zoomOut";

    /**
     * Message fired on a zoom in
     */
    public static final String ZOOM_IN_CHANGE_MESSAGE = "zoomIn";

    /**
     * Amount to zoom in and out by
     */
    private final double zoomAmount;

    /**
     * Minimum scale allowed to zoom to
     */
    private final double zoomMin;

    /**
     * ApplicationView that this zooming belongs for
     * is used to get petri net tab
     */
    private final PipeApplicationController controller;

    /**
     * Maximum scale allowed to zoom to
     */
    private final double zoomMax;

    /**
     * Change support for firing events when percent is changed.
     */
    protected PropertyChangeSupport changeSupport = new PropertyChangeSupport(this);

    /**
     * Zoom transformation 1 = unzoomed
     */
    private double zoom = 1;

    /**
     * @param startingScale initialZoomScale where 1 = unzoomed
     * @param zoomAmount amount to zoom in/out by
     * @param zoomMax maximum allowed zoom value
     * @param zoomMin minimum allowed zoom value
     * @param controller controller
     */
    public ZoomUI(double startingScale, double zoomAmount, double zoomMax, double zoomMin,
                  PipeApplicationController controller) {
        zoom = startingScale;
        this.zoomAmount = zoomAmount;
        this.zoomMax = zoomMax;
        this.zoomMin = zoomMin;
        this.controller = controller;
    }

    /**
     * Paints the component with the current zoom scale.
     * Clears the component area first, then applies the scale transform to the
     * shared Graphics before delegating to the default layer painting.
     * @param g graphics
     * @param c component
     */
    @Override
    public void paint(Graphics g, JComponent c) {
        g.clearRect(c.getX(), c.getY(), c.getWidth(), c.getHeight());
        Graphics2D g2 = (Graphics2D) g;
        g2.scale(zoom, zoom);
        super.paint(g2, c);
    }

    /**
     * Transforms zoomed mouse events to their unzoomed coordinates and re-dispatches
     * press/release/click to the listeners of the component actually hit; the original
     * event is consumed only when a component was hit.
     *
     * @param e event
     * @param l component
     */
    @Override
    protected void processMouseEvent(MouseEvent e, JLayer<? extends JComponent> l) {
        MouseEvent localEvent = translateToLayerCoordinates(e, l);
        if (clickNotOutOfBounds(localEvent, l)) {
            Component component = getComponentClickedOn(l, localEvent);
            if (localEvent.getID() == MouseEvent.MOUSE_PRESSED) {
                // A press also fires a synthetic "CLICK" ActionEvent to any ActionListeners.
                for (ActionListener listener : component.getListeners(ActionListener.class)) {
                    ActionEvent actionEvent = new ActionEvent(component, localEvent.getID(), "CLICK");
                    listener.actionPerformed(actionEvent);
                }
                for (MouseListener listener : component.getListeners(MouseListener.class)) {
                    listener.mousePressed(getNewMouseClickEvent(component, localEvent));
                }
            } else if (localEvent.getID() == MouseEvent.MOUSE_RELEASED) {
                for (MouseListener listener : component.getListeners(MouseListener.class)) {
                    listener.mouseReleased(getNewMouseClickEvent(component, localEvent));
                }
            } else if (localEvent.getID() == MouseEvent.MOUSE_CLICKED) {
                for (MouseListener listener : component.getListeners(MouseListener.class)) {
                    listener.mouseClicked(getNewMouseClickEvent(component, localEvent));
                }
            }
            e.consume();
        }
    }

    /**
     * Translates the event to a zoomed event point and re-dispatches move/drag events
     * to the hit component's MouseMotionListeners. Note: unlike processMouseEvent,
     * the original event is consumed unconditionally here.
     * @param e mouse event
     * @param l component
     */
    @Override
    protected void processMouseMotionEvent(MouseEvent e, JLayer<? extends JComponent> l) {
        MouseEvent localEvent = translateToLayerCoordinates(e, l);
        if (clickNotOutOfBounds(localEvent, l)) {
            Component component = getComponentClickedOn(l, localEvent);
            if (localEvent.getID() == MouseEvent.MOUSE_MOVED) {
                for (MouseMotionListener listener : component.getListeners(MouseMotionListener.class)) {
                    listener.mouseMoved(getNewMouseClickEvent(component, localEvent));
                }
            } else if (localEvent.getID() == MouseEvent.MOUSE_DRAGGED) {
                for (MouseMotionListener listener : component.getListeners(MouseMotionListener.class)) {
                    listener.mouseDragged(getNewMouseClickEvent(component, localEvent));
                }
            }
        }
        e.consume();
    }

    /**
     * Noop action
     * @param e mouse event
     * @param l component
     */
    @Override
    protected void processMouseWheelEvent(MouseWheelEvent e, JLayer<? extends JComponent> l) {
        //No action needed
    }

    /**
     * Install the UI and register for mouse + mouse-motion events on the layer.
     * @param c component
     */
    @Override
    public void installUI(JComponent c) {
        super.installUI(c);
        JLayer<? extends JComponent> jlayer = (JLayer<? extends JComponent>) c;
        jlayer.setLayerEventMask(AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_MOTION_EVENT_MASK);
    }

    /**
     * Uninstall the UI, clearing the layer's event mask.
     * @param c component
     */
    @Override
    public void uninstallUI(JComponent c) {
        JLayer<? extends JComponent> jlayer = (JLayer<? extends JComponent>) c;
        jlayer.setLayerEventMask(0);
        super.uninstallUI(c);
    }

    /**
     * Add a listener for zoom updates
     * @param listener to add
     */
    @Override
    public void addPropertyChangeListener(PropertyChangeListener listener) {
        changeSupport.addPropertyChangeListener(listener);
    }

    /**
     * Remove a listener from the zoom UI
     * @param listener to remove
     */
    @Override
    public void removePropertyChangeListener(PropertyChangeListener listener) {
        changeSupport.removePropertyChangeListener(listener);
    }

    /**
     *
     * @param event mouse event
     * @param l component
     * @return true if the event is within the component bounds
     *         (i.e. a component of the active tab was hit at the event location)
     */
    private boolean clickNotOutOfBounds(MouseEvent event, JLayer<? extends JComponent> l) {
        return getComponentClickedOn(l, event) != null;
    }

    /**
     * Perform a zoom out of the canvas; fires ZOOM_OUT_CHANGE_MESSAGE with
     * old and new scale when the minimum has not been reached.
     */
    @Override
    public void zoomOut() {
        if (canZoomOut()) {
            double old = zoom;
            zoom -= zoomAmount;
            changeSupport.firePropertyChange(ZOOM_OUT_CHANGE_MESSAGE, old, zoom);
        }
    }

    /**
     * @param l layer clicked
     * @param e mouse event with coordinates releative to l
     * @return component in l clicked on, resolved via the active tab;
     *         null when nothing is at the unzoomed location
     */
    private Component getComponentClickedOn(JLayer<? extends JComponent> l, MouseEvent e) {
        PetriNetTab tab = controller.getActiveTab();
        Point coordinates = zoomedXY(e);
        return tab.getComponentAt(coordinates.x, coordinates.y);
    }

    /**
     * @param e mouse click event
     * @return the events x y coordinates zoomed (screen coordinates divided by the scale)
     */
    private Point zoomedXY(MouseEvent e) {
        int x = e.getX() == 0 ? 0 : (int) (e.getX() / zoom);
        int y = e.getY() == 0 ? 0 : (int) (e.getY() / zoom);
        return new Point(x, y);
    }

    /**
     * @param e mouse event
     * @param layer component
     * @return a new event with x y pointing to the coordinate space of the layer
     * rather than the whole application
     */
    private MouseEvent translateToLayerCoordinates(MouseEvent e, JLayer<? extends JComponent> layer) {
        // NOTE(review): the layer parameter is unused; the event is converted into
        // the active tab's coordinate space instead — confirm this is intentional.
        PetriNetTab tab = controller.getActiveTab();
        return SwingUtilities.convertMouseEvent(e.getComponent(), e, tab);
    }

    /**
     *
     * @param component clicked
     * @param mouseEvent mouse event
     * @return translated mouse click event retargeted at the given component
     *         with unzoomed coordinates
     */
    private MouseEvent getNewMouseClickEvent(Component component, MouseEvent mouseEvent) {
        Point coordinates = zoomedXY(mouseEvent);
        // NOTE(review): getModifiers() is deprecated in newer JDKs in favour of
        // getModifiersEx() — confirm the target JDK before changing.
        return new MouseEvent(component, mouseEvent.getID(), mouseEvent.getWhen(), mouseEvent.getModifiers(),
                coordinates.x, coordinates.y, mouseEvent.getClickCount(), mouseEvent.isPopupTrigger(),
                mouseEvent.getButton());
    }

    /**
     *
     * @return the zoom scale as a percentage e.g. 20%, 100%, 120%
     */
    @Override
    public int getPercentageZoom() {
        return (int) (zoom * 100);
    }

    /**
     *
     * @return the scale of the zoom e.g. 0.2, 1.0, 1.2
     */
    @Override
    public double getScale() {
        return zoom;
    }

    /**
     *
     * @return true if can zoom out any further
     */
    @Override
    public boolean canZoomOut() {
        return zoom - zoomAmount >= zoomMin;
    }

    /**
     * Performs the zoom in on the canvas; fires ZOOM_IN_CHANGE_MESSAGE with
     * old and new scale when the maximum has not been reached.
     */
    @Override
    public void zoomIn() {
        if (canZoomIn()) {
            double old = zoom;
            zoom += zoomAmount;
            changeSupport.firePropertyChange(ZOOM_IN_CHANGE_MESSAGE, old, zoom);
        }
    }

    /**
     *
     * @return true if can zoom in any further
     */
    @Override
    public boolean canZoomIn() {
        return zoom + zoomAmount <= zoomMax;
    }
}
/*
Copyright 2018 Nationale-Nederlanden, 2020 WeAreFrank!

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.extensions.test;

import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.InvocationTargetException;
import java.security.AccessControlException;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockServletContext;

import nl.nn.adapterframework.configuration.Configuration;
import nl.nn.adapterframework.configuration.IbisContext;
import nl.nn.adapterframework.core.Adapter;
import nl.nn.adapterframework.lifecycle.IbisApplicationServlet;
import nl.nn.adapterframework.util.AppConstants;
import nl.nn.adapterframework.util.DateUtils;
import nl.nn.adapterframework.util.LogUtil;
import nl.nn.adapterframework.util.Misc;
import nl.nn.adapterframework.util.ProcessMetrics;
import nl.nn.adapterframework.util.RunState;
import nl.nn.adapterframework.util.XmlUtils;

/**
 * Drives an in-process smoke test of an Ibis/Frank application: boots the
 * IbisContext, waits for all adapters to start, then runs the Larva test-tool
 * scenarios via mocked servlet requests.
 */
public class IbisTester {
	private AppConstants appConstants;
	// Path to the web content directory, resolved from the build output location.
	String webAppPath;
	IbisContext ibisContext;
	MockServletContext application;

	/** Value object pairing a scenario's textual result with its run time in msec. */
	private class Result {
		private String resultString;
		private long duration;

		public Result(String resultString, long duration) {
			this.resultString = resultString;
			this.duration = duration;
		}
	}

	/**
	 * Callable that executes Larva via a mocked servlet request. A null scenario
	 * means "list scenarios" (first bootstrap call, result returned as XHTML);
	 * a non-null scenario is executed silently and the raw writer output returned.
	 */
	private class ScenarioRunner implements Callable<String> {
		private String scenariosRootDir;
		private String scenario;

		public ScenarioRunner(String scenariosRootDir, String scenario) {
			this.scenariosRootDir = scenariosRootDir;
			this.scenario = scenario;
		}

		@Override
		public String call() throws Exception {
			MockHttpServletRequest request = new MockHttpServletRequest();
			request.setServletPath("/larva/index.jsp");
			boolean silent;
			if (scenario == null) {
				// Bootstrap call: create the mock servlet context and expose the IbisContext on it.
				String ibisContextKey = appConstants.getResolvedProperty(IbisApplicationServlet.KEY_CONTEXT);
				application = new MockServletContext("file:" + webAppPath, null);
				application.setAttribute(ibisContextKey, ibisContext);
				silent = false;
			} else {
				request.setParameter("loglevel", "scenario passed/failed");
				request.setParameter("execute", scenario);
				silent = true;
			}
			if (scenariosRootDir != null) {
				request.setParameter("scenariosrootdirectory", scenariosRootDir);
			}
			Writer writer = new StringWriter();
			runScenarios(application, request, writer, silent);
			if (scenario == null) {
				// Wrap the fragment so it can be parsed/converted as a full document.
				String htmlString = "<html><head/><body>" + writer.toString() + "</body></html>";
				return XmlUtils.toXhtml(htmlString);
			} else {
				return writer.toString();
			}
		}

		/**
		 * Invokes TestTool.runScenarios reflectively so there is no compile-time
		 * dependency on the Larva test tool.
		 */
		public void runScenarios(ServletContext application, HttpServletRequest request, Writer out, boolean silent)
				throws IllegalArgumentException, SecurityException, IllegalAccessException, InvocationTargetException,
				NoSuchMethodException, ClassNotFoundException {
			Class<?>[] args_types = new Class<?>[4];
			args_types[0] = ServletContext.class;
			args_types[1] = HttpServletRequest.class;
			args_types[2] = Writer.class;
			args_types[3] = boolean.class;
			Object[] args = new Object[4];
			args[0] = application;
			args[1] = request;
			args[2] = out;
			args[3] = silent;
			Class.forName("nl.nn.adapterframework.testtool.TestTool").getMethod("runScenarios", args_types).invoke(null, args);
		}
	}

	/**
	 * Runs the full test: start adapters, then (only if that succeeded) Larva.
	 * @return null on success, otherwise the first error message
	 */
	public String doTest() {
		initTest();
		try {
			String result = testStartAdapters();
			if (result==null) {
				result = testLarva();
			}
			return result;
		} finally {
			closeTest();
		}
	}

	// all called methods in doTest must be public so they can also be called
	// from outside

	/** Sets up log/system properties and resets the IbisContext reference. */
	public void initTest() {
		try {
			// fix for GitLab Runner
			File file = new File("target/log");
			String canonicalPath = file.getCanonicalPath();
			canonicalPath = canonicalPath.replace("\\", "/");
			System.setProperty("log.dir", canonicalPath);
		} catch (IOException e) {
			e.printStackTrace();
			// Fall back to the relative path when the canonical path cannot be resolved.
			System.setProperty("log.dir", "target/log");
		}
		System.setProperty("log.level", "INFO");
		System.setProperty("dtap.stage", "LOC");
		System.setProperty(AppConstants.APPLICATION_SERVER_TYPE_PROPERTY, "IBISTEST");
		System.setProperty("flow.create.url", "");
		debug("***start***");
		ibisContext = null;
	}

	/** Closes the IbisContext if it was created. */
	public void closeTest() {
		if (ibisContext != null) {
			ibisContext.close();
		}
		debug("***end***");
	}

	/**
	 * Boots the configuration and waits (up to ~30s per adapter) until every
	 * registered adapter reaches the STARTED state.
	 *
	 * @return a string containing the error, if any; null when all adapters started
	 */
	public String testStartAdapters() {
		// Log4J2 will automatically create a console appender and basic pattern layout.
		Configurator.setLevel(LogUtil.getRootLogger().getName(), Level.INFO);
		// remove AppConstants because it can be present from another JUnit test
		AppConstants.removeInstance();
		appConstants = AppConstants.getInstance();
		webAppPath = getWebContentDirectory();
		String projectBaseDir = Misc.getProjectBaseDir();
		appConstants.put("project.basedir", projectBaseDir);
		debug("***set property with name [project.basedir] and value [" + projectBaseDir + "]***");

		System.setProperty("jdbc.migrator.active", "true");
		// appConstants.put("validators.disabled", "true");
		// appConstants.put("xmlValidator.lazyInit", "true");
		// appConstants.put("xmlValidator.maxInitialised", "200");

		ibisContext = new IbisContext();
		long configLoadStartTime = System.currentTimeMillis();
		ibisContext.init(false);
		long configLoadEndTime = System.currentTimeMillis();
		debug("***configuration loaded in ["+ (configLoadEndTime - configLoadStartTime) + "] msec***");

		List<Configuration> configurations = ibisContext.getIbisManager().getConfigurations();
		for(Configuration configuration : configurations) {
			if(configuration.getConfigurationException() != null) {
				error("error loading configuration ["+configuration.getName()+"]: "+ configuration.getConfigurationException().getMessage());
			} else {
				debug("loading configuration ["+configuration.getName()+"] with ["+configuration.getRegisteredAdapters().size()+"] adapters");
			}
		}

		debug("***starting adapters***");
		int adaptersStarted = 0;
		int adaptersCount = 0;
		for (Adapter adapter: ibisContext.getIbisManager().getRegisteredAdapters()) {
			adaptersCount++;
			RunState runState = adapter.getRunState();
			if (!(RunState.STARTED).equals(runState)) {
				debug("adapter [" + adapter.getName() + "] has state [" + runState + "], will retry...");
				// Poll once a second, at most 30 times, for the adapter to start.
				int count = 30;
				while (count-- > 0 && !(RunState.STARTED).equals(runState)) {
					try {
						Thread.sleep(1000);
					} catch (InterruptedException e) {
						e.printStackTrace();
					}
					runState = adapter.getRunState();
					if (!(RunState.STARTED).equals(runState)) {
						debug("adapter [" + adapter.getName() + "] has state [" + runState + "], retries left [" + count + "]");
					} else {
						debug("adapter [" + adapter.getName() + "] has state [" + runState + "]");
					}
				}
			} else {
				debug("adapter [" + adapter.getName() + "] has state [" + runState + "]");
			}
			if ((RunState.STARTED).equals(runState)) {
				adaptersStarted++;
			} else {
				error("adapter [" + adapter.getName() + "] has state [" + runState + "]");
			}
		}
		String msg = "adapters started [" + adaptersStarted + "] from [" + adaptersCount + "]";
		if (adaptersCount == adaptersStarted) {
			debug(msg);
			return null; // null == good
		} else {
			return error(msg);
		}
	}

	/**
	 * Runs the Larva scenarios: first a bootstrap call to list scenario root
	 * directories, then all scenarios of the selected root and of every
	 * unselected root.
	 *
	 * @return null on success, otherwise the first error message
	 */
	public String testLarva() {
		debug("***start larva***");
		Result result;
		try {
			result = runScenario(null, null, null);
		} catch (Exception e) {
			e.printStackTrace();
			result = null;
		}
		if (result == null) {
			return error("First call to get scenarios failed");
		} else {
			Double countScenariosRootDirs = evaluateXPathNumber(result.resultString,
					"count(html/body//select[@name='scenariosrootdirectory']/option)");
			if (countScenariosRootDirs == 0) {
				return error("No scenarios root directories found");
			}
			Collection<String> scenariosRootDirsUnselected = evaluateXPath(result.resultString,
					"(html/body//select[@name='scenariosrootdirectory'])[1]/option[not(@selected)]/@value");
			String runScenariosResult = runScenarios(result.resultString);
			if (runScenariosResult!=null) {
				return runScenariosResult;
			}
			if (scenariosRootDirsUnselected != null && scenariosRootDirsUnselected.size() > 0) {
				// Repeat for every root directory that was not selected by default.
				for (String scenariosRootDirUnselected : scenariosRootDirsUnselected) {
					try {
						result = runScenario(scenariosRootDirUnselected, null, null);
					} catch (Exception e) {
						e.printStackTrace();
						result = null;
					}
					if (result == null) {
						return error("Call to get scenarios from [" + scenariosRootDirUnselected + "] failed");
					}
					runScenariosResult = runScenarios(result.resultString);
					if (runScenariosResult!=null) {
						return runScenariosResult;
					}
				}
			}
		}
		return null;
	}

	/**
	 * Extracts the scenario list from the given XHTML page and runs each scenario,
	 * counting passes; a scenario passes when its output contains "passed".
	 *
	 * @param xhtml Larva scenario-listing page
	 * @return null when all scenarios passed, otherwise an error message
	 */
	private String runScenarios(String xhtml) {
		Collection<String> scenarios = evaluateXPath(xhtml,
				"(html/body//select[@name='execute'])[1]/option/@value[ends-with(.,'.properties')]");
		if (scenarios == null || scenarios.size() == 0) {
			return error("No scenarios found");
		} else {
			String scenariosRootDir = evaluateXPathFirst(xhtml,
					"(html/body//select[@name='scenariosrootdirectory'])[1]/option[@selected]/@value");
			String scenariosRoot = evaluateXPathFirst(xhtml,
					"(html/body//select[@name='scenariosrootdirectory'])[1]/option[@selected]");
			debug("Found " + scenarios.size() + " scenario(s) in root [" + scenariosRoot + "]");
			int scenariosTotal = scenarios.size();
			int scenariosPassed = 0;
			int scenariosCount = 0;
			Result result;
			for (String scenario : scenarios) {
				scenariosCount++;
				// Strip the root dir prefix for readable progress messages.
				String scenarioShortName;
				if (StringUtils.isNotEmpty(scenario) && StringUtils.isNotEmpty(scenariosRootDir)) {
					if (scenario.startsWith(scenariosRootDir)) {
						scenarioShortName = scenario.substring(scenariosRootDir.length());
					} else {
						scenarioShortName = scenario;
					}
				} else {
					scenarioShortName = scenario;
				}
				String scenarioInfo = "scenario [" + scenariosCount + "/" + scenariosTotal + "] [" + scenarioShortName + "]";
				try {
					result = runScenario(scenariosRootDir, scenario, scenarioInfo);
				} catch (Exception e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
					result = null;
				}
				if (result == null) {
					error(scenarioInfo + " failed");
				} else {
					if (result.resultString != null && result.resultString.contains("passed")) {
						debug(scenarioInfo + " passed in [" + result.duration + "] msec");
						scenariosPassed++;
					} else {
						error(scenarioInfo + " failed in [" + result.duration + "] msec");
						error(result.resultString);
					}
				}
			}
			String msg = "scenarios passed [" + scenariosPassed + "] from [" + scenariosCount + "]";
			if (scenariosCount == scenariosPassed) {
				debug(msg);
			} else {
				return error(msg);
			}
		}
		return null;
	}

	/**
	 * Runs one scenario (or the scenario-listing bootstrap when scenario is null)
	 * on a single-threaded executor with a 60 second timeout, retrying once.
	 * On timeout/error the Result carries a null resultString.
	 *
	 * @param scenariosRootDir root directory to run in, may be null
	 * @param scenario         scenario to execute, null for the listing call
	 * @param scenarioInfo     label used in log messages
	 * @return result string and duration; duration covers only the last attempt
	 */
	private Result runScenario(String scenariosRootDir, String scenario, String scenarioInfo) {
		int count = 2;
		String resultString = null;
		long startTime = 0;
		while (count-- > 0 && resultString == null) {
			startTime = System.currentTimeMillis();
			ScenarioRunner scenarioRunner = new ScenarioRunner(scenariosRootDir, scenario);
			ExecutorService service = Executors.newSingleThreadExecutor();
			Future<String> future = service.submit(scenarioRunner);
			long timeout = 60;
			try {
				try {
					resultString = future.get(timeout, TimeUnit.SECONDS);
				} catch (TimeoutException e) {
					debug(scenarioInfo + " timed out, retries left [" + count + "]");
				} catch (Exception e) {
					e.printStackTrace();
					debug(scenarioInfo + " got error, retries left [" + count + "]");
				}
			} finally {
				service.shutdown();
			}
		}
		long endTime = System.currentTimeMillis();
		return new Result(resultString, endTime - startTime);
	}

	/** Prints a timestamped, memory-annotated message to stdout. */
	private static void debug(String string) {
		System.out.println(getIsoTimeStamp() + " " + getMemoryInfo() + " " + string);
	}

	/** Prints a timestamped, memory-annotated message to stderr and returns it. */
	private static String error(String string) {
		System.err.println(getIsoTimeStamp() + " " + getMemoryInfo() + " " + string);
		return string;
	}

	/** @return current time formatted as yyyy-MM-dd HH:mm:ss.SSS */
	private static String getIsoTimeStamp() {
		return DateUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss.SSS");
	}

	/** @return "[used/total]" JVM heap usage in normalized notation */
	private static String getMemoryInfo() {
		long freeMem = Runtime.getRuntime().freeMemory();
		long totalMem = Runtime.getRuntime().totalMemory();
		return "[" + ProcessMetrics.normalizedNotation(totalMem - freeMem) + "/"
				+ ProcessMetrics.normalizedNotation(totalMem) + "]";
	}

	/** @return first node matching the xpath, or null on any evaluation error */
	private static String evaluateXPathFirst(String xhtml, String xpath) {
		try {
			return XmlUtils.evaluateXPathNodeSetFirstElement(xhtml, xpath);
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			return null;
		}
	}

	/** @return all nodes matching the xpath, or null on any evaluation error */
	private static Collection<String> evaluateXPath(String xhtml, String xpath) {
		try {
			return XmlUtils.evaluateXPathNodeSet(xhtml, xpath);
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			return null;
		}
	}

	/** @return numeric xpath result, or null on any evaluation error */
	private static Double evaluateXPathNumber(String xhtml, String xpath) {
		try {
			return XmlUtils.evaluateXPathNumber(xhtml, xpath);
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Walks up from the build output directory looking for a "WebContent" or
	 * "src/main/webapp" directory.
	 *
	 * @return the web content path, or null when it cannot be determined
	 */
	private static String getWebContentDirectory() {
		String buildOutputDirectory = Misc.getBuildOutputDirectory();
		if (buildOutputDirectory != null && buildOutputDirectory.endsWith("classes")) {
			String wcDirectory = null;
			File file = new File(buildOutputDirectory);
			while (wcDirectory == null) {
				try {
					File file2 = new File(file, "WebContent");
					if (file2.exists() && file2.isAbsolute()) {
						wcDirectory = file2.getPath();
					} else {
						file2 = new File(file, "src/main");
						if (file2.exists() && file2.isAbsolute()) {
							wcDirectory = new File(file2, "webapp").getPath();
						} else {
							// Not found at this level; try the parent directory.
							file = file.getParentFile();
							if (file == null) {
								return null;
							}
						}
					}
				} catch (AccessControlException e) {
					error(e.getMessage());
					return null;
				}
			}
			return wcDirectory;
		} else {
			return null;
		}
	}

	public IbisContext getIbisContext() {
		return ibisContext;
	}
}
package mariculture.core.tile; import java.util.ArrayList; import java.util.List; import mariculture.api.core.MaricultureHandlers; import mariculture.api.core.RecipeVat; import mariculture.core.helpers.cofh.CoFhItemHelper; import mariculture.core.network.PacketHandler; import mariculture.core.tile.base.TileMultiBlock; import mariculture.core.tile.base.TileMultiStorage; import mariculture.core.util.ITank; import mariculture.core.util.Tank; import net.minecraft.inventory.ISidedInventory; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.tileentity.TileEntity; import net.minecraftforge.common.util.ForgeDirection; import net.minecraftforge.fluids.Fluid; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.fluids.FluidTankInfo; import net.minecraftforge.fluids.IFluidHandler; public class TileVat extends TileMultiStorage implements ISidedInventory, IFluidHandler, ITank { public int max_lrg = 30000; public int max_sml = 6000; public Tank tank; public Tank tank2; public Tank tank3; public int timeNeeded; public int timeRemaining; public boolean canWork; private int machineTick; public TileVat() { tank = new Tank(max_sml); tank2 = new Tank(max_sml); tank3 = new Tank(max_sml); inventory = new ItemStack[2]; needsInit = true; } public boolean onTick(int i) { return machineTick % i == 0; } @Override public boolean canUpdate() { return true; } @Override public void updateEntity() { super.updateEntity(); if (master == null) { updateSingle(); } } //Updating the Multi-Block form @Override public void updateMaster() { if (tank.getCapacity() != max_lrg) { tank.setCapacity(max_lrg); } if (tank2.getCapacity() != max_lrg) { tank2.setCapacity(max_lrg); } if (tank3.getCapacity() != max_lrg) { tank3.setCapacity(max_lrg); } machineTick++; if (!isInit() && !worldObj.isRemote) { //Init Master PacketHandler.syncMultiBlock(getMaster(), this, facing); for (MultiPart slave : slaves) { 
TileEntity te = worldObj.getTileEntity(slave.xCoord, slave.yCoord, slave.zCoord); if (te != null && te instanceof TileVat) { PacketHandler.syncMultiBlock(getMaster(), te, ((TileMultiBlock) te).facing); } } setInit(true); } if (!worldObj.isRemote) if (canWork && onTick(20)) { timeRemaining++; } if (worldObj.isRemote && canWork) { worldObj.spawnParticle("smoke", xCoord + 0.5D + +worldObj.rand.nextFloat() - worldObj.rand.nextFloat() / 2, yCoord + 0.8D + worldObj.rand.nextFloat() - worldObj.rand.nextFloat() / 2, zCoord + 0.5D + +worldObj.rand.nextFloat() - worldObj.rand.nextFloat() / 2, 0, 0, 0); } updateAll(); } //The Code for when we are updating a single block! public void updateSingle() { machineTick++; if (!worldObj.isRemote) if (canWork) if (onTick(30)) { timeRemaining++; } updateAll(); if (worldObj.isRemote && canWork) { worldObj.spawnParticle("smoke", xCoord + 0.5D + +worldObj.rand.nextFloat() - worldObj.rand.nextFloat() / 2, yCoord + 0.8D + worldObj.rand.nextFloat() - worldObj.rand.nextFloat() / 2, zCoord + 0.5D + +worldObj.rand.nextFloat() - worldObj.rand.nextFloat() / 2, 0, 0, 0); } } public void updateAll() { if (onTick(20)) { canWork = canWork(); } if (canWork && !worldObj.isRemote) { if (timeNeeded == 0) { RecipeVat recipe = (RecipeVat) getResult()[0]; if (recipe != null) { timeNeeded = recipe.processTime; } timeRemaining = 0; } if (timeRemaining >= timeNeeded) { for (int i = 0; i < 64; i++) { Object[] result = getResult(); RecipeVat recipe = (RecipeVat) result[0]; byte tankNum = (Byte) result[1]; if (recipe != null) { createResult(recipe, tankNum); } else break; } timeRemaining = 0; timeNeeded = 0; canWork = canWork(); } } } private FluidStack drain(byte id, FluidStack input, FluidStack output, boolean doDrain) { int drain = input.copy().amount; if (input.isFluidEqual(output)) { drain -= output.amount; output = null; } if (doDrain) if (id == (byte) 1) { tank.drain(drain, true); } else { tank2.drain(drain, true); } return output; } private void 
createResult(RecipeVat recipe, byte tankNum) { //Drain out the fluid1 FluidStack outputFluid = null; if (recipe.outputFluid != null) { outputFluid = recipe.outputFluid.copy(); } if (tankNum == 1) { if (recipe.inputFluid1 != null) { outputFluid = drain((byte) 1, recipe.inputFluid1, outputFluid, true); } if (recipe.inputFluid2 != null) { outputFluid = drain((byte) 2, recipe.inputFluid2, outputFluid, true); } } else { if (recipe.inputFluid1 != null) { outputFluid = drain((byte) 2, recipe.inputFluid1, outputFluid, true); } if (recipe.inputFluid2 != null) { outputFluid = drain((byte) 1, recipe.inputFluid2, outputFluid, true); } } //Decrease the StackSize of the input, by this much if it's valid if (recipe.inputItem != null) { decrStackSize(0, recipe.inputItem instanceof ItemStack ? (((ItemStack) recipe.inputItem).stackSize) : 1); } //Add the new Fluid if (outputFluid != null && outputFluid.amount > 0) { tank3.fill(recipe.outputFluid.copy(), true); } //Add the new Item if (recipe.outputItem != null) { ItemStack output = recipe.outputItem.copy(); if (inventory[1] != null) { output.stackSize += inventory[1].stackSize; } setInventorySlotContents(1, output); } PacketHandler.syncFluidTank(this, getFluid((byte) 1), (byte) 1); PacketHandler.syncFluidTank(this, getFluid((byte) 2), (byte) 2); PacketHandler.syncFluidTank(this, getFluid((byte) 3), (byte) 3); } public boolean canWork() { if (tank.getFluidAmount() <= 0 && tank2.getFluidAmount() <= 0) return false; RecipeVat res = (RecipeVat) getResult()[0]; return res != null; } public Object[] getResult() { byte tankNum = 1; RecipeVat result = MaricultureHandlers.vat.getResult(tank.getFluid(), tank2.getFluid(), inventory[0]); result = result == null || !hasRoom(result.outputItem, result.outputFluid) ? null : result; if (result == null) { tankNum = 2; result = MaricultureHandlers.vat.getResult(tank2.getFluid(), tank.getFluid(), inventory[0]); result = result == null || !hasRoom(result.outputItem, result.outputFluid) ? 
null : result; } return new Object[] { result, tankNum }; } private boolean hasRoom(ItemStack stack, FluidStack newFluid) { if (tank3.getFluid() != null && newFluid != null) if (newFluid != tank3.getFluid() || newFluid.amount + tank3.getFluidAmount() > tank3.getCapacity()) return false; return stack == null || inventory[1] == null || CoFhItemHelper.areItemStackEqualNoNull(stack, inventory[1]) && inventory[1].stackSize + stack.stackSize <= inventory[1].getMaxStackSize(); } @Override public Class getTEClass() { return this.getClass(); } //ISided @Override public int[] getAccessibleSlotsFromSide(int side) { return new int[] { 0, 1 }; } @Override public boolean canInsertItem(int slot, ItemStack stack, int side) { return slot == 0; } @Override public boolean canExtractItem(int slot, ItemStack stack, int side) { if (slot == 0) return side != ForgeDirection.DOWN.ordinal(); return true; } //Inventory Logic @Override public ItemStack getStackInSlot(int slot) { TileVat vat = master != null ? (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return null; return vat.inventory[slot]; } @Override public ItemStack getStackInSlotOnClosing(int slot) { TileVat vat = master != null ? (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return null; if (vat.inventory[slot] != null) { ItemStack stack = vat.inventory[slot]; vat.inventory[slot] = null; return stack; } return null; } @Override public void setInventorySlotContents(int slot, ItemStack stack) { TileVat vat = master != null ? (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return; vat.inventory[slot] = stack; if (stack != null && stack.stackSize > vat.getInventoryStackLimit()) { stack.stackSize = vat.getInventoryStackLimit(); } vat.markDirty(); } @Override public ItemStack decrStackSize(int slot, int amount) { TileVat vat = master != null ? 
(TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return null; if (vat.inventory[slot] != null) { ItemStack stack; if (vat.inventory[slot].stackSize <= amount) { stack = vat.inventory[slot]; vat.inventory[slot] = null; vat.markDirty(); return stack; } else { stack = vat.inventory[slot].splitStack(amount); if (vat.inventory[slot].stackSize == 0) { vat.inventory[slot] = null; } vat.markDirty(); return stack; } } return null; } @Override public void markDirty() { super.markDirty(); if (!worldObj.isRemote) { PacketHandler.syncInventory(this, inventory); } } //Tank Logic @Override public FluidStack getFluid(int transfer) { TileVat vat = master != null ? (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return null; if (vat.tank.getFluid() == null) return null; if (vat.tank.getFluidAmount() - transfer < 0) return null; return new FluidStack(vat.tank.getFluid(), transfer); } @Override public FluidStack getFluid() { return getFluid((byte) 1); } @Override public FluidStack getFluid(byte tank) { TileVat vat = master != null ? (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return null; if (tank == 1) return vat.tank.getFluid(); else if (tank == 2) return vat.tank2.getFluid(); else if (tank == 3) return vat.tank3.getFluid(); return null; } @Override public void setFluid(FluidStack fluid) { setFluid(fluid, (byte) 1); } @Override public void setFluid(FluidStack fluid, byte tank) { TileVat vat = master != null ? (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return; if (tank == (byte) 1) { vat.tank.setFluid(fluid); } else if (tank == (byte) 2) { vat.tank2.setFluid(fluid); } else if (tank == (byte) 3) { vat.tank3.setFluid(fluid); } } @Override public FluidStack drain(ForgeDirection from, int maxDrain, boolean doDrain) { TileVat vat = master != null ? 
(TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return null; //If the draining from tank3 didn't fail, send packet update, otherwise try for tank2, then tank1 FluidStack ret = vat.tank3.drain(maxDrain, doDrain); if (ret != null) { if (doDrain && !worldObj.isRemote) { PacketHandler.syncFluidTank(this, getFluid((byte) 3), (byte) 3); } } else { ret = vat.tank2.drain(maxDrain, doDrain); if (ret != null) { if (doDrain && !worldObj.isRemote) { PacketHandler.syncFluidTank(this, getFluid((byte) 2), (byte) 2); } } else { ret = vat.tank.drain(maxDrain, doDrain); if (ret != null) { if (doDrain && !worldObj.isRemote) { PacketHandler.syncFluidTank(this, getFluid((byte) 1), (byte) 1); } } } } return ret; } @Override public int fill(ForgeDirection from, FluidStack resource, boolean doFill) { TileVat vat = master != null ? (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return 0; int ret = vat.tank.fill(resource, doFill, vat.tank2); if (ret > 0) { if (doFill) { PacketHandler.syncFluidTank(this, getFluid((byte) 1), (byte) 1); } } else { ret = vat.tank2.fill(resource, doFill, vat.tank); if (ret > 0) if (doFill) { PacketHandler.syncFluidTank(this, getFluid((byte) 2), (byte) 2); } } return ret; } @Override public FluidTankInfo[] getTankInfo(ForgeDirection from) { TileVat vat = master != null ? 
(TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord) : this; if (vat == null) return null; return new FluidTankInfo[] { vat.tank.getInfo(), vat.tank2.getInfo(), vat.tank3.getInfo() }; } /** End Tank Stuffs**/ @Override public void readFromNBT(NBTTagCompound nbt) { super.readFromNBT(nbt); NBTTagList tagList = nbt.getTagList("Tanks", 10); for (int i = 0; i < 3; i++) { NBTTagCompound tag = tagList.getCompoundTagAt(i); byte tank = tag.getByte("Tank"); getTank(i).readFromNBT(tag); } timeNeeded = nbt.getInteger("TimeNeeded"); timeRemaining = nbt.getInteger("TimeRemaining"); canWork = nbt.getBoolean("CanWork"); } public Tank getTank(int i) { if (i == 0) return tank; if (i == 1) return tank2; return tank3; } @Override public void writeToNBT(NBTTagCompound nbt) { super.writeToNBT(nbt); NBTTagList tankList = new NBTTagList(); for (int i = 0; i < 3; i++) { NBTTagCompound tag = new NBTTagCompound(); tag.setByte("Tank", (byte) i); getTank(i).writeToNBT(tag); tankList.appendTag(tag); } nbt.setTag("Tanks", tankList); nbt.setInteger("TimeNeeded", timeNeeded); nbt.setInteger("TimeRemaining", timeRemaining); nbt.setBoolean("CanWork", canWork); } //Master Logic @Override public void onBlockBreak() { if (master != null) { TileVat mstr = (TileVat) worldObj.getTileEntity(master.xCoord, master.yCoord, master.zCoord); if (mstr != null) { //Set all three tanks back to small size mstr.tank.setCapacity(max_sml); if (mstr.tank.getFluidAmount() > max_sml) { mstr.tank.setFluidAmount(max_sml); } mstr.tank2.setCapacity(max_sml); if (mstr.tank2.getFluidAmount() > max_sml) { mstr.tank2.setFluidAmount(max_sml); } mstr.tank3.setCapacity(max_sml); if (mstr.tank3.getFluidAmount() > max_sml) { mstr.tank3.setFluidAmount(max_sml); } } } super.onBlockBreak(); } @Override public boolean isPartnered(int x, int y, int z) { TileEntity tile = worldObj.getTileEntity(x, y, z); return tile instanceof TileVat ? 
((TileVat) tile).master != null : false; } @Override public boolean isPart(int x, int y, int z) { return worldObj.getTileEntity(x, y, z) instanceof TileVat && !isPartnered(x, y, z); } @Override public void onBlockPlaced() { onBlockPlaced(xCoord, yCoord, zCoord); worldObj.markBlockForUpdate(xCoord, yCoord, zCoord); } public void onBlockPlaced(int x, int y, int z) { if (isPart(x, y, z) && isPart(x + 1, y, z) && isPart(x, y, z + 1) && isPart(x + 1, y, z + 1)) { MultiPart mstr = new MultiPart(x, y, z); ArrayList<MultiPart> parts = new ArrayList<MultiPart>(); parts.add(setAsSlave(mstr, x + 1, y, z, ForgeDirection.WEST)); parts.add(setAsSlave(mstr, x + 1, y, z + 1, ForgeDirection.NORTH)); parts.add(setAsSlave(mstr, x, y, z + 1, ForgeDirection.EAST)); setAsMaster(mstr, parts, ForgeDirection.SOUTH); } if (isPart(x - 1, y, z) && isPart(x, y, z) && isPart(x, y, z + 1) && isPart(x - 1, y, z + 1)) { MultiPart mstr = new MultiPart(x - 1, y, z); ArrayList<MultiPart> parts = new ArrayList<MultiPart>(); parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.WEST)); parts.add(setAsSlave(mstr, x - 1, y, z + 1, ForgeDirection.EAST)); parts.add(setAsSlave(mstr, x, y, z + 1, ForgeDirection.NORTH)); setAsMaster(mstr, parts, ForgeDirection.SOUTH); } if (isPart(x, y, z) && isPart(x - 1, y, z) && isPart(x - 1, y, z - 1) && isPart(x, y, z - 1)) { MultiPart mstr = new MultiPart(x - 1, y, z - 1); ArrayList<MultiPart> parts = new ArrayList<MultiPart>(); parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.NORTH)); parts.add(setAsSlave(mstr, x - 1, y, z, ForgeDirection.EAST)); parts.add(setAsSlave(mstr, x, y, z - 1, ForgeDirection.WEST)); setAsMaster(mstr, parts, ForgeDirection.SOUTH); } if (isPart(x, y, z) && isPart(x + 1, y, z) && isPart(x, y, z - 1) && isPart(x + 1, y, z - 1)) { MultiPart mstr = new MultiPart(x, y, z - 1); ArrayList<MultiPart> parts = new ArrayList<MultiPart>(); parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.EAST)); parts.add(setAsSlave(mstr, x + 1, y, z, 
ForgeDirection.NORTH)); parts.add(setAsSlave(mstr, x + 1, y, z - 1, ForgeDirection.WEST)); setAsMaster(mstr, parts, ForgeDirection.SOUTH); } worldObj.markBlockForUpdate(xCoord, yCoord, zCoord); } @Override public FluidStack drain(ForgeDirection from, FluidStack resource, boolean doDrain) { return drain(from, resource.amount, doDrain); } @Override public boolean canFill(ForgeDirection from, Fluid fluid) { return true; } @Override public boolean canDrain(ForgeDirection from, Fluid fluid) { return true; } @Override public int getTankScaled(int i) { return 0; } @Override public String getFluidName() { return null; } @Override public List getFluidQty(List tooltip) { return null; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.proxy2.stub; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Iterator; import org.apache.commons.lang3.StringUtils; import org.apache.commons.proxy2.ObjectProvider; import org.apache.commons.proxy2.provider.BeanProvider; import org.apache.commons.proxy2.provider.ObjectProviderUtils; import org.junit.Test; public class StubBuilderTest extends AbstractStubTestCase { //---------------------------------------------------------------------------------------------------------------------- // Other Methods //---------------------------------------------------------------------------------------------------------------------- @Override protected StubInterface createProxy(Trainer<StubInterface> trainer) { return new StubBuilder<StubInterface>(proxyFactory, StubInterface.class).train(trainer).build(); } @Test public void testWithConcreteTarget() { StubBuilder<StubInterface> builder = new StubBuilder<StubInterface>(proxyFactory, StubInterface.class, new SimpleStub()); builder.train(new Trainer<StubInterface>() { @Override protected void train(StubInterface 
trainee) { when(trainee.one("Foo")).thenReturn("Bar"); } }); StubInterface stub = builder.build(); assertEquals("Bar", stub.one("Foo")); } @Test public void testWithNoTargetAndNoInterceptors() { StubBuilder<StubInterface> builder = new StubBuilder<StubInterface>(proxyFactory, StubInterface.class); StubInterface stub = builder.build(); assertNull(stub.one("Whatever")); } @Test public void testWithNoTargetWithInterceptor() { StubBuilder<StubInterface> builder = new StubBuilder<StubInterface>(proxyFactory, StubInterface.class); builder.train(new Trainer<StubInterface>() { @Override protected void train(StubInterface trainee) { when(trainee.one("Foo")).thenReturn("Bar"); } }); StubInterface stub = builder.build(); assertEquals("Bar", stub.one("Foo")); } @Test public void testWithObjectProviderTarget() { StubBuilder<StubInterface> builder = new StubBuilder<StubInterface>(proxyFactory, StubInterface.class, new BeanProvider<StubInterface>(SimpleStub.class)); builder.train(new Trainer<StubInterface>() { @Override protected void train(StubInterface trainee) { when(trainee.one("Foo")).thenReturn("Bar"); } }); StubInterface stub = builder.build(); assertEquals("Bar", stub.one("Foo")); } @Test public void testAdditionalInterfaces() { StubBuilder<StubInterface> builder = new StubBuilder<StubInterface>(proxyFactory, StubInterface.class, ObjectProviderUtils.constant(new SimpleStub())); builder.train(new Trainer<Iterable<String>>() { @Override protected void train(Iterable<String> trainee) { when(trainee.iterator()).thenAnswer(new ObjectProvider<Iterator<String>>() { private static final long serialVersionUID = 1L; @Override public Iterator<String> getObject() { return Arrays.asList("foo", "bar", "baz").iterator(); } }); } }); builder.addProxyTypes(Cloneable.class, Marker.class); StubInterface stub = builder.build(); assertTrue(stub instanceof Iterable<?>); assertTrue(stub instanceof Cloneable); assertTrue(stub instanceof Marker); } 
//---------------------------------------------------------------------------------------------------------------------- // Inner Classes //---------------------------------------------------------------------------------------------------------------------- private static class SimpleStub implements StubInterface { @Override public String one(String value) { return value; } @Override public String three(String arg1, String arg2) { return arg1 + arg2; } @Override public String two(String value) { return StringUtils.repeat(value, 2); } @Override public byte[] byteArray() { return new byte[] { 1, 2, 3 }; } @Override public char[] charArray() { return new char[] { '1', '2', '3' }; } @Override public short[] shortArray() { return new short[] { 1, 2, 3 }; } @Override public int[] intArray() { return new int[] { 1, 2, 3 }; } @Override public long[] longArray() { return new long[] { 1, 2, 3 }; } @Override public float[] floatArray() { return new float[] { 1.0f, 2.0f, 3.0f }; } @Override public double[] doubleArray() { return new double[] { 1.0, 2.0, 3.0 }; } @Override public boolean[] booleanArray() { return new boolean[] { true, false, true }; } @Override public String[] stringArray() { return new String[] { "One", "Two", "Three" }; } @Override public String arrayParameter(String... strings) { return StringUtils.join(strings, ", "); } @Override public void voidMethod(String arg) { } @Override public StubInterface stub() { return null; } @Override public StubInterface[] stubs() { return new StubInterface[0]; } } public interface Marker { } }
/*
 * This file is part of Nucleus, licensed under the MIT License (MIT). See the LICENSE.txt file
 * at the root of this project for more details.
 */
package io.github.nucleuspowered.nucleus.internal.text;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.reflect.TypeToken;
import io.github.nucleuspowered.nucleus.Nucleus;
import io.github.nucleuspowered.nucleus.api.text.NucleusTextTemplate;
import io.github.nucleuspowered.nucleus.util.JsonConfigurateStringHelper;
import io.github.nucleuspowered.nucleus.util.Tuples;
import ninja.leaping.configurate.ConfigurationOptions;
import ninja.leaping.configurate.SimpleConfigurationNode;
import ninja.leaping.configurate.objectmapping.ObjectMappingException;
import ninja.leaping.configurate.objectmapping.serialize.TypeSerializer;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.TextRepresentable;
import org.spongepowered.api.text.TextTemplate;
import org.spongepowered.api.util.Tuple;
import org.spongepowered.api.util.annotation.NonnullByDefault;

import java.util.ArrayDeque;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.annotation.Nullable;

/**
 * Base implementation of {@link NucleusTextTemplate}: wraps a Sponge
 * {@link TextTemplate} parsed from a string representation, with optional
 * prefix/suffix texts and a per-template map of token resolvers. Concrete
 * subclasses ({@link Ampersand}, {@link Json}, {@link Empty}) supply the
 * {@link #parse(String)} strategy.
 */
@NonnullByDefault
public abstract class NucleusTextTemplateImpl implements NucleusTextTemplate {

    // Shared empty map used when a caller passes null variables to getForCommandSource.
    private static final Map<String, Object> emptyVariables = Maps.newHashMap();

    // Optional text prepended before the rendered template.
    @Nullable private final Text prefix;
    // Optional text appended after the rendered template.
    @Nullable private final Text suffix;
    // The raw string this template was built from.
    private final String representation;
    // The parsed Sponge template.
    private final TextTemplate textTemplate;
    // Token name (lower case) -> resolver, populated from parse().
    private final Map<String, Function<CommandSource, Text>> tokenMap = Maps.newHashMap();

    /**
     * Parses {@code representation} via the subclass {@link #parse(String)}.
     *
     * NOTE(review): this calls the overridable/abstract parse(...) from the
     * constructor, so subclass parse implementations run before subclass fields
     * are initialised — existing subclasses only use static state, but confirm
     * before adding instance state to a subclass's parse.
     */
    public NucleusTextTemplateImpl(String representation, @Nullable Text prefix, @Nullable Text suffix) {
        this.representation = representation;
        Tuple<TextTemplate, Map<String, Function<CommandSource, Text>>> t = parse(representation);
        this.textTemplate = t.getFirst();
        this.tokenMap.putAll(t.getSecond());
        this.prefix = prefix;
        this.suffix = suffix;
    }

    public NucleusTextTemplateImpl(String representation) {
        this(representation, null, null);
    }

    /** Always false here; only {@link Empty} reports true. */
    @Override
    public boolean isEmpty() {
        return false;
    }

    @Override
    public Optional<Text> getPrefix() {
        return Optional.ofNullable(this.prefix);
    }

    @Override
    public Optional<Text> getSuffix() {
        return Optional.ofNullable(this.suffix);
    }

    /** Returns the raw string this template was parsed from. */
    public String getRepresentation() {
        return this.representation;
    }

    @Override
    public TextTemplate getTextTemplate() {
        return this.textTemplate;
    }

    /**
     * Subclass hook: turns the string representation into a Sponge template plus
     * a map of token resolvers.
     */
    abstract Tuple<TextTemplate, Map<String, Function<CommandSource, Text>>> parse(String parser);

    /** True when the parsed template has at least one argument placeholder. */
    @Override
    public boolean containsTokens() {
        return !this.textTemplate.getArguments().isEmpty();
    }

    /**
     * Renders this template for {@code source}.
     *
     * Each template argument is resolved, in priority order, from: (1) this
     * template's own tokenMap, (2) the caller-supplied tokensArray, (3) the
     * global message token service. Unresolved arguments are simply omitted
     * (arguments are built as optional). The prefix's trailing colour/style is
     * carried over onto the rendered body.
     *
     * @param source the command source the template is rendered for
     * @param tokensArray optional extra token resolvers (keys are lower case)
     * @param variables optional variables forwarded to the token service
     */
    @Override
    @SuppressWarnings("SameParameterValue")
    public Text getForCommandSource(CommandSource source,
            @Nullable Map<String, Function<CommandSource, Optional<Text>>> tokensArray,
            @Nullable Map<String, Object> variables) {

        final Map<String, Object> variables2 = variables == null ? emptyVariables : variables;
        Map<String, TextTemplate.Arg> tokens = this.textTemplate.getArguments();
        Map<String, Text> finalArgs = Maps.newHashMap();

        tokens.forEach((k, v) -> {
            String key = k.toLowerCase();
            Text t;
            if (this.tokenMap.containsKey(key)) {
                t = this.tokenMap.get(key).apply(source);
            } else if (tokensArray != null && tokensArray.containsKey(key)) {
                t = tokensArray.get(key).apply(source).orElse(null);
            } else {
                t = Nucleus.getNucleus().getMessageTokenService().parseToken(key, source, variables2).orElse(null);
            }

            if (t != null) {
                finalArgs.put(k, t);
            }
        });

        Text.Builder builder = Text.builder();
        TextParsingUtils.StyleTuple st = null;
        if (this.prefix != null) {
            builder.append(this.prefix);
            // Remember the prefix's trailing colour/style so the body continues it.
            st = TextParsingUtils.getLastColourAndStyle(this.prefix, null);
        }

        Text finalText = this.textTemplate.apply(finalArgs).build();

        // Don't append text if there is no text to append!
        if (!finalText.isEmpty()) {
            if (st == null) {
                builder.append(finalText);
            } else {
                builder.append(Text.builder().color(st.colour).style(st.style).append(finalText).build());
            }
        }
        if (this.suffix != null) {
            builder.append(this.suffix);
        }

        return builder.build();
    }

    public Text toText() {
        return this.textTemplate.toText();
    }

    /**
     * Creates a {@link TextTemplate} from an Ampersand encoded string.
     */
    static class Ampersand extends NucleusTextTemplateImpl {

        // Matches "{{name}}" placeholders (excluding "{{subject...}}"), optionally
        // preceded/followed by the halves of a markdown-style "[label](/cmd ...)" link
        // so a placeholder inside a link can be stitched back together below.
        private static final Pattern pattern =
                Pattern.compile("(?<url>\\[[^\\[]+]\\(/[^)]*?)?(?<match>\\{\\{(?!subject)(?<name>[^\\s{}]+)}})"
                    + "(?<urltwo>[^(]*?\\))?");

        Ampersand(String representation) {
            super(representation);
        }

        Ampersand(String representation, @Nullable Text prefix, @Nullable Text suffix) {
            super(representation, prefix, suffix);
        }

        /**
         * Splits the input on token placeholders and interleaves literal fragments
         * (parsed for colours and links) with optional template arguments.
         */
        @Override
        Tuple<TextTemplate, Map<String, Function<CommandSource, Text>>> parse(String input) {
            // regex!
            final String string = NucleusTextTemplateFactory.INSTANCE.performReplacements(input);
            Matcher mat = pattern.matcher(string);
            // Token names, in order of appearance.
            List<String> map = Lists.newArrayList();
            // Literal fragments between tokens.
            List<String> s = Lists.newArrayList(pattern.split(string));
            int index = 0;
            while (mat.find()) {
                if (mat.group("url") != null && mat.group("urltwo") != null) {
                    // Token fully inside a link: glue the whole match back into the fragment.
                    String toUpdate = s.get(index);
                    toUpdate = toUpdate + mat.group();
                    // NOTE(review): `s.size() < index + 1` guards a branch that then reads
                    // s.get(index + 1) — the condition looks inverted (it can only be true
                    // when get() would throw). Confirm intent before touching; left as-is.
                    if (s.size() < index + 1) {
                        toUpdate += s.get(index + 1);
                        s.remove(index + 1);
                        s.set(index, toUpdate);
                    }
                } else {
                    // Re-attach any dangling link halves to the neighbouring fragments.
                    String out = mat.group("url");
                    if (out != null) {
                        if (s.isEmpty()) {
                            s.add(out);
                        } else {
                            s.set(index, s.get(index) + out);
                        }
                    }

                    index++;
                    out = mat.group("urltwo");
                    if (out != null) {
                        if (s.size() <= index) {
                            s.add(out);
                        } else {
                            s.set(index, out + s.get(index));
                        }
                    }

                    map.add(mat.group("name").toLowerCase());
                }
            }

            // Generic hell.
            ArrayDeque<TextRepresentable> texts = new ArrayDeque<>();
            Map<String, Function<CommandSource, Text>> tokens = Maps.newHashMap();

            // TextParsingUtils URL parsing needed here.
            TextParsingUtils cu = Nucleus.getNucleus().getTextParsingUtils();

            // This condition only occurs if you _just_ use the token. Otherwise, you get a part either side - so it's either 0 or 2.
            if (s.size() > 0) {
                cu.createTextTemplateFragmentWithLinks(s.get(0)).mapIfPresent(texts::addAll, tokens::putAll);
            }

            // Interleave: [fragment 0] arg0 [fragment 1] arg1 [fragment 2] ...
            for (int i = 0; i < map.size(); i++) {
                TextTemplate.Arg.Builder arg = TextTemplate.arg(map.get(i)).optional();

                // Carry the last fragment's colour/style onto the argument.
                TextRepresentable r = texts.peekLast();
                TextParsingUtils.StyleTuple style = null;
                if (r != null) {
                    // Create the argument
                    style = TextParsingUtils.getLastColourAndStyle(r, null);
                    style.applyTo(st -> arg.color(st.colour).style(st.style));
                }

                texts.add(arg.build());
                if (s.size() > i + 1) {
                    Tuples.NullableTuple<List<TextRepresentable>, Map<String, Function<CommandSource, Text>>> tt =
                            cu.createTextTemplateFragmentWithLinks(s.get(i + 1));
                    // NOTE(review): push() puts the style carrier at the HEAD of the deque,
                    // not before the fragment that is about to be appended — confirm this
                    // ordering is intended.
                    if (style != null && tt.getFirst().isPresent()) {
                        texts.push(style.getTextOf());
                    }
                    cu.createTextTemplateFragmentWithLinks(s.get(i + 1)).mapIfPresent(texts::addAll, tokens::putAll);
                }
            }

            return Tuple.of(TextTemplate.of(texts.toArray(new Object[0])), tokens);
        }
    }

    /** Creates a {@link TextTemplate} by deserialising a JSON (Configurate) representation. */
    static class Json extends NucleusTextTemplateImpl {

        // Lazily-initialised Configurate serialiser for TextTemplate.
        @Nullable private static TypeSerializer<TextTemplate> textTemplateTypeSerializer = null;

        private static TypeSerializer<TextTemplate> getSerialiser() {
            if (textTemplateTypeSerializer == null) {
                textTemplateTypeSerializer = ConfigurationOptions.defaults().getSerializers().get(TypeToken.of(TextTemplate.class));
            }

            return textTemplateTypeSerializer;
        }

        Json(String representation, @Nullable Text prefix, @Nullable Text suffix) {
            super(representation, prefix, suffix);
        }

        Json(String representation) {
            super(representation);
        }

        Json(TextTemplate textTemplate) {
            super(JsonConfigurateStringHelper.getJsonStringFrom(textTemplate));
        }

        /**
         * Deserialises the JSON string into a template; falls back to treating the
         * raw string as a plain value if it is not valid JSON. No token resolvers.
         */
        @Override
        Tuple<TextTemplate, Map<String, Function<CommandSource, Text>>> parse(String parser) {
            try {
                return Tuple.of(
                    getSerialiser().deserialize(
                        TypeToken.of(TextTemplate.class),
                        JsonConfigurateStringHelper.getNodeFromJson(parser)
                            .orElseGet(() -> SimpleConfigurationNode.root().setValue(parser))),
                    Maps.newHashMap());
            } catch (ObjectMappingException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /** Singleton empty template ({@link TextTemplate#EMPTY}, no tokens). */
    public static class Empty extends NucleusTextTemplateImpl {

        public static final NucleusTextTemplateImpl INSTANCE = new Empty();

        private Empty() {
            super("");
        }

        @Override
        Tuple<TextTemplate, Map<String, Function<CommandSource, Text>>> parse(String parser) {
            return Tuple.of(TextTemplate.EMPTY, Maps.newHashMap());
        }

        @Override
        public boolean isEmpty() {
            return true;
        }
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 * 
 * http://aws.amazon.com/apache2.0
 * 
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iot1clickdevices.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the ListDevices operation: an optional device-type filter
 * plus standard pagination parameters.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/devices-2018-05-14/ListDevices" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListDevicesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The type of the device, such as "button". */
    private String deviceType;

    /** The maximum number of results per request; the service defaults to 100 when unset. */
    private Integer maxResults;

    /** The pagination token for retrieving the next set of results. */
    private String nextToken;

    /**
     * Sets the type of the device, such as "button".
     *
     * @param deviceType
     *        The type of the device, such as "button".
     */
    public void setDeviceType(String deviceType) {
        this.deviceType = deviceType;
    }

    /**
     * @return The type of the device, such as "button".
     */
    public String getDeviceType() {
        return this.deviceType;
    }

    /**
     * Fluent variant of {@link #setDeviceType(String)}.
     *
     * @param deviceType
     *        The type of the device, such as "button".
     * @return this request, for call chaining.
     */
    public ListDevicesRequest withDeviceType(String deviceType) {
        setDeviceType(deviceType);
        return this;
    }

    /**
     * Sets the maximum number of results to return per request. If not set, a default value of 100 is used.
     *
     * @param maxResults
     *        The maximum number of results to return per request.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * @return The maximum number of results to return per request, or null when the service default (100) applies.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * Fluent variant of {@link #setMaxResults(Integer)}.
     *
     * @param maxResults
     *        The maximum number of results to return per request.
     * @return this request, for call chaining.
     */
    public ListDevicesRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * Sets the token to retrieve the next set of results.
     *
     * @param nextToken
     *        The token to retrieve the next set of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return The token to retrieve the next set of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        The token to retrieve the next set of results.
     * @return this request, for call chaining.
     */
    public ListDevicesRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object, listing only the fields that are set.
     * Useful for testing and debugging; sensitive data would be redacted with a placeholder.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getDeviceType() != null) {
            buf.append("DeviceType: ").append(getDeviceType()).append(",");
        }
        if (getMaxResults() != null) {
            buf.append("MaxResults: ").append(getMaxResults()).append(",");
        }
        if (getNextToken() != null) {
            buf.append("NextToken: ").append(getNextToken());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Also rejects null.
        if (!(obj instanceof ListDevicesRequest)) {
            return false;
        }
        ListDevicesRequest other = (ListDevicesRequest) obj;
        return fieldEquals(getDeviceType(), other.getDeviceType())
                && fieldEquals(getMaxResults(), other.getMaxResults())
                && fieldEquals(getNextToken(), other.getNextToken());
    }

    /** Null-safe equality check for a single field pair. */
    private static boolean fieldEquals(Object mine, Object theirs) {
        return mine == null ? theirs == null : mine.equals(theirs);
    }

    @Override
    public int hashCode() {
        // 31-based rolling hash over the three fields, null hashing to 0.
        int result = 1;
        result = 31 * result + ((getDeviceType() == null) ? 0 : getDeviceType().hashCode());
        result = 31 * result + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
        result = 31 * result + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return result;
    }

    @Override
    public ListDevicesRequest clone() {
        return (ListDevicesRequest) super.clone();
    }
}
/*******************************************************************************
 * Copyright 2012 University of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * This code was developed by the Information Integration Group as part
 * of the Karma project at the Information Sciences Institute of the
 * University of Southern California.  For more information, publications,
 * and related projects, please see: http://www.isi.edu/integration
 ******************************************************************************/
package edu.isi.karma.kr2rml.mapping;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.json.JSONArray;
import org.json.JSONException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.NodeIterator;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;

import edu.isi.karma.kr2rml.KR2RMLVersion;
import edu.isi.karma.kr2rml.ObjectMap;
import edu.isi.karma.kr2rml.Predicate;
import edu.isi.karma.kr2rml.PredicateObjectMap;
import edu.isi.karma.kr2rml.Prefix;
import edu.isi.karma.kr2rml.RefObjectMap;
import edu.isi.karma.kr2rml.SubjectMap;
import edu.isi.karma.kr2rml.formatter.KR2RMLColumnNameFormatter;
import edu.isi.karma.kr2rml.formatter.KR2RMLColumnNameFormatterFactory;
import edu.isi.karma.kr2rml.planning.TriplesMap;
import edu.isi.karma.kr2rml.planning.TriplesMapLink;
import edu.isi.karma.kr2rml.template.ColumnTemplateTerm;
import edu.isi.karma.kr2rml.template.StringTemplateTerm;
import edu.isi.karma.kr2rml.template.TemplateTerm;
import edu.isi.karma.kr2rml.template.TemplateTermSet;
import edu.isi.karma.kr2rml.template.TemplateTermSetBuilder;
import edu.isi.karma.modeling.Uris;
import edu.isi.karma.rep.metadata.WorksheetProperties.SourceTypes;
import edu.isi.karma.webserver.KarmaException;

/**
 * Parses a KR2RML mapping (an R2RML dialect used by Karma) out of a Jena RDF
 * model into a {@link KR2RMLMapping}: triples maps, subject maps,
 * predicate-object maps, worksheet history, and auxiliary bookkeeping such as
 * blank-node column coverage.
 *
 * <p>The Jena model is loaded lazily from {@code id.getLocation()} (or taken
 * from the constructor, if supplied) and the parse result is cached, so both
 * {@link #getModel()} and {@link #parse()} are cheap after the first call.
 * Lazy initialization uses double-checked locking, which requires the
 * {@code model} and {@code mapping} fields to be volatile.</p>
 */
public class WorksheetR2RMLJenaModelParser {

    // volatile: both fields are initialized with double-checked locking in
    // loadModel()/parse(); without volatile the unsynchronized first read is
    // not safe under the Java memory model.
    private volatile Model model;
    private R2RMLMappingIdentifier id;
    private volatile KR2RMLMapping mapping;
    private static Logger logger = LoggerFactory.getLogger(WorksheetR2RMLJenaModelParser.class);

    /**
     * Creates a parser that will load the model from {@code id.getLocation()}
     * on first use.
     */
    public WorksheetR2RMLJenaModelParser(R2RMLMappingIdentifier id) throws JSONException, KarmaException {
        this.id = id;
    }

    /**
     * Creates a parser over an already-loaded Jena model.
     *
     * <p>Fix: the supplied {@code model} was previously ignored, which forced
     * a redundant re-load from {@code id.getLocation()} and silently dropped
     * any in-memory-only model the caller passed in.</p>
     */
    public WorksheetR2RMLJenaModelParser(Model model, R2RMLMappingIdentifier id) throws JSONException, KarmaException {
        this.model = model;
        this.id = id;
    }

    /**
     * Returns the Jena model, loading it from the mapping location on first call.
     */
    public synchronized Model getModel() throws IOException {
        loadModel();
        return model;
    }

    /** Lazily loads the model exactly once (double-checked locking). */
    private void loadModel() throws IOException {
        if (model != null) {
            return;
        }
        synchronized (this) {
            if (model == null) {
                this.model = loadSourceModelIntoJenaModel(id.getLocation());
            }
        }
    }

    /**
     * Parses the model into a {@link KR2RMLMapping}. The result is computed
     * once and cached; subsequent calls return the cached mapping.
     *
     * @throws KarmaException if no mapping resource matches the source name.
     */
    public KR2RMLMapping parse() throws IOException, KarmaException, JSONException {
        loadModel();
        if (null != mapping) {
            return mapping;
        }
        synchronized (this) {
            if (null != mapping) {
                return mapping;
            }
            // Capture the main mapping resource that corresponds to the source name
            Resource mappingResource = getMappingResourceFromSourceName();
            if (mappingResource == null) {
                throw new KarmaException("Resource not found in model for the source: " + id.getName());
            }

            Property modelVersionNameProp = model.getProperty(Uris.KM_MODEL_VERSION_URI);
            Statement s = model.getProperty(mappingResource, modelVersionNameProp);
            KR2RMLVersion version = null;
            // The version statement may be absent (s == null) or malformed;
            // either way fall back to "unknown" rather than failing the parse.
            if (s == null) {
                version = KR2RMLVersion.unknown;
            } else {
                try {
                    version = new KR2RMLVersion(s.getString());
                } catch (Exception e) {
                    version = KR2RMLVersion.unknown;
                }
            }
            KR2RMLMapping kr2rmlMapping = new KR2RMLMapping(id, version);

            // Carry over all namespace prefixes declared in the model.
            Map<String, String> prefixes = model.getNsPrefixMap();
            for (Entry<String, String> prefix : prefixes.entrySet()) {
                Prefix p = new Prefix(prefix.getKey(), prefix.getValue());
                kr2rmlMapping.addPrefix(p);
            }

            SourceTypes sourceType = getSourceType(mappingResource);
            kr2rmlMapping.setColumnNameFormatter(KR2RMLColumnNameFormatterFactory.getFormatter(sourceType));

            // Load any transformations on the worksheet if required
            loadWorksheetHistory(mappingResource, kr2rmlMapping);

            // Generate TriplesMap for each InternalNode in the tree
            List<Resource> subjectResources = createSubjectMaps(mappingResource, kr2rmlMapping);

            // Identify the object property links
            createPredicateObjectMaps(mappingResource, kr2rmlMapping);

            // Calculate the nodes covered by each InternalNode
            calculateColumnNodesCoveredByBlankNodes(kr2rmlMapping, subjectResources);

            return mapping = kr2rmlMapping;
        }
    }

    /**
     * Reads the km:sourceType literal from the mapping resource; defaults to
     * CSV when the statement is missing or not a literal.
     */
    private SourceTypes getSourceType(Resource mappingResource) {
        Property sourceNameProp = model.getProperty(Uris.KM_SOURCE_TYPE_URI);
        Statement s = model.getProperty(mappingResource, sourceNameProp);
        String sourceType = null;
        if (s != null) {
            RDFNode node = s.getObject();
            if (node != null && node.isLiteral()) {
                sourceType = node.asLiteral().getString();
                // NOTE: valueOf throws IllegalArgumentException for an
                // unrecognized literal; that propagation is preserved.
                return SourceTypes.valueOf(sourceType);
            }
        }
        return SourceTypes.CSV;
    }

    /**
     * Loads the Turtle document at {@code modelURL} into a fresh Jena model.
     *
     * <p>Fix: the opened stream was previously never closed, leaking a file
     * handle/connection per load.</p>
     */
    public static Model loadSourceModelIntoJenaModel(URL modelURL) throws IOException {
        // Create an empty Model
        Model model = ModelFactory.createDefaultModel();
        InputStream s = modelURL.openStream();
        try {
            model.read(s, null, "TURTLE");
        } finally {
            s.close();
        }
        return model;
    }

    /**
     * Finds the mapping resource whose km:sourceName matches this mapping's
     * name. If no exact match exists but the model contains exactly one
     * mapping with a different source name, that mapping is rewritten in
     * place to use this source name (same-schema reuse) and returned.
     *
     * @return the matching (possibly renamed) resource, or null.
     * @throws KarmaException when the source name is ambiguous.
     */
    private Resource getMappingResourceFromSourceName() throws KarmaException {
        Property sourceNameProp = model.getProperty(Uris.KM_SOURCE_NAME_URI);
        RDFNode node = model.createLiteral(id.getName());
        ResIterator res = model.listResourcesWithProperty(sourceNameProp, node);
        List<Resource> resList = res.toList();
        if (resList.size() > 1) {
            throw new KarmaException("More than one resource exists with source name: " + id.getName());
        } else if (resList.size() == 1) {
            return resList.get(0);
        } else {
            // If we didn't find the sourceName in the model, maybe it is a different source with the
            // same schema.
            // Maybe we need to substitute the sourceName in the model with this one
            NodeIterator sourceObjectIter = model.listObjectsOfProperty(sourceNameProp);
            List<RDFNode> sourceObjects = sourceObjectIter.toList();

            if (sourceObjects.size() > 1) {
                throw new KarmaException("More than one resource exists with source name: " + id.getName());
            } else if (sourceObjects.size() == 1) {
                RDFNode prevSourceObject = sourceObjects.get(0);

                // We got the previous source object, now get the Subject Node for this
                ResIterator prevSourceSubjectsIter = model.listResourcesWithProperty(
                        sourceNameProp, prevSourceObject);
                List<Resource> prevSourceSubjects = prevSourceSubjectsIter.toList();

                if (prevSourceSubjects.size() == 1) {
                    Resource subject = prevSourceSubjects.get(0);
                    // Rename: swap the old source-name literal for ours.
                    model.remove(subject, sourceNameProp, prevSourceObject);
                    model.add(subject, sourceNameProp, node);
                    return subject;
                } else if (prevSourceSubjects.size() > 1) {
                    throw new KarmaException("More than one resource exists with model source name: "
                            + prevSourceObject.toString());
                }
            }
            return null;
        }
    }

    /** Stores the worksheet command history (if any) on the mapping. */
    private void loadWorksheetHistory(Resource mappingResource, KR2RMLMapping kr2rmlMapping)
            throws JSONException {
        JSONArray normalizedCommandsJSON = getWorksheetHistory(mappingResource);
        kr2rmlMapping.setWorksheetHistory(normalizedCommandsJSON);
    }

    /**
     * Returns the first km:hasWorksheetHistory value as a JSON array, or an
     * empty array when none exists. Only the first history statement is used.
     */
    private JSONArray getWorksheetHistory(Resource mappingResource) throws JSONException {
        Property hasTransformation = model.getProperty(Uris.KM_HAS_WORKSHEET_HISTORY_URI);
        NodeIterator transItr = model.listObjectsOfProperty(mappingResource, hasTransformation);
        while (transItr.hasNext()) {
            String commands = transItr.next().toString();
            return new JSONArray(commands);
        }
        return new JSONArray();
    }

    /** Adds predicate-object maps to every triples map of the mapping resource. */
    private void createPredicateObjectMaps(Resource mappingResource, KR2RMLMapping kr2rmlMapping)
            throws JSONException {
        Property hasTrMapUri = model.getProperty(Uris.KM_HAS_TRIPLES_MAP_URI);
        // Get all the triple maps
        NodeIterator trMapsResItr = model.listObjectsOfProperty(mappingResource, hasTrMapUri);
        while (trMapsResItr.hasNext()) {
            // Add the predicate object maps
            addPredicateObjectMapsForTripleMap(trMapsResItr.next().asResource(), kr2rmlMapping);
        }
    }

    /**
     * Creates a {@link SubjectMap} and a {@link TriplesMap} for each triples
     * map resource under the mapping resource.
     *
     * @return the subject-map blank-node resources, used later for
     *         blank-node column-coverage computation.
     */
    private List<Resource> createSubjectMaps(Resource mappingResource, KR2RMLMapping kr2rmlMapping)
            throws JSONException {
        List<Resource> subjectMapResources = new ArrayList<Resource>();
        Property hasTrMapUri = model.getProperty(Uris.KM_HAS_TRIPLES_MAP_URI);
        // Get all the triple maps
        NodeIterator trMapsResItr = model.listObjectsOfProperty(mappingResource, hasTrMapUri);
        while (trMapsResItr.hasNext()) {
            Resource trMapRes = trMapsResItr.next().asResource();
            SubjectMap subjMap = addSubjectMapForTripleMap(trMapRes, kr2rmlMapping, subjectMapResources);

            // Add the Triples map
            TriplesMap trMap = new TriplesMap(trMapRes.getURI(), subjMap);
            kr2rmlMapping.getTriplesMapIndex().put(trMapRes.getURI(), trMap);
            kr2rmlMapping.addTriplesMap(trMap);
            kr2rmlMapping.getAuxInfo().getTriplesMapGraph().addTriplesMap(trMap);
        }
        return subjectMapResources;
    }

    /**
     * Parses all rr:predicateObjectMap blank nodes of one triples map:
     * predicates (URI or template), object maps (ref-object, column, template,
     * or constant), and inter-triples-map links. Also appends a synthetic
     * class-instance-link POM when the subject template covers a column.
     */
    private void addPredicateObjectMapsForTripleMap(Resource trMapRes, KR2RMLMapping kr2rmlMapping)
            throws JSONException {
        int predicateIdCounter = 0;
        int objectMapCounter = 0;
        Property predObjMapProp = model.getProperty(Uris.RR_PRED_OBJ_MAP_URI);
        Property predProp = model.getProperty(Uris.RR_PREDICATE_URI);
        Property objectMapProp = model.getProperty(Uris.RR_OBJECTMAP_URI);
        Property columnProp = model.getProperty(Uris.RR_COLUMN_URI);
        Property rdfLiteralTypeProp = model.getProperty(Uris.RR_DATATYPE_URI);
        Resource rfObjClassUri = model.getResource(Uris.RR_REF_OBJECT_MAP_CLASS_URI);
        Property parentTriplesMapProp = model.getProperty(Uris.RR_PARENT_TRIPLE_MAP_URI);
        Property rdfTypeProp = model.getProperty(Uris.RDF_TYPE_URI);
        Property templateProp = model.getProperty(Uris.RR_TEMPLATE_URI);
        Property constantProp = model.getProperty(Uris.RR_CONSTANT);
        KR2RMLColumnNameFormatter formatter = kr2rmlMapping.getColumnNameFormatter();

        TriplesMap trMap = kr2rmlMapping.getTriplesMapIndex().get(trMapRes.getURI());
        if (trMap == null) {
            logger.error("No Triples Map found for resource: " + trMapRes.getURI());
            return;
        }

        NodeIterator predObjItr = model.listObjectsOfProperty(trMapRes, predObjMapProp);
        while (predObjItr.hasNext()) {
            Resource pomBlankNode = predObjItr.next().asResource();

            // Create the PredicateObjectMap object for current POM
            PredicateObjectMap pom = new PredicateObjectMap(pomBlankNode.getURI(), trMap);

            // Get the predicate for the POM
            Predicate pred = null;
            NodeIterator pomPredItr = model.listObjectsOfProperty(pomBlankNode, predProp);
            while (pomPredItr.hasNext()) {
                RDFNode pomPredNode = pomPredItr.next();
                pred = new Predicate(pomPredNode.toString() + "-" + predicateIdCounter++);

                // Check if the predicate value is a URI or a literal (such as column name)
                if (pomPredNode instanceof Resource) {
                    pred.getTemplate().addTemplateTermToSet(
                            new StringTemplateTerm(((Resource) pomPredNode).getURI(), true));
                } else {
                    pred.setTemplate(TemplateTermSetBuilder.constructTemplateTermSetFromR2rmlTemplateString(
                            pomPredNode.toString(), formatter));
                }
            }
            pom.setPredicate(pred);

            // Get the object for the POM
            ObjectMap objMap = null;
            NodeIterator pomObjItr = model.listObjectsOfProperty(pomBlankNode, objectMapProp);
            while (pomObjItr.hasNext()) {
                Resource objNode = pomObjItr.next().asResource();

                /** Check if objBlankNode is a RefObjectMap or a normal object map with column **/
                if (model.contains(objNode, rdfTypeProp, rfObjClassUri)) {
                    NodeIterator parentTripleMapItr = model.listObjectsOfProperty(objNode, parentTriplesMapProp);
                    while (parentTripleMapItr.hasNext()) {
                        Resource parentTripleRes = parentTripleMapItr.next().asResource();
                        TriplesMap parentTM = kr2rmlMapping.getTriplesMapIndex().get(parentTripleRes.getURI());

                        // Create a RefObjectMap
                        RefObjectMap rfMap = new RefObjectMap(objNode.getURI(), parentTM);
                        objMap = new ObjectMap(getNewObjectMapId(objectMapCounter++), rfMap);

                        // Add the link between triple maps in the auxInfo
                        TriplesMapLink link = new TriplesMapLink(trMap, parentTM, pom);
                        kr2rmlMapping.getAuxInfo().getTriplesMapGraph().addLink(link);
                    }
                } else {
                    NodeIterator objMapColStmts = model.listObjectsOfProperty(objNode, columnProp);

                    // RDF Literal Type
                    Statement objMapRdfLiteralTypeStmt = model.getProperty(objNode, rdfLiteralTypeProp);
                    TemplateTermSet rdfLiteralTypeTermSet = null;
                    if (objMapRdfLiteralTypeStmt != null && objMapRdfLiteralTypeStmt.getObject().isLiteral()) {
                        StringTemplateTerm rdfLiteralTypeTerm =
                                new StringTemplateTerm(objMapRdfLiteralTypeStmt.getObject().toString(), true);
                        rdfLiteralTypeTermSet = new TemplateTermSet();
                        rdfLiteralTypeTermSet.addTemplateTermToSet(rdfLiteralTypeTerm);
                    }

                    // Column-based object map (rr:column)
                    while (objMapColStmts.hasNext()) {
                        RDFNode colNode = objMapColStmts.next();
                        objMap = new ObjectMap(getNewObjectMapId(objectMapCounter++),
                                TemplateTermSetBuilder.constructTemplateTermSetFromR2rmlColumnString(
                                        colNode.toString(), formatter), rdfLiteralTypeTermSet);
                    }

                    // Otherwise fall back to a template or constant object map.
                    if (objMap == null) {
                        NodeIterator templateItr = model.listObjectsOfProperty(objNode, templateProp);
                        // try a literal/constant node
                        if (templateItr == null || !templateItr.hasNext()) {
                            templateItr = model.listObjectsOfProperty(objNode, constantProp);
                        }
                        TemplateTermSet objTemplTermSet = null;
                        while (templateItr.hasNext()) {
                            RDFNode templNode = templateItr.next();
                            String template = templNode.toString();
                            boolean isUri = !templNode.isLiteral();
                            objTemplTermSet = TemplateTermSetBuilder.constructTemplateTermSetFromR2rmlTemplateString(
                                    template, isUri, kr2rmlMapping.getColumnNameFormatter());
                        }
                        objMap = new ObjectMap(getNewObjectMapId(objectMapCounter++),
                                objTemplTermSet, rdfLiteralTypeTermSet);
                    }

                    // Check if anything needs to be added to the columnNameToPredicateObjectMap Map
                    if (objMap != null)
                        addColumnNameToPredObjectMapLink(objMap, pom, kr2rmlMapping);
                }
            }
            pom.setObject(objMap);
            trMap.addPredicateObjectMap(pom);
        }

        // Try to add template to pom: when the subject template references a
        // column, add a synthetic class-instance-link POM re-using it.
        TemplateTermSet subjTemplTermSet = trMap.getSubject().getTemplate();
        if (subjTemplTermSet != null) {
            List<TemplateTerm> terms = subjTemplTermSet.getAllTerms();
            if (isValidTemplate(terms)) {
                PredicateObjectMap pom = new PredicateObjectMap(PredicateObjectMap.getNewId(), trMap);
                Predicate pred = new Predicate(Uris.CLASS_INSTANCE_LINK_URI + "-" + predicateIdCounter++);
                pred.getTemplate().addTemplateTermToSet(
                        new StringTemplateTerm(Uris.CLASS_INSTANCE_LINK_URI, true));
                pom.setPredicate(pred);
                StringTemplateTerm rdfLiteralTypeTerm = new StringTemplateTerm("", true);
                TemplateTermSet rdfLiteralTypeTermSet = new TemplateTermSet();
                rdfLiteralTypeTermSet.addTemplateTermToSet(rdfLiteralTypeTerm);
                ObjectMap objMap = new ObjectMap(getNewObjectMapId(objectMapCounter++),
                        subjTemplTermSet, rdfLiteralTypeTermSet);
                pom.setObject(objMap);
                trMap.addPredicateObjectMap(pom);
                addColumnNameToPredObjectMapLink(objMap, pom, kr2rmlMapping);
            }
        }
    }

    /** True when the template contains at least one column term. */
    private boolean isValidTemplate(List<TemplateTerm> terms) {
        if (terms == null || terms.size() == 0) {
            return false;
        }
        for (TemplateTerm term : terms) {
            if (term instanceof ColumnTemplateTerm) {
                return true;
            }
        }
        return false;
    }

    /**
     * Registers, for every column term in the object map's template, a link
     * from that (unformatted) column name to the POM in the mapping's aux info.
     */
    private void addColumnNameToPredObjectMapLink(ObjectMap objMap, PredicateObjectMap pom,
            KR2RMLMapping kr2rmlMapping) {
        TemplateTermSet objTermSet = objMap.getTemplate();
        if (objTermSet == null) {
            logger.error("No matching object term set");
            return;
        }
        for (TemplateTerm term : objTermSet.getAllTerms()) {
            if (term instanceof ColumnTemplateTerm) {
                String columnName = term.getTemplateTermValue();
                String columnNameWithoutFormatting =
                        kr2rmlMapping.getColumnNameFormatter().getColumnNameWithoutFormatting(columnName);
                List<PredicateObjectMap> existingPomList = kr2rmlMapping.getAuxInfo()
                        .getColumnNameToPredObjLinks().get(columnNameWithoutFormatting);
                if (existingPomList == null) {
                    existingPomList = new ArrayList<PredicateObjectMap>();
                }
                existingPomList.add(pom);
                kr2rmlMapping.getAuxInfo().getColumnNameToPredObjLinks()
                        .put(columnNameWithoutFormatting, existingPomList);
            }
        }
    }

    /** Generates a sequential object-map identifier ("ObjectMap" + counter). */
    private String getNewObjectMapId(int objectMapCounter) {
        return "ObjectMap" + objectMapCounter;
    }

    /**
     * Parses the rr:subjectMap of one triples map: subject template, rdf
     * types (plain URIs, literals, or anonymous template nodes), and the
     * Steiner-tree-root marker.
     *
     * @return the last SubjectMap created (one per rr:subjectMap statement).
     */
    private SubjectMap addSubjectMapForTripleMap(Resource trMapRes, KR2RMLMapping kr2rmlMapping,
            List<Resource> subjectMapResources) throws JSONException {
        SubjectMap subjMap = null;
        Property subjMapProp = model.getProperty(Uris.RR_SUBJECTMAP_URI);
        Property templateProp = model.getProperty(Uris.RR_TEMPLATE_URI);
        Property rdfTypeProp = model.getProperty(Uris.RDF_TYPE_URI);
        Property rrClassProp = model.getProperty(Uris.RR_CLASS_URI);
        Resource steinerTreeRootNodeRes = model.getResource(Uris.KM_STEINER_TREE_ROOT_NODE);

        NodeIterator subjMapsItr = model.listObjectsOfProperty(trMapRes, subjMapProp);
        while (subjMapsItr.hasNext()) {
            Resource subjMapBlankRes = subjMapsItr.next().asResource();
            subjectMapResources.add(subjMapBlankRes);

            String subjMapId = subjMapBlankRes.getId().getLabelString();
            subjMap = new SubjectMap(subjMapId);
            kr2rmlMapping.getSubjectMapIndex().put(subjMapId, subjMap);

            // Get the subject template
            NodeIterator templateItr = model.listObjectsOfProperty(subjMapBlankRes, templateProp);
            TemplateTermSet subjTemplTermSet = null;
            while (templateItr.hasNext()) {
                RDFNode templNode = templateItr.next();
                String template = templNode.toString();
                subjTemplTermSet = TemplateTermSetBuilder.constructTemplateTermSetFromR2rmlTemplateString(
                        template, kr2rmlMapping.getColumnNameFormatter());
                List<String> columnsCovered = new LinkedList<String>();
                for (TemplateTerm term : subjTemplTermSet.getAllColumnNameTermElements()) {
                    columnsCovered.add(term.getTemplateTermValue());
                }
                kr2rmlMapping.getAuxInfo().getSubjectMapIdToTemplateAnchor().put(subjMap.getId(),
                        KR2RMLMappingAuxillaryInformation.findSubjectMapTemplateAnchor(columnsCovered));
            }
            subjMap.setTemplate(subjTemplTermSet);

            // Get the subject type
            NodeIterator rdfTypesItr = model.listObjectsOfProperty(subjMapBlankRes, rrClassProp);
            while (rdfTypesItr.hasNext()) {
                RDFNode typeNode = rdfTypesItr.next();
                // Anonymous type node: its rr:template holds the type.
                if (typeNode.isAnon()) {
                    NodeIterator typeTemplItr = model.listObjectsOfProperty(typeNode.asResource(), templateProp);
                    while (typeTemplItr.hasNext()) {
                        RDFNode templNode = typeTemplItr.next();
                        String template = templNode.toString();
                        TemplateTermSet typeTermSet = TemplateTermSetBuilder
                                .constructTemplateTermSetFromR2rmlTemplateString(template);
                        subjMap.addRdfsType(typeTermSet);
                    }
                    continue;
                }
                if (typeNode instanceof Resource) {
                    // Skip the steiner tree root type
                    if (((Resource) typeNode).getURI().equals(Uris.KM_STEINER_TREE_ROOT_NODE))
                        continue;
                    StringTemplateTerm uriTerm = new StringTemplateTerm(((Resource) typeNode).getURI(), true);
                    TemplateTermSet typeTermSet = new TemplateTermSet();
                    typeTermSet.addTemplateTermToSet(uriTerm);
                    subjMap.addRdfsType(typeTermSet);
                } else {
                    TemplateTermSet typeTermSet = TemplateTermSetBuilder
                            .constructTemplateTermSetFromR2rmlTemplateString(typeNode.toString());
                    subjMap.addRdfsType(typeTermSet);
                }
            }

            // Check if it is as the Steiner tree root node
            if (model.contains(subjMapBlankRes, rdfTypeProp, steinerTreeRootNodeRes)) {
                subjMap.setAsSteinerTreeRootNode(true);
            }
        }
        return subjMap;
    }

    /**
     * For each blank-node subject map, records which columns its POMs cover
     * and its URI prefix in the mapping's aux info.
     */
    private void calculateColumnNodesCoveredByBlankNodes(KR2RMLMapping kr2rmlMapping,
            List<Resource> subjectMapResources) throws JSONException, KarmaException {
        Property termTypeProp = model.getProperty(Uris.RR_TERM_TYPE_URI);
        Resource blankNodeRes = model.getResource(Uris.RR_BLANK_NODE_URI);
        Property kmBnodePrefixProp = model.getProperty(Uris.KM_BLANK_NODE_PREFIX_URI);
        ResIterator blankNodeSubjMapItr = model.listResourcesWithProperty(termTypeProp, blankNodeRes);

        for (Resource subjMapRes : subjectMapResources) {
            if (model.contains(subjMapRes, termTypeProp, blankNodeRes)) {
                List<String> columnsCovered = new ArrayList<String>();
                Resource blankNodeSubjRes = blankNodeSubjMapItr.next();
                SubjectMap subjMap = kr2rmlMapping.getSubjectMapIndex()
                        .get(blankNodeSubjRes.getId().getLabelString());
                subjMap.setAsBlankNode(true);

                // Record the blank node's URI prefix, if declared.
                NodeIterator bnodePrefixItr = model.listObjectsOfProperty(blankNodeSubjRes, kmBnodePrefixProp);
                while (bnodePrefixItr.hasNext()) {
                    kr2rmlMapping.getAuxInfo().getBlankNodesUriPrefixMap().put(subjMap.getId(),
                            bnodePrefixItr.next().toString());
                }

                TriplesMap mytm = null;
                for (TriplesMap tm : kr2rmlMapping.getTriplesMapList()) {
                    if (tm.getSubject().getId().equalsIgnoreCase(subjMap.getId())) {
                        mytm = tm;
                        List<PredicateObjectMap> poms = mytm.getPredicateObjectMaps();
                        for (PredicateObjectMap pom : poms) {
                            ObjectMap objMap = pom.getObject();
                            if (objMap == null) {
                                logger.error("Unable to find object map for pom :" + pom.toString());
                                // Fix: previously fell through and dereferenced
                                // pom.getObject() anyway, throwing an NPE.
                                continue;
                            }
                            TemplateTermSet templateTermSet = objMap.getTemplate();
                            if (templateTermSet != null) {
                                TemplateTerm term = templateTermSet.getAllTerms().get(0);
                                if (term != null && term instanceof ColumnTemplateTerm) {
                                    columnsCovered.add(term.getTemplateTermValue());
                                }
                            }
                        }
                        if (columnsCovered.isEmpty()) {
                            //String blankNodeUriPrefix = kr2rmlMapping.getAuxInfo().getBlankNodesUriPrefixMap().get(subjMap.getId());
                            //throw new KarmaException("You need to define a URI for "+blankNodeUriPrefix+ ".");
                        }
                        break;
                    }
                }
                logger.debug("Adding columns for blank node" + subjMap.getId() + " List: " + columnsCovered);
                kr2rmlMapping.getAuxInfo().getBlankNodesColumnCoverage().put(subjMap.getId(), columnsCovered);
                kr2rmlMapping.getAuxInfo().getSubjectMapIdToTemplateAnchor().put(subjMap.getId(),
                        KR2RMLMappingAuxillaryInformation.findSubjectMapTemplateAnchor(columnsCovered));
                // Get the blank node prefix
            }
        }
    }
}
/* * Copyright 2006-2012 ICEsoft Technologies Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.icepdf.core.pobjects.security; import org.icepdf.core.pobjects.Dictionary; import org.icepdf.core.pobjects.Name; import org.icepdf.core.pobjects.StringObject; import org.icepdf.core.util.Library; import java.util.Hashtable; import java.util.Vector; /** * <p>The EncryptionDictionary class is used to hold values needed by the Standard * Security Handler, Public Key Handlers and Crypt filters. This PDF object * is found via a document's Trailer object, but only when the Trailer has an * encrypted named reference.</p> * <p/> * <p>The dictionary is composed of combinations of the following entries defined * by the different encryption types. ICEpdf currently only supports the * Standard Security Handler.</p> * <p/> * <p/> * <table border="1" cellpadding="1" cellspacing="1" > * <tr> * <td colspan="3" ><b>Common to all Encryption Dictionaries</b></td> * </tr> * <tr> * <td><b>Key</b></td> * <td><b>Type</b></td> * <td><b>Value</b></td> * </tr> * <tr> * <td valign="top" >Filter</td> * <td valign="top" >name</td> * <td>(Required) The name of the preferred security handler for this * document; typically it is the name of the security handler that was * used to encrypt the document. If SubFilter is not present, only this * security handler should be used when opening the document. 
If it is * present, viewer applications are free to use any security handler * that implements the format specified by SubFilter.</td> * </tr> * <tr> * <td valign="top" >SubFilter</td> * <td valign="top" >name</td> * <td>(Optional; PDF 1.3) A name that completely specifies the format and * interpretation of the contents of the encryption dictionary. It is * needed in order to allow security handlers other than the one * specified by Filter to decrypt the document. If it is absent, other * security handlers will not be able to decrypt the document.</td> * </tr> * <tr> * <td valign="top" >V</td> * <td valign="top" >number</td> * <td>(Optional but strongly recommended) A code specifying the algorithm * to be used in encrypting and decrypting the document: * <ul> * <li>0 - An algorithm that is undocumented and no longer * supported, and whose use is strongly discouraged.</li> * <li>1 - Algorithm 3.1, with an encryption key length * of 40 bits; see below.</li> * <li>2 - (PDF 1.4) Algorithm 3.1, but allowing * encryption key lengths greater than 40 bits.</li> * <li>3 - (PDF 1.4) An unpublished algorithm allowing encryption * key lengths ranging from 40 to 128 bits. (This algorithm * is unpublished as an export requirement of the U.S. * Department of Commerce.)</li> * <li>(PDF 1.5) The security handler defines the use of encryption * and decryption in the document, using the rules specified by * the CF, StmF, and StrF entries.</li> * </ul> * </td> * </tr> * <tr> * <td valign="top" >Length</td> * <td valign="top" >integer</td> * <td>(Optional; PDF 1.4; only if V is 2 or 3) The length of the * encryption key, in bits. The value must be a multiple of 8, in the * range 40 to 128. 
Default value: 40.</td>
 * </tr>
 * <tr>
 * <td valign="top" >CF</td>
 * <td valign="top" >dictionary</td>
 * <td>(Optional; meaningful only when the value of V is 4; PDF 1.5) A
 * dictionary whose keys are crypt filter names and whose values are
 * the corresponding crypt filter dictionaries.</td>
 * </tr>
 * <tr>
 * <td valign="top" >StmF</td>
 * <td valign="top" >name</td>
 * <td>(Optional; meaningful only when the value of V is 4; PDF 1.5) The
 * name of the crypt filter that is used by default when encrypting
 * streams; it must correspond to a key in the CF dictionary or a
 * standard crypt filter name. All streams in the document, except for
 * cross-reference streams or those that have a crypt entry in their
 * Filter array are decrypted by the security handler, using this
 * crypt filter.</td>
 * </tr>
 * <tr>
 * <td valign="top" >StrF</td>
 * <td valign="top" >name</td>
 * <td>(Optional; meaningful only when the value of V is 4; PDF 1.5) The
 * name of the crypt filter that is used when decrypting all strings
 * in the document; it must correspond to a key in the CF dictionary
 * or a standard crypt filter name.</td>
 * </tr>
 * </table>
 * <p/>
 * <p>The dictionary is composed of the following values that can be returned via
 * their named method or by a generic getValue method if the key's name is known.
 * The values of the O and U entries in this dictionary are used to determine
 * whether a password entered when the document is opened is the correct owner
 * password, user password, or neither.</p>
 * <p/>
 * <table border="1" cellpadding="1" cellspacing="1" >
 * <tr>
 * <td colspan="3" ><b>Standard Encryption Dictionary Entries</b> </td>
 * </tr>
 * <tr>
 * <td><b>Key</b></td>
 * <td><b>Type</b></td>
 * <td><b>Value</b></td>
 * </tr>
 * <tr>
 * <td valign="top" >R</td>
 * <td valign="top" >number</td>
 * <td>
 * <p>(Required) A number specifying which revision of the standard
 * security handler should be used to interpret this dictionary.
The * revision number should be:</p> * <ul> * <li>2 if the document is encrypted with a V value less than 2 * and does not have any of the access permissions set (via the * P entry, below) that are designated "Revision3"</li> * <li>3 if the document is encrypted with a V value of 2 or 3, or * has any "Revision 3" access permissions set.</li> * <li>4 if the document is encrypted with a V value of 4.</li> * </ul> * </td> * </tr> * <tr> * <td valign="top" >O</td> * <td valign="top" >String</td> * <td>(Required) A 32-byte string, based on both the owner and user * passwords, that is used in computing the encryption key and in * determining whether a valid owner password was entered.</td> * </tr> * <tr> * <td valign="top" >U</td> * <td valign="top" >String</td> * <td>U string (Required) A 32-byte string, based on the user password, * that is used in determining whether to prompt the user for a * password and, if so, whether a valid user or owner password was * entered.</td> * <tr> * <td valign="top" >P</td> * <td valign="top" >Integer</td> * <td>(Required) A set of flags specifying which operations are permitted * when the document is opened with user access. * </td> * </tr> * </table> * <p/> * <p>Encryption dictionaries for public-key security handlers contain the * common entries shown above. In addition, they may contain the entries * shown below.</p> * <p/> * <table border="1" cellpadding="1" cellspacing="1" > * <tr> * <td colspan="3" ><b>Additional public-key Dictionary Entries</b> </td> * </tr> * <tr> * <td><b>Key</b></td> * <td><b>Type</b></td> * <td><b>Value</b></td> * </tr> * <tr> * <td valign="top" >Recipients</td> * <td valign="top" >array</td> * <td>(Required when SubFilter is adbe.pkcs7.s3 or adbe.pkcs7.s4; PDF 1.3) * An array of strings, where each string is a PKCS#7 object listing * recipients that have been granted equal access rights to the document. 
* The data contained in the PKCS#7 object includes both a cryptographic * key that is used to decrypt the encrypted data and the access * permissions that apply to the recipient list. There should be only * one object per unique set of access permissions; if a recipient * appears in more than one list, the permissions used will be those * found in the first matching list.<br /> * <b>Note:</b><br /> * When SubFilter is adbe.pkcs7.s5, recipient lists are specified in * the crypt filter dictionary.</td> * </tr> * </table> * <p/> * <p>Encryption dictionaries for crypt filter security handlers contain the * common entries shown above. In addition, they may contain the entries * shown below</p> * <p/> * <table border="1" cellpadding="1" cellspacing="1" > * <tr> * <td colspan="3" ><b> Standard Encryption Dictionary Entries</b> </td> * </tr> * <tr> * <td><b>Key</b></td> * <td><b>Type</b></td> * <td><b>Value</b></td> * </tr> * <tr> * <td valign="top" >Type</td> * <td valign="top" >name</td> * <td>(Optional) If present, must be CryptFilter for a crypt filter * dictionary.</td> * </tr> * <tr> * <td valign="top" >CFM</td> * <td valign="top" >name</td> * <td>(Optional) The method used, if any, by the viewer application to * decrypt data. In PDF 1.5, the following values are supported: * <ul> * <li>None: (default)the viewer application does not decrypt data, * but directs the input stream to the security handler for * decryption.</li> * <li>V2: the viewer application asks the security handler for the * decryption key and implicitly decrypts data using Algorithm * 3.1. 
A viewer application may ask once for this decryption
 * key, then cache the key for subsequent use for streams that
 * use the same crypt filter; therefore, there must be a one-to-one
 * relationship between a crypt filter name and the corresponding
 * decryption key.</li>
 * </ul>
 * </td>
 * </tr>
 * <tr>
 * <td valign="top" >Length</td>
 * <td valign="top" >integer</td>
 * <td>(Optional) When the value of CFM is V2, this entry is used to
 * indicate the bit length of the decryption key. It must be a multiple
 * of 8 in the range of 40 to 128. Default value: 128. When the value
 * of CFM is None, security handlers can define their own use of this
 * entry, but are encouraged to follow the usage conventions defined
 * for V2.</td>
 * </tr>
 * <tr>
 * <td valign="top" >AuthEvent</td>
 * <td valign="top" >name</td>
 * <td>
 * (Optional) The event to be used to trigger the authorization that is
 * required to access decryption keys used by this filter. If
 * authorization fails, the event should fail. Acceptable values are:
 * <ul>
 * <li>DocOpen: (default) authorization is required when a document
 * is opened.</li>
 * <li>EFOpen: authorization is required when about to access embedded
 * files.</li>
 * </ul>
 * If this filter is used as the value of StrF or StmF in the encryption
 * dictionary, the viewer application should ignore this key and behave
 * as if the value is DocOpen.
 * </td>
 * </tr>
 * </table>
 *
 * @since 1.1
 */
public class EncryptionDictionary extends Dictionary {

    // File ID, generated when the document is created; the first element is
    // used by the encryption algorithms when computing the encryption key.
    private Vector fileID = null;

    // Lazily initialised crypt filter dictionary (CF entry, V4 documents only).
    private CryptFilter cryptFilter;

    // Revision 5 authentication results, cached here because the passwords
    // are validated when the encryption key is calculated.
    private boolean isAuthenticatedUserPassword;
    private boolean isAuthenticatedOwnerPassword;

    /**
     * Creates a new Encryption Dictionary object.
     *
     * @param lib                  library dictionary of all objects in document.
     * @param encryptionDictionary dictionary of all values taken from encrypt key
     *                             in the documents Trailer reference.
     * @param fileID               Vector containing the two file ID values originally
     *                             parsed from the Trailer reference.
     */
    public EncryptionDictionary(Library lib, Hashtable encryptionDictionary,
                                Vector fileID) {
        super(lib, encryptionDictionary);
        this.entries = encryptionDictionary;
        this.fileID = fileID;
    }

    /**
     * Gets the document's File ID.
     *
     * @return vector containing two values that represent the file ID
     */
    public Vector getFileID() {
        return fileID;
    }

    /*
     * Entries common to all encryption dictionaries
     */

    /**
     * Gets the preferred security handler name (the Filter entry).
     *
     * @return handler name.
     */
    public String getPreferredSecurityHandlerName() {
        return library.getName(entries, "Filter");
    }

    /**
     * Gets the preferred security handler sub-name (the SubFilter entry).
     *
     * @return handler sub-name.
     */
    public String getPreferredSecurityHandlerSubName() {
        return library.getName(entries, "SubFilter");
    }

    /**
     * Gets a code specifying the algorithm to be used in encrypting and
     * decrypting the document:
     * <ul>
     * <li>0 An algorithm that is undocumented. This value shall not be used.</li>
     * <li>1 "Algorithm 1: Encryption of data using the RC4 or AES algorithms"
     * in 7.6.2, "General Encryption Algorithm," with an encryption key length
     * of 40 bits; see below.</li>
     * <li>2 (PDF 1.4) "Algorithm 1: Encryption of data using the RC4 or AES
     * algorithms" in 7.6.2, "General Encryption Algorithm," but permitting
     * encryption key lengths greater than 40 bits.</li>
     * <li>3 (PDF 1.4) An unpublished algorithm that permits encryption key
     * lengths ranging from 40 to 128 bits. This value shall not appear in a
     * conforming PDF file.</li>
     * <li>4 (PDF 1.5) The security handler defines the use of encryption and
     * decryption in the document, using the rules specified by the CF, StmF,
     * and StrF entries. The default value if this entry is omitted shall be
     * 0, but when present should be a value of 1 or greater.</li>
     * </ul>
     *
     * @return encryption version.
     */
    public int getVersion() {
        return library.getInt(entries, "V");
    }

    /**
     * Gets the length of the encryption key, in bits.  The spec default of
     * 40 bits is returned when the Length entry is absent.
     *
     * @return length of encryption key.
     */
    public int getKeyLength() {
        int length = 40;
        int len = library.getInt(entries, "Length");
        if (len != 0) {
            length = len;
        }
        return length;
    }

    /*
     * Entries added for standard encryption dictionaries
     */

    /**
     * Gets the revision number of the standard security handler (the R entry).
     *
     * @return revision number.
     */
    public int getRevisionNumber() {
        return library.getInt(entries, "R");
    }

    /**
     * Gets the 32-byte string used for verifying the owner password.
     *
     * @return 32-byte string representing the key O, or null if the entry is
     *         missing or not a string.
     */
    public String getBigO() {
        Object tmp = library.getObject(entries, "O");
        if (tmp instanceof StringObject) {
            return ((StringObject) tmp).getLiteralString();
        } else {
            return null;
        }
    }

    /**
     * Gets the 32-byte string used for verifying the user password.
     *
     * @return 32-byte string representing the key U, or null if the entry is
     *         missing or not a string.
     */
    public String getBigU() {
        Object tmp = library.getObject(entries, "U");
        if (tmp instanceof StringObject) {
            // reuse the already-fetched object rather than a second lookup
            return ((StringObject) tmp).getLiteralString();
        } else {
            return null;
        }
    }

    /**
     * Gets the integer flag which specifies the operation permitted when the
     * document is opened with user access.
     *
     * @return return flag specifying user access.
     */
    public int getPermissions() {
        return library.getInt(entries, "P");
    }

    /**
     * (Optional; meaningful only when the value of V is 4; PDF 1.5)
     * A dictionary whose keys shall be crypt filter names and whose values
     * shall be the corresponding crypt filter dictionaries (see Table 25).
     * Every crypt filter used in the document shall have an entry in this
     * dictionary, except for the standard crypt filter names (see Table 26).
     * <p/>
     * The conforming reader shall ignore entries in CF dictionary with the keys
     * equal to those listed in Table 26 and use properties of the respective
     * standard crypt filters.
     *
     * @return crypt filter object if found, null otherwise.
     */
    public CryptFilter getCryptFilter() {
        // lazily build and cache the crypt filter wrapper
        if (cryptFilter == null) {
            Hashtable tmp = (Hashtable) library.getObject(entries, "CF");
            if (tmp != null) {
                cryptFilter = new CryptFilter(library, tmp);
            }
        }
        return cryptFilter;
    }

    /**
     * (Optional; meaningful only when the value of V is 4; PDF 1.5)
     * The name of the crypt filter that shall be used by default when decrypting
     * streams. The name shall be a key in the CF dictionary or a standard crypt
     * filter name specified in Table 26. All streams in the document, except
     * for cross-reference streams (see 7.5.8, "Cross-Reference Streams") or
     * streams that have a Crypt entry in their Filter array (see Table 6),
     * shall be decrypted by the security handler, using this crypt filter.
     * <p/>
     * Default value: Identity.
     *
     * @return name of the default stream filter name.
     */
    public Name getStmF() {
        Object tmp = library.getObject(entries, "StmF");
        // instanceof is false for null, so no separate null check is needed
        if (tmp instanceof Name) {
            return (Name) tmp;
        }
        return null;
    }

    /**
     * (Optional; meaningful only when the value of V is 4; PDF 1.5)
     * The name of the crypt filter that shall be used when decrypting all
     * strings in the document. The name shall be a key in the CF dictionary or
     * a standard crypt filter name specified in Table 26.
     * <p/>
     * Default value: Identity.
     *
     * @return name of the default string filter name.
     */
    public Name getStrF() {
        Object tmp = library.getObject(entries, "StrF");
        if (tmp instanceof Name) {
            return (Name) tmp;
        }
        return null;
    }

    /**
     * (Optional; meaningful only when the value of V is 4; PDF 1.6) The name
     * of the crypt filter that shall be used when encrypting embedded file
     * streams that do not have their own crypt filter specifier; it shall
     * correspond to a key in the CF dictionary or a standard crypt filter name
     * specified in Table 26.
     * <p/>
     * This entry shall be provided by the security handler. Conforming writers
     * shall respect this value when encrypting embedded files, except for
     * embedded file streams that have their own crypt filter specifier. If
     * this entry is not present, and the embedded file stream does not contain
     * a crypt filter specifier, the stream shall be encrypted using the default
     * stream crypt filter specified by StmF.
     *
     * @return name of the embedded file stream crypt filter (EFF entry),
     *         or null if not present.
     */
    public Name getEEF() {
        // Bug fix: the PDF specification names this key "EFF"; the previous
        // implementation looked up "EEF", which never matches a conforming
        // file.  Try the spec spelling first and fall back to the old key so
        // any files relying on the historical behaviour keep working.
        Object tmp = library.getObject(entries, "EFF");
        if (tmp == null) {
            tmp = library.getObject(entries, "EEF");
        }
        if (tmp instanceof Name) {
            return (Name) tmp;
        }
        return null;
    }

    /**
     * Gets the 32-byte string, based on the owner and user passwords, that is
     * used in the computing the encryption key.
     *
     * @return 32-byte string representing the key OE, or null if absent.
     */
    public String getBigOE() {
        Object tmp = library.getObject(entries, "OE");
        if (tmp instanceof StringObject) {
            return ((StringObject) tmp).getLiteralString();
        } else {
            return null;
        }
    }

    /**
     * Gets the 32-byte string, based on the user password, that is
     * used in the computing the encryption key.
     *
     * @return 32-byte string representing the key UE, or null if absent.
     */
    public String getBigUE() {
        Object tmp = library.getObject(entries, "UE");
        if (tmp instanceof StringObject) {
            return ((StringObject) tmp).getLiteralString();
        } else {
            return null;
        }
    }

    /**
     * A 16-byte string, encrypted with the file encryption key, that contains an
     * encrypted copy of the permission flags.
     *
     * @return 16-byte string representing the key Perms, or null if absent.
     */
    public String getPerms() {
        Object tmp = library.getObject(entries, "Perms");
        if (tmp instanceof StringObject) {
            return ((StringObject) tmp).getLiteralString();
        } else {
            return null;
        }
    }

    /**
     * Indicates whether the document-level metadata stream (see Section 10.2.2,
     * "Metadata Streams") is to be encrypted. Applications should respect
     * this value.
     *
     * @return true if document-level metadata is encrypted
     */
    public boolean isEncryptMetaData() {
        return library.getBoolean(entries, "EncryptMetadata");
    }

    /** @return cached result of the revision 5 user-password check. */
    protected boolean isAuthenticatedUserPassword() {
        return isAuthenticatedUserPassword;
    }

    protected void setAuthenticatedUserPassword(boolean authenticatedUserPassword) {
        isAuthenticatedUserPassword = authenticatedUserPassword;
    }

    /** @return cached result of the revision 5 owner-password check. */
    protected boolean isAuthenticatedOwnerPassword() {
        return isAuthenticatedOwnerPassword;
    }

    protected void setAuthenticatedOwnerPassword(boolean authenticatedOwnerPassword) {
        isAuthenticatedOwnerPassword = authenticatedOwnerPassword;
    }

    /*
     * Class utility methods
     */

    /**
     * Gets any dictionary key specified by the key parameter.
     *
     * @param key named key to retrieve from dictionary.
     * @return return keys value if found; null, otherwise.
     */
    public Object getValue(Object key) {
        return entries.get(key);
    }

    /**
     * Gets all the dictionary properties.
     *
     * @return named based hash of all encryption properties.
     */
    public Hashtable getEntries() {
        return entries;
    }

    public String toString() {
        return "Encryption Dictionary: \n" +
                "  fileID: " + getFileID() + " \n" +
                "  Filter: " + getPreferredSecurityHandlerName() + " \n" +
                "  SubFilter: " + getPreferredSecurityHandlerSubName() + " \n" +
                "  V: " + getVersion() + " \n" +
                "  P: " + getPermissions() + " \n" +
                "  Length:" + getKeyLength() + " \n" +
                "  CF: " + cryptFilter + " \n" +
                "  StmF: " + getStmF() + " \n" +
                "  StrF: " + getStrF() + " \n" +
                "  R: " + getRevisionNumber() + " \n" +
                "  O: " + getBigO() + " \n" +
                "  U: " + getBigU() + " \n" +
                "  UE: " + getBigUE() + " \n" +
                "  OE: " + getBigOE() + " \n" +
                "  Recipients: " + "not done yet" + " \n" +
                "  ";
    }
}
/**
 * $Revision $
 * $Date $
 *
 * Copyright (C) 2005-2010 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.openfire.plugin.ofmeet;

import java.io.IOException;
import java.io.Serializable;
import java.security.Principal;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import javax.security.auth.Subject;

import org.eclipse.jetty.security.*;
import org.eclipse.jetty.server.UserIdentity;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.eclipse.jetty.util.security.Credential;

import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.auth.AuthFactory;
import org.jivesoftware.openfire.auth.AuthToken;
import org.jivesoftware.openfire.auth.UnauthorizedException;
import org.jivesoftware.openfire.user.User;
import org.jivesoftware.openfire.user.UserAlreadyExistsException;
import org.jivesoftware.openfire.user.UserManager;
import org.jivesoftware.openfire.user.UserNotFoundException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A Jetty {@link LoginService} that authenticates users against the local
 * Openfire server via {@link AuthFactory}.
 * <p>
 * Successful logins are cached in the static {@link #identities} and
 * {@link #authTokens} maps, keyed by user name, so repeated requests for the
 * same user do not re-authenticate unless the auth token has been evicted.
 */
public class OpenfireLoginService extends AbstractLifeCycle implements LoginService
{
    private static final Logger Log = LoggerFactory.getLogger(OpenfireLoginService.class);

    /** Auth tokens of successfully authenticated users, keyed by user name. */
    public static final ConcurrentHashMap<String, AuthToken> authTokens =
            new ConcurrentHashMap<String, AuthToken>();

    /** Jetty identities of logged-in users, keyed by user name. */
    public static final ConcurrentHashMap<String, UserIdentity> identities =
            new ConcurrentHashMap<String, UserIdentity>();

    private IdentityService _identityService = new DefaultIdentityService();
    private String _name;
    private UserManager userManager = XMPPServer.getInstance().getUserManager();

    protected OpenfireLoginService()
    {
    }

    public OpenfireLoginService(String name)
    {
        setName(name);
    }

    public String getName()
    {
        return _name;
    }

    public IdentityService getIdentityService()
    {
        return _identityService;
    }

    public void setIdentityService(IdentityService identityService)
    {
        if (isRunning())
            throw new IllegalStateException("Running");
        _identityService = identityService;
    }

    public void setName(String name)
    {
        if (isRunning())
            throw new IllegalStateException("Running");
        _name = name;
    }

    @Override
    protected void doStart() throws Exception
    {
        super.doStart();
    }

    @Override
    protected void doStop() throws Exception
    {
        super.doStop();
    }

    public void logout(UserIdentity identity)
    {
        Log.debug("logout {}", identity);
        identities.remove(identity.getUserPrincipal().getName());
    }

    @Override
    public String toString()
    {
        return this.getClass().getSimpleName() + "[" + _name + "]";
    }

    /**
     * Authenticates the given user against Openfire and returns a cached or
     * freshly-built {@link UserIdentity}, or {@code null} when the user is
     * unknown or the credential is rejected.
     *
     * @param userName   the Openfire user name.
     * @param credential the password, expected to be a {@link String}.
     * @return the user's identity, or null on any authentication failure.
     */
    public UserIdentity login(String userName, Object credential)
    {
        UserIdentity identity = null;

        if (identities.containsKey(userName)) {
            identity = identities.get(userName);

            // Identity is cached but the auth token may have been evicted;
            // re-authenticate so the token cache is repopulated.
            if (!authTokens.containsKey(userName)) {
                Log.debug("UserIdentity login " + userName + " ");

                if (!authenticateAndCacheToken(userName, credential)) {
                    return null;
                }
            }
        }
        else {
            Log.debug("UserIdentity login " + userName + " ");

            // Verify the account exists before attempting authentication.
            try {
                userManager.getUser(userName);
            }
            catch (UserNotFoundException e) {
                Log.error("user not found " + userName, e);
                return null;
            }

            if (!authenticateAndCacheToken(userName, credential)) {
                return null;
            }

            Principal userPrincipal = new KnownUser(userName, credential);
            Subject subject = new Subject();
            subject.getPrincipals().add(userPrincipal);
            subject.getPrivateCredentials().add(credential);
            subject.getPrincipals().add(new RolePrincipal("ofmeet"));
            subject.setReadOnly();

            identity = _identityService.newUserIdentity(subject, userPrincipal, new String[]{"ofmeet"});
            identities.put(userName, identity);
        }

        return identity;
    }

    /**
     * Authenticates against Openfire and caches the resulting token.
     * Factored out of {@code login} where it previously appeared twice,
     * verbatim, in both branches.
     *
     * @return true on success; false (after logging) on failure.
     */
    private boolean authenticateAndCacheToken(String userName, Object credential)
    {
        try {
            AuthToken authToken = AuthFactory.authenticate(userName, (String) credential);
            authTokens.put(userName, authToken);
            return true;
        }
        catch (UnauthorizedException e) {
            Log.error("access denied, bad password " + userName);
            return false;
        }
        catch (Exception e) {
            Log.error("access denied " + userName);
            return false;
        }
    }

    // NOTE(review): always trusting a previously-issued identity; revoked or
    // expired sessions are never invalidated here — confirm this is intended.
    public boolean validate(UserIdentity user)
    {
        return true;
    }

    /** Principal representing an authenticated Openfire user. */
    public static class KnownUser implements UserPrincipal, Serializable
    {
        private static final long serialVersionUID = -6226920753748399662L;

        private final String _name;
        private final Object _credential;

        public KnownUser(String name, Object credential)
        {
            _name = name;
            _credential = credential;
        }

        public boolean authenticate(Object credentials)
        {
            return true;
        }

        public String getName()
        {
            return _name;
        }

        public boolean isAuthenticated()
        {
            return true;
        }

        @Override
        public String toString()
        {
            return _name;
        }
    }

    public interface UserPrincipal extends Principal, Serializable
    {
        boolean authenticate(Object credentials);

        public boolean isAuthenticated();
    }

    /** Principal representing a role granted to a user (always "ofmeet" here). */
    public static class RolePrincipal implements Principal, Serializable
    {
        private static final long serialVersionUID = 2998397924051854402L;

        private final String _roleName;

        public RolePrincipal(String name)
        {
            _roleName = name;
        }

        public String getName()
        {
            return _roleName;
        }
    }
}
package org.hisp.dhis.datavalue; /* * Copyright (c) 2004-2015, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Set;

import org.hisp.dhis.common.MapMap;
import org.hisp.dhis.dataelement.CategoryOptionGroup;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOption;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementOperand;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;

/**
 * The DataValueService interface defines how to work with data values.
 *
 * @author Kristian Nordal
 * @version $Id: DataValueService.java 5715 2008-09-17 14:05:28Z larshelg $
 */
public interface DataValueService
{
    String ID = DataValueService.class.getName();

    // -------------------------------------------------------------------------
    // Basic DataValue
    // -------------------------------------------------------------------------

    /**
     * Adds a DataValue. If both the value and the comment properties of the
     * specified DataValue object are null, then the object should not be
     * persisted. The value will be validated and not be saved if not passing
     * validation.
     *
     * @param dataValue the DataValue to add.
     * @return false whether the data value is null or invalid, true if value is
     *         valid and attempted to be saved.
     */
    boolean addDataValue( DataValue dataValue );

    /**
     * Updates a DataValue. If both the value and the comment properties of the
     * specified DataValue object are null, then the object should be deleted
     * from the underlying storage.
     *
     * @param dataValue the DataValue to update.
     */
    void updateDataValue( DataValue dataValue );

    /**
     * Deletes a DataValue.
     *
     * @param dataValue the DataValue to delete.
     */
    void deleteDataValue( DataValue dataValue );

    /**
     * Deletes all data values for the given organisation unit.
     *
     * @param organisationUnit the organisation unit.
     */
    void deleteDataValues( OrganisationUnit organisationUnit );

    /**
     * Returns a DataValue.
     *
     * @param dataElement the DataElement of the DataValue.
     * @param period the Period of the DataValue.
     * @param source the Source of the DataValue.
     * @param optionCombo the category option combo.
     * @return the DataValue which corresponds to the given parameters, or null
     *         if no match.
     */
    DataValue getDataValue( DataElement dataElement, Period period, OrganisationUnit source,
        DataElementCategoryOptionCombo optionCombo );

    /**
     * Returns a DataValue.
     *
     * @param dataElement the DataElement of the DataValue.
     * @param period the Period of the DataValue.
     * @param source the Source of the DataValue.
     * @param categoryOptionCombo the category option combo.
     * @param attributeOptionCombo the attribute option combo.
     * @return the DataValue which corresponds to the given parameters, or null
     *         if no match.
     */
    DataValue getDataValue( DataElement dataElement, Period period, OrganisationUnit source,
        DataElementCategoryOptionCombo categoryOptionCombo, DataElementCategoryOptionCombo attributeOptionCombo );

    /**
     * Returns a non-persisted DataValue.
     *
     * @param dataElementId data element id
     * @param periodId period id
     * @param sourceId source id
     * @param categoryOptionComboId category option combo id
     * @return a non-persisted DataValue assembled from the given identifiers.
     */
    DataValue getDataValue( int dataElementId, int periodId, int sourceId, int categoryOptionComboId );

    // -------------------------------------------------------------------------
    // Lists of DataValues
    // -------------------------------------------------------------------------

    /**
     * Returns all DataValues.
     *
     * @return a collection of all DataValues.
     */
    List<DataValue> getAllDataValues();

    /**
     * Returns all DataValues for a given Source and Period.
     *
     * @param source the Source of the DataValues.
     * @param period the Period of the DataValues.
     * @return a collection of all DataValues which match the given Source and
     *         Period, or an empty collection if no values match.
     */
    List<DataValue> getDataValues( OrganisationUnit source, Period period );

    /**
     * Returns all DataValues for a given Source and DataElement.
     *
     * @param source the Source of the DataValues.
     * @param dataElement the DataElement of the DataValues.
     * @return a collection of all DataValues which match the given Source and
     *         DataElement, or an empty collection if no values match.
     */
    List<DataValue> getDataValues( OrganisationUnit source, DataElement dataElement );

    /**
     * Returns all DataValues for a given collection of Sources and a
     * DataElement.
     *
     * @param sources the Sources of the DataValues.
     * @param dataElement the DataElement of the DataValues.
     * @return a collection of all DataValues which match any of the given
     *         Sources and the DataElement, or an empty collection if no values
     *         match.
     */
    List<DataValue> getDataValues( Collection<OrganisationUnit> sources, DataElement dataElement );

    /**
     * Returns all DataValues for a given Source, Period, collection of
     * DataElements and collection of option combos.
     *
     * @param source the Source of the DataValues.
     * @param period the Period of the DataValues.
     * @param dataElements the DataElements of the DataValues.
     * @param optionCombos the DataElementCategoryOptionCombos of the DataValues.
     * @return a collection of all DataValues which match the given Source,
     *         Period, and any of the DataElements, or an empty collection if no
     *         values match.
     */
    List<DataValue> getDataValues( OrganisationUnit source, Period period,
        Collection<DataElement> dataElements, Collection<DataElementCategoryOptionCombo> optionCombos );

    /**
     * Returns all DataValues for a given Source, Period, and collection of
     * DataElements.
     *
     * @param source the Source of the DataValues.
     * @param period the Period of the DataValues.
     * @param dataElements the DataElements of the DataValues.
     * @return a collection of all DataValues which match the given Source,
     *         Period, and any of the DataElements, or an empty collection if no
     *         values match.
     */
    List<DataValue> getDataValues( OrganisationUnit source, Period period, Collection<DataElement> dataElements );

    /**
     * Returns all DataValues for a given Source, Period, collection of
     * DataElements and DataElementCategoryOptionCombo.
     *
     * @param source the Source of the DataValues.
     * @param period the Period of the DataValues.
     * @param dataElements the DataElements of the DataValues.
     * @param attributeOptionCombo the DataElementCategoryCombo.
     * @return a collection of all DataValues which match the given Source,
     *         Period, and any of the DataElements, or an empty collection if no
     *         values match.
     */
    List<DataValue> getDataValues( OrganisationUnit source, Period period,
        Collection<DataElement> dataElements, DataElementCategoryOptionCombo attributeOptionCombo );

    /**
     * Returns all DataValues for a given DataElement, Period, and collection of
     * Sources.
     *
     * @param dataElement the DataElements of the DataValues.
     * @param period the Period of the DataValues.
     * @param sources the Sources of the DataValues.
     * @return a collection of all DataValues which match the given DataElement,
     *         Period, and Sources.
     */
    List<DataValue> getDataValues( DataElement dataElement, Period period,
        Collection<OrganisationUnit> sources );

    /**
     * Returns all DataValues for a given DataElement, collection of Periods, and
     * collection of Sources.
     *
     * @param dataElement the dataElements of the DataValues.
     * @param periods the periods of the DataValues.
     * @param sources the Sources of the DataValues.
     * @return a collection of all DataValues which match the given DataElement,
     *         Periods, and Sources.
     */
    List<DataValue> getDataValues( DataElement dataElement, Collection<Period> periods,
        Collection<OrganisationUnit> sources );

    /**
     * Returns all DataValues for a given DataElement, DataElementCategoryOptionCombo,
     * collection of Periods, and collection of Sources.
     *
     * @param dataElement the DataElements of the DataValues.
     * @param optionCombo the DataElementCategoryOptionCombo of the DataValues.
     * @param periods the Periods of the DataValues.
     * @param sources the Sources of the DataValues.
     * @return a collection of all DataValues which match the given DataElement,
     *         Periods, and Sources.
     */
    List<DataValue> getDataValues( DataElement dataElement, DataElementCategoryOptionCombo optionCombo,
        Collection<Period> periods, Collection<OrganisationUnit> sources );

    /**
     * Returns all DataValues for a given collection of DataElementCategoryOptionCombos.
     *
     * @param optionCombos the DataElementCategoryOptionCombos of the DataValue.
     * @return a collection of all DataValues which match the given collection of
     *         DataElementCategoryOptionCombos.
     */
    List<DataValue> getDataValues( Collection<DataElementCategoryOptionCombo> optionCombos );

    /**
     * Returns all DataValues for a given DataElement.
     *
     * @param dataElement the DataElement of the DataValue.
     * @return a collection of all DataValues which match the given DataElement.
     */
    List<DataValue> getDataValues( DataElement dataElement );

    /**
     * Returns the latest DataValue for a given DataElement, PeriodType and
     * OrganisationUnit.
     *
     * @param dataElement the DataElements of the DataValue.
     * @param periodType the Period Type of period of the DataValue
     * @param organisationUnit the Organisation Unit of the DataValue
     * @return the latest DataValue.
     */
    DataValue getLatestDataValues( DataElement dataElement, PeriodType periodType,
        OrganisationUnit organisationUnit );

    /**
     * Gets the number of DataValues persisted since the given number of days.
     *
     * @param days the number of days since now to include in the count.
     * @return the number of DataValues.
     */
    int getDataValueCount( int days );

    /**
     * Gets the number of DataValues which have been updated after the given
     * date time.
     *
     * @param date the date time.
     * @return the number of DataValues.
     */
    int getDataValueCountLastUpdatedAfter( Date date );

    /**
     * Returns a map of values for each attribute option combo found.
     * <p>
     * In the (unlikely) event that the same dataElement/optionCombo is found in
     * more than one period for the same organisationUnit, date, and attribute
     * combo, the value is returned from the period with the shortest duration.
     *
     * @param dataElements collection of DataElements to fetch for
     * @param date date which must be present in the period
     * @param source OrganisationUnit for which to fetch the values
     * @param periodTypes allowable period types in which to find the data
     * @param attributeCombo the attribute combo to check (if restricted)
     * @param cogDimensionConstraints category option group dimension constraints
     *        limiting the attribute option combos considered
     * @param coDimensionConstraints category option dimension constraints
     *        limiting the attribute option combos considered
     * @param lastUpdatedMap map in which to return the lastUpdated date for each value
     * @return map of values by attribute option combo id, then DataElementOperand
     */
    MapMap<Integer, DataElementOperand, Double> getDataValueMapByAttributeCombo(
        Collection<DataElement> dataElements, Date date,
        OrganisationUnit source, Collection<PeriodType> periodTypes,
        DataElementCategoryOptionCombo attributeCombo,
        Set<CategoryOptionGroup> cogDimensionConstraints,
        Set<DataElementCategoryOption> coDimensionConstraints,
        MapMap<Integer, DataElementOperand, Date> lastUpdatedMap );

    /**
     * Gets a Collection of DeflatedDataValues.
     *
     * @param dataElementId the DataElement identifier.
     * @param periodId the Period identifier.
     * @param sourceIds the Collection of Source identifiers.
     * @return the collection of DeflatedDataValues matching the parameters.
     */
    Collection<DeflatedDataValue> getDeflatedDataValues( int dataElementId, int periodId,
        Collection<Integer> sourceIds );
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl;

import com.intellij.codeHighlighting.TextEditorHighlightingPass;
import com.intellij.codeInsight.daemon.*;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightingLevelManager;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.lang.Language;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.editor.markup.SeparatorPlacement;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.util.PairConsumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.NotNullList;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.util.*;

/**
 * Highlighting pass that collects gutter line markers ({@link LineMarkerInfo}) for a file by
 * querying every applicable {@link LineMarkerProvider}, including markers contributed by
 * language-injected fragments, and applies them to the editor.
 */
public class LineMarkersPass extends TextEditorHighlightingPass {
  private static final Logger LOG = Logger.getInstance(LineMarkersPass.class);

  // Result of doCollectInformation(); volatile because it is written on the collecting thread
  // and read when applying to the editor.
  private volatile List<LineMarkerInfo<?>> myMarkers = Collections.emptyList();

  @NotNull private final PsiFile myFile;
  // Range to process first (the "inside"/visible part); see Divider.divideInsideAndOutsideInOneRoot.
  @NotNull private final TextRange myPriorityBounds;
  // Overall range whose markers this pass is responsible for.
  @NotNull private final TextRange myRestrictRange;

  LineMarkersPass(@NotNull Project project,
                  @NotNull PsiFile file,
                  @NotNull Document document,
                  @NotNull TextRange priorityBounds,
                  @NotNull TextRange restrictRange) {
    super(project, document, false);
    myFile = file;
    myPriorityBounds = priorityBounds;
    myRestrictRange = restrictRange;
  }

  @Override
  public void doApplyInformationToEditor() {
    try {
      LineMarkersUtil.setLineMarkersToEditor(myProject, getDocument(), myRestrictRange, myMarkers, getId());
    }
    catch (IndexNotReadyException ignored) {
      // Indices unavailable (dumb mode): skip applying; a later pass will redo the work.
    }
  }

  @Override
  public void doCollectInformation(@NotNull ProgressIndicator progress) {
    final List<LineMarkerInfo<?>> lineMarkers = new ArrayList<>();
    FileViewProvider viewProvider = myFile.getViewProvider();
    // A file may contain several PSI roots (one per language); query providers for each root.
    for (Language language : viewProvider.getLanguages()) {
      final PsiFile root = viewProvider.getPsi(language);
      HighlightingLevelManager highlightingLevelManager = HighlightingLevelManager.getInstance(myProject);
      if (!highlightingLevelManager.shouldHighlight(root)) continue;
      Divider.divideInsideAndOutsideInOneRoot(root, myRestrictRange, myPriorityBounds, elements -> {
        Collection<LineMarkerProvider> providers = getMarkerProviders(language, myProject);
        List<LineMarkerProvider> providersList = new ArrayList<>(providers);

        // "Inside" (priority/visible) elements: besides accumulating the marker, push it to the
        // editor incrementally via invokeLater so it appears before the whole pass finishes.
        queryProviders(elements.inside, root, providersList, (element, info) -> {
          lineMarkers.add(info);
          ApplicationManager.getApplication().invokeLater(() -> {
            if (isValid()) {
              LineMarkersUtil.addLineMarkerToEditorIncrementally(myProject, getDocument(), info);
            }
          }, myProject.getDisposed());
        });
        // "Outside" elements are only accumulated; they are applied in one batch at the end.
        queryProviders(elements.outside, root, providersList, (element, info) -> lineMarkers.add(info));
        return true;
      });
    }

    myMarkers = mergeLineMarkers(lineMarkers, getDocument());
    if (LOG.isDebugEnabled()) {
      LOG.debug("LineMarkersPass.doCollectInformation. lineMarkers: " + lineMarkers+"; merged: "+myMarkers);
    }
  }

  /**
   * Pulls all {@link MergeableLineMarkerInfo}s out of {@code markers}, groups them by document
   * line and replaces each group with its merged representation; non-mergeable markers are
   * returned unchanged.
   *
   * <p>Note: mutates the passed-in list (removes mergeable entries); iterates backwards so
   * removal by index stays valid.
   */
  @NotNull
  private static List<LineMarkerInfo<?>> mergeLineMarkers(@NotNull List<LineMarkerInfo<?>> markers, @NotNull Document document) {
    List<MergeableLineMarkerInfo<?>> forMerge = new ArrayList<>();
    TIntObjectHashMap<List<MergeableLineMarkerInfo<?>>> sameLineMarkers = new TIntObjectHashMap<>();

    for (int i = markers.size() - 1; i >= 0; i--) {
      LineMarkerInfo<?> marker = markers.get(i);
      if (marker instanceof MergeableLineMarkerInfo) {
        MergeableLineMarkerInfo<?> mergeable = (MergeableLineMarkerInfo<?>)marker;
        forMerge.add(mergeable);
        markers.remove(i);

        int line = document.getLineNumber(marker.startOffset);
        List<MergeableLineMarkerInfo<?>> infos = sameLineMarkers.get(line);
        if (infos == null) {
          infos = new ArrayList<>();
          sameLineMarkers.put(line, infos);
        }
        infos.add(mergeable);
      }
    }

    if (forMerge.isEmpty()) return markers;

    List<LineMarkerInfo<?>> result = new ArrayList<>(markers);
    sameLineMarkers.forEachValue(infos -> result.addAll(MergeableLineMarkerInfo.merge(infos)));
    return result;
  }

  /**
   * Returns the providers registered for {@code language} (or any language), filtered by
   * dumb-awareness and by the user's per-provider enable/disable settings.
   */
  @NotNull
  public static List<LineMarkerProvider> getMarkerProviders(@NotNull Language language, @NotNull final Project project) {
    List<LineMarkerProvider> forLanguage = LineMarkerProviders.getInstance().allForLanguageOrAny(language);
    List<LineMarkerProvider> providers = DumbService.getInstance(project).filterByDumbAwareness(forLanguage);
    final LineMarkerSettings settings = LineMarkerSettings.getSettings();
    return ContainerUtil.filter(providers, provider -> !(provider instanceof LineMarkerProviderDescriptor)
                                                       || settings.isEnabled((LineMarkerProviderDescriptor)provider));
  }

  /**
   * Feeds every element to every provider (fast markers first, then the batched
   * {@code collectSlowLineMarkers} pass) and reports each produced marker to {@code consumer}.
   * Injected fragments inside the elements are processed recursively.
   *
   * <p>Cancellation exceptions are rethrown; any other provider failure is logged with the file
   * text attached and the remaining providers continue.
   */
  private static void queryProviders(@NotNull List<? extends PsiElement> elements,
                                     @NotNull PsiFile containingFile,
                                     @NotNull List<? extends LineMarkerProvider> providers,
                                     @NotNull PairConsumer<? super PsiElement, ? super LineMarkerInfo<?>> consumer) {
    ApplicationManager.getApplication().assertReadAccessAllowed();
    Set<PsiFile> visitedInjectedFiles = new HashSet<>();
    //noinspection ForLoopReplaceableByForEach
    for (int i = 0; i < elements.size(); i++) {
      PsiElement element = elements.get(i);

      //noinspection ForLoopReplaceableByForEach
      for (int j = 0; j < providers.size(); j++) {
        ProgressManager.checkCanceled();
        LineMarkerProvider provider = providers.get(j);
        LineMarkerInfo<?> info;
        try {
          info = provider.getLineMarkerInfo(element);
        }
        catch (ProcessCanceledException | IndexNotReadyException e) {
          throw e;
        }
        catch (Exception e) {
          LOG.error("During querying provider " + provider + " (" + provider.getClass() + ")", e,
                    new Attachment(containingFile.getViewProvider().getVirtualFile().getName(), containingFile.getText()));
          continue;
        }
        if (info != null) {
          consumer.consume(element, info);
        }
      }

      queryLineMarkersForInjected(element, containingFile, visitedInjectedFiles, consumer);
    }

    // Second phase: slow markers are collected in batch over all elements per provider.
    List<LineMarkerInfo<?>> slowLineMarkers = new NotNullList<>();
    //noinspection ForLoopReplaceableByForEach
    for (int j = 0; j < providers.size(); j++) {
      ProgressManager.checkCanceled();
      LineMarkerProvider provider = providers.get(j);
      try {
        provider.collectSlowLineMarkers(elements, slowLineMarkers);
      }
      catch (ProcessCanceledException | IndexNotReadyException e) {
        throw e;
      }
      catch (Exception e) {
        LOG.error(e);
        continue;
      }
      if (!slowLineMarkers.isEmpty()) {
        //noinspection ForLoopReplaceableByForEach
        for (int k = 0; k < slowLineMarkers.size(); k++) {
          LineMarkerInfo<?> slowInfo = slowLineMarkers.get(k);
          PsiElement element = slowInfo.getElement();
          consumer.consume(element, slowInfo);
        }
        // Reused across providers; must be emptied so markers are not reported twice.
        slowLineMarkers.clear();
      }
    }
  }

  /**
   * Collects markers from language-injected fragments inside {@code element} and converts each
   * injected marker's range back into host-file coordinates before reporting it.
   */
  private static void queryLineMarkersForInjected(@NotNull PsiElement element,
                                                  @NotNull final PsiFile containingFile,
                                                  @NotNull Set<? super PsiFile> visitedInjectedFiles,
                                                  @NotNull final PairConsumer<? super PsiElement, ? super LineMarkerInfo<?>> consumer) {
    final InjectedLanguageManager manager = InjectedLanguageManager.getInstance(containingFile.getProject());
    if (manager.isInjectedFragment(containingFile)) return;

    InjectedLanguageManager.getInstance(containingFile.getProject()).enumerateEx(element, containingFile, false, (injectedPsi, places) -> {
      if (!visitedInjectedFiles.add(injectedPsi)) return; // there may be several concatenated literals making the one injected file
      final Project project = injectedPsi.getProject();
      Document document = PsiDocumentManager.getInstance(project).getCachedDocument(injectedPsi);
      if (!(document instanceof DocumentWindow)) return;

      List<PsiElement> injElements = CollectHighlightsUtil.getElementsInRange(injectedPsi, 0, injectedPsi.getTextLength());
      final List<LineMarkerProvider> providers = getMarkerProviders(injectedPsi.getLanguage(), project);
      queryProviders(injElements, injectedPsi, providers, (injectedElement, injectedMarker) -> {
        GutterIconRenderer gutterRenderer = injectedMarker.createGutterRenderer();
        TextRange injectedRange = new TextRange(injectedMarker.startOffset, injectedMarker.endOffset);
        // The injected range may map to several editable host fragments; emit a marker for each.
        List<TextRange> editables = manager.intersectWithAllEditableFragments(injectedPsi, injectedRange);
        for (TextRange editable : editables) {
          TextRange hostRange = manager.injectedToHost(injectedPsi, editable);
          Icon icon = gutterRenderer == null ? null : gutterRenderer.getIcon();
          GutterIconNavigationHandler<PsiElement> navigationHandler = (GutterIconNavigationHandler<PsiElement>)injectedMarker.getNavigationHandler();
          //noinspection deprecation
          LineMarkerInfo<PsiElement> converted = icon == null
                                                 ? new LineMarkerInfo<>(injectedElement, hostRange)
                                                 : new LineMarkerInfo<>(injectedElement, hostRange, icon,
                                                                        e -> injectedMarker.getLineMarkerTooltip(),
                                                                        navigationHandler, GutterIconRenderer.Alignment.RIGHT,
                                                                        () -> gutterRenderer.getAccessibleName());
          consumer.consume(injectedElement, converted);
        }
      });
    });
  }

  /**
   * Runs a fresh pass synchronously and returns the collected markers without touching any
   * editor. Returns an empty list for files without a PSI tree (e.g. binary files).
   */
  @NotNull
  public static Collection<LineMarkerInfo<?>> queryLineMarkers(@NotNull PsiFile file, @NotNull Document document) {
    if (file.getNode() == null) {
      // binary file? see IDEADEV-2809
      return Collections.emptyList();
    }
    LineMarkersPass pass = new LineMarkersPass(file.getProject(), file, document, file.getTextRange(), file.getTextRange());
    pass.doCollectInformation(new EmptyProgressIndicator());
    return pass.myMarkers;
  }

  /**
   * Creates an icon-less marker that renders only a method-separator line above
   * {@code startFrom}, colored per the global color scheme.
   */
  @NotNull
  public static LineMarkerInfo<PsiElement> createMethodSeparatorLineMarker(@NotNull PsiElement startFrom,
                                                                           @NotNull EditorColorsManager colorsManager) {
    LineMarkerInfo<PsiElement> info = new LineMarkerInfo<>(startFrom, startFrom.getTextRange());
    EditorColorsScheme scheme = colorsManager.getGlobalScheme();
    info.separatorColor = scheme.getColor(CodeInsightColors.METHOD_SEPARATORS_COLOR);
    info.separatorPlacement = SeparatorPlacement.TOP;
    return info;
  }

  @Override
  public String toString() {
    return super.toString() + "; myBounds: " + myPriorityBounds;
  }
}
/*
 * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.orientechnologies.orient.server.network;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.BindException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;

import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OContextConfiguration;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.server.OServer;
import com.orientechnologies.orient.server.config.OServerCommandConfiguration;
import com.orientechnologies.orient.server.config.OServerParameterConfiguration;
import com.orientechnologies.orient.server.network.protocol.ONetworkProtocol;
import com.orientechnologies.orient.server.network.protocol.http.command.OServerCommand;

/**
 * Listener thread that binds a server socket on one of the configured ports and, for each
 * accepted client connection, instantiates and configures a new {@link ONetworkProtocol}.
 */
public class OServerNetworkListener extends Thread {
  private ServerSocket                     serverSocket;
  private InetSocketAddress                inboundAddr;
  private Class<? extends ONetworkProtocol> protocolType;
  // Accept loop runs while true; flipped by shutdown() from another thread, hence volatile.
  private volatile boolean                 active = true;
  private OServerCommand[]                 commands;
  private int                              socketBufferSize;
  private OContextConfiguration            configuration;
  private OServer                          server;
  // NOTE(review): only the protocol of the most recently accepted connection is kept here, so
  // shutdown() signals only that one; earlier connections are not tracked — confirm intended.
  private ONetworkProtocol                 protocol;

  /**
   * Binds the listener, instantiates the configured custom commands and starts the accept thread.
   *
   * @param iServer        owning server instance
   * @param iHostName      host/interface to bind
   * @param iHostPortRange single port ("2424"), range ("2424-2430") or list ("2424,2480")
   * @param iProtocolName  protocol name used only for logging
   * @param iProtocol      protocol class instantiated once per accepted connection
   * @param iParameters    optional XML-configured parameters overriding global configuration
   * @param iCommands      optional custom command configurations; each implementation class must
   *                       expose a constructor taking an {@link OServerCommandConfiguration}
   * @throws IllegalArgumentException if a custom command cannot be instantiated
   */
  @SuppressWarnings("unchecked")
  public OServerNetworkListener(final OServer iServer, final String iHostName, final String iHostPortRange,
      final String iProtocolName, final Class<? extends ONetworkProtocol> iProtocol,
      final OServerParameterConfiguration[] iParameters, final OServerCommandConfiguration[] iCommands) {
    super(Orient.getThreadGroup(), "OrientDB " + iProtocol.getSimpleName() + " listen at " + iHostName + ":" + iHostPortRange);
    server = iServer;

    listen(iHostName, iHostPortRange, iProtocolName);
    protocolType = iProtocol;

    readParameters(iServer.getContextConfiguration(), iParameters);

    if (iCommands != null) {
      // CREATE COMMANDS
      commands = new OServerCommand[iCommands.length];

      Constructor<OServerCommand> c;
      for (int i = 0; i < iCommands.length; ++i) {
        try {
          c = (Constructor<OServerCommand>) Class.forName(iCommands[i].implementation).getConstructor(
              OServerCommandConfiguration.class);
          commands[i] = c.newInstance(new Object[] { iCommands[i] });
        } catch (Exception e) {
          throw new IllegalArgumentException("Cannot create custom command invoking the constructor: "
              + iCommands[i].implementation + "(" + iCommands[i] + ")", e);
        }
      }
    }

    start();
  }

  /**
   * Stops the accept loop, asks the last active protocol to shut down and closes the server
   * socket (which unblocks a pending accept()).
   */
  public void shutdown() {
    this.active = false;

    if (protocol != null) {
      protocol.sendShutdown();
      protocol = null;
    }

    if (serverSocket != null)
      try {
        serverSocket.close();
      } catch (IOException e) {
        // Best-effort close during shutdown: previously swallowed silently; keep it non-fatal
        // but leave a trace for diagnostics.
        OLogManager.instance().debug(this, "Error closing the listener server socket", e);
      }
  }

  /**
   * Initialize a server socket for communicating with the client.
   *
   * @param iHostName      host/interface to bind
   * @param iHostPortRange port specification (single, range or comma-separated list)
   * @param iProtocolName  protocol name, for logging only
   */
  private void listen(final String iHostName, final String iHostPortRange, final String iProtocolName) {
    final int[] ports = getPorts(iHostPortRange);

    // Try each candidate port in order; first successful bind wins.
    for (int port : ports) {
      inboundAddr = new InetSocketAddress(iHostName, port);
      try {
        serverSocket = new ServerSocket(port, 0, InetAddress.getByName(iHostName));

        if (serverSocket.isBound()) {
          OLogManager.instance().info(this,
              "Listening " + iProtocolName + " connections on " + inboundAddr.getHostName() + ":" + inboundAddr.getPort());
          return;
        }
      } catch (BindException be) {
        // Port already in use: fall through to the next candidate.
        OLogManager.instance().info(this, "Port %s:%d busy, trying the next available...", iHostName, port);
      } catch (SocketException se) {
        OLogManager.instance().error(this, "Unable to create socket", se);
        System.exit(1);
      } catch (IOException ioe) {
        OLogManager.instance().error(this, "Unable to read data from an open socket", ioe);
        System.err.println("Unable to read data from an open socket.");
        System.exit(1);
      }
    }

    OLogManager.instance().error(this, "Unable to listen for connections using the configured ports '%s' on host '%s'",
        iHostPortRange, iHostName);
    System.exit(1);
  }

  public boolean isActive() {
    return active;
  }

  @Override
  public void run() {
    try {
      while (active) {
        try {
          // listen for and accept a client connection to serverSocket
          final Socket socket = serverSocket.accept();

          socket.setPerformancePreferences(0, 2, 1);
          socket.setSendBufferSize(socketBufferSize);
          socket.setReceiveBufferSize(socketBufferSize);

          // CREATE A NEW PROTOCOL INSTANCE
          protocol = protocolType.newInstance();

          // CONFIGURE THE PROTOCOL FOR THE INCOMING CONNECTION
          protocol.config(server, socket, configuration, commands);

        } catch (Throwable e) {
          // When shutdown() closes the socket, accept() throws; suppress the log in that case.
          if (active)
            OLogManager.instance().error(this, "Error on client connection", e);
        }
      }
    } finally {
      try {
        if (serverSocket != null && !serverSocket.isClosed())
          serverSocket.close();
      } catch (IOException ioe) {
        // Best-effort cleanup on exit; previously swallowed silently.
        OLogManager.instance().debug(this, "Error closing the listener server socket on exit", ioe);
      }
    }
  }

  public Class<? extends ONetworkProtocol> getProtocolType() {
    return protocolType;
  }

  public InetSocketAddress getInboundAddr() {
    return inboundAddr;
  }

  /**
   * Initializes connection parameters by the reading XML configuration. If not specified, get the
   * parameters defined as global configuration.
   *
   * @param iServerConfig base (global) context configuration
   * @param iParameters   optional per-listener overrides
   */
  private void readParameters(final OContextConfiguration iServerConfig, final OServerParameterConfiguration[] iParameters) {
    configuration = new OContextConfiguration(iServerConfig);

    // SET PARAMETERS
    if (iParameters != null && iParameters.length > 0) {
      // CONVERT PARAMETERS IN MAP TO INTIALIZE THE CONTEXT-CONFIGURATION
      for (OServerParameterConfiguration param : iParameters)
        configuration.setValue(param.name, param.value);
    }

    socketBufferSize = configuration.getValueAsInteger(OGlobalConfiguration.NETWORK_SOCKET_BUFFER_SIZE);
  }

  /**
   * Parses a port specification into the concrete list of candidate ports.
   *
   * @param iHostPortRange "2424" (single), "2424,2480" (list) or "2424-2430" (inclusive range)
   * @return ports in the order they should be tried
   * @throws NumberFormatException if any component is not a valid integer
   */
  public static int[] getPorts(final String iHostPortRange) {
    int[] ports;
    if (OStringSerializerHelper.contains(iHostPortRange, ',')) {
      // MULTIPLE ENUMERATED PORTS
      String[] portValues = iHostPortRange.split(",");
      ports = new int[portValues.length];
      for (int i = 0; i < portValues.length; ++i)
        ports[i] = Integer.parseInt(portValues[i]);

    } else if (OStringSerializerHelper.contains(iHostPortRange, '-')) {
      // MULTIPLE RANGE PORTS
      String[] limits = iHostPortRange.split("-");
      int lowerLimit = Integer.parseInt(limits[0]);
      int upperLimit = Integer.parseInt(limits[1]);
      ports = new int[upperLimit - lowerLimit + 1];
      for (int i = 0; i < upperLimit - lowerLimit + 1; ++i)
        ports[i] = lowerLimit + i;

    } else
      // SINGLE PORT SPECIFIED
      ports = new int[] { Integer.parseInt(iHostPortRange) };
    return ports;
  }

  /**
   * Returns "host:port" the listener is bound to; a wildcard bind (0.0.0.0) is resolved to the
   * local host address when possible.
   */
  public String getListeningAddress() {
    String address = serverSocket.getInetAddress().getHostAddress();
    if (address.equals("0.0.0.0"))
      try {
        address = InetAddress.getLocalHost().getHostAddress();
      } catch (UnknownHostException ignored) {
        // Cannot resolve the local host: keep the wildcard address.
      }
    return address + ":" + serverSocket.getLocalPort();
  }

  @Override
  public String toString() {
    // BUGFIX: the previous implementation appended a dangling ":" with nothing after it;
    // getLocalSocketAddress() already includes the port.
    return protocolType.getSimpleName() + " " + serverSocket.getLocalSocketAddress();
  }

  public ONetworkProtocol getProtocol() {
    return protocol;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package jef.net.ftpserver.impl;

import java.io.IOException;
import java.nio.charset.MalformedInputException;

import jef.net.ftpserver.command.Command;
import jef.net.ftpserver.command.CommandFactory;
import jef.net.ftpserver.ftplet.DefaultFtpReply;
import jef.net.ftpserver.ftplet.FileSystemView;
import jef.net.ftpserver.ftplet.FtpReply;
import jef.net.ftpserver.ftplet.FtpRequest;
import jef.net.ftpserver.ftplet.FtpletResult;
import jef.net.ftpserver.ftpletcontainer.FtpletContainer;
import jef.net.ftpserver.listener.Listener;
import org.apache.mina.core.session.IdleStatus;
import org.apache.mina.core.write.WriteToClosedSessionException;
import org.apache.mina.filter.codec.ProtocolDecoderException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * <strong>Internal class, do not use directly.</strong>
 *
 * Default {@link FtpHandler}: dispatches MINA session events to the ftplet container, the
 * command factory and the statistics collector.
 *
 * @author <a href="http://mina.apache.org">Apache MINA Project</a>
 *
 */
public class DefaultFtpHandler implements FtpHandler {

    private final Logger LOG = LoggerFactory.getLogger(DefaultFtpHandler.class);

    // Maximum time, in milliseconds, to wait for a session close to complete.
    // (Previously the magic number 10000 was repeated at every close site.)
    private static final long CLOSE_WAIT_MILLIS = 10000;

    // Commands a client may issue before it has logged in.
    private final static String[] NON_AUTHENTICATED_COMMANDS = new String[] {
            "USER", "PASS", "AUTH", "QUIT", "PROT", "PBSZ" };

    private FtpServerContext context;

    private Listener listener;

    /** Stores the server context and the listener this handler serves. */
    public void init(final FtpServerContext context, final Listener listener) {
        this.context = context;
        this.listener = listener;
    }

    /** Registers the new session with the listener and the open-connection statistics. */
    public void sessionCreated(final FtpIoSession session) throws Exception {
        session.setListener(listener);

        ServerFtpStatistics stats = ((ServerFtpStatistics) context
                .getFtpStatistics());

        if (stats != null) {
            stats.setOpenConnection(session);
        }
    }

    /**
     * Asks the ftplet container whether to keep the connection; on DISCONNECT closes the
     * session, otherwise sends the 220 service-ready greeting.
     */
    public void sessionOpened(final FtpIoSession session) throws Exception {
        FtpletContainer ftplets = context.getFtpletContainer();

        FtpletResult ftpletRet;
        try {
            ftpletRet = ftplets.onConnect(session.getFtpletSession());
        } catch (Exception e) {
            LOG.debug("Ftplet threw exception", e);
            ftpletRet = FtpletResult.DISCONNECT;
        }
        if (ftpletRet == FtpletResult.DISCONNECT) {
            LOG.debug("Ftplet returned DISCONNECT, session will be closed");
            session.close(false).awaitUninterruptibly(CLOSE_WAIT_MILLIS);
        } else {
            session.updateLastAccessTime();

            session.write(LocalizedFtpReply.translate(session, null, context,
                    FtpReply.REPLY_220_SERVICE_READY, null, null));
        }
    }

    /**
     * Tears the session down: notifies ftplets, closes any open data connection, disposes the
     * file-system view and decreases the statistics counters. All cleanup is best-effort.
     */
    public void sessionClosed(final FtpIoSession session) throws Exception {
        LOG.debug("Closing session");
        try {
            context.getFtpletContainer().onDisconnect(
                    session.getFtpletSession());
        } catch (Exception e) {
            // swallow the exception, we're closing down the session anyways
            LOG.warn("Ftplet threw an exception on disconnect", e);
        }

        // make sure we close the data connection if it happens to be open
        try {
            ServerDataConnectionFactory dc = session.getDataConnection();
            if (dc != null) {
                dc.closeDataConnection();
            }
        } catch (Exception e) {
            // swallow the exception, we're closing down the session anyways
            LOG.warn("Data connection threw an exception on disconnect", e);
        }

        FileSystemView fs = session.getFileSystemView();
        if (fs != null) {
            try {
                fs.dispose();
            } catch (Exception e) {
                LOG.warn("FileSystemView threw an exception on disposal", e);
            }
        }

        ServerFtpStatistics stats = ((ServerFtpStatistics) context
                .getFtpStatistics());

        if (stats != null) {
            stats.setLogout(session);
            stats.setCloseConnection(session);
            LOG.debug("Statistics login and connection count decreased due to session close");
        } else {
            LOG.warn("Statistics not available in session, can not decrease login and connection count");
        }
        LOG.debug("Session closed");
    }

    /**
     * Maps low-level failures to FTP replies: undecodable (non-UTF-8) input gets a 501,
     * a write to an already-closed session is logged, anything else closes the session.
     */
    public void exceptionCaught(final FtpIoSession session,
            final Throwable cause) throws Exception {

        if (cause instanceof ProtocolDecoderException &&
                cause.getCause() instanceof MalformedInputException) {
            // client probably sent something which is not UTF-8 and we failed to
            // decode it
            LOG.warn(
                    "Client sent command that could not be decoded: {}",
                    ((ProtocolDecoderException) cause).getHexdump());
            session.write(new DefaultFtpReply(FtpReply.REPLY_501_SYNTAX_ERROR_IN_PARAMETERS_OR_ARGUMENTS,
                    "Invalid character in command"));
        } else if (cause instanceof WriteToClosedSessionException) {
            WriteToClosedSessionException writeToClosedSessionException =
                (WriteToClosedSessionException) cause;
            LOG.warn(
                    "Client closed connection before all replies could be sent, last reply was {}",
                    writeToClosedSessionException.getRequest());
            session.close(false).awaitUninterruptibly(CLOSE_WAIT_MILLIS);
        } else {
            LOG.error("Exception caught, closing session", cause);
            session.close(false).awaitUninterruptibly(CLOSE_WAIT_MILLIS);
        }
    }

    /** Returns true if {@code command} may be issued before authentication. */
    private boolean isCommandOkWithoutAuthentication(String command) {
        for (String allowed : NON_AUTHENTICATED_COMMANDS) {
            if (allowed.equals(command)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Handles one FTP request: enforces authentication, runs ftplet before/after hooks
     * (either of which may request a disconnect) and executes the matching {@link Command}.
     * On failure a generic 550 reply is attempted; IOExceptions are rethrown to the caller.
     */
    public void messageReceived(final FtpIoSession session,
            final FtpRequest request) throws Exception {
        try {
            session.updateLastAccessTime();

            String commandName = request.getCommand();
            CommandFactory commandFactory = context.getCommandFactory();
            Command command = commandFactory.getCommand(commandName);

            // make sure the user is authenticated before he issues commands
            if (!session.isLoggedIn()
                    && !isCommandOkWithoutAuthentication(commandName)) {
                session.write(LocalizedFtpReply.translate(session, request, context,
                        FtpReply.REPLY_530_NOT_LOGGED_IN, "permission", null));
                return;
            }

            FtpletContainer ftplets = context.getFtpletContainer();

            FtpletResult ftpletRet;
            try {
                ftpletRet = ftplets.beforeCommand(session.getFtpletSession(), request);
            } catch (Exception e) {
                LOG.debug("Ftplet container threw exception", e);
                ftpletRet = FtpletResult.DISCONNECT;
            }
            if (ftpletRet == FtpletResult.DISCONNECT) {
                LOG.debug("Ftplet returned DISCONNECT, session will be closed");
                session.close(false).awaitUninterruptibly(CLOSE_WAIT_MILLIS);
                return;
            } else if (ftpletRet != FtpletResult.SKIP) {

                if (command != null) {
                    // serialize command execution per session
                    synchronized (session) {
                        command.execute(session, context, request);
                    }
                } else {
                    session.write(LocalizedFtpReply.translate(session, request, context,
                            FtpReply.REPLY_502_COMMAND_NOT_IMPLEMENTED,
                            "not.implemented", null));
                }

                try {
                    ftpletRet = ftplets.afterCommand(
                            session.getFtpletSession(), request, session
                                    .getLastReply());
                } catch (Exception e) {
                    LOG.debug("Ftplet container threw exception", e);
                    ftpletRet = FtpletResult.DISCONNECT;
                }
                if (ftpletRet == FtpletResult.DISCONNECT) {
                    LOG.debug("Ftplet returned DISCONNECT, session will be closed");
                    session.close(false).awaitUninterruptibly(CLOSE_WAIT_MILLIS);
                    return;
                }
            }
        } catch (Exception ex) {
            // send error reply (best-effort: the session may be unusable by now)
            try {
                session.write(LocalizedFtpReply.translate(session, request, context,
                        FtpReply.REPLY_550_REQUESTED_ACTION_NOT_TAKEN, null,
                        null));
            } catch (Exception ex1) {
                // Previously swallowed silently; keep the best-effort semantics but record it.
                LOG.debug("Failed to send error reply to client", ex1);
            }

            if (ex instanceof java.io.IOException) {
                throw (IOException) ex;
            } else {
                LOG.warn("RequestHandler.service()", ex);
            }
        }
    }

    /** Closes sessions that have been idle longer than the configured timeout. */
    public void sessionIdle(final FtpIoSession session, final IdleStatus status)
            throws Exception {
        LOG.info("Session idle, closing");
        session.close(false).awaitUninterruptibly(CLOSE_WAIT_MILLIS);
    }

    /** No action required after a reply has been written. */
    public void messageSent(final FtpIoSession session, final FtpReply reply)
            throws Exception {
        // do nothing
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import java.io.FileNotFoundException; import java.io.IOException; import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.EnumSet; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.CorruptFileBlockIterator; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.FSConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.Progressable; @InterfaceAudience.Private @InterfaceStability.Evolving public class Hdfs extends AbstractFileSystem { DFSClient dfs; private boolean verifyChecksum = true; static { Configuration.addDefaultResource("hdfs-default.xml"); 
Configuration.addDefaultResource("hdfs-site.xml"); } /** * This constructor has the signature needed by * {@link AbstractFileSystem#createFileSystem(URI, Configuration)} * * @param theUri * which must be that of Hdfs * @param conf * @throws IOException */ Hdfs(final URI theUri, final Configuration conf) throws IOException, URISyntaxException { super(theUri, FSConstants.HDFS_URI_SCHEME, true, NameNode.DEFAULT_PORT); if (!theUri.getScheme().equalsIgnoreCase(FSConstants.HDFS_URI_SCHEME)) { throw new IllegalArgumentException("Passed URI's scheme is not for Hdfs"); } String host = theUri.getHost(); if (host == null) { throw new IOException("Incomplete HDFS URI, no host: " + theUri); } InetSocketAddress namenode = NameNode.getAddress(theUri.getAuthority()); this.dfs = new DFSClient(namenode, conf, getStatistics()); } @Override public int getUriDefaultPort() { return NameNode.DEFAULT_PORT; } @Override public FSDataOutputStream createInternal(Path f, EnumSet<CreateFlag> createFlag, FsPermission absolutePermission, int bufferSize, short replication, long blockSize, Progressable progress, int bytesPerChecksum, boolean createParent) throws IOException { return new FSDataOutputStream(dfs.primitiveCreate(getUriPath(f), absolutePermission, createFlag, createParent, replication, blockSize, progress, bufferSize, bytesPerChecksum), getStatistics()); } @Override public boolean delete(Path f, boolean recursive) throws IOException, UnresolvedLinkException { return dfs.delete(getUriPath(f), recursive); } @Override public BlockLocation[] getFileBlockLocations(Path p, long start, long len) throws IOException, UnresolvedLinkException { return dfs.getBlockLocations(getUriPath(p), start, len); } @Override public FileChecksum getFileChecksum(Path f) throws IOException, UnresolvedLinkException { return dfs.getFileChecksum(getUriPath(f)); } @Override public FileStatus getFileStatus(Path f) throws IOException, UnresolvedLinkException { HdfsFileStatus fi = dfs.getFileInfo(getUriPath(f)); if (fi 
      != null) {
      return makeQualified(fi, f);
    }
    else {
      throw new FileNotFoundException("File does not exist: " + f.toString());
    }
  }

  /**
   * Returns the status of the path itself without resolving its final path
   * component (delegates to {@code dfs.getFileLinkInfo}), so for a symlink
   * this is the status of the link, not its target.
   *
   * @param f the path being queried
   * @return the fully-qualified {@link FileStatus} for {@code f}
   * @throws FileNotFoundException if nothing exists at {@code f}
   */
  @Override
  public FileStatus getFileLinkStatus(Path f)
      throws IOException, UnresolvedLinkException {
    HdfsFileStatus fi = dfs.getFileLinkInfo(getUriPath(f));
    if (fi != null) {
      return makeQualified(fi, f);
    }
    else {
      throw new FileNotFoundException("File does not exist: " + f);
    }
  }

  /**
   * Converts an {@link HdfsFileStatus} returned by the client (whose path is
   * relative to {@code parent}) into a {@link FileStatus} whose path is fully
   * qualified with this file system's URI.
   */
  private FileStatus makeQualified(HdfsFileStatus f, Path parent) {
    // NB: symlink is made fully-qualified in FileContext.
    return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
        f.getBlockSize(), f.getModificationTime(),
        f.getAccessTime(),
        f.getPermission(), f.getOwner(), f.getGroup(),
        f.isSymlink() ? new Path(f.getSymlink()) : null,
        (f.getFullPath(parent)).makeQualified(
            getUri(), null)); // fully-qualify path
  }

  /**
   * Same as {@link #makeQualified(HdfsFileStatus, Path)} but additionally
   * carries the file's block locations into the returned
   * {@link LocatedFileStatus}.
   */
  private LocatedFileStatus makeQualifiedLocated(
      HdfsLocatedFileStatus f, Path parent) {
    return new LocatedFileStatus(f.getLen(), f.isDir(), f.getReplication(),
        f.getBlockSize(), f.getModificationTime(),
        f.getAccessTime(),
        f.getPermission(), f.getOwner(), f.getGroup(),
        f.isSymlink() ? new Path(f.getSymlink()) : null,
        (f.getFullPath(parent)).makeQualified(
            getUri(), null), // fully-qualify path
        DFSUtil.locatedBlocks2Locations(f.getBlockLocations()));
  }

  /** File system usage as reported by the NameNode ({@code dfs.getDiskStatus()}). */
  @Override
  public FsStatus getFsStatus() throws IOException {
    return dfs.getDiskStatus();
  }

  /** Server-side defaults (block size, replication, ...) from the NameNode. */
  @Override
  public FsServerDefaults getServerDefaults() throws IOException {
    return dfs.getServerDefaults();
  }

  /**
   * Iterator over the entries of directory {@code p}; each entry includes
   * block locations (the listing is fetched with {@code needLocation == true}).
   */
  @Override
  public RemoteIterator<LocatedFileStatus> listLocatedStatus(
      final Path p)
      throws FileNotFoundException, IOException {
    return new DirListingIterator<LocatedFileStatus>(p, true) {

      @Override
      public LocatedFileStatus next() throws IOException {
        return makeQualifiedLocated((HdfsLocatedFileStatus)getNext(), p);
      }
    };
  }

  /**
   * Iterator over the entries of directory {@code f}, without block
   * locations.
   */
  @Override
  public RemoteIterator<FileStatus> listStatusIterator(final Path f)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    return new DirListingIterator<FileStatus>(f, false) {

      @Override
      public FileStatus next() throws IOException {
        return makeQualified(getNext(), f);
      }
    };
  }

  /**
   * This class defines an iterator that returns
   * the file status of each file/subdirectory of a directory
   *
   * if needLocation, status contains block location if it is a file
   * throws a RuntimeException with the error as its cause.
   *
   * Listings are fetched from the NameNode one batch at a time; the iterator
   * transparently requests the next batch when the current one is exhausted.
   *
   * @param <T> the type of the file status
   */
  abstract private class DirListingIterator<T extends FileStatus>
      implements RemoteIterator<T> {
    // Current batch of directory entries, or null once the directory vanished.
    private DirectoryListing thisListing;
    // Index of the next unread entry within thisListing's partial listing.
    private int i;
    // URI path of the directory being listed.
    final private String src;
    // if status should also carry block locations (passed to listPaths)
    final private boolean needLocation;

    private DirListingIterator(Path p, boolean needLocation)
        throws IOException {
      this.src = Hdfs.this.getUriPath(p);
      this.needLocation = needLocation;

      // fetch the first batch of entries in the directory
      thisListing = dfs.listPaths(
          src, HdfsFileStatus.EMPTY_NAME, needLocation);
      if (thisListing == null) { // the directory does not exist
        throw new FileNotFoundException("File " + src + " does not exist.");
      }
    }

    @Override
    public boolean hasNext() throws IOException {
      if (thisListing == null) {
        return false;
      }
      if (i>=thisListing.getPartialListing().length
          && thisListing.hasMore()) {
        // current listing is exhausted & fetch a new listing
        thisListing = dfs.listPaths(src,
            thisListing.getLastName(), needLocation);
        if (thisListing == null) {
          return false; // the directory is deleted
        }
        i = 0;
      }
      return (i<thisListing.getPartialListing().length);
    }

    /**
     * Get the next item in the list
     * @return the next item in the list
     *
     * @throws IOException if there is any error
     * @throws java.util.NoSuchElementException if no more entry is available
     */
    public HdfsFileStatus getNext() throws IOException {
      if (hasNext()) {
        return thisListing.getPartialListing()[i++];
      }
      throw new java.util.NoSuchElementException("No more entry in " + src);
    }
  }

  /**
   * Lists directory {@code f} eagerly into an array. Small directories come
   * back in a single RPC; larger ones are paged from the NameNode until
   * {@code hasMore()} is false.
   *
   * @throws FileNotFoundException if the directory does not exist, or is
   *         deleted while paging through it
   */
  @Override
  public FileStatus[] listStatus(Path f)
      throws IOException, UnresolvedLinkException {
    String src = getUriPath(f);

    // fetch the first batch of entries in the directory
    DirectoryListing thisListing = dfs.listPaths(
        src, HdfsFileStatus.EMPTY_NAME);

    if (thisListing == null) { // the directory does not exist
      throw new FileNotFoundException("File " + f + " does not exist.");
    }

    HdfsFileStatus[] partialListing = thisListing.getPartialListing();
    if (!thisListing.hasMore()) { // got all entries of the directory
      FileStatus[] stats = new FileStatus[partialListing.length];
      for (int i = 0; i < partialListing.length; i++) {
        stats[i] = makeQualified(partialListing[i], f);
      }
      return stats;
    }

    // The directory size is too big that it needs to fetch more
    // estimate the total number of entries in the directory
    int totalNumEntries =
        partialListing.length + thisListing.getRemainingEntries();
    ArrayList<FileStatus> listing =
        new ArrayList<FileStatus>(totalNumEntries);
    // add the first batch of entries to the array list
    for (HdfsFileStatus fileStatus : partialListing) {
      listing.add(makeQualified(fileStatus, f));
    }

    // now fetch more entries
    do {
      thisListing = dfs.listPaths(src, thisListing.getLastName());

      if (thisListing == null) { // the directory is deleted
        throw new FileNotFoundException("File " + f + " does not exist.");
      }

      partialListing = thisListing.getPartialListing();
      for (HdfsFileStatus fileStatus : partialListing) {
        listing.add(makeQualified(fileStatus, f));
      }
    } while (thisListing.hasMore());

    return listing.toArray(new FileStatus[listing.size()]);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public RemoteIterator<Path> listCorruptFileBlocks(Path path)
      throws IOException {
    return new CorruptFileBlockIterator(dfs, path);
  }

  /** Creates directory {@code dir}, optionally creating missing parents. */
  @Override
  public void mkdir(Path dir, FsPermission permission, boolean createParent)
      throws IOException, UnresolvedLinkException {
    dfs.mkdirs(getUriPath(dir), permission, createParent);
  }

  /** Opens {@code f} for reading, honoring the current verifyChecksum setting. */
  @Override
  public FSDataInputStream open(Path f, int bufferSize)
      throws IOException, UnresolvedLinkException {
    return new DFSClient.DFSDataInputStream(dfs.open(getUriPath(f),
        bufferSize, verifyChecksum));
  }

  /** Rename without overwrite (Options.Rename.NONE). */
  @Override
  public void renameInternal(Path src, Path dst)
      throws IOException, UnresolvedLinkException {
    dfs.rename(getUriPath(src), getUriPath(dst), Options.Rename.NONE);
  }

  /** Rename, overwriting the destination when {@code overwrite} is set. */
  @Override
  public void renameInternal(Path src, Path dst, boolean overwrite)
      throws IOException, UnresolvedLinkException {
    dfs.rename(getUriPath(src), getUriPath(dst),
        overwrite ? Options.Rename.OVERWRITE : Options.Rename.NONE);
  }

  @Override
  public void setOwner(Path f, String username, String groupname)
      throws IOException, UnresolvedLinkException {
    dfs.setOwner(getUriPath(f), username, groupname);
  }

  @Override
  public void setPermission(Path f, FsPermission permission)
      throws IOException, UnresolvedLinkException {
    dfs.setPermission(getUriPath(f), permission);
  }

  @Override
  public boolean setReplication(Path f, short replication)
      throws IOException, UnresolvedLinkException {
    return dfs.setReplication(getUriPath(f), replication);
  }

  /** Sets modification and access times (millis since the epoch). */
  @Override
  public void setTimes(Path f, long mtime, long atime)
      throws IOException, UnresolvedLinkException {
    dfs.setTimes(getUriPath(f), mtime, atime);
  }

  /** Toggles checksum verification for streams opened after this call. */
  @Override
  public void setVerifyChecksum(boolean verifyChecksum)
      throws IOException {
    this.verifyChecksum = verifyChecksum;
  }

  /** HDFS supports symbolic links. */
  @Override
  public boolean supportsSymlinks() {
    return true;
  }

  @Override
  public void createSymlink(Path target, Path link, boolean createParent)
      throws IOException, UnresolvedLinkException {
    dfs.createSymlink(target.toString(), getUriPath(link), createParent);
  }

  /** Returns the target of symlink {@code p} as reported by the NameNode. */
  @Override
  public Path getLinkTarget(Path p) throws IOException {
    return new Path(dfs.getLinkTarget(getUriPath(p)));
  }
}
/* Copyright 2006-2015 SpringSource. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package grails.plugin.springsecurity.web.access.intercept; import grails.plugin.springsecurity.InterceptedUrl; import grails.util.GrailsUtil; import grails.util.Metadata; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import javax.servlet.http.HttpServletRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.context.support.MessageSourceAccessor; import org.springframework.http.HttpMethod; import org.springframework.security.access.ConfigAttribute; import org.springframework.security.access.SecurityConfig; import org.springframework.security.access.vote.AuthenticatedVoter; import org.springframework.security.access.vote.RoleVoter; import org.springframework.security.core.SpringSecurityMessageSource; import org.springframework.security.web.FilterInvocation; import org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource; import org.springframework.util.AntPathMatcher; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * @author <a href='mailto:burt@burtbeckwith.com'>Burt Beckwith</a> */ public abstract class AbstractFilterInvocationDefinition implements 
FilterInvocationSecurityMetadataSource, InitializingBean { protected static final Collection<ConfigAttribute> DENY = Collections.singletonList((ConfigAttribute)new SecurityConfig("_DENY_")); protected boolean rejectIfNoRule; protected RoleVoter roleVoter; protected AuthenticatedVoter authenticatedVoter; protected final List<InterceptedUrl> compiled = new CopyOnWriteArrayList<InterceptedUrl>(); protected MessageSourceAccessor messages = SpringSecurityMessageSource.getAccessor(); protected AntPathMatcher urlMatcher = new AntPathMatcher(); protected boolean initialized; protected boolean grails23Plus; protected final Logger log = LoggerFactory.getLogger(getClass()); /** * Allows subclasses to be externally reset. * @throws Exception */ public void reset() throws Exception { // override if necessary } public Collection<ConfigAttribute> getAttributes(Object object) throws IllegalArgumentException { Assert.notNull(object, "Object must be a FilterInvocation"); Assert.isTrue(supports(object.getClass()), "Object must be a FilterInvocation"); FilterInvocation filterInvocation = (FilterInvocation)object; String url = determineUrl(filterInvocation); Collection<ConfigAttribute> configAttributes; try { configAttributes = findConfigAttributes(url, filterInvocation.getRequest().getMethod()); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new RuntimeException(e); } if ((configAttributes == null || configAttributes.isEmpty()) && rejectIfNoRule) { // return something that cannot be valid; this will cause the voters to abstain or deny return DENY; } return configAttributes; } protected String determineUrl(final FilterInvocation filterInvocation) { return lowercaseAndStripQuerystring(calculateUri(filterInvocation.getHttpRequest())); } protected boolean stopAtFirstMatch() { return false; } // for testing public InterceptedUrl getInterceptedUrl(final String url, final HttpMethod httpMethod) throws Exception { initialize(); for (InterceptedUrl iu : compiled) { if 
(iu.getHttpMethod() == httpMethod && iu.getPattern().equals(url)) { return iu; } } return null; } protected Collection<ConfigAttribute> findConfigAttributes(final String url, final String requestMethod) throws Exception { initialize(); Collection<ConfigAttribute> configAttributes = null; String configAttributePattern = null; boolean stopAtFirstMatch = stopAtFirstMatch(); for (InterceptedUrl iu : compiled) { if (iu.getHttpMethod() != null && requestMethod != null && iu.getHttpMethod() != HttpMethod.valueOf(requestMethod)) { if (log.isDebugEnabled()) { log.debug("Request '{} {}' doesn't match '{} {}'", new Object[] { requestMethod, url, iu.getHttpMethod(), iu.getPattern() }); } continue; } if (urlMatcher.match(iu.getPattern(), url)) { if (configAttributes == null || urlMatcher.match(configAttributePattern, iu.getPattern())) { configAttributes = iu.getConfigAttributes(); configAttributePattern = iu.getPattern(); if (log.isTraceEnabled()) { log.trace("new candidate for '{}': '{}':{}", new Object[] { url, iu.getPattern(), configAttributes }); } if (stopAtFirstMatch) { break; } } } } if (log.isTraceEnabled()) { if (configAttributes == null) { log.trace("no config for '{}'", url); } else { log.trace("config for '{}' is '{}':{}", new Object[] { url, configAttributePattern, configAttributes }); } } return configAttributes; } protected void initialize() throws Exception { // override if necessary } public boolean supports(Class<?> clazz) { return FilterInvocation.class.isAssignableFrom(clazz); } public Collection<ConfigAttribute> getAllConfigAttributes() { try { initialize(); } catch (Exception e) { GrailsUtil.deepSanitize(e); log.error(e.getMessage(), e); } Collection<ConfigAttribute> all = new LinkedHashSet<ConfigAttribute>(); for (InterceptedUrl iu : compiled) { all.addAll(iu.getConfigAttributes()); } return Collections.unmodifiableCollection(all); } protected String calculateUri(final HttpServletRequest request) { String url = 
request.getRequestURI().substring(request.getContextPath().length()); int semicolonIndex = url.indexOf(";"); return semicolonIndex == -1 ? url : url.substring(0, semicolonIndex); } protected String lowercaseAndStripQuerystring(final String url) { String fixed = url.toLowerCase(); int firstQuestionMarkIndex = fixed.indexOf("?"); if (firstQuestionMarkIndex != -1) { fixed = fixed.substring(0, firstQuestionMarkIndex); } return fixed; } protected AntPathMatcher getUrlMatcher() { return urlMatcher; } /** * For debugging. * @return an unmodifiable map of {@link AnnotationFilterInvocationDefinition}ConfigAttributeDefinition * keyed by compiled patterns */ public List<InterceptedUrl> getConfigAttributeMap() { return Collections.unmodifiableList(compiled); } // fixes extra spaces, trailing commas, etc. protected List<String> split(final String value) { if (!value.startsWith("ROLE_") && !value.startsWith("IS_")) { // an expression return Collections.singletonList(value); } String[] parts = StringUtils.commaDelimitedListToStringArray(value); List<String> cleaned = new ArrayList<String>(); for (String part : parts) { part = part.trim(); if (part.length() > 0) { cleaned.add(part); } } return cleaned; } protected void compileAndStoreMapping(InterceptedUrl iu) { String pattern = iu.getPattern(); HttpMethod method = iu.getHttpMethod(); String key = pattern.toLowerCase(); Collection<ConfigAttribute> configAttributes = iu.getConfigAttributes(); InterceptedUrl replaced = storeMapping(key, method, Collections.unmodifiableCollection(configAttributes)); if (replaced != null) { log.warn("replaced rule for '{}' with roles {} with roles {}", new Object[] { key, replaced.getConfigAttributes(), configAttributes }); } } protected InterceptedUrl storeMapping(final String pattern, final HttpMethod method, final Collection<ConfigAttribute> configAttributes) { InterceptedUrl existing = null; for (InterceptedUrl iu : compiled) { if (iu.getPattern().equals(pattern) && iu.getHttpMethod() == method) { 
existing = iu; break; } } if (existing != null) { compiled.remove(existing); } compiled.add(new InterceptedUrl(pattern, method, configAttributes)); return existing; } protected void resetConfigs() { compiled.clear(); } /** * For admin/debugging - find all config attributes that apply to the specified URL (doesn't consider request method restrictions). * @param url the URL * @return matching attributes */ public Collection<ConfigAttribute> findMatchingAttributes(final String url) { for (InterceptedUrl iu : compiled) { if (urlMatcher.match(iu.getPattern(), url)) { return iu.getConfigAttributes(); } } return Collections.emptyList(); } /** * Dependency injection for whether to reject if there's no matching rule. * @param reject if true, reject access unless there's a pattern for the specified resource */ public void setRejectIfNoRule(final boolean reject) { rejectIfNoRule = reject; } public void afterPropertiesSet() { String version = Metadata.getCurrent().getGrailsVersion(); grails23Plus = !version.startsWith("2.0") && !version.startsWith("2.1") && !version.startsWith("2.2"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cocoon.components.flow.javascript.fom; import java.awt.Dimension; import java.awt.Toolkit; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PushbackInputStream; import java.io.Reader; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.servlet.http.HttpSession; import org.apache.avalon.framework.activity.Initializable; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; import org.apache.cocoon.ResourceNotFoundException; import org.apache.cocoon.components.flow.CompilingInterpreter; import org.apache.cocoon.components.flow.Interpreter; import org.apache.cocoon.components.flow.InvalidContinuationException; import org.apache.cocoon.components.flow.WebContinuation; import org.apache.cocoon.components.flow.javascript.JSErrorReporter; import org.apache.cocoon.components.flow.javascript.LocationTrackingDebugger; import org.apache.cocoon.components.flow.javascript.ScriptablePointerFactory; import org.apache.cocoon.components.flow.javascript.ScriptablePropertyHandler; import 
org.apache.cocoon.components.flow.util.PipelineUtil; import org.apache.cocoon.environment.ObjectModelHelper; import org.apache.cocoon.environment.Redirector; import org.apache.cocoon.environment.Request; import org.apache.commons.jxpath.JXPathIntrospector; import org.apache.commons.jxpath.ri.JXPathContextReferenceImpl; import org.apache.excalibur.source.Source; import org.apache.regexp.RE; import org.apache.regexp.RECompiler; import org.apache.regexp.REProgram; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextFactory; import org.mozilla.javascript.EcmaError; import org.mozilla.javascript.Function; import org.mozilla.javascript.JavaScriptException; import org.mozilla.javascript.NativeJavaClass; import org.mozilla.javascript.NativeJavaPackage; import org.mozilla.javascript.Script; import org.mozilla.javascript.ScriptRuntime; import org.mozilla.javascript.Scriptable; import org.mozilla.javascript.ScriptableObject; import org.mozilla.javascript.WrappedException; import org.mozilla.javascript.continuations.Continuation; import org.mozilla.javascript.tools.debugger.Main; import org.mozilla.javascript.tools.shell.Global; /** * Interface with the JavaScript interpreter. * * @since March 25, 2002 * @version $Id$ */ public class FOM_JavaScriptInterpreter extends CompilingInterpreter implements Initializable { /** * A long value is stored under this key in each top level JavaScript * thread scope object. When you enter a context any scripts whose * modification time is later than this value will be recompiled and reexecuted, * and this value will be updated to the current time. */ private final static String LAST_EXEC_TIME = "__PRIVATE_LAST_EXEC_TIME__"; /** * Prefix for session/request attribute storing JavaScript global scope object. */ private static final String USER_GLOBAL_SCOPE = "FOM JavaScript GLOBAL SCOPE/"; /** * Rhino supports Debuggers only in interpreting mode, and we are using * LocationTrackerDebugger. 
Hence need to force rhino into interpreted * mode by setting optimization level on a context. */ private static final int OPTIMIZATION_LEVEL = -1; /** * When was the last time we checked for script modifications. Used * only if {@link #reloadScripts} is true. Access is synchronized by * {@link #compiledScripts}. */ private long lastReloadCheckTime; /** * Shared global scope for scripts and other immutable objects */ private Global scope; /** * List of <code>String</code> objects that represent files to be * read in by the JavaScript interpreter. */ private List topLevelScripts = new ArrayList(); private boolean enableDebugger; /** * JavaScript debugger: there's only one of these: it can debug multiple * threads executing JS code. */ private static Main debugger; static synchronized Main getDebugger() { if (debugger == null) { final Main db = new Main("Cocoon Flow Debugger"); db.pack(); Dimension size = Toolkit.getDefaultToolkit().getScreenSize(); size.width *= 0.75; size.height *= 0.75; db.setSize(size.width, size.height); db.setExitAction(new Runnable() { public void run() { db.setVisible(false); } }); db.setVisible(true); debugger = db; debugger.attachTo(ContextFactory.getGlobal()); } return debugger; } public void configure(Configuration config) throws ConfigurationException { super.configure(config); String loadOnStartup = config.getChild("load-on-startup").getValue(null); if (loadOnStartup != null) { register(loadOnStartup); } String debugger = config.getChild("debugger").getValue(null); enableDebugger = "enabled".equalsIgnoreCase(debugger); } public void initialize() throws Exception { if (enableDebugger) { if (getLogger().isDebugEnabled()) { getLogger().debug("Flow debugger enabled, creating"); } getDebugger().doBreak(); } Context context = Context.enter(); context.setOptimizationLevel(OPTIMIZATION_LEVEL); context.setCompileFunctionsWithDynamicScope(true); context.setGeneratingDebug(true); // add support for Rhino objects to JXPath 
JXPathIntrospector.registerDynamicClass(Scriptable.class, ScriptablePropertyHandler.class); JXPathContextReferenceImpl.addNodePointerFactory(new ScriptablePointerFactory()); try { scope = new Global(context); // Access to Cocoon internal objects FOM_Cocoon.init(scope); } catch (Exception e) { Context.exit(); throw e; } } /** * Returns the JavaScript scope, a Scriptable object, from the user * session instance. Each interpreter instance can have a scope * associated with it. * * @return a <code>ThreadScope</code> value */ private ThreadScope getSessionScope() throws Exception { final String scopeID = USER_GLOBAL_SCOPE + getInterpreterID(); final Request request = ObjectModelHelper.getRequest(this.processInfoProvider.getObjectModel()); ThreadScope scope; // Get/create the scope attached to the current context HttpSession session = request.getSession(false); if (session != null) { scope = (ThreadScope) session.getAttribute(scopeID); } else { scope = (ThreadScope) request.getAttribute(scopeID); } if (scope == null) { scope = createThreadScope(); // Save scope in the request early to allow recursive Flow calls request.setAttribute(scopeID, scope); } return scope; } /** * Associates a JavaScript scope, a Scriptable object, with * {@link #getInterpreterID() identifier} of this {@link Interpreter} * instance. * * @param scope a <code>ThreadScope</code> value */ private void setSessionScope(ThreadScope scope) throws Exception { if (scope.useSession) { final String scopeID = USER_GLOBAL_SCOPE + getInterpreterID(); final Request request = ObjectModelHelper.getRequest(this.processInfoProvider.getObjectModel()); // FIXME: Where "session scope" should go when session is invalidated? // Attach the scope to the current context try { HttpSession session = request.getSession(true); session.setAttribute(scopeID, scope); } catch (IllegalStateException e) { // Session might be invalidated already. 
if (getLogger().isDebugEnabled()) { getLogger().debug("Got '" + e + "' while trying to set session scope.", e); } } } } public static class ThreadScope extends ScriptableObject { private static final String[] BUILTIN_PACKAGES = { "javax", "org", "com" }; private static final String[] BUILTIN_FUNCTIONS = { "importClass" }; private ClassLoader classLoader; /** true if this scope has assigned any global vars */ boolean useSession; /** true if this scope is locked for implicit variable declarations */ boolean locked; /** * Initializes new top-level scope. */ public ThreadScope(Global scope) throws Exception { final Context context = Context.getCurrentContext(); defineFunctionProperties(BUILTIN_FUNCTIONS, ThreadScope.class, ScriptableObject.DONTENUM); setPrototype(scope); // We want this to be a new top-level scope, so set its // parent scope to null. This means that any variables created // by assignments will be properties of this. setParentScope(null); // Put in the thread scope the Cocoon object, which gives access // to the interpreter object, and some Cocoon objects. See // FOM_Cocoon for more details. final Object[] args = {}; FOM_Cocoon cocoon = (FOM_Cocoon) context.newObject(this, "FOM_Cocoon", args); cocoon.setParentScope(this); super.put("cocoon", this, cocoon); defineProperty(LAST_EXEC_TIME, new Long(0), ScriptableObject.DONTENUM | ScriptableObject.PERMANENT); } public String getClassName() { return "ThreadScope"; } public void setLock(boolean lock) { this.locked = lock; } public void put(String name, Scriptable start, Object value) { //Allow setting values to existing variables, or if this is a //java class (used by importClass & importPackage) if (this.locked && !has(name, start) && !(value instanceof NativeJavaClass) && !(value instanceof Function)) { // Need to wrap into a runtime exception as Scriptable.put has no throws clause... throw new WrappedException (new RuntimeException("Implicit declaration of global variable '" + name + "' forbidden. 
Please ensure all variables are explicitely declared with the 'var' keyword")); } this.useSession = true; super.put(name, start, value); } public void put(int index, Scriptable start, Object value) { // FIXME(SW): do indexed properties have a meaning on the global scope? if (this.locked && !has(index, start)) { throw new WrappedException(new RuntimeException("Global scope locked. Cannot set value for index " + index)); } this.useSession = true; super.put(index, start, value); } /** Invoked after script execution */ void onExec() { this.useSession = false; super.put(LAST_EXEC_TIME, this, new Long(System.currentTimeMillis())); } /** Override importClass to allow reloading of classes */ public static void importClass(Context ctx, Scriptable thisObj, Object[] args, Function funObj) { for (int i = 0; i < args.length; i++) { Object clazz = args[i]; if (!(clazz instanceof NativeJavaClass)) { throw Context.reportRuntimeError("Not a Java class: " + Context.toString(clazz)); } String s = ((NativeJavaClass) clazz).getClassObject().getName(); String n = s.substring(s.lastIndexOf('.') + 1); thisObj.put(n, thisObj, clazz); } } public void setupPackages(ClassLoader cl) throws Exception { final String JAVA_PACKAGE = "JavaPackage"; if (classLoader != cl) { classLoader = cl; Scriptable newPackages = new NativeJavaPackage("", cl); newPackages.setParentScope(this); newPackages.setPrototype(ScriptableObject.getClassPrototype(this, JAVA_PACKAGE)); super.put("Packages", this, newPackages); for (int i = 0; i < BUILTIN_PACKAGES.length; i++) { String pkgName = BUILTIN_PACKAGES[i]; Scriptable pkg = new NativeJavaPackage(pkgName, cl); pkg.setParentScope(this); pkg.setPrototype(ScriptableObject.getClassPrototype(this, JAVA_PACKAGE)); super.put(pkgName, this, pkg); } } } public ClassLoader getClassLoader() { return classLoader; } } private ThreadScope createThreadScope() throws Exception { return new ThreadScope(scope); } /** * Sets up a ThreadScope object to be used as the global scope * when 
running the JavaScript scripts in the context of a request. * * <p>If you want to maintain the state of global variables across * multiple invocations of <code>&lt;map:call * function="..."&gt;</code>, you need to instanciate the session * object which is a property of the cocoon object * <code>var session = cocoon.session</code>. This will place the * newly create Scriptable object in the user's session, where it * will be retrieved from at the next invocation of {@link #callFunction}.</p> * * @exception Exception if an error occurs */ private void setupContext(Redirector redirector, Context context, ThreadScope thrScope) throws Exception { // We need to setup the FOM_Cocoon object according to the current // request. Everything else remains the same. ClassLoader contextClassloader = Thread.currentThread().getContextClassLoader(); thrScope.setupPackages(contextClassloader); FOM_Cocoon cocoon = (FOM_Cocoon) thrScope.get("cocoon", thrScope); cocoon.pushCallContext(this, redirector, avalonContext, null); // Time when scripts were last executed, in this thread scope final long lastExecuted = ((Long) thrScope.get(LAST_EXEC_TIME, thrScope)).longValue(); // List of scripts (ScriptSourceEntry objects) which might have to be // executed in this thread scope. List execList = new ArrayList(); // Check if we need to (re)compile any of the scripts synchronized (compiledScripts) { // Determine if refresh is needed. 
boolean needsRefresh = false; if (reloadScripts) { long now = System.currentTimeMillis(); if (now >= lastReloadCheckTime + checkTime) { needsRefresh = true; lastReloadCheckTime = now; } } // List of script URIs to resolve List resolveList = new ArrayList(); // If reloadScripts is true, recompile all top level scripts if (needsRefresh) { resolveList.addAll(topLevelScripts); } // If new scripts has been specified in sitemap, load and compile them if (needResolve.size() > 0) { topLevelScripts.addAll(needResolve); resolveList.addAll(needResolve); needResolve.clear(); } // Compile all the scripts first. That way you can set breakpoints // in the debugger before they execute. for (int i = 0, size = resolveList.size(); i < size; i++) { String sourceURI = (String) resolveList.get(i); ScriptSourceEntry entry = (ScriptSourceEntry) compiledScripts.get(sourceURI); if (entry == null) { Source src = this.sourceresolver.resolveURI(sourceURI); entry = new ScriptSourceEntry(src); compiledScripts.put(sourceURI, entry); } entry.compile(context, this.scope); // If top level scripts were executed in this thread scope, // collect only newly added scripts for execution. if (lastExecuted != 0) { execList.add(entry); } } // If scripts were never executed in this thread scope yet, // then collect all top level scripts for execution. 
if (lastExecuted == 0) { for (int i = 0, size = topLevelScripts.size(); i < size; i++) { String sourceURI = (String) topLevelScripts.get(i); ScriptSourceEntry entry = (ScriptSourceEntry) compiledScripts.get(sourceURI); if (entry != null) { execList.add(entry); } } } } // Execute the scripts identified above, as necessary boolean executed = false; for (int i = 0, size = execList.size(); i < size; i++) { ScriptSourceEntry entry = (ScriptSourceEntry) execList.get(i); if (lastExecuted == 0 || entry.getCompileTime() > lastExecuted) { entry.getScript().exec(context, thrScope); executed = true; } } // If any of the scripts has been executed, inform ThreadScope, // which will update last execution timestamp. if (executed) { thrScope.onExec(); } } /** * Compile filename as JavaScript code * * @param cx Rhino context * @param fileName resource uri * @return compiled script */ Script compileScript(Context cx, String fileName) throws Exception { Source src = this.sourceresolver.resolveURI(fileName); synchronized (compiledScripts) { ScriptSourceEntry entry = (ScriptSourceEntry) compiledScripts.get(src.getURI()); if (entry == null) { compiledScripts.put(src.getURI(), entry = new ScriptSourceEntry(src)); } else { this.sourceresolver.release(src); } long compileTime = entry.getCompileTime(); if (compileTime == 0 || reloadScripts && (compileTime + checkTime < System.currentTimeMillis())) { entry.compile(cx, this.scope); } return entry.getScript(); } } protected Script compileScript(Context cx, Scriptable scope, Source src) throws Exception { PushbackInputStream is = new PushbackInputStream(src.getInputStream(), ENCODING_BUF_SIZE); try { String encoding = findEncoding(is); Reader reader = encoding == null ? 
new InputStreamReader(is) : new InputStreamReader(is, encoding); reader = new BufferedReader(reader); return cx.compileReader(reader, src.getURI(), 1, null); } finally { is.close(); } } // A charset name can be up to 40 characters taken from the printable characters of US-ASCII // (see http://www.iana.org/assignments/character-sets). So reading 100 bytes should be more than enough. private final static int ENCODING_BUF_SIZE = 100; // Match 'encoding = xxxx' on the first line private final static REProgram encodingRE = new RECompiler().compile("encoding\\s*=\\s*([^\\s]*)"); /** * Find the encoding of the stream, or null if not specified */ String findEncoding(PushbackInputStream is) throws IOException { // Read some bytes byte[] buffer = new byte[ENCODING_BUF_SIZE]; int len = is.read(buffer, 0, buffer.length); // and push them back is.unread(buffer, 0, len); // Interpret them as an ASCII string String str = new String(buffer, 0, len, "ASCII"); RE re = new RE(encodingRE); if (re.match(str)) { return re.getParen(1); } return null; } /** * Calls a JavaScript function, passing <code>params</code> as its * arguments. In addition to this, it makes available the parameters * through the <code>cocoon.parameters</code> JavaScript array * (indexed by the parameter names). 
     *
     * @param funName a <code>String</code> value — the name of the top-level
     *        flowscript function to invoke
     * @param params a <code>List</code> value of {@code Interpreter.Argument}s,
     *        exposed to the script as <code>cocoon.parameters</code>
     * @param redirector the sitemap redirector made available to the script
     * @exception Exception if an error occurs
     */
    public void callFunction(String funName, List params, Redirector redirector)
    throws Exception {
        // Each invocation gets its own Rhino Context, configured for
        // continuations support (dynamic scope) and debug information.
        Context context = Context.enter();
        context.setOptimizationLevel(OPTIMIZATION_LEVEL);
        context.setGeneratingDebug(true);
        context.setCompileFunctionsWithDynamicScope(true);
        context.setErrorReporter(new JSErrorReporter());

        LocationTrackingDebugger locationTracker = new LocationTrackingDebugger();
        if (!enableDebugger) {
            //FIXME: add a "tee" debugger that allows both to be used simultaneously
            context.setDebugger(locationTracker, null);
        }

        // Try to retrieve the scope object from the session instance. If
        // no scope is found, we create a new one, but don't place it in
        // the session.
        //
        // When a user script "creates" a session using
        // cocoon.createSession() in JavaScript, the thrScope is placed in
        // the session object, where it's later retrieved from here. This
        // behaviour allows multiple JavaScript functions to share the
        // same global scope.
        ThreadScope thrScope = getSessionScope();
        // The scope is shared across requests of the same session, so all
        // work on it is serialized.
        synchronized (thrScope) {
            // Saved so the finally block can restore the caller's context
            // classloader (setupContext may swap it).
            ClassLoader savedClassLoader =
                Thread.currentThread().getContextClassLoader();
            FOM_Cocoon cocoon = null;
            try {
                try {
                    setupContext(redirector, context, thrScope);
                    cocoon = (FOM_Cocoon) thrScope.get("cocoon", thrScope);

                    // Register the current scope for scripts indirectly called from this function
                    FOM_JavaScriptFlowHelper.setFOM_FlowScope(cocoon.getObjectModel(), thrScope);

                    if (enableDebugger) {
                        // only raise the debugger window if it isn't already visible
                        if (!getDebugger().isVisible()) {
                            getDebugger().setVisible(true);
                        }
                    }

                    // Expose the sitemap parameters as properties of a fresh
                    // script object (cocoon.parameters).
                    int size = (params != null ? params.size() : 0);
                    Scriptable parameters = context.newObject(thrScope);
                    for (int i = 0; i < size; i++) {
                        Interpreter.Argument arg = (Interpreter.Argument)params.get(i);
                        if (arg.name == null) {
                            arg.name = "";
                        }
                        parameters.put(arg.name, parameters, arg.value);
                    }
                    cocoon.setParameters(parameters);

                    // Resolve function name
                    //
                    // The name is compiled and evaluated as a script so that
                    // dotted expressions resolve to the actual function object.
                    Object fun;
                    try {
                        fun = context.compileString(funName, null, 1, null).exec (context, thrScope);
                    } catch (EcmaError ee) {
                        throw new ResourceNotFoundException (
                            "Function \"javascript:" + funName + "()\" not found");
                    }

                    // Lock the scope while user code runs, then invoke the
                    // function with no arguments ('this' is the scope itself).
                    thrScope.setLock(true);
                    ScriptRuntime.call(context, fun, thrScope, new Object[0], thrScope);
                } catch (JavaScriptException e) {
                    // Rethrow with flowscript source locations resolved by the
                    // location-tracking debugger.
                    throw locationTracker.getException("Error calling flowscript function " + funName, e);
                } catch (EcmaError e) {
                    throw locationTracker.getException("Error calling function " + funName, e);
                } catch (WrappedException e) {
                    throw locationTracker.getException("Error calling function " + funName, e);
                }
            } finally {
                // Always unlock, persist the (possibly session-bound) scope,
                // pop the FOM call context, and restore thread state.
                thrScope.setLock(false);
                setSessionScope(thrScope);
                if (cocoon != null) {
                    cocoon.popCallContext();
                }
                Context.exit();
                Thread.currentThread().setContextClassLoader(savedClassLoader);
            }
        }
    }

    /**
     * Resumes a previously saved continuation identified by {@code id} and
     * re-enters the flowscript at the saved point by calling the script-side
     * {@code handleContinuation(k, wk)} function on the FOM cocoon object.
     *
     * @param id the web continuation identifier, as looked up in the
     *        continuations manager for this interpreter
     * @param params sitemap parameters, exposed as <code>cocoon.parameters</code>
     * @param redirector the sitemap redirector
     * @exception Exception if an error occurs; an
     *        {@code InvalidContinuationException} if the id is unknown/expired
     */
    public void handleContinuation(String id, List params,
                                   Redirector redirector)
    throws Exception {
        WebContinuation wk = continuationsMgr.lookupWebContinuation(id, getInterpreterID());

        if (wk == null) {
            /*
             * Throw an InvalidContinuationException to be handled inside the
             * <map:handle-errors> sitemap element.
             */
            throw new InvalidContinuationException("The continuation ID " + id + " is invalid.");
        }

        Context context = Context.enter();
        context.setOptimizationLevel(OPTIMIZATION_LEVEL);
        context.setGeneratingDebug(true);
        context.setCompileFunctionsWithDynamicScope(true);

        LocationTrackingDebugger locationTracker = new LocationTrackingDebugger();
        if (!enableDebugger) {
            //FIXME: add a "tee" debugger that allows both to be used simultaneously
            context.setDebugger(locationTracker, null);
        }

        // Obtain the continuation object from it, and setup the
        // FOM_Cocoon object associated in the dynamic scope of the saved
        // continuation with the environment and context objects.
        Continuation k = (Continuation) wk.getContinuation();
        ThreadScope kScope = (ThreadScope) k.getParentScope();

        // The continuation's own scope (not the session scope) is the unit
        // of serialization here.
        synchronized (kScope) {
            ClassLoader savedClassLoader =
                Thread.currentThread().getContextClassLoader();
            FOM_Cocoon cocoon = null;
            try {
                // Resume under the classloader the scope was created with.
                Thread.currentThread().setContextClassLoader(kScope.getClassLoader());
                cocoon = (FOM_Cocoon)kScope.get("cocoon", kScope);
                kScope.setLock(true);
                cocoon.pushCallContext(this, redirector, avalonContext, wk);

                // Register the current scope for scripts indirectly called from this function
                FOM_JavaScriptFlowHelper.setFOM_FlowScope(cocoon.getObjectModel(), kScope);

                if (enableDebugger) {
                    getDebugger().setVisible(true);
                }

                // Expose sitemap parameters (cocoon.parameters), as in
                // callFunction(); note: no null-name normalization here.
                Scriptable parameters = context.newObject(kScope);
                int size = params != null ? params.size() : 0;
                for (int i = 0; i < size; i++) {
                    Interpreter.Argument arg = (Interpreter.Argument)params.get(i);
                    parameters.put(arg.name, parameters, arg.value);
                }
                cocoon.setParameters(parameters);

                // Wrap the web continuation as a script-visible object and
                // hook it into the continuation's scope/prototype chain.
                FOM_WebContinuation fom_wk = new FOM_WebContinuation(wk);
                fom_wk.setLogger(getLogger());
                fom_wk.setParentScope(kScope);
                fom_wk.setPrototype(ScriptableObject.getClassPrototype(kScope,
                                                                       fom_wk.getClassName()));

                Object[] args = new Object[] {k, fom_wk};
                try {
                    ScriptableObject.callMethod(cocoon, "handleContinuation", args);
                } catch (JavaScriptException e) {
                    throw locationTracker.getException("Error calling continuation", e);
                } catch (EcmaError e) {
                    throw locationTracker.getException("Error calling continuation", e);
                } catch (WrappedException e) {
                    throw locationTracker.getException("Error calling continuation", e);
                }
            } finally {
                // Mirror of callFunction()'s cleanup, against kScope.
                kScope.setLock(false);
                setSessionScope(kScope);
                if (cocoon != null) {
                    cocoon.popCallContext();
                }
                Context.exit();
                Thread.currentThread().setContextClassLoader(savedClassLoader);
            }
        }
    }

    /**
     * Publishes the FOM objects to the view layer, then delegates to the
     * superclass to forward the request to {@code uri}.
     */
    public void forwardTo(Scriptable scope, FOM_Cocoon cocoon, String uri,
                          Object bizData, FOM_WebContinuation fom_wk,
                          Redirector redirector)
    throws Exception {
        setupView(scope, cocoon, fom_wk);
        super.forwardTo(uri, bizData,
                        fom_wk == null ? null : fom_wk.getWebContinuation(),
                        redirector);
    }

    /**
     * Call the Cocoon sitemap for the given URI, sending the output of the
     * eventually matched pipeline to the specified outputstream.
     *
     * @param uri The URI for which the request should be generated.
     * @param bizData Extra data associated with the subrequest.
     * @param out An OutputStream where the output should be written to.
     * @exception Exception If an error occurs.
     */
    // package access as this is called by FOM_Cocoon
    void process(Scriptable scope, FOM_Cocoon cocoon, String uri,
                 Object bizData, OutputStream out)
    throws Exception {
        setupView(scope, cocoon, null);
        // FIXME (SW): should we deprecate this method in favor of PipelineUtil?
        PipelineUtil pipeUtil = new PipelineUtil();
        pipeUtil.processToStream(uri, bizData, out);
    }

    /**
     * Stores the script-visible FOM objects (request, response, session,
     * context, live-connect packages and — when present — the current web
     * continuation) into the object model so the view layer can access them.
     *
     * @param kont the current web continuation, or null when not resuming
     */
    private void setupView(Scriptable scope, FOM_Cocoon cocoon, FOM_WebContinuation kont) {
        final Map objectModel = this.processInfoProvider.getObjectModel();

        // Make the JS live-connect objects available to the view layer
        FOM_JavaScriptFlowHelper.setPackages(objectModel,
             (Scriptable)ScriptableObject.getProperty(scope, "Packages"));
        FOM_JavaScriptFlowHelper.setJavaPackage(objectModel,
             (Scriptable)ScriptableObject.getProperty(scope, "java"));

        // Make the FOM objects available to the view layer
        FOM_JavaScriptFlowHelper.setFOM_Request(objectModel, cocoon.jsGet_request());
        FOM_JavaScriptFlowHelper.setFOM_Response(objectModel, cocoon.jsGet_response());

        Request request = ObjectModelHelper.getRequest(objectModel);
        // Only expose a session object if one already exists — getSession(false)
        // deliberately avoids creating one as a side effect.
        Scriptable session = null;
        if (request.getSession(false) != null) {
            session = cocoon.jsGet_session();
        }
        FOM_JavaScriptFlowHelper.setFOM_Session(objectModel, session);

        FOM_JavaScriptFlowHelper.setFOM_Context(objectModel, cocoon.jsGet_context());
        if (kont != null) {
            FOM_JavaScriptFlowHelper.setFOM_WebContinuation(objectModel, kont);
        }
    }

    /**
     * @see org.apache.cocoon.components.flow.AbstractInterpreter#getScriptExtension()
     */
    public String getScriptExtension() {
        return ".js";
    }
}
/*
 * $RCSfile: MlibWarpPolynomialOpImage.java,v $
 *
 * Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved.
 *
 * Use is subject to license terms.
 *
 * $Revision: 1.2 $
 * $Date: 2005/12/15 18:35:48 $
 * $State: Exp $
 */
package com.lightcrafts.media.jai.mlib;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.image.DataBuffer;
import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import java.awt.image.WritableRaster;
import com.lightcrafts.mediax.jai.BorderExtender;
import com.lightcrafts.mediax.jai.ImageLayout;
import com.lightcrafts.mediax.jai.Interpolation;
import java.util.Map;
import com.lightcrafts.mediax.jai.WarpOpImage;
import com.lightcrafts.mediax.jai.WarpPolynomial;
import com.sun.medialib.mlib.*;
import com.lightcrafts.media.jai.util.ImageUtil;

/**
 * An <code>OpImage</code> implementing the polynomial "Warp" operation
 * using MediaLib.
 *
 * <p> With warp operations, there is no forward mapping (from source to
 * destination). JAI images are tiled, while mediaLib does not handle
 * tiles and consider each tile an individual image. For each tile in
 * destination, in order not to cobble the entire source image, the
 * <code>computeTile</code> method in this class attempts to do a backward
 * mapping on the tile region using the pixels along the perimeter of the
 * rectangular region. The hope is that the mapped source rectangle
 * should include all source pixels needed for this particular destination
 * tile. However, with certain unusual warp points, an inner destination
 * pixel may be mapped outside of the mapped perimeter pixels. In this
 * case, this destination pixel is not filled, and left black.
 *
 * @see com.lightcrafts.mediax.jai.operator.WarpDescriptor
 * @see MlibWarpRIF
 *
 * @since 1.0
 *
 */
final class MlibWarpPolynomialOpImage extends WarpOpImage {

    /** The x and y coefficients.
     */
    // Copied from the WarpPolynomial's float coefficients in the
    // constructor because the mediaLib entry points take doubles.
    private double[] xCoeffs;
    private double[] yCoeffs;

    /**
     * Indicates what kind of interpolation to use; may be
     * <code>Constants.MLIB_NEAREST</code>,
     * <code>Constants.MLIB_BILINEAR</code>,
     * or <code>Constants.MLIB_BICUBIC</code>,
     * and was determined in <code>MlibWarpRIF.create()</code>.
     */
    private int filter;

    /** The pre and post scale factors. */
    private double preScaleX;
    private double preScaleY;
    private double postScaleX;
    private double postScaleY;

    /**
     * Constructs a <code>MlibWarpPolynomialOpImage</code>.
     *
     * @param source  The source image.
     * @param layout  The destination image layout.
     * @param warp    An object defining the warp algorithm.
     * @param interp  An object describing the interpolation method.
     */
    public MlibWarpPolynomialOpImage(RenderedImage source,
                                     BorderExtender extender,
                                     Map config,
                                     ImageLayout layout,
                                     WarpPolynomial warp,
                                     Interpolation interp,
                                     int filter,
                                     double[] backgroundValues) {
        super(source,
              layout,
              config,
              true,
              extender,
              interp,
              warp,
              backgroundValues);

        // Widen the float polynomial coefficients to double for mediaLib.
        float[] xc = warp.getXCoeffs();
        float[] yc = warp.getYCoeffs();
        int size = xc.length;

        xCoeffs = new double[size];		// X and Y coefficients as doubles
        yCoeffs = new double[size];
        for (int i = 0; i < size; i++) {
            xCoeffs[i] = xc[i];
            yCoeffs[i] = yc[i];
        }

        this.filter = filter;			// interpolation

        preScaleX = warp.getPreScaleX();	// pre/post factors
        preScaleY = warp.getPreScaleY();
        postScaleX = warp.getPostScaleX();
        postScaleY = warp.getPostScaleY();
    }

    /**
     * Returns the minimum bounding box of the region of the specified
     * source to which a particular <code>Rectangle</code> of the
     * destination will be mapped.
     *
     * @param destRect the <code>Rectangle</code> in destination coordinates.
     * @param sourceIndex the index of the source image.
     *
     * @return a <code>Rectangle</code> indicating the source bounding box,
     *         or <code>null</code> if the bounding box is unknown.
     *
     * @throws IllegalArgumentException if <code>sourceIndex</code> is
     *         negative or greater than the index of the last source.
     * @throws IllegalArgumentException if <code>destRect</code> is
     *         <code>null</code>.
     */
    protected Rectangle backwardMapRect(Rectangle destRect,
                                        int sourceIndex) {
        // Superclass method will throw documented exceptions if needed.
        Rectangle wrect = super.backwardMapRect(destRect, sourceIndex);

        // "Dilate" the backwarp mapped rectangle to account for
        // the lack of being able to know the floating point result of
        // mapDestRect() and to mimic what is done in AffineOpImage.
        // See bug 4518223 for more information.
        wrect.setBounds(wrect.x - 1, wrect.y - 1,
                        wrect.width + 2, wrect.height + 2);

        return wrect;
    }

    /**
     * Computes a tile.  A new <code>WritableRaster</code> is created to
     * represent the requested tile.  Its width and height equals to this
     * image's tile width and tile height respectively.  If the requested
     * tile lies outside of the image's boundary, the created raster is
     * returned with all of its pixels set to 0.
     *
     * <p> This method overrides the method in <code>WarpOpImage</code>
     * and performs source cobbling when necessary.  MediaLib is used to
     * calculate the actual warping.
     *
     * @param tileX  The X index of the tile.
     * @param tileY  The Y index of the tile.
     *
     * @return The tile as a <code>Raster</code>.
     */
    public Raster computeTile(int tileX, int tileY) {
        /* The origin of the tile. */
        Point org = new Point(tileXToX(tileX), tileYToY(tileY));

        /* Create a new WritableRaster to represent this tile. */
        WritableRaster dest = createWritableRaster(sampleModel, org);

        /* Find the intersection between this tile and the writable bounds. */
        Rectangle rect = new Rectangle(org.x, org.y, tileWidth, tileHeight);
        // destRect = writable part of the tile; destRect1 = part inside the
        // image bounds (destRect1 contains destRect).
        Rectangle destRect = rect.intersection(computableBounds);
        Rectangle destRect1 = rect.intersection(getBounds());

        if (destRect.isEmpty()) {
            if (setBackground) {
                ImageUtil.fillBackground(dest, destRect1, backgroundValues);
            }
            return dest;	// tile completely outside of writable bounds
        }

        /* Map destination rectangle to source space. */
        Rectangle srcRect = backwardMapRect(destRect, 0).intersection(
                                getSourceImage(0).getBounds());

        if (srcRect.isEmpty()) {
            if (setBackground) {
                ImageUtil.fillBackground(dest, destRect1, backgroundValues);
            }
            return dest;	// outside of source bounds
        }

        if (!destRect1.equals(destRect)) {
            // beware that destRect1 contains destRect
            ImageUtil.fillBordersWithBackgroundValues(destRect1, destRect,
                                                      dest, backgroundValues);
        }

        /* Add the interpolation paddings. */
        int l = interp== null ? 0 : interp.getLeftPadding();
        int r = interp== null ? 0 : interp.getRightPadding();
        int t = interp== null ? 0 : interp.getTopPadding();
        int b = interp== null ? 0 : interp.getBottomPadding();

        srcRect = new Rectangle(srcRect.x - l, srcRect.y - t,
                                srcRect.width + l + r,
                                srcRect.height + t + b);

        /* Cobble source into one Raster. */
        Raster[] sources = new Raster[1];
        sources[0] = getBorderExtender() != null ?
            getSourceImage(0).getExtendedData(srcRect, extender) :
            getSourceImage(0).getData(srcRect);

        computeRect(sources, dest, destRect);

        // Recycle the source tile
        if(getSourceImage(0).overlapsMultipleTiles(srcRect)) {
            recycleTile(sources[0]);
        }

        return dest;
    }

    /**
     * Performs the "Warp" operation on a rectangular region of
     * the same.
     */
    // Dispatches to the integer or floating-point mediaLib entry point based
    // on the accessor's data type; the *2 variants take background values,
    // the plain variants are followed by clamping to the color model range
    // (integer path only).
    protected void computeRect(Raster[] sources,
                               WritableRaster dest,
                               Rectangle destRect) {
        Raster source = sources[0];

        /* Find the mediaLib data tag. */
        int formatTag = MediaLibAccessor.findCompatibleTag(sources, dest);

        MediaLibAccessor srcMA =
            new MediaLibAccessor(source, source.getBounds(), formatTag);
        MediaLibAccessor dstMA =
            new MediaLibAccessor(dest, destRect, formatTag);

        mediaLibImage[] srcMLI = srcMA.getMediaLibImages();
        mediaLibImage[] dstMLI = dstMA.getMediaLibImages();

        switch (dstMA.getDataType()) {
        case DataBuffer.TYPE_BYTE:
        case DataBuffer.TYPE_USHORT:
        case DataBuffer.TYPE_SHORT:
        case DataBuffer.TYPE_INT:
            if (setBackground)
                for (int i = 0 ; i < dstMLI.length; i++) {
                    Image.PolynomialWarp2(dstMLI[i], srcMLI[i],
                                          xCoeffs, yCoeffs,
                                          destRect.x, destRect.y,
                                          source.getMinX(), source.getMinY(),
                                          preScaleX, preScaleY,
                                          postScaleX, postScaleY,
                                          filter,
                                          Constants.MLIB_EDGE_DST_NO_WRITE,
                                          intBackgroundValues);
                }
            else
                for (int i = 0 ; i < dstMLI.length; i++) {
                    Image.PolynomialWarp(dstMLI[i], srcMLI[i],
                                         xCoeffs, yCoeffs,
                                         destRect.x, destRect.y,
                                         source.getMinX(), source.getMinY(),
                                         preScaleX, preScaleY,
                                         postScaleX, postScaleY,
                                         filter,
                                         Constants.MLIB_EDGE_DST_NO_WRITE);
                    MlibUtils.clampImage(dstMLI[i], getColorModel());
                }
            break;

        case DataBuffer.TYPE_FLOAT:
        case DataBuffer.TYPE_DOUBLE:
            if (setBackground)
                for (int i = 0 ; i < dstMLI.length; i++) {
                    Image.PolynomialWarp2_Fp(dstMLI[i], srcMLI[i],
                                             xCoeffs, yCoeffs,
                                             destRect.x, destRect.y,
                                             source.getMinX(),
                                             source.getMinY(),
                                             preScaleX, preScaleY,
                                             postScaleX, postScaleY,
                                             filter,
                                             Constants.MLIB_EDGE_DST_NO_WRITE,
                                             backgroundValues);
                }
            else
                for (int i = 0 ; i < dstMLI.length; i++) {
                    Image.PolynomialWarp_Fp(dstMLI[i], srcMLI[i],
                                            xCoeffs, yCoeffs,
                                            destRect.x, destRect.y,
                                            source.getMinX(), source.getMinY(),
                                            preScaleX, preScaleY,
                                            postScaleX, postScaleY,
                                            filter,
                                            Constants.MLIB_EDGE_DST_NO_WRITE);
                }
            break;

        default:
            throw new RuntimeException(JaiI18N.getString("Generic2"));
        }

        // If the accessor had to copy data out of the raster, clamp and
        // write the results back.
        if (dstMA.isDataCopy()) {
            dstMA.clampDataArrays();
            dstMA.copyDataToRaster();
        }
    }
}
/*
 * Copyright (c) 2008-2016 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.haulmont.cuba.gui.app.core.entityinspector;

import com.haulmont.bali.util.ParamsMap;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.MetaProperty;
import com.haulmont.chile.core.model.MetaPropertyPath;
import com.haulmont.chile.core.model.Range;
import com.haulmont.chile.core.model.utils.InstanceUtils;
import com.haulmont.cuba.client.ClientConfig;
import com.haulmont.cuba.core.entity.Categorized;
import com.haulmont.cuba.core.entity.Category;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.*;
import com.haulmont.cuba.gui.WindowManager.OpenType;
import com.haulmont.cuba.gui.WindowParam;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.gui.components.actions.AddAction;
import com.haulmont.cuba.gui.components.actions.BaseAction;
import com.haulmont.cuba.gui.components.actions.ItemTrackingAction;
import com.haulmont.cuba.gui.components.actions.RemoveAction;
import com.haulmont.cuba.gui.data.*;
import com.haulmont.cuba.gui.data.impl.*;
import com.haulmont.cuba.gui.theme.ThemeConstants;
import com.haulmont.cuba.gui.xml.layout.ComponentsFactory;
import com.haulmont.cuba.security.entity.EntityAttrAccess;
import com.haulmont.cuba.security.entity.EntityOp;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;

import javax.inject.Inject;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
import java.util.*;

// Generic editor screen of the Entity Inspector: builds its field groups,
// tables and runtime-property panes dynamically from a MetaClass.
public class EntityInspectorEditor extends AbstractWindow {

    public static final int CAPTION_MAX_LENGTH = 100;
    public static final int MAX_TEXT_LENGTH = 50;
    // Inspector editors open in the current tab.
    public static final OpenType OPEN_TYPE = OpenType.THIS_TAB;
    public static final int MAX_TEXTFIELD_STRING_LENGTH = 255;

    @Inject
    protected Metadata metadata;

    @Inject
    protected MessageTools messageTools;

    @Inject
    protected ViewRepository viewRepository;

    @Inject
    protected Security security;

    @Inject
    protected DataSupplier dataSupplier;

    @Inject
    protected BoxLayout buttonsBox;

    @Inject
    protected BoxLayout contentPane;

    @Inject
    protected BoxLayout runtimePane;

    @Inject
    protected TabSheet tablesTabSheet;

    @Inject
    protected ComponentsFactory componentsFactory;

    @Inject
    protected Configuration configuration;

    @Inject
    protected ThemeConstants themeConstants;

    // Screen parameters: the edited entity, an optional parent entity and the
    // property/datasource linking this instance to it.
    @WindowParam(name = "item")
    protected Entity item;

    @WindowParam(name = "parent")
    protected Entity parent;

    @WindowParam(name = "parentProperty")
    protected String parentProperty;

    @WindowParam(name = "parentDs")
    protected Datasource parentDs;

    @WindowParam(name = "datasource")
    protected Datasource datasource;

    protected MetaClass meta;
    protected DsContextImpl dsContext;
    // Property datasources keyed by property FQN (see createPropertyDatasources).
    protected Map<String, Datasource> datasources;
    protected Boolean isNew;
    protected Boolean autocommit;
    protected Boolean showSystemFields;
    protected Collection<Table> tables;
    protected Collection<Field> reserveLineSeparatorFields;

    // Runtime (dynamic) properties support for Categorized entities.
    protected RuntimePropsDatasource rDS;
    protected CollectionDatasource categoriesDs;

    protected ButtonsPanel buttonsPanel;
    protected Button commitButton;
    protected Button cancelButton;
    // First editable field found — receives initial focus at the end of init().
    protected FieldGroup focusFieldGroup;
    protected String focusFieldId;

    public EntityInspectorEditor() {
        datasources = new HashMap<>();
        tables = new LinkedList<>();
        isNew = true;
        autocommit = true;
        showSystemFields = false;
    }

    /**
     * Builds the whole screen: resolves the MetaClass, creates (or loads) the
     * item, sets up the datasource tree and generates all data components.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void init(Map<String, Object> params) {
        isNew = item == null || PersistenceHelper.isNew(item);
        // MetaClass comes either from the passed item or from the "metaClass" param.
        meta = item != null ? item.getMetaClass() : metadata.getSession().getClass((String) params.get("metaClass"));
        autocommit = params.get("autocommit") != null ? (Boolean) params.get("autocommit") : true;
        showSystemFields = params.get("showSystemFields") != null ? (Boolean) params.get("showSystemFields") : false;

        if (meta == null)
            throw new IllegalStateException("Entity or entity's MetaClass must be specified");

        setCaption(meta.getName());
        initShortcuts();

        View view = createView(meta);

        dsContext = new DsContextImpl(dataSupplier);
        dsContext.setFrameContext(getDsContext().getFrameContext());
        setDsContext(dsContext);

        boolean createRequest = item == null || item.getId() == null;
        if (createRequest) {
            item = metadata.create(meta);
            setParentField(item, parentProperty, parent);
        } else {
            //edit request
            Object itemId = item.getId();
            if (!isNew) {
                // Reload with the locally built view (soft deletion off).
                item = loadSingleItem(meta, itemId, view);
            }
            if (item == null) {
                throw new EntityAccessException(meta, itemId);
            }
        }
        createEmbeddedFields(meta, item);

        boolean categorizedEntity = item instanceof Categorized;

        if (datasource == null) {
            datasource = new DatasourceImpl<>();
            datasource.setup(dsContext, dataSupplier, meta.getName() + "Ds", item.getMetaClass(), view);
            ((DatasourceImpl) datasource).setParent(parentDs);
            ((DatasourceImpl) datasource).valid();
        }

        dsContext.register(datasource);
        createPropertyDatasources(datasource);
        if (categorizedEntity) {
            initRuntimePropertiesDatasources(view);
        }

        datasource.refresh();
        reserveLineSeparatorFields = new LinkedList<>();
        createDataComponents(meta, item);
        if (categorizedEntity) {
            createRuntimeDataComponents();
        }

        datasource.setItem(item);

        if (categorizedEntity) {
            rDS.refresh();
        }

        createCommitButtons();
        setCaption(meta.getName());
        if (focusFieldGroup != null && focusFieldId != null) {
            focusFieldGroup.requestFocus(focusFieldId);
        }
    }

    /** @return the current (possibly edited) item from the main datasource */
    public Entity getItem() {
        return datasource.getItem();
    }

    // Registers the commit-and-close action on the configured commit shortcut.
    protected void initShortcuts() {
        Action commitAction = new BaseAction("commitAndClose")
                .withCaption(messages.getMainMessage("actions.OkClose"))
                .withShortcut(configuration.getConfig(ClientConfig.class).getCommitShortcut())
                .withHandler(e ->
                        commitAndClose()
                );
        addAction(commitAction);
    }

    // Links the new item to its parent entity via the given property, if all
    // three are provided.
    protected void setParentField(Entity item, String parentProperty, Entity parent) {
        if (parentProperty != null && parent != null && item != null)
            item.setValue(parentProperty, parent);
    }

    // Opens the runtime-properties frame (dynamic attributes) into the
    // runtime pane; no-op unless both runtime datasources were initialized.
    protected void createRuntimeDataComponents() {
        if (rDS != null && categoriesDs != null) {
            Map<String, Object> params = new HashMap<>();
            params.put("runtimeDs", rDS.getId());
            params.put("categoriesDs", categoriesDs.getId());
            params.put("fieldWidth", themeConstants.get("cuba.gui.EntityInspectorEditor.field.width"));
            params.put("borderVisible", Boolean.TRUE);

            RuntimePropertiesFrame runtimePropertiesFrame = (RuntimePropertiesFrame) openFrame(runtimePane,
                    "runtimePropertiesFrame", params);
            runtimePropertiesFrame.setFrame(this.getFrame());
            runtimePropertiesFrame.setMessagesPack("com.haulmont.cuba.gui.app.core.entityinspector");
            runtimePropertiesFrame.setCategoryFieldVisible(false);

            runtimePropertiesFrame.setHeightAuto();
            runtimePropertiesFrame.setWidthFull();

            runtimePane.add(runtimePropertiesFrame);
        }
    }

    // Creates and registers the runtime-properties datasource and the
    // categories datasource for Categorized entities; requires the view to
    // contain a "category" property.
    protected void initRuntimePropertiesDatasources(View view) {
        rDS = new RuntimePropsDatasourceImpl(dsContext, dataSupplier, "rDS", datasource.getId(), null);
        MetaClass categoriesMeta = metadata.getSession().getClass(Category.class);
        categoriesDs = new CollectionDatasourceImpl();
        ViewProperty categoryProperty = view.getProperty("category");
        if (categoryProperty == null) {
            throw new IllegalArgumentException("Category property not found. Not a categorized entity?");
        }
        categoriesDs.setup(dsContext, dataSupplier, "categoriesDs", categoriesMeta, categoryProperty.getView());
        categoriesDs.setQuery(String.format("select c from sys$Category c where c.entityType='%s'", meta.getName()));
        categoriesDs.refresh();
        dsContext.register(rDS);
        dsContext.register(categoriesDs);
    }

    /**
     * Recursively instantiates the embedded properties.
     * E.g. embedded properties of the embedded property will also be instantiated.
     *
     * @param metaClass meta class of the entity
     * @param item      entity instance
     */
    protected void createEmbeddedFields(MetaClass metaClass, Entity item) {
        for (MetaProperty metaProperty : metaClass.getProperties()) {
            if (isEmbedded(metaProperty)) {
                MetaClass embeddedMetaClass = metaProperty.getRange().asClass();
                Entity embedded = item.getValue(metaProperty.getName());
                if (embedded == null) {
                    // Materialize a fresh embedded instance so its fields can
                    // be edited even when the value was null.
                    embedded = metadata.create(embeddedMetaClass);
                    item.setValue(metaProperty.getName(), embedded);
                }
                createEmbeddedFields(embeddedMetaClass, embedded);
            }
        }
    }

    /**
     * Returns metaProperty of the referred entity annotated with either nullIndicatorAttributeName or
     * nullIndicatorColumnName property.
     *
     * @param embeddedMetaProperty embedded property of the current entity
     * @return property of the referred entity
     */
    protected MetaProperty getNullIndicatorProperty(MetaProperty embeddedMetaProperty) {
        // Unsupported for EclipseLink ORM
        return null;
    }

    /**
     * Checks if the property is embedded
     *
     * @param metaProperty meta property
     * @return true if embedded, false otherwise
     */
    protected boolean isEmbedded(MetaProperty metaProperty) {
        return metaProperty.getAnnotatedElement().isAnnotationPresent(javax.persistence.Embedded.class);
    }

    /**
     * Loads single item by id.
     *
     * @param meta item's meta class
     * @param id   item's id
     * @param view view
     * @return loaded item if found, null otherwise
     */
    protected Entity loadSingleItem(MetaClass meta, Object id, View view) {
        String primaryKeyName = metadata.getTools().getPrimaryKeyName(meta);
        if (primaryKeyName == null) {
            throw new IllegalStateException(String.format("Entity %s has no primary key", meta.getName()));
        }

        // Soft deletion is disabled so even (soft-)deleted instances can be
        // inspected; dynamic attributes are loaded along with the entity.
        LoadContext ctx = new LoadContext(meta);
        ctx.setLoadDynamicAttributes(true);
        ctx.setSoftDeletion(false);
        ctx.setView(view);

        String query = String.format("select e from %s e where e.%s = :id", meta.getName(), primaryKeyName);
        LoadContext.Query q = ctx.setQueryString(query);
        q.setParameter("id", id);
        return dataSupplier.load(ctx);
    }

    /**
     * Creates components representing item data
     * (fieldGroup, fieldGroups for embedded properties, tables for the referred entities)
     *
     * @param metaClass item meta class
     */
    protected void createDataComponents(MetaClass metaClass, Entity item) {
        FieldGroup fieldGroup = componentsFactory.createComponent(FieldGroup.class);
        fieldGroup.setBorderVisible(true);

        contentPane.add(fieldGroup);
        fieldGroup.setFrame(frame);

        MetadataTools tools = metadata.getTools();
        MetaProperty primaryKeyProperty = tools.getPrimaryKeyProperty(metaClass);

        // Fields marked custom are materialized later via createCustomFields.
        LinkedList<FieldGroup.FieldConfig> customFields = new LinkedList<>();
        for (MetaProperty metaProperty : metaClass.getProperties()) {
            boolean isRequired = isRequired(metaProperty);
            boolean isReadonly = metaProperty.isReadOnly();
            switch (metaProperty.getType()) {
                case DATATYPE:
                case ENUM:
                    // A String primary key is shown even if it is a "system" property.
                    boolean includeId = primaryKeyProperty.equals(metaProperty)
                            && String.class.equals(metaProperty.getJavaType());
                    //skip system properties
                    if (tools.isSystem(metaProperty) && !showSystemFields && !includeId) {
                        continue;
                    }
                    // Byte arrays and UUIDs have no sensible generic editor.
                    if (metaProperty.getType() != MetaProperty.Type.ENUM
                            && (isByteArray(metaProperty) || isUuid(metaProperty))) {
                        continue;
                    }

                    if (includeId && !isNew) {
                        // Existing ids must not be edited.
                        isReadonly = true;
                    }

                    Range range = metaProperty.getRange();
                    if (range.isDatatype()
                            && range.asDatatype().getJavaClass().equals(Boolean.class)) {
                        // Booleans use a LookupField so null can be displayed.
                        addBooleanCustomField(metaClass, metaProperty, item, fieldGroup, isRequired, isReadonly);
                        break;
                    }

                    addField(metaClass, metaProperty, item, fieldGroup, isRequired, false, isReadonly, customFields);
                    break;
                case COMPOSITION:
                case ASSOCIATION:
                    if (metaProperty.getRange().getCardinality().isMany()) {
                        addTable(metaClass, metaProperty);
                    } else {
                        if (isEmbedded(metaProperty)) {
                            Entity propertyValue = item.getValue(metaProperty.getName());
                            addEmbeddedFieldGroup(metaProperty, "", propertyValue);
                        } else {
                            addField(metaClass, metaProperty, item, fieldGroup, isRequired, true, isReadonly, customFields);
                        }
                    }
                    break;
                default:
                    break;
            }
        }
        fieldGroup.setDatasource(datasource);
        fieldGroup.bind();
        createCustomFields(fieldGroup, customFields);
    }

    /**
     * Creates field group for the embedded property
     *
     * @param embeddedMetaProperty meta property of the embedded property
     * @param embeddedItem         current value of the embedded property
     */
    protected void addEmbeddedFieldGroup(MetaProperty embeddedMetaProperty, String fqnPrefix, Entity embeddedItem) {
        // FQN keys match those registered in createNestedEmbeddedDatasources.
        String fqn = fqnPrefix.isEmpty() ? embeddedMetaProperty.getName()
                : fqnPrefix + "." + embeddedMetaProperty.getName();
        Datasource embedDs = datasources.get(fqn);
        if (embedDs == null) {
            throw new IllegalStateException(String.format("Datasource %s for property %s not found", fqn,
                    embeddedMetaProperty.getName()));
        }
        FieldGroup fieldGroup = componentsFactory.createComponent(FieldGroup.class);
        fieldGroup.setBorderVisible(true);
        fieldGroup.setCaption(getPropertyCaption(embedDs.getMetaClass(), embeddedMetaProperty));

        contentPane.add(fieldGroup);
        fieldGroup.setFrame(frame);

        MetaClass embeddableMetaClass = embeddedMetaProperty.getRange().asClass();
        Collection<FieldGroup.FieldConfig> customFields = new LinkedList<>();
        MetaProperty nullIndicatorProperty = getNullIndicatorProperty(embeddedMetaProperty);
        List<String> dateTimeFields = new ArrayList<>();

        for (MetaProperty metaProperty : embeddableMetaClass.getProperties()) {
            boolean isRequired = isRequired(metaProperty) || metaProperty.equals(nullIndicatorProperty);
            boolean isReadonly = metaProperty.isReadOnly();

            switch (metaProperty.getType()) {
                case DATATYPE:
                    if (metaProperty.getRange().asDatatype().getJavaClass().equals(Date.class)) {
                        dateTimeFields.add(metaProperty.getName());
                    }
                    // NOTE: deliberate fall-through to ENUM — DATATYPE shares
                    // the filtering and addField() logic below.
                case ENUM:
                    //skip system properties
                    if (metadata.getTools().isSystem(metaProperty) && !showSystemFields) {
                        continue;
                    }
                    if (metaProperty.getType() != MetaProperty.Type.ENUM
                            && (isByteArray(metaProperty) || isUuid(metaProperty))) {
                        continue;
                    }
                    addField(embeddableMetaClass, metaProperty, embeddedItem, fieldGroup, isRequired, false, isReadonly, customFields);
                    break;
                case COMPOSITION:
                case ASSOCIATION:
                    if (metaProperty.getRange().getCardinality().isMany()) {
                        throw new IllegalStateException("tables for the embeddable entities are not supported");
                    } else {
                        if (isEmbedded(metaProperty)) {
                            // Recurse into nested embeddables with the extended FQN.
                            Entity propertyValue = embeddedItem.getValue(metaProperty.getName());
                            addEmbeddedFieldGroup(metaProperty, fqn, propertyValue);
                        } else {
                            addField(embeddableMetaClass, metaProperty, embeddedItem, fieldGroup, isRequired, true, isReadonly, customFields);
                        }
                    }
                    break;
                default:
                    break;
            }
        }
        fieldGroup.setDatasource(embedDs);
        fieldGroup.bind();
        createCustomFields(fieldGroup, customFields);
        // Date fields get second-level resolution in the inspector.
        for (String dateTimeField : dateTimeFields) {
            FieldGroup.FieldConfig field = fieldGroup.getField(dateTimeField);
            if (field != null && field.getComponent() != null) {
                ((DateField) field.getComponent()).setResolution(DateField.Resolution.SEC);
            }
        }
    }

    protected boolean isByteArray(MetaProperty metaProperty) {
        return metaProperty.getRange().asDatatype().getJavaClass().equals(byte[].class);
    }

    protected boolean isUuid(MetaProperty metaProperty) {
        return metaProperty.getRange().asDatatype().getJavaClass().equals(UUID.class);
    }

    // A property is required if it is mandatory in metadata or declared
    // non-optional via @ManyToOne / @OneToOne.
    protected boolean isRequired(MetaProperty metaProperty) {
        if (metaProperty.isMandatory())
            return true;

        ManyToOne many2One = metaProperty.getAnnotatedElement().getAnnotation(ManyToOne.class);
        if (many2One != null && !many2One.optional())
            return true;

        OneToOne one2one = metaProperty.getAnnotatedElement().getAnnotation(OneToOne.class);
        return one2one != null && !one2one.optional();
    }

    /**
     * Creates and registers in dsContext property datasource for each of the entity non-datatype
     * and non-enum property
     *
     * @param masterDs master datasource
     */
    protected void createPropertyDatasources(Datasource masterDs) {
        for (MetaProperty metaProperty : meta.getProperties()) {
            switch (metaProperty.getType()) {
                case COMPOSITION:
                case ASSOCIATION:
                    NestedDatasource propertyDs;
                    if (metaProperty.getRange().getCardinality().isMany()) {
                        propertyDs = new CollectionPropertyDatasourceImpl();
                    } else {
                        if (isEmbedded(metaProperty)) {
                            propertyDs = new EmbeddedDatasourceImpl();
                        } else {
                            propertyDs = new PropertyDatasourceImpl();
                        }
                    }
                    propertyDs.setup(metaProperty.getName() + "Ds", masterDs, metaProperty.getName());
                    if (isEmbedded(metaProperty)) {
                        // Embeddables may nest further embeddables.
                        createNestedEmbeddedDatasources(metaProperty.getRange().asClass(), metaProperty.getName(), propertyDs);
                    }
                    datasources.put(metaProperty.getName(), propertyDs);
                    dsContext.register(propertyDs);
                    break;
                default:
                    break;
            }
        }
    }

    // Recursively registers EmbeddedDatasourceImpl instances for embedded
    // properties nested inside an embeddable, keyed by dotted FQN.
    protected void createNestedEmbeddedDatasources(MetaClass metaClass, String fqnPrefix, Datasource masterDs) {
        for (MetaProperty metaProperty : metaClass.getProperties()) {
            if (MetaProperty.Type.ASSOCIATION == metaProperty.getType()
                    || MetaProperty.Type.COMPOSITION == metaProperty.getType()) {
                if (isEmbedded(metaProperty)) {
                    String fqn = fqnPrefix + "." + metaProperty.getName();
                    MetaClass propertyMetaClass = metaProperty.getRange().asClass();
                    NestedDatasource propertyDs = new EmbeddedDatasourceImpl();
                    propertyDs.setup(fqn + "Ds", masterDs, metaProperty.getName());
                    createNestedEmbeddedDatasources(propertyMetaClass, fqn, propertyDs);
                    datasources.put(fqn, propertyDs);
                    dsContext.register(propertyDs);
                }
            }
        }
    }

    // Builds the OK/Cancel buttons panel at the bottom of the screen.
    protected void createCommitButtons() {
        buttonsPanel = componentsFactory.createComponent(ButtonsPanel.class);
        commitButton = componentsFactory.createComponent(Button.class);
        commitButton.setIcon("icons/ok.png");
        commitButton.setCaption(messages.getMessage(EntityInspectorEditor.class, "commit"));
        commitButton.setAction(new CommitAction());
        cancelButton = componentsFactory.createComponent(Button.class);
        cancelButton.setIcon("icons/cancel.png");
        cancelButton.setCaption(messages.getMessage(EntityInspectorEditor.class, "cancel"));
        cancelButton.setAction(new CancelAction());
        buttonsPanel.add(commitButton);
        buttonsPanel.add(cancelButton);
        buttonsBox.add(buttonsPanel);
    }

    /**
     * Adds field to the specified field group.
     * If the field should be custom, adds it to the specified customFields collection
     * which can be used later to create fieldGenerators
     *
     * @param metaProperty meta property of the item's property which field is creating
     * @param item         entity instance containing given property
     * @param fieldGroup   field group to which created field will be added
     * @param customFields if the field is custom it will be added to this collection
     * @param required     true if the field is required
     * @param custom       true if the field is custom
     */
    protected void addField(MetaClass metaClass, MetaProperty metaProperty, Entity item,
                            FieldGroup fieldGroup, boolean required, boolean custom, boolean readOnly,
                            Collection<FieldGroup.FieldConfig> customFields) {
        // Skip attributes/entities the current user is not permitted to view.
        if (!attrViewPermitted(metaClass, metaProperty))
            return;

        if ((metaProperty.getType() == MetaProperty.Type.COMPOSITION
                || metaProperty.getType() == MetaProperty.Type.ASSOCIATION)
                && !entityOpPermitted(metaProperty.getRange().asClass(), EntityOp.READ))
            return;

        FieldGroup.FieldConfig field = fieldGroup.createField(metaProperty.getName());
        field.setProperty(metaProperty.getName());
        field.setCaption(getPropertyCaption(metaClass, metaProperty));
        field.setCustom(custom);
        field.setRequired(required);
        field.setEditable(!readOnly);
        field.setWidth("400px");

        if (requireTextArea(metaProperty, item)) {
            // Long/multiline strings are rendered as a 3-row text area via an
            // XML descriptor.
            Element root = DocumentHelper.createElement("textArea");
            root.addAttribute("rows", "3");
            field.setXmlDescriptor(root);
        }

        // Remember the first editable field so init() can focus it.
        if (focusFieldId == null && !readOnly) {
            focusFieldId = field.getId();
            focusFieldGroup = fieldGroup;
        }

        if (required) {
            field.setRequiredMessage(messageTools.getDefaultRequiredMessage(metaClass, metaProperty.getName()));
        }
        fieldGroup.addField(field);
        if (custom)
            customFields.add(field);
    }

    /**
     * Adds LookupField with boolean values instead of CheckBox that can't display null value.
* * @param metaClass meta property of the item's property which field is creating * @param metaProperty meta property of the item's property which field is creating * @param item entity instance containing given property * @param fieldGroup field group to which created field will be added * @param required true if the field is required * @param readOnly false if field should be editable */ protected void addBooleanCustomField(MetaClass metaClass, MetaProperty metaProperty, Entity item, FieldGroup fieldGroup, boolean required, boolean readOnly) { if (!attrViewPermitted(metaClass, metaProperty)) { return; } LookupField field = componentsFactory.createComponent(LookupField.class); String caption = getPropertyCaption(datasource.getMetaClass(), metaProperty); field.setCaption(caption); field.setEditable(!readOnly); field.setRequired(required); field.setDatasource(datasource, metaProperty.getName()); field.setOptionsMap(ParamsMap.of( messages.getMainMessage("trueString"), Boolean.TRUE, messages.getMainMessage("falseString"), Boolean.FALSE)); field.setTextInputAllowed(false); if (!PersistenceHelper.isNew(item)) { MetaPropertyPath metaPropertyPath = metaClass.getPropertyPath(metaProperty.getName()); Object value = InstanceUtils.getValueEx(item, metaPropertyPath.getPath()); field.setValue(value); } FieldGroup.FieldConfig fieldConfig = fieldGroup.createField(metaProperty.getName()); fieldConfig.setWidth("400px"); fieldConfig.setComponent(field); fieldGroup.addField(fieldConfig); } /** * @param metaProperty meta property * @param item entity containing property of the given meta property * @return true if property require text area component; that is if it either too long or contains line separators */ protected boolean requireTextArea(MetaProperty metaProperty, Entity item) { if (!String.class.equals(metaProperty.getJavaType())) { return false; } Integer textLength = (Integer) metaProperty.getAnnotations().get("length"); boolean isLong = textLength == null || textLength > 
MAX_TEXTFIELD_STRING_LENGTH;
        // multi-line content always needs a text area, regardless of the declared length
        Object value = item.getValue(metaProperty.getName());
        boolean isContainsSeparator = value != null && containsSeparator((String) value);
        return isLong || isContainsSeparator;
    }

    /**
     * @param s string to inspect; must not be null
     * @return true if the string contains at least one line separator character ('\n' or '\r')
     */
    protected boolean containsSeparator(String s) {
        return s.indexOf('\n') >= 0 || s.indexOf('\r') >= 0;
    }

    /**
     * Checks if specified property is a reference to entity's parent entity.
     * Parent entity can be specified during creating of this screen.
     *
     * @param metaProperty meta property
     * @return true if property references to a parent entity
     */
    protected boolean isParentProperty(MetaProperty metaProperty) {
        return parentProperty != null && metaProperty.getName().equals(parentProperty);
    }

    /**
     * Creates custom fields and adds them to the fieldGroup.
     * Each custom (reference) field is rendered as a PickerField with lookup and
     * clear actions; the lookup is forwarded to the EntityInspectorBrowse screen.
     */
    protected void createCustomFields(FieldGroup fieldGroup, Collection<FieldGroup.FieldConfig> customFields) {
        for (FieldGroup.FieldConfig field : customFields) {
            // the custom field generator creates a PickerField for the reference attribute
            fieldGroup.addCustomField(field, new FieldGroup.CustomFieldGenerator() {
                @Override
                public Component generateField(Datasource datasource, String propertyId) {
                    MetaProperty metaProperty = datasource.getMetaClass().getPropertyNN(propertyId);
                    MetaClass propertyMeta = metaProperty.getRange().asClass();
                    PickerField field = componentsFactory.createComponent(PickerField.class);
                    String caption = getPropertyCaption(datasource.getMetaClass(), metaProperty);
                    field.setCaption(caption);
                    field.setMetaClass(propertyMeta);
                    field.setWidth("400px");
                    PickerField.LookupAction lookupAction = field.addLookupAction();
                    // forwards lookup to the EntityInspectorBrowse window
                    lookupAction.setLookupScreen(EntityInspectorBrowse.SCREEN_NAME);
                    lookupAction.setLookupScreenOpenType(OPEN_TYPE);
                    lookupAction.setLookupScreenParams(ParamsMap.of("entity", propertyMeta.getName()));
                    field.addClearAction();
                    // don't let the user change the parent reference
                    if (isParentProperty(metaProperty)) {
                        // set parent item if it has been retrieved
                        if (parent != null) {
                            if
(parent.toString() == null) { initNamePatternFields(parent); } field.setValue(parent); } field.setEditable(false); } field.setDatasource(datasource, propertyId); return field; } }); } } /** * Tries to initialize entity fields included in entity name pattern by default values * * @param entity instance */ protected void initNamePatternFields(Entity entity) { Collection<MetaProperty> properties = metadata.getTools().getNamePatternProperties(entity.getMetaClass()); for (MetaProperty property : properties) { if (entity.getValue(property.getName()) == null && property.getType() == MetaProperty.Type.DATATYPE) { try { entity.setValue(property.getName(), property.getJavaType().newInstance()); } catch (InstantiationException | IllegalAccessException e) { throw new RuntimeException("Unable to set value of name pattern field", e); } } } } protected String getPropertyCaption(MetaClass metaClass, MetaProperty metaProperty) { String caption = messageTools.getPropertyCaption(metaClass, metaProperty.getName()); if (caption.length() < CAPTION_MAX_LENGTH) return caption; else return caption.substring(0, CAPTION_MAX_LENGTH); } /** * Creates a table for the entities in ONE_TO_MANY or MANY_TO_MANY relation with the current one */ protected void addTable(MetaClass metaClass, MetaProperty childMeta) { MetaClass meta = childMeta.getRange().asClass(); //don't show empty table if the user don't have permissions on the attribute or the entity if (!attrViewPermitted(metaClass, childMeta.getName()) || !entityOpPermitted(meta, EntityOp.READ)) { return; } //don't show table on new master item, because an exception occurred on safe new item in table if (isNew && childMeta.getType().equals(MetaProperty.Type.ASSOCIATION)) { return; } //vertical box for the table and its label BoxLayout vbox = componentsFactory.createComponent(VBoxLayout.class); vbox.setWidth("100%"); CollectionDatasource propertyDs = (CollectionDatasource) datasources.get(childMeta.getName()); Table table = 
componentsFactory.createComponent(Table.class); table.setMultiSelect(true); table.setFrame(frame); //place non-system properties columns first LinkedList<Table.Column> nonSystemPropertyColumns = new LinkedList<>(); LinkedList<Table.Column> systemPropertyColumns = new LinkedList<>(); for (MetaProperty metaProperty : meta.getProperties()) { if (metaProperty.getRange().isClass() || isRelatedToNonLocalProperty(metaProperty)) continue; // because we use local views Table.Column column = new Table.Column(meta.getPropertyPath(metaProperty.getName())); if (!metadata.getTools().isSystem(metaProperty)) { column.setCaption(getPropertyCaption(meta, metaProperty)); nonSystemPropertyColumns.add(column); } else { column.setCaption(metaProperty.getName()); systemPropertyColumns.add(column); } if (metaProperty.getJavaType().equals(String.class)) { column.setMaxTextLength(MAX_TEXT_LENGTH); } } for (Table.Column column : nonSystemPropertyColumns) table.addColumn(column); for (Table.Column column : systemPropertyColumns) table.addColumn(column); //set datasource so we could create a buttons panel table.setDatasource(propertyDs); //refresh ds to read ds size propertyDs.refresh(); ButtonsPanel propertyButtonsPanel = createButtonsPanel(childMeta, propertyDs, table); table.setButtonsPanel(propertyButtonsPanel); RowsCount rowsCount = componentsFactory.createComponent(RowsCount.class); rowsCount.setDatasource(propertyDs); table.setRowsCount(rowsCount); table.setWidth("100%"); vbox.setHeight(themeConstants.get("cuba.gui.EntityInspectorEditor.tableContainer.height")); vbox.add(table); vbox.expand(table); vbox.setMargin(true); TabSheet.Tab tab = tablesTabSheet.addTab(childMeta.toString(), vbox); tab.setCaption(getPropertyCaption(metaClass, childMeta)); tables.add(table); } /** * Determine whether the given metaProperty relates to at least one non local property */ protected boolean isRelatedToNonLocalProperty(MetaProperty metaProperty) { MetaClass metaClass = metaProperty.getDomain(); for 
(String relatedProperty : metadata.getTools().getRelatedProperties(metaProperty)) { //noinspection ConstantConditions if (metaClass.getProperty(relatedProperty).getRange().isClass()) { return true; } } return false; } /** * Creates a buttons panel managing table's content. * * @param metaProperty property representing table's data * @param propertyDs property's Datasource (CollectionPropertyDatasource usually) * @param table table * @return buttons panel */ @SuppressWarnings("unchecked") protected ButtonsPanel createButtonsPanel(final MetaProperty metaProperty, final CollectionDatasource propertyDs, Table table) { MetaClass propertyMetaClass = metaProperty.getRange().asClass(); ButtonsPanel propertyButtonsPanel = componentsFactory.createComponent(ButtonsPanel.class); Button createButton = componentsFactory.createComponent(Button.class); CreateAction createAction = new CreateAction(metaProperty, propertyDs, propertyMetaClass); createButton.setAction(createAction); table.addAction(createAction); createButton.setCaption(messages.getMessage(EntityInspectorEditor.class, "create")); createButton.setIcon("icons/create.png"); Button addButton = componentsFactory.createComponent(Button.class); AddAction addAction = createAddAction(metaProperty, propertyDs, table, propertyMetaClass); table.addAction(addAction); addButton.setAction(addAction); addButton.setCaption(messages.getMessage(EntityInspectorEditor.class, "add")); addButton.setIcon("icons/add.png"); Button editButton = componentsFactory.createComponent(Button.class); EditAction editAction = new EditAction(metaProperty, table, propertyDs); editButton.setAction(editAction); editButton.setCaption(messages.getMessage(EntityInspectorEditor.class, "edit")); editButton.setIcon("icons/edit.png"); table.addAction(editAction); table.setItemClickAction(editAction); table.setEnterPressAction(editAction); RemoveAction removeAction = createRemoveAction(metaProperty, table); Button removeButton = 
componentsFactory.createComponent(Button.class); removeButton.setAction(removeAction); table.addAction(removeAction); removeButton.setCaption(messages.getMessage(EntityInspectorEditor.class, "remove")); removeButton.setIcon("icons/remove.png"); propertyButtonsPanel.add(createButton); propertyButtonsPanel.add(addButton); propertyButtonsPanel.add(editButton); propertyButtonsPanel.add(removeButton); return propertyButtonsPanel; } protected AddAction createAddAction(MetaProperty metaProperty, CollectionDatasource propertyDs, Table table, MetaClass propertyMetaClass) { Lookup.Handler addHandler = createAddHandler(metaProperty, propertyDs); AddAction addAction = new AddAction(table, addHandler, OPEN_TYPE); addAction.setWindowId(EntityInspectorBrowse.SCREEN_NAME); HashMap<String, Object> params = new HashMap<>(); params.put("entity", propertyMetaClass.getName()); MetaProperty inverseProperty = metaProperty.getInverse(); if (inverseProperty != null) params.put("parentProperty", inverseProperty.getName()); addAction.setWindowParams(params); addAction.setOpenType(OPEN_TYPE); addAction.setShortcut(configuration.getConfig(ClientConfig.class).getTableAddShortcut()); return addAction; } @SuppressWarnings("unchecked") protected Lookup.Handler createAddHandler(final MetaProperty metaProperty, final CollectionDatasource propertyDs) { Lookup.Handler result = new Lookup.Handler() { @Override public void handleLookup(Collection items) { for (Object item : items) { Entity entity = (Entity) item; if (!propertyDs.getItems().contains(entity)) { MetaProperty inverseProperty = metaProperty.getInverse(); if (inverseProperty != null) { if (!inverseProperty.getRange().getCardinality().isMany()) { //set currently editing item to the child's parent property entity.setValue(inverseProperty.getName(), datasource.getItem()); propertyDs.addItem(entity); } else { Collection properties = entity.getValue(inverseProperty.getName()); if (properties != null) { properties.add(datasource.getItem()); 
propertyDs.addItem(entity);
                            }
                        }
                    }
                }
                // NOTE(review): this addItem runs for EVERY looked-up entity, in
                // addition to the addItem calls inside the branches above, so an
                // entity can end up added twice (and already-contained entities are
                // re-added). Verify against CollectionDatasource.addItem semantics.
                propertyDs.addItem(entity);
            }
        }
    };
    // refresh to sync the property datasource with the database before lookup
    propertyDs.refresh();
    return result;
}

/**
 * Validates the screen, commits the whole datasource context and closes the
 * window with COMMIT_ACTION_ID. Validation errors are shown as a tray
 * notification instead of being propagated to the caller.
 */
public void commitAndClose() {
    try {
        validate();
        dsContext.commit();
        close(Window.COMMIT_ACTION_ID, true);
    } catch (ValidationException e) {
        showNotification("Validation error", e.getMessage(), NotificationType.TRAY);
    }
}

/**
 * Creates either Remove or Exclude action depending on property type
 */
protected RemoveAction createRemoveAction(MetaProperty metaProperty, Table table) {
    RemoveAction result;
    switch (metaProperty.getType()) {
        case COMPOSITION:
            // composition items are owned by the master entity -> physically remove them
            result = new com.haulmont.cuba.gui.components.actions.RemoveAction(table);
            break;
        case ASSOCIATION:
            // associated items exist independently -> only exclude them from the collection
            result = new com.haulmont.cuba.gui.components.actions.ExcludeAction(table);
            result.setShortcut(configuration.getConfig(ClientConfig.class).getTableRemoveShortcut());
            break;
        default:
            throw new IllegalArgumentException("property must contain an entity");
    }
    // the removal is committed together with the master editor, not per row
    result.setAutocommit(false);
    return result;
}

/**
 * Creates a view, loading all the properties.
 * Referenced entities will be loaded with a LOCAL view.
*
 * @param meta meta class
 * @return View instance
 */
protected View createView(MetaClass meta) {
    // second ctor arg false: do not include properties automatically; each one is added explicitly below
    View view = new View(meta.getJavaClass(), false);
    for (MetaProperty metaProperty : meta.getProperties()) {
        switch (metaProperty.getType()) {
            case DATATYPE:
            case ENUM:
                view.addProperty(metaProperty.getName());
                break;
            case ASSOCIATION:
            case COMPOSITION:
                MetaClass metaPropertyClass = metaProperty.getRange().asClass();
                String metaPropertyClassName = metaPropertyClass.getName();
                if (metadata.getTools().isEmbedded(metaProperty)) {
                    View embeddedViewWithRelations = createEmbeddedView(metaPropertyClass);
                    view.addProperty(metaProperty.getName(), embeddedViewWithRelations);
                } else {
                    // to-many references are shown in tables -> all local attributes needed;
                    // to-one references only need the instance-name attributes
                    String viewName;
                    if (metaProperty.getRange().getCardinality().isMany()) {
                        viewName = View.LOCAL;
                    } else {
                        viewName = View.MINIMAL;
                    }
                    View propView = viewRepository.getView(metaPropertyClass, viewName);
                    // third ctor arg true: presumably includes system properties -- TODO confirm View ctor contract
                    view.addProperty(metaProperty.getName(),
                            new View(propView, metaPropertyClassName + ".entity-inspector-view", true));
                }
                break;
            default:
                throw new IllegalStateException("unknown property type");
        }
    }
    return view;
}

/**
 * Builds a view for an embedded entity based on its BASE view, additionally
 * including every to-one reference of the embedded class with a MINIMAL view.
 *
 * @param metaPropertyClass meta class of the embedded entity
 * @return view covering the embedded entity and its to-one references
 */
protected View createEmbeddedView(MetaClass metaPropertyClass) {
    View propView = viewRepository.getView(metaPropertyClass, View.BASE);
    View embeddedViewWithRelations = new View(propView,
            metaPropertyClass.getName() + ".entity-inspector-view", true);
    // iterate embedded properties and add relations with MINIMAL view
    for (MetaProperty embeddedNestedProperty : metaPropertyClass.getProperties()) {
        if (embeddedNestedProperty.getRange().isClass()
                && !embeddedNestedProperty.getRange().getCardinality().isMany()) {
            View embeddedRelationView = viewRepository.getView(
                    embeddedNestedProperty.getRange().asClass(), View.MINIMAL);
            embeddedViewWithRelations.addProperty(embeddedNestedProperty.getName(), embeddedRelationView);
        }
    }
    return embeddedViewWithRelations;
}

/** Commits all changes and closes the editor; marked as the primary (default) action. */
protected class CommitAction extends AbstractAction {

    protected CommitAction() {
        super("commit", Status.PRIMARY);
    }

    @Override
    public void
actionPerform(Component component) { commitAndClose(); } } protected class CancelAction extends AbstractAction { protected CancelAction() { super("cancel"); } @Override public void actionPerform(Component component) { close(Window.CLOSE_ACTION_ID); } } /** * Opens entity inspector's editor to create entity */ protected class CreateAction extends AbstractAction { private CollectionDatasource entitiesDs; private MetaClass entityMeta; protected MetaProperty metaProperty; protected CreateAction(MetaProperty metaProperty, CollectionDatasource entitiesDs, MetaClass entityMeta) { super("create"); this.entitiesDs = entitiesDs; this.entityMeta = entityMeta; this.metaProperty = metaProperty; setShortcut(configuration.getConfig(ClientConfig.class).getTableInsertShortcut()); } @Override @SuppressWarnings("unchecked") public void actionPerform(Component component) { Map<String, Object> editorParams = new HashMap<>(); editorParams.put("metaClass", entityMeta.getName()); editorParams.put("autocommit", Boolean.FALSE); MetaProperty inverseProperty = metaProperty.getInverse(); if (inverseProperty != null) editorParams.put("parentProperty", inverseProperty.getName()); editorParams.put("parent", item); if (metaProperty.getType() == MetaProperty.Type.COMPOSITION) editorParams.put("parentDs", entitiesDs); EntityInspectorEditor editor = (EntityInspectorEditor) openWindow("entityInspector.edit", OPEN_TYPE, editorParams); editor.addCloseListener(actionId -> { if (COMMIT_ACTION_ID.equals(actionId) && metaProperty.getType() == MetaProperty.Type.ASSOCIATION) { boolean modified = entitiesDs.isModified(); entitiesDs.addItem(editor.getItem()); ((DatasourceImplementation) entitiesDs).setModified(modified); } }); } } protected class EditAction extends ItemTrackingAction { private Table entitiesTable; private CollectionDatasource entitiesDs; private MetaProperty metaProperty; protected EditAction(MetaProperty metaProperty, Table entitiesTable, CollectionDatasource entitiesDs) { super(entitiesTable, 
"edit"); this.entitiesTable = entitiesTable; this.entitiesDs = entitiesDs; this.metaProperty = metaProperty; } @Override public void actionPerform(Component component) { Set selected = entitiesTable.getSelected(); if (selected.size() != 1) return; Entity editItem = (Entity) selected.toArray()[0]; Map<String, Object> editorParams = new HashMap<>(); editorParams.put("metaClass", editItem.getMetaClass()); editorParams.put("item", editItem); editorParams.put("parent", item); editorParams.put("autocommit", Boolean.FALSE); MetaProperty inverseProperty = metaProperty.getInverse(); if (inverseProperty != null) editorParams.put("parentProperty", inverseProperty.getName()); if (metaProperty.getType() == MetaProperty.Type.COMPOSITION) editorParams.put("parentDs", entitiesDs); Window window = openWindow("entityInspector.edit", OPEN_TYPE, editorParams); window.addCloseListener(actionId -> entitiesDs.refresh()); } } protected boolean attrViewPermitted(MetaClass metaClass, String property) { return attrPermitted(metaClass, property, EntityAttrAccess.VIEW); } protected boolean attrViewPermitted(MetaClass metaClass, MetaProperty metaProperty) { return attrPermitted(metaClass, metaProperty.getName(), EntityAttrAccess.VIEW); } protected boolean attrPermitted(MetaClass metaClass, String property, EntityAttrAccess entityAttrAccess) { return security.isEntityAttrPermitted(metaClass, property, entityAttrAccess); } protected boolean entityOpPermitted(MetaClass metaClass, EntityOp entityOp) { return security.isEntityOpPermitted(metaClass, entityOp); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.xdbm.search.impl; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.util.HashSet; import java.util.Set; import org.apache.directory.api.util.FileUtils; import org.apache.directory.api.ldap.model.constants.SchemaConstants; import org.apache.directory.api.ldap.model.cursor.Cursor; import org.apache.directory.api.ldap.model.cursor.InvalidCursorPositionException; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.filter.ExprNode; import org.apache.directory.api.ldap.model.filter.FilterParser; import org.apache.directory.api.ldap.model.filter.NotNode; import org.apache.directory.api.ldap.model.filter.SubstringNode; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.api.ldap.model.schema.syntaxCheckers.UuidSyntaxChecker; import org.apache.directory.api.ldap.schema.extractor.SchemaLdifExtractor; import 
org.apache.directory.api.ldap.schema.extractor.impl.DefaultSchemaLdifExtractor; import org.apache.directory.api.ldap.schema.loader.LdifSchemaLoader; import org.apache.directory.api.ldap.schema.manager.impl.DefaultSchemaManager; import org.apache.directory.api.util.Strings; import org.apache.directory.api.util.exception.Exceptions; import org.apache.directory.server.core.api.LdapPrincipal; import org.apache.directory.server.core.api.MockCoreSession; import org.apache.directory.server.core.api.MockDirectoryService; import org.apache.directory.server.core.api.partition.Partition; import org.apache.directory.server.core.api.partition.PartitionTxn; import org.apache.directory.server.core.partition.impl.avl.AvlPartition; import org.apache.directory.server.xdbm.IndexEntry; import org.apache.directory.server.xdbm.MockPartitionReadTxn; import org.apache.directory.server.xdbm.StoreUtils; import org.apache.directory.server.xdbm.impl.avl.AvlIndex; import org.apache.directory.server.xdbm.search.Evaluator; import org.apache.directory.server.xdbm.search.cursor.NotCursor; import org.apache.directory.server.xdbm.search.evaluator.SubstringEvaluator; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.parallel.Execution; import org.junit.jupiter.api.parallel.ExecutionMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * Test cases for NotCursor. 
*
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
@Execution(ExecutionMode.SAME_THREAD)
public class NotCursorTest extends AbstractCursorTest
{
    private static final Logger LOG = LoggerFactory.getLogger( NotCursorTest.class );

    // syntax checker used to verify that cursor keys/values are well-formed UUIDs
    UuidSyntaxChecker uuidSynChecker = UuidSyntaxChecker.INSTANCE;

    // temporary working directory for the store, recreated per test
    File wkdir;

    // schema manager shared by all tests, initialized once in setup()
    static SchemaManager schemaManager = null;


    /**
     * Loads the standard schema (plus the 'collective' schema) once before any
     * test runs; fails fast when the schema cannot be loaded.
     */
    @BeforeAll
    static public void setup() throws Exception
    {
        // setup the standard registries
        String workingDirectory = System.getProperty( "workingDirectory" );

        if ( workingDirectory == null )
        {
            // derive the 'target' build directory from this class' location
            String path = NotCursorTest.class.getResource( "" ).getPath();
            int targetPos = path.indexOf( "target" );
            workingDirectory = path.substring( 0, targetPos + 6 );
        }

        File schemaRepository = new File( workingDirectory, "schema" );
        SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( workingDirectory ) );
        extractor.extractOrCopy( true );
        LdifSchemaLoader loader = new LdifSchemaLoader( schemaRepository );
        schemaManager = new DefaultSchemaManager( loader );

        boolean loaded = schemaManager.loadAllEnabled();

        if ( !loaded )
        {
            fail( "Schema load failed : " + Exceptions.printErrors( schemaManager.getErrors() ) );
        }

        loaded = schemaManager.loadWithDeps( loader.getSchema( "collective" ) );

        if ( !loaded )
        {
            fail( "Schema load failed : " + Exceptions.printErrors( schemaManager.getErrors() ) );
        }
    }


    /**
     * Creates a fresh AVL partition populated with the example data before
     * each test, and wires up the evaluator/cursor builders and the session.
     */
    @BeforeEach
    public void createStore() throws Exception
    {
        directoryService = new MockDirectoryService();

        // setup the working directory for the store
        wkdir = File.createTempFile( getClass().getSimpleName(), "db" );
        wkdir.delete();
        wkdir = new File( wkdir.getParentFile(), getClass().getSimpleName() );
        wkdir.mkdirs();

        StoreUtils.createdExtraAttributes( schemaManager );

        // initialize the store
        store = new AvlPartition( schemaManager, directoryService.getDnFactory() );
        ( ( Partition ) store ).setId( "example" );
        store.setCacheSize( 10 );
        store.setPartitionPath( wkdir.toURI() );
        store.setSyncOnWrite( false );
        store.addIndex( new AvlIndex<String>( SchemaConstants.OU_AT_OID ) );
        store.addIndex( new AvlIndex<String>( SchemaConstants.CN_AT_OID ) );
        ( ( Partition ) store ).setSuffixDn( new Dn( schemaManager, "o=Good Times Co." ) );
        ( ( Partition ) store ).initialize();

        StoreUtils.loadExampleData( store, schemaManager );

        evaluatorBuilder = new EvaluatorBuilder( store, schemaManager );
        cursorBuilder = new CursorBuilder( store, evaluatorBuilder );
        directoryService.setSchemaManager( schemaManager );
        session = new MockCoreSession( new LdapPrincipal(), directoryService );

        LOG.debug( "Created new store" );
    }


    /** Destroys the partition and removes the working directory after each test. */
    @AfterEach
    public void destroyStore() throws Exception
    {
        if ( store != null )
        {
            ( ( Partition ) store ).destroy( null );
        }

        store = null;

        if ( wkdir != null )
        {
            FileUtils.deleteDirectory( wkdir );
        }

        wkdir = null;
    }


    /**
     * Verifies that a parsed NOT filter (!(cn=J*)) returns exactly the five
     * example-data entries whose cn does not start with 'J'.
     */
    @Test
    public void testNotCursor() throws Exception
    {
        String filter = "(!(cn=J*))";

        ExprNode exprNode = FilterParser.parse( schemaManager, filter );
        PartitionTxn txn = new MockPartitionReadTxn();

        Cursor<Entry> cursor = buildCursor( txn, exprNode );

        assertFalse( cursor.available() );

        cursor.beforeFirst();

        Set<String> set = new HashSet<String>();

        while ( cursor.next() )
        {
            assertTrue( cursor.available() );
            Entry entry = cursor.get();
            String uuid = entry.get( "entryUUID" ).getString();
            set.add( uuid );
            assertTrue( uuidSynChecker.isValidSyntax( uuid ) );
        }

        // the example data contains exactly these five non-matching entries
        assertEquals( 5, set.size() );
        assertTrue( set.contains( Strings.getUUID( 1L ) ) );
        assertTrue( set.contains( Strings.getUUID( 2L ) ) );
        assertTrue( set.contains( Strings.getUUID( 3L ) ) );
        assertTrue( set.contains( Strings.getUUID( 4L ) ) );
        assertTrue( set.contains( Strings.getUUID( 7L ) ) );

        assertFalse( cursor.next() );
        assertFalse( cursor.available() );

        cursor.close();
        assertTrue( cursor.isClosed() );
    }


    /**
     * Exercises a NotCursor built by hand (without the filter parser):
     * forward and backward iteration must yield the same five entries, and
     * get() at an invalid position / the unsupported before()/after() must
     * throw the documented exceptions.
     */
    @Test
    public void testNotCursorWithManualFilter() throws Exception
    {
        PartitionTxn txn = ( ( Partition ) store ).beginReadTransaction();

        NotNode notNode = new NotNode();

        ExprNode exprNode = new SubstringNode( schemaManager.getAttributeType( "cn" ), "J", null );
        Evaluator<? extends ExprNode> eval = new SubstringEvaluator( ( SubstringNode ) exprNode, store,
            schemaManager );
        notNode.addNode( exprNode );

        NotCursor<String> cursor = new NotCursor( txn, store, eval ); //cursorBuilder.build( andNode );
        cursor.beforeFirst();

        Set<String> set = new HashSet<String>();

        while ( cursor.next() )
        {
            assertTrue( cursor.available() );
            set.add( cursor.get().getId() );
            assertTrue( uuidSynChecker.isValidSyntax( cursor.get().getKey() ) );
        }

        assertEquals( 5, set.size() );
        assertTrue( set.contains( Strings.getUUID( 1L ) ) );
        assertTrue( set.contains( Strings.getUUID( 2L ) ) );
        assertTrue( set.contains( Strings.getUUID( 3L ) ) );
        assertTrue( set.contains( Strings.getUUID( 4L ) ) );
        assertTrue( set.contains( Strings.getUUID( 7L ) ) );

        assertFalse( cursor.next() );
        assertFalse( cursor.available() );

        // iterate backwards from the end and expect the same five entries
        cursor.afterLast();
        set.clear();

        while ( cursor.previous() )
        {
            assertTrue( cursor.available() );
            set.add( cursor.get().getId() );
            assertTrue( uuidSynChecker.isValidSyntax( cursor.get().getKey() ) );
        }

        assertEquals( 5, set.size() );
        assertTrue( set.contains( Strings.getUUID( 1L ) ) );
        assertTrue( set.contains( Strings.getUUID( 2L ) ) );
        assertTrue( set.contains( Strings.getUUID( 3L ) ) );
        assertTrue( set.contains( Strings.getUUID( 4L ) ) );
        assertTrue( set.contains( Strings.getUUID( 7L ) ) );

        assertFalse( cursor.previous() );
        assertFalse( cursor.available() );

        try
        {
            cursor.get();
            fail( "should fail with InvalidCursorPositionException" );
        }
        catch ( InvalidCursorPositionException ice )
        {
            // expected: the cursor is positioned before the first element
        }

        try
        {
            cursor.after( new IndexEntry<String, String>() );
            fail( "should fail with UnsupportedOperationException " );
        }
        catch ( UnsupportedOperationException uoe )
        {
            // expected: NotCursor does not support positioning by element
        }

        try
        {
            cursor.before( new IndexEntry<String, String>() );
            fail( "should fail with UnsupportedOperationException " );
        }
        catch ( UnsupportedOperationException uoe )
        {
            // expected: NotCursor does not support positioning by element
        }

        cursor.close();
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.compute.implementation;

import com.microsoft.azure.AzureClient;
import com.microsoft.azure.AzureServiceClient;
import com.microsoft.azure.RestClient;
import com.microsoft.rest.credentials.ServiceClientCredentials;

/**
 * Initializes a new instance of the ComputeManagementClientImpl class.
 *
 * <p>Auto-generated service client for the Azure Compute resource provider
 * (API version 2016-03-30). Holds the per-client request state (subscription,
 * api-version, language, retry timeout) and exposes one {@code *Inner}
 * accessor per operation group. Do not edit by hand; regenerate with AutoRest.
 */
public final class ComputeManagementClientImpl extends AzureServiceClient {
    /** The {@link AzureClient} used for long running operations. */
    private AzureClient azureClient;

    /**
     * Gets the {@link AzureClient} used for long running operations.
     *
     * @return the azure client.
     */
    public AzureClient getAzureClient() {
        return this.azureClient;
    }

    /** Subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call. */
    private String subscriptionId;

    /**
     * Gets subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
     *
     * @return the subscriptionId value.
     */
    public String subscriptionId() {
        return this.subscriptionId;
    }

    /**
     * Sets subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
     *
     * @param subscriptionId the subscriptionId value.
     * @return the service client itself
     */
    public ComputeManagementClientImpl withSubscriptionId(String subscriptionId) {
        this.subscriptionId = subscriptionId;
        return this;
    }

    /** Client Api Version. Fixed at initialize() time; there is deliberately no setter. */
    private String apiVersion;

    /**
     * Gets Client Api Version.
     *
     * @return the apiVersion value.
     */
    public String apiVersion() {
        return this.apiVersion;
    }

    /** The preferred language for the response. */
    private String acceptLanguage;

    /**
     * Gets the preferred language for the response.
     *
     * @return the acceptLanguage value.
     */
    public String acceptLanguage() {
        return this.acceptLanguage;
    }

    /**
     * Sets the preferred language for the response.
     *
     * @param acceptLanguage the acceptLanguage value.
     * @return the service client itself
     */
    public ComputeManagementClientImpl withAcceptLanguage(String acceptLanguage) {
        this.acceptLanguage = acceptLanguage;
        return this;
    }

    /** The retry timeout in seconds for Long Running Operations. Default value is 30. */
    private int longRunningOperationRetryTimeout;

    /**
     * Gets the retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @return the longRunningOperationRetryTimeout value.
     */
    public int longRunningOperationRetryTimeout() {
        return this.longRunningOperationRetryTimeout;
    }

    /**
     * Sets the retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @param longRunningOperationRetryTimeout the longRunningOperationRetryTimeout value.
     * @return the service client itself
     */
    public ComputeManagementClientImpl withLongRunningOperationRetryTimeout(int longRunningOperationRetryTimeout) {
        this.longRunningOperationRetryTimeout = longRunningOperationRetryTimeout;
        return this;
    }

    /** When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. */
    private boolean generateClientRequestId;

    /**
     * Gets whether a unique x-ms-client-request-id value is generated and included in each request. Default is true.
     *
     * @return the generateClientRequestId value.
     */
    public boolean generateClientRequestId() {
        return this.generateClientRequestId;
    }

    /**
     * Sets whether a unique x-ms-client-request-id value is generated and included in each request. Default is true.
     *
     * @param generateClientRequestId the generateClientRequestId value.
     * @return the service client itself
     */
    public ComputeManagementClientImpl withGenerateClientRequestId(boolean generateClientRequestId) {
        this.generateClientRequestId = generateClientRequestId;
        return this;
    }

    /**
     * The AvailabilitySetsInner object to access its operations.
     */
    private AvailabilitySetsInner availabilitySets;

    /**
     * Gets the AvailabilitySetsInner object to access its operations.
     * @return the AvailabilitySetsInner object.
     */
    public AvailabilitySetsInner availabilitySets() {
        return this.availabilitySets;
    }

    /**
     * The VirtualMachineExtensionImagesInner object to access its operations.
     */
    private VirtualMachineExtensionImagesInner virtualMachineExtensionImages;

    /**
     * Gets the VirtualMachineExtensionImagesInner object to access its operations.
     * @return the VirtualMachineExtensionImagesInner object.
     */
    public VirtualMachineExtensionImagesInner virtualMachineExtensionImages() {
        return this.virtualMachineExtensionImages;
    }

    /**
     * The VirtualMachineExtensionsInner object to access its operations.
     */
    private VirtualMachineExtensionsInner virtualMachineExtensions;

    /**
     * Gets the VirtualMachineExtensionsInner object to access its operations.
     * @return the VirtualMachineExtensionsInner object.
     */
    public VirtualMachineExtensionsInner virtualMachineExtensions() {
        return this.virtualMachineExtensions;
    }

    /**
     * The VirtualMachineImagesInner object to access its operations.
     */
    private VirtualMachineImagesInner virtualMachineImages;

    /**
     * Gets the VirtualMachineImagesInner object to access its operations.
     * @return the VirtualMachineImagesInner object.
     */
    public VirtualMachineImagesInner virtualMachineImages() {
        return this.virtualMachineImages;
    }

    /**
     * The UsagesInner object to access its operations.
     */
    private UsagesInner usages;

    /**
     * Gets the UsagesInner object to access its operations.
     * @return the UsagesInner object.
     */
    public UsagesInner usages() {
        return this.usages;
    }

    /**
     * The VirtualMachineSizesInner object to access its operations.
     */
    private VirtualMachineSizesInner virtualMachineSizes;

    /**
     * Gets the VirtualMachineSizesInner object to access its operations.
     * @return the VirtualMachineSizesInner object.
     */
    public VirtualMachineSizesInner virtualMachineSizes() {
        return this.virtualMachineSizes;
    }

    /**
     * The VirtualMachinesInner object to access its operations.
     */
    private VirtualMachinesInner virtualMachines;

    /**
     * Gets the VirtualMachinesInner object to access its operations.
     * @return the VirtualMachinesInner object.
     */
    public VirtualMachinesInner virtualMachines() {
        return this.virtualMachines;
    }

    /**
     * The VirtualMachineScaleSetsInner object to access its operations.
     */
    private VirtualMachineScaleSetsInner virtualMachineScaleSets;

    /**
     * Gets the VirtualMachineScaleSetsInner object to access its operations.
     * @return the VirtualMachineScaleSetsInner object.
     */
    public VirtualMachineScaleSetsInner virtualMachineScaleSets() {
        return this.virtualMachineScaleSets;
    }

    /**
     * The VirtualMachineScaleSetVMsInner object to access its operations.
     */
    private VirtualMachineScaleSetVMsInner virtualMachineScaleSetVMs;

    /**
     * Gets the VirtualMachineScaleSetVMsInner object to access its operations.
     * @return the VirtualMachineScaleSetVMsInner object.
     */
    public VirtualMachineScaleSetVMsInner virtualMachineScaleSetVMs() {
        return this.virtualMachineScaleSetVMs;
    }

    /**
     * Initializes an instance of ComputeManagementClient client.
     *
     * @param credentials the management credentials for Azure
     */
    public ComputeManagementClientImpl(ServiceClientCredentials credentials) {
        // Default endpoint is the public Azure Resource Manager cloud.
        this("https://management.azure.com", credentials);
    }

    /**
     * Initializes an instance of ComputeManagementClient client.
     *
     * @param baseUrl the base URL of the host
     * @param credentials the management credentials for Azure
     */
    public ComputeManagementClientImpl(String baseUrl, ServiceClientCredentials credentials) {
        this(new RestClient.Builder()
                .withBaseUrl(baseUrl)
                .withCredentials(credentials)
                .build());
    }

    /**
     * Initializes an instance of ComputeManagementClient client.
     *
     * @param restClient the REST client to connect to Azure.
     */
    public ComputeManagementClientImpl(RestClient restClient) {
        super(restClient);
        initialize();
    }

    /**
     * Sets client defaults and wires every operation-group accessor to this
     * client's shared Retrofit instance. Called once from the constructor.
     */
    protected void initialize() {
        this.apiVersion = "2016-03-30";
        this.acceptLanguage = "en-US";
        this.longRunningOperationRetryTimeout = 30;
        this.generateClientRequestId = true;
        this.availabilitySets = new AvailabilitySetsInner(restClient().retrofit(), this);
        this.virtualMachineExtensionImages = new VirtualMachineExtensionImagesInner(restClient().retrofit(), this);
        this.virtualMachineExtensions = new VirtualMachineExtensionsInner(restClient().retrofit(), this);
        this.virtualMachineImages = new VirtualMachineImagesInner(restClient().retrofit(), this);
        this.usages = new UsagesInner(restClient().retrofit(), this);
        this.virtualMachineSizes = new VirtualMachineSizesInner(restClient().retrofit(), this);
        this.virtualMachines = new VirtualMachinesInner(restClient().retrofit(), this);
        this.virtualMachineScaleSets = new VirtualMachineScaleSetsInner(restClient().retrofit(), this);
        this.virtualMachineScaleSetVMs = new VirtualMachineScaleSetVMsInner(restClient().retrofit(), this);
        this.azureClient = new AzureClient(this);
    }

    /**
     * Gets the User-Agent header for the client.
     *
     * @return the user agent string.
     */
    @Override
    public String userAgent() {
        return String.format("Azure-SDK-For-Java/%s (%s)",
                getClass().getPackage().getImplementationVersion(),
                "ComputeManagementClient, 2016-03-30");
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.keycloak.testsuite;

import org.apache.directory.api.ldap.model.constants.SupportedSaslMechanisms;
import org.apache.directory.api.ldap.model.exception.LdapInvalidDnException;
import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor;
import org.apache.directory.server.kerberos.KerberosConfig;
import org.apache.directory.server.kerberos.kdc.KdcServer;
import org.apache.directory.server.kerberos.shared.replay.ReplayCache;
import org.apache.directory.server.ldap.LdapServer;
import org.apache.directory.server.ldap.handlers.sasl.cramMD5.CramMd5MechanismHandler;
import org.apache.directory.server.ldap.handlers.sasl.digestMD5.DigestMd5MechanismHandler;
import org.apache.directory.server.ldap.handlers.sasl.gssapi.GssapiMechanismHandler;
import org.apache.directory.server.ldap.handlers.sasl.ntlm.NtlmMechanismHandler;
import org.apache.directory.server.ldap.handlers.sasl.plain.PlainMechanismHandler;
import org.apache.directory.server.protocol.shared.transport.UdpTransport;
import org.apache.directory.shared.kerberos.KerberosTime;
import org.apache.directory.shared.kerberos.KerberosUtils;
import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
import org.jboss.logging.Logger;
import org.keycloak.util.ldap.LDAPEmbeddedServer;

import javax.security.auth.kerberos.KerberosPrincipal;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Locale;
import java.util.Properties;
import java.util.Set;

/**
 * Embedded LDAP server (from the inherited {@link LDAPEmbeddedServer}) extended
 * with an ApacheDS Kerberos KDC for testsuite use. Supports two preconfigured
 * realms ({@code KEYCLOAK.ORG} and {@code KC2.COM}) selected via system
 * property, and disables KDC ticket-replay checking through the inner
 * {@link NoReplayKdcServer} so tests can reuse tickets.
 *
 * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
 */
public class KerberosEmbeddedServer extends LDAPEmbeddedServer {

    private static final Logger log = Logger.getLogger(KerberosEmbeddedServer.class);

    // Configuration property keys, read via readProperty() from the superclass.
    public static final String PROPERTY_KERBEROS_REALM = "kerberos.realm";
    public static final String PROPERTY_KDC_PORT = "kerberos.port";
    public static final String PROPERTY_KDC_ENCTYPES = "kerberos.encTypes";

    private static final String DEFAULT_KERBEROS_LDIF_FILE = "classpath:kerberos/default-users.ldif";

    public static final String DEFAULT_KERBEROS_REALM = "KEYCLOAK.ORG";
    public static final String DEFAULT_KERBEROS_REALM_2 = "KC2.COM";
    private static final String DEFAULT_KDC_PORT = "6088";
    private static final String DEFAULT_KDC_ENCRYPTION_TYPES = "aes128-cts-hmac-sha1-96, des-cbc-md5, des3-cbc-sha1-kd";

    // Resolved once in the constructor from the effective properties.
    private final String kerberosRealm;
    private final int kdcPort;
    private final String kdcEncryptionTypes;

    // Created lazily by createAndStartKdcServer() during init().
    private KdcServer kdcServer;

    /**
     * Standalone entry point: starts the embedded LDAP+KDC server using the
     * realm selected by the {@code keycloak.kerberos.realm} system property.
     */
    public static void main(String[] args) throws Exception {
        Properties defaultProperties = new Properties();
        defaultProperties.put(PROPERTY_DSF, DSF_FILE);

        String kerberosRealm = System.getProperty("keycloak.kerberos.realm", DEFAULT_KERBEROS_REALM);
        configureDefaultPropertiesForRealm(kerberosRealm, defaultProperties);

        execute(args, defaultProperties);
    }

    /**
     * Populates {@code properties} with the port/base-DN/LDIF defaults for the
     * given realm. {@link #DEFAULT_KERBEROS_REALM} uses the inherited defaults;
     * {@link #DEFAULT_KERBEROS_REALM_2} gets its own ports and LDIF so both
     * realms can run side by side.
     *
     * @throws IllegalArgumentException for any other realm name
     */
    public static void configureDefaultPropertiesForRealm(String kerberosRealm, Properties properties) {
        log.infof("Using kerberos realm: %s", kerberosRealm);

        if (DEFAULT_KERBEROS_REALM.equals(kerberosRealm)) {
            // No more configs
        } else if (DEFAULT_KERBEROS_REALM_2.equals(kerberosRealm)) {
            properties.put(PROPERTY_BASE_DN, "dc=kc2,dc=com");
            properties.put(PROPERTY_BIND_PORT, "11389");
            properties.put(PROPERTY_BIND_LDAPS_PORT, "11636");
            properties.put(PROPERTY_LDIF_FILE, "classpath:kerberos/default-users-kc2.ldif");
            properties.put(PROPERTY_KERBEROS_REALM, DEFAULT_KERBEROS_REALM_2);
            properties.put(PROPERTY_KDC_PORT, "7088");
        } else {
            throw new IllegalArgumentException("Valid values for kerberos realm are [ " + DEFAULT_KERBEROS_REALM + " , " + DEFAULT_KERBEROS_REALM_2 + " ]");
        }
    }

    /**
     * Creates, initializes and starts a server with the given properties and
     * registers a JVM shutdown hook that stops it on exit.
     */
    public static void execute(String[] args, Properties defaultProperties) throws Exception {
        final KerberosEmbeddedServer kerberosEmbeddedServer = new KerberosEmbeddedServer(defaultProperties);
        kerberosEmbeddedServer.init();
        kerberosEmbeddedServer.start();

        Runtime.getRuntime().addShutdownHook(new Thread() {

            @Override
            public void run() {
                try {
                    kerberosEmbeddedServer.stop();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

        });
    }

    /**
     * Reads realm, KDC port and encryption-type configuration and, when no
     * explicit SASL principal is configured, derives one of the form
     * {@code ldap/<canonical-host>@<REALM>} from the bind host.
     */
    public KerberosEmbeddedServer(Properties defaultProperties) {
        super(defaultProperties);
        this.ldifFile = readProperty(PROPERTY_LDIF_FILE, DEFAULT_KERBEROS_LDIF_FILE);

        this.kerberosRealm = readProperty(PROPERTY_KERBEROS_REALM, DEFAULT_KERBEROS_REALM);
        String kdcPort = readProperty(PROPERTY_KDC_PORT, DEFAULT_KDC_PORT);
        this.kdcPort = Integer.parseInt(kdcPort);
        this.kdcEncryptionTypes = readProperty(PROPERTY_KDC_ENCTYPES, DEFAULT_KDC_ENCRYPTION_TYPES);

        // ldapSaslPrincipal comes from the superclass; only derive it when the
        // configuration did not set one explicitly.
        if (ldapSaslPrincipal == null || ldapSaslPrincipal.isEmpty()) {
            String hostname = getHostnameForSASLPrincipal(bindHost);
            this.ldapSaslPrincipal = "ldap/" + hostname + "@" + this.kerberosRealm;
        }
    }

    /** Initializes the LDAP side first, then creates and starts the KDC. */
    @Override
    public void init() throws Exception {
        super.init();

        log.info("Creating KDC server. kerberosRealm: " + kerberosRealm + ", kdcPort: " + kdcPort + ", kdcEncryptionTypes: " + kdcEncryptionTypes);
        createAndStartKdcServer();
    }

    /**
     * Adds the {@link KeyDerivationInterceptor} so Kerberos keys are derived
     * from the userPassword attributes stored in the directory.
     */
    @Override
    protected DirectoryService createDirectoryService() throws Exception {
        DirectoryService directoryService = super.createDirectoryService();

        directoryService.addLast(new KeyDerivationInterceptor());
        return directoryService;
    }

    /**
     * Extends the inherited LDAP server with the SASL mechanisms needed for
     * Kerberos testing. Note GSS-SPNEGO is intentionally backed by the NTLM
     * handler here.
     */
    @Override
    protected LdapServer createLdapServer() {
        LdapServer ldapServer = super.createLdapServer();

        ldapServer.setSaslHost(this.bindHost);
        ldapServer.setSaslPrincipal(this.ldapSaslPrincipal);
        ldapServer.setSaslRealms(new ArrayList<String>());

        ldapServer.addSaslMechanismHandler(SupportedSaslMechanisms.PLAIN, new PlainMechanismHandler());
        ldapServer.addSaslMechanismHandler(SupportedSaslMechanisms.CRAM_MD5, new CramMd5MechanismHandler());
        ldapServer.addSaslMechanismHandler(SupportedSaslMechanisms.DIGEST_MD5, new DigestMd5MechanismHandler());
        ldapServer.addSaslMechanismHandler(SupportedSaslMechanisms.GSSAPI, new GssapiMechanismHandler());
        ldapServer.addSaslMechanismHandler(SupportedSaslMechanisms.NTLM, new NtlmMechanismHandler());
        ldapServer.addSaslMechanismHandler(SupportedSaslMechanisms.GSS_SPNEGO, new NtlmMechanismHandler());

        return ldapServer;
    }

    /**
     * Builds and starts the (replay-cache-disabled) KDC on a UDP transport.
     *
     * @return the started server, also kept in {@link #kdcServer}
     */
    protected KdcServer createAndStartKdcServer() throws Exception {
        KerberosConfig kdcConfig = new KerberosConfig();
        kdcConfig.setServicePrincipal("krbtgt/" + this.kerberosRealm + "@" + this.kerberosRealm);
        kdcConfig.setPrimaryRealm(this.kerberosRealm);
        // Lifetimes are in milliseconds: 1440 min (24 h) tickets, 10080 min (7 d) renewable.
        kdcConfig.setMaximumTicketLifetime(60000 * 1440);
        kdcConfig.setMaximumRenewableLifetime(60000 * 10080);
        kdcConfig.setPaEncTimestampRequired(false);
        Set<EncryptionType> encryptionTypes = convertEncryptionTypes();
        kdcConfig.setEncryptionTypes(encryptionTypes);

        kdcServer = new NoReplayKdcServer(kdcConfig);

        kdcServer.setSearchBaseDn(this.baseDN);

        UdpTransport udp = new UdpTransport(this.bindHost, this.kdcPort);
        kdcServer.addTransports(udp);

        kdcServer.setDirectoryService(directoryService);

        // Launch the server
        kdcServer.start();

        return kdcServer;
    }

    /** Stops LDAP, KDC and the backing directory service, in that order. */
    public void stop() throws Exception {
        stopLdapServer();
        stopKerberosServer();
        shutdownDirectoryService();
    }

    protected void stopKerberosServer() {
        log.info("Stopping Kerberos server.");
        kdcServer.stop();
    }

    /**
     * Parses the comma-separated encryption type names into ApacheDS
     * {@link EncryptionType}s (case-insensitive match against the known types)
     * and orders them by strength. Unknown names are silently skipped.
     */
    private Set<EncryptionType> convertEncryptionTypes() {
        Set<EncryptionType> encryptionTypes = new HashSet<EncryptionType>();
        String[] configEncTypes = kdcEncryptionTypes.split(",");

        for (String enc : configEncTypes) {
            enc = enc.trim();
            for (EncryptionType type : EncryptionType.getEncryptionTypes()) {
                if (type.getName().equalsIgnoreCase(enc)) {
                    encryptionTypes.add(type);
                }
            }
        }

        encryptionTypes = KerberosUtils.orderEtypesByStrength(encryptionTypes);
        return encryptionTypes;
    }

    // Forked from sun.security.krb5.PrincipalName constructor
    private String getHostnameForSASLPrincipal(String hostName) {
        try {
            // RFC4120 does not recommend canonicalizing a hostname.
            // However, for compatibility reason, we will try
            // canonicalize it and see if the output looks better.
            String canonicalized = (InetAddress.getByName(hostName)).getCanonicalHostName();

            // Looks if canonicalized is a longer format of hostName,
            // we accept cases like
            //     bunny -> bunny.rabbit.hole
            if (canonicalized.toLowerCase(Locale.ENGLISH).startsWith(hostName.toLowerCase(Locale.ENGLISH) + ".")) {
                hostName = canonicalized;
            }
        } catch (UnknownHostException | SecurityException e) {
            // not canonicalized or no permission to do so, use old
        }
        return hostName.toLowerCase(Locale.ENGLISH);
    }

    /**
     * Replacement of apacheDS KdcServer class with disabled ticket replay cache.
     *
     * @author Dominik Pospisil <dpospisi@redhat.com>
     */
    class NoReplayKdcServer extends KdcServer {

        NoReplayKdcServer(KerberosConfig kdcConfig) {
            super(kdcConfig);
        }

        /**
         * Dummy implementation of the ApacheDS kerberos replay cache. Essentially
         * disables kerberos ticket replay checks.
         * https://issues.jboss.org/browse/JBPAPP-10974
         *
         * @author Dominik Pospisil <dpospisi@redhat.com>
         */
        private class DummyReplayCache implements ReplayCache {

            @Override
            public boolean isReplay(KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal, KerberosTime clientTime,
                    int clientMicroSeconds) {
                // Never report a replay, so every ticket is accepted.
                return false;
            }

            @Override
            public void save(KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal, KerberosTime clientTime,
                    int clientMicroSeconds) {
            }

            @Override
            public void clear() {
            }

        }

        /**
         * Starts the KDC, then swaps the private {@code replayCache} field for
         * the dummy implementation via reflection (there is no public API for
         * this in the ApacheDS version in use).
         *
         * @throws java.io.IOException if we cannot bind to the sockets
         */
        @Override
        public void start() throws IOException, LdapInvalidDnException {
            super.start();

            try {
                // override initialized replay cache with a dummy implementation
                Field replayCacheField = KdcServer.class.getDeclaredField("replayCache");
                replayCacheField.setAccessible(true);
                replayCacheField.set(this, new DummyReplayCache());
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

    }
}
package com.javier.filterview;

import android.app.Dialog;
import android.content.Context;
import android.support.annotation.ColorRes;
import android.support.annotation.DrawableRes;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;

import com.squareup.picasso.Picasso;

import org.json.JSONArray;

import java.util.ArrayList;
import java.util.Iterator;

/**
 * Created by usuario on 20/02/17.
 *
 * Filter-selection view shown inside a translucent full-screen {@link Dialog}.
 * Configure it through {@link Builder}, then call {@link #show()}. Tapping the
 * title collects each section's result into a {@link JSONArray} and delivers it
 * to the registered {@link OnFilterViewResultListener}; tapping the close icon
 * cancels without a result. In both cases the dialog is dismissed.
 */
public class FilterView extends LinearLayout {

    // Configuration copied from the Builder.
    private String title;
    private ArrayList<Object> filterSections;
    private int backgroundColor;
    private int closeIcon;
    private int titleColor;
    private int toolbarBackgroundColor;
    private int closeIconColor;

    private OnFilterViewResultListener listener;

    // View references resolved in init().
    private TextView lbTitle;
    private ImageView imgCloseIcon;
    private RecyclerView sections;
    private View topDivisor;
    private LinearLayout mainContainer;
    private FrameLayout toolbar;

    private OnFilterCanceled onFilterCanceled;

    protected OnFilterCanceled getOnFilterCanceled() {
        return onFilterCanceled;
    }

    public void setOnFilterCanceled(OnFilterCanceled onFilterCanceled) {
        this.onFilterCanceled = onFilterCanceled;
    }

    private boolean toolbarVisible;

    /** Dialog hosting this view; non-null only after {@link #show()}. */
    private Dialog dialog;

    /**
     * Builds the view from a {@link Builder}'s configuration.
     *
     * @param context context used to inflate the layout
     * @param builder source of all display options
     */
    public FilterView(Context context, Builder builder) {
        super(context);
        this.title = builder.title;
        this.filterSections = builder.filterSections;
        this.backgroundColor = builder.backgroundColor;
        this.closeIcon = builder.closeIcon;
        this.titleColor = builder.titleColor;
        this.toolbarBackgroundColor = builder.toolbarBackgroundColor;
        this.closeIconColor = builder.closeIconColor;
        this.toolbarVisible = builder.isToolbarVisible;
        // NOTE(review): builder.divisorColor is collected but never applied to
        // topDivisor — confirm whether that is intentional.
        init(context);
    }

    /**
     * Registers the listener that receives the collected filter results.
     *
     * @return this view, for chaining
     */
    public FilterView setOnFilterViewResultListener(OnFilterViewResultListener listener) {
        this.listener = listener;
        return this;
    }

    public int getCloseIconColor() {
        return closeIconColor;
    }

    public int getToolbarBackgroundColor() {
        return toolbarBackgroundColor;
    }

    public String getTitle() {
        return title;
    }

    public ArrayList<Object> getFilterSections() {
        return filterSections;
    }

    public int getBackgroundColor() {
        return backgroundColor;
    }

    public int getCloseIcon() {
        return closeIcon;
    }

    public int getTitleColor() {
        return titleColor;
    }

    public boolean isToolbarVisible() {
        return toolbarVisible;
    }

    /** Inflates the layout, applies the configured styling and wires listeners. */
    private void init(Context context) {
        LayoutInflater.from(context).inflate(R.layout.filter_view, this, true);

        // Initialize view
        mainContainer = (LinearLayout) findViewById(R.id.mainContainer);
        lbTitle = (TextView) findViewById(R.id.lbTitle);
        imgCloseIcon = (ImageView) findViewById(R.id.imgCloseIcon);
        sections = (RecyclerView) findViewById(R.id.sections);
        sections.setLayoutManager(new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false));
        toolbar = (FrameLayout) findViewById(R.id.toolbar);
        topDivisor = findViewById(R.id.topDivisor);

        // Set values to properties. A value of 0 means "not configured" — the
        // layout's defaults are kept in that case.
        if (!isToolbarVisible()) {
            toolbar.setVisibility(GONE);
        }
        if (getToolbarBackgroundColor() != 0) {
            toolbar.setBackgroundColor(ContextCompat.getColor(context, getToolbarBackgroundColor()));
        }
        if (getBackgroundColor() != 0) {
            mainContainer.setBackgroundColor(ContextCompat.getColor(context, getBackgroundColor()));
        }
        lbTitle.setText(getTitle());
        if (getTitleColor() != 0) {
            lbTitle.setTextColor(ContextCompat.getColor(context, getTitleColor()));
        }
        if (getCloseIcon() != 0) {
            Picasso.with(context).load(getCloseIcon()).into(imgCloseIcon);
        }
        if (getFilterSections() != null) {
            sections.setAdapter(new FilterAdapter(context, getFilterSections()));
        }

        // Close icon cancels without delivering a result.
        imgCloseIcon.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                cancelView();
            }
        });

        // Tapping the title acts as "apply": collect each section's result
        // and hand the JSON array to the listener, then close.
        lbTitle.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                Iterator<Object> iterator = getFilterSections().iterator();
                JSONArray data = new JSONArray();
                while (iterator.hasNext()) {
                    FilterSection section = (FilterSection) iterator.next();
                    if (section.getResult() != null) {
                        data.put(section.getResult());
                    }
                }
                if (listener != null) {
                    listener.onResult(data);
                }
                cancelView();
            }
        });

        if (getCloseIconColor() != 0) {
            imgCloseIcon.setColorFilter(ContextCompat.getColor(context, getCloseIconColor()));
        }
    }

    /**
     * Trigger cancel view: notifies the cancel listener (if any) and dismisses
     * the hosting dialog.
     *
     * <p>Fix: the dialog is now cancelled even when no {@link OnFilterCanceled}
     * listener is registered — previously {@code dialog.cancel()} was nested
     * inside the listener null-check, leaving the dialog permanently open.
     */
    private void cancelView() {
        if (getOnFilterCanceled() != null) {
            getOnFilterCanceled().onCancel();
        }
        if (dialog != null) {
            dialog.cancel();
        }
    }

    /** @return this view (useful when embedding it without {@link #show()}) */
    public LinearLayout getView() {
        return this;
    }

    /** Shows this view inside a translucent, title-less full-screen dialog. */
    public void show() {
        dialog = new Dialog(getContext(), android.R.style.Theme_Translucent_NoTitleBar);
        dialog.setContentView(this);
        dialog.show();
    }

    /** Fluent configuration for {@link FilterView}; finish with {@link #build()}. */
    public static class Builder {

        private Context context;
        private String title;
        private ArrayList<Object> filterSections;
        private int backgroundColor;
        private int closeIcon;
        private int titleColor;
        private int divisorColor;
        private int toolbarBackgroundColor;
        private int closeIconColor;
        private boolean isToolbarVisible;

        public Builder(Context context) {
            this.context = context;
        }

        public Builder setToolbarBackgroundColor(@ColorRes int color) {
            this.toolbarBackgroundColor = color;
            return this;
        }

        public Builder setToolbarVisible(boolean isVisible) {
            this.isToolbarVisible = isVisible;
            return this;
        }

        public Builder withDivisorColor(@ColorRes int color) {
            this.divisorColor = color;
            return this;
        }

        public Builder withTitle(String title) {
            this.title = title;
            return this;
        }

        /** Appends a section; the backing list is created on first use. */
        public Builder addSection(FilterSection section) {
            if (filterSections == null) {
                filterSections = new ArrayList<>();
            }
            filterSections.add(section);
            return this;
        }

        public Builder withBackgroundColor(@ColorRes int color) {
            this.backgroundColor = color;
            return this;
        }

        public Builder setCloseIcon(@DrawableRes int icon) {
            this.closeIcon = icon;
            return this;
        }

        public Builder setCloseIconColor(@ColorRes int color) {
            this.closeIconColor = color;
            return this;
        }

        public Builder withTitleColor(@ColorRes int color) {
            this.titleColor = color;
            return this;
        }

        public FilterView build() {
            return new FilterView(context, this);
        }
    }
}
/****************************************************************
 * Licensed to the AOS Community (AOS) under one or more        *
 * contributor license agreements.  See the NOTICE file         *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The AOS licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/
/*

   Derby - Class nserverdemo.NsSample

   Copyright 2003, 2004 The Apache Software Foundation or its licensors, as applicable.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package io.datalayer.sql.server.server;

import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

/**
 * The Network Server sample demo program is a simple JDBC application that
 * interacts with the Derby Network Server. The program:
 *
 * 1. starts the Derby Network Server 2. loads the IBM DB2 JDBC Universal driver
 * 3. creates the database if not already created 4. checks to see if the schema
 * is already created, and if not, 5. creates the schema which includes the
 * table SAMPLETBL and corresponding indexes. 6. connects to the database 7.
 * loads the schema by inserting data 8. starts client threads to perform
 * database related operations 9. has each of the clients perform DML operations
 * (select, insert, delete, update) using JDBC calls, i) one client opens an
 * embedded connection to perform database operations You can open an embedded
 * connection in the same JVM that starts the Derby Network Server. ii) one
 * client opens a client connection to the Derby Network Server to perform
 * database operations. 10.waits for the client threads to finish the tasks
 * 11.shuts down the Derby Network Server at the end of the demo
 *
 * <P>
 * Usage: java nserverdemo.NsSample
 * <P>
 * Please note, a file derby.log is created in the directory you run this
 * program. This file contains the logging of connections made with the derby
 * network server
 */
public class NsSample {

    // Fully-qualified name of the JDBC driver to load.
    // (Was "new String(...)": a String literal needs no constructor call.)
    public static final String DB2_JDBC_UNIVERSAL_DRIVER = "com.ibm.db2.jcc.DB2Driver";
    // public static final String DB2_JDBC_UNIVERSAL_DRIVER =
    // "org.apache.derby.jdbc.EmbeddedDriver";

    public static int NUM_ROWS = 50; /* Number of rows to load initially */
    public static int ITERATIONS = 10; // Each client does these many iterations
    public static int NUM_CLIENT_THREADS = 2;

    // network server control specific
    private static int NETWORKSERVER_PORT = 1621;

    // Derby database connection URL for embedded environment
    public static final String CS_EMBED_DBURL = "jdbc:derby:NSSampledb;";

    // To connect to Derby Network Server
    // This URL describes the target database for type 4 connectivity
    // Notice that the properties may be established via the URL syntax
    private static final String CS_NS_DBURL = "jdbc:derby:net://localhost:"
            + NETWORKSERVER_PORT
            + "/NSSampledb;create=true;retrieveMessagesFromServerOnGetMessage=true;deferPrepares=true;";

    /**
     * Runs the full demo: start the network server, wait until it accepts
     * connections, load the driver, create/load the schema, run the client
     * threads, and shut the server down.
     *
     * @param args unused
     * @throws Exception on unrecoverable setup failure
     */
    public static void main(String... args) throws Exception {
        NetworkServerUtil nwServer;

        // DB2Connection provides additional functionality than
        // java.sql.Connection
        // One can use either depending on the requirements
        Connection conn = null;
        PrintWriter pw = null;

        try {
            pw = new PrintWriter(System.out, true); // to print messages

            /*
             * Start - In order to start the network server do the following In
             * case you want to start the server as a script or another program
             * comment out the next block of code (i.e. until the comment line
             * 'End - network server started') Also, comment out the 'Shutdown
             * Derby Network Server' line of code at the bottom In case you
             * decide to comment out the starting of the network server, make
             * sure that the client thread is not making an embedded connection
             * but instead making only a client connection. Also note, the
             * server logs messages to the file derby.log in the directory you
             * run this program
             */
            {
                nwServer = new NetworkServerUtil(NETWORKSERVER_PORT, pw);
                nwServer.start();

                boolean knowIfServerUp = false; // do we know if server is ready
                                                // to accept connections
                int numTimes = 5;

                // Test to see if server is ready for connections, for 15
                // seconds.
                while (!knowIfServerUp && (numTimes > 0)) {
                    try {
                        // testing for connection to see if the network server
                        // is up and running
                        // if server is not ready yet, this method will throw an
                        // exception
                        numTimes--;
                        nwServer.testForConnection();
                        knowIfServerUp = true;
                    } catch (Exception e) {
                        System.out
                                .println("[NsSample] Unable to obtain a connection to network server, trying again after 3000 ms.");
                        // Fix: sleep() is static — call it as Thread.sleep,
                        // not through the current-thread instance.
                        Thread.sleep(3000);
                    }
                }
                if (!knowIfServerUp) {
                    pw.println("[NsSample] Exiting, since unable to connect to Derby Network Server.");
                    pw.println("[NsSample] Please try to increase the amount of time to keep trying to connect to the Server.");
                    System.exit(1);
                }
                pw.println("[NsSample] Derby Network Server started.");
            }
            /* End - network server started */

            pw.println("[NsSample] Sample Derby Network Server program demo starting. ");
            pw.println("Please wait .....................");

            // Load the JCC Driver
            try {
                // getDeclaredConstructor().newInstance() replaces the
                // deprecated Class.newInstance() and preserves behavior.
                Class.forName(DB2_JDBC_UNIVERSAL_DRIVER).getDeclaredConstructor().newInstance();
                System.out.println("EC OK");
            } catch (Exception e) {
                pw.println("[NsSample] Unable to load JCC driver. Following exception was thrown");
                e.printStackTrace();
                System.exit(1); // critical error, so exit
            }

            // See Derby documentation for description of properties that may be
            // set
            // in the context of the network server.
            Properties properties = new java.util.Properties();

            // The user and password properties are a must, required by JCC
            properties.setProperty("user", "cloud");
            properties.setProperty("password", "scape");

            // Get database connection using the JCC client via DriverManager
            // api
            try {
                conn = (Connection) DriverManager.getConnection(CS_NS_DBURL, properties);
            } catch (Exception e) {
                pw.println("[NsSample] Connection request unsuccessful, exception thrown was: ");
                pw.println("[NsSample] Please check if all the jar files are in the classpath and the dbUrl is set correctly.");
                e.printStackTrace();
                System.exit(1); // critical error, so exit
            }

            NsSampleWork.checkAndCreateSchema(conn, pw); // Check and create the
                                                         // necessary schema if
                                                         // not already created
            NsSampleWork.loadSchema(conn, NUM_ROWS, pw); // Insert rows into the
                                                         // table
            conn.close();

            // Start client threads to perform database related sql operations
            NsSampleClientThread[] clientThreads = new NsSampleClientThread[NUM_CLIENT_THREADS];

            /*
             * Only the JVM that starts the Derby Network Server can obtain an
             * embedded connection Please pay attention to the database URL
             * Also, you need not load the org.apache.derby.jdbc.EmbeddedDriver
             * since it is already loaded when the network server starts up. 1.
             * Derby embedded database url - jdbc:derby:databasename
             */
            clientThreads[0] = new NsSampleClientThread(1, CS_EMBED_DBURL, properties, pw);
            clientThreads[0].start();

            /*
             * 2. The below client threads obtain a client connection to Derby
             * Network Server One can also get a client connection from another
             * JVM Please be aware of the database URL for obtaining a client
             * connection
             */
            for (int i = 1; i < NUM_CLIENT_THREADS; i++) {
                clientThreads[i] = new NsSampleClientThread(i + 1, CS_NS_DBURL, properties, pw);
                clientThreads[i].start();
            }

            // Wait for the client threads to complete all the work
            for (int i = 0; i < NUM_CLIENT_THREADS; i++)
                clientThreads[i].join();

            // Shutdown Derby network server
            pw.println("[NsSample] Shutting down network server.");
            nwServer.shutdown();
            pw.println("[NsSample] End of Network server demo.");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (pw != null)
                pw.close();
        }
    }
}
/*
 * Copyright (c) 2002, 2009, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

package sun.jvm.hotspot.jdi;

import sun.jvm.hotspot.debugger.OopHandle;
import sun.jvm.hotspot.runtime.VMObject;
import sun.jvm.hotspot.runtime.JavaThread;
import sun.jvm.hotspot.runtime.OSThread;
//import sun.jvm.hotspot.runtime.StackFrameStream;
import sun.jvm.hotspot.runtime.JavaVFrame;
import sun.jvm.hotspot.runtime.JavaThreadState;
import sun.jvm.hotspot.runtime.MonitorInfo;
import sun.jvm.hotspot.runtime.ObjectMonitor;
import sun.jvm.hotspot.oops.Oop;
import sun.jvm.hotspot.oops.ObjectHeap;
import sun.jvm.hotspot.oops.Instance;
import sun.jvm.hotspot.oops.OopUtilities;
import sun.jvm.hotspot.oops.Klass;
import sun.jvm.hotspot.utilities.Assert;
import com.sun.jdi.*;
import java.util.*;

/**
 * Read-only JDI {@link ThreadReference} implementation backed by the HotSpot
 * Serviceability Agent. Wraps a {@code java.lang.Thread} oop (as an
 * {@link ObjectReferenceImpl}) plus, when the thread has started and is not a
 * zombie, the corresponding SA {@link JavaThread}. Mutating JDI operations
 * (suspend/resume/stop/interrupt) throw via {@code vm.throwNotReadOnlyException}.
 */
public class ThreadReferenceImpl extends ObjectReferenceImpl
    implements ThreadReference, /* imports */ JVMTIThreadState {

    // SA-side thread; may be null for not-started or zombie threads (see
    // the Instance-based constructor below).
    private JavaThread myJavaThread;
    private ArrayList frames;    // StackFrames
    private List ownedMonitors; // List<ObjectReferenceImpl>
    private List ownedMonitorsInfo; // List<MonitorInfo>
    private ObjectReferenceImpl currentContendingMonitor;

    ThreadReferenceImpl(VirtualMachine aVm, sun.jvm.hotspot.runtime.JavaThread aRef) {
        // We are given a JavaThread and save it in our myJavaThread field.
        // But, our parent class is an ObjectReferenceImpl so we need an Oop
        // for it. JavaThread is a wrapper around a Thread Oop so we get
        // that Oop and give it to our super.
        // We can get it back again by calling ref().
        super(aVm, (Instance)aRef.getThreadObj());
        myJavaThread = aRef;
    }

    ThreadReferenceImpl(VirtualMachine vm, Instance oRef) {
        // Instance must be of type java.lang.Thread
        super(vm, oRef);

        // JavaThread retrieved from java.lang.Thread instance may be null.
        // This is the case for threads not-started and for zombies. Wherever
        // appropriate, check for null instead of resulting in NullPointerException.
        myJavaThread = OopUtilities.threadOopGetJavaThread(oRef);
    }

    // return value may be null. refer to the comment in constructor.
JavaThread getJavaThread() { return myJavaThread; } protected String description() { return "ThreadReference " + uniqueID(); } /** * Note that we only cache the name string while suspended because * it can change via Thread.setName arbitrarily */ public String name() { return OopUtilities.threadOopGetName(ref()); } public void suspend() { vm.throwNotReadOnlyException("ThreadReference.suspend()"); } public void resume() { vm.throwNotReadOnlyException("ThreadReference.resume()"); } public int suspendCount() { // all threads are "suspended" when we attach to process or core. // we interpret this as one suspend. return 1; } public void stop(ObjectReference throwable) throws InvalidTypeException { vm.throwNotReadOnlyException("ThreadReference.stop()"); } public void interrupt() { vm.throwNotReadOnlyException("ThreadReference.interrupt()"); } // refer to jvmtiEnv::GetThreadState private int jvmtiGetThreadState() { // get most state bits int state = OopUtilities.threadOopGetThreadStatus(ref()); // add more state bits if (myJavaThread != null) { JavaThreadState jts = myJavaThread.getThreadState(); if (myJavaThread.isBeingExtSuspended()) { state |= JVMTI_THREAD_STATE_SUSPENDED; } if (jts == JavaThreadState.IN_NATIVE) { state |= JVMTI_THREAD_STATE_IN_NATIVE; } OSThread osThread = myJavaThread.getOSThread(); if (osThread != null && osThread.interrupted()) { state |= JVMTI_THREAD_STATE_INTERRUPTED; } } return state; } public int status() { int state = jvmtiGetThreadState(); int status = THREAD_STATUS_UNKNOWN; // refer to map2jdwpThreadStatus in util.c (back-end) if (! 
((state & JVMTI_THREAD_STATE_ALIVE) != 0) ) { if ((state & JVMTI_THREAD_STATE_TERMINATED) != 0) { status = THREAD_STATUS_ZOMBIE; } else { status = THREAD_STATUS_NOT_STARTED; } } else { if ((state & JVMTI_THREAD_STATE_SLEEPING) != 0) { status = THREAD_STATUS_SLEEPING; } else if ((state & JVMTI_THREAD_STATE_BLOCKED_ON_MONITOR_ENTER) != 0) { status = THREAD_STATUS_MONITOR; } else if ((state & JVMTI_THREAD_STATE_WAITING) != 0) { status = THREAD_STATUS_WAIT; } else if ((state & JVMTI_THREAD_STATE_RUNNABLE) != 0) { status = THREAD_STATUS_RUNNING; } } return status; } public boolean isSuspended() { //fixme jjh // If we want to support doing this for a VM which was being // debugged, then we need to fix this. // In the meantime, we will say all threads are suspended, // otherwise, some things won't work, like the jdb 'up' cmd. return true; } public boolean isAtBreakpoint() { //fixme jjh // If we want to support doing this for a VM which was being // debugged, then we need to fix this. return false; } public ThreadGroupReference threadGroup() { return (ThreadGroupReferenceImpl)vm.threadGroupMirror( (Instance)OopUtilities.threadOopGetThreadGroup(ref())); } public int frameCount() throws IncompatibleThreadStateException { //fixme jjh privateFrames(0, -1); return frames.size(); } public List frames() throws IncompatibleThreadStateException { return privateFrames(0, -1); } public StackFrame frame(int index) throws IncompatibleThreadStateException { List list = privateFrames(index, 1); return (StackFrame)list.get(0); } public List frames(int start, int length) throws IncompatibleThreadStateException { if (length < 0) { throw new IndexOutOfBoundsException( "length must be greater than or equal to zero"); } return privateFrames(start, length); } /** * Private version of frames() allows "-1" to specify all * remaining frames. 
*/ private List privateFrames(int start, int length) throws IncompatibleThreadStateException { if (myJavaThread == null) { // for zombies and yet-to-be-started threads we need to throw exception throw new IncompatibleThreadStateException(); } if (frames == null) { frames = new ArrayList(10); JavaVFrame myvf = myJavaThread.getLastJavaVFrameDbg(); while (myvf != null) { StackFrame myFrame = new StackFrameImpl(vm, this, myvf); //fixme jjh null should be a Location frames.add(myFrame); myvf = (JavaVFrame)myvf.javaSender(); } } List retVal; if (frames.size() == 0) { retVal = new ArrayList(0); } else { int toIndex = start + length; if (length == -1) { toIndex = frames.size(); } retVal = frames.subList(start, toIndex); } return Collections.unmodifiableList(retVal); } // refer to JvmtiEnvBase::get_owned_monitors public List ownedMonitors() throws IncompatibleThreadStateException { if (vm.canGetOwnedMonitorInfo() == false) { throw new UnsupportedOperationException(); } if (myJavaThread == null) { throw new IncompatibleThreadStateException(); } if (ownedMonitors != null) { return ownedMonitors; } ownedMonitorsWithStackDepth(); for (Iterator omi = ownedMonitorsInfo.iterator(); omi.hasNext(); ) { //FIXME : Change the MonitorInfoImpl cast to com.sun.jdi.MonitorInfo // when hotspot start building with jdk1.6. ownedMonitors.add(((MonitorInfoImpl)omi.next()).monitor()); } return ownedMonitors; } // new method since 1.6. // Real body will be supplied later. 
public List ownedMonitorsAndFrames() throws IncompatibleThreadStateException { if (!vm.canGetMonitorFrameInfo()) { throw new UnsupportedOperationException( "target does not support getting Monitor Frame Info"); } if (myJavaThread == null) { throw new IncompatibleThreadStateException(); } if (ownedMonitorsInfo != null) { return ownedMonitorsInfo; } ownedMonitorsWithStackDepth(); return ownedMonitorsInfo; } private void ownedMonitorsWithStackDepth() { ownedMonitorsInfo = new ArrayList(); List lockedObjects = new ArrayList(); // List<OopHandle> List stackDepth = new ArrayList(); // List<int> ObjectMonitor waitingMonitor = myJavaThread.getCurrentWaitingMonitor(); ObjectMonitor pendingMonitor = myJavaThread.getCurrentPendingMonitor(); OopHandle waitingObj = null; if (waitingMonitor != null) { // save object of current wait() call (if any) for later comparison waitingObj = waitingMonitor.object(); } OopHandle pendingObj = null; if (pendingMonitor != null) { // save object of current enter() call (if any) for later comparison pendingObj = pendingMonitor.object(); } JavaVFrame frame = myJavaThread.getLastJavaVFrameDbg(); int depth=0; while (frame != null) { List frameMonitors = frame.getMonitors(); // List<MonitorInfo> for (Iterator miItr = frameMonitors.iterator(); miItr.hasNext(); ) { sun.jvm.hotspot.runtime.MonitorInfo mi = (sun.jvm.hotspot.runtime.MonitorInfo) miItr.next(); if (mi.eliminated() && frame.isCompiledFrame()) { continue; // skip eliminated monitor } OopHandle obj = mi.owner(); if (obj == null) { // this monitor doesn't have an owning object so skip it continue; } if (obj.equals(waitingObj)) { // the thread is waiting on this monitor so it isn't really owned continue; } if (obj.equals(pendingObj)) { // the thread is pending on this monitor so it isn't really owned continue; } boolean found = false; for (Iterator loItr = lockedObjects.iterator(); loItr.hasNext(); ) { // check for recursive locks if (obj.equals(loItr.next())) { found = true; break; } } if 
(found) { // already have this object so don't include it continue; } // add the owning object to our list lockedObjects.add(obj); stackDepth.add(new Integer(depth)); } frame = (JavaVFrame) frame.javaSender(); depth++; } // now convert List<OopHandle> to List<ObjectReference> ObjectHeap heap = vm.saObjectHeap(); Iterator stk = stackDepth.iterator(); for (Iterator loItr = lockedObjects.iterator(); loItr.hasNext(); ) { Oop obj = heap.newOop((OopHandle)loItr.next()); ownedMonitorsInfo.add(new MonitorInfoImpl(vm, vm.objectMirror(obj), this, ((Integer)stk.next()).intValue())); } } // refer to JvmtiEnvBase::get_current_contended_monitor public ObjectReference currentContendedMonitor() throws IncompatibleThreadStateException { if (vm.canGetCurrentContendedMonitor() == false) { throw new UnsupportedOperationException(); } if (myJavaThread == null) { throw new IncompatibleThreadStateException(); } ObjectMonitor mon = myJavaThread.getCurrentWaitingMonitor(); if (mon == null) { // thread is not doing an Object.wait() call mon = myJavaThread.getCurrentPendingMonitor(); if (mon != null) { OopHandle handle = mon.object(); // If obj == NULL, then ObjectMonitor is raw which doesn't count // as contended for this API return vm.objectMirror(vm.saObjectHeap().newOop(handle)); } else { // no contended ObjectMonitor return null; } } else { // thread is doing an Object.wait() call OopHandle handle = mon.object(); if (Assert.ASSERTS_ENABLED) { Assert.that(handle != null, "Object.wait() should have an object"); } Oop obj = vm.saObjectHeap().newOop(handle); return vm.objectMirror(obj); } } public void popFrames(StackFrame frame) throws IncompatibleThreadStateException { vm.throwNotReadOnlyException("ThreadReference.popFrames()"); } public void forceEarlyReturn(Value returnValue) throws IncompatibleThreadStateException { vm.throwNotReadOnlyException("ThreadReference.forceEarlyReturn()"); } public String toString() { return "instance of " + referenceType().name() + "(name='" + name() + "', " 
+ "id=" + uniqueID() + ")"; } }
package gr.iti.mklab.reveal.forensics.util.dwt; /** * Author Mark Bishop; 2014 * License GNU v3; * This class is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ /** * The coefficients were gathered from various sources including: * Wavelab Version 850 * Adapted Wavelet Analysis from Theory to Software, Victor Wickerhauser * A Wavelet Tour of Signal Processing, The Sparse Way, 3rd ed., Stephane Mallet */ import java.util.ArrayList; /** * Class responsibility: Provide filters for several orthogonal wavelets. 
 */
public class OrthogonalFilters {

    /**
     * Derives the high-pass (wavelet) filter of a quadrature-mirror-filter
     * pair from its low-pass (scaling) filter: the coefficients are
     * reversed and every other one is negated (alternating-flip).
     *
     * @param H
     *            low pass filter for QMF pair
     * @return high pass filter for QMF pair
     */
    public static double[] getHighPass(double[] H) {
        double sign = 1;
        int n = H.length;
        double[] G = new double[n];
        for (int i = 0; i < n; i++) {
            // reverse order while alternating the sign (+, -, +, -, ...)
            G[n - i - 1] = H[i] * sign;
            sign *= -1;
        }
        return G;
    }

    /**
     * Looks up the low-pass (scaling) filter coefficients for the given
     * wavelet family and order.  The tables below are transcribed from
     * the sources credited inline (Wickerhauser, Wavelab).
     *
     * NOTE(review): for an unknown family/order combination this returns
     * the single-element array {0} rather than throwing — callers must
     * validate the order first (see {@link #validParameters}).
     *
     * @param wavelet wavelet family to look up
     * @param order filter length / order (family-specific, see validParameters)
     * @return Low Pass filter for QMF pair, or {0} if not found
     */
    static double[] getLowPass(Wavelet wavelet, int order) {
        if (wavelet == Wavelet.Haar) {
            double[] f = { .707106781186547, .707106781186547 };
            return f;
        }
        if (wavelet == Wavelet.Daubechies) {
            switch (order) {
            case 4: { // (Wickerhauser)
                double[] f = { .482962913144534160, .836516303737807940,
                        .224143868042013390, -.129409522551260370 };
                return f;
            }
            case 6: {// (Wickerhauser)
                double[] f = { 0.332670552950082630, 0.806891509311092550,
                        0.459877502118491540, -0.135011020010254580,
                        -0.0854412738820266580, 0.0352262918857095330 };
                return f;
            }
            case 8: {// (Wickerhauser but order reversed)
                double[] f = { 0.2303778133090, 0.7148465705530,
                        0.6308807679300, -0.02798376941700, -0.1870348117190,
                        0.03084138183700, 0.032883011667, -0.01059740178500 };
                return f;
            }
            case 10: {// (Wickerhauser but order reversed)
                double[] f = { 0.160102397974, 0.603829269797, 0.724308528438,
                        0.138428145901, -0.242294887066, -0.032244869585,
                        0.07757149384, -0.006241490213, -0.012580751999,
                        0.003335725285 };
                return f;
            }
            case 12: {// (Wickerhauser but order reversed)
                double[] f = { 0.11154074335, 0.494623890398, 0.751133908021,
                        0.315250351709, -0.226264693965, -0.129766867567,
                        0.097501605587, 0.02752286553, -0.031582039318,
                        0.000553842201, 0.004777257511, -0.001077301085 };
                return f;
            }
            case 14: {// (Wickerhauser but order reversed)
                double[] f = { 0.077852054085, 0.396539319482, 0.729132090846,
                        0.469782287405, -0.143906003929, -0.224036184994,
                        0.071309219267, 0.080612609151, -0.038029936935,
                        -0.016574541631, 0.012550998556, 0.000429577973,
                        -0.001801640704, 0.0003537138 };
                return f;
            }
            case 16: {// (Wickerhauser but order reversed)
                double[] f = { 0.054415842243, 0.312871590914, 0.675630736297,
                        0.585354683654, -0.015829105256, -0.284015542962,
                        0.000472484574, 0.12874742662, -0.017369301002,
                        -0.044088253931, 0.013981027917, 0.008746094047,
                        -0.004870352993, -0.000391740373, 0.000675449406,
                        -0.000117476784 };
                return f;
            }
            case 18: {// (Wickerhauser but order reversed)
                double[] f = { 0.038077947364, 0.243834674613, 0.60482312369,
                        0.657288078051, 0.133197385825, -0.293273783279,
                        -0.096840783223, 0.148540749338, 0.030725681479,
                        -0.067632829061, 0.000250947115, 0.022361662124,
                        -0.004723204758, -0.004281503682, 0.001847646883,
                        0.000230385764, -0.000251963189, 0.00003934732 };
                return f;
            }
            case 20: {// Wavelab src
                double[] f = { 0.026670057901, 0.188176800078, 0.527201188932,
                        0.688459039454, 0.281172343661, -0.249846424327,
                        -0.195946274377, 0.127369340336, 0.093057364604,
                        -0.071394147166, -0.029457536822, 0.033212674059,
                        0.003606553567, -0.010733175483, 0.001395351747,
                        0.001992405295, -0.000685856695, -0.000116466855,
                        0.00009358867, -0.000013264203 };
                return f;
            }
            }
        }
        if (wavelet == Wavelet.Beylkin) { // 18 (Wickerhauser but order
                                          // reversed)
            double[] f = { 0.00064048532852124535, -0.00273603162625860610,
                    0.0014842347824723461, 0.01004041184463199,
                    -0.014365807968852611, -0.017460408696028829,
                    0.042916387274192273, 0.01967986604432212,
                    -0.088543630623924835, -0.0175207462665229649,
                    0.1555387318770938, 0.02690030880369032,
                    -0.26449723144638482, -0.1109275983482343,
                    0.44971825114946867, 0.69982521405660590,
                    0.42421536081296141, 0.099305765374353927 };
            return f;
        }
        if (wavelet == Wavelet.Vaidyanathan) {// length = 24
            // Wavelab src
            double[] f = { -.000062906118, .000343631905, -.000453956620,
                    -.000944897136, .002843834547, .000708137504,
                    -.008839103409, .003153847056, .019687215010,
                    -.014853448005, -.035470398607, .038742619293,
                    .055892523691, -.077709750902, -.083928884366,
                    .131971661417, .135084227129, -.194450471766,
                    -.263494802488, .201612161775, .635601059872,
                    .572797793211, .250184129505, .045799334111 };
            return f;
        }
        if (wavelet == Wavelet.Coiflet) {// Wavelab src
            switch (order) {
            case 6: {
                double[] f = { .038580777748, -.126969125396, -.077161555496,
                        .607491641386, .745687558934, .226584265197 };
                return f;
            }
            case 12: {
                double[] f = { .016387336463, -.041464936782, -.067372554722,
                        .386110066823, .812723635450, .417005184424,
                        -.076488599078, -.059434418646, .023680171947,
                        .005611434819, -.001823208871, -.000720549445 };
                ; // NOTE(review): stray empty statement in the original — harmless
                return f;
            }
            case 18: {
                double[] f = { -.003793512864, .007782596426, .023452696142,
                        -.065771911281, -.061123390003, .405176902410,
                        .793777222626, .428483476378, -.071799821619,
                        -.082301927106, .034555027573, .015880544864,
                        -.009007976137, -.002574517688, .001117518771,
                        .000466216960, -.000070983303, -.000034599773 };
                return f;
            }
            case 24: {
                double[] f = { .000892313668, -.001629492013, -.007346166328,
                        .016068943964, .026682300156, -.081266699680,
                        -.056077313316, .415308407030, .782238930920,
                        .434386056491, -.066627474263, -.096220442034,
                        .039334427123, .025082261845, -.015211731527,
                        -.005658286686, .003751436157, .001266561929,
                        -.000589020757, -.000259974552, .000062339034,
                        .000031229876, -.000003259680, -.000001784985 };
                return f;
            }
            case 30: {
                double[] f = { -.000212080863, .000358589677, .002178236305,
                        -.004159358782, -.010131117538, .023408156762,
                        .028168029062, -.091920010549, -.052043163216,
                        .421566206729, .774289603740, .437991626228,
                        -.062035963906, -.105574208706, .041289208741,
                        .032683574283, -.019761779012, -.009164231153,
                        .006764185419, .002433373209, -.001662863769,
                        -.000638131296, .000302259520, .000140541149,
                        -.000041340484, -.000021315014, .000003734597,
                        .000002063806, -.000000167408, -.000000095158 };
                return f;
            }
            }
        }
        if (wavelet == Wavelet.Symmlet) {// Wavelab but normalized by 1/sqrt(2)
            switch (order) {
            case 4: {
                double[] f = { -0.07576571478935668, -0.029635527645960395,
                        0.4976186676325629, 0.803738751805386,
                        0.29785779560560505, -0.09921954357695636,
                        -0.012603967262263831, 0.03222310060407815 };
                return f;
            }
            case 5: {
                double[] f = { 0.027333068345162827, 0.029519490926071618,
                        -0.039134249302581074, 0.19939753397698343,
                        0.7234076904037638, 0.6339789634569107,
                        0.01660210576442332, -0.17532808990809687,
                        -0.021101834024928552, 0.019538882735385715 };
                return f;
            }
            case 6: {
                double[] f = { 0.0154041093273385, 0.003490712084330607,
                        -0.11799011114841682, -0.04831174258600069,
                        0.49105594192766583, 0.7876411410288363,
                        0.3379294217282582, -0.07263752278660392,
                        -0.021060292512696543, 0.04472490177075063,
                        0.0017677118643983668, -0.007800708324765445 };
                return f;
            }
            case 7: {
                double[] f = { 0.00268181456811643, -0.0010473848889657486,
                        -0.01263630340315281, 0.0305155131659067,
                        0.06789269350159828, -0.049552834937041906,
                        0.01744125508711021, 0.5361019170907816,
                        0.7677643170045847, 0.28862963175098844,
                        -0.14004724044270553, -0.10780823770361868,
                        0.004010244871703328, 0.01026817670849701 };
                return f;
            }
            case 8: {
                double[] f = { 0.0018899503329009007, -3.0292051455166417E-4,
                        -0.014952258336793822, 0.0038087520140603582,
                        0.04913717967348071, -0.027219029916815855,
                        -0.05194583810787925, 0.36444189483598555,
                        0.77718575169981, 0.48135965125923963,
                        -0.06127335906791368, -0.1432942383510657,
                        0.0076074873252853755, 0.03169508781034778,
                        -5.4213233163559E-4, -0.0033824159513597117 };
                return f;
            }
            case 9: {
                double[] f = { 0.0010694900326524929, -4.7315449858729915E-4,
                        -0.010264064027672312, 0.00885926749350085,
                        0.062077789302687425, -0.018233770779803246,
                        -0.19155083129625178, 0.03527248803589115,
                        0.6173384491405931, 0.717897082763343,
                        0.23876091460712456, -0.054568958430509365,
                        5.834627463304675E-4, 0.03022487885795232,
                        -0.011528210207970613, -0.013271967781516928,
                        6.197808890541263E-4, 0.0014009155255699105 };
                return f;
            }
            case 10: {
                double[] f = { 7.70159808941683E-4, 9.563267076371025E-5,
                        -0.008641299274130404, -0.0014653825830396562,
                        0.045927239214146896, 0.011609893910541074,
                        -0.1594942788241296, -0.07088053579601779,
                        0.4716906667438779, 0.7695100368531889,
                        0.3838267611450017, -0.035536740298026795,
                        -0.03199005682146376, 0.049994972068686125,
                        0.005764912044344502, -0.02035493979968329,
                        -8.043589343685685E-4, 0.004593173582708373,
                        5.703608432707147E-5, -4.5932942045186415E-4 };
                return f;
            }
            }
        }
        // Battle performs comparatively poorly using the coefficients below
        // (sig figs?).
        if (wavelet == Wavelet.Battle) {
            switch (order) {
            case 1: {
                double[] f = { -0.000122686, -0.000224296, 0.000511636,
                        0.000923371, -0.002201945, -0.003883261, 0.009990599,
                        0.016974805, -0.051945337, -0.06910102, 0.39729643,
                        0.817645956, 0.39729643, -0.06910102, -0.051945337,
                        0.016974805, 0.009990599, -0.003883261, -0.002201945,
                        0.000923371, 0.000511636, -0.000224296, -0.000122686 };
                return f;
            }
            case 3: {
                double[] f = { 0.000146098, -0.000232304, -0.000285414,
                        0.000462093, 0.000559952, -0.000927187, -0.001103748,
                        0.00188212, 0.002186714, -0.003882426, -0.00435384,
                        0.008201477, 0.008685294, -0.017982291, -0.017176331,
                        0.042068328, 0.032080869, -0.110036987, -0.050201753,
                        0.433923147, 0.766130398, 0.433923147, -0.050201753,
                        -0.110036987, 0.032080869, 0.042068328, -0.017176331,
                        -0.017982291, 0.008685294, 0.008201477, -0.00435384,
                        -0.003882426, 0.002186714, 0.00188212, -0.001103748,
                        -0.000927187, 0.000559952, 0.000462093, -0.000285414,
                        -0.000232304, 0.000146098 };
                return f;
            }
            }
        }
        // fall-through sentinel for unknown family/order (see NOTE above)
        double[] error = { 0 };
        return error;
    }

    /**
     * Lists the orders for which {@link #getLowPass} has a coefficient
     * table in the given wavelet family.
     *
     * @param wavelet wavelet family to query
     * @return List of available orders for a given wavelet.
     */
    public static ArrayList<Integer> validParameters(Wavelet wavelet) {
        ArrayList<Integer> result = new ArrayList<Integer>();
        switch (wavelet) {
        case Haar:
            result.add(2);
            break;
        case Daubechies:
            for (int i = 4; i <= 20; i += 2) {
                result.add(i);
            }
            break;
        case Beylkin:
            result.add(18);
            break;
        case Vaidyanathan:
            result.add(24);
            break;
        case Coiflet:
            for (int i = 6; i <= 30; i += 6) {
                result.add(i);
            }
            break;
        case Symmlet:
            for (int i = 4; i <= 10; i++) {
                result.add(i);
            }
            break;
        case Battle:
            result.add(1);
            result.add(3);
            break;
        }
        return result;
    }

    /**
     * Computes the decomposition scales usable for the given wavelet and
     * signal length (candidate scales run from log2(filter length) up to
     * log2(signal length)).
     *
     * NOTE(review): inside the loop, {@code t} is the NATURAL log of the
     * filter length and is loop-invariant, so the guard {@code t < min}
     * either admits every candidate scale or none of them — it looks like
     * the comparison was meant to involve the per-iteration scale {@code i}
     * (or a base-2 log); confirm against the original algorithm before
     * relying on this.
     *
     * @param parameter wavelet order passed through to getLowPass
     * @param wavelet wavelet family
     * @param signalLength length of the signal to be decomposed
     * @return A list of valid scales for a given wavelet and signal length.
     * @throws Exception if the filter is longer than the signal
     */
    public static ArrayList<Integer> validScales(int parameter,
            Wavelet wavelet, int signalLength) throws Exception {
        ArrayList<Integer> validScales = new ArrayList<Integer>();
        int hLength = OrthogonalFilters.getLowPass(wavelet, parameter).length;
        if (hLength > signalLength) {
            throw new Exception("The filter is longer than the signal.");
        }
        int log2SigLen = (int) (Math.log(signalLength) / Math.log(2));
        int min = (int) (Math.log(hLength) / Math.log(2));
        for (int i = min; i <= log2SigLen; i++) {
            double t = Math.log(hLength);
            if (t < min) {
                validScales.add(i);
            }
        }
        return validScales;
    }
}

// Some descriptive narrative from the Wavelab project:
//
// Description
// The Haar filter (which could be considered a Daubechies-2) was the
// first wavelet, though not called as such, and is discontinuous.
//
// The Beylkin filter places roots for the frequency response function
// close to the Nyquist frequency on the real axis.
//
// The Coiflet filters are designed to give both the mother and father
// wavelets 2*Par vanishing moments; here Par may be one of 1,2,3,4 or 5.
//
// The Daubechies filters are minimal phase filters that generate wavelets
// which have a minimal support for a given number of vanishing moments.
// They are indexed by their length, Par, which may be one of // 4,6,8,10,12,14,16,18 or 20. The number of vanishing moments is par/2. // // Symmlets are also wavelets within a minimum size support for a given // number of vanishing moments, but they are as symmetrical as possible, // as opposed to the Daubechies filters which are highly asymmetrical. // They are indexed by Par, which specifies the number of vanishing // moments and is equal to half the size of the support. It ranges // from 4 to 10. // // The Vaidyanathan filter gives an exact reconstruction, but does not // satisfy any moment condition. The filter has been optimized for // speech coding. // // The Battle-Lemarie filter generate spline orthogonal wavelet basis. // The order Par gives the degree of the spline. The number of // vanishing moments is Par+1. // //
package de.raysha.lib.jsimpleshell.rc.server;

import static java.util.logging.Level.FINE;
import static java.util.logging.Level.SEVERE;
import static java.util.logging.Level.WARNING;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.Arrays;
import java.util.logging.Logger;

import jline.console.ConsoleReader;
import de.raysha.lib.jsimpleshell.Shell;
import de.raysha.lib.jsimpleshell.builder.ShellBuilder;
import de.raysha.lib.jsimpleshell.io.TerminalIO;
import de.raysha.lib.jsimpleshell.rc.model.ErrorMessage;
import de.raysha.lib.jsimpleshell.rc.model.ExceptionMessage;
import de.raysha.lib.jsimpleshell.rc.model.MessageCataloge;
import de.raysha.lib.jsimpleshell.rc.model.OutputMessage;
import de.raysha.lib.jsimpleshell.rc.model.ReadLine;
import de.raysha.lib.net.scs.Connector;

/**
 * This class is responsible for a shell session. For each user there is a own session.
 *
 * <p>Lifecycle (see {@link #run()}): build a shell wired to piped streams,
 * perform a handshake with the remote client, then pump shell stdout/stderr
 * to the client and client input to the shell on three dedicated threads
 * until the command loop ends or the connection breaks.
 *
 * @author rainu
 */
public class ShellSession implements Runnable {
	private static final Logger LOG = Logger.getLogger(ShellSession.class.getName());

	private final ShellBuilder shellBuilder;
	private Shell shellSession;
	private final Connector connector;
	private String name;

	// pump threads: client->shell input, shell->client output and error
	private Thread inputThread;
	private Thread ouputThread;
	private Thread errorThread;

	// pipe endpoints held by this session; the opposite ends are handed
	// to the shell in buildNewShell()
	private PipedOutputStream in;
	private PipedInputStream out;
	private PipedInputStream err;

	// wall-clock timestamps of the most recent out/err read; used by
	// SessionConsole.waitUntilOutput() to guess when output has drained
	private long outEntry = 0L;
	private long errEntry = 0L;

	public ShellSession(ShellBuilder shellBuilder, Connector connector) {
		this.shellBuilder = shellBuilder;
		this.connector = connector;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getName() {
		return name;
	}

	// prefixes a log message with this session's name for traceability
	private String logMessage(String message){
		return "[ShellSession-" + name + "] " + message;
	}

	/**
	 * Runs the whole session: shell construction, handshake, pump-thread
	 * startup, the blocking command loop, and teardown. Every failure
	 * path attempts to notify the client before disconnecting.
	 */
	@Override
	public void run() {
		try{
			LOG.info(logMessage("Start shell session."));
			Thread.currentThread().setName("ShellSession-" + name);

			prepareConnector();

			try {
				shellSession = buildNewShell();
			} catch (Exception e) {
				LOG.log(SEVERE, logMessage("Could not create a new shell session!"), e);

				try {
					connector.send(new ExceptionMessage("Could not create a new shell session!", e));
				} catch (IOException e1) {
					LOG.log(WARNING, logMessage("Could not send exception message to user!"), e1);
				}
				return;
			}

			try {
				doHandshake();
			} catch (IOException e) {
				LOG.log(SEVERE, logMessage("The handshake between server and client fails! The client will be disconnected."), e);
				try { connector.disconnect(); } catch (IOException e1) { }
				return;
			}

			initializeAndStartThreads();

			try {
				// blocks until the user exits the shell or the pipes break
				shellSession.commandLoop();
			} catch (IOException e) {
				LOG.log(WARNING, logMessage("The shell-session ends unexpected!"), e);

				try {
					connector.send(new ExceptionMessage("The shell-session ends unexpected!", e));
				} catch (IOException e1) {
					LOG.log(WARNING, logMessage("Could not send exception message to user!"), e1);
				}
			} finally {
				stopThreads();
				closePipes();
			}

			try {
				connector.disconnect();
			} catch (IOException e) {
				LOG.log(FINE, logMessage("An error occurs on disconnecting client!"), e);
			}
		}finally{
			// best-effort console shutdown; shellSession may be in any state here
			try{ getConsole().shutdown(); } catch(Exception e) {}
			LOG.info(logMessage("Stop shell session."));
		}
	}

	private void doHandshake() throws IOException {
		new Handshaker(connector, getConsole()).doHandshake();
	}

	// register the message types this session exchanges with the client
	private void prepareConnector() {
		MessageCataloge.registerCataloge(connector);
	}

	// NOTE(review): assumes the shell's input setting is always a TerminalIO;
	// a different configuration would cause a ClassCastException here.
	private ConsoleReader getConsole(){
		return ((TerminalIO)shellSession.getSettings().getInput()).getConsole();
	}

	/**
	 * Creates the shell wired to three pipes: the shell reads from
	 * {@code worldIn} (fed by {@link #in}) and writes to {@code worldOut}/
	 * {@code worldErr} (drained via {@link #out}/{@link #err}).
	 * Synchronized on the shared builder because it is mutated per session.
	 */
	private Shell buildNewShell() throws IOException {
		out = new PipedInputStream();
		PipedOutputStream worldOut = new PipedOutputStream();
		worldOut.connect(out);

		err = new PipedInputStream();
		PipedOutputStream worldErr = new PipedOutputStream();
		worldErr.connect(err);

		in = new PipedOutputStream();
		PipedInputStream worldIn = new PipedInputStream();
		worldIn.connect(in);

		synchronized (shellBuilder) {
			shellBuilder.io()
				.setConsole(new SessionConsole(worldIn, worldOut))
				.setError(worldErr);

			return shellBuilder.build();
		}
	}

	/**
	 * ConsoleReader that announces each prompt to the remote client
	 * (via a {@link ReadLine} message) before reading, so the client
	 * knows a line of input is expected.
	 */
	public class SessionConsole extends ConsoleReader {
		public SessionConsole(InputStream in, OutputStream out) throws IOException {
			super(in, out);
		}

		@Override
		public String readLine(String prompt) throws IOException {
			waitUntilOutput();

			connector.send(new ReadLine(prompt));
			// mask (char)0 suppresses local echo of the prompt input
			return super.readLine(prompt, (char)0);
		}

		@Override
		public String readLine(String prompt, Character mask) throws IOException {
			waitUntilOutput();

			connector.send(new ReadLine(prompt, mask));
			return super.readLine(prompt, mask);
		}
		//TODO: try what happens if the user should enter a invisible input!

		private static final long WAIT_CAP = 25L;

		/*
		 * Maybe this method is imprecise. If a system is fully occupied the thread
		 * don't get any cpu-time! The usage of current time is then not meaningful!
		 *
		 * Heuristic: keep sleeping while either pump thread has read output
		 * within the last WAIT_CAP ms, i.e. wait until the output streams
		 * appear to have drained before prompting the client.
		 */
		private void waitUntilOutput() {
			try { Thread.sleep(WAIT_CAP); } catch (InterruptedException e) { return; }

			long elapsedErr = System.currentTimeMillis() - errEntry;
			long elapsedOut = System.currentTimeMillis() - outEntry;

			//there is a chance that we are not at the end of output/error
			while(elapsedErr <= WAIT_CAP || elapsedOut <= WAIT_CAP){
				try { Thread.sleep(WAIT_CAP); } catch (InterruptedException e) { break; }

				elapsedErr = System.currentTimeMillis() - errEntry;
				elapsedOut = System.currentTimeMillis() - outEntry;
			}

			outEntry = System.currentTimeMillis();
			errEntry = outEntry;
		}
	}

	// best-effort close of all three pipe endpoints
	private void closePipes() {
		try { in.close(); } catch (IOException e) { }
		try { out.close(); } catch (IOException e) { }
		try { err.close(); } catch (IOException e) { }
	}

	// start the three pump threads (client input, shell output, shell error)
	private void initializeAndStartThreads() {
		MessageDispatcher inputDispatcher = new MessageDispatcher(in, connector, getConsole(), name);

		this.inputThread = new Thread(inputDispatcher, "ShellSession-Input-" + name);
		this.ouputThread = new Thread(outputRunnable, "ShellSession-Output-" + name);
		this.errorThread = new Thread(errorRunnable, "ShellSession-Error-" + name);

		this.inputThread.start();
		this.ouputThread.start();
		this.errorThread.start();
	}

	private void stopThreads() {
		stop(inputThread);
		stop(ouputThread);
		stop(errorThread);
	}

	// interrupt, give the thread 2.5s to die, then force-kill it.
	// NOTE(review): Thread.stop() is deprecated and inherently unsafe
	// (can leave shared state inconsistent) — used here only as a last
	// resort for threads blocked in pipe reads; confirm this is intended.
	private void stop(Thread thread){
		thread.interrupt();
		try { thread.join(2500); } catch (InterruptedException e) { }

		if(thread.isAlive()){
			thread.stop();
		}
	}

	// pumps shell stdout to the client as OutputMessages until the pipe breaks
	// NOTE(review): a read() returning -1 at EOF would make copyOfRange throw
	// (to == -1 < from) — presumably the pipe only ever ends via IOException
	// here; confirm.
	private Runnable outputRunnable = new Runnable() {
		@Override
		public void run() {
			while(true){
				byte[] readBuffer = new byte[8192];
				int read;

				try {
					// record the read time for waitUntilOutput()'s heuristic
					outEntry = System.currentTimeMillis();
					read = out.read(readBuffer);
				} catch (IOException e) {
					try{
						connector.send(new ExceptionMessage("Could not forward shell output! Close connection because of brocken stream!", e));
						connector.disconnect();
					}catch(IOException e1){
						LOG.log(WARNING, logMessage("Could not send exception message to user!"), e1);
					}
					break;
				}

				OutputMessage output = new OutputMessage(Arrays.copyOfRange(readBuffer, 0, read));
				try {
					connector.send(output);
				} catch (IOException e) {
					LOG.log(FINE, logMessage("Could not send shell output to client!"), e);
					break;	//connection is closed...
				}
			}
		}
	};

	// pumps shell stderr to the client as ErrorMessages; mirror of outputRunnable
	private Runnable errorRunnable = new Runnable() {
		@Override
		public void run() {
			while(true){
				byte[] readBuffer = new byte[8192];
				int read;

				try {
					// record the read time for waitUntilOutput()'s heuristic
					errEntry = System.currentTimeMillis();
					read = err.read(readBuffer);
				} catch (IOException e) {
					try{
						connector.send(new ExceptionMessage("Could not forward shell error! Close connection because of brocken stream!", e));
						connector.disconnect();
					}catch(IOException e1){
						LOG.log(WARNING, logMessage("Could not send exception message to user!"), e1);
					}
					break;
				}

				ErrorMessage error = new ErrorMessage(Arrays.copyOfRange(readBuffer, 0, read));
				try {
					connector.send(error);
				} catch (IOException e) {
					LOG.log(FINE, logMessage("Could not send shell error to client!"), e);
					break;	//connection is closed...
				}
			}
		}
	};
}
/**
 * Copyright (C) 2013-2015 Dell, Inc
 *
 * ====================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ====================================================================
 */
package org.dasein.cloud.azure.platform.model;

import javax.xml.bind.annotation.*;
import java.util.List;

/**
 * JAXB model for the Azure SQL "ServiceResource" XML document describing a
 * server's quotas and the database versions/editions/service-level-objectives
 * it supports. All fields are bound by the XML annotations below; the nested
 * classes mirror the nested XML elements.
 *
 * Created by Vlad_Munthiu on 11/17/2014.
 */
@XmlRootElement(name="ServiceResource", namespace ="http://schemas.microsoft.com/windowsazure")
@XmlAccessorType(XmlAccessType.FIELD)
public class ServerServiceResourceModel {
    // Resource name as reported by the Azure service.
    @XmlElement(name="Name", namespace ="http://schemas.microsoft.com/windowsazure")
    private String name;
    // Resource type discriminator.
    @XmlElement(name="Type", namespace ="http://schemas.microsoft.com/windowsazure")
    private String type;
    // Per-database quota (kept as String — raw value from the XML).
    @XmlElement(name="DatabaseQuota", namespace ="http://schemas.microsoft.com/windowsazure")
    private String databaseQuota;
    // Per-server quota (kept as String — raw value from the XML).
    @XmlElement(name="ServerQuota", namespace ="http://schemas.microsoft.com/windowsazure")
    private String serverQuota;
    // Firewall-rule style IP range bounds.
    @XmlElement(name="StartIPAddress", namespace ="http://schemas.microsoft.com/windowsazure")
    private String startIpAddress;
    @XmlElement(name="EndIPAddress", namespace ="http://schemas.microsoft.com/windowsazure")
    private String endIpAddress;
    // <Versions><Version>…</Version></Versions> wrapper.
    @XmlElementWrapper(name = "Versions", namespace = "http://schemas.microsoft.com/windowsazure")
    @XmlElement(name="Version", namespace ="http://schemas.microsoft.com/windowsazure")
    private List<Version> versions;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getDatabaseQuota() {
        return databaseQuota;
    }

    public void setDatabaseQuota(String databaseQuota) {
        this.databaseQuota = databaseQuota;
    }

    public String getServerQuota() {
        return serverQuota;
    }

    public void setServerQuota(String serverQuota) {
        this.serverQuota = serverQuota;
    }

    public List<Version> getVersions() {
        return versions;
    }

    public void setVersions(List<Version> versions) {
        this.versions = versions;
    }

    public String getStartIpAddress() {
        return startIpAddress;
    }

    public void setStartIpAddress(String startIpAddress) {
        this.startIpAddress = startIpAddress;
    }

    public String getEndIpAddress() {
        return endIpAddress;
    }

    public void setEndIpAddress(String endIpAddress) {
        this.endIpAddress = endIpAddress;
    }

    /** One supported database engine version, with its available editions. */
    @XmlRootElement(name="Version", namespace ="http://schemas.microsoft.com/windowsazure")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Version{
        @XmlElement(name="Name", namespace ="http://schemas.microsoft.com/windowsazure")
        private String name;
        // "true"/"false" string as delivered by the service — not parsed to boolean.
        @XmlElement(name="IsDefault", namespace ="http://schemas.microsoft.com/windowsazure")
        private String isDefault;
        @XmlElementWrapper(name = "Editions", namespace = "http://schemas.microsoft.com/windowsazure")
        @XmlElement(name="Edition", namespace ="http://schemas.microsoft.com/windowsazure")
        private List<Edition> editions;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getIsDefault() {
            return isDefault;
        }

        public void setIsDefault(String isDefault) {
            this.isDefault = isDefault;
        }

        public List<Edition> getEditions() {
            return editions;
        }

        public void setEditions(List<Edition> editions) {
            this.editions = editions;
        }
    }

    /** One database edition, with its service level objectives. */
    @XmlRootElement(name="Edition", namespace ="http://schemas.microsoft.com/windowsazure")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Edition{
        @XmlElement(name="Name", namespace ="http://schemas.microsoft.com/windowsazure")
        private String name;
        // "true"/"false" string as delivered by the service — not parsed to boolean.
        @XmlElement(name="IsDefault", namespace ="http://schemas.microsoft.com/windowsazure")
        private String isDefault;
        @XmlElementWrapper(name = "ServiceLevelObjectives", namespace = "http://schemas.microsoft.com/windowsazure")
        @XmlElement(name="ServiceLevelObjective", namespace ="http://schemas.microsoft.com/windowsazure")
        private List<ServiceLevelObjective> serviceLevelObjectives;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getIsDefault() {
            return isDefault;
        }

        public void setIsDefault(String isDefault) {
            this.isDefault = isDefault;
        }

        public List<ServiceLevelObjective> getServiceLevelObjectives() {
            return serviceLevelObjectives;
        }

        public void setServiceLevelObjectives(List<ServiceLevelObjective> serviceLevelObjectives) {
            this.serviceLevelObjectives = serviceLevelObjectives;
        }
    }

    /** A service level objective: id, max database sizes, and performance level. */
    @XmlRootElement(name="ServiceLevelObjective", namespace ="http://schemas.microsoft.com/windowsazure")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class ServiceLevelObjective{
        @XmlElement(name="Name", namespace ="http://schemas.microsoft.com/windowsazure")
        private String name;
        // "true"/"false" string as delivered by the service — not parsed to boolean.
        @XmlElement(name="IsDefault", namespace ="http://schemas.microsoft.com/windowsazure")
        private String isDefault;
        @XmlElement(name="ID", namespace ="http://schemas.microsoft.com/windowsazure")
        private String id;
        @XmlElementWrapper(name = "MaxSizes", namespace = "http://schemas.microsoft.com/windowsazure")
        @XmlElement(name="MaxSize", namespace ="http://schemas.microsoft.com/windowsazure")
        private List<DatabaseSize> maxSizes;
        @XmlElement(name="PerformanceLevel", namespace ="http://schemas.microsoft.com/windowsazure")
        private PerformanceLevel performanceLevel;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getIsDefault() {
            return isDefault;
        }

        public void setIsDefault(String isDefault) {
            this.isDefault = isDefault;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public List<DatabaseSize> getMaxSizes() {
            return maxSizes;
        }

        public void setMaxSizes(List<DatabaseSize> maxSizes) {
            this.maxSizes = maxSizes;
        }

        public PerformanceLevel getPerformanceLevel() {
            return performanceLevel;
        }

        public void setPerformanceLevel(PerformanceLevel performanceLevel) {
            this.performanceLevel = performanceLevel;
        }
    }

    /** A maximum database size: numeric value plus unit (both kept as Strings). */
    @XmlRootElement(name="MaxSize", namespace ="http://schemas.microsoft.com/windowsazure")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class DatabaseSize{
        @XmlElement(name="Value", namespace ="http://schemas.microsoft.com/windowsazure")
        private String value;
        @XmlElement(name="Unit", namespace ="http://schemas.microsoft.com/windowsazure")
        private String unit;
        @XmlElement(name="IsDefault", namespace ="http://schemas.microsoft.com/windowsazure")
        private String isDefault;

        public String getValue() {
            return value;
        }

        public void setValue(String value) {
            this.value = value;
        }

        public String getUnit() {
            return unit;
        }

        public void setUnit(String unit) {
            this.unit = unit;
        }

        public String getIsDefault() {
            return isDefault;
        }

        public void setIsDefault(String isDefault) {
            this.isDefault = isDefault;
        }
    }

    /** A performance level: numeric value plus unit (both kept as Strings). */
    @XmlRootElement(name="PerformanceLevel", namespace ="http://schemas.microsoft.com/windowsazure")
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class PerformanceLevel{
        @XmlElement(name="Value", namespace ="http://schemas.microsoft.com/windowsazure")
        private String value;
        @XmlElement(name="Unit", namespace ="http://schemas.microsoft.com/windowsazure")
        private String unit;

        public String getValue() {
            return value;
        }

        public void setValue(String value) {
            this.value = value;
        }

        public String getUnit() {
            return unit;
        }

        public void setUnit(String unit) {
            this.unit = unit;
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.reindex.remote;

import org.apache.http.ContentTooLongException;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.util.EntityUtils;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.ResponseListener;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.reindex.RejectAwareActionListener;
import org.elasticsearch.index.reindex.ScrollableHitSource;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
import java.io.InputStream;
import java.util.function.BiFunction;
import java.util.function.Consumer;

import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueNanos;
import static org.elasticsearch.index.reindex.remote.RemoteResponseParsers.MAIN_ACTION_PARSER;
import static org.elasticsearch.index.reindex.remote.RemoteResponseParsers.RESPONSE_PARSER;

/**
 * A {@link ScrollableHitSource} that pulls hits from a <em>remote</em>
 * Elasticsearch cluster over its REST API ("reindex from remote"). It first
 * looks up the remote version, then issues an initial search and subsequent
 * scroll requests, building version-appropriate requests via
 * {@code RemoteRequestBuilders}.
 */
public class RemoteScrollableHitSource extends ScrollableHitSource {
    private final RestClient client;
    private final BytesReference query;
    private final SearchRequest searchRequest;
    // Version of the remote cluster; resolved lazily in doStart before the
    // first search and used to shape all later requests.
    Version remoteVersion;

    public RemoteScrollableHitSource(Logger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry,
                                     Consumer<AsyncResponse> onResponse, Consumer<Exception> fail,
                                     RestClient client, BytesReference query, SearchRequest searchRequest) {
        super(logger, backoffPolicy, threadPool, countSearchRetry, onResponse, fail);
        this.query = query;
        this.searchRequest = searchRequest;
        this.client = client;
    }

    /**
     * Starts the hit source: resolves the remote version first, then runs the
     * initial search built for that version.
     */
    @Override
    protected void doStart(RejectAwareActionListener<Response> searchListener) {
        lookupRemoteVersion(RejectAwareActionListener.withResponseHandler(searchListener, version -> {
            remoteVersion = version;
            execute(RemoteRequestBuilders.initialSearch(searchRequest, query, remoteVersion),
                    RESPONSE_PARSER, RejectAwareActionListener.withResponseHandler(searchListener,
                            r -> onStartResponse(searchListener, r)));
        }));
    }

    /** Issues "GET /" on the remote and parses the version from the main action response. */
    void lookupRemoteVersion(RejectAwareActionListener<Version> listener) {
        execute(new Request("GET", ""), MAIN_ACTION_PARSER, listener);
    }

    /**
     * Handles the first search response. A non-empty scroll id with zero hits
     * looks like an old-style scan response, so skip straight to the first
     * scroll fetch; otherwise deliver the response as-is.
     */
    private void onStartResponse(RejectAwareActionListener<Response> searchListener, Response response) {
        if (Strings.hasLength(response.getScrollId()) && response.getHits().isEmpty()) {
            logger.debug("First response looks like a scan response. Jumping right to the second. scroll=[{}]", response.getScrollId());
            doStartNextScroll(response.getScrollId(), timeValueMillis(0), searchListener);
        } else {
            searchListener.onResponse(response);
        }
    }

    @Override
    protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, RejectAwareActionListener<Response> searchListener) {
        // Extend the configured keep-alive by whatever extra time the caller needs.
        TimeValue keepAlive = timeValueNanos(searchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos());
        execute(RemoteRequestBuilders.scroll(scrollId, keepAlive, remoteVersion), RESPONSE_PARSER, searchListener);
    }

    /**
     * Clears the remote scroll asynchronously. Failures are only logged —
     * {@code onCompletion} runs in every case.
     */
    @Override
    protected void clearScroll(String scrollId, Runnable onCompletion) {
        client.performRequestAsync(RemoteRequestBuilders.clearScroll(scrollId, remoteVersion), new ResponseListener() {
            @Override
            public void onSuccess(org.elasticsearch.client.Response response) {
                logger.debug("Successfully cleared [{}]", scrollId);
                onCompletion.run();
            }

            @Override
            public void onFailure(Exception e) {
                logFailure(e);
                onCompletion.run();
            }

            private void logFailure(Exception e) {
                if (e instanceof ResponseException) {
                    ResponseException re = (ResponseException) e;
                    // Pre-2.0 Elasticsearch returns 404 for an already-cleared
                    // scroll; that is expected and only worth a debug line.
                    if (remoteVersion.before(Version.fromId(2000099))
                            && re.getResponse().getStatusLine().getStatusCode() == 404) {
                        logger.debug((Supplier<?>) () -> new ParameterizedMessage(
                                "Failed to clear scroll [{}] from pre-2.0 Elasticsearch. This is normal if the request terminated "
                                        + "normally as the scroll has already been cleared automatically.", scrollId), e);
                        return;
                    }
                }
                logger.warn((Supplier<?>) () -> new ParameterizedMessage("Failed to clear scroll [{}]", scrollId), e);
            }
        });
    }

    @Override
    protected void cleanup(Runnable onCompletion) {
        /* This is called on the RestClient's thread pool and attempting to close the client on its
         * own threadpool causes it to fail to close. So we always shutdown the RestClient
         * asynchronously on a thread in Elasticsearch's generic thread pool. */
        threadPool.generic().submit(() -> {
            try {
                client.close();
                logger.debug("Shut down remote connection");
            } catch (IOException e) {
                logger.error("Failed to shutdown the remote connection", e);
            } finally {
                onCompletion.run();
            }
        });
    }

    /**
     * Executes {@code request} asynchronously, parses the body with
     * {@code parser}, and routes the outcome to {@code listener}:
     * success → {@code onResponse}; HTTP 429 → {@code onRejection} (so the
     * caller can retry/back off); anything else → {@code onFailure}. The
     * caller's thread context is captured here and restored inside each
     * callback so headers survive the async hop.
     */
    private <T> void execute(Request request,
                             BiFunction<XContentParser, XContentType, T> parser, RejectAwareActionListener<? super T> listener) {
        // Preserve the thread context so headers survive after the call
        java.util.function.Supplier<ThreadContext.StoredContext> contextSupplier = threadPool.getThreadContext().newRestorableContext(true);
        try {
            client.performRequestAsync(request, new ResponseListener() {
                @Override
                public void onSuccess(org.elasticsearch.client.Response response) {
                    // Restore the thread context to get the precious headers
                    try (ThreadContext.StoredContext ctx = contextSupplier.get()) {
                        assert ctx != null; // eliminates compiler warning
                        T parsedResponse;
                        try {
                            HttpEntity responseEntity = response.getEntity();
                            InputStream content = responseEntity.getContent();
                            XContentType xContentType = null;
                            if (responseEntity.getContentType() != null) {
                                final String mimeType = ContentType.parse(responseEntity.getContentType().getValue()).getMimeType();
                                xContentType = XContentType.fromMediaType(mimeType);
                            }
                            if (xContentType == null) {
                                try {
                                    throw new ElasticsearchException(
                                            "Response didn't include Content-Type: " + bodyMessage(response.getEntity()));
                                } catch (IOException e) {
                                    ElasticsearchException ee = new ElasticsearchException("Error extracting body from response");
                                    ee.addSuppressed(e);
                                    throw ee;
                                }
                            }
                            // EMPTY is safe here because we don't call namedObject
                            try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY,
                                    LoggingDeprecationHandler.INSTANCE, content)) {
                                parsedResponse = parser.apply(xContentParser, xContentType);
                            } catch (XContentParseException e) {
                                /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it
                                 * is totally wrong and we're probably not talking to Elasticsearch. */
                                throw new ElasticsearchException(
                                        "Error parsing the response, remote is likely not an Elasticsearch instance", e);
                            }
                        } catch (IOException e) {
                            throw new ElasticsearchException(
                                    "Error deserializing response, remote is likely not an Elasticsearch instance", e);
                        }
                        listener.onResponse(parsedResponse);
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    try (ThreadContext.StoredContext ctx = contextSupplier.get()) {
                        assert ctx != null; // eliminates compiler warning
                        if (e instanceof ResponseException) {
                            ResponseException re = (ResponseException) e;
                            int statusCode = re.getResponse().getStatusLine().getStatusCode();
                            e = wrapExceptionToPreserveStatus(statusCode, re.getResponse().getEntity(), re);
                            // 429 means the remote is throttling us: signal a
                            // retryable rejection rather than a hard failure.
                            if (RestStatus.TOO_MANY_REQUESTS.getStatus() == statusCode) {
                                listener.onRejection(e);
                                return;
                            }
                        } else if (e instanceof ContentTooLongException) {
                            e = new IllegalArgumentException(
                                    "Remote responded with a chunk that was too large. Use a smaller batch size.", e);
                        }
                        listener.onFailure(e);
                    }
                }
            });
        } catch (Exception e) {
            listener.onFailure(e);
        }
    }

    /**
     * Wrap the ResponseException in an exception that'll preserve its status code if possible so we can send it back to the user. We might
     * not have a constant for the status code so in that case we just use 500 instead. We also make sure to include the response
     * body in the message so the user can figure out *why* the remote Elasticsearch service threw the error back to us.
     */
    static ElasticsearchStatusException wrapExceptionToPreserveStatus(int statusCode, @Nullable HttpEntity entity, Exception cause) {
        RestStatus status = RestStatus.fromCode(statusCode);
        String messagePrefix = "";
        if (status == null) {
            messagePrefix = "Couldn't extract status [" + statusCode + "]. ";
            status = RestStatus.INTERNAL_SERVER_ERROR;
        }
        try {
            return new ElasticsearchStatusException(messagePrefix + bodyMessage(entity), status, cause);
        } catch (IOException ioe) {
            ElasticsearchStatusException e = new ElasticsearchStatusException(messagePrefix + "Failed to extract body.", status, cause);
            e.addSuppressed(ioe);
            return e;
        }
    }

    /** Renders the response body for inclusion in an error message; handles a missing entity. */
    private static String bodyMessage(@Nullable HttpEntity entity) throws IOException {
        if (entity == null) {
            return "No error body.";
        } else {
            return "body=" + EntityUtils.toString(entity);
        }
    }
}
/*
   Copyright 2013 Nationale-Nederlanden

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package nl.nn.adapterframework.monitoring;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;

import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.util.DateUtils;
import nl.nn.adapterframework.util.LogUtil;
import nl.nn.adapterframework.util.XmlBuilder;

import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.log4j.Logger;

/**
 * A named alarm/clearing state machine. A Monitor is raised or cleared by
 * {@link #changeState} based on incoming event severities, fires resulting
 * events to its configured destinations, and owns a list of {@link Trigger}s
 * (alarms and clearings). Managed by {@link MonitorManager}.
 *
 * @author Gerrit van Brakel
 * @since 4.9
 * @version $Id$
 */
public class Monitor {
    protected Logger log = LogUtil.getLogger(this);

    private String name;
    // Event type fired when this monitor raises; defaults to TECHNICAL.
    private EventTypeEnum type=EventTypeEnum.TECHNICAL;
    // True while the monitor is in the raised (alarm) state.
    private boolean raised=false;
    // Timestamp of the most recent state change.
    private Date stateChangeDt=null;

    // Number of repeat hits received while already raised at (or below) the current severity.
    private int additionalHitCount=0;
    private Date lastHit=null;

    // Severity and source of the currently-raised alarm (null when not raised).
    private SeverityEnum alarmSeverity=null;
    private EventThrowing alarmSource=null;

    private MonitorManager owner=null;

    private List triggers = new ArrayList();
    // Names of destinations (IMonitorAdapter keys) this monitor fires to.
    private Set destinationSet=new HashSet();

    public Monitor() {
        super();
    }

    /** Registers this monitor with the singleton MonitorManager (the argument is ignored). */
    public void register(Object x) {
        MonitorManager.getInstance().addMonitor(this);
    }

    /** Configures all registered triggers. */
    public void configure() throws ConfigurationException {
        if (MonitorManager.traceReconfigure && log.isDebugEnabled()) log.debug("monitor ["+getName()+"] configuring triggers");
        for (Iterator it=triggers.iterator(); it.hasNext();) {
            Trigger trigger = (Trigger)it.next();
            trigger.configure();
        }
    }

    /** Delegates trigger event-notification registration to the owning MonitorManager. */
    public void registerEventNotificationListener(Trigger trigger, List eventCodes, Map adapterFilters, boolean filterOnLowerLevelObjects, boolean filterExclusive) throws MonitorException {
        if (MonitorManager.traceReconfigure && log.isDebugEnabled()) log.debug("monitor ["+getName()+"] registerEventNotificationListener for trigger");
        getOwner().registerEventNotificationListener(trigger, eventCodes, adapterFilters, filterOnLowerLevelObjects, filterExclusive);
    }

    /**
     * Core state machine. Given an alarm/clearing event with a severity:
     * - "hit": alarm at a severity >= the current one (counts repeats);
     * - "up": alarm that newly raises, or raises to a strictly higher severity;
     * - "clear": currently raised and either a clearing event arrives, or the
     *   severity changes (the old state is cleared before re-raising).
     * Fires the corresponding monitor events and finally records the new
     * raised flag and notifies reverse triggers.
     */
    public void changeState(Date date, boolean alarm, SeverityEnum severity, EventThrowing source, String details, Throwable t) throws MonitorException {
        boolean hit=alarm && (getAlarmSeverityEnum()==null || getAlarmSeverityEnum().compareTo(severity)<=0);
        boolean up=alarm && (!raised || getAlarmSeverityEnum()==null || getAlarmSeverityEnum().compareTo(severity)<0);
        boolean clear=raised && (!alarm || up && getAlarmSeverityEnum()!=null && getAlarmSeverityEnum()!=severity);
        if (clear) {
            if (log.isDebugEnabled()) log.debug(getLogPrefix()+"state ["+getAlarmSeverityEnum()+"] will be cleared");
            // Prefer the stored alarm severity/source; fall back to the incoming event's.
            SeverityEnum clearSeverity=getAlarmSeverityEnum()!=null?getAlarmSeverityEnum():severity;
            EventThrowing clearSource=getAlarmSource()!=null?getAlarmSource():source;
            changeMonitorState(date, clearSource, EventTypeEnum.CLEARING, clearSeverity, details, t);
        }
        if (up) {
            if (log.isDebugEnabled()) log.debug(getLogPrefix()+"state ["+getAlarmSeverityEnum()+"] will be raised to ["+severity+"]");
            changeMonitorState(date, source, getTypeEnum(), severity, details, t);
            setAlarmSource(source);
            setAlarmSeverityEnum(severity);
            setLastHit(date);
            setAdditionalHitCount(0);
        } else {
            if (hit) {
                // Same-or-lower-severity repeat while raised: just count it.
                setLastHit(date);
                setAdditionalHitCount(getAdditionalHitCount()+1);
            }
        }
        raised=alarm;
        notifyReverseTrigger(alarm,source);
    }

    /**
     * Records the state-change time and fires the event to every configured
     * destination's IMonitorAdapter.
     *
     * NOTE(review): the message parameter is not forwarded — fireEvent is
     * called with null; confirm whether that is intentional.
     */
    public void changeMonitorState(Date date, EventThrowing subSource, EventTypeEnum eventType, SeverityEnum severity, String message, Throwable t) throws MonitorException {
        String eventSource=subSource==null?"":subSource.getEventSourceName();
        if (eventType==null) {
            throw new MonitorException("eventType cannot be null");
        }
        if (severity==null) {
            throw new MonitorException("severity cannot be null");
        }
        setStateChangeDt(date);
        for (Iterator it=destinationSet.iterator();it.hasNext();) {
            String key=(String)it.next();
            IMonitorAdapter monitorAdapter = getOwner().getDestination(key);
            if (log.isDebugEnabled()) log.debug(getLogPrefix()+"firing event on destination ["+key+"]");
            if (monitorAdapter!=null) {
                monitorAdapter.fireEvent(eventSource, eventType, severity, getName(), null);
            }
        }
    }

    /** Notifies every trigger on the opposite side (alarm vs clearing) of the state flip. */
    protected void notifyReverseTrigger(boolean alarm, EventThrowing source) {
        for (Iterator it=triggers.iterator(); it.hasNext();) {
            Trigger trigger=(Trigger)it.next();
            if (trigger.isAlarm()!=alarm) {
                trigger.notificationOfReverseTrigger(source);
            }
        }
    }

    /** Builds a &lt;monitor&gt; XML element describing the current runtime status. */
    public XmlBuilder getStatusXml() {
        XmlBuilder monitor=new XmlBuilder("monitor");
        monitor.addAttribute("name",getName());
        monitor.addAttribute("raised",isRaised());
        if (stateChangeDt!=null) {
            monitor.addAttribute("changed",getStateChangeDtStr());
        }
        if (isRaised()) {
            monitor.addAttribute("severity",getAlarmSeverity());
            EventThrowing source = getAlarmSource();
            if (source!=null) {
                monitor.addAttribute("source",source.getEventSourceName());
            }
        }
        return monitor;
    }

    /** Builds a &lt;monitor&gt; XML element describing the configuration, including triggers. */
    public XmlBuilder toXml() {
        XmlBuilder monitor=new XmlBuilder("monitor");
        monitor.addAttribute("name",getName());
        monitor.addAttribute("type",getType());
        monitor.addAttribute("destinations",getDestinationsAsString());
        for (Iterator it=triggers.iterator();it.hasNext();) {
            Trigger trigger=(Trigger)it.next();
            trigger.toXml(monitor);
        }
        return monitor;
    }

    public boolean isDestination(String name) {
        return destinationSet.contains(name);
    }

    /** Returns the destination names as a comma-separated string, or null when there are none. */
    public String getDestinationsAsString() {
        String result=null;
        for (Iterator it=getDestinationSet().iterator();it.hasNext();) {
            String item=(String)it.next();
            if (result==null) {
                result=item;
            } else {
                result+=","+item;
            }
        }
        return result;
    }

    public String[] getDestinations() {
        String[] result=new String[destinationSet.size()];
        result=(String[])destinationSet.toArray(result);
        return result;
    }

    /** Replaces the destination set from a comma-separated string. */
    public void setDestinations(String newDestinations) {
        destinationSet.clear();
        StringTokenizer st=new StringTokenizer(newDestinations,",");
        while (st.hasMoreTokens()) {
            String token=st.nextToken();
            destinationSet.add(token);
        }
    }

    /**
     * Replaces the destination set from an array. A single-element array is
     * treated as one comma-separated string; otherwise each element is a name.
     */
    public void setDestinations(String[] newDestinations) {
        if (newDestinations.length==1) {
            log.debug("assuming single string, separated by commas");
            destinationSet.clear();
            StringTokenizer st=new StringTokenizer(newDestinations[0],",");
            while (st.hasMoreTokens()) {
                String token=st.nextToken();
                log.debug(getLogPrefix()+"adding destination ["+token+"]");
                destinationSet.add(token);
            }
        } else {
            log.debug(getLogPrefix()+"entering setDestinations(String[])");
            Set set=new HashSet();
            for (int i=0;i<newDestinations.length;i++) {
                log.debug(getLogPrefix()+"adding destination ["+newDestinations[i]+"]");
                set.add(newDestinations[i]);
            }
            setDestinationSet(set);
        }
    }

    public Set getDestinationSet() {
        return destinationSet;
    }

    /**
     * Replaces the destination set: null clears it; otherwise the existing set
     * is intersected with (retainAll) and then extended by (addAll) the new
     * set, so the final content equals newDestinations while keeping the same
     * Set instance.
     */
    public void setDestinationSet(Set newDestinations) {
        if (newDestinations==null) {
            log.debug(getLogPrefix()+"clearing destinations");
            destinationSet.clear();
        } else {
            if (log.isDebugEnabled()) {
                String destinations=null;
                for (Iterator it=newDestinations.iterator();it.hasNext();) {
                    if (destinations!=null) {
                        destinations+=","+(String)it.next();
                    } else {
                        destinations=(String)it.next();
                    }
                }
                log.debug(getLogPrefix()+"setting destinations to ["+destinations+"]");
            }
            destinationSet.retainAll(newDestinations);
            destinationSet.addAll(newDestinations);
        }
    }

    /** Adds a trigger and makes this monitor its owner. */
    public void registerTrigger(Trigger trigger) {
        trigger.setOwner(this);
        triggers.add(trigger);
    }

    /** Registers a trigger that raises the alarm. */
    public void registerAlarm(Trigger trigger) {
        trigger.setAlarm(true);
        registerTrigger(trigger);
    }

    /** Registers a trigger that clears the alarm. */
    public void registerClearing(Trigger trigger) {
        trigger.setAlarm(false);
        registerTrigger(trigger);
    }

    public String getLogPrefix() {
        return "Monitor ["+getName()+"] ";
    }

    public void setOwner(MonitorManager manager) {
        owner = manager;
    }
    public MonitorManager getOwner() {
        return owner;
    }

    public List getTriggers() {
        return triggers;
    }
    public Trigger getTrigger(int index) {
        return (Trigger)triggers.get(index);
    }

    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    public void setName(String string) {
        name = string;
    }
    public String getName() {
        return name;
    }

    /** Sets the event type by its string name (looked up in EventTypeEnum's map). */
    public void setType(String eventType) {
        setTypeEnum((EventTypeEnum)EventTypeEnum.getEnumMap().get(eventType));
    }
    public String getType() {
        return type==null?null:type.getName();
    }
    public void setTypeEnum(EventTypeEnum enumeration) {
        type = enumeration;
    }
    public EventTypeEnum getTypeEnum() {
        return type;
    }

    public void setRaised(boolean b) {
        raised = b;
    }
    public boolean isRaised() {
        return raised;
    }

    public String getAlarmSeverity() {
        return alarmSeverity==null?null:alarmSeverity.getName();
    }
    public void setAlarmSeverityEnum(SeverityEnum enumeration) {
        alarmSeverity = enumeration;
    }
    public SeverityEnum getAlarmSeverityEnum() {
        return alarmSeverity;
    }

    public EventThrowing getAlarmSource() {
        return alarmSource;
    }
    public void setAlarmSource(EventThrowing source) {
        alarmSource = source;
    }

    /** Records the state-change time and reports it to the owning manager. */
    public void setStateChangeDt(Date date) {
        stateChangeDt = date;
        getOwner().registerStateChange(date);
    }
    public Date getStateChangeDt() {
        return stateChangeDt;
    }
    /** Formatted state-change time, or "" when no state change occurred yet. */
    public String getStateChangeDtStr() {
        if (stateChangeDt!=null) {
            return DateUtils.format(stateChangeDt,DateUtils.FORMAT_FULL_GENERIC);
        }
        return "";
    }

    public void setLastHit(Date date) {
        lastHit = date;
    }
    public Date getLastHit() {
        return lastHit;
    }
    /** Formatted last-hit time, or "" when never hit. */
    public String getLastHitStr() {
        if (lastHit!=null) {
            return DateUtils.format(lastHit,DateUtils.FORMAT_FULL_GENERIC);
        }
        return "";
    }

    public void setAdditionalHitCount(int i) {
        additionalHitCount = i;
    }
    public int getAdditionalHitCount() {
        return additionalHitCount;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.resource; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import org.apache.commons.lang.StringUtils; import org.apache.falcon.FalconWebException; import org.apache.falcon.monitors.Dimension; import org.apache.falcon.monitors.Monitored; /** * Entity management operations as REST API for feed and process. 
*/
// NOTE(review): the class-level javadoc opens before this chunk; only its closing
// delimiter is visible here.
@Path("entities")
public class SchedulableEntityManager extends AbstractSchedulableEntityManager {

    /**
     * Returns the status of the given entity.
     *
     * <p>Pure delegation: forwards to the superclass and converts any failure into a
     * {@link FalconWebException} so the client receives a structured API error.
     */
    @GET
    @Path("status/{type}/{entity}")
    @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
    @Monitored(event = "status")
    @Override
    public APIResult getStatus(@Dimension("entityType") @PathParam("type") String type,
                               @Dimension("entityName") @PathParam("entity") String entity,
                               @Dimension("colo") @QueryParam("colo") final String colo,
                               @Dimension("showScheduler") @QueryParam("showScheduler") final Boolean showScheduler) {
        try {
            return super.getStatus(type, entity, colo, showScheduler);
        } catch (Throwable throwable) {
            // Catching Throwable is the established pattern throughout this resource class:
            // every failure surfaces as an API exception rather than an unstructured 500.
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Returns pending SLA-miss alerts for feeds of the given type within [start, end].
     *
     * <p>{@code entityType} is consumed only by {@code validateSlaParams}; the superclass
     * call receives the remaining parameters.
     */
    // NOTE(review): unlike the sibling endpoints this method carries no @Override —
    // its signature (5 params) differs from the super call (4 args), so it is likely
    // a new endpoint rather than an override; confirm against the superclass.
    @GET
    @Path("sla-alert/{type}")
    @Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_XML})
    @Monitored(event = "feed-sla-misses")
    public SchedulableEntityInstanceResult getFeedSLAMissPendingAlerts(
            @Dimension("entityType") @PathParam("type") String entityType,
            @Dimension("entityName") @QueryParam("name") String entityName,
            @Dimension("start") @QueryParam("start") String start,
            @Dimension("end") @QueryParam("end") String end,
            @Dimension("colo") @QueryParam("colo") final String colo) {
        try {
            // Validate before delegating; validation failures propagate through the
            // same API-exception path as execution failures.
            validateSlaParams(entityType, entityName, start, end, colo);
            return super.getFeedSLAMissPendingAlerts(entityName, start, end, colo);
        } catch (Throwable e) {
            throw FalconWebException.newAPIException(e);
        }
    }

    /**
     * Returns the entities the given entity depends on.
     */
    @GET
    @Path("dependencies/{type}/{entity}")
    @Produces({MediaType.TEXT_XML, MediaType.APPLICATION_JSON})
    @Monitored(event = "dependencies")
    @Override
    public EntityList getDependencies(@Dimension("entityType") @PathParam("type") String type,
                                      @Dimension("entityName") @PathParam("entity") String entity) {
        try {
            return super.getDependencies(type, entity);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck

    /**
     * Lists entities, optionally filtered by type, name subsequence, tags, and
     * arbitrary filter/order/pagination parameters.
     *
     * <p>The path template {@code list{type : (/[^/]+)?}} makes the type segment
     * optional; when present it is captured with its leading '/', which is stripped
     * below before delegation.
     */
    @GET
    @Path("list{type : (/[^/]+)?}")
    @Produces({MediaType.TEXT_XML, MediaType.APPLICATION_JSON})
    @Monitored(event = "list")
    @Override
    public EntityList getEntityList(@Dimension("type") @PathParam("type") String type,
                                    @DefaultValue("") @QueryParam("fields") String fields,
                                    @DefaultValue("") @QueryParam("nameseq") String nameSubsequence,
                                    @DefaultValue("") @QueryParam("tagkeys") String tagKeywords,
                                    @DefaultValue("") @QueryParam("tags") String tags,
                                    @DefaultValue("") @QueryParam("filterBy") String filterBy,
                                    @DefaultValue("") @QueryParam("orderBy") String orderBy,
                                    @DefaultValue("asc") @QueryParam("sortOrder") String sortOrder,
                                    @DefaultValue("0") @QueryParam("offset") Integer offset,
                                    @QueryParam("numResults") Integer resultsPerPage,
                                    @DefaultValue("") @QueryParam("doAs") String doAsUser) {
        try {
            if (StringUtils.isNotEmpty(type)) {
                // Drop the leading '/' captured by the optional path segment.
                type = type.substring(1);
            }
            // numResults has no @DefaultValue, so a missing query param arrives as null
            // and falls back to the server-configured default page size.
            resultsPerPage = resultsPerPage == null ? getDefaultResultsPerPage() : resultsPerPage;
            return super.getEntityList(fields, nameSubsequence, tagKeywords, type, tags, filterBy,
                    orderBy, sortOrder, offset, resultsPerPage, doAsUser);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Returns a summary of entities of the given type (with a bounded number of
     * instances per entity), filtered/ordered/paginated by the query parameters.
     */
    @GET
    @Path("summary/{type}")
    @Produces({MediaType.TEXT_XML, MediaType.APPLICATION_JSON})
    @Monitored(event = "summary")
    @Override
    public EntitySummaryResult getEntitySummary(
            @Dimension("type") @PathParam("type") String type,
            @Dimension("cluster") @QueryParam("cluster") String cluster,
            @DefaultValue("") @QueryParam("start") String startStr,
            @DefaultValue("") @QueryParam("end") String endStr,
            @DefaultValue("") @QueryParam("fields") String fields,
            @DefaultValue("") @QueryParam("filterBy") String entityFilter,
            @DefaultValue("") @QueryParam("tags") String entityTags,
            @DefaultValue("") @QueryParam("orderBy") String entityOrderBy,
            @DefaultValue("asc") @QueryParam("sortOrder") String entitySortOrder,
            @DefaultValue("0") @QueryParam("offset") Integer entityOffset,
            @DefaultValue("10") @QueryParam("numResults") Integer numEntities,
            @DefaultValue("7") @QueryParam("numInstances") Integer numInstanceResults,
            @DefaultValue("") @QueryParam("doAs") final String doAsUser) {
        try {
            return super.getEntitySummary(type, cluster, startStr, endStr, fields, entityFilter, entityTags,
                    entityOrderBy, entitySortOrder, entityOffset, numEntities, numInstanceResults, doAsUser);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }
    //RESUME CHECKSTYLE CHECK ParameterNumberCheck

    /**
     * Returns the stored definition of the given entity.
     */
    @GET
    @Path("definition/{type}/{entity}")
    @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
    @Monitored(event = "definition")
    @Override
    public String getEntityDefinition(@Dimension("type") @PathParam("type") String type,
                                      @Dimension("entity") @PathParam("entity") String entityName) {
        try {
            return super.getEntityDefinition(type, entityName);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Schedules the given entity, optionally skipping the dry run and passing
     * scheduler properties through.
     */
    @POST
    @Path("schedule/{type}/{entity}")
    @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
    @Monitored(event = "schedule")
    @Override
    public APIResult schedule(@Context HttpServletRequest request,
                              @Dimension("entityType") @PathParam("type") String type,
                              @Dimension("entityName") @PathParam("entity") String entity,
                              @Dimension("colo") @QueryParam("colo") String colo,
                              @QueryParam("skipDryRun") Boolean skipDryRun,
                              @QueryParam("properties") String properties) {
        try {
            return super.schedule(request, type, entity, colo, skipDryRun, properties);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Suspends the given (scheduled) entity.
     */
    @POST
    @Path("suspend/{type}/{entity}")
    @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
    @Monitored(event = "suspend")
    @Override
    public APIResult suspend(@Context HttpServletRequest request,
                             @Dimension("entityType") @PathParam("type") String type,
                             @Dimension("entityName") @PathParam("entity") String entity,
                             @Dimension("colo") @QueryParam("colo") String colo) {
        try {
            return super.suspend(request, type, entity, colo);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Resumes the given (suspended) entity.
     */
    @POST
    @Path("resume/{type}/{entity}")
    @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
    @Monitored(event = "resume")
    @Override
    public APIResult resume(@Context HttpServletRequest request,
                            @Dimension("entityType") @PathParam("type") String type,
                            @Dimension("entityName") @PathParam("entity") String entity,
                            @Dimension("colo") @QueryParam("colo") String colo) {
        try {
            return super.resume(request, type, entity, colo);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Validates an entity definition submitted in the request body (XML or plain text).
     */
    @POST
    @Path("validate/{type}")
    @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN})
    @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
    @Monitored(event = "validate")
    @Override
    public APIResult validate(@Context HttpServletRequest request, @PathParam("type") String type,
                              @QueryParam("skipDryRun") Boolean skipDryRun) {
        try {
            return super.validate(request, type, skipDryRun);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Force-updates ("touches") the given entity.
     */
    // NOTE(review): @Produces here omits APPLICATION_JSON, unlike every other
    // endpoint in this class — confirm whether that is intentional.
    @POST
    @Path("touch/{type}/{entity}")
    @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN})
    @Monitored(event = "touch")
    @Override
    public APIResult touch(@Dimension("entityType") @PathParam("type") String type,
                           @Dimension("entityName") @PathParam("entity") String entityName,
                           @Dimension("colo") @QueryParam("colo") String colo,
                           @QueryParam("skipDryRun") Boolean skipDryRun) {
        try {
            return super.touch(type, entityName, colo, skipDryRun);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }

    /**
     * Reverse-looks-up the feed(s) that produce the given instance path.
     */
    // NOTE(review): delegates to super with an identical argument list yet carries
    // no @Override — verify against the superclass whether this overrides or shadows.
    @GET
    @Path("lookup/{type}/")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "reverse-lookup")
    public FeedLookupResult reverseLookup(
            @Dimension("type") @PathParam("type") String type,
            @Dimension("path") @QueryParam("path") String instancePath) {
        try {
            return super.reverseLookup(type, instancePath);
        } catch (Throwable throwable) {
            throw FalconWebException.newAPIException(throwable);
        }
    }
}