gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * Copyright 2010-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ch.ralscha.extdirectspring.controller;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.data.MapEntry.entry;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.Assert;
import org.springframework.web.context.WebApplicationContext;

import ch.ralscha.extdirectspring.bean.api.ActionDoc;
import ch.ralscha.extdirectspring.util.ApiCache;

/**
 * Integration tests that verify the generated api-debug-doc.js contains the
 * expected Javadoc-derived documentation blocks for each remote method, and
 * that the non-doc variants (api.js, api-debug.js) contain none.
 */
@ExtendWith(SpringExtension.class)
@WebAppConfiguration
@ContextConfiguration("classpath:/testApplicationContext.xml")
public class ApiControllerWithDocumentationTest {

	@Autowired
	private WebApplicationContext wac;

	private MockMvc mockMvc;

	@Autowired
	private ConfigurationService configurationService;

	@Autowired
	private ApiCache apiCache;

	@BeforeEach
	public void setupApiController() throws Exception {
		// Reset the cached api.js so each test sees a freshly generated script.
		this.apiCache.clear();

		Configuration config = new Configuration();
		config.setTimeout(15000);
		config.setEnableBuffer(Boolean.FALSE);
		config.setMaxRetries(5);
		config.setStreamResponse(true);

		ReflectionTestUtils.setField(this.configurationService, "configuration",
				config);
		this.configurationService.afterPropertiesSet();

		this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build();
	}

	/**
	 * to test the following need to activate Feature 'ALLOW_COMMENTS' for jackson
	 * parser
	 * <p>
	 * typical error is com.fasterxml.jackson.core.JsonParseException: Unexpected
	 * character ('/' (code 47)): maybe a (non-standard) comment?
	 *
	 * @throws Exception
	 */
	@Test
	public void testDoc1() throws Exception {
		ActionDoc doc = callApi("method1");

		assertThat(doc.isDeprecated()).isTrue();
		assertThat(doc.getMethodComment())
				.isEqualTo("this method is used to test the documentation generation");
		assertThat(doc.getAuthor()).isEqualTo("dbs");
		assertThat(doc.getVersion()).isEqualTo("0.1");
		assertThat(doc.getParameters()).hasSize(5);
		assertThat(doc.getParameters()).contains(entry("a", "property a integer"),
				entry("b", "property b string"), entry("c", "property c string"),
				entry("d", "property d boolean"), entry("e", "array of integers"));
		assertThat(doc.getReturnMethod()).hasSize(2);
		assertThat(doc.getReturnMethod()).contains(
				entry("errors", "list of failed fields"),
				entry("success", "true for success, false otherwise"));
	}

	@Test
	public void testDoc2() throws Exception {
		ActionDoc doc = callApi("method2");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method two doc");
		assertThat(doc.getAuthor()).isEmpty();
		assertThat(doc.getVersion()).isEqualTo("1.0");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	@Test
	public void testDoc3() throws Exception {
		ActionDoc doc = callApi("method3");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method three doc");
		assertThat(doc.getAuthor()).isEqualTo("dbs");
		assertThat(doc.getVersion()).isEqualTo("1.0");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	@Test
	public void testDoc4() throws Exception {
		ActionDoc doc = callApi("method4");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method four doc");
		assertThat(doc.getAuthor()).isEqualTo("sr");
		assertThat(doc.getVersion()).isEqualTo("0.4");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	@Test
	public void testDoc5() throws Exception {
		ActionDoc doc = callApi("method5");

		assertThat(doc.isDeprecated()).isTrue();
		assertThat(doc.getMethodComment()).isEqualTo("method five doc");
		assertThat(doc.getAuthor()).isEqualTo("dbs");
		assertThat(doc.getVersion()).isEqualTo("0.5");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	@Test
	public void testDoc6() throws Exception {
		ActionDoc doc = callApi("method6");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method six doc");
		assertThat(doc.getAuthor()).isEqualTo("sr");
		assertThat(doc.getVersion()).isEqualTo("0.6");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	@Test
	public void testDoc7() throws Exception {
		ActionDoc doc = callApi("method7");

		assertThat(doc.isDeprecated()).isTrue();
		assertThat(doc.getMethodComment()).isEqualTo("method seven doc");
		assertThat(doc.getAuthor()).isEqualTo("sr");
		assertThat(doc.getVersion()).isEqualTo("0.7");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).hasSize(1);
		assertThat(doc.getReturnMethod()).contains(entry("p1", "p1 desc"));
	}

	@Test
	public void testDoc8() throws Exception {
		ActionDoc doc = callApi("method8");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method eight doc");
		assertThat(doc.getAuthor()).isEqualTo("sr");
		assertThat(doc.getVersion()).isEqualTo("0.8");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).hasSize(2);
		assertThat(doc.getReturnMethod()).contains(entry("p1", "p1 desc"),
				entry("p2", "p2 desc"));
	}

	@Test
	public void testDoc9() throws Exception {
		ActionDoc doc = callApi("method9");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method nine doc");
		assertThat(doc.getAuthor()).isEqualTo("dbs");
		assertThat(doc.getVersion()).isEqualTo("0.9");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	@Test
	public void testDoc10() throws Exception {
		ActionDoc doc = callApi("method10");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method ten doc");
		assertThat(doc.getAuthor()).isEqualTo("sr");
		assertThat(doc.getVersion()).isEqualTo("1.0");
		assertThat(doc.getParameters()).hasSize(1);
		assertThat(doc.getParameters()).contains(entry("a", "a desc"));
		assertThat(doc.getReturnMethod()).hasSize(2);
		assertThat(doc.getReturnMethod()).contains(entry("p1", "p1 desc"),
				entry("p2", "p2 desc"));
	}

	@Test
	public void testDoc11() throws Exception {
		ActionDoc doc = callApi("method11");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method eleven doc");
		assertThat(doc.getAuthor()).isEmpty();
		assertThat(doc.getVersion()).isEqualTo("1.0");
		assertThat(doc.getParameters()).hasSize(2);
		assertThat(doc.getParameters()).contains(entry("a", "a desc"),
				entry("b", "b desc"));
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	@Test
	public void testDoc12() throws Exception {
		ActionDoc doc = callApi("method12");

		assertThat(doc.isDeprecated()).isFalse();
		assertThat(doc.getMethodComment()).isEqualTo("method twelve doc");
		assertThat(doc.getAuthor()).isEqualTo("sr");
		assertThat(doc.getVersion()).isEqualTo("1.0");
		assertThat(doc.getParameters()).isEmpty();
		assertThat(doc.getReturnMethod()).isEmpty();
	}

	// Bug fix: the following two methods were missing the @Test annotation and
	// were therefore silently skipped by the JUnit 5 runner.
	@Test
	public void testRequestToApiDebugDoesNotContainDocs() throws Exception {
		doRequestWithoutDocs("/api-debug.js");
	}

	@Test
	public void testRequestToApiDoesNotContainDocs() throws Exception {
		doRequestWithoutDocs("/api.js");
	}

	/**
	 * Requests the given api script and asserts that it contains no Javadoc
	 * comment blocks ("/**").
	 */
	private void doRequestWithoutDocs(String url) throws Exception {
		ApiRequestParams params = ApiRequestParams.builder().apiNs("Ext.ns")
				.actionNs("actionns").group("doc")
				.configuration(this.configurationService.getConfiguration()).build();

		MockHttpServletRequestBuilder request = get(url).accept(MediaType.ALL)
				.characterEncoding("UTF-8");
		request.param("apiNs", params.getApiNs());
		request.param("actionNs", params.getActionNs());
		request.param("group", params.getGroup());

		MvcResult result = this.mockMvc.perform(request).andExpect(status().isOk())
				.andExpect(content().contentTypeCompatibleWith("application/javascript"))
				.andReturn();

		ApiControllerTest.compare(result,
				ApiControllerTest.groupApisWithDoc("actionns"), params);
		Assert.doesNotContain("/**", result.getResponse().getContentAsString(),
				"generation of api.js should not contain method documentation");
	}

	/**
	 * Requests api-debug-doc.js and extracts the documentation block generated
	 * for the given method name.
	 */
	private ActionDoc callApi(String method) throws Exception {
		ApiRequestParams params = ApiRequestParams.builder().apiNs("Ext.ns")
				.actionNs("actionns").group("doc")
				.configuration(this.configurationService.getConfiguration()).build();

		MockHttpServletRequestBuilder request = get("/api-debug-doc.js")
				.accept(MediaType.ALL).characterEncoding("UTF-8");
		request.param("apiNs", params.getApiNs());
		request.param("actionNs", params.getActionNs());
		request.param("group", params.getGroup());

		MvcResult result = this.mockMvc.perform(request).andExpect(status().isOk())
				.andExpect(content().contentTypeCompatibleWith("application/javascript"))
				.andReturn();

		ApiControllerTest.compare(result,
				ApiControllerTest.groupApisWithDoc("actionns"), params);

		ActionDoc doc = getCommentForMethod(result.getResponse().getContentAsString(),
				method);
		return doc;
	}

	// Matches one complete Javadoc-style comment block ("/** ... */").
	// Idiom fix: modifier order changed from "final static" to "static final".
	private static final Pattern COMMENT_PATTERN = Pattern.compile("/\\*\\*([^/]*)\\*/",
			Pattern.MULTILINE);

	/**
	 * Parses the comment block belonging to {@code method} out of the generated
	 * api script and converts it into an {@link ActionDoc}.
	 * <p>
	 * The offsets below skip the tag plus the separator that follows it, e.g.
	 * {@code p + 9} skips "@author: " (8 tag characters plus one space) —
	 * assumes the generator always emits exactly one space after the colon.
	 */
	private static ActionDoc getCommentForMethod(String apiString, String method) {
		ActionDoc doc = new ActionDoc(method, Collections.<String>emptyList());
		String block = findCommentBlock(apiString, method);
		if (block != null) {
			doc.setDeprecated(block.contains("* @deprecated"));

			int p = block.indexOf("@author:");
			if (p != -1) {
				doc.setAuthor(block.substring(p + 9, block.indexOf('\n', p)));
			}

			p = block.indexOf("@version:");
			if (p != -1) {
				doc.setVersion(block.substring(p + 10, block.indexOf('\n', p)));
			}

			p = block.indexOf(method);
			if (p != -1) {
				// method name is followed by ": " before the comment text
				doc.setMethodComment(
						block.substring(p + method.length() + 2, block.indexOf('\n', p)));
			}

			// "@param: [name] description" entries, one per line
			Map<String, String> params = new HashMap<>();
			p = block.indexOf("@param:");
			while (p != -1) {
				int p2 = block.indexOf('\n', p);
				String pc = block.substring(p + 8, p2);
				int c1 = pc.indexOf('[');
				int c2 = pc.indexOf(']');
				params.put(pc.substring(c1 + 1, c2), pc.substring(c2 + 2));
				p = block.indexOf("@param:", p2);
			}
			doc.setParameters(params);

			// "@return [name] description" entries after a single @return tag
			Map<String, String> returns = new HashMap<>();
			p = block.indexOf("@return");
			if (p != -1) {
				p = block.indexOf('[', p);
				while (p != -1) {
					int p2 = block.indexOf(']', p);
					returns.put(block.substring(p + 1, p2),
							block.substring(p2 + 2, block.indexOf('\n', p2)));
					p = block.indexOf('[', p2);
				}
			}
			doc.setReturnMethod(returns);
		}
		return doc;
	}

	/**
	 * Returns the first comment block that documents {@code method}, or
	 * {@code null} when the generated script contains none.
	 */
	private static String findCommentBlock(String apiString, String method) {
		Matcher m = COMMENT_PATTERN.matcher(apiString);
		while (m.find()) {
			String block = m.group(1);
			if (block.contains(method + ":")) {
				return block;
			}
		}
		return null;
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.qjournal.client;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.PriorityQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetJournalStateResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.NewEpochResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.SegmentStateProto;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.common.StorageInfo;
import org.apache.hadoop.hdfs.server.common.Util;
import org.apache.hadoop.hdfs.server.namenode.EditLogFileInputStream;
import org.apache.hadoop.hdfs.server.namenode.EditLogInputStream;
import org.apache.hadoop.hdfs.server.namenode.EditLogOutputStream;
import org.apache.hadoop.hdfs.server.namenode.JournalManager;
import org.apache.hadoop.hdfs.server.namenode.JournalSet;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.protobuf.TextFormat;

/**
 * A JournalManager that writes to a set of remote JournalNodes,
 * requiring a quorum of nodes to ack each write.
 */
@InterfaceAudience.Private
public class QuorumJournalManager implements JournalManager {
  static final Log LOG = LogFactory.getLog(QuorumJournalManager.class);

  // Timeouts for which the QJM will wait for each of the following actions.
  private final int startSegmentTimeoutMs;
  private final int prepareRecoveryTimeoutMs;
  private final int acceptRecoveryTimeoutMs;
  private final int finalizeSegmentTimeoutMs;
  private final int selectInputStreamsTimeoutMs;
  private final int getJournalStateTimeoutMs;
  private final int newEpochTimeoutMs;
  private final int writeTxnsTimeoutMs;

  // This timeout is used for calls that don't occur during normal operation
  // e.g. format, upgrade operations and a few others. So we can use rather
  // lengthy timeouts by default.
  private final int timeoutMs;

  private final Configuration conf;
  private final URI uri;
  private final NamespaceInfo nsInfo;
  private boolean isActiveWriter;

  private final AsyncLoggerSet loggers;

  private int outputBufferCapacity = 512 * 1024;
  private final URLConnectionFactory connectionFactory;

  public QuorumJournalManager(Configuration conf,
      URI uri, NamespaceInfo nsInfo) throws IOException {
    this(conf, uri, nsInfo, IPCLoggerChannel.FACTORY);
  }

  QuorumJournalManager(Configuration conf,
      URI uri, NamespaceInfo nsInfo,
      AsyncLogger.Factory loggerFactory) throws IOException {
    Preconditions.checkArgument(conf != null, "must be configured");

    this.conf = conf;
    this.uri = uri;
    this.nsInfo = nsInfo;
    this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
    this.connectionFactory = URLConnectionFactory
        .newDefaultURLConnectionFactory(conf);

    // Configure timeouts.
    this.startSegmentTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
    this.prepareRecoveryTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
    this.acceptRecoveryTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
    this.finalizeSegmentTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
    this.selectInputStreamsTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
    this.getJournalStateTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
    this.newEpochTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
    this.writeTxnsTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
    this.timeoutMs = (int) conf.getTimeDuration(DFSConfigKeys
        .DFS_QJM_OPERATIONS_TIMEOUT,
        DFSConfigKeys.DFS_QJM_OPERATIONS_TIMEOUT_DEFAULT, TimeUnit
        .MILLISECONDS);
  }

  protected List<AsyncLogger> createLoggers(
      AsyncLogger.Factory factory) throws IOException {
    return createLoggers(conf, uri, nsInfo, factory);
  }

  static String parseJournalId(URI uri) {
    String path = uri.getPath();
    Preconditions.checkArgument(path != null && !path.isEmpty(),
        "Bad URI '%s': must identify journal in path component",
        uri);
    String journalId = path.substring(1);
    checkJournalId(journalId);
    return journalId;
  }

  public static void checkJournalId(String jid) {
    Preconditions.checkArgument(jid != null &&
        !jid.isEmpty() &&
        !jid.contains("/") &&
        !jid.startsWith("."),
        "bad journal id: " + jid);
  }

  /**
   * Fence any previous writers, and obtain a unique epoch number
   * for write-access to the journal nodes.
   *
   * @return the new, unique epoch number
   */
  Map<AsyncLogger, NewEpochResponseProto> createNewUniqueEpoch()
      throws IOException {
    Preconditions.checkState(!loggers.isEpochEstablished(),
        "epoch already created");

    Map<AsyncLogger, GetJournalStateResponseProto> lastPromises =
      loggers.waitForWriteQuorum(loggers.getJournalState(),
          getJournalStateTimeoutMs, "getJournalState()");

    long maxPromised = Long.MIN_VALUE;
    for (GetJournalStateResponseProto resp : lastPromises.values()) {
      maxPromised = Math.max(maxPromised, resp.getLastPromisedEpoch());
    }
    assert maxPromised >= 0;

    long myEpoch = maxPromised + 1;
    Map<AsyncLogger, NewEpochResponseProto> resps =
        loggers.waitForWriteQuorum(loggers.newEpoch(nsInfo, myEpoch),
            newEpochTimeoutMs, "newEpoch(" + myEpoch + ")");

    loggers.setEpoch(myEpoch);
    return resps;
  }

  @Override
  public void format(NamespaceInfo nsInfo) throws IOException {
    QuorumCall<AsyncLogger,Void> call = loggers.format(nsInfo);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "format");
    } catch (InterruptedException e) {
      throw new IOException("Interrupted waiting for format() response");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for format() response");
    }

    if (call.countExceptions() > 0) {
      call.rethrowException("Could not format one or more JournalNodes");
    }
  }

  @Override
  public boolean hasSomeData() throws IOException {
    QuorumCall<AsyncLogger, Boolean> call =
        loggers.isFormatted();

    try {
      call.waitFor(loggers.size(), 0, 0, timeoutMs, "hasSomeData");
    } catch (InterruptedException e) {
      throw new IOException("Interrupted while determining if JNs have data");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for response from loggers");
    }

    if (call.countExceptions() > 0) {
      call.rethrowException(
          "Unable to check if JNs are ready for formatting");
    }

    // If any of the loggers returned with a non-empty manifest, then
    // we should prompt for format.
    for (Boolean hasData : call.getResults().values()) {
      if (hasData) {
        return true;
      }
    }

    // Otherwise, none were formatted, we can safely format.
    return false;
  }

  /**
   * Run recovery/synchronization for a specific segment.
   * Postconditions:
   * <ul>
   * <li>This segment will be finalized on a majority
   * of nodes.</li>
   * <li>All nodes which contain the finalized segment will
   * agree on the length.</li>
   * </ul>
   *
   * @param segmentTxId the starting txid of the segment
   * @throws IOException
   */
  private void recoverUnclosedSegment(long segmentTxId) throws IOException {
    Preconditions.checkArgument(segmentTxId > 0);
    LOG.info("Beginning recovery of unclosed segment starting at txid " +
        segmentTxId);

    // Step 1. Prepare recovery
    QuorumCall<AsyncLogger,PrepareRecoveryResponseProto> prepare =
        loggers.prepareRecovery(segmentTxId);
    Map<AsyncLogger, PrepareRecoveryResponseProto> prepareResponses=
        loggers.waitForWriteQuorum(prepare, prepareRecoveryTimeoutMs,
            "prepareRecovery(" + segmentTxId + ")");
    LOG.info("Recovery prepare phase complete. Responses:\n" +
        QuorumCall.mapToString(prepareResponses));

    // Determine the logger who either:
    // a) Has already accepted a previous proposal that's higher than any
    //    other
    //
    //  OR, if no such logger exists:
    //
    // b) Has the longest log starting at this transaction ID

    // TODO: we should collect any "ties" and pass the URL for all of them
    // when syncing, so we can tolerate failure during recovery better.
    Entry<AsyncLogger, PrepareRecoveryResponseProto> bestEntry = Collections.max(
        prepareResponses.entrySet(), SegmentRecoveryComparator.INSTANCE);
    AsyncLogger bestLogger = bestEntry.getKey();
    PrepareRecoveryResponseProto bestResponse = bestEntry.getValue();

    // Log the above decision, check invariants.
    if (bestResponse.hasAcceptedInEpoch()) {
      LOG.info("Using already-accepted recovery for segment " +
          "starting at txid " + segmentTxId + ": " +
          bestEntry);
    } else if (bestResponse.hasSegmentState()) {
      LOG.info("Using longest log: " + bestEntry);
    } else {
      // None of the responses to prepareRecovery() had a segment at the given
      // txid. This can happen for example in the following situation:
      // - 3 JNs: JN1, JN2, JN3
      // - writer starts segment 101 on JN1, then crashes before
      //   writing to JN2 and JN3
      // - during newEpoch(), we saw the segment on JN1 and decide to
      //   recover segment 101
      // - before prepare(), JN1 crashes, and we only talk to JN2 and JN3,
      //   neither of which has any entry for this log.
      // In this case, it is allowed to do nothing for recovery, since the
      // segment wasn't started on a quorum of nodes.

      // Sanity check: we should only get here if none of the responses had
      // a log. This should be a postcondition of the recovery comparator,
      // but a bug in the comparator might cause us to get here.
      for (PrepareRecoveryResponseProto resp : prepareResponses.values()) {
        assert !resp.hasSegmentState() :
          "One of the loggers had a response, but no best logger " +
          "was found.";
      }

      LOG.info("None of the responders had a log to recover: " +
          QuorumCall.mapToString(prepareResponses));
      return;
    }

    SegmentStateProto logToSync = bestResponse.getSegmentState();
    assert segmentTxId == logToSync.getStartTxId();

    // Sanity check: none of the loggers should be aware of a higher
    // txid than the txid we intend to truncate to
    for (Map.Entry<AsyncLogger, PrepareRecoveryResponseProto> e :
         prepareResponses.entrySet()) {
      AsyncLogger logger = e.getKey();
      PrepareRecoveryResponseProto resp = e.getValue();

      if (resp.hasLastCommittedTxId() &&
          resp.getLastCommittedTxId() > logToSync.getEndTxId()) {
        throw new AssertionError("Decided to synchronize log to " + logToSync +
            " but logger " + logger + " had seen txid " +
            resp.getLastCommittedTxId() + " committed");
      }
    }

    URL syncFromUrl = bestLogger.buildURLToFetchLogs(segmentTxId);

    QuorumCall<AsyncLogger,Void> accept = loggers.acceptRecovery(logToSync, syncFromUrl);
    loggers.waitForWriteQuorum(accept, acceptRecoveryTimeoutMs,
        "acceptRecovery(" + TextFormat.shortDebugString(logToSync) + ")");

    // If one of the loggers above missed the synchronization step above, but
    // we send a finalize() here, that's OK. It validates the log before
    // finalizing. Hence, even if it is not "in sync", it won't incorrectly
    // finalize.
    QuorumCall<AsyncLogger, Void> finalize =
        loggers.finalizeLogSegment(logToSync.getStartTxId(), logToSync.getEndTxId());
    loggers.waitForWriteQuorum(finalize, finalizeSegmentTimeoutMs,
        String.format("finalizeLogSegment(%s-%s)",
            logToSync.getStartTxId(),
            logToSync.getEndTxId()));
  }

  static List<AsyncLogger> createLoggers(Configuration conf,
      URI uri, NamespaceInfo nsInfo, AsyncLogger.Factory factory)
          throws IOException {
    List<AsyncLogger> ret = Lists.newArrayList();
    List<InetSocketAddress> addrs = Util.getAddressesList(uri);
    if (addrs.size() % 2 == 0) {
      LOG.warn("Quorum journal URI '" + uri + "' has an even number " +
          "of Journal Nodes specified. This is not recommended!");
    }
    String jid = parseJournalId(uri);
    for (InetSocketAddress addr : addrs) {
      ret.add(factory.createLogger(conf, nsInfo, jid, addr));
    }
    return ret;
  }

  @Override
  public EditLogOutputStream startLogSegment(long txId, int layoutVersion)
      throws IOException {
    Preconditions.checkState(isActiveWriter,
        "must recover segments before starting a new one");
    QuorumCall<AsyncLogger, Void> q = loggers.startLogSegment(txId,
        layoutVersion);
    loggers.waitForWriteQuorum(q, startSegmentTimeoutMs,
        "startLogSegment(" + txId + ")");
    boolean updateCommittedTxId = conf.getBoolean(
        DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_KEY,
        DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_DEFAULT);
    return new QuorumOutputStream(loggers, txId, outputBufferCapacity,
        writeTxnsTimeoutMs, updateCommittedTxId);
  }

  @Override
  public void finalizeLogSegment(long firstTxId, long lastTxId)
      throws IOException {
    QuorumCall<AsyncLogger,Void> q = loggers.finalizeLogSegment(
        firstTxId, lastTxId);
    loggers.waitForWriteQuorum(q, finalizeSegmentTimeoutMs,
        String.format("finalizeLogSegment(%s-%s)", firstTxId, lastTxId));
  }

  @Override
  public void setOutputBufferCapacity(int size) {
    outputBufferCapacity = size;
  }

  @Override
  public void purgeLogsOlderThan(long minTxIdToKeep) throws IOException {
    // This purges asynchronously -- there's no need to wait for a quorum
    // here, because it's always OK to fail.
    LOG.info("Purging remote journals older than txid " + minTxIdToKeep);
    loggers.purgeLogsOlderThan(minTxIdToKeep);
  }

  @Override
  public void recoverUnfinalizedSegments() throws IOException {
    Preconditions.checkState(!isActiveWriter, "already active writer");

    LOG.info("Starting recovery process for unclosed journal segments...");
    Map<AsyncLogger, NewEpochResponseProto> resps = createNewUniqueEpoch();
    LOG.info("Successfully started new epoch " + loggers.getEpoch());

    if (LOG.isDebugEnabled()) {
      LOG.debug("newEpoch(" + loggers.getEpoch() + ") responses:\n" +
        QuorumCall.mapToString(resps));
    }

    long mostRecentSegmentTxId = Long.MIN_VALUE;
    for (NewEpochResponseProto r : resps.values()) {
      if (r.hasLastSegmentTxId()) {
        mostRecentSegmentTxId = Math.max(mostRecentSegmentTxId,
            r.getLastSegmentTxId());
      }
    }

    // On a completely fresh system, none of the journals have any
    // segments, so there's nothing to recover.
    if (mostRecentSegmentTxId != Long.MIN_VALUE) {
      recoverUnclosedSegment(mostRecentSegmentTxId);
    }

    isActiveWriter = true;
  }

  @Override
  public void close() throws IOException {
    loggers.close();
  }

  public void selectInputStreams(Collection<EditLogInputStream> streams,
      long fromTxnId, boolean inProgressOk) throws IOException {
    selectInputStreams(streams, fromTxnId, inProgressOk, false);
  }

  @Override
  public void selectInputStreams(Collection<EditLogInputStream> streams,
      long fromTxnId, boolean inProgressOk,
      boolean onlyDurableTxns) throws IOException {
    QuorumCall<AsyncLogger, RemoteEditLogManifest> q =
        loggers.getEditLogManifest(fromTxnId, inProgressOk);
    Map<AsyncLogger, RemoteEditLogManifest> resps =
        loggers.waitForWriteQuorum(q, selectInputStreamsTimeoutMs,
            "selectInputStreams");

    LOG.debug("selectInputStream manifests:\n" +
        Joiner.on("\n").withKeyValueSeparator(": ").join(resps));

    final PriorityQueue<EditLogInputStream> allStreams =
        new PriorityQueue<EditLogInputStream>(64,
            JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR);
    for (Map.Entry<AsyncLogger, RemoteEditLogManifest> e : resps.entrySet()) {
      AsyncLogger logger = e.getKey();
      RemoteEditLogManifest manifest = e.getValue();
      long committedTxnId = manifest.getCommittedTxnId();

      for (RemoteEditLog remoteLog : manifest.getLogs()) {
        URL url = logger.buildURLToFetchLogs(remoteLog.getStartTxId());

        long endTxId = remoteLog.getEndTxId();

        // If it's bounded by durable Txns, endTxId could not be larger
        // than committedTxnId. This ensures the consistency.
        if (onlyDurableTxns && inProgressOk) {
          endTxId = Math.min(endTxId, committedTxnId);
        }

        EditLogInputStream elis = EditLogFileInputStream.fromUrl(
            connectionFactory, url, remoteLog.getStartTxId(),
            endTxId, remoteLog.isInProgress());
        allStreams.add(elis);
      }
    }
    JournalSet.chainAndMakeRedundantStreams(streams, allStreams, fromTxnId);
  }

  @Override
  public String toString() {
    return "QJM to " + loggers;
  }

  @VisibleForTesting
  AsyncLoggerSet getLoggerSetForTests() {
    return loggers;
  }

  @Override
  public void doPreUpgrade() throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doPreUpgrade();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doPreUpgrade");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not do pre-upgrade of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      throw new IOException("Interrupted waiting for doPreUpgrade() response");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doPreUpgrade() response");
    }
  }

  @Override
  public void doUpgrade(Storage storage) throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doUpgrade(storage);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doUpgrade");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not perform upgrade of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      throw new IOException("Interrupted waiting for doUpgrade() response");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doUpgrade() response");
    }
  }

  @Override
  public void doFinalize() throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doFinalize();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doFinalize");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not finalize one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      throw new IOException("Interrupted waiting for doFinalize() response");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doFinalize() response");
    }
  }

  @Override
  public boolean canRollBack(StorageInfo storage, StorageInfo prevStorage,
      int targetLayoutVersion) throws IOException {
    QuorumCall<AsyncLogger, Boolean> call = loggers.canRollBack(storage,
        prevStorage, targetLayoutVersion);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "lockSharedStorage");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not check if roll back possible for"
            + " one or more JournalNodes");
      }

      // Either they all return the same thing or this call fails, so we can
      // just return the first result.
      try {
        DFSUtil.assertAllResultsEqual(call.getResults().values());
      } catch (AssertionError ae) {
        throw new IOException("Results differed for canRollBack", ae);
      }
      for (Boolean result : call.getResults().values()) {
        return result;
      }
    } catch (InterruptedException e) {
      throw new IOException("Interrupted waiting for lockSharedStorage() " +
          "response");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for lockSharedStorage() " +
          "response");
    }

    throw new AssertionError("Unreachable code.");
  }

  @Override
  public void doRollback() throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doRollback();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doRollback");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not perform rollback of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      // Bug fix: these two messages previously said "doFinalize()" — a
      // copy/paste error that misattributed the failing operation.
      throw new IOException("Interrupted waiting for doRollback() response");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doRollback() response");
    }
  }

  @Override
  public void discardSegments(long startTxId) throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.discardSegments(startTxId);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0,
          timeoutMs, "discardSegments");
      if (call.countExceptions() > 0) {
        call.rethrowException(
            "Could not perform discardSegments of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      throw new IOException(
          "Interrupted waiting for discardSegments() response");
    } catch (TimeoutException e) {
      throw new IOException(
          "Timed out waiting for discardSegments() response");
    }
  }

  @Override
  public long getJournalCTime() throws IOException {
    QuorumCall<AsyncLogger, Long> call = loggers.getJournalCTime();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0,
          timeoutMs, "getJournalCTime");

      if (call.countExceptions() > 0) {
        // Bug fix: message previously read "Could not journal CTime for one
        // more JournalNodes" (missing "get" and "or").
        call.rethrowException("Could not get journal CTime for one " +
            "or more JournalNodes");
      }

      // Either they all return the same thing or this call fails, so we can
      // just return the first result.
      try {
        DFSUtil.assertAllResultsEqual(call.getResults().values());
      } catch (AssertionError ae) {
        throw new IOException("Results differed for getJournalCTime", ae);
      }
      for (Long result : call.getResults().values()) {
        return result;
      }
    } catch (InterruptedException e) {
      throw new IOException("Interrupted waiting for getJournalCTime() " +
          "response");
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for getJournalCTime() " +
          "response");
    }

    throw new AssertionError("Unreachable code.");
  }
}
package de.pfabulist.lindwurm.niotest.tests; import de.pfabulist.kleinod.collection.Ref; import de.pfabulist.kleinod.collection.Sets; import de.pfabulist.lindwurm.niotest.tests.topics.Closable; import de.pfabulist.lindwurm.niotest.tests.topics.Delete; import de.pfabulist.lindwurm.niotest.tests.topics.SlowTest; import de.pfabulist.lindwurm.niotest.tests.topics.Watchable; import de.pfabulist.lindwurm.niotest.tests.topics.Writable; import org.junit.Test; import org.junit.experimental.categories.Category; import java.io.IOException; import java.nio.channels.SeekableByteChannel; import java.nio.file.ClosedWatchServiceException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.WatchEvent; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.util.List; import java.util.concurrent.TimeUnit; import static de.pfabulist.lindwurm.niotest.matcher.WatchEventMatcher.isEvent; import static de.pfabulist.lindwurm.niotest.matcher.WatchKeyMatcher.correctKey; import static java.nio.file.StandardOpenOption.WRITE; import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE; import static java.nio.file.StandardWatchEventKinds.ENTRY_DELETE; import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsCollectionContaining.hasItems; /** * ** BEGIN LICENSE BLOCK ***** * BSD License (2 clause) * Copyright (c) 2006 - 2015, Stephan Pfab * All rights reserved. 
 * <p>
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 * <p>
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL Stephan Pfab BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 * **** END LICENSE BLOCK ****
 */
// Test suite exercising java.nio.file.WatchService behavior of the file system
// under test: delete/create/modify events, key reset/cancel semantics, blocking
// take(), poll() with timeout, and key invalidation when the watched dir goes away.
// Most tests are timing-sensitive (they sleep for a configurable "watchDelay"),
// hence the SlowTest category and generous @Test timeouts.
public abstract class Tests11Watcher extends Tests10PathWithContent {

    // Key into the FSDescription for the delay (ms) to allow the watcher to fire.
    public static final String WATCH_DELAY = "watchDelay";

    public Tests11Watcher( FSDescription capa ) {
        super( capa );
    }

    // Deleting a watched file produces an ENTRY_DELETE key (polled with delay).
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class, Delete.class } )
    public void testWatchADelete() throws Exception {
        Path toBeDeleted = watchedFileA();
        watcherSetup( ENTRY_DELETE );
        Files.delete( toBeDeleted );

        Thread.sleep( 4000 ); // todo why does this matter?

        assertThat( watchServicePoll(), correctKey( toBeDeleted, ENTRY_DELETE ) );
    }

    // Same as above but retrieves the key via the blocking take().
    @Test( timeout = 20000 )
    @Category( { SlowTest.class, Watchable.class, Writable.class, Delete.class } )
    public void testWatchADeleteTake() throws Exception {
        Path toBeDeleted = watchedFileA();
        watcherSetup( ENTRY_DELETE );
        Files.delete( toBeDeleted );

        assertThat( getWatchService().take(), correctKey( toBeDeleted, ENTRY_DELETE ) );
    }

    // Same delete event, retrieved via poll(timeout).
    @Test( timeout = 30000 )
    @Category( { SlowTest.class, Watchable.class, Writable.class, Delete.class } )
    public void testWatchADeletePollWithTimeOut() throws Exception {
        Path toBeDeleted = watchedFileA();
        watcherSetup( ENTRY_DELETE );
        Files.delete( toBeDeleted );

        assertThat( waitForWatchService().poll( 1000, TimeUnit.MILLISECONDS ), correctKey( toBeDeleted, ENTRY_DELETE ) );
    }

    // poll(timeout) on a watcher with no events must return (not hang).
    @Test( timeout = 30000 )
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchPollWithTimeoutTimesOut() throws Exception {
        watcherSetup( ENTRY_DELETE );
        // no events
        getWatchService().poll( 1000, TimeUnit.MILLISECONDS );
        assertThat( "did we reach that?", not( is( "no" ) ) );
    }

    // Registering a path on a closed WatchService must throw.
    @Test( expected = ClosedWatchServiceException.class )
    @Category( { Watchable.class, Writable.class } )
    public void testRegisterOnClosedWatchService() throws IOException {
        WatchService watcher = FS.newWatchService();
        watcher.close();
        dirTAB().register( watcher, ENTRY_CREATE );
    }

    // Registering on a WatchService of a closed file system must throw.
    @Test( expected = ClosedWatchServiceException.class )
    @Category( { Watchable.class, Writable.class, Closable.class } )
    public void testRegisterWatchServiceOfClosedFS() throws Exception {
        getClosedDirB().register( getClosedFSWatchService(), ENTRY_DELETE );
    }

    // Moving a file out of the watched dir counts as a delete there.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchADeleteFromAMove() throws Exception {
        Path toBeMoved = watchedFileA();
        watcherSetup( ENTRY_DELETE );
        Files.move( toBeMoved, absTA() );

        assertThat( watchServicePoll(), correctKey( toBeMoved, ENTRY_DELETE ) );
    }

    // Overwriting a watched file produces ENTRY_MODIFY.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchAModify() throws Exception {
        Path toBeModified = watchedFileA();
        watcherSetup( ENTRY_MODIFY );
        Files.write( toBeModified, CONTENT_OTHER );

        assertThat( watchServicePoll(), correctKey( toBeModified, ENTRY_MODIFY ) );
    }

//
//    // todo yes or no ?
////    @Test
////    public void testKidsChangesOfADirIsNotAModify() throws Exception {
////        assumeThat(capabilities.supportsWatchService(), is(true));
////
////        Path toBeCreated = watchedFileAB();
////        Path dir = createPathWAd();
////        watcherSetup(ENTRY_MODIFY);
////        Files.write(toBeCreated, CONTENT);
////
////        assertThat( waitForWatchService().poll(), nullValue() );
////    }
//
//    // todo how long to wait?
    // Reading a watched file must not generate a MODIFY event.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchReadIsNotModify() throws Exception {
        Path toBeModified = watchedFileA();
        watcherSetup( ENTRY_MODIFY );
        Files.readAllBytes( toBeModified );

        assertThat( watchServicePoll(), nullValue() );
    }

    // A modify while only CREATE is registered yields no key.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchForOtherEventCatchesNothing() throws Exception {
        Path toBeModified = watchedFileA();
        watcherSetup( ENTRY_CREATE );
        Files.write( toBeModified, CONTENT_OTHER );

        assertThat( watchServicePoll(), nullValue() );
    }

    // Activity in a different (unwatched) directory yields no key.
    @Test
    @Category( { SlowTest.class, Watchable.class } )
    public void testWatchInOtherDirCatchesNothing() throws Exception {
        watcherSetup( ENTRY_CREATE );
        fileTAB();

        assertThat( watchServicePoll(), nullValue() );
    }

    // Without reset(), a taken key does not queue further events.
    @Test( timeout = 30000 )
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testNotResetWatchKeyDoesNotQue() throws Exception {
        watcherSetup( ENTRY_CREATE );
        watchedFileA();
        WatchKey key = waitForWatchService().take();
        key.pollEvents();
        Files.write( watchedAbsB(), CONTENT );

        assertThat( watchServicePoll(), nullValue() );
    }

    // After reset(), subsequent events are queued again.
    @Test( timeout = 30000 )
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testResetWatchKeyDoesQue() throws Exception {
        watcherSetup( ENTRY_CREATE );
        watchedFileA();
        WatchKey key = waitForWatchService().take();
        key.pollEvents();
        key.reset();
        Path file = Files.write( watchedAbsB(), CONTENT );

        assertThat( getWatchService().take(), correctKey( file, ENTRY_CREATE ) );
    }

    // Two different events in one dir are delivered on a single key.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchSeveralEventsInOneDir() throws Exception {
        Path toBeModified = watchedFileA();
        Path toBeCreated = watchedAbsB();
        watcherSetup( ENTRY_CREATE, ENTRY_MODIFY );
        Files.write( toBeModified, CONTENT_OTHER );
        Files.write( toBeCreated, CONTENT_OTHER );

        WatchKey key = watchServicePoll();
        assertThat( key, notNullValue() );
        List<WatchEvent<?>> watchEvents = key.pollEvents();
        assertThat( watchEvents, hasItems( isEvent( toBeModified, ENTRY_MODIFY ), isEvent( toBeCreated, ENTRY_CREATE ) ) );
    }

//    @Test
//    public void testWatchSeveralEventsMultipleDirs() throws Exception {
//        Path toBeCreated = watchedFileB();
//        Path otherDir = createPathWAd();
//        Path toBeCreated2 = watchedFileAB();
//        watcherSetup(ENTRY_CREATE);
//        otherDir.register(watchService, ENTRY_CREATE);
//
//        Files.write(toBeCreated, CONTENT_OTHER);
//        Thread.sleep(2000);
//        Files.write(toBeCreated2, CONTENT_OTHER);
//
//        assertThat( waitForWatchService().poll(), anyOf(WatchKeyMatcher.correctKey(toBeCreated, ENTRY_CREATE), WatchKeyMatcher.correctKey(toBeCreated2, ENTRY_CREATE)));
//        assertThat( getWatchService().poll(), anyOf(WatchKeyMatcher.correctKey(toBeCreated, ENTRY_CREATE), WatchKeyMatcher.correctKey(toBeCreated2, ENTRY_CREATE)));
//    }

    // Two independent WatchServices on the same dir each receive the event.
    @Test( timeout = 20000 )
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchACreateBy2WatchServies() throws Exception {
        Path toBeCreated = watchedAbsA();
        watcherSetup( ENTRY_CREATE );
        WatchService watchService2 = FS.newWatchService();
        watchedDir().register( watchService2, ENTRY_CREATE );
        Files.write( toBeCreated, CONTENT );

        assertThat( getWatchService().take(), correctKey( toBeCreated, ENTRY_CREATE ) );
        assertThat( watchService2.take(), correctKey( toBeCreated, ENTRY_CREATE ) );
    }

    // Creating a file in the watched dir produces ENTRY_CREATE.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchACreate() throws Exception {
        Path toBeCreated = watchedAbsA();
        watcherSetup( ENTRY_CREATE );
        Files.write( toBeCreated, CONTENT );

        assertThat( watchServicePoll(), correctKey( toBeCreated, ENTRY_CREATE ) );
    }

    // Creating a directory also produces ENTRY_CREATE.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class, Writable.class } )
    public void testWatchACreateDir() throws Exception {
        Path toBeCreated = watchedAbsA();
        watcherSetup( ENTRY_CREATE );
        Files.createDirectory( toBeCreated );

        assertThat( watchServicePoll(), correctKey( toBeCreated, ENTRY_CREATE ) );
    }

    // A copy into the watched dir counts as a create.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class, Writable.class } )
    public void testWatchACreateFromCopy() throws Exception {
        watcherSetup( ENTRY_CREATE );
        Files.copy( fileTB(), watchedAbsA() );

        assertThat( watchServicePoll(), correctKey( watchedAbsA(), ENTRY_CREATE ) );
    }

    // A move into the watched dir counts as a create.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class, Writable.class } )
    public void testWatchACreateFromMove() throws Exception {
        watcherSetup( ENTRY_CREATE );
        Files.move( fileTAB(), watchedAbsA() );

        assertThat( watchServicePoll(), correctKey( watchedAbsA(), ENTRY_CREATE ) );
    }

////    @Test
////    public void testCopyDirReplaceExistingOverwritesFile() throws Exception {
////        assumeThat( capabilities.supportsWatchService(), is(true));
////
////        final ConcurrentLinkedDeque<Path> que = new ConcurrentLinkedDeque<>();
////
////        Path tgt = getPathPA();
////        Files.write( tgt, CONTENT, standardOpen );
////        Path srcFile = getPathPB();
////        Files.createDirectories( srcFile );
////
////        new Thread(new Watcher(tgt.getParent(), que, ENTRY_MODIFY )).start();
////        Thread.sleep(2000);
////
////        Files.copy( srcFile, tgt, StandardCopyOption.REPLACE_EXISTING );
////
////        Thread.sleep(getWatcherSleep());
////        assertThat(que.size(), is(1));
////
////    }
//
    // A canceled key must not deliver events.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testCanceledWatchKeyDoesNotWatch() throws Exception {
        WatchKey key = watcherSetup( ENTRY_CREATE );
        key.cancel();
        watchedFileA();

        assertThat( watchServicePoll(), nullValue() );
    }

    // Truncating a watched file via a channel produces ENTRY_MODIFY.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchATruncate() throws Exception {
        Path file = watchedFileA();
        watcherSetup( ENTRY_MODIFY );
        try( SeekableByteChannel channel = FS.provider().newByteChannel( file, Sets.asSet( WRITE ) ) ) {
            channel.truncate( 2 );
        }

        assertThat( watchServicePoll(), correctKey( file, ENTRY_MODIFY ) );
    }

    // take() on an event-less watcher blocks (the helper thread must not finish).
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchServiceTakeBlocks() throws Exception {
        Path dir = dirTA();
        final WatchService watcher = dir.getFileSystem().newWatchService();
        dir.register( watcher, ENTRY_CREATE );
        final Ref<Boolean> interrupted = Ref.valueOf( false );

        new Thread( () -> {
            try {
                watcher.take();
            } catch( InterruptedException | ClosedWatchServiceException e ) {
            } finally {
                interrupted.set( true );
            }
        } ).start();

        Thread.sleep( 1000 );
        assertThat( interrupted.get(), is( false ) );
    }

    // Closing the service unblocks a thread stuck in take() with ClosedWatchServiceException.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testCloseAWatchServiceReleasesBlockedTreads() throws Exception {
        Path dir = dirTB();
        final WatchService watcher = dir.getFileSystem().newWatchService();
        dir.register( watcher, ENTRY_CREATE );
        final Ref<Boolean> interrupted = Ref.valueOf( false );

        new Thread( () -> {
            try {
                watcher.take();
            } catch( InterruptedException e ) {
            } catch( ClosedWatchServiceException e ) {
                interrupted.set( true );
            }
        } ).start();

        Thread.sleep( 1000 );
        watcher.close();
        Thread.sleep( 100 );
        assertThat( interrupted.get(), is( true ) );
    }

    // Closing the service invalidates keys registered on it.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testCloseAWatchServiceCancelsKeys() throws Exception {
        Path dir = dirTA();
        final WatchService watcher = dir.getFileSystem().newWatchService();
        WatchKey key = dir.register( watcher, ENTRY_CREATE );
        watcher.close();
        waitForWatchService();

        assertThat( key.isValid(), is( false ) );
    }

    // poll() without events returns null immediately.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testPollAnEmptyWatchServiceReturnsNull() throws Exception {
        Path dir = dirTA();
        final WatchService watcher = dir.getFileSystem().newWatchService();
        dir.register( watcher, ENTRY_CREATE );

        assertThat( watcher.poll(), nullValue() );
    }

    // pollEvents() drains the key's queue; a second call returns nothing.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchKeyPollEventsEmptiesQue() throws Exception {
        Path dir = dirTA();
        Path toBeDeleted = dirTAB();
        final WatchService watcher = dir.getFileSystem().newWatchService();
        dir.register( watcher, ENTRY_DELETE );
        Thread.sleep( 1000 );
        Files.delete( toBeDeleted );
        waitForWatchService();

        WatchKey watchKey = watcher.poll();
        watchKey.pollEvents();
        assertThat( watchKey.pollEvents(), empty() );
    }

    // Deleting the watched directory itself invalidates the key.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testDeleteWatchedDirCancelsKeys() throws Exception {
        WatchKey key = watcherSetup( ENTRY_CREATE );
        Files.delete( watchedDir() );
        watchServicePoll();

        assertThat( key.isValid(), is( false ) );
    }

    // Moving the watched directory away invalidates the key.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testMovedWatchedDirCancelsKeys() throws Exception {
        Path dir = dirTA();
        final WatchService watcher = FS.newWatchService();
        WatchKey key = dir.register( watcher, ENTRY_CREATE );
        Files.move( dir, absTB() );
        waitForWatchService();

        assertThat( key.isValid(), is( false ) );
    }

    // Two modifies on one file: at least one MODIFY event must be present.
    @Test
    @Category( { SlowTest.class, Watchable.class, Writable.class } )
    public void testWatchTwoModifiesOneKey() throws Exception {
        Path toBeModified = watchedFileA();
        watcherSetup( ENTRY_MODIFY );
        Files.write( toBeModified, CONTENT_OTHER );
        Files.write( toBeModified, CONTENT );

        WatchKey key = watchServicePoll();
        assertThat( key, notNullValue() );
        List<WatchEvent<?>> watchEvents = key.pollEvents();
        // spec says one may be swallowed
        assertThat( watchEvents, hasItems( isEvent( toBeModified, ENTRY_MODIFY ) ) ); //, isEvent(toBeModified, ENTRY_MODIFY)));
    }

//
//    /*
//     * ------------------------------------------------------------------------------------
//     */
//
//    long watcherSleep = 1000;
//
//    public long getWatcherSleep() {
//        return watcherSleep;
//    }
//
////    public void setWatcherSleep(long watcherSleep) {
////        this.watcherSleep = watcherSleep;
////    }
//

    // Creates (writes) file "A" inside the watched directory and returns it.
    public Path watchedFileA() throws IOException {
        Path ret = watchedAbsA();
        Files.write( ret, CONTENT, standardOpen );
        return ret;
    }

    // Path of file "A" inside the watched dir (not created).
    public Path watchedAbsA() throws IOException {
        return watchedDir().resolve( nameA() );
    }

    // Path of file "B" inside the watched dir (not created).
    public Path watchedAbsB() throws IOException {
        return watchedDir().resolve( nameB() );
    }

//    public Path watchedFileB() throws IOException {
//        Path ret = watchedDir().resolve( nameB());
//        Files.write(ret, CONTENT, standardOpen );
//        return ret;
//    }

//    public Path createPathWBf() throws IOException {
//        Path ret = watchedFileB();
//        Files.write(ret, CONTENT, standardOpen );
//        return ret;
//    }
//
//    public Path watchedFileAB() throws IOException {
//        return watchedFileA().resolve(nameStr[1]);
//    }
//
//    public Path createPathWAd() throws IOException {
//        Path ret = watchedFileA();
//        Files.createDirectories(ret);
//        return ret;
//    }
//
//    public Path watchedFileA() throws IOException {
//        return watchedDir().resolve(nameStr[0]);
//    }
//
//    public Path watchedFileB() throws IOException {
//        return watchedDir().resolve(nameStr[1]);
//    }

    // The directory being watched ("watched" under the test root); created on demand.
    public Path watchedDir() throws IOException {
        Path ret = absT().resolve( "watched" );
        Files.createDirectories( ret );
        return ret;
    }

    public WatchService getWatchService() {
        return watchService;
    }

    // Sleeps the configured delay before handing out the service, giving the
    // watcher time to observe prior file operations.
    public WatchService waitForWatchService() throws InterruptedException {
        Thread.sleep( getDelay() );
        return watchService;
    }

    // Delay in ms from the FS description; values below 10 fall back to 2000.
    private int getDelay() {
        int delay = description.getInt( WATCH_DELAY );
        if( delay < 10 ) {
            return 2000;
        }
        return delay;
    }

    // poll() bounded by the configured delay.
    public WatchKey watchServicePoll() throws InterruptedException {
        return watchService.poll( getDelay(), TimeUnit.MILLISECONDS );
    }

    // Creates a fresh WatchService and registers the watched dir for the given kinds.
    WatchKey watcherSetup( WatchEvent.Kind<Path>... kinds ) throws IOException {
        watchService = FS.newWatchService();
        return watchedDir().register( watchService, kinds );
    }

    // The service created by the most recent watcherSetup() call.
    private WatchService watchService;
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.reporting; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.commons.collections4.Predicate; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.action.Action; import org.apache.nifi.authorization.RequestAction; import org.apache.nifi.authorization.resource.Authorizable; import org.apache.nifi.authorization.user.NiFiUser; import org.apache.nifi.components.validation.ValidationStatus; import org.apache.nifi.connectable.Connectable; import org.apache.nifi.connectable.ConnectableType; import org.apache.nifi.connectable.Connection; import org.apache.nifi.connectable.Funnel; import org.apache.nifi.connectable.Port; import org.apache.nifi.controller.FlowController; import org.apache.nifi.controller.ProcessScheduler; import org.apache.nifi.controller.ProcessorNode; import org.apache.nifi.controller.ScheduledState; import org.apache.nifi.controller.queue.QueueSize; import org.apache.nifi.controller.repository.FlowFileEvent; import org.apache.nifi.controller.repository.FlowFileEventRepository; import 
org.apache.nifi.controller.repository.RepositoryStatusReport;
import org.apache.nifi.controller.repository.metrics.EmptyFlowFileEvent;
import org.apache.nifi.controller.status.ConnectionStatus;
import org.apache.nifi.controller.status.PortStatus;
import org.apache.nifi.controller.status.ProcessGroupStatus;
import org.apache.nifi.controller.status.ProcessorStatus;
import org.apache.nifi.controller.status.RemoteProcessGroupStatus;
import org.apache.nifi.controller.status.RunStatus;
import org.apache.nifi.controller.status.TransmissionStatus;
import org.apache.nifi.controller.status.analytics.ConnectionStatusPredictions;
import org.apache.nifi.controller.status.analytics.StatusAnalytics;
import org.apache.nifi.controller.status.analytics.StatusAnalyticsEngine;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.groups.RemoteProcessGroup;
import org.apache.nifi.history.History;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.provenance.ProvenanceEventRecord;
import org.apache.nifi.provenance.ProvenanceRepository;
import org.apache.nifi.registry.flow.VersionControlInformation;
import org.apache.nifi.registry.flow.VersionedFlowState;
import org.apache.nifi.registry.flow.VersionedFlowStatus;
import org.apache.nifi.remote.PublicPort;
import org.apache.nifi.remote.RemoteGroupPort;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Provides access to component status (process groups, processors,
 * connections, remote groups), provenance events, and flow-configuration
 * history, backed by the {@link FlowController} and the
 * {@link FlowFileEventRepository}. User-aware variants filter results through
 * the controller's authorizer.
 */
public class StandardEventAccess implements UserAwareEventAccess {
    private static final Logger logger = LoggerFactory.getLogger(StandardEventAccess.class);

    // Source of per-component FlowFile transfer metrics.
    private final FlowFileEventRepository flowFileEventRepository;
    // Backing controller: flow structure, authorizer, provenance, audit service.
    private final FlowController flowController;
    // May be null-capable depending on controller configuration — callers below
    // null-check it before requesting predictions. TODO confirm.
    private final StatusAnalyticsEngine statusAnalyticsEngine;

    /**
     * @param flowController the controller this event access reads from
     * @param flowFileEventRepository repository of FlowFile transfer events
     */
    public StandardEventAccess(final FlowController flowController, final FlowFileEventRepository flowFileEventRepository) {
        this.flowController = flowController;
        this.flowFileEventRepository = flowFileEventRepository;
        this.statusAnalyticsEngine = flowController.getStatusAnalyticsEngine();
    }
    /**
     * Returns the status of all components in the controller. This request is
     * not in the context of a user so the results will be unfiltered.
     *
     * @return the component status
     */
    @Override
    public ProcessGroupStatus getControllerStatus() {
        return getGroupStatus(flowController.getFlowManager().getRootGroupId());
    }

    /**
     * Returns the status of all components in the specified group. This request
     * is not in the context of a user so the results will be unfiltered.
     *
     * @param groupId group id
     * @return the component status
     */
    @Override
    public ProcessGroupStatus getGroupStatus(final String groupId) {
        final RepositoryStatusReport repoStatusReport = generateRepositoryStatusReport();
        return getGroupStatus(groupId, repoStatusReport);
    }

    /**
     * Returns the status for the components in the specified group with the
     * specified report. This request is not in the context of a user so the
     * results will be unfiltered.
     *
     * @param groupId group id
     * @param statusReport report
     * @return the component status
     */
    public ProcessGroupStatus getGroupStatus(final String groupId, final RepositoryStatusReport statusReport) {
        final ProcessGroup group = flowController.getFlowManager().getGroup(groupId);

        // this was invoked with no user context so the results will be unfiltered... necessary for aggregating status history
        return getGroupStatus(group, statusReport, authorizable -> true, Integer.MAX_VALUE, 1);
    }

    /**
     * Returns up to {@code maxRecords} provenance events starting at
     * {@code firstEventId}, copied into a new list.
     */
    @Override
    public List<ProvenanceEventRecord> getProvenanceEvents(final long firstEventId, final int maxRecords) throws IOException {
        return new ArrayList<>(getProvenanceRepository().getEvents(firstEventId, maxRecords));
    }

    /**
     * Returns up to {@code maxActions} flow configuration actions from the
     * audit service, starting at {@code firstActionId}.
     */
    @Override
    public List<Action> getFlowChanges(final int firstActionId, final int maxActions) {
        final History history = flowController.getAuditService().getActions(firstActionId, maxActions);
        return new ArrayList<>(history.getActions());
    }

    @Override
    public ProvenanceRepository getProvenanceRepository() {
        return flowController.getProvenanceRepository();
    }

    // Snapshot of FlowFile transfer events up to the current wall-clock time.
    private RepositoryStatusReport generateRepositoryStatusReport() {
        return flowFileEventRepository.reportTransferEvents(System.currentTimeMillis());
    }

    /**
     * Returns the status of a single processor, filtered by what the given
     * user is authorized to read, or {@code null} if the processor is unknown.
     */
    @Override
    public ProcessorStatus getProcessorStatus(final String processorId, final NiFiUser user) {
        final ProcessorNode procNode = flowController.getFlowManager().getProcessorNode(processorId);
        if (procNode == null) {
            return null;
        }

        // Fall back to an empty event so downstream math sees zeros instead of null.
        FlowFileEvent flowFileEvent = flowFileEventRepository.reportTransferEvents(processorId, System.currentTimeMillis());
        if (flowFileEvent == null) {
            flowFileEvent = EmptyFlowFileEvent.INSTANCE;
        }

        final Predicate<Authorizable> authorizer = authorizable -> authorizable.isAuthorized(flowController.getAuthorizer(), RequestAction.READ, user);
        return getProcessorStatus(flowFileEvent, procNode, authorizer);
    }

    /**
     * Returns the status for components in the specified group. This request is
     * made by the specified user so the results will be filtered accordingly.
     *
     * @param groupId group id
     * @param user user making request
     * @param recursiveStatusDepth the number of levels deep to recurse while still including child component statuses
     * @return the component status
     */
    public ProcessGroupStatus getGroupStatus(final String groupId, final NiFiUser user, final int recursiveStatusDepth) {
        final RepositoryStatusReport repoStatusReport = generateRepositoryStatusReport();
        return getGroupStatus(groupId, repoStatusReport, user, recursiveStatusDepth);
    }

    /**
     * Returns the status for the components in the specified group with the
     * specified report. This request is made by the specified user so the
     * results will be filtered accordingly.
     *
     * @param groupId group id
     * @param statusReport report
     * @param user user making request
     * @return the component status
     */
    public ProcessGroupStatus getGroupStatus(final String groupId, final RepositoryStatusReport statusReport, final NiFiUser user) {
        final ProcessGroup group = flowController.getFlowManager().getGroup(groupId);

        // on demand status request for a specific user... require authorization per component and filter results as appropriate
        return getGroupStatus(group, statusReport, authorizable -> authorizable.isAuthorized(flowController.getAuthorizer(), RequestAction.READ, user), Integer.MAX_VALUE, 1);
    }

    /**
     * Returns the status for components in the specified group. This request is
     * made by the specified user so the results will be filtered accordingly.
     *
     * @param groupId group id
     * @param user user making request
     * @return the component status
     */
    public ProcessGroupStatus getGroupStatus(final String groupId, final NiFiUser user) {
        final RepositoryStatusReport repoStatusReport = generateRepositoryStatusReport();
        return getGroupStatus(groupId, repoStatusReport, user);
    }

    /**
     * Returns the status for the components in the specified group with the
     * specified report. This request is made by the specified user so the
     * results will be filtered accordingly.
* * @param groupId group id * @param statusReport report * @param user user making request * @param recursiveStatusDepth the number of levels deep we should recurse and still include the the processors' statuses, the groups' statuses, etc. in the returned ProcessGroupStatus * @return the component status */ public ProcessGroupStatus getGroupStatus(final String groupId, final RepositoryStatusReport statusReport, final NiFiUser user, final int recursiveStatusDepth) { final ProcessGroup group = flowController.getFlowManager().getGroup(groupId); // on demand status request for a specific user... require authorization per component and filter results as appropriate return getGroupStatus(group, statusReport, authorizable -> authorizable.isAuthorized(flowController.getAuthorizer(), RequestAction.READ, user), recursiveStatusDepth, 1); } /** * Returns the status for the components in the specified group with the * specified report. The results will be filtered by executing the specified * predicate. * * @param group group id * @param statusReport report * @param isAuthorized is authorized check * @param recursiveStatusDepth the number of levels deep we should recurse and still include the the processors' statuses, the groups' statuses, etc. in the returned ProcessGroupStatus * @param currentDepth the current number of levels deep that we have recursed * @return the component status */ ProcessGroupStatus getGroupStatus(final ProcessGroup group, final RepositoryStatusReport statusReport, final Predicate<Authorizable> isAuthorized, final int recursiveStatusDepth, final int currentDepth) { if (group == null) { return null; } final ProcessScheduler processScheduler = flowController.getProcessScheduler(); final ProcessGroupStatus status = new ProcessGroupStatus(); status.setId(group.getIdentifier()); status.setName(isAuthorized.evaluate(group) ? 
// --- remainder of getGroupStatus(...): aggregates processor, child-group, remote-group,
// --- connection, port and funnel statistics into this group's ProcessGroupStatus.
group.getName() : group.getIdentifier());
        int activeGroupThreads = 0;
        int terminatedGroupThreads = 0;
        long bytesRead = 0L;
        long bytesWritten = 0L;
        int queuedCount = 0;
        long queuedContentSize = 0L;
        int flowFilesIn = 0;
        long bytesIn = 0L;
        int flowFilesOut = 0;
        long bytesOut = 0L;
        int flowFilesReceived = 0;
        long bytesReceived = 0L;
        int flowFilesSent = 0;
        long bytesSent = 0L;
        int flowFilesTransferred = 0;
        long bytesTransferred = 0;

        // Child statuses are only materialized down to recursiveStatusDepth; below that depth we
        // still aggregate the numbers but do not attach the individual status objects.
        final boolean populateChildStatuses = currentDepth <= recursiveStatusDepth;

        // set status for processors
        final Collection<ProcessorStatus> processorStatusCollection = new ArrayList<>();
        status.setProcessorStatus(processorStatusCollection);
        for (final ProcessorNode procNode : group.getProcessors()) {
            final ProcessorStatus procStat = getProcessorStatus(statusReport, procNode, isAuthorized);
            if (populateChildStatuses) {
                processorStatusCollection.add(procStat);
            }
            activeGroupThreads += procStat.getActiveThreadCount();
            terminatedGroupThreads += procStat.getTerminatedThreadCount();
            bytesRead += procStat.getBytesRead();
            bytesWritten += procStat.getBytesWritten();
            flowFilesReceived += procStat.getFlowFilesReceived();
            bytesReceived += procStat.getBytesReceived();
            flowFilesSent += procStat.getFlowFilesSent();
            bytesSent += procStat.getBytesSent();
        }

        // set status for local child groups
        final Collection<ProcessGroupStatus> localChildGroupStatusCollection = new ArrayList<>();
        status.setProcessGroupStatus(localChildGroupStatusCollection);
        for (final ProcessGroup childGroup : group.getProcessGroups()) {
            final ProcessGroupStatus childGroupStatus;
            if (populateChildStatuses) {
                childGroupStatus = getGroupStatus(childGroup, statusReport, isAuthorized, recursiveStatusDepth, currentDepth + 1);
                localChildGroupStatusCollection.add(childGroupStatus);
            } else {
                // In this case, we don't want to include any of the recursive components' individual statuses. As a result, we can
                // avoid performing any sort of authorizations. Because we only care about the numbers that come back, we can just indicate
                // that the user is not authorized. This allows us to avoid the expense of both performing the authorization and calculating
                // things that we would otherwise need to calculate if the user were in fact authorized.
                childGroupStatus = getGroupStatus(childGroup, statusReport, authorizable -> false, recursiveStatusDepth, currentDepth + 1);
            }
            activeGroupThreads += childGroupStatus.getActiveThreadCount();
            terminatedGroupThreads += childGroupStatus.getTerminatedThreadCount();
            bytesRead += childGroupStatus.getBytesRead();
            bytesWritten += childGroupStatus.getBytesWritten();
            queuedCount += childGroupStatus.getQueuedCount();
            queuedContentSize += childGroupStatus.getQueuedContentSize();
            flowFilesReceived += childGroupStatus.getFlowFilesReceived();
            bytesReceived += childGroupStatus.getBytesReceived();
            flowFilesSent += childGroupStatus.getFlowFilesSent();
            bytesSent += childGroupStatus.getBytesSent();
            flowFilesTransferred += childGroupStatus.getFlowFilesTransferred();
            bytesTransferred += childGroupStatus.getBytesTransferred();
        }

        // set status for remote child groups
        final Collection<RemoteProcessGroupStatus> remoteProcessGroupStatusCollection = new ArrayList<>();
        status.setRemoteProcessGroupStatus(remoteProcessGroupStatusCollection);
        for (final RemoteProcessGroup remoteGroup : group.getRemoteProcessGroups()) {
            final RemoteProcessGroupStatus remoteStatus = createRemoteGroupStatus(remoteGroup, statusReport, isAuthorized);
            if (remoteStatus != null) {
                if (populateChildStatuses) {
                    remoteProcessGroupStatusCollection.add(remoteStatus);
                }
                flowFilesReceived += remoteStatus.getReceivedCount();
                bytesReceived += remoteStatus.getReceivedContentSize();
                flowFilesSent += remoteStatus.getSentCount();
                bytesSent += remoteStatus.getSentContentSize();
            }
        }

        // connection status
        final Collection<ConnectionStatus> connectionStatusCollection = new ArrayList<>();
        status.setConnectionStatus(connectionStatusCollection);

        // get the connection and remote port status
        for (final Connection conn : group.getConnections()) {
            final boolean isConnectionAuthorized = isAuthorized.evaluate(conn);
            final boolean isSourceAuthorized = isAuthorized.evaluate(conn.getSource());
            final boolean isDestinationAuthorized = isAuthorized.evaluate(conn.getDestination());

            final ConnectionStatus connStatus = new ConnectionStatus();
            connStatus.setId(conn.getIdentifier());
            connStatus.setGroupId(conn.getProcessGroup().getIdentifier());
            connStatus.setSourceId(conn.getSource().getIdentifier());
            // unauthorized viewers see only identifiers, never component names
            connStatus.setSourceName(isSourceAuthorized ? conn.getSource().getName() : conn.getSource().getIdentifier());
            connStatus.setDestinationId(conn.getDestination().getIdentifier());
            connStatus.setDestinationName(isDestinationAuthorized ? conn.getDestination().getName() : conn.getDestination().getIdentifier());
            connStatus.setBackPressureDataSizeThreshold(conn.getFlowFileQueue().getBackPressureDataSizeThreshold());
            connStatus.setBackPressureObjectThreshold(conn.getFlowFileQueue().getBackPressureObjectThreshold());

            final FlowFileEvent connectionStatusReport = statusReport.getReportEntry(conn.getIdentifier());
            if (connectionStatusReport != null) {
                connStatus.setInputBytes(connectionStatusReport.getContentSizeIn());
                connStatus.setInputCount(connectionStatusReport.getFlowFilesIn());
                connStatus.setOutputBytes(connectionStatusReport.getContentSizeOut());
                connStatus.setOutputCount(connectionStatusReport.getFlowFilesOut());
                flowFilesTransferred += connectionStatusReport.getFlowFilesIn() + connectionStatusReport.getFlowFilesOut();
                bytesTransferred += connectionStatusReport.getContentSizeIn() + connectionStatusReport.getContentSizeOut();
            }

            // attach back-pressure predictions when an analytics engine is configured
            if (statusAnalyticsEngine != null) {
                StatusAnalytics statusAnalytics = statusAnalyticsEngine.getStatusAnalytics(conn.getIdentifier());
                if (statusAnalytics != null) {
                    Map<String, Long> predictionValues = statusAnalytics.getPredictions();
                    ConnectionStatusPredictions predictions = new ConnectionStatusPredictions();
                    connStatus.setPredictions(predictions);
                    predictions.setPredictedTimeToBytesBackpressureMillis(predictionValues.get("timeToBytesBackpressureMillis"));
                    predictions.setPredictedTimeToCountBackpressureMillis(predictionValues.get("timeToCountBackpressureMillis"));
                    predictions.setNextPredictedQueuedBytes(predictionValues.get("nextIntervalBytes"));
                    predictions.setNextPredictedQueuedCount(predictionValues.get("nextIntervalCount").intValue());
                    predictions.setPredictedPercentCount(predictionValues.get("nextIntervalPercentageUseCount").intValue());
                    predictions.setPredictedPercentBytes(predictionValues.get("nextIntervalPercentageUseBytes").intValue());
                    predictions.setPredictionIntervalMillis(predictionValues.get("intervalTimeMillis"));
                }
            } else {
                connStatus.setPredictions(null);
            }

            if (isConnectionAuthorized) {
                if (StringUtils.isNotBlank(conn.getName())) {
                    connStatus.setName(conn.getName());
                } else if (conn.getRelationships() != null && !conn.getRelationships().isEmpty()) {
                    // unnamed connection: fall back to a comma-joined list of its relationships
                    final Collection<String> relationships = new ArrayList<>(conn.getRelationships().size());
                    for (final Relationship relationship : conn.getRelationships()) {
                        relationships.add(relationship.getName());
                    }
                    connStatus.setName(StringUtils.join(relationships, ", "));
                }
            } else {
                connStatus.setName(conn.getIdentifier());
            }

            final QueueSize queueSize = conn.getFlowFileQueue().size();
            final int connectionQueuedCount = queueSize.getObjectCount();
            final long connectionQueuedBytes = queueSize.getByteCount();
            if (connectionQueuedCount > 0) {
                connStatus.setQueuedBytes(connectionQueuedBytes);
                connStatus.setQueuedCount(connectionQueuedCount);
            }

            if (populateChildStatuses) {
                connectionStatusCollection.add(connStatus);
            }

            queuedCount += connectionQueuedCount;
            queuedContentSize += connectionQueuedBytes;

            // remote ports attached to this connection contribute their threads to the group
            final Connectable source = conn.getSource();
            if (ConnectableType.REMOTE_OUTPUT_PORT.equals(source.getConnectableType())) {
                final RemoteGroupPort remoteOutputPort = (RemoteGroupPort) source;
                activeGroupThreads += processScheduler.getActiveThreadCount(remoteOutputPort);
            }
            final Connectable destination = conn.getDestination();
            if (ConnectableType.REMOTE_INPUT_PORT.equals(destination.getConnectableType())) {
                final RemoteGroupPort remoteInputPort = (RemoteGroupPort) destination;
                activeGroupThreads += processScheduler.getActiveThreadCount(remoteInputPort);
            }
        }

        // status for input ports
        final Collection<PortStatus> inputPortStatusCollection = new ArrayList<>();
        status.setInputPortStatus(inputPortStatusCollection);
        final Set<Port> inputPorts = group.getInputPorts();
        for (final Port port : inputPorts) {
            final boolean isInputPortAuthorized = isAuthorized.evaluate(port);

            final PortStatus portStatus = new PortStatus();
            portStatus.setId(port.getIdentifier());
            portStatus.setGroupId(port.getProcessGroup().getIdentifier());
            portStatus.setName(isInputPortAuthorized ? port.getName() : port.getIdentifier());
            portStatus.setActiveThreadCount(processScheduler.getActiveThreadCount(port));

            // determine the run status
            if (ScheduledState.RUNNING.equals(port.getScheduledState())) {
                portStatus.setRunStatus(RunStatus.Running);
            } else if (ScheduledState.DISABLED.equals(port.getScheduledState())) {
                portStatus.setRunStatus(RunStatus.Disabled);
            } else if (!port.isValid()) {
                portStatus.setRunStatus(RunStatus.Invalid);
            } else {
                portStatus.setRunStatus(RunStatus.Stopped);
            }

            // special handling for public ports
            if (port instanceof PublicPort) {
                portStatus.setTransmitting(((PublicPort) port).isTransmitting());
            }

            final FlowFileEvent entry = statusReport.getReportEntries().get(port.getIdentifier());
            if (entry == null) {
                portStatus.setInputBytes(0L);
                portStatus.setInputCount(0);
                portStatus.setOutputBytes(0L);
                portStatus.setOutputCount(0);
            } else {
                final int processedCount = entry.getFlowFilesOut();
                final long numProcessedBytes = entry.getContentSizeOut();
                portStatus.setOutputBytes(numProcessedBytes);
                portStatus.setOutputCount(processedCount);

                final int inputCount = entry.getFlowFilesIn();
                final long inputBytes = entry.getContentSizeIn();
                portStatus.setInputBytes(inputBytes);
                portStatus.setInputCount(inputCount);

                // public ports count site-to-site receives; local ports count plain FlowFile input
                flowFilesIn += port instanceof PublicPort ? entry.getFlowFilesReceived() : inputCount;
                bytesIn += port instanceof PublicPort ? entry.getBytesReceived() : inputBytes;
                bytesWritten += entry.getBytesWritten();

                flowFilesReceived += entry.getFlowFilesReceived();
                bytesReceived += entry.getBytesReceived();
            }

            if (populateChildStatuses) {
                inputPortStatusCollection.add(portStatus);
            }

            activeGroupThreads += portStatus.getActiveThreadCount();
        }

        // status for output ports
        final Collection<PortStatus> outputPortStatusCollection = new ArrayList<>();
        status.setOutputPortStatus(outputPortStatusCollection);
        final Set<Port> outputPorts = group.getOutputPorts();
        for (final Port port : outputPorts) {
            final boolean isOutputPortAuthorized = isAuthorized.evaluate(port);

            final PortStatus portStatus = new PortStatus();
            portStatus.setId(port.getIdentifier());
            portStatus.setGroupId(port.getProcessGroup().getIdentifier());
            portStatus.setName(isOutputPortAuthorized ? port.getName() : port.getIdentifier());
            portStatus.setActiveThreadCount(processScheduler.getActiveThreadCount(port));

            // determine the run status
            if (ScheduledState.RUNNING.equals(port.getScheduledState())) {
                portStatus.setRunStatus(RunStatus.Running);
            } else if (ScheduledState.DISABLED.equals(port.getScheduledState())) {
                portStatus.setRunStatus(RunStatus.Disabled);
            } else if (!port.isValid()) {
                portStatus.setRunStatus(RunStatus.Invalid);
            } else {
                portStatus.setRunStatus(RunStatus.Stopped);
            }

            // special handling for public ports
            if (port instanceof PublicPort) {
                portStatus.setTransmitting(((PublicPort) port).isTransmitting());
            }

            final FlowFileEvent entry = statusReport.getReportEntries().get(port.getIdentifier());
            if (entry == null) {
                portStatus.setInputBytes(0L);
                portStatus.setInputCount(0);
                portStatus.setOutputBytes(0L);
                portStatus.setOutputCount(0);
            } else {
                final int processedCount = entry.getFlowFilesOut();
                final long numProcessedBytes = entry.getContentSizeOut();
                portStatus.setOutputBytes(numProcessedBytes);
                portStatus.setOutputCount(processedCount);

                final int inputCount = entry.getFlowFilesIn();
                final long inputBytes = entry.getContentSizeIn();
                portStatus.setInputBytes(inputBytes);
                portStatus.setInputCount(inputCount);

                bytesRead += entry.getBytesRead();
                flowFilesOut += port instanceof PublicPort ? entry.getFlowFilesSent() : entry.getFlowFilesOut();
                bytesOut += port instanceof PublicPort ? entry.getBytesSent() : entry.getContentSizeOut();
                // BUGFIX: was 'flowFilesSent = entry.getFlowFilesSent();' — plain assignment
                // overwrote the running total, discarding the counts accumulated from processors,
                // child groups, remote groups, input ports and any earlier output port. Every
                // sibling accumulator in this loop uses '+='.
                flowFilesSent += entry.getFlowFilesSent();
                bytesSent += entry.getBytesSent();
            }

            if (populateChildStatuses) {
                outputPortStatusCollection.add(portStatus);
            }

            activeGroupThreads += portStatus.getActiveThreadCount();
        }

        for (final Funnel funnel : group.getFunnels()) {
            activeGroupThreads += processScheduler.getActiveThreadCount(funnel);
        }

        status.setActiveThreadCount(activeGroupThreads);
        status.setTerminatedThreadCount(terminatedGroupThreads);
        status.setBytesRead(bytesRead);
        status.setBytesWritten(bytesWritten);
        status.setQueuedCount(queuedCount);
        status.setQueuedContentSize(queuedContentSize);
        status.setInputContentSize(bytesIn);
        status.setInputCount(flowFilesIn);
        status.setOutputContentSize(bytesOut);
        status.setOutputCount(flowFilesOut);
        status.setFlowFilesReceived(flowFilesReceived);
        status.setBytesReceived(bytesReceived);
        status.setFlowFilesSent(flowFilesSent);
        status.setBytesSent(bytesSent);
        status.setFlowFilesTransferred(flowFilesTransferred);
        status.setBytesTransferred(bytesTransferred);

        final VersionControlInformation vci = group.getVersionControlInformation();
        if (vci != null) {
            try {
                final VersionedFlowStatus flowStatus = vci.getStatus();
                if (flowStatus != null && flowStatus.getState() != null) {
                    status.setVersionedFlowState(flowStatus.getState());
                }
            } catch (final Exception e) {
                logger.warn("Failed to determine Version Control State for {}. Will consider state to be SYNC_FAILURE", group, e);
                status.setVersionedFlowState(VersionedFlowState.SYNC_FAILURE);
            }
        }

        return status;
    }

    /**
     * Builds the status snapshot for a remote process group, aggregating thread counts and
     * site-to-site send/receive totals over its connected input and output ports.
     *
     * @param remoteGroup  the remote process group to report on
     * @param statusReport repository report supplying per-port FlowFile events
     * @param isAuthorized predicate deciding whether the caller may see names/URIs; when it
     *                     returns {@code false} the identifier is shown and the target URI is null
     * @return the populated {@link RemoteProcessGroupStatus}
     */
    private RemoteProcessGroupStatus createRemoteGroupStatus(final RemoteProcessGroup remoteGroup, final RepositoryStatusReport statusReport, final Predicate<Authorizable> isAuthorized) {
        final boolean isRemoteProcessGroupAuthorized = isAuthorized.evaluate(remoteGroup);
        final ProcessScheduler processScheduler = flowController.getProcessScheduler();

        int receivedCount = 0;
        long receivedContentSize = 0L;
        int sentCount = 0;
        long sentContentSize = 0L;
        int activeThreadCount = 0;
        int activePortCount = 0;
        int inactivePortCount = 0;

        final RemoteProcessGroupStatus status = new RemoteProcessGroupStatus();
        status.setGroupId(remoteGroup.getProcessGroup().getIdentifier());
        status.setName(isRemoteProcessGroupAuthorized ? remoteGroup.getName() : remoteGroup.getIdentifier());
        status.setTargetUri(isRemoteProcessGroupAuthorized ? remoteGroup.getTargetUri() : null);

        long lineageMillis = 0L;
        int flowFilesRemoved = 0;
        int flowFilesTransferred = 0;
        for (final Port port : remoteGroup.getInputPorts()) {
            // determine if this input port is connected
            final boolean isConnected = port.hasIncomingConnection();

            // we only want to consider remote ports that we are connected to
            if (isConnected) {
                if (port.isRunning()) {
                    activePortCount++;
                } else {
                    inactivePortCount++;
                }

                activeThreadCount += processScheduler.getActiveThreadCount(port);

                final FlowFileEvent portEvent = statusReport.getReportEntry(port.getIdentifier());
                if (portEvent != null) {
                    lineageMillis += portEvent.getAggregateLineageMillis();
                    flowFilesRemoved += portEvent.getFlowFilesRemoved();
                    flowFilesTransferred += portEvent.getFlowFilesOut();
                    sentCount += portEvent.getFlowFilesSent();
                    sentContentSize += portEvent.getBytesSent();
                }
            }
        }

        for (final Port port : remoteGroup.getOutputPorts()) {
            // determine if this output port is connected
            final boolean isConnected = !port.getConnections().isEmpty();

            // we only want to consider remote ports that we are connected from
            if (isConnected) {
                if (port.isRunning()) {
                    activePortCount++;
                } else {
                    inactivePortCount++;
                }

                activeThreadCount += processScheduler.getActiveThreadCount(port);

                final FlowFileEvent portEvent = statusReport.getReportEntry(port.getIdentifier());
                if (portEvent != null) {
                    receivedCount += portEvent.getFlowFilesReceived();
                    receivedContentSize += portEvent.getBytesReceived();
                }
            }
        }

        status.setId(remoteGroup.getIdentifier());
        status.setTransmissionStatus(remoteGroup.isTransmitting() ? TransmissionStatus.Transmitting : TransmissionStatus.NotTransmitting);
        status.setActiveThreadCount(activeThreadCount);
        status.setReceivedContentSize(receivedContentSize);
        status.setReceivedCount(receivedCount);
        status.setSentContentSize(sentContentSize);
        status.setSentCount(sentCount);
        status.setActiveRemotePortCount(activePortCount);
        status.setInactiveRemotePortCount(inactivePortCount);

        // average lineage over everything that left the group (transferred or removed);
        // guard against division by zero when nothing has moved yet
        final int flowFilesOutOrRemoved = flowFilesTransferred + flowFilesRemoved;
        status.setAverageLineageDuration(flowFilesOutOrRemoved == 0 ? 0 : lineageMillis / flowFilesOutOrRemoved, TimeUnit.MILLISECONDS);

        return status;
    }

    /**
     * Convenience overload: looks up the processor's FlowFile event in the report and delegates.
     */
    private ProcessorStatus getProcessorStatus(final RepositoryStatusReport report, final ProcessorNode procNode, final Predicate<Authorizable> isAuthorized) {
        final FlowFileEvent entry = report.getReportEntries().get(procNode.getIdentifier());
        return getProcessorStatus(entry, procNode, isAuthorized);
    }

    /**
     * Builds the status snapshot for a single processor from its FlowFile event (may be null or
     * the empty sentinel, in which case the counters are left at their defaults).
     *
     * @param flowFileEvent per-processor repository event, or null/EmptyFlowFileEvent.INSTANCE
     * @param procNode      the processor to report on
     * @param isAuthorized  predicate deciding whether the caller may see name/type/counters
     * @return the populated {@link ProcessorStatus}
     */
    private ProcessorStatus getProcessorStatus(final FlowFileEvent flowFileEvent, final ProcessorNode procNode, final Predicate<Authorizable> isAuthorized) {
        final boolean isProcessorAuthorized = isAuthorized.evaluate(procNode);
        final ProcessScheduler processScheduler = flowController.getProcessScheduler();

        final ProcessorStatus status = new ProcessorStatus();
        status.setId(procNode.getIdentifier());
        status.setGroupId(procNode.getProcessGroup().getIdentifier());
        // unauthorized viewers see the identifier and a generic "Processor" type
        status.setName(isProcessorAuthorized ? procNode.getName() : procNode.getIdentifier());
        status.setType(isProcessorAuthorized ? procNode.getComponentType() : "Processor");

        if (flowFileEvent != null && flowFileEvent != EmptyFlowFileEvent.INSTANCE) {
            final int processedCount = flowFileEvent.getFlowFilesOut();
            final long numProcessedBytes = flowFileEvent.getContentSizeOut();
            status.setOutputBytes(numProcessedBytes);
            status.setOutputCount(processedCount);

            final int inputCount = flowFileEvent.getFlowFilesIn();
            final long inputBytes = flowFileEvent.getContentSizeIn();
            status.setInputBytes(inputBytes);
            status.setInputCount(inputCount);

            final long readBytes = flowFileEvent.getBytesRead();
            status.setBytesRead(readBytes);

            final long writtenBytes = flowFileEvent.getBytesWritten();
            status.setBytesWritten(writtenBytes);

            status.setProcessingNanos(flowFileEvent.getProcessingNanoseconds());
            status.setInvocations(flowFileEvent.getInvocations());
            status.setAverageLineageDuration(flowFileEvent.getAverageLineageMillis());
            status.setFlowFilesReceived(flowFileEvent.getFlowFilesReceived());
            status.setBytesReceived(flowFileEvent.getBytesReceived());
            status.setFlowFilesSent(flowFileEvent.getFlowFilesSent());
            status.setBytesSent(flowFileEvent.getBytesSent());
            status.setFlowFilesRemoved(flowFileEvent.getFlowFilesRemoved());

            if (isProcessorAuthorized) {
                status.setCounters(flowFileEvent.getCounters());
            }
        }

        // Determine the run status and get any validation error... only validating while STOPPED
        // is a trade-off we are willing to make, even though processor validity could change due to
        // environmental conditions (property configured with a file path and the file being externally
        // removed). This saves on validation costs that would be unnecessary most of the time.
        if (ScheduledState.DISABLED.equals(procNode.getScheduledState())) {
            status.setRunStatus(RunStatus.Disabled);
        } else if (ScheduledState.RUNNING.equals(procNode.getScheduledState())) {
            status.setRunStatus(RunStatus.Running);
        } else if (procNode.getValidationStatus() == ValidationStatus.VALIDATING) {
            status.setRunStatus(RunStatus.Validating);
        } else if (procNode.getValidationStatus() == ValidationStatus.INVALID) {
            status.setRunStatus(RunStatus.Invalid);
        } else {
            status.setRunStatus(RunStatus.Stopped);
        }

        status.setExecutionNode(procNode.getExecutionNode());
        status.setTerminatedThreadCount(procNode.getTerminatedThreadCount());
        status.setActiveThreadCount(processScheduler.getActiveThreadCount(procNode));

        return status;
    }

    /**
     * Returns the total number of bytes read by this instance (at the root process group level, i.e. all events) since the instance started
     *
     * @return the total number of bytes read by this instance
     */
    @Override
    public long getTotalBytesRead() {
        return flowFileEventRepository.reportAggregateEvent().getBytesRead();
    }

    /**
     * Returns the total number of bytes written by this instance (at the root process group level, i.e. all events) since the instance started
     *
     * @return the total number of bytes written by this instance
     */
    @Override
    public long getTotalBytesWritten() {
        return flowFileEventRepository.reportAggregateEvent().getBytesWritten();
    }

    /**
     * Returns the total number of bytes sent by this instance (at the root process group level) since the instance started
     *
     * @return the total number of bytes sent by this instance
     */
    @Override
    public long getTotalBytesSent() {
        return flowFileEventRepository.reportAggregateEvent().getBytesSent();
    }

    /**
     * Returns the total number of bytes received by this instance (at the root process group level) since the instance started
     *
     * @return the total number of bytes received by this instance
     */
    @Override
    public long getTotalBytesReceived() {
        return flowFileEventRepository.reportAggregateEvent().getBytesReceived();
    }
}
package Armadillo.Analytics.Optimisation.ProblemFactories; import Armadillo.Analytics.Optimisation.Base.EnumOptimimisationPoblemType; import Armadillo.Analytics.Optimisation.Base.ObjectiveFunctionType; import Armadillo.Analytics.Optimisation.Base.OptimisationConstants; import Armadillo.Analytics.Optimisation.Base.DataStructures.Gp.AGpBridge; import Armadillo.Analytics.Optimisation.Base.DataStructures.Gp.GpOperatorsContainer; import Armadillo.Analytics.Optimisation.Base.Helpers.OptimizationHelper; import Armadillo.Analytics.Optimisation.Base.ObjectiveFunctions.IHeuristicObjectiveFunction; import Armadillo.Analytics.Optimisation.Base.Operators.AGuidedConvergence; import Armadillo.Analytics.Optimisation.Base.Operators.IndividualClasses.IndividualFactories.IndividualFactory; import Armadillo.Analytics.Optimisation.Base.Operators.LocalSearch.ILocalSearch; import Armadillo.Analytics.Optimisation.Base.Operators.PopulationClasses.IInitialPopulation; import Armadillo.Analytics.Optimisation.Base.Operators.PopulationClasses.Population; import Armadillo.Analytics.Optimisation.Base.Operators.PopulationClasses.RandomInitialPopulation; import Armadillo.Analytics.Optimisation.Base.Operators.Reproduction.IReproduction; import Armadillo.Analytics.Optimisation.Base.Problem.IHeuristicProblemFactory; import Armadillo.Analytics.Optimisation.Base.Problem.HeuristicProblem; import Armadillo.Analytics.Optimisation.Base.Solvers.EvolutionarySolver; import Armadillo.Analytics.Optimisation.Binary.Operators.GuidedConvergenceBln; import Armadillo.Analytics.Optimisation.Binary.Operators.LocalSearch.LocalSearchStdBln; import Armadillo.Analytics.Optimisation.Binary.Operators.Reproduction.ReproductionFactoryBln; import Armadillo.Analytics.Optimisation.Continuous.ContinuousConstants; import Armadillo.Analytics.Optimisation.Continuous.Operators.GuidedConvergenceDbl; import Armadillo.Analytics.Optimisation.Continuous.Operators.ReproductionDblStd; import 
Armadillo.Analytics.Optimisation.Continuous.Operators.LocalSearch.LocalSearchStdDbl;
import Armadillo.Analytics.Optimisation.Gp.GpConstants;
import Armadillo.Analytics.Optimisation.Integer.Operators.GuidedConvergenceInt;
import Armadillo.Analytics.Optimisation.Integer.Operators.ReproductionFactoryInt;
import Armadillo.Analytics.Optimisation.Integer.Operators.LocalSearch.LocalSearchStdInt;
import Armadillo.Analytics.Optimisation.MixedSolvers.MixedSolversConstants;
import Armadillo.Core.HCException;

/**
 * Base factory that assembles a fully wired {@link HeuristicProblem}: objective function,
 * population, reproduction, guided convergence, initial population, evolutionary solver,
 * optional local search, and per-problem-type default parameters (iterations, convergence,
 * population size). Subclasses supply the objective function and problem type via the
 * constructors.
 */
public abstract class AHeuristicProblemFactory implements IHeuristicProblemFactory {

    // Objective function the built problem will optimise; supplied by the subclass.
    protected final IHeuristicObjectiveFunction m_objectiveFunction;
    // Drives the operator selection (continuous / integer / binary / mixed / GP).
    private final EnumOptimimisationPoblemType m_problemType;
    // GP-only collaborators; null for non-GP problems (see the two-arg constructor).
    protected AGpBridge m_gpBridge;
    protected GpOperatorsContainer m_gpOperatorsContainer;

    /**
     * Convenience constructor for non-GP problems: no operator container or bridge.
     */
    public AHeuristicProblemFactory(
            EnumOptimimisationPoblemType problemType,
            IHeuristicObjectiveFunction objectiveFunction) {
        this(problemType, objectiveFunction, null, null);
    }

    /**
     * Full constructor; {@code gpOperatorsContainer} and {@code gpBridge} are only used when
     * the problem type is GENETIC_PROGRAMMING.
     */
    public AHeuristicProblemFactory(
            EnumOptimimisationPoblemType problemType,
            IHeuristicObjectiveFunction objectiveFunction,
            GpOperatorsContainer gpOperatorsContainer,
            AGpBridge gpBridge) {
        m_problemType = problemType;
        m_objectiveFunction = objectiveFunction;
        m_gpOperatorsContainer = gpOperatorsContainer;
        m_gpBridge = gpBridge;
    }

    /** Builds an unnamed problem; see {@link #BuildProblem(String)}. */
    public HeuristicProblem BuildProblem() {
        return BuildProblem("");
    }

    /**
     * Assembles and returns a HeuristicProblem with all operators and type-specific defaults
     * configured.
     *
     * @param strProblemName display name for the problem (may be empty)
     * @return the fully configured problem instance
     */
    public HeuristicProblem BuildProblem(String strProblemName) {
        HeuristicProblem heuristicProblem = new HeuristicProblem(
                m_problemType,
                strProblemName);

        //
        // load objective
        //
        heuristicProblem.setObjectiveFunction(m_objectiveFunction);

        //
        // crete population
        //
        Population population = new Population(heuristicProblem);
        heuristicProblem.setPopulation(population);

        //
        // Create reproduction operator
        //
        BuildReproduction(heuristicProblem);

        //
        // Build guided convergence
        //
        BuildGuidedConvergence(heuristicProblem);

        //
        // create initial population
        //
        IInitialPopulation initialPopulation = new RandomInitialPopulation(
                heuristicProblem);
        heuristicProblem.setInitialPopulation(initialPopulation);

        //
        // create solver
        //
        EvolutionarySolver solver = new EvolutionarySolver(heuristicProblem);
        heuristicProblem.setSolver(solver);

        // apply per-type default parameters (iterations, convergence, population, solver name)
        if (m_problemType == EnumOptimimisationPoblemType.CONTINUOUS) {
            SetContinuousDefaultParams(heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.INTEGER) {
            SetIntegerDefaultParams(heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.BINARY) {
            SetBinaryDefaultParams(heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.MIXED) {
            SetMixedDefaultParams(heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.GENETIC_PROGRAMMING) {
            //
            // set bridge
            //
            heuristicProblem.setGpBridge(m_gpBridge);
            SetGpDefaultParams(heuristicProblem);
        }

        //
        // set local searh operator
        //
        LoadLocalSearch(heuristicProblem);

        if (m_problemType != EnumOptimimisationPoblemType.MIXED) {
            //
            // Set default individual factory
            //
            heuristicProblem.setIndividualFactory(new IndividualFactory(
                    heuristicProblem));
        }

        //
        // set number of threads
        //
        heuristicProblem.setThreads(OptimisationConstants.INT_THREADS);
        heuristicProblem.setDoClusterSolution(true);
        return heuristicProblem;
    }

    /**
     * Selects the guided-convergence operator matching the problem type.
     * NOTE(review): for MIXED and GENETIC_PROGRAMMING no branch matches, so a null operator is
     * installed — presumably intentional (those types configure convergence elsewhere); verify.
     */
    private void BuildGuidedConvergence(
            HeuristicProblem heuristicProblem) {
        //
        // create guided convergence
        //
        AGuidedConvergence guidedConvergence = null;
        if (m_problemType == EnumOptimimisationPoblemType.CONTINUOUS) {
            guidedConvergence = new GuidedConvergenceDbl(heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.INTEGER) {
            guidedConvergence = new GuidedConvergenceInt(heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.BINARY) {
            guidedConvergence = new GuidedConvergenceBln(heuristicProblem);
        }
        heuristicProblem.setGuidedConvergence(guidedConvergence);
    }

    /**
     * Installs a standard local-search operator for single-objective (non-multi, non-mixed
     * objective) problems and enables it with the default probability. Multi-objective and
     * mixed objectives get no local search.
     */
    private void LoadLocalSearch(
            HeuristicProblem heuristicProblem) {
        //
        // Set local search for non multi objective problems
        //
        ILocalSearch localSearch = null;
        if (m_objectiveFunction.ObjectiveFunctionType() != ObjectiveFunctionType.MULTI_OBJECTIVE_FUNCT &&
                m_objectiveFunction.ObjectiveFunctionType() != ObjectiveFunctionType.MIXED) {
            if (m_problemType == EnumOptimimisationPoblemType.CONTINUOUS) {
                localSearch = new LocalSearchStdDbl(
                        heuristicProblem);
            } else if (m_problemType == EnumOptimimisationPoblemType.BINARY) {
                localSearch = new LocalSearchStdBln(
                        heuristicProblem);
            } else if (m_problemType == EnumOptimimisationPoblemType.INTEGER) {
                localSearch = new LocalSearchStdInt(
                        heuristicProblem);
            }
            // NOTE(review): for MIXED/GP problem types localSearch stays null here while
            // doLocalSearch is still enabled — confirm downstream handles a null operator.
            heuristicProblem.setLocalSearch(localSearch);
            heuristicProblem.setDoLocalSearch(true);
            heuristicProblem.setLocalSearchProb(OptimisationConstants.DBL_LOCAL_SEARCH);
        }
    }

    /**
     * Selects the reproduction operator for the problem type. MIXED and GENETIC_PROGRAMMING
     * deliberately install null (loaded at a later stage, per the inline comments); any other
     * unhandled type is a programming error.
     *
     * @throws HCException if the problem type has no reproduction mapping
     */
    private void BuildReproduction(
            HeuristicProblem heuristicProblem) {
        //
        // create reproduction
        //
        IReproduction reproduction = null;
        if (m_problemType == EnumOptimimisationPoblemType.CONTINUOUS) {
            reproduction = new ReproductionDblStd(
                    heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.INTEGER) {
            reproduction = ReproductionFactoryInt.BuildReproductionInt(
                    heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.BINARY) {
            reproduction = ReproductionFactoryBln.BuildReproductionBln(
                    heuristicProblem);
        } else if (m_problemType == EnumOptimimisationPoblemType.MIXED) {
            //
            // Do nothing
            // Reproduction should be loaded in a later stage
            //
        } else if (m_problemType == EnumOptimimisationPoblemType.GENETIC_PROGRAMMING) {
            //
            // Do nothing
            // Reproduction should be loaded in a later stage
            //
        } else {
            throw new HCException("Error. Problem type not implemented");
        }
        heuristicProblem.setReproduction(reproduction);
    }

    /**
     * Defaults for continuous (differential-evolution style) problems: small problems get the
     * small-problem convergence constant, subject to the visible guard conditions.
     */
    private static void SetContinuousDefaultParams(
            HeuristicProblem heuristicProblem) {
        ValidateIterations(heuristicProblem);
        //
        // set convergence
        //
        if (heuristicProblem.getObjectiveFunction().VariableCount() <= ContinuousConstants.INT_SMALL_PROBLEM_DE &&
                heuristicProblem.getConvergence() == 0 &&
                heuristicProblem.ObjectiveCount() == 1) {
            heuristicProblem.setConvergence(ContinuousConstants.INT_DE_SMALL_CONVERGENCE);
        } else {
            heuristicProblem.setConvergence(ContinuousConstants.INT_DE_CONVERGENCE);
        }
        ValidatePopulation(heuristicProblem);
        heuristicProblem.getSolver().SetSolverName("Continuous Genetic Algorithm Solver");
    }

    /** Defaults for binary GA problems: convergence constant chosen by problem size. */
    private static void SetBinaryDefaultParams(
            HeuristicProblem heuristicProblem) {
        ValidateIterations(heuristicProblem);
        //
        // set convergence
        //
        if (heuristicProblem.getObjectiveFunction().VariableCount() <= OptimisationConstants.INT_SMALL_PROBLEM_GA) {
            heuristicProblem.setConvergence(OptimisationConstants.INT_GA_SMALL_CONVERGENCE);
        } else {
            heuristicProblem.setConvergence(OptimisationConstants.INT_GA_CONVERGENCE);
        }
        ValidatePopulation(heuristicProblem);
        heuristicProblem.getSolver().SetSolverName("Binary Genetic Algorithm Solver");
    }

    /** Defaults for integer GA problems: convergence constant chosen by problem size. */
    private static void SetIntegerDefaultParams(
            HeuristicProblem heuristicProblem) {
        ValidateIterations(heuristicProblem);
        //
        // set convergence
        //
        if (heuristicProblem.getObjectiveFunction().VariableCount() <= OptimisationConstants.INT_SMALL_PROBLEM_GA) {
            heuristicProblem.setConvergence(OptimisationConstants.INT_GA_SMALL_CONVERGENCE);
        } else {
            heuristicProblem.setConvergence(OptimisationConstants.INT_GA_CONVERGENCE);
        }
        ValidatePopulation(heuristicProblem);
        heuristicProblem.getSolver().SetSolverName("Integer Genetic Algorithm Solver");
    }

    /** Defaults for mixed problems: uses the MixedSolvers convergence constants. */
    private static void SetMixedDefaultParams(
            HeuristicProblem heuristicProblem) {
        ValidateIterations(heuristicProblem);
        //
        // set convergence
        //
        if (heuristicProblem.getObjectiveFunction().VariableCount() <= OptimisationConstants.INT_SMALL_PROBLEM_GA) {
            heuristicProblem.setConvergence(MixedSolversConstants.INT_GA_SMALL_CONVERGENCE);
        } else {
            heuristicProblem.setConvergence(MixedSolversConstants.INT_GA_CONVERGENCE);
        }
        ValidatePopulation(heuristicProblem);
        heuristicProblem.getSolver().SetSolverName("Mixed Genetic Algorithm Solver");
    }

    /** Defaults for genetic-programming problems: fixed GP iteration/convergence constants. */
    private static void SetGpDefaultParams(
            HeuristicProblem heuristicProblem) {
        //
        // set iterations
        //
        heuristicProblem.setIterations(GpConstants.INT_GP_ITERATIONS);
        //
        // set convergence
        //
        heuristicProblem.setConvergence(GpConstants.INT_GP_CONVERGENCE);
        ValidatePopulation(heuristicProblem);
        heuristicProblem.getSolver().SetSolverName("Genetic Programming Algorithm");
    }

    /** Applies the default population size only when none has been configured (0). */
    private static void ValidatePopulation(
            HeuristicProblem heuristicProblem) {
        //
        // set population
        //
        if (heuristicProblem.PopulationSize() == 0) {
            heuristicProblem.setPopulationSize(
                    OptimisationConstants.INT_POPULATION_SIZE);
        }
    }

    /** Applies a variable-count-derived iteration budget only when none is configured (0). */
    private static void ValidateIterations(
            HeuristicProblem heuristicProblem) {
        //
        // set iterations
        //
        if (heuristicProblem.getIterations() == 0) {
            heuristicProblem.setIterations(OptimizationHelper.GetHeuristicSolverIterations(
                    heuristicProblem.VariableCount()));
        }
    }
}
package com.communote.server.widgets; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang3.StringUtils; import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.communote.common.string.StringEscapeHelper; /** * abstract implementation of the widget interface * * @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a> */ public abstract class AbstractWidget implements Widget { private static final String BLANK = " "; private static final String COMMA = ","; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final Logger LOGGER = LoggerFactory.getLogger(AbstractWidget.class); /** * current output type */ private static final String OUTPUT_TYPE_DHTML = "dhtml"; /** * Key to be used in the response metadata to denote that a widget has no content. */ protected static final String METADATA_KEY_NO_CONTENT = "noContent"; private HttpServletRequest request; private HttpServletResponse response; private final Map<String, String> parameters = new HashMap<String, String>(); private final Map<String, Object> attributes = new HashMap<String, Object>(); private final Map<String, Object> responseMetadata = new HashMap<String, Object>(); private String groupName = ""; private boolean success = true; private String widgetId; /** * The initParameters() method is invoked implicitly. */ public AbstractWidget() { this.initParameters(); } /** * Determine the output type based on the request * * @return the output type (e.g. 
dhtml) */ protected String determineOutputType() { // determine output type String outputType = OUTPUT_TYPE_DHTML; if (request.getParameter(WidgetController.WIDGET_TYPE) != null) { outputType = request.getParameter(WidgetController.WIDGET_TYPE).toLowerCase(); } return outputType; } /** * {@inheritDoc} */ @Override public Object getAttribute(String key) { return attributes.get(key); } /** * Get a parameter for a key and parse it to a <code>boolean</code> value * * @param key * the key * @param fallback * the fall back to use in case of an NumberFormatException * @return the parsed value */ public boolean getBooleanParameter(String key, boolean fallback) { if ("true".equalsIgnoreCase(parameters.get(key))) { fallback = Boolean.TRUE; } else if ("false".equalsIgnoreCase(parameters.get(key))) { fallback = Boolean.FALSE; } return fallback; } /** * {@inheritDoc} */ @Override public String getGroupName() { return groupName; } /** * Get a parameter for a key and parse it to a <code>int</code> value * * @param key * the key * @return the parsed value */ public int getIntParameter(String key) { return Integer.parseInt(parameters.get(key)); } /** * Get a parameter for a key and parse it to a <code>int</code> value * * @param key * the key * @param fallback * the fall back to use in case of an NumberFormatException * @return the parsed value */ public int getIntParameter(String key, int fallback) { try { return Integer.parseInt(parameters.get(key)); } catch (NumberFormatException e) { return fallback; } } /** * Get the value of the given parameter. If it is not null split it by COMMA. Return the array. * (null if the value of the parameter is null) * * @param parameterName * the name of the parameter * @return an array with the parameters splitted by comma, or null if the was parameter not set * or did not contain any parseable long */ public Long[] getLongArrayParameter(String parameterName) { String value = getParameter(parameterName); String[] splitted = value == null ? 
null : value.split(COMMA); if (splitted == null) { return null; } List<Long> longValues = new ArrayList<Long>(); for (String s : splitted) { try { longValues.add(Long.parseLong(s)); } catch (NumberFormatException e) { // ignore } } return longValues.size() == 0 ? null : longValues.toArray(new Long[1]); } /** * Get a parameter for a key and parse it to a <code>long</code> value * * @param key * the key * @return the parsed value */ public long getLongParameter(String key) { return Long.parseLong(parameters.get(key)); } /** * Get a parameter for a key and parse it to a <code>long</code> value * * @param key * the key * @param fallback * the fallback to use in case of an NumberFormatException * @return the parsed value */ public long getLongParameter(String key, long fallback) { try { return Long.parseLong(parameters.get(key)); } catch (NumberFormatException e) { return fallback; } } /** * return widget parameter * * @param key * the parameter key * @return the parameter value */ @Override public String getParameter(String key) { return parameters.get(key); } /** * return widget parameter * * @param key * the parameter key * @param fallback * the fallback if the parameter key doesn't exist * @return the parameter value */ public String getParameter(String key, String fallback) { if (parameters.containsKey(key)) { return parameters.get(key); } else { return fallback; } } /** * Return a parameter with all characters that are not in [A-Za-z0-9_.-] escaped with * underscore. Helps to avoid XSS when the parameter, which might have been manipulated, needs * to be written out to the HTML so that it can be processed in Javascript code. The return * value should be stored in a local variable when needed more than once. 
* * @param key * the name of the parameter * @return the escaped parameter value or null if not set */ public String getParameterEscaped(String key) { String value = parameters.get(key); if (value != null) { return StringEscapeHelper.escapeNonWordCharacters(value); } return null; } /** * Like {@link #getParameterEscaped(String)} but returns the provided fallback if the parameter * is not set. The fallback is processed by the escape function. * * @param key * the name of the parameter to get * @param fallback * the fallback value if the parameter is not set * @return the parameter value or the fallback */ public String getParameterEscaped(String key, String fallback) { String value = getParameterEscaped(key); if (value == null) { return fallback; } return value; } /** * {@inheritDoc} */ @Override public Map<String, String> getParameters() { return parameters; } /** * get the request for getting some more information * * @return the requet for this widget */ protected HttpServletRequest getRequest() { return request; } /** * The response object the widget is working on. Can for instance be used to set some header * values. * * @return the response object */ protected HttpServletResponse getResponse() { return response; } @Override public String getResponseMetadata() { if (!responseMetadata.isEmpty()) { try { return OBJECT_MAPPER.writeValueAsString(responseMetadata); } catch (IOException e) { LOGGER.error("Converting response metadata to JSON failed", e); } } return null; } /** * Get the value of the given parameter. If it is not null split it by BLANK. Return the array. 
* (null if the value of the parameter is null) * * @param parameterName * name of the parameter key * @return an array with the parameters splitted by blank */ public String[] getStringArrayParameter(String parameterName) { String value = getParameter(parameterName); String[] splitted = StringUtils.split(value, BLANK); return splitted; } /** * {@inheritDoc} */ @Override public String getViewIdentifier() { return getTile(determineOutputType()); } /** * {@inheritDoc} */ @Override public String getWidgetId() { if (this.widgetId == null) { this.widgetId = getParameterEscaped(PARAM_WIDGET_ID); } return this.widgetId; } /** * abstract method hook that initializes the widget's parameters */ protected abstract void initParameters(); /** * {@inheritDoc} Is true by default. */ @Override public boolean isSuccess() { return this.success; } /** * {@inheritDoc} */ public void setAttribute(String key, Object value) { attributes.put(key, value); } /** * {@inheritDoc} */ @Override public void setGroupName(String path) { this.groupName = path; } /** * set widget parameter * * @param key * the parameter key * @param value * the parameter value */ @Override public void setParameter(String key, String value) { parameters.put(key, value); } /** * {@inheritDoc} */ @Override public void setRequest(HttpServletRequest request) { this.request = request; } /** * {@inheritDoc} */ @Override public void setResponse(HttpServletResponse response) { this.response = response; } /** * Add, remove or replace an entry in the response metadata that should be returned when * {@link #getResponseMetadata()} is called. * * @param key * the key of the entry to modify * @param value * the value of the metadata entry, if null the entry with the provided key will be * removed */ protected void setResponseMetadata(String key, Object value) { if (value == null) { responseMetadata.remove(key); } else { responseMetadata.put(key, value); } } /** * Set whether the widget request was successful. 
* * @param success * true if the request succeeded */ protected void setSuccess(boolean success) { this.success = success; } }
package cz.net21.ttulka.thistledb.client;

import org.testng.annotations.Test;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Tests for {@code QueryValidator}: each method feeds the validator a mix of
 * malformed (expected false) and well-formed (expected true) statements for
 * one query kind.
 *
 * @author ttulka
 */
public class QueryValidatorTest {

    // SELECT: requires a column list, a single FROM target and, if WHERE is
    // present, a complete condition.
    @Test
    public void validateSelectTest() {
        assertThat(QueryValidator.validate("SELECT"), is(false));
        assertThat(QueryValidator.validate("SELECT * FROM"), is(false));
        assertThat(QueryValidator.validate("SELECT FROM"), is(false));
        assertThat(QueryValidator.validate("SELECT FROM test"), is(false));
        assertThat(QueryValidator.validate("FROM * SELECT test"), is(false));
        assertThat(QueryValidator.validate("SELECT * FROM test WHERE"), is(false));
        assertThat(QueryValidator.validate("SELECT * FROM test,test2 WHERE 1"), is(false));
        assertThat(QueryValidator.validate("select * from test where person.name = \"Johnny"), is(false));

        assertThat(QueryValidator.validate("SELECT * FROM test"), is(true));
        assertThat(QueryValidator.validate("SELECT * FROM test;"), is(true));
        assertThat(QueryValidator.validate("SELECT a1 FROM test"), is(true));
        assertThat(QueryValidator.validate("SELECT a1 FROM test WHERE 1=1"), is(true));
        assertThat(QueryValidator.validate("SELECT a_2 FROM test WHERE 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("SELECT a_2 FROM test WHERE 1=1 AND 1=1 OR 1=1"), is(true));
        assertThat(QueryValidator.validate("SELECT a_2 FROM test WHERE 1=1 AND a_2 = '' OR a1 = 'xxx' OR 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("SELECT a_2 FROM test WHERE 1=1 AND a_2 >= '' OR a1 != 'xxx' OR 1 LIKE '1' AND 1 < 1"), is(true));
        assertThat(QueryValidator.validate("select * from test where person.name = \"Johnny\""), is(true));
    }

    // INSERT: one target collection, VALUES followed by one or more JSON
    // documents.
    @Test
    public void validateInsertTest() {
        assertThat(QueryValidator.validate("INSERT"), is(false));
        assertThat(QueryValidator.validate("INSERT INTO"), is(false));
        assertThat(QueryValidator.validate("INSERT INTO test"), is(false));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES"), is(false));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES *"), is(false));
        assertThat(QueryValidator.validate("INSERT INTO test,test2 VALUES {}"), is(false));

        assertThat(QueryValidator.validate("INSERT INTO test VALUES {}"), is(true));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES {\"a1\":\"1\"}"), is(true));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES {\"a1\" : { \"a_2\":\"1\" }}"), is(true));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES {\"a1\" : { \"a_2\": [\"1\", \"a_2\" ] }}"), is(true));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES {\"a1\" : { \"a_2\": [{\"1\" : \"a_2\"} , {\"1_a\" : \"_a_2_.1\"} ] }}"), is(true));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES {},{}"), is(true));
        assertThat(QueryValidator.validate("INSERT INTO test VALUES {\"a\":\"1\"},{\"b\":\"2\"}"), is(true));
    }

    // UPDATE: SET assignments must have a right-hand literal (string, number,
    // boolean or null), not a column path.
    @Test
    public void validateUpdateTest() {
        assertThat(QueryValidator.validate("UPDATE"), is(false));
        assertThat(QueryValidator.validate("UPDATE *"), is(false));
        assertThat(QueryValidator.validate("UPDATE test"), is(false));
        assertThat(QueryValidator.validate("UPDATE test SET"), is(false));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = y.a_1"), is(false));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 WHERE"), is(false));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 WHERE 1=1"), is(false));
        assertThat(QueryValidator.validate("UPDATE * SET x.a_1 = 1 WHERE 1=1"), is(false));
        assertThat(QueryValidator.validate("UPDATE test SET x+a?1 = 1"), is(false));

        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 1"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = '1'"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = \"1\""), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = null"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = false"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = true"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 123"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 11.23"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 1, y.a_1='abc'"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1=1,y.a_1='abc' WHERE 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 'y.a_1'"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 'y.a_1' WHERE 1=1"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 'y.a_1' WHERE 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 'y.a_1' WHERE 1=1 AND 1=1 OR 1=1"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 'y.a_1' WHERE 1=1 AND a_2 = '' OR a1 = 'xxx' OR 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("UPDATE test SET x.a_1 = 'y.a_1' WHERE 1=1 AND a_2 >= '' OR a1 != 'xxx' OR 1 LIKE '1' AND 1 < 1"), is(true));
    }

    // ALTER ... ADD: one column path, optional WHERE with complete condition.
    @Test
    public void validateAddTest() {
        assertThat(QueryValidator.validate("ALTER"), is(false));
        assertThat(QueryValidator.validate("ALTER *"), is(false));
        assertThat(QueryValidator.validate("ALTER test"), is(false));
        assertThat(QueryValidator.validate("ALTER test ADD"), is(false));
        assertThat(QueryValidator.validate("ALTER test ADD x.a_1 WHERE"), is(false));
        assertThat(QueryValidator.validate("ALTER test ADD x+a?1"), is(false));

        assertThat(QueryValidator.validate("ALTER test ADD x.a_1"), is(true));
        assertThat(QueryValidator.validate("ALTER test ADD x.a_1 WHERE 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test ADD x.a_1 WHERE 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test ADD x.a_1 WHERE 1=1 AND 1=1 OR 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test ADD x.a_1 WHERE 1=1 AND a_2 = '' OR a1 = 'xxx' OR 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test ADD x.a_1 WHERE 1=1 AND a_2 >= '' OR a1 != 'xxx' OR 1 LIKE '1' AND 1 < 1"), is(true));
    }

    // ALTER ... REMOVE: mirrors the ADD cases.
    @Test
    public void validateRemoveTest() {
        assertThat(QueryValidator.validate("ALTER"), is(false));
        assertThat(QueryValidator.validate("ALTER *"), is(false));
        assertThat(QueryValidator.validate("ALTER test"), is(false));
        assertThat(QueryValidator.validate("ALTER test REMOVE"), is(false));
        assertThat(QueryValidator.validate("ALTER test REMOVE x.a_1 WHERE"), is(false));
        assertThat(QueryValidator.validate("ALTER test REMOVE x+a?1"), is(false));

        assertThat(QueryValidator.validate("ALTER test REMOVE x.a_1"), is(true));
        assertThat(QueryValidator.validate("ALTER test REMOVE x.a_1 WHERE 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test REMOVE x.a_1 WHERE 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test REMOVE x.a_1 WHERE 1=1 AND 1=1 OR 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test REMOVE x.a_1 WHERE 1=1 AND a_2 = '' OR a1 = 'xxx' OR 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("ALTER test REMOVE x.a_1 WHERE 1=1 AND a_2 >= '' OR a1 != 'xxx' OR 1 LIKE '1' AND 1 < 1"), is(true));
    }

    // DELETE: single FROM target, optional WHERE with complete condition.
    @Test
    public void validateDeleteTest() {
        assertThat(QueryValidator.validate("DELETE"), is(false));
        assertThat(QueryValidator.validate("DELETE FROM"), is(false));
        assertThat(QueryValidator.validate("DELETE FROM *"), is(false));
        assertThat(QueryValidator.validate("DELETE test FROM test"), is(false));
        assertThat(QueryValidator.validate("DELETE FROM test,test1"), is(false));

        assertThat(QueryValidator.validate("DELETE FROM test"), is(true));
        assertThat(QueryValidator.validate("DELETE FROM test WHERE 1=1"), is(true));
        assertThat(QueryValidator.validate("DELETE FROM test WHERE 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("DELETE FROM test WHERE 1=1 AND 1=1 OR 1=1"), is(true));
        assertThat(QueryValidator.validate("DELETE FROM test WHERE 1=1 AND a_2 = '' OR a1 = 'xxx' OR 1=1 AND 1=1"), is(true));
        assertThat(QueryValidator.validate("DELETE FROM test WHERE 1=1 AND a_2 >= '' OR a1 != 'xxx' OR 1 LIKE '1' AND 1 < 1"), is(true));
    }

    // CREATE INDEX: dotted column path, ON followed by a simple (undotted)
    // collection name.
    @Test
    public void validateCreateIndexTest() {
        assertThat(QueryValidator.validate("CREATE INDEX"), is(false));
        assertThat(QueryValidator.validate("CREATE INDEX a_2.a_3"), is(false));
        assertThat(QueryValidator.validate("CREATE INDEX a_2.a_3 ON"), is(false));
        assertThat(QueryValidator.validate("CREATE INDEX a_2.a_3 ON a_2.a_3"), is(false));

        assertThat(QueryValidator.validate("CREATE INDEX a_2.a_3 ON a_2"), is(true));
    }

    // DROP INDEX: mirrors CREATE INDEX.
    @Test
    public void validateDropIndexTest() {
        assertThat(QueryValidator.validate("DROP INDEX"), is(false));
        assertThat(QueryValidator.validate("DROP INDEX a_2.a_3"), is(false));
        assertThat(QueryValidator.validate("DROP INDEX a_2.a_3 ON"), is(false));
        assertThat(QueryValidator.validate("DROP INDEX a_2.a_3 ON a_2.a_3"), is(false));

        assertThat(QueryValidator.validate("DROP INDEX a_2.a_3 ON a_2"), is(true));
    }

    // CREATE: collection names are simple word identifiers — no dots, no
    // regex metacharacters.
    @Test
    public void validateCreateTest() {
        assertThat(QueryValidator.validate("CREATE"), is(false));
        assertThat(QueryValidator.validate("CREATE *"), is(false));
        assertThat(QueryValidator.validate("CREATE a_2.a_3"), is(false));
        assertThat(QueryValidator.validate("CREATE a+"), is(false));
        assertThat(QueryValidator.validate("CREATE a?"), is(false));

        assertThat(QueryValidator.validate("CREATE a1"), is(true));
        assertThat(QueryValidator.validate("CREATE a_2"), is(true));
    }

    // DROP: mirrors CREATE.
    @Test
    public void validateDropTest() {
        assertThat(QueryValidator.validate("DROP"), is(false));
        assertThat(QueryValidator.validate("DROP *"), is(false));
        assertThat(QueryValidator.validate("DROP a_2.a_3"), is(false));

        assertThat(QueryValidator.validate("DROP a_2"), is(true));
    }

    // JSON documents: balanced braces/brackets, quoted keys (single or double
    // quotes), literals null/true/false, ints, floats and exponents.
    @Test
    public void validateJsonTest() {
        assertThat(QueryValidator.validateJson("{"), is(false));
        assertThat(QueryValidator.validateJson("{\"*"), is(false));
        assertThat(QueryValidator.validateJson("{\"a1\" : { \"a_2\":\"1\"}"), is(false));
        assertThat(QueryValidator.validateJson("{\"a\":\"b\":\"c\"}"), is(false));
        assertThat(QueryValidator.validateJson("{\"a\":\"b\",}"), is(false));
        assertThat(QueryValidator.validateJson("{\"a':\"b'}"), is(false));
        assertThat(QueryValidator.validateJson("{\"exp\":0,123}"), is(false));
        assertThat(QueryValidator.validateJson("{\"exp\":0,123}"), is(false));
        assertThat(QueryValidator.validateJson("{\"arr\":[1,\"abc\",]}"), is(false));
        assertThat(QueryValidator.validateJson("{\"arr\":[,\"abc\"]}"), is(false));

        assertThat(QueryValidator.validateJson("{}"), is(true));
        assertThat(QueryValidator.validateJson("{\"a1\" : { \"a_2\":\"1\" }}"), is(true));
        assertThat(QueryValidator.validateJson("{'a':'b'}"), is(true));
        assertThat(QueryValidator.validateJson("{\"a\":'b'}"), is(true));
        assertThat(QueryValidator.validateJson("{'a':\"b\"}"), is(true));
        assertThat(QueryValidator.validateJson("{\"null\":null}"), is(true));
        assertThat(QueryValidator.validateJson("{\"true\":true}"), is(true));
        assertThat(QueryValidator.validateJson("{\"false\":false}"), is(true));
        assertThat(QueryValidator.validateJson("{\"empty_object\":{}}"), is(true));
        assertThat(QueryValidator.validateJson("{\"empty\":\"\"}"), is(true));
        assertThat(QueryValidator.validateJson("{\"empty\":''}"), is(true));
        assertThat(QueryValidator.validateJson("{\"quote\":\"'\"}"), is(true));
        assertThat(QueryValidator.validateJson("{\"quote\":'\"'}"), is(true));
        assertThat(QueryValidator.validateJson("{\"quote\":\"'\"}"), is(true));
        assertThat(QueryValidator.validateJson("{\"quote\":\"\\\"\"}"), is(true));
        assertThat(QueryValidator.validateJson("{\"int\":123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"int_neg\":-123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"int_neg_null\":-0123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"float\":-0.123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"float\":-0.123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"exp\":4.9e-123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"exp\":4.9E-123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"exp\":4.9e+123}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[[]]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[[],[]]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[{}]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[{},{}]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[[],{}]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[\"a\"]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":['a']}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[true]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[\"a\",\"b\"]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[1]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[1,\"abc\"]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"arr\":[1,\"abc\",true,-12.25,false,\"\",null,\"\\\"\",\"'\",'abc',[],{},{'a':'1'}]}"), is(true));
        assertThat(QueryValidator.validateJson("{\"a1\" : { \"arr\":[1,\"abc\",true,-12.25,false,\"\",null,\"\\\"\",\"'\",'abc'], \"b\":{ 'a3':'123'} }, \"b1\":\"\"}"), is(true));
        assertThat(QueryValidator.validateJson("{\"a1\" : { \"a_2\":\"1\", \"arr\":[1,\"abc\",true,-12.25,false,\"\",null,\"\\\"\",\"'\",'abc'], \"b\":2 }, \"b1\":\"\"}"), is(true));
    }

    // WHERE acceptance per statement kind: a bare column without an operator
    // is never a complete condition.
    @Test
    public void validateWhereTest() {
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1"), is(true));

        assertThat(QueryValidator.validate("DELETE FROM x WHERE x"), is(false));
        assertThat(QueryValidator.validate("DELETE FROM x WHERE x=1"), is(true));

        assertThat(QueryValidator.validate("ALTER x ADD x WHERE x"), is(false));
        assertThat(QueryValidator.validate("ALTER x ADD x WHERE x=1"), is(true));

        assertThat(QueryValidator.validate("ALTER x REMOVE x WHERE x"), is(false));
        assertThat(QueryValidator.validate("ALTER x REMOVE x WHERE x=1"), is(true));

        assertThat(QueryValidator.validate("UPDATE x SET x=1 WHERE x"), is(false));
        assertThat(QueryValidator.validate("UPDATE x SET x=1 WHERE x=1"), is(true));
    }

    // WHERE clause grammar: AND/OR chains must alternate complete conditions;
    // LIKE only accepts quoted string operands.
    @Test
    public void validateWhereClauseTest() {
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE OR x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR OR x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE AND x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND AND x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND OR x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR AND x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x AND x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x=1 AND x"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x OR x=1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x=1 OR x"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like 1"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like x"), is(false));

        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x=1 OR x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x=1 AND x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x=1 OR x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x=1 AND x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x=1 AND x=1 OR x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x=1 OR x=1 AND x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 OR x=1 OR x=1 OR x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x=1 AND x=1 AND x=1 AND x=1"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x1.a2=true OR x=123 AND x=1.23 OR x=\"abc\" AND x='abc' OR x=\"\" AND x='' OR x=null AND x =1 OR x= 1 AND x = 1"), is(true));

        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x LIKE '1'1'"), is(false));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x LIKE '1\"1'"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x LIKE '1'"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like '111'"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like ''"), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like \"x\""), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like \"xxx\""), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like \"x\\\\\"x\""), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x like \"\""), is(true));
        assertThat(QueryValidator.validate("SELECT x FROM x WHERE x!=1 and x LIKE '1' or x like \"xx\" AND x like '' AND x like \"\" AND x>1 AND x<1 AND x<=1 AND x>=1"), is(true));
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.tasks.impl; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonSyntaxException; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.JDOMUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.tasks.Task; import com.intellij.tasks.TaskRepository; import com.intellij.tasks.TaskState; import com.intellij.tasks.impl.httpclient.ResponseUtil; import com.intellij.util.text.DateFormatUtil; import org.apache.http.HttpResponse; import org.jdom.Element; import org.jetbrains.annotations.Contract; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.text.ParseException; import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author Dmitry Avdeev */ public class TaskUtil { // Almost ISO-8601 strict except date parts may be separated by '/' // and date only also allowed just in case private static Pattern ISO8601_DATE_PATTERN = Pattern.compile("(\\d{4}[/-]\\d{2}[/-]\\d{2})" + // date (1) "(?:[ T]" + 
"(\\d{2}:\\d{2}:\\d{2})(.\\d{3,})?" + // optional time (2) and milliseconds (3) "(?:\\s?" + "([+-]\\d{2}:\\d{2}|[+-]\\d{4}|[+-]\\d{2}|Z)" + // optional timezone info (4), if time is also present ")?)?"); private TaskUtil() { // empty } public static String formatTask(@Nonnull Task task, String format) { return format.replace("{id}", task.getId()).replace("{number}", task.getNumber()).replace("{project}", StringUtil.notNullize(task.getProject())).replace("{summary}", task.getSummary()); } @Nullable public static String getChangeListComment(Task task) { final TaskRepository repository = task.getRepository(); if (repository == null || !repository.isShouldFormatCommitMessage()) { return null; } return formatTask(task, repository.getCommitMessageFormat()); } public static String getTrimmedSummary(Task task) { String text; if (task.isIssue()) { text = task.getId() + ": " + task.getSummary(); } else { text = task.getSummary(); } return StringUtil.first(text, 60, true); } @Nullable public static Date parseDate(@Nonnull String s) { // SimpleDateFormat prior JDK7 doesn't support 'X' specifier for ISO 8601 timezone format. // Because some bug trackers and task servers e.g. send dates ending with 'Z' (that stands for UTC), // dates should be preprocessed before parsing. Matcher m = ISO8601_DATE_PATTERN.matcher(s); if (!m.matches()) { return null; } String datePart = m.group(1).replace('/', '-'); String timePart = m.group(2); if (timePart == null) { timePart = "00:00:00"; } String milliseconds = m.group(3); milliseconds = milliseconds == null ? 
"000" : milliseconds.substring(1, 4); String timezone = m.group(4); if (timezone == null || timezone.equals("Z")) { timezone = "+0000"; } else if (timezone.length() == 3) { // [+-]HH timezone += "00"; } else if (timezone.length() == 6) { // [+-]HH:MM timezone = timezone.substring(0, 3) + timezone.substring(4, 6); } String canonicalForm = String.format("%sT%s.%s%s", datePart, timePart, milliseconds, timezone); try { return DateFormatUtil.getIso8601Format().parse(canonicalForm); } catch (ParseException e) { return null; } } public static String formatDate(@Nonnull Date date) { return DateFormatUtil.getIso8601Format().format(date); } /** * {@link Task#equals(Object)} implementation compares tasks by their unique IDs only. * This method should be used when full comparison is necessary. */ public static boolean tasksEqual(@Nonnull Task t1, @Nonnull Task t2) { if (!t1.getId().equals(t2.getId())) { return false; } if (!t1.getSummary().equals(t2.getSummary())) { return false; } if (t1.isClosed() != t2.isClosed()) { return false; } if (t1.isIssue() != t2.isIssue()) { return false; } if (!Comparing.equal(t1.getState(), t2.getState())) { return false; } if (!Comparing.equal(t1.getType(), t2.getType())) { return false; } if (!Comparing.equal(t1.getDescription(), t2.getDescription())) { return false; } if (!Comparing.equal(t1.getCreated(), t2.getCreated())) { return false; } if (!Comparing.equal(t1.getUpdated(), t2.getUpdated())) { return false; } if (!Comparing.equal(t1.getIssueUrl(), t2.getIssueUrl())) { return false; } if (!Comparing.equal(t1.getComments(), t2.getComments())) { return false; } if (!Comparing.equal(t1.getIcon(), t2.getIcon())) { return false; } if (!Comparing.equal(t1.getCustomIcon(), t2.getCustomIcon())) { return false; } return Comparing.equal(t1.getRepository(), t2.getRepository()); } public static boolean tasksEqual(@Nonnull List<? extends Task> tasks1, @Nonnull List<? 
extends Task> tasks2) { if (tasks1.size() != tasks2.size()) { return false; } for (int i = 0; i < tasks1.size(); i++) { if (!tasksEqual(tasks1.get(i), tasks2.get(i))) { return false; } } return true; } public static boolean tasksEqual(@Nonnull Task[] task1, @Nonnull Task[] task2) { return tasksEqual(Arrays.asList(task1), Arrays.asList(task2)); } /** * Print pretty-formatted XML to {@code logger}, if its level is DEBUG or below. */ public static void prettyFormatXmlToLog(@Nonnull Logger logger, @Nonnull Element element) { if (logger.isDebugEnabled()) { // alternatively //new XMLOutputter(Format.getPrettyFormat()).outputString(root) logger.debug("\n" + JDOMUtil.createOutputter("\n").outputString(element)); } } /** * Parse and print pretty-formatted XML to {@code logger}, if its level is DEBUG or below. */ public static void prettyFormatXmlToLog(@Nonnull Logger logger, @Nonnull InputStream xml) { if (logger.isDebugEnabled()) { try { logger.debug("\n" + JDOMUtil.createOutputter("\n").outputString(JDOMUtil.loadDocument(xml))); } catch (Exception e) { logger.debug(e); } } } /** * Parse and print pretty-formatted XML to {@code logger}, if its level is DEBUG or below. */ public static void prettyFormatXmlToLog(@Nonnull Logger logger, @Nonnull String xml) { if (logger.isDebugEnabled()) { try { logger.debug("\n" + JDOMUtil.createOutputter("\n").outputString(JDOMUtil.loadDocument(xml))); } catch (Exception e) { logger.debug(e); } } } /** * Parse and print pretty-formatted Json to {@code logger}, if its level is DEBUG or below. */ public static void prettyFormatJsonToLog(@Nonnull Logger logger, @Nonnull String json) { if (logger.isDebugEnabled()) { try { Gson gson = new GsonBuilder().setPrettyPrinting().create(); logger.debug("\n" + gson.toJson(gson.fromJson(json, JsonElement.class))); } catch (JsonSyntaxException e) { logger.debug("Malformed JSON\n" + json); } } } /** * Parse and print pretty-formatted Json to {@code logger}, if its level is DEBUG or below. 
*/ public static void prettyFormatJsonToLog(@Nonnull Logger logger, @Nonnull JsonElement json) { if (logger.isDebugEnabled()) { try { Gson gson = new GsonBuilder().setPrettyPrinting().create(); logger.debug("\n" + gson.toJson(json)); } catch (JsonSyntaxException e) { logger.debug("Malformed JSON\n" + json); } } } public static void prettyFormatResponseToLog(@Nonnull Logger logger, @Nonnull HttpResponse response) { if (logger.isDebugEnabled()) { try { String content = ResponseUtil.getResponseContentAsString(response); org.apache.http.Header header = response.getEntity().getContentType(); String contentType = header == null ? "text/plain" : header.getElements()[0].getName().toLowerCase(Locale.ENGLISH); if (contentType.contains("xml")) { prettyFormatXmlToLog(logger, content); } else if (contentType.contains("json")) { prettyFormatJsonToLog(logger, content); } else { logger.debug(content); } } catch (IOException e) { logger.error(e); } } } /** * Perform standard {@code application/x-www-urlencoded} translation for string {@code s}. * * @return urlencoded string */ @Nonnull public static String encodeUrl(@Nonnull String s) { try { return URLEncoder.encode(s, CharsetToolkit.UTF8); } catch (UnsupportedEncodingException e) { throw new AssertionError("UTF-8 is not supported"); } } @Contract("null, _ -> false") public static boolean isStateSupported(@Nullable TaskRepository repository, @Nonnull TaskState state) { if (repository == null || !repository.isSupported(TaskRepository.STATE_UPDATING)) { return false; } return repository.getRepositoryType().getPossibleTaskStates().contains(state); } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.sns.model;

import java.io.Serializable;

/**
 * <p>
 * Platform application object.
 * </p>
 * <p>
 * Value object pairing a platform application ARN with its attribute map.
 * Instances are mutable and NOT thread-safe; the fluent {@code with*} methods
 * return {@code this} for chaining.
 * </p>
 */
public class PlatformApplication implements Serializable, Cloneable {

    /**
     * <p>
     * PlatformApplicationArn for platform application object.
     * </p>
     */
    private String platformApplicationArn;

    /**
     * <p>
     * Attributes for platform application object.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalMap<String, String> attributes;

    /**
     * <p>
     * PlatformApplicationArn for platform application object.
     * </p>
     *
     * @param platformApplicationArn
     *        PlatformApplicationArn for platform application object.
     */
    public void setPlatformApplicationArn(String platformApplicationArn) {
        this.platformApplicationArn = platformApplicationArn;
    }

    /**
     * <p>
     * PlatformApplicationArn for platform application object.
     * </p>
     *
     * @return PlatformApplicationArn for platform application object.
     */
    public String getPlatformApplicationArn() {
        return this.platformApplicationArn;
    }

    /**
     * <p>
     * PlatformApplicationArn for platform application object.
     * </p>
     *
     * @param platformApplicationArn
     *        PlatformApplicationArn for platform application object.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PlatformApplication withPlatformApplicationArn(
            String platformApplicationArn) {
        setPlatformApplicationArn(platformApplicationArn);
        return this;
    }

    /**
     * <p>
     * Attributes for platform application object.
     * </p>
     * <p>
     * Lazily initializes the backing map, so this never returns {@code null}.
     * </p>
     *
     * @return Attributes for platform application object.
     */
    public java.util.Map<String, String> getAttributes() {
        if (attributes == null) {
            attributes = new com.amazonaws.internal.SdkInternalMap<String, String>();
        }
        return attributes;
    }

    /**
     * <p>
     * Attributes for platform application object.
     * </p>
     * <p>
     * Defensively copies the supplied map; passing {@code null} clears the
     * attributes.
     * </p>
     *
     * @param attributes
     *        Attributes for platform application object.
     */
    public void setAttributes(java.util.Map<String, String> attributes) {
        this.attributes = attributes == null ? null
                : new com.amazonaws.internal.SdkInternalMap<String, String>(
                        attributes);
    }

    /**
     * <p>
     * Attributes for platform application object.
     * </p>
     *
     * @param attributes
     *        Attributes for platform application object.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PlatformApplication withAttributes(
            java.util.Map<String, String> attributes) {
        setAttributes(attributes);
        return this;
    }

    /**
     * Adds a single entry to the Attributes map, rejecting duplicate keys.
     *
     * @param key
     *        Attribute key to add.
     * @param value
     *        Attribute value to associate with {@code key}.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @throws IllegalArgumentException
     *         if {@code key} is already present in the attributes.
     */
    public PlatformApplication addAttributesEntry(String key, String value) {
        if (null == this.attributes) {
            this.attributes = new com.amazonaws.internal.SdkInternalMap<String, String>();
        }
        if (this.attributes.containsKey(key)) {
            // key is already a String: the previous key.toString() was
            // redundant and would NPE on a duplicate null key before a
            // useful message could be built.
            throw new IllegalArgumentException("Duplicated keys (" + key
                    + ") are provided.");
        }
        this.attributes.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Attributes.
     * <p>
     * Returns a reference to this object so that method calls can be chained
     * together.
     */
    public PlatformApplication clearAttributesEntries() {
        this.attributes = null;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getPlatformApplicationArn() != null) {
            sb.append("PlatformApplicationArn: " + getPlatformApplicationArn()
                    + ",");
        }
        if (getAttributes() != null) {
            sb.append("Attributes: " + getAttributes());
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof PlatformApplication)) {
            return false;
        }
        PlatformApplication other = (PlatformApplication) obj;
        // XOR detects the "exactly one side is null" mismatch for each field.
        if (other.getPlatformApplicationArn() == null
                ^ this.getPlatformApplicationArn() == null) {
            return false;
        }
        if (other.getPlatformApplicationArn() != null
                && !other.getPlatformApplicationArn().equals(
                        this.getPlatformApplicationArn())) {
            return false;
        }
        if (other.getAttributes() == null ^ this.getAttributes() == null) {
            return false;
        }
        if (other.getAttributes() != null
                && !other.getAttributes().equals(this.getAttributes())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime
                * hashCode
                + ((getPlatformApplicationArn() == null) ? 0
                        : getPlatformApplicationArn().hashCode());
        hashCode = prime * hashCode
                + ((getAttributes() == null) ? 0 : getAttributes().hashCode());
        return hashCode;
    }

    @Override
    public PlatformApplication clone() {
        try {
            // Shallow clone is the documented contract of the generated SDK
            // model classes; the class declares Cloneable, so this cannot
            // legitimately throw.
            return (PlatformApplication) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
package mcjty.rftools.blocks.screens;

import mcjty.lib.McJtyLib;
import mcjty.lib.blocks.BaseBlock;
import mcjty.lib.blocks.GenericBlock;
import mcjty.lib.container.EmptyContainer;
import mcjty.rftools.RFTools;
import mcjty.theoneprobe.api.IProbeHitData;
import mcjty.theoneprobe.api.IProbeInfo;
import mcjty.theoneprobe.api.ProbeMode;
import mcp.mobius.waila.api.IWailaConfigHandler;
import mcp.mobius.waila.api.IWailaDataAccessor;
import net.minecraft.block.Block;
import net.minecraft.block.material.EnumPushReaction;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumBlockRenderType;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.world.Explosion;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.Optional;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

import java.util.List;
import java.util.Random;

/**
 * Helper block that stands in for one cell of a (potentially multi-block) screen.
 * Its tile entity ({@link ScreenHitTileEntity}) stores a (dx, dy, dz) offset to the
 * real screen block, and every interaction (clicks, probe/Waila overlays, item
 * pickup) is forwarded to that screen block. The block itself is unbreakable,
 * blast-resistant, drops nothing, and renders via a TESR.
 */
public class ScreenHitBlock extends GenericBlock<ScreenHitTileEntity, EmptyContainer> {

    public ScreenHitBlock() {
        // No GUI (null), registry name "screen_hitblock", last flag false —
        // see GenericBlock in mcjty.lib for the parameter meanings.
        super(RFTools.instance, Material.GLASS, ScreenHitTileEntity.class, EmptyContainer::new, null, "screen_hitblock", false);
        setBlockUnbreakable();
        setResistance(6000000.0F);
//        setUnlocalizedName("rftools.screen_hitblock");
//        setRegistryName("screen_hitblock");
//        GameRegistry.register(this);
//        GameRegistry.registerTileEntity(ScreenHitTileEntity.class, "screen_hitblock");
    }

    /**
     * Pick-block: returns the item of the real screen block this cell belongs to,
     * or an empty stack when the offset does not point at a screen block.
     */
    @Override
    public ItemStack getItem(World worldIn, BlockPos pos, IBlockState state) {
        BlockPos screenPos = getScreenBlockPos(worldIn, pos);
        if (screenPos == null) return ItemStack.EMPTY;
        IBlockState screenState = worldIn.getBlockState(screenPos);
        return screenState.getBlock().getItem(worldIn, screenPos, screenState);
    }

    // -1: this block opens no GUI of its own.
    @Override
    public int getGuiID() {
        return -1;
    }

    /**
     * The One Probe overlay: delegates to the real screen block found at the
     * stored (dx, dy, dz) offset, so the probe shows the screen's info.
     */
    @Override
    @Optional.Method(modid = "theoneprobe")
    public void addProbeInfo(ProbeMode mode, IProbeInfo probeInfo, EntityPlayer player, World world, IBlockState blockState, IProbeHitData data) {
        super.addProbeInfo(mode, probeInfo, player, world, blockState, data);
        BlockPos pos = data.getPos();
        // NOTE(review): unchecked cast; assumes the tile entity at pos is always
        // a ScreenHitTileEntity — confirm this block can never exist without it.
        ScreenHitTileEntity screenHitTileEntity = (ScreenHitTileEntity) world.getTileEntity(pos);
        int dx = screenHitTileEntity.getDx();
        int dy = screenHitTileEntity.getDy();
        int dz = screenHitTileEntity.getDz();
        Block block = world.getBlockState(pos.add(dx, dy, dz)).getBlock();
        if (block instanceof ScreenBlock) {
            ((ScreenBlock) block).addProbeInfoScreen(mode, probeInfo, player, world, pos.add(dx, dy, dz));
        }
    }

    /**
     * Waila tooltip: resolves the real screen block, ray-traces which module of
     * the screen the cursor is over (coordinates translated into the screen's
     * local space by subtracting pos + offset), and delegates the tooltip body.
     */
    @SideOnly(Side.CLIENT)
    @Override
    @Optional.Method(modid = "waila")
    public List<String> getWailaBody(ItemStack itemStack, List<String> currenttip, IWailaDataAccessor accessor, IWailaConfigHandler config) {
        super.getWailaBody(itemStack, currenttip, accessor, config);
        BlockPos pos = accessor.getPosition();
        World world = accessor.getWorld();
        ScreenHitTileEntity screenHitTileEntity = (ScreenHitTileEntity) world.getTileEntity(pos);
        int dx = screenHitTileEntity.getDx();
        int dy = screenHitTileEntity.getDy();
        int dz = screenHitTileEntity.getDz();
        BlockPos rpos = pos.add(dx, dy, dz);
        IBlockState state = world.getBlockState(rpos);
        Block block = state.getBlock();
        if (block instanceof ScreenBlock) {
            TileEntity te = world.getTileEntity(rpos);
            if (te instanceof ScreenTileEntity) {
                RayTraceResult mouseOver = accessor.getMOP();
                ScreenTileEntity screenTileEntity = (ScreenTileEntity) te;
                // Hit coordinates are made relative to this hit-block cell, not
                // to the screen block itself (hence pos, not rpos).
                ScreenTileEntity.ModuleRaytraceResult hit = screenTileEntity.getHitModule(mouseOver.hitVec.x - pos.getX() - dx, mouseOver.hitVec.y - pos.getY() - dy, mouseOver.hitVec.z - pos.getZ() - dz, mouseOver.sideHit, state.getValue(ScreenBlock.HORIZONTAL_FACING));
                ((ScreenBlock) block).getWailaBodyScreen(currenttip, accessor.getPlayer(), screenTileEntity, hit);
            }
        }
        return currenttip;
    }

    @Override
    public TileEntity createTileEntity(World world, IBlockState state) {
        return new ScreenHitTileEntity();
    }

    @Override
    public TileEntity createNewTileEntity(World world, int meta) {
        return new ScreenHitTileEntity();
    }

    @Override
    public void initModel() {
        // NOTE(review): registers the item-stack TESR with ScreenTileEntity.class
        // (the real screen's TE), not ScreenHitTileEntity.class — presumably so the
        // item renders like a screen; confirm this is intentional.
        McJtyLib.proxy.initTESRItemStack(Item.getItemFromBlock(this), 0, ScreenTileEntity.class);
        super.initModel();
    }

    /**
     * Left-click: client-side only, forwards the click (translated to the
     * screen's local coordinates) to the real screen's hitScreenClient.
     */
    @Override
    public void onBlockClicked(World world, BlockPos pos, EntityPlayer playerIn) {
        if (world.isRemote) {
            ScreenHitTileEntity screenHitTileEntity = (ScreenHitTileEntity) world.getTileEntity(pos);
            int dx = screenHitTileEntity.getDx();
            int dy = screenHitTileEntity.getDy();
            int dz = screenHitTileEntity.getDz();
            IBlockState state = world.getBlockState(pos.add(dx, dy, dz));
            Block block = state.getBlock();
            // Bail out when the offset no longer points at a screen block
            // (e.g. the screen was removed but this cell lingers).
            if (block != ScreenSetup.screenBlock && block != ScreenSetup.creativeScreenBlock) {
                return;
            }
            RayTraceResult mouseOver = Minecraft.getMinecraft().objectMouseOver;
            ScreenTileEntity screenTileEntity = (ScreenTileEntity) world.getTileEntity(pos.add(dx, dy, dz));
            screenTileEntity.hitScreenClient(mouseOver.hitVec.x - pos.getX() - dx, mouseOver.hitVec.y - pos.getY() - dy, mouseOver.hitVec.z - pos.getZ() - dz, mouseOver.sideHit, state.getValue(ScreenBlock.HORIZONTAL_FACING));
        }
    }

    @Override
    public boolean onBlockActivated(World world, BlockPos pos, IBlockState state, EntityPlayer player, EnumHand hand, EnumFacing side, float hitX, float hitY, float hitZ) {
        return activate(world, pos, state, player, hand, side, hitX, hitY, hitZ);
    }

    /**
     * Right-click: redirects activation to the real screen block. Returns false
     * (not handled) when the offset does not resolve to a screen block.
     * NOTE(review): the cast to ScreenBlock is unchecked here, unlike the
     * instanceof guards used elsewhere — getScreenBlockPos guarantees the block
     * type, so this is safe as long as that invariant holds.
     */
    public boolean activate(World world, BlockPos pos, IBlockState state, EntityPlayer player, EnumHand hand, EnumFacing side, float hitX, float hitY, float hitZ) {
        pos = getScreenBlockPos(world, pos);
        if (pos == null) {
            return false;
        }
        Block block = world.getBlockState(pos).getBlock();
        return ((ScreenBlock) block).activate(world, pos, state, player, hand, side, hitX, hitY, hitZ);
    }

    @Override
    public boolean rotateBlock(World world, BlockPos pos, EnumFacing axis) {
        // Doesn't make sense to rotate a potentially 3x3 screen,
        // and is incompatible with our special wrench actions.
        return false;
    }

    /**
     * Resolves the position of the real screen block this cell belongs to by
     * applying the tile entity's stored offset; returns null when the target
     * is not a (creative) screen block.
     */
    public BlockPos getScreenBlockPos(World world, BlockPos pos) {
        ScreenHitTileEntity screenHitTileEntity = (ScreenHitTileEntity) world.getTileEntity(pos);
        int dx = screenHitTileEntity.getDx();
        int dy = screenHitTileEntity.getDy();
        int dz = screenHitTileEntity.getDz();
        pos = pos.add(dx, dy, dz);
        Block block = world.getBlockState(pos).getBlock();
        if (block != ScreenSetup.screenBlock && block != ScreenSetup.creativeScreenBlock) {
            return null;
        }
        return pos;
    }

    // Per-facing selection boxes: a 0.125-thick slab flush against the face the
    // screen is mounted on; BLOCK_AABB is the full-cube fallback.
    public static final AxisAlignedBB BLOCK_AABB = new AxisAlignedBB(0.5F - 0.5F, 0.0F, 0.5F - 0.5F, 0.5F + 0.5F, 1.0F, 0.5F + 0.5F);
    public static final AxisAlignedBB NORTH_AABB = new AxisAlignedBB(0.0F, 0.0F, 1.0F - 0.125F, 1.0F, 1.0F, 1.0F);
    public static final AxisAlignedBB SOUTH_AABB = new AxisAlignedBB(0.0F, 0.0F, 0.0F, 1.0F, 1.0F, 0.125F);
    public static final AxisAlignedBB WEST_AABB = new AxisAlignedBB(1.0F - 0.125F, 0.0F, 0.0F, 1.0F, 1.0F, 1.0F);
    public static final AxisAlignedBB EAST_AABB = new AxisAlignedBB(0.0F, 0.0F, 0.0F, 0.125F, 1.0F, 1.0F);
    public static final AxisAlignedBB UP_AABB = new AxisAlignedBB(0.0F, 0.0F, 0.0F, 1.0F, 0.125F, 1.0F);
    public static final AxisAlignedBB DOWN_AABB = new AxisAlignedBB(0.0F, 1.0F - 0.125F, 0.0F, 1.0F, 1.0F, 1.0F);

    @Override
    public AxisAlignedBB getBoundingBox(IBlockState state, IBlockAccess source, BlockPos pos) {
        EnumFacing facing = state.getValue(BaseBlock.FACING);
        if (facing == EnumFacing.NORTH) {
            return NORTH_AABB;
        } else if (facing == EnumFacing.SOUTH) {
            return SOUTH_AABB;
        } else if (facing == EnumFacing.WEST) {
            return WEST_AABB;
        } else if (facing == EnumFacing.EAST) {
            return EAST_AABB;
        } else if (facing == EnumFacing.UP) {
            return UP_AABB;
        } else if (facing == EnumFacing.DOWN) {
            return DOWN_AABB;
        } else {
            return BLOCK_AABB;
        }
    }

    // The block is a thin transparent panel, so it is not a full/opaque cube.
    @Override
    public boolean isOpaqueCube(IBlockState state) {
        return false;
    }

    @Override
    public boolean isBlockNormalCube(IBlockState state) {
        return false;
    }

    @Override
    public boolean isFullBlock(IBlockState state) {
        return false;
    }

    @Override
    public boolean isFullCube(IBlockState state) {
        return false;
    }

    // Rendered entirely by the tile entity's TESR.
    @Override
    public EnumBlockRenderType getRenderType(IBlockState state) {
        return EnumBlockRenderType.ENTITYBLOCK_ANIMATED;
    }

    // Immune to entity destruction (e.g. ender dragon, wither).
    @Override
    public boolean canEntityDestroy(IBlockState state, IBlockAccess world, BlockPos pos, Entity entity) {
        return false;
    }

    // Intentionally empty: explosions must not destroy screen cells.
    @Override
    public void onBlockExploded(World world, BlockPos pos, Explosion explosion) {
    }

    // Drops nothing; the real screen block handles item drops.
    @Override
    public int quantityDropped(Random random) {
        return 0;
    }

    // Pistons cannot move screen cells (would desync the stored offsets).
    @Override
    public EnumPushReaction getMobilityFlag(IBlockState state) {
        return EnumPushReaction.BLOCK;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.topology; import static org.easymock.EasyMock.anyBoolean; import static org.easymock.EasyMock.anyLong; import static org.easymock.EasyMock.anyObject; import static org.easymock.EasyMock.anyString; import static org.easymock.EasyMock.capture; import static org.easymock.EasyMock.eq; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.expectLastCall; import static org.easymock.EasyMock.isA; import static org.easymock.EasyMock.isNull; import static org.easymock.EasyMock.newCapture; import static org.junit.Assert.assertEquals; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import org.apache.ambari.server.Role; import org.apache.ambari.server.RoleCommand; import org.apache.ambari.server.actionmanager.HostRoleCommand; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.controller.AmbariManagementController; import 
org.apache.ambari.server.controller.AmbariServer; import org.apache.ambari.server.controller.ClusterRequest; import org.apache.ambari.server.controller.ConfigurationRequest; import org.apache.ambari.server.controller.RequestStatusResponse; import org.apache.ambari.server.controller.internal.ProvisionAction; import org.apache.ambari.server.controller.internal.ProvisionClusterRequest; import org.apache.ambari.server.controller.internal.Stack; import org.apache.ambari.server.controller.spi.ClusterController; import org.apache.ambari.server.controller.spi.ResourceProvider; import org.apache.ambari.server.events.publishers.AmbariEventPublisher; import org.apache.ambari.server.orm.entities.TopologyLogicalRequestEntity; import org.apache.ambari.server.security.encryption.CredentialStoreService; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.SecurityType; import org.apache.ambari.server.topology.tasks.ConfigureClusterTask; import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory; import org.apache.ambari.server.topology.validators.TopologyValidatorService; import org.easymock.Capture; import org.easymock.EasyMockRule; import org.easymock.EasyMockSupport; import org.easymock.Mock; import org.easymock.MockType; import org.easymock.TestSubject; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.easymock.PowerMock; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) @PrepareForTest(AmbariServer.class) public class ClusterDeployWithStartOnlyTest extends EasyMockSupport { private static final String CLUSTER_NAME = "test-cluster"; private static final long CLUSTER_ID = 1; private static final String BLUEPRINT_NAME = "test-bp"; 
private static final String STACK_NAME = "test-stack"; private static final String STACK_VERSION = "test-stack-version"; @Rule public EasyMockRule mocks = new EasyMockRule(this); @TestSubject private TopologyManager topologyManager = new TopologyManager(); @Mock(type = MockType.NICE) private Blueprint blueprint; @Mock(type = MockType.NICE) private Stack stack; @Mock(type = MockType.NICE) private ProvisionClusterRequest request; private PersistedTopologyRequest persistedTopologyRequest; private LogicalRequestFactory logicalRequestFactory; @Mock(type = MockType.DEFAULT) private LogicalRequest logicalRequest; @Mock(type = MockType.NICE) private AmbariContext ambariContext; @Mock(type = MockType.NICE) private ConfigurationRequest configurationRequest; @Mock(type = MockType.NICE) private ConfigurationRequest configurationRequest2; @Mock(type = MockType.NICE) private ConfigurationRequest configurationRequest3; @Mock(type = MockType.NICE) private RequestStatusResponse requestStatusResponse; @Mock(type = MockType.STRICT) private ExecutorService executor; @Mock(type = MockType.STRICT) private PersistedState persistedState; @Mock(type = MockType.NICE) private HostGroup group1; @Mock(type = MockType.NICE) private HostGroup group2; @Mock(type = MockType.STRICT) private SecurityConfigurationFactory securityConfigurationFactory; @Mock(type = MockType.STRICT) private CredentialStoreService credentialStoreService; @Mock(type = MockType.STRICT) private ClusterController clusterController; @Mock(type = MockType.STRICT) private ResourceProvider resourceProvider; @Mock(type = MockType.NICE) private AmbariManagementController managementController; @Mock(type = MockType.NICE) private Clusters clusters; @Mock(type = MockType.NICE) private Cluster cluster; @Mock(type = MockType.NICE) private HostRoleCommand hostRoleCommandInstallComponent3; @Mock(type = MockType.NICE) private HostRoleCommand hostRoleCommandInstallComponent4; @Mock(type = MockType.NICE) private HostRoleCommand 
hostRoleCommandStartComponent1; @Mock(type = MockType.NICE) private HostRoleCommand hostRoleCommandStartComponent2; @Mock(type = MockType.NICE) private ComponentInfo serviceComponentInfo; @Mock(type = MockType.NICE) private ComponentInfo clientComponentInfo; @Mock(type = MockType.NICE) private ConfigureClusterTaskFactory configureClusterTaskFactory; @Mock(type = MockType.NICE) private ConfigureClusterTask configureClusterTask; @Mock(type = MockType.STRICT) private Future mockFuture; @Mock private TopologyValidatorService topologyValidatorServiceMock; private final Configuration stackConfig = new Configuration(new HashMap<String, Map<String, String>>(), new HashMap<String, Map<String, Map<String, String>>>()); private final Configuration bpConfiguration = new Configuration(new HashMap<String, Map<String, String>>(), new HashMap<String, Map<String, Map<String, String>>>(), stackConfig); private final Configuration topoConfiguration = new Configuration(new HashMap<String, Map<String, String>>(), new HashMap<String, Map<String, Map<String, String>>>(), bpConfiguration); private final Configuration bpGroup1Config = new Configuration(new HashMap<String, Map<String, String>>(), new HashMap<String, Map<String, Map<String, String>>>(), bpConfiguration); private final Configuration bpGroup2Config = new Configuration(new HashMap<String, Map<String, String>>(), new HashMap<String, Map<String, Map<String, String>>>(), bpConfiguration); private final Configuration topoGroup1Config = new Configuration(new HashMap<String, Map<String, String>>(), new HashMap<String, Map<String, Map<String, String>>>(), bpGroup1Config); private final Configuration topoGroup2Config = new Configuration(new HashMap<String, Map<String, String>>(), new HashMap<String, Map<String, Map<String, String>>>(), bpGroup2Config); private HostGroupInfo group1Info = new HostGroupInfo("group1"); private HostGroupInfo group2Info = new HostGroupInfo("group2"); private Map<String, HostGroupInfo> groupInfoMap = new 
HashMap<>(); private Collection<String> group1Components = Arrays.asList("component1", "component2", "component3"); private Collection<String> group2Components = Arrays.asList("component3", "component4"); private Map<String, Collection<String>> group1ServiceComponents = new HashMap<>(); private Map<String, Collection<String>> group2ServiceComponents = new HashMap<>(); private Map<String, Collection<String>> serviceComponents = new HashMap<>(); private String predicate = "Hosts/host_name=foo"; private List<TopologyValidator> topologyValidators = new ArrayList<>(); private Capture<ClusterTopology> clusterTopologyCapture; private Capture<Map<String, Object>> configRequestPropertiesCapture; private Capture<Map<String, Object>> configRequestPropertiesCapture2; private Capture<Map<String, Object>> configRequestPropertiesCapture3; private Capture<ClusterRequest> updateClusterConfigRequestCapture; private Capture<Runnable> updateConfigTaskCapture; @Before public void setup() throws Exception { clusterTopologyCapture = newCapture(); configRequestPropertiesCapture = newCapture(); configRequestPropertiesCapture2 = newCapture(); configRequestPropertiesCapture3 = newCapture(); updateClusterConfigRequestCapture = newCapture(); updateConfigTaskCapture = newCapture(); topoConfiguration.setProperty("service1-site", "s1-prop", "s1-prop-value"); topoConfiguration.setProperty("service2-site", "s2-prop", "s2-prop-value"); topoConfiguration.setProperty("cluster-env", "g-prop", "g-prop-value"); //clusterRequestCapture = EasyMock.newCapture(); // group 1 has fqdn specified group1Info.addHost("host1"); group1Info.setConfiguration(topoGroup1Config); // group 2 has host_count and host_predicate specified group2Info.setRequestedCount(2); group2Info.setPredicate(predicate); group2Info.setConfiguration(topoGroup2Config); groupInfoMap.put("group1", group1Info); groupInfoMap.put("group2", group2Info); Map<String, HostGroup> groupMap = new HashMap<>(); groupMap.put("group1", group1); 
groupMap.put("group2", group2); serviceComponents.put("service1", Arrays.asList("component1", "component3")); serviceComponents.put("service2", Arrays.asList("component2", "component4")); group1ServiceComponents.put("service1", Arrays.asList("component1", "component3")); group1ServiceComponents.put("service2", Collections.singleton("component2")); group2ServiceComponents.put("service2", Collections.singleton("component3")); group2ServiceComponents.put("service2", Collections.singleton("component4")); expect(blueprint.getHostGroup("group1")).andReturn(group1).anyTimes(); expect(blueprint.getHostGroup("group2")).andReturn(group2).anyTimes(); expect(blueprint.getComponents("service1")).andReturn(Arrays.asList("component1", "component3")).anyTimes(); expect(blueprint.getComponents("service2")).andReturn(Arrays.asList("component2", "component4")).anyTimes(); expect(blueprint.getConfiguration()).andReturn(bpConfiguration).anyTimes(); expect(blueprint.getHostGroups()).andReturn(groupMap).anyTimes(); expect(blueprint.getHostGroupsForComponent("component1")).andReturn(Collections.singleton(group1)).anyTimes(); expect(blueprint.getHostGroupsForComponent("component2")).andReturn(Collections.singleton(group1)).anyTimes(); expect(blueprint.getHostGroupsForComponent("component3")).andReturn(Arrays.asList(group1, group2)).anyTimes(); expect(blueprint.getHostGroupsForComponent("component4")).andReturn(Collections.singleton(group2)).anyTimes(); expect(blueprint.getHostGroupsForService("service1")).andReturn(Arrays.asList(group1, group2)).anyTimes(); expect(blueprint.getHostGroupsForService("service2")).andReturn(Arrays.asList(group1, group2)).anyTimes(); expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).anyTimes(); expect(blueprint.getServices()).andReturn(Arrays.asList("service1", "service2")).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); expect(blueprint.isValidConfigType(anyString())).andReturn(true).anyTimes(); // don't expect toEntity() 
expect(stack.getAllConfigurationTypes("service1")).andReturn(Arrays.asList("service1-site", "service1-env")).anyTimes(); expect(stack.getAllConfigurationTypes("service2")).andReturn(Arrays.asList("service2-site", "service2-env")).anyTimes(); expect(stack.getAutoDeployInfo("component1")).andReturn(null).anyTimes(); expect(stack.getAutoDeployInfo("component2")).andReturn(null).anyTimes(); expect(stack.getAutoDeployInfo("component3")).andReturn(null).anyTimes(); expect(stack.getAutoDeployInfo("component4")).andReturn(null).anyTimes(); expect(serviceComponentInfo.isClient()).andReturn(false).anyTimes(); expect(clientComponentInfo.isClient()).andReturn(true).anyTimes(); expect(stack.getComponentInfo("component1")).andReturn(serviceComponentInfo).anyTimes(); expect(stack.getComponentInfo("component2")).andReturn(serviceComponentInfo).anyTimes(); expect(stack.getComponentInfo("component3")).andReturn(clientComponentInfo).anyTimes(); expect(stack.getComponentInfo("component4")).andReturn(clientComponentInfo).anyTimes(); expect(stack.getCardinality("component1")).andReturn(new Cardinality("1")).anyTimes(); expect(stack.getCardinality("component2")).andReturn(new Cardinality("1")).anyTimes(); expect(stack.getCardinality("component3")).andReturn(new Cardinality("1+")).anyTimes(); expect(stack.getCardinality("component4")).andReturn(new Cardinality("1+")).anyTimes(); expect(stack.getComponents()).andReturn(serviceComponents).anyTimes(); expect(stack.getComponents("service1")).andReturn(serviceComponents.get("service1")).anyTimes(); expect(stack.getComponents("service2")).andReturn(serviceComponents.get("service2")).anyTimes(); expect(stack.getConfiguration()).andReturn(stackConfig).anyTimes(); expect(stack.getName()).andReturn(STACK_NAME).anyTimes(); expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes(); expect(stack.getServiceForConfigType("service1-site")).andReturn("service1").anyTimes(); 
expect(stack.getServiceForConfigType("service2-site")).andReturn("service2").anyTimes(); expect(stack.getExcludedConfigurationTypes("service1")).andReturn(Collections.<String>emptySet()).anyTimes(); expect(stack.getExcludedConfigurationTypes("service2")).andReturn(Collections.<String>emptySet()).anyTimes(); expect(request.getBlueprint()).andReturn(blueprint).anyTimes(); expect(request.getClusterId()).andReturn(CLUSTER_ID).anyTimes(); expect(request.getClusterName()).andReturn(CLUSTER_NAME).anyTimes(); expect(request.getDescription()).andReturn("Provision Cluster Test").anyTimes(); expect(request.getConfiguration()).andReturn(topoConfiguration).anyTimes(); expect(request.getHostGroupInfo()).andReturn(groupInfoMap).anyTimes(); expect(request.getConfigRecommendationStrategy()).andReturn(ConfigRecommendationStrategy.NEVER_APPLY); expect(request.getProvisionAction()).andReturn(ProvisionAction.START_ONLY).anyTimes(); expect(request.getSecurityConfiguration()).andReturn(null).anyTimes(); expect(request.getRepositoryVersion()).andReturn("1").anyTimes(); expect(group1.getBlueprintName()).andReturn(BLUEPRINT_NAME).anyTimes(); expect(group1.getCardinality()).andReturn("test cardinality").anyTimes(); expect(group1.containsMasterComponent()).andReturn(true).anyTimes(); expect(group1.getComponentNames()).andReturn(group1Components).anyTimes(); expect(group1.getComponentNames(anyObject(ProvisionAction.class))).andReturn(Collections.<String>emptyList()).anyTimes(); expect(group1.getComponents("service1")).andReturn(group1ServiceComponents.get("service1")).anyTimes(); expect(group1.getComponents("service2")).andReturn(group1ServiceComponents.get("service1")).anyTimes(); expect(group1.getConfiguration()).andReturn(topoGroup1Config).anyTimes(); expect(group1.getName()).andReturn("group1").anyTimes(); expect(group1.getServices()).andReturn(Arrays.asList("service1", "service2")).anyTimes(); expect(group1.getStack()).andReturn(stack).anyTimes(); 
expect(group2.getBlueprintName()).andReturn(BLUEPRINT_NAME).anyTimes(); expect(group2.getCardinality()).andReturn("test cardinality").anyTimes(); expect(group2.containsMasterComponent()).andReturn(false).anyTimes(); expect(group2.getComponentNames()).andReturn(group2Components).anyTimes(); expect(group2.getComponentNames(anyObject(ProvisionAction.class))).andReturn(Collections.<String>emptyList()).anyTimes(); expect(group2.getComponents("service1")).andReturn(group2ServiceComponents.get("service1")).anyTimes(); expect(group2.getComponents("service2")).andReturn(group2ServiceComponents.get("service2")).anyTimes(); expect(group2.getConfiguration()).andReturn(topoGroup2Config).anyTimes(); expect(group2.getName()).andReturn("group2").anyTimes(); expect(group2.getServices()).andReturn(Arrays.asList("service1", "service2")).anyTimes(); expect(group2.getStack()).andReturn(stack).anyTimes(); // Create partial mock to allow actual logical request creation logicalRequestFactory = createMockBuilder(LogicalRequestFactory.class).addMockedMethod( LogicalRequestFactory.class.getMethod("createRequest", Long.class, TopologyRequest.class, ClusterTopology.class, TopologyLogicalRequestEntity.class)).createMock(); Field f = TopologyManager.class.getDeclaredField("logicalRequestFactory"); f.setAccessible(true); f.set(topologyManager, logicalRequestFactory); PowerMock.mockStatic(AmbariServer.class); expect(AmbariServer.getController()).andReturn(managementController).anyTimes(); PowerMock.replay(AmbariServer.class); expect(managementController.getClusters()).andReturn(clusters).anyTimes(); expect(clusters.getClusterById(anyLong())).andReturn(cluster).anyTimes(); expect(cluster.getClusterName()).andReturn(CLUSTER_NAME).anyTimes(); expect(ambariContext.getPersistedTopologyState()).andReturn(persistedState).anyTimes(); //todo: don't ignore param ambariContext.createAmbariResources(isA(ClusterTopology.class), eq(CLUSTER_NAME), (SecurityType) isNull(), eq("1"), anyLong()); 
expectLastCall().once(); expect(ambariContext.getNextRequestId()).andReturn(1L).once(); expect(ambariContext.isClusterKerberosEnabled(CLUSTER_ID)).andReturn(false).anyTimes(); expect(ambariContext.getClusterId(CLUSTER_NAME)).andReturn(CLUSTER_ID).anyTimes(); expect(ambariContext.getClusterName(CLUSTER_ID)).andReturn(CLUSTER_NAME).anyTimes(); // so only INITIAL config expect(ambariContext.createConfigurationRequests(capture(configRequestPropertiesCapture))). andReturn(Collections.singletonList(configurationRequest)); expect(ambariContext.createConfigurationRequests(capture(configRequestPropertiesCapture2))). andReturn(Collections.singletonList(configurationRequest2)).once(); expect(ambariContext.createConfigurationRequests(capture(configRequestPropertiesCapture3))). andReturn(Collections.singletonList(configurationRequest3)).once(); // INSTALL task expectation expect(ambariContext.createAmbariTask(anyLong(), anyLong(), eq("component3"), anyString(), eq(AmbariContext.TaskType.INSTALL), anyBoolean())).andReturn(hostRoleCommandInstallComponent3).times(3); expect(ambariContext.createAmbariTask(anyLong(), anyLong(), eq("component4"), anyString(), eq(AmbariContext.TaskType.INSTALL), anyBoolean())).andReturn(hostRoleCommandInstallComponent4).times(2); expect(hostRoleCommandInstallComponent3.getTaskId()).andReturn(1L).atLeastOnce(); expect(hostRoleCommandInstallComponent3.getRoleCommand()).andReturn(RoleCommand.INSTALL).atLeastOnce(); expect(hostRoleCommandInstallComponent3.getRole()).andReturn(Role.INSTALL_PACKAGES).atLeastOnce(); expect(hostRoleCommandInstallComponent3.getStatus()).andReturn(HostRoleStatus.COMPLETED).atLeastOnce(); expect(hostRoleCommandInstallComponent4.getTaskId()).andReturn(2L).atLeastOnce(); expect(hostRoleCommandInstallComponent4.getRoleCommand()).andReturn(RoleCommand.INSTALL).atLeastOnce(); expect(hostRoleCommandInstallComponent4.getRole()).andReturn(Role.INSTALL_PACKAGES).atLeastOnce(); 
expect(hostRoleCommandInstallComponent4.getStatus()).andReturn(HostRoleStatus.COMPLETED).atLeastOnce(); expect(ambariContext.createAmbariTask(anyLong(), anyLong(), eq("component1"), anyString(), eq(AmbariContext.TaskType.START), anyBoolean())).andReturn(hostRoleCommandStartComponent1).times (1); expect(ambariContext.createAmbariTask(anyLong(), anyLong(), eq("component2"), anyString(), eq(AmbariContext.TaskType.START), anyBoolean())).andReturn(hostRoleCommandStartComponent2).times(1); expect(hostRoleCommandStartComponent1.getTaskId()).andReturn(3L).anyTimes(); expect(hostRoleCommandStartComponent1.getRoleCommand()).andReturn(RoleCommand.START).atLeastOnce(); expect(hostRoleCommandStartComponent1.getRole()).andReturn(Role.DATANODE).atLeastOnce(); expect(hostRoleCommandStartComponent1.getStatus()).andReturn(HostRoleStatus.COMPLETED).atLeastOnce(); expect(hostRoleCommandStartComponent2.getTaskId()).andReturn(4L).anyTimes(); expect(hostRoleCommandStartComponent2.getRoleCommand()).andReturn(RoleCommand.START).atLeastOnce(); expect(hostRoleCommandStartComponent2.getRole()).andReturn(Role.NAMENODE).atLeastOnce(); expect(hostRoleCommandStartComponent2.getStatus()).andReturn(HostRoleStatus.COMPLETED).atLeastOnce(); ambariContext.setConfigurationOnCluster(capture(updateClusterConfigRequestCapture)); expectLastCall().times(3); ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION); expectLastCall().once(); expect(configureClusterTaskFactory.createConfigureClusterTask( anyObject(ClusterTopology.class), anyObject(ClusterConfigurationRequest.class), anyObject(AmbariEventPublisher.class) )).andReturn(configureClusterTask); expect(configureClusterTask.getTimeout()).andReturn(1000L); expect(configureClusterTask.getRepeatDelay()).andReturn(50L); expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1); persistedTopologyRequest = new PersistedTopologyRequest(1, request); 
expect(persistedState.getAllRequests()).andReturn(Collections.<ClusterTopology, List<LogicalRequest>>emptyMap()).once(); expect(persistedState.persistTopologyRequest(request)).andReturn(persistedTopologyRequest).once(); persistedState.persistLogicalRequest((LogicalRequest) anyObject(), anyLong()); expectLastCall().once(); topologyValidatorServiceMock.validateTopologyConfiguration(anyObject(ClusterTopology.class)); replayAll(); Class clazz = TopologyManager.class; f = clazz.getDeclaredField("executor"); f.setAccessible(true); f.set(topologyManager, executor); EasyMockSupport.injectMocks(topologyManager); } @After public void tearDown() { verifyAll(); resetAll(); } @Test public void testProvisionCluster() throws Exception { topologyManager.provisionCluster(request); LogicalRequest request = topologyManager.getRequest(1); assertEquals(request.getHostRequests().size(), 3); } }
/*
 * Copyright &copy 2014-2016 NetApp, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * DO NOT EDIT THIS CODE BY HAND! It has been generated with jsvcgen.
 */
package com.solidfire.element.api;

import com.solidfire.gson.Gson;
import com.solidfire.core.client.Attributes;
import com.solidfire.gson.annotations.SerializedName;
import com.solidfire.core.annotation.Since;
import com.solidfire.core.javautil.Optional;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Objects;

/**
 * BulkVolumeJob
 *
 * Generated, serializable value object describing one bulk volume (read/write)
 * job as reported by the SolidFire Element API. Instances are populated from
 * JSON via the {@code @SerializedName} bindings below; {@code Optional} fields
 * may be absent in the wire payload.
 **/
public class BulkVolumeJob implements Serializable {

    // NOTE(review): generated code exposes this as public; the Serializable
    // contract only requires it to exist, so leave as-is.
    public static final long serialVersionUID = 1835487517246678767L;

    @SerializedName("bulkVolumeID") private Long bulkVolumeID;
    @SerializedName("createTime") private String createTime;
    // Only present in API version 8.0+ (see the @Since("8.0") constructor).
    @SerializedName("elapsedTime") private Long elapsedTime;
    @SerializedName("format") private String format;
    @SerializedName("key") private String key;
    @SerializedName("percentComplete") private Long percentComplete;
    @SerializedName("remainingTime") private Long remainingTime;
    @SerializedName("srcVolumeID") private Long srcVolumeID;
    @SerializedName("status") private String status;
    @SerializedName("script") private Optional<String> script;
    @SerializedName("snapshotID") private Optional<Long> snapshotID;
    @SerializedName("type") private String type;
    @SerializedName("attributes") private Attributes attributes;

    // empty constructor
    @Since("7.0")
    public BulkVolumeJob() {}

    // parameterized constructor (API 7.0 — without elapsedTime)
    @Since("7.0")
    public BulkVolumeJob(
        Long bulkVolumeID,
        String createTime,
        String format,
        String key,
        Long percentComplete,
        Long remainingTime,
        Long srcVolumeID,
        String status,
        Optional<String> script,
        Optional<Long> snapshotID,
        String type,
        Attributes attributes
    ) {
        this.bulkVolumeID = bulkVolumeID;
        this.createTime = createTime;
        this.format = format;
        this.key = key;
        this.percentComplete = percentComplete;
        this.remainingTime = remainingTime;
        this.srcVolumeID = srcVolumeID;
        this.status = status;
        // Normalize null Optionals to empty so getters never return null.
        this.script = (script == null) ? Optional.<String>empty() : script;
        this.snapshotID = (snapshotID == null) ? Optional.<Long>empty() : snapshotID;
        this.type = type;
        this.attributes = attributes;
    }

    // parameterized constructor (API 8.0 — adds elapsedTime)
    @Since("8.0")
    public BulkVolumeJob(
        Long bulkVolumeID,
        String createTime,
        Long elapsedTime,
        String format,
        String key,
        Long percentComplete,
        Long remainingTime,
        Long srcVolumeID,
        String status,
        Optional<String> script,
        Optional<Long> snapshotID,
        String type,
        Attributes attributes
    ) {
        this.bulkVolumeID = bulkVolumeID;
        this.createTime = createTime;
        this.elapsedTime = elapsedTime;
        this.format = format;
        this.key = key;
        this.percentComplete = percentComplete;
        this.remainingTime = remainingTime;
        this.srcVolumeID = srcVolumeID;
        this.status = status;
        // Normalize null Optionals to empty so getters never return null.
        this.script = (script == null) ? Optional.<String>empty() : script;
        this.snapshotID = (snapshotID == null) ? Optional.<Long>empty() : snapshotID;
        this.type = type;
        this.attributes = attributes;
    }

    /**
     * The internal bulk volume job ID.
     **/
    public Long getBulkVolumeID() { return this.bulkVolumeID; }
    public void setBulkVolumeID(Long bulkVolumeID) {
        this.bulkVolumeID = bulkVolumeID;
    }

    /**
     * Timestamp created for the bulk volume job.
     **/
    public String getCreateTime() { return this.createTime; }
    public void setCreateTime(String createTime) {
        this.createTime = createTime;
    }

    /**
     * The number of seconds since the job began.
     **/
    public Long getElapsedTime() { return this.elapsedTime; }
    public void setElapsedTime(Long elapsedTime) {
        this.elapsedTime = elapsedTime;
    }

    /**
     * Format is either "compressed" or "native".
     **/
    public String getFormat() { return this.format; }
    public void setFormat(String format) {
        this.format = format;
    }

    /**
     * The unique key created by the bulk volume session.
     **/
    public String getKey() { return this.key; }
    public void setKey(String key) {
        this.key = key;
    }

    /**
     * The completed percentage reported by the operation.
     **/
    public Long getPercentComplete() { return this.percentComplete; }
    public void setPercentComplete(Long percentComplete) {
        this.percentComplete = percentComplete;
    }

    /**
     * The estimated time remaining in seconds.
     **/
    public Long getRemainingTime() { return this.remainingTime; }
    public void setRemainingTime(Long remainingTime) {
        this.remainingTime = remainingTime;
    }

    /**
     * The source volume ID.
     **/
    public Long getSrcVolumeID() { return this.srcVolumeID; }
    public void setSrcVolumeID(Long srcVolumeID) {
        this.srcVolumeID = srcVolumeID;
    }

    /**
     * Can be one of the following:
     * preparing
     * active
     * done
     * failed
     **/
    public String getStatus() { return this.status; }
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * The name of the script if one is provided.
     **/
    public Optional<String> getScript() { return this.script; }
    public void setScript(Optional<String> script) {
        // Keep the "never null" invariant established by the constructors.
        this.script = (script == null) ? Optional.<String>empty() : script;
    }

    /**
     * ID of the snapshot if a snapshot is in the source of the bulk volume job.
     **/
    public Optional<Long> getSnapshotID() { return this.snapshotID; }
    public void setSnapshotID(Optional<Long> snapshotID) {
        // Keep the "never null" invariant established by the constructors.
        this.snapshotID = (snapshotID == null) ? Optional.<Long>empty() : snapshotID;
    }

    /**
     * Can be one of the following:
     * read
     * write
     **/
    public String getType() { return this.type; }
    public void setType(String type) {
        this.type = type;
    }

    /**
     * JSON attributes on the bulk volume job.
     **/
    public Attributes getAttributes() { return this.attributes; }
    public void setAttributes(Attributes attributes) {
        this.attributes = attributes;
    }

    /** Value equality over every field (generated alongside hashCode). */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        BulkVolumeJob that = (BulkVolumeJob) o;

        return Objects.equals(bulkVolumeID, that.bulkVolumeID) &&
            Objects.equals(createTime, that.createTime) &&
            Objects.equals(elapsedTime, that.elapsedTime) &&
            Objects.equals(format, that.format) &&
            Objects.equals(key, that.key) &&
            Objects.equals(percentComplete, that.percentComplete) &&
            Objects.equals(remainingTime, that.remainingTime) &&
            Objects.equals(srcVolumeID, that.srcVolumeID) &&
            Objects.equals(status, that.status) &&
            Objects.equals(script, that.script) &&
            Objects.equals(snapshotID, that.snapshotID) &&
            Objects.equals(type, that.type) &&
            Objects.equals(attributes, that.attributes);
    }

    @Override
    public int hashCode() {
        return Objects.hash( bulkVolumeID,createTime,elapsedTime,format,key,percentComplete,remainingTime,srcVolumeID,status,script,snapshotID,type,attributes );
    }

    /** Flat map view of all fields keyed by their JSON names. */
    public java.util.Map<String, Object> toMap() {
        java.util.Map<String, Object> map = new HashMap<>();
        map.put("bulkVolumeID", bulkVolumeID);
        map.put("createTime", createTime);
        map.put("elapsedTime", elapsedTime);
        map.put("format", format);
        map.put("key", key);
        map.put("percentComplete", percentComplete);
        map.put("remainingTime", remainingTime);
        map.put("srcVolumeID", srcVolumeID);
        map.put("status", status);
        map.put("script", script);
        map.put("snapshotID", snapshotID);
        map.put("type", type);
        map.put("attributes", attributes);
        return map;
    }

    /** JSON-ish rendering of all fields; absent Optionals print as "null". */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        Gson gson = new Gson();
        sb.append( "{ " );

        sb.append(" bulkVolumeID : ").append(gson.toJson(bulkVolumeID)).append(",");
        sb.append(" createTime : ").append(gson.toJson(createTime)).append(",");
        sb.append(" elapsedTime : ").append(gson.toJson(elapsedTime)).append(",");
        sb.append(" format : ").append(gson.toJson(format)).append(",");
        sb.append(" key : ").append(gson.toJson(key)).append(",");
        sb.append(" percentComplete : ").append(gson.toJson(percentComplete)).append(",");
        sb.append(" remainingTime : ").append(gson.toJson(remainingTime)).append(",");
        sb.append(" srcVolumeID : ").append(gson.toJson(srcVolumeID)).append(",");
        sb.append(" status : ").append(gson.toJson(status)).append(",");
        if(null != script && script.isPresent()){
            sb.append(" script : ").append(gson.toJson(script)).append(",");
        }
        else{
            sb.append(" script : ").append("null").append(",");
        }
        if(null != snapshotID && snapshotID.isPresent()){
            sb.append(" snapshotID : ").append(gson.toJson(snapshotID)).append(",");
        }
        else{
            sb.append(" snapshotID : ").append("null").append(",");
        }
        sb.append(" type : ").append(gson.toJson(type)).append(",");
        sb.append(" attributes : ").append(gson.toJson(attributes)).append(",");
        sb.append( " }" );

        // Strip the trailing comma left by the last field (", }" -> " }").
        if(sb.lastIndexOf(", }") != -1)
            sb.deleteCharAt(sb.lastIndexOf(", }"));

        return sb.toString();
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Builder pre-populated from this instance's current field values. */
    public final Builder asBuilder() {
        return new Builder().buildFrom(this);
    }

    /** Fluent builder; build() delegates to the 8.0 (full) constructor. */
    public static class Builder {
        private Long bulkVolumeID;
        private String createTime;
        private Long elapsedTime;
        private String format;
        private String key;
        private Long percentComplete;
        private Long remainingTime;
        private Long srcVolumeID;
        private String status;
        private Optional<String> script;
        private Optional<Long> snapshotID;
        private String type;
        private Attributes attributes;

        private Builder() { }

        public BulkVolumeJob build() {
            return new BulkVolumeJob (
                         this.bulkVolumeID,
                         this.createTime,
                         this.elapsedTime,
                         this.format,
                         this.key,
                         this.percentComplete,
                         this.remainingTime,
                         this.srcVolumeID,
                         this.status,
                         this.script,
                         this.snapshotID,
                         this.type,
                         this.attributes);
        }

        private BulkVolumeJob.Builder buildFrom(final BulkVolumeJob req) {
            this.bulkVolumeID = req.bulkVolumeID;
            this.createTime = req.createTime;
            this.elapsedTime = req.elapsedTime;
            this.format = req.format;
            this.key = req.key;
            this.percentComplete = req.percentComplete;
            this.remainingTime = req.remainingTime;
            this.srcVolumeID = req.srcVolumeID;
            this.status = req.status;
            this.script = req.script;
            this.snapshotID = req.snapshotID;
            this.type = req.type;
            this.attributes = req.attributes;

            return this;
        }

        public BulkVolumeJob.Builder bulkVolumeID(final Long bulkVolumeID) {
            this.bulkVolumeID = bulkVolumeID;
            return this;
        }

        public BulkVolumeJob.Builder createTime(final String createTime) {
            this.createTime = createTime;
            return this;
        }

        public BulkVolumeJob.Builder elapsedTime(final Long elapsedTime) {
            this.elapsedTime = elapsedTime;
            return this;
        }

        public BulkVolumeJob.Builder format(final String format) {
            this.format = format;
            return this;
        }

        public BulkVolumeJob.Builder key(final String key) {
            this.key = key;
            return this;
        }

        public BulkVolumeJob.Builder percentComplete(final Long percentComplete) {
            this.percentComplete = percentComplete;
            return this;
        }

        public BulkVolumeJob.Builder remainingTime(final Long remainingTime) {
            this.remainingTime = remainingTime;
            return this;
        }

        public BulkVolumeJob.Builder srcVolumeID(final Long srcVolumeID) {
            this.srcVolumeID = srcVolumeID;
            return this;
        }

        public BulkVolumeJob.Builder status(final String status) {
            this.status = status;
            return this;
        }

        // "optional" prefix: null argument means absent, not Optional.of(null).
        public BulkVolumeJob.Builder optionalScript(final String script) {
            this.script = (script == null) ? Optional.<String>empty() : Optional.of(script);
            return this;
        }

        public BulkVolumeJob.Builder optionalSnapshotID(final Long snapshotID) {
            this.snapshotID = (snapshotID == null) ? Optional.<Long>empty() : Optional.of(snapshotID);
            return this;
        }

        public BulkVolumeJob.Builder type(final String type) {
            this.type = type;
            return this;
        }

        public BulkVolumeJob.Builder attributes(final Attributes attributes) {
            this.attributes = attributes;
            return this;
        }
    }
}
package com.wyrnlab.jotdownthatmovie.ExternalLibraries.FullImages;

import android.graphics.Bitmap;
import android.util.Log;

/**
 * Bitmap blur helper: downscales the input by BITMAP_SCALE and applies a
 * box-stack blur of radius BLUR_RADIUS in two passes (horizontal, then
 * vertical). Appears to be a variant of Mario Klingemann's "Stack Blur" —
 * TODO confirm against the original source.
 */
public class Constants {

    // Downscale factor applied before blurring (smaller bitmap = cheaper blur).
    private static final float BITMAP_SCALE = 0.4f;
    // Blur kernel radius, in pixels of the downscaled bitmap.
    private static final int BLUR_RADIUS = 8;

    /**
     * Returns a new, blurred, downscaled copy of {@code sentBitmap}.
     * The returned bitmap's dimensions are the input's scaled by BITMAP_SCALE.
     * NOTE(review): returns null only if radius < 1, which cannot happen with
     * the constant BLUR_RADIUS = 8 — and that check runs after the scale/copy
     * work, so it is effectively dead code here.
     */
    public static Bitmap fastblur(Bitmap sentBitmap) {

        float scale = BITMAP_SCALE;
        int radius = BLUR_RADIUS;

        int width = Math.round(sentBitmap.getWidth() * scale);
        int height = Math.round(sentBitmap.getHeight() * scale);
        sentBitmap = Bitmap.createScaledBitmap(sentBitmap, width, height, false);

        // Mutable working copy; the input bitmap itself is not modified.
        Bitmap bitmap = sentBitmap.copy(sentBitmap.getConfig(), true);

        if (radius < 1) {
            return (null);
        }

        int w = bitmap.getWidth();
        int h = bitmap.getHeight();

        int[] pix = new int[w * h];
        // NOTE(review): Log.e is the error level — this is debug output and
        // should probably be Log.d (kept as-is to preserve behavior).
        Log.e("pix", w + " " + h + " " + pix.length);
        bitmap.getPixels(pix, 0, w, 0, 0, w, h);

        int wm = w - 1;
        int hm = h - 1;
        int wh = w * h;
        int div = radius + radius + 1;   // kernel width

        int r[] = new int[wh];
        int g[] = new int[wh];
        int b[] = new int[wh];
        int rsum, gsum, bsum, x, y, i, p, yp, yi, yw;
        int vmin[] = new int[Math.max(w, h)];

        // dv[] is a precomputed division table: dv[sum] == sum / divsum,
        // trading memory for a divide inside the hot loops.
        int divsum = (div + 1) >> 1;
        divsum *= divsum;
        int dv[] = new int[256 * divsum];
        for (i = 0; i < 256 * divsum; i++) {
            dv[i] = (i / divsum);
        }

        yw = yi = 0;

        // Sliding "stack" of the last `div` pixels' RGB components.
        int[][] stack = new int[div][3];
        int stackpointer;
        int stackstart;
        int[] sir;
        int rbs;
        int r1 = radius + 1;
        int routsum, goutsum, boutsum;
        int rinsum, ginsum, binsum;

        // ---- Pass 1: horizontal blur, writing into r[], g[], b[] ----
        for (y = 0; y < h; y++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            // Prime the stack with the row's leading kernel window
            // (edge pixels clamped via min/max).
            for (i = -radius; i <= radius; i++) {
                p = pix[yi + Math.min(wm, Math.max(i, 0))];
                sir = stack[i + radius];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rbs = r1 - Math.abs(i);   // triangular weight
                rsum += sir[0] * rbs;
                gsum += sir[1] * rbs;
                bsum += sir[2] * rbs;
                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }
            }
            stackpointer = radius;

            for (x = 0; x < w; x++) {

                r[yi] = dv[rsum];
                g[yi] = dv[gsum];
                b[yi] = dv[bsum];

                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;

                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];

                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];

                // vmin caches the clamped right-edge index for each column.
                if (y == 0) {
                    vmin[x] = Math.min(x + radius + 1, wm);
                }
                p = pix[yw + vmin[x]];

                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);

                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];

                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;

                stackpointer = (stackpointer + 1) % div;
                sir = stack[(stackpointer) % div];

                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];

                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];

                yi++;
            }
            yw += w;
        }

        // ---- Pass 2: vertical blur over r[], g[], b[], writing into pix[] ----
        for (x = 0; x < w; x++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            yp = -radius * w;
            for (i = -radius; i <= radius; i++) {
                yi = Math.max(0, yp) + x;

                sir = stack[i + radius];

                sir[0] = r[yi];
                sir[1] = g[yi];
                sir[2] = b[yi];

                rbs = r1 - Math.abs(i);

                rsum += r[yi] * rbs;
                gsum += g[yi] * rbs;
                bsum += b[yi] * rbs;

                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }

                if (i < hm) {
                    yp += w;
                }
            }
            yi = x;
            stackpointer = radius;
            for (y = 0; y < h; y++) {
                // Preserve alpha channel: ( 0xff000000 & pix[yi] )
                pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16) | (dv[gsum] << 8) | dv[bsum];

                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;

                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];

                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];

                if (x == 0) {
                    vmin[y] = Math.min(y + r1, hm) * w;
                }
                p = x + vmin[y];

                sir[0] = r[p];
                sir[1] = g[p];
                sir[2] = b[p];

                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];

                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;

                stackpointer = (stackpointer + 1) % div;
                sir = stack[stackpointer];

                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];

                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];

                yi += w;
            }
        }

        // NOTE(review): same Log.e-for-debug issue as above.
        Log.e("pix", w + " " + h + " " + pix.length);
        bitmap.setPixels(pix, 0, w, 0, 0, w, h);

        return (bitmap);
    }
}
/* $Id$ */

package ibis.satin.impl.spawnSync;

import ibis.ipl.IbisIdentifier;
import ibis.satin.impl.Config;
import ibis.satin.impl.Satin;

/**
 * A vector of invocation records.
 *
 * Backed by a plain array; removal is O(1) by swapping the last element into
 * the removed slot (order is NOT preserved). Most operations assert that the
 * caller holds the Satin lock when ASSERTS is enabled — this class does no
 * locking of its own.
 */
public final class IRVector implements Config {
    // Backing store; grown by doubling in add(). Slots >= count are null.
    private InvocationRecord[] l = new InvocationRecord[500];

    // Number of live records in l.
    private int count = 0;

    private Satin satin;

    public IRVector(Satin s) {
        this.satin = s;
    }

    /** Appends a record, doubling the backing array when full. Caller must hold the Satin lock. */
    public void add(InvocationRecord r) {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        if (count >= l.length) {
            InvocationRecord[] nl = new InvocationRecord[l.length * 2];
            System.arraycopy(l, 0, nl, 0, l.length);
            l = nl;
        }

        l[count] = r;
        count++;
    }

    public int size() {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }
        return count;
    }

    /**
     * Removes and returns the record matching {@code stamp}, or null if not
     * present (which is expected after crashes/aborts). Swap-with-last removal.
     */
    public InvocationRecord remove(Stamp stamp) {
        InvocationRecord res = null;

        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        for (int i = 0; i < count; i++) {
            if (l[i].getStamp().stampEquals(stamp)) {
                res = l[i];
                count--;
                l[i] = l[count];
                l[count] = null;
                return res;
            }
        }

        // Sometimes (in case of crashes or aborts), we try to remove
        // non-existent elements. This is not a problem, just return null.
        spawnLogger.debug("IRVector: removing non-existent elt: " + stamp);
        return null;
    }

    /**
     * Removes and returns the record equal to {@code r}, or null if not
     * present. Searches backwards (most recently added first).
     */
    public InvocationRecord remove(InvocationRecord r) {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        for (int i = count - 1; i >= 0; i--) {
            if (l[i].equals(r)) {
                InvocationRecord res = l[i];
                count--;
                l[i] = l[count];
                l[count] = null;
                return res;
            }
        }

        // Sometimes (in case of crashes or aborts), we try to remove
        // non-existent elements. This is not a problem, just return null.
        spawnLogger.debug("IRVector: removing non-existent elt: "
            + r.getStamp());
        return null;
    }

    /**
     * Aborts every record that descends from {@code targetStamp} (or whose
     * parent is already aborted), removing it and sending an abort message.
     *
     * @param store if true, send abort-and-store (fault tolerance path)
     *              instead of a plain abort.
     */
    public void killChildrenOf(Stamp targetStamp, boolean store) {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        for (int i = 0; i < count; i++) {
            InvocationRecord curr = l[i];
            if (curr.aborted) {
                continue; // already handled.
            }

            if ((curr.getParent() != null && curr.getParent().aborted)
                || curr.isDescendentOf(targetStamp)) {
                curr.aborted = true;
                if (abortLogger.isDebugEnabled()) {
                    abortLogger.debug("found stolen child: " + curr.getStamp()
                        + ", it depends on " + targetStamp);
                }
                curr.decrSpawnCounter();
                satin.stats.abortedJobs++;
                satin.stats.abortMessages++;

                // Curr is removed, but not put back in cache.
                // this is OK. Moreover, it might have children,
                // so we should keep it alive.
                // cleanup is done inside the spawner itself.
                removeIndex(i);
                // removeIndex swapped the last element into slot i:
                // step back so the swapped-in element is also examined.
                i--;

                if (store) {
                    satin.ft.sendAbortAndStoreMessage(curr);
                } else {
                    satin.aborts.sendAbortMessage(curr);
                }
            }
        }
    }

    // Abort every job that was spawned on targetOwner
    // or is a child of a job spawned on targetOwner.
    // Fault-tolerance variant: sends abort-and-store messages.
    public void killAndStoreSubtreeOf(IbisIdentifier targetOwner) {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        for (int i = 0; i < count; i++) {
            InvocationRecord curr = l[i];

            if ((curr.getParent() != null && curr.getParent().aborted)
                || curr.isDescendentOf(targetOwner)
                || curr.getOwner().equals(targetOwner)) {
                // this shouldnt happen, actually
                curr.aborted = true;
                if (abortLogger.isDebugEnabled()) {
                    abortLogger.debug("found stolen child: " + curr.getStamp()
                        + ", it depends on " + targetOwner);
                }
                curr.decrSpawnCounter();
                satin.stats.abortedJobs++;
                satin.stats.abortMessages++;

                // Re-check the element swapped into slot i (see killChildrenOf).
                removeIndex(i);
                i--;
                satin.ft.sendAbortAndStoreMessage(curr);
            }
        }
    }

    // Abort every job that was spawned on targetOwner
    // or is a child of a job spawned on targetOwner.
    // Same as killAndStoreSubtreeOf, but sends plain abort messages.
    public void killSubtreeOf(IbisIdentifier targetOwner) {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        for (int i = 0; i < count; i++) {
            InvocationRecord curr = l[i];

            if ((curr.getParent() != null && curr.getParent().aborted)
                || curr.isDescendentOf(targetOwner)
                || curr.getOwner().equals(targetOwner)) {
                // this shouldnt happen, actually
                curr.aborted = true;
                if (abortLogger.isDebugEnabled()) {
                    abortLogger.debug("found stolen child: " + curr.getStamp()
                        + ", it depends on " + targetOwner);
                }
                curr.decrSpawnCounter();
                satin.stats.abortedJobs++;
                satin.stats.abortMessages++;

                removeIndex(i);
                i--;
                satin.ft.sendAbortMessage(curr);
            }
        }
    }

    /** Marks every record aborted and empties the vector (no messages sent). */
    public void killAll() {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        for (int i = 0; i < count; i++) {
            InvocationRecord curr = l[i];
            curr.aborted = true;
            curr.decrSpawnCounter();
            removeIndex(i);
            // removeIndex moved the last element into slot i; revisit it.
            i--;
        }
    }

    /**
     * Removes slot {@code i} by moving the last element into it; returns the
     * removed record, or null if {@code i} is out of range.
     */
    public InvocationRecord removeIndex(int i) {
        if (ASSERTS) {
            Satin.assertLocked(satin);
        }

        if (i >= count) {
            return null;
        }

        InvocationRecord res = l[i];
        count--;
        l[i] = l[count];
        l[count] = null;
        return res;
    }

    /**
     * Used for fault tolerance. Remove all the jobs stolen by targetOwner and
     * put them back in the taskQueue.
     */
    public void redoStolenBy(IbisIdentifier crashedIbis) {
        Satin.assertLocked(satin);

        for (int i = count - 1; i >= 0; i--) {
            if (crashedIbis.equals(l[i].getStealer())) {
                if (ftLogger.isDebugEnabled()) {
                    ftLogger.debug("Found a job to restart: "
                        + l[i].getStamp());
                }
                l[i].setReDone(true);
                l[i].setStealer(null);
                satin.q.addToTail(l[i]);
                satin.stats.restartedJobs++;
                count--;
                // NOTE(review): unlike the other removals, the vacated slot
                // l[count] is not nulled here — confirm that is intentional.
                l[i] = l[count];
            }
        }
    }

    /** Debug dump of all outstanding (stolen) jobs to {@code out}. */
    public void print(java.io.PrintStream out) {
        Satin.assertLocked(satin);

        out.println("=IRVector " + satin.ident + ":=============");
        for (int i = 0; i < count; i++) {
            out.println("outjobs [" + i + "] = " + l[i] + ","
                + l[i].getStealer());
        }
        out.println("end of IRVector: " + satin.ident + "=");
    }

    // NOTE(review): no bounds/lock check — returns null slot when empty.
    public InvocationRecord first() {
        return l[0];
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.rest.test;

import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.impl.ConfigurationImpl;
import org.apache.activemq.artemis.core.remoting.impl.invm.InVMAcceptorFactory;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.ActiveMQServers;
import org.apache.activemq.artemis.rest.MessageServiceManager;
import org.apache.activemq.artemis.rest.queue.QueueDeployment;
import org.apache.activemq.artemis.rest.queue.push.xml.PushRegistration;
import org.apache.activemq.artemis.rest.queue.push.xml.XmlLink;
import org.jboss.logging.Logger;
import org.jboss.resteasy.client.ClientRequest;
import org.jboss.resteasy.client.ClientResponse;
import org.jboss.resteasy.spi.Link;
import org.jboss.resteasy.spi.ResteasyDeployment;
import org.jboss.resteasy.test.EmbeddedContainer;
import org.junit.Assert;
import org.junit.Test;

import static org.jboss.resteasy.test.TestPortProvider.generateURL;

/**
 * Test durable queue push consumers.
 *
 * Each test boots an embedded in-VM ActiveMQ server plus an embedded RESTEasy
 * container (startup/shutdown are called explicitly per test, not via
 * Before/After annotations).
 */
public class PersistentPushQueueConsumerTest {

   private static final Logger log = Logger.getLogger(PersistentPushQueueConsumerTest.class);

   public static MessageServiceManager manager;
   protected static ResteasyDeployment deployment;
   public static ActiveMQServer activeMQServer;

   // Boots the in-VM broker (non-persistent, no security) and the REST layer,
   // then registers the queue/topic JAX-RS resources with the container.
   public static void startup() throws Exception {
      Configuration configuration = new ConfigurationImpl().setPersistenceEnabled(false).setSecurityEnabled(false).addAcceptorConfiguration(new TransportConfiguration(InVMAcceptorFactory.class.getName()));

      activeMQServer = ActiveMQServers.newActiveMQServer(configuration);
      activeMQServer.start();

      deployment = EmbeddedContainer.start();
      manager = new MessageServiceManager(null);
      manager.start();
      deployment.getRegistry().addSingletonResource(manager.getQueueManager().getDestination());
      deployment.getRegistry().addSingletonResource(manager.getTopicManager().getDestination());
   }

   // Tears everything down in reverse order and nulls the statics so a
   // subsequent startup() within the same test starts clean.
   public static void shutdown() throws Exception {
      manager.stop();
      manager = null;
      EmbeddedContainer.stop();
      deployment = null;
      activeMQServer.stop();
      activeMQServer = null;
   }

   /**
    * Registers a durable push consumer bridging testBridge ->
    * testBridgeforwardQueue, restarts the whole stack, then verifies a
    * message posted to the source queue is pushed to the forward queue
    * (i.e. the push registration survived the restart).
    */
   @Test
   public void testBridge() throws Exception {
      try {
         startup();
         String testName = "testBridge";
         deployBridgeQueues(testName);

         // Discover the source queue's "create" and "push-consumers" links
         // from the HEAD response headers.
         ClientRequest request = new ClientRequest(generateURL("/queues/" + testName));
         ClientResponse<?> response = request.head();
         response.releaseConnection();
         Assert.assertEquals(200, response.getStatus());
         Link sender = MessageTestBase.getLinkByTitle(manager.getQueueManager().getLinkStrategy(), response, "create");
         log.debug("create: " + sender);
         Link pushSubscriptions = MessageTestBase.getLinkByTitle(manager.getQueueManager().getLinkStrategy(), response, "push-consumers");
         log.debug("push subscriptions: " + pushSubscriptions);

         // Discover the forward queue's pull-consumer link.
         request = new ClientRequest(generateURL("/queues/" + testName + "forwardQueue"));
         response = request.head();
         response.releaseConnection();
         Assert.assertEquals(200, response.getStatus());
         Link consumers = MessageTestBase.getLinkByTitle(manager.getQueueManager().getLinkStrategy(), response, "pull-consumers");
         log.debug("pull: " + consumers);

         response = Util.setAutoAck(consumers, true);
         Link consumeNext = MessageTestBase.getLinkByTitle(manager.getQueueManager().getLinkStrategy(), response, "consume-next");
         log.debug("poller: " + consumeNext);

         // Durable push registration targeting the forward queue.
         PushRegistration reg = new PushRegistration();
         reg.setDurable(true);
         reg.setDisableOnFailure(true);
         XmlLink target = new XmlLink();
         target.setHref(generateURL("/queues/" + testName + "forwardQueue"));
         target.setRelationship("destination");
         reg.setTarget(target);
         response = pushSubscriptions.request().body("application/xml", reg).post();
         response.releaseConnection();
         Assert.assertEquals(201, response.getStatus());

         // Full restart: the durable registration must be restored from the
         // push store. NOTE(review): the pre-restart links (sender,
         // consumeNext) are reused afterwards — presumably the embedded
         // container reuses the same port/URLs across restarts; confirm.
         shutdown();
         startup();
         deployBridgeQueues(testName);

         ClientResponse<?> res = sender.request().body("text/plain", Integer.toString(1)).post();
         res.releaseConnection();
         Assert.assertEquals(201, res.getStatus());

         // The bridged message should arrive on the forward queue.
         res = consumeNext.request().header("Accept-Wait", "2").post(String.class);
         Assert.assertEquals(200, res.getStatus());
         Assert.assertEquals("1", res.getEntity(String.class));
         res.releaseConnection();

         // Clean up the pull consumer session.
         Link session = MessageTestBase.getLinkByTitle(manager.getQueueManager().getLinkStrategy(), res, "consumer");
         res = session.request().delete();
         res.releaseConnection();
         Assert.assertEquals(204, res.getStatus());
         manager.getQueueManager().getPushStore().removeAll();
      } finally {
         shutdown();
      }
   }

   // Deploys the source queue and its "<testName>forwardQueue" counterpart,
   // both non-durable-send with duplicates allowed.
   private void deployBridgeQueues(String testName) throws Exception {
      QueueDeployment deployment = new QueueDeployment();
      deployment.setDuplicatesAllowed(true);
      deployment.setDurableSend(false);
      deployment.setName(testName);
      manager.getQueueManager().deploy(deployment);
      QueueDeployment deployment2 = new QueueDeployment();
      deployment2.setDuplicatesAllowed(true);
      deployment2.setDurableSend(false);
      deployment2.setName(testName + "forwardQueue");
      manager.getQueueManager().deploy(deployment2);
   }

   /**
    * Registers a push consumer pointing at an unreachable URI and verifies
    * that, after the configured retries fail, the registration is disabled
    * (disableOnFailure=true) rather than retrying forever.
    */
   @Test
   public void testFailure() throws Exception {
      try {
         startup();
         String testName = "testFailure";
         QueueDeployment deployment = new QueueDeployment();
         deployment.setDuplicatesAllowed(true);
         deployment.setDurableSend(false);
         deployment.setName(testName);
         manager.getQueueManager().deploy(deployment);

         ClientRequest request = new ClientRequest(generateURL("/queues/" + testName));
         ClientResponse<?> response = request.head();
         response.releaseConnection();
         Assert.assertEquals(200, response.getStatus());
         Link sender = MessageTestBase.getLinkByTitle(manager.getQueueManager().getLinkStrategy(), response, "create");
         log.debug("create: " + sender);
         Link pushSubscriptions = MessageTestBase.getLinkByTitle(manager.getQueueManager().getLinkStrategy(), response, "push-consumers");
         log.debug("push subscriptions: " + pushSubscriptions);

         // Target URI is intentionally unreachable to force push failures.
         PushRegistration reg = new PushRegistration();
         reg.setDurable(true);
         XmlLink target = new XmlLink();
         target.setHref("http://localhost:3333/error");
         target.setRelationship("uri");
         reg.setTarget(target);
         reg.setDisableOnFailure(true);
         reg.setMaxRetries(3);
         reg.setRetryWaitMillis(10);
         response = pushSubscriptions.request().body("application/xml", reg).post();
         Assert.assertEquals(201, response.getStatus());
         Link pushSubscription = response.getLocationLink();
         response.releaseConnection();

         ClientResponse<?> res = sender.request().body("text/plain", Integer.toString(1)).post();
         res.releaseConnection();
         Assert.assertEquals(201, res.getStatus());

         // Give the push consumer time to exhaust its retries.
         // NOTE(review): fixed sleep makes this timing-sensitive/slow.
         Thread.sleep(5000);

         response = pushSubscription.request().get();
         PushRegistration reg2 = response.getEntity(PushRegistration.class);
         Assert.assertEquals(reg.isDurable(), reg2.isDurable());
         Assert.assertEquals(reg.getTarget().getHref(), reg2.getTarget().getHref());
         Assert.assertFalse(reg2.isEnabled()); // make sure the failure disables the PushRegistration
         response.releaseConnection();

         manager.getQueueManager().getPushStore().removeAll();
      } finally {
         shutdown();
      }
   }
}
/*******************************************************************************
 * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
 *******************************************************************************/
package com.att.dao.aaf.cass;

import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import com.att.authz.env.AuthzTrans;
import com.att.authz.layer.Result;
import com.att.dao.Bytification;
import com.att.dao.Cached;
import com.att.dao.CassAccess;
import com.att.dao.CassDAOImpl;
import com.att.dao.Loader;
import com.att.dao.Streamer;
import com.att.inno.env.APIException;
import com.att.inno.env.Env;
import com.att.inno.env.TimeTaken;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.exceptions.DriverException;

/**
 * NsDAO
 *
 * Data Access Object for Namespace Data.
 *
 * Persists rows in the "ns" table plus free-form key/value pairs in the
 * companion "ns_attrib" table, and writes modification records to History
 * and CacheInfo via wasModified().
 */
public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {
	public static final String TABLE = "ns";
	public static final String TABLE_ATTRIB = "ns_attrib";
	public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
	// Namespace type discriminants stored in Data.type.
	public static final int ROOT = 1;
	public static final int COMPANY=2;
	public static final int APP = 3;
	// CQL batch framing and quote/comma fragments used when hand-building statements.
	private static final String BEGIN_BATCH = "BEGIN BATCH\n";
	private static final String APPLY_BATCH = "APPLY BATCH;\n";
	private static final String SQSCCR = "';\n";
	private static final String SQCSQ = "','";

	private HistoryDAO historyDAO;
	private CacheInfoDAO infoDAO;
	// Prepared SELECT of all namespaces under a given parent (see getChildren).
	private PSInfo psNS;

	/**
	 * Construct against a Cluster/keyspace, creating the History and
	 * CacheInfo DAOs internally (see init()).
	 */
	public NsDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
		super(trans, NsDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,
				readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
		init(trans);
	}

	/**
	 * Construct sharing existing History/CacheInfo DAOs (avoids creating
	 * duplicates when several DAOs are wired together).
	 */
	public NsDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO iDAO) throws APIException, IOException {
		super(trans, NsDAO.class.getSimpleName(),hDAO,Data.class,TABLE,
				readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
		historyDAO=hDAO;
		infoDAO = iDAO;
		init(trans);
	}

	//////////////////////////////////////////
	// Data Definition, matches Cassandra DM
	//////////////////////////////////////////
	private static final int KEYLIMIT = 1;

	/**
	 * Data class that matches the Cassandra Table "ns".
	 * attrib holds the companion ns_attrib rows and is loaded lazily by the
	 * read() overrides; it may be null until then.
	 */
	public static class Data extends CacheableData implements Bytification {
		public String name;
		public int type;
		public String description;
		public String parent;
		public Map<String,String> attrib;

		////////////////////////////////////////
		// Getters

		/**
		 * Returns the attribute map, creating an empty one if absent.
		 * When mutable==true, guarantees the returned map is a HashMap the
		 * caller may modify (copies a non-HashMap view if necessary).
		 */
		public Map<String,String> attrib(boolean mutable) {
			if (attrib == null) {
				attrib = new HashMap<String,String>();
			} else if (mutable && !(attrib instanceof HashMap)) {
				attrib = new HashMap<String,String>(attrib);
			}
			return attrib;
		}

		/** Cache segments to invalidate for this namespace (keyed by name). */
		@Override
		public int[] invalidate(Cached<?,?> cache) {
			return new int[] {
				seg(cache,name)
			};
		}

		public NsSplit split(String name) {
			return new NsSplit(this,name);
		}

		/** Serialize via NSLoader.marshal into a ByteBuffer (for History reconstruct). */
		@Override
		public ByteBuffer bytify() throws IOException {
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			NSLoader.deflt.marshal(this,new DataOutputStream(baos));
			return ByteBuffer.wrap(baos.toByteArray());
		}

		/** Inverse of bytify(): repopulate this object from a serialized buffer. */
		@Override
		public void reconstitute(ByteBuffer bb) throws IOException {
			NSLoader.deflt.unmarshal(this,toDIS(bb));
		}

		@Override
		public String toString() {
			return name;
		}
	}

	/**
	 * Shared constructor tail: create sub-DAOs if not injected, register the
	 * CRUD helpers (skipping the attrib column, which lives in ns_attrib),
	 * and prepare the children-by-parent SELECT.
	 */
	private void init(AuthzTrans trans) throws APIException, IOException {
		// Set up sub-DAOs
		if(historyDAO==null) {
			historyDAO = new HistoryDAO(trans, this);
		}
		if(infoDAO==null) {
			infoDAO = new CacheInfoDAO(trans,this);
		}
		String[] helpers = setCRUD(trans, TABLE, Data.class, NSLoader.deflt,4/*need to skip attrib */);
		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
				" WHERE parent = ?", new NSLoader(1),readConsistency);
	}

	/**
	 * Row <-> Data loader plus binary Streamer (marshal/unmarshal) for ns rows.
	 * MAGIC/VERSION guard the binary format; bump VERSION on layout changes.
	 */
	private static final class NSLoader extends Loader<Data> implements Streamer<Data> {
		public static final int MAGIC=250935515;
		public static final int VERSION=1;
		public static final int BUFF_SIZE=48;

		public static final NSLoader deflt = new NSLoader(KEYLIMIT);

		public NSLoader(int keylimit) {
			super(keylimit);
		}

		/** Populate Data from a Cassandra Row; column order matches setCRUD helpers. */
		@Override
		public Data load(Data data, Row row) {
			// Int more efficient
			data.name = row.getString(0);
			data.type = row.getInt(1);
			data.description = row.getString(2);
			data.parent = row.getString(3);
			return data;
		}

		@Override
		protected void key(Data data, int idx, Object[] obj) {
			obj[idx]=data.name;
		}

		@Override
		protected void body(Data data, int _idx, Object[] obj) {
			int idx = _idx;
			obj[idx]=data.type;
			obj[++idx]=data.description;
			obj[++idx]=data.parent;
		}

		/**
		 * Binary write: header, scalar fields, then attrib count + pairs.
		 * A null attrib map is encoded as count -1 (distinct from empty = 0).
		 */
		@Override
		public void marshal(Data data, DataOutputStream os) throws IOException {
			writeHeader(os,MAGIC,VERSION);
			writeString(os, data.name);
			os.writeInt(data.type);
			writeString(os,data.description);
			writeString(os,data.parent);
			if(data.attrib==null) {
				os.writeInt(-1);
			} else {
				os.writeInt(data.attrib.size());
				for(Entry<String, String> es : data.attrib(false).entrySet()) {
					writeString(os,es.getKey());
					writeString(os,es.getValue());
				}
			}
		}

		/** Binary read matching marshal(); count <= 0 (empty or null marker) reads no pairs. */
		@Override
		public void unmarshal(Data data, DataInputStream is) throws IOException {
			/*int version = */readHeader(is,MAGIC,VERSION);
			// If Version Changes between Production runs, you'll need to do a switch Statement, and
			// adequately read in fields
			byte[] buff = new byte[BUFF_SIZE];
			data.name = readString(is, buff);
			data.type = is.readInt();
			data.description = readString(is,buff);
			data.parent = readString(is,buff);
			int count = is.readInt();
			if(count>0) {
				Map<String, String> da = data.attrib(true);
				for(int i=0;i<count;++i) {
					da.put(readString(is,buff), readString(is,buff));
				}
			}
		}
	}

	/**
	 * Create the namespace row and, in a single CQL batch, its attribute rows.
	 * Derives parent from the last '.' of the name ("." for top-level names).
	 */
	@Override
	public Result<Data> create(AuthzTrans trans, Data data) {
		String ns = data.name;
		// Ensure Parent is set
		int ldot = ns.lastIndexOf('.');
		data.parent=ldot<0?".":ns.substring(0,ldot);

		// insert Attributes
		// NOTE(review): batch is built by string concatenation of attribute
		// keys/values — injection-prone if those values are untrusted; prefer
		// prepared statements. Confirm upstream validation before relying on this.
		StringBuilder stmt = new StringBuilder();
		stmt.append(BEGIN_BATCH);
		attribInsertStmts(stmt, data);
		stmt.append(APPLY_BATCH);
		try {
			getSession(trans).execute(stmt.toString());
//// TEST CODE for Exception
//		boolean force = true;
//		if(force) {
//			throw new com.datastax.driver.core.exceptions.NoHostAvailableException(new HashMap<InetSocketAddress,Throwable>());
////			throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"Sample Message");
//		}
////END TEST CODE
		} catch (DriverException | APIException | IOException e) {
			reportPerhapsReset(trans,e);
			trans.info().log(stmt);
			return Result.err(Result.ERR_Backend, "Backend Access");
		}
		return super.create(trans, data);
	}

	/**
	 * Update the namespace row; attributes are reconciled by inserting only
	 * the key/value pairs that differ from what is currently stored
	 * (inserts overwrite, so no deletes are issued).
	 */
	@Override
	public Result<Void> update(AuthzTrans trans, Data data) {
		String ns = data.name;
		// Ensure Parent is set
		int ldot = ns.lastIndexOf('.');
		data.parent=ldot<0?".":ns.substring(0,ldot);

		StringBuilder stmt = new StringBuilder();
		stmt.append(BEGIN_BATCH);
		try {
			Map<String, String> localAttr = data.attrib;
			Result<Map<String, String>> rremoteAttr = readAttribByNS(trans,ns);
			if(rremoteAttr.notOK()) {
				return Result.err(rremoteAttr);
			}
			// update Attributes
			String str;
			for(Entry<String, String> es : localAttr.entrySet()) {
				str = rremoteAttr.value.get(es.getKey());
				if(str==null || !str.equals(es.getValue())) {
					attribInsertStmt(stmt, ns, es.getKey(),es.getValue());
				}
			}
			// No point in deleting... insert overwrites...
//			for(Entry<String, String> es : remoteAttr.entrySet()) {
//				str = localAttr.get(es.getKey());
//				if(str==null || !str.equals(es.getValue())) {
//					attribDeleteStmt(stmt, ns, es.getKey());
//				}
//			}
			// Only round-trip to Cassandra if at least one attribute changed.
			if(stmt.length()>BEGIN_BATCH.length()) {
				stmt.append(APPLY_BATCH);
				getSession(trans).execute(stmt.toString());
			}
		} catch (DriverException | APIException | IOException e) {
			reportPerhapsReset(trans,e);
			trans.info().log(stmt);
			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
		}

		return super.update(trans,data);
	}

	/* (non-Javadoc)
	 * @see com.att.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object)
	 *
	 * Reads the ns row(s), then lazily attaches each row's ns_attrib map.
	 * Fails the whole read if any attribute lookup fails.
	 */
	@Override
	public Result<List<Data>> read(AuthzTrans trans, Data data) {
		Result<List<Data>> rld = super.read(trans, data);

		if(rld.isOKhasData()) {
			for(Data d : rld.value) {
				// Note: Map is null at this point, save time/mem by assignment
				Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
				if(rabn.isOK()) {
					d.attrib = rabn.value;
				} else {
					return Result.err(rabn);
				}
			}
		}
		return rld;
	}

	/* (non-Javadoc)
	 * @see com.att.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object[])
	 *
	 * Same attribute-attaching behavior as read(trans, Data), keyed variant.
	 */
	@Override
	public Result<List<Data>> read(AuthzTrans trans, Object ... key) {
		Result<List<Data>> rld = super.read(trans, key);

		if(rld.isOKhasData()) {
			for(Data d : rld.value) {
				// Note: Map is null at this point, save time/mem by assignment
				Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
				if(rabn.isOK()) {
					d.attrib = rabn.value;
				} else {
					return Result.err(rabn);
				}
			}
		}
		return rld;
	}

	/**
	 * Delete the namespace: first removes all ns_attrib rows, then delegates
	 * to the superclass to delete the ns row itself.
	 */
	@Override
	public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {
		TimeTaken tt = trans.start("Delete NS Attributes " + data.name, Env.REMOTE);
		try {
			StringBuilder stmt = new StringBuilder();
			attribDeleteAllStmt(stmt, data);
			try {
				getSession(trans).execute(stmt.toString());
			} catch (DriverException | APIException | IOException e) {
				reportPerhapsReset(trans,e);
				trans.info().log(stmt);
				return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
			}
		} finally {
			tt.done();
		}
		return super.delete(trans, data, reread);
	}

	/**
	 * Read all key/value attributes for one namespace from ns_attrib.
	 * NOTE(review): ns is concatenated into the CQL string — injection-prone
	 * for untrusted input; prefer a prepared statement.
	 */
	public Result<Map<String,String>> readAttribByNS(AuthzTrans trans, String ns) {
		Map<String,String> map = new HashMap<String,String>();
		TimeTaken tt = trans.start("readAttribByNS " + ns, Env.REMOTE);
		try {
			ResultSet rs = getSession(trans).execute(
					"SELECT key,value FROM " + TABLE_ATTRIB + " WHERE ns='" + ns + "';");
			for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
				Row r = iter.next();
				map.put(r.getString(0), r.getString(1));
			}
		} catch (DriverException | APIException | IOException e) {
			reportPerhapsReset(trans,e);
			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
		} finally {
			tt.done();
		}
		return Result.ok(map);
	}

	/**
	 * Reverse lookup: all namespace names that carry the given attribute key.
	 * Same string-concatenation caveat as readAttribByNS.
	 */
	public Result<Set<String>> readNsByAttrib(AuthzTrans trans, String key) {
		Set<String> set = new HashSet<String>();
		TimeTaken tt = trans.start("readNsBykey " + key, Env.REMOTE);
		try {
			ResultSet rs = getSession(trans).execute(
					"SELECT ns FROM " + TABLE_ATTRIB + " WHERE key='" + key + "';");
			for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
				Row r = iter.next();
				set.add(r.getString(0));
			}
		} catch (DriverException | APIException | IOException e) {
			reportPerhapsReset(trans,e);
			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
		} finally {
			tt.done();
		}
		return Result.ok(set);
	}

	/** Insert (or overwrite) a single attribute for a namespace. */
	public Result<Void> attribAdd(AuthzTrans trans, String ns, String key, String value) {
		try {
			getSession(trans).execute(attribInsertStmt(new StringBuilder(),ns,key,value).toString());
			return Result.ok();
		} catch (DriverException | APIException | IOException e) {
			reportPerhapsReset(trans,e);
			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
		}
	}

	/** Append one ns_attrib INSERT statement to sb (values inlined, unescaped). */
	private StringBuilder attribInsertStmt(StringBuilder sb, String ns, String key, String value) {
		sb.append("INSERT INTO ");
		sb.append(TABLE_ATTRIB);
		sb.append(" (ns,key,value) VALUES ('");
		sb.append(ns);
		sb.append(SQCSQ);
		sb.append(key);
		sb.append(SQCSQ);
		sb.append(value);
		sb.append("');");
		return sb;
	}

	/** Remove a single attribute from a namespace. */
	public Result<Void> attribRemove(AuthzTrans trans, String ns, String key) {
		try {
			getSession(trans).execute(attribDeleteStmt(new StringBuilder(),ns,key).toString());
			return Result.ok();
		} catch (DriverException | APIException | IOException e) {
			reportPerhapsReset(trans,e);
			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
		}
	}

	/** Append one ns_attrib DELETE (by ns + key) statement to stmt. */
	private StringBuilder attribDeleteStmt(StringBuilder stmt, String ns, String key) {
		stmt.append("DELETE FROM ");
		stmt.append(TABLE_ATTRIB);
		stmt.append(" WHERE ns='");
		stmt.append(ns);
		stmt.append("' AND key='");
		stmt.append(key);
		stmt.append("';");
		return stmt;
	}

	/** Append a DELETE of every attribute row for data.name to stmt. */
	private void attribDeleteAllStmt(StringBuilder stmt, Data data) {
		stmt.append("  DELETE FROM ");
		stmt.append(TABLE_ATTRIB);
		stmt.append(" WHERE ns='");
		stmt.append(data.name);
		stmt.append(SQSCCR);
	}

	/** Append an INSERT for every attribute in data to stmt (used inside batches). */
	private void attribInsertStmts(StringBuilder stmt, Data data) {
		// INSERT new Attrib
		for(Entry<String,String> es : data.attrib(false).entrySet() ) {
			stmt.append("  ");
			attribInsertStmt(stmt,data.name,es.getKey(),es.getValue());
		}
	}

	/**
	 * Add description to Namespace
	 * @param trans
	 * @param ns
	 * @param description
	 * @return
	 */
	public Result<Void> addDescription(AuthzTrans trans, String ns, String description) {
		try {
			// NOTE(review): description/ns inlined into CQL — same injection caveat as above.
			getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
					+ description + "' WHERE name = '" + ns + "';");
		} catch (DriverException | APIException | IOException e) {
			reportPerhapsReset(trans,e);
			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
		}

		Data data = new Data();
		data.name=ns;
		wasModified(trans, CRUD.update, data, "Added description " + description + " to namespace " + ns, null );
		return Result.ok();
	}

	/** Direct children of a namespace, via the prepared parent-keyed SELECT. */
	public Result<List<Data>> getChildren(AuthzTrans trans, String parent) {
		return psNS.read(trans, R_TEXT, new Object[]{parent});
	}

	/**
	 * Log Modification statements to History
	 *
	 * @param modified           which CRUD action was done
	 * @param data               entity data that needs a log entry
	 * @param overrideMessage    if this is specified, we use it rather than crafting a history
	 *                           message based on data; override[0] = memo, override[1] = subject.
	 * On delete, a serialized snapshot of the Data is stored for reconstruction.
	 * History/CacheInfo failures are logged but do not fail the caller.
	 */
	@Override
	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
		boolean memo = override.length>0 && override[0]!=null;
		boolean subject = override.length>1 && override[1]!=null;

		//TODO Must log history
		HistoryDAO.Data hd = HistoryDAO.newInitedData();
		hd.user = trans.user();
		hd.action = modified.name();
		hd.target = TABLE;
		hd.subject = subject ? override[1] : data.name;
		hd.memo = memo ? override[0] : (data.name + " was "  + modified.name() + 'd' );
		if(modified==CRUD.delete) {
			try {
				hd.reconstruct = data.bytify();
			} catch (IOException e) {
				trans.error().log(e,"Could not serialize NsDAO.Data");
			}
		}

		if(historyDAO.create(trans, hd).status!=Status.OK) {
			trans.error().log("Cannot log to History");
		}
		if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
			trans.error().log("Cannot touch CacheInfo");
		}
	}
}
/** * Copyright 2016 Carnegie Mellon University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.cmu.sv.modelinference.modeltool; import java.io.IOException; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Stopwatch; import edu.cmu.sv.modelinference.common.Util; import edu.cmu.sv.modelinference.common.api.LogHandler; import edu.cmu.sv.modelinference.common.api.LogProcessingException; import edu.cmu.sv.modelinference.common.model.Model; import edu.cmu.sv.modelinference.common.model.pp.Format; import edu.cmu.sv.modelinference.common.model.pp.PrettyPrinter; import edu.cmu.sv.modelinference.modeltool.handlers.PrismLog2ModelHandler; import edu.cmu.sv.modelinference.modeltool.handlers.STLog2ModelHandler; import edu.cmu.sv.modelinference.modeltool.handlers.UppaalLog2ModelHandler; import edu.cmu.sv.modelinference.modeltool.mc.ModelAdapter; import edu.cmu.sv.modelinference.modeltool.mc.ModelCheckerAdapter; /** * @author Kasper Luckow */ public class Log2Model implements LogHandler<Void> { private static final String 
RUN_MODEL_CHECKER_ARG = "runmc"; private static final String HELP_ARG = "help"; private static final String OUTPUT_ARG = "o"; private static final String MODEL_CHECKER_ARG = "m"; private static final String PROPERTIES_ARG = "p"; private static final String TO_DOT_ARG = "v"; private static final Logger logger = LoggerFactory.getLogger(Log2Model.class.getName()); private static Log2Model instance = null; public static Log2Model getInstance() { if(instance == null) { instance = new Log2Model(); } return instance; } private final Options cmdOpts; private Log2Model() { this.cmdOpts = this.createCmdOptions(); } private static Set<LogHandler<ModelCheckerAdapter<?, ?>>> modelCheckerHandlers = new HashSet<>(); private static Set<LogHandler<Model<?>>> intermediateModelHandlers = new HashSet<>(); static { //Add supported intermediate model generators intermediateModelHandlers.add(STLog2ModelHandler.getInstance()); //Add supported model checkers modelCheckerHandlers.add(PrismLog2ModelHandler.getInstance()); modelCheckerHandlers.add(UppaalLog2ModelHandler.getInstance()); } private Options createCmdOptions() { Options options = new Options(); Option help = new Option(HELP_ARG, "print this message"); Option runModelChecker = new Option(RUN_MODEL_CHECKER_ARG, false, "Run model checker"); Option properties = new Option(PROPERTIES_ARG, false, "Run model checker"); Option modelChecker = Option.builder(MODEL_CHECKER_ARG). argName(Util.getSupportedHandlersString(modelCheckerHandlers)). hasArg(). 
desc("Specify output path of resulting models").required().build(); Option outputPath = Option.builder(OUTPUT_ARG).argName("path").hasArg() .desc("Specify output path of resulting models").required().build(); Option toDot = Option.builder(TO_DOT_ARG).desc("Output model to DOT").build(); //options.addOption(runModelChecker); //options.addOption(properties); options.addOption(modelChecker); options.addOption(outputPath); options.addOption(toDot); options.addOption(help); return options; } @Override public String getHandlerName() { return "model"; } @Override public Void process(String logFile, String logType, String[] additionalCmdArgs) throws LogProcessingException { Stopwatch sw = Stopwatch.createStarted(); CommandLineParser parser = new DefaultParser(); CommandLine cmd = null; try { cmd = parser.parse(cmdOpts, additionalCmdArgs, true); } catch(ParseException exp) { logger.error(exp.getMessage()); System.err.println(exp.getMessage()); Util.printHelpAndExit(Log2Model.class, cmdOpts); } if(cmd.hasOption(HELP_ARG)) Util.printHelpAndExit(Log2Model.class, cmdOpts, 0); boolean runModelChecker = cmd.hasOption(RUN_MODEL_CHECKER_ARG); if(runModelChecker && !cmd.hasOption(PROPERTIES_ARG)) { String err = "Properties needed when executing model checker directly."; logger.error(err); System.err.println(err + " Add argument " + PROPERTIES_ARG); Util.printHelpAndExit(Log2Model.class, cmdOpts); } LogHandler<Model<?>> intermediateModelHandler = null; boolean found = false; for(LogHandler<Model<?>> lh : intermediateModelHandlers) { if(lh.getHandlerName().equals(logType)) { intermediateModelHandler = lh; found = true; break; } } if(!found) { StringBuilder sb = new StringBuilder(); Iterator<LogHandler<Model<?>>> logIter = intermediateModelHandlers.iterator(); while(logIter.hasNext()) { sb.append(logIter.next().getHandlerName()); if(logIter.hasNext()) sb.append(", "); } logger.error("Did not find intermediate model generator for log type " + logType); System.err.println("Supported log 
handlers: " + sb.toString()); Util.printHelpAndExit(Log2Model.class, cmdOpts); } logger.info("Using intemediate model generator for logtype: " + intermediateModelHandler.getHandlerName()); String modelCheckerHandlerOpt = cmd.getOptionValue(MODEL_CHECKER_ARG); LogHandler<ModelCheckerAdapter<?, ?>> modelCheckerHandler = null; found = false; for(LogHandler<ModelCheckerAdapter<?, ?>> lh : modelCheckerHandlers) { if(lh.getHandlerName().equals(modelCheckerHandlerOpt)) { modelCheckerHandler = lh; found = true; break; } } if(!found) { StringBuilder sb = new StringBuilder(); Iterator<LogHandler<ModelCheckerAdapter<?, ?>>> logIter = modelCheckerHandlers.iterator(); while(logIter.hasNext()) { sb.append(logIter.next().getHandlerName()); if(logIter.hasNext()) sb.append(", "); } logger.error("Did not find handler for model checker type " + logType); System.err.println("Supported model checkers: " + sb.toString()); Util.printHelpAndExit(Log2Model.class, cmdOpts); } logger.info("Using model checker for: " + modelCheckerHandler.getHandlerName()); String outputPath = cmd.getOptionValue(OUTPUT_ARG); logger.info("Outputting to " + outputPath); logger.info("Generating intermediate model"); Model<?> irmodel = intermediateModelHandler.process(logFile, logType, cmd.getArgs()); logger.info("Generating intermediate model took: " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms"); ModelCheckerAdapter<?, ?> modelChecker = modelCheckerHandler.process(logFile, logType, cmd.getArgs()); logger.info("Generating model checker took: " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms"); if(runModelChecker) { throw new LogProcessingException("Running model checker is not supported yet."); //String props = cmd.getOptionValue(PROPERTIES_ARG); //modelChecker.executeModelChecker(irModel, properties); } else { ModelAdapter<?> model = modelChecker.generateModel(irmodel); logger.info("Generating final model took: " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms"); logger.info("Saving model to " + outputPath); try { 
model.writeModelToFile(outputPath); } catch (IOException e) { logger.error(e.getMessage()); throw new LogProcessingException(e); } } if(cmd.hasOption(TO_DOT_ARG)) { PrettyPrinter p = new PrettyPrinter(irmodel); p.printModel(outputPath, Format.PDF); logger.info("Prettyprinting took: " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms"); } logger.info("Total processing time: " + sw.elapsed(TimeUnit.SECONDS) + "s"); return null; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.tracer.internal;

import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.Weigher;
import org.apache.commons.io.FileUtils;
import org.apache.felix.webconsole.SimpleWebConsolePlugin;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.io.JSONWriter;
import org.osgi.framework.BundleContext;

/**
 * Felix web-console plugin that records per-request tracer logs.
 *
 * Recording is opt-in via the {@code Sling-Tracer-Record} request header;
 * finished recordings are kept in a size- and time-bounded Guava cache keyed
 * by a generated request id, and can be fetched as JSON via
 * {@code /system/console/tracer/<id>.json} or browsed as HTML at the plugin
 * page. Also implements {@link TraceLogRecorder} for the recording lifecycle.
 */
class TracerLogServlet extends SimpleWebConsolePlugin implements TraceLogRecorder {
    // Request attribute under which the active Recording is stashed.
    static final String ATTR_RECORDING = TracerLogServlet.class.getName();

    public static final String CLEAR = "clear";

    private static final String LABEL = "tracer";

    // Client sets this header to request recording of its request.
    public static final String HEADER_TRACER_RECORDING = "Sling-Tracer-Record";

    // Server echoes the generated recording id back in this header.
    public static final String HEADER_TRACER_REQUEST_ID = "Sling-Tracer-Request-Id";

    public static final String HEADER_TRACER_PROTOCOL_VERSION = "Sling-Tracer-Protocol-Version";

    public static final int TRACER_PROTOCOL_VERSION = 1;

    // Bounded by total recording bytes (maximumWeight) and idle time (expireAfterAccess).
    private final Cache<String, JSONRecording> cache;

    private final boolean compressRecording;

    private final int cacheSizeInMB;

    private final long cacheDurationInSecs;

    private final boolean gzipResponse;

    public TracerLogServlet(BundleContext context){
        this(context, LogTracer.PROP_TRACER_SERVLET_CACHE_SIZE_DEFAULT,
                LogTracer.PROP_TRACER_SERVLET_CACHE_DURATION_DEFAULT,
                LogTracer.PROP_TRACER_SERVLET_COMPRESS_DEFAULT,
                LogTracer.PROP_TRACER_SERVLET_GZIP_RESPONSE_DEFAULT
        );
    }

    /**
     * @param cacheSizeInMB       max total weight of cached recordings (MB)
     * @param cacheDurationInSecs idle expiry for cached recordings
     * @param compressionEnabled  compress stored recordings; also a precondition
     *                            for gzipResponse below
     * @param gzipResponse        gzip the HTTP response when the client accepts it
     */
    public TracerLogServlet(BundleContext context, int cacheSizeInMB, long cacheDurationInSecs,
                            boolean compressionEnabled, boolean gzipResponse) {
        super(LABEL, "Sling Tracer", "Sling", null);
        this.compressRecording = compressionEnabled;
        this.cacheDurationInSecs = cacheDurationInSecs;
        this.cacheSizeInMB = cacheSizeInMB;
        // gzip responses only make sense when recordings are stored compressed.
        this.gzipResponse = compressionEnabled && gzipResponse;
        this.cache = CacheBuilder.newBuilder()
                .maximumWeight(cacheSizeInMB * FileUtils.ONE_MB)
                .weigher(new Weigher<String, JSONRecording>() {
                    @Override
                    public int weigh(@Nonnull String key, @Nonnull JSONRecording value) {
                        return value.size();
                    }
                })
                .expireAfterAccess(cacheDurationInSecs, TimeUnit.SECONDS)
                .recordStats()
                .build();
        register(context);
    }

    boolean isCompressRecording() {
        return compressRecording;
    }

    public boolean isGzipResponse() {
        return gzipResponse;
    }

    int getCacheSizeInMB() {
        return cacheSizeInMB;
    }

    long getCacheDurationInSecs() {
        return cacheDurationInSecs;
    }

    //~-----------------------------------------------< WebConsole Plugin >

    /**
     * HTML page (status + recording list) for console requests; otherwise a
     * JSON response for {@code <id>.json} requests, streaming the matching
     * recording or a {"error":"Not found"} object.
     */
    @Override
    protected void renderContent(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        if (isHtmlRequest(request)){
            PrintWriter pw = response.getWriter();
            renderStatus(pw);
            renderRequests(pw);
        } else {
            String requestId = getRequestId(request);
            prepareJSONResponse(response);
            try {
                boolean responseDone = false;
                if (requestId != null) {
                    JSONRecording recording = cache.getIfPresent(requestId);
                    if (recording != null){
                        boolean shouldGZip = prepareForGZipResponse(request, response);
                        responseDone = recording.render(response.getOutputStream(), shouldGZip);
                    }
                }
                if (!responseDone) {
                    PrintWriter pw = response.getWriter();
                    JSONWriter jw = new JSONWriter(pw);
                    jw.object();
                    jw.key("error").value("Not found");
                    jw.endObject();
                }
            } catch (JSONException e) {
                throw new ServletException(e);
            }
        }
    }

    /** Handles the "Clear" console button: empties the cache, then redirects back. */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        if (req.getParameter(CLEAR) != null) {
            resetCache();
            resp.sendRedirect(req.getRequestURI());
        }
    }

    @Override
    protected boolean isHtmlRequest(HttpServletRequest request) {
        return request.getRequestURI().endsWith(LABEL);
    }

    /**
     * Sets Content-Encoding: gzip when gzip responses are enabled and the
     * client's Accept-Encoding allows it; returns whether to gzip the body.
     */
    private boolean prepareForGZipResponse(HttpServletRequest request, HttpServletResponse response) {
        if (!gzipResponse) {
            return false;
        }

        String acceptEncoding = request.getHeader("Accept-Encoding");
        boolean acceptsGzip = acceptEncoding != null && accepts(acceptEncoding, "gzip");
        if (acceptsGzip) {
            response.setHeader("Content-Encoding", "gzip");
        }
        return acceptsGzip;
    }

    /**
     * Returns true if the given accept header accepts the given value.
     * @param acceptHeader The accept header.
     * @param toAccept The value to be accepted.
     * @return True if the given accept header accepts the given value.
     */
    private static boolean accepts(String acceptHeader, String toAccept) {
        // Simple substring match; does not honor q-values or per-token parsing.
        return acceptHeader.contains(toAccept) || acceptHeader.contains("*/*");
    }

    private static void prepareJSONResponse(HttpServletResponse response) {
        response.setContentType("application/json");
        response.setCharacterEncoding("UTF-8");
    }

    /** Renders the console status line and the clear-cache button group. */
    private void renderStatus(PrintWriter pw) {
        pw.printf("<p class='statline'>Log Tracer Recordings: %d recordings, %s memory " +
                "(Max %dMB, Expired in %d secs)</p>%n", cache.size(), memorySize(), cacheSizeInMB, cacheDurationInSecs);

        pw.println("<div class='ui-widget-header ui-corner-top buttonGroup'>");
        pw.println("<span style='float: left; margin-left: 1em'>Tracer Recordings</span>");
        pw.println("<form method='POST'><input type='hidden' name='clear' value='clear'><input " +
                "type='submit' value='Clear' class='ui-state-default ui-corner-all'></form>");
        pw.println("</div>");
    }

    /** Sum of recording sizes, formatted for humans. */
    private String memorySize() {
        long size = 0;
        for (JSONRecording r : cache.asMap().values()){
            size += r.size();
        }
        return humanReadableByteCount(size);
    }

    /** Renders one list item per cached recording, sorted, linking to its JSON. */
    private void renderRequests(PrintWriter pw) {
        if (cache.size() > 0){
            pw.println("<ol>");
            List<JSONRecording> recordings = new ArrayList<JSONRecording>(cache.asMap().values());
            // Local SimpleDateFormat: not shared, so its thread-unsafety is harmless here.
            SimpleDateFormat sdf = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
            Collections.sort(recordings);
            for (JSONRecording r : recordings){
                String id = r.getRequestId();
                String date = sdf.format(new Date(r.getStart()));
                pw.printf("<li>%s - <a href='%s/%s.json'>%s</a> - %s (%s) (%dms)</li>", date, LABEL, id,
                        id, r.getUri(), humanReadableByteCount(r.size()), r.getTimeTaken());
            }
            pw.println("</ol>");
        }
    }

    /** Extracts the recording id from ".../<id>.json"; null if the URI has no dot segment. */
    private static String getRequestId(HttpServletRequest request) {
        String requestUri = request.getRequestURI();
        int lastSlash = requestUri.lastIndexOf('/');
        int lastDot = requestUri.indexOf('.', lastSlash + 1);
        if (lastDot > 0){
            return requestUri.substring(lastSlash + 1, lastDot);
        }
        return null;
    }

    //~-----------------------------------------------< TraceLogRecorder >

    /**
     * Starts a recording if the client asked for one (Sling-Tracer-Record
     * header), attaching it to the request and echoing id + protocol version
     * headers; returns Recording.NOOP otherwise or the existing recording on
     * re-entry.
     */
    @Override
    public Recording startRecording(HttpServletRequest request, HttpServletResponse response) {
        if (request.getHeader(HEADER_TRACER_RECORDING) == null){
            return Recording.NOOP;
        }

        if (request.getAttribute(ATTR_RECORDING) != null){
            //Already processed
            return getRecordingForRequest(request);
        }

        String requestId = generateRequestId();
        JSONRecording recording = record(requestId, request);

        response.setHeader(HEADER_TRACER_REQUEST_ID, requestId);
        response.setHeader(HEADER_TRACER_PROTOCOL_VERSION, String.valueOf(TRACER_PROTOCOL_VERSION));
        return recording;
    }

    @Override
    public Recording getRecordingForRequest(HttpServletRequest request) {
        Recording recording = (Recording) request.getAttribute(ATTR_RECORDING);
        if (recording == null){
            recording = Recording.NOOP;
        }
        return recording;
    }

    /** Finalizes a real recording, caches it by request id, and detaches it from the request. */
    @Override
    public void endRecording(HttpServletRequest httpRequest, Recording recording) {
        if (recording instanceof JSONRecording) {
            JSONRecording r = (JSONRecording) recording;
            r.done();
            cache.put(r.getRequestId(), r);
        }
        httpRequest.removeAttribute(ATTR_RECORDING);
    }

    Recording getRecording(String requestId) {
        Recording recording = cache.getIfPresent(requestId);
        return recording == null ? Recording.NOOP : recording;
    }

    private JSONRecording record(String requestId, HttpServletRequest request) {
        JSONRecording data = new JSONRecording(requestId, request, compressRecording);
        request.setAttribute(ATTR_RECORDING, data);
        return data;
    }

    private static String generateRequestId() {
        return UUID.randomUUID().toString();
    }

    /**
     * Returns a human-readable version of the file size, where the input represents
     * a specific number of bytes.
     * Based on http://stackoverflow.com/a/3758880/1035417
     */
    @SuppressWarnings("Duplicates")
    private static String humanReadableByteCount(long bytes) {
        if (bytes < 0) {
            return "0";
        }
        int unit = 1000;
        if (bytes < unit) {
            return bytes + " B";
        }
        int exp = (int) (Math.log(bytes) / Math.log(unit));
        char pre = "kMGTPE".charAt(exp - 1);
        return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
    }

    void resetCache(){
        cache.invalidateAll();
    }
}
/* * Copyright 2017 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.domain.packagerepository; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.builder.ConfigurationPropertyBuilder; import com.thoughtworks.go.config.materials.AbstractMaterialConfig; import com.thoughtworks.go.config.validation.NameTypeValidator; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.ConfigurationDisplayUtil; import com.thoughtworks.go.domain.config.Configuration; import com.thoughtworks.go.domain.config.ConfigurationProperty; import com.thoughtworks.go.domain.config.SecureKeyInfoProvider; import com.thoughtworks.go.plugin.access.packagematerial.*; import com.thoughtworks.go.plugin.api.config.Property; import com.thoughtworks.go.util.CachedDigestUtils; import org.apache.commons.lang.StringUtils; import javax.annotation.PostConstruct; import java.io.Serializable; import java.util.*; import static java.lang.String.format; import static org.apache.commons.lang.StringUtils.isBlank; @ConfigTag("package") @ConfigReferenceCollection(collectionName = "packages", idFieldName = "id") public class PackageDefinition implements Serializable, Validatable, ParamsAttributeAware { public static final String NAME = "name"; public static final String ID = "id"; public static final String CONFIGURATION = "configuration"; private 
final ConfigErrors errors = new ConfigErrors(); @ConfigAttribute(value = "id", allowNull = true) private String id; @ConfigAttribute(value = "name", allowNull = false) private String name; @ConfigAttribute(value = "autoUpdate", optional = true) private boolean autoUpdate = true; @Expose @SerializedName("config") @ConfigSubtag private Configuration configuration = new Configuration(); @Expose @SerializedName("repository") @IgnoreTraversal private PackageRepository packageRepository; public PackageDefinition() { } public PackageDefinition(String id, String name, Configuration configuration) { this.id = id; this.name = name; this.configuration = configuration; } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public boolean isAutoUpdate() { return autoUpdate; } public void setAutoUpdate(boolean autoUpdate) { this.autoUpdate = autoUpdate; } public Configuration getConfiguration() { return configuration; } public PackageRepository getRepository() { return packageRepository; } public void setConfiguration(Configuration configuration) { this.configuration = configuration; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } PackageDefinition that = (PackageDefinition) o; if (configuration != null ? !configuration.equals(that.configuration) : that.configuration != null) { return false; } if (id != null ? !id.equals(that.id) : that.id != null) { return false; } if (name != null ? !name.equals(that.name) : that.name != null) { return false; } return true; } @Override public int hashCode() { int result = id != null ? id.hashCode() : 0; result = 31 * result + (name != null ? name.hashCode() : 0); result = 31 * result + (configuration != null ? 
configuration.hashCode() : 0); return result; } @Override public void validate(ValidationContext validationContext) { if (isBlank(name)) { errors().add(NAME, "Package name is mandatory"); } else if (new NameTypeValidator().isNameInvalid(name)) { errors().add(NAME, NameTypeValidator.errorMessage("Package", name)); } configuration.validateUniqueness(String.format("Package '%s'", name)); } public void validateFingerprintUniqueness(Map<String, Packages> packagesMap) { String fingerprint = getFingerprint(AbstractMaterialConfig.FINGERPRINT_DELIMITER); Packages packageDefinitionsWithSameFingerprint = packagesMap.get(fingerprint); if (packageDefinitionsWithSameFingerprint.size() > 1) { List<String> packageNames = new ArrayList<>(); for (PackageDefinition packageDefinition : packageDefinitionsWithSameFingerprint) { packageNames.add(format("[Repo Name: '%s', Package Name: '%s']", packageDefinition.getRepository().getName(), packageDefinition.getName())); } addError(ID, String.format("Cannot save package or repo, found duplicate packages. 
%s", StringUtils.join(packageNames, ", "))); } } @Override public ConfigErrors errors() { return errors; } public List<ConfigErrors> getAllErrors() { return ErrorCollector.getAllErrors(this); } @Override public void addError(String fieldName, String message) { errors.add(fieldName, message); } public void setRepository(PackageRepository packageRepository) { this.packageRepository = packageRepository; } public String getConfigForDisplay() { AbstractMetaDataStore metadataStore = PackageMetadataStore.getInstance(); List<ConfigurationProperty> propertiesToBeUsedForDisplay = ConfigurationDisplayUtil.getConfigurationPropertiesToBeUsedForDisplay(metadataStore, pluginId(), configuration); return format("%s - Package: %s", getRepository().getConfigForDisplay(), configuration.forDisplay(propertiesToBeUsedForDisplay)); } public String getFingerprint(String fingerprintDelimiter) { List<String> list = new ArrayList<>(); list.add(format("%s=%s", "plugin-id", pluginId())); handlePackageDefinitionProperties(list); handlePackageRepositoryProperties(list); String fingerprint = StringUtils.join(list, fingerprintDelimiter); // CAREFUL! 
the hash algorithm has to be same as the one used in 47_create_new_materials.sql return CachedDigestUtils.sha256Hex(fingerprint); } private void handlePackageDefinitionProperties(List<String> list) { PackageConfigurations metadata = PackageMetadataStore.getInstance().getMetadata(pluginId()); for (ConfigurationProperty configurationProperty : configuration) { handleProperty(list, metadata, configurationProperty); } } private String pluginId() { return packageRepository.getPluginConfiguration().getId(); } public void addConfigurations(List<ConfigurationProperty> configurations) { ConfigurationPropertyBuilder builder = new ConfigurationPropertyBuilder(); for (ConfigurationProperty property : configurations) { if (doesPluginExist()) { com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration packageMetadata = PackageMetadataStore.getInstance().getPackageMetadata(pluginId()); if (isValidConfiguration(property.getConfigKeyName(), packageMetadata)) { configuration.add(builder.create(property.getConfigKeyName(), property.getConfigValue(), property.getEncryptedValue(), packagePropertyFor(property.getConfigKeyName(), packageMetadata).getOption(Property.SECURE))); } else { configuration.add(property); } } else { configuration.add(property); } } } private boolean isValidConfiguration(String configKeyName, com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration packageMetadata) { return packagePropertyFor(configKeyName, packageMetadata) != null; } private Property packagePropertyFor(String configKeyName, com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration packageMetadata) { return packageMetadata.get(configKeyName); } private boolean doesPluginExist() { return packageRepository != null && RepositoryMetadataStore.getInstance().hasPlugin(pluginId()); } private void handlePackageRepositoryProperties(List<String> list) { PackageConfigurations metadata = 
RepositoryMetadataStore.getInstance().getMetadata(pluginId()); for (ConfigurationProperty configurationProperty : packageRepository.getConfiguration()) { handleProperty(list, metadata, configurationProperty); } } private void handleProperty(List<String> list, PackageConfigurations metadata, ConfigurationProperty configurationProperty) { PackageConfiguration packageConfiguration = null; if (metadata != null) { packageConfiguration = metadata.get(configurationProperty.getConfigurationKey().getName()); } if (packageConfiguration == null || packageConfiguration.getOption(PackageConfiguration.PART_OF_IDENTITY)) { list.add(configurationProperty.forFingerprint()); } } public void applyPackagePluginMetadata(String pluginId) { for (ConfigurationProperty configurationProperty : configuration) { PackageMetadataStore packageMetadataStore = PackageMetadataStore.getInstance(); if (packageMetadataStore.getMetadata(pluginId) != null) { boolean isSecureProperty = packageMetadataStore.hasOption(pluginId, configurationProperty.getConfigurationKey().getName(), PackageConfiguration.SECURE); configurationProperty.handleSecureValueConfiguration(isSecureProperty); } } } public void setConfigAttributes(Object attributes) { if (attributes == null) { return; } Map map = (Map) attributes; name = (String) map.get("name"); if (map.containsKey(Configuration.CONFIGURATION) && packageRepository != null) { configuration.setConfigAttributes(map.get(Configuration.CONFIGURATION), getSecureKeyInfoProvider()); } } private SecureKeyInfoProvider getSecureKeyInfoProvider() { PackageMetadataStore packageMetadataStore = PackageMetadataStore.getInstance(); final PackageConfigurations metadata = packageMetadataStore.getMetadata(pluginId()); if (metadata == null) { return null; } return new SecureKeyInfoProvider() { @Override public boolean isSecure(String key) { PackageConfiguration packageConfiguration = metadata.get(key); return packageConfiguration.getOption(PackageConfiguration.SECURE); } }; } public void 
addConfigurationErrorFor(String key, String message) { configuration.addErrorFor(key, message); } @Override public String toString() { return "PackageDefinition{" + "configuration=" + configuration + ", id='" + id + '\'' + ", name='" + name + '\'' + '}'; } public void clearEmptyConfigurations() { configuration.clearEmptyConfigurations(); } public void validateNameUniqueness(HashMap<String, PackageDefinition> nameMap) { String errorMessageForDuplicateName = String.format("You have defined multiple packages called '%s'. Package names are case-insensitive and must be unique within a repository.", name); PackageDefinition repoWithSameFieldValue = nameMap.get(name.toLowerCase()); if (repoWithSameFieldValue == null) { nameMap.put(name.toLowerCase(), this); } else { addError(NAME, errorMessageForDuplicateName); } } @PostConstruct public void ensureIdExists() { if (isBlank(getId())) { setId(UUID.randomUUID().toString()); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.datastructures; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Timer; import java.util.TimerTask; import java.util.UUID; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteAtomicLong; import org.apache.ignite.IgniteAtomicReference; import org.apache.ignite.IgniteAtomicSequence; import org.apache.ignite.IgniteAtomicStamped; import org.apache.ignite.IgniteCompute; import org.apache.ignite.IgniteCountDownLatch; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteInterruptedException; import org.apache.ignite.IgniteLock; import org.apache.ignite.IgniteQueue; import org.apache.ignite.IgniteSemaphore; import 
org.apache.ignite.cache.CacheMode; import org.apache.ignite.configuration.AtomicConfiguration; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.CollectionConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.NodeStoppingException; import org.apache.ignite.internal.util.GridLeanSet; import org.apache.ignite.internal.util.typedef.CA; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.internal.util.typedef.PA; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.lang.IgniteCallable; import org.apache.ignite.lang.IgniteClosure; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.resources.IgniteInstanceResource; import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.testframework.GridTestUtils; import org.junit.Test; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.testframework.GridTestUtils.waitForCondition; /** * Failover tests for cache data structures. 
*/ public abstract class GridCacheAbstractDataStructuresFailoverSelfTest extends IgniteCollectionAbstractTest { /** */ private static final long TEST_TIMEOUT = 3 * 60 * 1000; /** */ private static final String NEW_IGNITE_INSTANCE_NAME = "newGrid"; /** */ private static final String STRUCTURE_NAME = "structure"; /** */ private static final String TRANSACTIONAL_CACHE_NAME = "tx_cache"; /** */ private static final String CLIENT_INSTANCE_NAME = "client"; /** */ private static final int TOP_CHANGE_CNT = 2; /** */ private static final int TOP_CHANGE_THREAD_CNT = 2; /** */ private boolean client; /** {@inheritDoc} */ @Override protected long getTestTimeout() { return TEST_TIMEOUT; } /** * @return Grids count to start. */ @Override public int gridCount() { return 3; } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { // No-op } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { startGridsMultiThreaded(gridCount()); super.beforeTest(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { stopAllGrids(); } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); ((TcpCommunicationSpi)cfg.getCommunicationSpi()).setSharedMemoryPort(-1); AtomicConfiguration atomicCfg = new AtomicConfiguration(); atomicCfg.setCacheMode(collectionCacheMode()); atomicCfg.setBackups(collectionConfiguration().getBackups()); cfg.setAtomicConfiguration(atomicCfg); CacheConfiguration ccfg = new CacheConfiguration(DEFAULT_CACHE_NAME); ccfg.setName(TRANSACTIONAL_CACHE_NAME); ccfg.setAtomicityMode(TRANSACTIONAL); cfg.setCacheConfiguration(ccfg); if (client) { cfg.setClientMode(client); ((TcpDiscoverySpi)(cfg.getDiscoverySpi())).setForceServerMode(true); } return cfg; } /** * Starts client node. * * @return client node. * @throws Exception If failed. 
*/ protected IgniteEx startClient() throws Exception { return startGrid(getConfiguration(CLIENT_INSTANCE_NAME).setClientMode(true)); } /** * @throws Exception If failed. */ @Test public void testAtomicLongFailsWhenServersLeft() throws Exception { client = true; Ignite ignite = startGrid(gridCount()); new Timer().schedule(new TimerTask() { @Override public void run() { for (int i = 0; i < gridCount(); i++) stopGrid(i); } }, 10_000); long stopTime = U.currentTimeMillis() + TEST_TIMEOUT / 2; IgniteAtomicLong atomic = ignite.atomicLong(STRUCTURE_NAME, 10, true); try { while (U.currentTimeMillis() < stopTime) assertEquals(10, atomic.get()); } catch (IgniteException ignore) { return; // Test that client does not hang. } fail(); } /** * @throws Exception If failed. */ @Test public void testAtomicLongTopologyChange() throws Exception { try (IgniteAtomicLong atomic = grid(0).atomicLong(STRUCTURE_NAME, 10, true)) { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); assertEquals(10, g.atomicLong(STRUCTURE_NAME, 10, false).get()); assertEquals(20, g.atomicLong(STRUCTURE_NAME, 10, false).addAndGet(10)); stopGrid(NEW_IGNITE_INSTANCE_NAME); assertEquals(20, grid(0).atomicLong(STRUCTURE_NAME, 10, true).get()); } } /** * @throws Exception If failed. */ @Test public void testAtomicLongConstantTopologyChange() throws Exception { doTestAtomicLong(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * @throws Exception If failed. */ @Test public void testAtomicLongConstantMultipleTopologyChange() throws Exception { doTestAtomicLong(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * Tests IgniteAtomicLong. * * @param topWorker Topology change worker. * @throws Exception If failed. 
*/ private void doTestAtomicLong(ConstantTopologyChangeWorker topWorker) throws Exception { try (IgniteAtomicLong s = grid(0).atomicLong(STRUCTURE_NAME, 1, true)) { IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() { @Override public Object apply(Ignite ignite) { assert ignite.atomicLong(STRUCTURE_NAME, 1, true).get() > 0; return null; } }); long val = s.get(); while (!fut.isDone()) { assertEquals(val, s.get()); assertEquals(++val, s.incrementAndGet()); } fut.get(); for (Ignite g : G.allGrids()) assertEquals(val, g.atomicLong(STRUCTURE_NAME, 1, false).get()); } } /** * @throws Exception If failed. */ @Test public void testAtomicReferenceTopologyChange() throws Exception { try (IgniteAtomicReference atomic = grid(0).atomicReference(STRUCTURE_NAME, 10, true)) { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); assertEquals((Integer)10, g.atomicReference(STRUCTURE_NAME, 10, false).get()); g.atomicReference(STRUCTURE_NAME, 10, false).set(20); stopGrid(NEW_IGNITE_INSTANCE_NAME); assertEquals((Integer)20, grid(0).atomicReference(STRUCTURE_NAME, 10, true).get()); } } /** * @throws Exception If failed. */ @Test public void testAtomicReferenceConstantTopologyChange() throws Exception { doTestAtomicReference(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * @throws Exception If failed. */ @Test public void testAtomicReferenceConstantMultipleTopologyChange() throws Exception { doTestAtomicReference(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * Tests atomic reference. * * @param topWorker Topology change worker. * @throws Exception If failed. 
*/ private void doTestAtomicReference(ConstantTopologyChangeWorker topWorker) throws Exception { try (IgniteAtomicReference<Integer> s = grid(0).atomicReference(STRUCTURE_NAME, 1, true)) { IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() { @Override public Object apply(Ignite ignite) { assert ignite.atomicReference(STRUCTURE_NAME, 1, false).get() > 0; return null; } }); int val = s.get(); while (!fut.isDone()) { assertEquals(val, (int)s.get()); s.set(++val); } fut.get(); for (Ignite g : G.allGrids()) assertEquals(val, (int)g.atomicReference(STRUCTURE_NAME, 1, true).get()); } } /** * @throws Exception If failed. */ @Test public void testAtomicStampedTopologyChange() throws Exception { try (IgniteAtomicStamped atomic = grid(0).atomicStamped(STRUCTURE_NAME, 10, 10, true)) { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); IgniteBiTuple<Integer, Integer> t = g.atomicStamped(STRUCTURE_NAME, 10, 10, false).get(); assertEquals((Integer)10, t.get1()); assertEquals((Integer)10, t.get2()); g.atomicStamped(STRUCTURE_NAME, 10, 10, false).set(20, 20); stopGrid(NEW_IGNITE_INSTANCE_NAME); t = grid(0).atomicStamped(STRUCTURE_NAME, 10, 10, false).get(); assertEquals((Integer)20, t.get1()); assertEquals((Integer)20, t.get2()); } } /** * @throws Exception If failed. */ @Test public void testAtomicStampedConstantTopologyChange() throws Exception { doTestAtomicStamped(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * @throws Exception If failed. */ @Test public void testAtomicStampedConstantMultipleTopologyChange() throws Exception { doTestAtomicStamped(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * Tests atomic stamped value. * * @param topWorker Topology change worker. * @throws Exception If failed. 
*/ private void doTestAtomicStamped(ConstantTopologyChangeWorker topWorker) throws Exception { try (IgniteAtomicStamped<Integer, Integer> s = grid(0).atomicStamped(STRUCTURE_NAME, 1, 1, true)) { IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() { @Override public Object apply(Ignite ignite) { IgniteBiTuple<Integer, Integer> t = ignite.atomicStamped(STRUCTURE_NAME, 1, 1, false).get(); assert t.get1() > 0; assert t.get2() > 0; return null; } }); int val = s.value(); while (!fut.isDone()) { IgniteBiTuple<Integer, Integer> t = s.get(); assertEquals(val, (int)t.get1()); assertEquals(val, (int)t.get2()); ++val; s.set(val, val); } fut.get(); for (Ignite g : G.allGrids()) { IgniteBiTuple<Integer, Integer> t = g.atomicStamped(STRUCTURE_NAME, 1, 1, false).get(); assertEquals(val, (int)t.get1()); assertEquals(val, (int)t.get2()); } } } /** * @throws Exception If failed. */ @Test public void testCountDownLatchTopologyChange() throws Exception { try (IgniteCountDownLatch latch = grid(0).countDownLatch(STRUCTURE_NAME, 20, true, true)) { try { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); assertEquals(20, g.countDownLatch(STRUCTURE_NAME, 20, true, false).count()); g.countDownLatch(STRUCTURE_NAME, 20, true, false).countDown(10); stopGrid(NEW_IGNITE_INSTANCE_NAME); assertEquals(10, grid(0).countDownLatch(STRUCTURE_NAME, 20, true, false).count()); } finally { grid(0).countDownLatch(STRUCTURE_NAME, 20, true, false).countDownAll(); } } } /** * @throws Exception If failed. 
*/ @Test public void testSemaphoreFailoverSafe() throws Exception { try (final IgniteSemaphore semaphore = grid(0).semaphore(STRUCTURE_NAME, 20, true, true)) { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); IgniteSemaphore semaphore2 = g.semaphore(STRUCTURE_NAME, 20, true, false); assertEquals(20, semaphore2.availablePermits()); semaphore2.acquire(10); stopGrid(NEW_IGNITE_INSTANCE_NAME); waitForCondition(new PA() { @Override public boolean apply() { return semaphore.availablePermits() == 20; } }, 2000); } } /** * @throws Exception If failed. */ @Test public void testSemaphoreNonFailoverSafe() throws Exception { try (IgniteSemaphore sem = grid(0).semaphore(STRUCTURE_NAME, 20, false, true)) { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); IgniteSemaphore sem2 = g.semaphore(STRUCTURE_NAME, 20, false, false); sem2.acquire(20); assertEquals(0, sem.availablePermits()); new Timer().schedule(new TimerTask() { @Override public void run() { stopGrid(NEW_IGNITE_INSTANCE_NAME); } }, 2000); try { sem.acquire(1); } catch (IgniteInterruptedException ignored) { // Expected exception. return; } } fail("Thread hasn't been interrupted"); } /** * @throws Exception If failed. */ @Test public void testCanCloseSetInInterruptedThread() throws Exception { doCloseByInterruptedThread(grid(0).set(STRUCTURE_NAME, new CollectionConfiguration())); } /** * @throws Exception If failed. */ @Test public void testCanCloseQueueInInterruptedThread() throws Exception { doCloseByInterruptedThread(grid(0).queue(STRUCTURE_NAME, 0, new CollectionConfiguration())); } /** * @throws Exception If failed. */ @Test public void testCanCloseAtomicLongInInterruptedThread() throws Exception { doCloseByInterruptedThread(grid(0).atomicLong(STRUCTURE_NAME, 10, true)); } /** * @throws Exception If failed. */ @Test public void testCanCloseAtomicReferenceInInterruptedThread() throws Exception { doCloseByInterruptedThread(grid(0).atomicReference(STRUCTURE_NAME, 10, true)); } /** * @throws Exception If failed. 
*/ @Test public void testCanCloseCountDownLatchInInterruptedThread() throws Exception { IgniteCountDownLatch latch = grid(0).countDownLatch(STRUCTURE_NAME, 1, true, true); latch.countDown(); doCloseByInterruptedThread(latch); } /** * @throws Exception If failed. */ @Test public void testCanCloseAtomicStampedInInterruptedThread() throws Exception { doCloseByInterruptedThread(grid(0).atomicStamped(STRUCTURE_NAME, 10, 10,true)); } /** * @throws Exception If failed. */ @Test public void testCanCloseSemaphoreInInterruptedThread() throws Exception { doCloseByInterruptedThread(grid(0).semaphore(STRUCTURE_NAME, 1, true, true)); } /** * Tries close datastructure in interrupted thread * * @param closeableDs DataStructure to close. * @throws Exception If failed. */ private void doCloseByInterruptedThread(final Closeable closeableDs) throws Exception { Thread.currentThread().interrupt(); try { closeableDs.close(); } catch (IOException e) { throw new RuntimeException(e); } finally { Thread.interrupted(); } } /** * @throws Exception If failed. */ @Test public void testSemaphoreSingleNodeFailure() throws Exception { final Ignite i1 = grid(0); IgniteSemaphore sem1 = i1.semaphore(STRUCTURE_NAME, 1, false, true); sem1.acquire(); final CountDownLatch createLatch = new CountDownLatch(1); IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Void>() { @Override public Void call() { boolean failed = true; IgniteSemaphore sem2 = i1.semaphore(STRUCTURE_NAME, 1, false, true); try { // Guard the acquire call by count down latch to make sure that semaphore creation does not fail. 
createLatch.countDown(); sem2.acquire(); } catch (Exception ignored){ failed = false; } finally { assertFalse(failed); sem2.release(); } return null; } }); assertTrue("Failed to wait for semaphore creation", createLatch.await(getTestTimeout(), TimeUnit.MILLISECONDS)); while(!sem1.hasQueuedThreads()) { try { Thread.sleep(1); } catch (InterruptedException ignored) { fail(); } } i1.close(); fut.get(); } /** * @throws Exception If failed. */ @Test public void testSemaphoreConstantTopologyChangeFailoverSafe() throws Exception { doTestSemaphore(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true); } /** * @throws Exception If failed. */ @Test public void testSemaphoreConstantTopologyChangeNonFailoverSafe() throws Exception { doTestSemaphore(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false); } /** * @throws Exception If failed. */ @Test public void testSemaphoreMultipleTopologyChangeFailoverSafe() throws Exception { doTestSemaphore(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true); } /** * @throws Exception If failed. */ @Test public void testSemaphoreMultipleTopologyChangeNonFailoverSafe() throws Exception { doTestSemaphore(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false); } /** * @throws Exception If failed. */ private void doTestSemaphore(ConstantTopologyChangeWorker topWorker, final boolean failoverSafe) throws Exception { final int permits = topWorker instanceof MultipleTopologyChangeWorker || topWorker instanceof PartitionedMultipleTopologyChangeWorker ? 
TOP_CHANGE_THREAD_CNT * 3 : TOP_CHANGE_CNT; try (IgniteSemaphore s = grid(0).semaphore(STRUCTURE_NAME, permits, failoverSafe, true)) { IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() { @Override public Object apply(Ignite ignite) { IgniteSemaphore sem = ignite.semaphore(STRUCTURE_NAME, permits, failoverSafe, false); while (true) { try { sem.acquire(1); break; } catch (IgniteInterruptedException e) { // Exception may happen in non failover safe mode. if (failoverSafe) throw e; else { // In non-failoverSafe mode semaphore is not safe to be reused, // and should always be discarded after exception is caught. break; } } } return null; } }); while (!fut.isDone()) { while (true) { try { s.acquire(1); break; } catch (IgniteInterruptedException e) { // Exception may happen in non failover safe mode. if (failoverSafe) throw e; else { // In non-failoverSafe mode semaphore is not safe to be reused, // and should always be discarded after exception is caught. break; } } } assert s.availablePermits() < permits; s.release(); assert s.availablePermits() <= permits; } fut.get(); // Semaphore is left in proper state only if failoverSafe mode is used. if (failoverSafe) { for (Ignite g : G.allGrids()) assertEquals(permits, g.semaphore(STRUCTURE_NAME, permits, false, false).availablePermits()); } } } /** * @throws Exception If failed. */ @Test public void testReentrantLockFailsWhenServersLeft() throws Exception { testReentrantLockFailsWhenServersLeft(false); } /** * @throws Exception If failed. */ @Test public void testFairReentrantLockFailsWhenServersLeft() throws Exception { testReentrantLockFailsWhenServersLeft(true); } /** * @throws Exception If failed. */ public void testReentrantLockFailsWhenServersLeft(final boolean fair) throws Exception { client = true; Ignite client = startGrid(gridCount()); Ignite server = grid(0); // Initialize lock. 
IgniteLock srvLock = server.reentrantLock("lock", true, fair, true); IgniteSemaphore semaphore = server.semaphore("sync", 0, true, true); IgniteFuture fut = client.compute().applyAsync(new IgniteClosure<Ignite, Object>() { @Override public Object apply(Ignite ignite) { final IgniteLock l = ignite.reentrantLock("lock", true, fair, true); l.lock(); assertTrue(l.isHeldByCurrentThread()); l.unlock(); assertFalse(l.isHeldByCurrentThread()); // Signal the server to go down. ignite.semaphore("sync", 0, true, true).release(); boolean isExceptionThrown = false; try { // Wait for the server to go down. Thread.sleep(1000); l.lock(); fail("Exception must be thrown."); } catch (InterruptedException ignored) { fail("Interrupted exception not expected here."); } catch (IgniteException ignored) { isExceptionThrown = true; } finally { assertTrue(isExceptionThrown); assertFalse(l.isHeldByCurrentThread()); } return null; } }, client); // Wait for the lock on client to be acquired then released. semaphore.acquire(); for (int i = 0; i < gridCount(); i++) stopGrid(i); fut.get(); client.close(); } /** * @throws Exception If failed. */ @Test public void testReentrantLockConstantTopologyChangeFailoverSafe() throws Exception { doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, false); } /** * @throws Exception If failed. */ @Test public void testReentrantLockConstantMultipleTopologyChangeFailoverSafe() throws Exception { doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, false); } /** * @throws Exception If failed. */ @Test public void testReentrantLockConstantTopologyChangeNonFailoverSafe() throws Exception { doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, false); } /** * @throws Exception If failed. 
*/ @Test public void testReentrantLockConstantMultipleTopologyChangeNonFailoverSafe() throws Exception { doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, false); } /** * @throws Exception If failed. */ @Test public void testFairReentrantLockConstantTopologyChangeFailoverSafe() throws Exception { doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, true); } /** * @throws Exception If failed. */ @Test public void testFairReentrantLockConstantMultipleTopologyChangeFailoverSafe() throws Exception { doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, true); } /** * @throws Exception If failed. */ @Test public void testFairReentrantLockConstantTopologyChangeNonFailoverSafe() throws Exception { doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, true); } /** * @throws Exception If failed. */ @Test public void testFairReentrantLockConstantMultipleTopologyChangeNonFailoverSafe() throws Exception { doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, true); } /** * @throws Exception If failed. */ private void doTestReentrantLock( final ConstantTopologyChangeWorker topWorker, final boolean failoverSafe, final boolean fair ) throws Exception { IgniteEx ig = grid(0); try (IgniteLock lock = ig.reentrantLock(STRUCTURE_NAME, failoverSafe, fair, true)) { IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Void>() { @Override public Void apply(Ignite ignite) { final IgniteLock l = ignite.reentrantLock(STRUCTURE_NAME, failoverSafe, fair, false); final AtomicBoolean done = new AtomicBoolean(false); GridTestUtils.runAsync(new Callable<Void>() { @Override public Void call() throws Exception { try{ l.lock(); } finally { done.set(true); } return null; } }, "lock-thread"); // Wait until l.lock() has been called. while(!l.hasQueuedThreads() && !done.get()){ // No-op. 
} return null; } }); long endTime = System.currentTimeMillis() + getTestTimeout(); while (!fut.isDone()) { try { lock.lock(); } catch (IgniteException e) { // Exception may happen in non-failoversafe mode. if (failoverSafe) throw e; // problem already occurred, test is being shutdown if (Thread.currentThread().isInterrupted()) throw e; } finally { // Broken lock cannot be used in non-failoversafe mode. if(!lock.isBroken() || failoverSafe) { assertTrue(lock.isHeldByCurrentThread()); lock.unlock(); assertFalse(lock.isHeldByCurrentThread()); } } if (System.currentTimeMillis() > endTime) fail("Failed to wait for topology change threads."); } fut.get(); for (Ignite g : G.allGrids()){ IgniteLock l = g.reentrantLock(STRUCTURE_NAME, failoverSafe, fair, false); assertTrue(g.name(), !l.isHeldByCurrentThread() || lock.isBroken()); } } } /** * @throws Exception If failed. */ @Test public void testCountDownLatchConstantTopologyChange() throws Exception { doTestCountDownLatch(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * @throws Exception If failed. */ @Test public void testCountDownLatchConstantMultipleTopologyChange() throws Exception { doTestCountDownLatch(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * Tests distributed count down latch. * * @param topWorker Topology change worker. * @throws Exception If failed. 
*/
    private void doTestCountDownLatch(ConstantTopologyChangeWorker topWorker) throws Exception {
        // Latch starts at Integer.MAX_VALUE so it can be decremented once per
        // verification-loop iteration without ever reaching zero during the test.
        try (IgniteCountDownLatch s = grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true)) {
            try {
                IgniteInternalFuture<?> fut = topWorker.startChangingTopology(
                    new IgniteClosure<Ignite, Object>() {
                        @Override public Object apply(Ignite ignite) {
                            // A node that has just joined must still observe a positive count.
                            assert ignite.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false).count() > 0;

                            return null;
                        }
                    });

                int val = s.count();

                // While the topology keeps changing, the observed count must stay
                // consistent and each countDown() must decrement it by exactly one.
                while (!fut.isDone()) {
                    assertEquals(val, s.count());

                    assertEquals(--val, s.countDown());
                }

                fut.get();

                // After the churn stops, every surviving node must agree on the final count.
                for (Ignite g : G.allGrids())
                    assertEquals(val, g.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true).count());
            }
            finally {
                // Drop the count to zero so the latch can be removed without blocking waiters.
                grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false).countDownAll();
            }
        }
    }

    /**
     * Checks that FIFO queue contents survive a node joining and then leaving.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testFifoQueueTopologyChange() throws Exception {
        try {
            grid(0).queue(STRUCTURE_NAME, 0, config(false)).put(10);

            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            // The new node must see the element put by grid(0).
            assertEquals(10, (int)g.<Integer>queue(STRUCTURE_NAME, 0, null).poll());

            g.queue(STRUCTURE_NAME, 0, null).put(20);

            stopGrid(NEW_IGNITE_INSTANCE_NAME);

            // The element added by the now-stopped node must still be visible.
            assertEquals(20, (int)grid(0).<Integer>queue(STRUCTURE_NAME, 0, null).peek());
        }
        finally {
            grid(0).<Integer>queue(STRUCTURE_NAME, 0, null).close();
        }
    }

    /**
     * @throws Exception If failed.
*/
    @Test
    public void testQueueTopologyChange() throws Exception {
        ConstantTopologyChangeWorker topWorker = new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT);

        try (final IgniteQueue<Integer> q = grid(0).queue(STRUCTURE_NAME, 0, config(false))) {
            // Pre-fill the queue so the concurrent take()/poll() workers have work to do.
            for (int i = 0; i < 1000; i++)
                q.add(i);

            final IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    return null;
                }
            });

            // Hammer the queue from three sides (blocking take, non-blocking poll, add)
            // for as long as the topology keeps changing.
            IgniteInternalFuture<?> takeFut = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    while (!fut.isDone())
                        q.take();

                    return null;
                }
            });

            IgniteInternalFuture<?> pollFut = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    while (!fut.isDone())
                        q.poll();

                    return null;
                }
            });

            IgniteInternalFuture<?> addFut = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    while (!fut.isDone())
                        q.add(0);

                    return null;
                }
            });

            fut.get();

            pollFut.get();
            addFut.get();

            // The take() worker may be blocked on an empty queue after the other
            // workers stop; feed it one last element so it can observe fut.isDone().
            q.add(0);

            takeFut.get();
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testQueueConstantTopologyChange() throws Exception {
        // Limit churn to a single topology-change thread for PARTITIONED collection caches.
        int topChangeThreads = collectionCacheMode() == CacheMode.PARTITIONED ? 1 : TOP_CHANGE_THREAD_CNT;

        doTestQueue(new ConstantTopologyChangeWorker(topChangeThreads));
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testQueueConstantMultipleTopologyChange() throws Exception {
        // Limit churn to a single topology-change thread for PARTITIONED collection caches.
        int topChangeThreads = collectionCacheMode() == CacheMode.PARTITIONED ? 1 : TOP_CHANGE_THREAD_CNT;

        doTestQueue(multipleTopologyChangeWorker(topChangeThreads));
    }

    /**
     * Tests the queue.
     *
     * @param topWorker Topology change worker.
     * @throws Exception If failed.
*/ private void doTestQueue(ConstantTopologyChangeWorker topWorker) throws Exception { int queueMaxSize = 100; try (IgniteQueue<Integer> s = grid(0).queue(STRUCTURE_NAME, 0, config(false))) { s.put(1); IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() { @Override public Object apply(Ignite ignite) { IgniteQueue<Integer> queue = ignite.queue(STRUCTURE_NAME, 0, null); assertNotNull(queue); Integer val = queue.peek(); assertNotNull(val); assert val > 0; return null; } }); int val = s.peek(); while (!fut.isDone()) { if (s.size() == queueMaxSize) { int last = 0; for (int i = 0, size = s.size() - 1; i < size; i++) { int cur = s.poll(); if (i == 0) { last = cur; continue; } assertEquals(last, cur - 1); last = cur; } } s.put(++val); } fut.get(); val = s.peek(); for (Ignite g : G.allGrids()) assertEquals(val, (int)g.<Integer>queue(STRUCTURE_NAME, 0, null).peek()); } } /** * @throws Exception If failed. */ @Test public void testAtomicSequenceInitialization() throws Exception { checkAtomicSequenceInitialization(false); } /** * @throws Exception If failed. */ @Test public void testAtomicSequenceInitializationOnStableNodes() throws Exception { checkAtomicSequenceInitialization(true); } /** * @param limitProjection {@code True} if test should call init only on stable nodes. * @throws Exception If failed. 
*/ private void checkAtomicSequenceInitialization(boolean limitProjection) throws Exception { int threadCnt = 3; IgniteCompute compute; if (limitProjection) { List<UUID> nodeIds = new ArrayList<>(gridCount()); for (int i = 0; i < gridCount(); i++) nodeIds.add(grid(i).cluster().localNode().id()); compute = grid(0).compute(grid(0).cluster().forNodeIds(nodeIds)); } else compute = grid(0).compute(); final AtomicInteger idx = new AtomicInteger(gridCount()); IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() { @Override public void apply() { int id = idx.getAndIncrement(); try { log.info("Start node: " + id); startGrid(id); Thread.sleep(1000); } catch (Exception e) { throw F.wrap(e); } finally { stopGrid(id); info("Thread finished."); } } }, threadCnt, "test-thread"); while (!fut.isDone()) { compute.call(new IgniteCallable<Object>() { /** */ @IgniteInstanceResource private Ignite g; @Override public Object call() { try { IgniteAtomicSequence seq = g.atomicSequence(STRUCTURE_NAME, 1, true); assert seq != null; for (int i = 0; i < 1000; i++) seq.getAndIncrement(); return null; } catch (IgniteException e) { // Fail if we are on stable nodes or exception is not node stop. if (limitProjection || !X.hasCause(e, NodeStoppingException.class)) throw e; return null; } } }); } fut.get(); } /** * @throws Exception If failed. */ @Test public void testAtomicSequenceTopologyChange() throws Exception { try (IgniteAtomicSequence s = grid(0).atomicSequence(STRUCTURE_NAME, 10, true)) { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); assertEquals(1010, g.atomicSequence(STRUCTURE_NAME, 10, false).get()); assertEquals(1020, g.atomicSequence(STRUCTURE_NAME, 10, false).addAndGet(10)); stopGrid(NEW_IGNITE_INSTANCE_NAME); } } /** * @throws Exception If failed. */ @Test public void testAtomicSequenceConstantTopologyChange() throws Exception { doTestAtomicSequence(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT, true)); } /** * @throws Exception If failed. 
*/ @Test public void testAtomicSequenceConstantMultipleTopologyChange() throws Exception { doTestAtomicSequence(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT)); } /** * Tests atomic sequence. * * @param topWorker Topology change worker. * @throws Exception If failed. */ private void doTestAtomicSequence(ConstantTopologyChangeWorker topWorker) throws Exception { try (IgniteAtomicSequence s = startClient().atomicSequence(STRUCTURE_NAME, 1, true)) { IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() { @Override public Object apply(Ignite ignite) { assertTrue(ignite.atomicSequence(STRUCTURE_NAME, 1, false).get() > 0); return null; } }); long old = s.get(); while (!fut.isDone()) { assertEquals(old, s.get()); long val = s.incrementAndGet(); assertTrue(val > old); old = val; } fut.get(); } } /** * @throws Exception If failed. */ @Test public void testUncommitedTxLeave() throws Exception { final int val = 10; grid(0).atomicLong(STRUCTURE_NAME, val, true); GridTestUtils.runAsync(new Callable<Object>() { @Override public Object call() throws Exception { Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME); try { g.transactions().txStart(); g.cache(TRANSACTIONAL_CACHE_NAME).put(1, 1); assertEquals(val + 1, g.atomicLong(STRUCTURE_NAME, val, false).incrementAndGet()); } finally { stopGrid(NEW_IGNITE_INSTANCE_NAME); } return null; } }).get(); waitForDiscovery(G.allGrids().toArray(new Ignite[gridCount()])); assertEquals(val + 1, grid(0).atomicLong(STRUCTURE_NAME, val, false).get()); } /** * @param topChangeThreads Number of topology change threads. * * @return Specific multiple topology change worker implementation. */ private ConstantTopologyChangeWorker multipleTopologyChangeWorker(int topChangeThreads) { return collectionCacheMode() == CacheMode.PARTITIONED ? 
new PartitionedMultipleTopologyChangeWorker(topChangeThreads) : new MultipleTopologyChangeWorker(topChangeThreads); } /** * */ private class ConstantTopologyChangeWorker { /** */ protected final AtomicBoolean failed = new AtomicBoolean(false); /** */ protected final int topChangeThreads; /** Flag to enable circular topology change. */ private boolean circular; /** * @param topChangeThreads Number of topology change threads. */ public ConstantTopologyChangeWorker(int topChangeThreads) { this.topChangeThreads = topChangeThreads; } /** * @param topChangeThreads Number of topology change threads. * @param circular flag to enable circular topology change. */ public ConstantTopologyChangeWorker(int topChangeThreads, boolean circular) { this.topChangeThreads = topChangeThreads; this.circular = circular; } /** * Starts changing cluster's topology. * * @param cb Callback to run after node start. * @return Future. */ IgniteInternalFuture<?> startChangingTopology(final IgniteClosure<Ignite, ?> cb) { final AtomicInteger nodeIdx = new AtomicInteger(G.allGrids().size()); return GridTestUtils.runMultiThreadedAsync(new CA() { @Override public void apply() { try { for (int i = 0; i < TOP_CHANGE_CNT; i++) { if (failed.get()) return; int idx = nodeIdx.incrementAndGet(); Thread.currentThread().setName("thread-" + getTestIgniteInstanceName(idx)); try { log.info("Start node: " + getTestIgniteInstanceName(idx)); Ignite g = startGrid(idx); cb.apply(g); } catch (IgniteException e) { if (!X.hasCause(e, NodeStoppingException.class) && !X.hasCause(e, IllegalStateException.class)) throw e; // OK for this test. 
} finally { if (circular) stopGrid(G.allGrids().get(0).configuration().getIgniteInstanceName()); else stopGrid(idx); } } } catch (Exception e) { if (failed.compareAndSet(false, true)) throw F.wrap(e); } } }, topChangeThreads, "topology-change-thread"); } } /** * */ private class MultipleTopologyChangeWorker extends ConstantTopologyChangeWorker { /** * @param topChangeThreads Number of topology change threads. */ public MultipleTopologyChangeWorker(int topChangeThreads) { super(topChangeThreads); } /** * Starts changing cluster's topology. * * @return Future. */ @Override IgniteInternalFuture<?> startChangingTopology(final IgniteClosure<Ignite, ?> cb) { return GridTestUtils.runMultiThreadedAsync(new CA() { @Override public void apply() { try { for (int i = 0; i < TOP_CHANGE_CNT; i++) { if (failed.get()) return; Collection<String> names = new GridLeanSet<>(3); try { for (int j = 0; j < 3; j++) { if (failed.get()) return; String name = UUID.randomUUID().toString(); log.info("Start node: " + name); Ignite g = startGrid(name); names.add(name); cb.apply(g); } } finally { for (String name : names) stopGrid(name); } } } catch (Exception e) { if (failed.compareAndSet(false, true)) throw F.wrap(e); } } }, topChangeThreads, "topology-change-thread"); } } /** * */ private class PartitionedMultipleTopologyChangeWorker extends ConstantTopologyChangeWorker { /** */ private CyclicBarrier barrier; /** * @param topChangeThreads Number of topology change threads. */ public PartitionedMultipleTopologyChangeWorker(int topChangeThreads) { super(topChangeThreads); } /** * Starts changing cluster's topology. * * @return Future. 
*/ @Override IgniteInternalFuture<?> startChangingTopology(final IgniteClosure<Ignite, ?> cb) { final Semaphore sem = new Semaphore(TOP_CHANGE_THREAD_CNT); final ConcurrentSkipListSet<String> startedNodes = new ConcurrentSkipListSet<>(); barrier = new CyclicBarrier(TOP_CHANGE_THREAD_CNT, new Runnable() { @Override public void run() { try { assertEquals(TOP_CHANGE_THREAD_CNT * 3, startedNodes.size()); for (String name : startedNodes) { stopGrid(name, false); awaitPartitionMapExchange(); } startedNodes.clear(); sem.release(TOP_CHANGE_THREAD_CNT); barrier.reset(); } catch (Exception e) { if (failed.compareAndSet(false, true)) { sem.release(TOP_CHANGE_THREAD_CNT); barrier.reset(); throw F.wrap(e); } } } }); IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() { @Override public void apply() { try { for (int i = 0; i < TOP_CHANGE_CNT; i++) { sem.acquire(); if (failed.get()) return; for (int j = 0; j < 3; j++) { if (failed.get()) return; String name = UUID.randomUUID().toString(); startedNodes.add(name); log.info("Start node: " + name); Ignite g = startGrid(name); cb.apply(g); } try { barrier.await(); } catch (BrokenBarrierException ignored) { // No-op. } } } catch (Exception e) { if (failed.compareAndSet(false, true)) { sem.release(TOP_CHANGE_THREAD_CNT); barrier.reset(); throw F.wrap(e); } } } }, TOP_CHANGE_THREAD_CNT, "topology-change-thread"); return fut; } } }
/*
 * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 1999-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: DOM2Helper.java,v 1.2.4.1 2005/09/15 08:15:37 suresh_emailid Exp $
 */
package com.sun.org.apache.xml.internal.utils;

import java.io.IOException;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;

import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

import org.xml.sax.InputSource;

/**
 * This class provides a DOM level 2 "helper", which provides services currently
 * not provided by the DOM standard.
 *
 * @deprecated Since the introduction of the DTM, this class will be removed.
 */
public class DOM2Helper extends DOMHelper
{

  /**
   * Construct an instance.
   */
  public DOM2Helper(){}

  /**
   * Check node to see if it was created by a DOM implementation
   * that this helper is intended to support. This is currently
   * disabled, and assumes all nodes are acceptable rather than checking
   * that they implement com.sun.org.apache.xerces.internal.dom.NodeImpl.
   *
   * @param node The node to be tested.
   *
   * @throws TransformerException if the node is not one which this
   * DOM2Helper can support. If we return without throwing the exception,
   * the node is compatible.
   * @xsl.usage internal
   */
  public void checkNode(Node node) throws TransformerException
  {

    // Deliberately a no-op; the original Xerces-specific check is retained
    // below for reference only.
    // if(!(node instanceof com.sun.org.apache.xerces.internal.dom.NodeImpl))
    //    throw new TransformerException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_XERCES_CANNOT_HANDLE_NODES, new Object[]{((Object)node).getClass()})); //"DOM2Helper can not handle nodes of type"
    //+((Object)node).getClass());
  }

  /**
   * Returns true if the DOM implementation handled by this helper
   * supports the SAX ContentHandler interface.
   *
   * @return true (since Xerces does).
   */
  public boolean supportsSAX()
  {
    return true;
  }

  /** Field m_doc: Document Node for the document this helper is currently
   *  accessing or building
   *  @see #setDocument
   *  @see #getDocument
   */
  private Document m_doc;

  /**
   * Specify which document this helper is currently operating on.
   *
   * @param doc The DOM Document node for this document.
   * @see #getDocument
   */
  public void setDocument(Document doc)
  {
    m_doc = doc;
  }

  /**
   * Query which document this helper is currently operating on.
   *
   * @return The DOM Document node for this document.
   * @see #setDocument
   */
  public Document getDocument()
  {
    return m_doc;
  }

  /**
   * Parse an XML document.
   *
   * <p>Right now the Xerces DOMParser class is used.  This needs
   * fixing, either via jaxp, or via some other, standard method.</p>
   *
   * <p>The application can use this method to instruct the SAX parser
   * to begin parsing an XML document from any valid input
   * source (a character stream, a byte stream, or a URI).</p>
   *
   * <p>Applications may not invoke this method while a parse is in
   * progress (they should create a new Parser instead for each
   * additional XML document).  Once a parse is complete, an
   * application may reuse the same Parser object, possibly with a
   * different input source.</p>
   *
   * @param source The input source for the top-level of the
   *        XML document.
   *
   * @throws TransformerException if any checked exception is thrown.
   * @xsl.usage internal
   */
  public void parse(InputSource source) throws TransformerException
  {

    try
    {

      // I guess I should use JAXP factory here... when it's legal.
      // com.sun.org.apache.xerces.internal.parsers.DOMParser parser
      //  = new com.sun.org.apache.xerces.internal.parsers.DOMParser();
      DocumentBuilderFactory builderFactory =
        DocumentBuilderFactory.newInstance();

      // NOTE(review): namespace-awareness and DTD validation are both forced
      // on here; callers cannot opt out of validation.
      builderFactory.setNamespaceAware(true);
      builderFactory.setValidating(true);

      DocumentBuilder parser = builderFactory.newDocumentBuilder();

      /*
      // domParser.setFeature("http://apache.org/xml/features/dom/create-entity-ref-nodes", getShouldExpandEntityRefs()? false : true);
      if(m_useDOM2getNamespaceURI)
      {
      parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", true);
      parser.setFeature("http://xml.org/sax/features/namespaces", true);
      }
      else
      {
      parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false);
      }

      parser.setFeature("http://apache.org/xml/features/allow-java-encodings", true);
      */

      parser.setErrorHandler(
        new com.sun.org.apache.xml.internal.utils.DefaultErrorHandler());

      // if(null != m_entityResolver)
      // {
      // System.out.println("Setting the entity resolver.");
      //  parser.setEntityResolver(m_entityResolver);
      // }

      // All checked exceptions below are wrapped in TransformerException,
      // preserving the original as the cause.
      setDocument(parser.parse(source));
    }
    catch (org.xml.sax.SAXException se)
    {
      throw new TransformerException(se);
    }
    catch (ParserConfigurationException pce)
    {
      throw new TransformerException(pce);
    }
    catch (IOException ioe)
    {
      throw new TransformerException(ioe);
    }

    // setDocument(((com.sun.org.apache.xerces.internal.parsers.DOMParser)parser).getDocument());
  }

  /**
   * Given an XML ID, return the element. This requires assistance from the
   * DOM and parser, and is meaningful only in the context of a DTD
   * or schema which declares attributes as being of type ID. This
   * information may or may not be available in all parsers, may or
   * may not be available for specific documents, and may or may not
   * be available when validation is not turned on.
   *
   * @param id The ID to search for, as a String.
   * @param doc The document to search within, as a DOM Document node.
   * @return DOM Element node with an attribute of type ID whose value
   * uniquely matches the requested id string, or null if there isn't
   * such an element or if the DOM can't answer the question for other
   * reasons.
   */
  public Element getElementByID(String id, Document doc)
  {
    return doc.getElementById(id);
  }

  /**
   * Figure out whether node2 should be considered as being later
   * in the document than node1, in Document Order as defined
   * by the XPath model. This may not agree with the ordering defined
   * by other XML applications.
   * <p>
   * There are some cases where ordering isn't defined, and neither are
   * the results of this function -- though we'll generally return true.
   * <p>
   * TODO: Make sure this does the right thing with attribute nodes!!!
   *
   * @param node1 DOM Node to perform position comparison on.
   * @param node2 DOM Node to perform position comparison on .
   *
   * @return false if node2 comes before node1, otherwise return true.
   * You can think of this as
   * <code>(node1.documentOrderPosition &lt;= node2.documentOrderPosition)</code>.
   */
  public static boolean isNodeAfter(Node node1, Node node2)
  {

    // Assume first that the nodes are DTM nodes, since discovering node
    // order is massively faster for the DTM.
    if(node1 instanceof DOMOrder && node2 instanceof DOMOrder)
    {
      int index1 = ((DOMOrder) node1).getUid();
      int index2 = ((DOMOrder) node2).getUid();

      return index1 <= index2;
    }
    else
    {

      // isNodeAfter will return true if node is after countedNode
      // in document order. The base isNodeAfter is sloooow (relatively).
      return DOMHelper.isNodeAfter(node1, node2);
    }
  }

  /**
   * Get the XPath-model parent of a node.  This version takes advantage
   * of the DOM Level 2 Attr.ownerElement() method; the base version we
   * would otherwise inherit is prepared to fall back on exhaustively
   * walking the document to find an Attr's parent.
   *
   * @param node Node to be examined
   *
   * @return the DOM parent of the input node, if there is one, or the
   * ownerElement if the input node is an Attr, or null if the node is
   * a Document, a DocumentFragment, or an orphan.
   */
  public static Node getParentOfNode(Node node)
  {
    Node parent=node.getParentNode();

    // An Attr has no parentNode; recover its owner element instead.
    if(parent==null && (Node.ATTRIBUTE_NODE == node.getNodeType()) )
      parent=((Attr) node).getOwnerElement();

    return parent;
  }

  /**
   * Returns the local name of the given node, as defined by the
   * XML Namespaces specification. This is prepared to handle documents
   * built using DOM Level 1 methods by falling back upon explicitly
   * parsing the node name.
   *
   * @param n Node to be examined
   *
   * @return String containing the local name, or null if the node
   * was not assigned a Namespace.
   */
  public String getLocalNameOfNode(Node n)
  {

    String name = n.getLocalName();

    // Null localName means a DOM Level 1 node; parse the qualified name instead.
    return (null == name) ? super.getLocalNameOfNode(n) : name;
  }

  /**
   * Returns the Namespace Name (Namespace URI) for the given node.
   * In a Level 2 DOM, you can ask the node itself. Note, however, that
   * doing so conflicts with our decision in getLocalNameOfNode not
   * to trust the that the DOM was indeed created using the Level 2
   * methods. If Level 1 methods were used, these two functions will
   * disagree with each other.
   * <p>
   * TODO: Reconcile with getLocalNameOfNode.
   *
   * @param n Node to be examined
   *
   * @return String containing the Namespace URI bound to this DOM node
   * at the time the Node was created.
   */
  public String getNamespaceOfNode(Node n)
  {
    return n.getNamespaceURI();
  }

  /** Field m_useDOM2getNamespaceURI is a compile-time flag which
   *  gates some of the parser options used to build a DOM -- but
   *  that code is commented out at this time and nobody else
   *  references it, so I've commented this out as well. */
  //private boolean m_useDOM2getNamespaceURI = false;
}
/**
 * Copyright Notice
 *
 * This is a work of the U.S. Government and is not subject to copyright
 * protection in the United States. Foreign copyrights may apply.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gov.va.isaac.gui.treeview;

import gov.va.isaac.AppContext;
import gov.va.isaac.gui.util.Images;
import gov.va.isaac.interfaces.gui.ApplicationMenus;
import gov.va.isaac.interfaces.gui.CheckMenuItemI;
import gov.va.isaac.interfaces.gui.MenuItemI;
import gov.va.isaac.interfaces.gui.constants.SharedServiceNames;
import gov.va.isaac.interfaces.gui.views.DockedViewI;
import gov.va.isaac.interfaces.gui.views.commonFunctionality.taxonomyView.SctTreeItemDisplayPolicies;
import gov.va.isaac.interfaces.gui.views.commonFunctionality.taxonomyView.TaxonomyViewI;
import gov.vha.isaac.ochre.api.Get;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.UUID;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.scene.control.MenuItem;
import javafx.scene.control.ProgressIndicator;
import javafx.scene.image.Image;
import javafx.scene.layout.Region;
import javafx.stage.Window;
import javax.inject.Named;
import javax.inject.Singleton;
import org.jvnet.hk2.annotations.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * SctTreeViewDockedView
 *
 * Docked-view wrapper around {@link SctTreeView}: exposes the taxonomy tree as a
 * dockable panel, wires up the "Taxonomy Viewer" menu item, and implements the
 * {@link TaxonomyViewI} concept-location API. The tree itself is initialized
 * lazily, on first display.
 *
 * @author <a href="mailto:daniel.armbrust.list@gmail.com">Dan Armbrust</a>
 */
@Service
@Named (value=SharedServiceNames.DOCKED)
@Singleton
public class SctTreeViewDockedView implements DockedViewI, TaxonomyViewI
{
	private static final Logger LOG = LoggerFactory.getLogger(SctTreeViewDockedView.class);

	/** The wrapped tree view; constructed eagerly, but init() is deferred. */
	private SctTreeView sctTreeView_;

	/** True once sctTreeView_.init() has run (done lazily on first menu selection). */
	private boolean hasBeenInited_ = false;

	/** Drives visibility of the busy spinner while a locate/search is running. */
	private final BooleanProperty treeViewSearchRunning = new SimpleBooleanProperty(false);

	/** Indeterminate spinner shown in the toolbar while treeViewSearchRunning is true. */
	private final ProgressIndicator treeViewProgress = new ProgressIndicator(-1);

	private SctTreeViewDockedView()
	{
		// Created by HK2 - construction happens on the GUI thread, so keep it cheap
		// and log how long it blocked.
		long startTime = System.currentTimeMillis();

		sctTreeView_ = new SctTreeView();

		treeViewProgress.setMaxSize(16, 16);
		treeViewProgress.setPrefSize(16, 16);
		treeViewProgress.visibleProperty().bind(treeViewSearchRunning);
		sctTreeView_.addToToolBar(treeViewProgress);

		LOG.debug(this.getClass().getSimpleName() + " construct time (blocking GUI): {}", System.currentTimeMillis() - startTime);
	}

	/**
	 * Navigate the tree to the given concept; workingIndicator is toggled by the
	 * tree view while the lookup is in progress.
	 */
	public void showConcept(final UUID conceptUUID, final BooleanProperty workingIndicator)
	{
		sctTreeView_.showConcept(conceptUUID, workingIndicator);
	}

	/**
	 * @see gov.va.isaac.interfaces.gui.views.IsaacViewI#getMenuBarMenus()
	 */
	@Override
	public List<MenuItemI> getMenuBarMenus()
	{
		// This view contributes no top-level menus of its own.
		return new ArrayList<MenuItemI>();
	}

	/**
	 * @see gov.va.isaac.interfaces.gui.views.DockedViewI#getView()
	 */
	@Override
	public Region getView()
	{
		return sctTreeView_.getView();
	}

	/**
	 * @see gov.va.isaac.interfaces.gui.views.DockedViewI#getMenuBarMenuToShowView()
	 */
	@Override
	public CheckMenuItemI getMenuBarMenuToShowView()
	{
		return new CheckMenuItemI()
		{
			@Override
			public void handleMenuSelection(Window parent, MenuItem menuItem)
			{
				if (!hasBeenInited_)
				{
					// Delay init till first display.
					sctTreeView_.init();
					hasBeenInited_ = true;
				}
			}

			@Override
			public int getSortOrder()
			{
				return 10;
			}

			@Override
			public String getParentMenuId()
			{
				return ApplicationMenus.PANELS.getMenuId();
			}

			@Override
			public String getMenuName()
			{
				return "Taxonomy Viewer";
			}

			@Override
			public String getMenuId()
			{
				return "taxonomyViewerMenuItem";
			}

			@Override
			public boolean enableMnemonicParsing()
			{
				return false;
			}

			/**
			 * @see gov.va.isaac.interfaces.gui.MenuItemI#getImage()
			 */
			@Override
			public Image getImage()
			{
				return Images.ROOT.getImage();
			}
		};
	}

	/**
	 * @see gov.va.isaac.interfaces.gui.views.DockedViewI#getViewTitle()
	 */
	@Override
	public String getViewTitle()
	{
		return "SOLOR Browser";
	}

	/**
	 * @see gov.va.isaac.interfaces.gui.views.commonFunctionality.taxonomyView.TaxonomyViewI#locateConcept(java.util.UUID, javafx.beans.property.BooleanProperty)
	 */
	@Override
	public void locateConcept(UUID uuid, BooleanProperty busyIndicator)
	{
		// No caller-supplied indicator: drive our own toolbar spinner instead.
		if (busyIndicator == null)
		{
			treeViewSearchRunning.set(true);
		}
		showConcept(uuid, (busyIndicator == null ? treeViewSearchRunning : busyIndicator));
		AppContext.getMainApplicationWindow().ensureDockedViewIsVisble(this);
	}

	/*
	 * @see gov.va.isaac.interfaces.gui.TaxonomyViewI#locateConcept(int, javafx.beans.property.BooleanProperty)
	 */
	@Override
	public void locateConcept(int nid, BooleanProperty busyIndicator)
	{
		// FIX: was Optional.get() - a nid with no primordial UUID threw a bare,
		// message-less NoSuchElementException. Same exception type, but now with
		// the offending nid in the message for diagnosability.
		UUID uuid = Get.identifierService().getUuidPrimordialForNid(nid)
				.orElseThrow(() -> new NoSuchElementException("No primordial UUID found for nid " + nid));

		locateConcept(uuid, busyIndicator);
	}

	/* (non-Javadoc)
	 * @see gov.va.isaac.interfaces.gui.TaxonomyViewI#setDisplayPolicies(gov.va.isaac.interfaces.treeview.SctTreeItemDisplayPolicies)
	 */
	@Override
	public void setDisplayPolicies(SctTreeItemDisplayPolicies policies)
	{
		sctTreeView_.setDisplayPolicies(policies);
	}

	/* (non-Javadoc)
	 * @see gov.va.isaac.interfaces.gui.TaxonomyViewI#refresh()
	 */
	@Override
	public void refresh()
	{
		sctTreeView_.refresh();
	}

	/* (non-Javadoc)
	 * @see gov.va.isaac.interfaces.gui.TaxonomyViewI#getDefaultDisplayPolicies()
	 */
	@Override
	public SctTreeItemDisplayPolicies getDefaultDisplayPolicies()
	{
		return SctTreeView.getDefaultDisplayPolicies();
	}

	@Override
	public void viewDiscarded()
	{
		sctTreeView_.shutdownInstance();
	}
}
/**********************************************************************
Copyright (c) 2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**********************************************************************/
package com.google.appengine.datanucleus.test.jdo;

import com.google.appengine.api.datastore.Key;

import java.util.ArrayList;
import java.util.List;

import javax.jdo.annotations.Extension;
import javax.jdo.annotations.Extensions;
import javax.jdo.annotations.IdGeneratorStrategy;
import javax.jdo.annotations.IdentityType;
import javax.jdo.annotations.Inheritance;
import javax.jdo.annotations.Order;
import javax.jdo.annotations.PersistenceCapable;
import javax.jdo.annotations.Persistent;
import javax.jdo.annotations.PrimaryKey;

/**
 * Holder for persistence-capable fixture classes whose mappings are, as the
 * class and field names indicate, illegal combinations for the App Engine
 * DataNucleus plugin. Each nested class pairs a primary-key strategy with a
 * conflicting {@code gae.*} vendor extension on another field.
 *
 * @author Max Ross <maxr@google.com>
 */
public class IllegalMappingsJDO {

  /** Illegal: a Long PK combined with a Key field marked as the parent key ({@code gae.parent-pk}). */
  @PersistenceCapable(identityType = IdentityType.APPLICATION)
  public static class HasLongPkWithKeyAncestor {
    @PrimaryKey
    @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY)
    private Long id;

    @Extension(vendorName = "datanucleus", key = "gae.parent-pk", value = "true")
    @Persistent
    private Key illegal;
  }

  /** Illegal: a Long PK combined with a String field marked as the parent key. */
  @PersistenceCapable(identityType = IdentityType.APPLICATION)
  public static class HasLongPkWithStringAncestor {
    @PrimaryKey
    @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY)
    private Long id;

    @Extension(vendorName = "datanucleus", key = "gae.parent-pk", value = "true")
    @Persistent
    private String illegal;
  }

  /** Illegal: an unencoded String PK combined with a Key field marked as the parent key. */
  @PersistenceCapable(identityType = IdentityType.APPLICATION)
  public static class HasUnencodedStringPkWithKeyAncestor {
    @PrimaryKey
    public String id;

    @Extension(vendorName = "datanucleus", key = "gae.parent-pk", value = "true")
    @Persistent
    private Key illegal;
  }

  /** Illegal: an unencoded String PK combined with a String field marked as the parent key. */
  @PersistenceCapable(identityType = IdentityType.APPLICATION)
  public static class HasUnencodedStringPkWithStringAncestor {
    @PrimaryKey
    public String id;

    @Extension(vendorName = "datanucleus", key = "gae.parent-pk", value = "true")
    @Persistent
    private String illegal;
  }

  /** Illegal: two fields both tagged {@code gae.pk-name}; only one is allowed. */
  @PersistenceCapable(identityType = IdentityType.APPLICATION)
  public static class HasMultiplePkNameFields {
    @PrimaryKey
    @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY)
    @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true")
    private String id;

    @Extension(vendorName = "datanucleus", key = "gae.pk-name", value = "true")
    @Persistent
    private String firstIsOk;

    @Extension(vendorName = "datanucleus", key = "gae.pk-name", value = "true")
    @Persistent
    private String secondIsIllegal;
  }

  /** Illegal: two fields both tagged {@code gae.pk-id}; only one is allowed. */
  @PersistenceCapable(identityType = IdentityType.APPLICATION)
  public static class HasMultiplePkIdFields {
    @PrimaryKey
    @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY)
    @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true")
    private String id;

    @Extension(vendorName = "datanucleus", key = "gae.pk-id", value = "true")
    @Persistent
    private Long firstIsOk;

    @Extension(vendorName = "datanucleus", key = "gae.pk-id", value = "true")
    @Persistent
    private Long secondIsIllegal;
  }

  /** Illegal: two fields both tagged {@code gae.parent-pk}; only one parent key is allowed. */
  @PersistenceCapable(identityType = IdentityType.APPLICATION)
  public static class MultipleAncestors {
    @PrimaryKey
    @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY)
    @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true")
    private String id;

    @Extension(vendorName = "datanucleus", key = "gae.parent-pk", value = "true")
    @Persistent
    private String firstIsOk;

    @Extension(vendorName = "datanucleus", key = "gae.parent-pk", value = "true")
    @Persistent
    private String secondIsIllegal;
  }
@PersistenceCapable(identityType = IdentityType.APPLICATION) public static class EncodedPkOnNonPrimaryKeyField { @PrimaryKey public String id; @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true") @Persistent private String illegal; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class EncodedPkOnNonStringPrimaryKeyField { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true") private Long id; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class PkNameOnNonStringField { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true") private String id; @Persistent @Extension(vendorName = "datanucleus", key = "gae.pk-name", value = "true") private Long illegal; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class PkIdOnNonLongField { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true") private String id; @Persistent @Extension(vendorName = "datanucleus", key = "gae.pk-id", value = "true") private String illegal; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class PkMarkedAsAncestor { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) @Extensions({ @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true"), @Extension(vendorName = "datanucleus", key = "gae.parent-pk", value = "true")} ) private String illegal; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class PkMarkedAsPkId { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) @Extension(vendorName = "datanucleus", key = "gae.pk-id", value = "true") private Long illegal; } @PersistenceCapable(identityType = 
IdentityType.APPLICATION) public static class PkMarkedAsPkName { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) @Extension(vendorName = "datanucleus", key = "gae.pk-name", value = "true") private String illegal; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class PkIdWithUnencodedStringPrimaryKey { @PrimaryKey public String id; @Persistent @Extension(vendorName = "datanucleus", key = "gae.pk-id", value = "true") private Long illegal; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class PkNameWithUnencodedStringPrimaryKey { @PrimaryKey public String id; @Persistent @Extension(vendorName = "datanucleus", key = "gae.pk-name", value = "true") private String illegal; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToManyParentWithRootOnlyLongUniChild { @PrimaryKey public String id; @Persistent private List<HasLongPkJDO> uniChildren = new ArrayList<HasLongPkJDO>(); } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToManyParentWithRootOnlyLongBiChild { @PrimaryKey public String id; @Persistent(mappedBy = "parent") private List<RootOnlyLongBiOneToManyChild> biChildren = new ArrayList<RootOnlyLongBiOneToManyChild>(); } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class RootOnlyLongBiOneToManyChild { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Long id; @Persistent private OneToManyParentWithRootOnlyLongBiChild parent; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToManyParentWithRootOnlyStringUniChild { @PrimaryKey public String id; @Persistent private List<HasUnencodedStringPkJDO> uniChildren = new ArrayList<HasUnencodedStringPkJDO>(); } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToManyParentWithRootOnlyStringBiChild { @PrimaryKey public String id; @Persistent(mappedBy = 
"parent") private List<RootOnlyStringBiOneToManyChild> biChildren = new ArrayList<RootOnlyStringBiOneToManyChild>(); } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class RootOnlyStringBiOneToManyChild { @PrimaryKey public String id; @Persistent private OneToManyParentWithRootOnlyStringBiChild parent; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToOneParentWithRootOnlyLongUniChild { @PrimaryKey public String id; @Persistent private HasLongPkJDO uniChild; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToOneParentWithRootOnlyLongBiChild { @PrimaryKey public String id; @SuppressWarnings("unused") private RootOnlyLongBiOneToManyChild biChild; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class RootOnlyLongBiOneToOneChild { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Long id; @Persistent(mappedBy = "biChild") private OneToOneParentWithRootOnlyLongBiChild parent; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToOneParentWithRootOnlyStringUniChild { @PrimaryKey public String id; @Persistent private HasUnencodedStringPkJDO uniChild; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class OneToOneParentWithRootOnlyStringBiChild { @PrimaryKey public String id; @Persistent private RootOnlyStringBiOneToManyChild biChild; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class RootOnlyStringBiOneToOneChild { @PrimaryKey public String id; @Persistent(mappedBy = "biChild") private OneToOneParentWithRootOnlyStringBiChild parent; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class LongParent { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Long id; @Persistent @Extension(vendorName = "datanucleus", key="gae.parent-pk", value="true") private Long illegal; } 
@PersistenceCapable(identityType = IdentityType.APPLICATION) public static class ManyToMany1 { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent(mappedBy = "manyToMany") private List<ManyToMany2> manyToMany; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class ManyToMany2 { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent(mappedBy = "manyToMany") private List<ManyToMany1> manyToMany; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class SequenceOnEncodedStringPk { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.SEQUENCE) @Extension(vendorName = "datanucleus", key = "gae.encoded-pk", value = "true") private String id; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class SequenceOnKeyPk { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.SEQUENCE) private Key id; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class Has2CollectionsOfSameType { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent private List<Flight> flights1; @Persistent private List<Flight> flights2; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class Has2OneToOnesOfSameType { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent private Flight f1; @Persistent private Flight f2; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class HasOneToOneAndOneToManyOfSameType { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent private List<Flight> flights; @Persistent private Flight f2; } @PersistenceCapable(identityType = IdentityType.APPLICATION) @Inheritance(customStrategy = "complete-table") public static class Has2CollectionsOfSameTypeParent { @PrimaryKey 
@Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="name DESC")) private List<Flight> flights1; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class Has2CollectionsOfSameTypeChild extends Has2CollectionsOfSameTypeParent { @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="name DESC")) private List<Flight> flights2; } @PersistenceCapable(identityType = IdentityType.APPLICATION) @Inheritance(customStrategy = "complete-table") public static class Has2CollectionsOfAssignableBaseTypeSuper { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent private String name; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class Has2CollectionsOfAssignableBaseTypeSub extends Has2CollectionsOfAssignableBaseTypeSuper { @SuppressWarnings("unused") private String str; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class Has2CollectionsOfAssignableType { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="name DESC")) private List<Has2CollectionsOfAssignableBaseTypeSuper> superList; @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="name DESC")) private List<Has2CollectionsOfAssignableBaseTypeSub> subList; } @PersistenceCapable(identityType = IdentityType.APPLICATION) @Inheritance(customStrategy = "complete-table") public static class Has2CollectionsOfAssignableTypeSuper { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="name DESC")) private 
List<Has2CollectionsOfAssignableBaseTypeSuper> superList; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class Has2CollectionsOfAssignableTypeSub extends Has2CollectionsOfAssignableTypeSuper { @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="name DESC")) private List<Has2CollectionsOfAssignableBaseTypeSub> subList; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class HasTwoOneToOnesWithSharedBaseClass { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Long id; @Persistent private HasSharedBaseClass1 hsbc1; @Persistent private HasSharedBaseClass2 hsbc2; public HasSharedBaseClass1 getHsbc1() { return hsbc1; } public void setHsbc1(HasSharedBaseClass1 hsbc1) { this.hsbc1 = hsbc1; } public HasSharedBaseClass2 getHsbc2() { return hsbc2; } public void setHsbc2(HasSharedBaseClass2 hsbc2) { this.hsbc2 = hsbc2; } } @PersistenceCapable(identityType = IdentityType.APPLICATION) public abstract static class SharedBaseClass { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; } @PersistenceCapable(identityType = IdentityType.APPLICATION) @Inheritance(customStrategy = "complete-table") public static class HasSharedBaseClass1 extends SharedBaseClass { @SuppressWarnings("unused") private String str; } @PersistenceCapable(identityType = IdentityType.APPLICATION) @Inheritance(customStrategy = "complete-table") public static class HasSharedBaseClass2 extends SharedBaseClass { @SuppressWarnings("unused") private String str; } @PersistenceCapable(identityType = IdentityType.APPLICATION) public static class HasPkIdSortOnOneToMany { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="id")) private List<HasEncodedStringPkSeparateIdFieldJDO> list; } @PersistenceCapable(identityType = 
IdentityType.APPLICATION) public static class HasPkNameSortOnOneToMany { @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Key id; @Persistent @Order(extensions = @Extension(vendorName = "datanucleus", key="list-ordering", value="name")) private List<HasEncodedStringPkSeparateNameFieldJDO> list; } }
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.identitymanagement.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Contains a reference to a <code>Statement</code> element in a policy document that determines the result of the
 * simulation.
 * </p>
 * <p>
 * This data type is used by the <code>MatchedStatements</code> member of the <code> <a>EvaluationResult</a> </code>
 * type.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iam-2010-05-08/Statement" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Statement implements Serializable, Cloneable {

    /** The identifier of the policy that was provided as an input. */
    private String sourcePolicyId;
    /** The type of the policy (see {@code PolicySourceType}). */
    private String sourcePolicyType;
    /** The row and column of the beginning of the <code>Statement</code> in an IAM policy. */
    private Position startPosition;
    /** The row and column of the end of a <code>Statement</code> in an IAM policy. */
    private Position endPosition;

    /**
     * Sets the identifier of the policy that was provided as an input.
     *
     * @param sourcePolicyId
     *        The identifier of the policy that was provided as an input.
     */
    public void setSourcePolicyId(String sourcePolicyId) {
        this.sourcePolicyId = sourcePolicyId;
    }

    /**
     * @return The identifier of the policy that was provided as an input.
     */
    public String getSourcePolicyId() {
        return this.sourcePolicyId;
    }

    /**
     * Fluent setter for the source policy identifier.
     *
     * @param sourcePolicyId
     *        The identifier of the policy that was provided as an input.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Statement withSourcePolicyId(String sourcePolicyId) {
        setSourcePolicyId(sourcePolicyId);
        return this;
    }

    /**
     * Sets the type of the policy.
     *
     * @param sourcePolicyType
     *        The type of the policy.
     * @see PolicySourceType
     */
    public void setSourcePolicyType(String sourcePolicyType) {
        this.sourcePolicyType = sourcePolicyType;
    }

    /**
     * @return The type of the policy.
     * @see PolicySourceType
     */
    public String getSourcePolicyType() {
        return this.sourcePolicyType;
    }

    /**
     * Fluent setter for the policy type.
     *
     * @param sourcePolicyType
     *        The type of the policy.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see PolicySourceType
     */
    public Statement withSourcePolicyType(String sourcePolicyType) {
        setSourcePolicyType(sourcePolicyType);
        return this;
    }

    /**
     * Sets the type of the policy from the enum form; stored as the enum's string value.
     *
     * @param sourcePolicyType
     *        The type of the policy.
     * @see PolicySourceType
     */
    public void setSourcePolicyType(PolicySourceType sourcePolicyType) {
        this.sourcePolicyType = sourcePolicyType.toString();
    }

    /**
     * Fluent enum setter for the policy type.
     *
     * @param sourcePolicyType
     *        The type of the policy.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see PolicySourceType
     */
    public Statement withSourcePolicyType(PolicySourceType sourcePolicyType) {
        setSourcePolicyType(sourcePolicyType);
        return this;
    }

    /**
     * Sets the row and column of the beginning of the <code>Statement</code> in an IAM policy.
     *
     * @param startPosition
     *        The row and column of the beginning of the <code>Statement</code> in an IAM policy.
     */
    public void setStartPosition(Position startPosition) {
        this.startPosition = startPosition;
    }

    /**
     * @return The row and column of the beginning of the <code>Statement</code> in an IAM policy.
     */
    public Position getStartPosition() {
        return this.startPosition;
    }

    /**
     * Fluent setter for the start position.
     *
     * @param startPosition
     *        The row and column of the beginning of the <code>Statement</code> in an IAM policy.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Statement withStartPosition(Position startPosition) {
        setStartPosition(startPosition);
        return this;
    }

    /**
     * Sets the row and column of the end of a <code>Statement</code> in an IAM policy.
     *
     * @param endPosition
     *        The row and column of the end of a <code>Statement</code> in an IAM policy.
     */
    public void setEndPosition(Position endPosition) {
        this.endPosition = endPosition;
    }

    /**
     * @return The row and column of the end of a <code>Statement</code> in an IAM policy.
     */
    public Position getEndPosition() {
        return this.endPosition;
    }

    /**
     * Fluent setter for the end position.
     *
     * @param endPosition
     *        The row and column of the end of a <code>Statement</code> in an IAM policy.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Statement withEndPosition(Position endPosition) {
        setEndPosition(endPosition);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * The output format is unchanged from the generated original.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSourcePolicyId() != null)
            sb.append("SourcePolicyId: ").append(getSourcePolicyId()).append(",");
        if (getSourcePolicyType() != null)
            sb.append("SourcePolicyType: ").append(getSourcePolicyType()).append(",");
        if (getStartPosition() != null)
            sb.append("StartPosition: ").append(getStartPosition()).append(",");
        if (getEndPosition() != null)
            sb.append("EndPosition: ").append(getEndPosition());
        sb.append("}");
        return sb.toString();
    }

    /** Null-safe equality helper (same semantics as the generated null-xor checks). */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /** Null-safe hash helper; null hashes to 0, matching the generated code's values. */
    private static int fieldHash(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so the original's separate null check is subsumed.
        if (!(obj instanceof Statement))
            return false;
        Statement other = (Statement) obj;
        return fieldEquals(getSourcePolicyId(), other.getSourcePolicyId())
                && fieldEquals(getSourcePolicyType(), other.getSourcePolicyType())
                && fieldEquals(getStartPosition(), other.getStartPosition())
                && fieldEquals(getEndPosition(), other.getEndPosition());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (and therefore same values) as the generated original.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + fieldHash(getSourcePolicyId());
        hashCode = prime * hashCode + fieldHash(getSourcePolicyType());
        hashCode = prime * hashCode + fieldHash(getStartPosition());
        hashCode = prime * hashCode + fieldHash(getEndPosition());
        return hashCode;
    }

    @Override
    public Statement clone() {
        try {
            return (Statement) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.fetch.subphase.highlight;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.highlight.Formatter;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.NullFragmenter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.apache.lucene.search.highlight.TextFragment;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Highlighter implementation backed by Lucene's "plain" {@link org.apache.lucene.search.highlight.Highlighter}:
 * it re-analyzes the stored/source field value at fetch time and extracts scored text fragments.
 */
public class PlainHighlighter implements Highlighter {

    // Key under which the per-hit map of FieldMapper -> Lucene Highlighter is cached.
    private static final String CACHE_KEY = "highlight-plain";

    /**
     * Highlights one field of one hit.
     *
     * @param highlighterContext carries the field options, search context, hit context, mapper and query
     * @return the highlight fragments for the field, or {@code null} when nothing could be highlighted
     * @throws FetchPhaseExecutionException if analysis or fragment extraction fails
     */
    @Override
    public HighlightField highlight(HighlighterContext highlighterContext) {
        SearchContextHighlight.Field field = highlighterContext.field;
        SearchContext context = highlighterContext.context;
        FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
        FieldMapper mapper = highlighterContext.mapper;

        Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;

        // Reuse one Lucene Highlighter per mapper for the lifetime of this hit.
        if (!hitContext.cache().containsKey(CACHE_KEY)) {
            Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> mappers = new HashMap<>();
            hitContext.cache().put(CACHE_KEY, mappers);
        }
        @SuppressWarnings("unchecked")
        Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> cache =
            (Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter>) hitContext.cache().get(CACHE_KEY);

        org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper);
        if (entry == null) {
            QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query,
                    field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null);
            queryScorer.setExpandMultiTermQuery(true);
            Fragmenter fragmenter;
            if (field.fieldOptions().numberOfFragments() == 0) {
                fragmenter = new NullFragmenter();
            } else if (field.fieldOptions().fragmenter() == null) {
                // default fragmenter is span-aware
                fragmenter = new SimpleSpanFragmenter(queryScorer, field.fieldOptions().fragmentCharSize());
            } else if ("simple".equals(field.fieldOptions().fragmenter())) {
                fragmenter = new SimpleFragmenter(field.fieldOptions().fragmentCharSize());
            } else if ("span".equals(field.fieldOptions().fragmenter())) {
                fragmenter = new SimpleSpanFragmenter(queryScorer, field.fieldOptions().fragmentCharSize());
            } else {
                throw new IllegalArgumentException("unknown fragmenter option [" + field.fieldOptions().fragmenter()
                        + "] for the field [" + highlighterContext.fieldName + "]");
            }
            Formatter formatter = new SimpleHTMLFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0]);

            entry = new org.apache.lucene.search.highlight.Highlighter(formatter, encoder, queryScorer);
            entry.setTextFragmenter(fragmenter);
            // always highlight across all data
            entry.setMaxDocCharsToAnalyze(Integer.MAX_VALUE);

            cache.put(mapper, entry);
        }

        // a HACK to make highlighter do highlighting, even though its using the single frag list builder
        int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments();
        ArrayList<TextFragment> fragsList = new ArrayList<>();
        List<Object> textsToHighlight;
        Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().type()).mappers().indexAnalyzer();

        try {
            textsToHighlight = HighlightUtils.loadFieldValues(field, mapper, context, hitContext);

            for (Object textToHighlight : textsToHighlight) {
                String text;
                if (textToHighlight instanceof BytesRef) {
                    text = mapper.fieldType().valueForDisplay(textToHighlight).toString();
                } else {
                    text = textToHighlight.toString();
                }

                try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().name(), text)) {
                    if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) {
                        // can't perform highlighting if the stream has no terms (binary token stream) or no offsets
                        continue;
                    }
                    TextFragment[] bestTextFragments = entry.getBestTextFragments(tokenStream, text, false, numberOfFragments);
                    for (TextFragment bestTextFragment : bestTextFragments) {
                        if (bestTextFragment != null && bestTextFragment.getScore() > 0) {
                            fragsList.add(bestTextFragment);
                        }
                    }
                }
            }
        } catch (Exception e) {
            if (ExceptionsHelper.unwrap(e, BytesRefHash.MaxBytesLengthExceededException.class) != null) {
                // this can happen if for example a field is not_analyzed and ignore_above option is set.
                // the field will be ignored when indexing but the huge term is still in the source and
                // the plain highlighter will parse the source and try to analyze it.
                return null;
            } else {
                throw new FetchPhaseExecutionException(context,
                        "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
            }
        }
        if (field.fieldOptions().scoreOrdered()) {
            // FIX: the previous comparator used Math.round(o2.getScore() - o1.getScore()), which maps any
            // score difference in (-0.5, 0.5) to 0 and is therefore non-transitive (an invalid Comparator).
            // Float.compare yields a total order consistent with the intended descending-score sort.
            CollectionUtil.introSort(fragsList, (o1, o2) -> Float.compare(o2.getScore(), o1.getScore()));
        }
        String[] fragments;
        // number_of_fragments is set to 0 but we have a multivalued field
        if (field.fieldOptions().numberOfFragments() == 0 && textsToHighlight.size() > 1 && fragsList.size() > 0) {
            fragments = new String[fragsList.size()];
            for (int i = 0; i < fragsList.size(); i++) {
                fragments[i] = fragsList.get(i).toString();
            }
        } else {
            // refine numberOfFragments if needed
            numberOfFragments = fragsList.size() < numberOfFragments ? fragsList.size() : numberOfFragments;
            fragments = new String[numberOfFragments];
            for (int i = 0; i < fragments.length; i++) {
                fragments[i] = fragsList.get(i).toString();
            }
        }

        if (fragments.length > 0) {
            return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
        }

        int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize();
        if (noMatchSize > 0 && textsToHighlight.size() > 0) {
            // Pull an excerpt from the beginning of the string but make sure to split the string on a term boundary.
            String fieldContents = textsToHighlight.get(0).toString();
            int end;
            try {
                end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().name(), fieldContents);
            } catch (Exception e) {
                throw new FetchPhaseExecutionException(context,
                        "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
            }
            if (end > 0) {
                return new HighlightField(highlighterContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) });
            }
        }
        return null;
    }

    /** The plain highlighter can highlight any field. */
    @Override
    public boolean canHighlight(FieldMapper fieldMapper) {
        return true;
    }

    /**
     * Finds the largest offset {@code <= noMatchSize} that falls on a token boundary, so the
     * no-match excerpt does not cut a term in half.
     *
     * @return the end offset to truncate at, or -1 when the analyzer provides no offsets
     */
    private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, Analyzer analyzer, String fieldName, String contents)
            throws IOException {
        try (TokenStream tokenStream = analyzer.tokenStream(fieldName, contents)) {
            if (!tokenStream.hasAttribute(OffsetAttribute.class)) {
                // Can't split on term boundaries without offsets
                return -1;
            }
            int end = -1;
            tokenStream.reset();
            while (tokenStream.incrementToken()) {
                OffsetAttribute attr = tokenStream.getAttribute(OffsetAttribute.class);
                if (attr.endOffset() >= noMatchSize) {
                    // Jump to the end of this token if it wouldn't put us past the boundary
                    if (attr.endOffset() == noMatchSize) {
                        end = noMatchSize;
                    }
                    return end;
                }
                end = attr.endOffset();
            }
            tokenStream.end();
            // We've exhausted the token stream so we should just highlight everything.
            return end;
        }
    }
}
/* * Copyright 2013 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.spring; import org.drools.compiler.kie.builder.impl.ClasspathKieProject; import org.drools.compiler.kie.builder.impl.InternalKieModule; import org.drools.compiler.kie.builder.impl.KieBuilderImpl; import org.drools.compiler.kproject.ReleaseIdImpl; import org.drools.compiler.kproject.models.KieBaseModelImpl; import org.drools.compiler.kproject.models.KieModuleModelImpl; import org.drools.compiler.kproject.models.KieSessionModelImpl; import org.kie.api.KieServices; import org.kie.api.builder.ReleaseId; import org.kie.api.builder.model.KieModuleModel; import org.kie.api.builder.model.KieSessionModel; import org.kie.api.conf.DeclarativeAgendaOption; import org.kie.api.conf.EqualityBehaviorOption; import org.kie.api.conf.EventProcessingOption; import org.kie.api.runtime.conf.ClockTypeOption; import org.kie.spring.factorybeans.KBaseFactoryBean; import org.kie.spring.factorybeans.KModuleFactoryBean; import org.kie.spring.factorybeans.KSessionFactoryBean; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; import org.springframework.beans.PropertyValue; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanExpressionContext; import org.springframework.beans.factory.config.BeanFactoryPostProcessor; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; 
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Enumeration;
import java.util.Map;

/**
 * Spring {@link BeanFactoryPostProcessor} that scans the bean factory for kie-spring
 * factory-bean definitions ({@code KModuleFactoryBean} / {@code KBaseFactoryBean} /
 * {@code KSessionFactoryBean}), assembles an in-memory {@link KieModuleModel} from them,
 * and registers the resulting {@link InternalKieModule} with the {@link KieServices}
 * repository before any of those beans are instantiated.
 *
 * <p>Also resolves the module's {@link ReleaseId} from a {@code pom.properties} found on
 * the classpath root (with special handling for WAR layouts and JBoss VFS URLs), falling
 * back to a default id when none is found.
 */
@Component("kiePostProcessor")
public class KModuleBeanFactoryPostProcessor implements BeanFactoryPostProcessor, ApplicationContextAware {
    private static final Logger log = LoggerFactory.getLogger(KModuleBeanFactoryPostProcessor.class);

    // Suffix identifying a WAR's exploded classes folder, e.g. ".../WEB-INF/classes/".
    private static final String WEB_INF_FOLDER = "WEB-INF" + File.separator + "classes" + File.separator;

    // Classpath root URL; populated by setApplicationContext or passed in explicitly.
    protected URL configFileURL;
    // Maven GAV of the KieModule being registered; lazily discovered if not set.
    protected ReleaseId releaseId;
    // Filesystem path form of the classpath root, used for pom.properties lookup.
    private String configFilePath;
    private ApplicationContext context;

    /** Default constructor: derives the config file path from the classpath root. */
    public KModuleBeanFactoryPostProcessor() {
        initConfigFilePath();
    }

    /**
     * @param configFileURL  classpath root URL of the module
     * @param configFilePath filesystem path of the classpath root
     * @param context        application context used to publish the resolved ReleaseId
     */
    public KModuleBeanFactoryPostProcessor(URL configFileURL, String configFilePath, ApplicationContext context) {
        this.configFileURL = configFileURL;
        this.configFilePath = configFilePath;
        this.context = context;
    }

    public KModuleBeanFactoryPostProcessor(URL configFileURL, String configFilePath) {
        this.configFileURL = configFileURL;
        this.configFilePath = configFilePath;
    }

    /**
     * Resolves the classpath root ("/") into a filesystem path.
     * Prefers the URI form (handles escaped characters); falls back to the raw URL path.
     * NOTE(review): assumes getResource("/") is non-null — would NPE on some
     * classloaders (e.g. inside a jar); confirm against supported deployments.
     */
    protected void initConfigFilePath() {
        try {
            configFilePath = getClass().getResource("/").toURI().getPath();
        } catch (URISyntaxException e) {
            configFilePath = getClass().getResource("/").getPath();
        }
    }

    public void setReleaseId(ReleaseId releaseId) {
        this.releaseId = releaseId;
    }

    /**
     * Entry point invoked by Spring after bean definitions are loaded but before
     * instantiation. First resolves the ReleaseId (from pom.properties on the classpath,
     * with WAR/VFS adjustments, defaulting to org.default:artifact:1.0.0-SNAPSHOT),
     * then, if a KModuleFactoryBean is defined, builds the KieModuleModel and adds the
     * KieModule to the repository.
     */
    public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
        log.info(":: BeanFactoryPostProcessor::postProcessBeanFactory called ::");
        if ( releaseId == null && configFilePath != null) {
            fixConfigFilePathForVfs();
            String pomProperties = null;
            // For WARs, pom.properties lives relative to the folder above WEB-INF/classes.
            if ( configFilePath.endsWith(WEB_INF_FOLDER)){
                String configFilePathForWebApps = configFilePath.substring(0, configFilePath.indexOf(WEB_INF_FOLDER));
                pomProperties = ClasspathKieProject.getPomProperties(configFilePathForWebApps);
            }
            if (pomProperties == null) {
                pomProperties = ClasspathKieProject.getPomProperties(configFilePath);
            }
            if (pomProperties != null) {
                releaseId = ReleaseIdImpl.fromPropertiesString(pomProperties);
            } else {
                // No Maven metadata found: fall back to a placeholder GAV.
                releaseId = new ReleaseIdImpl("org.default", "artifact","1.0.0-SNAPSHOT");
            }
            log.info("Found project with releaseId: " + releaseId);
            KieSpringUtils.setDefaultReleaseId(releaseId);
        }

        // Only build/register a KieModule when a KModuleFactoryBean is actually defined.
        for (String beanDef : beanFactory.getBeanDefinitionNames()){
            BeanDefinition beanDefinition = beanFactory.getBeanDefinition(beanDef);
            if ( beanDefinition.getBeanClassName() != null && beanDefinition.getBeanClassName().equalsIgnoreCase(KModuleFactoryBean.class.getName())){
                KieModuleModel kieModuleModel = fetchKieModuleModel(beanFactory);
                addKieModuleToRepo(kieModuleModel);
            }
        }
    }

    /**
     * Rewrites configFilePath when running under JBoss VFS ("vfs:" URLs), where the
     * resolved path points into an unzip temp area rather than the module root.
     */
    private void fixConfigFilePathForVfs() {
        if (configFileURL != null && configFileURL.toExternalForm().startsWith("vfs:")) {
            String contextPath = ClasspathKieProject.fixURLFromKProjectPath(configFileURL);
            File contextFile = new File(contextPath);
            if (contextFile.exists()) {
                // the spring context file is 2 folders under the temp folder where the war is unzipped
                contextFile = contextFile.getParentFile().getParentFile();
                File mavenFolder = recurseToMavenFolder(contextFile);
                if (mavenFolder != null) {
                    // remove /META-INF/maven since drools pom.properties lookup adds it back
                    configFilePath = mavenFolder.getParentFile().getParent();
                }
            }
        }
    }

    /**
     * Depth-first search for the first directory whose name ends with "maven"
     * (i.e. META-INF/maven). Returns null when none is found.
     * NOTE(review): listFiles() can return null for unreadable dirs — would NPE; verify.
     */
    private File recurseToMavenFolder(File file) {
        if( file.isDirectory() ) {
            for ( java.io.File child : file.listFiles() ) {
                if ( child.isDirectory() ) {
                    if ( child.getName().endsWith( "maven" ) ) {
                        return child;
                    }
                    File returnedFile = recurseToMavenFolder( child );
                    if ( returnedFile != null ) {
                        return returnedFile;
                    }
                }
            }
        }
        return null;
    }

    /**
     * Builds the InternalKieModule for the given model and registers it with the
     * KieServices repository; also publishes the ReleaseId to the Spring context.
     */
    private void addKieModuleToRepo(KieModuleModel kieProject) {
        KieBuilderImpl.setDefaultsforEmptyKieModule(kieProject);
        InternalKieModule kJar = createKieModule(kieProject);

        if ( kJar != null ) {
            KieServices ks = KieServices.Factory.get();
            log.info("adding KieModule from " + configFileURL.toExternalForm() + " to repository.");
            ks.getRepository().addKieModule(kJar);
            if (context != null) {
                KieSpringUtils.setReleaseIdForContext(releaseId, context);
                KieSpringUtils.setDefaultReleaseId(releaseId);
            }
        }
    }

    /**
     * Creates the InternalKieModule from the classpath root. Delegates to the OSGi
     * path for "bundle:"/"bundleresource:" URLs; otherwise builds from the filesystem.
     */
    protected InternalKieModule createKieModule(KieModuleModel kieProject) {
        if (configFileURL.toString().startsWith("bundle:") || configFileURL.toString().startsWith("bundleresource:")) {
            return createOsgiKModule(kieProject);
        }

        if ( configFilePath == null) {
            configFilePath = getClass().getResource("/").getPath();
        }

        // Strip a leading drive/scheme prefix (e.g. "C:" on Windows) from the root path.
        String rootPath = configFilePath;
        if ( rootPath.lastIndexOf( ':' ) > 0 ) {
            rootPath = configFilePath.substring( rootPath.lastIndexOf( ':' ) + 1 );
        }

        return ClasspathKieProject.createInternalKieModule(configFileURL, configFilePath, kieProject, releaseId, rootPath);
    }

    /**
     * Creates an OsgiKieModule reflectively so this class has no hard dependency on
     * the drools-osgi-integration module; fails fast with a clear message otherwise.
     */
    private InternalKieModule createOsgiKModule(KieModuleModel kieProject) {
        Method m;
        try {
            Class<?> c = Class.forName(ClasspathKieProject.OSGI_KIE_MODULE_CLASS_NAME, true, KieBuilderImpl.class.getClassLoader());
            m = c.getMethod("create", URL.class, ReleaseId.class, KieModuleModel.class);
        } catch (Exception e) {
            throw new RuntimeException("It is necessary to have the drools-osgi-integration module on the path in order to create a KieProject from an ogsi bundle", e);
        }
        try {
            // Static factory method: null receiver.
            return (InternalKieModule) m.invoke(null, configFileURL, releaseId, kieProject);
        } catch (Exception e) {
            throw new RuntimeException("Failure creating a OsgiKieModule caused by: " + e.getMessage(), e);
        }
    }

    /** Builds a KieModuleModel by harvesting all KBase (and nested KSession) bean definitions. */
    private KieModuleModel fetchKieModuleModel(ConfigurableListableBeanFactory beanFactory) {
        KieModuleModelImpl kieModuleModel = new KieModuleModelImpl();
        addKieBaseModels(beanFactory, kieModuleModel);
        return kieModuleModel;
    }

    /**
     * Translates each KBaseFactoryBean bean definition into a KieBaseModelImpl:
     * copies name/default/packages/includes/event-mode/equals-behavior/declarative-agenda/scope
     * properties (resolving "#{...}" SpEL values), registers the model on the module,
     * injects the resolved releaseId back into the bean definition, then collects the
     * KieSessions belonging to that base.
     */
    private void addKieBaseModels(ConfigurableListableBeanFactory beanFactory, KieModuleModelImpl kieModuleModel) {
        BeanExpressionContext context = new BeanExpressionContext(beanFactory, null);

        for (String beanDef : beanFactory.getBeanDefinitionNames()){
            BeanDefinition beanDefinition = beanFactory.getBeanDefinition(beanDef);
            if ( beanDefinition.getBeanClassName() != null && beanDefinition.getBeanClassName().equalsIgnoreCase(KBaseFactoryBean.class.getName())){
                KieBaseModelImpl kBase = new KieBaseModelImpl();
                kBase.setKModule(kieModuleModel);

                kBase.setName( getPropertyValue( beanDefinition, "kBaseName" ));
                kBase.setDefault( "true".equals( getPropertyValue(beanDefinition, "def") ) );

                // Comma-separated package list.
                String packages = getPropertyValue( beanDefinition, "packages" );
                if ( !packages.isEmpty() ) {
                    packages = checkAndResolveSpringExpression(beanFactory, context, packages);
                    for ( String pkg : packages.split( "," ) ) {
                        kBase.addPackage( pkg.trim() );
                    }
                }

                // Comma-separated list of included KieBases.
                String includes = getPropertyValue( beanDefinition, "includes" );
                if ( !includes.isEmpty() ) {
                    includes = checkAndResolveSpringExpression(beanFactory, context, includes);
                    for ( String include : includes.split( "," ) ) {
                        kBase.addInclude(include.trim());
                    }
                }

                String eventMode = getPropertyValue(beanDefinition, "eventProcessingMode");
                if ( !eventMode.isEmpty() ) {
                    eventMode = checkAndResolveSpringExpression(beanFactory, context, eventMode);
                    kBase.setEventProcessingMode( EventProcessingOption.determineEventProcessingMode(eventMode) );
                }

                String equalsBehavior = getPropertyValue(beanDefinition, "equalsBehavior");
                if ( !equalsBehavior.isEmpty() ) {
                    equalsBehavior = checkAndResolveSpringExpression(beanFactory, context, equalsBehavior);
                    kBase.setEqualsBehavior( EqualityBehaviorOption.determineEqualityBehavior(equalsBehavior) );
                }

                String declarativeAgenda = getPropertyValue(beanDefinition, "declarativeAgenda");
                if ( !declarativeAgenda.isEmpty() ) {
                    declarativeAgenda = checkAndResolveSpringExpression(beanFactory, context, declarativeAgenda);
                    kBase.setDeclarativeAgenda(DeclarativeAgendaOption.determineDeclarativeAgenda(declarativeAgenda));
                }

                String scope = getPropertyValue(beanDefinition, "scope");
                if ( !scope.isEmpty() ) {
                    scope = checkAndResolveSpringExpression(beanFactory, context, scope);
                    kBase.setScope( scope.trim() );
                }

                kieModuleModel.getRawKieBaseModels().put( kBase.getName(), kBase );
                // Make the resolved releaseId available to the factory bean itself.
                beanDefinition.getPropertyValues().addPropertyValue(new PropertyValue("releaseId", releaseId));
                addKieSessionModels(beanFactory, kBase);
            }
        }
    }

    /**
     * Evaluates the value as a Spring expression when it is wrapped in "#{...}";
     * otherwise returns it unchanged.
     */
    protected String checkAndResolveSpringExpression(ConfigurableListableBeanFactory beanFactory, BeanExpressionContext context, String expression) {
        if ( expression.startsWith("#{") && expression.endsWith("}")) {
            return (String) beanFactory.getBeanExpressionResolver().evaluate(expression, context);
        }
        return expression;
    }

    /** Returns the named String property from the bean definition, or "" when absent. */
    private String getPropertyValue(BeanDefinition beanDefinition, String propertyName) {
        PropertyValue propertyValue = beanDefinition.getPropertyValues().getPropertyValue(propertyName);
        return propertyValue != null ? (String) propertyValue.getValue() : "";
    }

    /**
     * Translates each KSessionFactoryBean bean definition whose kBaseName matches the
     * given base into a KieSessionModelImpl (type defaults to STATEFUL), registering it
     * on the base and injecting the releaseId into the bean definition.
     */
    private void addKieSessionModels(ConfigurableListableBeanFactory beanFactory, KieBaseModelImpl kBase) {
        for (String beanDef : beanFactory.getBeanDefinitionNames()){
            BeanDefinition beanDefinition = beanFactory.getBeanDefinition(beanDef);
            if ( beanDefinition.getBeanClassName() != null && beanDefinition.getBeanClassName().equalsIgnoreCase(KSessionFactoryBean.class.getName())){
                String kBaseName = getPropertyValue(beanDefinition, "kBaseName");
                // Only attach sessions declared for this particular KieBase.
                if ( kBase.getName().equalsIgnoreCase(kBaseName)) {
                    String name = getPropertyValue(beanDefinition, "name");
                    String type = getPropertyValue(beanDefinition, "type");
                    KieSessionModelImpl kSession = new KieSessionModelImpl(kBase, name);
                    kSession.setType(!type.isEmpty() ? KieSessionModel.KieSessionType.valueOf(type.toUpperCase()) : KieSessionModel.KieSessionType.STATEFUL);
                    Map<String, KieSessionModel> rawKieSessionModels = kBase.getRawKieSessionModels();
                    rawKieSessionModels.put(kSession.getName(), kSession);
                    beanDefinition.getPropertyValues().addPropertyValue(new PropertyValue("releaseId", releaseId));

                    kSession.setDefault( "true".equals( getPropertyValue(beanDefinition, "def") ) );

                    String clockType = getPropertyValue(beanDefinition, "clockType");
                    if ( !clockType.isEmpty() ) {
                        kSession.setClockType( ClockTypeOption.get(clockType) );
                    }

                    String scope = getPropertyValue(beanDefinition, "scope");
                    if ( !scope.isEmpty() ) {
                        kSession.setScope( scope.trim() );
                    }
                }
            }
        }
    }

    /**
     * Captures the classpath root URL from the application context. On EAP-style
     * containers (detected via isEapContext) the context resource root is unusable,
     * so the classloader's "/" resources are scanned for the WEB-INF/classes entry.
     */
    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        try {
            if (isEapContext(applicationContext)) {
                Enumeration<URL> urls = getClass().getClassLoader().getResources("/");
                while (urls.hasMoreElements()) {
                    URL url = urls.nextElement();
                    if (url.toString().endsWith("WEB-INF/classes/")) {
                        configFileURL = url;
                        break;
                    }
                }
            } else {
                configFileURL = applicationContext.getResource("classpath:/").getURL();
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        log.info("classpath root URL: " + configFileURL);
    }

    /**
     * Heuristic EAP/WildFly detection: true when the context's classpath root (or any
     * classloader root) is the module classloader's synthetic service-loader-resources URL.
     */
    private boolean isEapContext(ApplicationContext applicationContext) throws IOException {
        URL url = applicationContext.getResource("classpath:/").getURL();
        if (isEapUrl(url)) {
            return true;
        } else {
            Enumeration<URL> urls = getClass().getClassLoader().getResources("/");
            while (urls.hasMoreElements()) {
                if (isEapUrl(urls.nextElement())) {
                    return true;
                }
            }
        }
        return false;
    }

    /** True for the synthetic "service-loader-resources/" URLs produced by JBoss modules. */
    private boolean isEapUrl(URL url) {
        return url.toString().endsWith("service-loader-resources/");
    }
}
/* *Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * *WSO2 Inc. licenses this file to you under the Apache License, *Version 2.0 (the "License"); you may not use this file except *in compliance with the License. *You may obtain a copy of the License at * *http://www.apache.org/licenses/LICENSE-2.0 * *Unless required by applicable law or agreed to in writing, *software distributed under the License is distributed on an *"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *KIND, either express or implied. See the License for the *specific language governing permissions and limitations *under the License. */ package org.wso2.carbon.esb.car.deployment.test; import org.apache.axiom.om.OMElement; import org.apache.axis2.AxisFault; import org.apache.commons.lang.ArrayUtils; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import org.wso2.carbon.integration.common.admin.client.ApplicationAdminClient; import org.wso2.carbon.integration.common.admin.client.CarbonAppUploaderClient; import org.wso2.esb.integration.common.utils.ESBIntegrationTest; import javax.activation.DataHandler; import javax.activation.FileDataSource; import java.io.File; import java.net.URL; import java.util.Calendar; import java.util.concurrent.TimeUnit; public class ClassMediatorCarTestCase extends ESBIntegrationTest { private CarbonAppUploaderClient carbonAppUploaderClient; private ApplicationAdminClient applicationAdminClient; private final int MAX_TIME = 120000; private final String car1Name = "MediatorCApp_1.0.0"; private final String car2Name = "MediatorCApp2_1.0.0"; private final String car1FileName = car1Name + ".car"; private final String car2FileName = car2Name + ".car"; private final String proxyName = "MediatorTestProxy"; private boolean isCarFile1Uploaded = false; private boolean isCarFile2Uploaded = false; @BeforeClass(alwaysRun = true, description = "Test Car 
with Mediator deployment") protected void uploadCar1Test() throws Exception { super.init(); carbonAppUploaderClient = new CarbonAppUploaderClient(context.getContextUrls().getBackEndUrl(), getSessionCookie()); carbonAppUploaderClient.uploadCarbonAppArtifact(car1FileName, new DataHandler(new FileDataSource( new File(getESBResourceLocation() + File.separator + "car" + File.separator + car1FileName)))); isCarFile1Uploaded = true; applicationAdminClient = new ApplicationAdminClient(context.getContextUrls().getBackEndUrl(), getSessionCookie()); Assert.assertTrue(isCarFileDeployed(car1Name), "Car file deployment failed"); TimeUnit.SECONDS.sleep(5); } @Test(groups = {"wso2.esb"}, description = "Test Car with Mediator deployment and invocation") public void capp1DeploymentAndServiceInvocation() throws Exception { Assert.assertTrue(esbUtils.isProxyDeployed(context.getContextUrls().getBackEndUrl(), getSessionCookie(), proxyName) , "Proxy service deployment failed"); OMElement response = null; try { response = axis2Client.sendCustomQuoteRequest( getProxyServiceURLHttp(proxyName), null, "WSO2"); } catch (AxisFault axisFault) { throw new Exception("Service Invocation Failed > " + axisFault.getMessage(), axisFault); } Assert.assertNotNull(response, "Response message null"); Assert.assertTrue(response.toString().contains("MEDIATOR1"), "MEDIATOR1 element not found in response message"); } @Test(groups = {"wso2.esb"}, description = "Test Car with Mediator un-deployment" , dependsOnMethods = {"capp1DeploymentAndServiceInvocation"}) public void capp1UnDeploymentTest() throws Exception { applicationAdminClient.deleteApplication(car1Name); isCarFile1Uploaded = false; Assert.assertTrue(isCarFileUnDeployed(car1Name), "Car file undeployment failed"); TimeUnit.SECONDS.sleep(5); Assert.assertTrue(esbUtils.isProxyUnDeployed(context.getContextUrls().getBackEndUrl(), getSessionCookie(), proxyName) , "Car1 un-deployment failed"); } @Test(groups = {"wso2.esb"}, description = "Test Re deploy car 
file" , dependsOnMethods = {"capp1UnDeploymentTest"}) protected void uploadCar2Test() throws Exception { super.init(); carbonAppUploaderClient = new CarbonAppUploaderClient(context.getContextUrls().getBackEndUrl(), getSessionCookie()); carbonAppUploaderClient. uploadCarbonAppArtifact(car2FileName, new DataHandler(new FileDataSource( new File(getESBResourceLocation() + File.separator + "car" + File.separator + car2FileName)))); isCarFile2Uploaded = true; applicationAdminClient = new ApplicationAdminClient(context.getContextUrls().getBackEndUrl(), getSessionCookie()); Assert.assertTrue(isCarFileDeployed(car2Name), "Car file deployment failed"); TimeUnit.SECONDS.sleep(5); } @Test(groups = {"wso2.esb"}, description = "Test Car with Mediator hot deployment" , dependsOnMethods = {"uploadCar2Test"}) public void capp2DeploymentAndServiceInvocation() throws Exception { Assert.assertTrue(esbUtils.isProxyDeployed(context.getContextUrls().getBackEndUrl(), getSessionCookie(), proxyName) , "Proxy service deployment failed"); OMElement response = null; try { response = axis2Client.sendCustomQuoteRequest( getProxyServiceURLHttp(proxyName), null, "WSO2"); } catch (AxisFault axisFault) { throw new Exception("Service Invocation Failed > " + axisFault.getMessage(), axisFault); } Assert.assertNotNull(response, "Response message null"); Assert.assertTrue(response.toString().contains("MEDIATOR2"), "MEDIATOR2 element not found in response message"); } @Test(groups = {"wso2.esb"}, description = "Test Car with Mediator un-deployment" , dependsOnMethods = {"capp2DeploymentAndServiceInvocation"}) public void capp2UnDeploymentTest() throws Exception { applicationAdminClient.deleteApplication(car2Name); isCarFile2Uploaded = false; Assert.assertTrue(isCarFileUnDeployed(car2Name), "Car file undeployment failed"); TimeUnit.SECONDS.sleep(5); Assert.assertTrue(esbUtils.isProxyUnDeployed(context.getContextUrls().getBackEndUrl(), getSessionCookie(), proxyName) , "Car2 un-deployment failed"); } 
@AfterClass(alwaysRun = true) public void cleanupArtifactsIfExist() throws Exception { if (isCarFile1Uploaded) { applicationAdminClient.deleteApplication(car1Name); } if (isCarFile2Uploaded) { applicationAdminClient.deleteApplication(car2Name); } super.cleanup(); } private boolean isCarFileDeployed(String carFileName) throws Exception { log.info("waiting " + MAX_TIME + " millis for car deployment " + carFileName); boolean isCarFileDeployed = false; Calendar startTime = Calendar.getInstance(); long time; while ((time = (Calendar.getInstance().getTimeInMillis() - startTime.getTimeInMillis())) < MAX_TIME) { String[] applicationList = applicationAdminClient.listAllApplications(); if (applicationList != null) { if (ArrayUtils.contains(applicationList, carFileName)) { isCarFileDeployed = true; log.info("car file deployed in " + time + " mills"); return isCarFileDeployed; } } try { Thread.sleep(1000); } catch (InterruptedException e) { //ignore } } return isCarFileDeployed; } private boolean isCarFileUnDeployed(String carFileName) throws Exception { log.info("waiting " + MAX_TIME + " millis for car undeployment " + carFileName); boolean isCarFileUnDeployed = false; Calendar startTime = Calendar.getInstance(); long time; while ((time = (Calendar.getInstance().getTimeInMillis() - startTime.getTimeInMillis())) < MAX_TIME) { String[] applicationList = applicationAdminClient.listAllApplications(); if (applicationList != null) { if (!ArrayUtils.contains(applicationList, carFileName)) { isCarFileUnDeployed = true; log.info("car file deployed in " + time + " mills"); return isCarFileUnDeployed; } try { Thread.sleep(1000); } catch (InterruptedException e) { //ignore } } else { isCarFileUnDeployed = true; log.info("car file deployed in " + time + " mills"); return isCarFileUnDeployed; } } return isCarFileUnDeployed; } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.filters;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Aggregator for the {@code filters} aggregation: collects one bucket per configured
 * filter (a document may match several, so buckets can overlap), plus an optional
 * "other" bucket for documents matching none of the filters.
 */
public class FiltersAggregator extends BucketsAggregator {

    public static final ParseField FILTERS_FIELD = new ParseField("filters");
    public static final ParseField OTHER_BUCKET_FIELD = new ParseField("other_bucket");
    public static final ParseField OTHER_BUCKET_KEY_FIELD = new ParseField("other_bucket_key");

    /**
     * A (key, query) pair describing one named filter bucket; serializable over the
     * transport protocol and renderable to XContent.
     */
    public static class KeyedFilter implements Writeable, ToXContent {
        private final String key;
        private final QueryBuilder filter;

        /**
         * @param key    bucket name; must not be null
         * @param filter query selecting the bucket's documents; must not be null
         */
        public KeyedFilter(String key, QueryBuilder filter) {
            if (key == null) {
                throw new IllegalArgumentException("[key] must not be null");
            }
            if (filter == null) {
                throw new IllegalArgumentException("[filter] must not be null");
            }
            this.key = key;
            this.filter = filter;
        }

        /**
         * Read from a stream.
         */
        public KeyedFilter(StreamInput in) throws IOException {
            key = in.readString();
            filter = in.readNamedWriteable(QueryBuilder.class);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // Mirror of the StreamInput constructor: key first, then the filter.
            out.writeString(key);
            out.writeNamedWriteable(filter);
        }

        /** The bucket name. */
        public String key() {
            return key;
        }

        /** The query selecting this bucket's documents. */
        public QueryBuilder filter() {
            return filter;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            // Rendered as a single field: { "<key>": <filter> }.
            builder.field(key, filter);
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(key, filter);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            KeyedFilter other = (KeyedFilter) obj;
            return Objects.equals(key, other.key)
                    && Objects.equals(filter, other.filter);
        }
    }

    // Bucket keys, parallel to the filters array.
    private final String[] keys;
    // Compiled Lucene weights, one per key.
    private Weight[] filters;
    // Whether buckets are keyed by name in the response.
    private final boolean keyed;
    private final boolean showOtherBucket;
    // Key of the "other" bucket; null disables it (see showOtherBucket).
    private final String otherBucketKey;
    // keys.length, plus one when the "other" bucket is enabled; stride for bucketOrd().
    private final int totalNumKeys;

    /**
     * @param otherBucketKey when non-null, enables a trailing bucket collecting
     *                       documents that matched none of the filters
     */
    public FiltersAggregator(String name, AggregatorFactories factories, String[] keys, Weight[] filters, boolean keyed,
            String otherBucketKey, SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
            Map<String, Object> metaData) throws IOException {
        super(name, factories, context, parent, pipelineAggregators, metaData);
        this.keyed = keyed;
        this.keys = keys;
        this.filters = filters;
        this.showOtherBucket = otherBucketKey != null;
        this.otherBucketKey = otherBucketKey;
        if (showOtherBucket) {
            this.totalNumKeys = keys.length + 1;
        } else {
            this.totalNumKeys = keys.length;
        }
    }

    /**
     * Builds a per-segment collector that, for each doc, adds the doc to the bucket
     * of every matching filter, and to the "other" bucket when enabled and nothing matched.
     */
    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
            final LeafBucketCollector sub) throws IOException {
        // no need to provide deleted docs to the filter
        // Materialize each filter as a random-access Bits over this segment's doc ids.
        final Bits[] bits = new Bits[filters.length];
        for (int i = 0; i < filters.length; ++i) {
            bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorerSupplier(ctx));
        }
        return new LeafBucketCollectorBase(sub, null) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                boolean matched = false;
                for (int i = 0; i < bits.length; i++) {
                    if (bits[i].get(doc)) {
                        collectBucket(sub, doc, bucketOrd(bucket, i));
                        matched = true;
                    }
                }
                if (showOtherBucket && !matched) {
                    // "other" bucket occupies the slot after the last filter.
                    collectBucket(sub, doc, bucketOrd(bucket, bits.length));
                }
            }
        };
    }

    /**
     * Builds the final result for one owning bucket: one InternalBucket per key
     * (in declaration order), then the "other" bucket when enabled.
     */
    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
        List<InternalFilters.InternalBucket> buckets = new ArrayList<>(filters.length);
        for (int i = 0; i < keys.length; i++) {
            long bucketOrd = bucketOrd(owningBucketOrdinal, i);
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i],
                    bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed);
            buckets.add(bucket);
        }
        // other bucket
        if (showOtherBucket) {
            long bucketOrd = bucketOrd(owningBucketOrdinal, keys.length);
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey,
                    bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed);
            buckets.add(bucket);
        }
        return new InternalFilters(name, buckets, keyed, pipelineAggregators(), metaData());
    }

    /**
     * Result shape when no documents were collected: every bucket present with a
     * zero doc count and empty sub-aggregations.
     */
    @Override
    public InternalAggregation buildEmptyAggregation() {
        InternalAggregations subAggs = buildEmptySubAggregations();
        List<InternalFilters.InternalBucket> buckets = new ArrayList<>(filters.length);
        for (int i = 0; i < keys.length; i++) {
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], 0, subAggs, keyed);
            buckets.add(bucket);
        }
        if (showOtherBucket) {
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs, keyed);
            buckets.add(bucket);
        }
        return new InternalFilters(name, buckets, keyed, pipelineAggregators(), metaData());
    }

    /**
     * Flattens (owning bucket, filter index) into a single ordinal: owning buckets are
     * laid out contiguously with stride totalNumKeys.
     */
    final long bucketOrd(long owningBucketOrdinal, int filterOrd) {
        return owningBucketOrdinal * totalNumKeys + filterOrd;
    }
}
/*
 * Copyright 2010 Vodafone Group Services Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.onesocialweb.gwt.xml;

import org.w3c.dom.Attr;
import org.w3c.dom.CDATASection;
import org.w3c.dom.Comment;
import org.w3c.dom.DOMConfiguration;
import org.w3c.dom.DOMException;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentFragment;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.EntityReference;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;

/**
 * Adapts a GWT client-side XML {@link com.google.gwt.xml.client.Document} to the
 * standard W3C DOM {@link Document} interface by wrapping each GWT node in the
 * corresponding *Adapter type.
 *
 * <p>The GWT XML API supports only a subset of DOM Level 3, so several operations
 * are left as unsupported stubs that return {@code null}/{@code false} or do nothing;
 * callers should not rely on them.
 */
public class DocumentAdapter extends NodeAdapter implements Document {

	public DocumentAdapter(com.google.gwt.xml.client.Document document) {
		super(document);
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public Node adoptNode(Node source) throws DOMException {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public Attr createAttribute(String name) throws DOMException {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public Attr createAttributeNS(String namespaceURI, String qualifiedName)
			throws DOMException {
		// TODO Auto-generated method stub
		return null;
	}

	/** Creates a CDATA section node, wrapped as a W3C {@link CDATASection}. */
	@Override
	public CDATASection createCDATASection(String data) throws DOMException {
		return new CDataSectionAdapter(getGwtDocument()
				.createCDATASection(data));
	}

	/** Creates a comment node, wrapped as a W3C {@link Comment}. */
	@Override
	public Comment createComment(String data) {
		return new CommentAdapter(getGwtDocument().createComment(data));
	}

	/** Creates an empty document fragment, wrapped as a W3C {@link DocumentFragment}. */
	@Override
	public DocumentFragment createDocumentFragment() {
		return new DocumentFragmentAdapter(getGwtDocument()
				.createDocumentFragment());
	}

	/** Creates an element with the given tag name, wrapped as a W3C {@link Element}. */
	@Override
	public Element createElement(String tagName) throws DOMException {
		return new ElementAdapter(getGwtDocument().createElement(tagName));
	}

	/**
	 * Approximates namespaced element creation: GWT has no createElementNS, so the
	 * element is created with the qualified name and the namespace is attached as a
	 * literal "xmlns" attribute. Not fully DOM-compliant (no namespace resolution).
	 */
	@Override
	public Element createElementNS(String namespaceURI, String qualifiedName)
			throws DOMException {
		com.google.gwt.xml.client.Element e = getGwtDocument().createElement(
				qualifiedName);
		e.setAttribute("xmlns", namespaceURI);
		return new ElementAdapter(e);
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public EntityReference createEntityReference(String name)
			throws DOMException {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public ProcessingInstruction createProcessingInstruction(String target,
			String data) throws DOMException {
		// TODO Auto-generated method stub
		return null;
	}

	/** Creates a text node, wrapped as a W3C {@link Text}. */
	@Override
	public Text createTextNode(String data) {
		return new TextAdapter(getGwtDocument().createTextNode(data));
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public DocumentType getDoctype() {
		// TODO Auto-generated method stub
		return null;
	}

	/** Returns the root element, wrapped as a W3C {@link Element}. */
	@Override
	public Element getDocumentElement() {
		return new ElementAdapter(getGwtDocument().getDocumentElement());
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public String getDocumentURI() {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public DOMConfiguration getDomConfig() {
		// TODO Auto-generated method stub
		return null;
	}

	/** Looks up an element by id, wrapped as a W3C {@link Element}. */
	@Override
	public Element getElementById(String elementId) {
		return new ElementAdapter(getGwtDocument().getElementById(elementId));
	}

	/** Returns all elements with the given tag name, wrapped as a W3C {@link NodeList}. */
	@Override
	public NodeList getElementsByTagName(String tagname) {
		return new NodeListAdapter(getGwtDocument().getElementsByTagName(
				tagname));
	}

	/**
	 * Namespace-unaware approximation: delegates to {@link #getElementsByTagName}
	 * using only the local name, ignoring {@code namespaceURI}.
	 */
	@Override
	public NodeList getElementsByTagNameNS(String namespaceURI,
			String localName) {
		return getElementsByTagName(localName);
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public DOMImplementation getImplementation() {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public String getInputEncoding() {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported; always reports strict error checking disabled. */
	@Override
	public boolean getStrictErrorChecking() {
		// TODO Auto-generated method stub
		return false;
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public String getXmlEncoding() {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported; always reports {@code false}. */
	@Override
	public boolean getXmlStandalone() {
		// TODO Auto-generated method stub
		return false;
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public String getXmlVersion() {
		// TODO Auto-generated method stub
		return null;
	}

	/**
	 * Imports a node that was produced by this adapter layer by delegating to the
	 * GWT importNode. Returns {@code null} for nodes from other DOM implementations.
	 */
	@Override
	public Node importNode(Node importedNode, boolean deep) throws DOMException {
		if (importedNode instanceof NodeAdapter) {
			return new NodeAdapter(getGwtDocument().importNode(
					((NodeAdapter) importedNode).getGwtNode(), deep));
		}
		return null;
	}

	/** Delegates to the GWT document's normalize(). */
	@Override
	public void normalizeDocument() {
		getGwtDocument().normalize();
	}

	/** Unsupported by the underlying GWT XML API; always returns {@code null}. */
	@Override
	public Node renameNode(Node n, String namespaceURI, String qualifiedName)
			throws DOMException {
		// TODO Auto-generated method stub
		return null;
	}

	/** Unsupported; no-op. */
	@Override
	public void setDocumentURI(String documentURI) {
		// TODO Auto-generated method stub

	}

	/** Unsupported; no-op. */
	@Override
	public void setStrictErrorChecking(boolean strictErrorChecking) {
		// TODO Auto-generated method stub

	}

	/** Unsupported; no-op. */
	@Override
	public void setXmlStandalone(boolean xmlStandalone) throws DOMException {
		// TODO Auto-generated method stub

	}

	/** Unsupported; no-op. */
	@Override
	public void setXmlVersion(String xmlVersion) throws DOMException {
		// TODO Auto-generated method stub

	}

	/** The wrapped GWT document (stored by NodeAdapter as the underlying node). */
	protected com.google.gwt.xml.client.Document getGwtDocument() {
		return (com.google.gwt.xml.client.Document) getGwtNode();
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.ipc;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.AbstractQueue;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;

import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Abstracts queue operations for different blocking queues.
 *
 * <p>Wraps a configurable {@link BlockingQueue} implementation and an
 * {@link RpcScheduler}, and supports atomically swapping the backing queue
 * at runtime (see {@link #swapQueue}) while producers and consumers keep
 * operating. Producers go through {@code putRef}, consumers through
 * {@code takeRef}; during a swap the two refs briefly point at different
 * queues so the old queue can drain.
 */
public class CallQueueManager<E extends Schedulable>
    extends AbstractQueue<E> implements BlockingQueue<E> {
  public static final Logger LOG =
      LoggerFactory.getLogger(CallQueueManager.class);
  // Number of checkpoints for empty queue.
  private static final int CHECKPOINT_NUM = 20;
  // Interval to check empty queue.
  private static final long CHECKPOINT_INTERVAL_MS = 10;

  // Queue class is read from configuration as a raw Class<?>; an unchecked
  // cast is needed to attach the element type. Safe as long as the configured
  // class really is a BlockingQueue implementation.
  @SuppressWarnings("unchecked")
  static <E> Class<? extends BlockingQueue<E>> convertQueueClass(
      Class<?> queueClass, Class<E> elementClass) {
    return (Class<? extends BlockingQueue<E>>)queueClass;
  }

  // Same idea for the scheduler class loaded from configuration.
  @SuppressWarnings("unchecked")
  static Class<? extends RpcScheduler> convertSchedulerClass(
      Class<?> schedulerClass) {
    return (Class<? extends RpcScheduler>)schedulerClass;
  }

  // volatile: toggled at runtime via setClientBackoffEnabled() and read by
  // handler threads without a lock.
  private volatile boolean clientBackOffEnabled;
  private boolean serverFailOverEnabled;

  // Atomic refs point to active callQueue
  // We have two so we can better control swapping
  private final AtomicReference<BlockingQueue<E>> putRef;
  private final AtomicReference<BlockingQueue<E>> takeRef;

  private RpcScheduler scheduler;

  /**
   * Constructs the manager from configuration: creates the scheduler and the
   * backing call queue reflectively, then points both put/take refs at the
   * new queue.
   *
   * @param backingClass queue implementation to instantiate
   * @param schedulerClass scheduler implementation to instantiate
   * @param clientBackOffEnabled whether overloaded clients get backoff
   *        exceptions instead of blocking
   * @param maxQueueSize total queue capacity
   * @param namespace config key prefix (e.g. per-port IPC namespace)
   * @param conf configuration to read tuning keys from
   */
  public CallQueueManager(Class<? extends BlockingQueue<E>> backingClass,
      Class<? extends RpcScheduler> schedulerClass,
      boolean clientBackOffEnabled, int maxQueueSize, String namespace,
      Configuration conf) {
    int priorityLevels = parseNumLevels(namespace, conf);
    this.scheduler = createScheduler(schedulerClass, priorityLevels,
        namespace, conf);
    int[] capacityWeights = parseCapacityWeights(priorityLevels,
        namespace, conf);
    BlockingQueue<E> bq = createCallQueueInstance(backingClass,
        priorityLevels, maxQueueSize, namespace, capacityWeights, conf);
    this.clientBackOffEnabled = clientBackOffEnabled;
    this.serverFailOverEnabled = conf.getBoolean(
        namespace + "." +
        CommonConfigurationKeys.IPC_CALLQUEUE_SERVER_FAILOVER_ENABLE,
        CommonConfigurationKeys.IPC_CALLQUEUE_SERVER_FAILOVER_ENABLE_DEFAULT);
    this.putRef = new AtomicReference<BlockingQueue<E>>(bq);
    this.takeRef = new AtomicReference<BlockingQueue<E>>(bq);
    LOG.info("Using callQueue: {}, queueCapacity: {}, " +
        "scheduler: {}, ipcBackoff: {}.",
        backingClass, maxQueueSize, schedulerClass, clientBackOffEnabled);
  }

  // Test-only constructor: injects a pre-built queue and scheduler, skipping
  // all configuration parsing and reflective construction.
  @VisibleForTesting // only!
  CallQueueManager(BlockingQueue<E> queue, RpcScheduler scheduler,
      boolean clientBackOffEnabled, boolean serverFailOverEnabled) {
    this.putRef = new AtomicReference<BlockingQueue<E>>(queue);
    this.takeRef = new AtomicReference<BlockingQueue<E>>(queue);
    this.scheduler = scheduler;
    this.clientBackOffEnabled = clientBackOffEnabled;
    this.serverFailOverEnabled = serverFailOverEnabled;
  }

  /**
   * Reflectively constructs the scheduler, trying progressively simpler
   * constructor signatures: (int, String, Configuration), then (int), then
   * no-arg. Each attempt's non-runtime failure is deliberately swallowed so
   * the next signature can be tried; only InvocationTargetException (the
   * constructor itself threw) aborts immediately with the real cause.
   */
  private static <T extends RpcScheduler> T createScheduler(
      Class<T> theClass, int priorityLevels, String ns, Configuration conf) {
    // Used for custom, configurable scheduler
    try {
      Constructor<T> ctor = theClass.getDeclaredConstructor(int.class,
          String.class, Configuration.class);
      return ctor.newInstance(priorityLevels, ns, conf);
    } catch (RuntimeException e) {
      throw e;
    } catch (InvocationTargetException e) {
      throw new RuntimeException(theClass.getName()
          + " could not be constructed.", e.getCause());
    } catch (Exception e) {
      // Fall through and try the next constructor shape.
    }

    try {
      Constructor<T> ctor = theClass.getDeclaredConstructor(int.class);
      return ctor.newInstance(priorityLevels);
    } catch (RuntimeException e) {
      throw e;
    } catch (InvocationTargetException e) {
      throw new RuntimeException(theClass.getName()
          + " could not be constructed.", e.getCause());
    } catch (Exception e) {
      // Fall through and try the next constructor shape.
    }

    // Last attempt
    try {
      Constructor<T> ctor = theClass.getDeclaredConstructor();
      return ctor.newInstance();
    } catch (RuntimeException e) {
      throw e;
    } catch (InvocationTargetException e) {
      throw new RuntimeException(theClass.getName()
          + " could not be constructed.", e.getCause());
    } catch (Exception e) {
      // No usable constructor; report below.
    }

    // Nothing worked
    throw new RuntimeException(theClass.getName() +
        " could not be constructed.");
  }

  /**
   * Reflectively constructs the backing queue, trying in order:
   * (int, int, String, int[], Configuration) for custom call queues such as
   * FairCallQueue, then (int) for standard capacity-bounded queues
   * (LinkedBlockingQueue, ArrayBlockingQueue, ...), then no-arg.
   * Same swallow-and-fall-through pattern as {@link #createScheduler}.
   */
  private <T extends BlockingQueue<E>> T createCallQueueInstance(
      Class<T> theClass, int priorityLevels, int maxLen, String ns,
      int[] capacityWeights, Configuration conf) {
    // Used for custom, configurable callqueues
    try {
      Constructor<T> ctor = theClass.getDeclaredConstructor(int.class,
          int.class, String.class, int[].class, Configuration.class);
      return ctor.newInstance(priorityLevels, maxLen, ns,
          capacityWeights, conf);
    } catch (RuntimeException e) {
      throw e;
    } catch (InvocationTargetException e) {
      throw new RuntimeException(theClass.getName()
          + " could not be constructed.", e.getCause());
    } catch (Exception e) {
      // Fall through and try the next constructor shape.
    }

    // Used for LinkedBlockingQueue, ArrayBlockingQueue, etc
    try {
      Constructor<T> ctor = theClass.getDeclaredConstructor(int.class);
      return ctor.newInstance(maxLen);
    } catch (RuntimeException e) {
      throw e;
    } catch (InvocationTargetException e) {
      throw new RuntimeException(theClass.getName()
          + " could not be constructed.", e.getCause());
    } catch (Exception e) {
      // Fall through and try the next constructor shape.
    }

    // Last attempt
    try {
      Constructor<T> ctor = theClass.getDeclaredConstructor();
      return ctor.newInstance();
    } catch (RuntimeException e) {
      throw e;
    } catch (InvocationTargetException e) {
      throw new RuntimeException(theClass.getName()
          + " could not be constructed.", e.getCause());
    } catch (Exception e) {
      // No usable constructor; report below.
    }

    // Nothing worked
    throw new RuntimeException(theClass.getName() +
        " could not be constructed.");
  }

  boolean isClientBackoffEnabled() {
    return clientBackOffEnabled;
  }

  // Based on policy to determine back off current call
  boolean shouldBackOff(Schedulable e) {
    return scheduler.shouldBackOff(e);
  }

  // Feeds per-call processing metrics back into the scheduler.
  void addResponseTime(String name, Schedulable e,
      ProcessingDetails details) {
    scheduler.addResponseTime(name, e, details);
  }

  // This should be only called once per call and cached in the call object
  int getPriorityLevel(Schedulable e) {
    return scheduler.getPriorityLevel(e);
  }

  void setClientBackoffEnabled(boolean value) {
    clientBackOffEnabled = value;
  }

  /**
   * Insert e into the backing queue or block until we can.  If client
   * backoff is enabled this method behaves like add which throws if
   * the queue overflows.
   * If we block and the queue changes on us, we will insert while the
   * queue is drained.
   */
  @Override
  public void put(E e) throws InterruptedException {
    if (!isClientBackoffEnabled()) {
      putRef.get().put(e);
    } else if (shouldBackOff(e)) {
      throwBackoff();
    } else {
      // No need to re-check backoff criteria since they were just checked
      addInternal(e, false);
    }
  }

  @Override
  public boolean add(E e) {
    return addInternal(e, true);
  }

  /**
   * Non-blocking insert. If the queue rejects the element with a plain
   * IllegalStateException (standard "queue full" behavior), it is converted
   * into the configured backoff exception; a queue-provided
   * CallQueueOverflowException is passed through unchanged because it may
   * carry its own disconnect policy.
   */
  @VisibleForTesting
  boolean addInternal(E e, boolean checkBackoff) {
    if (checkBackoff && isClientBackoffEnabled() && shouldBackOff(e)) {
      throwBackoff();
    }
    try {
      return putRef.get().add(e);
    } catch (CallQueueOverflowException ex) {
      // queue provided a custom exception that may control if the client
      // should be disconnected.
      throw ex;
    } catch (IllegalStateException ise) {
      throwBackoff();
    }
    return true;
  }

  // ideally this behavior should be controllable too.
  private void throwBackoff() throws IllegalStateException {
    throw serverFailOverEnabled ?
        CallQueueOverflowException.FAILOVER :
        CallQueueOverflowException.DISCONNECT;
  }

  /**
   * Insert e into the backing queue.
   * Return true if e is queued.
   * Return false if the queue is full.
   */
  @Override
  public boolean offer(E e) {
    return putRef.get().offer(e);
  }

  @Override
  public boolean offer(E e, long timeout, TimeUnit unit)
      throws InterruptedException {
    return putRef.get().offer(e, timeout, unit);
  }

  @Override
  public E peek() {
    return takeRef.get().peek();
  }

  @Override
  public E poll() {
    return takeRef.get().poll();
  }

  @Override
  public E poll(long timeout, TimeUnit unit) throws InterruptedException {
    return takeRef.get().poll(timeout, unit);
  }

  /**
   * Retrieve an E from the backing queue or block until we can.
   * Guaranteed to return an element from the current queue.
   */
  @Override
  public E take() throws InterruptedException {
    E e = null;

    // Poll with a bounded timeout instead of take() so that a queue swap
    // is noticed: each iteration re-reads takeRef and thus migrates to the
    // replacement queue within at most one second.
    while (e == null) {
      e = takeRef.get().poll(1000L, TimeUnit.MILLISECONDS);
    }

    return e;
  }

  @Override
  public int size() {
    return takeRef.get().size();
  }

  @Override
  public int remainingCapacity() {
    return takeRef.get().remainingCapacity();
  }

  /**
   * Read the number of levels from the configuration.
   * This will affect the FairCallQueue's overall capacity.
   * @throws IllegalArgumentException on invalid queue count
   */
  @SuppressWarnings("deprecation")
  private static int parseNumLevels(String ns, Configuration conf) {
    // Fair call queue levels (IPC_CALLQUEUE_PRIORITY_LEVELS_KEY)
    // takes priority over the scheduler level key
    // (IPC_SCHEDULER_PRIORITY_LEVELS_KEY)
    int retval = conf.getInt(ns + "." +
        FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 0);
    if (retval == 0) { // No FCQ priority level configured
      retval = conf.getInt(ns + "." +
          CommonConfigurationKeys.IPC_SCHEDULER_PRIORITY_LEVELS_KEY,
          CommonConfigurationKeys.IPC_SCHEDULER_PRIORITY_LEVELS_DEFAULT_KEY);
    } else {
      LOG.warn(ns + "." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY +
          " is deprecated. Please use " + ns + "." +
          CommonConfigurationKeys.IPC_SCHEDULER_PRIORITY_LEVELS_KEY + ".");
    }
    if(retval < 1) {
      throw new IllegalArgumentException("numLevels must be at least 1");
    }
    return retval;
  }

  /**
   * Read the weights of capacity in callqueue and pass the value to
   * callqueue constructions.
   *
   * <p>Returns all-ones defaults when the key is unset; otherwise requires
   * exactly one positive weight per priority level.
   */
  private static int[] parseCapacityWeights(
      int priorityLevels, String ns, Configuration conf) {
    int[] weights = conf.getInts(ns + "." +
        CommonConfigurationKeys.IPC_CALLQUEUE_CAPACITY_WEIGHTS_KEY);
    if (weights.length == 0) {
      weights = getDefaultQueueCapacityWeights(priorityLevels);
    } else if (weights.length != priorityLevels) {
      throw new IllegalArgumentException(
          CommonConfigurationKeys.IPC_CALLQUEUE_CAPACITY_WEIGHTS_KEY + " must "
              + "specify " + priorityLevels + " capacity weights: one for each "
              + "priority level");
    } else {
      // only allow positive numbers
      for (int w : weights) {
        if (w <= 0) {
          throw new IllegalArgumentException(
              CommonConfigurationKeys.IPC_CALLQUEUE_CAPACITY_WEIGHTS_KEY +
                  " only takes positive weights. " + w + " capacity weight " +
                  "found");
        }
      }
    }
    return weights;
  }

  /**
   * By default, queue capacity is the same for all priority levels.
   *
   * @param priorityLevels number of levels
   * @return default weights
   */
  public static int[] getDefaultQueueCapacityWeights(int priorityLevels) {
    int[] weights = new int[priorityLevels];
    Arrays.fill(weights, 1);
    return weights;
  }

  /**
   * Replaces active queue with the newly requested one and transfers
   * all calls to the newQ before returning.
   *
   * <p>Order matters: putRef is redirected first so new calls land in the
   * new queue, then we spin until handlers have drained the old queue, and
   * only then is takeRef redirected. synchronized so only one swap runs at
   * a time.
   */
  public synchronized void swapQueue(
      Class<? extends RpcScheduler> schedulerClass,
      Class<? extends BlockingQueue<E>> queueClassToUse, int maxSize,
      String ns, Configuration conf) {
    int priorityLevels = parseNumLevels(ns, conf);
    this.scheduler.stop();
    RpcScheduler newScheduler = createScheduler(schedulerClass,
        priorityLevels, ns, conf);
    int[] capacityWeights = parseCapacityWeights(priorityLevels, ns, conf);
    BlockingQueue<E> newQ = createCallQueueInstance(queueClassToUse,
        priorityLevels, maxSize, ns, capacityWeights, conf);

    // Our current queue becomes the old queue
    BlockingQueue<E> oldQ = putRef.get();

    // Swap putRef first: allow blocked puts() to be unblocked
    putRef.set(newQ);

    // Wait for handlers to drain the oldQ
    while (!queueIsReallyEmpty(oldQ)) {}

    // Swap takeRef to handle new calls
    takeRef.set(newQ);

    this.scheduler = newScheduler;

    LOG.info("Old Queue: " + stringRepr(oldQ) + ", " +
        "Replacement: " + stringRepr(newQ));
  }

  /**
   * Checks if queue is empty by checking at CHECKPOINT_NUM points with
   * CHECKPOINT_INTERVAL_MS interval.
   * This doesn't mean the queue might not fill up at some point later, but
   * it should decrease the probability that we lose a call this way.
   */
  private boolean queueIsReallyEmpty(BlockingQueue<?> q) {
    for (int i = 0; i < CHECKPOINT_NUM; i++) {
      try {
        Thread.sleep(CHECKPOINT_INTERVAL_MS);
      } catch (InterruptedException ie) {
        // NOTE(review): interrupt status is intentionally not restored here;
        // the swapQueue() caller loops on this method, and a restored
        // interrupt would make every subsequent sleep() throw immediately,
        // turning the drain wait into a busy spin. Returning false means
        // "not proven empty" and the caller retries.
        return false;
      }
      if (!q.isEmpty()) {
        return false;
      }
    }
    return true;
  }

  // Identity-style representation (class name + identity-ish hash) used only
  // for the swap log message.
  private String stringRepr(Object o) {
    return o.getClass().getName() + '@' + Integer.toHexString(o.hashCode());
  }

  @Override
  public int drainTo(Collection<? super E> c) {
    return takeRef.get().drainTo(c);
  }

  @Override
  public int drainTo(Collection<? super E> c, int maxElements) {
    return takeRef.get().drainTo(c, maxElements);
  }

  @Override
  public Iterator<E> iterator() {
    return takeRef.get().iterator();
  }

  // exception that mimics the standard ISE thrown by blocking queues but
  // embeds a rpc server exception for the client to retry and indicate
  // if the client should be disconnected.
  @SuppressWarnings("serial")
  static class CallQueueOverflowException extends IllegalStateException {
    private static String TOO_BUSY = "Server too busy";
    // Shared singletons: retry while keeping the connection alive,
    // retry after disconnect, or failover to another server.
    static final CallQueueOverflowException KEEPALIVE =
        new CallQueueOverflowException(
            new RetriableException(TOO_BUSY),
            RpcStatusProto.ERROR);
    static final CallQueueOverflowException DISCONNECT =
        new CallQueueOverflowException(
            new RetriableException(TOO_BUSY + " - disconnecting"),
            RpcStatusProto.FATAL);
    static final CallQueueOverflowException FAILOVER =
        new CallQueueOverflowException(
            new StandbyException(TOO_BUSY + " - disconnect and failover"),
            RpcStatusProto.FATAL);

    CallQueueOverflowException(final IOException ioe,
        final RpcStatusProto status) {
      super("Queue full", new RpcServerException(ioe.getMessage(), ioe){
        @Override
        public RpcStatusProto getRpcStatusProto() {
          return status;
        }
      });
    }

    @Override
    public IOException getCause() {
      return (IOException)super.getCause();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.igfs;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.igfs.IgfsBlockLocation;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;

/**
 * File block location in the grid.
 */
public class IgfsBlockLocationImpl implements IgfsBlockLocation, Externalizable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Start offset of the block within the file. */
    private long start;

    /** Block length in bytes. */
    private long len;

    /** IDs of nodes hosting the block. May be {@code null} after deserialization (see readExternal). */
    @GridToStringInclude
    private Collection<UUID> nodeIds;

    /** Host names in Hadoop {@code hostname:port} format. */
    private Collection<String> names;

    /** Plain host names. */
    private Collection<String> hosts;

    /**
     * Empty constructor for externalizable.
     */
    public IgfsBlockLocationImpl() {
        // No-op.
    }

    /**
     * Copy constructor with a new length (used when a block is split/truncated).
     *
     * @param location HDFS block location.
     * @param len New length.
     */
    public IgfsBlockLocationImpl(IgfsBlockLocation location, long len) {
        assert location != null;

        start = location.start();
        this.len = len;

        nodeIds = location.nodeIds();
        names = location.names();
        hosts = location.hosts();
    }

    /**
     * @param start Start.
     * @param len Length.
     * @param nodes Affinity nodes.
     */
    public IgfsBlockLocationImpl(long start, long len, Collection<ClusterNode> nodes) {
        assert start >= 0;
        assert len > 0;
        assert nodes != null && !nodes.isEmpty();

        this.start = start;
        this.len = len;

        convertFromNodes(nodes);
    }

    /**
     * @return Start position.
     */
    @Override public long start() {
        return start;
    }

    /**
     * @return Length.
     */
    @Override public long length() {
        return len;
    }

    /**
     * @return Node IDs.
     */
    @Override public Collection<UUID> nodeIds() {
        return nodeIds;
    }

    /** {@inheritDoc} */
    @Override public Collection<String> names() {
        return names;
    }

    /** {@inheritDoc} */
    @Override public Collection<String> hosts() {
        return hosts;
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        int res = (int)(start ^ (start >>> 32));

        res = 31 * res + (int)(len ^ (len >>> 32));

        // nodeIds can legitimately be null: the empty externalizable constructor
        // leaves it unset and readExternal() keeps it null when the stream flag
        // is false. Guard against NPE and stay consistent with equals(), which
        // compares nodeIds null-safely via F.eq().
        res = 31 * res + (nodeIds != null ? nodeIds.hashCode() : 0);

        return res;
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (o == this)
            return true;

        if (o == null || getClass() != o.getClass())
            return false;

        IgfsBlockLocationImpl that = (IgfsBlockLocationImpl)o;

        return len == that.len &&
            start == that.start &&
            F.eq(nodeIds, that.nodeIds) &&
            F.eq(names, that.names) &&
            F.eq(hosts, that.hosts);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(IgfsBlockLocationImpl.class, this);
    }

    /**
     * Writes this object to data output. Note that this is not externalizable
     * interface because we want to eliminate any marshaller.
     *
     * @param out Data output to write.
     * @throws IOException If write failed.
     */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        assert names != null;
        assert hosts != null;

        out.writeLong(start);
        out.writeLong(len);

        // nodeIds is optional on the wire: a boolean flag precedes it.
        out.writeBoolean(nodeIds != null);

        if (nodeIds != null) {
            out.writeInt(nodeIds.size());

            for (UUID nodeId : nodeIds)
                U.writeUuid(out, nodeId);
        }

        out.writeInt(names.size());

        for (String name : names)
            out.writeUTF(name);

        out.writeInt(hosts.size());

        for (String host : hosts)
            out.writeUTF(host);
    }

    /**
     * Reads object from data input. Note we do not use externalizable interface
     * to eliminate marshaller.
     *
     * @param in Data input.
     * @throws IOException If read failed.
     */
    @Override public void readExternal(ObjectInput in) throws IOException {
        start = in.readLong();
        len = in.readLong();

        int size;

        // nodeIds stays null when the writer serialized it as absent.
        if (in.readBoolean()) {
            size = in.readInt();

            nodeIds = new ArrayList<>(size);

            for (int i = 0; i < size; i++)
                nodeIds.add(U.readUuid(in));
        }

        size = in.readInt();

        names = new ArrayList<>(size);

        for (int i = 0; i < size; i++)
            names.add(in.readUTF());

        size = in.readInt();

        hosts = new ArrayList<>(size);

        for (int i = 0; i < size; i++)
            hosts.add(in.readUTF());
    }

    /**
     * Converts collection of rich nodes to block location data.
     *
     * @param nodes Collection of affinity nodes.
     */
    private void convertFromNodes(Collection<ClusterNode> nodes) {
        Collection<String> names = new LinkedHashSet<>();
        Collection<String> hosts = new LinkedHashSet<>();
        Collection<UUID> nodeIds = new ArrayList<>(nodes.size());

        for (final ClusterNode node : nodes) {
            // Normalize host names into Hadoop-expected format.
            try {
                Collection<InetAddress> addrs = U.toInetAddresses(node);

                for (InetAddress addr : addrs) {
                    // NOTE(review): 9001 looks like an assumed IGFS endpoint port
                    // baked in for the Hadoop name format — confirm against the
                    // IGFS endpoint configuration before changing.
                    if (addr.getHostName() == null)
                        names.add(addr.getHostAddress() + ":" + 9001);
                    else {
                        names.add(addr.getHostName() + ":" + 9001); // hostname:portNumber
                        hosts.add(addr.getHostName());
                    }
                }
            }
            catch (IgniteCheckedException ignored) {
                // Best effort: fall back to raw node addresses if resolution fails.
                names.addAll(node.addresses());
            }

            nodeIds.add(node.id());
        }

        this.nodeIds = nodeIds;
        this.names = names;
        this.hosts = hosts;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sshd.server.session;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.KeyPair;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;

import org.apache.sshd.common.FactoryManager;
import org.apache.sshd.common.FactoryManagerUtils;
import org.apache.sshd.common.NamedResource;
import org.apache.sshd.common.ServiceFactory;
import org.apache.sshd.common.SshConstants;
import org.apache.sshd.common.SshException;
import org.apache.sshd.common.future.SshFutureListener;
import org.apache.sshd.common.io.IoService;
import org.apache.sshd.common.io.IoSession;
import org.apache.sshd.common.io.IoWriteFuture;
import org.apache.sshd.common.kex.KexProposalOption;
import org.apache.sshd.common.kex.KexState;
import org.apache.sshd.common.keyprovider.KeyPairProvider;
import org.apache.sshd.common.session.AbstractSession;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.ValidateUtils;
import org.apache.sshd.common.util.buffer.Buffer;
import org.apache.sshd.common.util.buffer.ByteArrayBuffer;
import org.apache.sshd.server.ServerFactoryManager;

/**
 * Server-side SSH session: sends the server identification string on
 * creation, resolves the host-key signature proposal for key exchange,
 * and tracks re-keying thresholds (packet count, byte count, elapsed time).
 *
 * @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a>
 */
public class ServerSessionImpl extends AbstractSession implements ServerSession {
    // Re-key after this many packets in either direction (RFC 4344-style limit).
    protected static final long MAX_PACKETS = 1L << 31;

    // Re-key thresholds; defaults below, overridable via factory-manager properties.
    private long maxBytes = 1024 * 1024 * 1024;   // 1 GB
    private long maxKeyInterval = 60 * 60 * 1000; // 1 hour

    /**
     * Creates the session, loads the re-key limits from configuration and
     * immediately sends the server's SSH identification string.
     *
     * @param server    the owning server-side factory manager
     * @param ioSession the underlying transport session
     * @throws Exception if the base session setup or identification send fails
     */
    public ServerSessionImpl(ServerFactoryManager server, IoSession ioSession) throws Exception {
        super(true, server, ioSession);
        // Enforce a floor of 32 bytes so a misconfigured tiny limit cannot
        // force a re-key on virtually every packet.
        maxBytes = Math.max(32, getLongProperty(ServerFactoryManager.REKEY_BYTES_LIMIT, maxBytes));
        maxKeyInterval = getLongProperty(ServerFactoryManager.REKEY_TIME_LIMIT, maxKeyInterval);
        log.info("Server session created from {}", ioSession.getRemoteAddress());
        sendServerIdentification();
    }

    @Override
    public ServerFactoryManager getFactoryManager() {
        return (ServerFactoryManager) factoryManager;
    }

    /** Server side performs no host-key verification of the peer. */
    @Override
    protected void checkKeys() {
        // nothing
    }

    /**
     * Starts the named service (e.g. ssh-userauth) by looking it up among
     * the configured service factories.
     */
    @Override
    public void startService(String name) throws Exception {
        currentService = ServiceFactory.Utils.create(getFactoryManager().getServiceFactories(), name, this);
    }

    /**
     * SSH_MSG_SERVICE_ACCEPT is a client-bound message; receiving it on the
     * server is a protocol error.
     */
    @Override
    protected void serviceAccept() throws IOException {
        // TODO: can services be initiated by the server-side ?
        disconnect(SshConstants.SSH2_DISCONNECT_PROTOCOL_ERROR, "Unsupported packet: SSH_MSG_SERVICE_ACCEPT");
    }

    /**
     * Triggers a key re-exchange when any threshold is exceeded: packet
     * count, byte count (either direction) or elapsed time since the last
     * key exchange. Only applies once the initial KEX is DONE.
     */
    @Override
    protected void checkRekey() throws IOException {
        if (KexState.DONE.equals(kexState.get())) {
            long now = System.currentTimeMillis();
            if ((inPacketsCount.get() > MAX_PACKETS)
                    || (outPacketsCount.get() > MAX_PACKETS)
                    || (inBytesCount.get() > maxBytes)
                    || (outBytesCount.get() > maxBytes)
                    || ((maxKeyInterval > 0L) && ((now - lastKeyTimeValue.get()) > maxKeyInterval))) {
                reExchangeKeys();
            }
        }
    }

    /**
     * Sends the SSH version banner: "SSH-2.0-" plus either the configured
     * server identification or, if unset, the library version.
     */
    protected void sendServerIdentification() {
        FactoryManager manager = getFactoryManager();
        String ident = FactoryManagerUtils.getString(manager, ServerFactoryManager.SERVER_IDENTIFICATION);
        if (GenericUtils.isEmpty(ident)) {
            serverVersion = DEFAULT_SSH_VERSION_PREFIX + manager.getVersion();
        } else {
            serverVersion = DEFAULT_SSH_VERSION_PREFIX + ident;
        }
        sendIdentification(serverVersion);
    }

    /**
     * Records the server's KEX proposal (needed later for the exchange hash)
     * before delegating the actual send to the base class.
     */
    @Override
    protected byte[] sendKexInit(Map<KexProposalOption, String> proposal) throws IOException {
        mergeProposals(serverProposal, proposal);
        return super.sendKexInit(proposal);
    }

    /** Stores the server's KEX-init payload (I_S in RFC 4253 terms). */
    @Override
    protected void setKexSeed(byte... seed) {
        i_s = ValidateUtils.checkNotNullAndNotEmpty(seed, "No KEX seed");
    }

    /**
     * Builds the comma-separated host-key algorithm proposal: the
     * intersection of the signature factories' names with the key types the
     * key-pair provider can actually supply. Falls back to
     * {@link #resolveEmptySignaturesProposal(Iterable, Iterable)} when the
     * intersection is empty.
     */
    @Override
    protected String resolveAvailableSignaturesProposal(FactoryManager manager) {
        /*
         * Make sure we can provide key(s) for the available signatures
         */
        KeyPairProvider kpp = manager.getKeyPairProvider();
        Collection<String> supported = NamedResource.Utils.getNameList(manager.getSignatureFactories());
        Iterable<String> provided = (kpp == null) ? null : kpp.getKeyTypes();
        if ((provided == null) || GenericUtils.isEmpty(supported)) {
            return resolveEmptySignaturesProposal(supported, provided);
        }

        StringBuilder resolveKeys = null;
        for (String keyType : provided) {
            if (!supported.contains(keyType)) {
                if (log.isDebugEnabled()) {
                    log.debug("resolveAvailableSignaturesProposal(" + provided + ")"
                            + " " + keyType + " not in list of supported: " + supported);
                }
                continue;
            }

            if (resolveKeys == null) {
                resolveKeys = new StringBuilder(supported.size() * 16 /* ecdsa-sha2-xxxx */);
            }

            if (resolveKeys.length() > 0) {
                resolveKeys.append(',');
            }

            resolveKeys.append(keyType);
        }

        if (GenericUtils.isEmpty(resolveKeys)) {
            return resolveEmptySignaturesProposal(supported, provided);
        } else {
            return resolveKeys.toString();
        }
    }

    /**
     * Called by {@link #resolveAvailableSignaturesProposal(FactoryManager)}
     * if none of the provided keys is supported - last chance for the derived
     * implementation to do something
     *
     * @param supported The supported key types - may be {@code null}/empty
     * @param provided  The available signature types - may be {@code null}/empty
     * @return The resolved proposal - {@code null} by default
     */
    protected String resolveEmptySignaturesProposal(Iterable<String> supported, Iterable<String> provided) {
        if (log.isDebugEnabled()) {
            log.debug("resolveEmptySignaturesProposal({}) none of the keys appears in supported list: {}",
                    provided, supported);
        }
        return null;
    }

    /**
     * Parses the client's identification string. Returns {@code false} when
     * the full line has not arrived yet (caller retries with more data).
     * A non-SSH-2.0 client gets a plain-text error written back, the
     * connection is closed once the write completes, and an SshException is
     * thrown. A valid identification kicks off the key exchange.
     */
    @Override
    protected boolean readIdentification(Buffer buffer) throws IOException {
        clientVersion = doReadIdentification(buffer, true);
        if (GenericUtils.isEmpty(clientVersion)) {
            return false;
        }

        log.debug("Client version string: {}", clientVersion);
        if (!clientVersion.startsWith(DEFAULT_SSH_VERSION_PREFIX)) {
            String msg = "Unsupported protocol version: " + clientVersion;
            // Close only after the error text has been flushed to the client.
            ioSession.write(new ByteArrayBuffer((msg + "\n").getBytes(StandardCharsets.UTF_8))).addListener(new SshFutureListener<IoWriteFuture>() {
                @Override
                public void operationComplete(IoWriteFuture future) {
                    close(true);
                }
            });
            throw new SshException(msg);
        } else {
            kexState.set(KexState.INIT);
            sendKexInit();
        }
        return true;
    }

    /**
     * Records the client's KEX proposal and payload (I_C) for the exchange
     * hash computation.
     */
    @Override
    protected void receiveKexInit(Map<KexProposalOption, String> proposal, byte[] seed) throws IOException {
        mergeProposals(clientProposal, proposal);
        i_c = seed;
    }

    /**
     * Loads the host key pair matching the negotiated server-key algorithm.
     */
    @Override
    public KeyPair getHostKey() {
        String value = getNegotiatedKexParameter(KexProposalOption.SERVERKEYS);
        KeyPairProvider provider = ValidateUtils.checkNotNull(factoryManager.getKeyPairProvider(), "No host keys provider");
        return provider.loadKey(value);
    }

    /**
     * Counts currently-managed sessions whose authenticated username equals
     * the given one (used to enforce per-user session limits).
     */
    @Override
    public int getActiveSessionCountForUser(String userName) {
        if (GenericUtils.isEmpty(userName)) {
            return 0;
        }

        IoService service = ioSession.getService();
        Map<?, IoSession> sessionsMap = service.getManagedSessions();
        if (GenericUtils.isEmpty(sessionsMap)) {
            return 0;
        }

        int totalCount = 0;
        for (IoSession is : sessionsMap.values()) {
            ServerSession session = (ServerSession) getSession(is, true);
            if (session == null) {
                continue;
            }

            String sessionUser = session.getUsername();
            if ((!GenericUtils.isEmpty(sessionUser)) && Objects.equals(sessionUser, userName)) {
                totalCount++;
            }
        }

        return totalCount;
    }

    /**
     * Returns the session id.
     *
     * @return The session id.
     */
    public long getId() {
        return ioSession.getId();
    }
}
// Copyright 2019 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.base.Suppliers;
import com.google.common.collect.MapMaker;
import com.google.devtools.build.lib.actions.ActionExecutionException;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.skyframe.ArtifactFunction.SourceArtifactException;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException3;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Supplier;

/**
 * A builder of values for {@link ArtifactNestedSetKey}.
 *
 * <p>When an Action is executed with ActionExecutionFunction, the actions's input {@code
 * NestedSet<Artifact>} could be evaluated as an {@link ArtifactNestedSetKey}[1].
 *
 * <p>{@link ArtifactNestedSetFunction} then evaluates the {@link ArtifactNestedSetKey} by:
 *
 * <p>- Evaluating the directs elements as Artifacts. Commit the result into
 * artifactSkyKeyToSkyValue.
 *
 * <p>- Evaluating the transitive elements as {@link ArtifactNestedSetKey}s.
 *
 * <p>ActionExecutionFunction can then access this map to get the Artifacts' values.
 *
 * <p>[1] Heuristic: If the size of the NestedSet exceeds a certain threshold, we evaluate it as an
 * ArtifactNestedSetKey.
 */
final class ArtifactNestedSetFunction implements SkyFunction {

  /**
   * A concurrent map from Artifacts' SkyKeys to their SkyValue, for Artifacts that are part of
   * NestedSets which were evaluated as {@link ArtifactNestedSetKey}.
   *
   * <p>Question: Why don't we clear artifactSkyKeyToSkyValue after each build?
   *
   * <p>The map maintains an invariant: if an ArtifactNestedSetKey exists on Skyframe, the SkyValues
   * of its member Artifacts are available in artifactSkyKeyToSkyValue.
   *
   * <p>Example: Action A has as input NestedSet X, where X = (X1, X2), where X1 & X2 are 2
   * transitive NestedSets.
   *
   * <p>Run 0: Establish dependency from A to X and from X to X1 & X2. Artifacts from X1 & X2 have
   * entries in artifactSkyKeyToSkyValue.
   *
   * <p>Run 1 (incremental): Some changes were made to an Artifact in X1 such that X1, X and A's
   * SkyKeys are marked as dirty. A's ActionLookupData has to be re-evaluated. This involves asking
   * Skyframe to compute SkyValues for its inputs.
   *
   * <p>However, X2 is not dirty, so Skyframe won't re-run ArtifactNestedSetFunction#compute for X2,
   * therefore not populating artifactSkyKeyToSkyValue with X2's member Artifacts. Hence if we clear
   * artifactSkyKeyToSkyValue between build 0 and 1, X2's member artifacts' SkyValues would not be
   * available in the map. TODO(leba): Make this weak-keyed.
   */
  private ConcurrentMap<SkyKey, SkyValue> artifactSkyKeyToSkyValue = new ConcurrentHashMap<>();

  /**
   * Maps the NestedSets' underlying objects to the corresponding SkyKey. This is to avoid
   * re-creating SkyKey for the same nested set upon reevaluation because of e.g. a missing value.
   *
   * <p>The map weakly references its values: when the ArtifactNestedSetKey becomes otherwise
   * unreachable, the map entry is collected.
   */
  private final ConcurrentMap<NestedSet.Node, ArtifactNestedSetKey> nestedSetToSkyKey =
      new MapMaker().weakValues().makeMap();

  // Supplies the SkyValue returned on success; either a fresh value per call or a shared
  // singleton, depending on whether value-based change pruning is enabled (see createInstance).
  private final Supplier<ArtifactNestedSetValue> valueSupplier;

  // Process-wide singleton, installed by createInstance and read by getInstance.
  private static ArtifactNestedSetFunction singleton = null;

  // Threshold (from --experimental_nested_set_as_skykey_threshold) above which a NestedSet is
  // evaluated as an ArtifactNestedSetKey; null until first set (see getSizeThreshold).
  private static Integer sizeThreshold = null;

  private ArtifactNestedSetFunction(Supplier<ArtifactNestedSetValue> valueSupplier) {
    this.valueSupplier = valueSupplier;
  }

  /**
   * Evaluates the dep keys of the given {@link ArtifactNestedSetKey}: leaves as Artifact keys,
   * non-leaves as nested ArtifactNestedSetKeys. Successful non-NestedSet values are committed to
   * {@link #artifactSkyKeyToSkyValue}; dep exceptions are gathered and rethrown as a single
   * {@link ArtifactNestedSetFunctionException}.
   */
  @Override
  public SkyValue compute(SkyKey skyKey, Environment env)
      throws InterruptedException, ArtifactNestedSetFunctionException {
    List<SkyKey> depKeys = getDepSkyKeys((ArtifactNestedSetKey) skyKey);
    List<
            ValueOrException3<
                SourceArtifactException, ActionExecutionException, ArtifactNestedSetEvalException>>
        depsEvalResult =
            env.getOrderedValuesOrThrow(
                depKeys,
                SourceArtifactException.class,
                ActionExecutionException.class,
                ArtifactNestedSetEvalException.class);

    NestedSetBuilder<Pair<SkyKey, Exception>> transitiveExceptionsBuilder =
        NestedSetBuilder.stableOrder();
    boolean catastrophic = false;

    // Throw a SkyFunctionException when a dep evaluation results in an exception.
    // Only non-null values should be committed to
    // ArtifactNestedSetFunction#artifactSkyKeyToSkyValue.
    int i = 0;
    for (ValueOrException3<
            SourceArtifactException, ActionExecutionException, ArtifactNestedSetEvalException>
        valueOrException : depsEvalResult) {
      SkyKey key = depKeys.get(i++);
      try {
        // Trigger the exception, if any.
        SkyValue value = valueOrException.get();
        // Nested-set deps maintain their own entries; null means the value is still missing.
        if (key instanceof ArtifactNestedSetKey || value == null) {
          continue;
        }
        artifactSkyKeyToSkyValue.put(key, value);
      } catch (SourceArtifactException e) {
        // SourceArtifactException is never catastrophic.
        transitiveExceptionsBuilder.add(Pair.of(key, e));
      } catch (ActionExecutionException e) {
        transitiveExceptionsBuilder.add(Pair.of(key, e));
        catastrophic |= e.isCatastrophe();
      } catch (ArtifactNestedSetEvalException e) {
        // A nested key already bundled its children's exceptions; merge them transitively.
        catastrophic |= e.isCatastrophic();
        transitiveExceptionsBuilder.addTransitive(e.getNestedExceptions());
      }
    }

    if (!transitiveExceptionsBuilder.isEmpty()) {
      NestedSet<Pair<SkyKey, Exception>> transitiveExceptions = transitiveExceptionsBuilder.build();
      // The NestedSet of exceptions is usually small, hence flattening won't be too costly.
      Pair<SkyKey, Exception> firstSkyKeyAndException = transitiveExceptions.toList().get(0);
      throw new ArtifactNestedSetFunctionException(
          new ArtifactNestedSetEvalException(
              "Error evaluating artifact nested set. First exception: "
                  + firstSkyKeyAndException.getSecond()
                  + ", SkyKey: "
                  + firstSkyKeyAndException.getFirst(),
              transitiveExceptions,
              catastrophic));
    }

    // This should only happen when all error handling is done.
    if (env.valuesMissing()) {
      return null;
    }
    return valueSupplier.get();
  }

  /**
   * Returns the Skyframe dep keys for the given key's NestedSet: one Artifact key per leaf, one
   * (cached) ArtifactNestedSetKey per non-leaf.
   */
  private List<SkyKey> getDepSkyKeys(ArtifactNestedSetKey skyKey) {
    NestedSet<Artifact> set = skyKey.getSet();
    List<SkyKey> keys = new ArrayList<>();
    for (Artifact file : set.getLeaves()) {
      keys.add(Artifact.key(file));
    }
    for (NestedSet<Artifact> nonLeaf : set.getNonLeaves()) {
      // Reuse the existing key for this node if one is still alive, to keep key identity stable
      // across reevaluations (see nestedSetToSkyKey).
      keys.add(
          nestedSetToSkyKey.computeIfAbsent(
              nonLeaf.toNode(), (node) -> new ArtifactNestedSetKey(nonLeaf, node)));
    }
    return keys;
  }

  // Returns the singleton; fails if createInstance has not been called yet.
  static ArtifactNestedSetFunction getInstance() {
    return checkNotNull(singleton);
  }

  /**
   * Creates a new instance. Should only be used in {@code SkyframeExecutor#skyFunctions}. Keeping
   * this method separated from {@code #getInstance} since sometimes we need to overwrite the
   * existing instance.
   *
   * <p>If value-based change pruning is disabled, the function makes an optimization of using a
   * singleton {@link ArtifactNestedSetValue}, since (in)equality of the value doesn't matter.
   */
  static ArtifactNestedSetFunction createInstance(boolean valueBasedChangePruningEnabled) {
    singleton =
        new ArtifactNestedSetFunction(
            valueBasedChangePruningEnabled
                ? ArtifactNestedSetValue::new
                : Suppliers.ofInstance(new ArtifactNestedSetValue()));
    return singleton;
  }

  /** Reset the various state-keeping maps of ArtifactNestedSetFunction. */
  void resetArtifactNestedSetFunctionMaps() {
    artifactSkyKeyToSkyValue = new ConcurrentHashMap<>();
  }

  // Looks up a committed SkyValue for an Artifact key; null if absent.
  SkyValue getValueForKey(SkyKey skyKey) {
    return artifactSkyKeyToSkyValue.get(skyKey);
  }

  // Commits/overwrites a SkyValue for an Artifact key (used by callers outside compute()).
  void updateValueForKey(SkyKey skyKey, SkyValue skyValue) {
    artifactSkyKeyToSkyValue.put(skyKey, skyValue);
  }

  @Override
  public String extractTag(SkyKey skyKey) {
    return null;
  }

  /**
   * Get the threshold to which we evaluate a NestedSet as a SkyKey. If sizeThreshold is unset,
   * return the default value of 0.
   */
  static int getSizeThreshold() {
    return sizeThreshold == null ? 0 : sizeThreshold;
  }

  /**
   * Updates the sizeThreshold value if the existing value differs from newValue.
   *
   * @param newValue The new value from --experimental_nested_set_as_skykey_threshold.
   * @return whether an update was made.
   */
  static boolean sizeThresholdUpdated(int newValue) {
    // If this is the first time the value is set, it's not considered "updated".
    if (sizeThreshold == null) {
      sizeThreshold = newValue;
      return false;
    }

    // All non-positive thresholds mean "disabled", so switching among them is not an update.
    if (sizeThreshold == newValue || (sizeThreshold <= 0 && newValue <= 0)) {
      return false;
    }
    sizeThreshold = newValue;
    return true;
  }

  /** Mainly used for error bubbling when evaluating direct/transitive children. */
  private static final class ArtifactNestedSetFunctionException extends SkyFunctionException {

    private final boolean catastrophic;

    ArtifactNestedSetFunctionException(ArtifactNestedSetEvalException e) {
      super(e, Transience.PERSISTENT);
      this.catastrophic = e.isCatastrophic();
    }

    @Override
    public boolean isCatastrophic() {
      return catastrophic;
    }
  }

  /** Bundles the exceptions from the evaluation of the children keys together. */
  static final class ArtifactNestedSetEvalException extends Exception {

    private final NestedSet<Pair<SkyKey, Exception>> nestedExceptions;
    private final boolean catastrophic;

    ArtifactNestedSetEvalException(
        String message,
        NestedSet<Pair<SkyKey, Exception>> nestedExceptions,
        boolean catastrophic) {
      super(message);
      this.nestedExceptions = nestedExceptions;
      this.catastrophic = catastrophic;
    }

    NestedSet<Pair<SkyKey, Exception>> getNestedExceptions() {
      return nestedExceptions;
    }

    // Should be true if at least one child exception is catastrophic.
    boolean isCatastrophic() {
      return catastrophic;
    }
  }
}
/*
 * Copyright (c) 2015 Celestibytes
 *
 * Maintainer: Okkapel
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package celestibytes.miscutils.lwjgl.opengl;

import java.nio.ByteBuffer;

import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;

/**
 * Immediate-mode-style vertex buffer helper for LWJGL/GL11 client arrays.
 *
 * <p>Interleaved per-vertex layout ({@link #BUFFER_STRIDE} = 36 bytes):
 * UV (2 floats, bytes 0-7), RGBA color (4 bytes, 8-11), XYZ position
 * (3 floats, 12-23), normal (3 floats, 24-35; writing normals is not
 * implemented — the slots are zero-filled).
 */
public class RBH {
    // Buffer format(bytes): UUUU VVVV R G B A XXXX YYYY ZZZZ [NX][NX][NX][NX] [NY][NY][NY][NY] [NZ][NZ][NZ][NZ]
    public static final int BUFFER_SIZE = 1 << 22;
    /** Bytes per vertex: 8 floats (uv + xyz + normal) plus 4 color bytes. */
    public static final int BUFFER_STRIDE = 8 * 4 + 4;
    private static final int DEFAULT_BUFFER_COUNT = 1;

    public static final RBH INSTANCE = new RBH(false, DEFAULT_BUFFER_COUNT, BUFFER_SIZE);

    // True while a drawing/editing session is open; guards against reentrant use.
    private boolean busy = false;
    private int drawMode = -1;
    private boolean blending = false;
    private ByteBuffer bbuf;
    private ByteBuffer[] bbufs;
    private boolean useNormals = false;
    // When true, write* methods skip over fields instead of zero-filling them,
    // preserving existing buffer contents (used when editing an attached buffer).
    private boolean skipMode = false;
    private boolean useTexture = false;
    private int vertexCount = 0;
    private int vertexRenderOffset = 0;
    private int vertexRenderCount = 0;
    private int attachedBufferPos = 0, attachedBufferLimit = 0;

    // Current color applied by write* variants that don't take explicit color.
    private byte currRed = (byte)0xFF, currGreen = (byte)0xFF, currBlue = (byte)0xFF, currAlpha = (byte)0xFF;

    private int currTex = 0;

    // Translation added to every written vertex position.
    private float translX = 0f, translY = 0f, translZ = 0f;

    /**
     * @param enableNormals reserved; normal writing is not implemented
     * @param bufferCount   number of backing buffers to allocate (>= 1)
     * @param bufferSize    capacity in bytes of each backing buffer
     */
    private RBH(boolean enableNormals, int bufferCount, int bufferSize) {
        if(bufferCount < 1) {
            throw new IllegalArgumentException("Buffer count must be at least 1");
        }

        useNormals = enableNormals;

        bbufs = new ByteBuffer[bufferCount];
        for(int i=0;i<bufferCount;i++) {
            bbufs[i] = BufferUtils.createByteBuffer(bufferSize);
        }
        bbuf = bbufs[0];
    }

    /** Restores all session state to defaults and rewinds the primary buffer. */
    private void reset() {
        bbuf = bbufs[0];
        bbuf.position(0);
        bbuf.limit(bbuf.capacity());
        busy = false;
        vertexCount = 0;
        vertexRenderOffset = 0;
        vertexRenderCount = 0;
        skipMode = false;
        useTexture = false;
        blending = false;
        currRed = (byte)0xFF;
        currGreen = (byte)0xFF;
        currBlue = (byte)0xFF;
        currAlpha = (byte)0xFF;
        disableTexture();
        disableBlending();
        translX = 0f;
        translY = 0f;
        translZ = 0f;
    }

    /** Cleanup run at the end of every session: unbind texture, disable blending. */
    private void afterOper() {
        disableTexture();
        disableBlending();
    }

    /**
     * Begins an editing session on an externally supplied buffer. Enables skip
     * mode so partial writes don't clobber untouched fields; the buffer's
     * position/limit are restored by {@link #finishEditing()}.
     */
    public void attachBuffer(ByteBuffer buffer) {
        if(busy) {
            System.err.println("RBH is currently busy!");
            return;
        }
        reset();
        attachedBufferLimit = buffer.limit();
        attachedBufferPos = buffer.position();
        bbuf = buffer;
        busy = true;
        skipMode = true;
    }

    public void startDrawingTriangles() {
        startDrawing(GL11.GL_TRIANGLES);
    }

    public void startDrawingQuads() {
        startDrawing(GL11.GL_QUADS);
    }

    /** Begins a drawing session with the given GL primitive mode. */
    public void startDrawing(int drawMode) {
        if(busy) {
            System.err.println("RBH is currently busy!");
            return;
        }
        reset();
        this.drawMode = drawMode;
        busy = true;
    }

    /** Ends an editing session, restoring the attached buffer's position/limit. */
    public void finishEditing() {
        bbuf.position(attachedBufferPos);
        bbuf.limit(attachedBufferLimit);
        afterOper();
        busy = false;
    }

    /** Copies the written bytes into a new flipped ByteBuffer and ends the session. */
    public ByteBuffer createBuffer() {
        ByteBuffer ret = BufferUtils.createByteBuffer(bbuf.position());
        bbuf.limit(bbuf.position());
        bbuf.position(0);
        ret.put(bbuf);
        ret.flip();
        afterOper();
        busy = false;
        return ret;
    }

    /** Copies the written bytes into a new byte[] and ends the session. */
    public byte[] createArray() {
        byte[] ret = new byte[bbuf.position()];
        bbuf.limit(bbuf.position());
        bbuf.position(0);
        bbuf.get(ret);
        afterOper();
        busy = false;
        return ret;
    }

    public boolean isBusy() {
        return busy;
    }

    /** Enables GL_TEXTURE_2D (if needed) and binds the given texture. */
    public void enableTexture(int tex) {
        if(!useTexture) {
            GL11.glEnable(GL11.GL_TEXTURE_2D);
            useTexture = true;
        }
        currTex = tex;
        GL11.glBindTexture(GL11.GL_TEXTURE_2D, currTex);
    }

    /** Unbinds the current texture and disables GL_TEXTURE_2D if it was enabled. */
    public void disableTexture() {
        currTex = 0;
        GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0);
        if(useTexture) {
            GL11.glDisable(GL11.GL_TEXTURE_2D);
            useTexture = false;
        }
    }

    public int getCurrTexture() {
        return currTex;
    }

    /** Enables GL_BLEND and sets the default blend mode */
    public void enableBlending() {
        if(!blending) {
            blending = true;
            GL11.glEnable(GL11.GL_BLEND);
        }
        GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
    }

    /** Enables GL_BLEND with an explicit source/destination factor pair. */
    public void enableBlending(int gl_sfactor, int gl_dfactor) {
        if(!blending) {
            blending = true;
            GL11.glEnable(GL11.GL_BLEND);
        }
        GL11.glBlendFunc(gl_sfactor, gl_dfactor);
    }

    public void disableBlending() {
        if(blending) {
            blending = false;
            GL11.glDisable(GL11.GL_BLEND);
        }
    }

    public void setVertRenderCount(int count) {
        vertexRenderCount = count;
    }

    public void setVertRenderOffset(int offset) {
        vertexRenderOffset = offset;
    }

    public int getVertexCount() {
        return vertexCount;
    }

    public void setBufferPos(int newPos) {
        bbuf.position(newPos);
    }

    public int getBufferPos() {
        return bbuf.position();
    }

    /** Positions the buffer at the start (UV field) of the given vertex. */
    public void setBufferVertexPos(int vertexIndex) {
        bbuf.position(vertexIndex * BUFFER_STRIDE);
    }

    /** Index of the vertex the buffer position currently falls inside. */
    public int getVertexIndex() {
        return bbuf.position() / BUFFER_STRIDE;
    }

    /**
     * Returns which field of the interleaved layout the buffer position is
     * currently inside: UV at bytes [0,8), color [8,12), position [12,24),
     * normal [24,36) — matching the offsets used by {@link #setAlignedPos}.
     */
    public DataPart getBufferAlignment() {
        int bidx = bbuf.position() % BUFFER_STRIDE;
        if(bidx < 8) {
            return DataPart.UV_COORD;
        }
        if(bidx < 12) {
            return DataPart.COLOR;
        }
        if(bidx < 24) {
            return DataPart.VERTEX;
        }
        if(bidx < 36) {
            // BUGFIX: bytes 24-35 hold the normal, not a second UV field
            // (previously returned DataPart.UV_COORD for this range).
            return DataPart.NORMAL;
        }
        return null;
    }

    /** Snaps the buffer position to the given field of the current vertex. */
    public void alignBuffer(DataPart part) {
        // Delegate to setAlignedPos: the two methods previously duplicated
        // the same offset table, risking divergence.
        setAlignedPos(getVertexIndex(), part);
    }

    /** Positions the buffer at the given field of the given vertex. */
    public void setAlignedPos(int vertexIndex, DataPart part) {
        switch(part) {
        case UV_COORD:
            bbuf.position(vertexIndex * BUFFER_STRIDE);
            break;
        case COLOR:
            bbuf.position(vertexIndex * BUFFER_STRIDE + 8);
            break;
        case VERTEX:
            bbuf.position(vertexIndex * BUFFER_STRIDE + 12);
            break;
        case NORMAL:
            bbuf.position(vertexIndex * BUFFER_STRIDE + 24);
            System.err.println("Usage of normals not implemented!");
        }
    }

    public void setSkipMode(boolean enabled) {
        skipMode = enabled;
    }

    /** Sets current RGB from the top three bytes of an 0xRRGGBBxx-packed int. */
    public void setColorRBG(int color) {
        currRed = (byte)((color & 0xFF000000) >> 24);
        currGreen = (byte)((color & 0xFF0000) >> 16);
        currBlue = (byte)((color & 0xFF00) >> 8);
    }

    /** Sets current RGBA from an 0xRRGGBBAA-packed int. */
    public void setColorRBGA(int color) {
        currRed = (byte)((color & 0xFF000000) >> 24);
        currGreen = (byte)((color & 0xFF0000) >> 16);
        currBlue = (byte)((color & 0xFF00) >> 8);
        currAlpha = (byte)(color & 0xFF);
    }

    /** Sets current RGB from floats in [0, 1]; alpha is left unchanged. */
    public void setColor(float r, float g, float b) {
        currRed = 0;
        currGreen = 0;
        currBlue = 0;
        currRed |= (int)(0xFF * r);
        currGreen |= (int)(0xFF * g);
        currBlue |= (int)(0xFF * b);
    }

    /** Sets current RGBA from floats in [0, 1]. */
    public void setColor(float r, float g, float b, float a) {
        currRed = 0;
        currGreen = 0;
        currBlue = 0;
        currAlpha = 0;
        currRed |= (int)(0xFF * r);
        currGreen |= (int)(0xFF * g);
        currBlue |= (int)(0xFF * b);
        currAlpha |= (int)(0xFF * a);
    }

    public void setColorRed(float r) {
        currRed = 0;
        currRed |= (int)(0xFF * r);
    }

    public void setColorGreen(float g) {
        currGreen = 0;
        currGreen |= (int)(0xFF * g);
    }

    public void setColorBlue(float b) {
        currBlue = 0;
        currBlue |= (int)(0xFF * b);
    }

    public void setColorAlpha(float a) {
        currAlpha = 0;
        currAlpha |= (int)(0xFF * a);
    }

    public void setTranslation(float x, float y, float z) {
        translX = x;
        translY = y;
        translZ = z;
    }

    public void addTranslation(float x, float y, float z) {
        translX += x;
        translY += y;
        translZ += z;
    }

    /** Writes a full vertex: explicit texture coords, color, and position. */
    public void writeVertexTCP(float u, float v, int r, int g, int b, int a, float x, float y, float z) {
        useTexture = true;
        bbuf.putFloat(u);
        bbuf.putFloat(v);
        bbuf.put((byte)(r & 0xFF));
        bbuf.put((byte)(g & 0xFF));
        bbuf.put((byte)(b & 0xFF));
        bbuf.put((byte)(a & 0xFF));
        bbuf.putFloat(x + translX);
        bbuf.putFloat(y + translY);
        bbuf.putFloat(z + translZ);
        writeZerof();
        writeZerof();
        writeZerof();
        vertexCount++;
    }

    /** Writes a vertex with explicit color and position; UV skipped or zeroed. */
    public void writeVertexCP(int r, int g, int b, int a, float x, float y, float z) {
        if(skipMode) {
            bbuf.position(bbuf.position() + 8);
        } else {
            writeZerof();
            writeZerof();
        }
        bbuf.put((byte)(r & 0xFF));
        bbuf.put((byte)(g & 0xFF));
        bbuf.put((byte)(b & 0xFF));
        bbuf.put((byte)(a & 0xFF));
        bbuf.putFloat(x + translX);
        bbuf.putFloat(y + translY);
        bbuf.putFloat(z + translZ);
        writeZerof();
        writeZerof();
        writeZerof();
        vertexCount++;
    }

    /** Writes a vertex with explicit UV and position; color skipped or taken from currRGBA. */
    public void writeVertexTP(float u, float v, float x, float y, float z) {
        useTexture = true;
        bbuf.putFloat(u);
        bbuf.putFloat(v);
        if(skipMode) {
            bbuf.position(bbuf.position() + 4);
        } else {
            bbuf.put(currRed);
            bbuf.put(currGreen);
            bbuf.put(currBlue);
            bbuf.put(currAlpha);
        }
        bbuf.putFloat(x + translX);
        bbuf.putFloat(y + translY);
        bbuf.putFloat(z + translZ);
        writeZerof();
        writeZerof();
        writeZerof();
        vertexCount++;
    }

    /** Writes a position-only vertex; UV+color skipped or zero/current-filled. */
    public void writeVertexP(float x, float y, float z) {
        if(skipMode) {
            bbuf.position(bbuf.position() + 12);
        } else {
            writeZerof();
            writeZerof();
            bbuf.put(currRed);
            bbuf.put(currGreen);
            bbuf.put(currBlue);
            bbuf.put(currAlpha);
        }
        bbuf.putFloat(x + translX);
        bbuf.putFloat(y + translY);
        bbuf.putFloat(z + translZ);
        writeZerof();
        writeZerof();
        writeZerof();
        vertexCount++;
    }

    /** Draws vertices in the buffer and disables blending and textures, also resets current texture */
    public void draw() {
        if(!busy) {
            System.err.println("Not drawing!");
            return;
        }
        if(drawMode == -1) {
            busy = false;
            System.err.println("Invalid drawMode or was not drawing!");
            return;
        }
        if(vertexCount < 1) {
            busy = false;
            return;
        }

        vertexRenderCount = vertexCount;

        if(useTexture) {
            bbuf.position(0);
            GL11.glTexCoordPointer(2, GL11.GL_FLOAT, BUFFER_STRIDE, bbuf);
            GL11.glEnableClientState(GL11.GL_TEXTURE_COORD_ARRAY);
        }

        bbuf.position(8);
        GL11.glColorPointer(4, true, BUFFER_STRIDE, bbuf); // unsigned bytes
        GL11.glEnableClientState(GL11.GL_COLOR_ARRAY);

        bbuf.position(12);
        GL11.glVertexPointer(3, GL11.GL_FLOAT, BUFFER_STRIDE, bbuf);
        GL11.glEnableClientState(GL11.GL_VERTEX_ARRAY);

        GL11.glDrawArrays(drawMode, vertexRenderOffset, vertexRenderCount);

        GL11.glDisableClientState(GL11.GL_VERTEX_ARRAY);
        GL11.glDisableClientState(GL11.GL_COLOR_ARRAY);
        if(useTexture) {
            GL11.glDisableClientState(GL11.GL_TEXTURE_COORD_ARRAY);
        }

        afterOper();
        busy = false;
    }

    public void writeByte(byte b) {
        bbuf.put(b);
    }

    public void writeFloat(float f) {
        bbuf.putFloat(f);
    }

    public void writeZerob() {
        bbuf.put((byte)0);
    }

    public void writeZerof() {
        bbuf.putFloat(0f);
    }

    /** Field tags for the interleaved vertex layout. */
    public static enum DataPart {
        UV_COORD,
        COLOR,
        VERTEX,
        NORMAL;
    }
}
/*
 * Copyright (C) 2015 Tanner Perrien
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.droiddevil.myuber.ui;

import android.appwidget.AppWidgetManager;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.location.Address;
import android.location.Location;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.RemoteViews;
import android.widget.TextView;
import android.widget.Toast;

import com.droiddevil.myuber.R;
import com.droiddevil.myuber.WidgetUpdateService;
import com.droiddevil.myuber.annotations.ForActivity;
import com.droiddevil.myuber.data.api.UberService;
import com.droiddevil.myuber.db.models.WidgetRecord;
import com.droiddevil.myuber.rx.EndlessObserver;
import com.droiddevil.myuber.uber.UberProduct;
import com.droiddevil.myuber.uber.UberProductResponse;
import com.droiddevil.myuber.utils.LocationUtils;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
import com.squareup.picasso.Picasso;

import java.util.ArrayList;
import java.util.List;

import javax.inject.Inject;

import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
import rx.Observer;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
import rx.subscriptions.SerialSubscription;
import timber.log.Timber;

/**
 * App-widget configuration screen: the user names the widget, enters a
 * destination address (geocoded as they type), and picks an Uber product
 * fetched for their current location. Saving persists a {@code WidgetRecord},
 * pushes an initial widget layout, starts the update service, and returns
 * RESULT_OK with the widget id (the standard AppWidget configure contract).
 */
public class ConfigureMyUberActivity extends BaseActivity {

    @Inject
    @ForActivity
    Context mContext;

    @Inject
    Picasso mPicasso;

    @Inject
    UberService mUberService;

    @Inject
    Tracker mAnalyticsTracker;

    @InjectView(R.id.title)
    EditText mTitle;

    @InjectView(R.id.address)
    EditText mAddress;

    @InjectView(R.id.address_status)
    ImageView mAddressStatus;

    @InjectView(R.id.user_location_address)
    TextView mUserLocationAddress;

    @InjectView(R.id.uber_ride_list)
    GridView mUberRideList;

    @InjectView(R.id.list_loading_indicator)
    View mListLoadingIndicator;

    // Widget id from the launching intent; INVALID_APPWIDGET_ID if absent.
    private int mAppWidgetId;

    private UberRideAdapter mUberRideListAdapter;

    // Backing list for the adapter; mutated in place, then notifyDataSetChanged.
    private List<UberProduct> mUberProducts;

    // Product tapped in the grid; null until the user selects one.
    private UberProduct mSelectedUberProduct;

    // Geocoded destination; null while the typed address fails to resolve.
    private Address mDestinationAddress;

    // SerialSubscriptions: setting a new subscription unsubscribes the previous
    // one, so only the latest request of each kind stays active.
    private SerialSubscription mUberProductSubscription = new SerialSubscription();

    private SerialSubscription mUserLocationSubscription = new SerialSubscription();

    private SerialSubscription mUserLocationAddressSubscription = new SerialSubscription();

    private SerialSubscription mDestinationAddressSubscription = new SerialSubscription();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_configure_myuber);

        // View injection
        ButterKnife.inject(this);

        // Get app widget ID
        Intent intent = getIntent();
        Bundle extras = intent.getExtras();
        if (extras != null) {
            mAppWidgetId = extras.getInt(AppWidgetManager.EXTRA_APPWIDGET_ID,
                    AppWidgetManager.INVALID_APPWIDGET_ID);
        }

        // Set default result so backing out cancels widget creation.
        setResult(RESULT_CANCELED);

        mUberProducts = new ArrayList<UberProduct>();
        mUberRideListAdapter = new UberRideAdapter(mContext, mPicasso, mUberProducts);
        mUberRideList.setAdapter(mUberRideListAdapter);
        mUberRideList.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
        mUberRideList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                mSelectedUberProduct = mUberProducts.get(position);
            }
        });

        // Get address field updates: re-geocode on every edit; the
        // SerialSubscription drops the in-flight lookup for stale text.
        mAddress.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                mDestinationAddressSubscription.set(LocationUtils.getAddressFromLocation(mContext, s.toString())
                        .subscribeOn(Schedulers.io())
                        .observeOn(AndroidSchedulers.mainThread())
                        .subscribe(new EndlessObserver<Address>() {
                            @Override
                            public void onNext(Address address) {
                                // Red/green indicator mirrors geocoding success.
                                mDestinationAddress = address;
                                mAddressStatus.setImageDrawable(new ColorDrawable(mDestinationAddress == null ? Color.RED : Color.GREEN));
                            }
                        }));
            }
        });

        // Get user location
        mUserLocationSubscription.set(LocationUtils.getUserLocation(mContext)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new EndlessObserver<Location>() {
                    @Override
                    public void onError(Throwable e) {
                        Timber.e(e, "An error occurred while fetching the user's location");
                    }

                    @Override
                    public void onNext(Location location) {
                        onLocationLocked(location);
                    }
                }));
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Unsubscribe everything so no observer outlives the activity.
        mUberProductSubscription.unsubscribe();
        mUserLocationSubscription.unsubscribe();
        mUserLocationAddressSubscription.unsubscribe();
        mDestinationAddressSubscription.unsubscribe();
    }

    /**
     * Called once a GPS fix arrives: loads the products available at that
     * location and reverse-geocodes it for display.
     */
    private void onLocationLocked(Location location) {
        // Fetch Uber products for this location
        fetchUberProductsForLocation(location.getLatitude(), location.getLongitude());

        // Get address from GPS location
        mUserLocationAddressSubscription.set(LocationUtils.getAddressFromCoords(mContext, location.getLatitude(), location.getLongitude())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new EndlessObserver<Address>() {
                    @Override
                    public void onNext(Address address) {
                        if (address != null && address.getMaxAddressLineIndex() > 0) {
                            // TODO: strengthen address formatting
                            mUserLocationAddress.setText(String.format("%s, %s", address.getAddressLine(0), address.getAddressLine(1)));
                        } else {
                            mUserLocationAddress.setText(R.string.activity_configure_section_ride_location_not_found);
                        }
                    }
                }));
    }

    /** Requests the product list for the given coordinates and fills the grid. */
    private void fetchUberProductsForLocation(double lat, double lng) {
        mUberProductSubscription.set(mUberService.getProducts(lat, lng)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<UberProductResponse>() {
                    @Override
                    public void onCompleted() {
                        mListLoadingIndicator.setVisibility(View.GONE);
                    }

                    @Override
                    public void onError(Throwable e) {
                        Timber.e(e, "Could not fetch uber products");
                        // Hide the spinner even on failure so the UI isn't stuck.
                        mListLoadingIndicator.setVisibility(View.GONE);
                    }

                    @Override
                    public void onNext(UberProductResponse response) {
                        mUberProducts.clear();
                        mUberProducts.addAll(response.getProducts());
                        mUberRideListAdapter.notifyDataSetChanged();
                    }
                }));
    }

    /**
     * Validates the form, persists the widget record, renders the initial
     * widget, kicks off the update service, and finishes with RESULT_OK.
     */
    @OnClick(R.id.save)
    public void onSaveClicked() {
        // Check form
        if (!isFormValid()) {
            Toast.makeText(mContext, R.string.activity_configure_invalid_form, Toast.LENGTH_LONG).show();
            return;
        }

        // Track event
        mAnalyticsTracker.send(
                new HitBuilders.EventBuilder()
                        .setCategory("Widget")
                        .setAction("Add")
                        .setLabel(mSelectedUberProduct.getDisplayName())
                        .setValue(1)
                        .build());

        // Save data record
        WidgetRecord record = new WidgetRecord();
        record.setAppWidgetId(mAppWidgetId);
        record.setTitle(mTitle.getText().toString());
        record.setUberProductId(mSelectedUberProduct.getId());
        record.setUberProductDisplayName(mSelectedUberProduct.getDisplayName());
        record.setDestinationAddress(mAddress.getText().toString());
        record.setDestinationLatitude(mDestinationAddress.getLatitude());
        record.setDestinationLongitude(mDestinationAddress.getLongitude());
        record.save();

        // Get widget manager
        AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(mContext);

        // Update widget layout with placeholders; real ETA/cost come from
        // WidgetUpdateService below.
        RemoteViews views = new RemoteViews(mContext.getPackageName(), R.layout.widget_small);
        views.setTextViewText(R.id.title, mTitle.getText());
        views.setTextViewText(R.id.ride, mSelectedUberProduct.getDisplayName());
        views.setTextViewText(R.id.eta, "...");
        views.setTextViewText(R.id.cost, "...");
        appWidgetManager.updateAppWidget(mAppWidgetId, views);

        // Start update service
        mContext.startService(WidgetUpdateService.createStartIntent(mContext, new int[]{mAppWidgetId}));

        // Build result intent
        Intent intent = new Intent();
        intent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId);
        setResult(RESULT_OK, intent);

        finish();
    }

    @OnClick(R.id.cancel)
    public void onCancelClicked() {
        finish();
    }

    /**
     * True when a title is entered, the destination geocoded successfully,
     * and a product is selected.
     */
    private boolean isFormValid() {
        if (TextUtils.isEmpty(mTitle.getText())) {
            return false;
        }
        if (mDestinationAddress == null) {
            return false;
        }
        if (mSelectedUberProduct == null) {
            return false;
        }
        return true;
    }

    /** Grid adapter rendering one Uber product (image + display name) per cell. */
    private static class UberRideAdapter extends BaseAdapter {

        private LayoutInflater mInflater;

        private Picasso mPicasso;

        // View into the activity's list; the activity mutates it and calls
        // notifyDataSetChanged — the adapter never copies it.
        private List<UberProduct> mProducts;

        public UberRideAdapter(Context context, Picasso picasso, List<UberProduct> products) {
            mInflater = LayoutInflater.from(context);
            mPicasso = picasso;
            mProducts = products;
        }

        @Override
        public int getCount() {
            return mProducts.size();
        }

        @Override
        public Object getItem(int position) {
            return mProducts.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                convertView = mInflater.inflate(R.layout.listitem_uber_product, parent, false);
            }

            // Get product
            UberProduct product = mProducts.get(position);

            // Set image
            ImageView image = ButterKnife.findById(convertView, R.id.image);
            mPicasso.load(product.getImage()).centerInside().fit().into(image);

            // Set title
            TextView title = ButterKnife.findById(convertView, R.id.title);
            title.setText(product.getDisplayName());

            return convertView;
        }
    }
}
import java.math.BigInteger; import java.net.*; import java.io.*; import java.nio.file.Path; import java.nio.file.Paths; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; public class Server implements Runnable { private ServerThread clients[] = new ServerThread[50]; private ServerSocket server = null; private Thread thread = null; private int clientCount = 0; private String adminPassword = "pw"; private List<String> bannedIpAddresses = new ArrayList<String>(); private Map<Integer,String> disconnectReason = new HashMap<Integer,String>() {{ put(ClientRequested,"has closed the Connection"); put(SockedClosed,"lost Connection to the Server"); put(ClientKicked,"was Kicked from the Server"); }}; // Reasons why the connection was closed private final int ClientRequested = 0; private final int SockedClosed = 1; private final int ClientKicked = 2; private final int OutdatedClient = 3; public Server(int port) { try { System.out.println("Binding to port " + port); server = new ServerSocket(port); System.out.println("Server started: " + server); start(); } catch(IOException ioe){ System.out.println("Can't bind to port " + port + ": " + ioe.getMessage()); } try { Path path = Paths.get("banned.txt"); Scanner sc = new Scanner(path); while (sc.hasNextLine()) { bannedIpAddresses.add(sc.nextLine()); } } catch (Exception ex) { System.out.println(ex.getMessage()); } } public void run() { while (thread != null){ try { System.out.println("Waiting for a client ..."); addThread(server.accept()); } catch(IOException ioe){ System.out.println("Server accept error: " + ioe); stop(); } } } public void start() { if (thread == null){ thread = new Thread(this); thread.start(); } } public void stop() { if (thread != null) { thread.stop(); thread = null; } } private int findClient(int ID) { for (int i = 0; i < clientCount; i++) { if (clients[i].getID() == ID) { return i; } } return -1; } public synchronized void handle(int ID, String input,String name){ 
System.out.println("handle: " + input); if (input.equals(Commands.CLOSE_CONNECTION)) { clients[findClient(ID)].send("/close"); remove(ID,ClientRequested); } else if(input.equals("|.GUI")) { clients[findClient(ID)].setGUI(); } else if(input.startsWith("|.DH")) { String split[] = input.split(" "); clients[findClient(ID)].finaliseDiffieHellman(new BigInteger(split[1])); } else if (input.startsWith(Commands.CHANGE_NAME) && clients[findClient(ID)].isKeyExchanged()) { String[] split = input.split(" "); if(split.length == 2) { clients[findClient(ID)].setClientName(split[1]); System.out.println("Changed name of " + name + " [" + ID + "]" + " to " + split[1]); for (int i = 0; i < clientCount; i++) { clients[i].send("Server: " + name + " Changed his name to " + split[1]); } } else { clients[findClient(ID)].send("No Name Specified or name contains spaces!"); } } else if(input.startsWith(Commands.MAKE_ADMIN) && clients[findClient(ID)].isKeyExchanged()) { String[] split = input.split(" "); if(split.length == 2) { if(split[1].equals(adminPassword)) { clients[findClient(ID)].setAdmin(); clients[findClient(ID)].send("Access Granted!"); } else { clients[findClient(ID)].send("Access Denied!"); } } else { clients[findClient(ID)].send("Not enough arguments! 
Usage: ./admin password"); } } else if (input.startsWith(Commands.BAN_USER) && clients[findClient(ID)].isKeyExchanged()) { if(clients[findClient(ID)].isAdmin()) { String[] split = input.split(" "); if(split.length == 2) { ServerThread clientToKick; int clientToKickID = Integer.parseInt(split[1]); try { clientToKick = clients[findClient(clientToKickID)]; } catch(Exception ex) { clientToKick = null; } if(clientToKick != null && !clientToKick.isAdmin()) { try { PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter("banned.txt", true))); out.println(clientToKick.getSocket().getInetAddress().getHostAddress()); bannedIpAddresses.add(clientToKick.getSocket().getInetAddress().getHostAddress()); out.close(); } catch (IOException e) { } remove(clientToKickID,ClientKicked); } else { clients[findClient(ID)].send("Client doesn't exist or is Admin!"); } } else { clients[findClient(ID)].send("Not enough arguments! Usage: ./ban ID"); } } else { clients[findClient(ID)].send("You don't have permission to use this command!"); } } else if (input.startsWith(Commands.KICK_USER) && clients[findClient(ID)].isKeyExchanged()) { if(clients[findClient(ID)].isAdmin()) { String[] split = input.split(" "); if(split.length == 2) { ServerThread clientToKick; int clientToKickID = Integer.parseInt(split[1]); try { clientToKick = clients[findClient(clientToKickID)]; } catch(Exception ex) { clientToKick = null; } if(clientToKick != null && !clientToKick.isAdmin()) { remove(clientToKickID,ClientKicked); } else { clients[findClient(ID)].send("Client doesn't exist or is Admin!"); } } else { clients[findClient(ID)].send("Not enough arguments! 
Usage: ./kick ID"); } } else { clients[findClient(ID)].send("You don't have permission to use this command!"); } } else if(input.startsWith(Commands.REMOVE_ADMIN) && clients[findClient(ID)].isKeyExchanged()) { if(clients[findClient(ID)].isAdmin()) { String[] split = input.split(" "); if(split.length == 2) { int clientToKickID = Integer.parseInt(split[1]); ServerThread removeAdminClient; try { removeAdminClient = clients[findClient(clientToKickID)]; } catch(Exception ex) { removeAdminClient = null; } if(removeAdminClient != null) { removeAdminClient.removeAdmin(); clients[findClient(ID)].send("Removed admin right from: " + removeAdminClient.getClientName()); } } else { clients[findClient(ID)].send("Not enough arguments! Usage: ./rAdmin ID"); } } else { clients[findClient(ID)].send("You don't have permission to use this command!"); } } else if(input.equals(Commands.LIST_USERS) && clients[findClient(ID)].isKeyExchanged()) { if(clients[findClient(ID)].isAdmin()) { if(clients[findClient(ID)].isGUI()) clients[findClient(ID)].send("Currently online: "); for (int i = 0; i < clientCount; i++) { String infoString = "Name: " + clients[i].getClientName() + "\nID: " + clients[i].getID() + "\nisAdmin: " + clients[i].isAdmin() + "\nisGUI: " + clients[i].isGUI() + "\n"+clients[i].getSocket() + "\n"; clients[findClient(ID)].send(infoString); } } else { clients[findClient(ID)].send("You don't have permission to use this command!"); } } else if(input.startsWith(Commands.CHANGE_ADMIN_PASSWORD) && clients[findClient(ID)].isKeyExchanged()) { if(clients[findClient(ID)].isAdmin()) { String[] split = input.split(" "); if (split.length == 2) { adminPassword = split[1]; System.out.println("Admin password was Changed by: " + clients[findClient(ID)].getClientName()); clients[findClient(ID)].send("The admin password was changed successfully!"); } else { clients[findClient(ID)].send("Not enough arguments! 
Usage: ./setAdminPassword password"); } } else { clients[findClient(ID)].send("You don't have permission to use this command!"); } } else if(input.equals(Commands.HELP) && clients[findClient(ID)].isKeyExchanged()) { if(clients[findClient(ID)].isAdmin()) { clients[findClient(ID)].send("./list\n./kick\n./setAdminPassword\n./admin\n./rAdmin"); } else { clients[findClient(ID)].send("You don't have permission to use this command!"); } } else if(input.equals(Commands.LIST_BANNED_USERS)) { if(clients[findClient(ID)].isAdmin()) { for(String banned : bannedIpAddresses) { clients[findClient(ID)].send(banned); } } else { clients[findClient(ID)].send("You don't have permission to use this command!"); } } else if(clients[findClient(ID)].isKeyExchanged()) { DateFormat dateFormat = new SimpleDateFormat("HH:mm:ss"); Date date = new Date(); for (int i = 0; i < clientCount; i++) { if(clients[i].getID() != ID) { clients[i].send("["+ dateFormat.format(date) + "] " + name + " [" + ID + "] : " + input); } else if(clients[findClient(ID)].isGUI()) { clients[i].send("["+ dateFormat.format(date) + "] " + name + " [" + ID + "] : " + input); } } } } public synchronized void remove(int ID,int r) { int pos = findClient(ID); if (pos >= 0) { ServerThread toTerminate = clients[pos]; System.out.println("Removing client thread " + ID + " at " + pos); String removeMsg = "Server: " + toTerminate.getClientName() + " <" + ID + "> " + disconnectReason.get(r); if(r == ClientKicked) { toTerminate.send("You were Kicked from the Server!"); toTerminate.send("/close"); } if (pos < clientCount-1) { for (int i = pos+1; i < clientCount; i++) { clients[i-1] = clients[i]; } } clientCount--; for (int i = 0; i < clientCount; i++) { clients[i].send(removeMsg); } try { toTerminate.close(); } catch(IOException ioe) { System.out.println("Error closing thread: " + ioe); } toTerminate.stop(); } } private boolean banned(String ipAddress) { for (String ip : bannedIpAddresses) { if(ipAddress.equals(ip)) { 
System.out.println("IP: " + ip + " is Banned!"); return true; } } return false; } private void addThread(Socket socket) { if (clientCount < clients.length && !banned(socket.getInetAddress().getHostAddress())) { System.out.println("Client accepted: " + socket); clients[clientCount] = new ServerThread(this, socket); try { clients[clientCount].open(); clients[clientCount].start(); ServerThread newClient = clients[clientCount]; clientCount++; newClient.startDH_KeyExchange(); if(InetAddress.getLoopbackAddress().getHostAddress().equals(socket.getInetAddress().getHostAddress())) { newClient.setAdmin(); } for (int i = 0; i < (clientCount - 1); i++) { clients[i].send("Server: " + newClient.getClientName() + " <" + newClient.getID() + "> Connected from: " + socket.getInetAddress().getHostAddress()); } } catch(IOException ioe) { System.out.println("Error opening thread: " + ioe); } } else { if(clientCount > clients.length) System.out.println("Client refused: maximum number of connections reached (" + clients.length + ")"); else System.out.println("Client refused: Banned!"); try { DataOutputStream streamOut = new DataOutputStream(new BufferedOutputStream(socket.getOutputStream())); streamOut.writeUTF(AES.encrypt("Server is at the maximum capacity!","Encryption_Key")); streamOut.close(); socket.close(); } catch (Exception ex) { System.out.println(ex.getMessage()); } } } public static void main(String args[]) { int port = 1337; if(args.length == 1) { port = Integer.parseInt(args[0]); } Server server = new Server(port); server.start(); } }
package com.jcumulus.server.rtmfp.publisher;

/**
 * jCumulus is a Java port of Cumulus OpenRTMP
 *
 * Copyright 2011 OpenRTMFP
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License received along this program for more
 * details (or else see http://www.gnu.org/licenses/).
 *
 *
 * This file is a part of jCumulus.
 */

import com.jcumulus.server.rtmfp.packet.*;
import com.jcumulus.server.rtmfp.ISession;
import com.jcumulus.server.rtmfp.Peer;
import com.jcumulus.server.rtmfp.pipe.C;
import com.jcumulus.server.rtmfp.flow.H;
import com.jcumulus.server.rtmfp.stream.BinaryWriter;
import com.jcumulus.server.rtmfp.stream.B;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;

/**
 * Receiving side of one RTMFP flow (decompiled/obfuscated source — single-letter
 * names are from the original jCumulus port of Cumulus).
 *
 * Reassembles possibly out-of-order, possibly multi-part message fragments into
 * complete messages by stage number, then dispatches them by AMF type byte
 * (invocation, audio, video, raw). Overridable protected methods are the
 * per-message hooks for subclasses.
 *
 * NOTE(review): the flag constants below (N=128, C=16, M=32, H=2, E=1) are used
 * as bit masks on the per-fragment flags byte — presumably matching Cumulus'
 * MESSAGE_HEADER / WITH_AFTERPART / WITH_BEFOREPART / ABANDONMENT / END flags;
 * confirm against the Cumulus C++ sources.
 */
public class E
{
    private static final Logger F = Logger.getLogger(E.class);

    public static final Integer N = Integer.valueOf(128);
    public static final Integer C = Integer.valueOf(16);   // fragment has an "afterpart" (more parts follow)
    public static final Integer M = Integer.valueOf(32);   // fragment has a "beforepart" (continues a buffered part)
    public static final Integer H = Integer.valueOf(2);    // abandonment: drop any buffered parts
    public static final Integer E = Integer.valueOf(1);    // end of flow

    public static com.jcumulus.server.rtmfp.application.C I = new com.jcumulus.server.rtmfp.application.C();

    protected int A;                                       // flow id (appears in all log messages)
    private int J;                                         // highest stage processed in order so far
    protected Peer K;                                      // owning peer
    private boolean G;                                     // true once the flow is completed/consumed
    private ISession D;                                    // session used to emit response packets
    private com.jcumulus.server.rtmfp.publisher.D B;       // buffer accumulating the parts of a multi-part message
    protected com.jcumulus.server.rtmfp.publisher.FlowWriter O; // writer paired with this flow
    private Map P;                                         // stage (Integer) -> buffered fragment (publisher.B), for out-of-order arrival
    private String L;                                      // last error text, exposed via G()

    /**
     * @param i      flow id
     * @param abyte0 flow signature bytes, handed to the FlowWriter
     * @param s      name/identifier forwarded to the FlowWriter
     * @param p      owning peer
     * @param d      session used for writes
     */
    public E(int i, byte abyte0[], String s, Peer p, ISession d)
    {
        P = new HashMap();
        A = i;
        J = 0;
        K = p;
        G = false;
        B = null;
        D = d;
        O = new com.jcumulus.server.rtmfp.publisher.FlowWriter(abyte0, d);
        O.C(i);
        O.C(s);
    }

    /** Completes the flow and closes its writer. */
    public void C()
    {
        E();
        O.C();
    }

    /** Marks the flow consumed (idempotent). */
    void E()
    {
        if(G)
            return;
        if(O.G() != null)
            F.debug((new StringBuilder()).append("Flow ").append(A).append(" consumed").toString());
        G = true;
    }

    /**
     * Signals a flow failure: logs it and, if the flow is still open, sends an
     * exception/abort message (type 0x5e) for this flow id back to the peer.
     * NOTE(review): the stray "%s" in the log text is a leftover printf format
     * from the C++ original.
     */
    protected void A(String s)
    {
        F.error((new StringBuilder()).append("Flow ").append(A).append(" failed : %s").append(s).toString());
        if(!G)
        {
            B b = D.A((byte)94, com.jcumulus.server.rtmfp.pipe.C.A(A) + 1, null);
            b.D(A);
            b.B((byte)0);
        }
    }

    /**
     * Reads and normalizes the message type byte from a reassembled packet,
     * skipping the type-specific header bytes. Returns 0 for empty packets,
     * 20 (AMF invoke) for both 17 and 20, otherwise the raw type byte.
     */
    private byte B(AudioPacket a)
    {
        if(a.I() == 0)
            return 0;
        byte byte0 = a.L();
        switch(byte0)
        {
        case 17: // AMF3-wrapped invoke: skip the extra prefix byte, then
                 // fall through to the plain-invoke handling below
            a.D(1);

        case 20: // AMF0 invoke: skip 4 header bytes, report as 20
            a.D(4);
            return 20;

        case 15: // AMF3 data message: skip 5 header bytes
            a.D(5);
            break;

        case 4:  // skip 4 header bytes
            a.D(4);
            break;

        case 2:
        case 3:
        case 5:
        case 6:
        case 7:
        case 10:
        case 11:
        case 12:
        case 13:
        case 14:
        case 16:
        case 18:
        case 19:
        default:
            F.error((new StringBuilder()).append("Unpacking type '").append(byte0).append("' unknown").toString());
            break;

        case 1:  // passed through unchanged
        case 8:  // audio
        case 9:  // video
            break;
        }
        return byte0;
    }

    /**
     * Sends an acknowledgement report (type 0x51) for this flow: flow id,
     * remaining buffer window, and the current stage.
     * NOTE(review): decompiler artifacts — arraylist is never populated, so the
     * trailing loop is dead, and the keySet loop only advances the iterator;
     * preserved byte-for-byte.
     */
    public void F()
    {
        int i = 0;
        ArrayList arraylist = new ArrayList();
        int j = J;
        boolean flag = false;
        Integer integer;
        for(Iterator iterator = P.keySet().iterator(); iterator.hasNext();)
            integer = (Integer)iterator.next();
        // Advertised window: 127 when nothing buffered, else the room left under 16128.
        int k = B == null ? 127 : B.B() <= 16128 ? 16128 - B.B() : 0;
        if(O.G() == null)
            k = 0;
        B b = D.A((byte)81, com.jcumulus.server.rtmfp.pipe.C.A(A) + com.jcumulus.server.rtmfp.pipe.C.A(k) + com.jcumulus.server.rtmfp.pipe.C.A(J) + i, null);
        b.D(A);
        b.D(k);
        b.D(J);
        Integer integer1;
        for(Iterator iterator1 = arraylist.iterator(); iterator1.hasNext(); b.D(integer1.intValue()))
            integer1 = (Integer)iterator1.next();
        A();
        O.A();
    }

    /**
     * Entry point for a received fragment.
     *
     * @param i     stage (sequence number) of this fragment
     * @param j     delta-NAck: how far back the sender acknowledges
     * @param a     fragment payload
     * @param byte0 fragment flags (see the bit-mask constants above)
     *
     * Duplicate stages are dropped; future stages are parked in {@link #P}
     * until the gap fills; in-order stages are processed immediately, followed
     * by any now-contiguous parked fragments.
     */
    public void A(int i, int j, AudioPacket a, byte byte0)
    {
        if(G)
            return;
        int k = J + 1; // next stage we expect
        if(i < k)
        {
            F.debug((new StringBuilder()).append("Stage ").append(i).append(" on flow ").append(A).append(" has already been received").toString());
            return;
        }
        if(j > i)
        {
            F.warn((new StringBuilder()).append("DeltaNAck ").append(j).append(" superior to stage ").append(i).append(" on flow ").append(A).toString());
            j = i;
        }
        if(J < i - j)
        {
            // The sender no longer cares about stages below i-j: flush any parked
            // fragments up to stage i, then jump the expectation forward.
            Iterator iterator = P.keySet().iterator();
            do
            {
                if(!iterator.hasNext())
                    break;
                Integer integer = (Integer)iterator.next();
                if(integer.intValue() > i)
                    break;
                AudioPacket a1 = ((com.jcumulus.server.rtmfp.publisher.B)P.get(integer)).B();
                A(integer.intValue(), a1, ((com.jcumulus.server.rtmfp.publisher.B)P.get(integer)).A());
                if(G)
                    return;
                iterator.remove();
            } while(true);
            k = i;
        }
        if(i > k)
        {
            // Out of order: park the fragment until the gap is filled.
            com.jcumulus.server.rtmfp.publisher.B b = (com.jcumulus.server.rtmfp.publisher.B)P.get(Integer.valueOf(i));
            if(b == null)
            {
                P.put(Integer.valueOf(i), new com.jcumulus.server.rtmfp.publisher.B(a, byte0));
                if(P.size() > 100)
                    F.debug((new StringBuilder()).append("fragmentMap.size()=").append(P.size()).toString());
            } else
            {
                F.debug((new StringBuilder()).append("Stage ").append(i).append(" on flow ").append(A).append(" has already been received").toString());
            }
        } else
        {
            // In order: process it, then drain any parked fragments that are now contiguous.
            A(k++, a, byte0);
            Iterator iterator1 = P.keySet().iterator();
            do
            {
                if(!iterator1.hasNext())
                    break;
                Integer integer1 = (Integer)iterator1.next();
                if(integer1.intValue() > k)
                    break;
                AudioPacket a2 = ((com.jcumulus.server.rtmfp.publisher.B)P.get(integer1)).B();
                A(k++, a2, ((com.jcumulus.server.rtmfp.publisher.B)P.get(integer1)).A());
                if(G)
                    break;
                iterator1.remove();
            } while(true);
        }
    }

    /**
     * Processes a single in-order fragment: accounts for lost stages, handles
     * the beforepart/afterpart buffering of multi-part messages, and — once a
     * message is complete — dispatches it by type to the protected hooks.
     * NOTE(review): `(byte0 != 0) & (M != null)` is a decompiler artifact
     * (M is a constant, never null); preserved byte-for-byte.
     */
    void A(int i, AudioPacket a, byte byte0)
    {
        if(i <= J)
        {
            F.error((new StringBuilder()).append("Stage ").append(i).append(" not sorted on flow").append(A).toString());
            return;
        }
        if(i > J + 1)
        {
            // Some stages were skipped: report them lost and drop any partial buffer.
            int j = i - J - 1;
            J = i;
            if(B != null)
                B = null;
            if((byte0 != 0) & (M != null))
            {
                A(j + 1);
                return;
            }
            A(j);
        } else
        {
            J = i;
        }
        if((byte0 & H.intValue()) != 0)
        {
            // Abandonment flag: discard any buffered parts.
            if(B != null)
                B = null;
            return;
        }
        if((byte0 & M.intValue()) != 0)
        {
            // Continuation of a multi-part message.
            if(B == null)
            {
                F.warn("A received message tells to have a 'beforepart' and nevertheless partbuffer is empty, certainly some packets were lost");
                A(1);
                B = null;
                return;
            }
            B.A(a);
            if((byte0 & C.intValue()) != 0)
                return; // more parts still to come
            a = B.A(); // message complete: replace the fragment with the assembled packet
        } else
        if((byte0 & C.intValue()) != 0)
        {
            // First part of a multi-part message: start (or restart) the buffer.
            if(B != null)
            {
                F.error("A received message tells to have not 'beforepart' and nevertheless partbuffer exists");
                A(B.B());
            }
            B = new com.jcumulus.server.rtmfp.publisher.D(a);
            return;
        }
        byte byte1 = B(a);
        if(byte1 != 0)
        {
            O.A(Double.valueOf(0.0D));
            String s = null;
            com.jcumulus.server.rtmfp.flow.B b = new com.jcumulus.server.rtmfp.flow.B(a);
            if(byte1 == 20 || byte1 == 15)
            {
                s = b.E(); // invocation name
                if(byte1 == 20)
                {
                    O.A(b.N());
                    if(b.G() == com.jcumulus.server.rtmfp.flow.H.Null)
                        b.P();
                }
            }
            try
            {
                switch(byte1)
                {
                case 15: // data message
                case 20: // invoke
                    A(s, b);
                    break;

                case 8:  // audio
                    C(a);
                    break;

                case 9:  // video
                    A(a);
                    break;

                default: // anything else: raw message hook
                    A(byte1, a);
                    break;
                }
            }
            catch(Exception exception)
            {
                F.error(exception.getMessage(), exception);
                L = (new StringBuilder()).append("flow error, ").append(exception).toString();
            }
        }
        O.A(Double.valueOf(0.0D));
        if(B != null)
            B = null;
        if((byte0 & E.intValue()) != 0)
            E(); // end-of-flow flag: complete the flow
    }

    /** Hook: named invocation/data message. Default logs it as unknown. */
    protected void A(String s, com.jcumulus.server.rtmfp.flow.B b)
    {
        F.error((new StringBuilder()).append("Message '").append(s).append("' unknown for flow ").append(A).toString());
    }

    /** Hook: raw (untyped) message. Default logs it as unknown. */
    protected void A(byte byte0, Packet a)
    {
        F.error((new StringBuilder()).append("Raw message unknown for flow ").append(A).toString());
    }

    /** Hook: audio packet. Default logs it as untreated. */
    protected void C(AudioPacket a)
    {
        F.error((new StringBuilder()).append("Audio packet untreated for flow ").append(A).toString());
    }

    /** Hook: video packet. Default logs it as untreated. */
    protected void A(AudioPacket a)
    {
        F.error((new StringBuilder()).append("Video packet untreated for flow ").append(A).toString());
    }

    /** Hook: i fragments were lost. Default just logs the count. */
    protected void A(int i)
    {
        F.info((new StringBuilder()).append(i).append(" fragments lost on flow").append(A).toString());
    }

    /** Hook invoked after each ack report ({@link #F()}). Default: no-op. */
    protected void A()
    {
    }

    /** @return true once the flow has been completed/consumed. */
    public boolean B()
    {
        return G;
    }

    /** @return the flow id. */
    public int D()
    {
        return A;
    }

    /** @return the last error text recorded during dispatch, or null. */
    public String G()
    {
        return L;
    }
}
/*
 * oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
 *
 * Copyright (c) 2014, Gluu
 */

package org.xdi.oxauth.interop;

import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import org.xdi.oxauth.BaseTest;
import org.xdi.oxauth.client.*;
import org.xdi.oxauth.model.common.AuthenticationMethod;
import org.xdi.oxauth.model.common.GrantType;
import org.xdi.oxauth.model.common.ResponseType;
import org.xdi.oxauth.model.crypto.OxAuthCryptoProvider;
import org.xdi.oxauth.model.crypto.signature.SignatureAlgorithm;
import org.xdi.oxauth.model.register.ApplicationType;
import org.xdi.oxauth.model.util.StringUtils;

import java.util.Arrays;
import java.util.List;
import java.util.UUID;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;

/**
 * OC5:FeatureTest-Support Authentication to Token Endpoint with Symmetrically Signed JWTs
 *
 * <p>The three HS* variants ran the identical register/authorize/token flow and
 * differed only in the signature algorithm set on the token request, so the
 * shared flow is extracted into
 * {@link #authenticateWithSymmetricallySignedJwt}.
 *
 * @author Javier Rojas Blum
 * @version June 17, 2016
 */
public class SupportAuthenticationToTokenEndpointWithSymmetricallySignedJWTs extends BaseTest {

    @Parameters({"redirectUris", "redirectUri", "userId", "userSecret", "sectorIdentifierUri"})
    @Test
    public void supportAuthenticationToTokenEndpointWithSymmetricallySignedJWTsHS256(
            final String redirectUris, final String redirectUri, final String userId, final String userSecret,
            final String sectorIdentifierUri) throws Exception {
        showTitle("OC5:FeatureTest-Support Authentication to Token Endpoint with Symmetrically Signed JWTs (HS256)");
        // null algorithm: the original HS256 variant relied on the token request's
        // default signature algorithm rather than calling setAlgorithm(HS256).
        authenticateWithSymmetricallySignedJwt(redirectUris, redirectUri, userId, userSecret, sectorIdentifierUri,
                null);
    }

    @Parameters({"redirectUris", "redirectUri", "userId", "userSecret", "sectorIdentifierUri"})
    @Test
    public void supportAuthenticationToTokenEndpointWithSymmetricallySignedJWTsHS384(
            final String redirectUris, final String redirectUri, final String userId, final String userSecret,
            final String sectorIdentifierUri) throws Exception {
        showTitle("OC5:FeatureTest-Support Authentication to Token Endpoint with Symmetrically Signed JWTs (HS384)");
        authenticateWithSymmetricallySignedJwt(redirectUris, redirectUri, userId, userSecret, sectorIdentifierUri,
                SignatureAlgorithm.HS384);
    }

    @Parameters({"redirectUris", "redirectUri", "userId", "userSecret", "sectorIdentifierUri"})
    @Test
    public void supportAuthenticationToTokenEndpointWithSymmetricallySignedJWTsHS512(
            final String redirectUris, final String redirectUri, final String userId, final String userSecret,
            final String sectorIdentifierUri) throws Exception {
        showTitle("OC5:FeatureTest-Support Authentication to Token Endpoint with Symmetrically Signed JWTs (HS512)");
        authenticateWithSymmetricallySignedJwt(redirectUris, redirectUri, userId, userSecret, sectorIdentifierUri,
                SignatureAlgorithm.HS512);
    }

    /**
     * Shared flow for all three variants: registers a client with
     * {@code client_secret_jwt} token-endpoint authentication, obtains an
     * authorization code as the resource owner, then exchanges it for an access
     * token using a symmetrically signed JWT client assertion.
     *
     * @param algorithm HMAC signature algorithm for the client assertion, or
     *                  {@code null} to use the token request's default
     * @throws Exception on any client failure
     */
    private void authenticateWithSymmetricallySignedJwt(String redirectUris, String redirectUri, String userId,
            String userSecret, String sectorIdentifierUri, SignatureAlgorithm algorithm) throws Exception {
        // 1. Register client
        RegisterRequest registerRequest = new RegisterRequest(ApplicationType.WEB, "oxAuth test app",
                StringUtils.spaceSeparatedToList(redirectUris));
        registerRequest.setTokenEndpointAuthMethod(AuthenticationMethod.CLIENT_SECRET_JWT);
        registerRequest.setSectorIdentifierUri(sectorIdentifierUri);

        RegisterClient registerClient = new RegisterClient(registrationEndpoint);
        registerClient.setRequest(registerRequest);
        RegisterResponse registerResponse = registerClient.exec();

        showClient(registerClient);
        assertEquals(registerResponse.getStatus(), 200, "Unexpected response code: " + registerResponse.getEntity());
        assertNotNull(registerResponse.getClientId());
        assertNotNull(registerResponse.getClientSecret());
        assertNotNull(registerResponse.getRegistrationAccessToken());
        assertNotNull(registerResponse.getClientIdIssuedAt());
        assertNotNull(registerResponse.getClientSecretExpiresAt());

        String clientId = registerResponse.getClientId();
        String clientSecret = registerResponse.getClientSecret();

        // 2. Request authorization
        List<ResponseType> responseTypes = Arrays.asList(ResponseType.CODE);
        List<String> scopes = Arrays.asList("openid", "profile", "address", "email");
        String state = UUID.randomUUID().toString();

        AuthorizationRequest authorizationRequest = new AuthorizationRequest(responseTypes, clientId, scopes,
                redirectUri, null);
        authorizationRequest.setState(state);

        AuthorizationResponse authorizationResponse = authenticateResourceOwnerAndGrantAccess(
                authorizationEndpoint, authorizationRequest, userId, userSecret);

        assertNotNull(authorizationResponse.getLocation());
        assertNotNull(authorizationResponse.getCode());
        assertNotNull(authorizationResponse.getState());

        String authorizationCode = authorizationResponse.getCode();

        // 3. Get Access Token, authenticating with a symmetrically signed JWT assertion
        OxAuthCryptoProvider cryptoProvider = new OxAuthCryptoProvider();

        TokenRequest tokenRequest = new TokenRequest(GrantType.AUTHORIZATION_CODE);
        tokenRequest.setAuthenticationMethod(AuthenticationMethod.CLIENT_SECRET_JWT);
        tokenRequest.setCryptoProvider(cryptoProvider);
        if (algorithm != null) {
            tokenRequest.setAlgorithm(algorithm);
        }
        tokenRequest.setAudience(tokenEndpoint);
        tokenRequest.setCode(authorizationCode);
        tokenRequest.setRedirectUri(redirectUri);
        tokenRequest.setAuthUsername(clientId);
        tokenRequest.setAuthPassword(clientSecret);

        TokenClient tokenClient = new TokenClient(tokenEndpoint);
        tokenClient.setRequest(tokenRequest);
        TokenResponse tokenResponse = tokenClient.exec();

        showClient(tokenClient);
        assertEquals(tokenResponse.getStatus(), 200, "Unexpected response code: " + tokenResponse.getStatus());
        assertNotNull(tokenResponse.getEntity(), "The entity is null");
        assertNotNull(tokenResponse.getAccessToken(), "The access token is null");
        assertNotNull(tokenResponse.getExpiresIn(), "The expires in value is null");
        assertNotNull(tokenResponse.getTokenType(), "The token type is null");
        assertNotNull(tokenResponse.getRefreshToken(), "The refresh token is null");
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.ha.session; import java.io.IOException; import javax.servlet.ServletException; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Engine; import org.apache.catalina.Host; import org.apache.catalina.LifecycleException; import org.apache.catalina.Manager; import org.apache.catalina.Session; import org.apache.catalina.connector.Request; import org.apache.catalina.connector.Response; import org.apache.catalina.ha.CatalinaCluster; import org.apache.catalina.ha.ClusterManager; import org.apache.catalina.ha.ClusterValve; import org.apache.catalina.session.ManagerBase; import org.apache.catalina.session.PersistentManager; import org.apache.catalina.valves.ValveBase; import org.apache.tomcat.util.res.StringManager; /** * Valve to handle Tomcat jvmRoute takeover using mod_jk module after node * failure. After a node crashes, subsequent requests go to other cluster nodes. * That incurs a drop in performance. 
When this Valve is enabled on a backup * node and sees a request, which was intended for another (thus failed) node, * it will rewrite the cookie jsessionid information to use the route to this * backup cluster node, that answered the request. After the response is * delivered to the client, all subsequent client requests will go directly to * the backup node. The change of sessionid is also sent to all other cluster * nodes. After all that, the session stickiness will work directly to the * backup node and the traffic will not go back to the failed node after it is * restarted! * * <p> * For this valve to function correctly, so that all nodes of the cluster * receive the sessionid change notifications that it generates, the following * ClusterListener MUST be configured at all nodes of the cluster: * {@link org.apache.catalina.ha.session.JvmRouteSessionIDBinderListener * JvmRouteSessionIDBinderListener} since Tomcat 5.5.10, and both * JvmRouteSessionIDBinderListener and JvmRouteSessionIDBinderLifecycleListener * for earlier versions of Tomcat. * * <p> * Add this Valve to your host definition at conf/server.xml . * * Since 5.5.10 as direct cluster valve:<br/> * * <pre> * &lt;Cluster&gt; * &lt;Valve className=&quot;org.apache.catalina.ha.session.JvmRouteBinderValve&quot; /&gt; * &lt;/Cluster&gt; * </pre> * * <br /> * Before 5.5.10 as Host element:<br/> * * <pre> * &lt;Host&gt; * &lt;Valve className=&quot;org.apache.catalina.ha.session.JvmRouteBinderValve&quot; /&gt; * &lt;/Host&gt; * </pre> * * <em>A Trick:</em><br/> * You can enable this mod_jk turnover mode via JMX before you drop a node to * all backup nodes! Set enable true on all JvmRouteBinderValve backups, disable * worker at mod_jk and then drop node and restart it! Then enable mod_jk worker * and disable JvmRouteBinderValves again. This use case means that only * requested sessions are migrated. 
* * @author Peter Rossbach */ public class JvmRouteBinderValve extends ValveBase implements ClusterValve { /*--Static Variables----------------------------------------*/ public static final org.apache.juli.logging.Log log = org.apache.juli.logging.LogFactory .getLog(JvmRouteBinderValve.class); /** * The descriptive information about this implementation. */ protected static final String info = "org.apache.catalina.ha.session.JvmRouteBinderValve/1.2"; //------------------------------------------------------ Constructor public JvmRouteBinderValve() { super(true); } /*--Instance Variables--------------------------------------*/ /** * the cluster */ protected CatalinaCluster cluster; /** * The string manager for this package. */ protected static final StringManager sm = StringManager.getManager(Constants.Package); /** * enabled this component */ protected boolean enabled = true; /** * number of session that no at this tomcat instanz hosted */ protected long numberOfSessions = 0; protected String sessionIdAttribute = "org.apache.catalina.ha.session.JvmRouteOrignalSessionID"; /*--Logic---------------------------------------------------*/ /** * Return descriptive information about this implementation. */ @Override public String getInfo() { return (info); } /** * set session id attribute to failed node for request. * * @return Returns the sessionIdAttribute. */ public String getSessionIdAttribute() { return sessionIdAttribute; } /** * get name of failed request session attribute * * @param sessionIdAttribute * The sessionIdAttribute to set. */ public void setSessionIdAttribute(String sessionIdAttribute) { this.sessionIdAttribute = sessionIdAttribute; } /** * @return Returns the number of migrated sessions. */ public long getNumberOfSessions() { return numberOfSessions; } /** * @return Returns the enabled. */ public boolean getEnabled() { return enabled; } /** * @param enabled * The enabled to set. 
*/ public void setEnabled(boolean enabled) { this.enabled = enabled; } /** * Detect possible the JVMRoute change at cluster backup node.. * * @param request * tomcat request being processed * @param response * tomcat response being processed * @exception IOException * if an input/output error has occurred * @exception ServletException * if a servlet error has occurred */ @Override public void invoke(Request request, Response response) throws IOException, ServletException { if (getEnabled() && request.getContext() != null && request.getContext().getDistributable() && !request.isAsyncDispatching()) { // valve cluster can access manager - other cluster handle turnover // at host level - hopefully! Manager manager = request.getContext().getManager(); if (manager != null && ( (manager instanceof ClusterManager && getCluster() != null && getCluster().getManager(((ClusterManager)manager).getName()) != null) || (manager instanceof PersistentManager))) handlePossibleTurnover(request); } // Pass this request on to the next valve in our pipeline getNext().invoke(request, response); } /** * handle possible session turn over. 
* * @see JvmRouteBinderValve#handleJvmRoute(Request, String, String) * @param request current request */ protected void handlePossibleTurnover(Request request) { String sessionID = request.getRequestedSessionId() ; if (sessionID != null) { long t1 = System.currentTimeMillis(); String jvmRoute = getLocalJvmRoute(request); if (jvmRoute == null) { if (log.isDebugEnabled()) log.debug(sm.getString("jvmRoute.missingJvmRouteAttribute")); return; } handleJvmRoute( request, sessionID, jvmRoute); if (log.isDebugEnabled()) { long t2 = System.currentTimeMillis(); long time = t2 - t1; log.debug(sm.getString("jvmRoute.turnoverInfo", Long.valueOf(time))); } } } /** * get jvmroute from engine * * @param request current request * @return return jvmRoute from ManagerBase or null */ protected String getLocalJvmRoute(Request request) { Manager manager = getManager(request); if(manager instanceof ManagerBase) return ((ManagerBase) manager).getJvmRoute(); return null ; } /** * get Cluster DeltaManager * * @param request current request * @return manager or null */ protected Manager getManager(Request request) { Manager manager = request.getContext().getManager(); if (log.isDebugEnabled()) { if(manager != null) log.debug(sm.getString("jvmRoute.foundManager", manager, request.getContext().getName())); else log.debug(sm.getString("jvmRoute.notFoundManager", request.getContext().getName())); } return manager; } /** * @return Returns the cluster. */ @Override public CatalinaCluster getCluster() { return cluster; } /** * @param cluster The cluster to set. */ @Override public void setCluster(CatalinaCluster cluster) { this.cluster = cluster; } /** * Handle jvmRoute stickiness after tomcat instance failed. After this * correction a new Cookie send to client with new jvmRoute and the * SessionID change propagate to the other cluster nodes. 
* * @param request current request * @param sessionId * request SessionID from Cookie * @param localJvmRoute * local jvmRoute */ protected void handleJvmRoute( Request request, String sessionId, String localJvmRoute) { // get requested jvmRoute. String requestJvmRoute = null; int index = sessionId.indexOf("."); if (index > 0) { requestJvmRoute = sessionId .substring(index + 1, sessionId.length()); } if (requestJvmRoute != null && !requestJvmRoute.equals(localJvmRoute)) { if (log.isDebugEnabled()) { log.debug(sm.getString("jvmRoute.failover", requestJvmRoute, localJvmRoute, sessionId)); } Session catalinaSession = null; try { catalinaSession = getManager(request).findSession(sessionId); } catch (IOException e) { // Hups! } String id = sessionId.substring(0, index); String newSessionID = id + "." + localJvmRoute; // OK - turnover the session and inform other cluster nodes if (catalinaSession != null) { changeSessionID(request, sessionId, newSessionID, catalinaSession); numberOfSessions++; } else { try { catalinaSession = getManager(request).findSession(newSessionID); } catch (IOException e) { // Hups! } if (catalinaSession != null) { // session is rewrite at other request, rewrite this also changeRequestSessionID(request, sessionId, newSessionID); } else { if (log.isDebugEnabled()) { log.debug(sm.getString("jvmRoute.cannotFindSession",sessionId)); } } } } } /** * change session id and send to all cluster nodes * * @param request current request * @param sessionId * original session id * @param newSessionID * new session id for node migration * @param catalinaSession * current session with original session id */ protected void changeSessionID(Request request, String sessionId, String newSessionID, Session catalinaSession) { fireLifecycleEvent("Before session migration", catalinaSession); catalinaSession.setId(newSessionID, false); // FIXME: Why we remove change data from other running request? // setId also trigger resetDeltaRequest!! 
if (catalinaSession instanceof DeltaSession) ((DeltaSession) catalinaSession).resetDeltaRequest(); changeRequestSessionID(request, sessionId, newSessionID); // now sending the change to all other clusternodes! sendSessionIDClusterBackup(request,sessionId, newSessionID); fireLifecycleEvent("After session migration", catalinaSession); if (log.isDebugEnabled()) { log.debug(sm.getString("jvmRoute.changeSession", sessionId, newSessionID)); } } /** * Change Request Session id * @param request current request * @param sessionId * original session id * @param newSessionID * new session id for node migration */ protected void changeRequestSessionID(Request request, String sessionId, String newSessionID) { request.changeSessionId(newSessionID); // set original sessionid at request, to allow application detect the // change if (sessionIdAttribute != null && !"".equals(sessionIdAttribute)) { if (log.isDebugEnabled()) { log.debug(sm.getString("jvmRoute.set.orignalsessionid",sessionIdAttribute,sessionId)); } request.setAttribute(sessionIdAttribute, sessionId); } } /** * Send the changed Sessionid to all clusternodes. * * @see JvmRouteSessionIDBinderListener#messageReceived( * org.apache.catalina.ha.ClusterMessage) * @param sessionId * current failed sessionid * @param newSessionID * new session id, bind to the new cluster node */ protected void sendSessionIDClusterBackup(Request request, String sessionId, String newSessionID) { CatalinaCluster c = getCluster(); if (c != null && !(getManager(request) instanceof BackupManager)) { SessionIDMessage msg = new SessionIDMessage(); msg.setOrignalSessionID(sessionId); msg.setBackupSessionID(newSessionID); Context context = request.getContext(); msg.setContextName(context.getName()); msg.setHost(context.getParent().getName()); c.send(msg); } } /** * Start this component and implement the requirements * of {@link org.apache.catalina.util.LifecycleBase#startInternal()}. 
* * @exception LifecycleException if this component detects a fatal error * that prevents this component from being used */ @Override protected synchronized void startInternal() throws LifecycleException { if (cluster == null) { Container hostContainer = getContainer(); // compatibility with JvmRouteBinderValve version 1.1 // ( setup at context.xml or context.xml.default ) if (!(hostContainer instanceof Host)) { if (log.isWarnEnabled()) log.warn(sm.getString("jvmRoute.configure.warn")); hostContainer = hostContainer.getParent(); } if (hostContainer instanceof Host && ((Host) hostContainer).getCluster() != null) { cluster = (CatalinaCluster) ((Host) hostContainer).getCluster(); } else { Container engine = hostContainer.getParent() ; if (engine instanceof Engine && ((Engine) engine).getCluster() != null) { cluster = (CatalinaCluster) ((Engine) engine).getCluster(); } } } if (log.isInfoEnabled()) { log.info(sm.getString("jvmRoute.valve.started")); if (cluster == null) log.info(sm.getString("jvmRoute.noCluster")); } super.startInternal(); } /** * Stop this component and implement the requirements * of {@link org.apache.catalina.util.LifecycleBase#stopInternal()}. * * @exception LifecycleException if this component detects a fatal error * that prevents this component from being used */ @Override protected synchronized void stopInternal() throws LifecycleException { super.stopInternal(); cluster = null; numberOfSessions = 0; if (log.isInfoEnabled()) log.info(sm.getString("jvmRoute.valve.stopped")); } }
package org.usfirst.frc.team4678.robot; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.interfaces.Gyro; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import edu.wpi.cscore.UsbCamera; import edu.wpi.first.wpilibj.*; import com.ctre.CANTalon; import java.util.*; import org.usfirst.frc.team4678.robot.Baller.PanelStates; import edu.wpi.first.wpilibj.CameraServer; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ /** * TODO - Classes - Controls for Gear simplified - Tune PID for gear - * Switchable Drive * */ public class Robot extends IterativeRobot { /// Robot Port Mappings // Compressor public static final int COMPRESSOR = 0; // Motors public static final int LEFT_DRIVE_MOTOR = 1; // pwm 1 public static final int RIGHT_DRIVE_MOTOR = 0; // pwm 0 public static final int RIGHT_ENC_CHANNEL_A = 0; public static final int RIGHT_ENC_CHANNEL_B = 1; public static final int LEFT_ENC_CHANNEL_A = 2; public static final int LEFT_ENC_CHANNEL_B = 3; public static final int CLAW_PIVOT_MOTOR = 2; public static final int CLIMBER_MOTOR = 2; // pwm 2 public static final int BALL_PIVOT_MOTOR = 0; public static final int BALL_ROLLER_MOTOR = 1; public static final int WIND_MILL_SPIN_MOTOR = 3; // pwm 3 public static final int WIND_MILL_LIFT_MOTOR = 4; // pwm 4 public static final int PICKUP_PANEL_SERVO_LEFT_ID = 5; // pwm 5 public static final int PICKUP_PANEL_SERVO_RIGHT_ID = 6; // pwm 6 //public static final int SHOOTER_MOTOR = 3; //public static final int ELEVATOR_MOTOR = 7; //pwm 7 public static int drivingDirection = 0; public static final int gearSensor1 = 2; public static final int gearSensor2 = 3; public static boolean GOSCORE = false; // Pneumatics public static final int 
PCM = 0; public static final int LOW_GEAR = 2; public static final int HIGH_GEAR = 3; public static final int CLAW_RETRACT = 1; public static final int CLAW_EXTEND = 0; public static final int HOPPER_OPEN = 4; public static final int HOPPER_CLOSE = 5; public static final boolean DEBUG = true; // Controllers public static final int DRIVERGAMEPAD = 0; public static final int OPERATORGAMEPAD = 1; public static final int SELECTORSWITCHES = 2; // PIDConstants // Claw public static final double clawPIDP = 4; public static final double clawPIDI = 0; public static final double clawPIDD = 0; // GamePadMapping // Driver public static final int LEFT_AXISX = 0; public static final int LEFT_AXISY = 1; public static final int RIGHT_AXISX = 2; public static final int RIGHT_AXISY = 3; public static final int PICKUP_BTN = 4; public static final int CLAMP_BTN = 3; public static final int READY_TO_SCORE_BTN = 2; public static final int PLACE_BTN = 1; public static final int SHIFT_UP_BTN = 6; public static final int SHIFT_DOWN_BTN = 5; public static final int CLIMB_FAST_BTN = 7; public static final int CLIMB_SLOW_BTN = 8; public static String autoModes[] = { "Do Nothing", "Mode 1", "Mode 2", "Mode 3", "Mode 4", "Mode 5", "Mode 6", "Mode 7", "Mode 8", "Mode 9", "Mode 10" }; // Operator // DriveTrain // Controllers public static Joystick driverGamePad; public static Joystick operatorGamePad; public static Joystick selectorswitches; // Claw public static DoubleSolenoid clawGrabber; public static CANTalon clawPivot; // ROBOT CLASSES public static Climber climber; public static GearClaw claw; public static DriveTrain driveTrain; public static Baller baller; // Camera public static UsbCamera camera; public static PowerDistributionPanel pdp; public static int gdist = 0; public static boolean oscillate = false; // State Machine Enums // DriveStateMachine /** * This function is run when the robot is first started up and should be * used for any initialization code. 
*/ public static int autoMode = 0; public static int teleIterations = 0; public static int autoIterations = 0; public static int secondndGearDistance = 0; //public static String hopperState = (baller.) public static AutoState nothing; public static ArrayList<AutoState> nothingList; public static AutoMode nothingAuto; public static AutoState middleGearAutoState1; public static AutoState middleGearAutoState2; public static AutoState middleGearAutoState3; public static AutoState middleGearAutoState4; public static ArrayList<AutoState> middleGearAutoArrayList; public static AutoMode middleGearAuto; public static AutoState middleGearToRightGearAutoState1; public static AutoState middleGearToRightGearAutoState2; public static AutoState middleGearToRightGearAutoState3; public static AutoState middleGearToRightGearAutoState4; public static AutoState middleGearToRightGearAutoState5; public static AutoState middleGearToRightGearAutoState6; public static AutoState middleGearToRightGearAutoState7; public static AutoState middleGearToRightGearAutoState8; public static AutoState middleGearToRightGearAutoState9; public static AutoState middleGearToRightGearAutoState10; public static AutoState middleGearToRightGearAutoState11; public static ArrayList<AutoState> middleGearToRightGearAutoArrayList; public static AutoMode middleGearToRightGearAuto; public static AutoState middleGearToLeftGearAutoState1; public static AutoState middleGearToLeftGearAutoState2; public static AutoState middleGearToLeftGearAutoState3; public static AutoState middleGearToLeftGearAutoState4; public static AutoState middleGearToLeftGearAutoState5; public static AutoState middleGearToLeftGearAutoState6; public static AutoState middleGearToLeftGearAutoState7; public static AutoState middleGearToLeftGearAutoState8; public static AutoState middleGearToLeftGearAutoState9; public static AutoState middleGearToLeftGearAutoState10; public static AutoState middleGearToLeftGearAutoState11; public static 
ArrayList<AutoState> middleGearToLeftGearAutoArrayList; public static AutoMode middleGearToLeftGearAuto; public static AutoState rightGearAutoState1; public static AutoState rightGearAutoState2; public static AutoState rightGearAutoState3; public static AutoState rightGearAutoState4; public static AutoState rightGearAutoState5; public static AutoState rightGearAutoState6; public static ArrayList<AutoState> rightGearAutoArrayList; public static AutoMode rightGearAuto; public static AutoState leftGearAutoState1; public static AutoState leftGearAutoState2; public static AutoState leftGearAutoState3; public static AutoState leftGearAutoState4; public static AutoState leftGearAutoState5; public static AutoState leftGearAutoState6; public static ArrayList<AutoState> leftGearAutoArrayList; public static AutoMode leftGearAuto; public static AutoState leftToMiddleStraightAutoState1; public static AutoState leftToMiddleStraightAutoState2; public static AutoState leftToMiddleStraightAutoState3; public static AutoState leftToMiddleStraightAutoState4; public static AutoState leftToMiddleStraightAutoState5; public static AutoState leftToMiddleStraightAutoState6; public static AutoState leftToMiddleStraightAutoState7; public static AutoState leftToMiddleStraightAutoState8; public static ArrayList<AutoState> leftGearToMiddleStraightAutoArrayList; public static AutoMode leftGearToMiddleStraightAuto; public static AutoState leftToMiddleCrossAutoState1; public static AutoState leftToMiddleCrossAutoState2; public static AutoState leftToMiddleCrossAutoState3; public static AutoState leftToMiddleCrossAutoState4; public static AutoState leftToMiddleCrossAutoState5; public static AutoState leftToMiddleCrossAutoState6; public static AutoState leftToMiddleCrossAutoState7; public static AutoState leftToMiddleCrossAutoState8; public static ArrayList<AutoState> leftGearToMiddleCrossAutoArrayList; public static AutoMode leftGearToMiddleCrossAuto; public static AutoState leftToBoilerAutoState1; 
public static AutoState leftToBoilerAutoState2; public static AutoState leftToBoilerAutoState3; public static AutoState leftToBoilerAutoState4; public static AutoState leftToBoilerAutoState5; public static AutoState leftToBoilerAutoState6; public static AutoState leftToBoilerAutoState7; public static AutoState leftToBoilerAutoState8; public static ArrayList<AutoState> leftGearToBoilerAutoArrayList; public static AutoMode leftGearToBoilerAuto; public static AutoState rightToMiddleStraightAutoState1; public static AutoState rightToMiddleStraightAutoState2; public static AutoState rightToMiddleStraightAutoState3; public static AutoState rightToMiddleStraightAutoState4; public static AutoState rightToMiddleStraightAutoState5; public static AutoState rightToMiddleStraightAutoState6; public static AutoState rightToMiddleStraightAutoState7; public static AutoState rightToMiddleStraightAutoState8; public static ArrayList<AutoState> rightGearToMiddleStraightAutoArrayList; public static AutoMode rightGearToMiddleStraightAuto; public static AutoState rightToMiddleCrossAutoState1; public static AutoState rightToMiddleCrossAutoState2; public static AutoState rightToMiddleCrossAutoState3; public static AutoState rightToMiddleCrossAutoState4; public static AutoState rightToMiddleCrossAutoState5; public static AutoState rightToMiddleCrossAutoState6; public static AutoState rightToMiddleCrossAutoState7; public static AutoState rightToMiddleCrossAutoState8; public static ArrayList<AutoState> rightGearToMiddleCrossAutoArrayList; public static AutoMode rightGearToMiddleCrossAuto; public static AutoState rightToBoilerAutoState1; public static AutoState rightToBoilerAutoState2; public static AutoState rightToBoilerAutoState3; public static AutoState rightToBoilerAutoState4; public static AutoState rightToBoilerAutoState5; public static AutoState rightToBoilerAutoState6; public static AutoState rightToBoilerAutoState7; public static AutoState rightToBoilerAutoState8; public static 
ArrayList<AutoState> rightGearToBoilerAutoArrayList; public static AutoMode rightGearToBoilerAuto; public static AutoMode selectedAutoMode; public static String autoName = "Nothing"; public static boolean pickupBtnHeld = false; public static boolean clampBtnHeld = false; public static TwoGearAuto twoGearAuto; public void autoModeAssemble(){ twoGearAuto = new TwoGearAuto(); middleGearAutoState1 = new AutoState(0,0,GearClaw.states.LIFT, 30, this); middleGearAutoState2 = new AutoState(-230,-230, 0.75, 15,25,0.5,0.4, GearClaw.states.READYTOSCORE, this); middleGearAutoState3 = new AutoState(0,0,GearClaw.states.SCORE,15, this); middleGearAutoState4 = new AutoState(-1500, 0,GearClaw.states.SCORE,0, this); middleGearAutoArrayList = new ArrayList<AutoState>(); middleGearAutoArrayList.add(middleGearAutoState1); middleGearAutoArrayList.add(middleGearAutoState2); middleGearAutoArrayList.add(middleGearAutoState3); middleGearAutoArrayList.add(middleGearAutoState4); middleGearAuto = new AutoMode(middleGearAutoArrayList); middleGearToRightGearAutoState1 = middleGearAutoState1; middleGearToRightGearAutoState2 = middleGearAutoState2; middleGearToRightGearAutoState3 = middleGearAutoState3; middleGearToRightGearAutoState4 = middleGearAutoState4; middleGearToRightGearAutoState5 = new AutoState(-180,-180, 0.75, 15,25,0.5,0.4, GearClaw.states.READYTOSCORE, this); middleGearToRightGearAutoState6 = new AutoState(0,90, GearClaw.states.PICKUP, 0, this); middleGearToRightGearAutoState7 = new AutoState(twoGearAuto, this); middleGearToRightGearAutoState8 = new AutoState(0,90, GearClaw.states.READYTOSCORE, 0, this); middleGearToRightGearAutoState9 = new AutoState(180,170, 0.75, 15, 25, 0.5, 0.4, GearClaw.states.READYTOSCORE, this); middleGearToRightGearAutoState10 = new AutoState(0,0,GearClaw.states.SCORE,15, this); middleGearToRightGearAutoState11 = new AutoState(-1500, 0,GearClaw.states.SCORE,0, this); middleGearToRightGearAutoArrayList = new ArrayList<AutoState>(); 
middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState1); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState2); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState3); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState4); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState5); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState6); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState7); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState8); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState9); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState10); middleGearToRightGearAutoArrayList.add(middleGearToRightGearAutoState11); middleGearToRightGearAuto = new AutoMode(middleGearToRightGearAutoArrayList); middleGearToLeftGearAutoState1 = middleGearAutoState1; middleGearToLeftGearAutoState2 = middleGearAutoState2; middleGearToLeftGearAutoState3 = middleGearAutoState3; middleGearToLeftGearAutoState4 = middleGearAutoState4; middleGearToLeftGearAutoState5 = new AutoState(-180,-180, 0.75, 15,25,0.5,0.4, GearClaw.states.READYTOSCORE, this); middleGearToLeftGearAutoState6 = new AutoState(0,90, GearClaw.states.PICKUP, 0, this); middleGearToLeftGearAutoState7 = new AutoState(twoGearAuto, this); middleGearToLeftGearAutoState8 = new AutoState(0,90, GearClaw.states.READYTOSCORE, 0, this); middleGearToLeftGearAutoState9 = new AutoState(180,170, 0.75, 15, 25, 0.5, 0.4, GearClaw.states.READYTOSCORE, this); middleGearToLeftGearAutoState10 = new AutoState(0,0,GearClaw.states.SCORE,15, this); middleGearToLeftGearAutoState11 = new AutoState(-1500, 0,GearClaw.states.SCORE,0, this); middleGearToLeftGearAutoArrayList = new ArrayList<AutoState>(); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState1); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState2); 
middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState3); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState4); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState5); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState6); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState7); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState8); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState9); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState10); middleGearToLeftGearAutoArrayList.add(middleGearToLeftGearAutoState11); middleGearToLeftGearAuto = new AutoMode(middleGearToLeftGearAutoArrayList); nothing = new AutoState(0,0, this); nothingList = new ArrayList<AutoState>(); nothingList.add(nothing); nothingAuto = new AutoMode(nothingList); selectedAutoMode = nothingAuto; rightGearAutoState1 = new AutoState(0,0,GearClaw.states.LIFT, 30, this); //rightGearAutoState2 = new AutoState(8200,0,GearClaw.states.LIFT, 0, this); rightGearAutoState2 = new AutoState(-340,-270, GearClaw.states.READYTOSCORE, this); //rightGearAutoState3 = new AutoState(0,-45, GearClaw.states.READYTOSCORE, 0 ,this); rightGearAutoState3 = new AutoState(1000,0,GearClaw.states.READYTOSCORE, 0, this); rightGearAutoState4 = new AutoState(0,0,GearClaw.states.SCORE, 15, this); rightGearAutoState5 = new AutoState(-2400,0,GearClaw.states.SCORE, 0, this); rightGearAutoArrayList = new ArrayList<AutoState>(); rightGearAutoArrayList.add(rightGearAutoState1); rightGearAutoArrayList.add(rightGearAutoState2); rightGearAutoArrayList.add(rightGearAutoState3); rightGearAutoArrayList.add(rightGearAutoState4); rightGearAutoArrayList.add(rightGearAutoState5); //rightGearAutoArrayList.add(rightGearAutoState6); rightGearAuto = new AutoMode(rightGearAutoArrayList); rightToMiddleStraightAutoState1 = rightGearAutoState1; rightToMiddleStraightAutoState2 = rightGearAutoState2; rightToMiddleStraightAutoState3 = 
rightGearAutoState3; rightToMiddleStraightAutoState4 = rightGearAutoState4; rightToMiddleStraightAutoState5 = rightGearAutoState5; //rightToMiddleAutoState6 = rightGearAutoState6; rightToMiddleStraightAutoState6 = new AutoState(0,53,GearClaw.states.LIFT, 0, this); rightToMiddleStraightAutoState7 = new AutoState(-950,-900,this); rightGearToMiddleStraightAutoArrayList = new ArrayList<AutoState>(); rightGearToMiddleStraightAutoArrayList.add(rightToMiddleStraightAutoState1); rightGearToMiddleStraightAutoArrayList.add(rightToMiddleStraightAutoState2); rightGearToMiddleStraightAutoArrayList.add(rightToMiddleStraightAutoState3); rightGearToMiddleStraightAutoArrayList.add(rightToMiddleStraightAutoState4); rightGearToMiddleStraightAutoArrayList.add(rightToMiddleStraightAutoState5); rightGearToMiddleStraightAutoArrayList.add(rightToMiddleStraightAutoState6); rightGearToMiddleStraightAutoArrayList.add(rightToMiddleStraightAutoState7); //rightGearToMiddleAutoArrayList.add(rightToMiddleAutoState8); rightGearToMiddleStraightAuto = new AutoMode(rightGearToMiddleStraightAutoArrayList); rightToMiddleCrossAutoState1 = rightGearAutoState1; rightToMiddleCrossAutoState2 = rightGearAutoState2; rightToMiddleCrossAutoState3 = rightGearAutoState3; rightToMiddleCrossAutoState4 = rightGearAutoState4; rightToMiddleCrossAutoState5 = rightGearAutoState5; //rightToMiddleAutoState6 = rightGearAutoState6; rightToMiddleCrossAutoState6 = new AutoState(0,42,GearClaw.states.LIFT, 0, this); rightToMiddleCrossAutoState7 = new AutoState(-850,-700,this); rightToMiddleCrossAutoState8 = new AutoState(-275,-350,this); rightGearToMiddleCrossAutoArrayList = new ArrayList<AutoState>(); rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState1); rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState2); rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState3); rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState4); 
rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState5); rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState6); rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState7); rightGearToMiddleCrossAutoArrayList.add(rightToMiddleCrossAutoState8); rightGearToMiddleCrossAuto = new AutoMode(rightGearToMiddleCrossAutoArrayList); rightToBoilerAutoState1 = rightGearAutoState1; rightToBoilerAutoState2 = rightGearAutoState2; rightToBoilerAutoState3 = rightGearAutoState3; rightToBoilerAutoState4 = rightGearAutoState4; rightToBoilerAutoState5 = rightGearAutoState5; rightToBoilerAutoState6 = new AutoState(255, 255, this); //rightToMiddleAutoState6 = rightGearAutoState6; rightToBoilerAutoState7 = new AutoState(-0.25,0.25,5,this); //rightToBoilerAutoState7 = new AutoState(185,215,Baller.autoStates.DEPLOY,this); rightToBoilerAutoState8 = new AutoState(Baller.autoStates.SHOOT, this); rightGearToBoilerAutoArrayList = new ArrayList<AutoState>(); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState1); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState2); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState3); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState4); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState5); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState6); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState7); rightGearToBoilerAutoArrayList.add(rightToBoilerAutoState8); rightGearToBoilerAuto = new AutoMode(rightGearToBoilerAutoArrayList); leftGearAutoState1 = new AutoState(0,0,GearClaw.states.LIFT, 30, this); leftGearAutoState2 = new AutoState(-270,-340, GearClaw.states.READYTOSCORE, this); //leftGearAutoState3 = new AutoState(0,5, GearClaw.states.READYTOSCORE, 0 ,this); leftGearAutoState3 = new AutoState(1000,0,GearClaw.states.READYTOSCORE, 0, this); leftGearAutoState4 = new AutoState(0,0,GearClaw.states.SCORE, 15, this); leftGearAutoState5 = new AutoState(-2400,0,GearClaw.states.SCORE, 0, 
this); leftGearAutoArrayList = new ArrayList<AutoState>(); leftGearAutoArrayList.add(leftGearAutoState1); leftGearAutoArrayList.add(leftGearAutoState2); leftGearAutoArrayList.add(leftGearAutoState3); leftGearAutoArrayList.add(leftGearAutoState4); leftGearAutoArrayList.add(leftGearAutoState5); //leftGearAutoArrayList.add(leftGearAutoState6); leftGearAuto = new AutoMode(leftGearAutoArrayList); leftToMiddleStraightAutoState1 = leftGearAutoState1; leftToMiddleStraightAutoState2 = leftGearAutoState2; leftToMiddleStraightAutoState3 = leftGearAutoState3; leftToMiddleStraightAutoState4 = leftGearAutoState4; leftToMiddleStraightAutoState5 = leftGearAutoState5; //leftToMiddleAutoState6 = leftGearAutoState6; leftToMiddleStraightAutoState6 = new AutoState(0,-53,GearClaw.states.LIFT, 0, this); leftToMiddleStraightAutoState7 = new AutoState(-900,-950, this); leftGearToMiddleStraightAutoArrayList = new ArrayList<AutoState>(); leftGearToMiddleStraightAutoArrayList.add(leftToMiddleStraightAutoState1); leftGearToMiddleStraightAutoArrayList.add(leftToMiddleStraightAutoState2); leftGearToMiddleStraightAutoArrayList.add(leftToMiddleStraightAutoState3); leftGearToMiddleStraightAutoArrayList.add(leftToMiddleStraightAutoState4); leftGearToMiddleStraightAutoArrayList.add(leftToMiddleStraightAutoState5); leftGearToMiddleStraightAutoArrayList.add(leftToMiddleStraightAutoState6); leftGearToMiddleStraightAutoArrayList.add(leftToMiddleStraightAutoState7); //leftGearToMiddleAutoArrayList.add(leftToMiddleAutoState8); leftGearToMiddleStraightAuto = new AutoMode(leftGearToMiddleStraightAutoArrayList); leftToMiddleCrossAutoState1 = leftGearAutoState1; leftToMiddleCrossAutoState2 = leftGearAutoState2; leftToMiddleCrossAutoState3 = leftGearAutoState3; leftToMiddleCrossAutoState4 = leftGearAutoState4; leftToMiddleCrossAutoState5 = leftGearAutoState5; //leftToMiddleAutoState6 = leftGearAutoState6; leftToMiddleCrossAutoState6 = new AutoState(0,-42,GearClaw.states.LIFT, 0, this); 
leftToMiddleCrossAutoState7 = new AutoState(-700,-850, this); leftToMiddleCrossAutoState8 = new AutoState(-350, -275, this); leftGearToMiddleCrossAutoArrayList = new ArrayList<AutoState>(); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState1); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState2); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState3); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState4); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState5); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState6); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState7); leftGearToMiddleCrossAutoArrayList.add(leftToMiddleCrossAutoState8); //leftGearToMiddleAutoArrayList.add(leftToMiddleAutoState8); leftGearToMiddleCrossAuto = new AutoMode(leftGearToMiddleCrossAutoArrayList); leftToBoilerAutoState1 = leftGearAutoState1; leftToBoilerAutoState2 = leftGearAutoState2; leftToBoilerAutoState3 = leftGearAutoState3; leftToBoilerAutoState4 = leftGearAutoState4; leftToBoilerAutoState5 = leftGearAutoState5; leftToBoilerAutoState6 = new AutoState(255, 255, this); //leftToBoilerAutoState6 = new AutoState(GearClaw.states.LIFT, this); leftToBoilerAutoState7 = new AutoState(-0.25,0.25,5,this); leftToBoilerAutoState8 = new AutoState(Baller.autoStates.SHOOT, this); leftGearToBoilerAutoArrayList = new ArrayList<AutoState>(); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState1); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState2); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState3); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState4); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState5); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState6); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState7); leftGearToBoilerAutoArrayList.add(leftToBoilerAutoState8); leftGearToBoilerAuto = new AutoMode(leftGearToBoilerAutoArrayList); } @Override public void robotInit() { 
controllerInit(); driveTrain = new DriveTrain(LEFT_DRIVE_MOTOR, RIGHT_DRIVE_MOTOR, COMPRESSOR, PCM, HIGH_GEAR, LOW_GEAR, driverGamePad, RIGHT_ENC_CHANNEL_A, RIGHT_ENC_CHANNEL_B, LEFT_ENC_CHANNEL_A, LEFT_ENC_CHANNEL_B); climber = new Climber(CLIMBER_MOTOR); claw = new GearClaw(PCM, CLAW_EXTEND, CLAW_RETRACT, CLAW_PIVOT_MOTOR, clawPIDP, clawPIDI, clawPIDD, gearSensor1, gearSensor2, this); baller = new Baller(BALL_PIVOT_MOTOR, BALL_ROLLER_MOTOR, WIND_MILL_SPIN_MOTOR, WIND_MILL_LIFT_MOTOR, PCM, HOPPER_OPEN,HOPPER_CLOSE );// SHOOTER_MOTOR, ELEVATOR_MOTOR); camera = CameraServer.getInstance().startAutomaticCapture(0); camera.setResolution(640, 480); camera.setFPS(30); pdp = new PowerDistributionPanel(0); autoModeAssemble(); resetSensors(); leftGearAuto.currentState = 0; smartDashboard(); } @Override public void autonomousInit() { selectedAutoMode.currentState = 0; driveTrain.shiftDown(); resetSensors(); } /** * This function is called periodically during autonomous */ @Override public void autonomousPeriodic() { smartDashboard(); autoIterations++; SmartDashboard.putNumber("Right Encoder", driveTrain.rightEncoder.get()); //SmartDashboard.putNumber("Shooter Speed", baller.getShooterSpeed()); SmartDashboard.putNumber("Left Encoder", driveTrain.leftEncoder.get()); SmartDashboard.putNumber("Auto State", rightGearAuto.currentState); if(autoName == "Nothing"){ }else{ selectedAutoMode.runMode(); } //driveTrain.pidTurn(90); //driveTrain.pidEncTurn(500); } /** * This function is called periodically during operator control */ @Override public void teleopInit() { driveTrain.setState(DriveTrain.states.JOYSTICKDRIVE); } @Override public void teleopPeriodic() { baller.hopperDashboardPrint(); SmartDashboard.putNumber("Tele Iterations", teleIterations); SmartDashboard.putNumber("Auto Iterations", autoIterations); SmartDashboard.putString("Hopper: ", Baller.hopperPrint); teleIterations++; driverControls(); //baller.printShooterSpeed(); operatorControls(); //operatorBTNpadControls(); 
driveTrain.stateMachine(); claw.stateMachine(); smartDashboard(); // if (oscillate) { // baller.oscillate(); // } } @Override public void disabledPeriodic() { if(readSelector1() == 0){ if(readSelector2() == 0){ selectedAutoMode = leftGearAuto; autoName = "leftGearAuto"; }else if(readSelector2() == 1){ selectedAutoMode = middleGearAuto; autoName = "middleGearAuto"; }else if(readSelector2() == 2){ selectedAutoMode = rightGearAuto; autoName = "rightGearAuto"; }else if(readSelector2() == 3){ selectedAutoMode = leftGearToMiddleStraightAuto; autoName = "leftGearToMiddleStraightAuto"; }else if(readSelector2() == 4){ selectedAutoMode = rightGearToMiddleCrossAuto; autoName = "rightGearToMiddleCrossAuto"; }else if(readSelector2() == 5){ selectedAutoMode = rightGearToBoilerAuto; autoName = "rightGearToBoilerAuto"; }else if(readSelector2() == 6){ }else if(readSelector2() == 7){ } }else if(readSelector1() == 1){ if(readSelector2() == 0){ selectedAutoMode = leftGearAuto; autoName = "leftGearAuto"; }else if(readSelector2() == 1){ selectedAutoMode = middleGearAuto; autoName = "middleGearAuto"; }else if(readSelector2() == 2){ selectedAutoMode = rightGearAuto; autoName = "rightGearAuto"; }else if(readSelector2() == 3){ selectedAutoMode = leftGearToMiddleCrossAuto; autoName = "leftGearToMiddleCrossAuto"; }else if(readSelector2() == 4){ selectedAutoMode = rightGearToMiddleStraightAuto; autoName = "rightGearToMiddleStraightAuto"; }else if(readSelector2() == 5){ selectedAutoMode = leftGearToBoilerAuto; autoName = "leftGearToBoilerAuto"; }else if(readSelector2() == 6){ }else if(readSelector2() == 7){ } }else{ autoName = "Nothing"; } if(driverGamePad.getRawButton(1)){ resetSensors(); } smartDashboard(); } /** * This function is called periodically during test mode */ @Override public void testPeriodic() { } public void controllerInit() { driverGamePad = new Joystick(DRIVERGAMEPAD); operatorGamePad = new Joystick(OPERATORGAMEPAD); selectorswitches = new Joystick(SELECTORSWITCHES); } public 
void driverControls() { //System.out.println("Fish"); if(claw.currentState != GearClaw.states.PICKUP){ GOSCORE = true; }else{ GOSCORE = false; } if (driverGamePad.getRawButton(SHIFT_DOWN_BTN)) { driveTrain.shiftDown(); } if (driverGamePad.getRawButton(SHIFT_UP_BTN)) { driveTrain.shiftUp(); } if (driverGamePad.getRawButton(PICKUP_BTN) && baller.getCanLowerClawStatus() == true && !pickupBtnHeld) { claw.setState(GearClaw.states.PICKUP); } if (driverGamePad.getRawButton(3)) { claw.setState(GearClaw.states.READYTOSCORE); //driveTrain.pidDrive(5000, 1); } if (driverGamePad.getRawButton(READY_TO_SCORE_BTN)) { claw.setState(GearClaw.states.SCORE); //driveTrain.pidTurn(90, 1); } if (driverGamePad.getRawButton(1)) { claw.setState(GearClaw.states.CLAMP); //resetSensors(); }else if(!driverGamePad.getRawButton(1) && clampBtnHeld){ claw.toGoUp = true; } if(driverGamePad.getRawAxis(DriveTrain.AXIS +1) < -0.3){ drivingDirection = 1; }else if(driverGamePad.getRawAxis(DriveTrain.AXIS +1) > 0.3){ drivingDirection = -1; } if(driverGamePad.getRawButton(7) && drivingDirection == 1){ driveTrain.shiftUp(); driveTrain.setState(DriveTrain.states.PIVOTLEFTGEARFORWARD); }else if(driverGamePad.getRawButton(8) && drivingDirection == 1){ driveTrain.shiftUp(); driveTrain.setState(DriveTrain.states.PIVOTRIGHTGEARFORWARD); }else if(driverGamePad.getRawButton(7) && drivingDirection == -1){ driveTrain.shiftUp(); driveTrain.setState(DriveTrain.states.PIVOTLEFTBALLFORWARD); }else if(driverGamePad.getRawButton(8) && drivingDirection == -1){ driveTrain.shiftUp(); driveTrain.setState(DriveTrain.states.PIVOTRIGHTBALLFORWARD); }else if(driverGamePad.getRawButton(1) || driverGamePad.getRawButton(3) || driverGamePad.getRawButton(2)){ driveTrain.setState(DriveTrain.states.AUTO); }else { driveTrain.setState(DriveTrain.states.JOYSTICKDRIVE); } // if (driverGamePad.getPOV() == 180) { // baller.pickup(); // } // if (driverGamePad.getPOV() == 0) { // baller.enclose(); // } // if (driverGamePad.getPOV() == 90) { // 
baller.stopDown(); // } // if (driverGamePad.getPOV() == 270) { // baller.release(); // } /*if(driverGamePad.getRawButton(8)){ claw.setState(GearClaw.states.READYTOSCORE); }*/ if(driverGamePad.getRawButton(PICKUP_BTN)){ pickupBtnHeld = true; }else{ pickupBtnHeld = false; } if(driverGamePad.getRawButton(1)){ clampBtnHeld = true; }else{ clampBtnHeld = false; } } // public void operatorBTNpadControls(){ // if(operator16.getRawButton(1)) { // reset GotoDistance code // driveTrain.resetGoToDistanceState(); // gdist = 0; // allow gotodistance to run // driveTrain.leftEncoder.reset(); // driveTrain.rightEncoder.reset(); // } // if(operator16.getRawButton(2)) { // Test goto distance code // driveTrain.setState(DriveTrain.states.AUTO); // Disable the joystick drive control // if (gdist == 0) { // if(driveTrain.goToDistance(-260, -330, 1, 15, 25, 0.50, 0.50)) { // rightCentimeters, leftCentimeters, power, rampUpDistance, // gdist = 1; // end go to Distance when we get true // } // rampDownDistance, startingPower, endingPower // } // } // else if (DriveTrain.currentState == DriveTrain.states.AUTO) // { // driveTrain.setState(DriveTrain.states.JOYSTICKDRIVE); // back to JOYSTICK when we let go of the button // } // if (operator16.getRawButton(3)) { // //baller.hopperExtend(); // //oscillate = true; // } // if (operator16.getRawButton(4)) { // //oscillate = false; // } // if (operator16.getRawButton(1)) { // claw.setState(GearClaw.states.HOLD); // ; // } // if (operator16.getRawButton(16)){ // baller.lowGoalReady(); // } // if (operator16.getRawButton(14)){ // baller.lowGoalHopper(); // } // if (operator16.getRawButton(15)){ // baller.lowGoalShoot(); // } // if (operator16.getRawButton(10)){ // baller.lowGoalReverse(); // } // if (operator16.getRawButton(13)){ // baller.lowGoalStop(); // } // if (operator16.getRawButton(12)){ // baller.pickup(); // } // if (operator16.getRawButton(9)){ // baller.enclose(); // } // if (operator16.getRawButton(8)){ // baller.stopDown(); // } // } 
public void operatorControls() { // System.out.println("HELLO"); // if(operatorGamePad.getRawButton(1) && claw.canOpenPanel == true) { // //can only lower mills if the claw is in an upper position (same // situation as the pickup panel) // baller.lowerMills(); // } if ((operatorGamePad.getRawButton(1))&&(claw.clawIsInsideBumper())) { // can only lower mills if the // claw is in an upper position // (same situation as the pickup // panel) // System.out.println("Trying to lower the wind mills!!!!"); baller.lowerMills(); } if (operatorGamePad.getRawButton(4)) { baller.stopMills(); // stops the mills from spinning baller.liftMills(); // lifts mills to rest position } if (operatorGamePad.getRawButton(3)) { baller.spinMillOut(); // spins the wind mills away from the robot } if (operatorGamePad.getRawButton(2)) { baller.spinMillIn(); // spins wind mills towards the intake } if ((operatorGamePad.getRawButton(5))&&(claw.clawIsInsideBumper())) { //System.out.println("hopperExtend"); baller.hopperExtend(); //System.out.println("deploy is allowed, opening pickup panel!"); } //System.out.println("Cat"); if (operatorGamePad.getRawButton(6)) { // always allowed to do this so // no need for extra conditions //System.out.println("hopperRetract"); //System.out.println("Dog"); baller.hopperRetract(); //SmartDashboard.putNumber("Operator Controls WOrking", 13); // baller.retractPickUpPanel(); //closes pickup panel } if (operatorGamePad.getRawButton(7)) { // if (driveTrain.goToDistance(100, 100, 0.5, 20, 20, 0.3, 0.2)) { // driveTrain.stopDriveMotors(); // driveTrain.resetGoToDistanceState(); // } } if(operatorGamePad.getRawButton(12)){ resetSensors(); } //if(operatorGamePad.getRawButton(11)){ // claw.setState(GearClaw.states.LIFT); //} if (operatorGamePad.getRawButton(8)) { climber.climbFast(); } // } else if (operatorGamePad.getPOV() == 90) { // climber.climbMedium(); // } else if (operatorGamePad.getPOV() == 180) { // climber.climbSlow(); else { climber.climbStop(); } // Use 
joystick to operate the low goal shooting instead of 16 button pad // Left and Right to activate the agitator (right = fwd, left = rvs) // Up to run the roller in full speed feed into low goal // Down to run the roller in low speed, increasing speed the further the joystick // is moved in the downward direction // pressing the joystick down (button 11) to reverse the rollers, agitators to // run in the away from roller direction (reverse). // The agitators can be activate this way at any time. Other functions may // require some pre-requisite conditions // If door is closed, only the agitators will work. Otherwise, low goal // functions will be activated, provide the pivot is in the scoring position // If it's not, the up joystick function will send it to scoring position and // then allow low goal roller operation. // Testing shows we shouldn't do much of anything for joystick axis values // below 0.25. Fully diagonal positions do not return 1 for both axis, // more like 0.75 so anything over 0.5 should be good for full on. // Released position does not always provide a 0.0 return value but it will certainly // be less than 0.25 double ogpxaxis = operatorGamePad.getRawAxis(LEFT_AXISX); double ogpyaxis = operatorGamePad.getRawAxis(LEFT_AXISY); //System.out.println("ogpxaxis = "+ogpxaxis+" ogpyaxis = "+ogpyaxis); if (ogpxaxis > 0.25) { // activate agitator based on x axis baller.agitate(1.0); } else if (ogpxaxis < -0.25) { baller.agitate(-1.0); } else { baller.agitate(0.0); // turn it off if joystick is in middle. } if ((Baller.panelState == PanelStates.DEPLOYED)&&(Baller.intakeState != Baller.IntakeStates.PICKUP)) { // Only when deployed // and not in PICKUP mode ... if (operatorGamePad.getRawButton(11)) { // If joystick pressed, this is reverse baller.lowGoalReverse(); baller.agitate(-1.0); } else { // other low goal stuff can happen as long as we're not pressing the joystick if (ogpyaxis < -0.5) { // When pressing up ... 
if (!baller.lowGoalIsReady()) { baller.lowGoalReady(); } else { baller.lowGoalShoot(); // shoot when ready. } } else if (ogpyaxis > 0.25) { // when joystick is moved down, vary speed of shooter from slow to fast. baller.lowGoalVarShoot(ogpyaxis * -35000); // will be from about 9000 to 35000 } else baller.lowGoalStop(); // stop rollers if joystick is put in center. } } if (Baller.panelState == PanelStates.DEPLOYED){ if (operatorGamePad.getPOV() == 90) { baller.pickup(); } if (operatorGamePad.getPOV() == 180) { baller.enclose(); } if (operatorGamePad.getPOV() == 270) { baller.stopDown(); } } } public int readSelector1() { // Read the left selector switch for auto configuration int rval; rval = 0; if (selectorswitches.getRawButton(14)) rval += 1; if (selectorswitches.getRawButton(15)) rval += 2; if (selectorswitches.getRawButton(16)) rval += 4; return(rval); } public int readSelector2() { // Read the right selector switch for auto configuration int rval; rval = 0; if (selectorswitches.getRawButton(13)) rval += 1; if (selectorswitches.getRawButton(12)) rval += 2; if (selectorswitches.getRawButton(11)) rval += 4; return(rval); } public void smartDashboard() { if (DEBUG) { SmartDashboard.putNumber("Right Encoder", driveTrain.rightEncoder.get()); //SmartDashboard.putNumber("Shooter Speed", baller.getShooterSpeed()); SmartDashboard.putNumber("Left Encoder", driveTrain.leftEncoder.get()); SmartDashboard.putNumber("gyro adr", driveTrain.gyro.getAngle()); SmartDashboard.putNumber("Claw Encoder", claw.pivotMotor.getPulseWidthPosition()); SmartDashboard.putNumber("Ball Pivot Encoder", baller.pivotMotor.getPulseWidthPosition()); SmartDashboard.putNumber("First climber motor", pdp.getCurrent(12)); SmartDashboard.putNumber("Second Climber motor", pdp.getCurrent(2)); // SmartDashboard.putNumber("Ball Roller Encoder", // baller.intakeMotor.getPulseWidthPosition()); // SmartDashboard.putNumber("Claw Encoder 2", // clawPivot.getEncPosition()); SmartDashboard.putString("Auto Mode", 
autoName); // SmartDashboard.putNumber("Auto Mode", readSelector1()); SmartDashboard.putNumber("AutoState" , selectedAutoMode.currentState); SmartDashboard.putNumber("gear Right", claw.gearRight.getValue()); SmartDashboard.putNumber("gear Left", claw.gearLeft.getValue()); SmartDashboard.putBoolean("GoScore", GOSCORE); SmartDashboard.putNumber("NavX", driveTrain.navX.getAngle()); } else { } } public void resetSensors(){ driveTrain.leftEncoder.reset(); driveTrain.rightEncoder.reset(); driveTrain.gyro.reset(); driveTrain.navX.reset(); } }
package net.minecraft.block;

import com.google.common.base.Predicate;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.properties.PropertyBool;
import net.minecraft.block.properties.PropertyEnum;
import net.minecraft.block.state.BlockState;
import net.minecraft.block.state.IBlockState;
import net.minecraft.util.BlockPos;
import net.minecraft.world.World;

/**
 * Powered (golden) rail block. Decompiled/obfuscated source: field_176568_b
 * is the "shape" block-state property and field_176569_M the "powered"
 * property. Power propagates along a line of connected powered rails, up to
 * 8 rails away from a redstone source (see func_176566_a).
 */
public class BlockRailPowered extends BlockRailBase
{
    // "shape" property. The predicate rejects the four curved directions,
    // so a powered rail can only be laid straight (flat or ascending).
    public static final PropertyEnum field_176568_b = PropertyEnum.create("shape", BlockRailBase.EnumRailDirection.class, new Predicate()
    {
        private static final String __OBFID = "CL_00002080";

        public boolean func_180133_a(BlockRailBase.EnumRailDirection p_180133_1_)
        {
            // Accept everything except NE/NW/SE/SW curve shapes.
            return p_180133_1_ != BlockRailBase.EnumRailDirection.NORTH_EAST && p_180133_1_ != BlockRailBase.EnumRailDirection.NORTH_WEST && p_180133_1_ != BlockRailBase.EnumRailDirection.SOUTH_EAST && p_180133_1_ != BlockRailBase.EnumRailDirection.SOUTH_WEST;
        }

        public boolean apply(Object p_apply_1_)
        {
            return this.func_180133_a((BlockRailBase.EnumRailDirection)p_apply_1_);
        }
    });

    // "powered" boolean property.
    public static final PropertyBool field_176569_M = PropertyBool.create("powered");
    private static final String __OBFID = "CL_00000288";

    protected BlockRailPowered()
    {
        super(true);
        // Default state: straight north-south, unpowered.
        this.setDefaultState(this.blockState.getBaseState().withProperty(field_176568_b, BlockRailBase.EnumRailDirection.NORTH_SOUTH).withProperty(field_176569_M, Boolean.valueOf(false)));
    }

    /**
     * Searches one step along the rail line for power, recursing through
     * func_176567_a. p_176566_4_ selects the search direction along the line;
     * p_176566_5_ is the current recursion depth (power reaches at most 8
     * rails). Returns true if a powered, externally-driven rail is found.
     */
    protected boolean func_176566_a(World worldIn, BlockPos p_176566_2_, IBlockState p_176566_3_, boolean p_176566_4_, int p_176566_5_)
    {
        if (p_176566_5_ >= 8)
        {
            // Distance limit: power does not propagate past 8 rails.
            return false;
        }
        else
        {
            int var6 = p_176566_2_.getX();
            int var7 = p_176566_2_.getY();
            int var8 = p_176566_2_.getZ();
            // var9: whether it is also worth checking one block below the
            // stepped-to position (false when we just stepped up a slope).
            boolean var9 = true;
            BlockRailBase.EnumRailDirection var10 = (BlockRailBase.EnumRailDirection)p_176566_3_.getValue(field_176568_b);

            // Step var6/var7/var8 one block along the rail in the requested
            // direction; ascending shapes also adjust Y and normalize var10
            // to the corresponding flat direction for the neighbor check.
            switch (BlockRailPowered.SwitchEnumRailDirection.field_180121_a[var10.ordinal()])
            {
                case 1: // NORTH_SOUTH: step along Z
                    if (p_176566_4_)
                    {
                        ++var8;
                    }
                    else
                    {
                        --var8;
                    }

                    break;

                case 2: // EAST_WEST: step along X
                    if (p_176566_4_)
                    {
                        --var6;
                    }
                    else
                    {
                        ++var6;
                    }

                    break;

                case 3: // ASCENDING_EAST: stepping east also goes up one
                    if (p_176566_4_)
                    {
                        --var6;
                    }
                    else
                    {
                        ++var6;
                        ++var7;
                        var9 = false;
                    }

                    var10 = BlockRailBase.EnumRailDirection.EAST_WEST;
                    break;

                case 4: // ASCENDING_WEST: stepping west also goes up one
                    if (p_176566_4_)
                    {
                        --var6;
                        ++var7;
                        var9 = false;
                    }
                    else
                    {
                        ++var6;
                    }

                    var10 = BlockRailBase.EnumRailDirection.EAST_WEST;
                    break;

                case 5: // ASCENDING_NORTH
                    if (p_176566_4_)
                    {
                        ++var8;
                    }
                    else
                    {
                        --var8;
                        ++var7;
                        var9 = false;
                    }

                    var10 = BlockRailBase.EnumRailDirection.NORTH_SOUTH;
                    break;

                case 6: // ASCENDING_SOUTH
                    if (p_176566_4_)
                    {
                        ++var8;
                        ++var7;
                        var9 = false;
                    }
                    else
                    {
                        --var8;
                    }

                    var10 = BlockRailBase.EnumRailDirection.NORTH_SOUTH;
            }

            // Check the stepped-to position; failing that, check one block
            // down (a descending rail) unless we just ascended (var9 false).
            return this.func_176567_a(worldIn, new BlockPos(var6, var7, var8), p_176566_4_, p_176566_5_, var10) ? true : var9 && this.func_176567_a(worldIn, new BlockPos(var6, var7 - 1, var8), p_176566_4_, p_176566_5_, var10);
        }
    }

    /**
     * Checks whether the rail at p_176567_2_ continues the line in direction
     * p_176567_5_ and is powered — either directly by redstone or indirectly
     * via recursion back into func_176566_a (depth + 1).
     */
    protected boolean func_176567_a(World worldIn, BlockPos p_176567_2_, boolean p_176567_3_, int p_176567_4_, BlockRailBase.EnumRailDirection p_176567_5_)
    {
        IBlockState var6 = worldIn.getBlockState(p_176567_2_);

        if (var6.getBlock() != this)
        {
            // Not a powered rail: the line ends here.
            return false;
        }
        else
        {
            BlockRailBase.EnumRailDirection var7 = (BlockRailBase.EnumRailDirection)var6.getValue(field_176568_b);
            // Reject rails whose shape is perpendicular to the search line;
            // otherwise the rail conducts only if its "powered" property is
            // set AND it is either directly block-powered or fed further
            // down the line.
            return p_176567_5_ == BlockRailBase.EnumRailDirection.EAST_WEST && (var7 == BlockRailBase.EnumRailDirection.NORTH_SOUTH || var7 == BlockRailBase.EnumRailDirection.ASCENDING_NORTH || var7 == BlockRailBase.EnumRailDirection.ASCENDING_SOUTH) ? false : (p_176567_5_ == BlockRailBase.EnumRailDirection.NORTH_SOUTH && (var7 == BlockRailBase.EnumRailDirection.EAST_WEST || var7 == BlockRailBase.EnumRailDirection.ASCENDING_EAST || var7 == BlockRailBase.EnumRailDirection.ASCENDING_WEST) ? false : (((Boolean)var6.getValue(field_176569_M)).booleanValue() ? (worldIn.isBlockPowered(p_176567_2_) ? true : this.func_176566_a(worldIn, p_176567_2_, var6, p_176567_3_, p_176567_4_ + 1)) : false));
        }
    }

    /**
     * Recomputes the "powered" property from direct redstone power or the
     * rail line in both directions, and updates the block state + neighbors
     * when it changed. (Presumably the neighbor-changed hook — obfuscated.)
     */
    protected void func_176561_b(World worldIn, BlockPos p_176561_2_, IBlockState p_176561_3_, Block p_176561_4_)
    {
        boolean var5 = ((Boolean)p_176561_3_.getValue(field_176569_M)).booleanValue();
        boolean var6 = worldIn.isBlockPowered(p_176561_2_) || this.func_176566_a(worldIn, p_176561_2_, p_176561_3_, true, 0) || this.func_176566_a(worldIn, p_176561_2_, p_176561_3_, false, 0);

        if (var6 != var5)
        {
            worldIn.setBlockState(p_176561_2_, p_176561_3_.withProperty(field_176569_M, Boolean.valueOf(var6)), 3);
            worldIn.notifyNeighborsOfStateChange(p_176561_2_.offsetDown(), this);

            // Ascending shapes also notify the block above.
            if (((BlockRailBase.EnumRailDirection)p_176561_3_.getValue(field_176568_b)).func_177018_c())
            {
                worldIn.notifyNeighborsOfStateChange(p_176561_2_.offsetUp(), this);
            }
        }
    }

    /** Returns the shape property used by the BlockRailBase machinery. */
    public IProperty func_176560_l()
    {
        return field_176568_b;
    }

    /**
     * Convert the given metadata into a BlockState for this Block.
     * Low 3 bits = shape, bit 3 = powered.
     */
    public IBlockState getStateFromMeta(int meta)
    {
        return this.getDefaultState().withProperty(field_176568_b, BlockRailBase.EnumRailDirection.func_177016_a(meta & 7)).withProperty(field_176569_M, Boolean.valueOf((meta & 8) > 0));
    }

    /**
     * Convert the BlockState into the correct metadata value.
     * Low 3 bits = shape, bit 3 = powered.
     */
    public int getMetaFromState(IBlockState state)
    {
        byte var2 = 0;
        int var3 = var2 | ((BlockRailBase.EnumRailDirection)state.getValue(field_176568_b)).func_177015_a();

        if (((Boolean)state.getValue(field_176569_M)).booleanValue())
        {
            var3 |= 8;
        }

        return var3;
    }

    protected BlockState createBlockState()
    {
        return new BlockState(this, new IProperty[] {field_176568_b, field_176569_M});
    }

    /**
     * Compiler-generated enum-switch helper: maps EnumRailDirection ordinals
     * to the case labels used in func_176566_a. The try/catch blocks guard
     * against enum constants missing at link time (NoSuchFieldError).
     */
    static final class SwitchEnumRailDirection
    {
        static final int[] field_180121_a = new int[BlockRailBase.EnumRailDirection.values().length];
        private static final String __OBFID = "CL_00002079";

        static
        {
            try
            {
                field_180121_a[BlockRailBase.EnumRailDirection.NORTH_SOUTH.ordinal()] = 1;
            }
            catch (NoSuchFieldError var6)
            {
                ;
            }

            try
            {
                field_180121_a[BlockRailBase.EnumRailDirection.EAST_WEST.ordinal()] = 2;
            }
            catch (NoSuchFieldError var5)
            {
                ;
            }

            try
            {
                field_180121_a[BlockRailBase.EnumRailDirection.ASCENDING_EAST.ordinal()] = 3;
            }
            catch (NoSuchFieldError var4)
            {
                ;
            }

            try
            {
                field_180121_a[BlockRailBase.EnumRailDirection.ASCENDING_WEST.ordinal()] = 4;
            }
            catch (NoSuchFieldError var3)
            {
                ;
            }

            try
            {
                field_180121_a[BlockRailBase.EnumRailDirection.ASCENDING_NORTH.ordinal()] = 5;
            }
            catch (NoSuchFieldError var2)
            {
                ;
            }

            try
            {
                field_180121_a[BlockRailBase.EnumRailDirection.ASCENDING_SOUTH.ordinal()] = 6;
            }
            catch (NoSuchFieldError var1)
            {
                ;
            }
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.util.concurrent;

import org.elasticsearch.cluster.service.ClusterApplierService;
import org.elasticsearch.cluster.service.MasterService;

import javax.annotation.Nullable;

import org.elasticsearch.threadpool.ThreadPool;

import java.util.Objects;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.AbstractQueuedSynchronizer;

/**
 * A {@link Future} whose blocking waits are implemented on an
 * {@link AbstractQueuedSynchronizer} (in the style of Guava's AbstractFuture).
 * Subclasses complete it via {@link #set}, {@link #setException} or
 * {@link #cancel}, and may override {@link #done()} for completion callbacks.
 * Blocking getters assert (via {@link #blockingAllowed()}) that they are not
 * called on threads that must never block.
 */
public abstract class BaseFuture<V> implements Future<V> {

    private static final String BLOCKING_OP_REASON = "Blocking operation";

    /**
     * Synchronization control for AbstractFutures.
     */
    private final Sync<V> sync = new Sync<>();

    /*
     * Improve the documentation of when InterruptedException is thrown. Our
     * behavior matches the JDK's, but the JDK's documentation is misleading.
     */

    /**
     * {@inheritDoc}
     * <p>
     * The default {@link BaseFuture} implementation throws {@code
     * InterruptedException} if the current thread is interrupted before or during
     * the call, even if the value is already available.
     *
     * @throws InterruptedException if the current thread was interrupted before
     *                              or during the call (optional but recommended).
     * @throws CancellationException {@inheritDoc}
     */
    @Override
    public V get(long timeout, TimeUnit unit) throws InterruptedException,
        TimeoutException, ExecutionException {
        // A non-positive timeout cannot block, so the thread-check is skipped.
        assert timeout <= 0 || blockingAllowed();
        return sync.get(unit.toNanos(timeout));
    }

    /*
     * Improve the documentation of when InterruptedException is thrown. Our
     * behavior matches the JDK's, but the JDK's documentation is misleading.
     */

    /**
     * {@inheritDoc}
     * <p>
     * The default {@link BaseFuture} implementation throws {@code
     * InterruptedException} if the current thread is interrupted before or during
     * the call, even if the value is already available.
     *
     * @throws InterruptedException if the current thread was interrupted before
     *                              or during the call (optional but recommended).
     * @throws CancellationException {@inheritDoc}
     */
    @Override
    public V get() throws InterruptedException, ExecutionException {
        assert blockingAllowed();
        return sync.get();
    }

    // protected so that it can be overridden in specific instances
    // Assertion-only guard: forbids blocking on scheduler / cluster-applier /
    // master-service threads (only effective when -ea is enabled).
    protected boolean blockingAllowed() {
        // Crate change: blob module requires blocking operations on transport
        return ThreadPool.assertNotScheduleThread(BLOCKING_OP_REASON)
               && ClusterApplierService.assertNotClusterStateUpdateThread(BLOCKING_OP_REASON)
               && MasterService.assertNotMasterUpdateThread(BLOCKING_OP_REASON);
    }

    @Override
    public boolean isDone() {
        return sync.isDone();
    }

    @Override
    public boolean isCancelled() {
        return sync.isCancelled();
    }

    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        if (!sync.cancel()) {
            return false;
        }
        // done() runs before interruptTask(), matching the AbstractFuture order.
        done();
        if (mayInterruptIfRunning) {
            interruptTask();
        }
        return true;
    }

    /**
     * Subclasses can override this method to implement interruption of the
     * future's computation. The method is invoked automatically by a successful
     * call to {@link #cancel(boolean) cancel(true)}.
     * <p>
     * The default implementation does nothing.
     *
     * @since 10.0
     */
    protected void interruptTask() {
    }

    /**
     * Subclasses should invoke this method to set the result of the computation
     * to {@code value}. This will set the state of the future to
     * {@link BaseFuture.Sync#COMPLETED} and call {@link #done()} if the
     * state was successfully changed.
     *
     * @param value the value that was the result of the task.
     * @return true if the state was successfully changed.
     */
    protected boolean set(@Nullable V value) {
        boolean result = sync.set(value);
        if (result) {
            done();
        }
        return result;
    }

    /**
     * Subclasses should invoke this method to set the result of the computation
     * to an error, {@code throwable}. This will set the state of the future to
     * {@link BaseFuture.Sync#COMPLETED} and call {@link #done()} if the
     * state was successfully changed.
     *
     * @param throwable the exception that the task failed with.
     * @return true if the state was successfully changed.
     * @throws Error if the throwable was an {@link Error}.
     */
    protected boolean setException(Throwable throwable) {
        boolean result = sync.setException(Objects.requireNonNull(throwable));
        if (result) {
            done();
        }

        // If it's an Error, we want to make sure it reaches the top of the
        // call stack, so we rethrow it.

        // we want to notify the listeners we have with errors as well, as it breaks
        // how we work in ES in terms of using assertions
//        if (throwable instanceof Error) {
//            throw (Error) throwable;
//        }
        return result;
    }

    /**
     * Completion callback. Invoked exactly once, after the future transitions
     * to COMPLETED or CANCELLED. The default implementation does nothing.
     */
    protected void done() {
    }

    /**
     * <p>Following the contract of {@link AbstractQueuedSynchronizer} we create a
     * private subclass to hold the synchronizer. This synchronizer is used to
     * implement the blocking and waiting calls as well as to handle state changes
     * in a thread-safe manner. The current state of the future is held in the
     * Sync state, and the lock is released whenever the state changes to either
     * {@link #COMPLETED} or {@link #CANCELLED}.
     * <p>
     * To avoid races between threads doing release and acquire, we transition
     * to the final state in two steps. One thread will successfully CAS from
     * RUNNING to COMPLETING, that thread will then set the result of the
     * computation, and only then transition to COMPLETED or CANCELLED.
     * <p>
     * We don't use the integer argument passed between acquire methods so we
     * pass around a -1 everywhere.
     */
    static final class Sync<V> extends AbstractQueuedSynchronizer {

        /* Valid states. */
        static final int RUNNING = 0;
        static final int COMPLETING = 1;
        static final int COMPLETED = 2;
        static final int CANCELLED = 4;

        // Result fields; written only by the thread that won the
        // RUNNING -> COMPLETING CAS, published by the releaseShared below.
        private V value;
        private Throwable exception;

        /*
         * Acquisition succeeds if the future is done, otherwise it fails.
         */
        @Override
        protected int tryAcquireShared(int ignored) {
            if (isDone()) {
                return 1;
            }
            return -1;
        }

        /*
         * We always allow a release to go through, this means the state has been
         * successfully changed and the result is available.
         */
        @Override
        protected boolean tryReleaseShared(int finalState) {
            setState(finalState);
            return true;
        }

        /**
         * Blocks until the task is complete or the timeout expires. Throws a
         * {@link TimeoutException} if the timer expires, otherwise behaves like
         * {@link #get()}.
         */
        V get(long nanos) throws TimeoutException, CancellationException,
            ExecutionException, InterruptedException {

            // Attempt to acquire the shared lock with a timeout.
            if (!tryAcquireSharedNanos(-1, nanos)) {
                throw new TimeoutException("Timeout waiting for task.");
            }

            return getValue();
        }

        /**
         * Blocks until {@link #complete(Object, Throwable, int)} has been
         * successfully called. Throws a {@link CancellationException} if the task
         * was cancelled, or a {@link ExecutionException} if the task completed with
         * an error.
         */
        V get() throws CancellationException, ExecutionException,
            InterruptedException {

            // Acquire the shared lock allowing interruption.
            acquireSharedInterruptibly(-1);
            return getValue();
        }

        /**
         * Implementation of the actual value retrieval. Will return the value
         * on success, an exception on failure, a cancellation on cancellation, or
         * an illegal state if the synchronizer is in an invalid state.
         */
        private V getValue() throws CancellationException, ExecutionException {
            int state = getState();
            switch (state) {
                case COMPLETED:
                    if (exception != null) {
                        throw new ExecutionException(exception);
                    } else {
                        return value;
                    }

                case CANCELLED:
                    throw new CancellationException("Task was cancelled.");

                default:
                    throw new IllegalStateException(
                        "Error, synchronizer in invalid state: " + state);
            }
        }

        /**
         * Checks if the state is {@link #COMPLETED} or {@link #CANCELLED}.
         */
        boolean isDone() {
            return (getState() & (COMPLETED | CANCELLED)) != 0;
        }

        /**
         * Checks if the state is {@link #CANCELLED}.
         */
        boolean isCancelled() {
            return getState() == CANCELLED;
        }

        /**
         * Transition to the COMPLETED state and set the value.
         */
        boolean set(@Nullable V v) {
            return complete(v, null, COMPLETED);
        }

        /**
         * Transition to the COMPLETED state and set the exception.
         */
        boolean setException(Throwable t) {
            return complete(null, t, COMPLETED);
        }

        /**
         * Transition to the CANCELLED state.
         */
        boolean cancel() {
            return complete(null, null, CANCELLED);
        }

        /**
         * Implementation of completing a task. Either {@code v} or {@code t} will
         * be set but not both. The {@code finalState} is the state to change to
         * from {@link #RUNNING}. If the state is not in the RUNNING state we
         * return {@code false} after waiting for the state to be set to a valid
         * final state ({@link #COMPLETED} or {@link #CANCELLED}).
         *
         * @param v          the value to set as the result of the computation.
         * @param t          the exception to set as the result of the computation.
         * @param finalState the state to transition to.
         */
        private boolean complete(@Nullable V v, @Nullable Throwable t,
                                 int finalState) {
            boolean doCompletion = compareAndSetState(RUNNING, COMPLETING);
            if (doCompletion) {
                // If this thread successfully transitioned to COMPLETING, set the value
                // and exception and then release to the final state.
                this.value = v;
                this.exception = t;
                releaseShared(finalState);
            } else if (getState() == COMPLETING) {
                // If some other thread is currently completing the future, block until
                // they are done so we can guarantee completion.
                acquireShared(-1);
            }
            return doCompletion;
        }
    }
}
/*-------------------------------------------------------------------------
*
* Copyright (c) 2003-2011, PostgreSQL Global Development Group
*
*
*-------------------------------------------------------------------------
*/
package org.postgresql.core;

import java.io.BufferedOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.EOFException;
import java.io.Writer;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.sql.SQLException;

import org.postgresql.util.GT;
import org.postgresql.util.HostSpec;
import org.postgresql.util.PSQLState;
import org.postgresql.util.PSQLException;

/**
 * Wrapper around the raw connection to the server that implements some basic
 * primitives (reading/writing formatted data, doing string encoding, etc).
 *<p>
 * In general, instances of PGStream are not threadsafe; the caller must ensure
 * that only one thread at a time is accessing a particular PGStream instance.
 */
public class PGStream
{
    private final HostSpec hostSpec;

    // Scratch buffers reused for all 4-byte / 2-byte integer I/O
    // (safe because instances are single-threaded by contract).
    private final byte[] _int4buf;
    private final byte[] _int2buf;

    private Socket connection;
    private VisibleBufferedInputStream pg_input;
    private OutputStream pg_output;
    // Lazily allocated copy buffer used by SendStream().
    private byte[] streamBuffer;

    private Encoding encoding;
    private Writer encodingWriter;

    /**
     * Constructor: Connect to the PostgreSQL back end and return
     * a stream connection.
     *
     * @param hostSpec the host and port to connect to
     * @exception IOException if an IOException occurs below it.
     */
    public PGStream(HostSpec hostSpec) throws IOException
    {
        this.hostSpec = hostSpec;

        Socket socket = new Socket();
        socket.connect(new InetSocketAddress(hostSpec.getHost(), hostSpec.getPort()));
        changeSocket(socket);
        // Start out with the protocol's initial encoding; the connection setup
        // code switches to the real client encoding later via setEncoding().
        setEncoding(Encoding.getJVMEncoding("US-ASCII"));

        _int2buf = new byte[2];
        _int4buf = new byte[4];
    }

    public HostSpec getHostSpec()
    {
        return hostSpec;
    }

    public Socket getSocket()
    {
        return connection;
    }

    /**
     * Check for pending backend messages without blocking.
     * Might return false when there actually are messages
     * waiting, depending on the characteristics of the
     * underlying socket. This is used to detect asynchronous
     * notifies from the backend, when available.
     *
     * @return true if there is a pending backend message
     */
    public boolean hasMessagePending() throws IOException
    {
        // Check both our own buffer and the socket; data may sit in either.
        return pg_input.available() > 0 || connection.getInputStream().available() > 0;
    }

    /**
     * Switch this stream to using a new socket. Any existing socket
     * is <em>not</em> closed; it's assumed that we are changing to
     * a new socket that delegates to the original socket (e.g. SSL).
     *
     * @param socket the new socket to change to
     * @throws IOException if something goes wrong
     */
    public void changeSocket(Socket socket) throws IOException
    {
        this.connection = socket;

        // Submitted by Jason Venner <jason@idiom.com>. Disable Nagle
        // as we are selective about flushing output only when we
        // really need to.
        connection.setTcpNoDelay(true);

        // Buffer sizes submitted by Sverre H Huseby <sverrehu@online.no>
        pg_input = new VisibleBufferedInputStream(connection.getInputStream(), 8192);
        pg_output = new BufferedOutputStream(connection.getOutputStream(), 8192);

        // Rebuild the encoding writer so it targets the new output stream.
        if (encoding != null)
            setEncoding(encoding);
    }

    public Encoding getEncoding()
    {
        return encoding;
    }

    /**
     * Change the encoding used by this connection.
     *
     * @param encoding the new encoding to use
     * @throws IOException if something goes wrong
     */
    public void setEncoding(Encoding encoding) throws IOException
    {
        // Close down any old writer.
        if (encodingWriter != null)
            encodingWriter.close();

        this.encoding = encoding;

        // Intercept flush() downcalls from the writer; our caller
        // will call PGStream.flush() as needed.
        OutputStream interceptor = new FilterOutputStream(pg_output)
        {
            public void flush() throws IOException
            {
            }
            public void close() throws IOException
            {
                // close() must not close the shared pg_output; push buffered
                // chars down instead so nothing is lost when the writer
                // is replaced by a later setEncoding() call.
                super.flush();
            }
        };

        encodingWriter = encoding.getEncodingWriter(interceptor);
    }

    /**
     * Get a Writer instance that encodes directly onto the underlying stream.
     *<p>
     * The returned Writer should not be closed, as it's a shared object.
     * Writer.flush needs to be called when switching between use of the Writer and
     * use of the PGStream write methods, but it won't actually flush output
     * all the way out -- call {@link #flush} to actually ensure all output
     * has been pushed to the server.
     *
     * @return the shared Writer instance
     * @throws IOException if something goes wrong.
     */
    public Writer getEncodingWriter() throws IOException
    {
        if (encodingWriter == null)
            throw new IOException("No encoding has been set on this connection");
        return encodingWriter;
    }

    /**
     * Sends a single character to the back end
     *
     * @param val the character to be sent
     * @exception IOException if an I/O error occurs
     */
    public void SendChar(int val) throws IOException
    {
        pg_output.write(val);
    }

    /**
     * Sends a 4-byte integer to the back end
     *
     * @param val the integer to be sent
     * @exception IOException if an I/O error occurs
     */
    public void SendInteger4(int val) throws IOException
    {
        // Network (big-endian) byte order, as the protocol requires.
        _int4buf[0] = (byte)(val >>> 24);
        _int4buf[1] = (byte)(val >>> 16);
        _int4buf[2] = (byte)(val >>> 8);
        _int4buf[3] = (byte)(val);
        pg_output.write(_int4buf);
    }

    /**
     * Sends a 2-byte integer (short) to the back end
     *
     * @param val the integer to be sent
     * @exception IOException if an I/O error occurs or <code>val</code> cannot be encoded in 2 bytes
     */
    public void SendInteger2(int val) throws IOException
    {
        if (val < Short.MIN_VALUE || val > Short.MAX_VALUE)
            throw new IOException("Tried to send an out-of-range integer as a 2-byte value: " + val);

        _int2buf[0] = (byte)(val >>> 8);
        _int2buf[1] = (byte)val;
        pg_output.write(_int2buf);
    }

    /**
     * Send an array of bytes to the backend
     *
     * @param buf The array of bytes to be sent
     * @exception IOException if an I/O error occurs
     */
    public void Send(byte buf[]) throws IOException
    {
        pg_output.write(buf);
    }

    /**
     * Send a fixed-size array of bytes to the backend. If buf.length < siz,
     * pad with zeros. If buf.length > siz, truncate the array.
     *
     * @param buf the array of bytes to be sent
     * @param siz the number of bytes to be sent
     * @exception IOException if an I/O error occurs
     */
    public void Send(byte buf[], int siz) throws IOException
    {
        Send(buf, 0, siz);
    }

    /**
     * Send a fixed-size array of bytes to the backend. If length < siz,
     * pad with zeros. If length > siz, truncate the array.
     *
     * @param buf the array of bytes to be sent
     * @param off offset in the array to start sending from
     * @param siz the number of bytes to be sent
     * @exception IOException if an I/O error occurs
     */
    public void Send(byte buf[], int off, int siz) throws IOException
    {
        int bufamt = buf.length - off;
        pg_output.write(buf, off, bufamt < siz ? bufamt : siz);
        // Zero-pad up to the promised size if the array was too short.
        for (int i = bufamt ; i < siz ; ++i)
        {
            pg_output.write(0);
        }
    }

    /**
     * Receives a single character from the backend, without
     * advancing the current protocol stream position.
     *
     * @return the character received
     * @exception IOException if an I/O Error occurs
     */
    public int PeekChar() throws IOException
    {
        int c = pg_input.peek();
        if (c < 0)
            throw new EOFException();
        return c;
    }

    /**
     * Receives a single character from the backend
     *
     * @return the character received
     * @exception IOException if an I/O Error occurs
     */
    public int ReceiveChar() throws IOException
    {
        int c = pg_input.read();
        if (c < 0)
            throw new EOFException();
        return c;
    }

    /**
     * Receives a four byte integer from the backend
     *
     * @return the integer received from the backend
     * @exception IOException if an I/O error occurs
     */
    public int ReceiveInteger4() throws IOException
    {
        if (pg_input.read(_int4buf) != 4)
            throw new EOFException();

        // Big-endian decode; mask each byte to avoid sign extension.
        return (_int4buf[0] & 0xFF) << 24 | (_int4buf[1] & 0xFF) << 16 | (_int4buf[2] & 0xFF) << 8 | _int4buf[3] & 0xFF;
    }

    /**
     * Receives a two byte integer from the backend
     *
     * @return the integer received from the backend
     * @exception IOException if an I/O error occurs
     */
    public int ReceiveInteger2() throws IOException
    {
        if (pg_input.read(_int2buf) != 2)
            throw new EOFException();

        return (_int2buf[0] & 0xFF) << 8 | _int2buf[1] & 0xFF;
    }

    /**
     * Receives a fixed-size string from the backend.
     *
     * @param len the length of the string to receive, in bytes.
     * @return the decoded string
     */
    public String ReceiveString(int len) throws IOException
    {
        if (!pg_input.ensureBytes(len))
        {
            throw new EOFException();
        }

        // Decode straight out of the input buffer to avoid an extra copy.
        String res = encoding.decode(pg_input.getBuffer(), pg_input.getIndex(), len);
        pg_input.skip(len);
        return res;
    }

    /**
     * Receives a null-terminated string from the backend. If we don't see a
     * null, then we assume something has gone wrong.
     *
     * @return string from back end
     * @exception IOException if an I/O error occurs, or end of file
     */
    public String ReceiveString() throws IOException
    {
        int len = pg_input.scanCStringLength();
        // len includes the terminating NUL; decode everything before it.
        String res = encoding.decode(pg_input.getBuffer(), pg_input.getIndex(), len - 1);
        pg_input.skip(len);
        return res;
    }

    /**
     * Read a tuple from the back end. A tuple is a two dimensional
     * array of bytes. This variant reads the V3 protocol's tuple
     * representation.
     *
     * @return null if the current response has no more tuples, otherwise
     * an array of bytearrays
     * @exception IOException if a data I/O error occurs
     */
    public byte[][] ReceiveTupleV3() throws IOException, OutOfMemoryError
    {
        //TODO: use l_msgSize
        int l_msgSize = ReceiveInteger4();
        int i;
        int l_nf = ReceiveInteger2();
        byte[][] answer = new byte[l_nf][];

        OutOfMemoryError oom = null;
        for (i = 0 ; i < l_nf ; ++i)
        {
            int l_size = ReceiveInteger4();
            // A size of -1 is the protocol's representation of a NULL field.
            if (l_size != -1)
            {
                try
                {
                    answer[i] = new byte[l_size];
                    Receive(answer[i], 0, l_size);
                }
                catch (OutOfMemoryError oome)
                {
                    // Remember the OOM but keep consuming the message so the
                    // protocol stream stays in sync; rethrow at the end.
                    oom = oome;
                    Skip(l_size);
                }
            }
        }

        if (oom != null)
            throw oom;

        return answer;
    }

    /**
     * Read a tuple from the back end. A tuple is a two dimensional
     * array of bytes. This variant reads the V2 protocol's tuple
     * representation.
     *
     * @param nf the number of fields expected
     * @param bin true if the tuple is a binary tuple
     * @return null if the current response has no more tuples, otherwise
     * an array of bytearrays
     * @exception IOException if a data I/O error occurs
     */
    public byte[][] ReceiveTupleV2(int nf, boolean bin) throws IOException, OutOfMemoryError
    {
        // The V2 protocol prefixes the tuple with a null-field bitmask,
        // one bit per field, most-significant bit first.
        int i, bim = (nf + 7) / 8;
        byte[] bitmask = Receive(bim);
        byte[][] answer = new byte[nf][];

        int whichbit = 0x80;
        int whichbyte = 0;

        OutOfMemoryError oom = null;
        for (i = 0 ; i < nf ; ++i)
        {
            boolean isNull = ((bitmask[whichbyte] & whichbit) == 0);
            whichbit >>= 1;
            if (whichbit == 0)
            {
                ++whichbyte;
                whichbit = 0x80;
            }
            if (!isNull)
            {
                int len = ReceiveInteger4();
                if (!bin)
                    len -= 4; // text fields include the length word itself
                if (len < 0)
                    len = 0;
                try
                {
                    answer[i] = new byte[len];
                    Receive(answer[i], 0, len);
                }
                catch (OutOfMemoryError oome)
                {
                    // As in ReceiveTupleV3: skip the data to stay in sync,
                    // rethrow the OOM once the whole tuple is consumed.
                    oom = oome;
                    Skip(len);
                }
            }
        }

        if (oom != null)
            throw oom;

        return answer;
    }

    /**
     * Reads in a given number of bytes from the backend
     *
     * @param siz number of bytes to read
     * @return array of bytes received
     * @exception IOException if a data I/O error occurs
     */
    public byte[] Receive(int siz) throws IOException
    {
        byte[] answer = new byte[siz];
        Receive(answer, 0, siz);
        return answer;
    }

    /**
     * Reads in a given number of bytes from the backend
     *
     * @param buf buffer to store result
     * @param off offset in buffer
     * @param siz number of bytes to read
     * @exception IOException if a data I/O error occurs
     */
    public void Receive(byte[] buf, int off, int siz) throws IOException
    {
        int s = 0;

        // Loop until the full count is read; a single read() may return less.
        while (s < siz)
        {
            int w = pg_input.read(buf, off + s, siz - s);
            if (w < 0)
                throw new EOFException();
            s += w;
        }
    }

    /**
     * Discards the given number of bytes from the backend stream.
     *
     * @param size number of bytes to skip
     * @exception IOException if a data I/O error occurs
     */
    public void Skip(int size) throws IOException
    {
        long s = 0;
        // NOTE(review): relies on pg_input.skip() always making progress;
        // a skip() that returned 0 (e.g. at EOF) would spin here — confirm
        // VisibleBufferedInputStream's contract.
        while (s < size)
        {
            s += pg_input.skip(size - s);
        }
    }

    /**
     * Copy data from an input stream to the connection.
     *
     * @param inStream the stream to read data from
     * @param remaining the number of bytes to copy
     */
    public void SendStream(InputStream inStream, int remaining) throws IOException
    {
        int expectedLength = remaining;
        if (streamBuffer == null)
            streamBuffer = new byte[8192];

        while (remaining > 0)
        {
            int count = (remaining > streamBuffer.length ? streamBuffer.length : remaining);
            int readCount;

            try
            {
                readCount = inStream.read(streamBuffer, 0, count);
                if (readCount < 0)
                    throw new EOFException(GT.tr("Premature end of input stream, expected {0} bytes, but only read {1}.", new Object[]{new Integer(expectedLength), new Integer(expectedLength - remaining)}));
            }
            catch (IOException ioe)
            {
                // The message length has already been promised to the backend,
                // so write out the remaining byte count (buffer contents are
                // stale/arbitrary) to keep the protocol stream consistent,
                // then surface the failure as a bind error.
                while (remaining > 0)
                {
                    Send(streamBuffer, count);
                    remaining -= count;
                    count = (remaining > streamBuffer.length ? streamBuffer.length : remaining);
                }
                throw new PGBindException(ioe);
            }

            Send(streamBuffer, readCount);
            remaining -= readCount;
        }
    }

    /**
     * Flush any pending output to the backend.
     * @exception IOException if an I/O error occurs
     */
    public void flush() throws IOException
    {
        // Push any characters buffered in the encoding writer first.
        if (encodingWriter != null)
            encodingWriter.flush();
        pg_output.flush();
    }

    /**
     * Consume an expected EOF from the backend
     * @exception SQLException if we get something other than an EOF
     */
    public void ReceiveEOF() throws SQLException, IOException
    {
        int c = pg_input.read();
        if (c < 0)
            return;
        throw new PSQLException(GT.tr("Expected an EOF from server, got: {0}", new Integer(c)), PSQLState.COMMUNICATION_ERROR);
    }

    /**
     * Closes the connection
     *
     * @exception IOException if an I/O Error occurs
     */
    public void close() throws IOException
    {
        if (encodingWriter != null)
            encodingWriter.close();

        pg_output.close();
        pg_input.close();
        connection.close();
    }
}
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;

import static com.google.devtools.build.lib.analysis.config.CoreOptionConverters.BUILD_SETTING_CONVERTERS;
import static com.google.devtools.build.lib.packages.RuleClass.Builder.STARLARK_BUILD_SETTING_DEFAULT_ATTR_NAME;
import static com.google.devtools.build.lib.packages.Type.BOOLEAN;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
import com.google.devtools.build.lib.cmdline.LabelValidator;
import com.google.devtools.build.lib.cmdline.LabelValidator.BadLabelException;
import com.google.devtools.build.lib.cmdline.TargetParsingException;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.packages.BuildSetting;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.skyframe.TargetPatternPhaseValue;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.Converter;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.OptionsParsingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;

/**
 * An options parser for starlark defined options. Takes a mutable {@link OptionsParser} that has
 * already parsed all native options (including those needed for loading). This class is in charge
 * of parsing and setting the starlark options for this {@link OptionsParser}.
 */
public class StarlarkOptionsParser {

  private final SkyframeExecutor skyframeExecutor;
  private final PathFragment relativeWorkingDirectory;
  private final ExtendedEventHandler reporter;
  private final OptionsParser nativeOptionsParser;

  // Result of #parse, store the parsed options and their values.
  private final Map<String, Object> starlarkOptions = new TreeMap<>();
  // Map of parsed starlark options to their loaded BuildSetting objects (used for canonicalization)
  private final Map<String, BuildSetting> parsedBuildSettings = new HashMap<>();

  /**
   * {@link ExtendedEventHandler} override that passes through "normal" events but not events that
   * would go to the build event proto.
   *
   * <p>Starlark flags are conceptually options but still need target pattern evaluation in {@link
   * com.google.devtools.build.lib.skyframe.TargetPatternPhaseFunction} to translate their labels to
   * actual targets. If we pass the {@link #post}able events that function calls, that would produce
   * "target loaded" and "target configured" events in the build event proto output that consumers
   * can confuse with actual targets requested by the build.
   *
   * <p>This is important because downstream services (like a continuous integration tool or build
   * results dashboard) read these messages to reconcile which requested targets were built. If they
   * determine Blaze tried to build {@code //foo //bar} then see a "target configured" message for
   * some other target {@code //my_starlark_flag}, they might show misleading messages like "Built 3
   * of 2 requested targets.".
   *
   * <p>Hence this class. By dropping those events, we restrict all info and error reporting logic
   * to the options parsing pipeline.
   */
  private static class NonPostingEventHandler implements ExtendedEventHandler {
    private final ExtendedEventHandler delegate;

    NonPostingEventHandler(ExtendedEventHandler delegate) {
      this.delegate = delegate;
    }

    @Override
    public void handle(Event e) {
      delegate.handle(e);
    }

    @Override
    public void post(ExtendedEventHandler.Postable e) {}
  }

  // Local cache of build settings so we don't repeatedly load them.
  private final Map<String, Target> buildSettings = new HashMap<>();

  private StarlarkOptionsParser(
      SkyframeExecutor skyframeExecutor,
      PathFragment relativeWorkingDirectory,
      ExtendedEventHandler reporter,
      OptionsParser nativeOptionsParser) {
    this.skyframeExecutor = skyframeExecutor;
    this.relativeWorkingDirectory = relativeWorkingDirectory;
    this.reporter = new NonPostingEventHandler(reporter);
    this.nativeOptionsParser = nativeOptionsParser;
  }

  public static StarlarkOptionsParser newStarlarkOptionsParser(
      CommandEnvironment env, OptionsParser optionsParser) {
    return new StarlarkOptionsParser(
        env.getSkyframeExecutor(),
        env.getRelativeWorkingDirectory(),
        env.getReporter(),
        optionsParser);
  }

  /** Parses all pre "--" residue for Starlark options. */
  // TODO(blaze-configurability): This method somewhat reinvents the wheel of
  // OptionsParserImpl.identifyOptionAndPossibleArgument. Consider combining. This would probably
  // require multiple rounds of parsing to fit starlark-defined options into native option format.
  @VisibleForTesting
  public void parse(ExtendedEventHandler eventHandler) throws OptionsParsingException {
    ImmutableList.Builder<String> residue = new ImmutableList.Builder<>();
    // Map of <option name (label), <unparsed option value, loaded option>>.
    Multimap<String, Pair<String, Target>> unparsedOptions = LinkedListMultimap.create();

    // sort the old residue into starlark flags and legitimate residue
    for (String arg : nativeOptionsParser.getPreDoubleDashResidue()) {
      // TODO(bazel-team): support single dash options?
      if (!arg.startsWith("--")) {
        residue.add(arg);
        continue;
      }
      parseArg(arg, unparsedOptions, eventHandler);
    }
    List<String> postDoubleDashResidue = nativeOptionsParser.getPostDoubleDashResidue();
    residue.addAll(postDoubleDashResidue);
    nativeOptionsParser.setResidue(residue.build(), postDoubleDashResidue);

    if (unparsedOptions.isEmpty()) {
      return;
    }

    // Map of flag label as a string to its loaded target and set value after parsing.
    HashMap<String, Pair<Target, Object>> buildSettingWithTargetAndValue = new HashMap<>();
    for (Map.Entry<String, Pair<String, Target>> option : unparsedOptions.entries()) {
      String loadedFlag = option.getKey();
      String unparsedValue = option.getValue().first;
      Target buildSettingTarget = option.getValue().second;
      BuildSetting buildSetting =
          buildSettingTarget.getAssociatedRule().getRuleClassObject().getBuildSetting();
      // Do not recognize internal options, which are treated as if they did not exist.
      if (!buildSetting.isFlag()) {
        throw new OptionsParsingException(
            String.format("Unrecognized option: %s=%s", loadedFlag, unparsedValue));
      }
      Type<?> type = buildSetting.getType();
      Converter<?> converter = BUILD_SETTING_CONVERTERS.get(type);
      Object value;
      try {
        value = converter.convert(unparsedValue);
      } catch (OptionsParsingException e) {
        throw new OptionsParsingException(
            String.format(
                "While parsing option %s=%s: '%s' is not a %s",
                loadedFlag, unparsedValue, unparsedValue, type),
            e);
      }
      if (buildSetting.allowsMultiple()) {
        // Repeatable flags accumulate: append this occurrence to any values
        // already collected for the same label.
        List<Object> newValue;
        if (buildSettingWithTargetAndValue.containsKey(loadedFlag)) {
          newValue =
              new ArrayList<>(
                  (Collection<?>) buildSettingWithTargetAndValue.get(loadedFlag).getSecond());
        } else {
          newValue = new ArrayList<>();
        }
        newValue.add(value);
        value = newValue;
      }
      buildSettingWithTargetAndValue.put(loadedFlag, Pair.of(buildSettingTarget, value));
    }

    // Only record options whose final value differs from the build setting's declared default;
    // default-valued flags are dropped so they don't pollute canonicalization.
    Map<String, Object> parsedOptions = new HashMap<>();
    for (String buildSetting : buildSettingWithTargetAndValue.keySet()) {
      Pair<Target, Object> buildSettingAndFinalValue =
          buildSettingWithTargetAndValue.get(buildSetting);
      Target buildSettingTarget = buildSettingAndFinalValue.getFirst();
      BuildSetting buildSettingObject =
          buildSettingTarget.getAssociatedRule().getRuleClassObject().getBuildSetting();
      boolean allowsMultiple = buildSettingObject.allowsMultiple();
      parsedBuildSettings.put(buildSetting, buildSettingObject);
      Object value = buildSettingAndFinalValue.getSecond();
      if (allowsMultiple) {
        // For allow-multiple flags, the "default" is a single-element list holding the
        // declared default value.
        List<?> defaultValue =
            ImmutableList.of(
                Objects.requireNonNull(
                    buildSettingTarget
                        .getAssociatedRule()
                        .getAttr(STARLARK_BUILD_SETTING_DEFAULT_ATTR_NAME)));
        List<?> newValue = (List<?>) value;
        if (!newValue.equals(defaultValue)) {
          parsedOptions.put(buildSetting, value);
        }
      } else {
        if (!value.equals(
            buildSettingTarget
                .getAssociatedRule()
                .getAttr(STARLARK_BUILD_SETTING_DEFAULT_ATTR_NAME))) {
          parsedOptions.put(buildSetting, buildSettingAndFinalValue.getSecond());
        }
      }
    }
    nativeOptionsParser.setStarlarkOptions(ImmutableMap.copyOf(parsedOptions));
    this.starlarkOptions.putAll(parsedOptions);
  }

  /**
   * Parses a single "--"-prefixed argument into {@code unparsedOptions}, keyed by the canonical
   * label of the build setting it names.
   *
   * @throws OptionsParsingException if the argument is malformed, names a flag that cannot be
   *     loaded, or uses the "no" prefix / missing value on a non-boolean flag
   */
  private void parseArg(
      String arg,
      Multimap<String, Pair<String, Target>> unparsedOptions,
      ExtendedEventHandler eventHandler)
      throws OptionsParsingException {
    int equalsAt = arg.indexOf('=');
    String name = equalsAt == -1 ? arg.substring(2) : arg.substring(2, equalsAt);
    if (name.trim().isEmpty()) {
      throw new OptionsParsingException("Invalid options syntax: " + arg, arg);
    }
    String value = equalsAt == -1 ? null : arg.substring(equalsAt + 1);

    if (value != null) {
      // --flag=value or -flag=value form
      Target buildSettingTarget = loadBuildSetting(name, eventHandler);
      // Use the canonical form to ensure we don't have
      // duplicate options getting into the starlark options map.
      unparsedOptions.put(
          buildSettingTarget.getLabel().getCanonicalForm(), new Pair<>(value, buildSettingTarget));
    } else {
      boolean booleanValue = true;
      // check --noflag form
      if (name.startsWith("no")) {
        booleanValue = false;
        name = name.substring(2);
      }
      Target buildSettingTarget = loadBuildSetting(name, eventHandler);
      BuildSetting current =
          buildSettingTarget.getAssociatedRule().getRuleClassObject().getBuildSetting();
      if (current.getType().equals(BOOLEAN)) {
        // --boolean_flag or --noboolean_flag
        // Ditto w/r/t canonical form.
        unparsedOptions.put(
            buildSettingTarget.getLabel().getCanonicalForm(),
            new Pair<>(String.valueOf(booleanValue), buildSettingTarget));
      } else {
        if (!booleanValue) {
          // --no(non_boolean_flag)
          throw new OptionsParsingException(
              "Illegal use of 'no' prefix on non-boolean option: " + name, name);
        }
        throw new OptionsParsingException("Expected value after " + arg);
      }
    }
  }

  /**
   * Loads (and caches) the {@link Target} for a build-setting label.
   *
   * @throws OptionsParsingException if loading fails or the target is not a build setting
   */
  private Target loadBuildSetting(String targetToBuild, ExtendedEventHandler eventHandler)
      throws OptionsParsingException {
    if (buildSettings.containsKey(targetToBuild)) {
      return buildSettings.get(targetToBuild);
    }

    Target buildSetting;
    try {
      TargetPatternPhaseValue result =
          skyframeExecutor.loadTargetPatternsWithoutFilters(
              reporter,
              Collections.singletonList(targetToBuild),
              relativeWorkingDirectory,
              SkyframeExecutor.DEFAULT_THREAD_COUNT,
              /*keepGoing=*/ false);
      buildSetting =
          Iterables.getOnlyElement(
              result.getTargets(eventHandler, skyframeExecutor.getPackageManager()));
    } catch (InterruptedException | TargetParsingException e) {
      // Only restore the interrupt flag for a real interruption. Unconditionally interrupting
      // here would spuriously mark the thread as interrupted on an ordinary parsing failure.
      if (e instanceof InterruptedException) {
        Thread.currentThread().interrupt();
      }
      throw new OptionsParsingException(
          "Error loading option " + targetToBuild + ": " + e.getMessage(), targetToBuild, e);
    }
    Rule associatedRule = buildSetting.getAssociatedRule();
    if (associatedRule == null || associatedRule.getRuleClassObject().getBuildSetting() == null) {
      throw new OptionsParsingException("Unrecognized option: " + targetToBuild, targetToBuild);
    }
    buildSettings.put(targetToBuild, buildSetting);
    return buildSetting;
  }

  /**
   * Separates out any Starlark options from the given list
   *
   * <p>This method doesn't go through the trouble to actually load build setting targets and verify
   * they are build settings, it just assumes all strings that look like they could be build
   * settings, aka are formatted like a flag and can parse out to a proper label, are build
   * settings. Use actual parsing functions above to do full build setting verification.
   *
   * @param list List of strings from which to parse out starlark options
   * @return Returns a pair of string lists. The first item contains the list of starlark options
   *     that were removed; the second contains the remaining string from the original list.
   */
  public static Pair<ImmutableList<String>, ImmutableList<String>> removeStarlarkOptions(
      List<String> list) {
    ImmutableList.Builder<String> keep = ImmutableList.builder();
    ImmutableList.Builder<String> remove = ImmutableList.builder();
    for (String name : list) {
      // Check if the string is a flag and trim off "--" if so.
      if (!name.startsWith("--")) {
        keep.add(name);
        continue;
      }
      String potentialStarlarkFlag = name.substring(2);
      // Check if the string uses the "no" prefix for setting boolean flags to false, trim
      // off "no" if so.
      if (potentialStarlarkFlag.startsWith("no")) {
        potentialStarlarkFlag = potentialStarlarkFlag.substring(2);
      }
      // Check if the string contains a value, trim off the value if so.
      int equalsIdx = potentialStarlarkFlag.indexOf('=');
      if (equalsIdx > 0) {
        potentialStarlarkFlag = potentialStarlarkFlag.substring(0, equalsIdx);
      }
      // Check if we can properly parse the (potentially trimmed) string as a label. If so, count
      // as starlark flag, else count as regular residue.
      try {
        LabelValidator.validateAbsoluteLabel(potentialStarlarkFlag);
        remove.add(name);
      } catch (BadLabelException e) {
        keep.add(name);
      }
    }
    return Pair.of(remove.build(), keep.build());
  }

  @VisibleForTesting
  public static StarlarkOptionsParser newStarlarkOptionsParserForTesting(
      SkyframeExecutor skyframeExecutor,
      ExtendedEventHandler reporter,
      PathFragment relativeWorkingDirectory,
      OptionsParser nativeOptionsParser) {
    return new StarlarkOptionsParser(
        skyframeExecutor, relativeWorkingDirectory, reporter, nativeOptionsParser);
  }

  @VisibleForTesting
  public void setResidueForTesting(List<String> residue) {
    nativeOptionsParser.setResidue(residue, ImmutableList.of());
  }

  @VisibleForTesting
  public OptionsParser getNativeOptionsParserFortesting() {
    return nativeOptionsParser;
  }

  /** Returns whether the given (already-parsed) option is an allow-multiple build setting. */
  public boolean checkIfParsedOptionAllowsMultiple(String option) {
    return parsedBuildSettings.get(option).allowsMultiple();
  }

  /** Returns the declared type of the given (already-parsed) option. */
  public Type<?> getParsedOptionType(String option) {
    return parsedBuildSettings.get(option).getType();
  }

  /** Return a canonicalized list of the starlark options and values that this parser has parsed. */
  @SuppressWarnings("unchecked")
  public List<String> canonicalize() {
    ImmutableList.Builder<String> result = new ImmutableList.Builder<>();
    for (Map.Entry<String, Object> starlarkOption : starlarkOptions.entrySet()) {
      String starlarkOptionName = starlarkOption.getKey();
      Object starlarkOptionValue = starlarkOption.getValue();
      String starlarkOptionString = "--" + starlarkOptionName + "=";
      if (checkIfParsedOptionAllowsMultiple(starlarkOptionName)) {
        Preconditions.checkState(
            starlarkOption.getValue() instanceof List,
            "Found a starlark option value that isn't a list for an allow multiple option.");
        // Emit one --flag=value entry per accumulated occurrence.
        for (Object singleValue : (List<?>) starlarkOptionValue) {
          result.add(starlarkOptionString + singleValue);
        }
      } else if (getParsedOptionType(starlarkOptionName).equals(Type.STRING_LIST)) {
        result.add(
            starlarkOptionString + String.join(",", ((Iterable<String>) starlarkOptionValue)));
      } else {
        result.add(starlarkOptionString + starlarkOptionValue);
      }
    }
    return result.build();
  }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.application.options.editor;

import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.ProjectCodeInsightSettings;
import com.intellij.execution.util.ListTableWithButtons;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.ComboBoxTableRenderer;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.GuiUtils;
import com.intellij.ui.JBColor;
import com.intellij.ui.ScrollingUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.ListTableModel;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import java.awt.*;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Two-column settings table ("Class or package" / "Scope") of names excluded from
 * import suggestions. IDE-scoped entries are persisted in {@link CodeInsightSettings};
 * project-scoped entries in {@link ProjectCodeInsightSettings}.
 */
class ExcludeTable extends ListTableWithButtons<ExcludeTable.Item> {
  // Dot-separated identifier, e.g. "java.awt" or "Foo". Entries that don't match
  // are rendered in red but are not rejected.
  private static final Pattern ourPackagePattern = Pattern.compile("(\\w+\\.)*\\w+");

  /** Editable text column holding the excluded class/package name. */
  private static final ColumnInfo<Item, String> NAME_COLUMN = new ColumnInfo<Item, String>("Class or package") {
    @Nullable
    @Override
    public String valueOf(Item pair) {
      return pair.exclude;
    }

    @Nullable
    @Override
    public TableCellEditor getEditor(Item pair) {
      final JTextField field = GuiUtils.createUndoableTextField();
      // Live validation while typing: turn the text red when it isn't a
      // syntactically valid dot-separated name.
      field.getDocument().addDocumentListener(new DocumentAdapter() {
        @Override
        protected void textChanged(DocumentEvent e) {
          field.setForeground(
            ourPackagePattern.matcher(field.getText()).matches() ? UIUtil.getTableForeground() : JBColor.RED);
        }
      });
      return new DefaultCellEditor(field);
    }

    @Nullable
    @Override
    public TableCellRenderer getRenderer(Item pair) {
      // Same validation feedback for non-editing cells: invalid names show red.
      return new DefaultTableCellRenderer() {
        @NotNull
        @Override
        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
          Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
          if (!ourPackagePattern.matcher((String)value).matches()) {
            component.setForeground(JBColor.RED);
          }
          return component;
        }
      };
    }

    @Override
    public boolean isCellEditable(Item pair) {
      return true;
    }

    @Override
    public void setValue(Item item, String value) {
      item.exclude = value;
    }
  };

  /** Combo-box column choosing whether the entry applies IDE-wide or per-project. */
  private static final ColumnInfo<Item, ExclusionScope> SCOPE_COLUMN = new ColumnInfo<Item, ExclusionScope>("Scope") {
    @Nullable
    @Override
    public ExclusionScope valueOf(Item pair) {
      return pair.scope;
    }

    @Nullable
    @Override
    public TableCellRenderer getRenderer(Item pair) {
      return new ComboBoxTableRenderer<ExclusionScope>(ExclusionScope.values());
    }

    @Nullable
    @Override
    public TableCellEditor getEditor(Item pair) {
      return new DefaultCellEditor(new ComboBox(ExclusionScope.values()));
    }

    @Override
    public boolean isCellEditable(Item pair) {
      return true;
    }

    @Override
    public void setValue(Item pair, ExclusionScope value) {
      pair.scope = value;
    }

    @Nullable
    @Override
    public String getMaxStringValue() {
      // Widest scope label; used to size the column.
      return "Project";
    }
  };

  private final Project myProject;

  public ExcludeTable(@NotNull Project project) {
    myProject = project;
    getTableView().getEmptyText().setText(ApplicationBundle.message("exclude.from.imports.no.exclusions"));
  }

  @Override
  protected ListTableModel createListModel() {
    return new ListTableModel<Item>(NAME_COLUMN, SCOPE_COLUMN);
  }

  @Override
  protected Item createElement() {
    // New rows default to IDE scope with an empty name.
    return new Item("", ExclusionScope.IDE);
  }

  @Override
  protected boolean isEmpty(Item element) {
    return element.exclude.isEmpty();
  }

  @Override
  protected Item cloneElement(Item variable) {
    return new Item(variable.exclude, variable.scope);
  }

  @Override
  protected boolean canDeleteElement(Item selection) {
    return true;
  }

  /**
   * Inserts a new IDE-scoped exclusion for {@code packageName}, keeping rows
   * sorted by name (assumes the model is already sorted — see {@link #reset()}),
   * then selects, scrolls to and focuses the new row. No-op on null.
   */
  void addExcludePackage(String packageName) {
    if (packageName == null) {
      return;
    }
    // Linear scan for the first row that sorts after the new name.
    int index = 0;
    while (index < getTableView().getListTableModel().getRowCount()) {
      if (getTableView().getListTableModel().getItem(index).exclude.compareTo(packageName) > 0) {
        break;
      }
      index++;
    }
    getTableView().getListTableModel().insertRow(index, new Item(packageName, ExclusionScope.IDE));
    getTableView().clearSelection();
    getTableView().addRowSelectionInterval(index, index);
    ScrollingUtil.ensureIndexIsVisible(getTableView(), index, 0);
    IdeFocusManager.getGlobalInstance().requestFocus(getTableView(), false);
  }

  /** Reloads the table from the persisted IDE and project settings, sorted by name. */
  void reset() {
    java.util.List<Item> rows = ContainerUtil.newArrayList();
    for (String s : CodeInsightSettings.getInstance().EXCLUDED_PACKAGES) {
      rows.add(new Item(s, ExclusionScope.IDE));
    }
    for (String s : ProjectCodeInsightSettings.getSettings(myProject).excludedNames) {
      rows.add(new Item(s, ExclusionScope.Project));
    }
    Collections.sort(rows, new Comparator<Item>() {
      @Override
      public int compare(Item o1, Item o2) {
        return o1.exclude.compareTo(o2.exclude);
      }
    });
    setValues(rows);
  }

  /** Writes the table contents back into the IDE-level and project-level settings. */
  void apply() {
    CodeInsightSettings.getInstance().EXCLUDED_PACKAGES = ArrayUtil.toStringArray(getExcludedPackages(ExclusionScope.IDE));
    ProjectCodeInsightSettings.getSettings(myProject).excludedNames = getExcludedPackages(ExclusionScope.Project);
  }

  // Sorted names of all rows with the given scope.
  private List<String> getExcludedPackages(ExclusionScope scope) {
    List<String> result = ContainerUtil.newArrayList();
    for (Item pair : getTableView().getListTableModel().getItems()) {
      if (scope == pair.scope) {
        result.add(pair.exclude);
      }
    }
    Collections.sort(result);
    return result;
  }

  /** True when either scope's current rows differ from the persisted settings. */
  boolean isModified() {
    return !getExcludedPackages(ExclusionScope.IDE).equals(Arrays.asList(CodeInsightSettings.getInstance().EXCLUDED_PACKAGES)) ||
           !getExcludedPackages(ExclusionScope.Project).equals(ProjectCodeInsightSettings.getSettings(myProject).excludedNames);
  }

  private enum ExclusionScope {Project, IDE}

  /** One table row: the excluded name and the scope it applies to. Mutable by design (edited in place). */
  static class Item {
    String exclude;
    ExclusionScope scope;

    Item(@NotNull String exclude, ExclusionScope scope) {
      this.exclude = exclude;
      this.scope = scope;
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.reindex;

import com.sun.net.httpserver.HttpsConfigurator;
import com.sun.net.httpserver.HttpsExchange;
import com.sun.net.httpserver.HttpsParameters;
import com.sun.net.httpserver.HttpsServer;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.ssl.PemKeyConfig;
import org.elasticsearch.common.ssl.PemTrustConfig;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509ExtendedKeyManager;
import javax.net.ssl.X509ExtendedTrustManager;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;

import static org.mockito.Mockito.mock;

/**
 * Because core ES doesn't have SSL available, this test uses a mock webserver
 * as the remote endpoint.
 * This makes it hard to test actual reindex functionality, but does allow us to test that the correct connections are made with the
 * right SSL keys + trust settings.
 */
@SuppressForbidden(reason = "use http server")
public class ReindexRestClientSslTests extends ESTestCase {

    private static HttpsServer server;
    // Per-request hook the tests can swap in to inspect the TLS exchange
    // (e.g. capture the client's peer certificates). Defaults to a no-op.
    private static Consumer<HttpsExchange> handler = ignore -> {
    };

    /**
     * Starts a loopback HTTPS server (port auto-assigned) that runs {@link #handler}
     * on each request and always answers 200 with an empty body.
     */
    @BeforeClass
    public static void setupHttpServer() throws Exception {
        InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0);
        SSLContext sslContext = buildServerSslContext();
        server = MockHttpServer.createHttps(address, 0);
        server.setHttpsConfigurator(new ClientAuthHttpsConfigurator(sslContext));
        server.start();
        server.createContext("/", http -> {
            assert http instanceof HttpsExchange;
            HttpsExchange https = (HttpsExchange) http;
            handler.accept(https);
            // Always respond with 200
            //  * If the reindex sees the 200, it means the SSL connection was established correctly.
            //  * We can check client certs in the handler.
            https.sendResponseHeaders(200, 0);
            https.close();
        });
    }

    @AfterClass
    public static void shutdownHttpServer() {
        server.stop(0);
        server = null;
        handler = null;
    }

    /**
     * Server-side TLS context: the "http" PEM cert/key pair for its own identity,
     * and the test CA ("ca.pem") as the trust anchor for validating client certs.
     */
    private static SSLContext buildServerSslContext() throws Exception {
        final SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
        final char[] password = "http-password".toCharArray();
        final Path cert = PathUtils.get(ReindexRestClientSslTests.class.getResource("http/http.crt").toURI());
        final Path key = PathUtils.get(ReindexRestClientSslTests.class.getResource("http/http.key").toURI());
        final X509ExtendedKeyManager keyManager = new PemKeyConfig(cert, key, password).createKeyManager();
        final Path ca = PathUtils.get(ReindexRestClientSslTests.class.getResource("ca.pem").toURI());
        final X509ExtendedTrustManager trustManager = new PemTrustConfig(Collections.singletonList(ca)).createTrustManager();
        sslContext.init(new KeyManager[] { keyManager }, new TrustManager[] { trustManager }, null);
        return sslContext;
    }

    /** With no configured CA, the client must reject the server's self-signed chain. */
    public void testClientFailsWithUntrustedCertificate() throws IOException {
        final List<Thread> threads = new ArrayList<>();
        final Settings settings = Settings.builder()
            .put("path.home", createTempDir())
            .build();
        final Environment environment = TestEnvironment.newEnvironment(settings);
        final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
        try (RestClient client = TransportReindexAction.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
            expectThrows(SSLHandshakeException.class, () -> client.performRequest(new Request("GET", "/")));
        }
    }

    /** Configuring the test CA via reindex.ssl.certificate_authorities lets the handshake succeed. */
    public void testClientSucceedsWithCertificateAuthorities() throws IOException {
        final List<Thread> threads = new ArrayList<>();
        final Path ca = getDataPath("ca.pem");
        final Settings settings = Settings.builder()
            .put("path.home", createTempDir())
            .putList("reindex.ssl.certificate_authorities", ca.toString())
            .build();
        final Environment environment = TestEnvironment.newEnvironment(settings);
        final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
        try (RestClient client = TransportReindexAction.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
            final Response response = client.performRequest(new Request("GET", "/"));
            assertThat(response.getStatusLine().getStatusCode(), Matchers.is(200));
        }
    }

    /** verification_mode=NONE must accept the untrusted server cert (not allowed in FIPS mode). */
    public void testClientSucceedsWithVerificationDisabled() throws IOException {
        assumeFalse("Cannot disable verification in FIPS JVM", inFipsJvm());
        final List<Thread> threads = new ArrayList<>();
        final Settings settings = Settings.builder()
            .put("path.home", createTempDir())
            .put("reindex.ssl.verification_mode", "NONE")
            .build();
        final Environment environment = TestEnvironment.newEnvironment(settings);
        final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
        try (RestClient client = TransportReindexAction.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
            final Response response = client.performRequest(new Request("GET", "/"));
            assertThat(response.getStatusLine().getStatusCode(), Matchers.is(200));
        }
    }

    /**
     * With a configured client cert/key, the server (which requests client auth —
     * see {@link ClientAuthHttpsConfigurator}) must receive exactly that certificate.
     */
    public void testClientPassesClientCertificate() throws IOException {
        final List<Thread> threads = new ArrayList<>();
        final Path ca = getDataPath("ca.pem");
        final Path cert = getDataPath("client/client.crt");
        final Path key = getDataPath("client/client.key");
        final Settings settings = Settings.builder()
            .put("path.home", createTempDir())
            .putList("reindex.ssl.certificate_authorities", ca.toString())
            .put("reindex.ssl.certificate", cert)
            .put("reindex.ssl.key", key)
            .put("reindex.ssl.key_passphrase", "client-password")
            .build();
        // Capture what the server saw during the handshake; null if the client
        // sent no certificate.
        AtomicReference<Certificate[]> clientCertificates = new AtomicReference<>();
        handler = https -> {
            try {
                clientCertificates.set(https.getSSLSession().getPeerCertificates());
            } catch (SSLPeerUnverifiedException e) {
                logger.warn("Client did not provide certificates", e);
                clientCertificates.set(null);
            }
        };
        final Environment environment = TestEnvironment.newEnvironment(settings);
        final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
        try (RestClient client = TransportReindexAction.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
            final Response response = client.performRequest(new Request("GET", "/"));
            assertThat(response.getStatusLine().getStatusCode(), Matchers.is(200));
            final Certificate[] certs = clientCertificates.get();
            assertThat(certs, Matchers.notNullValue());
            assertThat(certs, Matchers.arrayWithSize(1));
            assertThat(certs[0], Matchers.instanceOf(X509Certificate.class));
            final X509Certificate clientCert = (X509Certificate) certs[0];
            assertThat(clientCert.getSubjectDN().getName(), Matchers.is("CN=client"));
            assertThat(clientCert.getIssuerDN().getName(), Matchers.is("CN=Elastic Certificate Tool Autogenerated CA"));
        }
    }

    // Remote pointing at the mock server over https with basic-auth credentials.
    private RemoteInfo getRemoteInfo() {
        return new RemoteInfo("https", server.getAddress().getHostName(), server.getAddress().getPort(), "/",
            new BytesArray("test"), "user", "password", Collections.emptyMap(),
            RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
    }

    /** Asks (but does not require) the client to authenticate with a certificate. */
    @SuppressForbidden(reason = "use http server")
    private static class ClientAuthHttpsConfigurator extends HttpsConfigurator {
        ClientAuthHttpsConfigurator(SSLContext sslContext) {
            super(sslContext);
        }

        @Override
        public void configure(HttpsParameters params) {
            params.setWantClientAuth(true);
        }
    }
}
package com.engine.JComponents; import com.engine.EngineHelpers.EModes; import com.engine.EngineHelpers.EngineMethods; import com.engine.GUIWindows.*; import com.engine.J8Helpers.Extensions.UIThread; import com.engine.Utilities.Settings; import javax.swing.*; import java.awt.*; import java.util.ArrayList; import java.util.Arrays; import static com.engine.EngineHelpers.EBOOLS.ENGINE_IS_PAUSED; import static com.engine.EngineHelpers.EConstants.*; import static com.engine.EngineHelpers.EngineMethods.createEngineInstructionsWindow; import static com.engine.EngineHelpers.EngineMethods.createGraphInstructionsWindow; public class CMenuBar extends JMenuBar { public Color bgColor = new Color(20, 23, 25).brighter(); private static JMenuItem enginepause; private static Font font1 = new Font(Font.SERIF, Font.PLAIN, 21); public static Font menuitemfont = new Font(Font.SERIF, Font.PLAIN, 18); public static ArrayList<JMenu> menus = new ArrayList<>(); public static ArrayList<JMenuItem> menuItems = new ArrayList<>(); public static JRadioButtonMenuItem[] pModes, pTypes, pGravModes; public static ButtonGroup particleModesGroup, particleTypesGroup, particleGravitationGroup; /*Note: Check that accelerators don't affect changing variables*/ public CMenuBar() { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { EException.append(e); } this.setBorder(BorderFactory.createLineBorder(bgColor, 1, false)); this.add(Box.createHorizontalStrut(11)); //File Begin JMenu mnFile = new JMenu("File"); mnFile.setFont(font1); mnFile.setForeground(Color.white); this.add(mnFile); this.add(Box.createHorizontalStrut(11)); menus.add(mnFile); //File End JMenuItem exit = new JMenuItem("Exit"); exit.setFont(font1); exit.addActionListener(e -> UIThread.openUI(QuitWindow::getInstance)); mnFile.add(exit); menuItems.add(exit); //Edit Begin JMenu mnEdit = new JMenu("Edit"); mnEdit.setFont(font1); mnEdit.setForeground(Color.white); this.add(mnEdit); menus.add(mnEdit); 
JMenuItem trimParticleArrays = new JMenuItem("Trim Particle Lists"); trimParticleArrays.addActionListener(e -> EngineMethods.trimParticleArrays()); mnEdit.add(trimParticleArrays); menuItems.add(trimParticleArrays); JMenuItem clearParticleArrays = new JMenuItem("Clear All Lists"); clearParticleArrays.addActionListener(e -> EngineMethods.clearParticleArrays()); mnEdit.add(clearParticleArrays); menuItems.add(clearParticleArrays); //Edit End this.add(Box.createHorizontalStrut(11)); //ShortCuts Begin JMenu mnUIWindows = new JMenu("Short-Cuts"); mnUIWindows.setForeground(Color.white); mnUIWindows.setFont(font1); this.add(mnUIWindows); menus.add(mnUIWindows); JMenuItem optionsMenu = new JMenuItem("Options Menu"); optionsMenu.addActionListener(e -> UIThread.openUI(OptionsMenu::getInstance)); mnUIWindows.add(optionsMenu); menuItems.add(optionsMenu); JMenuItem stats_panel = new JMenuItem("Stats Panel"); stats_panel.addActionListener(e -> UIThread.openUI(StatsPanel::getInstance)); mnUIWindows.add(stats_panel); menuItems.add(stats_panel); JMenuItem sliderUI = new JMenuItem("Slide Editor"); sliderUI.addActionListener(e -> UIThread.openUI(SlideEditor::getInstance)); mnUIWindows.add(sliderUI); menuItems.add(sliderUI); JMenuItem exceptionUI = new JMenuItem("Exception Log"); exceptionUI.addActionListener(e -> UIThread.openUI(EException::getInstance)); mnUIWindows.add(exceptionUI); menuItems.add(exceptionUI); JMenuItem thinkingParticlesUI = new JMenuItem("Color Editor"); thinkingParticlesUI.addActionListener(e -> UIThread.openUI(ColorEditor::getInstance)); mnUIWindows.add(thinkingParticlesUI); menuItems.add(thinkingParticlesUI); JMenuItem particleGraphUI = new JMenuItem("Graph Editor"); particleGraphUI.addActionListener(e -> UIThread.openUI(ParticleGraph::getInstance)); mnUIWindows.add(particleGraphUI); menuItems.add(particleGraphUI); JMenuItem vphysicseditor = new JMenuItem("VPhysics Editor"); vphysicseditor.addActionListener(e -> UIThread.openUI(() -> 
VPhysicsEditor.getInstance(EFrame))); mnUIWindows.add(vphysicseditor); menuItems.add(vphysicseditor); JMenuItem timemachine = new JMenuItem("Color Time Machine"); timemachine.addActionListener(e -> UIThread.openUI(ColorTimeMachine::getInstance)); mnUIWindows.add(timemachine); menuItems.add(timemachine); this.add(Box.createHorizontalStrut(11)); JMenuItem customForces = new JMenuItem("Organic Forces Editor"); customForces.addActionListener(e -> UIThread.openUI(() -> OrganicForces.getInstance(EFrame))); mnUIWindows.add(customForces); menuItems.add(customForces); JMenuItem flowfieldUI = new JMenuItem("Flow Field Editor"); flowfieldUI.addActionListener(e -> UIThread.openUI(() -> FlowFieldUI.getInstance(EFrame))); mnUIWindows.add(flowfieldUI); menuItems.add(flowfieldUI); //ShortCuts End setUpModes(); menuItems.addAll(Arrays.asList(pGravModes)); menuItems.addAll(Arrays.asList(pModes)); menuItems.addAll(Arrays.asList(pTypes)); this.add(Box.createHorizontalStrut(11)); JMenu mnSettings = new JMenu("Settings"); mnSettings.setForeground(Color.white); mnSettings.setFont(font1); this.add(mnSettings); menus.add(mnSettings); enginepause = new JMenuItem(isPaused()); enginepause.addActionListener(e -> { ENGINE_IS_PAUSED.toggleValue(); enginepause.setText(isPaused()); EngineMethods.setEngineTitleState(); }); mnSettings.add(enginepause); menuItems.add(enginepause); JMenuItem settingsSave = new JMenuItem("Save Settings"); settingsSave.addActionListener(e -> Settings.saveSettings()); mnSettings.add(settingsSave); menuItems.add(settingsSave); JMenuItem settingsLoad = new JMenuItem("Load Settings"); settingsLoad.addActionListener(e -> Settings.loadSettings()); mnSettings.add(settingsLoad); menuItems.add(settingsLoad); JMenuItem settingsUI = new JMenuItem("Settings Editor"); settingsUI.addActionListener(e -> UIThread.openUI(SettingsEditor::getInstance)); mnSettings.add(settingsUI); menuItems.add(settingsUI); //Add themes JMenu theme_menu = new JMenu("Themes"); 
theme_menu.getPopupMenu().setBorder(BorderFactory.createLineBorder(bgColor.darker())); theme_menu.setOpaque(true); //Have to set opaque inside another menu mnSettings.add(theme_menu); menuItems.add(theme_menu); JMenuItem theme1 = new JMenuItem("Default"); theme1.addActionListener(e -> Themes.defaultTheme()); theme1.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme1); menuItems.add(theme1); JMenuItem theme2 = new JMenuItem("Midnight Blues"); theme2.addActionListener(e -> Themes.midnightBlues()); theme2.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme2); menuItems.add(theme2); JMenuItem theme3 = new JMenuItem("Dark Mocha"); theme3.addActionListener(e -> Themes.darkMocha()); theme3.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme3); menuItems.add(theme3); JMenuItem theme4 = new JMenuItem("Mild Tangerine"); theme4.addActionListener(e -> Themes.darkTangerine()); theme4.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme4); menuItems.add(theme4); JMenuItem theme5 = new JMenuItem("Serene Sienna"); theme5.addActionListener(e -> Themes.sienna()); theme5.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme5); menuItems.add(theme5); JMenuItem theme6 = new JMenuItem("WinterGreen Dream"); theme6.addActionListener(e -> Themes.winterGreenDream()); theme6.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme6); menuItems.add(theme6); JMenuItem theme7 = new JMenuItem("Vegas Gold"); theme7.addActionListener(e -> Themes.vegasGold()); theme7.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme7); menuItems.add(theme7); JMenuItem theme8 = new JMenuItem("RoseWood"); theme8.addActionListener(e -> Themes.roseWood()); theme8.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme8); menuItems.add(theme8); JMenuItem theme9 = new JMenuItem("Antique Pink"); theme9.addActionListener(e -> Themes.antiquePink()); 
theme9.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme9); menuItems.add(theme9); JMenuItem theme10 = new JMenuItem("Night Violet"); theme10.addActionListener(e -> Themes.nightViolet()); theme10.setBorder(BorderFactory.createLineBorder(bgColor)); theme_menu.add(theme10); menuItems.add(theme10); this.add(Box.createHorizontalStrut(11)); JMenu mnHelp = new JMenu("Help"); mnHelp.setFont(font1); mnHelp.setForeground(Color.white); menus.add(mnHelp); JMenuItem helpInstructions = new JMenuItem("Engine Instructions"); helpInstructions.addActionListener(e -> createEngineInstructionsWindow(EFrame)); mnHelp.add(helpInstructions); menuItems.add(helpInstructions); JMenuItem helpGraphInstructions = new JMenuItem("Graph Instructions"); helpGraphInstructions.addActionListener(e -> createGraphInstructionsWindow(EFrame)); mnHelp.add(helpGraphInstructions); this.add(mnHelp); menuItems.add(helpGraphInstructions); if (System.getProperty("os.name").toLowerCase().contains("windows")) { JMenuItem accessibility = new JMenuItem("On-Screen Keyboard"); accessibility.addActionListener(e -> { try { Runtime.getRuntime().exec("cmd /c osk"); } catch (Exception f) { EException.append(f); } }); mnHelp.add(accessibility); menuItems.add(accessibility); } //Setup design for (JMenu menu : menus) { menu.setFont(font1); menu.getPopupMenu().setBorder(BorderFactory.createLineBorder(bgColor.darker())); menu.setForeground(Color.white); } for (JMenuItem menuItem : menuItems) { menuItem.setOpaque(true); menuItem.setBackground(bgColor); menuItem.setForeground(Color.white); menuItem.setFont(menuitemfont); } updateState(); //updateAllRadios(); } private void setUpModes() { JMenu modes = new JMenu("Modes"); modes.setForeground(Color.white); modes.setFont(font1); this.add(modes); menus.add(modes); JMenu particleModes = new JMenu("Particle Modes"); particleModes.getPopupMenu().setBorder(BorderFactory.createLineBorder(bgColor.darker())); particleModes.setOpaque(true); //Have to set opaque inside 
another menu menuItems.add(particleModes); particleModesGroup = new ButtonGroup(); pModes = new JRadioButtonMenuItem[5]; pModes[0] = new JRadioButtonMenuItem("Normal Mode"); pModes[0].setActionCommand("0"); particleModesGroup.add(pModes[0]); particleModes.add(pModes[0]); pModes[1] = new JRadioButtonMenuItem("Multi Mode"); pModes[1].setActionCommand("1"); particleModesGroup.add(pModes[1]); particleModes.add(pModes[1]); pModes[2] = new JRadioButtonMenuItem("Fireworks Mode"); pModes[2].setActionCommand("2"); particleModesGroup.add(pModes[2]); particleModes.add(pModes[2]); pModes[3] = new JRadioButtonMenuItem("Graph Mode"); pModes[3].setActionCommand("3"); particleModesGroup.add(pModes[3]); particleModes.add(pModes[3]); pModes[4] = new JRadioButtonMenuItem("Ragdoll Mode"); pModes[4].setActionCommand("4"); particleModesGroup.add(pModes[4]); particleModes.add(pModes[4]); for (JRadioButtonMenuItem b : pModes) { if (Integer.parseInt(b.getActionCommand()) == ENGINE_MODE.getValue()) { b.setSelected(true); break; } } for (JRadioButtonMenuItem b : pModes) { b.addActionListener(e -> ENGINE_MODE = EModes.ENGINE_MODES.values()[Integer.parseInt(particleModesGroup.getSelection().getActionCommand())]); } modes.add(particleModes); //// JMenu particleTypes = new JMenu("Particle Types"); particleTypes.getPopupMenu().setBorder(BorderFactory.createLineBorder(bgColor.darker())); particleTypes.setOpaque(true); //Have to set opaque inside another menu menuItems.add(particleTypes); particleTypesGroup = new ButtonGroup(); pTypes = new JRadioButtonMenuItem[9]; pTypes[0] = new JRadioButtonMenuItem("Particle"); pTypes[0].setActionCommand("0"); particleTypesGroup.add(pTypes[0]); particleTypes.add(pTypes[0]); pTypes[1] = new JRadioButtonMenuItem("Gravity Point"); pTypes[1].setActionCommand("1"); particleTypesGroup.add(pTypes[1]); particleTypes.add(pTypes[1]); pTypes[2] = new JRadioButtonMenuItem("Emitter"); pTypes[2].setActionCommand("2"); particleTypesGroup.add(pTypes[2]); 
particleTypes.add(pTypes[2]); pTypes[3] = new JRadioButtonMenuItem("Flux"); pTypes[3].setActionCommand("3"); particleTypesGroup.add(pTypes[3]); particleTypes.add(pTypes[3]); pTypes[4] = new JRadioButtonMenuItem("Q.E.D"); pTypes[4].setActionCommand("4"); particleTypesGroup.add(pTypes[4]); particleTypes.add(pTypes[4]); pTypes[5] = new JRadioButtonMenuItem("Ion"); pTypes[5].setActionCommand("5"); particleTypesGroup.add(pTypes[5]); particleTypes.add(pTypes[5]); pTypes[6] = new JRadioButtonMenuItem("Black Hole"); pTypes[6].setActionCommand("6"); particleTypesGroup.add(pTypes[6]); particleTypes.add(pTypes[6]); pTypes[7] = new JRadioButtonMenuItem("Duplex"); pTypes[7].setActionCommand("7"); particleTypesGroup.add(pTypes[7]); particleTypes.add(pTypes[7]); pTypes[8] = new JRadioButtonMenuItem("Portal"); pTypes[8].setActionCommand("8"); particleTypesGroup.add(pTypes[8]); particleTypes.add(pTypes[8]); for (JRadioButtonMenuItem b : pTypes) { if (Integer.parseInt(b.getActionCommand()) == PARTICLE_TYPE.getValue()) { b.setSelected(true); break; } } for (JRadioButtonMenuItem b : pTypes) { b.addActionListener(e -> PARTICLE_TYPE = EModes.PARTICLE_TYPES.values()[Integer.parseInt(particleTypesGroup.getSelection().getActionCommand())]); } modes.add(particleTypes); /// JMenu gravitationModes = new JMenu("Gravitation Modes"); gravitationModes.getPopupMenu().setBorder(BorderFactory.createLineBorder(bgColor.darker())); gravitationModes.setOpaque(true); //Have to set opaque inside another menu menuItems.add(gravitationModes); particleGravitationGroup = new ButtonGroup(); pGravModes = new JRadioButtonMenuItem[9]; pGravModes[0] = new JRadioButtonMenuItem("Default Force"); pGravModes[0].setActionCommand("0"); particleGravitationGroup.add(pGravModes[0]); gravitationModes.add(pGravModes[0]); pGravModes[1] = new JRadioButtonMenuItem("Cosine and Sine"); pGravModes[1].setActionCommand("1"); particleGravitationGroup.add(pGravModes[1]); gravitationModes.add(pGravModes[1]); pGravModes[2] = new 
JRadioButtonMenuItem("Arc Tangent"); pGravModes[2].setActionCommand("2"); particleGravitationGroup.add(pGravModes[2]); gravitationModes.add(pGravModes[2]); pGravModes[3] = new JRadioButtonMenuItem("Horizontal Wave"); pGravModes[3].setActionCommand("3"); particleGravitationGroup.add(pGravModes[3]); gravitationModes.add(pGravModes[3]); pGravModes[4] = new JRadioButtonMenuItem("Vertical Wave"); pGravModes[4].setActionCommand("4"); particleGravitationGroup.add(pGravModes[4]); gravitationModes.add(pGravModes[4]); pGravModes[5] = new JRadioButtonMenuItem("Spirals"); pGravModes[5].setActionCommand("5"); particleGravitationGroup.add(pGravModes[5]); gravitationModes.add(pGravModes[5]); pGravModes[6] = new JRadioButtonMenuItem("Repellent"); pGravModes[6].setActionCommand("6"); particleGravitationGroup.add(pGravModes[6]); gravitationModes.add(pGravModes[6]); pGravModes[7] = new JRadioButtonMenuItem("Organic"); pGravModes[7].setActionCommand("7"); particleGravitationGroup.add(pGravModes[7]); gravitationModes.add(pGravModes[7]); pGravModes[8] = new JRadioButtonMenuItem("Flow Field"); pGravModes[8].setActionCommand("8"); particleGravitationGroup.add(pGravModes[8]); gravitationModes.add(pGravModes[8]); for (JRadioButtonMenuItem b : pGravModes) { if (Integer.parseInt(b.getActionCommand()) == PARTICLE_GRAVITATION_MODE.getValue()) { b.setSelected(true); break; } } for (JRadioButtonMenuItem b : pGravModes) { b.addActionListener(e -> PARTICLE_GRAVITATION_MODE = EModes.GRAVITATION_MODES.values()[Integer.parseInt(particleGravitationGroup.getSelection().getActionCommand())]); } modes.add(gravitationModes); this.add(modes); } public static void updateParticleModesRadios() { for (JRadioButtonMenuItem b : pModes) { if (Integer.parseInt(b.getActionCommand()) == ENGINE_MODE.getValue()) { b.setSelected(true); break; } } } public static void updateParticleTypesRadios() { for (JRadioButtonMenuItem b : pTypes) { if (Integer.parseInt(b.getActionCommand()) == PARTICLE_TYPE.getValue()) { 
b.setSelected(true); break; } } } public static void updateGravitationModesRadios() { for (JRadioButtonMenuItem b : pGravModes) { if (Integer.parseInt(b.getActionCommand()) == PARTICLE_GRAVITATION_MODE.getValue()) { b.setSelected(true); break; } } } public static void updateAllRadios() { updateParticleModesRadios(); updateParticleTypesRadios(); updateGravitationModesRadios(); updateState(); } private static String isPaused() { return ENGINE_IS_PAUSED.value() ? "Resume Engine" : "Pause Engine"; } public static void updateState() { enginepause.setText(isPaused()); } protected void paintComponent(Graphics g) { super.paintComponent(g); g.setColor(bgColor); g.fillRect(0, 0, getWidth(), getHeight()); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.samza.logging.log4j;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.samza.config.Config;
import org.apache.samza.config.MapConfig;
import org.apache.samza.logging.log4j.serializers.LoggingEventJsonSerde;
import org.apache.samza.logging.log4j.serializers.LoggingEventStringSerde;
import org.apache.samza.logging.log4j.serializers.LoggingEventStringSerdeFactory;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@link StreamAppender}: serde selection, lazy system initialization,
 * optional stream creation on setup, error resilience of the transfer thread,
 * and queue-timeout/drop behavior. All tests route messages through the
 * project-local MockSystemProducer / MockSystemAdmin / MockSystemFactory.
 *
 * NOTE(review): tests rely on the "samza.container.name" system property to
 * decide whether the appender initializes eagerly — presumably names starting
 * with "samza-container" mean "in container"; confirm against StreamAppender.
 */
public class TestStreamAppender {

  private static final Logger LOG = Logger.getLogger(TestStreamAppender.class);

  /**
   * Reset shared static mock state between tests so listeners/messages/specs
   * from one test cannot leak into the next.
   */
  @After
  public void tearDown() {
    LOG.removeAllAppenders();
    MockSystemProducer.listeners.clear();
    MockSystemProducer.messagesReceived.clear();
    MockSystemAdmin.createdStreamSpec = null;
  }

  /** Without any serde config, the appender should fall back to the JSON serde. */
  @Test
  public void testDefaultSerde() {
    System.setProperty("samza.container.name", "samza-container-1");
    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender();
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    // trigger system set up by sending a log
    LOG.info("log message");

    assertNotNull(systemProducerAppender.getSerde());
    assertEquals(LoggingEventJsonSerde.class, systemProducerAppender.getSerde().getClass());
  }

  /** A per-stream msg.serde config should override the default JSON serde. */
  @Test
  public void testNonDefaultSerde() {
    System.setProperty("samza.container.name", "samza-container-1");
    String streamName = StreamAppender.getStreamName("log4jTest", "1");
    Map<String, String> map = new HashMap<>();
    map.put("job.name", "log4jTest");
    map.put("job.id", "1");
    map.put("serializers.registry.log4j-string.class", LoggingEventStringSerdeFactory.class.getCanonicalName());
    map.put("systems.mock.samza.factory", MockSystemFactory.class.getCanonicalName());
    map.put("systems.mock.streams." + streamName + ".samza.msg.serde", "log4j-string");
    map.put("task.log4j.system", "mock");
    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender(new MapConfig(map));
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    // trigger system set up by sending a log
    LOG.info("log message");

    assertNotNull(systemProducerAppender.getSerde());
    assertEquals(LoggingEventStringSerde.class, systemProducerAppender.getSerde().getClass());
  }

  /** Happy path: in a container, logged messages flow through to the system producer. */
  @Test
  public void testSystemProducerAppenderInContainer() throws InterruptedException {
    System.setProperty("samza.container.name", "samza-container-1");

    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender();
    PatternLayout layout = new PatternLayout();
    layout.setConversionPattern("%m");
    systemProducerAppender.setLayout(layout);
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    List<String> messages = Lists.newArrayList("testing1", "testing2");
    logAndVerifyMessages(messages);
  }

  /**
   * When the appender is not ready to initialize, messages logged before
   * explicit setup are dropped; messages logged after setup are delivered.
   */
  @Test
  public void testSystemProducerAppenderNotInitialized() throws InterruptedException {
    System.setProperty("samza.container.name", "samza-job-coordinator");

    // add a counter to make sure that the initial message doesn't get produced
    AtomicInteger numMessagesProduced = new AtomicInteger(0);
    MockSystemProducer.listeners.add((source, envelope) -> numMessagesProduced.incrementAndGet());

    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender(baseConfig(), false);
    PatternLayout layout = new PatternLayout();
    layout.setConversionPattern("%m");
    systemProducerAppender.setLayout(layout);
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    LOG.info("no-received"); // System isn't initialized yet, so this message should be dropped

    // explicitly trigger initialization to test that new messages do get sent to the stream
    systemProducerAppender.setupSystem();
    systemProducerAppender.systemInitialized = true;

    List<String> messages = Lists.newArrayList("testing3", "testing4");
    logAndVerifyMessages(messages);
    // only the post-setup messages were produced; "no-received" was dropped
    assertEquals(messages.size(), numMessagesProduced.get());
  }

  /** Stream creation is opt-in: by default no StreamSpec is created on setup. */
  @Test
  public void testNoStreamCreationUponSetupByDefault() {
    System.setProperty("samza.container.name", "samza-container-1");
    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender();
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    // trigger system set up by sending a log
    LOG.info("log message");

    Assert.assertNull(MockSystemAdmin.createdStreamSpec);
  }

  /** With create.stream.enabled, setup creates the log stream with 1 partition (default container count). */
  @Test
  public void testStreamCreationUpSetupWhenEnabled() {
    System.setProperty("samza.container.name", "samza-container-1");
    MapConfig mapConfig = new MapConfig(ImmutableMap.of(
        "task.log4j.create.stream.enabled", "true", // Enable explicit stream creation
        "job.name", "log4jTest",
        "job.id", "1",
        "systems.mock.samza.factory", MockSystemFactory.class.getCanonicalName(),
        "task.log4j.system", "mock"));
    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender(mapConfig);
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    // trigger system set up by sending a log
    LOG.info("log message");

    Assert.assertEquals("__samza_log4jTest_1_logs", MockSystemAdmin.createdStreamSpec.getPhysicalName());
    // job.container.count defaults to 1
    Assert.assertEquals(1, MockSystemAdmin.createdStreamSpec.getPartitionCount());
  }

  /** The created stream's partition count follows job.container.count when configured. */
  @Test
  public void testStreamCreationUpSetupWithJobContainerCountConfigured() {
    System.setProperty("samza.container.name", "samza-container-1");
    MapConfig mapConfig = new MapConfig(new ImmutableMap.Builder<String, String>()
        .put("task.log4j.create.stream.enabled", "true") // Enable explicit stream creation
        .put("job.name", "log4jTest")
        .put("job.id", "1")
        .put("systems.mock.samza.factory", MockSystemFactory.class.getCanonicalName())
        .put("task.log4j.system", "mock")
        .put("job.container.count", "4")
        .build());
    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender(mapConfig);
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    // trigger system set up by sending a log
    LOG.info("log message");

    Assert.assertEquals("__samza_log4jTest_1_logs", MockSystemAdmin.createdStreamSpec.getPhysicalName());
    Assert.assertEquals(4, MockSystemAdmin.createdStreamSpec.getPartitionCount());
  }

  /** An explicit setPartitionCount overrides the container-count-derived default. */
  @Test
  public void testStreamCreationUpSetupWithPartitionCountConfigured() {
    System.setProperty("samza.container.name", "samza-container-1");
    MapConfig mapConfig = new MapConfig(ImmutableMap.of(
        "task.log4j.create.stream.enabled", "true", // Enable explicit stream creation
        "job.name", "log4jTest",
        "job.id", "1",
        "systems.mock.samza.factory", MockSystemFactory.class.getCanonicalName(),
        "task.log4j.system", "mock"));
    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender(mapConfig);
    systemProducerAppender.setPartitionCount(8);
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    // trigger system set up by sending a log
    LOG.info("log message");

    Assert.assertEquals("__samza_log4jTest_1_logs", MockSystemAdmin.createdStreamSpec.getPhysicalName());
    Assert.assertEquals(8, MockSystemAdmin.createdStreamSpec.getPartitionCount());
  }

  /**
   * A RuntimeException thrown while producing one message must not kill the
   * background transfer thread — the remaining messages must still be sent.
   */
  @Test
  public void testExceptionsDoNotKillTransferThread() throws InterruptedException {
    System.setProperty("samza.container.name", "samza-container-1");

    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender();
    PatternLayout layout = new PatternLayout();
    layout.setConversionPattern("%m");
    systemProducerAppender.setLayout(layout);
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    List<String> messages = Lists.newArrayList("testing5", "testing6", "testing7");

    // Set up latch
    final CountDownLatch allMessagesSent = new CountDownLatch(messages.size());
    MockSystemProducer.listeners.add((source, envelope) -> {
      allMessagesSent.countDown();
      if (allMessagesSent.getCount() == messages.size() - 1) {
        throw new RuntimeException(); // Throw on the first message
      }
    });

    // Log the messages
    messages.forEach(LOG::info);

    // Wait for messages
    assertTrue("Thread did not send all messages. Count: " + allMessagesSent.getCount(),
        allMessagesSent.await(60, TimeUnit.SECONDS));
  }

  /**
   * Fill the appender queue beyond capacity while the producer is blocked;
   * the appender should time out, count the dropped messages, and still send
   * the messages that fit once the producer unblocks.
   */
  @Test
  public void testQueueTimeout() throws InterruptedException {
    System.setProperty("samza.container.name", "samza-container-1");

    MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender();
    systemProducerAppender.queueTimeoutS = 1; // short timeout so the test runs fast
    PatternLayout layout = new PatternLayout();
    layout.setConversionPattern("%m");
    systemProducerAppender.setLayout(layout);
    systemProducerAppender.activateOptions();
    LOG.addAppender(systemProducerAppender);

    int extraMessageCount = 5;
    int expectedMessagesSent = extraMessageCount - 1; // -1 because when the queue is drained there is one additional message that couldn't be added
    List<String> messages = new ArrayList<>(StreamAppender.DEFAULT_QUEUE_SIZE + extraMessageCount);
    for (int i = 0; i < StreamAppender.DEFAULT_QUEUE_SIZE + extraMessageCount; i++) {
      messages.add(String.valueOf(i));
    }

    // Set up latch
    final CountDownLatch allMessagesSent = new CountDownLatch(expectedMessagesSent); // We expect to drop all but the extra messages
    final CountDownLatch waitForTimeout = new CountDownLatch(1);
    MockSystemProducer.listeners.add((source, envelope) -> {
      allMessagesSent.countDown();
      try {
        // block the transfer thread so the queue backs up and times out
        waitForTimeout.await();
      } catch (InterruptedException e) {
        fail("Test could not run properly because of a thread interrupt.");
      }
    });

    // Log the messages. This is where the timeout will happen!
    messages.forEach(LOG::info);
    assertEquals(messages.size() - expectedMessagesSent, systemProducerAppender.metrics.logMessagesDropped.getCount());

    // Allow all the rest of the messages to send.
    waitForTimeout.countDown();

    // Wait for messages
    assertTrue("Thread did not send all messages. Count: " + allMessagesSent.getCount(),
        allMessagesSent.await(60, TimeUnit.SECONDS));
    assertEquals(expectedMessagesSent, MockSystemProducer.messagesReceived.size());
  }

  /**
   * Logs each message, waits for the mock producer to receive them all, then
   * verifies count and that each received payload contains the expected
   * {@code "message":"..."} JSON segment, in order.
   */
  private void logAndVerifyMessages(List<String> messages) throws InterruptedException {
    // Set up latch
    final CountDownLatch allMessagesSent = new CountDownLatch(messages.size());
    MockSystemProducer.listeners.add((source, envelope) -> allMessagesSent.countDown());

    // Log the messages
    messages.forEach(LOG::info);

    // Wait for messages
    assertTrue("Timeout while waiting for StreamAppender to send all messages. Count: " + allMessagesSent.getCount(),
        allMessagesSent.await(60, TimeUnit.SECONDS));

    // Verify
    assertEquals(messages.size(), MockSystemProducer.messagesReceived.size());
    for (int i = 0; i < messages.size(); i++) {
      assertTrue("Message mismatch at index " + i,
          new String((byte[]) MockSystemProducer.messagesReceived.get(i)).contains(asJsonMessageSegment(messages.get(i))));
    }
  }

  /** Builds the JSON fragment the serialized logging event must contain. */
  private String asJsonMessageSegment(String message) {
    return String.format("\"message\":\"%s\"", message);
  }

  /** Minimal config wiring the mock system as the log4j system. */
  private static Config baseConfig() {
    Map<String, String> map = new HashMap<>();
    map.put("job.name", "log4jTest");
    map.put("systems.mock.samza.factory", MockSystemFactory.class.getCanonicalName());
    map.put("task.log4j.system", "mock");
    return new MapConfig(map);
  }

  /**
   * Mock class which overrides config-related methods in {@link StreamAppender} for testing.
   */
  private static class MockSystemProducerAppender extends StreamAppender {
    private final Config config;
    // controls the readyToInitialize() override so tests can defer setup
    private final boolean readyToInitialize;

    public MockSystemProducerAppender() {
      this(baseConfig(), true);
    }

    public MockSystemProducerAppender(Config config) {
      this(config, true);
    }

    public MockSystemProducerAppender(Config config, boolean readyToInitialize) {
      this.config = config;
      this.readyToInitialize = readyToInitialize;
    }

    @Override
    boolean readyToInitialize() {
      return this.readyToInitialize;
    }

    @Override
    protected Config getConfig() {
      return config;
    }
  }
}
package embedding; import java.util.*; /** * Build a layered graph and sample node sequence from it. * We use word2vec to learn the node embedding, and thus * each node name is String type. * * Created by kok on 1/2/17. */ public class LayeredGraph { public static Random rnd = new Random(); public static int numLayer = 8; static public class Edge { public Vertex from; public Vertex to; public double weight; public Edge(Vertex f, Vertex t, double w) { from = f; to = t; weight = w; } } static public class Vertex { public String name; public int id; public List<Edge> edgesOut; public double outDegree; int[] aliasTable; double[] probTable; public Vertex(String n, int i) { name = n; id = i; edgesOut = new ArrayList<>(); outDegree = 0; } public void addOutEdge(Edge e) { edgesOut.add(e); outDegree += e.weight; } /** * Preprocess to generate the alias table. This enables the O(1) random sampling. */ public void initiateAliasTable() { int k = edgesOut.size(); probTable = new double[k]; aliasTable = new int[k]; Arrays.fill(aliasTable, -1); for (int i = 0; i < k; i++) { double w = edgesOut.get(i).weight; probTable[i] = k * w / outDegree; } for (int l1 = 0; l1 < k; l1++) { if (probTable[l1] != 1.0 && aliasTable[l1] == -1) { for (int l2 = 0; l2 < k; l2++) { if (l2 != l1 && aliasTable[l2] == -1) { if (probTable[l1] > 1.0 && probTable[l2] < 1.0) { aliasTable[l2] = l1; probTable[l1] -= 1 - probTable[l2]; } else if (probTable[l1] < 1.0 && probTable[l2] > 1.0) { aliasTable[l1] = l2; probTable[l2] -= 1 - probTable[l1]; // l1 is exactly full break; } } } } } } /** * O(V) sample next vertex * @deprecated this is slow, use the {@link Vertex#sampleNextVertex()} instead. * @return next vertex */ public Vertex sampleNextVertex_OV() { double s = rnd.nextDouble() * outDegree; double cnt = 0; for (Edge e : edgesOut) { cnt += e.weight; if (cnt >= s) return e.to; } return null; } /** * O(1) sample next vertex with alias table. 
* @return next Vertex */ public Vertex sampleNextVertex() { int k = edgesOut.size(); if (k == 0) return null; double x = rnd.nextDouble(); int i = (int) (x * k); double y = x * k - i; if (y < probTable[i]) return edgesOut.get(i).to; else return edgesOut.get(aliasTable[i]).to; } /** * O(1) sample next edge with alias table. [test purpose] * @param x the random number from range [0,1) * @return next vertex */ public Vertex sampleNextVertex(double x) { int k = edgesOut.size(); int i = (int) (x * k); double y = x * k - i; if (y < probTable[i]) return edgesOut.get(i).to; else return edgesOut.get(aliasTable[i]).to; } } /** * ================================================================== * LayeredGraph starts here * ================================================================== */ public List<Edge> allEdges; public Map<String, Vertex> allVertices; public List<Vertex> sourceVertices; protected double sourceWeightSum; protected double[] probTable; protected int[] aliasTable; public LayeredGraph() { allEdges = new ArrayList<>(); allVertices = new HashMap<>(); sourceVertices = new ArrayList<>(); sourceWeightSum = 0; } public void addEdge(String fn, String tn, double weight) { Vertex f, t; if (! allVertices.containsKey(fn)) { f = new Vertex(fn, allVertices.size()); allVertices.put(fn, f); } else f = allVertices.get(fn); if (! allVertices.containsKey(tn)) { t = new Vertex(tn, allVertices.size()); allVertices.put(tn, t); } else t = allVertices.get(tn); Edge e = new Edge(f, t, weight); allEdges.add(e); f.addOutEdge(e); } /** * addSourceVertex should be called after all edges are added. * @param vn */ public void addSourceVertex(String vn) { Vertex v; if (! allVertices.containsKey(vn)) v = new Vertex(vn, allVertices.size()); else v = allVertices.get(vn); sourceVertices.add(v); sourceWeightSum += v.outDegree; } /** * initialize alias tables for all vertices. * This should be called after all edges are added. 
*/ public void initiateAliasTables() { // initiate fast edge sampling allVertices.values().stream().forEach(v -> v.initiateAliasTable()); // initiate fast source vertices sampling int k = sourceVertices.size(); aliasTable = new int[k]; probTable = new double[k]; Arrays.fill(aliasTable, -1); for (int i = 0; i < k; i++) { double w = sourceVertices.get(i).outDegree; probTable[i] = k * w / sourceWeightSum; } for (int l1 = 0; l1 < k; l1++) { if (probTable[l1] != 1.0 && aliasTable[l1] == -1) { for (int l2 = 0; l2 < k; l2++) { if (l2 != l1 && aliasTable[l2] == -1) { if (probTable[l1] > 1.0 && probTable[l2] < 1.0) { aliasTable[l2] = l1; probTable[l1] -= 1 - probTable[l2]; } else if (probTable[l1] < 1.0 && probTable[l2] > 1.0) { aliasTable[l1] = l2; probTable[l2] -= 1 - probTable[l1]; // l1 is exactly full break; } } } } } } /** * Use alias table method to sample vertex sequence. O(1) method * @return */ public List<String> sampleVertexSequence() { LinkedList<String> seq = new LinkedList<>(); double x = rnd.nextDouble(); int k = sourceVertices.size(); int i = (int) (x * k); double y = x * k - i; if ( y < probTable[i]) seq.add(sourceVertices.get(i).name); else seq.add(sourceVertices.get(aliasTable[i]).name); while (seq.size() < numLayer) { Vertex v = allVertices.get(seq.getLast()); Vertex nn = v.sampleNextVertex(); if (nn == null) break; seq.add(nn.name); } return seq; } /** * Use O(v) method to sample vertex sequence. * @deprecated this is slow. Use this only for comparison purpose with the better method * {@link LayeredGraph#sampleVertexSequence()}. 
* @return */ public List<String> sampleVertexSequence_OV() { double s = rnd.nextDouble() * sourceWeightSum; LinkedList<String> seq = new LinkedList<>(); double cnt = 0; for (Vertex v : sourceVertices) { cnt += v.outDegree; if (cnt >= s) { seq.add(v.name); break; } } while (seq.size() < numLayer) { Vertex v = allVertices.get(seq.getLast()); Vertex nn = v.sampleNextVertex_OV(); if (nn == null) break; seq.add(nn.name); } return seq; } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.rollup; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation.InternalBucket; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation.SingleValue; import org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.aggregations.metrics.InternalMax; import org.elasticsearch.search.aggregations.metrics.InternalMin; 
import org.elasticsearch.search.aggregations.metrics.InternalSum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.xpack.core.rollup.RollupField; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; /** * This class contains static utilities that combine the responses from an msearch * with rollup + non-rollup agg trees into a single, regular search response * representing the union * */ public class RollupResponseTranslator { private static final Logger logger = Loggers.getLogger(RollupResponseTranslator.class); /** * Verifies a live-only search response. Essentially just checks for failure then returns * the response since we have no work to do */ public static SearchResponse verifyResponse(MultiSearchResponse.Item normalResponse) { if (normalResponse.isFailure()) { throw new RuntimeException(normalResponse.getFailureMessage(), normalResponse.getFailure()); } return normalResponse.getResponse(); } /** * Translates a rollup-only search response back into the expected convention. 
Similar to * {@link #combineResponses(MultiSearchResponse.Item[], InternalAggregation.ReduceContext)} except it only * has to deal with the rollup response (no live response) * * See {@link #combineResponses(MultiSearchResponse.Item[], InternalAggregation.ReduceContext)} for more details * on the translation conventions */ public static SearchResponse translateResponse(MultiSearchResponse.Item[] rolledMsearch, InternalAggregation.ReduceContext reduceContext) { List<SearchResponse> responses = Arrays.stream(rolledMsearch) .map(item -> { if (item.isFailure()) { throw new RuntimeException(item.getFailureMessage(), item.getFailure()); } return item.getResponse(); }).collect(Collectors.toList()); return doCombineResponse(null, responses, reduceContext); } /** * Combines an msearch with rollup + live aggregations into a SearchResponse * representing the union of the two responses. The response format is identical to * a non-rollup search response (aka a "normal aggregation" response). * * If the MSearch Response returns the following: * * <pre>{@code * [ * { * "took":228, * "timed_out":false, * "_shards":{...}, * "hits":{...}, * "aggregations":{ * "histo":{ * "buckets":[ * { * "key_as_string":"2017-05-15T00:00:00.000Z", * "key":1494806400000, * "doc_count":1, * "the_max":{ * "value":1.0 * } * } * ] * } * } * }, * { * "took":205, * "timed_out":false, * "_shards":{...}, * "hits":{...}, * "aggregations":{ * "filter_histo":{ * "doc_count":1, * "histo":{ * "buckets":[ * { * "key_as_string":"2017-05-14T00:00:00.000Z", * "key":1494720000000, * "doc_count":1, * "the_max":{ * "value":19995.0 * }, * "histo._count":{ * "value":1.0E9 * } * } * ] * } * } * } * } * }</pre> * * It would be collapsed into: * * <pre>{@code * { * "took": 228, * "timed_out": false, * "_shards": {...}, * "hits": {...}, * "aggregations": { * "histo": { * "buckets": [ * { * "key_as_string": "2017-05-14T00:00:00.000Z", * "key": 1494720000000, * "doc_count": 1000000000, * "the_max": { * "value": 19995 * } * }, 
* { * "key_as_string": "2017-05-15T00:00:00.000Z", * "key": 1494806400000, * "doc_count": 1, * "the_max": { * "value": 1 * } * } * ] * } * } * } * }</pre> * * It essentially takes the conventions listed in {@link RollupRequestTranslator} and processes them * so that the final product looks like a regular aggregation response, allowing it to be * reduced/merged into the response from the un-rolled index * * @param msearchResponses The responses from the msearch, where the first response is the live-index response */ public static SearchResponse combineResponses(MultiSearchResponse.Item[] msearchResponses, InternalAggregation.ReduceContext reduceContext) { boolean liveMissing = false; assert msearchResponses.length >= 2; // The live response is always first MultiSearchResponse.Item liveResponse = msearchResponses[0]; if (liveResponse.isFailure()) { Exception e = liveResponse.getFailure(); // If we have a rollup response we can tolerate a missing live response if (e instanceof IndexNotFoundException) { logger.warn("\"Live\" index not found during rollup search.", e); liveMissing = true; } else { throw new RuntimeException(liveResponse.getFailureMessage(), liveResponse.getFailure()); } } List<SearchResponse> rolledResponses = Arrays.stream(msearchResponses) .skip(1) .map(item -> { if (item.isFailure()) { Exception e = item.getFailure(); // If we have a normal response we can tolerate a missing rollup response, although it theoretically // should be handled by a different code path (verifyResponse) if (e instanceof IndexNotFoundException) { logger.warn("Rollup index not found during rollup search.", e); } else { throw new RuntimeException(item.getFailureMessage(), item.getFailure()); } return null; } else { return item.getResponse(); } }).filter(Objects::nonNull).collect(Collectors.toList()); // If we only have a live index left, process it directly if (rolledResponses.isEmpty() && liveMissing == false) { return verifyResponse(liveResponse); } else if 
(rolledResponses.isEmpty() && liveMissing) { throw new RuntimeException("No indices (live or rollup) found during rollup search"); } return doCombineResponse(liveResponse.getResponse(), rolledResponses, reduceContext); } private static SearchResponse doCombineResponse(SearchResponse liveResponse, List<SearchResponse> rolledResponses, InternalAggregation.ReduceContext reduceContext) { final InternalAggregations liveAggs = liveResponse != null ? (InternalAggregations)liveResponse.getAggregations() : InternalAggregations.EMPTY; int missingRollupAggs = rolledResponses.stream().mapToInt(searchResponse -> { if (searchResponse == null || searchResponse.getAggregations() == null || searchResponse.getAggregations().asList().size() == 0) { return 1; } return 0; }).sum(); // We had no rollup aggs, so there is nothing to process if (missingRollupAggs == rolledResponses.size()) { // Return an empty response, but make sure we include all the shard, failure, etc stats return mergeFinalResponse(liveResponse, rolledResponses, InternalAggregations.EMPTY); } else if (missingRollupAggs > 0 && missingRollupAggs != rolledResponses.size()) { // We were missing some but not all the aggs, unclear how to handle this. Bail. throw new RuntimeException("Expected to find aggregations in rollup response, but none found."); } // The combination process returns a tree that is identical to the non-rolled // which means we can use aggregation's reduce method to combine, just as if // it was a result from another shard InternalAggregations currentTree = new InternalAggregations(Collections.emptyList()); for (SearchResponse rolledResponse : rolledResponses) { List<InternalAggregation> unrolledAggs = new ArrayList<>(rolledResponse.getAggregations().asList().size()); for (Aggregation agg : rolledResponse.getAggregations()) { // We expect a filter agg here because the rollup convention is that all translated aggs // will start with a filter, containing various agg-specific predicates. 
If there // *isn't* a filter agg here, something has gone very wrong! if ((agg instanceof InternalFilter) == false) { throw new RuntimeException("Expected [" +agg.getName() + "] to be a FilterAggregation, but was [" + agg.getClass().getSimpleName() + "]"); } unrolledAggs.addAll(unrollAgg(((InternalFilter)agg).getAggregations(), liveAggs, currentTree)); } // Iteratively merge in each new set of unrolled aggs, so that we can identify/fix overlapping doc_counts // in the next round of unrolling InternalAggregations finalUnrolledAggs = new InternalAggregations(unrolledAggs); currentTree = InternalAggregations.reduce(Arrays.asList(currentTree, finalUnrolledAggs), new InternalAggregation.ReduceContext(reduceContext.bigArrays(), reduceContext.scriptService(), true)); } // Add in the live aggregations if they exist if (liveAggs.asList().size() != 0) { currentTree = InternalAggregations.reduce(Arrays.asList(currentTree, liveAggs), new InternalAggregation.ReduceContext(reduceContext.bigArrays(), reduceContext.scriptService(), true)); } return mergeFinalResponse(liveResponse, rolledResponses, currentTree); } private static SearchResponse mergeFinalResponse(SearchResponse liveResponse, List<SearchResponse> rolledResponses, InternalAggregations aggs) { int totalShards = rolledResponses.stream().mapToInt(SearchResponse::getTotalShards).sum(); int sucessfulShards = rolledResponses.stream().mapToInt(SearchResponse::getSuccessfulShards).sum(); int skippedShards = rolledResponses.stream().mapToInt(SearchResponse::getSkippedShards).sum(); long took = rolledResponses.stream().mapToLong(r -> r.getTook().getMillis()).sum() ; boolean isTimedOut = rolledResponses.stream().anyMatch(SearchResponse::isTimedOut); boolean isTerminatedEarly = rolledResponses.stream() .filter(r -> r.isTerminatedEarly() != null) .anyMatch(SearchResponse::isTerminatedEarly); int numReducePhases = rolledResponses.stream().mapToInt(SearchResponse::getNumReducePhases).sum(); if (liveResponse != null) { totalShards += 
liveResponse.getTotalShards(); sucessfulShards += liveResponse.getSuccessfulShards(); skippedShards += liveResponse.getSkippedShards(); took = Math.max(took, liveResponse.getTook().getMillis()); isTimedOut = isTimedOut && liveResponse.isTimedOut(); isTerminatedEarly = isTerminatedEarly && liveResponse.isTerminatedEarly(); numReducePhases += liveResponse.getNumReducePhases(); } InternalSearchResponse combinedInternal = new InternalSearchResponse(SearchHits.empty(), aggs, null, null, isTimedOut, isTerminatedEarly, numReducePhases); // Shard failures are ignored atm, so returning an empty array is fine return new SearchResponse(combinedInternal, null, totalShards, sucessfulShards, skippedShards, took, ShardSearchFailure.EMPTY_ARRAY, rolledResponses.get(0).getClusters()); } /** * Takes an aggregation with rollup conventions and unrolls into a "normal" agg tree * * @param rolled The rollup aggregation that we wish to unroll * @param original The unrolled, "live" aggregation (if it exists) that matches the current rolled aggregation * * @return An unrolled aggregation that mimics the structure of `base`, allowing reduction */ private static List<InternalAggregation> unrollAgg(InternalAggregations rolled, InternalAggregations original, InternalAggregations currentTree) { return rolled.asList().stream() .filter(subAgg -> !subAgg.getName().endsWith("." + RollupField.COUNT_FIELD)) .map(agg -> { // During the translation process, some aggregations' doc_counts are stored in accessory // `sum` metric aggs, so we may need to extract that. Unfortunately, structure of multibucket vs // leaf metric is slightly different; multibucket count is stored per-bucket in a sub-agg, while // metric is "next" to the metric as a sibling agg. // // So we only look for a count if this is not a multibucket, as multibuckets will handle // the doc_count themselves on a per-bucket basis. 
// long count = -1; if (agg instanceof InternalMultiBucketAggregation == false) { count = getAggCount(agg, rolled.getAsMap()); } return unrollAgg((InternalAggregation)agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); }).collect(Collectors.toList()); } /** * Takes an aggregation with rollup conventions and unrolls into a "normal" agg tree * * @param rolled The rollup aggregation that we wish to unroll * @param originalAgg The unrolled, "live" aggregation (if it exists) that matches the current rolled aggregation * @param count The doc_count for `rolled`, required by some aggs (e.g. avg) * * @return An unrolled aggregation that mimics the structure of base, allowing reduction */ protected static InternalAggregation unrollAgg(InternalAggregation rolled, InternalAggregation originalAgg, InternalAggregation currentTree, long count) { if (rolled instanceof InternalMultiBucketAggregation) { return unrollMultiBucket((InternalMultiBucketAggregation) rolled, (InternalMultiBucketAggregation) originalAgg, (InternalMultiBucketAggregation) currentTree); } else if (rolled instanceof SingleValue) { return unrollMetric((SingleValue) rolled, count); } else { throw new RuntimeException("Unable to unroll aggregation tree. Aggregation [" + rolled.getName() + "] is of type [" + rolled.getClass().getSimpleName() + "] which is " + "currently unsupported."); } } /** * Unrolls Multibucket aggregations (e.g. terms, histograms, etc). This overload signature should be * called by other internal methods in this class, rather than directly calling the per-type methods. */ @SuppressWarnings("unchecked") private static InternalAggregation unrollMultiBucket(InternalMultiBucketAggregation rolled, InternalMultiBucketAggregation original, InternalMultiBucketAggregation currentTree) { // The only thing unique between all the multibucket agg is the type of bucket they // need, so this if/else simply creates specialized closures that return the appropriate // bucket type. 
// Otherwise the heavy-lifting is in
// {@link #unrollMultiBucket(InternalMultiBucketAggregation, InternalMultiBucketAggregation, TriFunction)}
if (rolled instanceof InternalDateHistogram) {
    return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> {
        long key = ((InternalDateHistogram) rolled).getKey(bucket).longValue();
        DocValueFormat formatter = ((InternalDateHistogram.Bucket) bucket).getFormatter();
        assert bucketCount >= 0;
        return new InternalDateHistogram.Bucket(key, bucketCount,
                ((InternalDateHistogram.Bucket) bucket).getKeyed(), formatter, subAggs);
    });
} else if (rolled instanceof InternalHistogram) {
    return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> {
        long key = ((InternalHistogram) rolled).getKey(bucket).longValue();
        DocValueFormat formatter = ((InternalHistogram.Bucket) bucket).getFormatter();
        assert bucketCount >= 0;
        return new InternalHistogram.Bucket(key, bucketCount,
                ((InternalHistogram.Bucket) bucket).getKeyed(), formatter, subAggs);
    });
} else if (rolled instanceof StringTerms) {
    return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> {
        BytesRef key = new BytesRef(bucket.getKeyAsString().getBytes(StandardCharsets.UTF_8));
        assert bucketCount >= 0;
        //TODO expose getFormatter(), keyed upstream in Core
        return new StringTerms.Bucket(key, bucketCount, subAggs, false, 0, DocValueFormat.RAW);
    });
} else if (rolled instanceof LongTerms) {
    return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> {
        long key = (long) bucket.getKey();
        assert bucketCount >= 0;
        //TODO expose getFormatter(), keyed upstream in Core
        return new LongTerms.Bucket(key, bucketCount, subAggs, false, 0, DocValueFormat.RAW);
    });
} else {
    throw new RuntimeException("Unable to unroll aggregation tree. Aggregation ["
            + rolled.getName() + "] is of type [" + rolled.getClass().getSimpleName() + "] which is "
            + "currently unsupported.");
}
}

/**
 * Helper method which unrolls a generic multibucket agg. Prefer to use the other overload
 * as a consumer of the API
 *
 * @param source        The rolled aggregation that we wish to unroll
 * @param original      The matching "live" aggregation, if any; its buckets win over rolled ones
 * @param currentTree   The partially merged tree so far; a non-zero doc_count there suppresses
 *                      this round's count (the bucket is still emitted so sub-aggs can merge)
 * @param bucketFactory A Trifunction which generates new buckets for the given type of multibucket
 */
private static <A extends InternalMultiBucketAggregation,
                B extends InternalBucket,
                T extends InternalMultiBucketAggregation<A, B>>
        InternalAggregation unrollMultiBucket(T source, T original, T currentTree,
                                              TriFunction<InternalBucket, Long, InternalAggregations, B> bucketFactory) {

    // Index live and partially-merged buckets by key for O(1) overlap checks below
    Map<Object, InternalBucket> originalKeys = new HashMap<>();
    Map<Object, InternalBucket> currentKeys = new HashMap<>();

    if (original != null) {
        original.getBuckets().forEach(b -> originalKeys.put(b.getKey(), b));
    }

    if (currentTree != null) {
        currentTree.getBuckets().forEach(b -> currentKeys.put(b.getKey(), b));
    }

    // Iterate over the buckets in the multibucket
    List<B> buckets = source.getBuckets()
            .stream()
            .filter(b -> originalKeys.containsKey(b.getKey()) == false) // If the original has this key, ignore the rolled version
            .map(bucket -> {

                // Grab the value from the count agg (if it exists), which represents this bucket's doc_count
                long bucketCount = getAggCount(source, bucket.getAggregations().getAsMap());

                // Don't generate buckets if the doc count is zero
                if (bucketCount == 0) {
                    return null;
                }

                // current, partially merged tree contains this key. Defer to the existing doc_count if it is non-zero
                if (currentKeys.containsKey(bucket.getKey()) && currentKeys.get(bucket.getKey()).getDocCount() != 0) {
                    // Unlike above where we return null if doc_count is zero, we return a doc_count: 0 bucket
                    // here because it may have sub-aggs that need merging, whereas above the bucket was just empty/null
                    bucketCount = 0;
                }

                // Then iterate over the subAggs in the bucket
                InternalAggregations subAggs = unrollSubAggsFromMulti(bucket, originalKeys.get(bucket.getKey()),
                        currentKeys.get(bucket.getKey()));

                return bucketFactory.apply(bucket, bucketCount, subAggs);
            })
            .filter(Objects::nonNull)
            .collect(Collectors.toList());
    return source.create(buckets);
}

/**
 * Generic method to help iterate over sub-aggregation buckets and recursively unroll
 *
 * @param bucket      The current bucket that we wish to unroll
 * @param original    The matching live bucket, if any (may be null)
 * @param currentTree The matching bucket in the partially merged tree, if any (may be null)
 */
private static InternalAggregations unrollSubAggsFromMulti(InternalBucket bucket, InternalBucket original,
                                                           InternalBucket currentTree) {
    // Iterate over the subAggs in each bucket
    return new InternalAggregations(bucket.getAggregations()
            .asList().stream()
            // Avoid any rollup count metrics, as that's not a true "sub-agg" but rather agg
            // added by the rollup for accounting purposes (e.g. doc_count)
            .filter(subAgg -> !subAgg.getName().endsWith("."
            + RollupField.COUNT_FIELD))
            .map(subAgg -> {

                // Sibling count agg (if present) carries this sub-agg's doc_count
                long count = getAggCount(subAgg, bucket.getAggregations().asMap());

                InternalAggregation originalSubAgg = null;
                if (original != null && original.getAggregations() != null) {
                    originalSubAgg = original.getAggregations().get(subAgg.getName());
                }

                InternalAggregation currentSubAgg = null;
                if (currentTree != null && currentTree.getAggregations() != null) {
                    currentSubAgg = currentTree.getAggregations().get(subAgg.getName());
                }

                // Recurse into the sub-agg with the matching live/merged counterparts (may be null)
                return unrollAgg((InternalAggregation) subAgg, originalSubAgg, currentSubAgg, count);
            }).collect(Collectors.toList()));
}

/**
 * Unrolls a leaf metric agg. Max/min pass through unchanged; a sum with an associated
 * count (count != -1) is reconstituted into the average it originally represented.
 *
 * @param metric the rolled single-value metric
 * @param count  doc_count paired with the metric, or -1 when there is none
 */
private static InternalAggregation unrollMetric(SingleValue metric, long count) {
    // TODO comment from Colin in review:
    // RAW won't work here long term since if you do a max on e.g. a date field it will
    // render differently for the rolled up and non-rolled up results. At the moment
    // the formatter is not exposed on the internal agg objects but I think this is
    // something we can discuss exposing
    if (metric instanceof InternalMax || metric instanceof InternalMin) {
        return metric;
    } else if (metric instanceof InternalSum) {
        // If count is anything other than -1, this sum is actually an avg
        if (count != -1) {
            // Note: Avgs have a slightly different name to prevent collision with empty bucket defaults
            return new InternalAvg(metric.getName().replace("." + RollupField.VALUE, ""), metric.value(), count,
                    DocValueFormat.RAW, metric.pipelineAggregators(), metric.getMetaData());
        }
        return metric;
    } else {
        throw new RuntimeException("Unable to unroll metric. Aggregation ["
                + metric.getName() + "] is of type [" + metric.getClass().getSimpleName() + "] which is "
                + "currently unsupported.");
    }
}

/**
 * Looks up the accessory count agg for `agg` in `aggMap` and returns its value,
 * or -1 when the agg type has no count convention or no count agg is present.
 */
private static long getAggCount(Aggregation agg, Map<String, Aggregation> aggMap) {
    String countPath = null;

    if (agg.getType().equals(DateHistogramAggregationBuilder.NAME)
            || agg.getType().equals(HistogramAggregationBuilder.NAME)
            || agg.getType().equals(StringTerms.NAME) || agg.getType().equals(LongTerms.NAME)) {
        countPath = RollupField.formatCountAggName(agg.getName());
    } else if (agg.getType().equals(SumAggregationBuilder.NAME)) {
        // Note: Avgs have a slightly different name to prevent collision with empty bucket defaults
        countPath = RollupField.formatCountAggName(agg.getName().replace("." + RollupField.VALUE, ""));
    }

    if (countPath != null && aggMap.get(countPath) != null) {
        // we always set the count fields to Sum aggs, so this is safe
        assert aggMap.get(countPath) instanceof InternalSum;
        return (long) ((InternalSum) aggMap.get(countPath)).getValue();
    }

    return -1;
}
}
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.get; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.junit.Test; import java.util.Map; import static org.elasticsearch.client.Requests.clusterHealthRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.*; public class GetActionTests extends ElasticsearchIntegrationTest { @Test public void simpleGetTests() { 
// refresh_interval = -1 disables automatic refresh so realtime-vs-index behavior can be observed
client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1)).execute().actionGet();
ensureGreen();

// Missing doc: get must report not-exists
GetResponse response = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(response.isExists(), equalTo(false));

logger.info("--> index doc 1");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();

// Realtime get sees the doc even though the index was never refreshed
logger.info("--> realtime get 1");
response = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

logger.info("--> realtime get 1 (no source, implicit)");
response = client().prepareGet("test", "type1", "1").setFields(Strings.EMPTY_ARRAY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getFields().size(), equalTo(0));
assertThat(response.getSourceAsBytes(), nullValue());

logger.info("--> realtime get 1 (no source, explicit)");
response = client().prepareGet("test", "type1", "1").setFetchSource(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getFields().size(), equalTo(0));
assertThat(response.getSourceAsBytes(), nullValue());

// null type acts as a wildcard match
logger.info("--> realtime get 1 (no type)");
response = client().prepareGet("test", null, "1").execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

// Non-realtime get cannot see the doc before a refresh/flush
logger.info("--> non realtime get 1");
response = client().prepareGet("test", "type1", "1").setRealtime(false).execute().actionGet();
assertThat(response.isExists(), equalTo(false));

logger.info("--> realtime fetch of field (requires fetching parsing source)");
response = client().prepareGet("test", "type1", "1").setFields("field1").execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsBytes(), nullValue());
assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
assertThat(response.getField("field2"), nullValue());

logger.info("--> realtime fetch of field & source (requires fetching parsing source)");
response = client().prepareGet("test", "type1", "1").setFields("field1").setFetchSource("field1", null).execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsMap(), hasKey("field1"));
assertThat(response.getSourceAsMap(), not(hasKey("field2")));
assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
assertThat(response.getField("field2"), nullValue());

logger.info("--> flush the index, so we load it from it");
client().admin().indices().prepareFlush().execute().actionGet();

// Same expectations once the doc is served from the Lucene index rather than the translog
logger.info("--> realtime get 1 (loaded from index)");
response = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

logger.info("--> non realtime get 1 (loaded from index)");
response = client().prepareGet("test", "type1", "1").setRealtime(false).execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

logger.info("--> realtime fetch of field (loaded from index)");
response = client().prepareGet("test", "type1", "1").setFields("field1").execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsBytes(), nullValue());
assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
assertThat(response.getField("field2"), nullValue());

logger.info("--> realtime fetch of field & source (loaded from index)");
response = client().prepareGet("test", "type1", "1").setFields("field1").setFetchSource(true).execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsBytes(), not(nullValue()));
assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
assertThat(response.getField("field2"), nullValue());

logger.info("--> update doc 1");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").execute().actionGet();

// Re-index over the same id; realtime get must reflect the newest values
logger.info("--> realtime get 1");
response = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_1"));

logger.info("--> update doc 1 again");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1_2", "field2", "value2_2").execute().actionGet();

response = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_2"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2"));

// Delete and verify the doc is gone via realtime get
DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").execute().actionGet();
assertThat(deleteResponse.isNotFound(), equalTo(false));

response = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(response.isExists(), equalTo(false));
}

// Exercises multi-get across existing and missing ids, plus field-restricted items.
@Test
public void simpleMultiGetTests() throws Exception {
    try {
        client().admin().indices().prepareDelete("test").execute().actionGet();
    } catch (Exception e) {
        // fine
    }
client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1)).execute().actionGet();
ensureGreen();

// Multi-get on a missing doc still returns one (not-exists) response
MultiGetResponse response = client().prepareMultiGet().add("test", "type1", "1").execute().actionGet();
assertThat(response.getResponses().length, equalTo(1));
assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));

for (int i = 0; i < 10; i++) {
    client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
}

// Mix of existing ids (1, 3, 9) and missing ids (15, 11); order of responses matches request order
response = client().prepareMultiGet()
        .add("test", "type1", "1")
        .add("test", "type1", "15")
        .add("test", "type1", "3")
        .add("test", "type1", "9")
        .add("test", "type1", "11")
        .execute().actionGet();
assertThat(response.getResponses().length, equalTo(5));
assertThat(response.getResponses()[0].getId(), equalTo("1"));
assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
assertThat(response.getResponses()[1].getId(), equalTo("15"));
assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(false));
assertThat(response.getResponses()[2].getId(), equalTo("3"));
assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
assertThat(response.getResponses()[3].getId(), equalTo("9"));
assertThat(response.getResponses()[3].getResponse().isExists(), equalTo(true));
assertThat(response.getResponses()[4].getId(), equalTo("11"));
assertThat(response.getResponses()[4].getResponse().isExists(), equalTo(false));

// multi get with specific field
response = client().prepareMultiGet()
        .add(new MultiGetRequest.Item("test", "type1", "1").fields("field"))
        .add(new MultiGetRequest.Item("test", "type1", "3").fields("field"))
        .execute().actionGet();

assertThat(response.getResponses().length, equalTo(2));
// Field-only fetch: source bytes are not returned, only the requested field
assertThat(response.getResponses()[0].getResponse().getSourceAsBytes(), nullValue());
assertThat(response.getResponses()[0].getResponse().getField("field").getValues().get(0).toString(), equalTo("value1"));
}

// Verifies realtime get returns the full source when _source compression is enabled.
@Test
public void realtimeGetWithCompress() throws Exception {
    client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
            .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("_source").field("compress", true).endObject().endObject().endObject())
            .execute().actionGet();
    ensureGreen();

    // Build a large (10k char) value so the stored _source is actually compressed
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < 10000; i++) {
        sb.append((char) i);
    }
    String fieldValue = sb.toString();
    client().prepareIndex("test", "type", "1").setSource("field", fieldValue).execute().actionGet();

    // realtime get
    GetResponse getResponse = client().prepareGet("test", "type", "1").execute().actionGet();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo(fieldValue));
}

// Compares field values fetched from _source (type1) against stored fields (type2, _source disabled):
// numeric/binary types come back with different Java types depending on the fetch path.
@Test
public void getFieldsWithDifferentTypes() throws Exception {
    client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
            .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", true).endObject().endObject().endObject())
            .addMapping("type2", jsonBuilder().startObject().startObject("type2")
                    .startObject("_source").field("enabled", false).endObject()
                    .startObject("properties")
                    .startObject("str").field("type", "string").field("store", "yes").endObject()
                    .startObject("strs").field("type", "string").field("store", "yes").endObject()
                    .startObject("int").field("type", "integer").field("store", "yes").endObject()
                    .startObject("ints").field("type", "integer").field("store", "yes").endObject()
                    .startObject("date").field("type", "date").field("store", "yes").endObject()
                    .startObject("binary").field("type", "binary").field("store", "yes").endObject()
                    .endObject()
                    .endObject().endObject())
            .execute().actionGet();
    ensureGreen();

    client().prepareIndex("test", "type1", "1").setSource(
            jsonBuilder().startObject()
                    .field("str", "test")
                    .field("strs", new String[]{"A", "B", "C"})
                    .field("int", 42)
                    .field("ints", new int[]{1, 2, 3, 4})
                    .field("date", "2012-11-13T15:26:14.000Z")
                    .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
                    .endObject()).execute().actionGet();

    client().prepareIndex("test", "type2", "1").setSource(
            jsonBuilder().startObject()
                    .field("str", "test")
                    .field("strs", new String[]{"A", "B", "C"})
                    .field("int", 42)
                    .field("ints", new int[]{1, 2, 3, 4})
                    .field("date", "2012-11-13T15:26:14.000Z")
                    .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
                    .endObject()).execute().actionGet();

    // realtime get with stored source
    logger.info("--> realtime get (from source)");
    GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").execute().actionGet();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    // From _source, JSON integers parse as Long
    assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // its a String..., not binary mapped

    logger.info("--> realtime get (from stored fields)");
    getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").execute().actionGet();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    // From stored fields, the mapped integer type is preserved as Integer
    assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));

    logger.info("--> flush the index, so we load it from it");
    client().admin().indices().prepareFlush().execute().actionGet();

    // Same expectations for non-realtime gets after flush
    logger.info("--> non realtime get (from source)");
    getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").execute().actionGet();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // its a String..., not binary mapped

    logger.info("--> non realtime get (from stored fields)");
    getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").execute().actionGet();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));
}

// Multivalued field fetch: first from the translog (realtime), then from stored fields after refresh.
@Test
public void testGetDocWithMultivaluedFields() throws Exception {
    try {
        client().admin().indices().prepareDelete("test").execute().actionGet();
    } catch (Exception e) {
        // fine
    }
    String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
            .startObject("properties")
            .startObject("field").field("type", "string").field("store", "yes").endObject()
            .endObject()
            .endObject().endObject().string();
    String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2")
            .startObject("properties")
            .startObject("field").field("type", "string").field("store", "yes").endObject()
            .endObject()
            .startObject("_source").field("enabled", false).endObject()
            .endObject().endObject().string();
    client().admin().indices().prepareCreate("test")
            .addMapping("type1", mapping1)
            .addMapping("type2", mapping2)
            .setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
            .execute().actionGet();
    ensureGreen();

    GetResponse response = client().prepareGet("test", "type1", "1").execute().actionGet();
    assertThat(response.isExists(), equalTo(false));
    response = client().prepareGet("test", "type2", "1").execute().actionGet();
    assertThat(response.isExists(), equalTo(false));

    client().prepareIndex("test", "type1", "1")
            .setSource(jsonBuilder().startObject().field("field", "1", "2").endObject())
            .execute().actionGet();

    client().prepareIndex("test", "type2", "1")
            .setSource(jsonBuilder().startObject().field("field", "1", "2").endObject())
            .execute().actionGet();

    response = client().prepareGet("test", "type1", "1")
            .setFields("field")
            .execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getType(), equalTo("type1"));
    assertThat(response.getFields().size(),
equalTo(1)); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); response = client().prepareGet("test", "type2", "1") .setFields("field") .execute().actionGet(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getType(), equalTo("type2")); assertThat(response.getId(), equalTo("1")); assertThat(response.getFields().size(), equalTo(1)); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); // Now test values being fetched from stored fields. client().admin().indices().prepareRefresh("test").execute().actionGet(); response = client().prepareGet("test", "type1", "1") .setFields("field") .execute().actionGet(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getFields().size(), equalTo(1)); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); response = client().prepareGet("test", "type2", "1") .setFields("field") .execute().actionGet(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getFields().size(), equalTo(1)); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); } @Test public void 
testThatGetFromTranslogShouldWorkWithExclude() throws Exception {
    String index = "test";
    String type = "type1";
    // _source mapping that strips the "excluded" field when the document is stored.
    String mapping = jsonBuilder()
        .startObject()
            .startObject(type)
                .startObject("_source")
                    .array("excludes", "excluded")
                .endObject()
            .endObject()
        .endObject()
        .string();
    // refresh_interval = -1 disables refresh, so the first GET must be answered
    // from the translog rather than the Lucene index.
    client().admin().indices().prepareCreate(index)
        .addMapping(type, mapping)
        .setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
        .execute().actionGet();
    client().prepareIndex(index, type, "1")
        .setSource(jsonBuilder().startObject().field("field", "1", "2").field("excluded", "should not be seen").endObject())
        .execute().actionGet();
    GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").execute().actionGet();
    client().admin().indices().prepareFlush(index).execute().actionGet();
    GetResponse responseAfterFlush = client().prepareGet(index, type, "1").execute().actionGet();
    assertThat(responseBeforeFlush.isExists(), is(true));
    assertThat(responseAfterFlush.isExists(), is(true));
    assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("field"));
    assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
    // The translog-served source and the index-served source must be identical.
    assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
}

/**
 * Realtime GET from the translog must honor a _source "includes" mapping,
 * and must return the same filtered source as a GET served from the index.
 */
@Test
public void testThatGetFromTranslogShouldWorkWithInclude() throws Exception {
    String index = "test";
    String type = "type1";
    // _source mapping that keeps only the "included" field.
    String mapping = jsonBuilder()
        .startObject()
            .startObject(type)
                .startObject("_source")
                    .array("includes", "included")
                .endObject()
            .endObject()
        .endObject()
        .string();
    client().admin().indices().prepareCreate(index)
        .addMapping(type, mapping)
        .setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
        .execute().actionGet();
    client().prepareIndex(index, type, "1")
        .setSource(jsonBuilder().startObject().field("field", "1", "2").field("included", "should be seen").endObject())
        .execute().actionGet();
    GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").execute().actionGet();
    client().admin().indices().prepareFlush(index).execute().actionGet();
    GetResponse responseAfterFlush = client().prepareGet(index, type, "1").execute().actionGet();
    assertThat(responseBeforeFlush.isExists(), is(true));
    assertThat(responseAfterFlush.isExists(), is(true));
    assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
    assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));
    assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
}

/**
 * Realtime GET from the translog with combined mapping-level includes/excludes,
 * stored-field extraction and request-level setFetchSource() filtering must all
 * agree with the behavior of a GET served from the Lucene index after a flush.
 */
@SuppressWarnings("unchecked")
@Test
public void testThatGetFromTranslogShouldWorkWithIncludeExcludeAndFields() throws Exception {
    String index = "test";
    String type = "type1";
    String mapping = jsonBuilder()
        .startObject()
            .startObject(type)
                .startObject("_source")
                    .array("includes", "included")
                    // FIX: was misspelled "exlcudes", so the exclude rule was
                    // silently ignored by the mapper.
                    .array("excludes", "excluded")
                .endObject()
            .endObject()
        .endObject()
        .string();
    client().admin().indices().prepareCreate(index)
        .addMapping(type, mapping)
        .setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
        .execute().actionGet();
    client().prepareIndex(index, type, "1")
        .setSource(jsonBuilder().startObject()
            .field("field", "1", "2")
            .startObject("included").field("field", "should be seen").field("field2", "extra field to remove").endObject()
            .startObject("excluded").field("field", "should not be seen").field("field2", "should not be seen").endObject()
            .endObject())
        .execute().actionGet();
    GetResponse responseBeforeFlush = client().prepareGet(index, type, "1")
        .setFields("_source", "included", "excluded").execute().actionGet();
    assertThat(responseBeforeFlush.isExists(), is(true));
    assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
    assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
    assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));
    // now tests that extra source filtering works as expected
    GetResponse responseBeforeFlushWithExtraFilters = client().prepareGet(index, type, "1")
        .setFields("included", "excluded")
        .setFetchSource(new String[]{"field", "*.field"}, new String[]{"*.field2"}).get();
    assertThat(responseBeforeFlushWithExtraFilters.isExists(), is(true));
    assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("excluded")));
    assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("field")));
    assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), hasKey("included"));
    assertThat((Map<String, Object>) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), hasKey("field"));
    assertThat((Map<String, Object>) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), not(hasKey("field2")));
    client().admin().indices().prepareFlush(index).execute().actionGet();
    GetResponse responseAfterFlush = client().prepareGet(index, type, "1")
        .setFields("_source", "included", "excluded").execute().actionGet();
    GetResponse responseAfterFlushWithExtraFilters = client().prepareGet(index, type, "1")
        .setFields("included", "excluded")
        .setFetchSource("*.field", "*.field2").get();
    assertThat(responseAfterFlush.isExists(), is(true));
    assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
    assertThat(responseAfterFlushWithExtraFilters.isExists(), is(true));
    assertThat(responseBeforeFlushWithExtraFilters.getSourceAsString(), is(responseAfterFlushWithExtraFilters.getSourceAsString()));
}

/**
 * Versioned GET: version 0 means "ignore version"; a mismatching version must
 * raise VersionConflictEngineException, both when the GET is served realtime
 * from the translog and non-realtime from the Lucene index.
 */
@Test
public void testGetWithVersion() {
    client().admin().indices().prepareCreate("test")
        .setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1)).execute().actionGet();
    ClusterHealthResponse clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus()).actionGet();
    assertThat(clusterHealth.isTimedOut(), equalTo(false));
    assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN));
    GetResponse response = client().prepareGet("test", "type1", "1").execute().actionGet();
    assertThat(response.isExists(), equalTo(false));
    logger.info("--> index doc 1");
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").execute().actionGet();

    // From translog:
    // version 0 means ignore version, which is the default
    response = client().prepareGet("test", "type1", "1").setVersion(0).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(1L));
    response = client().prepareGet("test", "type1", "1").setVersion(1).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(1L));
    try {
        client().prepareGet("test", "type1", "1").setVersion(2).execute().actionGet();
        assert false; // a version mismatch must not succeed
    } catch (VersionConflictEngineException e) {
        // expected
    }

    // From Lucene index:
    client().admin().indices().prepareRefresh("test").execute().actionGet();
    // version 0 means ignore version, which is the default
    response = client().prepareGet("test", "type1", "1").setVersion(0).setRealtime(false).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(1L));
    response = client().prepareGet("test", "type1", "1").setVersion(1).setRealtime(false).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(1L));
    try {
        client().prepareGet("test", "type1", "1").setVersion(2).setRealtime(false).execute().actionGet();
        assert false; // a version mismatch must not succeed
    } catch (VersionConflictEngineException e) {
        // expected
    }

    logger.info("--> index doc 1 again, so increasing the version");
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").execute().actionGet();

    // From translog:
    // version 0 means ignore version, which is the default
    response = client().prepareGet("test", "type1", "1").setVersion(0).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(2L));
    try {
        client().prepareGet("test", "type1", "1").setVersion(1).execute().actionGet();
        assert false; // the stale version must now conflict
    } catch (VersionConflictEngineException e) {
        // expected
    }
    response = client().prepareGet("test", "type1", "1").setVersion(2).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(2L));

    // From Lucene index:
    client().admin().indices().prepareRefresh("test").execute().actionGet();
    // version 0 means ignore version, which is the default
    response = client().prepareGet("test", "type1", "1").setVersion(0).setRealtime(false).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(2L));
    try {
        client().prepareGet("test", "type1", "1").setVersion(1).setRealtime(false).execute().actionGet();
        assert false; // the stale version must now conflict
    } catch (VersionConflictEngineException e) {
        // expected
    }
    response = client().prepareGet("test", "type1", "1").setVersion(2).setRealtime(false).execute().actionGet();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(2L));
}

/**
 * Versioned multi-GET: each item carries its own expected version; matching
 * (or 0 = ignored) versions succeed while stale versions surface as per-item
 * VersionConflictEngineException failures — from translog and from the index.
 */
@Test
public void testMultiGetWithVersion() throws Exception {
    try {
        client().admin().indices().prepareDelete("test").execute().actionGet();
    } catch (Exception e) {
        // fine — the index may simply not exist yet
    }
    client().admin().indices().prepareCreate("test")
        .setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1)).execute().actionGet();
    ClusterHealthResponse clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus()).actionGet();
    assertThat(clusterHealth.isTimedOut(), equalTo(false));
    assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN));

    MultiGetResponse response = client().prepareMultiGet().add("test", "type1", "1").execute().actionGet();
    assertThat(response.getResponses().length, equalTo(1));
    assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));

    for (int i = 0; i < 3; i++) {
        client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
    }

    // Version from translog
    response = client().prepareMultiGet()
        .add(new MultiGetRequest.Item("test", "type1", "1").version(0))
        .add(new MultiGetRequest.Item("test", "type1", "1").version(1))
        .add(new MultiGetRequest.Item("test", "type1", "1").version(2))
        .execute().actionGet();
    assertThat(response.getResponses().length, equalTo(3));
    // [0] version doesn't matter, which is the default
    assertThat(response.getResponses()[0].getFailure(), nullValue());
    assertThat(response.getResponses()[0].getId(), equalTo("1"));
    assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
    assertThat(response.getResponses()[1].getId(), equalTo("1"));
    assertThat(response.getResponses()[1].getFailure(), nullValue());
    assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
    assertThat(response.getResponses()[2].getFailure(), notNullValue());
    assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
    assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("VersionConflictEngineException"));

    // Version from Lucene index
    client().admin().indices().prepareRefresh("test").execute().actionGet();
    response = client().prepareMultiGet()
        .add(new MultiGetRequest.Item("test", "type1", "1").version(0))
        .add(new MultiGetRequest.Item("test", "type1", "1").version(1))
        .add(new MultiGetRequest.Item("test", "type1", "1").version(2))
        .setRealtime(false)
        .execute().actionGet();
    assertThat(response.getResponses().length, equalTo(3));
    // [0] version doesn't matter, which is the default
    assertThat(response.getResponses()[0].getFailure(), nullValue());
    assertThat(response.getResponses()[0].getId(), equalTo("1"));
    assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
    assertThat(response.getResponses()[1].getId(), equalTo("1"));
    assertThat(response.getResponses()[1].getFailure(), nullValue());
    assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
    assertThat(response.getResponses()[2].getFailure(), notNullValue());
    assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
    assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("VersionConflictEngineException"));

    // Re-index the docs so they advance to version 2.
    for (int i = 0; i < 3; i++) {
        client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
    }

    // Version from translog
    response = client().prepareMultiGet()
        .add(new MultiGetRequest.Item("test", "type1", "2").version(0))
        .add(new MultiGetRequest.Item("test", "type1", "2").version(1))
        .add(new MultiGetRequest.Item("test", "type1", "2").version(2))
        .execute().actionGet();
    assertThat(response.getResponses().length, equalTo(3));
    // [0] version doesn't matter, which is the default
    assertThat(response.getResponses()[0].getFailure(), nullValue());
    assertThat(response.getResponses()[0].getId(), equalTo("2"));
    assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
    assertThat(response.getResponses()[1].getFailure(), notNullValue());
    assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
    assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
    assertThat(response.getResponses()[2].getId(), equalTo("2"));
    assertThat(response.getResponses()[2].getFailure(), nullValue());
    assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));

    // Version from Lucene index
    client().admin().indices().prepareRefresh("test").execute().actionGet();
    response = client().prepareMultiGet()
        .add(new MultiGetRequest.Item("test", "type1", "2").version(0))
        .add(new MultiGetRequest.Item("test", "type1", "2").version(1))
        .add(new MultiGetRequest.Item("test", "type1", "2").version(2))
        .setRealtime(false)
        .execute().actionGet();
    assertThat(response.getResponses().length, equalTo(3));
    // [0] version doesn't matter, which is the default
    assertThat(response.getResponses()[0].getFailure(), nullValue());
    assertThat(response.getResponses()[0].getId(), equalTo("2"));
    assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
    assertThat(response.getResponses()[1].getFailure(), notNullValue());
    assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
    assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
    assertThat(response.getResponses()[2].getId(), equalTo("2"));
    assertThat(response.getResponses()[2].getFailure(), nullValue());
    assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
    assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
}
}
package org.ripple.bouncycastle.asn1.x500.style;

import java.io.IOException;
import java.util.Hashtable;

import org.ripple.bouncycastle.asn1.ASN1Encodable;
import org.ripple.bouncycastle.asn1.ASN1GeneralizedTime;
import org.ripple.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.ripple.bouncycastle.asn1.DERIA5String;
import org.ripple.bouncycastle.asn1.DERPrintableString;
import org.ripple.bouncycastle.asn1.DERUTF8String;
import org.ripple.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.ripple.bouncycastle.asn1.x500.AttributeTypeAndValue;
import org.ripple.bouncycastle.asn1.x500.RDN;
import org.ripple.bouncycastle.asn1.x500.X500Name;
import org.ripple.bouncycastle.asn1.x500.X500NameStyle;
import org.ripple.bouncycastle.asn1.x509.X509ObjectIdentifiers;

/**
 * The default Bouncy Castle {@link X500NameStyle}: defines the standard X.500
 * attribute OIDs, the symbol tables used to render and parse RFC 2253-style
 * distinguished-name strings, and order-independent equality/hashing of names.
 */
public class BCStyle
    implements X500NameStyle
{
    /** Shared, stateless singleton instance. */
    public static final X500NameStyle INSTANCE = new BCStyle();

    /**
     * country code - StringType(SIZE(2))
     */
    public static final ASN1ObjectIdentifier C = new ASN1ObjectIdentifier("2.5.4.6");

    /**
     * organization - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier O = new ASN1ObjectIdentifier("2.5.4.10");

    /**
     * organizational unit name - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier OU = new ASN1ObjectIdentifier("2.5.4.11");

    /**
     * Title
     */
    public static final ASN1ObjectIdentifier T = new ASN1ObjectIdentifier("2.5.4.12");

    /**
     * common name - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier CN = new ASN1ObjectIdentifier("2.5.4.3");

    /**
     * device serial number name - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier SN = new ASN1ObjectIdentifier("2.5.4.5");

    /**
     * street - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier STREET = new ASN1ObjectIdentifier("2.5.4.9");

    /**
     * device serial number name - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier SERIALNUMBER = SN;

    /**
     * locality name - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier L = new ASN1ObjectIdentifier("2.5.4.7");

    /**
     * state, or province name - StringType(SIZE(1..64))
     */
    public static final ASN1ObjectIdentifier ST = new ASN1ObjectIdentifier("2.5.4.8");

    /**
     * Naming attributes of type X520name
     */
    public static final ASN1ObjectIdentifier SURNAME = new ASN1ObjectIdentifier("2.5.4.4");
    public static final ASN1ObjectIdentifier GIVENNAME = new ASN1ObjectIdentifier("2.5.4.42");
    public static final ASN1ObjectIdentifier INITIALS = new ASN1ObjectIdentifier("2.5.4.43");
    public static final ASN1ObjectIdentifier GENERATION = new ASN1ObjectIdentifier("2.5.4.44");
    public static final ASN1ObjectIdentifier UNIQUE_IDENTIFIER = new ASN1ObjectIdentifier("2.5.4.45");

    /**
     * businessCategory - DirectoryString(SIZE(1..128)
     */
    public static final ASN1ObjectIdentifier BUSINESS_CATEGORY = new ASN1ObjectIdentifier("2.5.4.15");

    /**
     * postalCode - DirectoryString(SIZE(1..40)
     */
    public static final ASN1ObjectIdentifier POSTAL_CODE = new ASN1ObjectIdentifier("2.5.4.17");

    /**
     * dnQualifier - DirectoryString(SIZE(1..64)
     */
    public static final ASN1ObjectIdentifier DN_QUALIFIER = new ASN1ObjectIdentifier("2.5.4.46");

    /**
     * RFC 3039 Pseudonym - DirectoryString(SIZE(1..64)
     */
    public static final ASN1ObjectIdentifier PSEUDONYM = new ASN1ObjectIdentifier("2.5.4.65");

    /**
     * RFC 3039 DateOfBirth - GeneralizedTime - YYYYMMDD000000Z
     */
    public static final ASN1ObjectIdentifier DATE_OF_BIRTH = new ASN1ObjectIdentifier("1.3.6.1.5.5.7.9.1");

    /**
     * RFC 3039 PlaceOfBirth - DirectoryString(SIZE(1..128)
     */
    public static final ASN1ObjectIdentifier PLACE_OF_BIRTH = new ASN1ObjectIdentifier("1.3.6.1.5.5.7.9.2");

    /**
     * RFC 3039 Gender - PrintableString (SIZE(1)) -- "M", "F", "m" or "f"
     */
    public static final ASN1ObjectIdentifier GENDER = new ASN1ObjectIdentifier("1.3.6.1.5.5.7.9.3");

    /**
     * RFC 3039 CountryOfCitizenship - PrintableString (SIZE (2)) -- ISO 3166
     * codes only
     */
    public static final ASN1ObjectIdentifier COUNTRY_OF_CITIZENSHIP = new ASN1ObjectIdentifier("1.3.6.1.5.5.7.9.4");

    /**
     * RFC 3039 CountryOfResidence - PrintableString (SIZE (2)) -- ISO 3166
     * codes only
     */
    public static final ASN1ObjectIdentifier COUNTRY_OF_RESIDENCE = new ASN1ObjectIdentifier("1.3.6.1.5.5.7.9.5");

    /**
     * ISIS-MTT NameAtBirth - DirectoryString(SIZE(1..64)
     */
    public static final ASN1ObjectIdentifier NAME_AT_BIRTH = new ASN1ObjectIdentifier("1.3.36.8.3.14");

    /**
     * RFC 3039 PostalAddress - SEQUENCE SIZE (1..6) OF
     * DirectoryString(SIZE(1..30))
     */
    public static final ASN1ObjectIdentifier POSTAL_ADDRESS = new ASN1ObjectIdentifier("2.5.4.16");

    /**
     * RFC 2256 dmdName
     */
    public static final ASN1ObjectIdentifier DMD_NAME = new ASN1ObjectIdentifier("2.5.4.54");

    /**
     * id-at-telephoneNumber
     */
    public static final ASN1ObjectIdentifier TELEPHONE_NUMBER = X509ObjectIdentifiers.id_at_telephoneNumber;

    /**
     * id-at-name
     */
    public static final ASN1ObjectIdentifier NAME = X509ObjectIdentifiers.id_at_name;

    /**
     * Email address (RSA PKCS#9 extension) - IA5String.
     * <p>Note: if you're trying to be ultra orthodox, don't use this! It shouldn't be in here.
     */
    public static final ASN1ObjectIdentifier EmailAddress = PKCSObjectIdentifiers.pkcs_9_at_emailAddress;

    /**
     * more from PKCS#9
     */
    public static final ASN1ObjectIdentifier UnstructuredName = PKCSObjectIdentifiers.pkcs_9_at_unstructuredName;
    public static final ASN1ObjectIdentifier UnstructuredAddress = PKCSObjectIdentifiers.pkcs_9_at_unstructuredAddress;

    /**
     * email address in Verisign certificates
     */
    public static final ASN1ObjectIdentifier E = EmailAddress;

    /*
     * others...
     */
    public static final ASN1ObjectIdentifier DC = new ASN1ObjectIdentifier("0.9.2342.19200300.100.1.25");

    /**
     * LDAP User id.
     */
    public static final ASN1ObjectIdentifier UID = new ASN1ObjectIdentifier("0.9.2342.19200300.100.1.1");

    /**
     * default look up table translating OID values into their common symbols following
     * the convention in RFC 2253 with a few extras
     */
    private static final Hashtable DefaultSymbols = new Hashtable();

    /**
     * look up table translating common symbols into their OIDS.
     */
    private static final Hashtable DefaultLookUp = new Hashtable();

    static
    {
        DefaultSymbols.put(C, "C");
        DefaultSymbols.put(O, "O");
        DefaultSymbols.put(T, "T");
        DefaultSymbols.put(OU, "OU");
        DefaultSymbols.put(CN, "CN");
        DefaultSymbols.put(L, "L");
        DefaultSymbols.put(ST, "ST");
        DefaultSymbols.put(SN, "SERIALNUMBER");
        DefaultSymbols.put(EmailAddress, "E");
        DefaultSymbols.put(DC, "DC");
        DefaultSymbols.put(UID, "UID");
        DefaultSymbols.put(STREET, "STREET");
        DefaultSymbols.put(SURNAME, "SURNAME");
        DefaultSymbols.put(GIVENNAME, "GIVENNAME");
        DefaultSymbols.put(INITIALS, "INITIALS");
        DefaultSymbols.put(GENERATION, "GENERATION");
        DefaultSymbols.put(UnstructuredAddress, "unstructuredAddress");
        DefaultSymbols.put(UnstructuredName, "unstructuredName");
        DefaultSymbols.put(UNIQUE_IDENTIFIER, "UniqueIdentifier");
        DefaultSymbols.put(DN_QUALIFIER, "DN");
        DefaultSymbols.put(PSEUDONYM, "Pseudonym");
        DefaultSymbols.put(POSTAL_ADDRESS, "PostalAddress");
        DefaultSymbols.put(NAME_AT_BIRTH, "NameAtBirth");
        DefaultSymbols.put(COUNTRY_OF_CITIZENSHIP, "CountryOfCitizenship");
        DefaultSymbols.put(COUNTRY_OF_RESIDENCE, "CountryOfResidence");
        DefaultSymbols.put(GENDER, "Gender");
        DefaultSymbols.put(PLACE_OF_BIRTH, "PlaceOfBirth");
        DefaultSymbols.put(DATE_OF_BIRTH, "DateOfBirth");
        DefaultSymbols.put(POSTAL_CODE, "PostalCode");
        DefaultSymbols.put(BUSINESS_CATEGORY, "BusinessCategory");
        DefaultSymbols.put(TELEPHONE_NUMBER, "TelephoneNumber");
        DefaultSymbols.put(NAME, "Name");

        DefaultLookUp.put("c", C);
        DefaultLookUp.put("o", O);
        DefaultLookUp.put("t", T);
        DefaultLookUp.put("ou", OU);
        DefaultLookUp.put("cn", CN);
        DefaultLookUp.put("l", L);
        DefaultLookUp.put("st", ST);
        DefaultLookUp.put("sn", SN);
        DefaultLookUp.put("serialnumber", SN);
        DefaultLookUp.put("street", STREET);
        DefaultLookUp.put("emailaddress", E);
        DefaultLookUp.put("dc", DC);
        DefaultLookUp.put("e", E);
        DefaultLookUp.put("uid", UID);
        DefaultLookUp.put("surname", SURNAME);
        DefaultLookUp.put("givenname", GIVENNAME);
        DefaultLookUp.put("initials", INITIALS);
        DefaultLookUp.put("generation", GENERATION);
        DefaultLookUp.put("unstructuredaddress", UnstructuredAddress);
        DefaultLookUp.put("unstructuredname", UnstructuredName);
        DefaultLookUp.put("uniqueidentifier", UNIQUE_IDENTIFIER);
        DefaultLookUp.put("dn", DN_QUALIFIER);
        DefaultLookUp.put("pseudonym", PSEUDONYM);
        DefaultLookUp.put("postaladdress", POSTAL_ADDRESS);
        // FIX: the symbol emitted for NAME_AT_BIRTH is "NameAtBirth", which
        // lower-cases to "nameatbirth"; the original table only registered the
        // misspelled "nameofbirth", so a toString()/fromString() round trip of a
        // name containing NameAtBirth failed. The legacy key is kept for
        // backward compatibility with strings produced by old code.
        DefaultLookUp.put("nameofbirth", NAME_AT_BIRTH);
        DefaultLookUp.put("nameatbirth", NAME_AT_BIRTH);
        DefaultLookUp.put("countryofcitizenship", COUNTRY_OF_CITIZENSHIP);
        DefaultLookUp.put("countryofresidence", COUNTRY_OF_RESIDENCE);
        DefaultLookUp.put("gender", GENDER);
        DefaultLookUp.put("placeofbirth", PLACE_OF_BIRTH);
        DefaultLookUp.put("dateofbirth", DATE_OF_BIRTH);
        DefaultLookUp.put("postalcode", POSTAL_CODE);
        DefaultLookUp.put("businesscategory", BUSINESS_CATEGORY);
        DefaultLookUp.put("telephonenumber", TELEPHONE_NUMBER);
        DefaultLookUp.put("name", NAME);
    }

    protected BCStyle()
    {
    }

    /**
     * Converts an attribute value string to its ASN.1 encoding: "#"-prefixed
     * hex is decoded verbatim, a leading escape backslash is stripped, and the
     * ASN.1 string type is chosen per attribute OID (IA5/GeneralizedTime/
     * PrintableString), defaulting to UTF8String.
     */
    public ASN1Encodable stringToValue(ASN1ObjectIdentifier oid, String value)
    {
        if (value.length() != 0 && value.charAt(0) == '#')
        {
            try
            {
                return IETFUtils.valueFromHexString(value, 1);
            }
            catch (IOException e)
            {
                // FIX: preserve the cause so the underlying decode failure isn't lost.
                throw new RuntimeException("can't recode value for oid " + oid.getId(), e);
            }
        }
        else
        {
            if (value.length() != 0 && value.charAt(0) == '\\')
            {
                value = value.substring(1);
            }
            if (oid.equals(EmailAddress) || oid.equals(DC))
            {
                return new DERIA5String(value);
            }
            else if (oid.equals(DATE_OF_BIRTH))  // accept time string as well as # (for compatibility)
            {
                return new ASN1GeneralizedTime(value);
            }
            else if (oid.equals(C) || oid.equals(SN) || oid.equals(DN_QUALIFIER)
                || oid.equals(TELEPHONE_NUMBER))
            {
                return new DERPrintableString(value);
            }
        }
        return new DERUTF8String(value);
    }

    /** @return the display symbol for the OID, or null if none is registered. */
    public String oidToDisplayName(ASN1ObjectIdentifier oid)
    {
        return (String)DefaultSymbols.get(oid);
    }

    /** @return all lower-case attribute names known to map to the given OID. */
    public String[] oidToAttrNames(ASN1ObjectIdentifier oid)
    {
        return IETFUtils.findAttrNamesForOID(oid, DefaultLookUp);
    }

    /** @return the OID for an attribute name (symbol or dotted-decimal form). */
    public ASN1ObjectIdentifier attrNameToOID(String attrName)
    {
        return IETFUtils.decodeAttrName(attrName, DefaultLookUp);
    }

    /**
     * Order-independent RDN-set comparison of two names; a reversed RDN order
     * (as produced by some implementations) is detected from the first RDN and
     * compensated for.
     */
    public boolean areEqual(X500Name name1, X500Name name2)
    {
        RDN[] rdns1 = name1.getRDNs();
        RDN[] rdns2 = name2.getRDNs();

        if (rdns1.length != rdns2.length)
        {
            return false;
        }

        boolean reverse = false;

        if (rdns1[0].getFirst() != null && rdns2[0].getFirst() != null)
        {
            reverse = !rdns1[0].getFirst().getType().equals(rdns2[0].getFirst().getType());  // guess forward
        }

        for (int i = 0; i != rdns1.length; i++)
        {
            if (!foundMatch(reverse, rdns1[i], rdns2))
            {
                return false;
            }
        }

        return true;
    }

    // Finds (and consumes, by nulling the slot) a matching RDN in possRDNs,
    // scanning backwards when the two names appear to be in reverse order.
    private boolean foundMatch(boolean reverse, RDN rdn, RDN[] possRDNs)
    {
        if (reverse)
        {
            for (int i = possRDNs.length - 1; i >= 0; i--)
            {
                if (possRDNs[i] != null && rdnAreEqual(rdn, possRDNs[i]))
                {
                    possRDNs[i] = null;
                    return true;
                }
            }
        }
        else
        {
            for (int i = 0; i != possRDNs.length; i++)
            {
                if (possRDNs[i] != null && rdnAreEqual(rdn, possRDNs[i]))
                {
                    possRDNs[i] = null;
                    return true;
                }
            }
        }

        return false;
    }

    protected boolean rdnAreEqual(RDN rdn1, RDN rdn2)
    {
        return IETFUtils.rDNAreEqual(rdn1, rdn2);
    }

    /** Parses an RFC 2253-style directory-name string into RDNs. */
    public RDN[] fromString(String dirName)
    {
        return IETFUtils.rDNsFromString(dirName, this);
    }

    /**
     * Order-independent hash code consistent with {@link #areEqual}: XORs the
     * type hash and canonicalized value hash of every AttributeTypeAndValue.
     */
    public int calculateHashCode(X500Name name)
    {
        int hashCodeValue = 0;
        RDN[] rdns = name.getRDNs();

        // this needs to be order independent, like equals
        for (int i = 0; i != rdns.length; i++)
        {
            if (rdns[i].isMultiValued())
            {
                AttributeTypeAndValue[] atv = rdns[i].getTypesAndValues();

                for (int j = 0; j != atv.length; j++)
                {
                    hashCodeValue ^= atv[j].getType().hashCode();
                    hashCodeValue ^= calcHashCode(atv[j].getValue());
                }
            }
            else
            {
                hashCodeValue ^= rdns[i].getFirst().getType().hashCode();
                hashCodeValue ^= calcHashCode(rdns[i].getFirst().getValue());
            }
        }

        return hashCodeValue;
    }

    // Hash of the canonicalized (case/whitespace-normalized) string form of a value.
    private int calcHashCode(ASN1Encodable enc)
    {
        String value = IETFUtils.valueToString(enc);
        value = IETFUtils.canonicalize(value);
        return value.hashCode();
    }

    /** Renders the name as comma-separated RDNs using the default symbol table. */
    public String toString(X500Name name)
    {
        StringBuffer buf = new StringBuffer();
        boolean first = true;

        RDN[] rdns = name.getRDNs();

        for (int i = 0; i < rdns.length; i++)
        {
            if (first)
            {
                first = false;
            }
            else
            {
                buf.append(',');
            }

            IETFUtils.appendRDN(buf, rdns[i], DefaultSymbols);
        }

        return buf.toString();
    }
}
package com.jivesoftware.os.miru.stumptown.deployable.region; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Optional; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.jivesoftware.os.miru.api.MiruActorId; import com.jivesoftware.os.miru.api.activity.MiruActivity; import com.jivesoftware.os.miru.api.base.MiruTenantId; import com.jivesoftware.os.miru.api.query.filter.MiruAuthzExpression; import com.jivesoftware.os.miru.api.query.filter.MiruFieldFilter; import com.jivesoftware.os.miru.api.query.filter.MiruFilter; import com.jivesoftware.os.miru.api.query.filter.MiruFilterOperation; import com.jivesoftware.os.miru.api.query.filter.MiruValue; import com.jivesoftware.os.miru.logappender.MiruLogEvent; import com.jivesoftware.os.miru.plugin.query.LuceneBackedQueryParser; import com.jivesoftware.os.miru.plugin.solution.MiruRequest; import com.jivesoftware.os.miru.plugin.solution.MiruResponse; import com.jivesoftware.os.miru.plugin.solution.MiruSolutionLogLevel; import com.jivesoftware.os.miru.plugin.solution.MiruTimeRange; import com.jivesoftware.os.miru.reco.plugins.distincts.DistinctsAnswer; import com.jivesoftware.os.miru.reco.plugins.distincts.DistinctsConstants; import com.jivesoftware.os.miru.reco.plugins.distincts.DistinctsQuery; import com.jivesoftware.os.miru.stumptown.deployable.StumptownSchemaConstants; import com.jivesoftware.os.miru.stumptown.deployable.storage.MiruStumptownPayloadStorage; import com.jivesoftware.os.miru.stumptown.plugins.StumptownAnswer; import com.jivesoftware.os.miru.stumptown.plugins.StumptownConstants; import com.jivesoftware.os.miru.stumptown.plugins.StumptownQuery; import com.jivesoftware.os.miru.ui.MiruPageRegion; import com.jivesoftware.os.miru.ui.MiruSoyRenderer; import com.jivesoftware.os.mlogger.core.ISO8601DateFormat; import 
com.jivesoftware.os.mlogger.core.MetricLogger; import com.jivesoftware.os.mlogger.core.MetricLoggerFactory; import com.jivesoftware.os.routing.bird.http.client.HttpResponse; import com.jivesoftware.os.routing.bird.http.client.HttpResponseMapper; import com.jivesoftware.os.routing.bird.http.client.RoundRobinStrategy; import com.jivesoftware.os.routing.bird.http.client.TenantAwareHttpClient; import com.jivesoftware.os.routing.bird.shared.ClientCall.ClientResponse; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.concurrent.TimeUnit; import static com.google.common.base.Objects.firstNonNull; // soy.stumptown.page.stumptownQueryPluginRegion public class StumptownQueryPluginRegion implements MiruPageRegion<Optional<StumptownQueryPluginRegion.StumptownPluginRegionInput>> { private static final MetricLogger LOG = MetricLoggerFactory.getLogger(); private final String template; private final String logEventTemplate; private final String noEventsTemplate; private final MiruSoyRenderer renderer; private final TenantAwareHttpClient<String> readerClient; private final ObjectMapper requestMapper; private final HttpResponseMapper responseMapper; private final MiruStumptownPayloadStorage payloads; private final LuceneBackedQueryParser messageQueryParser = new LuceneBackedQueryParser("message"); public StumptownQueryPluginRegion(String template, String logEventTemplate, String noEventsTemplate, MiruSoyRenderer renderer, TenantAwareHttpClient<String> readerClient, ObjectMapper requestMapper, HttpResponseMapper responseMapper, MiruStumptownPayloadStorage payloads) { this.template = template; this.logEventTemplate = logEventTemplate; this.noEventsTemplate = noEventsTemplate; this.renderer = renderer; this.readerClient = readerClient; this.requestMapper = requestMapper; this.responseMapper = responseMapper; this.payloads 
= payloads; } public static class StumptownPluginRegionInput { final String cluster; final String host; final String service; final String instance; final String version; final String logLevel; final int fromAgo; final int toAgo; final String fromTimeUnit; final String toTimeUnit; final String thread; final String logger; final String method; final String line; final String message; final String exceptionClass; final String thrown; final int buckets; final int messageCount; final String graphType; public StumptownPluginRegionInput(String cluster, String host, String service, String instance, String version, String logLevel, int fromAgo, int toAgo, String fromTimeUnit, String toTimeUnit, String thread, String logger, String method, String line, String message, String exceptionClass, String thrown, int buckets, int messageCount, String graphType) { this.cluster = cluster; this.host = host; this.service = service; this.instance = instance; this.version = version; this.logLevel = logLevel; this.fromAgo = fromAgo; this.toAgo = toAgo; this.fromTimeUnit = fromTimeUnit; this.toTimeUnit = toTimeUnit; this.thread = thread; this.logger = logger; this.method = method; this.line = line; this.message = message; this.exceptionClass = exceptionClass; this.thrown = thrown; this.buckets = buckets; this.messageCount = messageCount; this.graphType = graphType; } } @Override public String render(Optional<StumptownPluginRegionInput> optionalInput) { Map<String, Object> data = Maps.newHashMap(); try { if (optionalInput.isPresent()) { StumptownPluginRegionInput input = optionalInput.get(); int fromAgo = input.fromAgo > input.toAgo ? input.fromAgo : input.toAgo; int toAgo = input.fromAgo > input.toAgo ? 
input.toAgo : input.fromAgo;
                // Echo every raw form input back into the template model so the page
                // re-renders with the user's current query state.
                data.put("cluster", input.cluster);
                data.put("host", input.host);
                data.put("service", input.service);
                data.put("instance", input.instance);
                data.put("version", input.version);
                data.put("fromTimeUnit", input.fromTimeUnit);
                data.put("toTimeUnit", input.toTimeUnit);
                data.put("thread", input.thread);
                data.put("logger", input.logger);
                data.put("method", input.method);
                data.put("line", input.line);
                data.put("message", input.message);
                data.put("exceptionClass", input.exceptionClass);
                data.put("thrown", input.thrown);

                // input.logLevel is a comma-separated list (e.g. "INFO,WARN"); expand it
                // into per-level booleans for the level checkboxes in the template.
                Set<String> logLevelSet = Sets.newHashSet(Splitter.on(',').split(input.logLevel));
                data.put("logLevels", ImmutableMap.of(
                    "trace", logLevelSet.contains("TRACE"),
                    "debug", logLevelSet.contains("DEBUG"),
                    "info", logLevelSet.contains("INFO"),
                    "warn", logLevelSet.contains("WARN"),
                    "error", logLevelSet.contains("ERROR")));
                data.put("logLevelsList", input.logLevel);
                data.put("fromAgo", String.valueOf(fromAgo));
                data.put("toAgo", String.valueOf(toAgo));
                data.put("buckets", String.valueOf(input.buckets));
                data.put("messageCount", String.valueOf(input.messageCount));
                data.put("graphType", input.graphType);

                // Auto-execute the query only when the level set is non-empty.
                // NOTE(review): Splitter.on(',').split("") yields one empty string, so
                // this set may be non-empty even for a blank input — confirm intent.
                boolean execute = !logLevelSet.isEmpty();
                data.put("execute", execute);
            }
        } catch (Exception e) {
            // Best-effort: log and still render whatever model was assembled.
            LOG.error("Unable to retrieve data", e);
        }
        return renderer.render(template, data);
    }

    /**
     * Produces typeahead suggestions for a single field by running a Miru
     * "distincts" query over the stumptown tenant, constrained by the current
     * values of the other query fields. Each suggestion is a {key, name} map;
     * once the cap is hit a trailing {"...", "..."} sentinel is appended.
     *
     * @param fieldName the field being completed (its own filter is skipped)
     * @param contains  optional prefix to match; null/empty means no prefix filter
     * @return list of {key, name} suggestion maps (possibly empty)
     * @throws Exception on request serialization or transport failure
     */
    public List<Map<String, String>> typeahead(String fieldName,
        String cluster, String host, String service, String instance, String version,
        int fromAgo, int toAgo, String fromTimeUnit, String toTimeUnit,
        String thread, String logger, String method, String line, String contains) throws Exception {

        // Distincts are not time-bounded here: scan the entire range.
        MiruTimeRange timeRange = new MiruTimeRange(Long.MIN_VALUE, Long.MAX_VALUE);
        List<MiruValue> prefixes = null;
        if (contains != null && !contains.isEmpty()) {
            prefixes = Arrays.asList(new MiruValue(contains));
        }

        List<MiruFieldFilter> fieldFilters = Lists.newArrayList();
        List<MiruFieldFilter> notFieldFilters = Lists.newArrayList();
        // Constrain by every OTHER field's current value; addFilter skips the
        // field being completed so its own filter does not mask suggestions.
        addFilter(fieldName, "cluster", cluster, fieldFilters, notFieldFilters);
        addFilter(fieldName, "host", host, fieldFilters, notFieldFilters);
        addFilter(fieldName, "service", service, fieldFilters, notFieldFilters);
        addFilter(fieldName, "instance", instance, fieldFilters, notFieldFilters);
        addFilter(fieldName, "version", version, fieldFilters, notFieldFilters);
        addFilter(fieldName, "thread", thread, fieldFilters, notFieldFilters);
        addFilter(fieldName, "methodName", method, fieldFilters, notFieldFilters);
        addFilter(fieldName, "logger", logger, fieldFilters, notFieldFilters);

        List<MiruFilter> filters = Lists.newArrayList();
        filters.add(new MiruFilter(MiruFilterOperation.and, false, fieldFilters, null));
        if (!notFieldFilters.isEmpty()) {
            filters.add(new MiruFilter(MiruFilterOperation.or, false, notFieldFilters, null));
        }

        MiruResponse<DistinctsAnswer> response = null;
        MiruTenantId tenantId = StumptownSchemaConstants.TENANT_ID;
        String endpoint = DistinctsConstants.DISTINCTS_PREFIX + DistinctsConstants.CUSTOM_QUERY_ENDPOINT;
        String request = requestMapper.writeValueAsString(new MiruRequest<>("stump>typeahead",
            tenantId,
            MiruActorId.NOT_PROVIDED,
            MiruAuthzExpression.NOT_PROVIDED,
            new DistinctsQuery(
                timeRange,
                fieldName,
                null,
                new MiruFilter(MiruFilterOperation.pButNotQ, false, null, filters),
                prefixes),
            MiruSolutionLogLevel.NONE));

        MiruResponse<DistinctsAnswer> distinctsResponse = readerClient.call("",
            new RoundRobinStrategy(),
            "stump>typeahead>" + fieldName,
            httpClient -> {
                HttpResponse httpResponse = httpClient.postJson(endpoint, request, null);
                @SuppressWarnings("unchecked")
                MiruResponse<DistinctsAnswer> extractResponse = responseMapper.extractResultFromResponse(httpResponse,
                    MiruResponse.class, new Class[] { DistinctsAnswer.class }, null);
                return new ClientResponse<>(extractResponse, true);
            });
        if (distinctsResponse != null && distinctsResponse.answer != null) {
            response = distinctsResponse;
        } else {
            LOG.warn("Empty distincts response from {}", tenantId);
        }

        List<Map<String, String>> data = new ArrayList<>();
        if (response != null && response.answer != null) {
            int count = 0;
            for (MiruValue result : response.answer.results) {
                String v = result.last();
                data.add(ImmutableMap.of("key", v, "name", v));
                // NOTE(review): the entry is added before the check and count is
                // incremented after it, so up to 12 real entries precede "..." —
                // confirm whether a cap of 10 was intended.
                if (count > 10) {
                    data.add(ImmutableMap.of("key", "...", "name", "..."));
                    break;
                }
                count++;
            }
        }
        return data;
    }

    /**
     * Adds a field filter for {@code field}/{@code value} unless {@code field}
     * is the field currently being typeahead-completed (filtering on it would
     * hide the very values being suggested).
     */
    private void addFilter(String fieldName, String field, String value,
        List<MiruFieldFilter> fieldFilters,
        List<MiruFieldFilter> notFieldFilters) {
        if (fieldName == null || !fieldName.equals(field)) {
            QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, field, value);
        }
    }

    /**
     * Executes the stumptown waveform/log-event query for the poll endpoint and
     * builds the response model: per-level waveforms, elapsed time, and rendered
     * log-event rows.
     *
     * @param input the raw query form values
     * @return template model map for the poll response
     * @throws Exception on request serialization or transport failure
     */
    public Map<String, Object> poll(StumptownPluginRegionInput input) throws Exception {
        Map<String, Object> data = Maps.newHashMap();

        // Normalize so fromAgo is always the larger (older) bound.
        int fromAgo = input.fromAgo > input.toAgo ? input.fromAgo : input.toAgo;
        int toAgo = input.fromAgo > input.toAgo ? input.toAgo : input.fromAgo;
        TimeUnit fromTimeUnit = TimeUnit.valueOf(input.fromTimeUnit);
        TimeUnit toTimeUnit = TimeUnit.valueOf(input.toTimeUnit);
        MiruTimeRange miruTimeRange = QueryUtils.toMiruTimeRange(fromAgo, fromTimeUnit, toAgo, toTimeUnit, input.buckets);

        MiruTenantId tenantId = StumptownSchemaConstants.TENANT_ID;
        MiruResponse<StumptownAnswer> response = null;
        List<MiruFieldFilter> fieldFilters = Lists.newArrayList();
        List<MiruFieldFilter> notFieldFilters = Lists.newArrayList();
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "cluster", input.cluster);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "host", input.host);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "service", input.service);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "instance", input.instance);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "version", input.version);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "thread", input.thread);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "methodName", input.method);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "lineNumber", input.line);
QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "logger", input.logger);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "level", input.logLevel);
        QueryUtils.addFieldFilter(fieldFilters, notFieldFilters, "exceptionClass", input.exceptionClass);

        // Free-text message/thrown terms are parsed into their own sub-filters,
        // lower-cased before parsing.
        List<MiruFilter> subFilters = Lists.newArrayList();
        if (!input.message.isEmpty()) {
            subFilters.add(messageQueryParser.parse("en", false, input.message.toLowerCase()));
        }
        if (!input.thrown.isEmpty()) {
            subFilters.add(messageQueryParser.parse("en", false, input.thrown.toLowerCase()));
        }

        List<MiruFilter> filters = Lists.newArrayList();
        filters.add(new MiruFilter(MiruFilterOperation.and, false, fieldFilters, subFilters));
        if (!notFieldFilters.isEmpty()) {
            filters.add(new MiruFilter(MiruFilterOperation.or, false, notFieldFilters, null));
        }

        ImmutableMap<String, MiruFilter> stumptownFilters = ImmutableMap.of(
            "stumptown",
            new MiruFilter(MiruFilterOperation.pButNotQ, false, null, filters));

        String endpoint = StumptownConstants.STUMPTOWN_PREFIX + StumptownConstants.CUSTOM_QUERY_ENDPOINT;
        String request = requestMapper.writeValueAsString(new MiruRequest<>("stumptownQuery",
            tenantId,
            MiruActorId.NOT_PROVIDED,
            MiruAuthzExpression.NOT_PROVIDED,
            new StumptownQuery(
                miruTimeRange,
                input.buckets,
                input.messageCount,
                MiruFilter.NO_FILTER,
                stumptownFilters),
            MiruSolutionLogLevel.NONE));
        MiruResponse<StumptownAnswer> stumptownResponse = readerClient.call("",
            new RoundRobinStrategy(),
            "stumptownQuery",
            httpClient -> {
                HttpResponse httpResponse = httpClient.postJson(endpoint, request, null);
                @SuppressWarnings("unchecked")
                MiruResponse<StumptownAnswer> extractResponse = responseMapper.extractResultFromResponse(httpResponse,
                    MiruResponse.class, new Class[] { StumptownAnswer.class }, null);
                return new ClientResponse<>(extractResponse, true);
            });
        if (stumptownResponse != null && stumptownResponse.answer != null) {
            response = stumptownResponse;
        } else {
            LOG.warn("Empty stumptown response from {}", tenantId);
        }

        if (response != null && response.answer != null) {
            data.put("elapse", String.valueOf(response.totalElapsed));

            Map<String, StumptownAnswer.Waveform> waveforms = response.answer.waveforms;
            if (waveforms == null) {
                waveforms = Collections.emptyMap();
                LOG.warn("Empty waveform answer from stumptown");
            }

            // Clamp each bucket's long count into int range for the chart payload.
            Map<String, Object> waveformData = Maps.newHashMap();
            for (Map.Entry<String, StumptownAnswer.Waveform> entry : waveforms.entrySet()) {
                long[] waveform = entry.getValue().waveform;
                int[] counts = new int[waveform.length];
                for (int i = 0; i < counts.length; i++) {
                    counts[i] = (int) Math.min(waveform[i], Integer.MAX_VALUE);
                }
                waveformData.put(entry.getKey(), counts);
            }
            data.put("waveforms", waveformData);
            data.put("fromAgoSecs", fromTimeUnit.toSeconds(fromAgo));
            data.put("toAgoSecs", toTimeUnit.toSeconds(toAgo));

            // Gather activity timestamps from every waveform, then batch-fetch
            // the corresponding log events from the payload store.
            List<Long> activityTimes = Lists.newArrayList();
            for (StumptownAnswer.Waveform waveform : waveforms.values()) {
                for (MiruActivity activity : waveform.results) {
                    activityTimes.add(activity.time);
                }
            }

            List<MiruLogEvent> logEvents = Lists.newArrayList(payloads.multiGet(tenantId, activityTimes, MiruLogEvent.class));
            if (!logEvents.isEmpty()) {
                // Render each event through the row template; null fields become "".
                // NOTE(review): Lists.transform is a lazy view — rendering happens on
                // iteration, each time the view is consumed.
                data.put("logEvents", Lists.transform(logEvents,
                    logEvent -> renderer.render(logEventTemplate, ImmutableMap.of("event",
                        ImmutableMap.<String, Object>builder()
                            .put("datacenter", firstNonNull(logEvent.datacenter, ""))
                            .put("cluster", firstNonNull(logEvent.cluster, ""))
                            .put("host", firstNonNull(logEvent.host, ""))
                            .put("service", firstNonNull(logEvent.service, ""))
                            .put("instance", firstNonNull(logEvent.instance, ""))
                            .put("version", firstNonNull(logEvent.version, ""))
                            .put("level", firstNonNull(logEvent.level, ""))
                            .put("threadName", firstNonNull(logEvent.threadName, ""))
                            .put("loggerName", firstNonNull(logEvent.loggerName, ""))
                            .put("method", firstNonNull(logEvent.methodName, ""))
                            .put("line", firstNonNull(logEvent.lineNumber, ""))
                            .put("message", firstNonNull(logEvent.message, ""))
                            .put("timestamp", logEvent.timestamp != null
                                ? new ISO8601DateFormat(TimeZone.getDefault()).format(new Date(Long.parseLong(logEvent.timestamp)))
                                : "")
                            .put("exceptionClass", firstNonNull(logEvent.exceptionClass, ""))
                            .put("thrownStackTrace", logEvent.thrownStackTrace != null
                                ? Arrays.asList(logEvent.thrownStackTrace)
                                : Arrays.asList())
                            .build())));
            } else {
                // No matching events: render the placeholder row instead.
                data.put("logEvents", Arrays.asList(renderer.render(noEventsTemplate, Collections.<String, Object>emptyMap())));
            }
        }
        return data;
    }

    /** @return the display title for this plugin region. */
    @Override
    public String getTitle() {
        return "Query";
    }
}
/*
 * Copyright 2008 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mybatis.generator.codegen.ibatis2.model;

import static org.mybatis.generator.internal.util.JavaBeansUtil.getGetterMethodName;
import static org.mybatis.generator.internal.util.StringUtility.stringHasValue;
import static org.mybatis.generator.internal.util.messages.Messages.getString;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.mybatis.generator.api.CommentGenerator;
import org.mybatis.generator.api.FullyQualifiedTable;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.dom.OutputUtilities;
import org.mybatis.generator.api.dom.java.CompilationUnit;
import org.mybatis.generator.api.dom.java.Field;
import org.mybatis.generator.api.dom.java.FullyQualifiedJavaType;
import org.mybatis.generator.api.dom.java.InnerClass;
import org.mybatis.generator.api.dom.java.JavaVisibility;
import org.mybatis.generator.api.dom.java.Method;
import org.mybatis.generator.api.dom.java.Parameter;
import org.mybatis.generator.api.dom.java.TopLevelClass;
import org.mybatis.generator.codegen.AbstractJavaGenerator;
import org.mybatis.generator.codegen.ibatis2.Ibatis2FormattingUtilities;
import org.mybatis.generator.internal.rules.Rules;

/**
 * Generates the "Example" (criteria/where-clause) model class for an
 * introspected table, for the iBATIS 2 runtime.
 *
 * @author Jeff Butler
 */
public class ExampleGenerator extends AbstractJavaGenerator {

    // When true, emit Java 5 generic collections; otherwise raw pre-generics code.
    private boolean generateForJava5;

    public ExampleGenerator(boolean generateForJava5) {
        super();
        this.generateForJava5 = generateForJava5;
    }

    /**
     * Builds the complete Example class as a DOM model: default and copy
     * constructors, orderByClause/distinct/oredCriteria accessors, the
     * criteria-factory methods, and the two inner criteria classes.
     *
     * @return the Example class wrapped in a single-element list, or an empty
     *         list if a plugin vetoes generation
     */
    @Override
    public List<CompilationUnit> getCompilationUnits() {
        FullyQualifiedTable table = introspectedTable.getFullyQualifiedTable();
        progressCallback.startTask(getString("Progress.6", table.toString())); //$NON-NLS-1$
        CommentGenerator commentGenerator = context.getCommentGenerator();

        FullyQualifiedJavaType type = new FullyQualifiedJavaType(introspectedTable.getExampleType());
        TopLevelClass topLevelClass = new TopLevelClass(type);
        topLevelClass.setVisibility(JavaVisibility.PUBLIC);
        commentGenerator.addJavaFileComment(topLevelClass);

        // add default constructor
        Method method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setConstructor(true);
        method.setName(type.getShortName());
        if (generateForJava5) {
            method.addBodyLine("oredCriteria = new ArrayList<Criteria>();"); //$NON-NLS-1$
        } else {
            method.addBodyLine("oredCriteria = new ArrayList();"); //$NON-NLS-1$
        }
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        // add shallow copy constructor if the update by
        // example methods are enabled - because the parameter
        // class for update by example methods will subclass this class
        Rules rules = introspectedTable.getRules();
        if (rules.generateUpdateByExampleSelective()
            || rules.generateUpdateByExampleWithBLOBs()
            || rules.generateUpdateByExampleWithoutBLOBs()) {
            method = new Method();
            method.setVisibility(JavaVisibility.PROTECTED);
            method.setConstructor(true);
            method.setName(type.getShortName());
            method.addParameter(new Parameter(type, "example")); //$NON-NLS-1$
            method.addBodyLine("this.orderByClause = example.orderByClause;"); //$NON-NLS-1$
            method.addBodyLine("this.oredCriteria = example.oredCriteria;"); //$NON-NLS-1$
            method.addBodyLine("this.distinct = example.distinct;"); //$NON-NLS-1$
            commentGenerator.addGeneralMethodComment(method, introspectedTable);
            topLevelClass.addMethod(method);
        }

        // add field, getter, setter for orderby clause
        Field field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(FullyQualifiedJavaType.getStringInstance());
        field.setName("orderByClause"); //$NON-NLS-1$
        commentGenerator.addFieldComment(field, introspectedTable);
        topLevelClass.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("setOrderByClause"); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "orderByClause")); //$NON-NLS-1$
        method.addBodyLine("this.orderByClause = orderByClause;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(FullyQualifiedJavaType.getStringInstance());
        method.setName("getOrderByClause"); //$NON-NLS-1$
        method.addBodyLine("return orderByClause;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        // add field, getter, setter for distinct
        field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(FullyQualifiedJavaType.getBooleanPrimitiveInstance());
        field.setName("distinct"); //$NON-NLS-1$
        commentGenerator.addFieldComment(field, introspectedTable);
        topLevelClass.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("setDistinct"); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getBooleanPrimitiveInstance(), "distinct")); //$NON-NLS-1$
        method.addBodyLine("this.distinct = distinct;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(FullyQualifiedJavaType.getBooleanPrimitiveInstance());
        method.setName("isDistinct"); //$NON-NLS-1$
        method.addBodyLine("return distinct;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        // add field and methods for the list of ored criteria
        field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        FullyQualifiedJavaType fqjt;
        if (generateForJava5) {
            fqjt = new FullyQualifiedJavaType("java.util.List<Criteria>"); //$NON-NLS-1$
        } else {
            fqjt = new FullyQualifiedJavaType("java.util.List"); //$NON-NLS-1$
        }
        field.setType(fqjt);
        field.setName("oredCriteria"); //$NON-NLS-1$
        commentGenerator.addFieldComment(field, introspectedTable);
        topLevelClass.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(fqjt);
        method.setName("getOredCriteria"); //$NON-NLS-1$
        method.addBodyLine("return oredCriteria;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("or"); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getCriteriaInstance(), "criteria")); //$NON-NLS-1$
        method.addBodyLine("oredCriteria.add(criteria);"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("or"); //$NON-NLS-1$
        method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance());
        method.addBodyLine("Criteria criteria = createCriteriaInternal();"); //$NON-NLS-1$
        method.addBodyLine("oredCriteria.add(criteria);"); //$NON-NLS-1$
        method.addBodyLine("return criteria;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("createCriteria"); //$NON-NLS-1$
        method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance());
        method.addBodyLine("Criteria criteria = createCriteriaInternal();"); //$NON-NLS-1$
        method.addBodyLine("if (oredCriteria.size() == 0) {"); //$NON-NLS-1$
        method.addBodyLine("oredCriteria.add(criteria);"); //$NON-NLS-1$
        method.addBodyLine("}"); //$NON-NLS-1$
        method.addBodyLine("return criteria;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PROTECTED);
        method.setName("createCriteriaInternal"); //$NON-NLS-1$
        method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance());
        method.addBodyLine("Criteria criteria = new Criteria();"); //$NON-NLS-1$
        method.addBodyLine("return criteria;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("clear"); //$NON-NLS-1$
        method.addBodyLine("oredCriteria.clear();"); //$NON-NLS-1$
        method.addBodyLine("orderByClause = null;"); //$NON-NLS-1$
        method.addBodyLine("distinct = false;"); //$NON-NLS-1$
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);

        // now generate the inner class that holds the AND conditions
        topLevelClass.addInnerClass(getGeneratedCriteriaInnerClass(topLevelClass));
        topLevelClass.addInnerClass(getCriteriaInnerClass(topLevelClass));

        List<CompilationUnit> answer = new ArrayList<CompilationUnit>();
        if (context.getPlugins().modelExampleClassGenerated(topLevelClass, introspectedTable)) {
            answer.add(topLevelClass);
        }
        return answer;
    }

    /**
     * Builds the public Criteria inner class: a thin concrete subclass of
     * GeneratedCriteria with only a protected constructor.
     */
    private InnerClass getCriteriaInnerClass(TopLevelClass topLevelClass) {
        Method method;
        InnerClass answer = new InnerClass(FullyQualifiedJavaType.getCriteriaInstance());
        answer.setVisibility(JavaVisibility.PUBLIC);
        answer.setStatic(true);
        answer.setSuperClass(FullyQualifiedJavaType.getGeneratedCriteriaInstance());
        context.getCommentGenerator().addClassComment(answer, introspectedTable, true);
        method = new
Method();
        method.setVisibility(JavaVisibility.PROTECTED);
        method.setName("Criteria"); //$NON-NLS-1$
        method.setConstructor(true);
        method.addBodyLine("super();"); //$NON-NLS-1$
        answer.addMethod(method);
        return answer;
    }

    /**
     * Builds the abstract GeneratedCriteria inner class: the four criteria
     * lists with getters, the isValid() method, the addCriterion* helpers, and
     * (via the column loop) the per-column condition methods.
     */
    private InnerClass getGeneratedCriteriaInnerClass(TopLevelClass topLevelClass) {
        Field field;
        Method method;
        InnerClass answer = new InnerClass(FullyQualifiedJavaType.getGeneratedCriteriaInstance());
        answer.setVisibility(JavaVisibility.PROTECTED);
        answer.setStatic(true);
        answer.setAbstract(true);
        context.getCommentGenerator().addClassComment(answer, introspectedTable);

        method = new Method();
        method.setVisibility(JavaVisibility.PROTECTED);
        method.setName("GeneratedCriteria"); //$NON-NLS-1$
        method.setConstructor(true);
        method.addBodyLine("super();"); //$NON-NLS-1$
        if (generateForJava5) {
            method.addBodyLine("criteriaWithoutValue = new ArrayList<String>();"); //$NON-NLS-1$
            method.addBodyLine("criteriaWithSingleValue = new ArrayList<Map<String, Object>>();"); //$NON-NLS-1$
            method.addBodyLine("criteriaWithListValue = new ArrayList<Map<String, Object>>();"); //$NON-NLS-1$
            method.addBodyLine("criteriaWithBetweenValue = new ArrayList<Map<String, Object>>();"); //$NON-NLS-1$
        } else {
            method.addBodyLine("criteriaWithoutValue = new ArrayList();"); //$NON-NLS-1$
            method.addBodyLine("criteriaWithSingleValue = new ArrayList();"); //$NON-NLS-1$
            method.addBodyLine("criteriaWithListValue = new ArrayList();"); //$NON-NLS-1$
            method.addBodyLine("criteriaWithBetweenValue = new ArrayList();"); //$NON-NLS-1$
        }
        answer.addMethod(method);

        // Track every criteria list name so isValid() can OR their sizes together;
        // type-handled columns contribute extra lists below.
        List<String> criteriaLists = new ArrayList<String>();
        criteriaLists.add("criteriaWithoutValue"); //$NON-NLS-1$
        criteriaLists.add("criteriaWithSingleValue"); //$NON-NLS-1$
        criteriaLists.add("criteriaWithListValue"); //$NON-NLS-1$
        criteriaLists.add("criteriaWithBetweenValue"); //$NON-NLS-1$
        for (IntrospectedColumn introspectedColumn : introspectedTable.getNonBLOBColumns()) {
            if (stringHasValue(introspectedColumn.getTypeHandler())) {
                criteriaLists.addAll(addtypeHandledObjectsAndMethods(introspectedColumn, method, answer));
            }
        }

        // now generate the isValid method
        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("isValid"); //$NON-NLS-1$
        method.setReturnType(FullyQualifiedJavaType.getBooleanPrimitiveInstance());
        StringBuilder sb = new StringBuilder();
        Iterator<String> strIter = criteriaLists.iterator();
        sb.append("return "); //$NON-NLS-1$
        sb.append(strIter.next());
        sb.append(".size() > 0"); //$NON-NLS-1$
        method.addBodyLine(sb.toString());
        while (strIter.hasNext()) {
            sb.setLength(0);
            OutputUtilities.javaIndent(sb, 1);
            sb.append("|| "); //$NON-NLS-1$
            sb.append(strIter.next());
            sb.append(".size() > 0"); //$NON-NLS-1$
            // The trailing ';' goes only on the last continuation line.
            if (!strIter.hasNext()) {
                sb.append(';');
            }
            method.addBodyLine(sb.toString());
        }
        answer.addMethod(method);

        // now we need to generate the methods that will be used in the SqlMap
        // to generate the dynamic where clause
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewMapInstance());
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewListInstance());
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewHashMapInstance());
        topLevelClass.addImportedType(FullyQualifiedJavaType.getNewArrayListInstance());

        field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        FullyQualifiedJavaType listOfStrings;
        if (generateForJava5) {
            listOfStrings = new FullyQualifiedJavaType("java.util.List<java.lang.String>"); //$NON-NLS-1$
        } else {
            listOfStrings = new FullyQualifiedJavaType("java.util.List"); //$NON-NLS-1$
        }
        field.setType(listOfStrings);
        field.setName("criteriaWithoutValue"); //$NON-NLS-1$
        answer.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(field.getType());
        method.setName(getGetterMethodName(field.getName(), field.getType()));
        method.addBodyLine("return criteriaWithoutValue;"); //$NON-NLS-1$
        answer.addMethod(method);

        FullyQualifiedJavaType listOfMaps;
        if (generateForJava5) {
            listOfMaps = new FullyQualifiedJavaType("java.util.List<java.util.Map<java.lang.String, java.lang.Object>>"); //$NON-NLS-1$
        } else {
            listOfMaps = new FullyQualifiedJavaType("java.util.List"); //$NON-NLS-1$
        }
        field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(listOfMaps);
        field.setName("criteriaWithSingleValue"); //$NON-NLS-1$
        answer.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(field.getType());
        method.setName(getGetterMethodName(field.getName(), field.getType()));
        method.addBodyLine("return criteriaWithSingleValue;"); //$NON-NLS-1$
        answer.addMethod(method);

        field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(listOfMaps);
        field.setName("criteriaWithListValue"); //$NON-NLS-1$
        answer.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(field.getType());
        method.setName(getGetterMethodName(field.getName(), field.getType()));
        method.addBodyLine("return criteriaWithListValue;"); //$NON-NLS-1$
        answer.addMethod(method);

        field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(listOfMaps);
        field.setName("criteriaWithBetweenValue"); //$NON-NLS-1$
        answer.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(field.getType());
        method.setName(getGetterMethodName(field.getName(), field.getType()));
        method.addBodyLine("return criteriaWithBetweenValue;"); //$NON-NLS-1$
        answer.addMethod(method);

        // now add the methods for simplifying the individual field set methods
        method = new Method();
        method.setVisibility(JavaVisibility.PROTECTED);
        method.setName("addCriterion"); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
        method.addBodyLine("if (condition == null) {"); //$NON-NLS-1$
        method.addBodyLine("throw new RuntimeException(\"Value for condition cannot be null\");"); //$NON-NLS-1$
        method.addBodyLine("}"); //$NON-NLS-1$
        method.addBodyLine("criteriaWithoutValue.add(condition);"); //$NON-NLS-1$
        answer.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PROTECTED);
        method.setName("addCriterion"); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getObjectInstance(), "value")); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
        method.addBodyLine("if (value == null) {"); //$NON-NLS-1$
        method.addBodyLine("throw new RuntimeException(\"Value for \" + property + \" cannot be null\");"); //$NON-NLS-1$
        method.addBodyLine("}"); //$NON-NLS-1$
        if (generateForJava5) {
            method.addBodyLine("Map<String, Object> map = new HashMap<String, Object>();"); //$NON-NLS-1$
        } else {
            method.addBodyLine("Map map = new HashMap();"); //$NON-NLS-1$
        }
        method.addBodyLine("map.put(\"condition\", condition);"); //$NON-NLS-1$
        method.addBodyLine("map.put(\"value\", value);"); //$NON-NLS-1$
        method.addBodyLine("criteriaWithSingleValue.add(map);"); //$NON-NLS-1$
        answer.addMethod(method);

        FullyQualifiedJavaType listOfObjects;
        if (generateForJava5) {
            listOfObjects = new FullyQualifiedJavaType("java.util.List<? extends java.lang.Object>"); //$NON-NLS-1$
        } else {
            listOfObjects = new FullyQualifiedJavaType("java.util.List"); //$NON-NLS-1$
        }

        method = new Method();
        method.setVisibility(JavaVisibility.PROTECTED);
        method.setName("addCriterion"); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
        method.addParameter(new Parameter(listOfObjects, "values")); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
        method.addBodyLine("if (values == null || values.size() == 0) {"); //$NON-NLS-1$
        method.addBodyLine("throw new RuntimeException(\"Value list for \" + property + \" cannot be null or empty\");"); //$NON-NLS-1$
        method.addBodyLine("}"); //$NON-NLS-1$
        if (generateForJava5) {
            method.addBodyLine("Map<String, Object> map = new HashMap<String, Object>();"); //$NON-NLS-1$
        } else {
            method.addBodyLine("Map map = new HashMap();"); //$NON-NLS-1$
        }
        method.addBodyLine("map.put(\"condition\", condition);"); //$NON-NLS-1$
        method.addBodyLine("map.put(\"values\", values);"); //$NON-NLS-1$
        method.addBodyLine("criteriaWithListValue.add(map);"); //$NON-NLS-1$
        answer.addMethod(method);

        method = new Method();
        method.setVisibility(JavaVisibility.PROTECTED);
        method.setName("addCriterion"); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getObjectInstance(), "value1")); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getObjectInstance(), "value2")); //$NON-NLS-1$
        method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
        method.addBodyLine("if (value1 == null || value2 == null) {"); //$NON-NLS-1$
        method.addBodyLine("throw new RuntimeException(\"Between values for \" + property + \" cannot be null\");"); //$NON-NLS-1$
        method.addBodyLine("}"); //$NON-NLS-1$
        if (generateForJava5) {
            method.addBodyLine("List<Object> list = new ArrayList<Object>();"); //$NON-NLS-1$
        } else {
            method.addBodyLine("List list = new ArrayList();"); //$NON-NLS-1$
        }
        method.addBodyLine("list.add(value1);"); //$NON-NLS-1$
        method.addBodyLine("list.add(value2);"); //$NON-NLS-1$
        if (generateForJava5) {
            method.addBodyLine("Map<String, Object> map = new HashMap<String, Object>();"); //$NON-NLS-1$
        } else {
            method.addBodyLine("Map map = new HashMap();"); //$NON-NLS-1$
        }
        method.addBodyLine("map.put(\"condition\", condition);"); //$NON-NLS-1$
        method.addBodyLine("map.put(\"values\", list);"); //$NON-NLS-1$
        method.addBodyLine("criteriaWithBetweenValue.add(map);"); //$NON-NLS-1$
        answer.addMethod(method);

        FullyQualifiedJavaType listOfDates;
        if (generateForJava5) {
            listOfDates = new FullyQualifiedJavaType("java.util.List<java.util.Date>"); //$NON-NLS-1$
        } else {
            listOfDates = new FullyQualifiedJavaType("java.util.List"); //$NON-NLS-1$
        }

        // JDBC DATE columns need value conversion to java.sql.Date.
        if (introspectedTable.hasJDBCDateColumns()) {
            topLevelClass.addImportedType(FullyQualifiedJavaType.getDateInstance());
            topLevelClass.addImportedType(FullyQualifiedJavaType.getNewIteratorInstance());
            method = new Method();
            method.setVisibility(JavaVisibility.PROTECTED);
            method.setName("addCriterionForJDBCDate"); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getDateInstance(), "value")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
            method.addBodyLine("if (value == null) {"); //$NON-NLS-1$
            method.addBodyLine("throw new RuntimeException(\"Value for \" + property + \" cannot be null\");"); //$NON-NLS-1$
            method.addBodyLine("}"); //$NON-NLS-1$
            method.addBodyLine("addCriterion(condition, new java.sql.Date(value.getTime()), property);"); //$NON-NLS-1$
            answer.addMethod(method);

            method = new Method();
method.setVisibility(JavaVisibility.PROTECTED);
            method.setName("addCriterionForJDBCDate"); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
            method.addParameter(new Parameter(listOfDates, "values")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
            method.addBodyLine("if (values == null || values.size() == 0) {"); //$NON-NLS-1$
            method.addBodyLine("throw new RuntimeException(\"Value list for \" + property + \" cannot be null or empty\");"); //$NON-NLS-1$
            method.addBodyLine("}"); //$NON-NLS-1$
            if (generateForJava5) {
                method.addBodyLine("List<java.sql.Date> dateList = new ArrayList<java.sql.Date>();"); //$NON-NLS-1$
                method.addBodyLine("Iterator<Date> iter = values.iterator();"); //$NON-NLS-1$
                method.addBodyLine("while (iter.hasNext()) {"); //$NON-NLS-1$
                method.addBodyLine("dateList.add(new java.sql.Date(iter.next().getTime()));"); //$NON-NLS-1$
                method.addBodyLine("}"); //$NON-NLS-1$
            } else {
                method.addBodyLine("List dateList = new ArrayList();"); //$NON-NLS-1$
                method.addBodyLine("Iterator iter = values.iterator();"); //$NON-NLS-1$
                method.addBodyLine("while (iter.hasNext()) {"); //$NON-NLS-1$
                method.addBodyLine("dateList.add(new java.sql.Date(((Date)iter.next()).getTime()));"); //$NON-NLS-1$
                method.addBodyLine("}"); //$NON-NLS-1$
            }
            method.addBodyLine("addCriterion(condition, dateList, property);"); //$NON-NLS-1$
            answer.addMethod(method);

            method = new Method();
            method.setVisibility(JavaVisibility.PROTECTED);
            method.setName("addCriterionForJDBCDate"); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getDateInstance(), "value1")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getDateInstance(), "value2")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
            method.addBodyLine("if (value1 == null || value2 == null) {"); //$NON-NLS-1$
            method.addBodyLine("throw new RuntimeException(\"Between values for \" + property + \" cannot be null\");"); //$NON-NLS-1$
            method.addBodyLine("}"); //$NON-NLS-1$
            method.addBodyLine("addCriterion(condition, new java.sql.Date(value1.getTime()), new java.sql.Date(value2.getTime()), property);"); //$NON-NLS-1$
            answer.addMethod(method);
        }

        // JDBC TIME columns need value conversion to java.sql.Time (mirrors the
        // DATE block above).
        if (introspectedTable.hasJDBCTimeColumns()) {
            topLevelClass.addImportedType(FullyQualifiedJavaType.getDateInstance());
            topLevelClass.addImportedType(FullyQualifiedJavaType.getNewIteratorInstance());
            method = new Method();
            method.setVisibility(JavaVisibility.PROTECTED);
            method.setName("addCriterionForJDBCTime"); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getDateInstance(), "value")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
            method.addBodyLine("if (value == null) {"); //$NON-NLS-1$
            method.addBodyLine("throw new RuntimeException(\"Value for \" + property + \" cannot be null\");"); //$NON-NLS-1$
            method.addBodyLine("}"); //$NON-NLS-1$
            method.addBodyLine("addCriterion(condition, new java.sql.Time(value.getTime()), property);"); //$NON-NLS-1$
            answer.addMethod(method);

            method = new Method();
            method.setVisibility(JavaVisibility.PROTECTED);
            method.setName("addCriterionForJDBCTime"); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
            method.addParameter(new Parameter(listOfDates, "values")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
            method.addBodyLine("if (values == null || values.size() == 0) {"); //$NON-NLS-1$
            method.addBodyLine("throw new RuntimeException(\"Value list for \" + property + \" cannot be null or empty\");"); //$NON-NLS-1$
            method.addBodyLine("}"); //$NON-NLS-1$
            if (generateForJava5) {
                method.addBodyLine("List<java.sql.Time> timeList = new ArrayList<java.sql.Time>();"); //$NON-NLS-1$
                method.addBodyLine("Iterator<Date> iter = values.iterator();"); //$NON-NLS-1$
                method.addBodyLine("while (iter.hasNext()) {"); //$NON-NLS-1$
                method.addBodyLine("timeList.add(new java.sql.Time(iter.next().getTime()));"); //$NON-NLS-1$
                method.addBodyLine("}"); //$NON-NLS-1$
            } else {
                method.addBodyLine("List timeList = new ArrayList();"); //$NON-NLS-1$
                method.addBodyLine("Iterator iter = values.iterator();"); //$NON-NLS-1$
                method.addBodyLine("while (iter.hasNext()) {"); //$NON-NLS-1$
                method.addBodyLine("timeList.add(new java.sql.Time(((Date)iter.next()).getTime()));"); //$NON-NLS-1$
                method.addBodyLine("}"); //$NON-NLS-1$
            }
            method.addBodyLine("addCriterion(condition, timeList, property);"); //$NON-NLS-1$
            answer.addMethod(method);

            method = new Method();
            method.setVisibility(JavaVisibility.PROTECTED);
            method.setName("addCriterionForJDBCTime"); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getDateInstance(), "value1")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getDateInstance(), "value2")); //$NON-NLS-1$
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$
            method.addBodyLine("if (value1 == null || value2 == null) {"); //$NON-NLS-1$
            method.addBodyLine("throw new RuntimeException(\"Between values for \" + property + \" cannot be null\");"); //$NON-NLS-1$
            method.addBodyLine("}"); //$NON-NLS-1$
            method.addBodyLine("addCriterion(condition, new java.sql.Time(value1.getTime()), new java.sql.Time(value2.getTime()), property);"); //$NON-NLS-1$
            answer.addMethod(method);
        }

        for (IntrospectedColumn introspectedColumn : introspectedTable.getNonBLOBColumns()) {
            topLevelClass.addImportedType(introspectedColumn.getFullyQualifiedJavaType());

            // here we need to add the individual methods for setting the
            // conditions for a field
            answer.addMethod(getSetNullMethod(introspectedColumn));
            answer.addMethod(getSetNotNullMethod(introspectedColumn));
            answer.addMethod(getSetEqualMethod(introspectedColumn));
            answer.addMethod(getSetNotEqualMethod(introspectedColumn));
            answer.addMethod(getSetGreaterThanMethod(introspectedColumn));
            answer.addMethod(getSetGreaterThenOrEqualMethod(introspectedColumn));
            answer.addMethod(getSetLessThanMethod(introspectedColumn));
            answer.addMethod(getSetLessThanOrEqualMethod(introspectedColumn));
            if (introspectedColumn.isJdbcCharacterColumn()) {
                answer.addMethod(getSetLikeMethod(introspectedColumn));
                answer.addMethod(getSetNotLikeMethod(introspectedColumn));
            }
            answer.addMethod(getSetInOrNotInMethod(introspectedColumn, true));
            answer.addMethod(getSetInOrNotInMethod(introspectedColumn, false));
            answer.addMethod(getSetBetweenOrNotBetweenMethod(introspectedColumn, true));
            answer.addMethod(getSetBetweenOrNotBetweenMethod(introspectedColumn, false));
        }
        return answer;
    }

    /**
     * This method adds all the extra methods and fields required to support a
     * user defined type handler on some column.
     *
     * @param introspectedColumn the column that declares a type handler
     * @param constructor the GeneratedCriteria constructor (passed for body additions
     *        made beyond this view — NOTE(review): not used in the visible portion)
     * @param innerClass the GeneratedCriteria class receiving the new members
     * @return a list of the names of all Lists added to the class by this
     *         method
     */
    private List<String> addtypeHandledObjectsAndMethods(IntrospectedColumn introspectedColumn, Method constructor, InnerClass innerClass) {
        List<String> answer = new ArrayList<String>();
        StringBuilder sb = new StringBuilder();

        // add new private fields and public accessors in the class
        FullyQualifiedJavaType listOfMaps;
        if (generateForJava5) {
            listOfMaps = new FullyQualifiedJavaType("java.util.List<java.util.Map<java.lang.String, java.lang.Object>>"); //$NON-NLS-1$
        } else {
            listOfMaps = new FullyQualifiedJavaType("java.util.List"); //$NON-NLS-1$
        }

        // <property>CriteriaWithSingleValue field plus getter.
        sb.setLength(0);
        sb.append(introspectedColumn.getJavaProperty());
        sb.append("CriteriaWithSingleValue"); //$NON-NLS-1$
        answer.add(sb.toString());

        Field field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(listOfMaps);
        field.setName(sb.toString());
        innerClass.addField(field);

        Method method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(field.getType());
        method.setName(getGetterMethodName(field.getName(), field.getType()));
        sb.insert(0, "return "); //$NON-NLS-1$
        sb.append(';');
        method.addBodyLine(sb.toString());
        innerClass.addMethod(method);

        // <property>CriteriaWithListValue field plus getter.
        sb.setLength(0);
        sb.append(introspectedColumn.getJavaProperty());
        sb.append("CriteriaWithListValue"); //$NON-NLS-1$
        answer.add(sb.toString());

        field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(listOfMaps);
        field.setName(sb.toString());
        innerClass.addField(field);

        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(field.getType());
        method.setName(getGetterMethodName(field.getName(), field.getType()));
        sb.insert(0, "return "); //$NON-NLS-1$
        sb.append(';');
        method.addBodyLine(sb.toString());
        innerClass.addMethod(method);

        // (method continues beyond this view)
        sb.setLength(0);
        sb.append(introspectedColumn.getJavaProperty());
sb.append("CriteriaWithBetweenValue"); //$NON-NLS-1$ answer.add(sb.toString()); field = new Field(); field.setVisibility(JavaVisibility.PROTECTED); field.setType(listOfMaps); field.setName(sb.toString()); innerClass.addField(field); method = new Method(); method.setVisibility(JavaVisibility.PUBLIC); method.setReturnType(field.getType()); method.setName(getGetterMethodName(field.getName(), field.getType())); sb.insert(0, "return "); //$NON-NLS-1$ sb.append(';'); method.addBodyLine(sb.toString()); innerClass.addMethod(method); // add constructor initialization sb.setLength(0); sb.append(introspectedColumn.getJavaProperty()); if (generateForJava5) { sb.append("CriteriaWithSingleValue = new ArrayList<Map<String, Object>>();"); //$NON-NLS-1$; } else { sb.append("CriteriaWithSingleValue = new ArrayList();"); //$NON-NLS-1$; } constructor.addBodyLine(sb.toString()); sb.setLength(0); sb.append(introspectedColumn.getJavaProperty()); if (generateForJava5) { sb.append("CriteriaWithListValue = new ArrayList<Map<String, Object>>();"); //$NON-NLS-1$ } else { sb.append("CriteriaWithListValue = new ArrayList();"); //$NON-NLS-1$ } constructor.addBodyLine(sb.toString()); sb.setLength(0); sb.append(introspectedColumn.getJavaProperty()); if (generateForJava5) { sb.append("CriteriaWithBetweenValue = new ArrayList<Map<String, Object>>();"); //$NON-NLS-1$ } else { sb.append("CriteriaWithBetweenValue = new ArrayList();"); //$NON-NLS-1$ } constructor.addBodyLine(sb.toString()); // now add the methods for simplifying the individual field set methods method = new Method(); method.setVisibility(JavaVisibility.PROTECTED); sb.setLength(0); sb.append("add"); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(3, Character.toUpperCase(sb.charAt(3))); sb.append("Criterion"); //$NON-NLS-1$ method.setName(sb.toString()); method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$ method.addParameter(new 
Parameter(introspectedColumn.getFullyQualifiedJavaType(), "value")); //$NON-NLS-1$ method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$ method.addBodyLine("if (value == null) {"); //$NON-NLS-1$ method.addBodyLine("throw new RuntimeException(\"Value for \" + property + \" cannot be null\");"); //$NON-NLS-1$ method.addBodyLine("}"); //$NON-NLS-1$ if (generateForJava5) { method.addBodyLine("Map<String, Object> map = new HashMap<String, Object>();"); //$NON-NLS-1$ } else { method.addBodyLine("Map map = new HashMap();"); //$NON-NLS-1$ } method.addBodyLine("map.put(\"condition\", condition);"); //$NON-NLS-1$ method.addBodyLine("map.put(\"value\", value);"); //$NON-NLS-1$ sb.setLength(0); sb.append(introspectedColumn.getJavaProperty()); sb.append("CriteriaWithSingleValue.add(map);"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); innerClass.addMethod(method); FullyQualifiedJavaType listOfObjects = FullyQualifiedJavaType.getNewListInstance(); if (generateForJava5) { listOfObjects.addTypeArgument(introspectedColumn.getFullyQualifiedJavaType()); } sb.setLength(0); sb.append("add"); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(3, Character.toUpperCase(sb.charAt(3))); sb.append("Criterion"); //$NON-NLS-1$ method = new Method(); method.setVisibility(JavaVisibility.PROTECTED); method.setName(sb.toString()); method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$ method.addParameter(new Parameter(listOfObjects, "values")); //$NON-NLS-1$ method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$ method.addBodyLine("if (values == null || values.size() == 0) {"); //$NON-NLS-1$ method.addBodyLine("throw new RuntimeException(\"Value list for \" + property + \" cannot be null or empty\");"); //$NON-NLS-1$ method.addBodyLine("}"); //$NON-NLS-1$ if (generateForJava5) { method.addBodyLine("Map<String, 
Object> map = new HashMap<String, Object>();"); //$NON-NLS-1$ } else { method.addBodyLine("Map map = new HashMap();"); //$NON-NLS-1$ } method.addBodyLine("map.put(\"condition\", condition);"); //$NON-NLS-1$ method.addBodyLine("map.put(\"values\", values);"); //$NON-NLS-1$ sb.setLength(0); sb.append(introspectedColumn.getJavaProperty()); sb.append("CriteriaWithListValue.add(map);"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); innerClass.addMethod(method); sb.setLength(0); sb.append("add"); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(3, Character.toUpperCase(sb.charAt(3))); sb.append("Criterion"); //$NON-NLS-1$ method = new Method(); method.setVisibility(JavaVisibility.PROTECTED); method.setName(sb.toString()); method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "condition")); //$NON-NLS-1$ method.addParameter(new Parameter(introspectedColumn.getFullyQualifiedJavaType(), "value1")); //$NON-NLS-1$ method.addParameter(new Parameter(introspectedColumn.getFullyQualifiedJavaType(), "value2")); //$NON-NLS-1$ method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "property")); //$NON-NLS-1$ method.addBodyLine("if (value1 == null || value2 == null) {"); //$NON-NLS-1$ method.addBodyLine("throw new RuntimeException(\"Between values for \" + property + \" cannot be null\");"); //$NON-NLS-1$ method.addBodyLine("}"); //$NON-NLS-1$ if (generateForJava5) { sb.setLength(0); sb.append("List<"); //$NON-NLS-1$ sb.append(introspectedColumn.getFullyQualifiedJavaType().getShortName()); sb.append("> list = new ArrayList<"); //$NON-NLS-1$ sb.append(introspectedColumn.getFullyQualifiedJavaType().getShortName()); sb.append(">();"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); } else { method.addBodyLine("List list = new ArrayList();"); //$NON-NLS-1$ } method.addBodyLine("list.add(value1);"); //$NON-NLS-1$ method.addBodyLine("list.add(value2);"); //$NON-NLS-1$ if (generateForJava5) { 
method.addBodyLine("Map<String, Object> map = new HashMap<String, Object>();"); //$NON-NLS-1$ } else { method.addBodyLine("Map map = new HashMap();"); //$NON-NLS-1$ } method.addBodyLine("map.put(\"condition\", condition);"); //$NON-NLS-1$ method.addBodyLine("map.put(\"values\", list);"); //$NON-NLS-1$ sb.setLength(0); sb.append(introspectedColumn.getJavaProperty()); sb.append("CriteriaWithBetweenValue.add(map);"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); innerClass.addMethod(method); return answer; } private Method getSetNullMethod(IntrospectedColumn introspectedColumn) { return getNoValueMethod(introspectedColumn, "IsNull", "is null"); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetNotNullMethod(IntrospectedColumn introspectedColumn) { return getNoValueMethod(introspectedColumn, "IsNotNull", "is not null"); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetEqualMethod(IntrospectedColumn introspectedColumn) { return getSingleValueMethod(introspectedColumn, "EqualTo", "="); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetNotEqualMethod(IntrospectedColumn introspectedColumn) { return getSingleValueMethod(introspectedColumn, "NotEqualTo", "<>"); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetGreaterThanMethod(IntrospectedColumn introspectedColumn) { return getSingleValueMethod(introspectedColumn, "GreaterThan", ">"); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetGreaterThenOrEqualMethod(IntrospectedColumn introspectedColumn) { return getSingleValueMethod(introspectedColumn, "GreaterThanOrEqualTo", ">="); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetLessThanMethod(IntrospectedColumn introspectedColumn) { return getSingleValueMethod(introspectedColumn, "LessThan", "<"); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetLessThanOrEqualMethod(IntrospectedColumn introspectedColumn) { return getSingleValueMethod(introspectedColumn, "LessThanOrEqualTo", "<="); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetLikeMethod(IntrospectedColumn 
introspectedColumn) { return getSingleValueMethod(introspectedColumn, "Like", "like"); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSetNotLikeMethod(IntrospectedColumn introspectedColumn) { return getSingleValueMethod(introspectedColumn, "NotLike", "not like"); //$NON-NLS-1$ //$NON-NLS-2$ } private Method getSingleValueMethod(IntrospectedColumn introspectedColumn, String nameFragment, String operator) { Method method = new Method(); method.setVisibility(JavaVisibility.PUBLIC); method.addParameter(new Parameter(introspectedColumn.getFullyQualifiedJavaType(), "value")); //$NON-NLS-1$ StringBuilder sb = new StringBuilder(); sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); sb.insert(0, "and"); //$NON-NLS-1$ sb.append(nameFragment); method.setName(sb.toString()); method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance()); sb.setLength(0); if (introspectedColumn.isJDBCDateColumn()) { sb.append("addCriterionForJDBCDate(\""); //$NON-NLS-1$ } else if (introspectedColumn.isJDBCTimeColumn()) { sb.append("addCriterionForJDBCTime(\""); //$NON-NLS-1$ } else if (stringHasValue(introspectedColumn.getTypeHandler())) { sb.append("add"); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(3, Character.toUpperCase(sb.charAt(3))); sb.append("Criterion(\""); //$NON-NLS-1$ } else { sb.append("addCriterion(\""); //$NON-NLS-1$ } sb.append(Ibatis2FormattingUtilities.getAliasedActualColumnName(introspectedColumn)); sb.append(' '); sb.append(operator); sb.append("\", "); //$NON-NLS-1$ if (introspectedColumn.getFullyQualifiedJavaType().isPrimitive()) { sb.append("new "); //$NON-NLS-1$ sb.append(introspectedColumn.getFullyQualifiedJavaType().getPrimitiveTypeWrapper().getShortName()); sb.append("(value)"); //$NON-NLS-1$ } else { sb.append("value"); //$NON-NLS-1$ } sb.append(", \""); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.append("\");"); //$NON-NLS-1$ 
method.addBodyLine(sb.toString()); method.addBodyLine("return (Criteria) this;"); //$NON-NLS-1$ return method; } /** * Generates methods that set between and not between conditions * * @param introspectedColumn * @param betweenMethod * @return a generated method for the between or not between method */ private Method getSetBetweenOrNotBetweenMethod(IntrospectedColumn introspectedColumn, boolean betweenMethod) { Method method = new Method(); method.setVisibility(JavaVisibility.PUBLIC); FullyQualifiedJavaType type = introspectedColumn.getFullyQualifiedJavaType(); method.addParameter(new Parameter(type, "value1")); //$NON-NLS-1$ method.addParameter(new Parameter(type, "value2")); //$NON-NLS-1$ StringBuilder sb = new StringBuilder(); sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); sb.insert(0, "and"); //$NON-NLS-1$ if (betweenMethod) { sb.append("Between"); //$NON-NLS-1$ } else { sb.append("NotBetween"); //$NON-NLS-1$ } method.setName(sb.toString()); method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance()); sb.setLength(0); if (introspectedColumn.isJDBCDateColumn()) { sb.append("addCriterionForJDBCDate(\""); //$NON-NLS-1$ } else if (introspectedColumn.isJDBCTimeColumn()) { sb.append("addCriterionForJDBCTime(\""); //$NON-NLS-1$ } else if (stringHasValue(introspectedColumn.getTypeHandler())) { sb.append("add"); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(3, Character.toUpperCase(sb.charAt(3))); sb.append("Criterion(\""); //$NON-NLS-1$ } else { sb.append("addCriterion(\""); //$NON-NLS-1$ } sb.append(Ibatis2FormattingUtilities.getAliasedActualColumnName(introspectedColumn)); if (betweenMethod) { sb.append(" between"); //$NON-NLS-1$ } else { sb.append(" not between"); //$NON-NLS-1$ } sb.append("\", "); //$NON-NLS-1$ if (introspectedColumn.getFullyQualifiedJavaType().isPrimitive()) { sb.append("new "); //$NON-NLS-1$ 
sb.append(introspectedColumn.getFullyQualifiedJavaType().getPrimitiveTypeWrapper().getShortName()); sb.append("(value1), "); //$NON-NLS-1$ sb.append("new "); //$NON-NLS-1$ sb.append(introspectedColumn.getFullyQualifiedJavaType().getPrimitiveTypeWrapper().getShortName()); sb.append("(value2)"); //$NON-NLS-1$ } else { sb.append("value1, value2"); //$NON-NLS-1$ } sb.append(", \""); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.append("\");"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); method.addBodyLine("return (Criteria) this;"); //$NON-NLS-1$ return method; } /** * * @param introspectedColumn * @param inMethod * if true generates an "in" method, else generates a "not in" * method * @return a generated method for the in or not in method */ private Method getSetInOrNotInMethod(IntrospectedColumn introspectedColumn, boolean inMethod) { Method method = new Method(); method.setVisibility(JavaVisibility.PUBLIC); FullyQualifiedJavaType type = FullyQualifiedJavaType.getNewListInstance(); if (generateForJava5) { if (introspectedColumn.getFullyQualifiedJavaType().isPrimitive()) { type.addTypeArgument(introspectedColumn.getFullyQualifiedJavaType().getPrimitiveTypeWrapper()); } else { type.addTypeArgument(introspectedColumn.getFullyQualifiedJavaType()); } } method.addParameter(new Parameter(type, "values")); //$NON-NLS-1$ StringBuilder sb = new StringBuilder(); sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); sb.insert(0, "and"); //$NON-NLS-1$ if (inMethod) { sb.append("In"); //$NON-NLS-1$ } else { sb.append("NotIn"); //$NON-NLS-1$ } method.setName(sb.toString()); method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance()); sb.setLength(0); if (introspectedColumn.isJDBCDateColumn()) { sb.append("addCriterionForJDBCDate(\""); //$NON-NLS-1$ } else if (introspectedColumn.isJDBCTimeColumn()) { sb.append("addCriterionForJDBCTime(\""); //$NON-NLS-1$ } else if 
(stringHasValue(introspectedColumn.getTypeHandler())) { sb.append("add"); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(3, Character.toUpperCase(sb.charAt(3))); sb.append("Criterion(\""); //$NON-NLS-1$ } else { sb.append("addCriterion(\""); //$NON-NLS-1$ } sb.append(Ibatis2FormattingUtilities.getAliasedActualColumnName(introspectedColumn)); if (inMethod) { sb.append(" in"); //$NON-NLS-1$ } else { sb.append(" not in"); //$NON-NLS-1$ } sb.append("\", values, \""); //$NON-NLS-1$ sb.append(introspectedColumn.getJavaProperty()); sb.append("\");"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); method.addBodyLine("return (Criteria) this;"); //$NON-NLS-1$ return method; } private Method getNoValueMethod(IntrospectedColumn introspectedColumn, String nameFragment, String operator) { Method method = new Method(); method.setVisibility(JavaVisibility.PUBLIC); StringBuilder sb = new StringBuilder(); sb.append(introspectedColumn.getJavaProperty()); sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); sb.insert(0, "and"); //$NON-NLS-1$ sb.append(nameFragment); method.setName(sb.toString()); method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance()); sb.setLength(0); sb.append("addCriterion(\""); //$NON-NLS-1$ sb.append(Ibatis2FormattingUtilities.getAliasedActualColumnName(introspectedColumn)); sb.append(' '); sb.append(operator); sb.append("\");"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); method.addBodyLine("return (Criteria) this;"); //$NON-NLS-1$ return method; } }
/* Copyright 2021 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package io.kubernetes.client.openapi.models; import com.google.gson.annotations.SerializedName; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.util.ArrayList; import java.util.List; import java.util.Objects; /** * PersistentVolumeClaimSpec describes the common attributes of storage devices and allows a Source * for provider-specific attributes */ @ApiModel( description = "PersistentVolumeClaimSpec describes the common attributes of storage devices and allows a Source for provider-specific attributes") @javax.annotation.Generated( value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2021-12-10T19:11:23.904Z[Etc/UTC]") public class V1PersistentVolumeClaimSpec { public static final String SERIALIZED_NAME_ACCESS_MODES = "accessModes"; @SerializedName(SERIALIZED_NAME_ACCESS_MODES) private List<String> accessModes = null; public static final String SERIALIZED_NAME_DATA_SOURCE = "dataSource"; @SerializedName(SERIALIZED_NAME_DATA_SOURCE) private V1TypedLocalObjectReference dataSource; public static final String SERIALIZED_NAME_DATA_SOURCE_REF = "dataSourceRef"; @SerializedName(SERIALIZED_NAME_DATA_SOURCE_REF) private V1TypedLocalObjectReference dataSourceRef; public static final String SERIALIZED_NAME_RESOURCES = "resources"; @SerializedName(SERIALIZED_NAME_RESOURCES) private V1ResourceRequirements resources; public static final String 
SERIALIZED_NAME_SELECTOR = "selector"; @SerializedName(SERIALIZED_NAME_SELECTOR) private V1LabelSelector selector; public static final String SERIALIZED_NAME_STORAGE_CLASS_NAME = "storageClassName"; @SerializedName(SERIALIZED_NAME_STORAGE_CLASS_NAME) private String storageClassName; public static final String SERIALIZED_NAME_VOLUME_MODE = "volumeMode"; @SerializedName(SERIALIZED_NAME_VOLUME_MODE) private String volumeMode; public static final String SERIALIZED_NAME_VOLUME_NAME = "volumeName"; @SerializedName(SERIALIZED_NAME_VOLUME_NAME) private String volumeName; public V1PersistentVolumeClaimSpec accessModes(List<String> accessModes) { this.accessModes = accessModes; return this; } public V1PersistentVolumeClaimSpec addAccessModesItem(String accessModesItem) { if (this.accessModes == null) { this.accessModes = new ArrayList<>(); } this.accessModes.add(accessModesItem); return this; } /** * AccessModes contains the desired access modes the volume should have. More info: * https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 * * @return accessModes */ @javax.annotation.Nullable @ApiModelProperty( value = "AccessModes contains the desired access modes the volume should have. 
More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1") public List<String> getAccessModes() { return accessModes; } public void setAccessModes(List<String> accessModes) { this.accessModes = accessModes; } public V1PersistentVolumeClaimSpec dataSource(V1TypedLocalObjectReference dataSource) { this.dataSource = dataSource; return this; } /** * Get dataSource * * @return dataSource */ @javax.annotation.Nullable @ApiModelProperty(value = "") public V1TypedLocalObjectReference getDataSource() { return dataSource; } public void setDataSource(V1TypedLocalObjectReference dataSource) { this.dataSource = dataSource; } public V1PersistentVolumeClaimSpec dataSourceRef(V1TypedLocalObjectReference dataSourceRef) { this.dataSourceRef = dataSourceRef; return this; } /** * Get dataSourceRef * * @return dataSourceRef */ @javax.annotation.Nullable @ApiModelProperty(value = "") public V1TypedLocalObjectReference getDataSourceRef() { return dataSourceRef; } public void setDataSourceRef(V1TypedLocalObjectReference dataSourceRef) { this.dataSourceRef = dataSourceRef; } public V1PersistentVolumeClaimSpec resources(V1ResourceRequirements resources) { this.resources = resources; return this; } /** * Get resources * * @return resources */ @javax.annotation.Nullable @ApiModelProperty(value = "") public V1ResourceRequirements getResources() { return resources; } public void setResources(V1ResourceRequirements resources) { this.resources = resources; } public V1PersistentVolumeClaimSpec selector(V1LabelSelector selector) { this.selector = selector; return this; } /** * Get selector * * @return selector */ @javax.annotation.Nullable @ApiModelProperty(value = "") public V1LabelSelector getSelector() { return selector; } public void setSelector(V1LabelSelector selector) { this.selector = selector; } public V1PersistentVolumeClaimSpec storageClassName(String storageClassName) { this.storageClassName = storageClassName; return this; } /** * Name of the 
StorageClass required by the claim. More info: * https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 * * @return storageClassName */ @javax.annotation.Nullable @ApiModelProperty( value = "Name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1") public String getStorageClassName() { return storageClassName; } public void setStorageClassName(String storageClassName) { this.storageClassName = storageClassName; } public V1PersistentVolumeClaimSpec volumeMode(String volumeMode) { this.volumeMode = volumeMode; return this; } /** * volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied * when not included in claim spec. * * @return volumeMode */ @javax.annotation.Nullable @ApiModelProperty( value = "volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.") public String getVolumeMode() { return volumeMode; } public void setVolumeMode(String volumeMode) { this.volumeMode = volumeMode; } public V1PersistentVolumeClaimSpec volumeName(String volumeName) { this.volumeName = volumeName; return this; } /** * VolumeName is the binding reference to the PersistentVolume backing this claim. 
* * @return volumeName */ @javax.annotation.Nullable @ApiModelProperty( value = "VolumeName is the binding reference to the PersistentVolume backing this claim.") public String getVolumeName() { return volumeName; } public void setVolumeName(String volumeName) { this.volumeName = volumeName; } @Override public boolean equals(java.lang.Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } V1PersistentVolumeClaimSpec v1PersistentVolumeClaimSpec = (V1PersistentVolumeClaimSpec) o; return Objects.equals(this.accessModes, v1PersistentVolumeClaimSpec.accessModes) && Objects.equals(this.dataSource, v1PersistentVolumeClaimSpec.dataSource) && Objects.equals(this.dataSourceRef, v1PersistentVolumeClaimSpec.dataSourceRef) && Objects.equals(this.resources, v1PersistentVolumeClaimSpec.resources) && Objects.equals(this.selector, v1PersistentVolumeClaimSpec.selector) && Objects.equals(this.storageClassName, v1PersistentVolumeClaimSpec.storageClassName) && Objects.equals(this.volumeMode, v1PersistentVolumeClaimSpec.volumeMode) && Objects.equals(this.volumeName, v1PersistentVolumeClaimSpec.volumeName); } @Override public int hashCode() { return Objects.hash( accessModes, dataSource, dataSourceRef, resources, selector, storageClassName, volumeMode, volumeName); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class V1PersistentVolumeClaimSpec {\n"); sb.append(" accessModes: ").append(toIndentedString(accessModes)).append("\n"); sb.append(" dataSource: ").append(toIndentedString(dataSource)).append("\n"); sb.append(" dataSourceRef: ").append(toIndentedString(dataSourceRef)).append("\n"); sb.append(" resources: ").append(toIndentedString(resources)).append("\n"); sb.append(" selector: ").append(toIndentedString(selector)).append("\n"); sb.append(" storageClassName: ").append(toIndentedString(storageClassName)).append("\n"); sb.append(" volumeMode: 
").append(toIndentedString(volumeMode)).append("\n"); sb.append(" volumeName: ").append(toIndentedString(volumeName)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
/**************************************************************** * Licensed to the AOS Community (AOS) under one or more * * contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The AOS licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ // An implementation of an OrderedDictionary. // (c) 1998, 2001 duane a. bailey package aos.data.structure; import java.util.Iterator; import java.util.Map.Entry; /** * An implementation of an ordered dictionary. Key-value pairs are * kept in the structure in order. To accomplish this, the keys of the * table must be comparable. * * @version $Id: Table.java 8 2006-08-02 19:03:11Z bailey $ * @author, 2001 duane a. bailey * @see Comparable */ public class Table extends AbstractMap implements OrderedMap { /** * An ordered structure that maintains the ComparableAssociations * that store the key-value pairings. */ protected OrderedStructure data; /** * Construct a new, empty table. * * @post constructs a new table */ public Table() { data = new SplayTree(); } public Table(Table other) { data = new SplayTree(); Iterator i = other.entrySet().iterator(); while (i.hasNext()) { java.util.Map.Entry o = (java.util.Map.Entry)i.next(); put(o.getKey(),o.getValue()); } } /** * Retrieve the value associated with the key provided. * Be aware, the value may be null. 
 * Fetch the value associated with a key.
 *
 * @pre key is a non-null object
 * @post returns value associated with key, or null
 *
 * @param key The key of the key-value pair sought.
 * @return The value associated with the key, or null if the key is absent.
 */
public Object get(Object key) {
    // Probe association: only the key matters for comparison; value is ignored.
    ComparableAssociation ca = new ComparableAssociation((Comparable)key,null);
    // NOTE(review): lookup is implemented as remove-then-re-add because the
    // backing ordered structure apparently exposes no direct "find" operation --
    // presumably the structure restores ordering on re-add; confirm against its API.
    ComparableAssociation result = ((ComparableAssociation)data.remove(ca));
    if (result == null) return null; // key not present
    data.add(result); // restore the entry that the probe removed
    return result.getValue();
}

/**
 * Enter a key-value pair into the table. if the key is already
 * in the table, the old value is returned, and the old key-value
 * pair is replaced. Otherwise null is returned. The user is cautioned
 * that a null value returned may indicate there was no prior key-value
 * pair, or --- if null values are inserted --- that the key was
 * previously associated with a null value.
 *
 * @pre key is non-null object
 * @post key-value pair is added to table
 *
 * @param key The unique key in the table.
 * @param value The (possibly null) value associated with key.
 * @return The prior value, or null if no prior value found.
 */
public Object put(Object key, Object value) {
    ComparableAssociation ca = new ComparableAssociation((Comparable)key,value);
    // fetch old key-value pair (remove keeps at most one entry per key)
    ComparableAssociation old = (ComparableAssociation)data.remove(ca);
    // insert new key-value pair
    data.add(ca);
    // return old value (null either means "no prior pair" or "prior value was null")
    if (old == null) return null;
    else return old.getValue();
}

/**
 * Determine if the table is empty.
 *
 * @post returns true iff table is empty
 *
 * @return True iff the table has no elements.
 */
public boolean isEmpty() {
    return data.isEmpty();
}

/**
 * Remove all the elements of the table.
 *
 * @post removes all elements from the table
 */
public void clear() {
    data.clear();
}

/**
 * Construct an iterator over the keys of the table.
 * The order of the keys returned is in ascending order. It will
 * be consistent with that of the iterator from elements, provided
 * the table is not modified.
 *
 * @post returns an iterator for traversing keys of table
 *
 * @return An iterator over the keys of the table.
 */
public Iterator keys() {
    // KeyIterator adapts the association iterator to yield keys only.
    return new KeyIterator(data.iterator());
}

/**
 * Construct an iterator over the values of the table.
 * The order of the values returned is determined by order of keys. It will
 * be consistent with that of the iterator returned from keys, provided
 * the table is not modified.
 *
 * @post returns an iterator for traversing values in table
 *
 * @return An iterator over the values of the table.
 */
public Iterator iterator() {
    // ValueIterator adapts the association iterator to yield values only.
    return new ValueIterator(data.iterator());
}

/**
 * Determine if the key is in the table. The key should
 * not be null.
 *
 * @pre key is non-null object
 * @post returns true iff key indexes a value in table
 *
 * @param key A non-null key sought in the table.
 * @return True iff the key is used in association with some value.
 */
public boolean containsKey(Object key) {
    // Associations compare by key only, so a null-valued probe suffices.
    ComparableAssociation a = new ComparableAssociation((Comparable)key,null);
    return data.contains(a);
}

/**
 * Returns true if the value is associated with some key in the
 * table. This is often difficult to implement efficiently.
 * Linear scan over all values -- O(n).
 *
 * @pre value is non-null object
 * @post returns true iff value in table
 *
 * @param value The value sought (possibly null).
 * @return True, if the value is associated with some key in table.
 */
public boolean containsValue(Object value) {
    Iterator i = iterator();
    while (i.hasNext()) {
        Object nextValue = i.next();
        // NOTE(review): a stored null value can never match, even if the
        // caller passes null -- asymmetric with put(), which allows null values.
        if (nextValue != null &&
            nextValue.equals(value)) return true;
    }
    return false;
}

/**
 * Remove a key-value pair, based on key. The value is returned.
 *
 * @pre key is non-null object
 * @post removes value indexed in table
 *
 * @param key The key of the key-value pair to be removed.
 * @return The value associated with key, no longer in table.
 */
public Object remove(Object key) {
    ComparableAssociation target = new ComparableAssociation((Comparable)key,null);
    // data.remove returns the stored association (or null if absent)
    target = (ComparableAssociation)data.remove(target);
    if (target == null) return null;
    else return target.getValue();
}

/**
 * Determine the number of key-value pairs within the table.
 *
 * @post returns number of key-value pairs in table
 *
 * @return The number of key-value pairs in the table.
 */
public int size() {
    return data.size();
}

/**
 * Construct a set of all keys in the table.
 *
 * @return A freshly built set containing every key (snapshot, not a view).
 */
public Set keySet() {
    Set result = new SetList();
    Iterator i = new KeyIterator(data.iterator());
    while (i.hasNext()) {
        result.add(i.next());
    }
    return result;
}

/**
 * Construct a collection of all values in the table, in key order.
 *
 * @return A freshly built list containing every value (snapshot, not a view).
 */
public Structure values() {
    List result = new SinglyLinkedList();
    Iterator i = new ValueIterator(data.iterator());
    while (i.hasNext()) {
        result.add(i.next());
    }
    return result;
}

/**
 * Construct a set of all key-value associations in the table.
 *
 * @return A freshly built set of the table's associations (snapshot, not a view).
 */
public Set entrySet() {
    Set result = new SetList();
    Iterator i = data.iterator();
    while (i.hasNext()) {
        result.add(i.next());
    }
    return result;
}

/**
 * Construct a string representing value of table.
 *
 * @post returns string representation
 *
 * @return String representing table.
 */
public String toString() {
    StringBuffer s = new StringBuffer();
    s.append("<Table: size="+size());
    Iterator ti = data.iterator();
    while (ti.hasNext()) {
        ComparableAssociation ca = (ComparableAssociation)ti.next();
        s.append(" key="+ca.getKey()+", value="+ca.getValue());
    }
    s.append(">");
    return s.toString();
}
}
/*
 * Copyright 2009 Wyona
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.wyona.org/licenses/APACHE-LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.wyona.yanel.core.map;

import java.io.File;
import java.util.ArrayList;

import org.wyona.commons.io.FileUtil;
import org.wyona.security.core.IdentityManagerFactory;
import org.wyona.security.core.PolicyManagerFactory;
import org.wyona.security.core.api.IdentityManager;
import org.wyona.security.core.api.PolicyManager;
import org.wyona.yanel.core.LanguageHandler;
import org.wyona.yanel.core.Yanel;
import org.wyona.yanel.core.attributes.translatable.DefaultTranslationManager;
import org.wyona.yanel.core.attributes.translatable.TranslationManager;
import org.wyona.yanel.core.api.security.WebAuthenticator;
import org.wyona.yanel.core.navigation.Sitetree;
import org.wyona.yanel.core.util.ConfigurationUtil;

import org.wyona.yarep.core.Repository;
import org.wyona.yarep.core.RepositoryFactory;

import org.xml.sax.SAXException;

import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;

/**
 * Default realm implementation.
 *
 * A realm bundles the content ("data") repository, the resource-type
 * configuration ("rti"/"res-configs") repository, access control
 * (identity/policy managers), web authentication, translation and
 * language handling, plus optional extra yarep repositories -- all
 * driven by a single realm configuration file.
 */
public class RealmDefaultImpl implements Realm {

    public static String DEFAULT_REPOSITORY_FACTORY_BEAN_ID = "DefaultRepositoryFactory";
    private static String EXTRA_REPOSITORY_FACTORY_BEAN_ID = "ExtraRepositoryFactory";

    private static final Logger log = LogManager.getLogger(RealmDefaultImpl.class);

    // User tracking domain (optional; falls back to realm ID, see getUserTrackingDomain())
    private String domain;

    private String name;
    private String id;
    private String mountPoint;
    private String defaultLanguage;
    private Repository repository = null;
    private Repository rtiRepository = null;
    private Repository identitiesRepository = null;
    private Repository policiesRepository = null;
    private PolicyManager privatePolicyManager;
    private IdentityManager privateIdentityManager;
    private WebAuthenticator privateWebAuthenticator;
    private TranslationManager translationManager;
    private LanguageHandler languageHandler;
    private Sitetree repoNavigation;
    private File configFile;
    private File rootDir;
    private String[] languages;
    private String i18nCatalogue;

    private boolean proxySet = false;
    private ReverseProxyConfig rpc;

    /**
     * Init realm.
     * @param name Realm name (may be null, in which case the name is read from the realm config, see configure(Configuration))
     * @param id Realm ID
     * @param mountPoint Mount point of realm
     * @param configFile Realm configuration file (may be null, in which case no configuration is applied)
     * @throws Exception if the configuration file cannot be read or applied
     */
    public RealmDefaultImpl(String name, String id, String mountPoint, File configFile) throws Exception {
        // INFO: If name is null, then get realm name from config (see method configure(Configuration)).
        this.name = name;
        this.id = id;
        this.mountPoint = mountPoint;
        this.configFile = configFile;
        log.info("Init realm: " + id + ", " + mountPoint + ", " + configFile);

        proxySet = false;
        rpc = new ReverseProxyConfig(null, -1, -1, null); // INFO: Default means "no reverse proxy configured"

        if (configFile != null) {
            // INFO: 'true' enables namespace support of the Avalon configuration builder
            DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder(true);
            Configuration config;
            try {
                config = builder.buildFromFile(configFile);
                configure(config);

                // INFO: Dump filtered config...
                if (log.isDebugEnabled()) {
                    org.apache.avalon.framework.configuration.DefaultConfigurationSerializer dcs = new org.apache.avalon.framework.configuration.DefaultConfigurationSerializer();
                    dcs.setIndent(true);
                    // FIX: close the debug dump stream; previously the FileOutputStream
                    // was never closed (file descriptor leak on every realm init with debug logging).
                    java.io.FileOutputStream debugOut = new java.io.FileOutputStream(new File(configFile.getAbsolutePath() + ".DEBUG"));
                    try {
                        dcs.serialize(debugOut, config);
                    } finally {
                        debugOut.close();
                    }
                }
            } catch (SAXException e) {
                // TODO: CascadingSAXException cse = new CascadingSAXException(e);
                log.error(e, e);
                String errorMsg = "Could not read config file: " + configFile + ": " + e.getMessage();
                throw new Exception(errorMsg, e);
            } catch (Exception e) {
                log.error(e, e);
                String errorMsg = "Could not configure realm [" + id + "] with config file: " + configFile + ": " + e.toString();
                throw new Exception(errorMsg, e);
            }
        }
    }

    /**
     * Configure realm based on configuration.
     * Sets up (in order): realm name, identity manager, policy manager, web
     * authenticator, data/RTI repositories, languages, translation and language
     * handlers, optional root dir, extra repositories, i18n catalogue and
     * repository navigation.
     * @param config Realm configuration
     * @throws Exception if any sub-component cannot be instantiated or configured
     */
    protected void configure(Configuration config) throws Exception {
        Yanel yanel = Yanel.getInstance();

        // INFO: Filter by target environment
        config = ConfigurationUtil.filterEnvironment((org.apache.avalon.framework.configuration.MutableConfiguration) config, yanel.getTargetEnvironment());

        // INFO: Set realm name if not already set by yanel realms registration config (conf/local/local.realms.xml)
        Configuration nameConfigElement = config.getChild("name", false);
        if (name == null && nameConfigElement != null) {
            name = nameConfigElement.getValue();
        } else {
            log.warn("Realm name '" + name + "' already set inside Yanel's realms configuration, instead inside realm configuration: " + configFile);
        }

        initIdentityManager(config, yanel);
        initPolicyManager(config, yanel);

        // Set WebAuthenticator for this realm (custom class if configured, otherwise default)
        Configuration waConfigElement = config.getChild("web-authenticator", false);
        WebAuthenticator wa = null;
        if (waConfigElement != null) {
            try {
                String customWebAuthenticatorImplClassName = waConfigElement.getAttribute("class");
                wa = (WebAuthenticator) Class.forName(customWebAuthenticatorImplClassName).newInstance();
                wa.init(ConfigurationUtil.getCustomConfiguration(waConfigElement, "web-authenticator-config", "http://www.wyona.org/security/1.0"), new RealmConfigPathResolver(this));
                log.info("Custom WebAuthenticator (" + customWebAuthenticatorImplClassName + ") will be used!");
            } catch (ConfigurationException e) {
                // INFO: Fall back to the default implementation if the custom config is broken.
                log.error(e, e);
                log.warn("Default WebAuthenticator will be used!");
                wa = getDefaultWebAuthenticator();
                wa.init(null, new RealmConfigPathResolver(this));
            }
        } else {
            wa = getDefaultWebAuthenticator();
            wa.init(null, new RealmConfigPathResolver(this));
        }
        setWebAuthenticator(wa);

        RepositoryFactory repoFactory = yanel.getRepositoryFactory(DEFAULT_REPOSITORY_FACTORY_BEAN_ID);
        RepositoryFactory rtiRepoFactory = yanel.getRepositoryFactory("RTIRepositoryFactory");
        RepositoryFactory extraRepoFactory = yanel.getRepositoryFactory(EXTRA_REPOSITORY_FACTORY_BEAN_ID);

        // INFO: Repository config paths are resolved relative to the realm config file.
        String repoConfigSrc = config.getChild("data", false).getValue();
        File repoConfig = FileUtil.resolve(getConfigFile(), new File(repoConfigSrc));
        log.info("Set data repository: " + getID() + ", " + repoConfig);
        setRepository(repoFactory.newRepository(getID(), repoConfig));

        repoConfigSrc = config.getChild("rti", false).getValue();
        repoConfig = FileUtil.resolve(getConfigFile(), new File(repoConfigSrc));
        setRTIRepository(rtiRepoFactory.newRepository(getID(), repoConfig));

        Configuration configElement = config.getChild("default-language", false);
        if (configElement != null) {
            setDefaultLanguage(configElement.getValue());
        } else {
            //Maintain backwards compatibility with realms
            setDefaultLanguage("en");
        }

        Configuration languagesElement = config.getChild("languages", false);
        ArrayList languages = new ArrayList();
        if (languagesElement != null) {
            Configuration[] langElements = languagesElement.getChildren("language");
            for (int i = 0; i < langElements.length; i++) {
                String language = langElements[i].getValue();
                languages.add(language);
            }
        }
        setLanguages((String[])languages.toArray(new String[languages.size()]));

        configElement = config.getChild("translation-manager", false);
        TranslationManager translationManager = null;
        if (configElement != null) {
            String className = configElement.getAttribute("class");
            translationManager = (TranslationManager)Class.forName(className).newInstance();
        } else {
            translationManager = new DefaultTranslationManager();
        }
        translationManager.init(this);
        setTranslationManager(translationManager);

        configElement = config.getChild("language-handler", false);
        LanguageHandler languageHandler = null;
        if (configElement != null) {
            String className = configElement.getAttribute("class");
            languageHandler = (LanguageHandler)Class.forName(className).newInstance();
        } else {
            // INFO: Default implementation lives in the impl module and is loaded reflectively
            // to avoid a compile-time dependency from core onto impl.
            languageHandler = (LanguageHandler)Class.forName("org.wyona.yanel.impl.DefaultLanguageHandler").newInstance();
        }
        setLanguageHandler(languageHandler);

        Configuration rootDirConfig = config.getChild("root-dir", false);
        if (rootDirConfig != null) {
            setRootDir(FileUtil.resolve(getConfigFile(), new File(rootDirConfig.getValue())));
        }

        // INFO: Extra yarep repositories, registered under "<realmID>_<repoID>".
        Configuration reposElement = config.getChild("yarep-repositories", false);
        if (reposElement != null) {
            Configuration[] repoElements = reposElement.getChildren("repository");
            for (int i = 0; i < repoElements.length; i++) {
                String id = repoElements[i].getAttribute("id");
                String repoConfigPath = repoElements[i].getAttribute("config");
                repoConfig = FileUtil.resolve(getConfigFile(), new File(repoConfigPath));
                if (!extraRepoFactory.exists(getID() + "_" + id)) {
                    extraRepoFactory.newRepository(getID() + "_" + id, repoConfig);
                } else {
                    log.error("Extra repository with ID '" + id + "' already exists (Realm ID: '" + getID() + "')!");
                }
            }
        }

        // Set i18n catalogue
        configElement = config.getChild("i18n-catalogue", false);
        if (configElement != null) {
            this.i18nCatalogue = configElement.getValue();
        }

        // Set repo-navigation (custom class if configured, otherwise default sitetree impl)
        configElement = config.getChild("repo-navigation", false);
        if (configElement != null) {
            try {
                String customRepoNavigationImplClassName = configElement.getAttribute("class");
                repoNavigation = (Sitetree) Class.forName(customRepoNavigationImplClassName).newInstance();
                repoNavigation.init(ConfigurationUtil.getCustomConfiguration(configElement, "repo-navigation-config", "http://www.wyona.org/yanel/realm/1.0"), new RealmConfigPathResolver(this));
                log.info("Custom repo navigation implementation will be used for realm: " + getName());
            } catch (ConfigurationException e) {
                log.error(e, e);
                repoNavigation = yanel.getSitetreeImpl("repo-navigation");
                log.warn("Default repo navigation implementation will be used for realm: " + getName());
            }
        } else {
            log.info("Default repo navigation implementation will be used for realm: " + getName());
            repoNavigation = yanel.getSitetreeImpl("repo-navigation");
        }
    }

    /**
     * Name of realm
     */
    public String getName() {
        return name;
    }

    /**
     * Id of realm
     */
    public String getID() {
        return id;
    }

    /**
     * Mount point of realm
     */
    public String getMountPoint() {
        return mountPoint;
    }

    /**
     * Configuration file of realm.
     */
    public File getConfigFile() {
        return configFile;
    }

    /**
     * Set reverse proxy parameters.
     * @param hostName Proxy host name
     * @param port Proxy port (-1 means default)
     * @param sslPort Proxy SSL port (-1 means default)
     * @param prefix Proxy path prefix
     */
    public void setProxy(String hostName, int port, int sslPort, String prefix) {
        proxySet = true;
        rpc = new ReverseProxyConfig(hostName, port, sslPort, prefix);
    }

    /**
     * @see org.wyona.yanel.core.map.Realm#setReverseProxyConfig(ReverseProxyConfig)
     */
    public void setReverseProxyConfig(ReverseProxyConfig reverseProxyConfig) {
        proxySet = true;
        this.rpc = reverseProxyConfig;
    }

    /**
     * @see org.wyona.yanel.core.map.Realm#getReverseProxyConfig()
     */
    public ReverseProxyConfig getReverseProxyConfig() {
        return rpc;
    }

    /**
     * Check whether a reverse proxy has been configured for this realm.
     */
    public boolean isProxySet() {
        return proxySet;
    }

    /**
     * Get reverse proxy host name.
     */
    public String getProxyHostName() {
        return rpc.getHostName();
    }

    /**
     * Get reverse proxy port.
     */
    public int getProxyPort() {
        return rpc.getPort();
    }

    /**
     * Get reverse proxy SSL port.
     */
    public int getProxySSLPort() {
        return rpc.getSSLPort();
    }

    /**
     * Get reverse proxy path prefix.
     */
    public String getProxyPrefix() {
        return rpc.getPrefix();
    }

    /**
     * Human-readable description of this realm (name, ID, mount point, proxy settings).
     */
    public String toString() {
        String descr = "Name: " + name + ", ID: " + id + ", Mount-Point: " + mountPoint;
        if (isProxySet()) {
            if (rpc.getHostName() != null) {
                descr = descr + ", Reverse Proxy Host Name: " + rpc.getHostName();
            }
            if (rpc.getPort() >= 0) {
                descr = descr + ", Reverse Proxy Port: " + rpc.getPort();
            } else {
                descr = descr + ", Reverse Proxy Port is set to default 80 (resp. -1)";
            }
            if (rpc.getSSLPort() >= 0) {
                descr = descr + ", Reverse Proxy SSL Port: " + rpc.getSSLPort();
            } else {
                descr = descr + ", Reverse Proxy SSL Port is set to default 443 (resp. -1)";
            }
            if (rpc.getPrefix() != null) {
                descr = descr + ", Reverse Proxy Prefix: " + rpc.getPrefix();
            }
        } else {
            descr = descr + ", No reverse proxy set";
        }
        return descr;
    }

    /**
     * Get data repository of realm
     */
    public Repository getRepository() throws Exception {
        return repository;
    }

    public void setRepository(Repository repository) throws Exception {
        this.repository = repository;
    }

    /**
     * Get RTI (Resource Type Identifier) repository of realm
     */
    public Repository getRTIRepository() throws Exception {
        return rtiRepository;
    }

    public void setRTIRepository(Repository repository) throws Exception {
        this.rtiRepository = repository;
    }

    /**
     * Get web authenticator of this realm.
     */
    public WebAuthenticator getWebAuthenticator() {
        return privateWebAuthenticator;
    }

    /**
     * Set web authenticator of this realm.
     */
    public void setWebAuthenticator(WebAuthenticator wa) {
        privateWebAuthenticator = wa;
    }

    public IdentityManager getIdentityManager() {
        return privateIdentityManager;
    }

    /**
     * @see org.wyona.yanel.core.map.Realm#setIdentityManager(IdentityManager)
     */
    public void setIdentityManager(IdentityManager identityManager) {
        this.privateIdentityManager = identityManager;
    }

    /**
     * Get policy manager
     */
    public PolicyManager getPolicyManager() {
        return privatePolicyManager;
    }

    public void setPolicyManager(PolicyManager policyManager) {
        this.privatePolicyManager = policyManager;
    }

    /**
     * Get repository navigation
     */
    public Sitetree getRepoNavigation() {
        return repoNavigation;
    }

    /**
     * Get default language of this realm re content
     */
    public String getDefaultLanguage() {
        return defaultLanguage;
    }

    public void setDefaultLanguage(String language) {
        this.defaultLanguage = language;
    }

    /**
     * Please note that the root-dir element is optional
     * @deprecated
     */
    public File getRootDir() {
        log.warn("TODO: Try to avoid using the getRootDir() method because this method is deprecated!");
        return this.rootDir;
    }

    public void setRootDir(File rootDir) {
        this.rootDir = rootDir;
    }

    /**
     * Please note that the menu element is optional.
     * Re-reads the realm config file on every call and returns the configured menu class name, or null.
     */
    public String getMenuClass() {
        try {
            Configuration realmConfig = new DefaultConfigurationBuilder().buildFromFile(getConfigFile());
            Configuration menuClassConfig = realmConfig.getChild("menu", false);
            if (menuClassConfig != null) {
                return menuClassConfig.getAttribute("class");
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Gets a list of all languages supported by this realm.
     * @return list of languages. may be empty.
     */
    public String[] getLanguages() {
        return languages;
    }

    public void setLanguages(String[] languages) {
        //TODO: the cast should not be necessary. but under strange circumstances build fails without.
        this.languages = (String[]) languages.clone();
    }

    public TranslationManager getTranslationManager() {
        //log.debug("Translation Manager: " + translationManager.getClass().getName());
        return translationManager;
    }

    public void setTranslationManager(TranslationManager translationManager) {
        this.translationManager = translationManager;
    }

    /**
     * Get repository, whereas default repositories can be retrieved by 'yanel_data', 'yanel_ac-policies', 'yanel_ac-identities' and 'yanel_res-configs'.
     * @param id Repository id
     * @return repository for the given id, or null if no such (extra) repository exists
     */
    public Repository getRepository(String id) throws Exception {
        if("yanel_data".equals(id)) {
            return repository;
        } else if("yanel_ac-policies".equals(id)) {
            return policiesRepository;
        } else if("yanel_ac-identities".equals(id)) {
            return identitiesRepository;
        } else if("yanel_res-configs".equals(id)) {
            return rtiRepository;
        } else {
            // INFO: Extra repositories are registered under "<realmID>_<repoID>", see configure(Configuration).
            Yanel yanel = Yanel.getInstance();
            RepositoryFactory extraRepoFactory = yanel.getRepositoryFactory(EXTRA_REPOSITORY_FACTORY_BEAN_ID);
            if (extraRepoFactory.exists(getID() + "_" + id)) {
                return extraRepoFactory.newRepository(getID() + "_" + id);
            } else {
                log.warn("No such extra repository: " + id + " (Realm: " + getID() + ")");
            }
        }
        return null;
    }

    public LanguageHandler getLanguageHandler() {
        return languageHandler;
    }

    public void setLanguageHandler(LanguageHandler languageHandler) {
        this.languageHandler = languageHandler;
    }

    /**
     * Destroy/shutdown realm
     */
    public void destroy() throws Exception {
        log.warn("Shutdown realm (and its repositories): " + getName());
        repository.close();
        rtiRepository.close();
        if (identitiesRepository != null) {
            identitiesRepository.close();
        } else {
            log.warn("No identities repository to close for realm: " + getID());
        }
        if (policiesRepository != null) {
            policiesRepository.close();
        } else {
            log.warn("No policies repository to close for realm: " + getID());
        }
        // TODO: Close extra repositories
    }

    /**
     * Get Default WebAuthenticator
     */
    private WebAuthenticator getDefaultWebAuthenticator() throws Exception {
        // TODO: Get this setting from spring config
        String defaultWebAuthenticatorImplClassName = "org.wyona.yanel.servlet.security.impl.DefaultWebAuthenticatorImpl";
        return (WebAuthenticator) Class.forName(defaultWebAuthenticatorImplClassName).newInstance();
    }

    /**
     * Gets the value of the i18n-catalogue config element.
     * This value normally is a URI pointing to an i18n message catalogue.
     * @return i18n catalogue
     */
    public String getI18nCatalogue() {
        return this.i18nCatalogue;
    }

    /**
     * Init policy manager: either a custom factory declared via the 'class'
     * attribute of 'ac-policies', or the default factory backed by a policies repository.
     */
    protected void initPolicyManager(Configuration config, Yanel yanel) throws Exception {
        Configuration repoConfigElement = config.getChild("ac-policies", false);
        if (repoConfigElement != null) {
            PolicyManagerFactory pmFactory = null;
            PolicyManager policyManager = null;
            try {
                String customPolicyManagerFactoryImplClassName = repoConfigElement.getAttribute("class");
                pmFactory = (PolicyManagerFactory) Class.forName(customPolicyManagerFactoryImplClassName).newInstance();
                log.warn("TODO: Add realm ID '" + getID() + "' to custom configuration...");
                policyManager = pmFactory.newPolicyManager(ConfigurationUtil.getCustomConfiguration(repoConfigElement, "policy-manager-config", "http://www.wyona.org/security/1.0"), new RealmConfigPathResolver(this));
            } catch (ConfigurationException e) {
                // INFO: No 'class' attribute (or broken custom config) -> default policy manager.
                pmFactory = yanel.getPolicyManagerFactory("PolicyManagerFactory");
                log.info("Default PolicyManager will be used for realm: " + getName());
                File repoConfig = FileUtil.resolve(getConfigFile(), new File(repoConfigElement.getValue()));
                RepositoryFactory policiesRepoFactory = yanel.getRepositoryFactory("ACPoliciesRepositoryFactory");
                policiesRepository = policiesRepoFactory.newRepository(getID(), repoConfig);
                policyManager = pmFactory.newPolicyManager(policiesRepository);
            }
            setPolicyManager(policyManager);
        }
    }

    /**
     * Init identity manager: either a custom factory declared via the 'class'
     * attribute of 'ac-identities', or the default factory backed by an identities repository.
     */
    protected void initIdentityManager(Configuration config, Yanel yanel) throws Exception {
        Configuration repoConfigElement = config.getChild("ac-identities", false);
        if (repoConfigElement != null) {
            IdentityManagerFactory imFactory = null;
            IdentityManager identityManager = null;
            try {
                String customIdentityManagerFactoryImplClassName = repoConfigElement.getAttribute("class");
                log.debug("Set custom identity manager " + customIdentityManagerFactoryImplClassName + " for realm: " + getName());
                imFactory = (IdentityManagerFactory) Class.forName(customIdentityManagerFactoryImplClassName).newInstance();
                // INFO: ConfigurationUtil generates a DOM Document with the root node called "identity-manager-config" which wraps/contains the custom indentities configuration
                identityManager = imFactory.newIdentityManager(ConfigurationUtil.getCustomConfiguration(repoConfigElement, "identity-manager-config", "http://www.wyona.org/security/1.0"), new RealmConfigPathResolver(this));
                log.debug("Custom identity manager " + identityManager.getClass().getName() + " has been set for realm: " + getName());
            } catch (ConfigurationException e) {
                // INFO: No 'class' attribute (or broken custom config) -> default identity manager.
                imFactory = yanel.getIdentityManagerFactory("IdentityManagerFactory");
                log.info("Default IdentityManager will be used for realm: " + getName());
                File repoConfig = FileUtil.resolve(getConfigFile(), new File(repoConfigElement.getValue()));
                RepositoryFactory identitiesRepoFactory = yanel.getRepositoryFactory("ACIdentitiesRepositoryFactory");
                identitiesRepository = identitiesRepoFactory.newRepository(getID(), repoConfig);
                identityManager = imFactory.newIdentityManager(identitiesRepository);
            }
            setIdentityManager(identityManager);
        }
    }

    /**
     * @see org.wyona.yanel.core.map.Realm#setUserTrackingDomain(String)
     */
    public void setUserTrackingDomain(String domain) {
        this.domain = domain;
    }

    /**
     * @see org.wyona.yanel.core.map.Realm#getUserTrackingDomain()
     */
    public String getUserTrackingDomain() {
        if (domain != null) {
            return domain;
        } else {
            log.debug("No domain configured, hence use realm ID...");
            return getID();
        }
    }
}
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package apple.coreimage;

import apple.NSObject;
import apple.coregraphics.struct.CGRect;
import apple.foundation.NSArray;
import apple.foundation.NSData;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.ReferenceInfo;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.Ptr;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

// NOTE(review): This is a machine-generated MOE/NatJ binding for the
// Objective-C class CIWarpKernel (every member carries @Generated).
// Do not hand-edit method bodies or selector strings -- each @Selector
// annotation must match the native Objective-C selector exactly.

/**
 * CIWarpKernel is an object that encapsulates a Core Image Kernel Language
 * function that processes only the geometry of an image.
 * <p>
 * Warp kernel functions are declared akin to this example:
 * kernel vec2 myWarpKernel (vec4 params)
 * <p>
 * Additional arguments can be of type float, vec2, vec3, vec4.
 * The destination pixel location is obtained by calling destCoord().
 * The kernel should not call sample(), samplerCoord(), or samplerTransform().
 * The function must return a vec2 source location.
 */
@Generated
@Library("CoreImage")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class CIWarpKernel extends CIKernel {
    static {
        // Registers this binding class with the NatJ runtime.
        NatJ.register();
    }

    @Generated
    protected CIWarpKernel(Pointer peer) {
        super(peer);
    }

    // ---- Standard NSObject class-method bindings (boilerplate emitted by the generator) ----

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native CIWarpKernel alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native CIWarpKernel allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    // ---- CIWarpKernel factory methods ----

    /**
     * The string argument should contain a program with one warp kernel.
     * On iOS8 [CIWarpKernel kernelWithString:] can return a CIKernel, CIColorKernel, or CIWarpKernel object.
     * On iOS9 [CIWarpKernel kernelWithString:] will return a CIWarpKernel object or nil.
     * On OS X [CIWarpKernel kernelWithString:] will return a CIWarpKernel object or nil.
     */
    @Generated
    @Selector("kernelWithString:")
    public static native CIWarpKernel kernelWithString(String string);

    @Generated
    @Selector("kernelsWithString:")
    public static native NSArray<? extends CIKernel> kernelsWithString(String string);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native CIWarpKernel new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    /**
     * Apply the receiver CIWarpKernel to produce a new CIImage object.
     * <p>
     * The 'extent' is the bounding box of all pixel coordinates that are warped by the
     * kernel to fall within the extent of 'image'.
     * <p>
     * The 'image' is the input image that will be warped into a new image.
     * <p>
     * The 'callback' is a block that should return the rectangle of the input image
     * that is needed to produce a given rectangle in the coordinate space of the
     * new image.
     * <p>
     * The 'args' is an array of parameters needed to describe the warping function.
     * The object types of the items in the array correspond to the argument types of the
     * kernel function. For example, if the first argument in the kernel is a vec3,
     * then the first object in the array must be a CIVector of count 3.
     */
    @Generated
    @Selector("applyWithExtent:roiCallback:inputImage:arguments:")
    public native CIImage applyWithExtentRoiCallbackInputImageArguments(@ByValue CGRect extent,
            @ObjCBlock(name = "call_applyWithExtentRoiCallbackInputImageArguments") Block_applyWithExtentRoiCallbackInputImageArguments callback,
            CIImage image, NSArray<?> args);

    @Generated
    @Selector("init")
    public native CIWarpKernel init();

    /**
     * Java counterpart of the Objective-C ROI callback block passed to
     * applyWithExtent:roiCallback:inputImage:arguments:.
     */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_applyWithExtentRoiCallbackInputImageArguments {
        @Generated
        @ByValue
        CGRect call_applyWithExtentRoiCallbackInputImageArguments(int index, @ByValue CGRect destRect);
    }

    // ---- Metal-library based kernel construction ----

    @Generated
    @Selector("kernelWithFunctionName:fromMetalLibraryData:error:")
    public static native CIWarpKernel kernelWithFunctionNameFromMetalLibraryDataError(String name, NSData data,
            @ReferenceInfo(type = NSError.class) Ptr<NSError> error);

    @Generated
    @Selector("kernelWithFunctionName:fromMetalLibraryData:outputPixelFormat:error:")
    public static native CIWarpKernel kernelWithFunctionNameFromMetalLibraryDataOutputPixelFormatError(String name,
            NSData data, int format, @ReferenceInfo(type = NSError.class) Ptr<NSError> error);

    @Generated
    @Selector("kernelNamesFromMetalLibraryData:")
    public static native NSArray<String> kernelNamesFromMetalLibraryData(NSData data);

    @Generated
    @Selector("kernelsWithMetalString:error:")
    public static native NSArray<? extends CIKernel> kernelsWithMetalStringError(String source,
            @ReferenceInfo(type = NSError.class) Ptr<NSError> error);
}
///////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2001, Eric D. Friedman All Rights Reserved.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
///////////////////////////////////////////////////////////////////////////////

package gnu.trove;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Externalizable;
import java.util.Arrays;


//////////////////////////////////////////////////
// THIS IS A GENERATED CLASS. DO NOT HAND EDIT! //
//////////////////////////////////////////////////


/**
 * An open addressed Map implementation for double keys and float values.
 *
 * Created: Sun Nov  4 08:52:45 2001
 *
 * @author Eric D. Friedman
 */
public class TDoubleFloatHashMap extends TDoubleHash implements Externalizable {
    static final long serialVersionUID = 1L;

    // Procedure used by putAll: copies each entry of the source map into this map.
    private final TDoubleFloatProcedure PUT_ALL_PROC = new TDoubleFloatProcedure() {
        public boolean execute(double key, float value) {
            put(key, value);
            return true;
        }
    };

    /** the values of the map; transient because writeExternal/readExternal stream entries explicitly */
    protected transient float[] _values;

    /**
     * Creates a new <code>TDoubleFloatHashMap</code> instance with the default
     * capacity and load factor.
     */
    public TDoubleFloatHashMap() {
        super();
    }

    /**
     * Creates a new <code>TDoubleFloatHashMap</code> instance with a prime
     * capacity equal to or greater than <tt>initialCapacity</tt> and
     * with the default load factor.
     *
     * @param initialCapacity an <code>int</code> value
     */
    public TDoubleFloatHashMap(int initialCapacity) {
        super(initialCapacity);
    }

    /**
     * Creates a new <code>TDoubleFloatHashMap</code> instance with a prime
     * capacity equal to or greater than <tt>initialCapacity</tt> and
     * with the specified load factor.
     *
     * @param initialCapacity an <code>int</code> value
     * @param loadFactor a <code>float</code> value
     */
    public TDoubleFloatHashMap(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
    }

    /**
     * Creates a new <code>TDoubleFloatHashMap</code> instance with the default
     * capacity and load factor.
     * @param strategy used to compute hash codes and to compare keys.
     */
    public TDoubleFloatHashMap(TDoubleHashingStrategy strategy) {
        super(strategy);
    }

    /**
     * Creates a new <code>TDoubleFloatHashMap</code> instance whose capacity
     * is the next highest prime above <tt>initialCapacity + 1</tt>
     * unless that value is already prime.
     *
     * @param initialCapacity an <code>int</code> value
     * @param strategy used to compute hash codes and to compare keys.
     */
    public TDoubleFloatHashMap(int initialCapacity, TDoubleHashingStrategy strategy) {
        super(initialCapacity, strategy);
    }

    /**
     * Creates a new <code>TDoubleFloatHashMap</code> instance with a prime
     * value at or near the specified capacity and load factor.
     *
     * @param initialCapacity used to find a prime capacity for the table.
     * @param loadFactor used to calculate the threshold over which
     * rehashing takes place.
     * @param strategy used to compute hash codes and to compare keys.
     */
    public TDoubleFloatHashMap(int initialCapacity, float loadFactor, TDoubleHashingStrategy strategy) {
        super(initialCapacity, loadFactor, strategy);
    }

    /**
     * @return a deep clone of this collection
     */
    public Object clone() {
        TDoubleFloatHashMap m = (TDoubleFloatHashMap)super.clone();
        // super.clone() shares the value array; give the copy its own.
        m._values = (float[])this._values.clone();
        return m;
    }

    /**
     * @return a TDoubleFloatIterator with access to this map's keys and values
     */
    public TDoubleFloatIterator iterator() {
        return new TDoubleFloatIterator(this);
    }

    /**
     * initializes the hashtable to a prime capacity which is at least
     * <tt>initialCapacity + 1</tt>.
     *
     * @param initialCapacity an <code>int</code> value
     * @return the actual capacity chosen
     */
    protected int setUp(int initialCapacity) {
        int capacity;

        capacity = super.setUp(initialCapacity);
        _values = new float[capacity];
        return capacity;
    }

    /**
     * Inserts a key/value pair into the map.
     *
     * @param key an <code>double</code> value
     * @param value an <code>float</code> value
     * @return the previous value associated with <tt>key</tt>,
     * or (float)0 if none was found.
     */
    public float put(double key, float value) {
        int index = insertionIndex(key);
        return doPut(key, value, index);
    }

    /**
     * Inserts a key/value pair into the map if the specified key is not already
     * associated with a value.
     *
     * @param key an <code>double</code> value
     * @param value an <code>float</code> value
     * @return the previous value associated with <tt>key</tt>,
     * or (float)0 if none was found.
     */
    public float putIfAbsent(double key, float value) {
        int index = insertionIndex(key);
        if (index < 0)
            return _values[-index - 1];
        return doPut(key, value, index);
    }

    // A negative index from insertionIndex() encodes an existing mapping at
    // (-index - 1); in that case the old value is returned and overwritten.
    private float doPut(double key, float value, int index) {
        byte previousState;
        float previous = (float)0;
        boolean isNewMapping = true;
        if (index < 0) {
            index = -index -1;
            previous = _values[index];
            isNewMapping = false;
        }
        previousState = _states[index];
        _set[index] = key;
        _states[index] = FULL;
        _values[index] = value;
        if (isNewMapping) {
            // Only grow the size/occupied counters for a genuinely new key.
            postInsertHook(previousState == FREE);
        }

        return previous;
    }


    /**
     * Put all the entries from the given map into this map.
     *
     * @param map The map from which entries will be obtained to put into this map.
     */
    public void putAll(TDoubleFloatHashMap map){
        map.forEachEntry(PUT_ALL_PROC);
    }


    /**
     * rehashes the map to the new capacity.
     *
     * @param newCapacity an <code>int</code> value
     */
    protected void rehash(int newCapacity) {
        int oldCapacity = _set.length;
        double oldKeys[] = _set;
        float oldVals[] = _values;
        byte oldStates[] = _states;

        _set = new double[newCapacity];
        _values = new float[newCapacity];
        _states = new byte[newCapacity];

        // Only FULL slots are re-inserted; FREE and REMOVED slots are dropped.
        for (int i = oldCapacity; i-- > 0;) {
            if(oldStates[i] == FULL) {
                double o = oldKeys[i];
                int index = insertionIndex(o);
                _set[index] = o;
                _values[index] = oldVals[i];
                _states[index] = FULL;
            }
        }
    }

    /**
     * retrieves the value for <tt>key</tt>
     *
     * @param key an <code>double</code> value
     * @return the value of <tt>key</tt> or (float)0 if no such mapping exists.
     */
    public float get(double key) {
        int index = index(key);
        return index < 0 ? (float)0 : _values[index];
    }

    /**
     * Empties the map.
     *
     */
    public void clear() {
        super.clear();
        Arrays.fill(_set, 0, _set.length, (double) 0);
        Arrays.fill(_values, 0, _values.length, (float) 0);
        Arrays.fill(_states, 0, _states.length, FREE);
    }

    /**
     * Deletes a key/value pair from the map.
     *
     * @param key an <code>double</code> value
     * @return an <code>float</code> value, or (float)0 if no mapping for key exists
     */
    public float remove(double key) {
        float prev = (float)0;
        int index = index(key);
        if (index >= 0) {
            prev = _values[index];
            removeAt(index);    // clear key,state; adjust size
        }
        return prev;
    }

    /**
     * Compares this map with another map for equality of their stored
     * entries.
     *
     * @param other an <code>Object</code> value
     * @return a <code>boolean</code> value
     */
    public boolean equals(Object other) {
        if (! (other instanceof TDoubleFloatHashMap)) {
            return false;
        }
        TDoubleFloatHashMap that = (TDoubleFloatHashMap)other;
        if (that.size() != this.size()) {
            return false;
        }
        return forEachEntry(new EqProcedure(that));
    }

    public int hashCode() {
        HashProcedure p = new HashProcedure();
        forEachEntry(p);
        return p.getHashCode();
    }

    // Accumulates an order-independent hash over all entries (sum of per-entry hashes).
    private final class HashProcedure implements TDoubleFloatProcedure {
        private int h = 0;

        public int getHashCode() {
            return h;
        }

        public final boolean execute(double key, float value) {
            h += (_hashingStrategy.computeHashCode(key) ^ HashFunctions.hash(value));
            return true;
        }
    }

    // Checks that every entry of the iterated map exists with an equal value in _otherMap.
    private static final class EqProcedure implements TDoubleFloatProcedure {
        private final TDoubleFloatHashMap _otherMap;

        EqProcedure(TDoubleFloatHashMap otherMap) {
            _otherMap = otherMap;
        }

        public final boolean execute(double key, float value) {
            int index = _otherMap.index(key);
            if (index >= 0 && eq(value, _otherMap.get(key))) {
                return true;
            }
            return false;
        }

        /**
         * Compare two floats for equality.
         */
        private final boolean eq(float v1, float v2) {
            return v1 == v2;
        }

    }

    /**
     * removes the mapping at <tt>index</tt> from the map.
     *
     * @param index an <code>int</code> value
     */
    protected void removeAt(int index) {
        _values[index] = (float)0;
        super.removeAt(index);  // clear key, state; adjust size
    }

    /**
     * Returns the values of the map.
     *
     * @return a <code>Collection</code> value
     */
    public float[] getValues() {
        float[] vals = new float[size()];
        float[] v = _values;
        byte[] states = _states;

        for (int i = v.length, j = 0; i-- > 0;) {
            if (states[i] == FULL) {
                vals[j++] = v[i];
            }
        }
        return vals;
    }

    /**
     * returns the keys of the map.
     *
     * @return a <code>Set</code> value
     */
    public double[] keys() {
        double[] keys = new double[size()];
        double[] k = _set;
        byte[] states = _states;

        for (int i = k.length, j = 0; i-- > 0;) {
            if (states[i] == FULL) {
                keys[j++] = k[i];
            }
        }
        return keys;
    }

    /**
     * returns the keys of the map.
     *
     * @param a the array into which the elements of the list are to
     *        be stored, if it is big enough; otherwise, a new array of the
     *        same type is allocated for this purpose.
     * @return a <code>Set</code> value
     */
    public double[] keys(double[] a) {
        int size = size();
        if (a.length < size) {
            a = (double[]) java.lang.reflect.Array.newInstance(
                a.getClass().getComponentType(), size);
        }

        double[] k = _set;
        byte[] states = _states;

        for (int i = k.length, j = 0; i-- > 0;) {
            if (states[i] == FULL) {
                a[j++] = k[i];
            }
        }
        return a;
    }

    /**
     * checks for the presence of <tt>val</tt> in the values of the map.
     *
     * @param val an <code>float</code> value
     * @return a <code>boolean</code> value
     */
    public boolean containsValue(float val) {
        byte[] states = _states;
        float[] vals = _values;

        for (int i = vals.length; i-- > 0;) {
            if (states[i] == FULL && val == vals[i]) {
                return true;
            }
        }
        return false;
    }


    /**
     * checks for the presence of <tt>key</tt> in the keys of the map.
     *
     * @param key an <code>double</code> value
     * @return a <code>boolean</code> value
     */
    public boolean containsKey(double key) {
        return contains(key);
    }

    /**
     * Executes <tt>procedure</tt> for each key in the map.
     *
     * @param procedure a <code>TDoubleProcedure</code> value
     * @return false if the loop over the keys terminated because
     * the procedure returned false for some key.
     */
    public boolean forEachKey(TDoubleProcedure procedure) {
        return forEach(procedure);
    }

    /**
     * Executes <tt>procedure</tt> for each value in the map.
     *
     * @param procedure a <code>TFloatProcedure</code> value
     * @return false if the loop over the values terminated because
     * the procedure returned false for some value.
     */
    public boolean forEachValue(TFloatProcedure procedure) {
        byte[] states = _states;
        float[] values = _values;
        for (int i = values.length; i-- > 0;) {
            if (states[i] == FULL && ! procedure.execute(values[i])) {
                return false;
            }
        }
        return true;
    }

    /**
     * Executes <tt>procedure</tt> for each key/value entry in the
     * map.
     *
     * @param procedure a <code>TDoubleFloatProcedure</code> value
     * @return false if the loop over the entries terminated because
     * the procedure returned false for some entry.
     */
    public boolean forEachEntry(TDoubleFloatProcedure procedure) {
        byte[] states = _states;
        double[] keys = _set;
        float[] values = _values;
        for (int i = keys.length; i-- > 0;) {
            if (states[i] == FULL && ! procedure.execute(keys[i],values[i])) {
                return false;
            }
        }
        return true;
    }

    /**
     * Retains only those entries in the map for which the procedure
     * returns a true value.
     *
     * @param procedure determines which entries to keep
     * @return true if the map was modified.
     */
    public boolean retainEntries(TDoubleFloatProcedure procedure) {
        boolean modified = false;
        byte[] states = _states;
        double[] keys = _set;
        float[] values = _values;

        // Temporarily disable compaction. This is a fix for bug #1738760
        tempDisableAutoCompaction();
        try {
            for (int i = keys.length; i-- > 0;) {
                if (states[i] == FULL && ! procedure.execute(keys[i],values[i])) {
                    removeAt(i);
                    modified = true;
                }
            }
        }
        finally {
            reenableAutoCompaction(true);
        }

        return modified;
    }

    /**
     * Transform the values in this map using <tt>function</tt>.
     *
     * @param function a <code>TFloatFunction</code> value
     */
    public void transformValues(TFloatFunction function) {
        byte[] states = _states;
        float[] values = _values;
        for (int i = values.length; i-- > 0;) {
            if (states[i] == FULL) {
                values[i] = function.execute(values[i]);
            }
        }
    }

    /**
     * Increments the primitive value mapped to key by 1
     *
     * @param key the key of the value to increment
     * @return true if a mapping was found and modified.
     */
    public boolean increment(double key) {
        return adjustValue(key, (float)1);
    }

    /**
     * Adjusts the primitive value mapped to key.
     *
     * @param key the key of the value to increment
     * @param amount the amount to adjust the value by.
     * @return true if a mapping was found and modified.
     */
    public boolean adjustValue(double key, float amount) {
        int index = index(key);
        if (index < 0) {
            return false;
        } else {
            _values[index] += amount;
            return true;
        }
    }

    /**
     * Adjusts the primitive value mapped to the key if the key is present in the map.
     * Otherwise, the <tt>initial_value</tt> is put in the map.
     *
     * @param key the key of the value to increment
     * @param adjust_amount the amount to adjust the value by
     * @param put_amount the value put into the map if the key is not initial present
     *
     * @return the value present in the map after the adjustment or put operation
     *
     * @since 2.0b1
     */
    public float adjustOrPutValue(final double key, final float adjust_amount, final float put_amount ) {
        int index = insertionIndex(key);
        final boolean isNewMapping;
        final float newValue;
        if (index < 0) {
            // Existing mapping: decode the slot and adjust in place.
            index = -index -1;
            newValue = ( _values[index] += adjust_amount );
            isNewMapping = false;
        } else {
            newValue = ( _values[index] = put_amount );
            isNewMapping = true;
        }

        byte previousState = _states[index];
        _set[index] = key;
        _states[index] = FULL;
        if ( isNewMapping ) {
            postInsertHook(previousState == FREE);
        }

        return newValue;
    }


    public void writeExternal( ObjectOutput out ) throws IOException {
        // VERSION
        out.writeByte( 0 );

        // NUMBER OF ENTRIES
        out.writeInt( _size );

        // ENTRIES
        SerializationProcedure writeProcedure = new SerializationProcedure( out );
        if (! forEachEntry(writeProcedure)) {
            // Procedure aborts on I/O failure and records the cause; rethrow it.
            throw writeProcedure.exception;
        }
    }

    public void readExternal( ObjectInput in )
        throws IOException, ClassNotFoundException {

        // VERSION
        in.readByte();

        // NUMBER OF ENTRIES
        int size = in.readInt();
        setUp( size );

        // ENTRIES
        while (size-- > 0) {
            double key = in.readDouble();
            float val = in.readFloat();
            put(key, val);
        }
    }

    public String toString() {
        final StringBuilder buf = new StringBuilder("{");
        forEachEntry(new TDoubleFloatProcedure() {
            private boolean first = true;
            public boolean execute(double key, float value) {
                if ( first ) first = false;
                else buf.append( "," );

                buf.append(key);
                buf.append("=");
                buf.append(value);
                return true;
            }
        });
        buf.append("}");
        return buf.toString();
    }
} // TDoubleFloatHashMap
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pdfbox.pdmodel.interactive.annotation;

import java.io.IOException;

import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.interactive.action.PDActionFactory;
import org.apache.pdfbox.pdmodel.interactive.action.PDAction;
import org.apache.pdfbox.pdmodel.interactive.action.PDActionURI;
import org.apache.pdfbox.pdmodel.interactive.documentnavigation.destination.PDDestination;

/**
 * This is the class that represents a link annotation.
 *
 * @author Ben Litchfield
 * @author Paul King
 */
public class PDAnnotationLink extends PDAnnotation
{

    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_NONE = "N";
    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_INVERT = "I";
    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_OUTLINE = "O";
    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_PUSH = "P";

    /**
     * The type of annotation.
     */
    public static final String SUB_TYPE = "Link";

    /**
     * Constructor.
     */
    public PDAnnotationLink()
    {
        super();
        getCOSObject().setItem(COSName.SUBTYPE, COSName.getPDFName(SUB_TYPE));
    }

    /**
     * Creates a Link annotation from a COSDictionary, expected to be a correct object definition.
     *
     * @param field the PDF objet to represent as a field.
     */
    public PDAnnotationLink(COSDictionary field)
    {
        super(field);
    }

    /**
     * Get the action to be performed when this annotation is to be activated. Either this or the
     * destination entry should be set, but not both.
     *
     * @return The action to be performed when this annotation is activated, or null if the /A
     * entry is absent or not a dictionary.
     */
    public PDAction getAction()
    {
        // Guard against malformed PDFs where /A is not a dictionary; a blind cast
        // would throw ClassCastException (same guard pattern as getBorderStyle()).
        COSBase base = this.getCOSObject().getDictionaryObject(COSName.A);
        if (base instanceof COSDictionary)
        {
            return PDActionFactory.createAction((COSDictionary) base);
        }
        return null;
    }

    /**
     * Set the annotation action. Either this or the destination entry should be set, but not both.
     *
     * @param action The annotation action.
     *
     */
    public void setAction(PDAction action)
    {
        this.getCOSObject().setItem(COSName.A, action);
    }

    /**
     * This will set the border style dictionary, specifying the width and dash pattern used in drawing the line.
     *
     * @param bs the border style dictionary to set.
     *
     */
    public void setBorderStyle(PDBorderStyleDictionary bs)
    {
        this.getCOSObject().setItem(COSName.BS, bs);
    }

    /**
     * This will retrieve the border style dictionary, specifying the width and dash pattern used in
     * drawing the line.
     *
     * @return the border style dictionary.
     */
    public PDBorderStyleDictionary getBorderStyle()
    {
        COSBase bs = this.getCOSObject().getDictionaryObject(COSName.BS);
        if (bs instanceof COSDictionary)
        {
            return new PDBorderStyleDictionary((COSDictionary) bs);
        }
        return null;
    }

    /**
     * Get the destination to be displayed when the annotation is activated. Either this or the
     * action entry should be set, but not both.
     *
     * @return The destination for this annotation.
     *
     * @throws IOException If there is an error creating the destination.
     */
    public PDDestination getDestination() throws IOException
    {
        COSBase base = getCOSObject().getDictionaryObject(COSName.DEST);
        return PDDestination.create(base);
    }

    /**
     * The new destination value. Either this or the action entry should be set, but not both.
     *
     * @param dest The updated destination.
     */
    public void setDestination(PDDestination dest)
    {
        getCOSObject().setItem(COSName.DEST, dest);
    }

    /**
     * Get the highlight mode for when the mouse is depressed. See the HIGHLIGHT_MODE_XXX constants.
     * Defaults to HIGHLIGHT_MODE_INVERT when the /H entry is absent.
     *
     * @return The string representation of the highlight mode.
     */
    public String getHighlightMode()
    {
        return getCOSObject().getNameAsString(COSName.H, HIGHLIGHT_MODE_INVERT);
    }

    /**
     * Set the highlight mode. See the HIGHLIGHT_MODE_XXX constants.
     *
     * @param mode The new highlight mode.
     */
    public void setHighlightMode(String mode)
    {
        getCOSObject().setName(COSName.H, mode);
    }

    /**
     * This will set the previous URI action, in case it needs to be retrieved at later date.
     *
     * @param pa The previous URI.
     */
    public void setPreviousURI(PDActionURI pa)
    {
        getCOSObject().setItem("PA", pa);
    }

    /**
     * This will retrieve the previous URI action, in case it's needed.
     *
     * @return The previous URI, or null if the /PA entry is absent or not a dictionary.
     */
    public PDActionURI getPreviousURI()
    {
        // Guard against a non-dictionary /PA entry in a malformed PDF.
        COSBase pa = getCOSObject().getDictionaryObject("PA");
        if (pa instanceof COSDictionary)
        {
            return new PDActionURI((COSDictionary) pa);
        }
        return null;
    }

    /**
     * This will set the set of quadpoints which encompass the areas of this annotation which will activate.
     *
     * @param quadPoints an array representing the set of area covered.
     */
    public void setQuadPoints(float[] quadPoints)
    {
        COSArray newQuadPoints = new COSArray();
        newQuadPoints.setFloatArray(quadPoints);
        getCOSObject().setItem("QuadPoints", newQuadPoints);
    }

    /**
     * This will retrieve the set of quadpoints which encompass the areas of this annotation which will activate.
     *
     * @return An array of floats representing the quad points, or null if the /QuadPoints entry is
     * absent or not an array.
     */
    public float[] getQuadPoints()
    {
        // Guard against a missing or non-array /QuadPoints entry; a blind cast
        // would throw ClassCastException on malformed input.
        COSBase quadPoints = getCOSObject().getDictionaryObject("QuadPoints");
        if (quadPoints instanceof COSArray)
        {
            return ((COSArray) quadPoints).toFloatArray();
        }
        return null;
    }
}
/* * Copyright 2011-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.glowroot.agent.tests; import java.io.File; import java.util.Iterator; import java.util.List; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.glowroot.agent.it.harness.AppUnderTest; import org.glowroot.agent.it.harness.Container; import org.glowroot.agent.it.harness.Containers; import org.glowroot.agent.tests.app.LevelOne; import org.glowroot.wire.api.model.TraceOuterClass.Trace; import static org.assertj.core.api.Assertions.assertThat; public class DetailMapIT { private static Container container; @BeforeAll public static void setUp() throws Exception { container = Containers.create(); } @AfterAll public static void tearDown() throws Exception { container.close(); } @AfterEach public void afterEachTest() throws Exception { container.checkAndReset(); } @Test public void shouldReadDetailMap() throws Exception { // when Trace trace = container.execute(ShouldGenerateTraceWithNestedEntries.class); // then Trace.Header header = trace.getHeader(); assertThat(header.getHeadline()).isEqualTo("Level One"); List<Trace.DetailEntry> details = trace.getHeader().getDetailEntryList(); assertThat(details).hasSize(4); assertThat(details.get(0).getName()).isEqualTo("arg1"); assertThat(details.get(0).getValueList()).hasSize(1); 
assertThat(details.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(details.get(1).getName()).isEqualTo("arg2"); assertThat(details.get(1).getValueList()).hasSize(1); assertThat(details.get(1).getValueList().get(0).getString()).isEqualTo("b"); assertThat(details.get(2).getName()).isEqualTo("nested1"); List<Trace.DetailEntry> nestedDetails = details.get(2).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey11"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey12"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getString()).isEqualTo("b"); assertThat(nestedDetails.get(2).getName()).isEqualTo("subnested1"); List<Trace.DetailEntry> subNestedDetails = nestedDetails.get(2).getChildEntryList(); assertThat(subNestedDetails.get(0).getName()).isEqualTo("subnestedkey1"); assertThat(subNestedDetails.get(0).getValueList()).hasSize(1); assertThat(subNestedDetails.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(subNestedDetails.get(1).getName()).isEqualTo("subnestedkey2"); assertThat(subNestedDetails.get(1).getValueList()).hasSize(1); assertThat(subNestedDetails.get(1).getValueList().get(0).getString()).isEqualTo("b"); assertThat(details.get(3).getName()).isEqualTo("nested2"); nestedDetails = details.get(3).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey21"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey22"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getString()).isEqualTo("b"); 
assertThat(header.getTransactionName()).isEqualTo("basic test"); Trace.Timer rootTimer = header.getMainThreadRootTimer(); assertThat(rootTimer.getName()).isEqualTo("level one"); assertThat(rootTimer.getChildTimerList()).hasSize(1); assertThat(rootTimer.getChildTimerList().get(0).getName()).isEqualTo("level two"); Trace.Timer levelTwoTimer = rootTimer.getChildTimerList().get(0); assertThat(levelTwoTimer.getChildTimerList()).hasSize(1); assertThat(levelTwoTimer.getChildTimerList().get(0).getName()).isEqualTo("level three"); Trace.Timer levelThreeTimer = levelTwoTimer.getChildTimerList().get(0); assertThat(levelThreeTimer.getChildTimerList()).hasSize(1); assertThat(levelThreeTimer.getChildTimerList().get(0).getName()).isEqualTo("level four"); Iterator<Trace.Entry> i = trace.getEntryList().iterator(); Trace.Entry entry = i.next(); assertThat(entry.getDepth()).isEqualTo(0); assertThat(entry.getMessage()).isEqualTo("Level Two"); details = entry.getDetailEntryList(); assertThat(details).hasSize(2); assertThat(details.get(0).getName()).isEqualTo("arg1"); assertThat(details.get(0).getValueList()).hasSize(1); assertThat(details.get(0).getValueList().get(0).getString()).isEqualTo("ax"); assertThat(details.get(1).getName()).isEqualTo("arg2"); assertThat(details.get(1).getValueList()).hasSize(1); assertThat(details.get(1).getValueList().get(0).getString()).isEqualTo("bx"); entry = i.next(); assertThat(entry.getDepth()).isEqualTo(1); assertThat(entry.getMessage()).isEqualTo("Level Three"); details = entry.getDetailEntryList(); assertThat(details).hasSize(2); assertThat(details.get(0).getName()).isEqualTo("arg1"); assertThat(details.get(0).getValueList()).hasSize(1); assertThat(details.get(0).getValueList().get(0).getString()).isEqualTo("axy"); assertThat(details.get(1).getName()).isEqualTo("arg2"); assertThat(details.get(1).getValueList()).hasSize(1); assertThat(details.get(1).getValueList().get(0).getString()).isEqualTo("bxy"); // there's no way offsetNanos should be 0 
assertThat(entry.getStartOffsetNanos()).isGreaterThan(0); entry = i.next(); assertThat(entry.getDepth()).isEqualTo(2); assertThat(entry.getMessage()).isEqualTo("Level Four: axy, bxy"); assertThat(i.hasNext()).isFalse(); } @Test public void shouldReadDetailMapWithBooleans() throws Exception { // when Trace trace = container.execute(ShouldGenerateTraceWithBooleans.class); // then Trace.Header header = trace.getHeader(); assertThat(header.getHeadline()).isEqualTo("Level One"); List<Trace.DetailEntry> details = trace.getHeader().getDetailEntryList(); assertThat(details).hasSize(4); assertThat(details.get(0).getName()).isEqualTo("arg1"); assertThat(details.get(0).getValueList()).hasSize(1); assertThat(details.get(0).getValueList().get(0).getBoolean()).isEqualTo(false); assertThat(details.get(1).getName()).isEqualTo("arg2"); assertThat(details.get(1).getValueList()).hasSize(1); assertThat(details.get(1).getValueList().get(0).getBoolean()).isEqualTo(true); assertThat(details.get(2).getName()).isEqualTo("nested1"); List<Trace.DetailEntry> nestedDetails = details.get(2).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey11"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getBoolean()).isEqualTo(false); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey12"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getBoolean()).isEqualTo(true); assertThat(nestedDetails.get(2).getName()).isEqualTo("subnested1"); List<Trace.DetailEntry> subNestedDetails = nestedDetails.get(2).getChildEntryList(); assertThat(subNestedDetails.get(0).getName()).isEqualTo("subnestedkey1"); assertThat(subNestedDetails.get(0).getValueList()).hasSize(1); assertThat(subNestedDetails.get(0).getValueList().get(0).getBoolean()).isEqualTo(false); assertThat(subNestedDetails.get(1).getName()).isEqualTo("subnestedkey2"); 
assertThat(subNestedDetails.get(1).getValueList()).hasSize(1); assertThat(subNestedDetails.get(1).getValueList().get(0).getBoolean()).isEqualTo(true); assertThat(details.get(3).getName()).isEqualTo("nested2"); nestedDetails = details.get(3).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey21"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getBoolean()).isEqualTo(false); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey22"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getBoolean()).isEqualTo(true); } @Test public void shouldReadDetailMapWithNumbers() throws Exception { // when Trace trace = container.execute(ShouldGenerateTraceWithNumbers.class); // then Trace.Header header = trace.getHeader(); assertThat(header.getHeadline()).isEqualTo("Level One"); List<Trace.DetailEntry> details = trace.getHeader().getDetailEntryList(); assertThat(details).hasSize(4); assertThat(details.get(0).getName()).isEqualTo("arg1"); assertThat(details.get(0).getValueList()).hasSize(1); assertThat(details.get(0).getValueList().get(0).getLong()).isEqualTo(5); assertThat(details.get(1).getName()).isEqualTo("arg2"); assertThat(details.get(1).getValueList()).hasSize(1); assertThat(details.get(1).getValueList().get(0).getDouble()).isEqualTo(5.5); assertThat(details.get(2).getName()).isEqualTo("nested1"); List<Trace.DetailEntry> nestedDetails = details.get(2).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey11"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getLong()).isEqualTo(5); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey12"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getDouble()).isEqualTo(5.5); 
assertThat(nestedDetails.get(2).getName()).isEqualTo("subnested1"); List<Trace.DetailEntry> subNestedDetails = nestedDetails.get(2).getChildEntryList(); assertThat(subNestedDetails.get(0).getName()).isEqualTo("subnestedkey1"); assertThat(subNestedDetails.get(0).getValueList()).hasSize(1); assertThat(subNestedDetails.get(0).getValueList().get(0).getLong()).isEqualTo(5); assertThat(subNestedDetails.get(1).getName()).isEqualTo("subnestedkey2"); assertThat(subNestedDetails.get(1).getValueList()).hasSize(1); assertThat(subNestedDetails.get(1).getValueList().get(0).getDouble()).isEqualTo(5.5); assertThat(details.get(3).getName()).isEqualTo("nested2"); nestedDetails = details.get(3).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey21"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getLong()).isEqualTo(5); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey22"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getDouble()).isEqualTo(5.5); } @Test public void shouldReadDetailMapWithBadType() throws Exception { // given for (int i = 0; i < 4; i++) { container.addExpectedLogMessage("org.glowroot.agent.model.DetailMapWriter", "detail map has unexpected value type: java.io.File"); } // when Trace trace = container.execute(ShouldGenerateTraceWithBadType.class); // then assertThat(trace.getHeader().getHeadline()).isEqualTo("Level One"); List<Trace.DetailEntry> details = trace.getHeader().getDetailEntryList(); assertThat(details).hasSize(4); assertThat(details.get(0).getName()).isEqualTo("arg1"); assertThat(details.get(0).getValueList()).hasSize(1); assertThat(details.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(details.get(1).getName()).isEqualTo("arg2"); assertThat(details.get(1).getValueList()).hasSize(1); 
assertThat(details.get(1).getValueList().get(0).getString()).isEqualTo("x"); assertThat(details.get(2).getName()).isEqualTo("nested1"); List<Trace.DetailEntry> nestedDetails = details.get(2).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey11"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey12"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getString()).isEqualTo("x"); assertThat(nestedDetails.get(2).getName()).isEqualTo("subnested1"); List<Trace.DetailEntry> subNestedDetails = nestedDetails.get(2).getChildEntryList(); assertThat(subNestedDetails.get(0).getName()).isEqualTo("subnestedkey1"); assertThat(subNestedDetails.get(0).getValueList()).hasSize(1); assertThat(subNestedDetails.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(subNestedDetails.get(1).getName()).isEqualTo("subnestedkey2"); assertThat(subNestedDetails.get(1).getValueList()).hasSize(1); assertThat(subNestedDetails.get(1).getValueList().get(0).getString()).isEqualTo("x"); assertThat(details.get(3).getName()).isEqualTo("nested2"); nestedDetails = details.get(3).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey21"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getString()).isEqualTo("a"); assertThat(nestedDetails.get(1).getName()).isEqualTo("nestedkey22"); assertThat(nestedDetails.get(1).getValueList()).hasSize(1); assertThat(nestedDetails.get(1).getValueList().get(0).getString()).isEqualTo("x"); } @Test public void shouldReadDetailMapWithNullKey() throws Exception { // given container.addExpectedLogMessage("org.glowroot.agent.model.DetailMapWriter", "detail map has null key"); 
container.addExpectedLogMessage("org.glowroot.agent.model.DetailMapWriter", "detail map has null key"); // when Trace trace = container.execute(ShouldGenerateTraceWithNullKey.class); // then Trace.Header header = trace.getHeader(); assertThat(header.getHeadline()).isEqualTo("Level One"); List<Trace.DetailEntry> details = trace.getHeader().getDetailEntryList(); assertThat(details).hasSize(2); assertThat(details.get(0).getName()).isEqualTo("arg1"); assertThat(details.get(0).getValueList()).hasSize(1); assertThat(details.get(0).getValueList().get(0).getString()) .isEqualTo("useArg2AsKeyAndValue"); assertThat(details.get(1).getName()).isEqualTo("nested1"); List<Trace.DetailEntry> nestedDetails = details.get(1).getChildEntryList(); assertThat(nestedDetails.get(0).getName()).isEqualTo("nestedkey11"); assertThat(nestedDetails.get(0).getValueList()).hasSize(1); assertThat(nestedDetails.get(0).getValueList().get(0).getString()) .isEqualTo("useArg2AsKeyAndValue"); } public static class ShouldGenerateTraceWithNestedEntries implements AppUnderTest { @Override public void executeApp() { new LevelOne().call("a", "b"); } } public static class ShouldGenerateTraceWithBooleans implements AppUnderTest { @Override public void executeApp() { new LevelOne().call(false, true); } } public static class ShouldGenerateTraceWithNumbers implements AppUnderTest { @Override public void executeApp() { new LevelOne().call(5, 5.5); } } public static class ShouldGenerateTraceWithNulls implements AppUnderTest { @Override public void executeApp() { new LevelOne().call(5, null); } } public static class ShouldGenerateTraceWithBadType implements AppUnderTest { @Override public void executeApp() { new LevelOne().call("a", new File("x")); } } public static class ShouldGenerateTraceWithNullKey implements AppUnderTest { @Override public void executeApp() { new LevelOne().call("useArg2AsKeyAndValue", null); } } public static class ShouldGenerateTraceWithBadKeyType implements AppUnderTest { @Override public 
void executeApp() { new LevelOne().call("useArg2AsKeyAndValue", new File("x")); } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.storage.fluent;

import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.resourcemanager.storage.fluent.models.LocalUserInner;
import com.azure.resourcemanager.storage.fluent.models.LocalUserKeysInner;
import com.azure.resourcemanager.storage.fluent.models.LocalUserRegeneratePasswordResultInner;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in LocalUsersOperationsClient. */
public interface LocalUsersOperationsClient {
    /**
     * List the local users associated with the storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list storage account local users as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<LocalUserInner> listAsync(String resourceGroupName, String accountName);

    /**
     * List the local users associated with the storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list storage account local users as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<LocalUserInner> list(String resourceGroupName, String accountName);

    /**
     * List the local users associated with the storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list storage account local users as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<LocalUserInner> list(String resourceGroupName, String accountName, Context context);

    /**
     * Get the local user of the storage account by username.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user of the storage account by username along with {@link Response} on successful completion
     *     of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<LocalUserInner>> getWithResponseAsync(String resourceGroupName, String accountName, String username);

    /**
     * Get the local user of the storage account by username.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user of the storage account by username on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<LocalUserInner> getAsync(String resourceGroupName, String accountName, String username);

    /**
     * Get the local user of the storage account by username.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user of the storage account by username.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    LocalUserInner get(String resourceGroupName, String accountName, String username);

    /**
     * Get the local user of the storage account by username.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user of the storage account by username along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<LocalUserInner> getWithResponse(
        String resourceGroupName, String accountName, String username, Context context);

    /**
     * Create or update the properties of a local user associated with the storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param properties The local user associated with a storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user associated with the storage accounts along with {@link Response} on successful
     *     completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<LocalUserInner>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String accountName, String username, LocalUserInner properties);

    /**
     * Create or update the properties of a local user associated with the storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param properties The local user associated with a storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user associated with the storage accounts on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<LocalUserInner> createOrUpdateAsync(
        String resourceGroupName, String accountName, String username, LocalUserInner properties);

    /**
     * Create or update the properties of a local user associated with the storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param properties The local user associated with a storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user associated with the storage accounts.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    LocalUserInner createOrUpdate(
        String resourceGroupName, String accountName, String username, LocalUserInner properties);

    /**
     * Create or update the properties of a local user associated with the storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param properties The local user associated with a storage account.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the local user associated with the storage accounts along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<LocalUserInner> createOrUpdateWithResponse(
        String resourceGroupName, String accountName, String username, LocalUserInner properties, Context context);

    /**
     * Deletes the local user associated with the specified storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<Void>> deleteWithResponseAsync(String resourceGroupName, String accountName, String username);

    /**
     * Deletes the local user associated with the specified storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return A {@link Mono} that completes when a successful response is received.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Void> deleteAsync(String resourceGroupName, String accountName, String username);

    /**
     * Deletes the local user associated with the specified storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String accountName, String username);

    /**
     * Deletes the local user associated with the specified storage account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<Void> deleteWithResponse(String resourceGroupName, String accountName, String username, Context context);

    /**
     * List SSH authorized keys and shared key of the local user.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Storage Account Local User keys along with {@link Response} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<LocalUserKeysInner>> listKeysWithResponseAsync(
        String resourceGroupName, String accountName, String username);

    /**
     * List SSH authorized keys and shared key of the local user.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Storage Account Local User keys on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<LocalUserKeysInner> listKeysAsync(String resourceGroupName, String accountName, String username);

    /**
     * List SSH authorized keys and shared key of the local user.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Storage Account Local User keys.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    LocalUserKeysInner listKeys(String resourceGroupName, String accountName, String username);

    /**
     * List SSH authorized keys and shared key of the local user.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Storage Account Local User keys along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<LocalUserKeysInner> listKeysWithResponse(
        String resourceGroupName, String accountName, String username, Context context);

    /**
     * Regenerate the local user SSH password.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the secrets of Storage Account Local User along with {@link Response} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<LocalUserRegeneratePasswordResultInner>> regeneratePasswordWithResponseAsync(
        String resourceGroupName, String accountName, String username);

    /**
     * Regenerate the local user SSH password.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the secrets of Storage Account Local User on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<LocalUserRegeneratePasswordResultInner> regeneratePasswordAsync(
        String resourceGroupName, String accountName, String username);

    /**
     * Regenerate the local user SSH password.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the secrets of Storage Account Local User.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    LocalUserRegeneratePasswordResultInner regeneratePassword(
        String resourceGroupName, String accountName, String username);

    /**
     * Regenerate the local user SSH password.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case
     *     insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names
     *     must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param username The name of local user. The username must contain lowercase letters and numbers only. It must
     *     be unique only within the storage account.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the secrets of Storage Account Local User along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<LocalUserRegeneratePasswordResultInner> regeneratePasswordWithResponse(
        String resourceGroupName, String accountName, String username, Context context);
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.namespace;

import com.google.common.collect.Sets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;

import java.io.IOException;
import java.util.List;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.concurrent.ConcurrentSkipListMap;

/**
 * Class used to manage namespace descriptor information
 * stored in zookeeper. This information will be consumed
 * by regionservers to act on namespace related
 * parameters (ie enforcing quota).
 * Information in ZK is reconstructed from namespace
 * table whenever {@link TableNamespaceManager} is started.
 * <p>
 * The manager keeps an in-memory cache of protobuf-serialized
 * {@link NamespaceDescriptor}s that mirrors the children of the
 * namespace znode; ZK watcher callbacks below keep the cache in sync.
 */
@InterfaceAudience.Private
class ZKNamespaceManager extends ZooKeeperListener {
  private static Log LOG = LogFactory.getLog(ZKNamespaceManager.class);
  // Parent znode under which one child per namespace is stored.
  private final String nsZNode;
  // Cache of namespace name -> descriptor; ConcurrentSkipListMap so readers
  // and the ZK event thread can access it without extra locking, and so
  // list() can produce a sorted view.
  private NavigableMap<String,NamespaceDescriptor> cache;

  public ZKNamespaceManager(ZooKeeperWatcher zkw) throws IOException {
    super(zkw);
    nsZNode = ZooKeeperWatcher.namespaceZNode;
    cache = new ConcurrentSkipListMap<String, NamespaceDescriptor>();
  }

  /**
   * Registers this listener with the watcher and primes the cache: if the
   * namespace znode already exists its children are read (setting watches
   * for future changes); otherwise the znode is created.
   *
   * @throws IOException wrapping any {@link KeeperException} from ZK
   */
  public void start() throws IOException {
    watcher.registerListener(this);
    try {
      if (ZKUtil.watchAndCheckExists(watcher, nsZNode)) {
        List<ZKUtil.NodeAndData> existing =
            ZKUtil.getChildDataAndWatchForNewChildren(watcher, nsZNode);
        if (existing != null) {
          refreshNodes(existing);
        }
      } else {
        ZKUtil.createWithParents(watcher, nsZNode);
      }
    } catch (KeeperException e) {
      throw new IOException("Failed to initialize ZKNamespaceManager", e);
    }
  }

  /**
   * @param name namespace name
   * @return cached descriptor, or null if unknown
   */
  public NamespaceDescriptor get(String name) {
    return cache.get(name);
  }

  /**
   * Writes the descriptor to ZK first, then updates the local cache.
   *
   * @param ns descriptor to store
   * @throws IOException if the ZK write fails
   */
  public void update(NamespaceDescriptor ns) throws IOException {
    writeNamespace(ns);
    cache.put(ns.getName(), ns);
  }

  /**
   * Deletes the namespace znode first, then removes it from the cache.
   *
   * @param name namespace name
   * @throws IOException if the ZK delete fails
   */
  public void remove(String name) throws IOException {
    deleteNamespace(name);
    cache.remove(name);
  }

  /**
   * @return a snapshot of all cached descriptors, sorted by
   *         {@link NamespaceDescriptor#NAMESPACE_DESCRIPTOR_COMPARATOR}
   */
  public NavigableSet<NamespaceDescriptor> list() throws IOException {
    NavigableSet<NamespaceDescriptor> ret =
        Sets.newTreeSet(NamespaceDescriptor.NAMESPACE_DESCRIPTOR_COMPARATOR);
    for(NamespaceDescriptor ns: cache.values()) {
      ret.add(ns);
    }
    return ret;
  }

  /**
   * Re-reads all children when the namespace parent znode itself appears.
   * ZK errors are fatal here: the cache could silently diverge, so abort.
   */
  @Override
  public void nodeCreated(String path) {
    if (nsZNode.equals(path)) {
      try {
        List<ZKUtil.NodeAndData> nodes =
            ZKUtil.getChildDataAndWatchForNewChildren(watcher, nsZNode);
        refreshNodes(nodes);
      } catch (KeeperException ke) {
        String msg = "Error reading data from zookeeper";
        LOG.error(msg, ke);
        watcher.abort(msg, ke);
      } catch (IOException e) {
        String msg = "Error parsing data from zookeeper";
        LOG.error(msg, e);
        watcher.abort(msg, e);
      }
    }
  }

  /**
   * Evicts a namespace from the cache when its child znode is deleted.
   */
  @Override
  public void nodeDeleted(String path) {
    if (nsZNode.equals(ZKUtil.getParent(path))) {
      String nsName = ZKUtil.getNodeName(path);
      cache.remove(nsName);
    }
  }

  /**
   * Re-reads a single namespace znode when its data changes and refreshes
   * the cache entry. Aborts on ZK or deserialization errors since the cache
   * would otherwise be stale.
   */
  @Override
  public void nodeDataChanged(String path) {
    if (nsZNode.equals(ZKUtil.getParent(path))) {
      try {
        byte[] data = ZKUtil.getDataAndWatch(watcher, path);
        NamespaceDescriptor ns =
            ProtobufUtil.toNamespaceDescriptor(
                HBaseProtos.NamespaceDescriptor.parseFrom(data));
        cache.put(ns.getName(), ns);
      } catch (KeeperException ke) {
        String msg = "Error reading data from zookeeper for node "+path;
        LOG.error(msg, ke);
        // only option is to abort
        watcher.abort(msg, ke);
      } catch (IOException ioe) {
        String msg = "Error deserializing namespace: "+path;
        LOG.error(msg, ioe);
        watcher.abort(msg, ioe);
      }
    }
  }

  /**
   * Re-reads all children when the set of namespace znodes changes
   * (namespace created or deleted elsewhere).
   */
  @Override
  public void nodeChildrenChanged(String path) {
    if (nsZNode.equals(path)) {
      // table permissions changed
      try {
        List<ZKUtil.NodeAndData> nodes =
            ZKUtil.getChildDataAndWatchForNewChildren(watcher, nsZNode);
        refreshNodes(nodes);
      } catch (KeeperException ke) {
        LOG.error("Error reading data from zookeeper for path "+path, ke);
        watcher.abort("Zookeeper error get node children for path "+path, ke);
      } catch (IOException e) {
        LOG.error("Error deserializing namespace child from: "+path, e);
        watcher.abort("Error deserializing namespace child from: " + path, e);
      }
    }
  }

  // Deletes the znode for the given namespace; wraps KeeperException in
  // IOException so callers deal with a single checked type.
  private void deleteNamespace(String name) throws IOException {
    String zNode = ZKUtil.joinZNode(nsZNode, name);
    try {
      ZKUtil.deleteNode(watcher, zNode);
    } catch (KeeperException e) {
      LOG.error("Failed updating permissions for namespace "+name, e);
      throw new IOException("Failed updating permissions for namespace "+name, e);
    }
  }

  // Creates (if needed) and writes the protobuf-serialized descriptor to the
  // namespace's znode. createWithParents is a no-op if the node exists.
  private void writeNamespace(NamespaceDescriptor ns) throws IOException {
    String zNode = ZKUtil.joinZNode(nsZNode, ns.getName());
    try {
      ZKUtil.createWithParents(watcher, zNode);
      ZKUtil.updateExistingNodeData(watcher, zNode,
          ProtobufUtil.toProtoBuf(ns).toByteArray(), -1);
    } catch (KeeperException e) {
      LOG.error("Failed updating permissions for namespace "+ns.getName(), e);
      throw new IOException("Failed updating permissions for namespace "+ns.getName(), e);
    }
  }

  // Rebuilds cache entries from a list of znode name/data pairs; empty nodes
  // are skipped. Does not remove stale entries (nodeDeleted handles those).
  private void refreshNodes(List<ZKUtil.NodeAndData> nodes) throws IOException {
    for (ZKUtil.NodeAndData n : nodes) {
      if (n.isEmpty()) continue;
      String path = n.getNode();
      String namespace = ZKUtil.getNodeName(path);
      byte[] nodeData = n.getData();
      if (LOG.isDebugEnabled()) {
        LOG.debug("Updating namespace cache from node "+namespace+" with data: "+
            Bytes.toStringBinary(nodeData));
      }
      NamespaceDescriptor ns =
          ProtobufUtil.toNamespaceDescriptor(
              HBaseProtos.NamespaceDescriptor.parseFrom(nodeData));
      cache.put(ns.getName(), ns);
    }
  }
}
/**
 * Jakarta Bean Validation TCK
 *
 * License: Apache License, Version 2.0
 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
 */
package org.hibernate.beanvalidation.tck.util.shrinkwrap;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.asset.Asset;
import org.jboss.shrinkwrap.api.container.ClassContainer;
import org.jboss.shrinkwrap.api.container.LibraryContainer;
import org.jboss.shrinkwrap.api.container.ResourceContainer;

/**
 * Abstract ShrinkWrap archive builder for Jakarta Bean Validation TCK Arquillian test.
 * <p>
 * This is a base class for builders that try to solve most <b>JBoss Test Harness</b> to <b>Arquillian</b> migration issues. The
 * main goal was to use Jakarta Bean Validation TCK 1.0 tests with minimum code changes.
 * </p>
 *
 * @param <T> Self type to enable abstract builder pattern
 * @param <A> Final shrinkwrap archive
 *
 * @author Martin Kouba
 * @author Hardy Ferentschik
 */
public abstract class ArchiveBuilder<T extends ArchiveBuilder<T, A>, A extends Archive<A>> {

	// Name of the final archive (see getName()).
	private String name;

	// The single test class this archive is built for; required by build() and
	// used to resolve test-package-relative resource paths.
	private Class<?> testClazz = null;

	// All collections below are lazily initialized on first use and stay null
	// when unused; the process*() methods treat null as "nothing to add".
	protected List<ResourceDescriptor> resources = null;
	protected List<String> packages = null;
	protected List<String> classes = null;
	protected List<ServiceProviderDescriptor> serviceProviders = null;
	protected List<File> additionalJars = null;

	/**
	 * Set the name of the final archive.
	 *
	 * @param name archive name
	 * @return self
	 */
	public T withName(String name) {
		this.name = name;
		return self();
	}

	/**
	 * Register a META-INF/services provider to include in the archive.
	 *
	 * @param serviceProvider the provider descriptor
	 * @return self
	 */
	public T withServiceProvider(ServiceProviderDescriptor serviceProvider) {
		if ( serviceProviders == null ) {
			serviceProviders = new ArrayList<>();
		}
		serviceProviders.add( serviceProvider );
		return self();
	}

	/**
	 * Add a single class (by fully qualified name) to the archive.
	 *
	 * @param clazz the class to add
	 * @return self
	 */
	public T withClass(Class<?> clazz) {
		if ( this.classes == null ) {
			this.classes = new ArrayList<>();
		}
		this.classes.add( clazz.getName() );
		return self();
	}

	/**
	 * Add several classes to the archive.
	 *
	 * @param classes the classes to add
	 * @return self
	 */
	public T withClasses(Class<?>... classes) {
		for ( Class<?> clazz : classes ) {
			withClass( clazz );
		}
		return self();
	}

	/**
	 * Add all classes in the test class package to archive and set test class definition for configuration purpose.
	 *
	 * @param testClazz the test class
	 *
	 * @return self
	 */
	public T withTestClassPackage(Class<?> testClazz) {
		return withTestClassDefinition( testClazz ).withPackage( testClazz.getPackage() );
	}

	/**
	 * Add test class to archive and set test class definition for configuration purpose.
	 *
	 * @param testClazz the test class
	 *
	 * @return self
	 */
	public T withTestClass(Class<?> testClazz) {
		return withTestClassDefinition( testClazz ).withClass( testClazz );
	}

	/**
	 * Record the test class without adding it to the archive. May only be
	 * called once per builder.
	 *
	 * @param testClazz the test class
	 * @return self
	 * @throws IllegalStateException if a test class was already set
	 */
	public T withTestClassDefinition(Class<?> testClazz) {
		if ( this.testClazz != null ) {
			throw new IllegalStateException( "Cannot set more than one test class definition!" );
		}
		this.testClazz = testClazz;
		return self();
	}

	/**
	 * Add all classes of the given package to the archive.
	 *
	 * @param pack the package to scan
	 * @return self
	 */
	public T withPackage(Package pack) {
		if ( this.packages == null ) {
			this.packages = new ArrayList<>();
		}
		this.packages.add( pack.getName() );
		return self();
	}

	/**
	 * Add a classpath resource, located relative to the test class package,
	 * keeping its source path as target.
	 *
	 * @param source resource path
	 * @return self
	 */
	public T withResource(String source) {
		return withResource( source, null, true );
	}

	/**
	 * Add a classpath resource keeping its source path as target.
	 *
	 * @param source resource path
	 * @param useTestPackageToLocateSource whether source is relative to the test class package
	 * @return self
	 */
	public T withResource(String source, boolean useTestPackageToLocateSource) {
		return withResource( source, null, useTestPackageToLocateSource );
	}

	/**
	 * Add a classpath resource under an explicit target path in the archive.
	 *
	 * @param source resource path
	 * @param target target path inside the archive, or null to reuse source
	 * @param useTestPackageToLocateSource whether source is relative to the test class package
	 * @return self
	 */
	public T withResource(String source, String target, boolean useTestPackageToLocateSource) {
		if ( this.resources == null ) {
			this.resources = new ArrayList<>();
		}
		this.resources.add( new ResourceDescriptor( source, target, useTestPackageToLocateSource ) );
		return self();
	}

	/**
	 * Add the given resource as META-INF/validation.xml; source is resolved
	 * relative to the test class package.
	 *
	 * @param source resource path
	 * @return self
	 */
	public T withValidationXml(String source) {
		return withResource( source, "META-INF/validation.xml", true );
	}

	/**
	 * Add an empty beans.xml marker; archive-type specific, hence abstract.
	 *
	 * @return self
	 */
	public abstract T withEmptyBeansXml();

	/**
	 * Bundle an extra jar file as a library of the archive.
	 *
	 * @param jarPath path to the jar on the local file system
	 * @return self
	 */
	public T withAdditionalJar(String jarPath) {
		if ( additionalJars == null ) {
			additionalJars = new ArrayList<>();
		}
		additionalJars.add( new File( jarPath ) );
		return self();
	}

	/**
	 * @return self to enable generic builder
	 */
	public abstract T self();

	/**
	 * @return shrinkwrap archive
	 * @throws IllegalStateException if no test class definition was set
	 */
	public A build() {
		if ( testClazz == null ) {
			throw new IllegalStateException( "Test class must be set!" );
		}
		return buildInternal();
	}

	/**
	 * @return concrete shrinkwrap archive
	 */
	protected abstract A buildInternal();

	/**
	 * Scan each registered package on the context (or this class's) class
	 * loader and add every class found to the archive.
	 *
	 * @param archive target archive
	 */
	protected void processPackages(final ClassContainer<?> archive) {
		if ( packages == null ) {
			return;
		}
		for ( String pack : packages ) {
			final URLPackageScanner.Callback callback = new URLPackageScanner.Callback() {
				@Override
				public void classFound(String className) {
					archive.addClass( className );
				}
			};
			ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
			if ( classLoader == null ) {
				classLoader = getClass().getClassLoader();
			}
			final URLPackageScanner scanner = URLPackageScanner.newInstance(
					false, classLoader, callback, pack
			);
			scanner.scanPackage();
		}
	}

	/**
	 * Add registered classes to the archive; the test class itself is skipped
	 * (it is added separately via withTestClass()).
	 *
	 * @param archive target archive
	 */
	protected void processClasses(ClassContainer<?> archive) {
		if ( classes == null ) {
			return;
		}
		for ( String clazz : classes ) {
			if ( testClazz.getName().equals( clazz ) ) {
				continue;
			}
			archive.addClass( clazz );
		}
	}

	/**
	 * Add registered resources to the archive, either from a classpath source
	 * path or from an in-memory asset.
	 *
	 * @param archive target archive
	 */
	protected void processResources(ResourceContainer<?> archive) {
		if ( resources == null ) {
			return;
		}
		for ( ResourceDescriptor resource : resources ) {
			if ( resource.getSource() != null ) {
				if ( resource.getTarget() == null ) {
					archive.addAsResource( resource.getSource() );
				}
				else {
					archive.addAsResource( resource.getSource(), resource.getTarget() );
				}
			}
			else if ( resource.getAsset() != null ) {
				archive.addAsResource( resource.getAsset(), resource.getTarget() );
			}
		}
	}

	/**
	 * Add registered extra jars as libraries of the archive.
	 *
	 * @param archive target archive
	 */
	protected void processAdditionalJars(LibraryContainer<?> archive) {
		if ( additionalJars == null ) {
			return;
		}
		for ( File additionalJar : additionalJars ) {
			archive.addAsLibrary( additionalJar );
		}
	}

	/**
	 * Internal service provider descriptor.
	 *
	 * @author Martin Kouba
	 */
	protected class ServiceProviderDescriptor {

		private final Class<?> serviceInterface;

		private final Class<?>[] serviceImplementations;

		public ServiceProviderDescriptor(Class<?> serviceInterface, Class<?>... serviceImplementations) {
			super();
			this.serviceInterface = serviceInterface;
			this.serviceImplementations = serviceImplementations;
		}

		public Class<?> getServiceInterface() {
			return serviceInterface;
		}

		public Class<?>[] getServiceImplementations() {
			return serviceImplementations;
		}

	}

	/**
	 * Internal resource descriptor.
	 * <p>
	 * Exactly one of {@code source}/{@code asset} is non-null; getSource()
	 * prepends the test-class package path when requested, which is why this
	 * is a non-static inner class (it reads the outer builder's test class).
	 *
	 * @author Martin Kouba
	 */
	protected class ResourceDescriptor {

		private final Asset asset;
		private final String source;
		private final String target;
		private final boolean useTestPackageToLocateSource;

		public ResourceDescriptor(String source, String target, boolean useTestPackageToLocateSource) {
			this.asset = null;
			this.source = source;
			this.target = target;
			this.useTestPackageToLocateSource = useTestPackageToLocateSource;
		}

		public ResourceDescriptor(Asset asset, String target) {
			this.asset = asset;
			this.source = null;
			this.target = target;
			this.useTestPackageToLocateSource = false;
		}

		public Asset getAsset() {
			return asset;
		}

		public String getSource() {
			return useTestPackageToLocateSource ? getTestPackagePath() + source : source;
		}

		public String getPlainSource() {
			return source;
		}

		public String getTarget() {
			return target;
		}

	}

	// Slash-terminated resource-path form of the test class's package,
	// e.g. "org/example/tests/".
	private String getTestPackagePath() {
		return this.testClazz.getPackage().getName().replace( '.', '/' ).concat( "/" );
	}

	/**
	 * @return name of final archive
	 */
	public String getName() {
		return name;
	}

}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.concurrent.Semaphore;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.zookeeper.ClusterStatusTracker;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

/**
 * Test the {@link ActiveMasterManager}.
 * Runs against a mini ZooKeeper cluster only; masters are simulated with the
 * {@link DummyMaster} stub below rather than full HMaster instances.
 */
@Category({MasterTests.class, MediumTests.class})
public class TestActiveMasterManager {
  private final static Log LOG = LogFactory.getLog(TestActiveMasterManager.class);
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniZKCluster();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniZKCluster();
  }

  /**
   * Verifies that the same server address can re-acquire active mastership
   * after a (simulated) master restart.
   */
  @Test public void testRestartMaster() throws IOException, KeeperException {
    ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
      "testActiveMasterManagerFromZK", null, true);
    // Start from a clean slate: remove any leftover master/cluster-state
    // znodes from prior tests; NoNodeException just means they weren't there.
    try {
      ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
      ZKUtil.deleteNode(zk, zk.clusterStateZNode);
    } catch(KeeperException.NoNodeException nne) {}

    // Create the master node with a dummy address
    ServerName master = ServerName.valueOf("localhost", 1, System.currentTimeMillis());
    // Should not have a master yet
    DummyMaster dummyMaster = new DummyMaster(zk,master);
    ClusterStatusTracker clusterStatusTracker =
      dummyMaster.getClusterStatusTracker();
    ActiveMasterManager activeMasterManager =
      dummyMaster.getActiveMasterManager();
    assertFalse(activeMasterManager.clusterHasActiveMaster.get());

    // First test becoming the active master uninterrupted
    MonitoredTask status = Mockito.mock(MonitoredTask.class);
    clusterStatusTracker.setClusterUp();

    activeMasterManager.blockUntilBecomingActiveMaster(100, status);
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertMaster(zk, master);

    // Now pretend master restart
    DummyMaster secondDummyMaster = new DummyMaster(zk,master);
    ActiveMasterManager secondActiveMasterManager =
      secondDummyMaster.getActiveMasterManager();
    assertFalse(secondActiveMasterManager.clusterHasActiveMaster.get());
    // Same address as the current active master, so it should become (stay)
    // the active master rather than block behind another one.
    activeMasterManager.blockUntilBecomingActiveMaster(100, status);
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertMaster(zk, master);
  }

  /**
   * Unit tests that uses ZooKeeper but does not use the master-side methods
   * but rather acts directly on ZK.
   * @throws Exception
   */
  @Test
  public void testActiveMasterManagerFromZK() throws Exception {
    ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
      "testActiveMasterManagerFromZK", null, true);
    // Clean up any znodes left over by earlier tests (see testRestartMaster).
    try {
      ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
      ZKUtil.deleteNode(zk, zk.clusterStateZNode);
    } catch(KeeperException.NoNodeException nne) {}

    // Create the master node with a dummy address
    ServerName firstMasterAddress =
        ServerName.valueOf("localhost", 1, System.currentTimeMillis());
    ServerName secondMasterAddress =
        ServerName.valueOf("localhost", 2, System.currentTimeMillis());

    // Should not have a master yet
    DummyMaster ms1 = new DummyMaster(zk,firstMasterAddress);
    ActiveMasterManager activeMasterManager =
      ms1.getActiveMasterManager();
    assertFalse(activeMasterManager.clusterHasActiveMaster.get());

    // First test becoming the active master uninterrupted
    ClusterStatusTracker clusterStatusTracker =
      ms1.getClusterStatusTracker();
    clusterStatusTracker.setClusterUp();
    activeMasterManager.blockUntilBecomingActiveMaster(100,
        Mockito.mock(MonitoredTask.class));
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertMaster(zk, firstMasterAddress);

    // New manager will now try to become the active master in another thread
    WaitToBeMasterThread t = new WaitToBeMasterThread(zk, secondMasterAddress);
    t.start();
    // Wait for this guy to figure out there is another active master
    // Wait for 1 second at most
    int sleeps = 0;
    while(!t.manager.clusterHasActiveMaster.get() && sleeps < 100) {
      Thread.sleep(10);
      sleeps++;
    }

    // Both should see that there is an active master
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertTrue(t.manager.clusterHasActiveMaster.get());
    // But secondary one should not be the active master
    assertFalse(t.isActiveMaster);

    // Close the first server and delete it's master node
    ms1.stop("stopping first server");

    // Use a listener to capture when the node is actually deleted
    NodeDeletionListener listener = new NodeDeletionListener(zk, zk.getMasterAddressZNode());
    zk.registerListener(listener);

    LOG.info("Deleting master node");
    ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());

    // Wait for the node to be deleted
    LOG.info("Waiting for active master manager to be notified");
    listener.waitForDeletion();
    LOG.info("Master node deleted");

    // Now we expect the secondary manager to have and be the active master
    // Wait for 1 second at most
    sleeps = 0;
    while(!t.isActiveMaster && sleeps < 100) {
      Thread.sleep(10);
      sleeps++;
    }
    LOG.debug("Slept " + sleeps + " times");

    assertTrue(t.manager.clusterHasActiveMaster.get());
    assertTrue(t.isActiveMaster);

    LOG.info("Deleting master node");
    ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
  }

  /**
   * Assert there is an active master and that it has the specified address.
   * @param zk zookeeper watcher to read the master address from
   * @param expectedAddress the expected active master address
   * @throws KeeperException
   * @throws IOException
   */
  private void assertMaster(ZooKeeperWatcher zk,
      ServerName expectedAddress)
  throws KeeperException, IOException {
    ServerName readAddress = MasterAddressTracker.getMasterAddress(zk);
    assertNotNull(readAddress);
    assertTrue(expectedAddress.equals(readAddress));
  }

  /**
   * Thread that blocks trying to become the active master; {@code isActiveMaster}
   * flips to true once blockUntilBecomingActiveMaster returns.
   */
  public static class WaitToBeMasterThread extends Thread {

    ActiveMasterManager manager;
    DummyMaster dummyMaster;
    boolean isActiveMaster;

    public WaitToBeMasterThread(ZooKeeperWatcher zk, ServerName address) {
      this.dummyMaster = new DummyMaster(zk,address);
      this.manager = this.dummyMaster.getActiveMasterManager();
      isActiveMaster = false;
    }

    @Override
    public void run() {
      manager.blockUntilBecomingActiveMaster(100,
          Mockito.mock(MonitoredTask.class));
      LOG.info("Second master has become the active master!");
      isActiveMaster = true;
    }
  }

  /**
   * ZK listener that releases a semaphore when a specific znode is deleted,
   * letting the test block until the deletion event has been delivered.
   */
  public static class NodeDeletionListener extends ZooKeeperListener {
    private static final Log LOG = LogFactory.getLog(NodeDeletionListener.class);

    private Semaphore lock;
    private String node;

    public NodeDeletionListener(ZooKeeperWatcher watcher, String node) {
      super(watcher);
      lock = new Semaphore(0);
      this.node = node;
    }

    @Override
    public void nodeDeleted(String path) {
      if(path.equals(node)) {
        LOG.debug("nodeDeleted(" + path + ")");
        lock.release();
      }
    }

    // Blocks until nodeDeleted has fired for the watched znode.
    public void waitForDeletion() throws InterruptedException {
      lock.acquire();
    }
  }

  /**
   * Dummy Master Implementation.
   * Provides just enough of the {@link Server} contract for
   * {@link ActiveMasterManager} and {@link ClusterStatusTracker};
   * all unused accessors return null.
   */
  public static class DummyMaster implements Server {
    private volatile boolean stopped;
    private ClusterStatusTracker clusterStatusTracker;
    private ActiveMasterManager activeMasterManager;

    public DummyMaster(ZooKeeperWatcher zk, ServerName master) {
      this.clusterStatusTracker = new ClusterStatusTracker(zk, this);
      clusterStatusTracker.start();

      this.activeMasterManager = new ActiveMasterManager(zk, master, this);
      zk.registerListener(activeMasterManager);
    }

    @Override
    public void abort(final String msg, final Throwable t) {}

    @Override
    public boolean isAborted() {
      return false;
    }

    @Override
    public Configuration getConfiguration() {
      return null;
    }

    @Override
    public ZooKeeperWatcher getZooKeeper() {
      return null;
    }

    @Override
    public CoordinatedStateManager getCoordinatedStateManager() {
      return null;
    }

    @Override
    public ServerName getServerName() {
      return null;
    }

    @Override
    public boolean isStopped() {
      return this.stopped;
    }

    @Override
    public void stop(String why) {
      this.stopped = true;
    }

    @Override
    public ClusterConnection getConnection() {
      return null;
    }

    @Override
    public MetaTableLocator getMetaTableLocator() {
      return null;
    }

    public ClusterStatusTracker getClusterStatusTracker() {
      return clusterStatusTracker;
    }

    public ActiveMasterManager getActiveMasterManager() {
      return activeMasterManager;
    }

    @Override
    public ChoreService getChoreService() {
      return null;
    }
  }
}
/*
 * "Copyright (c) 2014 Capgemini Technology Services (hereinafter "Capgemini")
 *
 * License/Terms of Use
 * Permission is hereby granted, free of charge and for the term of intellectual
 * property rights on the Software, to any person obtaining a copy of this software
 * and associated documentation files (the "Software"), to use, copy, modify and
 * propagate free of charge, anywhere in the world, all or part of the Software
 * subject to the following mandatory conditions:
 *
 * -   The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * Any failure to comply with the above shall automatically terminate the license
 * and be construed as a breach of these Terms of Use causing significant harm to
 * Capgemini.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, PEACEFUL ENJOYMENT,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
 * OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Except as contained in this notice, the name of Capgemini shall not be used in
 * advertising or otherwise to promote the use or other dealings in this Software
 * without prior written authorization from Capgemini.
 *
 * These Terms of Use are subject to French law.
 *
 * IMPORTANT NOTICE: The WUIC software implements software components governed by
 * open source software licenses (BSD and Apache) of which CAPGEMINI is not the
 * author or the editor. The rights granted on the said software components are
 * governed by the specific terms and conditions specified by Apache 2.0 and BSD
 * licenses."
 */


package com.github.wuic.path.core;

import com.github.wuic.exception.wrapper.BadArgumentException;
import com.github.wuic.path.DirectoryPath;
import com.github.wuic.path.FilePath;
import com.github.wuic.util.CloseableZipFileAdapter;
import com.github.wuic.util.IOUtils;
import com.github.wuic.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

/**
 * <p>
 * This class represents a path which could be considered as a {@link com.github.wuic.path.FilePath} and as a {@link com.github.wuic.path.DirectoryPath} with a
 * ZIP archive behind the scene. It is a directory since we can consider its entries as children and a path since we can
 * directly read the archive.
 * </p>
 *
 * @author Guillaume DROUET
 * @version 1.1
 * @since 0.3.4
 */
public class ZipFilePath extends ZipDirectoryPath implements FilePath, DirectoryPath {

    /**
     * The logger.
     */
    private final Logger log = LoggerFactory.getLogger(ZipFilePath.class);

    /**
     * The archive.
     */
    private File zipFile;

    /**
     * If this file should be deleted or not.
     */
    private Boolean keepZipFile;

    /**
     * <p>
     * Builds a new instance with the file name as path name.
     * </p>
     *
     * @param file the path
     * @param parent the parent
     * @throws java.io.IOException if any I/O error occurs
     */
    public ZipFilePath(final File file, final DirectoryPath parent) throws IOException {
        this(file, file.getName(), parent, Boolean.TRUE);
    }

    /**
     * <p>
     * Builds a new instance. Throws an {@link IllegalArgumentException} if the given path is not a ZIP archive.
     * </p>
     *
     * @param file the path
     * @param name the name
     * @param parent the parent
     * @param keepFile if given file should be deleted when object is garbage collected
     * @throws java.io.IOException if any I/O error occurs
     */
    public ZipFilePath(final File file, final String name, final DirectoryPath parent, final Boolean keepFile)
            throws IOException {
        super(name, parent);

        if (!IOUtils.isArchive(file)) {
            throw new IllegalArgumentException(String.format("%s is not a ZIP archive", file.getAbsolutePath()));
        }

        zipFile = file;
        keepZipFile = keepFile;
    }

    /**
     * <p>
     * Gets the concrete path pointing to the archive on path system.
     * </p>
     *
     * @return the raw path
     */
    public File getRawFile() {
        return zipFile;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public InputStream openStream() throws IOException {
        return new FileInputStream(zipFile);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String[] list() throws IOException {
        ZipFile archive = null;

        try {
            archive = new ZipFile(zipFile);
            final Enumeration<? extends ZipEntry> entries = archive.entries();
            final List<String> retval = new ArrayList<String>();

            // Read entries
            while (entries.hasMoreElements()) {
                final ZipEntry entry = entries.nextElement();
                final String entryName = entry.getName();

                // We only add the entries at the root level
                if (entryName.split(IOUtils.STD_SEPARATOR).length == 1) {
                    retval.add(entryName);
                }
            }

            return retval.toArray(new String[retval.size()]);
        } finally {
            IOUtils.close(new CloseableZipFileAdapter(archive));
        }
    }

    /**
     * <p>
     * Lists all the entries in the archive relatively to the given root entry name.
     * </p>
     *
     * @param baseEntry the root entry
     * @return the entries which are children of the root entry
     * @throws IOException if any I/O error occurs
     */
    public String[] list(final String baseEntry) throws IOException {
        ZipFile archive = null;

        // Directories end with a /
        final String rootEntry = StringUtils.merge(new String[] { baseEntry, "/", }, "/");

        try {
            archive = new ZipFile(zipFile);
            final Enumeration<? extends ZipEntry> entries = archive.entries();
            final List<String> retval = new ArrayList<String>();

            // Make sure we are going to list the entries of directory inside the archive.
            // FIX: ZipFile#getEntry returns null when the entry does not exist, which
            // previously caused a NullPointerException instead of the intended
            // BadArgumentException when baseEntry is absent from the archive.
            final ZipEntry root = archive.getEntry(rootEntry);

            if (root == null || !root.isDirectory()) {
                final String message = String.format("%s is not a ZIP directory entry", rootEntry);
                throw new BadArgumentException(new IllegalArgumentException(message));
            }

            // Read entries
            while (entries.hasMoreElements()) {
                final ZipEntry entry = entries.nextElement();
                final String entryName = entry.getName();

                // We only add the entries at the root level
                final String relativeEntry = entryName.replace(rootEntry, "");

                if (entryName.startsWith(rootEntry)
                        && !relativeEntry.isEmpty()
                        && relativeEntry.split(IOUtils.STD_SEPARATOR).length == 1) {
                    retval.add(relativeEntry);
                }
            }

            return retval.toArray(new String[retval.size()]);
        } finally {
            if (archive != null) {
                archive.close();
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected ZipFile getZipFile() throws IOException {
        return new ZipFile(zipFile);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long getLastUpdate() {
        return zipFile.lastModified();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected String absoluteEntryOf(final String child) throws IOException {
        return child;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void finalize() throws Throwable {
        // NOTE(review): finalize() offers no deletion guarantee (and is deprecated in
        // modern JDKs); an explicit close/dispose hook would be more reliable.
        // keepZipFile is assumed non-null here (set from a Boolean ctor arg) — a null
        // would NPE on unboxing; callers pass Boolean.TRUE/FALSE today.
        if (!keepZipFile && !zipFile.delete()) {
            log.warn("A ZipFilePath has been garbage collected but unable to delete {}", zipFile.getAbsolutePath());
        }

        super.finalize();
    }
}
package com.sampleboard.adapter; import android.content.Context; import android.graphics.Bitmap; import android.graphics.drawable.AnimationDrawable; import android.graphics.drawable.Drawable; import android.support.v4.app.Fragment; import android.support.v4.content.ContextCompat; import android.support.v7.graphics.Palette; import android.support.v7.widget.RecyclerView; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.animation.LinearInterpolator; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import com.sampleboard.R; import com.sampleboard.bean.api_response.TimelineObjResponse; import com.sampleboard.interfaces.TimelineInterface; import com.sampleboard.utils.CustomAnimationDrawableNew; import com.sampleboard.utils.Utils; import com.sampleboard.view.fragment.dashboard.HomeFragment; import com.squareup.picasso.Picasso; import com.squareup.picasso.Target; import java.util.List; /** * @author AnujSharma */ public class HomeListAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> { private Context mContext; private List<TimelineObjResponse> mResponse; private int lastPosition = -1; // public final static int COLOR_ANIMATION_DURATION = 1000; private int mDefaultBackgroundColor; private TimelineInterface listener; private Fragment fragment; public HomeListAdapter(Context ctx, List<TimelineObjResponse> response, TimelineInterface listener, Fragment fragment) { this.mContext = ctx; this.mResponse = response; this.listener = listener; this.fragment = fragment; } public void updateList(List<TimelineObjResponse> response) { this.mResponse = response; } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View rowView; RecyclerView.ViewHolder vh; rowView = LayoutInflater.from(parent.getContext()).inflate(R.layout.view_list, parent, false); vh = 
new LoadMoreViewHolder(rowView); return vh; } @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { final LoadMoreViewHolder vh = (LoadMoreViewHolder) holder; try { TimelineObjResponse obj = mResponse.get(position); vh.mTitle.setText(obj.getTitle()); vh.mLikesCount.setText(String.valueOf(obj.getLikeCount())); // update like heart symbol if (!TextUtils.isEmpty(obj.getIsLiked()) && obj.getIsLiked().equals("1")) { vh.mLikeImgInitial.setVisibility(View.GONE); vh.mLikeImgFinal.setVisibility(View.VISIBLE); vh.mLikesCount.setTextColor(ContextCompat.getColor(mContext, R.color.red)); vh.mLikeImgFinal.setClickable(true); } else { vh.mLikeImgInitial.setVisibility(View.VISIBLE); vh.mLikeImgInitial.setClickable(true); vh.mLikeImgFinal.setVisibility(View.GONE); vh.mLikesCount.setTextColor(ContextCompat.getColor(mContext, R.color.app_textcolor_heading)); vh.mLikeImgFinal.setClickable(false); vh.mLikeImgInitial.setBackgroundResource(R.drawable.animation_list_layout); } // if (obj.isShared()) { // vh.reshareImg.setColorFilter(ContextCompat.getColor(mContext, R.color.green)); // } else { // vh.reshareImg.setColorFilter(ContextCompat.getColor(mContext, R.color.app_textcolor_heading)); // } //cancel any loading images on this view Picasso.with(mContext).cancelRequest(vh.mImage); vh.mImage.setImageBitmap(null); Target target = new Target() { @Override public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) { assert vh.mImage != null; /* Save the bitmap or do something with it here */ Palette.from(bitmap) .generate(palette -> { Palette.Swatch textSwatch = palette.getVibrantSwatch(); if (textSwatch == null) { return; } vh.mParentLayout.setBackgroundColor(textSwatch.getRgb()); // vh.mInfoContainer.setBackgroundColor(textSwatch.getRgb()); vh.mTitle.setTextColor(textSwatch.getTitleTextColor()); Utils.animateViewColor(vh.mInfoContainer, mDefaultBackgroundColor, textSwatch.getRgb()); }); vh.mImage.setImageBitmap(bitmap); } @Override public void 
onBitmapFailed(Drawable errorDrawable) { vh.mImage.setImageDrawable(ContextCompat.getDrawable(mContext, R.drawable.ic_default_image)); } @Override public void onPrepareLoad(Drawable placeHolderDrawable) { } }; // set the tag to the view vh.mImage.setTag(target); if (!TextUtils.isEmpty(obj.getMedia())) { Picasso.with(mContext).load("http://10.20.3.169" + obj.getMedia()) .resize(500, 500).centerCrop() .into(target); } else { vh.mImage.setImageDrawable(ContextCompat.getDrawable(mContext, R.drawable.ic_default_image)); } } catch (Exception e) { e.printStackTrace(); } } @Override public int getItemCount() { return (mResponse == null) ? 0 : mResponse.size(); } /* View Holder For Trip History */ private class LoadMoreViewHolder extends RecyclerView.ViewHolder { // CardView mCardView; private LinearLayout mParentLayout; private RelativeLayout mInfoContainer; private ImageView reshareImg, mImage, mLikeImgInitial, mLikeImgFinal; private TextView mTitle, mLikesCount; private LoadMoreViewHolder(View itemView) { super(itemView); mParentLayout = itemView.findViewById(R.id.parent); mInfoContainer = itemView.findViewById(R.id.info_container); reshareImg = itemView.findViewById(R.id.img_re_share); mImage = itemView.findViewById(R.id.image); mTitle = itemView.findViewById(R.id.title); mLikeImgInitial = itemView.findViewById(R.id.ic_heart_initial); mLikeImgFinal = itemView.findViewById(R.id.ic_heart_final); mLikesCount = itemView.findViewById(R.id.likes_count); mImage.setOnClickListener(v -> { if (listener != null) listener.onItemClick(mResponse.get(getAdapterPosition()), (ImageView) v, getAdapterPosition()); }); reshareImg.setOnClickListener(view -> { }); mLikeImgInitial.setOnClickListener(v -> { if (listener != null && mResponse.get(getAdapterPosition()) != null) { //Hit API to set is_liked 1 listener.onLikeBtnClicked(mResponse.get(getAdapterPosition()), null, getAdapterPosition(), true); } if (fragment instanceof HomeFragment) { ((HomeFragment) 
fragment).getIsEverythingFine().observe(fragment, aBoolean -> { if (aBoolean) { mLikeImgInitial.setClickable(false); CustomAnimationDrawableNew cad = new CustomAnimationDrawableNew((AnimationDrawable) mContext.getResources().getDrawable( R.drawable.animation_list_layout)) { @Override public void onAnimationFinish() { mLikeImgInitial.setVisibility(View.GONE); mLikeImgFinal.setVisibility(View.VISIBLE); mLikeImgFinal.setClickable(true); } @Override public void onAnimtionStart() { final LinearInterpolator interpolator = new LinearInterpolator(); int updatedCount = updateLikesCounter(mResponse.get(getAdapterPosition()).getLikeCount(), true); mResponse.get(getAdapterPosition()).setLikeCount(updatedCount); mLikesCount.animate() .alpha(0) .setDuration(100) .setStartDelay(200) .setInterpolator(interpolator) .withEndAction(() -> { mLikesCount.animate() .alpha(1) .setDuration(100) .setInterpolator(interpolator); mLikesCount.setText(String.valueOf(updatedCount)); mLikesCount.setTextColor(ContextCompat.getColor(mContext, R.color.red)); }); } }; mLikeImgInitial.setBackgroundDrawable(cad); cad.start(); } }); } }); mLikeImgFinal.setOnClickListener(v -> { if (listener != null && mResponse.get(getAdapterPosition()) != null) { //Hit API to set is_liked 0 listener.onLikeBtnClicked(mResponse.get(getAdapterPosition()), null, getAdapterPosition(), false); } ((HomeFragment) fragment).getIsEverythingFine().observe(fragment, aBoolean -> { if (aBoolean) { mLikeImgInitial.setVisibility(View.VISIBLE); mLikeImgInitial.setClickable(true); mLikeImgFinal.setVisibility(View.GONE); mLikesCount.setTextColor(ContextCompat.getColor(mContext, R.color.app_textcolor)); mLikeImgFinal.setClickable(false); mLikeImgInitial.setBackgroundResource(R.drawable.animation_list_layout); int updatedCount = updateLikesCounter(mResponse.get(getAdapterPosition()).getLikeCount(), false); mResponse.get(getAdapterPosition()).setLikeCount(updatedCount); mLikesCount.setText(String.valueOf(updatedCount)); } }); }); } } private 
int updateLikesCounter(int count, boolean isIncreased) { if (isIncreased) return count + 1; else return count - 1; } }
/* ===========================================================
 * JFreeChart : a free chart library for the Java(tm) platform
 * ===========================================================
 *
 * (C) Copyright 2000-2006, by Object Refinery Limited and Contributors.
 *
 * Project Info:  http://www.jfree.org/jfreechart/index.html
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 * License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
 * USA.
 *
 * [Java is a trademark or registered trademark of Sun Microsystems, Inc.
 * in the United States and other countries.]
 *
 * -------------
 * RingPlot.java
 * -------------
 * (C) Copyright 2004-2006, by Object Refinery Limited.
 *
 * Original Author:  David Gilbert (for Object Refinery Limited);
 * Contributor(s):   -
 *
 * $Id: RingPlot.java,v 1.4.2.11 2007/01/17 15:24:31 mungady Exp $
 *
 * Changes
 * -------
 * 08-Nov-2004 : Version 1 (DG);
 * 22-Feb-2005 : Renamed DonutPlot --> RingPlot (DG);
 * 06-Jun-2005 : Added default constructor and fixed equals() method to handle
 *               GradientPaint (DG);
 * ------------- JFREECHART 1.0.x ---------------------------------------------
 * 20-Dec-2005 : Fixed problem with entity shape (bug 1386328) (DG);
 * 27-Sep-2006 : Updated drawItem() method for new lookup methods (DG);
 * 12-Oct-2006 : Added configurable section depth (DG);
 *
 */

package org.jfree.chart.plot;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.Arc2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

import org.jfree.chart.entity.EntityCollection;
import org.jfree.chart.entity.PieSectionEntity;
import org.jfree.chart.event.PlotChangeEvent;
import org.jfree.chart.labels.PieToolTipGenerator;
import org.jfree.chart.urls.PieURLGenerator;
import org.jfree.data.general.PieDataset;
import org.jfree.io.SerialUtilities;
import org.jfree.ui.RectangleInsets;
import org.jfree.util.ObjectUtilities;
import org.jfree.util.PaintUtilities;
import org.jfree.util.Rotation;
import org.jfree.util.ShapeUtilities;
import org.jfree.util.UnitType;

/**
 * A customised pie plot that leaves a hole in the middle.
 */
public class RingPlot extends PiePlot implements Cloneable, Serializable {

    /** For serialization. */
    private static final long serialVersionUID = 1556064784129676620L;

    /**
     * A flag that controls whether or not separators are drawn between the
     * sections of the chart.
     */
    private boolean separatorsVisible;

    /** The stroke used to draw separators. */
    private transient Stroke separatorStroke;

    /** The paint used to draw separators. */
    private transient Paint separatorPaint;

    /**
     * The length of the inner separator extension (as a percentage of the
     * depth of the sections).
     */
    private double innerSeparatorExtension;

    /**
     * The length of the outer separator extension (as a percentage of the
     * depth of the sections).
     */
    private double outerSeparatorExtension;

    /**
     * The depth of the section as a percentage of the diameter.
     */
    private double sectionDepth;

    /**
     * Creates a new plot with a <code>null</code> dataset.
     */
    public RingPlot() {
        this(null);
    }

    /**
     * Creates a new plot for the specified dataset.
     *
     * @param dataset  the dataset (<code>null</code> permitted).
     */
    public RingPlot(PieDataset dataset) {
        super(dataset);
        this.separatorsVisible = true;
        this.separatorStroke = new BasicStroke(0.5f);
        this.separatorPaint = Color.gray;
        this.innerSeparatorExtension = 0.20;  // twenty percent
        this.outerSeparatorExtension = 0.20;  // twenty percent
        this.sectionDepth = 0.20;  // 20%
    }

    /**
     * Returns a flag that indicates whether or not separators are drawn between
     * the sections in the chart.
     *
     * @return A boolean.
     *
     * @see #setSeparatorsVisible(boolean)
     */
    public boolean getSeparatorsVisible() {
        return this.separatorsVisible;
    }

    /**
     * Sets the flag that controls whether or not separators are drawn between
     * the sections in the chart, and sends a {@link PlotChangeEvent} to all
     * registered listeners.
     *
     * @param visible  the flag.
     *
     * @see #getSeparatorsVisible()
     */
    public void setSeparatorsVisible(boolean visible) {
        this.separatorsVisible = visible;
        notifyListeners(new PlotChangeEvent(this));
    }

    /**
     * Returns the separator stroke.
     *
     * @return The stroke (never <code>null</code>).
     *
     * @see #setSeparatorStroke(Stroke)
     */
    public Stroke getSeparatorStroke() {
        return this.separatorStroke;
    }

    /**
     * Sets the stroke used to draw the separator between sections and sends
     * a {@link PlotChangeEvent} to all registered listeners.
     *
     * @param stroke  the stroke (<code>null</code> not permitted).
     *
     * @see #getSeparatorStroke()
     */
    public void setSeparatorStroke(Stroke stroke) {
        if (stroke == null) {
            throw new IllegalArgumentException("Null 'stroke' argument.");
        }
        this.separatorStroke = stroke;
        notifyListeners(new PlotChangeEvent(this));
    }

    /**
     * Returns the separator paint.
     *
     * @return The paint (never <code>null</code>).
     *
     * @see #setSeparatorPaint(Paint)
     */
    public Paint getSeparatorPaint() {
        return this.separatorPaint;
    }

    /**
     * Sets the paint used to draw the separator between sections and sends
     * a {@link PlotChangeEvent} to all registered listeners.
     *
     * @param paint  the paint (<code>null</code> not permitted).
     *
     * @see #getSeparatorPaint()
     */
    public void setSeparatorPaint(Paint paint) {
        if (paint == null) {
            throw new IllegalArgumentException("Null 'paint' argument.");
        }
        this.separatorPaint = paint;
        notifyListeners(new PlotChangeEvent(this));
    }

    /**
     * Returns the length of the inner extension of the separator line that
     * is drawn between sections, expressed as a percentage of the depth of
     * the section.
     *
     * @return The inner separator extension (as a percentage).
     *
     * @see #setInnerSeparatorExtension(double)
     */
    public double getInnerSeparatorExtension() {
        return this.innerSeparatorExtension;
    }

    /**
     * Sets the length of the inner extension of the separator line that is
     * drawn between sections, as a percentage of the depth of the
     * sections, and sends a {@link PlotChangeEvent} to all registered
     * listeners.
     *
     * @param percent  the percentage.
     *
     * @see #getInnerSeparatorExtension()
     * @see #setOuterSeparatorExtension(double)
     */
    public void setInnerSeparatorExtension(double percent) {
        this.innerSeparatorExtension = percent;
        notifyListeners(new PlotChangeEvent(this));
    }

    /**
     * Returns the length of the outer extension of the separator line that
     * is drawn between sections, expressed as a percentage of the depth of
     * the section.
     *
     * @return The outer separator extension (as a percentage).
     *
     * @see #setOuterSeparatorExtension(double)
     */
    public double getOuterSeparatorExtension() {
        return this.outerSeparatorExtension;
    }

    /**
     * Sets the length of the outer extension of the separator line that is
     * drawn between sections, as a percentage of the depth of the
     * sections, and sends a {@link PlotChangeEvent} to all registered
     * listeners.
     *
     * @param percent  the percentage.
     *
     * @see #getOuterSeparatorExtension()
     */
    public void setOuterSeparatorExtension(double percent) {
        this.outerSeparatorExtension = percent;
        notifyListeners(new PlotChangeEvent(this));
    }

    /**
     * Returns the depth of each section, expressed as a percentage of the
     * plot radius.
     *
     * @return The depth of each section.
     *
     * @see #setSectionDepth(double)
     * @since 1.0.3
     */
    public double getSectionDepth() {
        return this.sectionDepth;
    }

    /**
     * The section depth is given as percentage of the plot radius.
     * Specifying 1.0 results in a straightforward pie chart. This method
     * sends a {@link PlotChangeEvent} to all registered listeners.
     *
     * @param sectionDepth  the section depth.
     *
     * @see #getSectionDepth()
     * @since 1.0.3
     */
    public void setSectionDepth(double sectionDepth) {
        this.sectionDepth = sectionDepth;
        // Fix: notify listeners like every other setter in this class, so the
        // chart repaints when the depth changes.
        notifyListeners(new PlotChangeEvent(this));
    }

    /**
     * Initialises the plot state (which will store the total of all dataset
     * values, among other things).  This method is called once at the
     * beginning of each drawing.
     *
     * @param g2  the graphics device.
     * @param plotArea  the plot area (<code>null</code> not permitted).
     * @param plot  the plot.
     * @param index  the secondary index (<code>null</code> for primary
     *               renderer).
     * @param info  collects chart rendering information for return to caller.
     *
     * @return A state object (maintains state information relevant to one
     *         chart drawing).
     */
    public PiePlotState initialise(Graphics2D g2, Rectangle2D plotArea,
            PiePlot plot, Integer index, PlotRenderingInfo info) {
        PiePlotState state = super.initialise(g2, plotArea, plot, index, info);
        // Three passes: shadow, fill/outline/entities, separators.
        state.setPassesRequired(3);
        return state;
    }

    /**
     * Draws a single data item.
     *
     * @param g2  the graphics device (<code>null</code> not permitted).
     * @param section  the section index.
     * @param dataArea  the data plot area.
     * @param state  state information for one chart.
     * @param currentPass  the current pass index.
     */
    protected void drawItem(Graphics2D g2, int section, Rectangle2D dataArea,
            PiePlotState state, int currentPass) {

        PieDataset dataset = getDataset();
        Number n = dataset.getValue(section);
        if (n == null) {
            return;
        }
        double value = n.doubleValue();
        double angle1 = 0.0;
        double angle2 = 0.0;

        Rotation direction = getDirection();
        if (direction == Rotation.CLOCKWISE) {
            angle1 = state.getLatestAngle();
            angle2 = angle1 - value / state.getTotal() * 360.0;
        }
        else if (direction == Rotation.ANTICLOCKWISE) {
            angle1 = state.getLatestAngle();
            angle2 = angle1 + value / state.getTotal() * 360.0;
        }
        else {
            throw new IllegalStateException("Rotation type not recognised.");
        }

        double angle = (angle2 - angle1);
        if (Math.abs(angle) > getMinimumArcAngleToDraw()) {
            Comparable key = getSectionKey(section);
            double ep = 0.0;
            double mep = getMaximumExplodePercent();
            if (mep > 0.0) {
                ep = getExplodePercent(key) / mep;
            }
            Rectangle2D arcBounds = getArcBounds(state.getPieArea(),
                    state.getExplodedPieArea(), angle1, angle, ep);
            Arc2D.Double arc = new Arc2D.Double(arcBounds, angle1, angle,
                    Arc2D.OPEN);

            // create the bounds for the inner arc
            double depth = this.sectionDepth / 2.0;
            RectangleInsets s = new RectangleInsets(UnitType.RELATIVE,
                    depth, depth, depth, depth);
            Rectangle2D innerArcBounds = new Rectangle2D.Double();
            innerArcBounds.setRect(arcBounds);
            s.trim(innerArcBounds);
            // calculate inner arc in reverse direction, for later
            // GeneralPath construction
            Arc2D.Double arc2 = new Arc2D.Double(innerArcBounds, angle1
                    + angle, -angle, Arc2D.OPEN);
            GeneralPath path = new GeneralPath();
            path.moveTo((float) arc.getStartPoint().getX(),
                    (float) arc.getStartPoint().getY());
            path.append(arc.getPathIterator(null), false);
            path.append(arc2.getPathIterator(null), true);
            path.closePath();

            Line2D separator = new Line2D.Double(arc2.getEndPoint(),
                    arc.getStartPoint());

            if (currentPass == 0) {
                // Pass 0: shadow only.
                Paint shadowPaint = getShadowPaint();
                double shadowXOffset = getShadowXOffset();
                double shadowYOffset = getShadowYOffset();
                if (shadowPaint != null) {
                    Shape shadowArc = ShapeUtilities.createTranslatedShape(
                            path, (float) shadowXOffset,
                            (float) shadowYOffset);
                    g2.setPaint(shadowPaint);
                    g2.fill(shadowArc);
                }
            }
            else if (currentPass == 1) {
                // Pass 1: fill, outline and chart entities.
                Paint paint = lookupSectionPaint(key, true);
                g2.setPaint(paint);
                g2.fill(path);
                Paint outlinePaint = lookupSectionOutlinePaint(key);
                Stroke outlineStroke = lookupSectionOutlineStroke(key);
                if (outlinePaint != null && outlineStroke != null) {
                    g2.setPaint(outlinePaint);
                    g2.setStroke(outlineStroke);
                    g2.draw(path);
                }

                // add an entity for the pie section
                if (state.getInfo() != null) {
                    EntityCollection entities = state.getEntityCollection();
                    if (entities != null) {
                        String tip = null;
                        PieToolTipGenerator toolTipGenerator
                                = getToolTipGenerator();
                        if (toolTipGenerator != null) {
                            tip = toolTipGenerator.generateToolTip(dataset,
                                    key);
                        }
                        String url = null;
                        PieURLGenerator urlGenerator = getURLGenerator();
                        if (urlGenerator != null) {
                            url = urlGenerator.generateURL(dataset, key,
                                    getPieIndex());
                        }
                        PieSectionEntity entity = new PieSectionEntity(path,
                                dataset, getPieIndex(), section, key, tip,
                                url);
                        entities.add(entity);
                    }
                }
            }
            else if (currentPass == 2) {
                // Pass 2: separators between sections.
                if (this.separatorsVisible) {
                    Line2D extendedSeparator = extendLine(separator,
                            this.innerSeparatorExtension,
                            this.outerSeparatorExtension);
                    g2.setStroke(this.separatorStroke);
                    g2.setPaint(this.separatorPaint);
                    g2.draw(extendedSeparator);
                }
            }
        }
        state.setLatestAngle(angle2);
    }

    /**
     * Tests this plot for equality with an arbitrary object.
     *
     * @param obj  the object to test against (<code>null</code> permitted).
     *
     * @return A boolean.
     */
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof RingPlot)) {
            return false;
        }
        RingPlot that = (RingPlot) obj;
        if (this.separatorsVisible != that.separatorsVisible) {
            return false;
        }
        if (!ObjectUtilities.equal(this.separatorStroke,
                that.separatorStroke)) {
            return false;
        }
        if (!PaintUtilities.equal(this.separatorPaint, that.separatorPaint)) {
            return false;
        }
        if (this.innerSeparatorExtension != that.innerSeparatorExtension) {
            return false;
        }
        if (this.outerSeparatorExtension != that.outerSeparatorExtension) {
            return false;
        }
        if (this.sectionDepth != that.sectionDepth) {
            return false;
        }
        return super.equals(obj);
    }

    /**
     * Creates a new line by extending an existing line.
     *
     * @param line  the line (<code>null</code> not permitted).
     * @param startPercent  the amount to extend the line at the start point
     *                      end.
     * @param endPercent  the amount to extend the line at the end point end.
     *
     * @return A new line.
     */
    private Line2D extendLine(Line2D line, double startPercent,
            double endPercent) {
        if (line == null) {
            throw new IllegalArgumentException("Null 'line' argument.");
        }
        double x1 = line.getX1();
        double x2 = line.getX2();
        double deltaX = x2 - x1;
        double y1 = line.getY1();
        double y2 = line.getY2();
        double deltaY = y2 - y1;
        x1 = x1 - (startPercent * deltaX);
        y1 = y1 - (startPercent * deltaY);
        x2 = x2 + (endPercent * deltaX);
        y2 = y2 + (endPercent * deltaY);
        return new Line2D.Double(x1, y1, x2, y2);
    }

    /**
     * Provides serialization support.
     *
     * @param stream  the output stream.
     *
     * @throws IOException  if there is an I/O error.
     */
    private void writeObject(ObjectOutputStream stream) throws IOException {
        stream.defaultWriteObject();
        SerialUtilities.writeStroke(this.separatorStroke, stream);
        SerialUtilities.writePaint(this.separatorPaint, stream);
    }

    /**
     * Provides serialization support.
     *
     * @param stream  the input stream.
     *
     * @throws IOException  if there is an I/O error.
     * @throws ClassNotFoundException  if there is a classpath problem.
     */
    private void readObject(ObjectInputStream stream)
            throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
        this.separatorStroke = SerialUtilities.readStroke(stream);
        this.separatorPaint = SerialUtilities.readPaint(stream);
    }

}
package net.md_5.bungee.api.chat;

import net.md_5.bungee.api.ChatColor;

import java.util.ArrayList;
import java.util.List;

/**
 * <p>
 * ComponentBuilder simplifies creating basic messages by allowing the use of a
 * chainable builder.
 * </p>
 * <pre>
 * new ComponentBuilder("Hello ").color(ChatColor.RED).
 * append("World").color(ChatColor.BLUE). append("!").bold(true).create();
 * </pre>
 * <p>
 * All methods (excluding {@link #append(String)} and {@link #create()}) work on
 * the last part appended to the builder, so in the example above "Hello " would
 * be {@link net.md_5.bungee.api.ChatColor#RED} and "World" would be
 * {@link net.md_5.bungee.api.ChatColor#BLUE} but "!" would be bold and
 * {@link net.md_5.bungee.api.ChatColor#BLUE} because append copies the previous
 * part's formatting.
 * </p>
 */
public class ComponentBuilder {

    // The part currently being configured; formatting calls mutate this one.
    private TextComponent current;
    // Parts sealed off by a later append() call, in display order.
    private final List<BaseComponent> parts = new ArrayList<BaseComponent>();

    /**
     * Creates a ComponentBuilder that is a deep copy of another builder.
     *
     * @param original the original for the new ComponentBuilder.
     */
    public ComponentBuilder(ComponentBuilder original) {
        current = new TextComponent(original.current);
        for (BaseComponent part : original.parts) {
            parts.add(part.duplicate());
        }
    }

    /**
     * Creates a ComponentBuilder whose first part carries the given text.
     *
     * @param text the first text element
     */
    public ComponentBuilder(String text) {
        current = new TextComponent(text);
    }

    /**
     * Appends a new part carrying the given text and makes it the target of
     * subsequent formatting calls. The new part inherits all formatting from
     * the previous one.
     *
     * @param text the text to append
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder append(String text) {
        return append(text, FormatRetention.ALL);
    }

    /**
     * Appends a new part carrying the given text and makes it the target of
     * subsequent formatting calls, retaining only the requested amount of
     * formatting from the previous part.
     *
     * @param text the text to append
     * @param retention the formatting to retain
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder append(String text, FormatRetention retention) {
        parts.add(current);
        current = new TextComponent(current);
        current.setText(text);
        retain(retention);
        return this;
    }

    /**
     * Sets the color of the current part.
     *
     * @param color the new color
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder color(ChatColor color) {
        current.setColor(color);
        return this;
    }

    /**
     * Sets whether the current part is bold.
     *
     * @param bold whether this part is bold
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder bold(boolean bold) {
        current.setBold(bold);
        return this;
    }

    /**
     * Sets whether the current part is italic.
     *
     * @param italic whether this part is italic
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder italic(boolean italic) {
        current.setItalic(italic);
        return this;
    }

    /**
     * Sets whether the current part is underlined.
     *
     * @param underlined whether this part is underlined
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder underlined(boolean underlined) {
        current.setUnderlined(underlined);
        return this;
    }

    /**
     * Sets whether the current part is strikethrough.
     *
     * @param strikethrough whether this part is strikethrough
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder strikethrough(boolean strikethrough) {
        current.setStrikethrough(strikethrough);
        return this;
    }

    /**
     * Sets whether the current part is obfuscated.
     *
     * @param obfuscated whether this part is obfuscated
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder obfuscated(boolean obfuscated) {
        current.setObfuscated(obfuscated);
        return this;
    }

    /**
     * Sets the insertion text for the current part.
     *
     * @param insertion the insertion text
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder insertion(String insertion) {
        current.setInsertion(insertion);
        return this;
    }

    /**
     * Sets the click event for the current part.
     *
     * @param clickEvent the click event
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder event(ClickEvent clickEvent) {
        current.setClickEvent(clickEvent);
        return this;
    }

    /**
     * Sets the hover event for the current part.
     *
     * @param hoverEvent the hover event
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder event(HoverEvent hoverEvent) {
        current.setHoverEvent(hoverEvent);
        return this;
    }

    /**
     * Strips the current part back to plain text: equivalent to retaining
     * nothing from the previous formatting.
     *
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder reset() {
        return retain(FormatRetention.NONE);
    }

    /**
     * Retains only the specified kind of formatting on the current part; the
     * part's text is never modified.
     *
     * @param retention the formatting to retain
     * @return this ComponentBuilder for chaining
     */
    public ComponentBuilder retain(FormatRetention retention) {
        TextComponent previous = current;

        if (retention == FormatRetention.NONE) {
            // Keep nothing except the raw text.
            current = new TextComponent(previous.getText());
        } else if (retention == FormatRetention.EVENTS) {
            // Keep insertion/click/hover, drop colors and styles.
            current = new TextComponent(previous.getText());
            current.setInsertion(previous.getInsertion());
            current.setClickEvent(previous.getClickEvent());
            current.setHoverEvent(previous.getHoverEvent());
        } else if (retention == FormatRetention.FORMATTING) {
            // Keep colors and styles, drop the events.
            current.setClickEvent(null);
            current.setHoverEvent(null);
        }
        // FormatRetention.ALL: nothing to change.

        return this;
    }

    /**
     * Returns the components needed to display the message created by this
     * builder.
     *
     * @return the created components
     */
    public BaseComponent[] create() {
        parts.add(current);
        return parts.toArray(new BaseComponent[parts.size()]);
    }

    public static enum FormatRetention {

        /**
         * Specify that we do not want to retain anything from the previous
         * component.
         */
        NONE,
        /**
         * Specify that we want the formatting retained from the previous
         * component.
         */
        FORMATTING,
        /**
         * Specify that we want the events retained from the previous
         * component.
         */
        EVENTS,
        /**
         * Specify that we want to retain everything from the previous
         * component.
         */
        ALL
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.zen;

import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeService;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoveryService;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.InitialStateDiscoveryListener;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.fd.MasterFaultDetection;
import org.elasticsearch.discovery.zen.fd.NodesFaultDetection;
import org.elasticsearch.discovery.zen.membership.MembershipAction;
import org.elasticsearch.discovery.zen.ping.ZenPing;
import org.elasticsearch.discovery.zen.ping.ZenPingService;
import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;

import static com.google.common.collect.Lists.newArrayList;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;

/**
 * Zen discovery implementation of the {@link Discovery} contract.
 *
 * Responsibilities visible in this class:
 * <ul>
 * <li>joining a cluster by pinging peers and electing a master
 *     ({@link #findMaster()}, {@link #innerJoinCluster()});</li>
 * <li>fault detection of the master (when this node is a follower) and of the
 *     other nodes (when this node is the master) via {@code masterFD} /
 *     {@code nodesFD};</li>
 * <li>reacting to node joins/leaves/failures by submitting cluster-state
 *     update tasks;</li>
 * <li>receiving published cluster states from the master and applying the
 *     freshest one ({@link #handleNewClusterStateFromMaster}).</li>
 * </ul>
 *
 * Thread-safety notes grounded in the fields below: {@code master},
 * {@code latestDiscoNodes} and {@code currentJoinThread} are {@code volatile}
 * and mutated both from cluster-state update tasks and from lifecycle
 * methods; {@code initialStateSent} guards the one-shot initial-state
 * notification with compare-and-set.
 */
public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, DiscoveryNodesProvider {

    private final ThreadPool threadPool;
    private final TransportService transportService;
    private final ClusterService clusterService;
    // not injected in the constructor; supplied later via setAllocationService()
    private AllocationService allocationService;
    private final ClusterName clusterName;
    private final DiscoveryNodeService discoveryNodeService;
    private final ZenPingService pingService;
    // detects failure of the elected master (used while this node is a follower)
    private final MasterFaultDetection masterFD;
    // detects failure of the other nodes (used while this node is the master)
    private final NodesFaultDetection nodesFD;
    private final PublishClusterStateAction publishClusterState;
    private final MembershipAction membership;
    private final Version version;

    // timeout used for the initial ping round and for blocking join/validate requests
    private final TimeValue pingTimeout;

    // a flag that should be used only for testing
    private final boolean sendLeaveRequest;

    private final ElectMasterService electMaster;

    // when true, pure client nodes are excluded from master election candidates
    private final boolean masterElectionFilterClientNodes;
    // when true, data-only nodes are excluded from master election candidates
    private final boolean masterElectionFilterDataNodes;

    // assigned once in doStart(); identifies this node in the cluster
    private DiscoveryNode localNode;

    private final CopyOnWriteArrayList<InitialStateDiscoveryListener> initialStateListeners = new CopyOnWriteArrayList<InitialStateDiscoveryListener>();

    // true while this node is the elected master
    private volatile boolean master = false;

    // latest view of the cluster's nodes; updated on publish/join/leave/failure
    private volatile DiscoveryNodes latestDiscoNodes;

    // the thread currently running innerJoinCluster(), or null; also used as an
    // "already joining" flag by asyncJoinCluster()
    private volatile Thread currentJoinThread;

    // ensures the initial-state listeners are notified exactly once
    private final AtomicBoolean initialStateSent = new AtomicBoolean();

    @Nullable
    private NodeService nodeService;

    /**
     * Wires the discovery component together: reads ping/election settings,
     * creates the fault-detection and membership sub-components, registers the
     * rejoin transport handler, and subscribes to dynamic setting changes.
     */
    @Inject
    public ZenDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, NodeSettingsService nodeSettingsService,
                        DiscoveryNodeService discoveryNodeService, ZenPingService pingService, Version version, DiscoverySettings discoverySettings) {
        super(settings);
        this.clusterName = clusterName;
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.discoveryNodeService = discoveryNodeService;
        this.pingService = pingService;
        this.version = version;

        // also support direct discovery.zen settings, for cases when it gets extended
        // (fallback chain: discovery.zen.ping.timeout -> discovery.zen.ping_timeout
        //  -> component ping_timeout -> component initial_ping_timeout -> 3s)
        this.pingTimeout = settings.getAsTime("discovery.zen.ping.timeout", settings.getAsTime("discovery.zen.ping_timeout", componentSettings.getAsTime("ping_timeout", componentSettings.getAsTime("initial_ping_timeout", timeValueSeconds(3)))));
        this.sendLeaveRequest = componentSettings.getAsBoolean("send_leave_request", true);

        this.masterElectionFilterClientNodes = settings.getAsBoolean("discovery.zen.master_election.filter_client", true);
        this.masterElectionFilterDataNodes = settings.getAsBoolean("discovery.zen.master_election.filter_data", false);

        logger.debug("using ping.timeout [{}], master_election.filter_client [{}], master_election.filter_data [{}]", pingTimeout, masterElectionFilterClientNodes, masterElectionFilterDataNodes);

        this.electMaster = new ElectMasterService(settings);
        // reacts to dynamic changes of minimum_master_nodes
        nodeSettingsService.addListener(new ApplySettings());

        this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, this);
        this.masterFD.addListener(new MasterNodeFailureListener());

        this.nodesFD = new NodesFaultDetection(settings, threadPool, transportService);
        this.nodesFD.addListener(new NodeFailureListener());

        this.publishClusterState = new PublishClusterStateAction(settings, transportService, this, new NewClusterStateListener(), discoverySettings);
        this.pingService.setNodesProvider(this);
        this.membership = new MembershipAction(settings, transportService, this, new MembershipListener());

        transportService.registerHandler(RejoinClusterRequestHandler.ACTION, new RejoinClusterRequestHandler());
    }

    @Override
    public void setNodeService(@Nullable NodeService nodeService) {
        this.nodeService = nodeService;
    }

    @Override
    public void setAllocationService(AllocationService allocationService) {
        this.allocationService = allocationService;
    }

    /**
     * Builds the local node identity, seeds {@code latestDiscoNodes} with just
     * the local node, starts pinging, and kicks off the (asynchronous) join.
     */
    @Override
    protected void doStart() throws ElasticsearchException {
        Map<String, String> nodeAttributes = discoveryNodeService.buildAttributes();
        // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling
        final String nodeId = DiscoveryService.generateNodeId(settings);
        localNode = new DiscoveryNode(settings.get("name"), nodeId, transportService.boundAddress().publishAddress(), nodeAttributes, version);
        latestDiscoNodes = new DiscoveryNodes.Builder().put(localNode).localNodeId(localNode.id()).build();
        nodesFD.updateNodes(latestDiscoNodes);
        pingService.start();

        // do the join on a different thread, the DiscoveryService waits for 30s anyhow till it is discovered
        asyncJoinCluster();
    }

    /**
     * Stops discovery: halts pinging and fault detection, optionally notifies
     * the master (or, if we are the master, the likely next masters) that we
     * are leaving, and interrupts any in-flight join attempt.
     */
    @Override
    protected void doStop() throws ElasticsearchException {
        pingService.stop();
        masterFD.stop("zen disco stop");
        nodesFD.stop();
        initialStateSent.set(false);
        if (sendLeaveRequest) {
            if (!master && latestDiscoNodes.masterNode() != null) {
                // follower: tell the current master we are leaving (bounded wait)
                try {
                    membership.sendLeaveRequestBlocking(latestDiscoNodes.masterNode(), localNode, TimeValue.timeValueSeconds(1));
                } catch (Exception e) {
                    logger.debug("failed to send leave request to master [{}]", e, latestDiscoNodes.masterNode());
                }
            } else {
                // we are (or were) the master: notify up to 5 possible next masters,
                // skipping ourselves, so they can react to our departure
                DiscoveryNode[] possibleMasters = electMaster.nextPossibleMasters(latestDiscoNodes.nodes().values(), 5);
                for (DiscoveryNode possibleMaster : possibleMasters) {
                    if (localNode.equals(possibleMaster)) {
                        continue;
                    }
                    try {
                        membership.sendLeaveRequest(latestDiscoNodes.masterNode(), possibleMaster);
                    } catch (Exception e) {
                        logger.debug("failed to send leave request from master [{}] to possible master [{}]", e, latestDiscoNodes.masterNode(), possibleMaster);
                    }
                }
            }
        }
        master = false;
        if (currentJoinThread != null) {
            try {
                currentJoinThread.interrupt();
            } catch (Exception e) {
                // ignore
            }
        }
    }

    @Override
    protected void doClose() throws ElasticsearchException {
        masterFD.close();
        nodesFD.close();
        publishClusterState.close();
        membership.close();
        pingService.close();
    }

    @Override
    public DiscoveryNode localNode() {
        return localNode;
    }

    @Override
    public void addListener(InitialStateDiscoveryListener listener) {
        this.initialStateListeners.add(listener);
    }

    @Override
    public void removeListener(InitialStateDiscoveryListener listener) {
        this.initialStateListeners.remove(listener);
    }

    @Override
    public String nodeDescription() {
        return clusterName.value() + "/" + localNode.id();
    }

    /**
     * Returns the latest known cluster nodes; before any cluster state has
     * been decided, falls back to a view containing only the local node.
     */
    @Override
    public DiscoveryNodes nodes() {
        DiscoveryNodes latestNodes = this.latestDiscoNodes;
        if (latestNodes != null) {
            return latestNodes;
        }
        // have not decided yet, just send the local node
        return DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id()).build();
    }

    @Override
    public NodeService nodeService() {
        return this.nodeService;
    }

    /**
     * Publishes a cluster state to the other nodes. Only legal on the elected
     * master; also refreshes {@code latestDiscoNodes} and the nodes watched by
     * the nodes fault detection before publishing.
     *
     * @throws ElasticsearchIllegalStateException if this node is not the master
     */
    @Override
    public void publish(ClusterState clusterState, AckListener ackListener) {
        if (!master) {
            throw new ElasticsearchIllegalStateException("Shouldn't publish state when not master");
        }
        latestDiscoNodes = clusterState.nodes();
        nodesFD.updateNodes(clusterState.nodes());
        publishClusterState.publish(clusterState, ackListener);
    }

    /**
     * Runs {@link #innerJoinCluster()} on a generic thread-pool thread, unless
     * a join thread is already running. NOTE(review): the check-then-act on
     * {@code currentJoinThread} is not atomic — two concurrent callers could
     * both pass the null check; presumably callers make this rare/benign.
     */
    private void asyncJoinCluster() {
        if (currentJoinThread != null) {
            // we are already joining, ignore...
            logger.trace("a join thread already running");
            return;
        }
        threadPool.generic().execute(new Runnable() {
            @Override
            public void run() {
                currentJoinThread = Thread.currentThread();
                try {
                    innerJoinCluster();
                } finally {
                    currentJoinThread = null;
                }
            }
        });
    }

    /**
     * The join loop: pings for a master and either (a) becomes master itself —
     * starting nodes fault detection and publishing a state that clears the
     * no-master block — or (b) connects to the elected master and sends it a
     * blocking join request, then starts master fault detection. Retries until
     * it succeeds or the component is stopped/closed.
     */
    private void innerJoinCluster() {
        boolean retry = true;
        while (retry) {
            if (lifecycle.stoppedOrClosed()) {
                return;
            }
            retry = false;
            DiscoveryNode masterNode = findMaster();
            if (masterNode == null) {
                logger.trace("no masterNode returned");
                retry = true;
                continue;
            }
            if (localNode.equals(masterNode)) {
                // we were elected: flip to master mode and publish ourselves as master
                this.master = true;
                nodesFD.start(); // start the nodes FD
                clusterService.submitStateUpdateTask("zen-disco-join (elected_as_master)", Priority.URGENT, new ProcessedClusterStateUpdateTask() {
                    @Override
                    public ClusterState execute(ClusterState currentState) {
                        DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
                                .localNodeId(localNode.id())
                                .masterNodeId(localNode.id())
                                        // put our local node
                                .put(localNode);
                        // update the fact that we are the master...
                        latestDiscoNodes = builder.build();
                        ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeGlobalBlock(NO_MASTER_BLOCK).build();
                        return ClusterState.builder(currentState).nodes(latestDiscoNodes).blocks(clusterBlocks).build();
                    }

                    @Override
                    public void onFailure(String source, Throwable t) {
                        logger.error("unexpected failure during [{}]", t, source);
                    }

                    @Override
                    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                        sendInitialStateEventIfNeeded();
                    }
                });
            } else {
                this.master = false;
                try {
                    // first, make sure we can connect to the master
                    transportService.connectToNode(masterNode);
                } catch (Exception e) {
                    logger.warn("failed to connect to master [{}], retrying...", e, masterNode);
                    retry = true;
                    continue;
                }
                // send join request
                try {
                    membership.sendJoinRequestBlocking(masterNode, localNode, pingTimeout);
                } catch (Exception e) {
                    if (e instanceof ElasticsearchException) {
                        logger.info("failed to send join request to master [{}], reason [{}]", masterNode, ((ElasticsearchException) e).getDetailedMessage());
                    } else {
                        logger.info("failed to send join request to master [{}], reason [{}]", masterNode, e.getMessage());
                    }
                    if (logger.isTraceEnabled()) {
                        logger.trace("detailed failed reason", e);
                    }
                    // failed to send the join request, retry
                    retry = true;
                    continue;
                }
                masterFD.start(masterNode, "initial_join");
                // no need to submit the received cluster state, we will get it from the master when it publishes
                // the fact that we joined
            }
        }
    }

    /**
     * Handles a leave request. As master: removes the node from the cluster
     * state (rejoining if we drop below minimum_master_nodes) and reroutes to
     * clean the routing table. As non-master: a leave from the master itself
     * is treated as the master going away.
     */
    private void handleLeaveRequest(final DiscoveryNode node) {
        if (lifecycleState() != Lifecycle.State.STARTED) {
            // not started, ignore a node failure
            return;
        }
        if (master) {
            clusterService.submitStateUpdateTask("zen-disco-node_left(" + node + ")", Priority.IMMEDIATE, new ClusterStateUpdateTask() {
                @Override
                public ClusterState execute(ClusterState currentState) {
                    DiscoveryNodes.Builder builder = DiscoveryNodes.builder(currentState.nodes()).remove(node.id());
                    latestDiscoNodes = builder.build();
                    currentState = ClusterState.builder(currentState).nodes(latestDiscoNodes).build();
                    // check if we have enough master nodes, if not, we need to move into joining the cluster again
                    if (!electMaster.hasEnoughMasterNodes(currentState.nodes())) {
                        return rejoin(currentState, "not enough master nodes");
                    }
                    // eagerly run reroute to remove dead nodes from routing table
                    RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(currentState).build());
                    return ClusterState.builder(currentState).routingResult(routingResult).build();
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.error("unexpected failure during [{}]", t, source);
                }
            });
        } else {
            handleMasterGone(node, "shut_down");
        }
    }

    /**
     * Handles a node reported failed by the nodes fault detection. Master-only:
     * removes the node, rejoins if minimum_master_nodes is no longer satisfied,
     * otherwise reroutes to move shards off the dead node.
     */
    private void handleNodeFailure(final DiscoveryNode node, String reason) {
        if (lifecycleState() != Lifecycle.State.STARTED) {
            // not started, ignore a node failure
            return;
        }
        if (!master) {
            // nothing to do here...
            return;
        }
        clusterService.submitStateUpdateTask("zen-disco-node_failed(" + node + "), reason " + reason, Priority.IMMEDIATE, new ProcessedClusterStateUpdateTask() {
            @Override
            public ClusterState execute(ClusterState currentState) {
                DiscoveryNodes.Builder builder = DiscoveryNodes.builder(currentState.nodes())
                        .remove(node.id());
                latestDiscoNodes = builder.build();
                currentState = ClusterState.builder(currentState).nodes(latestDiscoNodes).build();
                // check if we have enough master nodes, if not, we need to move into joining the cluster again
                if (!electMaster.hasEnoughMasterNodes(currentState.nodes())) {
                    return rejoin(currentState, "not enough master nodes");
                }
                // eagerly run reroute to remove dead nodes from routing table
                RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(currentState).build());
                return ClusterState.builder(currentState).routingResult(routingResult).build();
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.error("unexpected failure during [{}]", t, source);
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                sendInitialStateEventIfNeeded();
            }
        });
    }

    /**
     * Applies a dynamic change of minimum_master_nodes. Non-masters only store
     * the new value; the master additionally re-checks the current cluster and
     * rejoins if the (possibly raised) requirement is no longer met.
     */
    private void handleMinimumMasterNodesChanged(final int minimumMasterNodes) {
        if (lifecycleState() != Lifecycle.State.STARTED) {
            // not started, ignore a node failure
            return;
        }
        final int prevMinimumMasterNode = ZenDiscovery.this.electMaster.minimumMasterNodes();
        ZenDiscovery.this.electMaster.minimumMasterNodes(minimumMasterNodes);
        if (!master) {
            // We only set the new value. If the master doesn't see enough nodes it will revoke it's mastership.
            return;
        }
        clusterService.submitStateUpdateTask("zen-disco-minimum_master_nodes_changed", Priority.IMMEDIATE, new ProcessedClusterStateUpdateTask() {
            @Override
            public ClusterState execute(ClusterState currentState) {
                // check if we have enough master nodes, if not, we need to move into joining the cluster again
                if (!electMaster.hasEnoughMasterNodes(currentState.nodes())) {
                    return rejoin(currentState, "not enough master nodes on change of minimum_master_nodes from [" + prevMinimumMasterNode + "] to [" + minimumMasterNodes + "]");
                }
                return currentState;
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.error("unexpected failure during [{}]", t, source);
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                sendInitialStateEventIfNeeded();
            }
        });
    }

    /**
     * Reacts to the master leaving or failing (follower-side). Removes the old
     * master from the node set and then either: rejoins (not enough master
     * nodes), becomes master itself (we win the local election), starts
     * monitoring the newly elected master, or rejoins if no master could be
     * elected at all.
     */
    private void handleMasterGone(final DiscoveryNode masterNode, final String reason) {
        if (lifecycleState() != Lifecycle.State.STARTED) {
            // not started, ignore a master failure
            return;
        }
        if (master) {
            // we might get this on both a master telling us shutting down, and then the disconnect failure
            return;
        }

        logger.info("master_left [{}], reason [{}]", masterNode, reason);

        clusterService.submitStateUpdateTask("zen-disco-master_failed (" + masterNode + ")", Priority.IMMEDIATE, new ProcessedClusterStateUpdateTask() {
            @Override
            public ClusterState execute(ClusterState currentState) {
                if (!masterNode.id().equals(currentState.nodes().masterNodeId())) {
                    // master got switched on us, no need to send anything
                    return currentState;
                }

                DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(currentState.nodes())
                        // make sure the old master node, which has failed, is not part of the nodes we publish
                        .remove(masterNode.id())
                        .masterNodeId(null).build();

                if (!electMaster.hasEnoughMasterNodes(discoveryNodes)) {
                    return rejoin(ClusterState.builder(currentState).nodes(discoveryNodes).build(), "not enough master nodes after master left (reason = " + reason + ")");
                }

                final DiscoveryNode electedMaster = electMaster.electMaster(discoveryNodes); // elect master
                if (localNode.equals(electedMaster)) {
                    // we won the election: switch fault detection from master-watching to nodes-watching
                    master = true;
                    masterFD.stop("got elected as new master since master left (reason = " + reason + ")");
                    nodesFD.start();
                    discoveryNodes = DiscoveryNodes.builder(discoveryNodes).masterNodeId(localNode.id()).build();
                    latestDiscoNodes = discoveryNodes;
                    return ClusterState.builder(currentState).nodes(latestDiscoNodes).build();
                } else {
                    nodesFD.stop();
                    if (electedMaster != null) {
                        // follow the new master and start monitoring it
                        discoveryNodes = DiscoveryNodes.builder(discoveryNodes).masterNodeId(electedMaster.id()).build();
                        masterFD.restart(electedMaster, "possible elected master since master left (reason = " + reason + ")");
                        latestDiscoNodes = discoveryNodes;
                        return ClusterState.builder(currentState)
                                .nodes(latestDiscoNodes)
                                .build();
                    } else {
                        return rejoin(ClusterState.builder(currentState).nodes(discoveryNodes).build(), "master_left and no other node elected to become master");
                    }
                }
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.error("unexpected failure during [{}]", t, source);
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                sendInitialStateEventIfNeeded();
            }
        });
    }

    /**
     * A received cluster state queued for processing, paired with the callback
     * used to acknowledge (or fail) it back to the publishing master.
     * {@code processed} is flipped when a later task already consumed it.
     */
    static class ProcessClusterState {
        final ClusterState clusterState;
        final PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed;
        volatile boolean processed;

        ProcessClusterState(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            this.clusterState = clusterState;
            this.newStateProcessed = newStateProcessed;
        }
    }

    // incoming cluster states from the master, drained (and de-duplicated by
    // version) inside the update task in handleNewClusterStateFromMaster
    private final BlockingQueue<ProcessClusterState> processNewClusterStates = ConcurrentCollections.newBlockingQueue();

    /**
     * Handles a cluster state published by a master. If we are ourselves a
     * master: a newer state means we should rejoin, an older one means the
     * sender is stale and is told to rejoin. Otherwise the state is queued and
     * an update task picks the highest-versioned queued state from the same
     * master, keeping routing table / metadata instances from the current
     * state when their versions are unchanged.
     */
    void handleNewClusterStateFromMaster(ClusterState newClusterState, final PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
        if (master) {
            final ClusterState newState = newClusterState;
            clusterService.submitStateUpdateTask("zen-disco-master_receive_cluster_state_from_another_master [" + newState.nodes().masterNode() + "]", Priority.URGENT, new ProcessedClusterStateUpdateTask() {
                @Override
                public ClusterState execute(ClusterState currentState) {
                    if (newState.version() > currentState.version()) {
                        logger.warn("received cluster state from [{}] which is also master but with a newer cluster_state, rejoining to cluster...", newState.nodes().masterNode());
                        return rejoin(currentState, "zen-disco-master_receive_cluster_state_from_another_master [" + newState.nodes().masterNode() + "]");
                    } else {
                        logger.warn("received cluster state from [{}] which is also master but with an older cluster_state, telling [{}] to rejoin the cluster", newState.nodes().masterNode(), newState.nodes().masterNode());
                        transportService.sendRequest(newState.nodes().masterNode(), RejoinClusterRequestHandler.ACTION, new RejoinClusterRequest(currentState.nodes().localNodeId()), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
                            @Override
                            public void handleException(TransportException exp) {
                                logger.warn("failed to send rejoin request to [{}]", exp, newState.nodes().masterNode());
                            }
                        });
                        return currentState;
                    }
                }

                @Override
                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                    newStateProcessed.onNewClusterStateProcessed();
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.error("unexpected failure during [{}]", t, source);
                    newStateProcessed.onNewClusterStateFailed(t);
                }
            });
        } else {
            if (newClusterState.nodes().localNode() == null) {
                logger.warn("received a cluster state from [{}] and not part of the cluster, should not happen", newClusterState.nodes().masterNode());
                newStateProcessed.onNewClusterStateFailed(new ElasticsearchIllegalStateException("received state from a node that is not part of the cluster"));
            } else {
                if (currentJoinThread != null) {
                    logger.debug("got a new state from master node, though we are already trying to rejoin the cluster");
                }

                final ProcessClusterState processClusterState = new ProcessClusterState(newClusterState, newStateProcessed);
                processNewClusterStates.add(processClusterState);

                clusterService.submitStateUpdateTask("zen-disco-receive(from master [" + newClusterState.nodes().masterNode() + "])", Priority.URGENT, new ProcessedClusterStateUpdateTask() {
                    @Override
                    public ClusterState execute(ClusterState currentState) {
                        // we already processed it in a previous event
                        if (processClusterState.processed) {
                            return currentState;
                        }

                        // TODO: once improvement that we can do is change the message structure to include version and masterNodeId
                        // at the start, this will allow us to keep the "compressed bytes" around, and only parse the first page
                        // to figure out if we need to use it or not, and only once we picked the latest one, parse the whole state


                        // try and get the state with the highest version out of all the ones with the same master node id
                        ProcessClusterState stateToProcess = processNewClusterStates.poll();
                        if (stateToProcess == null) {
                            return currentState;
                        }
                        stateToProcess.processed = true;
                        while (true) {
                            ProcessClusterState potentialState = processNewClusterStates.peek();
                            // nothing else in the queue, bail
                            if (potentialState == null) {
                                break;
                            }
                            // if its not from the same master, then bail
                            if (!Objects.equal(stateToProcess.clusterState.nodes().masterNodeId(), potentialState.clusterState.nodes().masterNodeId())) {
                                break;
                            }

                            // we are going to use it for sure, poll (remove) it
                            potentialState = processNewClusterStates.poll();
                            potentialState.processed = true;

                            if (potentialState.clusterState.version() > stateToProcess.clusterState.version()) {
                                // we found a new one
                                stateToProcess = potentialState;
                            }
                        }

                        ClusterState updatedState = stateToProcess.clusterState;

                        // if the new state has a smaller version, and it has the same master node, then no need to process it
                        if (updatedState.version() < currentState.version() && Objects.equal(updatedState.nodes().masterNodeId(), currentState.nodes().masterNodeId())) {
                            return currentState;
                        }

                        // we don't need to do this, since we ping the master, and get notified when it has moved from being a master
                        // because it doesn't have enough master nodes...
                        //if (!electMaster.hasEnoughMasterNodes(newState.nodes())) {
                        //    return disconnectFromCluster(newState, "not enough master nodes on new cluster state received from [" + newState.nodes().masterNode() + "]");
                        //}

                        latestDiscoNodes = updatedState.nodes();

                        // check to see that we monitor the correct master of the cluster
                        if (masterFD.masterNode() == null || !masterFD.masterNode().equals(latestDiscoNodes.masterNode())) {
                            masterFD.restart(latestDiscoNodes.masterNode(), "new cluster state received and we are monitoring the wrong master [" + masterFD.masterNode() + "]");
                        }

                        ClusterState.Builder builder = ClusterState.builder(updatedState);
                        // if the routing table did not change, use the original one
                        if (updatedState.routingTable().version() == currentState.routingTable().version()) {
                            builder.routingTable(currentState.routingTable());
                        }
                        // same for metadata
                        if (updatedState.metaData().version() == currentState.metaData().version()) {
                            builder.metaData(currentState.metaData());
                        } else {
                            // if its not the same version, only copy over new indices or ones that changed the version
                            MetaData.Builder metaDataBuilder = MetaData.builder(updatedState.metaData()).removeAllIndices();
                            for (IndexMetaData indexMetaData : updatedState.metaData()) {
                                IndexMetaData currentIndexMetaData = currentState.metaData().index(indexMetaData.index());
                                if (currentIndexMetaData == null || currentIndexMetaData.version() != indexMetaData.version()) {
                                    metaDataBuilder.put(indexMetaData, false);
                                } else {
                                    metaDataBuilder.put(currentIndexMetaData, false);
                                }
                            }
                            builder.metaData(metaDataBuilder);
                        }

                        return builder.build();
                    }

                    @Override
                    public void onFailure(String source, Throwable t) {
                        logger.error("unexpected failure during [{}]", t, source);
                        newStateProcessed.onNewClusterStateFailed(t);
                    }

                    @Override
                    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                        sendInitialStateEventIfNeeded();
                        newStateProcessed.onNewClusterStateProcessed();
                    }
                });
            }
        }
    }

    /**
     * Handles a join request from another node (master-only). Connects to the
     * joining node, asks it to validate the join against the current state,
     * then submits a task that adds it to the cluster — evicting any existing
     * node registered under the same transport address.
     *
     * @return the cluster state observed while handling the join
     * @throws ElasticsearchIllegalStateException if this node is not the master
     */
    private ClusterState handleJoinRequest(final DiscoveryNode node) {
        if (!master) {
            throw new ElasticsearchIllegalStateException("Node [" + localNode + "] not master for join request from [" + node + "]");
        }

        ClusterState state = clusterService.state();
        if (!transportService.addressSupported(node.address().getClass())) {
            // TODO, what should we do now? Maybe inform that node that its crap?
            logger.warn("received a wrong address type from [{}], ignoring...", node);
        } else {
            // try and connect to the node, if it fails, we can raise an exception back to the client...
            transportService.connectToNode(node);
            state = clusterService.state();

            // validate the join request, will throw a failure if it fails, which will get back to the
            // node calling the join request
            membership.sendValidateJoinRequestBlocking(node, state, pingTimeout);

            clusterService.submitStateUpdateTask("zen-disco-receive(join from node[" + node + "])", Priority.IMMEDIATE, new ClusterStateUpdateTask() {
                @Override
                public ClusterState execute(ClusterState currentState) {
                    if (currentState.nodes().nodeExists(node.id())) {
                        // the node already exists in the cluster
                        logger.warn("received a join request for an existing node [{}]", node);
                        // still send a new cluster state, so it will be re published and possibly update the other node
                        return ClusterState.builder(currentState).build();
                    }
                    DiscoveryNodes.Builder builder = DiscoveryNodes.builder(currentState.nodes());
                    for (DiscoveryNode existingNode : currentState.nodes()) {
                        if (node.address().equals(existingNode.address())) {
                            builder.remove(existingNode.id());
                            logger.warn("received join request from node [{}], but found existing node {} with same address, removing existing node", node, existingNode);
                        }
                    }
                    latestDiscoNodes = builder.build();
                    // add the new node now (will update latestDiscoNodes on publish)
                    return ClusterState.builder(currentState).nodes(latestDiscoNodes.newNode(node)).build();
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.error("unexpected failure during [{}]", t, source);
                }
            });
        }
        return state;
    }

    /**
     * Pings the cluster and decides who the master should be. Applies the
     * client/data-node election filters, bails out (returns null) when fewer
     * than minimum_master_nodes candidates are visible, and otherwise either
     * adopts a master already claimed by peers or tie-breaks among the
     * discovered candidates — returning the local node only if it wins.
     *
     * @return the master to join (possibly the local node), or {@code null}
     *         if no master can be determined yet
     */
    private DiscoveryNode findMaster() {
        ZenPing.PingResponse[] fullPingResponses = pingService.pingAndWait(pingTimeout);
        if (fullPingResponses == null) {
            logger.trace("No full ping responses");
            return null;
        }
        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder("full ping responses:");
            if (fullPingResponses.length == 0) {
                sb.append(" {none}");
            } else {
                for (ZenPing.PingResponse pingResponse : fullPingResponses) {
                    sb.append("\n\t--> ").append("target [").append(pingResponse.target()).append("], master [").append(pingResponse.master()).append("]");
                }
            }
            logger.trace(sb.toString());
        }

        // filter responses
        List<ZenPing.PingResponse> pingResponses = Lists.newArrayList();
        for (ZenPing.PingResponse pingResponse : fullPingResponses) {
            DiscoveryNode node = pingResponse.target();
            if (masterElectionFilterClientNodes && (node.clientNode() || (!node.masterNode() && !node.dataNode()))) {
                // filter out the client node, which is a client node, or also one that is not data and not master (effectively, client)
            } else if (masterElectionFilterDataNodes && (!node.masterNode() && node.dataNode())) {
                // filter out data node that is not also master
            } else {
                pingResponses.add(pingResponse);
            }
        }

        if (logger.isDebugEnabled()) {
            StringBuilder sb = new StringBuilder("filtered ping responses: (filter_client[").append(masterElectionFilterClientNodes).append("], filter_data[").append(masterElectionFilterDataNodes).append("])");
            if (pingResponses.isEmpty()) {
                sb.append(" {none}");
            } else {
                for (ZenPing.PingResponse pingResponse : pingResponses) {
                    sb.append("\n\t--> ").append("target [").append(pingResponse.target()).append("], master [").append(pingResponse.master()).append("]");
                }
            }
            logger.debug(sb.toString());
        }

        // masters already claimed by the responding nodes
        List<DiscoveryNode> pingMasters = newArrayList();
        for (ZenPing.PingResponse pingResponse : pingResponses) {
            if (pingResponse.master() != null) {
                pingMasters.add(pingResponse.master());
            }
        }

        // all election candidates, including ourselves
        Set<DiscoveryNode> possibleMasterNodes = Sets.newHashSet();
        possibleMasterNodes.add(localNode);
        for (ZenPing.PingResponse pingResponse : pingResponses) {
            possibleMasterNodes.add(pingResponse.target());
        }
        // if we don't have enough master nodes, we bail, even if we get a response that indicates
        // there is a master by other node, we don't see enough...
        if (!electMaster.hasEnoughMasterNodes(possibleMasterNodes)) {
            return null;
        }

        if (pingMasters.isEmpty()) {
            // lets tie break between discovered nodes
            DiscoveryNode electedMaster = electMaster.electMaster(possibleMasterNodes);
            if (localNode.equals(electedMaster)) {
                return localNode;
            }
        } else {
            DiscoveryNode electedMaster = electMaster.electMaster(pingMasters);
            if (electedMaster != null) {
                return electedMaster;
            }
        }
        return null;
    }

    /**
     * Leaves the current cluster and re-enters the join cycle: stops fault
     * detection, re-installs the no-master and state-not-recovered blocks,
     * clears routing table and metadata, resets the node list to just the
     * local node, and schedules an asynchronous re-join.
     *
     * @return the stripped-down cluster state to install
     */
    private ClusterState rejoin(ClusterState clusterState, String reason) {
        logger.warn(reason + ", current nodes: {}", clusterState.nodes());
        nodesFD.stop();
        masterFD.stop(reason);
        master = false;

        ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(clusterState.blocks())
                .addGlobalBlock(NO_MASTER_BLOCK)
                .addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)
                .build();

        // clear the routing table, we have no master, so we need to recreate the routing when we reform the cluster
        RoutingTable routingTable = RoutingTable.builder().build();
        // we also clean the metadata, since we are going to recover it if we become master
        MetaData metaData = MetaData.builder().build();

        // clean the nodes, we are now not connected to anybody, since we try and reform the cluster
        latestDiscoNodes = new DiscoveryNodes.Builder().put(localNode).localNodeId(localNode.id()).build();

        asyncJoinCluster();

        return ClusterState.builder(clusterState)
                .blocks(clusterBlocks)
                .nodes(latestDiscoNodes)
                .routingTable(routingTable)
                .metaData(metaData)
                .build();
    }

    /**
     * Notifies the registered initial-state listeners exactly once (guarded
     * by the {@code initialStateSent} compare-and-set).
     */
    private void sendInitialStateEventIfNeeded() {
        if (initialStateSent.compareAndSet(false, true)) {
            for (InitialStateDiscoveryListener listener : initialStateListeners) {
                listener.initialStateProcessed();
            }
        }
    }

    /** Bridges published cluster states into {@link #handleNewClusterStateFromMaster}. */
    private class NewClusterStateListener implements PublishClusterStateAction.NewClusterStateListener {

        @Override
        public void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
            handleNewClusterStateFromMaster(clusterState, newStateProcessed);
        }
    }

    /** Bridges membership join/leave callbacks into this class's handlers. */
    private class MembershipListener implements MembershipAction.MembershipListener {
        @Override
        public ClusterState onJoin(DiscoveryNode node) {
            return handleJoinRequest(node);
        }

        @Override
        public void onLeave(DiscoveryNode node) {
            handleLeaveRequest(node);
        }
    }

    /** Bridges nodes-fault-detection failures into {@link #handleNodeFailure}. */
    private class NodeFailureListener implements NodesFaultDetection.Listener {

        @Override
        public void onNodeFailure(DiscoveryNode node, String reason) {
            handleNodeFailure(node, reason);
        }
    }

    /**
     * Bridges master-fault-detection events: a detected master failure triggers
     * {@link #handleMasterGone}; a plain disconnect triggers a (best-effort,
     * non-blocking) re-join request to the same master.
     */
    private class MasterNodeFailureListener implements MasterFaultDetection.Listener {

        @Override
        public void onMasterFailure(DiscoveryNode masterNode, String reason) {
            handleMasterGone(masterNode, reason);
        }

        @Override
        public void onDisconnectedFromMaster() {
            // got disconnected from the master, send a join request
            DiscoveryNode masterNode = latestDiscoNodes.masterNode();
            try {
                membership.sendJoinRequest(masterNode, localNode);
            } catch (Exception e) {
                logger.warn("failed to send join request on disconnection from master [{}]", masterNode);
            }
        }
    }

    /**
     * Transport request asking a (stale) master to rejoin the cluster.
     * {@code fromNodeId} identifies the sender; serialized as an optional
     * string for wire compatibility.
     */
    static class RejoinClusterRequest extends TransportRequest {

        private String fromNodeId;

        RejoinClusterRequest(String fromNodeId) {
            this.fromNodeId = fromNodeId;
        }

        // no-arg constructor required for deserialization via newInstance()
        RejoinClusterRequest() {
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            fromNodeId = in.readOptionalString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeOptionalString(fromNodeId);
        }
    }

    /**
     * Handles incoming rejoin requests: acknowledges the sender first (inside
     * the update task, so a failure to respond is only logged) and then drops
     * out of the cluster via {@link #rejoin}.
     */
    class RejoinClusterRequestHandler extends BaseTransportRequestHandler<RejoinClusterRequest> {

        static final String ACTION = "discovery/zen/rejoin";

        @Override
        public RejoinClusterRequest newInstance() {
            return new RejoinClusterRequest();
        }

        @Override
        public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception {
            clusterService.submitStateUpdateTask("received a request to rejoin the cluster from [" + request.fromNodeId + "]", Priority.URGENT, new ClusterStateUpdateTask() {
                @Override
                public ClusterState execute(ClusterState currentState) {
                    try {
                        channel.sendResponse(TransportResponse.Empty.INSTANCE);
                    } catch (Exception e) {
                        logger.warn("failed to send response on rejoin cluster request handling", e);
                    }
                    return rejoin(currentState, "received a request to rejoin the cluster from [" + request.fromNodeId + "]");
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.error("unexpected failure during [{}]", t, source);
                }
            });
        }

        @Override
        public String executor() {
            return ThreadPool.Names.SAME;
        }
    }

    /**
     * Listens for dynamic setting changes and forwards updates of
     * minimum_master_nodes to {@link #handleMinimumMasterNodesChanged}.
     */
    class ApplySettings implements NodeSettingsService.Listener {
        @Override
        public void onRefreshSettings(Settings settings) {
            int minimumMasterNodes = settings.getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES,
                    ZenDiscovery.this.electMaster.minimumMasterNodes());
            if (minimumMasterNodes != ZenDiscovery.this.electMaster.minimumMasterNodes()) {
                logger.info("updating {} from [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES,
                        ZenDiscovery.this.electMaster.minimumMasterNodes(), minimumMasterNodes);
                handleMinimumMasterNodesChanged(minimumMasterNodes);
            }
        }
    }
}
package intrashipservice.ws.de.isservice_1_0_de;

import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.jws.soap.SOAPBinding;
import javax.xml.bind.annotation.XmlSeeAlso;
import intraship.ws.de.BookPickupRequest;
import intraship.ws.de.BookPickupResponse;
import intraship.ws.de.CancelPickupRequest;
import intraship.ws.de.CancelPickupResponse;
import intraship.ws.de.CreateShipmentDDRequest;
import intraship.ws.de.CreateShipmentResponse;
import intraship.ws.de.CreateShipmentTDRequest;
import intraship.ws.de.DeleteShipmentDDRequest;
import intraship.ws.de.DeleteShipmentResponse;
import intraship.ws.de.DeleteShipmentTDRequest;
import intraship.ws.de.DoManifestDDRequest;
import intraship.ws.de.DoManifestResponse;
import intraship.ws.de.DoManifestTDRequest;
import intraship.ws.de.GetExportDocDDRequest;
import intraship.ws.de.GetExportDocResponse;
import intraship.ws.de.GetExportDocTDRequest;
import intraship.ws.de.GetLabelDDRequest;
import intraship.ws.de.GetLabelResponse;
import intraship.ws.de.GetLabelTDRequest;
import intraship.ws.de.GetManifestDDRequest;
import intraship.ws.de.GetManifestDDResponse;
import intraship.ws.de.GetVersionResponse;
import intraship.ws.de.UpdateShipmentDDRequest;
import intraship.ws.de.UpdateShipmentResponse;

/**
 * SOAP port type of the Intraship shipping web service (document/bare
 * parameter style). Most operations come in TD/DD pairs with distinct
 * request types but a shared response type; the exact meaning of the
 * TD vs DD variants is not visible here — confirm against the WSDL.
 *
 * This class was generated by the JAX-WS RI.
 * JAX-WS RI 2.1.7-b01-
 * Generated source version: 2.1
 *
 * NOTE(review): generated code — regenerate from the WSDL instead of
 * editing by hand.
 */
@WebService(name = "ISWSServicePortType", targetNamespace = "http://de.ws.intrashipservice")
@SOAPBinding(parameterStyle = SOAPBinding.ParameterStyle.BARE)
@XmlSeeAlso({ intraship.ws.de.ObjectFactory.class, intrashipservice.ws.de.isservice_1_0_de.ObjectFactory.class })
public interface ISWSServicePortType {

    /**
     * Creates TD shipments.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.CreateShipmentResponse
     */
    @WebMethod(action = "urn:createShipmentTD")
    @WebResult(name = "CreateShipmentResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public CreateShipmentResponse createShipmentTD(
        @WebParam(name = "CreateShipmentTDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        CreateShipmentTDRequest part1);

    /**
     * Creates DD shipments.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.CreateShipmentResponse
     */
    @WebMethod(action = "urn:createShipmentDD")
    @WebResult(name = "CreateShipmentResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public CreateShipmentResponse createShipmentDD(
        @WebParam(name = "CreateShipmentDDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        CreateShipmentDDRequest part1);

    /**
     * Deletes the requested TD shipments.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.DeleteShipmentResponse
     */
    @WebMethod(action = "urn:deleteShipmentTD")
    @WebResult(name = "DeleteShipmentResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public DeleteShipmentResponse deleteShipmentTD(
        @WebParam(name = "DeleteShipmentTDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        DeleteShipmentTDRequest part1);

    /**
     * Deletes the requested DD shipments.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.DeleteShipmentResponse
     */
    @WebMethod(action = "urn:deleteShipmentDD")
    @WebResult(name = "DeleteShipmentResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public DeleteShipmentResponse deleteShipmentDD(
        @WebParam(name = "DeleteShipmentDDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        DeleteShipmentDDRequest part1);

    /**
     * Manifest the requested TD shipments.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.DoManifestResponse
     */
    @WebMethod(action = "urn:doManifestTD")
    @WebResult(name = "DoManifestResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public DoManifestResponse doManifestTD(
        @WebParam(name = "DoManifestTDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        DoManifestTDRequest part1);

    /**
     * Manifest the requested DD shipments.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.DoManifestResponse
     */
    @WebMethod(action = "urn:doManifestDD")
    @WebResult(name = "DoManifestResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public DoManifestResponse doManifestDD(
        @WebParam(name = "DoManifestDDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        DoManifestDDRequest part1);

    /**
     * Returns the request-url for getting a TD label.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.GetLabelResponse
     */
    @WebMethod(action = "urn:getLabelTD")
    @WebResult(name = "GetLabelResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public GetLabelResponse getLabelTD(
        @WebParam(name = "GetLabelTDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        GetLabelTDRequest part1);

    /**
     * Returns the request-url for getting a DD label.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.GetLabelResponse
     */
    @WebMethod(action = "urn:getLabelDD")
    @WebResult(name = "GetLabelResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public GetLabelResponse getLabelDD(
        @WebParam(name = "GetLabelDDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        GetLabelDDRequest part1);

    /**
     * Books a pickup order.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.BookPickupResponse
     */
    @WebMethod(action = "urn:bookPickup")
    @WebResult(name = "BookPickupResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public BookPickupResponse bookPickup(
        @WebParam(name = "BookPickupRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        BookPickupRequest part1);

    /**
     * Cancels a pickup order.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.CancelPickupResponse
     */
    @WebMethod(action = "urn:cancelPickup")
    @WebResult(name = "CancelPickupResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public CancelPickupResponse cancelPickup(
        @WebParam(name = "CancelPickupRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        CancelPickupRequest part1);

    /**
     * Returns the actual version of the implementation of the whole ISService webservice.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.GetVersionResponse
     */
    // NOTE(review): the Version parameter type has no import in this view;
    // presumably it is resolved from a cisbase package on the compile path
    // (matching the "http://dhl.de/webservice/cisbase" namespace) — confirm.
    @WebMethod(action = "urn:getVersion")
    @WebResult(name = "GetVersionResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public GetVersionResponse getVersion(
        @WebParam(name = "Version", targetNamespace = "http://dhl.de/webservice/cisbase", partName = "part1")
        Version part1);

    /**
     * Returns the request-url for getting a TD export document.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.GetExportDocResponse
     */
    @WebMethod(action = "urn:getExportDocTD")
    @WebResult(name = "GetExportDocResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public GetExportDocResponse getExportDocTD(
        @WebParam(name = "GetExportDocTDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        GetExportDocTDRequest part1);

    /**
     * Returns the request-url for getting a DD export document.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.GetExportDocResponse
     */
    @WebMethod(action = "urn:getExportDocDD")
    @WebResult(name = "GetExportDocResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public GetExportDocResponse getExportDocDD(
        @WebParam(name = "GetExportDocDDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        GetExportDocDDRequest part1);

    /**
     * Request the manifest.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.GetManifestDDResponse
     */
    @WebMethod(action = "urn:getManifestDD")
    @WebResult(name = "GetManifestDDResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public GetManifestDDResponse getManifestDD(
        @WebParam(name = "GetManifestDDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        GetManifestDDRequest part1);

    /**
     * Updates a DD shipment.
     *
     * @param part1
     * @return
     *     returns intraship.ws.de.UpdateShipmentResponse
     */
    @WebMethod(action = "urn:updateShipmentDD")
    @WebResult(name = "UpdateShipmentResponse", targetNamespace = "http://de.ws.intraship", partName = "part1")
    public UpdateShipmentResponse updateShipmentDD(
        @WebParam(name = "UpdateShipmentDDRequest", targetNamespace = "http://de.ws.intraship", partName = "part1")
        UpdateShipmentDDRequest part1);
}
package com.dsa.StackQueue; import java.util.Vector; import java.lang.Exception; // Class for implementing Priority Queue class MaxHeap { // vector to store heap elements private Vector<Integer> A; // constructor: use default initial capacity of vector public MaxHeap() { A = new Vector(); } // constructor: set custom initial capacity for vector public MaxHeap(int capacity) { A = new Vector(capacity); } // return parent of A.get(i) private int parent(int i) { return i / 2; } // return l child of A.get(i) private int left(int i) { return (2 * i + 1); } // return r child of A.get(i) private int right(int i) { return (2 * i + 2); } // swap values at two indexes void swap(int x, int y) { // swap with child having greater value Integer temp = A.get(x); A.setElementAt(A.get(y), x); A.setElementAt(temp, y); } // Recursive Heapify-down procedure: trigger by remove operation // Here the node at index i and its two direct children violates the heap property private void heapify_down(int i) { // get l and r child of node at index i int l = left(i); int r = right(i); int largest = i; // compare A.get(i) with its l and r child // and find largest value if (l < size() && A.get(l) > A.get(i)) { largest = l; } if (r < size() && A.get(r) > A.get(largest)) { largest = r; } if (largest != i) { // swap with child having greater value swap(i, largest); // call heapify-down on the child heapify_down(largest); } } // Recursive Heapify-up procedure: trigger by add operation private void heapify_up(int i) { // check if node at index i and its parent violates // the heap property if (i > 0 && A.get(parent(i)) < A.get(i)) { // swap the two if heap property is violated swap(i, parent(i)); // call Heapify-up on the parent heapify_up(parent(i)); } } // return size of the heap public int size() { return A.size(); } // check if heap is empty or not public Boolean isEmpty() { return A.isEmpty(); } // insert specified key into the heap public void add(Integer key) { // insert the new element to the 
end of the vector A.addElement(key); // get element index and call heapify-up procedure int index = size() - 1; heapify_up(index); } // function to remove and return element with highest priority // (present at root). It returns null if queue is empty public Integer poll() { try { // if heap is empty, throw an exception if (size() == 0) throw new Exception("Index is out of range (Heap underflow)"); // element with highest priority int root = A.firstElement(); // or A.get(0); // replace the root of the heap with the last element of the vector A.setElementAt(A.lastElement(), 0); A.remove(size()-1); // call heapify-down on root node heapify_down(0); // return root element return root; } // catch and print the exception catch (Exception ex) { System.out.println(ex); return null; } } // function to return, but does not remove, element with highest priority // (present at root). It returns null if queue is empty public Integer peek() { try { // if heap has no elements, throw an exception if (size() == 0) throw new Exception("Index out of range (Heap underflow)"); // else return the top (first) element return A.firstElement(); // or A.get(0); } // catch the exception and print it, and return null catch (Exception ex) { System.out.println(ex); return null; } } // function to remove all elements from priority queue public void clear() { System.out.print("Emptying queue: "); while (!A.isEmpty()) { System.out.print(poll() + " "); } System.out.println(); } // returns true if queue contains the specified element public Boolean contains(Integer i) { return A.contains(i); } // returns an array containing all elements in the queue public Integer[] toArray() { return A.toArray(new Integer[size()]); } // Program for Max Heap Implementation in Java public static void main (String[] args) { // create a Priority Queue of initial capacity 10 // Priority of an element is decided by element's value MaxHeap pq = new MaxHeap(10); // insert three integers pq.add(3); pq.add(2); pq.add(15); // 
print Priority Queue size System.out.println("Priority Queue Size is " + pq.size()); // search 2 in Priority Queue Integer searchKey = 2; if (pq.contains(searchKey)) System.out.println("Priority Queue contains " + searchKey + "\n"); // empty queue pq.clear(); if (pq.isEmpty()) System.out.println("Queue is Empty"); System.out.println("\nCalling remove operation on an empty heap"); System.out.println("Element with highest priority is " + pq.poll()); System.out.println("\nCalling peek operation on an empty heap"); System.out.println("Element with highest priority is " + pq.peek()); // again insert three integers pq.add(5); pq.add(4); pq.add(45); // construct array containing all elements present in the queue Integer[] I = pq.toArray(); System.out.print("\nPrinting array: "); for (int i : I) System.out.print(i + " "); System.out.println("\n\nElement with highest priority is " + pq.poll()); System.out.println("Element with highest priority is " + pq.peek()); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.aws.s3;

import com.amazonaws.regions.Regions;
import org.apache.camel.BindToRegistry;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;

/**
 * Unit tests for aws-s3 endpoint URI parsing: verifies that S3Component
 * maps URI options onto the endpoint configuration, applies defaults, and
 * rejects incomplete credential setups.
 */
public class S3ComponentConfigurationTest extends CamelTestSupport {

    // Mock S3 client registered under the name the endpoint URIs reference
    // via amazonS3Client=#amazonS3Client.
    @BindToRegistry("amazonS3Client")
    AmazonS3ClientMock clientMock = new AmazonS3ClientMock();

    // Client plus explicit credentials: defaults (deleteAfterRead=true,
    // maxMessagesPerPoll=10, includeBody=true) must hold for unset options.
    @Test
    public void createEndpointWithMinimalS3ClientConfiguration() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?amazonS3Client=#amazonS3Client&accessKey=xxx&secretKey=yyy");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertNotNull(endpoint.getConfiguration().getAmazonS3Client());
        assertNull(endpoint.getConfiguration().getRegion());
        assertTrue(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(10, endpoint.getMaxMessagesPerPoll());
        assertNull(endpoint.getConfiguration().getPolicy());
        assertNull(endpoint.getConfiguration().getPrefix());
        assertTrue(endpoint.getConfiguration().isIncludeBody());
    }

    // Credentials only, no registered client: the client reference stays
    // null on the configuration.
    @Test
    public void createEndpointWithMinimalCredentialsConfiguration() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?accessKey=xxx&secretKey=yyy");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertNull(endpoint.getConfiguration().getAmazonS3Client());
        assertNull(endpoint.getConfiguration().getRegion());
        assertTrue(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(10, endpoint.getMaxMessagesPerPoll());
        assertNull(endpoint.getConfiguration().getPolicy());
        assertNull(endpoint.getConfiguration().getPrefix());
        assertTrue(endpoint.getConfiguration().isIncludeBody());
    }

    // An S3 ARN in the URI is reduced to the plain bucket name.
    @Test
    public void createEndpointWithMinimalArnConfiguration() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://arn:aws:s3:::MyBucket?amazonS3Client=#amazonS3Client&accessKey=xxx&secretKey=yyy");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
    }

    // Registered client only, no keys: the exact mock instance is wired in
    // and the credential fields stay null.
    @Test
    public void createEndpointWithMinimalConfigurationAndProvidedClient() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?amazonS3Client=#amazonS3Client");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertNull(endpoint.getConfiguration().getAccessKey());
        assertNull(endpoint.getConfiguration().getSecretKey());
        assertSame(clientMock, endpoint.getConfiguration().getAmazonS3Client());
        assertNull(endpoint.getConfiguration().getRegion());
        assertTrue(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(10, endpoint.getMaxMessagesPerPoll());
        assertNull(endpoint.getConfiguration().getPolicy());
        assertNull(endpoint.getConfiguration().getPrefix());
        assertTrue(endpoint.getConfiguration().isIncludeBody());
    }

    // Every supported option at once; the policy parameter is a URL-encoded
    // JSON document that must round-trip to the decoded string below.
    @Test
    public void createEndpointWithMaximalConfiguration() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component
            .createEndpoint("aws-s3://MyBucket?amazonS3Client=#amazonS3Client"
                            + "&accessKey=xxx&secretKey=yyy&region=us-west-1&deleteAfterRead=false&maxMessagesPerPoll=1&policy=%7B%22Version%22%3A%222008-10-17%22,%22Id%22%3A%22Policy4324355464%22,"
                            + "%22Statement%22%3A%5B%7B%22Sid%22%3A%22Stmt456464646477%22,%22Action%22%3A%5B%22s3%3AGetObject%22%5D,%22Effect%22%3A%22Allow%22,"
                            + "%22Resource%22%3A%5B%22arn%3Aaws%3As3%3A%3A%3Amybucket/some/path/*%22%5D,%22Principal%22%3A%7B%22AWS%22%3A%5B%22*%22%5D%7D%7D%5D%7D&storageClass=REDUCED_REDUNDANCY"
                            + "&prefix=confidential&includeBody=false");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertNotNull(endpoint.getConfiguration().getAmazonS3Client());
        assertEquals("us-west-1", endpoint.getConfiguration().getRegion());
        assertFalse(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(1, endpoint.getMaxMessagesPerPoll());
        assertEquals("{\"Version\":\"2008-10-17\",\"Id\":\"Policy4324355464\",\"Statement\":[{\"Sid\":\"Stmt456464646477\",\"Action\":[\"s3:GetObject\"],\"Effect\":\"Allow\",\"Resource\":"
                     + "[\"arn:aws:s3:::mybucket/some/path/*\"],\"Principal\":{\"AWS\":[\"*\"]}}]}", endpoint.getConfiguration().getPolicy());
        assertEquals("REDUCED_REDUNDANCY", endpoint.getConfiguration().getStorageClass());
        assertEquals("confidential", endpoint.getConfiguration().getPrefix());
        assertFalse(endpoint.getConfiguration().isIncludeBody());
    }

    // Blank bucket segment must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void createEndpointWithoutBucketName() throws Exception {
        S3Component component = new S3Component(context);
        component.createEndpoint("aws-s3:// ");
    }

    // Secret key without access key must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void createEndpointWithoutAccessKeyConfiguration() throws Exception {
        S3Component component = new S3Component(context);
        component.createEndpoint("aws-s3://MyTopic?secretKey=yyy");
    }

    // Access key without secret key must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void createEndpointWithoutSecretKeyConfiguration() throws Exception {
        S3Component component = new S3Component(context);
        component.createEndpoint("aws-s3://MyTopic?accessKey=xxx");
    }

    // Credentials set on the component are inherited by the endpoint.
    @Test
    public void createEndpointWithComponentElements() throws Exception {
        S3Component component = new S3Component(context);
        component.setAccessKey("XXX");
        component.setSecretKey("YYY");
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("XXX", endpoint.getConfiguration().getAccessKey());
        assertEquals("YYY", endpoint.getConfiguration().getSecretKey());
    }

    // Endpoint URI options override the component-level values.
    @Test
    public void createEndpointWithComponentAndEndpointElements() throws Exception {
        S3Component component = new S3Component(context);
        component.setAccessKey("XXX");
        component.setSecretKey("YYY");
        component.setRegion(Regions.US_WEST_1.toString());
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?accessKey=xxxxxx&secretKey=yyyyy&region=US_EAST_1");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
        assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
    }

    @Test
    public void createEndpointWithChunkedEncoding() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?chunkedEncodingDisabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertTrue(endpoint.getConfiguration().isChunkedEncodingDisabled());
    }

    @Test
    public void createEndpointWithAccelerateMode() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?accelerateModeEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertTrue(endpoint.getConfiguration().isAccelerateModeEnabled());
    }

    @Test
    public void createEndpointWithDualstack() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?dualstackEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertTrue(endpoint.getConfiguration().isDualstackEnabled());
    }

    @Test
    public void createEndpointWithPayloadSigning() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?payloadSigningEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertTrue(endpoint.getConfiguration().isPayloadSigningEnabled());
    }

    @Test
    public void createEndpointWithForceGlobalBucketAccess() throws Exception {
        S3Component component = new S3Component(context);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?forceGlobalBucketAccessEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1");
        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertTrue(endpoint.getConfiguration().isForceGlobalBucketAccessEnabled());
    }

    // No credentials at all is acceptable when a client instance is
    // supplied; the test passes as long as no exception is thrown.
    @Test
    public void createEndpointWithoutSecretKeyAndAccessKeyConfiguration() throws Exception {
        S3Component component = new S3Component(context);
        component.createEndpoint("aws-s3://MyTopic?amazonS3Client=#amazonS3Client");
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.federation.store;

import static org.apache.hadoop.hdfs.server.federation.FederationTestUtils.verifyException;
import static org.apache.hadoop.hdfs.server.federation.store.FederationStateStoreTestUtils.clearRecords;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.hadoop.hdfs.server.federation.router.FederationUtil;
import org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys;
import org.apache.hadoop.hdfs.server.federation.router.RouterServiceState;
import org.apache.hadoop.hdfs.server.federation.store.protocol.GetRouterRegistrationRequest;
import org.apache.hadoop.hdfs.server.federation.store.protocol.GetRouterRegistrationsRequest;
import org.apache.hadoop.hdfs.server.federation.store.protocol.RouterHeartbeatRequest;
import org.apache.hadoop.hdfs.server.federation.store.records.RouterState;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Test the basic {@link StateStoreService} {@link RouterStore} functionality.
 */
public class TestStateStoreRouterState extends TestStateStoreBase {

  // Shared record-store handle, resolved lazily from the state store in
  // setup() once per JVM.
  private static RouterStore routerStore;

  @BeforeClass
  public static void create() {
    // Reduce expirations to 2 seconds so the expiry/deletion tests below can
    // observe the transitions within their waitFor timeouts.
    getConf().setTimeDuration(
        RBFConfigKeys.FEDERATION_STORE_ROUTER_EXPIRATION_MS,
        2, TimeUnit.SECONDS);
    // Set deletion time to 2 seconds
    getConf().setTimeDuration(
        RBFConfigKeys.FEDERATION_STORE_ROUTER_EXPIRATION_DELETION_MS,
        2, TimeUnit.SECONDS);
  }

  @Before
  public void setup() throws IOException, InterruptedException {
    if (routerStore == null) {
      routerStore =
          getStateStore().getRegisteredRecordStore(RouterStore.class);
    }
    // Clear router status registrations so each test starts from an empty
    // record set.
    assertTrue(clearRecords(getStateStore(), RouterState.class));
  }

  // Every store-backed API must fail with StateStoreUnavailableException
  // once the driver is closed.
  @Test
  public void testStateStoreDisconnected() throws Exception {
    // Close the data store driver
    getStateStore().closeDriver();
    assertEquals(false, getStateStore().isDriverReady());

    // Test all APIs that access the data store to ensure they throw the
    // correct exception.
    GetRouterRegistrationRequest getSingleRequest =
        GetRouterRegistrationRequest.newInstance();
    verifyException(routerStore, "getRouterRegistration",
        StateStoreUnavailableException.class,
        new Class[] {GetRouterRegistrationRequest.class},
        new Object[] {getSingleRequest});

    GetRouterRegistrationsRequest getRequest =
        GetRouterRegistrationsRequest.newInstance();
    // loadCache(true) forces a refresh attempt against the closed driver
    routerStore.loadCache(true);
    verifyException(routerStore, "getRouterRegistrations",
        StateStoreUnavailableException.class,
        new Class[] {GetRouterRegistrationsRequest.class},
        new Object[] {getRequest});

    RouterHeartbeatRequest hbRequest = RouterHeartbeatRequest.newInstance(
        RouterState.newInstance("test", 0, RouterServiceState.UNINITIALIZED));
    verifyException(routerStore, "routerHeartbeat",
        StateStoreUnavailableException.class,
        new Class[] {RouterHeartbeatRequest.class},
        new Object[] {hbRequest});
  }

  //
  // Router
  //

  // A heartbeat stores a RUNNING registration that can be read back with
  // all its fields intact.
  @Test
  public void testUpdateRouterStatus()
      throws IllegalStateException, IOException {

    long dateStarted = Time.now();
    String address = "testaddress";

    // Set
    RouterHeartbeatRequest request = RouterHeartbeatRequest.newInstance(
        RouterState.newInstance(
            address, dateStarted, RouterServiceState.RUNNING));
    assertTrue(routerStore.routerHeartbeat(request).getStatus());

    // Verify
    GetRouterRegistrationRequest getRequest =
        GetRouterRegistrationRequest.newInstance(address);
    RouterState record =
        routerStore.getRouterRegistration(getRequest).getRouter();
    assertNotNull(record);
    assertEquals(RouterServiceState.RUNNING, record.getStatus());
    assertEquals(address, record.getAddress());
    assertEquals(FederationUtil.getCompileInfo(), record.getCompileInfo());
    // Build version may vary a bit
    assertFalse(record.getVersion().isEmpty());
  }

  // Without further heartbeats a registration becomes EXPIRED and is then
  // deleted (both intervals set to 2 seconds in create()); a new heartbeat
  // in between resets it to RUNNING.
  @Test
  public void testRouterStateExpiredAndDeletion()
      throws IOException, InterruptedException, TimeoutException {

    long dateStarted = Time.now();
    String address = "testaddress";

    RouterHeartbeatRequest request = RouterHeartbeatRequest.newInstance(
        RouterState.newInstance(
            address, dateStarted, RouterServiceState.RUNNING));
    // Set
    assertTrue(routerStore.routerHeartbeat(request).getStatus());

    // Verify
    GetRouterRegistrationRequest getRequest =
        GetRouterRegistrationRequest.newInstance(address);
    RouterState record =
        routerStore.getRouterRegistration(getRequest).getRouter();
    assertNotNull(record);

    // Wait past expiration (set in conf to 2 seconds)
    GenericTestUtils.waitFor(() -> {
      try {
        RouterState routerState = routerStore
            .getRouterRegistration(getRequest).getRouter();
        // Verify entry is expired
        return routerState.getStatus() == RouterServiceState.EXPIRED;
      } catch (IOException e) {
        return false;
      }
    }, 100, 3000);

    // Heartbeat again and this shouldn't be EXPIRED at this point
    assertTrue(routerStore.routerHeartbeat(request).getStatus());
    RouterState r = routerStore.getRouterRegistration(getRequest).getRouter();
    assertEquals(RouterServiceState.RUNNING, r.getStatus());

    // Wait past expiration (set in conf to 2 seconds)
    GenericTestUtils.waitFor(() -> {
      try {
        RouterState routerState = routerStore
            .getRouterRegistration(getRequest).getRouter();
        // Verify entry is expired
        return routerState.getStatus() == RouterServiceState.EXPIRED;
      } catch (IOException e) {
        return false;
      }
    }, 100, 3000);

    // Wait deletion (set in conf to 2 seconds)
    GenericTestUtils.waitFor(() -> {
      try {
        RouterState routerState = routerStore
            .getRouterRegistration(getRequest).getRouter();
        // Verify entry is deleted
        return routerState.getStatus() == null;
      } catch (IOException e) {
        return false;
      }
    }, 100, 3000);
  }

  // Two heartbeats from different addresses produce two registrations,
  // both retrievable (and sortable) through getRouterRegistrations.
  @Test
  public void testGetAllRouterStates()
      throws StateStoreUnavailableException, IOException {

    // Set 2 entries
    RouterHeartbeatRequest heartbeatRequest1 =
        RouterHeartbeatRequest.newInstance(
            RouterState.newInstance(
                "testaddress1", Time.now(), RouterServiceState.RUNNING));
    assertTrue(routerStore.routerHeartbeat(heartbeatRequest1).getStatus());

    RouterHeartbeatRequest heartbeatRequest2 =
        RouterHeartbeatRequest.newInstance(
            RouterState.newInstance(
                "testaddress2", Time.now(), RouterServiceState.RUNNING));
    assertTrue(routerStore.routerHeartbeat(heartbeatRequest2).getStatus());

    // Verify
    routerStore.loadCache(true);
    GetRouterRegistrationsRequest request =
        GetRouterRegistrationsRequest.newInstance();
    List<RouterState> entries =
        routerStore.getRouterRegistrations(request).getRouters();
    assertEquals(2, entries.size());
    Collections.sort(entries);
    assertEquals("testaddress1", entries.get(0).getAddress());
    assertEquals("testaddress2", entries.get(1).getAddress());
    assertEquals(RouterServiceState.RUNNING, entries.get(0).getStatus());
    assertEquals(RouterServiceState.RUNNING, entries.get(1).getStatus());
  }
}
/*
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.impl.neomedia.jmfext.media.protocol.directshow;

import java.awt.*;
import java.io.*;
import java.util.*;
import java.util.List;

import javax.media.*;
import javax.media.control.*;

import org.jitsi.impl.neomedia.codec.*;
import org.jitsi.impl.neomedia.codec.video.*;
import org.jitsi.impl.neomedia.control.*;
import org.jitsi.impl.neomedia.device.*;
import org.jitsi.impl.neomedia.jmfext.media.protocol.*;
import org.jitsi.utils.logging.*;

/**
 * Implements a <tt>CaptureDevice</tt> and a <tt>DataSource</tt> using
 * DirectShow.
 *
 * @author Lyubomir Marinov
 * @author Sebastien Vincent
 */
public class DataSource
    extends AbstractVideoPushBufferCaptureDevice
{
    /**
     * The map of DirectShow pixel formats to FFmpeg pixel formats which allows
     * converting between the two. The entries are stored as consecutive
     * (DirectShow, FFmpeg) pairs: even indices hold the DirectShow constant,
     * the following odd index holds the matching FFmpeg constant.
     */
    private static final int[] DS_TO_FFMPEG_PIX_FMTS
        = new int[]
                {
                    DSFormat.RGB24, FFmpeg.PIX_FMT_RGB24,
                    DSFormat.RGB32, FFmpeg.PIX_FMT_RGB32,
                    DSFormat.ARGB32, FFmpeg.PIX_FMT_ARGB,
                    DSFormat.YUY2, FFmpeg.PIX_FMT_YUYV422,
                    DSFormat.MJPG, FFmpeg.PIX_FMT_YUVJ422P,
                    DSFormat.UYVY, FFmpeg.PIX_FMT_UYVY422,
                    DSFormat.Y411, FFmpeg.PIX_FMT_UYYVYY411,
                    DSFormat.Y41P, FFmpeg.PIX_FMT_YUV411P,
                    DSFormat.NV12, FFmpeg.PIX_FMT_NV12,
                    DSFormat.I420, FFmpeg.PIX_FMT_YUV420P
                };

    /**
     * The <tt>Logger</tt> used by the <tt>DataSource</tt> class and its
     * instances for logging output.
     */
    private static final Logger logger = Logger.getLogger(DataSource.class);

    /**
     * Gets the DirectShow pixel format matching a specific FFmpeg pixel
     * format.
     *
     * @param ffmpegPixFmt the FFmpeg pixel format to get the matching
     * DirectShow pixel format of
     * @return the DirectShow pixel format matching the specified FFmpeg pixel
     * format or <tt>-1</tt> if no match is found
     */
    public static int getDSPixFmt(int ffmpegPixFmt)
    {
        for (int i = 0; i < DS_TO_FFMPEG_PIX_FMTS.length; i += 2)
            if (DS_TO_FFMPEG_PIX_FMTS[i + 1] == ffmpegPixFmt)
                return DS_TO_FFMPEG_PIX_FMTS[i];
        return -1;
    }

    /**
     * Gets the FFmpeg pixel format matching a specific DirectShow pixel
     * format.
     *
     * @param dsPixFmt the DirectShow pixel format to get the matching FFmpeg
     * pixel format of
     * @return the FFmpeg pixel format matching the specified DirectShow pixel
     * format or {@link FFmpeg#PIX_FMT_NONE} if no match is found
     */
    public static int getFFmpegPixFmt(int dsPixFmt)
    {
        for (int i = 0; i < DS_TO_FFMPEG_PIX_FMTS.length; i += 2)
            if (DS_TO_FFMPEG_PIX_FMTS[i] == dsPixFmt)
                return DS_TO_FFMPEG_PIX_FMTS[i + 1];
        return FFmpeg.PIX_FMT_NONE;
    }

    /**
     * DirectShow capture device.
     */
    private DSCaptureDevice device;

    /**
     * DirectShow manager.
     */
    private DSManager manager;

    /**
     * Constructor.
     */
    public DataSource()
    {
        this(null);
    }

    /**
     * Initializes a new <tt>DataSource</tt> instance from a specific
     * <tt>MediaLocator</tt>.
     *
     * @param locator the <tt>MediaLocator</tt> to create the new instance from
     */
    public DataSource(MediaLocator locator)
    {
        super(locator);
    }

    /**
     * Creates a new <tt>FrameRateControl</tt> instance which is to allow the
     * getting and setting of the frame rate of this
     * <tt>AbstractVideoPushBufferCaptureDevice</tt>.
     *
     * @return a new <tt>FrameRateControl</tt> instance which is to allow the
     * getting and setting of the frame rate of this
     * <tt>AbstractVideoPushBufferCaptureDevice</tt>
     * @see AbstractPushBufferCaptureDevice#createFrameRateControl()
     */
    @Override
    protected FrameRateControl createFrameRateControl()
    {
        return
            new FrameRateControlAdapter()
            {
                /**
                 * The output frame rate of this
                 * <tt>AbstractVideoPushBufferCaptureDevice</tt>. A negative
                 * value means "not specified".
                 */
                private float frameRate = -1;

                @Override
                public float getFrameRate()
                {
                    return frameRate;
                }

                @Override
                public float setFrameRate(float frameRate)
                {
                    this.frameRate = frameRate;
                    return this.frameRate;
                }
            };
    }

    /**
     * Create a new <tt>PushBufferStream</tt> which is to be at a specific
     * zero-based index in the list of streams of this
     * <tt>PushBufferDataSource</tt>. The <tt>Format</tt>-related information of
     * the new instance is to be abstracted by a specific
     * <tt>FormatControl</tt>.
     *
     * @param streamIndex the zero-based index of the <tt>PushBufferStream</tt>
     * in the list of streams of this <tt>PushBufferDataSource</tt>
     * @param formatControl the <tt>FormatControl</tt> which is to abstract the
     * <tt>Format</tt>-related information of the new instance
     * @return a new <tt>PushBufferStream</tt> which is to be at the specified
     * <tt>streamIndex</tt> in the list of streams of this
     * <tt>PushBufferDataSource</tt> and which has its <tt>Format</tt>-related
     * information abstracted by the specified <tt>formatControl</tt>
     * @see AbstractPushBufferCaptureDevice#createStream(int, FormatControl)
     */
    @Override
    protected DirectShowStream createStream(
            int streamIndex,
            FormatControl formatControl)
    {
        DirectShowStream stream = new DirectShowStream(this, formatControl);

        if (logger.isTraceEnabled())
        {
            DSCaptureDevice device = this.device;

            if (device != null)
            {
                DSFormat[] supportedFormats = device.getSupportedFormats();

                for (DSFormat supportedFormat : supportedFormats)
                {
                    logger.trace(
                            "width= " + supportedFormat.getWidth()
                                + ", height= " + supportedFormat.getHeight()
                                + ", pixelFormat= "
                                + supportedFormat.getPixelFormat());
                }
            }
        }

        return stream;
    }

    /**
     * Opens a connection to the media source specified by the
     * <tt>MediaLocator</tt> of this <tt>DataSource</tt>.
     *
     * @throws IOException if anything goes wrong while opening the connection
     * to the media source specified by the <tt>MediaLocator</tt> of this
     * <tt>DataSource</tt>
     * @see AbstractPushBufferCaptureDevice#doConnect()
     */
    @Override
    protected void doConnect()
        throws IOException
    {
        super.doConnect();

        boolean connected = false;

        try
        {
            DSCaptureDevice device = getDevice();

            device.connect();

            synchronized (getStreamSyncRoot())
            {
                for (Object stream : getStreams())
                    ((DirectShowStream) stream).setDevice(device);
            }
            connected = true;
        }
        finally
        {
            if (!connected)
            {
                /*
                 * The connect attempt has failed but it may have been
                 * successful up to the point of failure thus partially
                 * modifying the state. The disconnect procedure is prepared to
                 * deal with a partially modified state and will restore it to
                 * its pristine form.
                 */
                doDisconnect();
            }
        }
    }

    /**
     * Closes the connection to the media source specified by the
     * <tt>MediaLocator</tt> of this <tt>DataSource</tt>.
     *
     * @see AbstractPushBufferCaptureDevice#doDisconnect()
     */
    @Override
    protected void doDisconnect()
    {
        try
        {
            synchronized (getStreamSyncRoot())
            {
                for (Object stream : getStreams())
                {
                    try
                    {
                        ((DirectShowStream) stream).setDevice(null);
                    }
                    catch (IOException ioe)
                    {
                        logger.error(
                                "Failed to disconnect "
                                    + stream.getClass().getName(),
                                ioe);
                    }
                }
            }
        }
        finally
        {
            if (device != null)
            {
                device.disconnect();
                device = null;
            }
            if (manager != null)
            {
                manager.dispose();
                manager = null;
            }

            super.doDisconnect();
        }
    }

    /**
     * Gets the <tt>DSCaptureDevice</tt> identified by the
     * <tt>MediaLocator</tt> of this <tt>DataSource</tt>, lazily initializing
     * it (and the associated <tt>DSManager</tt>) if necessary. The device is
     * matched by name against the remainder of the locator.
     *
     * @return the <tt>DSCaptureDevice</tt> matching the <tt>MediaLocator</tt>
     * of this <tt>DataSource</tt> or <tt>null</tt> if no device with that
     * name is found
     * @throws IllegalStateException if the <tt>MediaLocator</tt> of this
     * <tt>DataSource</tt> is absent, uses a protocol other than DirectShow or
     * has no remainder
     */
    private DSCaptureDevice getDevice()
    {
        DSCaptureDevice device = this.device;

        if (device == null)
        {
            MediaLocator locator = getLocator();

            if (locator == null)
                throw new IllegalStateException("locator");
            if (!locator.getProtocol().equalsIgnoreCase(
                    DeviceSystem.LOCATOR_PROTOCOL_DIRECTSHOW))
                throw new IllegalStateException("locator.protocol");

            String remainder = locator.getRemainder();

            if (remainder == null)
                throw new IllegalStateException("locator.remainder");

            if (manager == null)
                manager = new DSManager();
            try
            {
                /*
                 * Find the device specified by the locator using matching by
                 * name.
                 */
                for (DSCaptureDevice d : manager.getCaptureDevices())
                {
                    if (remainder.equals(d.getName()))
                    {
                        device = d;
                        break;
                    }
                }
                if (device != null)
                    this.device = device;
            }
            finally
            {
                // Do not keep an unused DSManager around if no device was
                // resolved for the locator.
                if (this.device == null)
                {
                    manager.dispose();
                    manager = null;
                }
            }
        }
        return device;
    }

    /**
     * Gets the <tt>Format</tt>s which are to be reported by a
     * <tt>FormatControl</tt> as supported formats for a
     * <tt>PushBufferStream</tt> at a specific zero-based index in the list of
     * streams of this <tt>PushBufferDataSource</tt>.
     *
     * @param streamIndex the zero-based index of the <tt>PushBufferStream</tt>
     * for which the specified <tt>FormatControl</tt> is to report the list of
     * supported <tt>Format</tt>s
     * @return an array of <tt>Format</tt>s to be reported by a
     * <tt>FormatControl</tt> as the supported formats for the
     * <tt>PushBufferStream</tt> at the specified <tt>streamIndex</tt> in the
     * list of streams of this <tt>PushBufferDataSource</tt>
     * @see AbstractPushBufferCaptureDevice#getSupportedFormats(int)
     */
    @Override
    protected Format[] getSupportedFormats(int streamIndex)
    {
        DSCaptureDevice device = this.device;

        if (device == null)
            return super.getSupportedFormats(streamIndex);

        DSFormat[] deviceFmts = device.getSupportedFormats();
        List<Format> fmts = new ArrayList<Format>(deviceFmts.length);

        for (DSFormat deviceFmt : deviceFmts)
        {
            Dimension size
                = new Dimension(deviceFmt.getWidth(), deviceFmt.getHeight());
            int devicePixFmt = deviceFmt.getPixelFormat();
            int pixFmt = getFFmpegPixFmt(devicePixFmt);

            // Skip device formats that have no FFmpeg equivalent since they
            // cannot be represented as an AVFrameFormat.
            if (pixFmt != FFmpeg.PIX_FMT_NONE)
            {
                fmts.add(
                        new AVFrameFormat(
                                size,
                                Format.NOT_SPECIFIED,
                                pixFmt,
                                devicePixFmt));
            }
        }
        return fmts.toArray(new Format[fmts.size()]);
    }

    /**
     * Attempts to set the <tt>Format</tt> to be reported by the
     * <tt>FormatControl</tt> of a <tt>PushBufferStream</tt> at a specific
     * zero-based index in the list of streams of this
     * <tt>PushBufferDataSource</tt>. The <tt>PushBufferStream</tt> does not
     * exist at the time of the attempt to set its <tt>Format</tt>.
     *
     * @param streamIndex the zero-based index of the <tt>PushBufferStream</tt>
     * the <tt>Format</tt> of which is to be set
     * @param oldValue the last-known <tt>Format</tt> for the
     * <tt>PushBufferStream</tt> at the specified <tt>streamIndex</tt>
     * @param newValue the <tt>Format</tt> which is to be set
     * @return the <tt>Format</tt> to be reported by the <tt>FormatControl</tt>
     * of the <tt>PushBufferStream</tt> at the specified <tt>streamIndex</tt>
     * in the list of streams of this <tt>PushBufferStream</tt> or <tt>null</tt>
     * if the attempt to set the <tt>Format</tt> did not success and any
     * last-known <tt>Format</tt> is to be left in effect
     * @see AbstractPushBufferCaptureDevice#setFormat(int, Format, Format)
     */
    @Override
    protected Format setFormat(
            int streamIndex,
            Format oldValue, Format newValue)
    {
        // This DataSource supports setFormat.
        return
            DirectShowStream.isSupportedFormat(newValue)
                ? newValue
                : super.setFormat(streamIndex, oldValue, newValue);
    }
}
/* $Id: RulesBase.java 299475 2004-06-26 17:41:32Z remm $
 *
 * Copyright 2001-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


package org.apache.tomcat.util.digester;


import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;


/**
 * <p>Default implementation of the <code>Rules</code> interface providing
 * the standard rule matching behavior. It may also serve as a base class
 * for specialized <code>Rules</code> implementations.</p>
 *
 * <p>Two kinds of matching patterns are supported:</p>
 * <ul>
 * <li><em>Exact Match</em> - A pattern "a/b/c" exactly matches a
 *     <code>&lt;c&gt;</code> element, nested inside a <code>&lt;b&gt;</code>
 *     element, which is nested inside an <code>&lt;a&gt;</code> element.</li>
 * <li><em>Tail Match</em> - A pattern "&#42;/a/b" matches a
 *     <code>&lt;b&gt;</code> element, nested inside an <code>&lt;a&gt;</code>
 *     element, at any nesting depth.</li>
 * </ul>
 */

public class RulesBase implements Rules {


    // ----------------------------------------------------- Instance Variables


    /**
     * Registered Rule instances keyed by matching pattern. Each value is a
     * List holding the Rules for that pattern in registration order.
     */
    protected HashMap cache = new HashMap();


    /**
     * The Digester instance with which this Rules instance is associated.
     */
    protected Digester digester = null;


    /**
     * The namespace URI applied to subsequently added <code>Rule</code>
     * objects, or <code>null</code> to match independent of namespace.
     */
    protected String namespaceURI = null;


    /**
     * All registered Rule instances, in registration order.
     */
    protected ArrayList rules = new ArrayList();


    // ------------------------------------------------------------- Properties


    /**
     * Return the Digester instance with which this Rules instance is
     * associated.
     */
    public Digester getDigester() {

        return this.digester;

    }


    /**
     * Set the Digester instance with which this Rules instance is associated,
     * propagating it to every already-registered Rule.
     *
     * @param digester The newly associated Digester instance
     */
    public void setDigester(Digester digester) {

        this.digester = digester;
        for (Iterator items = rules.iterator(); items.hasNext();) {
            ((Rule) items.next()).setDigester(digester);
        }

    }


    /**
     * Return the namespace URI that will be applied to all subsequently
     * added <code>Rule</code> objects.
     */
    public String getNamespaceURI() {

        return this.namespaceURI;

    }


    /**
     * Set the namespace URI that will be applied to all subsequently
     * added <code>Rule</code> objects.
     *
     * @param namespaceURI Namespace URI that must match on all
     *  subsequently added rules, or <code>null</code> for matching
     *  regardless of the current namespace URI
     */
    public void setNamespaceURI(String namespaceURI) {

        this.namespaceURI = namespaceURI;

    }


    // --------------------------------------------------------- Public Methods


    /**
     * Register a new Rule instance matching the specified pattern.
     *
     * @param pattern Nesting pattern to be matched for this Rule
     * @param rule Rule instance to be registered
     */
    public void add(String pattern, Rule rule) {

        // Tolerate a trailing '/' that users accidentally append to patterns
        int length = pattern.length();
        if (length > 1 && pattern.endsWith("/")) {
            pattern = pattern.substring(0, length - 1);
        }

        List registered = (List) cache.get(pattern);
        if (registered == null) {
            registered = new ArrayList();
            cache.put(pattern, registered);
        }
        registered.add(rule);
        rules.add(rule);

        // Propagate the current digester and namespace, if any
        if (this.digester != null) {
            rule.setDigester(this.digester);
        }
        if (this.namespaceURI != null) {
            rule.setNamespaceURI(this.namespaceURI);
        }

    }


    /**
     * Clear all existing Rule instance registrations.
     */
    public void clear() {

        cache.clear();
        rules.clear();

    }


    /**
     * Return a List of all registered Rule instances that match the specified
     * nesting pattern, or a zero-length List if there are no matches. If more
     * than one Rule instance matches, they <strong>must</strong> be returned
     * in the order originally registered through the <code>add()</code>
     * method.
     *
     * @param pattern Nesting pattern to be matched
     *
     * @deprecated Call match(namespaceURI,pattern) instead.
     */
    public List match(String pattern) {

        return match(null, pattern);

    }


    /**
     * Return a List of all registered Rule instances that match the specified
     * nesting pattern, or a zero-length List if there are no matches. If more
     * than one Rule instance matches, they <strong>must</strong> be returned
     * in the order originally registered through the <code>add()</code>
     * method.
     *
     * @param namespaceURI Namespace URI for which to select matching rules,
     *  or <code>null</code> to match regardless of namespace URI
     * @param pattern Nesting pattern to be matched
     */
    public List match(String namespaceURI, String pattern) {

        // Try an exact match first; fall back to the longest tail match
        List matches = lookup(namespaceURI, pattern);
        if ((matches == null) || (matches.size() < 1)) {
            List tail = tailMatch(namespaceURI, pattern);
            if (tail != null) {
                matches = tail;
            }
        }
        if (matches == null) {
            matches = new ArrayList();
        }
        return matches;

    }


    /**
     * Return a List of all registered Rule instances, or a zero-length List
     * if there are no registered Rule instances. If more than one Rule
     * instance has been registered, they <strong>must</strong> be returned
     * in the order originally registered through the <code>add()</code>
     * method.
     */
    public List rules() {

        return this.rules;

    }


    // ------------------------------------------------------ Protected Methods


    /**
     * Return a List of Rule instances for the specified pattern that also
     * match the specified namespace URI (if any). If there are no such
     * rules, return <code>null</code>.
     *
     * @param namespaceURI Namespace URI to match, or <code>null</code> to
     *  select matching rules regardless of namespace URI
     * @param pattern Pattern to be matched
     */
    protected List lookup(String namespaceURI, String pattern) {

        List candidates = (List) this.cache.get(pattern);
        if (candidates == null) {
            return null;
        }
        // No namespace restriction - return the cached list directly
        if ((namespaceURI == null) || (namespaceURI.length() == 0)) {
            return candidates;
        }

        // Keep only Rules that match on the specified namespace URI
        ArrayList selected = new ArrayList();
        for (Iterator items = candidates.iterator(); items.hasNext();) {
            Rule item = (Rule) items.next();
            if ((item.getNamespaceURI() == null)
                    || namespaceURI.equals(item.getNamespaceURI())) {
                selected.add(item);
            }
        }
        return selected;

    }


    // -------------------------------------------------------- Private Methods


    /**
     * Find the rules registered under the longest (most discriminant)
     * "&#42;/..." pattern whose tail matches the given pattern, or
     * <code>null</code> if no such pattern is registered.
     *
     * @param namespaceURI Namespace URI to match, or <code>null</code>
     * @param pattern Nesting pattern to be matched
     */
    private List tailMatch(String namespaceURI, String pattern) {

        String bestKey = "";
        List best = null;
        for (Iterator keys = this.cache.keySet().iterator(); keys.hasNext();) {
            String key = (String) keys.next();
            if (!key.startsWith("*/")) {
                continue;
            }
            boolean hit = pattern.equals(key.substring(2))
                    || pattern.endsWith(key.substring(1));
            if (hit && (key.length() > bestKey.length())) {
                best = lookup(namespaceURI, key);
                bestKey = key;
            }
        }
        return best;

    }


}
/* * Licensed to Elastic Search and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Elastic Search licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.metadata; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.ElasticSearchIllegalStateException; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.node.DiscoveryNodeFilters; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Preconditions; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import java.io.IOException; import java.util.*; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.*; import static org.elasticsearch.common.settings.ImmutableSettings.*; /** * */ public class IndexMetaData { public interface Custom { String type(); interface Factory<T extends Custom> { String type(); T readFrom(StreamInput in) throws IOException; void writeTo(T customIndexMetaData, StreamOutput out) throws IOException; T fromMap(Map<String, Object> map) throws IOException; T fromXContent(XContentParser parser) throws IOException; void toXContent(T customIndexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException; /** * Merges from first to second, with first being more important, i.e., if something exists in first and second, * first will prevail. */ T merge(T first, T second); } } public static Map<String, Custom.Factory> customFactories = new HashMap<String, Custom.Factory>(); static { // register non plugin custom metadata registerFactory(IndexWarmersMetaData.TYPE, IndexWarmersMetaData.FACTORY); } /** * Register a custom index meta data factory. Make sure to call it from a static block. 
*/ public static void registerFactory(String type, Custom.Factory factory) { customFactories.put(type, factory); } @Nullable public static <T extends Custom> Custom.Factory<T> lookupFactory(String type) { return customFactories.get(type); } public static <T extends Custom> Custom.Factory<T> lookupFactorySafe(String type) throws ElasticSearchIllegalArgumentException { Custom.Factory<T> factory = customFactories.get(type); if (factory == null) { throw new ElasticSearchIllegalArgumentException("No custom index metadata factoy registered for type [" + type + "]"); } return factory; } private static ImmutableSet<String> dynamicSettings = ImmutableSet.<String>builder() .add(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) .add(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS) .add(IndexMetaData.SETTING_READ_ONLY) .add(IndexMetaData.SETTING_BLOCKS_READ) .add(IndexMetaData.SETTING_BLOCKS_WRITE) .add(IndexMetaData.SETTING_BLOCKS_METADATA) .build(); public static final ClusterBlock INDEX_READ_ONLY_BLOCK = new ClusterBlock(5, "index read-only (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA); public static final ClusterBlock INDEX_READ_BLOCK = new ClusterBlock(7, "index read (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.READ); public static final ClusterBlock INDEX_WRITE_BLOCK = new ClusterBlock(8, "index write (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.WRITE); public static final ClusterBlock INDEX_METADATA_BLOCK = new ClusterBlock(9, "index metadata (api)", false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.METADATA); public static ImmutableSet<String> dynamicSettings() { return dynamicSettings; } public static boolean hasDynamicSetting(String key) { for (String dynamicSetting : dynamicSettings) { if (Regex.simpleMatch(dynamicSetting, key)) { return true; } } return false; } public static synchronized void addDynamicSettings(String... 
settings) { HashSet<String> updatedSettings = new HashSet<String>(dynamicSettings); updatedSettings.addAll(Arrays.asList(settings)); dynamicSettings = ImmutableSet.copyOf(updatedSettings); } public static enum State { OPEN((byte) 0), CLOSE((byte) 1); private final byte id; State(byte id) { this.id = id; } public byte id() { return this.id; } public static State fromId(byte id) { if (id == 0) { return OPEN; } else if (id == 1) { return CLOSE; } throw new ElasticSearchIllegalStateException("No state match for id [" + id + "]"); } public static State fromString(String state) { if ("open".equals(state)) { return OPEN; } else if ("close".equals(state)) { return CLOSE; } throw new ElasticSearchIllegalStateException("No state match for [" + state + "]"); } } public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas"; public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; public static final String SETTING_READ_ONLY = "index.blocks.read_only"; public static final String SETTING_BLOCKS_READ = "index.blocks.read"; public static final String SETTING_BLOCKS_WRITE = "index.blocks.write"; public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; public static final String SETTING_VERSION_CREATED = "index.version.created"; private final String index; private final long version; private final State state; private final ImmutableMap<String, AliasMetaData> aliases; private final Settings settings; private final ImmutableMap<String, MappingMetaData> mappings; private final ImmutableMap<String, Custom> customs; private transient final int totalNumberOfShards; private final DiscoveryNodeFilters requireFilters; private final DiscoveryNodeFilters includeFilters; private final DiscoveryNodeFilters excludeFilters; private IndexMetaData(String index, long version, State state, Settings settings, ImmutableMap<String, MappingMetaData> 
mappings, ImmutableMap<String, AliasMetaData> aliases, ImmutableMap<String, Custom> customs) { Preconditions.checkArgument(settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1) != -1, "must specify numberOfShards for index [" + index + "]"); Preconditions.checkArgument(settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1) != -1, "must specify numberOfReplicas for index [" + index + "]"); this.index = index; this.version = version; this.state = state; this.settings = settings; this.mappings = mappings; this.customs = customs; this.totalNumberOfShards = numberOfShards() * (numberOfReplicas() + 1); this.aliases = aliases; ImmutableMap<String, String> requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap(); if (requireMap.isEmpty()) { requireFilters = null; } else { requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); } ImmutableMap<String, String> includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap(); if (includeMap.isEmpty()) { includeFilters = null; } else { includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); } ImmutableMap<String, String> excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap(); if (excludeMap.isEmpty()) { excludeFilters = null; } else { excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); } } public String index() { return index; } public String getIndex() { return index(); } public long version() { return this.version; } public long getVersion() { return this.version; } public State state() { return this.state; } public State getState() { return state(); } public int numberOfShards() { return settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1); } public int getNumberOfShards() { return numberOfShards(); } public int numberOfReplicas() { return settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1); } public int getNumberOfReplicas() { return numberOfReplicas(); } public int totalNumberOfShards() { return totalNumberOfShards; } 
public int getTotalNumberOfShards() { return totalNumberOfShards(); } public Settings settings() { return settings; } public Settings getSettings() { return settings(); } public ImmutableMap<String, AliasMetaData> aliases() { return this.aliases; } public ImmutableMap<String, AliasMetaData> getAliases() { return aliases(); } public ImmutableMap<String, MappingMetaData> mappings() { return mappings; } public ImmutableMap<String, MappingMetaData> getMappings() { return mappings(); } @Nullable public MappingMetaData mapping(String mappingType) { return mappings.get(mappingType); } /** * Sometimes, the default mapping exists and an actual mapping is not created yet (introduced), * in this case, we want to return the default mapping in case it has some default mapping definitions. * <p/> * Note, once the mapping type is introduced, the default mapping is applied on the actual typed MappingMetaData, * setting its routing, timestamp, and so on if needed. */ @Nullable public MappingMetaData mappingOrDefault(String mappingType) { MappingMetaData mapping = mappings.get(mappingType); if (mapping != null) { return mapping; } return mappings.get(MapperService.DEFAULT_MAPPING); } public ImmutableMap<String, Custom> customs() { return this.customs; } public ImmutableMap<String, Custom> getCustoms() { return this.customs; } public <T extends Custom> T custom(String type) { return (T) customs.get(type); } @Nullable public DiscoveryNodeFilters requireFilters() { return requireFilters; } @Nullable public DiscoveryNodeFilters includeFilters() { return includeFilters; } @Nullable public DiscoveryNodeFilters excludeFilters() { return excludeFilters; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; IndexMetaData that = (IndexMetaData) o; if (!aliases.equals(that.aliases)) return false; if (!index.equals(that.index)) return false; if (!mappings.equals(that.mappings)) return false; if 
(!settings.equals(that.settings)) return false; if (state != that.state) return false; return true; } @Override public int hashCode() { int result = index.hashCode(); result = 31 * result + state.hashCode(); result = 31 * result + aliases.hashCode(); result = 31 * result + settings.hashCode(); result = 31 * result + mappings.hashCode(); return result; } public static Builder builder(String index) { return new Builder(index); } public static Builder newIndexMetaDataBuilder(String index) { return new Builder(index); } public static Builder newIndexMetaDataBuilder(IndexMetaData indexMetaData) { return new Builder(indexMetaData); } public static class Builder { private String index; private State state = State.OPEN; private long version = 1; private Settings settings = ImmutableSettings.Builder.EMPTY_SETTINGS; private MapBuilder<String, MappingMetaData> mappings = MapBuilder.newMapBuilder(); private MapBuilder<String, AliasMetaData> aliases = MapBuilder.newMapBuilder(); private MapBuilder<String, Custom> customs = MapBuilder.newMapBuilder(); public Builder(String index) { this.index = index; } public Builder(IndexMetaData indexMetaData) { this(indexMetaData.index()); settings(indexMetaData.settings()); mappings.putAll(indexMetaData.mappings); aliases.putAll(indexMetaData.aliases); customs.putAll(indexMetaData.customs); this.state = indexMetaData.state; this.version = indexMetaData.version; } public String index() { return index; } public Builder index(String index) { this.index = index; return this; } public Builder numberOfShards(int numberOfShards) { settings = settingsBuilder().put(settings).put(SETTING_NUMBER_OF_SHARDS, numberOfShards).build(); return this; } public int numberOfShards() { return settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1); } public Builder numberOfReplicas(int numberOfReplicas) { settings = settingsBuilder().put(settings).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas).build(); return this; } public int numberOfReplicas() { return 
settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1); }

        // ---- Builder fluent setters -------------------------------------------------

        /** Replaces the index settings with the (built) contents of the given settings builder. */
        public Builder settings(Settings.Builder settings) {
            this.settings = settings.build();
            return this;
        }

        /** Replaces the index settings wholesale. */
        public Builder settings(Settings settings) {
            this.settings = settings;
            return this;
        }

        /** Removes the mapping registered under the given type, if any. */
        public Builder removeMapping(String mappingType) {
            mappings.remove(mappingType);
            return this;
        }

        /**
         * Parses the given mapping source (JSON/YAML/SMILE auto-detected from the content)
         * and registers it under the given type.
         *
         * @throws IOException if the source cannot be parsed
         */
        public Builder putMapping(String type, String source) throws IOException {
            XContentParser parser = XContentFactory.xContent(source).createParser(source);
            try {
                putMapping(new MappingMetaData(type, parser.mapOrdered()));
            } finally {
                // always release the parser, even when parsing fails
                parser.close();
            }
            return this;
        }

        /** Registers an already-built mapping, keyed by its own type. */
        public Builder putMapping(MappingMetaData mappingMd) {
            mappings.put(mappingMd.type(), mappingMd);
            return this;
        }

        /** Sets the index state (e.g. open/close). */
        public Builder state(State state) {
            this.state = state;
            return this;
        }

        /** Registers an alias, keyed by its alias name. */
        public Builder putAlias(AliasMetaData aliasMetaData) {
            aliases.put(aliasMetaData.alias(), aliasMetaData);
            return this;
        }

        /** Convenience overload: builds the alias metadata before registering it. */
        public Builder putAlias(AliasMetaData.Builder aliasMetaData) {
            aliases.put(aliasMetaData.alias(), aliasMetaData.build());
            return this;
        }

        // NOTE(review): method name is misspelled ("removerAlias" rather than "removeAlias");
        // it is part of the public Builder API, so renaming would break existing callers.
        /** Removes the alias registered under the given name, if any. */
        public Builder removerAlias(String alias) {
            aliases.remove(alias);
            return this;
        }

        /** Registers a custom index-metadata section under the given type key. */
        public Builder putCustom(String type, Custom customIndexMetaData) {
            this.customs.put(type, customIndexMetaData);
            return this;
        }

        /** Removes the custom section registered under the given type key, if any. */
        public Builder removeCustom(String type) {
            this.customs.remove(type);
            return this;
        }

        /** Returns the custom section registered under the given type key, or null. */
        public Custom getCustom(String type) {
            return this.customs.get(type);
        }

        /** Returns the index metadata version currently set on this builder. */
        public long version() {
            return this.version;
        }

        /** Sets the index metadata version. */
        public Builder version(long version) {
            this.version = version;
            return this;
        }

        /**
         * Materializes an immutable {@code IndexMetaData} from this builder.
         * Also migrates legacy "index.aliases" settings entries into proper alias
         * metadata, and propagates the default mapping onto every other mapping.
         */
        public IndexMetaData build() {
            MapBuilder<String, AliasMetaData> tmpAliases = aliases;
            Settings tmpSettings = settings;

            // For backward compatibility
            String[] legacyAliases = settings.getAsArray("index.aliases");
            if (legacyAliases.length > 0) {
                // legacy entries first, then explicitly-registered aliases
                // (so explicit registrations win on name collisions)
                tmpAliases = MapBuilder.newMapBuilder();
                for (String alias : legacyAliases) {
                    AliasMetaData aliasMd = AliasMetaData.newAliasMetaDataBuilder(alias).build();
                    tmpAliases.put(alias, aliasMd);
                }
                tmpAliases.putAll(aliases.immutableMap());
                // Remove index.aliases from settings once they are migrated to the new data structure
                tmpSettings = ImmutableSettings.settingsBuilder().put(settings).putArray("index.aliases").build();
            }

            // update default mapping on the MappingMetaData
            if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
                MappingMetaData defaultMapping = mappings.get(MapperService.DEFAULT_MAPPING);
                for (MappingMetaData mappingMetaData : mappings.map().values()) {
                    mappingMetaData.updateDefaultMapping(defaultMapping);
                }
            }

            return new IndexMetaData(index, version, state, tmpSettings, mappings.immutableMap(), tmpAliases.immutableMap(), customs.immutableMap());
        }

        /**
         * Serializes the given index metadata as XContent under an object named after
         * the index. With param "binary"=true, mapping sources are emitted in their
         * compressed binary form instead of as nested maps.
         */
        public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
            builder.startObject(indexMetaData.index(), XContentBuilder.FieldCaseConversion.NONE);

            builder.field("version", indexMetaData.version());
            builder.field("state", indexMetaData.state().toString().toLowerCase(Locale.ENGLISH));

            boolean binary = params.paramAsBoolean("binary", false);

            builder.startObject("settings");
            for (Map.Entry<String, String> entry : indexMetaData.settings().getAsMap().entrySet()) {
                builder.field(entry.getKey(), entry.getValue());
            }
            builder.endObject();

            builder.startArray("mappings");
            for (Map.Entry<String, MappingMetaData> entry : indexMetaData.mappings().entrySet()) {
                if (binary) {
                    builder.value(entry.getValue().source().compressed());
                } else {
                    // re-parse the stored source so it can be embedded as a structured map
                    byte[] data = entry.getValue().source().uncompressed();
                    XContentParser parser = XContentFactory.xContent(data).createParser(data);
                    Map<String, Object> mapping = parser.mapOrdered();
                    parser.close();
                    builder.map(mapping);
                }
            }
            builder.endArray();

            // each custom section is rendered by its registered factory
            for (Map.Entry<String, Custom> entry : indexMetaData.customs().entrySet()) {
                builder.startObject(entry.getKey(), XContentBuilder.FieldCaseConversion.NONE);
                lookupFactorySafe(entry.getKey()).toXContent(entry.getValue(), builder, params);
                builder.endObject();
            }

            builder.startObject("aliases");
            for (AliasMetaData alias : indexMetaData.aliases().values()) {
                AliasMetaData.Builder.toXContent(alias, builder, params);
            }
            builder.endObject();

            builder.endObject();
        }

        /**
         * Parses index metadata from XContent; the inverse of {@link #toXContent}.
         * Accepts mappings either as an object (type -> source) or as an array of
         * single-entry maps / embedded binary values. Unknown object sections are
         * dispatched to a registered custom factory, or skipped when none matches.
         */
        public static IndexMetaData fromXContent(XContentParser parser) throws IOException {
            if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                parser.nextToken();
            }
            // current name is the index name wrapping the whole object
            Builder builder = new Builder(parser.currentName());

            String currentFieldName = null;
            // first nextToken() steps into the index object; the loop then walks its fields
            XContentParser.Token token = parser.nextToken();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if ("settings".equals(currentFieldName)) {
                        builder.settings(ImmutableSettings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
                    } else if ("mappings".equals(currentFieldName)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            if (token == XContentParser.Token.FIELD_NAME) {
                                currentFieldName = parser.currentName();
                            } else if (token == XContentParser.Token.START_OBJECT) {
                                String mappingType = currentFieldName;
                                Map<String, Object> mappingSource = MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
                                builder.putMapping(new MappingMetaData(mappingType, mappingSource));
                            }
                        }
                    } else if ("aliases".equals(currentFieldName)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            builder.putAlias(AliasMetaData.Builder.fromXContent(parser));
                        }
                    } else {
                        // check if its a custom index metadata
                        Custom.Factory<Custom> factory = lookupFactory(currentFieldName);
                        if (factory == null) {
                            //TODO warn
                            parser.skipChildren();
                        } else {
                            builder.putCustom(factory.type(), factory.fromXContent(parser));
                        }
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if ("mappings".equals(currentFieldName)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                                // binary form written when toXContent ran with "binary"=true
                                builder.putMapping(new MappingMetaData(new CompressedString(parser.binaryValue())));
                            } else {
                                Map<String, Object> mapping = parser.mapOrdered();
                                if (mapping.size() == 1) {
                                    // single-entry map: key is the mapping type
                                    String mappingType = mapping.keySet().iterator().next();
                                    builder.putMapping(new MappingMetaData(mappingType, mapping));
                                }
                            }
                        }
                    }
                } else if (token.isValue()) {
                    if ("state".equals(currentFieldName)) {
                        builder.state(State.fromString(parser.text()));
                    } else if ("version".equals(currentFieldName)) {
                        builder.version(parser.longValue());
                    }
                }
            }
            return builder.build();
        }

        /**
         * Reads index metadata from the binary stream.
         * Field order MUST mirror {@link #writeTo} exactly:
         * index name, version, state id, settings, mappings, aliases, customs.
         */
        public static IndexMetaData readFrom(StreamInput in) throws IOException {
            Builder builder = new Builder(in.readString());
            builder.version(in.readLong());
            builder.state(State.fromId(in.readByte()));
            builder.settings(readSettingsFromStream(in));
            int mappingsSize = in.readVInt();
            for (int i = 0; i < mappingsSize; i++) {
                MappingMetaData mappingMd = MappingMetaData.readFrom(in);
                builder.putMapping(mappingMd);
            }
            int aliasesSize = in.readVInt();
            for (int i = 0; i < aliasesSize; i++) {
                AliasMetaData aliasMd = AliasMetaData.Builder.readFrom(in);
                builder.putAlias(aliasMd);
            }
            int customSize = in.readVInt();
            for (int i = 0; i < customSize; i++) {
                String type = in.readString();
                Custom customIndexMetaData = lookupFactorySafe(type).readFrom(in);
                builder.putCustom(type, customIndexMetaData);
            }
            return builder.build();
        }

        /**
         * Writes index metadata to the binary stream.
         * Field order MUST mirror {@link #readFrom} exactly.
         */
        public static void writeTo(IndexMetaData indexMetaData, StreamOutput out) throws IOException {
            out.writeString(indexMetaData.index());
            out.writeLong(indexMetaData.version());
            out.writeByte(indexMetaData.state().id());
            writeSettingsToStream(indexMetaData.settings(), out);
            out.writeVInt(indexMetaData.mappings().size());
            for (MappingMetaData mappingMd : indexMetaData.mappings().values()) {
                MappingMetaData.writeTo(mappingMd, out);
            }
            out.writeVInt(indexMetaData.aliases().size());
            for (AliasMetaData aliasMd : indexMetaData.aliases().values()) {
                AliasMetaData.Builder.writeTo(aliasMd, out);
            }
            out.writeVInt(indexMetaData.customs().size());
            for (Map.Entry<String, Custom> entry : indexMetaData.customs().entrySet()) {
                out.writeString(entry.getKey());
                lookupFactorySafe(entry.getKey()).writeTo(entry.getValue(), out);
            }
        }
    }
}
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.webapp; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.lang.management.ManagementFactory; import java.lang.reflect.Constructor; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import javax.management.MBeanInfo; import javax.management.MBeanServer; import javax.management.ObjectName; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.apache.log4j.jmx.HierarchyDynamicMBean; import org.apache.velocity.app.VelocityEngine; import org.apache.velocity.runtime.log.Log4JLogChute; import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader; import org.apache.velocity.runtime.resource.loader.JarResourceLoader; import org.joda.time.DateTimeZone; import org.mortbay.jetty.Connector; import org.mortbay.jetty.Server; import org.mortbay.jetty.bio.SocketConnector; import org.mortbay.jetty.security.SslSocketConnector; import org.mortbay.jetty.servlet.Context; import org.mortbay.jetty.servlet.DefaultServlet; import org.mortbay.jetty.servlet.ServletHolder; import org.mortbay.thread.QueuedThreadPool; import azkaban.alert.Alerter; import 
azkaban.database.AzkabanDatabaseSetup; import azkaban.executor.ExecutorManager; import azkaban.executor.JdbcExecutorLoader; import azkaban.jmx.JmxExecutorManager; import azkaban.jmx.JmxJettyServer; import azkaban.jmx.JmxTriggerManager; import azkaban.project.JdbcProjectLoader; import azkaban.project.ProjectManager; import azkaban.scheduler.ScheduleLoader; import azkaban.scheduler.ScheduleManager; import azkaban.scheduler.TriggerBasedScheduleLoader; import azkaban.server.AzkabanServer; import azkaban.server.ServerConstants; import azkaban.server.session.SessionCache; import azkaban.trigger.JdbcTriggerLoader; import azkaban.trigger.TriggerLoader; import azkaban.trigger.TriggerManager; import azkaban.trigger.TriggerManagerException; import azkaban.trigger.builtin.BasicTimeChecker; import azkaban.trigger.builtin.CreateTriggerAction; import azkaban.trigger.builtin.ExecuteFlowAction; import azkaban.trigger.builtin.ExecutionChecker; import azkaban.trigger.builtin.KillExecutionAction; import azkaban.trigger.builtin.SlaAlertAction; import azkaban.trigger.builtin.SlaChecker; import azkaban.user.UserManager; import azkaban.user.XmlUserManager; import azkaban.utils.Emailer; import azkaban.utils.FileIOUtils; import azkaban.utils.Props; import azkaban.utils.PropsUtils; import azkaban.utils.Utils; import azkaban.webapp.plugin.PluginRegistry; import azkaban.webapp.plugin.TriggerPlugin; import azkaban.webapp.plugin.ViewerPlugin; import azkaban.webapp.servlet.AbstractAzkabanServlet; import azkaban.webapp.servlet.ExecutorServlet; import azkaban.webapp.servlet.HistoryServlet; import azkaban.webapp.servlet.IndexRedirectServlet; import azkaban.webapp.servlet.JMXHttpServlet; import azkaban.webapp.servlet.ProjectManagerServlet; import azkaban.webapp.servlet.ProjectServlet; import azkaban.webapp.servlet.ScheduleServlet; import azkaban.webapp.servlet.StatsServlet; import azkaban.webapp.servlet.TriggerManagerServlet; import com.linkedin.restli.server.RestliServlet; /** * The Azkaban Jetty 
server class * * Global azkaban properties for setup. All of them are optional unless * otherwise marked: azkaban.name - The displayed name of this instance. * azkaban.label - Short descriptor of this Azkaban instance. azkaban.color - * Theme color azkaban.temp.dir - Temp dir used by Azkaban for various file * uses. web.resource.dir - The directory that contains the static web files. * default.timezone.id - The timezone code. I.E. America/Los Angeles * * user.manager.class - The UserManager class used for the user manager. Default * is XmlUserManager. project.manager.class - The ProjectManager to load * projects project.global.properties - The base properties inherited by all * projects and jobs * * jetty.maxThreads - # of threads for jetty jetty.ssl.port - The ssl port used * for sessionizing. jetty.keystore - Jetty keystore . jetty.keypassword - Jetty * keystore password jetty.truststore - Jetty truststore jetty.trustpassword - * Jetty truststore password */ public class AzkabanWebServer extends AzkabanServer { private static final String AZKABAN_ACCESS_LOGGER_NAME = "azkaban.webapp.servlet.LoginAbstractAzkabanServlet"; private static final Logger logger = Logger.getLogger(AzkabanWebServer.class); public static final String AZKABAN_HOME = "AZKABAN_HOME"; public static final String DEFAULT_CONF_PATH = "conf"; public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties"; public static final String AZKABAN_PRIVATE_PROPERTIES_FILE = "azkaban.private.properties"; private static final int MAX_FORM_CONTENT_SIZE = 10 * 1024 * 1024; private static final int MAX_HEADER_BUFFER_SIZE = 10 * 1024 * 1024; private static AzkabanWebServer app; private static final String DEFAULT_TIMEZONE_ID = "default.timezone.id"; private static final int DEFAULT_PORT_NUMBER = 8081; private static final int DEFAULT_SSL_PORT_NUMBER = 8443; private static final int DEFAULT_THREAD_NUMBER = 20; private static final String VELOCITY_DEV_MODE_PARAM = "velocity.dev.mode"; private static 
final String USER_MANAGER_CLASS_PARAM = "user.manager.class"; private static final String DEFAULT_STATIC_DIR = ""; private final VelocityEngine velocityEngine; private final Server server; private UserManager userManager; private ProjectManager projectManager; // private ExecutorManagerAdapter executorManager; private ExecutorManager executorManager; private ScheduleManager scheduleManager; private TriggerManager triggerManager; private Map<String, Alerter> alerters; private final ClassLoader baseClassLoader; private Props props; private SessionCache sessionCache; private File tempDir; private Map<String, TriggerPlugin> triggerPlugins; private MBeanServer mbeanServer; private ArrayList<ObjectName> registeredMBeans = new ArrayList<ObjectName>(); public static AzkabanWebServer getInstance() { return app; } /** * Constructor usually called by tomcat AzkabanServletContext to create the * initial server */ public AzkabanWebServer() throws Exception { this(null, loadConfigurationFromAzkabanHome()); } /** * Constructor */ public AzkabanWebServer(Server server, Props props) throws Exception { this.props = props; this.server = server; velocityEngine = configureVelocityEngine(props .getBoolean(VELOCITY_DEV_MODE_PARAM, false)); sessionCache = new SessionCache(props); userManager = loadUserManager(props); alerters = loadAlerters(props); executorManager = loadExecutorManager(props); projectManager = loadProjectManager(props); triggerManager = loadTriggerManager(props); loadBuiltinCheckersAndActions(); // load all trigger agents here scheduleManager = loadScheduleManager(triggerManager, props); String triggerPluginDir = props.getString("trigger.plugin.dir", "plugins/triggers"); loadPluginCheckersAndActions(triggerPluginDir); baseClassLoader = this.getClassLoader(); tempDir = new File(props.getString("azkaban.temp.dir", "temp")); // Setup time zone if (props.containsKey(DEFAULT_TIMEZONE_ID)) { String timezone = props.getString(DEFAULT_TIMEZONE_ID); 
System.setProperty("user.timezone", timezone); TimeZone.setDefault(TimeZone.getTimeZone(timezone)); DateTimeZone.setDefault(DateTimeZone.forID(timezone)); logger.info("Setting timezone to " + timezone); } configureMBeanServer(); } private void setTriggerPlugins(Map<String, TriggerPlugin> triggerPlugins) { this.triggerPlugins = triggerPlugins; } private UserManager loadUserManager(Props props) { Class<?> userManagerClass = props.getClass(USER_MANAGER_CLASS_PARAM, null); logger.info("Loading user manager class " + userManagerClass.getName()); UserManager manager = null; if (userManagerClass != null && userManagerClass.getConstructors().length > 0) { try { Constructor<?> userManagerConstructor = userManagerClass.getConstructor(Props.class); manager = (UserManager) userManagerConstructor.newInstance(props); } catch (Exception e) { logger.error("Could not instantiate UserManager " + userManagerClass.getName()); throw new RuntimeException(e); } } else { manager = new XmlUserManager(props); } return manager; } private ProjectManager loadProjectManager(Props props) { logger.info("Loading JDBC for project management"); JdbcProjectLoader loader = new JdbcProjectLoader(props); ProjectManager manager = new ProjectManager(loader, props); return manager; } private ExecutorManager loadExecutorManager(Props props) throws Exception { JdbcExecutorLoader loader = new JdbcExecutorLoader(props); ExecutorManager execManager = new ExecutorManager(props, loader, alerters); return execManager; } private ScheduleManager loadScheduleManager(TriggerManager tm, Props props) throws Exception { logger.info("Loading trigger based scheduler"); ScheduleLoader loader = new TriggerBasedScheduleLoader(tm, ScheduleManager.triggerSource); return new ScheduleManager(loader); } private TriggerManager loadTriggerManager(Props props) throws TriggerManagerException { TriggerLoader loader = new JdbcTriggerLoader(props); return new TriggerManager(props, loader, executorManager); } private void 
loadBuiltinCheckersAndActions() { logger.info("Loading built-in checker and action types"); if (triggerManager instanceof TriggerManager) { SlaChecker.setExecutorManager(executorManager); ExecuteFlowAction.setExecutorManager(executorManager); ExecuteFlowAction.setProjectManager(projectManager); ExecuteFlowAction.setTriggerManager(triggerManager); KillExecutionAction.setExecutorManager(executorManager); SlaAlertAction.setExecutorManager(executorManager); SlaAlertAction.setAlerters(alerters); SlaAlertAction.setExecutorManager(executorManager); CreateTriggerAction.setTriggerManager(triggerManager); ExecutionChecker.setExecutorManager(executorManager); } triggerManager.registerCheckerType(BasicTimeChecker.type, BasicTimeChecker.class); triggerManager.registerCheckerType(SlaChecker.type, SlaChecker.class); triggerManager.registerCheckerType(ExecutionChecker.type, ExecutionChecker.class); triggerManager.registerActionType(ExecuteFlowAction.type, ExecuteFlowAction.class); triggerManager.registerActionType(KillExecutionAction.type, KillExecutionAction.class); triggerManager .registerActionType(SlaAlertAction.type, SlaAlertAction.class); triggerManager.registerActionType(CreateTriggerAction.type, CreateTriggerAction.class); } private Map<String, Alerter> loadAlerters(Props props) { Map<String, Alerter> allAlerters = new HashMap<String, Alerter>(); // load built-in alerters Emailer mailAlerter = new Emailer(props); allAlerters.put("email", mailAlerter); // load all plugin alerters String pluginDir = props.getString("alerter.plugin.dir", "plugins/alerter"); allAlerters.putAll(loadPluginAlerters(pluginDir)); return allAlerters; } private Map<String, Alerter> loadPluginAlerters(String pluginPath) { File alerterPluginPath = new File(pluginPath); if (!alerterPluginPath.exists()) { return Collections.<String, Alerter> emptyMap(); } Map<String, Alerter> installedAlerterPlugins = new HashMap<String, Alerter>(); ClassLoader parentLoader = getClass().getClassLoader(); File[] 
pluginDirs = alerterPluginPath.listFiles(); ArrayList<String> jarPaths = new ArrayList<String>(); for (File pluginDir : pluginDirs) { if (!pluginDir.isDirectory()) { logger.error("The plugin path " + pluginDir + " is not a directory."); continue; } // Load the conf directory File propertiesDir = new File(pluginDir, "conf"); Props pluginProps = null; if (propertiesDir.exists() && propertiesDir.isDirectory()) { File propertiesFile = new File(propertiesDir, "plugin.properties"); File propertiesOverrideFile = new File(propertiesDir, "override.properties"); if (propertiesFile.exists()) { if (propertiesOverrideFile.exists()) { pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile); } else { pluginProps = PropsUtils.loadProps(null, propertiesFile); } } else { logger.error("Plugin conf file " + propertiesFile + " not found."); continue; } } else { logger.error("Plugin conf path " + propertiesDir + " not found."); continue; } String pluginName = pluginProps.getString("alerter.name"); List<String> extLibClasspath = pluginProps.getStringList("alerter.external.classpaths", (List<String>) null); String pluginClass = pluginProps.getString("alerter.class"); if (pluginClass == null) { logger.error("Alerter class is not set."); } else { logger.info("Plugin class " + pluginClass); } URLClassLoader urlClassLoader = null; File libDir = new File(pluginDir, "lib"); if (libDir.exists() && libDir.isDirectory()) { File[] files = libDir.listFiles(); ArrayList<URL> urls = new ArrayList<URL>(); for (int i = 0; i < files.length; ++i) { try { URL url = files[i].toURI().toURL(); urls.add(url); } catch (MalformedURLException e) { logger.error(e); } } if (extLibClasspath != null) { for (String extLib : extLibClasspath) { try { File file = new File(pluginDir, extLib); URL url = file.toURI().toURL(); urls.add(url); } catch (MalformedURLException e) { logger.error(e); } } } urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader); } else { 
logger.error("Library path " + propertiesDir + " not found."); continue; } Class<?> alerterClass = null; try { alerterClass = urlClassLoader.loadClass(pluginClass); } catch (ClassNotFoundException e) { logger.error("Class " + pluginClass + " not found."); continue; } String source = FileIOUtils.getSourcePathFromClass(alerterClass); logger.info("Source jar " + source); jarPaths.add("jar:file:" + source); Constructor<?> constructor = null; try { constructor = alerterClass.getConstructor(Props.class); } catch (NoSuchMethodException e) { logger.error("Constructor not found in " + pluginClass); continue; } Object obj = null; try { obj = constructor.newInstance(pluginProps); } catch (Exception e) { logger.error(e); } if (!(obj instanceof Alerter)) { logger.error("The object is not an Alerter"); continue; } Alerter plugin = (Alerter) obj; installedAlerterPlugins.put(pluginName, plugin); } return installedAlerterPlugins; } private void loadPluginCheckersAndActions(String pluginPath) { logger.info("Loading plug-in checker and action types"); File triggerPluginPath = new File(pluginPath); if (!triggerPluginPath.exists()) { logger.error("plugin path " + pluginPath + " doesn't exist!"); return; } ClassLoader parentLoader = this.getClassLoader(); File[] pluginDirs = triggerPluginPath.listFiles(); ArrayList<String> jarPaths = new ArrayList<String>(); for (File pluginDir : pluginDirs) { if (!pluginDir.exists()) { logger.error("Error! 
Trigger plugin path " + pluginDir.getPath() + " doesn't exist."); continue; } if (!pluginDir.isDirectory()) { logger.error("The plugin path " + pluginDir + " is not a directory."); continue; } // Load the conf directory File propertiesDir = new File(pluginDir, "conf"); Props pluginProps = null; if (propertiesDir.exists() && propertiesDir.isDirectory()) { File propertiesFile = new File(propertiesDir, "plugin.properties"); File propertiesOverrideFile = new File(propertiesDir, "override.properties"); if (propertiesFile.exists()) { if (propertiesOverrideFile.exists()) { pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile); } else { pluginProps = PropsUtils.loadProps(null, propertiesFile); } } else { logger.error("Plugin conf file " + propertiesFile + " not found."); continue; } } else { logger.error("Plugin conf path " + propertiesDir + " not found."); continue; } List<String> extLibClasspath = pluginProps.getStringList("trigger.external.classpaths", (List<String>) null); String pluginClass = pluginProps.getString("trigger.class"); if (pluginClass == null) { logger.error("Trigger class is not set."); } else { logger.error("Plugin class " + pluginClass); } URLClassLoader urlClassLoader = null; File libDir = new File(pluginDir, "lib"); if (libDir.exists() && libDir.isDirectory()) { File[] files = libDir.listFiles(); ArrayList<URL> urls = new ArrayList<URL>(); for (int i = 0; i < files.length; ++i) { try { URL url = files[i].toURI().toURL(); urls.add(url); } catch (MalformedURLException e) { logger.error(e); } } if (extLibClasspath != null) { for (String extLib : extLibClasspath) { try { File file = new File(pluginDir, extLib); URL url = file.toURI().toURL(); urls.add(url); } catch (MalformedURLException e) { logger.error(e); } } } urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader); } else { logger.error("Library path " + propertiesDir + " not found."); continue; } Class<?> triggerClass = null; try { 
triggerClass = urlClassLoader.loadClass(pluginClass); } catch (ClassNotFoundException e) { logger.error("Class " + pluginClass + " not found."); continue; } String source = FileIOUtils.getSourcePathFromClass(triggerClass); logger.info("Source jar " + source); jarPaths.add("jar:file:" + source); try { Utils.invokeStaticMethod(urlClassLoader, pluginClass, "initiateCheckerTypes", pluginProps, app); } catch (Exception e) { logger.error("Unable to initiate checker types for " + pluginClass); continue; } try { Utils.invokeStaticMethod(urlClassLoader, pluginClass, "initiateActionTypes", pluginProps, app); } catch (Exception e) { logger.error("Unable to initiate action types for " + pluginClass); continue; } } } /** * Returns the web session cache. * * @return */ public SessionCache getSessionCache() { return sessionCache; } /** * Returns the velocity engine for pages to use. * * @return */ public VelocityEngine getVelocityEngine() { return velocityEngine; } /** * * @return */ public UserManager getUserManager() { return userManager; } /** * * @return */ public ProjectManager getProjectManager() { return projectManager; } /** * */ public ExecutorManager getExecutorManager() { return executorManager; } public ScheduleManager getScheduleManager() { return scheduleManager; } public TriggerManager getTriggerManager() { return triggerManager; } /** * Creates and configures the velocity engine. 
* * @param devMode * @return */ private VelocityEngine configureVelocityEngine(final boolean devMode) { VelocityEngine engine = new VelocityEngine(); engine.setProperty("resource.loader", "classpath, jar"); engine.setProperty("classpath.resource.loader.class", ClasspathResourceLoader.class.getName()); engine.setProperty("classpath.resource.loader.cache", !devMode); engine.setProperty("classpath.resource.loader.modificationCheckInterval", 5L); engine.setProperty("jar.resource.loader.class", JarResourceLoader.class.getName()); engine.setProperty("jar.resource.loader.cache", !devMode); engine.setProperty("resource.manager.logwhenfound", false); engine.setProperty("input.encoding", "UTF-8"); engine.setProperty("output.encoding", "UTF-8"); engine.setProperty("directive.set.null.allowed", true); engine.setProperty("resource.manager.logwhenfound", false); engine.setProperty("velocimacro.permissions.allow.inline", true); engine.setProperty("velocimacro.library.autoreload", devMode); engine.setProperty("velocimacro.library", "/azkaban/webapp/servlet/velocity/macros.vm"); engine.setProperty( "velocimacro.permissions.allow.inline.to.replace.global", true); engine.setProperty("velocimacro.arguments.strict", true); engine.setProperty("runtime.log.invalid.references", devMode); engine.setProperty("runtime.log.logsystem.class", Log4JLogChute.class); engine.setProperty("runtime.log.logsystem.log4j.logger", Logger.getLogger("org.apache.velocity.Logger")); engine.setProperty("parser.pool.size", 3); return engine; } public ClassLoader getClassLoader() { return baseClassLoader; } /** * Returns the global azkaban properties * * @return */ public Props getServerProps() { return props; } /** * Azkaban using Jetty * * @param args */ public static void main(String[] args) throws Exception { logger.info("Starting Jetty Azkaban Web Server..."); Props azkabanSettings = AzkabanServer.loadProps(args); if (azkabanSettings == null) { logger.error("Azkaban Properties not loaded."); 
logger.error("Exiting Azkaban..."); return; } int maxThreads = azkabanSettings.getInt("jetty.maxThreads", DEFAULT_THREAD_NUMBER); boolean isStatsOn = azkabanSettings.getBoolean("jetty.connector.stats", true); logger.info("Setting up connector with stats on: " + isStatsOn); boolean ssl; int port; final Server server = new Server(); if (azkabanSettings.getBoolean("jetty.use.ssl", true)) { int sslPortNumber = azkabanSettings.getInt("jetty.ssl.port", DEFAULT_SSL_PORT_NUMBER); port = sslPortNumber; ssl = true; logger.info("Setting up Jetty Https Server with port:" + sslPortNumber + " and numThreads:" + maxThreads); SslSocketConnector secureConnector = new SslSocketConnector(); secureConnector.setPort(sslPortNumber); secureConnector.setKeystore(azkabanSettings.getString("jetty.keystore")); secureConnector.setPassword(azkabanSettings.getString("jetty.password")); secureConnector.setKeyPassword(azkabanSettings .getString("jetty.keypassword")); secureConnector.setTruststore(azkabanSettings .getString("jetty.truststore")); secureConnector.setTrustPassword(azkabanSettings .getString("jetty.trustpassword")); secureConnector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE); server.addConnector(secureConnector); } else { ssl = false; port = azkabanSettings.getInt("jetty.port", DEFAULT_PORT_NUMBER); SocketConnector connector = new SocketConnector(); connector.setPort(port); connector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE); server.addConnector(connector); } // setting stats configuration for connectors for (Connector connector : server.getConnectors()) { connector.setStatsOn(isStatsOn); } String hostname = azkabanSettings.getString("jetty.hostname", "localhost"); azkabanSettings.put("server.hostname", hostname); azkabanSettings.put("server.port", port); azkabanSettings.put("server.useSSL", String.valueOf(ssl)); app = new AzkabanWebServer(server, azkabanSettings); boolean checkDB = azkabanSettings.getBoolean(AzkabanDatabaseSetup.DATABASE_CHECK_VERSION, false); if (checkDB) { 
AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(azkabanSettings); setup.loadTableInfo(); if (setup.needsUpdating()) { logger.error("Database is out of date."); setup.printUpgradePlan(); logger.error("Exiting with error."); System.exit(-1); } } QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads); server.setThreadPool(httpThreadPool); String staticDir = azkabanSettings.getString("web.resource.dir", DEFAULT_STATIC_DIR); logger.info("Setting up web resource dir " + staticDir); Context root = new Context(server, "/", Context.SESSIONS); root.setMaxFormContentSize(MAX_FORM_CONTENT_SIZE); String defaultServletPath = azkabanSettings.getString("azkaban.default.servlet.path", "/index"); root.setResourceBase(staticDir); ServletHolder indexRedirect = new ServletHolder(new IndexRedirectServlet(defaultServletPath)); root.addServlet(indexRedirect, "/"); ServletHolder index = new ServletHolder(new ProjectServlet()); root.addServlet(index, "/index"); ServletHolder staticServlet = new ServletHolder(new DefaultServlet()); root.addServlet(staticServlet, "/css/*"); root.addServlet(staticServlet, "/js/*"); root.addServlet(staticServlet, "/images/*"); root.addServlet(staticServlet, "/fonts/*"); root.addServlet(staticServlet, "/favicon.ico"); root.addServlet(new ServletHolder(new ProjectManagerServlet()), "/manager"); root.addServlet(new ServletHolder(new ExecutorServlet()), "/executor"); root.addServlet(new ServletHolder(new HistoryServlet()), "/history"); root.addServlet(new ServletHolder(new ScheduleServlet()), "/schedule"); root.addServlet(new ServletHolder(new JMXHttpServlet()), "/jmx"); root.addServlet(new ServletHolder(new TriggerManagerServlet()), "/triggers"); root.addServlet(new ServletHolder(new StatsServlet()), "/stats"); ServletHolder restliHolder = new ServletHolder(new RestliServlet()); restliHolder.setInitParameter("resourcePackages", "azkaban.restli"); root.addServlet(restliHolder, "/restli/*"); String viewerPluginDir = 
azkabanSettings.getString("viewer.plugin.dir", "plugins/viewer"); loadViewerPlugins(root, viewerPluginDir, app.getVelocityEngine()); // triggerplugin String triggerPluginDir = azkabanSettings.getString("trigger.plugin.dir", "plugins/triggers"); Map<String, TriggerPlugin> triggerPlugins = loadTriggerPlugins(root, triggerPluginDir, app); app.setTriggerPlugins(triggerPlugins); // always have basic time trigger // TODO: find something else to do the job app.getTriggerManager().start(); root.setAttribute(ServerConstants.AZKABAN_SERVLET_CONTEXT_KEY, app); try { server.start(); } catch (Exception e) { logger.warn(e); Utils.croak(e.getMessage(), 1); } Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { try { logTopMemoryConsumers(); } catch (Exception e) { logger.info(("Exception when logging top memory consumers"), e); } logger.info("Shutting down http server..."); try { app.close(); server.stop(); server.destroy(); } catch (Exception e) { logger.error("Error while shutting down http server.", e); } logger.info("kk thx bye."); } public void logTopMemoryConsumers() throws Exception, IOException { if (new File("/bin/bash").exists() && new File("/bin/ps").exists() && new File("/usr/bin/head").exists()) { logger.info("logging top memeory consumer"); java.lang.ProcessBuilder processBuilder = new java.lang.ProcessBuilder("/bin/bash", "-c", "/bin/ps aux --sort -rss | /usr/bin/head"); Process p = processBuilder.start(); p.waitFor(); InputStream is = p.getInputStream(); java.io.BufferedReader reader = new java.io.BufferedReader(new InputStreamReader(is)); String line = null; while ((line = reader.readLine()) != null) { logger.info(line); } is.close(); } } }); logger.info("Server running on " + (ssl ? 
"ssl" : "") + " port " + port + "."); // tail of the startup method that begins before this chunk
  }

  /**
   * Scans {@code pluginPath} for trigger plugin directories, loads each plugin's
   * {@code conf/plugin.properties} (optionally overlaid with {@code conf/override.properties}),
   * builds a URLClassLoader over the plugin's {@code lib} directory plus any
   * {@code trigger.external.classpaths}, and reflectively instantiates the class named by
   * {@code trigger.class} via its (String, Props, Context, AzkabanWebServer) constructor.
   *
   * Plugins that fail any step are logged and skipped; loading continues with the next one.
   * As a side effect, each loaded plugin's source jar is appended to Velocity's
   * {@code jar.resource.loader.path} so plugin templates resolve.
   *
   * @param root the servlet context handed to each plugin constructor
   * @param pluginPath directory containing one subdirectory per plugin
   * @param azkabanWebApp the owning server, handed to each plugin constructor
   * @return map of plugin name (from {@code trigger.name}) to instantiated plugin;
   *         empty if {@code pluginPath} does not exist
   */
  private static Map<String, TriggerPlugin> loadTriggerPlugins(Context root,
      String pluginPath, AzkabanWebServer azkabanWebApp) {
    File triggerPluginPath = new File(pluginPath);
    if (!triggerPluginPath.exists()) {
      return new HashMap<String, TriggerPlugin>();
    }

    Map<String, TriggerPlugin> installedTriggerPlugins =
        new HashMap<String, TriggerPlugin>();
    ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
    File[] pluginDirs = triggerPluginPath.listFiles();
    ArrayList<String> jarPaths = new ArrayList<String>();
    for (File pluginDir : pluginDirs) {
      if (!pluginDir.exists()) {
        logger.error("Error! Trigger plugin path " + pluginDir.getPath()
            + " doesn't exist.");
        continue;
      }

      if (!pluginDir.isDirectory()) {
        logger.error("The plugin path " + pluginDir + " is not a directory.");
        continue;
      }

      // Load the conf directory
      File propertiesDir = new File(pluginDir, "conf");
      Props pluginProps = null;
      if (propertiesDir.exists() && propertiesDir.isDirectory()) {
        File propertiesFile = new File(propertiesDir, "plugin.properties");
        File propertiesOverrideFile =
            new File(propertiesDir, "override.properties");

        if (propertiesFile.exists()) {
          if (propertiesOverrideFile.exists()) {
            // Override file layers on top of the base properties.
            pluginProps =
                PropsUtils.loadProps(null, propertiesFile,
                    propertiesOverrideFile);
          } else {
            pluginProps = PropsUtils.loadProps(null, propertiesFile);
          }
        } else {
          logger.error("Plugin conf file " + propertiesFile + " not found.");
          continue;
        }
      } else {
        logger.error("Plugin conf path " + propertiesDir + " not found.");
        continue;
      }

      String pluginName = pluginProps.getString("trigger.name");
      List<String> extLibClasspath =
          pluginProps.getStringList("trigger.external.classpaths",
              (List<String>) null);

      String pluginClass = pluginProps.getString("trigger.class");
      if (pluginClass == null) {
        // NOTE(review): logs but does NOT skip; the loadClass() below will then
        // be called with a null name — verify whether a `continue` was intended.
        logger.error("Trigger class is not set.");
      } else {
        // NOTE(review): informational message logged at error level.
        logger.error("Plugin class " + pluginClass);
      }

      URLClassLoader urlClassLoader = null;
      File libDir = new File(pluginDir, "lib");
      if (libDir.exists() && libDir.isDirectory()) {
        File[] files = libDir.listFiles();

        ArrayList<URL> urls = new ArrayList<URL>();
        for (int i = 0; i < files.length; ++i) {
          try {
            URL url = files[i].toURI().toURL();
            urls.add(url);
          } catch (MalformedURLException e) {
            logger.error(e);
          }
        }
        if (extLibClasspath != null) {
          for (String extLib : extLibClasspath) {
            try {
              File file = new File(pluginDir, extLib);
              URL url = file.toURI().toURL();
              urls.add(url);
            } catch (MalformedURLException e) {
              logger.error(e);
            }
          }
        }

        urlClassLoader =
            new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
      } else {
        // NOTE(review): message prints propertiesDir but the missing path is
        // libDir — likely a copy/paste slip (the viewer loader below gets it right).
        logger.error("Library path " + propertiesDir + " not found.");
        continue;
      }

      Class<?> triggerClass = null;
      try {
        triggerClass = urlClassLoader.loadClass(pluginClass);
      } catch (ClassNotFoundException e) {
        logger.error("Class " + pluginClass + " not found.");
        continue;
      }

      String source = FileIOUtils.getSourcePathFromClass(triggerClass);
      logger.info("Source jar " + source);
      jarPaths.add("jar:file:" + source);

      Constructor<?> constructor = null;
      try {
        constructor =
            triggerClass.getConstructor(String.class, Props.class,
                Context.class, AzkabanWebServer.class);
      } catch (NoSuchMethodException e) {
        logger.error("Constructor not found in " + pluginClass);
        continue;
      }

      Object obj = null;
      try {
        obj = constructor.newInstance(pluginName, pluginProps, root,
            azkabanWebApp);
      } catch (Exception e) {
        // Instantiation failure leaves obj null; the instanceof check below
        // rejects it and we skip this plugin.
        logger.error(e);
      }

      if (!(obj instanceof TriggerPlugin)) {
        logger.error("The object is not an TriggerPlugin");
        continue;
      }

      TriggerPlugin plugin = (TriggerPlugin) obj;
      installedTriggerPlugins.put(pluginName, plugin);
    }

    // Velocity needs the jar resource paths to be set.
    String jarResourcePath = StringUtils.join(jarPaths, ", ");
    logger.info("Setting jar resource path " + jarResourcePath);
    VelocityEngine ve = azkabanWebApp.getVelocityEngine();
    ve.addProperty("jar.resource.loader.path", jarResourcePath);

    return installedTriggerPlugins;
  }

  /**
   * Returns the trigger plugins installed by {@link #loadTriggerPlugins}.
   *
   * @return plugin name to plugin map
   */
  public Map<String, TriggerPlugin> getTriggerPlugins() {
    return triggerPlugins;
  }

  /**
   * Scans {@code pluginPath} for viewer plugin directories and, for each valid one,
   * loads its properties, builds a URLClassLoader over {@code lib} plus
   * {@code viewer.external.classpaths} (external entries may be files or directories),
   * instantiates the {@code viewer.servlet.class} via its (Props) constructor, mounts
   * it on the servlet context at {@code /<viewer.path>/*}, and registers it with the
   * global {@link PluginRegistry}. Failures are logged and the plugin is skipped.
   * Also appends each plugin's source jar to Velocity's {@code jar.resource.loader.path}.
   *
   * @param root servlet context to mount viewer servlets on
   * @param pluginPath directory containing one subdirectory per viewer plugin
   * @param ve Velocity engine whose jar resource path is extended
   */
  private static void loadViewerPlugins(Context root, String pluginPath,
      VelocityEngine ve) {
    File viewerPluginPath = new File(pluginPath);
    if (!viewerPluginPath.exists()) {
      return;
    }

    ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
    File[] pluginDirs = viewerPluginPath.listFiles();
    ArrayList<String> jarPaths = new ArrayList<String>();
    for (File pluginDir : pluginDirs) {
      if (!pluginDir.exists()) {
        logger.error("Error viewer plugin path " + pluginDir.getPath()
            + " doesn't exist.");
        continue;
      }

      if (!pluginDir.isDirectory()) {
        logger.error("The plugin path " + pluginDir + " is not a directory.");
        continue;
      }

      // Load the conf directory
      File propertiesDir = new File(pluginDir, "conf");
      Props pluginProps = null;
      if (propertiesDir.exists() && propertiesDir.isDirectory()) {
        File propertiesFile = new File(propertiesDir, "plugin.properties");
        File propertiesOverrideFile =
            new File(propertiesDir, "override.properties");

        if (propertiesFile.exists()) {
          if (propertiesOverrideFile.exists()) {
            pluginProps =
                PropsUtils.loadProps(null, propertiesFile,
                    propertiesOverrideFile);
          } else {
            pluginProps = PropsUtils.loadProps(null, propertiesFile);
          }
        } else {
          logger.error("Plugin conf file " + propertiesFile + " not found.");
          continue;
        }
      } else {
        logger.error("Plugin conf path " + propertiesDir + " not found.");
        continue;
      }

      // Viewer metadata driving registration and menu placement.
      String pluginName = pluginProps.getString("viewer.name");
      String pluginWebPath = pluginProps.getString("viewer.path");
      String pluginJobTypes = pluginProps.getString("viewer.jobtypes", null);
      int pluginOrder = pluginProps.getInt("viewer.order", 0);
      boolean pluginHidden = pluginProps.getBoolean("viewer.hidden", false);
      List<String> extLibClasspath =
          pluginProps.getStringList("viewer.external.classpaths",
              (List<String>) null);

      String pluginClass = pluginProps.getString("viewer.servlet.class");
      if (pluginClass == null) {
        // NOTE(review): logs but does not skip, mirroring the trigger loader above.
        logger.error("Viewer class is not set.");
      } else {
        // NOTE(review): informational message logged at error level.
        logger.error("Plugin class " + pluginClass);
      }

      URLClassLoader urlClassLoader = null;
      File libDir = new File(pluginDir, "lib");
      if (libDir.exists() && libDir.isDirectory()) {
        File[] files = libDir.listFiles();

        ArrayList<URL> urls = new ArrayList<URL>();
        for (int i = 0; i < files.length; ++i) {
          try {
            URL url = files[i].toURI().toURL();
            urls.add(url);
          } catch (MalformedURLException e) {
            logger.error(e);
          }
        }

        // Load any external libraries.
        if (extLibClasspath != null) {
          for (String extLib : extLibClasspath) {
            File extLibFile = new File(pluginDir, extLib);
            if (extLibFile.exists()) {
              if (extLibFile.isDirectory()) {
                // extLibFile is a directory; load all the files in the
                // directory.
                File[] extLibFiles = extLibFile.listFiles();
                for (int i = 0; i < extLibFiles.length; ++i) {
                  try {
                    URL url = extLibFiles[i].toURI().toURL();
                    urls.add(url);
                  } catch (MalformedURLException e) {
                    logger.error(e);
                  }
                }
              } else { // extLibFile is a file
                try {
                  URL url = extLibFile.toURI().toURL();
                  urls.add(url);
                } catch (MalformedURLException e) {
                  logger.error(e);
                }
              }
            } else {
              // NOTE(review): this `continue` only skips the current extLib entry,
              // not the whole plugin — confirm that is the intended behavior.
              logger.error("External library path "
                  + extLibFile.getAbsolutePath() + " not found.");
              continue;
            }
          }
        }

        urlClassLoader =
            new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
      } else {
        logger
            .error("Library path " + libDir.getAbsolutePath() + " not found.");
        continue;
      }

      Class<?> viewerClass = null;
      try {
        viewerClass = urlClassLoader.loadClass(pluginClass);
      } catch (ClassNotFoundException e) {
        logger.error("Class " + pluginClass + " not found.");
        continue;
      }

      String source = FileIOUtils.getSourcePathFromClass(viewerClass);
      logger.info("Source jar " + source);
      jarPaths.add("jar:file:" + source);

      Constructor<?> constructor = null;
      try {
        constructor = viewerClass.getConstructor(Props.class);
      } catch (NoSuchMethodException e) {
        logger.error("Constructor not found in " + pluginClass);
        continue;
      }

      Object obj = null;
      try {
        obj = constructor.newInstance(pluginProps);
      } catch (Exception e) {
        logger.error(e);
        logger.error(e.getCause());
      }

      if (!(obj instanceof AbstractAzkabanServlet)) {
        logger.error("The object is not an AbstractAzkabanServlet");
        continue;
      }

      AbstractAzkabanServlet avServlet = (AbstractAzkabanServlet) obj;
      root.addServlet(new ServletHolder(avServlet), "/" + pluginWebPath + "/*");
      PluginRegistry.getRegistry().register(
          new ViewerPlugin(pluginName, pluginWebPath, pluginOrder,
              pluginHidden, pluginJobTypes));
    }

    // Velocity needs the jar resource paths to be set.
    String jarResourcePath = StringUtils.join(jarPaths, ", ");
    logger.info("Setting jar resource path " + jarResourcePath);
    ve.addProperty("jar.resource.loader.path", jarResourcePath);
  }

  /**
   * Loads the Azkaban property file from the AZKABAN_HOME conf directory.
   *
   * @return the loaded Props, or null when AZKABAN_HOME is unset, unreadable,
   *         or lacks a readable conf directory
   */
  private static Props loadConfigurationFromAzkabanHome() {
    String azkabanHome = System.getenv("AZKABAN_HOME");

    if (azkabanHome == null) {
      logger.error("AZKABAN_HOME not set. Will try default.");
      return null;
    }

    if (!new File(azkabanHome).isDirectory()
        || !new File(azkabanHome).canRead()) {
      logger.error(azkabanHome + " is not a readable directory.");
      return null;
    }

    File confPath = new File(azkabanHome, DEFAULT_CONF_PATH);
    if (!confPath.exists() || !confPath.isDirectory() || !confPath.canRead()) {
      logger
          .error(azkabanHome + " does not contain a readable conf directory.");
      return null;
    }

    return loadAzkabanConfigurationFromDirectory(confPath);
  }

  /**
   * Returns the set temp dir.
   *
   * @return the configured temporary directory
   */
  public File getTempDirectory() {
    return tempDir;
  }

  /**
   * Loads Azkaban configuration from {@code dir}: the private properties file is
   * loaded first (optional) and the public properties file is layered on top of it.
   *
   * @param dir conf directory containing the properties files
   * @return the layered Props, or null if neither file was loaded or an I/O error occurred
   */
  private static Props loadAzkabanConfigurationFromDirectory(File dir) {
    File azkabanPrivatePropsFile =
        new File(dir, AZKABAN_PRIVATE_PROPERTIES_FILE);
    File azkabanPropsFile = new File(dir, AZKABAN_PROPERTIES_FILE);

    Props props = null;
    try {
      // This is purely optional
      if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
        logger.info("Loading azkaban private properties file");
        props = new Props(null, azkabanPrivatePropsFile);
      }

      if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
        logger.info("Loading azkaban properties file");
        props = new Props(props, azkabanPropsFile);
      }
    } catch (FileNotFoundException e) {
      logger.error("File not found. Could not load azkaban config file", e);
    } catch (IOException e) {
      logger.error(
          "File found, but error reading. Could not load azkaban config file",
          e);
    }

    return props;
  }

  /**
   * Registers the server's MBeans (Jetty, trigger manager, optionally the executor
   * manager) and exposes Log4J loggers over JMX so log levels can be changed at
   * runtime via JConsole / VisualVM.
   */
  private void configureMBeanServer() {
    logger.info("Registering MBeans...");
    mbeanServer = ManagementFactory.getPlatformMBeanServer();

    registerMbean("jetty", new JmxJettyServer(server));
    registerMbean("triggerManager", new JmxTriggerManager(triggerManager));
    if (executorManager instanceof ExecutorManager) {
      registerMbean("executorManager", new JmxExecutorManager(
          (ExecutorManager) executorManager));
    }

    // Register Log4J loggers as JMX beans so the log level can be
    // updated via JConsole or Java VisualVM
    HierarchyDynamicMBean log4jMBean = new HierarchyDynamicMBean();
    registerMbean("log4jmxbean", log4jMBean);
    ObjectName accessLogLoggerObjName =
        log4jMBean.addLoggerMBean(AZKABAN_ACCESS_LOGGER_NAME);

    if (accessLogLoggerObjName == null) {
      // NOTE(review): diagnostics printed to stdout rather than through `logger`.
      System.out
          .println("************* loginLoggerObjName is null, make sure there is a logger with name "
              + AZKABAN_ACCESS_LOGGER_NAME);
    } else {
      System.out.println("******** loginLoggerObjName: "
          + accessLogLoggerObjName.getCanonicalName());
    }
  }

  /**
   * Unregisters all MBeans registered via {@link #registerMbean} and shuts down
   * the schedule and executor managers.
   */
  public void close() {
    try {
      for (ObjectName name : registeredMBeans) {
        mbeanServer.unregisterMBean(name);
        logger.info("Jmx MBean " + name.getCanonicalName() + " unregistered.");
      }
    } catch (Exception e) {
      logger.error("Failed to cleanup MBeanServer", e);
    }
    scheduleManager.shutdown();
    executorManager.shutdown();
  }

  /**
   * Registers {@code mbean} under {@code <mbeanClass>:name=<name>} and records the
   * ObjectName for unregistration in {@link #close}. Failures are logged, not thrown.
   *
   * @param name value of the {@code name} key in the ObjectName
   * @param mbean the MBean instance to register
   */
  private void registerMbean(String name, Object mbean) {
    Class<?> mbeanClass = mbean.getClass();
    ObjectName mbeanName;
    try {
      mbeanName = new ObjectName(mbeanClass.getName() + ":name=" + name);
      mbeanServer.registerMBean(mbean, mbeanName);
      logger.info("Bean " + mbeanClass.getCanonicalName() + " registered.");
      registeredMBeans.add(mbeanName);
    } catch (Exception e) {
      logger.error("Error registering mbean " + mbeanClass.getCanonicalName(),
          e);
    }
  }

  /**
   * Returns the ObjectNames of all MBeans registered by this server.
   * NOTE(review): returns the live internal list, not a copy.
   */
  public List<ObjectName> getMbeanNames() {
    return registeredMBeans;
  }

  /**
   * Looks up MBean metadata for {@code name}.
   *
   * @return the MBeanInfo, or null on any lookup failure (logged)
   */
  public MBeanInfo getMBeanInfo(ObjectName name) {
    try {
      return mbeanServer.getMBeanInfo(name);
    } catch (Exception e) {
      logger.error(e);
      return null;
    }
  }

  /**
   * Reads a single attribute from the MBean registered under {@code name}.
   *
   * @return the attribute value, or null on any failure (logged)
   */
  public Object getMBeanAttribute(ObjectName name, String attribute) {
    try {
      return mbeanServer.getAttribute(name, attribute);
    } catch (Exception e) {
      logger.error(e);
      return null;
    }
  }
}
package org.lcmmun.kiosk.gui;

import java.awt.BorderLayout;
import java.awt.CardLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import net.miginfocom.layout.CC;
import net.miginfocom.swing.MigLayout;

import org.lcmmun.kiosk.Committee;
import org.lcmmun.kiosk.Delegate;
import org.lcmmun.kiosk.Messages;
import org.lcmmun.kiosk.Speech.YieldType;
import org.lcmmun.kiosk.Yield;
import org.lcmmun.kiosk.gui.events.YieldActionEvent;
import org.lcmmun.kiosk.gui.events.YieldActionEvent.CommentActionType;
import org.lcmmun.kiosk.gui.events.YieldActionEvent.QuestionActionType;
import org.lcmmun.kiosk.gui.events.YieldActionListener;
import org.lcmmun.kiosk.gui.events.YieldEvent;
import org.lcmmun.kiosk.gui.events.YieldListener;

import tools.customizable.CounterProperty;
import tools.customizable.MultipleChoiceProperty;
import tools.customizable.PropertyPanel;
import tools.customizable.PropertySet;

/**
 * A panel for managing yields. The panel is a card stack: the default card lets
 * the chair pick a {@link YieldType} and press the yield button; the remaining
 * cards (comments, questions, yield-to-delegate) are shown once a yield of the
 * corresponding type is performed. Yield outcomes are broadcast to registered
 * {@link YieldListener}s and {@link YieldActionListener}s.
 *
 * @author William Chargin
 *
 */
public class YieldsPanel extends JPanel {

	/**
	 * Serialization version (Swing component).
	 */
	private static final long serialVersionUID = 1L;

	/**
	 * The delegate currently speaking.
	 */
	private Delegate speaking;

	/**
	 * The card layout of this component.
	 */
	private final CardLayout layout = new CardLayout();

	/**
	 * The yield type property.
	 */
	private final MultipleChoiceProperty<YieldType> mcpYieldType;

	/**
	 * The yield button.
	 */
	private final JButton btnYield;

	/**
	 * The committee.
	 */
	public Committee committee;

	/**
	 * Creates the panel.
	 *
	 * @param committee
	 *            the committee whose delegates populate the yield targets
	 * @param parent
	 *            the speech panel notified of yields and speech pauses
	 */
	public YieldsPanel(final Committee committee, final SpeechPanel parent) {
		super();
		setLayout(layout);
		this.committee = committee;
		mcpYieldType = MultipleChoiceProperty.createFromEnum(
				Messages.getString("YieldsPanel.YieldType"), //$NON-NLS-1$
				YieldType.class);
		mcpYieldType.setRenderer(new YieldRenderer());
		JPanel pnlYield = new JPanel(new BorderLayout());
		// The default card is registered under the empty string (new String()).
		add(pnlYield, new String());
		pnlYield.add(new PropertyPanel(new PropertySet(mcpYieldType), true,
				false), BorderLayout.CENTER);
		btnYield = new JButton(Messages.getString("YieldsPanel.YieldButton")); //$NON-NLS-1$
		pnlYield.add(btnYield, BorderLayout.SOUTH);
		final JPanel ypnlComments = new JPanel(new MigLayout());
		final JPanel ypnlQuestions = new JPanel(new MigLayout());
		final JPanel ypnlDelegate = new JPanel(new BorderLayout());
		btnYield.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent ae) {
				final YieldType value = mcpYieldType.getValue();
				if (value != YieldType.DELEGATE) {
					// NOTE(review): inside this guard value can never be
					// DELEGATE, so the ternary's first branch is dead code;
					// this always fires with new Yield(value).
					fireYieldEvent(new YieldEvent(YieldsPanel.this, speaking,
							value == YieldType.DELEGATE ? new Yield(speaking)
									: new Yield(value)));
				}
				// Rebuild the target card before showing it; non-card types
				// are forwarded straight to the speech panel.
				switch (value) {
				case DELEGATE:
					setupDelegatePanel(ypnlDelegate,
							YieldsPanel.this.committee, parent);
					break;
				case QUESTIONS:
					setupQuestionsPanel(ypnlQuestions,
							YieldsPanel.this.committee, parent);
					break;
				case COMMENTS:
					setupCommentsPanel(ypnlComments, YieldsPanel.this.committee);
					parent.yield(new Yield(YieldType.COMMENTS));
					break;
				default:
					parent.yield(new Yield(value));
					break;
				}
				layout.show(YieldsPanel.this, value.name());
			}
		});
		setupCommentsPanel(ypnlComments, committee);
		setupQuestionsPanel(ypnlQuestions, committee, parent);
		setupDelegatePanel(ypnlDelegate, committee, parent);
		// Cards are keyed by YieldType name, matching layout.show above.
		add(ypnlComments, YieldType.COMMENTS.name());
		add(ypnlQuestions, YieldType.QUESTIONS.name());
		add(ypnlDelegate, YieldType.DELEGATE.name());
		setSpeakingDelegate(null);
	}

	/**
	 * Sets up the question panel: a recognize-delegate chooser plus a single
	 * button that cycles through start-question / start-answer / stop-answer,
	 * firing the corresponding {@link QuestionActionType} event on each press.
	 * If no other delegates are present, a placeholder label is shown instead.
	 *
	 * @param ypnlQuestions
	 *            the panel to set up
	 * @param committee
	 *            the relevant committee
	 * @param parent
	 *            the speech panel paused while questions are handled
	 */
	private void setupQuestionsPanel(JPanel ypnlQuestions,
			Committee committee, final SpeechPanel parent) {
		ypnlQuestions.removeAll();
		ArrayList<Delegate> candidates = new ArrayList<Delegate>(
				committee.getPresentDelegates());
		// You can't ask yourself a question.
		// ... although that would be interesting.
		candidates.remove(speaking);
		if (candidates.isEmpty()) {
			final JLabel label = new JLabel(
					Messages.getString("YieldsPanel.NoDelegatesToAsk")); //$NON-NLS-1$
			label.setHorizontalAlignment(JLabel.CENTER);
			label.setVerticalAlignment(JLabel.CENTER);
			ypnlQuestions.add(label, new CC().grow().push());
		} else {
			parent.pauseSpeech();
			final MultipleChoiceProperty<Delegate> mcpRecognize = new MultipleChoiceProperty<Delegate>(
					Messages.getString("YieldsPanel.Recognize"), candidates, null); //$NON-NLS-1$
			ypnlQuestions.add(new PropertyPanel(new PropertySet(mcpRecognize),
					true, false), new CC().grow().push().spanX().wrap());
			// Insertion-ordered map: the button's label indexes into this map
			// to find the action for the current state.
			final Map<String, ActionListener> buttonValues = new LinkedHashMap<String, ActionListener>();
			buttonValues
					.put(Messages.getString("YieldsPanel.StartQuestion"), new ActionListener() { //$NON-NLS-1$
						@Override
						public void actionPerformed(ActionEvent ae) {
							fireYieldActionEvent(new YieldActionEvent(
									speaking, new Yield(
											YieldType.QUESTIONS),
									QuestionActionType.ASKING,
									mcpRecognize.getValue()));
						}
					});
			// Button starts out labeled with the first (start-question) key.
			final JButton btn = new JButton(new ArrayList<String>(
					buttonValues.keySet()).get(0));
			buttonValues
					.put(Messages.getString("YieldsPanel.StartAnswer"), new ActionListener() { //$NON-NLS-1$
						@Override
						public void actionPerformed(ActionEvent ae) {
							parent.pauseSpeech();
							fireYieldActionEvent(new YieldActionEvent(
									speaking, new Yield(
											YieldType.QUESTIONS),
									QuestionActionType.ANSWERING));
						}
					});
			buttonValues
					.put(Messages.getString("YieldsPanel.StopAnswer"), new ActionListener() { //$NON-NLS-1$
						@Override
						public void actionPerformed(ActionEvent ae) {
							parent.pauseSpeech();
							fireYieldActionEvent(new YieldActionEvent(
									speaking,
									new Yield(YieldType.QUESTIONS),
									QuestionActionType.WAITING_FOR_QUESTIONS));
						}
					});
			btn.addActionListener(new ActionListener() {
				@Override
				public void actionPerformed(ActionEvent ae) {
					// Run the action matching the current label, then advance
					// the label to the next state (wrapping around).
					ArrayList<String> list = new ArrayList<String>(buttonValues
							.keySet());
					int index = list.indexOf(btn.getText());
					int next = (index + 1) % list.size();
					buttonValues.get(list.get(index)).actionPerformed(ae);
					btn.setText(list.get(next));
				}
			});
			ypnlQuestions.add(btn, new CC().growX().pushX());
		}
		ypnlQuestions.revalidate();
		ypnlQuestions.repaint();
	}

	/**
	 * Adds the given listener to the list of listeners.
	 *
	 * @param yl
	 *            the listener to add
	 */
	public void addYieldListener(YieldListener yl) {
		listenerList.add(YieldListener.class, yl);
	}

	/**
	 * Removes the given listener from the list of listeners.
	 *
	 * @param yl
	 *            the listener to remove
	 */
	public void removeYieldListener(YieldListener yl) {
		listenerList.remove(YieldListener.class, yl);
	}

	/**
	 * Adds the given listener to the list of listeners.
	 *
	 * @param yl
	 *            the listener to add
	 */
	public void addYieldActionListener(YieldActionListener yl) {
		listenerList.add(YieldActionListener.class, yl);
	}

	/**
	 * Removes the given listener from the list of listeners.
	 *
	 * @param yl
	 *            the listener to remove
	 */
	public void removeYieldActionListener(YieldActionListener yl) {
		listenerList.remove(YieldActionListener.class, yl);
	}

	/**
	 * Fires a {@link YieldActionEvent} to all registered listeners.
	 *
	 * @param yieldActionEvent
	 *            the event to fire
	 */
	protected void fireYieldActionEvent(YieldActionEvent yieldActionEvent) {
		for (YieldActionListener yl : listenerList
				.getListeners(YieldActionListener.class)) {
			yl.yieldActionPerformed(yieldActionEvent);
		}
	}

	/**
	 * Sets up the yield-to-delegate panel: a target chooser and a button that
	 * performs the yield via {@code parent.yield} and then disables itself.
	 * Shows a placeholder label when no delegates are present.
	 *
	 * @param ypnlDelegate
	 *            the panel to set up
	 * @param committee
	 *            the relevant committee
	 * @param parent
	 *            the speech panel receiving the yield
	 */
	public void setupDelegatePanel(final JPanel ypnlDelegate,
			final Committee committee, final SpeechPanel parent) {
		ypnlDelegate.removeAll();
		final ArrayList<Delegate> validYields = new ArrayList<Delegate>(
				committee.getPresentDelegates());
		if (validYields.isEmpty()) {
			final JLabel label = new JLabel(
					Messages.getString("YieldsPanel.NoDelegateToYieldTo")); //$NON-NLS-1$
			label.setHorizontalAlignment(JLabel.CENTER);
			label.setVerticalAlignment(JLabel.CENTER);
			ypnlDelegate.add(label, BorderLayout.CENTER);
		} else {
			// A delegate cannot yield to themselves.
			validYields.remove(speaking);
			final MultipleChoiceProperty<Delegate> mcpTarget = new MultipleChoiceProperty<Delegate>(
					Messages.getString("YieldsPanel.Target"), validYields,
					validYields.get(0)); //$NON-NLS-1$
			ypnlDelegate.add(new PropertyPanel(new PropertySet(mcpTarget),
					true, false), BorderLayout.CENTER);
			final JButton btnYield = new JButton(
					Messages.getString("YieldsPanel.YieldToDelegate")); //$NON-NLS-1$
			ypnlDelegate.add(btnYield, BorderLayout.SOUTH);
			btnYield.addActionListener(new ActionListener() {
				@Override
				public void actionPerformed(ActionEvent ae) {
					// One yield per setup: lock the controls after use.
					mcpTarget.setEnabled(false);
					btnYield.setEnabled(false);
					parent.yield(new Yield(mcpTarget.getValue()));
				}
			});
		}
		ypnlDelegate.revalidate();
		ypnlDelegate.repaint();
	}

	/**
	 * Sets up the comments panel: a start-comment button and a time bar. Each
	 * press fires COMMENT_STARTED and starts the bar; finishing (or a
	 * double-click on the bar) fires COMMENT_ENDED with the number of comments
	 * remaining. When {@code committee.numComments} is zero, only a
	 * no-comments label is shown.
	 *
	 * @param ypnlComments
	 *            the panel to set up
	 * @param committee
	 *            the committee supplying numComments and commentTime
	 */
	public void setupCommentsPanel(JPanel ypnlComments,
			final Committee committee) {
		ypnlComments.removeAll();
		if (committee.numComments > 0) {
			final TimeBar tbComment = new TimeBar();
			// Counter runs 1..numComments; title is the empty string.
			final CounterProperty cpCommentNumber = new CounterProperty(
					new String(), 1, 1, committee.numComments);
			tbComment.addMouseListener(new MouseAdapter() {
				@Override
				public void mouseClicked(MouseEvent me) {
					// Double-click ends the running (or finished) comment early.
					if (me.getClickCount() >= 2
							&& (tbComment.isRunning() || tbComment.isFinished())) {
						tbComment.stop();
						fireYieldActionEvent(new YieldActionEvent(speaking,
								new Yield(YieldType.COMMENTS),
								CommentActionType.COMMENT_ENDED,
								committee.numComments
										- cpCommentNumber.getValue() + 1));
					}
				}
			});
			tbComment.addChangeListener(new ChangeListener() {
				@Override
				public void stateChanged(ChangeEvent ce) {
					if (tbComment.isFinished()) {
						tbComment.stop();
						fireYieldActionEvent(new YieldActionEvent(speaking,
								new Yield(YieldType.COMMENTS),
								CommentActionType.COMMENT_ENDED,
								committee.numComments
										- cpCommentNumber.getValue() + 1));
					}
				}
			});
			final JButton btnStartComment = new JButton(
					Messages.getString("YieldsPanel.DefaultCommentButtonText")); //$NON-NLS-1$
			btnStartComment.addActionListener(new ActionListener() {
				@Override
				public void actionPerformed(ActionEvent ae) {
					// Advance the counter; disable the button once the quota
					// is exhausted, otherwise relabel it with the next number.
					cpCommentNumber.setValue(cpCommentNumber.getValue() + 1);
					if (cpCommentNumber.getValue() > committee.numComments) {
						btnStartComment.setEnabled(false);
						btnStartComment.setText(Messages
								.getString("YieldsPanel.NoMoreComments")); //$NON-NLS-1$
					} else {
						btnStartComment.setText(Messages
								.getString("YieldsPanel.StartCommentButtonPrefix") //$NON-NLS-1$
								+ (cpCommentNumber.getValue()));
					}
					fireYieldActionEvent(new YieldActionEvent(speaking,
							new Yield(YieldType.COMMENTS),
							CommentActionType.COMMENT_STARTED));
					tbComment.start(committee.commentTime);
				}
			});
			ypnlComments.add(btnStartComment, new CC().grow().push());
			ypnlComments.add(tbComment, new CC().growX().pushX().spanX()
					.newline());
		} else {
			final JLabel label = new JLabel(
					Messages.getString("YieldsPanel.NoCommentsAllowedText")); //$NON-NLS-1$
			label.setHorizontalAlignment(JLabel.CENTER);
			label.setVerticalAlignment(JLabel.CENTER);
			ypnlComments.add(label, new CC().grow().push());
		}
		ypnlComments.revalidate();
		ypnlComments.repaint();
	}

	/**
	 * Gets the delegate currently speaking.
	 *
	 * @return the currently speaking delegate, or {@code null} if no one is
	 *         speaking
	 */
	public Delegate getSpeakingDelegate() {
		return speaking;
	}

	/**
	 * Sets the delegate speaking. Resets the yield type to CHAIR, enables the
	 * controls only when a delegate is speaking, and shows the default card.
	 *
	 * @param speakingDelegate
	 *            the currently speaking delegate, or {@code null} if no one is
	 *            speaking
	 */
	public void setSpeakingDelegate(Delegate speakingDelegate) {
		this.speaking = speakingDelegate;
		mcpYieldType.setEnabled(speakingDelegate != null);
		mcpYieldType.setValue(YieldType.CHAIR);
		btnYield.setEnabled(speakingDelegate != null);
		layout.show(this, new String());
	}

	/**
	 * Sets the committee.
	 *
	 * @param committee
	 *            the new committee
	 */
	public void setCommittee(Committee committee) {
		this.committee = committee;
	}

	/**
	 * Fires a {@link YieldEvent} to all registered listeners.
	 *
	 * @param yieldEvent
	 *            the event to fire
	 */
	protected void fireYieldEvent(YieldEvent yieldEvent) {
		for (YieldListener yl : listenerList.getListeners(YieldListener.class)) {
			yl.yield(yieldEvent);
		}
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.management.impl.openmbean;

import javax.management.openmbean.ArrayType;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeDataSupport;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.OpenDataException;
import javax.management.openmbean.OpenType;
import javax.management.openmbean.SimpleType;
import javax.management.openmbean.TabularDataSupport;
import javax.management.openmbean.TabularType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.ICoreMessage;
import org.apache.activemq.artemis.api.core.JsonUtil;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.server.MessageReference;

/**
 * Converts a broker {@link MessageReference} into a JMX {@link CompositeData}
 * view. Text messages get a text body item; every other message type falls
 * back to a byte-array body. Factories are stateless after init and shared.
 */
public final class OpenTypeSupport {

   // Shared factories: one per supported body representation.
   private static MessageOpenTypeFactory TEXT_FACTORY = new TextMessageOpenTypeFactory();

   private static MessageOpenTypeFactory BYTES_FACTORY = new BytesMessageOpenTypeFactory();

   // Utility class; not instantiable.
   private OpenTypeSupport() {
   }

   /**
    * Builds the CompositeData view for {@code ref}, choosing the text or bytes
    * factory by the core message's type byte.
    *
    * @param ref the message reference to convert
    * @param valueSizeLimit truncation limit applied to property and body values
    * @throws OpenDataException if the composite fields do not match the type
    */
   public static CompositeData convert(MessageReference ref, int valueSizeLimit) throws OpenDataException {
      CompositeType ct;
      ICoreMessage message = ref.getMessage().toCore();
      Map<String, Object> fields;
      byte type = message.getType();
      switch(type) {
         case Message.TEXT_TYPE:
            ct = TEXT_FACTORY.getCompositeType();
            fields = TEXT_FACTORY.getFields(ref, valueSizeLimit);
            break;
         default:
            ct = BYTES_FACTORY.getCompositeType();
            fields = BYTES_FACTORY.getFields(ref, valueSizeLimit);
            break;
      }
      return new CompositeDataSupport(ct, fields);
   }

   /**
    * Base factory: lazily builds the CompositeType describing a message's JMX
    * items and extracts the corresponding field values. Subclasses add a body
    * item in {@link #init} and populate it in {@link #getFields}.
    */
   static class MessageOpenTypeFactory {

      private CompositeType compositeType;
      // Parallel lists built by addItem(); turned into arrays in createCompositeType().
      private final List<String> itemNamesList = new ArrayList<>();
      private final List<String> itemDescriptionsList = new ArrayList<>();
      private final List<OpenType> itemTypesList = new ArrayList<>();

      // One key->value tabular type per property value type.
      protected TabularType stringPropertyTabularType;
      protected TabularType booleanPropertyTabularType;
      protected TabularType bytePropertyTabularType;
      protected TabularType shortPropertyTabularType;
      protected TabularType intPropertyTabularType;
      protected TabularType longPropertyTabularType;
      protected TabularType floatPropertyTabularType;
      protected TabularType doublePropertyTabularType;

      // Rows of {item name, tabular type, value class} driving getFields().
      protected Object[][] typedPropertyFields;

      protected String getTypeName() {
         return Message.class.getName();
      }

      /**
       * Lazily initializes and returns the CompositeType; safe to call repeatedly.
       * NOTE(review): not synchronized — presumably only used from the management
       * thread; verify if concurrent access is possible.
       */
      public CompositeType getCompositeType() throws OpenDataException {
         if (compositeType == null) {
            init();
            compositeType = createCompositeType();
         }
         return compositeType;
      }

      /**
       * Registers all standard message items and the typed-property tabular types.
       */
      protected void init() throws OpenDataException {
         addItem(CompositeDataConstants.ADDRESS, CompositeDataConstants.ADDRESS_DESCRIPTION, SimpleType.STRING);
         addItem(CompositeDataConstants.MESSAGE_ID, CompositeDataConstants.MESSAGE_ID_DESCRIPTION, SimpleType.STRING);
         addItem(CompositeDataConstants.USER_ID, CompositeDataConstants.USER_ID_DESCRIPTION, SimpleType.STRING);
         addItem(CompositeDataConstants.TYPE, CompositeDataConstants.TYPE_DESCRIPTION, SimpleType.BYTE);
         addItem(CompositeDataConstants.DURABLE, CompositeDataConstants.DURABLE_DESCRIPTION, SimpleType.BOOLEAN);
         addItem(CompositeDataConstants.EXPIRATION, CompositeDataConstants.EXPIRATION_DESCRIPTION, SimpleType.LONG);
         addItem(CompositeDataConstants.PRIORITY, CompositeDataConstants.PRIORITY_DESCRIPTION, SimpleType.BYTE);
         addItem(CompositeDataConstants.REDELIVERED, CompositeDataConstants.REDELIVERED_DESCRIPTION, SimpleType.BOOLEAN);
         addItem(CompositeDataConstants.TIMESTAMP, CompositeDataConstants.TIMESTAMP_DESCRIPTION, SimpleType.LONG);
         addItem(CompositeDataConstants.LARGE_MESSAGE, CompositeDataConstants.LARGE_MESSAGE_DESCRIPTION, SimpleType.BOOLEAN);
         addItem(CompositeDataConstants.PERSISTENT_SIZE, CompositeDataConstants.PERSISTENT_SIZE_DESCRIPTION, SimpleType.LONG);
         addItem(CompositeDataConstants.PROPERTIES, CompositeDataConstants.PROPERTIES_DESCRIPTION, SimpleType.STRING);

         // now lets expose the type safe properties
         stringPropertyTabularType = createTabularType(String.class, SimpleType.STRING);
         booleanPropertyTabularType = createTabularType(Boolean.class, SimpleType.BOOLEAN);
         bytePropertyTabularType = createTabularType(Byte.class, SimpleType.BYTE);
         shortPropertyTabularType = createTabularType(Short.class, SimpleType.SHORT);
         intPropertyTabularType = createTabularType(Integer.class, SimpleType.INTEGER);
         longPropertyTabularType = createTabularType(Long.class, SimpleType.LONG);
         floatPropertyTabularType = createTabularType(Float.class, SimpleType.FLOAT);
         doublePropertyTabularType = createTabularType(Double.class, SimpleType.DOUBLE);

         addItem(CompositeDataConstants.STRING_PROPERTIES, CompositeDataConstants.STRING_PROPERTIES_DESCRIPTION, stringPropertyTabularType);
         addItem(CompositeDataConstants.BOOLEAN_PROPERTIES, CompositeDataConstants.BOOLEAN_PROPERTIES_DESCRIPTION, booleanPropertyTabularType);
         addItem(CompositeDataConstants.BYTE_PROPERTIES, CompositeDataConstants.BYTE_PROPERTIES_DESCRIPTION, bytePropertyTabularType);
         addItem(CompositeDataConstants.SHORT_PROPERTIES, CompositeDataConstants.SHORT_PROPERTIES_DESCRIPTION, shortPropertyTabularType);
         addItem(CompositeDataConstants.INT_PROPERTIES, CompositeDataConstants.INT_PROPERTIES_DESCRIPTION, intPropertyTabularType);
         addItem(CompositeDataConstants.LONG_PROPERTIES, CompositeDataConstants.LONG_PROPERTIES_DESCRIPTION, longPropertyTabularType);
         addItem(CompositeDataConstants.FLOAT_PROPERTIES, CompositeDataConstants.FLOAT_PROPERTIES_DESCRIPTION, floatPropertyTabularType);
         addItem(CompositeDataConstants.DOUBLE_PROPERTIES, CompositeDataConstants.DOUBLE_PROPERTIES_DESCRIPTION, doublePropertyTabularType);

         typedPropertyFields = new Object[][] {
            {CompositeDataConstants.STRING_PROPERTIES, stringPropertyTabularType, String.class},
            {CompositeDataConstants.BOOLEAN_PROPERTIES, booleanPropertyTabularType, Boolean.class},
            {CompositeDataConstants.BYTE_PROPERTIES, bytePropertyTabularType, Byte.class},
            {CompositeDataConstants.SHORT_PROPERTIES, shortPropertyTabularType, Short.class},
            {CompositeDataConstants.INT_PROPERTIES, intPropertyTabularType, Integer.class},
            {CompositeDataConstants.LONG_PROPERTIES, longPropertyTabularType, Long.class},
            {CompositeDataConstants.FLOAT_PROPERTIES, floatPropertyTabularType, Float.class},
            {CompositeDataConstants.DOUBLE_PROPERTIES, doublePropertyTabularType, Double.class}
         };
      }

      /**
       * Extracts the item values for {@code ref} keyed by item name. Property
       * maps are split into per-type tabular data; empty tables map to null.
       *
       * @param valueSizeLimit truncation limit for property/body values
       */
      public Map<String, Object> getFields(MessageReference ref, int valueSizeLimit) throws OpenDataException {
         Map<String, Object> rc = new HashMap<>();
         ICoreMessage m = ref.getMessage().toCore();
         rc.put(CompositeDataConstants.MESSAGE_ID, "" + m.getMessageID());
         if (m.getUserID() != null) {
            rc.put(CompositeDataConstants.USER_ID, "ID:" + m.getUserID().toString());
         } else {
            rc.put(CompositeDataConstants.USER_ID, "");
         }
         rc.put(CompositeDataConstants.ADDRESS, m.getAddress() == null ? "" : m.getAddress().toString());
         rc.put(CompositeDataConstants.TYPE, m.getType());
         rc.put(CompositeDataConstants.DURABLE, m.isDurable());
         rc.put(CompositeDataConstants.EXPIRATION, m.getExpiration());
         rc.put(CompositeDataConstants.TIMESTAMP, m.getTimestamp());
         rc.put(CompositeDataConstants.PRIORITY, m.getPriority());
         // "Redelivered" is inferred from the delivery count, not a flag.
         rc.put(CompositeDataConstants.REDELIVERED, ref.getDeliveryCount() > 1);
         rc.put(CompositeDataConstants.LARGE_MESSAGE, m.isLargeMessage());
         try {
            rc.put(CompositeDataConstants.PERSISTENT_SIZE, m.getPersistentSize());
         } catch (final ActiveMQException e1) {
            // NOTE(review): -1 autoboxes to Integer while the item type is
            // SimpleType.LONG — verify CompositeDataSupport accepts this;
            // -1L would match the declared item type.
            rc.put(CompositeDataConstants.PERSISTENT_SIZE, -1);
         }
         Map<String, Object> propertyMap = m.toPropertyMap(valueSizeLimit);
         rc.put(CompositeDataConstants.PROPERTIES, JsonUtil.truncate("" + propertyMap, valueSizeLimit));

         // only populate if there are some values
         TabularDataSupport tabularData;
         for (Object[] typedPropertyInfo : typedPropertyFields) {
            tabularData = null;
            try {
               tabularData = createTabularData(propertyMap, (TabularType) typedPropertyInfo[1], (Class) typedPropertyInfo[2]);
            } catch (Exception ignored) {
               // best-effort: a malformed property set leaves this table null
            }
            if (tabularData != null && !tabularData.isEmpty()) {
               rc.put((String) typedPropertyInfo[0], tabularData);
            } else {
               rc.put((String) typedPropertyInfo[0], null);
            }
         }
         return rc;
      }

      /** Null-safe toString helper. */
      protected String toString(Object value) {
         if (value == null) {
            return null;
         }
         return value.toString();
      }

      /** Assembles the CompositeType from the lists built by addItem(). */
      protected CompositeType createCompositeType() throws OpenDataException {
         String[] itemNames = itemNamesList.toArray(new String[itemNamesList.size()]);
         String[] itemDescriptions = itemDescriptionsList.toArray(new String[itemDescriptionsList.size()]);
         OpenType[] itemTypes = itemTypesList.toArray(new OpenType[itemTypesList.size()]);
         return new CompositeType(getTypeName(), getDescription(), itemNames, itemDescriptions, itemTypes);
      }

      protected String getDescription() {
         return getTypeName();
      }

      /**
       * Builds a String->{@code type} tabular type with "key" as the index column.
       */
      protected <T> TabularType createTabularType(Class<T> type, OpenType openType) throws OpenDataException {
         String typeName = "java.util.Map<java.lang.String, " + type.getName() + ">";
         String[] keyValue = new String[]{"key", "value"};
         OpenType[] openTypes = new OpenType[]{SimpleType.STRING, openType};
         CompositeType rowType = new CompositeType(typeName, typeName, keyValue, keyValue, openTypes);
         return new TabularType(typeName, typeName, rowType, new String[]{"key"});
      }

      /**
       * Copies entries whose value is an instance of {@code valueType} into a
       * tabular table; SimpleString values are accepted as Strings.
       */
      protected TabularDataSupport createTabularData(Map<String, Object> entries,
                                                     TabularType type,
                                                     Class valueType) throws IOException, OpenDataException {
         TabularDataSupport answer = new TabularDataSupport(type);
         for (String key : entries.keySet()) {
            Object value = entries.get(key);
            if (valueType.isInstance(value)) {
               CompositeDataSupport compositeData = createTabularRowValue(type, key, value);
               answer.put(compositeData);
            } else if (valueType == String.class && value instanceof SimpleString) {
               CompositeDataSupport compositeData = createTabularRowValue(type, key, value.toString());
               answer.put(compositeData);
            }
         }
         return answer;
      }

      /** Builds one key/value row for a tabular table. */
      protected CompositeDataSupport createTabularRowValue(TabularType type,
                                                           String key,
                                                           Object value) throws OpenDataException {
         Map<String, Object> fields = new HashMap<>();
         fields.put("key", key);
         fields.put("value", value);
         return new CompositeDataSupport(type.getRowType(), fields);
      }

      /** Records one composite item (name, description, open type). */
      protected void addItem(String name, String description, OpenType type) {
         itemNamesList.add(name);
         itemDescriptionsList.add(description);
         itemTypesList.add(type);
      }
   }

   /**
    * Factory for non-text messages: adds a byte-array BODY item filled from a
    * read-only copy of the body buffer (empty for large messages).
    */
   static class BytesMessageOpenTypeFactory extends MessageOpenTypeFactory {

      protected ArrayType body;

      @Override
      protected void init() throws OpenDataException {
         super.init();
         body = new ArrayType(SimpleType.BYTE, true);
         addItem(CompositeDataConstants.BODY, CompositeDataConstants.BODY_DESCRIPTION, body);
      }

      @Override
      public Map<String, Object> getFields(MessageReference ref, int valueSizeLimit) throws OpenDataException {
         Map<String, Object> rc = super.getFields(ref, valueSizeLimit);
         ICoreMessage m = ref.getMessage().toCore();
         if (!m.isLargeMessage()) {
            ActiveMQBuffer bodyCopy = m.getReadOnlyBodyBuffer();
            // Read one byte past the limit so JsonUtil.truncate can tell an
            // exact-length body apart from a truncated one.
            byte[] bytes = new byte[bodyCopy.readableBytes() <= valueSizeLimit ? bodyCopy.readableBytes() : valueSizeLimit + 1];
            bodyCopy.readBytes(bytes);
            rc.put(CompositeDataConstants.BODY, JsonUtil.truncate(bytes, valueSizeLimit));
         } else {
            // Large message bodies are not materialized over JMX.
            rc.put(CompositeDataConstants.BODY, new byte[0]);
         }
         return rc;
      }
   }

   /**
    * Factory for TEXT_TYPE messages: adds a String TEXT_BODY item. Compressed
    * and large messages are represented by placeholder strings.
    */
   static class TextMessageOpenTypeFactory extends MessageOpenTypeFactory {

      protected SimpleType text;

      @Override
      protected void init() throws OpenDataException {
         super.init();
         addItem(CompositeDataConstants.TEXT_BODY, CompositeDataConstants.TEXT_BODY, SimpleType.STRING);
      }

      @Override
      public Map<String, Object> getFields(MessageReference ref, int valueSizeLimit) throws OpenDataException {
         Map<String, Object> rc = super.getFields(ref, valueSizeLimit);
         ICoreMessage m = ref.getMessage().toCore();
         if (!m.isLargeMessage()) {
            if (m.containsProperty(Message.HDR_LARGE_COMPRESSED)) {
               rc.put(CompositeDataConstants.TEXT_BODY, "[compressed]");
            } else {
               SimpleString text = m.getReadOnlyBodyBuffer().readNullableSimpleString();
               rc.put(CompositeDataConstants.TEXT_BODY, text != null ? JsonUtil.truncate(text.toString(), valueSizeLimit) : "");
            }
         } else {
            rc.put(CompositeDataConstants.TEXT_BODY, "[large message]");
         }
         return rc;
      }
   }
}
package org.hisp.dhis.dbms;

/*
 * Copyright (c) 2004-2016, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.SessionFactory;
import org.hisp.dhis.cache.HibernateCacheManager;
import org.springframework.jdbc.BadSqlGrammarException;
import org.springframework.jdbc.core.JdbcTemplate;

/**
 * {@link DbmsManager} implementation backed by Hibernate and a Spring
 * {@link JdbcTemplate}. Used to wipe the database between tests and to
 * inspect/flush Hibernate session state.
 *
 * @author Lars Helge Overland
 */
public class HibernateDbmsManager
    implements DbmsManager
{
    private static final Log log = LogFactory.getLog( HibernateDbmsManager.class );

    /**
     * Tables emptied by {@link #emptyDatabase()} with DELETE statements.
     * The order is significant: child (referencing) tables come before the
     * tables they reference so foreign key constraints are not violated.
     * Do not sort or deduplicate this list; a few names appear twice, which
     * is harmless since the second DELETE is a no-op on an empty table.
     */
    private static final String[] TABLES_TO_EMPTY = {
        "translation", "importobject", "importdatavalue", "constant", "sqlview",

        "datavalue_audit", "datavalueaudit", "datavalue", "completedatasetregistration",

        "reporttable_categorydimensions", "reporttable_categoryoptiongroups",
        "reporttable_dataelements", "reporttable_datasets", "reporttable_indicators",
        "reporttable_periods", "reporttable_itemorgunitgroups", "reporttable_organisationunits",
        "reporttable_dataelementgroups", "reporttable_orgunitgroups", "reporttable_columns",
        "reporttable_rows", "reporttable_filters", "reporttable",

        "chart_periods", "chart_orgunitlevels", "chart_orgunitgroups",
        "chart_organisationunits", "chart_itemorgunitgroups", "chart_indicators",
        "chart_filters", "chart_datasets", "chart_dataelements",
        "chart_dataelementoperands", "chart_dataelementgroups", "chart_categoryoptiongroups",
        "chart_categorydimensions", "chart",

        "categoryoptiongroupusergroupaccesses", "categoryoptiongroupsetusergroupaccesses",
        "dataelementcategoryoptionusergroupaccesses", "usergroupusergroupaccesses",
        "usergroupaccess",

        "users_catdimensionconstraints", "userrolemembers", "userroledataset",
        "userroleauthorities", "usergroupmembers", "usergroup", "userdatavieworgunits",
        "usermembership", "userrole",

        "orgunitgroupsetmembers", "orgunitgroupset", "orgunitgroupmembers", "orgunitgroup",

        "validationrulegroupusergroupstoalert", "validationrulegroupmembers",
        "validationrulegroup", "validationrule",

        "dataapproval", "lockexception",

        "datasetsource", "datasetmembers", "datasetindicators", "datasetoperands", "dataset",

        "dataapprovalworkflowlevels", "dataapprovalworkflow", "dataapprovallevel",

        "trackedentitydatavalue", "programstageinstance", "programinstance",
        "programstage_dataelements", "programstage", "program_organisationunits",
        "program", "trackedentityinstance",

        "minmaxdataelement", "expressiondataelement", "expressionoptioncombo",
        "calculateddataelement",

        "dataelementgroupsetmembers", "dataelementgroupset", "dataelementgroupmembers",
        "dataelementgroup", "dataelementaggregationlevels", "dataelementoperand", "dataelement",

        "categoryoptioncombos_categoryoptions", "categorycombos_optioncombos",
        "categorycombos_categories", "categories_categoryoptions",
        "categoryoption_organisationunits",

        "orgunitgroupsetmembers", "orgunitgroupmembers", "orgunitgroupset", "orgunitgroup",
        "organisationunit",

        "version", "mocksource", "period",

        "indicatorgroupsetmembers", "indicatorgroupset", "indicatorgroupmembers",
        "indicatorgroup", "indicator", "indicatortype",

        "categoryoptiongroupsetmembers", "categoryoptiongroupset",
        "categoryoptiongroupmembers", "categoryoptiongroup",

        "expression", "categoryoptioncombo", "categorycombo", "dataelementcategory",
        "dataelementcategoryoption",

        "optionvalue", "optionset", "systemsetting", "users", "userinfo"
    };

    /**
     * Generated aggregate/resource tables removed by {@link #emptyDatabase()}
     * with DROP TABLE, since they are not part of the static schema.
     */
    private static final String[] TABLES_TO_DROP = {
        "aggregateddatavalue", "aggregatedindicatorvalue", "aggregateddatasetcompleteness",
        "aggregatedorgunitdatavalue", "aggregatedorgunitindicatorvalue",
        "aggregatedorgunitdatasetcompleteness",

        "_categoryoptioncomboname", "_categoryoptiongroupsetstructure", "_categorystructure",
        "_dataelementcategoryoptioncombo", "_dataelementgroupsetstructure",
        "_dataelementstructure", "_dateperiodstructure", "_indicatorgroupsetstructure",
        "_organisationunitgroupsetstructure", "_orgunitstructure", "_periodstructure"
    };

    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private JdbcTemplate jdbcTemplate;

    public void setJdbcTemplate( JdbcTemplate jdbcTemplate )
    {
        this.jdbcTemplate = jdbcTemplate;
    }

    private SessionFactory sessionFactory;

    public void setSessionFactory( SessionFactory sessionFactory )
    {
        this.sessionFactory = sessionFactory;
    }

    private HibernateCacheManager cacheManager;

    public void setCacheManager( HibernateCacheManager cacheManager )
    {
        this.cacheManager = cacheManager;
    }

    // -------------------------------------------------------------------------
    // DbmsManager implementation
    // -------------------------------------------------------------------------

    /**
     * Empties all known tables, drops all generated tables and clears the
     * Hibernate second-level cache. Missing tables are logged and skipped.
     */
    @Override
    public void emptyDatabase()
    {
        for ( String table : TABLES_TO_EMPTY )
        {
            emptyTable( table );
        }

        for ( String table : TABLES_TO_DROP )
        {
            dropTable( table );
        }

        log.debug( "Cleared database contents" );

        cacheManager.clearCache();

        log.debug( "Cleared Hibernate cache" );
    }

    /**
     * Flushes pending changes to the database and evicts all entities from
     * the current Hibernate session.
     */
    @Override
    public void clearSession()
    {
        sessionFactory.getCurrentSession().flush();
        sessionFactory.getCurrentSession().clear();
    }

    /**
     * Deletes all rows from the given table. A table which does not exist
     * (BadSqlGrammarException) is logged at debug level and ignored.
     *
     * @param table the table name, trusted / internal only (concatenated into SQL).
     */
    @Override
    public void emptyTable( String table )
    {
        try
        {
            jdbcTemplate.update( "DELETE FROM " + table );
        }
        catch ( BadSqlGrammarException ex )
        {
            log.debug( "Table " + table + " does not exist" );
        }
    }

    /**
     * Indicates whether a base table with the given name exists, using a
     * parameterized query against information_schema to avoid SQL injection
     * through the table name.
     *
     * @param tableName the table name to look up.
     * @return true if a BASE TABLE with the given name exists.
     */
    @Override
    public boolean tableExists( String tableName )
    {
        final String sql =
            "select table_name from information_schema.tables " +
            "where table_name = ? " +
            "and table_type = 'BASE TABLE'";

        List<Object> tables = jdbcTemplate.queryForList( sql, Object.class, tableName );

        return tables != null && !tables.isEmpty();
    }

    // -------------------------------------------------------------------------
    // Supportive methods
    // -------------------------------------------------------------------------

    /**
     * Drops the given table. A table which does not exist is logged at debug
     * level and ignored.
     *
     * @param table the table name, trusted / internal only (concatenated into SQL).
     */
    private void dropTable( String table )
    {
        try
        {
            jdbcTemplate.execute( "DROP TABLE " + table );
        }
        catch ( BadSqlGrammarException ex )
        {
            log.debug( "Table " + table + " does not exist" );
        }
    }
}
/*
 * Copyright 2003 - 2021 The eFaps Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.efaps.esjp.contacts.taxid;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.WordUtils;
import org.efaps.admin.datamodel.Classification;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Return;
import org.efaps.admin.event.Return.ReturnValues;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsClassLoader;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.db.Instance;
import org.efaps.db.PrintQuery;
import org.efaps.db.SelectBuilder;
import org.efaps.eql.EQL;
import org.efaps.esjp.ci.CIContacts;
import org.efaps.esjp.ci.CIFormContacts;
import org.efaps.esjp.common.AbstractCommon;
import org.efaps.esjp.db.InstanceUtils;
import org.efaps.esjp.ui.html.HtmlTable;
import org.efaps.update.AppDependency;
import org.efaps.update.util.InstallationException;
import org.efaps.util.EFapsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The Class TaxIdInfo.
 *
 * Looks up taxpayer information for a tax id (read either from the request
 * form or from the TaxNumber of an existing contact), renders it as an HTML
 * snippet, and can create a new classified contact from the looked-up data.
 * NOTE(review): the lookup is delegated to {@code Request.getTaxpayer} —
 * presumably an external tax-registry web service; confirm in Request.
 *
 * @author The eFaps Team
 */
@EFapsUUID("1399e978-ea48-4aff-bb03-a0ca7e10c204")
@EFapsApplication("eFapsApp-Contacts")
public abstract class TaxIdInfo_Base
    extends AbstractCommon
{

    /**
     * Logger for this class.
     */
    private static final Logger LOG = LoggerFactory.getLogger(TaxIdInfo.class);

    /**
     * Gets the tax id info.
     *
     * Reads the tax id from the request form field; if empty and the current
     * instance is a contact, falls back to the TaxNumber stored on the
     * contact's organisation classification. The looked-up taxpayer data is
     * returned as an HTML snippet.
     *
     * @param _parameter Parameter as passed by the eFaps API
     * @return the tax id info
     * @throws EFapsException on error
     */
    public Return getTaxIdInfoUI(final Parameter _parameter)
        throws EFapsException
    {
        final Return ret = new Return();
        String taxId = _parameter.getParameterValue(CIFormContacts.Contacts_TaxIdRequestForm.taxid.name);
        if (StringUtils.isEmpty(taxId)
                        && InstanceUtils.isKindOf(_parameter.getInstance(), CIContacts.ContactAbstract)) {
            // no tax id given: read it from the contact's organisation classification
            final PrintQuery print = new PrintQuery(_parameter.getInstance());
            final SelectBuilder selTaxId = SelectBuilder.get().clazz(CIContacts.ClassOrganisation)
                            .attribute(CIContacts.ClassOrganisation.TaxNumber);
            print.addSelect(selTaxId);
            print.execute();
            taxId = print.getSelect(selTaxId);
        }
        final Request request = new Request();
        final var dto = request.getTaxpayer(taxId);
        ret.put(ReturnValues.SNIPLETT, getSnipplet4Taxpayer(_parameter, dto));
        return ret;
    }

    /**
     * Renders the given taxpayer data as a two-column HTML key/value table,
     * or a "not found" message (including the requested tax id) when the
     * dto is {@code null}.
     *
     * @param _parameter Parameter as passed by the eFaps API
     * @param _dto the taxpayer data, may be {@code null}
     * @return HTML snippet to be shown in the UI
     * @throws EFapsException on error
     */
    public CharSequence getSnipplet4Taxpayer(final Parameter _parameter,
                                             final TaxpayerDto _dto)
        throws EFapsException
    {
        CharSequence ret;
        if (_dto != null) {
            final HtmlTable table = new HtmlTable();
            table.table()
                .tr()
                    .th(getDBProperty("KeyHeader"))
                    .th(getDBProperty("ValueHeader"))
                .trC()
                .tr()
                    .td(getDBProperty("taxId"))
                    .td(_dto.getId())
                .trC()
                .tr()
                    .td(getDBProperty("name"))
                    .td(_dto.getName())
                .trC();
            ret = table.toString();
        } else {
            final String taxId = _parameter.getParameterValue(CIFormContacts.Contacts_TaxIdRequestForm.taxid.name);
            ret = getFormatedDBProperty("NotFound", taxId);
        }
        return ret;
    }

    /**
     * Gets the address label.
     *
     * Resolves a human-readable city/location label for the given ubigeo
     * code. The "Ubicaciones" localization app is optional, so it is invoked
     * reflectively only when its AppDependency is met; reflection failures
     * are logged and result in an empty label.
     *
     * @param _parameter Parameter as passed by the eFaps API
     * @param _ubigeo the ubigeo
     * @return the address label
     * @throws EFapsException on error
     */
    public CharSequence getCityLabel(final Parameter _parameter,
                                     final String _ubigeo)
        throws EFapsException
    {
        final StringBuilder ret = new StringBuilder();
        try {
            if (AppDependency.getAppDependency("eFapsLocalizations-Ubicaciones").isMet()) {
                try {
                    // the class is loaded lazily via the eFaps class loader
                    // because the app is an optional dependency
                    final Class<?> clazz = Class.forName("org.efaps.esjp.ubicaciones.Ubicaciones", false,
                                    EFapsClassLoader.getInstance());
                    final Method method = clazz.getMethod("getAddressLabel", Parameter.class, String.class);
                    ret.append(method.invoke(clazz.getDeclaredConstructor().newInstance(), _parameter, _ubigeo));
                } catch (ClassNotFoundException | NoSuchMethodException | SecurityException | IllegalAccessException
                                | IllegalArgumentException | InvocationTargetException | InstantiationException e) {
                    TaxIdInfo_Base.LOG.error("Catched error", e);
                }
            }
        } catch (final InstallationException e) {
            TaxIdInfo_Base.LOG.error("Catched error", e);
            throw new EFapsException("AppDependency", e);
        }
        return ret;
    }

    /**
     * Checks if is not empty.
     *
     * Also treats a literal "-" as empty, since the registry uses it as a
     * placeholder for missing address parts.
     *
     * @param _value the value
     * @return true, if is not empty
     */
    protected boolean isNotEmpty(final String _value)
    {
        return StringUtils.isNoneEmpty(_value) && !_value.equals("-");
    }

    /**
     * Creates a new contact from the given taxpayer data. The contact is
     * inserted with its Class, ClassOrganisation (carrying the tax number)
     * and ClassLocation (carrying the formatted street and city address)
     * classifications; optionally a ClassClient classification is added.
     * For each classification both the relation row and the classification
     * row itself are inserted, as required by the eFaps data model.
     *
     * @param _dto the taxpayer data, may be {@code null}
     * @param _isClient whether to additionally classify the contact as client
     * @return the Instance of the created contact, or {@code null} if
     *         {@code _dto} was {@code null}
     * @throws EFapsException on error
     */
    public Instance createContactFromTaxpayerDto(final TaxpayerDto _dto,
                                                 final boolean _isClient)
        throws EFapsException
    {
        Instance ret = null;
        if (_dto != null) {
            ret = EQL.builder().insert(CIContacts.Contact)
                .set(CIContacts.Contact.Name, _dto.getName())
                .set(CIContacts.Contact.Status, CIContacts.ContactStatus.Active)
                .stmt().execute();

            final var classClass = (Classification) CIContacts.Class.getType();
            EQL.builder().insert(classClass.getClassifyRelationType())
                .set(classClass.getRelLinkAttributeName(), String.valueOf(ret.getId()))
                .set(classClass.getRelTypeAttributeName(), String.valueOf(classClass.getId()))
                .stmt().execute();
            EQL.builder().insert(classClass)
                .set(classClass.getLinkAttributeName(), String.valueOf(ret.getId()))
                .stmt().execute();

            final var orgClass = (Classification) CIContacts.ClassOrganisation.getType();
            EQL.builder().insert(orgClass.getClassifyRelationType())
                .set(orgClass.getRelLinkAttributeName(), String.valueOf(ret.getId()))
                .set(orgClass.getRelTypeAttributeName(), String.valueOf(orgClass.getId()))
                .stmt().execute();
            EQL.builder().insert(orgClass)
                .set(orgClass.getLinkAttributeName(), String.valueOf(ret.getId()))
                .set(CIContacts.ClassOrganisation.TaxNumber, _dto.getId())
                .stmt().execute();

            final var locationClass = (Classification) CIContacts.ClassLocation.getType();
            EQL.builder().insert(locationClass.getClassifyRelationType())
                .set(locationClass.getRelLinkAttributeName(), String.valueOf(ret.getId()))
                .set(locationClass.getRelTypeAttributeName(), String.valueOf(locationClass.getId()))
                .stmt().execute();
            EQL.builder().insert(locationClass)
                .set(locationClass.getLinkAttributeName(), String.valueOf(ret.getId()))
                .set(CIContacts.ClassLocation.LocationAdressStreet, getAdressStreet(_dto))
                .set(CIContacts.ClassLocation.LocationAdressCity, getAdressCity(_dto))
                .stmt().execute();

            if (_isClient) {
                final var classClient = (Classification) CIContacts.ClassClient.getType();
                EQL.builder().insert(classClient.getClassifyRelationType())
                    .set(classClient.getRelLinkAttributeName(), String.valueOf(ret.getId()))
                    .set(classClient.getRelTypeAttributeName(), String.valueOf(classClient.getId()))
                    .stmt().execute();
                EQL.builder().insert(classClient)
                    .set(classClient.getLinkAttributeName(), String.valueOf(ret.getId()))
                    .stmt().execute();
            }
        }
        return ret;
    }

    /**
     * Builds the formatted street line from the taxpayer data, e.g.
     * "&lt;type&gt; &lt;street&gt; Nro. &lt;n&gt; Int. &lt;i&gt; Dep. &lt;d&gt;",
     * skipping parts that are empty or the "-" placeholder.
     *
     * @param _dto the taxpayer data
     * @return the formatted street address (may end with a trailing space)
     */
    public String getAdressStreet(final TaxpayerDto _dto)
    {
        final var ret = new StringBuilder();
        if (isNotEmpty(_dto.getStreetType())) {
            ret.append(_dto.getStreetType()).append(" ");
        }
        if (_dto.getStreet() != null) {
            // registry returns street names in upper case; normalize casing
            ret.append(WordUtils.capitalizeFully(_dto.getStreet())).append(" ");
        }
        if (isNotEmpty(_dto.getStreetNumber())) {
            ret.append("Nro. ").append(_dto.getStreetNumber()).append(" ");
        }
        if (isNotEmpty(_dto.getStreetInterior())) {
            ret.append("Int. ").append(_dto.getStreetInterior()).append(" ");
        }
        if (isNotEmpty(_dto.getApartmentNumber())) {
            ret.append("Dep. ").append(_dto.getApartmentNumber()).append(" ");
        }
        return ret.toString();
    }

    /**
     * Builds the city line as "department - province - district",
     * skipping parts that are empty or the "-" placeholder.
     *
     * @param _dto the taxpayer data
     * @return the formatted city string (may end with a trailing " - ")
     */
    public String getAdressCity(final TaxpayerDto _dto)
    {
        final var ret = new StringBuilder();
        if (isNotEmpty(_dto.getDepartment())) {
            ret.append(_dto.getDepartment()).append(" - ");
        }
        if (isNotEmpty(_dto.getProvince())) {
            ret.append(_dto.getProvince()).append(" - ");
        }
        if (isNotEmpty(_dto.getDistrict())) {
            ret.append(_dto.getDistrict()).append(" - ");
        }
        return ret.toString();
    }
}
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.client;

import com.google.gerrit.client.ui.AccountGroupSuggestOracle;
import com.google.gerrit.client.ui.AccountSuggestOracle;
import com.google.gerrit.client.ui.ProjectNameSuggestOracle;
import com.google.gwt.user.client.ui.SuggestOracle;
import com.google.gwtexpui.safehtml.client.HighlightSuggestOracle;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

/**
 * Suggest oracle for the change search box. Completes the last word of the
 * query: operator names come from a fixed {@link #suggestions} set, and
 * operator *values* (e.g. after "project:" or "owner:") are delegated to a
 * matching {@link ParamSuggester} backed by a dedicated oracle.
 */
public class SearchSuggestOracle extends HighlightSuggestOracle {
  // Value-completion oracles, tried in order; the owner:/reviewer: oracle is
  // wrapped so that the literal "self" is offered alongside account matches.
  private static final List<ParamSuggester> paramSuggester = Arrays.asList(
      new ParamSuggester(Arrays.asList("project:", "parentproject:"),
          new ProjectNameSuggestOracle()),
      new ParamSuggester(Arrays.asList("owner:", "reviewer:"),
          new AccountSuggestOracle() {
            @Override
            public void onRequestSuggestions(final Request request, final Callback done) {
              super.onRequestSuggestions(request, new Callback() {
                @Override
                public void onSuggestionsReady(final Request request,
                    final Response response) {
                  // Append "self" when the typed prefix matches it, so
                  // owner:self / reviewer:self can be completed too.
                  if ("self".startsWith(request.getQuery())) {
                    final ArrayList<SuggestOracle.Suggestion> r =
                        new ArrayList<>(response.getSuggestions().size() + 1);
                    r.addAll(response.getSuggestions());
                    r.add(new SuggestOracle.Suggestion() {
                      @Override
                      public String getDisplayString() {
                        return getReplacementString();
                      }

                      @Override
                      public String getReplacementString() {
                        return "self";
                      }
                    });
                    response.setSuggestions(r);
                  }
                  done.onSuggestionsReady(request, response);
                }
              });
            }
          }),
      new ParamSuggester(Arrays.asList("ownerin:", "reviewerin:"),
          new AccountGroupSuggestOracle()));

  // Sorted set of known operators/values; TreeSet ordering is required by
  // the tailSet() prefix scan in onRequestSuggestions().
  private static final TreeSet<String> suggestions = new TreeSet<>();
  static {
    suggestions.add("age:");
    suggestions.add("age:1week"); // Give an example age

    suggestions.add("change:");

    suggestions.add("owner:");
    suggestions.add("owner:self");
    suggestions.add("ownerin:");
    suggestions.add("author:");
    suggestions.add("committer:");

    suggestions.add("reviewer:");
    suggestions.add("reviewer:self");
    suggestions.add("reviewerin:");
    suggestions.add("reviewedby:");

    suggestions.add("commit:");
    suggestions.add("comment:");
    suggestions.add("message:");
    suggestions.add("commentby:");
    suggestions.add("from:");
    suggestions.add("file:");
    suggestions.add("conflicts:");

    suggestions.add("project:");
    suggestions.add("projects:");
    suggestions.add("parentproject:");

    suggestions.add("branch:");
    suggestions.add("topic:");
    suggestions.add("intopic:");
    suggestions.add("ref:");
    suggestions.add("tr:");
    suggestions.add("bug:");
    suggestions.add("label:");
    suggestions.add("query:");

    suggestions.add("has:");
    suggestions.add("has:draft");
    suggestions.add("has:edit");
    suggestions.add("has:star");

    suggestions.add("is:");
    suggestions.add("is:starred");
    suggestions.add("is:watched");
    suggestions.add("is:reviewed");
    suggestions.add("is:owner");
    suggestions.add("is:reviewer");
    suggestions.add("is:open");
    suggestions.add("is:pending");
    suggestions.add("is:draft");
    suggestions.add("is:closed");
    suggestions.add("is:submitted");
    suggestions.add("is:merged");
    suggestions.add("is:abandoned");
    suggestions.add("is:mergeable");

    suggestions.add("status:");
    suggestions.add("status:open");
    suggestions.add("status:pending");
    suggestions.add("status:reviewed");
    suggestions.add("status:submitted");
    suggestions.add("status:closed");
    suggestions.add("status:merged");
    suggestions.add("status:abandoned");

    suggestions.add("added:");
    suggestions.add("deleted:");
    suggestions.add("delta:");
    suggestions.add("size:");

    // hashtag: search requires NoteDb on the server
    if (Gerrit.isNoteDbEnabled()) {
      suggestions.add("hashtag:");
    }

    suggestions.add("AND");
    suggestions.add("OR");
    suggestions.add("NOT");
  }

  /**
   * Returns a few common starting points when the box is empty; owner:self
   * is only offered to signed-in users.
   */
  @Override
  public void requestDefaultSuggestions(Request request, Callback done) {
    final ArrayList<SearchSuggestion> r = new ArrayList<>();
    // No text - show some default suggestions.
    r.add(new SearchSuggestion("status:open", "status:open"));
    r.add(new SearchSuggestion("age:1week", "age:1week"));
    if (Gerrit.isSignedIn()) {
      r.add(new SearchSuggestion("owner:self", "owner:self"));
    }
    done.onSuggestionsReady(request, new Response(r));
  }

  /**
   * Completes the last word of the query: first tries the value-completion
   * suggesters, then falls back to prefix-matching operator names from the
   * sorted {@link #suggestions} set.
   */
  @Override
  protected void onRequestSuggestions(Request request, Callback done) {
    final String query = request.getQuery();
    final String lastWord = getLastWord(query);
    if (lastWord == null) {
      // Starting a new word - don't show suggestions yet.
      done.onSuggestionsReady(request, null);
      return;
    }
    for (final ParamSuggester ps : paramSuggester) {
      if (ps.applicable(lastWord)) {
        ps.suggest(lastWord, request, done);
        return;
      }
    }
    final ArrayList<SearchSuggestion> r = new ArrayList<>();
    // tailSet(lastWord) starts the scan at the first candidate >= lastWord;
    // the startsWith check terminates the useful prefix range.
    for (String suggestion : suggestions.tailSet(lastWord)) {
      if ((lastWord.length() < suggestion.length()) && suggestion.startsWith(lastWord)) {
        if (suggestion.contains("self") && !Gerrit.isSignedIn()) {
          continue;
        }
        r.add(new SearchSuggestion(suggestion,
            query + suggestion.substring(lastWord.length())));
      }
    }
    done.onSuggestionsReady(request, new Response(r));
  }

  /**
   * Returns the last whitespace-separated word of the query, or null when
   * the query ends with a space (i.e. a new word is just starting).
   */
  private String getLastWord(final String query) {
    final int lastSpace = query.lastIndexOf(' ');
    if (lastSpace == query.length() - 1) {
      return null;
    }
    if (lastSpace == -1) {
      return query;
    }
    return query.substring(lastSpace + 1);
  }

  // Highlight only against the word being completed, not the whole query.
  @Override
  protected String getQueryPattern(final String query) {
    return super.getQueryPattern(getLastWord(query));
  }

  @Override
  protected boolean isHTML() {
    return true;
  }

  /**
   * A single suggestion: {@code suggestion} is what is displayed,
   * {@code fullQuery} is the complete query text inserted on selection.
   */
  private static class SearchSuggestion implements SuggestOracle.Suggestion {
    private final String suggestion;
    private final String fullQuery;

    public SearchSuggestion(String suggestion, String fullQuery) {
      this.suggestion = suggestion;
      // Add a space to the query if it is a complete operation (e.g.
      // "status:open") so the user can keep on typing.
      this.fullQuery = fullQuery.endsWith(":") ? fullQuery : fullQuery + " ";
    }

    @Override
    public String getDisplayString() {
      return suggestion;
    }

    @Override
    public String getReplacementString() {
      return fullQuery;
    }
  }

  /**
   * Completes the value part of an operator (the text after e.g.
   * "project:") by delegating to a dedicated oracle and re-prefixing the
   * returned suggestions with the original query and operator.
   */
  private static class ParamSuggester {
    private final List<String> operators;
    private final SuggestOracle parameterSuggestionOracle;

    ParamSuggester(final List<String> operators,
        final SuggestOracle parameterSuggestionOracle) {
      this.operators = operators;
      this.parameterSuggestionOracle = parameterSuggestionOracle;
    }

    // Applicable once the user has typed at least one character after the
    // operator name.
    boolean applicable(final String query) {
      final String operator = getApplicableOperator(query, operators);
      return operator != null && query.length() > operator.length();
    }

    private String getApplicableOperator(final String lastWord,
        final List<String> operators) {
      for (final String operator : operators) {
        if (lastWord.startsWith(operator)) {
          return operator;
        }
      }
      return null;
    }

    void suggest(final String lastWord, final Request request, final Callback done) {
      final String operator = getApplicableOperator(lastWord, operators);
      // Forward only the value part to the delegate oracle.
      parameterSuggestionOracle.requestSuggestions(
          new Request(lastWord.substring(operator.length()), request.getLimit()),
          new Callback() {
            @Override
            public void onSuggestionsReady(final Request req, final Response response) {
              final String query = request.getQuery();
              final List<SearchSuggestOracle.Suggestion> r =
                  new ArrayList<>(response.getSuggestions().size());
              for (final SearchSuggestOracle.Suggestion s : response
                  .getSuggestions()) {
                // Rebuild the full query: everything before the last word,
                // plus operator and (possibly quoted) suggested value.
                r.add(new SearchSuggestion(s.getDisplayString(),
                    query.substring(0, query.length() - lastWord.length())
                        + operator + quoteIfNeeded(s.getReplacementString())));
              }
              done.onSuggestionsReady(request, new Response(r));
            }

            // Values containing whitespace must be quoted to stay one operand.
            private String quoteIfNeeded(final String s) {
              if (!s.matches("^\\S*$")) {
                return "\"" + s + "\"";
              }
              return s;
            }
          });
    }
  }
}
/* * Copyright 2014 Ben Manes. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.benmanes.caffeine.cache; import static com.github.benmanes.caffeine.cache.BLCHeader.DrainStatusRef.REQUIRED; import static com.github.benmanes.caffeine.cache.testing.HasRemovalNotifications.hasRemovalNotifications; import static com.github.benmanes.caffeine.cache.testing.HasStats.hasEvictionCount; import static java.util.Arrays.asList; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import java.util.List; import java.util.Map.Entry; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.testng.annotations.Listeners; import org.testng.annotations.Test; import com.github.benmanes.caffeine.cache.Policy.Eviction; import com.github.benmanes.caffeine.cache.testing.CacheContext; import com.github.benmanes.caffeine.cache.testing.CacheProvider; import com.github.benmanes.caffeine.cache.testing.CacheSpec; import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheExecutor; import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheWeigher; import 
com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener; import com.github.benmanes.caffeine.cache.testing.CacheSpec.MaximumSize; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population; import com.github.benmanes.caffeine.cache.testing.CacheValidationListener; import com.github.benmanes.caffeine.locks.NonReentrantLock; import com.github.benmanes.caffeine.testing.Awaits; import com.github.benmanes.caffeine.testing.ConcurrentTestHarness; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ThreadFactoryBuilder; /** * The test cases for the implementation details of {@link BoundedLocalCache}. * * @author ben.manes@gmail.com (Ben Manes) */ @Listeners(CacheValidationListener.class) @Test(dataProviderClass = CacheProvider.class) public final class BoundedLocalCacheTest { final Executor executor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setDaemon(true).build()); static BoundedLocalCache<Integer, Integer> asBoundedLocalCache(Cache<Integer, Integer> cache) { return (BoundedLocalCache<Integer, Integer>) cache.asMap(); } @Test public void putWeighted_noOverflow() { Cache<Integer, Integer> cache = Caffeine.newBuilder() .executor(CacheExecutor.DIRECT.get()) .weigher(CacheWeigher.MAX_VALUE) .maximumWeight(Long.MAX_VALUE) .build(); BoundedLocalCache<Integer, Integer> map = asBoundedLocalCache(cache); cache.put(1, 1); map.lazySetWeightedSize(BoundedLocalCache.MAXIMUM_CAPACITY); cache.put(2, 2); assertThat(map.size(), is(1)); assertThat(map.adjustedWeightedSize(), is(BoundedLocalCache.MAXIMUM_CAPACITY)); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.FULL, maximumSize = MaximumSize.FULL, executor = CacheExecutor.REJECTING, removalListener = 
Listener.CONSUMING) public void evict_rejected(Cache<Integer, Integer> cache, CacheContext context) { cache.put(context.absentKey(), context.absentValue()); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, maximumSize = MaximumSize.ONE) public void evict_alreadyRemoved(Cache<Integer, Integer> cache, CacheContext context) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Entry<Integer, Integer> oldEntry = Iterables.get(context.absent().entrySet(), 0); Entry<Integer, Integer> newEntry = Iterables.get(context.absent().entrySet(), 1); localCache.put(oldEntry.getKey(), oldEntry.getValue()); localCache.evictionLock.lock(); try { Object keyRef = localCache.nodeFactory.newLookupKey(oldEntry.getKey()); Node<Integer, Integer> node = localCache.data.get(keyRef); checkStatus(node, Status.ALIVE); ConcurrentTestHarness.execute(() -> { localCache.put(newEntry.getKey(), newEntry.getValue()); assertThat(localCache.remove(oldEntry.getKey()), is(oldEntry.getValue())); }); Awaits.await().until(() -> localCache.containsKey(oldEntry.getKey()), is(false)); Awaits.await().until(() -> { synchronized (node) { return !node.isAlive(); } }); checkStatus(node, Status.RETIRED); localCache.maintenance(); checkStatus(node, Status.DEAD); assertThat(localCache.containsKey(newEntry.getKey()), is(true)); assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.EXPLICIT)); } finally { localCache.evictionLock.unlock(); } } enum Status { ALIVE, RETIRED, DEAD } static void checkStatus(Node<Integer, Integer> node, Status expected) { synchronized (node) { assertThat(node.isAlive(), is(expected == Status.ALIVE)); assertThat(node.isRetired(), is(expected == Status.RETIRED)); assertThat(node.isDead(), is(expected == Status.DEAD)); } } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, 
maximumSize = MaximumSize.TEN, weigher = CacheWeigher.DEFAULT) public void evict_lru(Cache<Integer, Integer> cache, CacheContext context) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); for (int i = 0; i < 10; i++) { cache.put(i, -i); } checkContainsInOrder(localCache, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9); // re-order checkReorder(localCache, asList(0, 1, 2), 3, 4, 5, 6, 7, 8, 9, 0, 1, 2); // evict 3, 4, 5 checkEvict(localCache, asList(10, 11, 12), 6, 7, 8, 9, 0, 1, 2, 10, 11, 12); // re-order checkReorder(localCache, asList(6, 7, 8), 9, 0, 1, 2, 10, 11, 12, 6, 7, 8); // evict 9, 0, 1 checkEvict(localCache, asList(13, 14, 15), 2, 10, 11, 12, 6, 7, 8, 13, 14, 15); assertThat(context, hasEvictionCount(6)); } private void checkReorder(BoundedLocalCache<Integer, Integer> localCache, List<Integer> keys, Integer... expect) { keys.forEach(localCache::get); checkContainsInOrder(localCache, expect); } private void checkEvict(BoundedLocalCache<Integer, Integer> localCache, List<Integer> keys, Integer... expect) { keys.forEach(i -> localCache.put(i, i)); checkContainsInOrder(localCache, expect); } private void checkContainsInOrder(BoundedLocalCache<Integer, Integer> localCache, Integer... 
expect) { localCache.maintenance(); List<Integer> evictionList = Lists.newArrayList(); localCache.accessOrderMainDeque().forEach( node -> evictionList.add(node.getKey())); assertThat(localCache.size(), is(equalTo(expect.length))); assertThat(localCache.keySet(), containsInAnyOrder(expect)); assertThat(evictionList, is(equalTo(asList(expect)))); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.FULL, maximumSize = MaximumSize.FULL) public void updateRecency_onGet(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Node<Integer, Integer> first = localCache.accessOrderMainDeque().peek(); updateRecency(localCache, () -> localCache.get(first.getKey())); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.FULL, maximumSize = MaximumSize.FULL) public void updateRecency_onPutIfAbsent(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Node<Integer, Integer> first = localCache.accessOrderMainDeque().peek(); updateRecency(localCache, () -> localCache.putIfAbsent(first.getKey(), first.getKey())); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.FULL, maximumSize = MaximumSize.FULL) public void updateRecency_onPut(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Node<Integer, Integer> first = localCache.accessOrderMainDeque().peek(); updateRecency(localCache, () -> localCache.put(first.getKey(), first.getKey())); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.FULL, maximumSize = MaximumSize.FULL) public void updateRecency_onReplace(Cache<Integer, Integer> cache) { 
BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Node<Integer, Integer> first = localCache.accessOrderMainDeque().peek(); updateRecency(localCache, () -> localCache.replace(first.getKey(), first.getKey())); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.FULL, maximumSize = MaximumSize.FULL) public void updateRecency_onReplaceConditionally( Cache<Integer, Integer> cache, CacheContext context) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Node<Integer, Integer> first = localCache.accessOrderMainDeque().peek(); Integer key = first.getKey(); Integer value = context.original().get(key); updateRecency(localCache, () -> localCache.replace(first.getKey(), value, value)); } private void updateRecency(BoundedLocalCache<Integer, Integer> cache, Runnable operation) { Node<Integer, Integer> first = cache.accessOrderMainDeque().peek(); operation.run(); cache.maintenance(); assertThat(cache.accessOrderMainDeque().peekFirst(), is(not(first))); assertThat(cache.accessOrderMainDeque().peekLast(), is(first)); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, maximumSize = MaximumSize.FULL) public void exceedsMaximumBufferSize_onRead(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Node<Integer, Integer> dummy = localCache.nodeFactory.newNode(null, null, null, 1, 0); Buffer<Node<Integer, Integer>> buffer = localCache.readBuffer; for (int i = 0; i < BoundedBuffer.BUFFER_SIZE; i++) { buffer.offer(dummy); } assertThat(buffer.offer(dummy), is(Buffer.FULL)); localCache.afterRead(dummy, 0, true); assertThat(buffer.offer(dummy), is(not(Buffer.FULL))); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, 
maximumSize = MaximumSize.FULL) public void exceedsMaximumBufferSize_onWrite(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Node<Integer, Integer> dummy = localCache.nodeFactory.newNode(null, null, null, 1, 0); boolean[] ran = new boolean[1]; localCache.afterWrite(dummy, () -> ran[0] = true, 0); assertThat(ran[0], is(true)); assertThat(localCache.writeQueue(), hasSize(0)); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.FULL, maximumSize = MaximumSize.FULL) public void drain_onRead(Cache<Integer, Integer> cache, CacheContext context) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); Buffer<Node<Integer, Integer>> buffer = localCache.readBuffer; for (int i = 0; i < BoundedBuffer.BUFFER_SIZE; i++) { localCache.get(context.firstKey()); } int pending = buffer.size(); assertThat(buffer.writes(), is(equalTo(pending))); assertThat(pending, is(BoundedBuffer.BUFFER_SIZE)); localCache.get(context.firstKey()); assertThat(buffer.size(), is(0)); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, maximumSize = MaximumSize.FULL) public void drain_onWrite(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); cache.put(1, 1); assertThat(localCache.writeQueue(), hasSize(0)); assertThat(localCache.accessOrderMainDeque(), hasSize(1)); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, maximumSize = MaximumSize.FULL) public void drain_nonblocking(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); AtomicBoolean done = new AtomicBoolean(); Runnable task = () -> { localCache.lazySetDrainStatus(REQUIRED); 
localCache.scheduleDrainBuffers(); done.set(true); }; localCache.evictionLock.lock(); try { ConcurrentTestHarness.execute(task); Awaits.await().untilTrue(done); } finally { localCache.evictionLock.unlock(); } } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, maximumSize = MaximumSize.FULL) public void drain_blocksClear(Cache<Integer, Integer> cache) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); checkDrainBlocks(localCache, localCache::clear); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, maximumSize = MaximumSize.FULL) public void drain_blocksOrderedMap(Cache<Integer, Integer> cache, CacheContext context, Eviction<Integer, Integer> eviction) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); checkDrainBlocks(localCache, () -> eviction.coldest(((int) context.maximumSize()))); } @Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, implementation = Implementation.Caffeine, population = Population.EMPTY, maximumSize = MaximumSize.FULL) public void drain_blocksCapacity(Cache<Integer, Integer> cache, CacheContext context) { BoundedLocalCache<Integer, Integer> localCache = asBoundedLocalCache(cache); checkDrainBlocks(localCache, () -> localCache.setMaximum(0)); } void checkDrainBlocks(BoundedLocalCache<Integer, Integer> localCache, Runnable task) { AtomicBoolean done = new AtomicBoolean(); Lock lock = localCache.evictionLock; lock.lock(); try { executor.execute(() -> { localCache.lazySetDrainStatus(REQUIRED); task.run(); done.set(true); }); Awaits.await().until(() -> hasQueuedThreads(lock)); } finally { lock.unlock(); } Awaits.await().untilTrue(done); } private boolean hasQueuedThreads(Lock lock) { return (lock instanceof NonReentrantLock) ? 
((NonReentrantLock) lock).hasQueuedThreads() : ((ReentrantLock) lock).hasQueuedThreads(); } }
package std.algs; import std.libs.*; /************************************************************************* * Compilation: javac MinPQ.java * Execution: java MinPQ < input.txt * * Generic min priority queue implementation with a binary heap. * Can be used with a comparator instead of the natural order. * * % java MinPQ < tinyPQ.txt * E A E (6 left on pq) * * We use a one-based array to simplify parent and child calculations. * * Can be optimized by replacing full exchanges with half exchanges * (ala insertion sort). * *************************************************************************/ import java.util.Comparator; import java.util.Iterator; import java.util.NoSuchElementException; /** * The <tt>MinPQ</tt> class represents a priority queue of generic keys. * It supports the usual <em>insert</em> and <em>delete-the-minimum</em> * operations, along with methods for peeking at the minimum key, * testing if the priority queue is empty, and iterating through * the keys. * <p> * This implementation uses a binary heap. * The <em>insert</em> and <em>delete-the-minimum</em> operations take * logarithmic amortized time. * The <em>min</em>, <em>size</em>, and <em>is-empty</em> operations take constant time. * Construction takes time proportional to the specified capacity or the number of * items used to initialize the data structure. * <p> * For additional documentation, see <a href="http://algs4.cs.princeton.edu/24pq">Section 2.4</a> of * <i>Algorithms, 4th Edition</i> by Robert Sedgewick and Kevin Wayne. * * @author Robert Sedgewick * @author Kevin Wayne */ public class MinPQ<Key> implements Iterable<Key> { private Key[] pq; // store items at indices 1 to N private int N; // number of items on priority queue private Comparator<Key> comparator; // optional comparator /** * Initializes an empty priority queue with the given initial capacity. 
* @param initCapacity the initial capacity of the priority queue */ public MinPQ(int initCapacity) { pq = (Key[]) new Object[initCapacity + 1]; N = 0; } /** * Initializes an empty priority queue. */ public MinPQ() { this(1); } /** * Initializes an empty priority queue with the given initial capacity, * using the given comparator. * @param initCapacity the initial capacity of the priority queue * @param comparator the order to use when comparing keys */ public MinPQ(int initCapacity, Comparator<Key> comparator) { this.comparator = comparator; pq = (Key[]) new Object[initCapacity + 1]; N = 0; } /** * Initializes an empty priority queue using the given comparator. * @param comparator the order to use when comparing keys */ public MinPQ(Comparator<Key> comparator) { this(1, comparator); } /** * Initializes a priority queue from the array of keys. * Takes time proportional to the number of keys, using sink-based heap construction. * @param keys the array of keys */ public MinPQ(Key[] keys) { N = keys.length; pq = (Key[]) new Object[keys.length + 1]; for (int i = 0; i < N; i++) pq[i+1] = keys[i]; for (int k = N/2; k >= 1; k--) sink(k); assert isMinHeap(); } /** * Is the priority queue empty? * @return true if the priority queue is empty; false otherwise */ public boolean isEmpty() { return N == 0; } /** * Returns the number of keys on the priority queue. * @return the number of keys on the priority queue */ public int size() { return N; } /** * Returns a smallest key on the priority queue. * @return a smallest key on the priority queue * @throws java.util.NoSuchElementException if priority queue is empty */ public Key min() { if (isEmpty()) throw new NoSuchElementException("Priority queue underflow"); return pq[1]; } // helper function to double the size of the heap array private void resize(int capacity) { assert capacity > N; Key[] temp = (Key[]) new Object[capacity]; for (int i = 1; i <= N; i++) temp[i] = pq[i]; pq = temp; } /** * Adds a new key to the priority queue. 
* @param x the key to add to the priority queue */ public void insert(Key x) { // double size of array if necessary if (N == pq.length - 1) resize(2 * pq.length); // add x, and percolate it up to maintain heap invariant pq[++N] = x; swim(N); assert isMinHeap(); } /** * Removes and returns a smallest key on the priority queue. * @return a smallest key on the priority queue * @throws java.util.NoSuchElementException if the priority queue is empty */ public Key delMin() { if (isEmpty()) throw new NoSuchElementException("Priority queue underflow"); exch(1, N); Key min = pq[N--]; sink(1); pq[N+1] = null; // avoid loitering and help with garbage collection if ((N > 0) && (N == (pq.length - 1) / 4)) resize(pq.length / 2); assert isMinHeap(); return min; } /*********************************************************************** * Helper functions to restore the heap invariant. **********************************************************************/ private void swim(int k) { while (k > 1 && greater(k/2, k)) { exch(k, k/2); k = k/2; } } private void sink(int k) { while (2*k <= N) { int j = 2*k; if (j < N && greater(j, j+1)) j++; if (!greater(k, j)) break; exch(k, j); k = j; } } /*********************************************************************** * Helper functions for compares and swaps. **********************************************************************/ private boolean greater(int i, int j) { if (comparator == null) { return ((Comparable<Key>) pq[i]).compareTo(pq[j]) > 0; } else { return comparator.compare(pq[i], pq[j]) > 0; } } private void exch(int i, int j) { Key swap = pq[i]; pq[i] = pq[j]; pq[j] = swap; } // is pq[1..N] a min heap? private boolean isMinHeap() { return isMinHeap(1); } // is subtree of pq[1..N] rooted at k a min heap? 
private boolean isMinHeap(int k) { if (k > N) return true; int left = 2*k, right = 2*k + 1; if (left <= N && greater(k, left)) return false; if (right <= N && greater(k, right)) return false; return isMinHeap(left) && isMinHeap(right); } /*********************************************************************** * Iterators **********************************************************************/ /** * Returns an iterator that iterates over the keys on the priority queue * in ascending order. * The iterator doesn't implement <tt>remove()</tt> since it's optional. * @return an iterator that iterates over the keys in ascending order */ public Iterator<Key> iterator() { return new HeapIterator(); } private class HeapIterator implements Iterator<Key> { // create a new pq private MinPQ<Key> copy; // add all items to copy of heap // takes linear time since already in heap order so no keys move public HeapIterator() { if (comparator == null) copy = new MinPQ<Key>(size()); else copy = new MinPQ<Key>(size(), comparator); for (int i = 1; i <= N; i++) copy.insert(pq[i]); } public boolean hasNext() { return !copy.isEmpty(); } public void remove() { throw new UnsupportedOperationException(); } public Key next() { if (!hasNext()) throw new NoSuchElementException(); return copy.delMin(); } } /** * Unit tests the <tt>MinPQ</tt> data type. */ public static void main(String[] args) { MinPQ<String> pq = new MinPQ<String>(); while (!StdIn.isEmpty()) { String item = StdIn.readString(); if (!item.equals("-")) pq.insert(item); else if (!pq.isEmpty()) StdOut.print(pq.delMin() + " "); } StdOut.println("(" + pq.size() + " left on pq)"); } }
package finditfirst.ebay; import finditfirst.gui.panels.OptionPanel; import finditfirst.main.APISettings; import finditfirst.main.Program; import finditfirst.searchentry.SearchEntry; import finditfirst.utilities.MailService; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; /** Provides a number of static * functions that interface with * eBay's RESTful API. */ public final class API //TODO XML parsing / JSON { private static final Logger LOG = Program.LOG; /** eBay Developer key, gotten from eBay. * Be sure to use the "Production Key," * and not the "Sandbox Key." You need the AppID. */ private static final String EBAY_DEV_KEY_STRING = APISettings.getInstance().getEbayDevKey(); /** GET requests utilizing eBay's "findItemsAdvanced" * are required to have at least parameters. 
*/ public static final String GET_REQUEST_TEMPLATE = "http://svcs.ebay.com/services/" + "search/FindingService/" + "v1?SECURITY-APPNAME=" + EBAY_DEV_KEY_STRING + "&OPERATION-NAME=findItemsAdvanced" + "&SERVICE-VERSION=1.0.0" + "&RESPONSE-DATA-FORMAT=XML" + "&REST-PAYLOAD" + "&outputSelector=SellerInfo" + "&keywords="; /** Returns eBay's official time in UTC format */ public static final String EBAY_TIME_API_CALL = "http://open.api.ebay.com/shopping?callname=" + "GeteBayTime&responseencoding=XML&" + "appid=" + EBAY_DEV_KEY_STRING + "&siteid=0&version=533"; private static final Pattern PATTERN_ITEMS = Pattern.compile("(?<=\\<item\\>)((?!\\<\\/item\\>).)*"); private static final Pattern PATTERN_ITEM_IDS = Pattern.compile("(?<=\\<itemId\\>)[0-9]*(?=\\<\\/itemId\\>)"); /** Used to build GETRequestURLs */ private static StringBuilder sb; private static int filterCounter; protected API() {} /** Calls the eBay API and returns * the raw XML. * @param GETRequestURL The GET URL * @return eBay XML response * @see {@link #buildGETRequestURL(OptionPanel)} */ public static synchronized String callAndGetXmlResponse(String GETRequestURL) { StringBuilder sbb = new StringBuilder(); try { URL obj = new URL(GETRequestURL); HttpURLConnection con = (HttpURLConnection) obj.openConnection(); BufferedReader br = new BufferedReader(new InputStreamReader(con.getInputStream())); String line; while((line = br.readLine()) != null) { sbb.append(line); } } catch (IOException e) { LOG.log(Level.SEVERE, "Failed to get response from eBay for: " + GETRequestURL); } return sbb.toString(); } /** Submits a GET request to the eBay API, * determines if there are new item results, * and emails them if there are. 
* @param entry {@link SearchEntry} used to build * GET request and check item results against * @see {@link MailService#sendResults(String, java.util.Collection)} */ public static void submitSearchQuery(SearchEntry entry) { Queue<Listing> listingsToEmail = new LinkedList<Listing>(); String xml = callAndGetXmlResponse(entry.getGETRequestURL()); Matcher findItems = PATTERN_ITEMS.matcher(xml); while(findItems.find()) { if(findItems.group().length() != 0) { String itemXml = findItems.group(); String itemXmlID = getItemID(itemXml); if(!entry.getSeenListingIDs().contains(itemXmlID)) { listingsToEmail.add(new Listing(itemXml)); entry.getSeenListingIDs().add(itemXmlID); LOG.log(Level.FINER, "Added Item ID : " + itemXmlID); } } } if(!listingsToEmail.isEmpty()) { MailService.sendResults(entry.getSearchName(), listingsToEmail); } else { LOG.log(Level.FINEST, "No new results: " + entry.getSearchName()); } } /** Calls eBay's API for the * official time. * @return {@code Date} representing eBay time. */ public static Date callEbayTime() { Date date = null; String xml = callAndGetXmlResponse(EBAY_TIME_API_CALL); Matcher findTimestamp = Pattern.compile("(?<=\\<Timestamp\\>)((?!\\<\\/Timestamp\\>).)*").matcher(xml); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); sdf.setTimeZone(TimeZone.getTimeZone("UTC")); if(findTimestamp.find()) { try { date = sdf.parse(findTimestamp.group()); } catch (ParseException e) { LOG.log(Level.SEVERE, "Failed to parse eBay time"); } } return date; } private static String getItemID(String itemXml) { Matcher findId = PATTERN_ITEM_IDS.matcher(itemXml); if(findId.find()) { return findId.group(); } return ""; } /** Reads panel state and builds a properly * formatted GET request URL ready to be sent * to eBay's API. * @param panel Panel to read state from. * @return GET url. 
* @see {@link OptionPanel} */ public static String buildGETRequestURL(OptionPanel panel) { sb = new StringBuilder(); filterCounter = 0; String categoryId = panel.getCategoryId(); if(categoryId != null && !Objects.equals(categoryId, "ALL")) { createKeyValueFilter("categoryID", categoryId); } String distance = panel.getDistance(); if(distance != null) { createKeyValueFilter("buyerPostalCode" , panel.getZipcode()); createItemFilter("MaxDistance", distance); } ArrayList<String> condition = panel.getCondition(); createItemFilter("Condition", condition.toArray()); ArrayList<String> listingTypes = panel.getListingTypes(); createItemFilter("ListingType", listingTypes.toArray()); String listedIn = panel.getListedIn(); if(listedIn != null) { createItemFilter("ListedIn", listedIn); } String feedbackScore = panel.getFeedbackScore(); if(panel.getFeedbackScoreName().charAt(0) == '>') { createItemFilter("FeedbackScoreMin", feedbackScore); } else { createItemFilter("FeedbackScoreMax", feedbackScore); } String maxPrice = panel.getMaxPrice(); if(maxPrice != null) { createItemFilter("MaxPrice", maxPrice); } String minPrice = panel.getMinPrice(); if(minPrice != null) { createItemFilter("MinPrice", minPrice); } String locatedIn = panel.getLocatedIn(); if(locatedIn != null) { createItemFilter("LocatedIn", locatedIn); } String availableTo = panel.getAvailableTo(); if(availableTo != null) { createItemFilter("AvailableTo", availableTo); } boolean isUseExcludedSellers = panel.isUseExcludedSellerList(); if(isUseExcludedSellers) { createItemFilter("ExcludeSeller", panel.getExcludedSellers().toArray()); } boolean isUseIncludedSellers = panel.isUseIncludedSellerList(); if(isUseIncludedSellers) { createItemFilter("Seller", panel.getIncludedSellers().toArray()); } boolean isAuthorizedSeller = panel.isAuthorized(); if(isAuthorizedSeller) { createItemFilter("AuthorizedSellerOnly", true); } boolean isBestOffer = panel.isBestOffer(); if(isBestOffer) { createItemFilter("BestOfferOnly", true); } 
boolean isExcludeAutoPay = panel.isNoAutoPay(); if(isExcludeAutoPay) { createItemFilter("ExcludeAutoPay", true); } boolean isFeatured = panel.isFeatured(); if(isFeatured) { createItemFilter("FeaturedOnly", true); } boolean isFreeShipping = panel.isFreeShipping(); if(isFreeShipping) { createItemFilter("FreeShippingOnly", true); } boolean isGetItFast = panel.isGetItFast(); if(isGetItFast) { createItemFilter("GetItFastOnly", true); } boolean isLocalPickup = panel.isLocalPickup(); if(isLocalPickup) { createItemFilter("LocalPickupOnly", true); } boolean isOutlet = panel.isOutlet(); if(isOutlet) { createItemFilter("OutletSellerOnly", true); } boolean isReturnsAccepted = panel.isReturnsAccepted(); if(isReturnsAccepted) { createItemFilter("ReturnsAcceptedOnly", true); } boolean isTopRatedSeller = panel.isTopRated(); if(isTopRatedSeller) { createItemFilter("TopRatedSellerOnly", true); } /* String expeditedShippingType = if(expeditedShippingType!=null) { createItemFilter("ExpeditedShippingType", expeditedShippingType); } String maxBids = if(maxBids!=Integer.MIN_VALUE) { createItemFilter("MaxBids", maxBids); } String maxHandlingTime = if(maxHandlingTime!=Integer.MIN_VALUE) { createItemFilter("MaxHandlingTime", maxHandlingTime); } String maxQuantity = if(maxQuantity != null) { createItemFilter("MaxQuantity", maxQuantity); } String minQuantity = if(minQuantity != null) { createItemFilter("MinQuantity", minQuantity); } String minBids = if(minBids != null) { createItemFilter("MinBids", minBids); } String modTimeFrom = if(modTimeFrom != null) { createItemFilter("ModTimeFrom", modTimeFrom); } String startTimeFrom = if(startTimeFrom!=null) { createItemFilter("StartTimeFrom", startTimeFrom); } String startTimeTo = if(startTimeTo!=null) { createItemFilter("StartTimeTo", startTimeTo); } String sellerBusinessType = if(sellerBusinessType != null) { createItemFilter("SellerBusinessType", sellerBusinessType); } boolean isCharityOnly = if(charityOnly) { createItemFilter("CharityOnly", 
true); } boolean isHideDuplicates = if(isHideDuplicates) { createItemFilter("HideDuplicateItems", true); } boolean isLocalSearch = if(localSearchOnly) { createItemFilter("LocalSearchOnly", true); } boolean isLots = if(isLots) { createItemFilter("LotsOnly", true); } boolean isSold = if(soldItemsOnly) { createItemFilter("SoldItemsOnly", true); } boolean isValueBoxInventory = if(valueBoxInventory) { createItemFilter("ValueBoxInventory", true); } boolean isWorldOfGood = if(worldOfGoodOnly) { createItemFilter("WorldOfGoodOnly", true); } */ return GET_REQUEST_TEMPLATE + panel.getKeywords() + sb.toString(); } private static void createKeyValueFilter(String name, String value) { sb.append("&" + name + "=" + value); } private static void createItemFilter(String name, Object value) { sb.append("&itemFilter(" + filterCounter + ").name=" + name + "&itemFilter("+ filterCounter++ + ").value=" + value); } private static void createItemFilter(String name, Object[] value) { sb.append("&itemFilter(" + filterCounter + ").name=" + name); for(int i = 0; i < value.length; i++) { sb.append("&itemFilter(" + filterCounter + ").value(" + i + ")=" + value[i]); } filterCounter++; } }
/* * Spine Runtimes Software License * Version 2.3 * * Copyright (c) 2013-2015, Esoteric Software * All rights reserved. * * You are granted a perpetual, non-exclusive, non-sublicensable and * non-transferable license to use, install, execute and perform the Spine * Runtimes Software (the "Software") and derivative works solely for personal * or internal use. Without the written permission of Esoteric Software (see * Section 2 of the Spine Software License Agreement), you may not (a) modify, * translate, adapt or otherwise create derivative works, improvements of the * Software or develop new applications using the Software or (b) remove, * delete, alter or obscure any trademarks or any copyright, trademark, patent * or other intellectual property or proprietary rights notices on or in the * Software, including any copy thereof. Redistributions in binary or source * form must include this license and terms. * * THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.esotericsoftware.spine; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.Pool; import com.badlogic.gdx.utils.Pool.Poolable; /** Stores state for an animation and automatically mixes between animations. 
*/ public class AnimationState { private final AnimationStateData data; private Array<TrackEntry> tracks = new Array(); private final Array<Event> events = new Array(); private final Array<AnimationStateListener> listeners = new Array(); private float timeScale = 1; private Pool<TrackEntry> trackEntryPool = new Pool() { protected Object newObject () { return new TrackEntry(); } }; public AnimationState (AnimationStateData data) { if (data == null) throw new IllegalArgumentException("data cannot be null."); this.data = data; } public void update (float delta) { delta *= timeScale; for (int i = 0; i < tracks.size; i++) { TrackEntry current = tracks.get(i); if (current == null) continue; current.time += delta * current.timeScale; if (current.previous != null) { float previousDelta = delta * current.previous.timeScale; current.previous.time += previousDelta; current.mixTime += previousDelta; } TrackEntry next = current.next; if (next != null) { next.time = current.lastTime - next.delay; if (next.time >= 0) setCurrent(i, next); } else { // End non-looping animation when it reaches its end time and there is no next entry. 
if (!current.loop && current.lastTime >= current.endTime) clearTrack(i); } } } public void apply (Skeleton skeleton) { Array<Event> events = this.events; int listenerCount = listeners.size; for (int i = 0; i < tracks.size; i++) { TrackEntry current = tracks.get(i); if (current == null) continue; events.size = 0; float time = current.time; float lastTime = current.lastTime; float endTime = current.endTime; boolean loop = current.loop; if (!loop && time > endTime) time = endTime; TrackEntry previous = current.previous; if (previous == null) current.animation.mix(skeleton, lastTime, time, loop, events, current.mix); else { float previousTime = previous.time; if (!previous.loop && previousTime > previous.endTime) previousTime = previous.endTime; previous.animation.apply(skeleton, previousTime, previousTime, previous.loop, null); float alpha = current.mixTime / current.mixDuration * current.mix; if (alpha >= 1) { alpha = 1; trackEntryPool.free(previous); current.previous = null; } current.animation.mix(skeleton, lastTime, time, loop, events, alpha); } for (int ii = 0, nn = events.size; ii < nn; ii++) { Event event = events.get(ii); if (current.listener != null) current.listener.event(i, event); for (int iii = 0; iii < listenerCount; iii++) listeners.get(iii).event(i, event); } // Check if completed the animation or a loop iteration. if (loop ? 
// (continuation of the per-track update loop: the surrounding condition decides whether the
// animation crossed its end time this frame — looping tracks compare wrapped times, non-looping
// tracks check a simple threshold — and if so fires "complete" callbacks.)
(lastTime % endTime > time % endTime) : (lastTime < endTime && time >= endTime)) {
			// Number of times the animation has reached its end so far.
			int count = (int) (time / endTime);
			// Notify the per-entry listener first, then all global listeners.
			if (current.listener != null) current.listener.complete(i, count);
			for (int ii = 0, nn = listeners.size; ii < nn; ii++)
				listeners.get(ii).complete(i, count);
		}
		current.lastTime = current.time;
	}
}

/** Removes all animations from all tracks, firing end events for each current entry. */
public void clearTracks () {
	for (int i = 0, n = tracks.size; i < n; i++)
		clearTrack(i);
	tracks.clear();
}

/** Removes the animation on the given track, firing end events and returning the
 * entry (and any queued entries) to the pool. Does nothing for an empty track. */
public void clearTrack (int trackIndex) {
	if (trackIndex >= tracks.size) return;
	TrackEntry current = tracks.get(trackIndex);
	if (current == null) return;
	// Per-entry listener first, then global listeners (same ordering as setCurrent/complete).
	if (current.listener != null) current.listener.end(trackIndex);
	for (int i = 0, n = listeners.size; i < n; i++)
		listeners.get(i).end(trackIndex);
	tracks.set(trackIndex, null);
	// Free the entry and everything queued after it, plus any entry still being mixed from.
	freeAll(current);
	if (current.previous != null) trackEntryPool.free(current.previous);
}

// Returns entry and every entry linked after it (via next) to the pool.
private void freeAll (TrackEntry entry) {
	while (entry != null) {
		TrackEntry next = entry.next;
		trackEntryPool.free(entry);
		entry = next;
	}
}

// Grows the track list to include index, padding with nulls; returns the existing
// entry at index, or null if the track was just created/empty.
// NOTE(review): directly sets tracks.size after ensureCapacity — relies on the
// libgdx-style Array exposing size as a public field; verify against that class.
private TrackEntry expandToIndex (int index) {
	if (index < tracks.size) return tracks.get(index);
	tracks.ensureCapacity(index - tracks.size + 1);
	tracks.size = index + 1;
	return null;
}

// Installs entry as the current animation on a track: fires end events for the
// entry being replaced, sets up cross-fade state (mixDuration/previous), and
// fires start events for the new entry.
private void setCurrent (int index, TrackEntry entry) {
	TrackEntry current = expandToIndex(index);
	if (current != null) {
		TrackEntry previous = current.previous;
		current.previous = null;
		if (current.listener != null) current.listener.end(index);
		for (int i = 0, n = listeners.size; i < n; i++)
			listeners.get(i).end(index);
		entry.mixDuration = data.getMix(current.animation, entry.animation);
		if (entry.mixDuration > 0) {
			entry.mixTime = 0;
			// If a mix is in progress, mix from the closest animation.
			if (previous != null && current.mixTime / current.mixDuration < 0.5f) {
				entry.previous = previous;
				previous = current;
			} else
				entry.previous = current;
		} else
			trackEntryPool.free(current);
		// Whichever entry was not kept as the mix source goes back to the pool.
		if (previous != null) trackEntryPool.free(previous);
	}
	tracks.set(index, entry);
	if (entry.listener != null) entry.listener.start(index);
	for (int i = 0, n = listeners.size; i < n; i++)
		listeners.get(i).start(index);
}

/** @see #setAnimation(int, Animation, boolean) */
public TrackEntry setAnimation (int trackIndex, String animationName, boolean loop) {
	Animation animation = data.getSkeletonData().findAnimation(animationName);
	if (animation == null) throw new IllegalArgumentException("Animation not found: " + animationName);
	return setAnimation(trackIndex, animation, loop);
}

/** Set the current animation. Any queued animations are cleared. */
public TrackEntry setAnimation (int trackIndex, Animation animation, boolean loop) {
	TrackEntry current = expandToIndex(trackIndex);
	// Drop anything queued after the current entry; the current entry itself is
	// handled (mixed from or freed) by setCurrent.
	if (current != null) freeAll(current.next);
	TrackEntry entry = trackEntryPool.obtain();
	entry.animation = animation;
	entry.loop = loop;
	entry.endTime = animation.getDuration();
	setCurrent(trackIndex, entry);
	return entry;
}

/** {@link #addAnimation(int, Animation, boolean, float)} */
public TrackEntry addAnimation (int trackIndex, String animationName, boolean loop, float delay) {
	Animation animation = data.getSkeletonData().findAnimation(animationName);
	if (animation == null) throw new IllegalArgumentException("Animation not found: " + animationName);
	return addAnimation(trackIndex, animation, loop, delay);
}

/** Adds an animation to be played delay seconds after the current or last queued animation.
 * @param delay May be <= 0 to use duration of previous animation minus any mix duration plus the negative delay. */
public TrackEntry addAnimation (int trackIndex, Animation animation, boolean loop, float delay) {
	TrackEntry entry = trackEntryPool.obtain();
	entry.animation = animation;
	entry.loop = loop;
	entry.endTime = animation.getDuration();

	// Append to the end of the queue for this track, or install directly if the track is empty.
	TrackEntry last = expandToIndex(trackIndex);
	if (last != null) {
		while (last.next != null)
			last = last.next;
		last.next = entry;
	} else
		tracks.set(trackIndex, entry);

	if (delay <= 0) {
		// Non-positive delay means "previous duration minus mix time, plus the (negative) delay".
		if (last != null)
			delay += last.endTime - data.getMix(last.animation, animation);
		else
			delay = 0;
	}
	entry.delay = delay;
	return entry;
}

/** @return May be null. */
public TrackEntry getCurrent (int trackIndex) {
	if (trackIndex >= tracks.size) return null;
	return tracks.get(trackIndex);
}

/** Adds a listener to receive events for all animations. */
public void addListener (AnimationStateListener listener) {
	if (listener == null) throw new IllegalArgumentException("listener cannot be null.");
	listeners.add(listener);
}

/** Removes the listener added with {@link #addListener(AnimationStateListener)}. */
public void removeListener (AnimationStateListener listener) {
	listeners.removeValue(listener, true);
}

public float getTimeScale () {
	return timeScale;
}

public void setTimeScale (float timeScale) {
	this.timeScale = timeScale;
}

public AnimationStateData getData () {
	return data;
}

/** Returns the list of tracks that have animations, which may contain nulls. */
public Array<TrackEntry> getTracks () {
	return tracks;
}

public String toString () {
	StringBuilder buffer = new StringBuilder(64);
	for (int i = 0, n = tracks.size; i < n; i++) {
		TrackEntry entry = tracks.get(i);
		if (entry == null) continue;
		if (buffer.length() > 0) buffer.append(", ");
		buffer.append(entry.toString());
	}
	if (buffer.length() == 0) return "<none>";
	return buffer.toString();
}

/** State for one animation on a track: the animation, its playback clock, queue link
 * (next), cross-fade source (previous), and per-entry listener. Pooled — reset()
 * must clear every reference so entries can be safely reused. */
static public class TrackEntry implements Poolable {
	TrackEntry next, previous;       // queue link / mix-from entry
	Animation animation;
	boolean loop;
	float delay, time, lastTime = -1, endTime, timeScale = 1;
	float mixTime, mixDuration;      // cross-fade progress and length
	AnimationStateListener listener; // per-entry listener, may be null
	float mix = 1;                   // alpha this entry is applied with

	public void reset () {
		next = null;
		previous = null;
		animation = null;
		listener = null;
		timeScale = 1;
		lastTime = -1; // Trigger events on frame zero.
		time = 0;
	}

	public Animation getAnimation () {
		return animation;
	}

	public void setAnimation (Animation animation) {
		this.animation = animation;
	}

	public boolean getLoop () {
		return loop;
	}

	public void setLoop (boolean loop) {
		this.loop = loop;
	}

	public float getDelay () {
		return delay;
	}

	public void setDelay (float delay) {
		this.delay = delay;
	}

	public float getTime () {
		return time;
	}

	public void setTime (float time) {
		this.time = time;
	}

	public float getEndTime () {
		return endTime;
	}

	public void setEndTime (float endTime) {
		this.endTime = endTime;
	}

	public AnimationStateListener getListener () {
		return listener;
	}

	public void setListener (AnimationStateListener listener) {
		this.listener = listener;
	}

	public float getLastTime () {
		return lastTime;
	}

	public void setLastTime (float lastTime) {
		this.lastTime = lastTime;
	}

	public float getMix () {
		return mix;
	}

	public void setMix (float mix) {
		this.mix = mix;
	}

	public float getTimeScale () {
		return timeScale;
	}

	public void setTimeScale (float timeScale) {
		this.timeScale = timeScale;
	}

	public TrackEntry getNext () {
		return next;
	}

	public void setNext (TrackEntry next) {
		this.next = next;
	}

	/** Returns true if the current time is greater than the end time, regardless of looping. */
	public boolean isComplete () {
		return time >= endTime;
	}

	public String toString () {
		return animation == null ? "<none>" : animation.name;
	}
}

/** Callback interface for animation lifecycle notifications. Listener ordering:
 * the per-entry listener is always invoked before global listeners. */
static public interface AnimationStateListener {
	/** Invoked when the current animation triggers an event. */
	public void event (int trackIndex, Event event);

	/** Invoked when the current animation has completed.
	 * @param loopCount The number of times the animation reached the end. */
	public void complete (int trackIndex, int loopCount);

	/** Invoked just after the current animation is set. */
	public void start (int trackIndex);

	/** Invoked just before the current animation is replaced. */
	public void end (int trackIndex);
}

/** No-op convenience base class so implementors can override only the callbacks they need. */
static public abstract class AnimationStateAdapter implements AnimationStateListener {
	public void event (int trackIndex, Event event) {
	}

	public void complete (int trackIndex, int loopCount) {
	}

	public void start (int trackIndex) {
	}

	public void end (int trackIndex) {
	}
}
}
package org.apereo.cas.web.flow; import org.apereo.cas.configuration.CasConfigurationProperties; import org.apereo.cas.services.MultifactorAuthenticationProvider; import org.apereo.cas.web.support.WebUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.binding.convert.ConversionExecutor; import org.springframework.binding.convert.service.RuntimeBindingConversionExecutor; import org.springframework.binding.expression.Expression; import org.springframework.binding.expression.ExpressionParser; import org.springframework.binding.expression.ParserContext; import org.springframework.binding.expression.spel.SpringELExpressionParser; import org.springframework.binding.expression.support.FluentParserContext; import org.springframework.binding.expression.support.LiteralExpression; import org.springframework.binding.mapping.Mapper; import org.springframework.binding.mapping.impl.DefaultMapper; import org.springframework.binding.mapping.impl.DefaultMapping; import org.springframework.context.ApplicationContext; import org.springframework.context.expression.BeanExpressionContextAccessor; import org.springframework.context.expression.EnvironmentAccessor; import org.springframework.context.expression.MapAccessor; import org.springframework.expression.spel.SpelParserConfiguration; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.expression.spel.support.ReflectivePropertyAccessor; import org.springframework.util.ReflectionUtils; import org.springframework.webflow.action.EvaluateAction; import org.springframework.webflow.action.ExternalRedirectAction; import org.springframework.webflow.action.ViewFactoryActionAdapter; import org.springframework.webflow.config.FlowDefinitionRegistryBuilder; import org.springframework.webflow.definition.registry.FlowDefinitionRegistry; import org.springframework.webflow.engine.ActionState; import 
org.springframework.webflow.engine.DecisionState; import org.springframework.webflow.engine.EndState; import org.springframework.webflow.engine.Flow; import org.springframework.webflow.engine.FlowExecutionExceptionHandler; import org.springframework.webflow.engine.FlowVariable; import org.springframework.webflow.engine.SubflowAttributeMapper; import org.springframework.webflow.engine.SubflowState; import org.springframework.webflow.engine.Transition; import org.springframework.webflow.engine.TransitionCriteria; import org.springframework.webflow.engine.TransitionableState; import org.springframework.webflow.engine.ViewState; import org.springframework.webflow.engine.WildcardTransitionCriteria; import org.springframework.webflow.engine.builder.BinderConfiguration; import org.springframework.webflow.engine.builder.support.FlowBuilderServices; import org.springframework.webflow.engine.support.ActionExecutingViewFactory; import org.springframework.webflow.engine.support.BeanFactoryVariableValueFactory; import org.springframework.webflow.engine.support.DefaultTargetStateResolver; import org.springframework.webflow.engine.support.DefaultTransitionCriteria; import org.springframework.webflow.engine.support.GenericSubflowAttributeMapper; import org.springframework.webflow.engine.support.TransitionCriteriaChain; import org.springframework.webflow.execution.Action; import org.springframework.webflow.execution.ViewFactory; import org.springframework.webflow.expression.spel.ActionPropertyAccessor; import org.springframework.webflow.expression.spel.BeanFactoryPropertyAccessor; import org.springframework.webflow.expression.spel.FlowVariablePropertyAccessor; import org.springframework.webflow.expression.spel.MapAdaptablePropertyAccessor; import org.springframework.webflow.expression.spel.MessageSourcePropertyAccessor; import org.springframework.webflow.expression.spel.ScopeSearchingPropertyAccessor; import javax.annotation.PostConstruct; import java.lang.reflect.Field; import 
java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * The {@link AbstractCasWebflowConfigurer} is responsible for
 * providing an entry point into the CAS webflow. Subclasses implement
 * {@link #doInitialize()} to add/alter states, transitions and variables
 * of the login/logout flows at startup.
 *
 * @author Misagh Moayyed
 * @since 4.2
 */
public abstract class AbstractCasWebflowConfigurer implements CasWebflowConfigurer {

    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractCasWebflowConfigurer.class);

    /**
     * The logout flow definition registry. May be null if logout is not configured.
     */
    protected FlowDefinitionRegistry logoutFlowDefinitionRegistry;

    /**
     * The Login flow definition registry.
     */
    protected final FlowDefinitionRegistry loginFlowDefinitionRegistry;

    /**
     * Application context.
     */
    @Autowired
    protected ApplicationContext applicationContext;

    /**
     * CAS Properties.
     */
    @Autowired
    protected CasConfigurationProperties casProperties;

    /**
     * Flow builder services.
     */
    protected final FlowBuilderServices flowBuilderServices;

    public AbstractCasWebflowConfigurer(final FlowBuilderServices flowBuilderServices,
                                        final FlowDefinitionRegistry loginFlowDefinitionRegistry) {
        this.flowBuilderServices = flowBuilderServices;
        this.loginFlowDefinitionRegistry = loginFlowDefinitionRegistry;
    }

    @PostConstruct
    @Override
    public void initialize() {
        try {
            LOGGER.debug("Initializing CAS webflow configuration...");
            if (casProperties.getWebflow().isAutoconfigure()) {
                doInitialize();
            } else {
                LOGGER.warn("Webflow auto-configuration is disabled. CAS will not modify the webflow via [{}]", getClass().getName());
            }
        } catch (final Exception e) {
            // Boundary catch: a failing configurer must not abort application startup.
            LOGGER.error(e.getMessage(), e);
        }
    }

    /**
     * Handle the initialization of the webflow.
     *
     * @throws Exception the exception
     */
    protected abstract void doInitialize() throws Exception;

    @Override
    public Flow buildFlow(final String location, final String id) {
        final FlowDefinitionRegistryBuilder builder =
            new FlowDefinitionRegistryBuilder(this.applicationContext, this.flowBuilderServices);
        builder.setParent(this.loginFlowDefinitionRegistry);
        builder.addFlowLocation(location, id);
        final FlowDefinitionRegistry registry = builder.build();
        return (Flow) registry.getFlowDefinition(id);
    }

    @Override
    public Flow getLoginFlow() {
        if (this.loginFlowDefinitionRegistry == null) {
            LOGGER.error("Login flow registry is not configured correctly.");
            return null;
        }
        final boolean found = Arrays.stream(this.loginFlowDefinitionRegistry.getFlowDefinitionIds())
            .anyMatch(FLOW_ID_LOGIN::equals);
        if (found) {
            return (Flow) this.loginFlowDefinitionRegistry.getFlowDefinition(FLOW_ID_LOGIN);
        }
        LOGGER.error("Could not find flow definition [{}]. Available flow definition ids are [{}]",
            FLOW_ID_LOGIN, this.loginFlowDefinitionRegistry.getFlowDefinitionIds());
        return null;
    }

    @Override
    public Flow getLogoutFlow() {
        if (this.logoutFlowDefinitionRegistry == null) {
            LOGGER.error("Logout flow registry is not configured correctly.");
            return null;
        }
        return (Flow) this.logoutFlowDefinitionRegistry.getFlowDefinition(FLOW_ID_LOGOUT);
    }

    @Override
    public TransitionableState getStartState(final Flow flow) {
        return TransitionableState.class.cast(flow.getStartState());
    }

    @Override
    public ActionState createActionState(final Flow flow, final String name, final Action... actions) {
        // Creating a state is idempotent: return the existing definition if present.
        if (containsFlowState(flow, name)) {
            LOGGER.debug("Flow [{}] already contains a definition for state id [{}]", flow.getId(), name);
            return (ActionState) flow.getTransitionableState(name);
        }
        final ActionState actionState = new ActionState(flow, name);
        LOGGER.debug("Created action state [{}]", actionState.getId());
        actionState.getActionList().addAll(actions);
        LOGGER.debug("Added action to the action state [{}] list of actions: [{}]",
            actionState.getId(), actionState.getActionList());
        return actionState;
    }

    @Override
    public DecisionState createDecisionState(final Flow flow, final String id, final String testExpression,
                                             final String thenStateId, final String elseStateId) {
        if (containsFlowState(flow, id)) {
            LOGGER.debug("Flow [{}] already contains a definition for state id [{}]", flow.getId(), id);
            return (DecisionState) flow.getTransitionableState(id);
        }
        final DecisionState decisionState = new DecisionState(flow, id);

        // "then" branch fires when the boolean expression evaluates true; "*" catches everything else.
        final Expression expression = createExpression(testExpression, Boolean.class);
        final Transition thenTransition = createTransition(expression, thenStateId);
        decisionState.getTransitionSet().add(thenTransition);

        final Transition elseTransition = createTransition("*", elseStateId);
        decisionState.getTransitionSet().add(elseTransition);
        return decisionState;
    }

    @Override
    public void setStartState(final Flow flow, final String state) {
        flow.setStartState(state);
        final TransitionableState startState = getStartState(flow);
        LOGGER.debug("Start state is now set to [{}]", startState.getId());
    }

    @Override
    public void setStartState(final Flow flow, final TransitionableState state) {
        setStartState(flow, state.getId());
    }

    @Override
    public EvaluateAction createEvaluateAction(final String expression) {
        if (this.flowBuilderServices == null) {
            LOGGER.error("Flow builder services is not configured correctly.");
            return null;
        }
        final ParserContext ctx = new FluentParserContext();
        final Expression action = this.flowBuilderServices.getExpressionParser().parseExpression(expression, ctx);
        final EvaluateAction newAction = new EvaluateAction(action, null);
        LOGGER.debug("Created evaluate action for expression [{}]", action.getExpressionString());
        return newAction;
    }

    /**
     * Add a default transition to a given state.
     *
     * @param state       the state to include the default transition
     * @param targetState the id of the destination state to which the flow should transfer
     */
    protected void createStateDefaultTransition(final TransitionableState state, final String targetState) {
        if (state == null) {
            // FIX: original message was garbled ("...to the given state is null...").
            LOGGER.debug("Cannot add default transition of [{}]; the given state is null and cannot be found in the flow.",
                targetState);
            return;
        }
        final Transition transition = createTransition(targetState);
        state.getTransitionSet().add(transition);
    }

    /**
     * Create transition for state transition.
     *
     * @param state           the state
     * @param criteriaOutcome the criteria outcome
     * @param targetState     the target state
     * @return the transition
     */
    protected Transition createTransitionForState(final TransitionableState state,
                                                  final String criteriaOutcome,
                                                  final String targetState) {
        return createTransitionForState(state, criteriaOutcome, targetState, false);
    }

    /**
     * Add transition to action state.
     *
     * @param state           the action state
     * @param criteriaOutcome the criteria outcome
     * @param targetState     the target state
     * @param removeExisting  whether an existing transition for the same outcome is replaced
     * @return the transition, or null if creation failed
     */
    protected Transition createTransitionForState(final TransitionableState state,
                                                  final String criteriaOutcome,
                                                  final String targetState,
                                                  final boolean removeExisting) {
        try {
            if (removeExisting) {
                final Transition success = (Transition) state.getTransition(criteriaOutcome);
                if (success != null) {
                    state.getTransitionSet().remove(success);
                }
            }
            final Transition transition = createTransition(criteriaOutcome, targetState);
            state.getTransitionSet().add(transition);
            LOGGER.debug("Added transition [{}] to the state [{}]", transition.getId(), state.getId());
            return transition;
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        return null;
    }

    @Override
    public Transition createTransition(final String criteriaOutcome, final String targetState) {
        return createTransition(new LiteralExpression(criteriaOutcome), targetState);
    }

    @Override
    public Transition createTransition(final String criteriaOutcome, final TransitionableState targetState) {
        return createTransition(new LiteralExpression(criteriaOutcome), targetState.getId());
    }

    @Override
    public Transition createTransition(final String targetState) {
        final DefaultTargetStateResolver resolver = new DefaultTargetStateResolver(targetState);
        return new Transition(resolver);
    }

    @Override
    public Transition createTransition(final Expression criteriaOutcomeExpression, final String targetState) {
        final TransitionCriteria criteria;
        if (criteriaOutcomeExpression.toString().equals(WildcardTransitionCriteria.WILDCARD_EVENT_ID)) {
            criteria = WildcardTransitionCriteria.INSTANCE;
        } else {
            criteria = new DefaultTransitionCriteria(criteriaOutcomeExpression);
        }
        final DefaultTargetStateResolver resolver = new DefaultTargetStateResolver(targetState);
        return new Transition(criteria, resolver);
    }

    /**
     * Create an expression with an expected result type.
     *
     * @param expression   the expression
     * @param expectedType the expected type
     * @return the expression
     */
    protected Expression createExpression(final String expression, final Class expectedType) {
        final ParserContext parserContext = new FluentParserContext().expectResult(expectedType);
        return getSpringExpressionParser().parseExpression(expression, parserContext);
    }

    /**
     * Gets spring expression parser, pre-wired with the webflow/Spring property accessors.
     *
     * @return the spring expression parser
     */
    protected SpringELExpressionParser getSpringExpressionParser() {
        final SpelParserConfiguration configuration = new SpelParserConfiguration();
        final SpelExpressionParser spelExpressionParser = new SpelExpressionParser(configuration);
        final SpringELExpressionParser parser =
            new SpringELExpressionParser(spelExpressionParser, this.flowBuilderServices.getConversionService());

        parser.addPropertyAccessor(new ActionPropertyAccessor());
        parser.addPropertyAccessor(new BeanFactoryPropertyAccessor());
        parser.addPropertyAccessor(new FlowVariablePropertyAccessor());
        // FIX: MapAdaptablePropertyAccessor was registered twice; once is sufficient.
        parser.addPropertyAccessor(new MapAdaptablePropertyAccessor());
        parser.addPropertyAccessor(new MessageSourcePropertyAccessor());
        parser.addPropertyAccessor(new ScopeSearchingPropertyAccessor());
        parser.addPropertyAccessor(new BeanExpressionContextAccessor());
        parser.addPropertyAccessor(new MapAccessor());
        parser.addPropertyAccessor(new EnvironmentAccessor());
        parser.addPropertyAccessor(new ReflectivePropertyAccessor());
        return parser;
    }

    @Override
    public EndState createEndState(final Flow flow, final String id) {
        return createEndState(flow, id, (ViewFactory) null);
    }

    @Override
    public EndState createEndState(final Flow flow, final String id, final String viewId) {
        return createEndState(flow, id, new LiteralExpression(viewId));
    }

    @Override
    public EndState createEndState(final Flow flow, final String id, final Expression expression) {
        final ViewFactory viewFactory = this.flowBuilderServices.getViewFactoryCreator().createViewFactory(
            expression,
            this.flowBuilderServices.getExpressionParser(),
            this.flowBuilderServices.getConversionService(),
            null,
            this.flowBuilderServices.getValidator(),
            this.flowBuilderServices.getValidationHintResolver());
        return createEndState(flow, id, viewFactory);
    }

    @Override
    public EndState createEndState(final Flow flow, final String id, final String viewId, final boolean redirect) {
        if (!redirect) {
            return createEndState(flow, id, viewId);
        }
        // Redirecting end state: the "view" is an external redirect action instead.
        final Expression expression = createExpression(viewId, String.class);
        final ActionExecutingViewFactory viewFactory =
            new ActionExecutingViewFactory(new ExternalRedirectAction(expression));
        return createEndState(flow, id, viewFactory);
    }

    @Override
    public EndState createEndState(final Flow flow, final String id, final ViewFactory viewFactory) {
        if (containsFlowState(flow, id)) {
            LOGGER.debug("Flow [{}] already contains a definition for state id [{}]", flow.getId(), id);
            return (EndState) flow.getStateInstance(id);
        }
        final EndState endState = new EndState(flow, id);
        if (viewFactory != null) {
            final Action finalResponseAction = new ViewFactoryActionAdapter(viewFactory);
            endState.setFinalResponseAction(finalResponseAction);
            LOGGER.debug("Created end state state [{}] on flow id [{}], backed by view factory [{}]",
                id, flow.getId(), viewFactory);
        } else {
            LOGGER.debug("Created end state state [{}] on flow id [{}]", id, flow.getId());
        }
        return endState;
    }

    @Override
    public ViewState createViewState(final Flow flow, final String id, final Expression expression,
                                     final BinderConfiguration binder) {
        try {
            if (containsFlowState(flow, id)) {
                LOGGER.debug("Flow [{}] already contains a definition for state id [{}]", flow.getId(), id);
                return (ViewState) flow.getTransitionableState(id);
            }
            final ViewFactory viewFactory = this.flowBuilderServices.getViewFactoryCreator().createViewFactory(
                expression,
                this.flowBuilderServices.getExpressionParser(),
                this.flowBuilderServices.getConversionService(),
                binder,
                this.flowBuilderServices.getValidator(),
                this.flowBuilderServices.getValidationHintResolver());
            final ViewState viewState = new ViewState(flow, id, viewFactory);
            LOGGER.debug("Added view state [{}]", viewState.getId());
            return viewState;
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        return null;
    }

    @Override
    public ViewState createViewState(final Flow flow, final String id, final String viewId) {
        return createViewState(flow, id, new LiteralExpression(viewId), null);
    }

    @Override
    public ViewState createViewState(final Flow flow, final String id, final String viewId,
                                     final BinderConfiguration binder) {
        return createViewState(flow, id, new LiteralExpression(viewId), binder);
    }

    @Override
    public SubflowState createSubflowState(final Flow flow, final String id, final String subflow,
                                           final Action entryAction) {
        if (containsFlowState(flow, id)) {
            LOGGER.debug("Flow [{}] already contains a definition for state id [{}]", flow.getId(), id);
            return (SubflowState) flow.getTransitionableState(id);
        }
        final SubflowState state =
            new SubflowState(flow, id, new BasicSubflowExpression(subflow, this.loginFlowDefinitionRegistry));
        if (entryAction != null) {
            state.getEntryActionList().add(entryAction);
        }
        return state;
    }

    @Override
    public SubflowState createSubflowState(final Flow flow, final String id, final String subflow) {
        return createSubflowState(flow, id, subflow, null);
    }

    /**
     * Create mapper to subflow state.
     *
     * @param mappings the mappings
     * @return the mapper
     */
    protected Mapper createMapperToSubflowState(final List<DefaultMapping> mappings) {
        final DefaultMapper inputMapper = new DefaultMapper();
        mappings.forEach(inputMapper::addMapping);
        return inputMapper;
    }

    /**
     * Create mapping to subflow state.
     *
     * @param name     the target attribute name
     * @param value    the source expression
     * @param required the required
     * @param type     the type converted to on assignment
     * @return the default mapping
     */
    protected DefaultMapping createMappingToSubflowState(final String name, final String value,
                                                         final boolean required, final Class type) {
        final ExpressionParser parser = this.flowBuilderServices.getExpressionParser();

        final Expression source = parser.parseExpression(value, new FluentParserContext());
        final Expression target = parser.parseExpression(name, new FluentParserContext());

        final DefaultMapping mapping = new DefaultMapping(source, target);
        mapping.setRequired(required);

        final ConversionExecutor typeConverter =
            new RuntimeBindingConversionExecutor(type, this.flowBuilderServices.getConversionService());
        mapping.setTypeConverter(typeConverter);
        return mapping;
    }

    /**
     * Create subflow attribute mapper.
     *
     * @param inputMapper  the input mapper
     * @param outputMapper the output mapper
     * @return the subflow attribute mapper
     */
    protected SubflowAttributeMapper createSubflowAttributeMapper(final Mapper inputMapper, final Mapper outputMapper) {
        return new GenericSubflowAttributeMapper(inputMapper, outputMapper);
    }

    public void setLogoutFlowDefinitionRegistry(final FlowDefinitionRegistry logoutFlowDefinitionRegistry) {
        this.logoutFlowDefinitionRegistry = logoutFlowDefinitionRegistry;
    }

    /**
     * Contains flow state?
     *
     * @param flow    the flow
     * @param stateId the state id
     * @return true if flow contains the state.
     */
    protected boolean containsFlowState(final Flow flow, final String stateId) {
        if (flow == null) {
            LOGGER.error("Flow is not configured correctly and cannot be null.");
            return false;
        }
        return flow.containsState(stateId);
    }

    /**
     * Contains transition boolean.
     *
     * @param state      the state
     * @param transition the transition
     * @return the boolean
     */
    protected boolean containsTransition(final TransitionableState state, final String transition) {
        if (state == null) {
            LOGGER.error("State is not configured correctly and cannot be null.");
            return false;
        }
        return state.getTransition(transition) != null;
    }

    /**
     * Create flow variable flow variable. Returns the existing variable if one with the
     * same name (case-insensitive) is already registered on the flow.
     *
     * @param flow the flow
     * @param id   the id
     * @param type the type
     * @return the flow variable
     */
    protected FlowVariable createFlowVariable(final Flow flow, final String id, final Class type) {
        final Optional<FlowVariable> opt = Arrays.stream(flow.getVariables())
            .filter(v -> v.getName().equalsIgnoreCase(id))
            .findFirst();
        if (opt.isPresent()) {
            return opt.get();
        }
        final FlowVariable flowVar = new FlowVariable(id,
            new BeanFactoryVariableValueFactory(type, applicationContext.getAutowireCapableBeanFactory()));
        flow.addVariable(flowVar);
        return flowVar;
    }

    /**
     * Create state model bindings.
     *
     * @param properties the properties
     * @return the binder configuration
     */
    protected BinderConfiguration createStateBinderConfiguration(final List<String> properties) {
        final BinderConfiguration binder = new BinderConfiguration();
        properties.forEach(p -> binder.addBinding(new BinderConfiguration.Binding(p, null, true)));
        return binder;
    }

    /**
     * Create state model binding.
     *
     * @param state     the state
     * @param modelName the model name
     * @param modelType the model type
     */
    protected void createStateModelBinding(final TransitionableState state, final String modelName,
                                           final Class modelType) {
        state.getAttributes().put("model", createExpression(modelName, modelType));
    }

    /**
     * Gets state binder configuration via reflection, since the view factory does not
     * expose its binder configuration publicly.
     *
     * @param state the state
     * @return the state binder configuration
     */
    protected BinderConfiguration getViewStateBinderConfiguration(final ViewState state) {
        final Field field = ReflectionUtils.findField(state.getViewFactory().getClass(), "binderConfiguration");
        // NOTE(review): findField may return null for view factory implementations
        // without this field; makeAccessible would then throw — confirm callers only
        // pass states built by createViewState.
        ReflectionUtils.makeAccessible(field);
        return (BinderConfiguration) ReflectionUtils.getField(field, state.getViewFactory());
    }

    /**
     * Clone action state: copy actions, exit actions, attributes, transitions,
     * exception handlers, description and caption from source into target.
     *
     * @param source the source
     * @param target the target
     */
    @SuppressWarnings("unchecked")
    protected void cloneActionState(final ActionState source, final ActionState target) {
        source.getActionList().forEach(a -> target.getActionList().add(a));
        source.getExitActionList().forEach(a -> target.getExitActionList().add(a));
        source.getAttributes().asMap().forEach((k, v) -> target.getAttributes().put(k, v));
        source.getTransitionSet().forEach(t -> target.getTransitionSet().addAll(t));

        // FIX: the original read the handler list from TARGET and copied it into SOURCE,
        // inverting the clone direction used by every other property above.
        final Field field = ReflectionUtils.findField(source.getExceptionHandlerSet().getClass(), "exceptionHandlers");
        ReflectionUtils.makeAccessible(field);
        final List<FlowExecutionExceptionHandler> list =
            (List<FlowExecutionExceptionHandler>) ReflectionUtils.getField(field, source.getExceptionHandlerSet());
        list.forEach(h -> target.getExceptionHandlerSet().add(h));

        target.setDescription(source.getDescription());
        target.setCaption(source.getCaption());
    }

    /**
     * Gets transition execution criteria chain for transition.
     *
     * @param def the def
     * @return the transition execution criteria chain for transition
     */
    @SuppressWarnings("unchecked")
    protected List<TransitionCriteria> getTransitionExecutionCriteriaChainForTransition(final Transition def) {
        if (def.getExecutionCriteria() instanceof TransitionCriteriaChain) {
            final TransitionCriteriaChain chain = (TransitionCriteriaChain) def.getExecutionCriteria();
            final Field field = ReflectionUtils.findField(chain.getClass(), "criteriaChain");
            ReflectionUtils.makeAccessible(field);
            return (List<TransitionCriteria>) ReflectionUtils.getField(field, chain);
        }

        if (def.getExecutionCriteria() != null) {
            // FIX: raw List replaced with the parameterized type.
            final List<TransitionCriteria> c = new ArrayList<>();
            c.add(def.getExecutionCriteria());
            return c;
        }
        return new ArrayList<>();
    }

    /**
     * Gets expression string from action.
     *
     * @param act the act
     * @return the expression string from action
     */
    protected Expression getExpressionStringFromAction(final EvaluateAction act) {
        final Field field = ReflectionUtils.findField(act.getClass(), "expression");
        ReflectionUtils.makeAccessible(field);
        return (Expression) ReflectionUtils.getField(field, act);
    }

    /**
     * Register multifactor providers state transitions into webflow.
     *
     * @param state the state
     */
    protected void registerMultifactorProvidersStateTransitionsIntoWebflow(final TransitionableState state) {
        final Map<String, MultifactorAuthenticationProvider> providerMap =
            WebUtils.getAvailableMultifactorAuthenticationProviders(this.applicationContext);
        providerMap.forEach((k, v) -> createTransitionForState(state, v.getId(), v.getId()));
    }
}
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*
*/

package org.apach3.http.impl.nio;

import java.io.IOException;

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLException;

import org.apach3.http.HttpRequest;
import org.apach3.http.HttpRequestFactory;
import org.apach3.http.impl.DefaultHttpRequestFactory;
import org.apach3.http.impl.nio.reactor.SSLIOSession;
import org.apach3.http.impl.nio.reactor.SSLIOSessionHandler;
import org.apach3.http.impl.nio.reactor.SSLMode;
import org.apach3.http.nio.NHttpServerIOTarget;
import org.apach3.http.nio.NHttpServiceHandler;
import org.apach3.http.nio.reactor.IOEventDispatch;
import org.apach3.http.nio.reactor.IOSession;
import org.apach3.http.nio.util.ByteBufferAllocator;
import org.apach3.http.nio.util.HeapByteBufferAllocator;
import org.apach3.http.params.HttpParams;
import org.apach3.http.protocol.ExecutionContext;

/**
 * Default implementation of {@link IOEventDispatch} interface for SSL
 * (encrypted) server-side HTTP connections.
 * <p>
 * The following parameters can be used to customize the behavior of this
 * class:
 * <ul>
 *  <li>{@link org.apach3.http.params.CoreProtocolPNames#HTTP_ELEMENT_CHARSET}</li>
 *  <li>{@link org.apach3.http.params.CoreConnectionPNames#SOCKET_BUFFER_SIZE}</li>
 *  <li>{@link org.apach3.http.params.CoreConnectionPNames#MAX_HEADER_COUNT}</li>
 *  <li>{@link org.apach3.http.params.CoreConnectionPNames#MAX_LINE_LENGTH}</li>
 * </ul>
 *
 * @since 4.0
 *
 * @deprecated (4.2) use {@link org.apach3.http.impl.nio.ssl.SSLServerIOEventDispatch}
 */
@Deprecated
public class SSLServerIOEventDispatch implements IOEventDispatch {

    // Session attribute key under which the SSLIOSession decorator is stored.
    private static final String SSL_SESSION = "SSL_SESSION";

    protected final NHttpServiceHandler handler;
    protected final SSLContext sslcontext;
    protected final SSLIOSessionHandler sslHandler; // may be null (optional customization hook)
    protected final HttpParams params;

    /**
     * Creates a new instance of this class to be used for dispatching I/O event
     * notifications to the given protocol handler using the given
     * {@link SSLContext}. This I/O dispatcher will transparently handle SSL
     * protocol aspects for HTTP connections.
     *
     * @param handler the server protocol handler.
     * @param sslcontext the SSL context.
     * @param sslHandler the SSL handler.
     * @param params HTTP parameters.
     */
    public SSLServerIOEventDispatch(
            final NHttpServiceHandler handler,
            final SSLContext sslcontext,
            final SSLIOSessionHandler sslHandler,
            final HttpParams params) {
        super();
        if (handler == null) {
            throw new IllegalArgumentException("HTTP service handler may not be null");
        }
        if (sslcontext == null) {
            throw new IllegalArgumentException("SSL context may not be null");
        }
        if (params == null) {
            throw new IllegalArgumentException("HTTP parameters may not be null");
        }
        this.handler = handler;
        this.params = params;
        this.sslcontext = sslcontext;
        this.sslHandler = sslHandler; // intentionally not null-checked: null is allowed
    }

    /**
     * Creates a new instance of this class to be used for dispatching I/O event
     * notifications to the given protocol handler using the given
     * {@link SSLContext}. This I/O dispatcher will transparently handle SSL
     * protocol aspects for HTTP connections.
     *
     * @param handler the server protocol handler.
     * @param sslcontext the SSL context.
     * @param params HTTP parameters.
     */
    public SSLServerIOEventDispatch(
            final NHttpServiceHandler handler,
            final SSLContext sslcontext,
            final HttpParams params) {
        this(handler, sslcontext, null, params);
    }

    /**
     * Creates an instance of {@link HeapByteBufferAllocator} to be used
     * by HTTP connections for allocating {@link java.nio.ByteBuffer} objects.
     * <p>
     * This method can be overridden in a super class in order to provide
     * a different implementation of the {@link ByteBufferAllocator} interface.
     *
     * @return byte buffer allocator.
     */
    protected ByteBufferAllocator createByteBufferAllocator() {
        return new HeapByteBufferAllocator();
    }

    /**
     * Creates an instance of {@link DefaultHttpRequestFactory} to be used
     * by HTTP connections for creating {@link HttpRequest} objects.
     * <p>
     * This method can be overridden in a super class in order to provide
     * a different implementation of the {@link HttpRequestFactory} interface.
     *
     * @return HTTP request factory.
     */
    protected HttpRequestFactory createHttpRequestFactory() {
        return new DefaultHttpRequestFactory();
    }

    /**
     * Creates an instance of {@link DefaultNHttpServerConnection} based on the
     * given {@link IOSession}.
     * <p>
     * This method can be overridden in a super class in order to provide
     * a different implementation of the {@link NHttpServerIOTarget} interface.
     *
     * @param session the underlying SSL I/O session.
     *
     * @return newly created HTTP connection.
     */
    protected NHttpServerIOTarget createConnection(final IOSession session) {
        return new DefaultNHttpServerConnection(
                session,
                createHttpRequestFactory(),
                createByteBufferAllocator(),
                this.params);
    }

    /**
     * Creates an instance of {@link SSLIOSession} decorating the given
     * {@link IOSession}.
     * <p>
     * This method can be overridden in a super class in order to provide
     * a different implementation of SSL I/O session.
     *
     * @param session the underlying I/O session.
     * @param sslcontext the SSL context.
     * @param sslHandler the SSL handler.
     * @return newly created SSL I/O session.
     */
    protected SSLIOSession createSSLIOSession(
            final IOSession session,
            final SSLContext sslcontext,
            final SSLIOSessionHandler sslHandler) {
        return new SSLIOSession(session, sslcontext, sslHandler);
    }

    /** Wraps the new session in SSL, creates the HTTP connection, notifies the
     * protocol handler, then binds the SSL session in server mode. A handshake
     * failure is reported to the handler and the session is shut down. */
    public void connected(final IOSession session) {
        SSLIOSession sslSession = createSSLIOSession(
                session,
                this.sslcontext,
                this.sslHandler);
        NHttpServerIOTarget conn = createConnection(
                sslSession);
        session.setAttribute(ExecutionContext.HTTP_CONNECTION, conn);
        session.setAttribute(SSL_SESSION, sslSession);
        this.handler.connected(conn);
        try {
            sslSession.bind(SSLMode.SERVER, this.params);
        } catch (SSLException ex) {
            this.handler.exception(conn, ex);
            sslSession.shutdown();
        }
    }

    /** Notifies the protocol handler that the connection (if any) was closed. */
    public void disconnected(final IOSession session) {
        NHttpServerIOTarget conn = (NHttpServerIOTarget) session.getAttribute(
                ExecutionContext.HTTP_CONNECTION);
        if (conn != null) {
            this.handler.closed(conn);
        }
    }

    /** Pumps inbound data: delivers decrypted application data to the connection
     * when available, then advances the SSL inbound transport. I/O errors are
     * reported to the handler and the SSL session is shut down. */
    public void inputReady(final IOSession session) {
        NHttpServerIOTarget conn = (NHttpServerIOTarget) session.getAttribute(
                ExecutionContext.HTTP_CONNECTION);
        SSLIOSession sslSession = (SSLIOSession) session.getAttribute(
                SSL_SESSION);
        try {
            if (sslSession.isAppInputReady()) {
                conn.consumeInput(this.handler);
            }
            sslSession.inboundTransport();
        } catch (IOException ex) {
            this.handler.exception(conn, ex);
            sslSession.shutdown();
        }
    }

    /** Pumps outbound data: lets the connection produce application data when the
     * SSL session can accept it, then advances the SSL outbound transport. I/O
     * errors are reported to the handler and the SSL session is shut down. */
    public void outputReady(final IOSession session) {
        NHttpServerIOTarget conn = (NHttpServerIOTarget) session.getAttribute(
                ExecutionContext.HTTP_CONNECTION);
        SSLIOSession sslSession = (SSLIOSession) session.getAttribute(
                SSL_SESSION);
        try {
            if (sslSession.isAppOutputReady()) {
                conn.produceOutput(this.handler);
            }
            sslSession.outboundTransport();
        } catch (IOException ex) {
            this.handler.exception(conn, ex);
            sslSession.shutdown();
        }
    }

    /** Forwards a socket timeout to the protocol handler.
     * (Method body continues beyond this view.) */
    public void timeout(final IOSession session) {
        NHttpServerIOTarget conn = (NHttpServerIOTarget) session.getAttribute(
                ExecutionContext.HTTP_CONNECTION);
        SSLIOSession sslSession = (SSLIOSession) session.getAttribute(
                SSL_SESSION);
        this.handler.timeout(conn);
synchronized (sslSession) { if (sslSession.isOutboundDone() && !sslSession.isInboundDone()) { // The session failed to cleanly terminate sslSession.shutdown(); } } } }
/** * Copyright 2011 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.index; import java.util.ArrayList; import java.util.List; import junit.framework.Assert; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.FilterList.Operator; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.index.ColumnQualifier.ValueType; import org.apache.hadoop.hbase.index.client.EqualsExpression; import org.apache.hadoop.hbase.index.client.IndexUtils; import org.apache.hadoop.hbase.index.client.SingleIndexExpression; import 
org.apache.hadoop.hbase.index.coprocessor.master.IndexMasterObserver; import org.apache.hadoop.hbase.index.coprocessor.regionserver.IndexRegionObserver; import org.apache.hadoop.hbase.index.coprocessor.wal.IndexWALObserver; import org.apache.hadoop.hbase.index.filter.SingleColumnValuePartitionFilter; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(LargeTests.class) public class TestValuePartitionInScan { private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); @BeforeClass public static void setupBeforeClass() throws Exception { Configuration conf = UTIL.getConfiguration(); conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, IndexMasterObserver.class.getName()); conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, IndexRegionObserver.class.getName()); conf.set(CoprocessorHost.WAL_COPROCESSOR_CONF_KEY, IndexWALObserver.class.getName()); conf.setInt("hbase.regionserver.lease.period", 10 * 60 * 1000); conf.setInt("hbase.rpc.timeout", 10 * 60 * 1000); UTIL.startMiniCluster(1); } @AfterClass public static void tearDownAfterClass() throws Exception { UTIL.shutdownMiniCluster(); } @After public void tearDown() throws Exception { IndexRegionObserver.setIsTestingEnabled(false); } @Before public void setUp() throws Exception { IndexRegionObserver.setIndexedFlowUsed(false); IndexRegionObserver.setSeekpointAdded(false); IndexRegionObserver.setSeekPoints(null); IndexRegionObserver.setIsTestingEnabled(true); IndexRegionObserver.addSeekPoints(null); } @Test(timeout = 180000) public void testSeparatorPartition() throws Exception { HBaseAdmin admin = UTIL.getHBaseAdmin(); Configuration conf = UTIL.getConfiguration(); String userTableName = "testSeparatorPartition"; IndexedHTableDescriptor ihtd = new IndexedHTableDescriptor(userTableName); HColumnDescriptor hcd = new HColumnDescriptor("cf1"); ihtd.addFamily(hcd); 
ValuePartition vp = new SeparatorPartition("_", 3); IndexSpecification iSpec = new IndexSpecification("idx1"); iSpec.addIndexColumn(hcd, "cq", vp, ValueType.String, 200); ihtd.addIndex(iSpec); admin.createTable(ihtd); HTable table = new HTable(conf, "testSeparatorPartition"); byte[] value1 = "2ndFloor_solitaire_huawei_bangalore_karnataka".getBytes(); Put p = new Put("row".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), value1); table.put(p); p = new Put("row2".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "7thFloor_solitaire_huawei_bangalore_karnataka".getBytes()); table.put(p); p = new Put("row3".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "rrr_sss_hhh_bangalore_karnataka".getBytes()); table.put(p); Scan scan = new Scan(); scan.setFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "huawei".getBytes(), vp)); int i = 0; ResultScanner scanner = table.getScanner(scan); List<Result> testRes = new ArrayList<Result>(); Result[] result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 2, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 2 rows", testRes.size() == 2); } @Test(timeout = 180000) public void testSpatialPartition() throws Exception { HBaseAdmin admin = UTIL.getHBaseAdmin(); Configuration conf = UTIL.getConfiguration(); String userTableName = "testSpatialPartition"; IndexedHTableDescriptor ihtd = new IndexedHTableDescriptor(userTableName); HColumnDescriptor hcd = new HColumnDescriptor("cf1"); ihtd.addFamily(hcd); ValuePartition vp = new SpatialPartition(2, 3); IndexSpecification iSpec = new 
IndexSpecification("idx1"); iSpec.addIndexColumn(hcd, "cq", vp, ValueType.String, 200); ihtd.addIndex(iSpec); admin.createTable(ihtd); HTable table = new HTable(conf, "testSpatialPartition"); byte[] value1 = "helloworld".getBytes(); Put p = new Put("row".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), value1); table.put(p); p = new Put("row2".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "spatial".getBytes()); table.put(p); p = new Put("row3".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "partition".getBytes()); table.put(p); Scan scan = new Scan(); scan.setFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.LESS_OR_EQUAL, "rti".getBytes(), vp)); int i = 0; ResultScanner scanner = table.getScanner(scan); List<Result> testRes = new ArrayList<Result>(); Result[] result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 3, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 1 rows", testRes.size() == 3); } @Test(timeout = 180000) public void testSpatialPartitionIfMulitplePartsOfValueAreIndexedByDifferentIndicesOnSameColumn() throws Exception { HBaseAdmin admin = UTIL.getHBaseAdmin(); Configuration conf = UTIL.getConfiguration(); String userTableName = "testSpatialPartitionIfMulitplePartsOfValueAreIndexedByDifferentIndicesOnSameColumn"; IndexedHTableDescriptor ihtd = new IndexedHTableDescriptor(userTableName); HColumnDescriptor hcd = new HColumnDescriptor("cf1"); ihtd.addFamily(hcd); ValuePartition vp = new SpatialPartition(2, 3); IndexSpecification iSpec = new IndexSpecification("idx1"); iSpec.addIndexColumn(hcd, "cq", vp, 
ValueType.String, 200); ihtd.addIndex(iSpec); ValuePartition vp2 = new SpatialPartition(5, 2); iSpec = new IndexSpecification("idx2"); iSpec.addIndexColumn(hcd, "cq", vp2, ValueType.String, 200); ihtd.addIndex(iSpec); admin.createTable(ihtd); HTable table = new HTable(conf, userTableName); byte[] value1 = "helloworldmultiple".getBytes(); Put p = new Put("row".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), value1); table.put(p); p = new Put("row2".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "spatialmultiple".getBytes()); table.put(p); p = new Put("row3".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "partitionmultiple".getBytes()); table.put(p); FilterList masterFilter = new FilterList(Operator.MUST_PASS_ALL); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "rti".getBytes(), vp)); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "ti".getBytes(), vp2)); Scan scan = new Scan(); scan.setFilter(masterFilter); int i = 0; ResultScanner scanner = table.getScanner(scan); List<Result> testRes = new ArrayList<Result>(); Result[] result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 1, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 1 rows", testRes.size() == 1); masterFilter = new FilterList(Operator.MUST_PASS_ONE); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.LESS, "rti".getBytes(), vp)); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), 
"cq".getBytes(), CompareOp.GREATER, "ti".getBytes(), vp2)); scan = new Scan(); scan.setFilter(masterFilter); i = 0; scanner = table.getScanner(scan); testRes = new ArrayList<Result>(); result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 3, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 2 rows", testRes.size() == 2); } @Test(timeout = 180000) public void testSeparatorPartitionIfMulitplePartsOfValueAreIndexedByDifferentIndicesOnSameColumn() throws Exception { HBaseAdmin admin = UTIL.getHBaseAdmin(); Configuration conf = UTIL.getConfiguration(); String userTableName = "testSeparatorPartitionIfMulitplePartsOfValueAreIndexedByDifferentIndicesOnSameColumn"; IndexedHTableDescriptor ihtd = new IndexedHTableDescriptor(userTableName); HColumnDescriptor hcd = new HColumnDescriptor("cf1"); ihtd.addFamily(hcd); ValuePartition vp = new SeparatorPartition("--", 3); IndexSpecification iSpec = new IndexSpecification("idx1"); iSpec.addIndexColumn(hcd, "cq", vp, ValueType.String, 200); ihtd.addIndex(iSpec); ValuePartition vp2 = new SeparatorPartition("--", 2); iSpec = new IndexSpecification("idx2"); iSpec.addIndexColumn(hcd, "cq", vp2, ValueType.String, 200); ihtd.addIndex(iSpec); admin.createTable(ihtd); HTable table = new HTable(conf, userTableName); byte[] value1 = "hello--world--multiple--1".getBytes(); Put p = new Put("row".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), value1); table.put(p); p = new Put("row2".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "spatial--partition--multiple".getBytes()); table.put(p); p = new Put("row3".getBytes()); 
p.add("cf1".getBytes(), "cq".getBytes(), "partition--by--separator--multiple".getBytes()); table.put(p); FilterList masterFilter = new FilterList(Operator.MUST_PASS_ALL); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "multiple".getBytes(), vp)); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "by".getBytes(), vp2)); Scan scan = new Scan(); scan.setFilter(masterFilter); int i = 0; ResultScanner scanner = table.getScanner(scan); List<Result> testRes = new ArrayList<Result>(); Result[] result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 2, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 1 rows", testRes.size() == 2); masterFilter = new FilterList(Operator.MUST_PASS_ONE); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "person".getBytes(), vp)); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.LESS, "multiple".getBytes(), vp2)); scan = new Scan(); scan.setFilter(masterFilter); i = 0; scanner = table.getScanner(scan); testRes = new ArrayList<Result>(); result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two 
seek points from index scanner.", 3, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 1 rows", testRes.size() == 1); } @Test(timeout = 180000) public void testCombinationOfPartitionFiltersWithSCVF() throws Exception { HBaseAdmin admin = UTIL.getHBaseAdmin(); Configuration conf = UTIL.getConfiguration(); String userTableName = "testCombinationOfPartitionFiltersWithSCVF"; IndexedHTableDescriptor ihtd = new IndexedHTableDescriptor(userTableName); HColumnDescriptor hcd = new HColumnDescriptor("cf1"); ihtd.addFamily(hcd); ValuePartition vp = new SeparatorPartition("--", 3); IndexSpecification iSpec = new IndexSpecification("idx1"); iSpec.addIndexColumn(hcd, "cq", vp, ValueType.String, 200); ihtd.addIndex(iSpec); ValuePartition vp2 = new SeparatorPartition("--", 2); iSpec = new IndexSpecification("idx2"); iSpec.addIndexColumn(hcd, "cq", vp2, ValueType.String, 200); ihtd.addIndex(iSpec); iSpec = new IndexSpecification("idx3"); iSpec.addIndexColumn(hcd, "cq", ValueType.String, 200); ihtd.addIndex(iSpec); admin.createTable(ihtd); HTable table = new HTable(conf, userTableName); byte[] value1 = "hello--world--multiple--1".getBytes(); Put p = new Put("row".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), value1); table.put(p); p = new Put("row2".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "spatial--partition--multiple".getBytes()); table.put(p); p = new Put("row3".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "partition--by--separator--multiple".getBytes()); table.put(p); FilterList masterFilter = new FilterList(Operator.MUST_PASS_ALL); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "multiple".getBytes(), vp)); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "by".getBytes(), vp2)); masterFilter.addFilter(new SingleColumnValueFilter(hcd.getName(), "cq".getBytes(), 
CompareOp.EQUAL, "spatial--partition--multiple".getBytes())); Scan scan = new Scan(); scan.setFilter(masterFilter); int i = 0; ResultScanner scanner = table.getScanner(scan); List<Result> testRes = new ArrayList<Result>(); Result[] result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 1, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 1 rows", testRes.size() == 1); masterFilter = new FilterList(Operator.MUST_PASS_ONE); masterFilter.addFilter(new SingleColumnValueFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "partition--by--separator--multiple".getBytes())); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "person".getBytes(), vp)); masterFilter.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.LESS, "multiple".getBytes(), vp2)); scan = new Scan(); scan.setFilter(masterFilter); i = 0; scanner = table.getScanner(scan); testRes = new ArrayList<Result>(); result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 3, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 1 rows", testRes.size() == 2); } @Test(timeout = 180000) public void 
testCombinationOfPartitionFiltersWithSCVFPart2() throws Exception { HBaseAdmin admin = UTIL.getHBaseAdmin(); Configuration conf = UTIL.getConfiguration(); String userTableName = "testCombinationOfPartitionFiltersWithSCVFPart2"; IndexedHTableDescriptor ihtd = new IndexedHTableDescriptor(userTableName); HColumnDescriptor hcd = new HColumnDescriptor("cf1"); ihtd.addFamily(hcd); ValuePartition vp = new SeparatorPartition("--", 3); IndexSpecification iSpec = new IndexSpecification("idx1"); iSpec.addIndexColumn(hcd, "cq", vp, ValueType.String, 100); iSpec.addIndexColumn(hcd, "cq1", ValueType.String, 100); ihtd.addIndex(iSpec); ValuePartition vp2 = new SeparatorPartition("--", 2); iSpec = new IndexSpecification("idx2"); iSpec.addIndexColumn(hcd, "cq", vp2, ValueType.String, 100); ihtd.addIndex(iSpec); iSpec = new IndexSpecification("idx3"); iSpec.addIndexColumn(hcd, "cq1", ValueType.String, 100); ihtd.addIndex(iSpec); iSpec = new IndexSpecification("idx4"); iSpec.addIndexColumn(hcd, "cq", ValueType.String, 100); iSpec.addIndexColumn(hcd, "cq1", ValueType.String, 100); ihtd.addIndex(iSpec); admin.createTable(ihtd); HTable table = new HTable(conf, userTableName); byte[] value1 = "hello--world--multiple--1".getBytes(); Put p = new Put("row".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), value1); table.put(p); p = new Put("row2".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "spatial--partition--multiple".getBytes()); p.add("cf1".getBytes(), "cq1".getBytes(), "spatialPartition".getBytes()); table.put(p); p = new Put("row3".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "partition--by--multiple--multiple".getBytes()); p.add("cf1".getBytes(), "cq1".getBytes(), "partitionValue".getBytes()); table.put(p); p = new Put("row4".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "partition--multiple--multiple--multiple".getBytes()); p.add("cf1".getBytes(), "cq1".getBytes(), "multiple".getBytes()); table.put(p); p = new Put("row5".getBytes()); 
p.add("cf1".getBytes(), "cq1".getBytes(), "abcd".getBytes()); table.put(p); p = new Put("row6".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "1234".getBytes()); table.put(p); FilterList masterFilter = new FilterList(Operator.MUST_PASS_ALL); FilterList filter1 = new FilterList(Operator.MUST_PASS_ALL); filter1.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "multiple".getBytes(), vp)); filter1.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "by".getBytes(), vp2)); filter1.addFilter(new SingleColumnValueFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "partition--multiple--multiple--multiple".getBytes())); FilterList filter2 = new FilterList(Operator.MUST_PASS_ONE); filter2.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "multiple".getBytes(), vp)); filter2.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "multiple".getBytes(), vp2)); FilterList filter3 = new FilterList(Operator.MUST_PASS_ALL); filter3.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "multiple".getBytes(), vp)); filter3.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "multiple".getBytes(), vp2)); FilterList filter4 = new FilterList(Operator.MUST_PASS_ALL); filter3.addFilter(new SingleColumnValueFilter(hcd.getName(), "cq1".getBytes(), CompareOp.GREATER_OR_EQUAL, "1234".getBytes())); filter3.addFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.GREATER_OR_EQUAL, "multiple".getBytes(), vp2)); masterFilter.addFilter(filter1); masterFilter.addFilter(filter2); masterFilter.addFilter(filter3); masterFilter.addFilter(filter4); Scan scan = new Scan(); scan.setFilter(masterFilter); int i = 0; ResultScanner scanner = table.getScanner(scan); List<Result> testRes = new 
ArrayList<Result>(); Result[] result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 1, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 1 rows", testRes.size() == 1); } @Test(timeout = 180000) public void testSingleColumnValuePartitionFilterBySettingAsAttributeToScan() throws Exception { HBaseAdmin admin = UTIL.getHBaseAdmin(); Configuration conf = UTIL.getConfiguration(); String userTableName = "testSingleColumnValuePartitionFilterBySettingAsAttributeToScan"; IndexedHTableDescriptor ihtd = new IndexedHTableDescriptor(userTableName); HColumnDescriptor hcd = new HColumnDescriptor("cf1"); ihtd.addFamily(hcd); ValuePartition vp = new SeparatorPartition("_", 3); IndexSpecification iSpec = new IndexSpecification("idx1"); iSpec.addIndexColumn(hcd, "cq", vp, ValueType.String, 200); ihtd.addIndex(iSpec); admin.createTable(ihtd); HTable table = new HTable(conf, userTableName); byte[] value1 = "2ndFloor_solitaire_huawei_bangalore_karnataka".getBytes(); Put p = new Put("row".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), value1); table.put(p); p = new Put("row2".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "7thFloor_solitaire_huawei_bangalore_karnataka".getBytes()); table.put(p); p = new Put("row3".getBytes()); p.add("cf1".getBytes(), "cq".getBytes(), "rrr_sss_hhh_bangalore_karnataka".getBytes()); table.put(p); Scan scan = new Scan(); SingleIndexExpression singleIndexExpression = new SingleIndexExpression("idx1"); byte[] value = "huawei".getBytes(); Column column = new Column("cf1".getBytes(), "cq".getBytes(), vp); EqualsExpression equalsExpression = new 
EqualsExpression(column, value); singleIndexExpression.addEqualsExpression(equalsExpression); scan.setAttribute(Constants.INDEX_EXPRESSION, IndexUtils.toBytes(singleIndexExpression)); scan.setFilter(new SingleColumnValuePartitionFilter(hcd.getName(), "cq".getBytes(), CompareOp.EQUAL, "huawei".getBytes(), vp)); int i = 0; ResultScanner scanner = table.getScanner(scan); List<Result> testRes = new ArrayList<Result>(); Result[] result = scanner.next(1); while (result != null && result.length > 0) { testRes.add(result[0]); i++; result = scanner.next(1); } Assert.assertTrue("Index flow should get used.", IndexRegionObserver.getIndexedFlowUsed()); Assert.assertTrue("Seekpoints should get added by index scanner", IndexRegionObserver.getSeekpointAdded()); Assert.assertEquals("It should get two seek points from index scanner.", 2, IndexRegionObserver .getMultipleSeekPoints().size()); Assert.assertTrue("Overall result should have only 2 rows", testRes.size() == 2); } }
package org.apache.struts2.views.freemarker;

import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.mock.MockActionInvocation;
import com.opensymphony.xwork2.util.ClassLoaderUtil;
import com.opensymphony.xwork2.util.ValueStack;
import com.opensymphony.xwork2.util.fs.DefaultFileManagerFactory;
import freemarker.template.Configuration;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import org.apache.struts2.ServletActionContext;
import org.apache.struts2.StrutsInternalTestCase;
import org.apache.struts2.StrutsStatics;
import org.apache.struts2.dispatcher.mapper.ActionMapper;
import org.apache.struts2.dispatcher.mapper.ActionMapping;
import org.apache.struts2.views.jsp.StrutsMockHttpServletResponse;
import org.easymock.EasyMock;
import org.springframework.mock.web.MockHttpServletRequest;

import javax.servlet.ServletContext;
import java.io.File;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URISyntaxException;

import static org.apache.struts2.views.jsp.AbstractUITagTest.normalize;

/**
 * End-to-end FreeMarker result tests driven through the Struts2 dispatcher.
 *
 * <p>Each test stubs {@link ServletContext#getRealPath} (via EasyMock) so that template paths
 * resolve to .ftl files on the test classpath, then dispatches a request and asserts on the text
 * written to the response. Stubs must be registered BEFORE {@code EasyMock.replay(servletContext)}
 * and {@link #init()} — the order of statements in each test is significant.
 */
public class FreemarkerResultMockedTest extends StrutsInternalTestCase {

    ValueStack stack;
    MockActionInvocation invocation;
    ActionContext context;
    StrutsMockHttpServletResponse response;
    PrintWriter writer;                 // wraps stringWriter; installed on the mock response
    StringWriter stringWriter;          // captures the rendered template output
    ServletContext servletContext;      // EasyMock nice mock, created in setUp()
    MockHttpServletRequest request;
    Configuration freemarkerConfig;     // shared FreeMarker config, RETHROW exception handler

    /** Renders /tutorial/test2.action; the called action throws, yet the nested output renders. */
    public void testActionThatThrowsExceptionTag() throws Exception {
        // NOTE(review): the classpath resource is callActionFreeMarker2.ftl but the stubbed real
        // path is callActionFreeMarker.ftl — presumably intentional for this scenario; confirm.
        File file = new File(FreeMarkerResultTest.class.getResource("callActionFreeMarker2.ftl").toURI());
        EasyMock.expect(servletContext.getRealPath("/tutorial/org/apache/struts2/views/freemarker/callActionFreeMarker.ftl")).andReturn(file.getAbsolutePath());
        file = new File(FreeMarkerResultTest.class.getResource("nested.ftl").toURI());
        EasyMock.expect(servletContext.getRealPath("/tutorial/org/apache/struts2/views/freemarker/nested.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.replay(servletContext);
        init();
        request.setRequestURI("/tutorial/test2.action");
        ActionMapping mapping = container.getInstance(ActionMapper.class).getMapping(request, configurationManager);
        dispatcher.serviceAction(request, response, mapping);
        assertEquals("beforenestedafter", stringWriter.toString());
    }

    /** Renders /tutorial/test5.action; a successful nested action call produces the same output. */
    public void testActionThatSucceedsTag() throws Exception {
        File file = new File(FreeMarkerResultTest.class.getResource("callActionFreeMarker2.ftl").toURI());
        EasyMock.expect(servletContext.getRealPath("/tutorial/org/apache/struts2/views/freemarker/callActionFreeMarker2.ftl")).andReturn(file.getAbsolutePath());
        file = new File(FreeMarkerResultTest.class.getResource("nested.ftl").toURI());
        EasyMock.expect(servletContext.getRealPath("/tutorial/org/apache/struts2/views/freemarker/nested.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.replay(servletContext);
        init();
        request.setRequestURI("/tutorial/test5.action");
        ActionMapping mapping = container.getInstance(ActionMapper.class).getMapping(request, configurationManager);
        dispatcher.serviceAction(request, response, mapping);
        assertEquals("beforenestedafter", stringWriter.toString());
    }

    /**
     * Renders /tutorial/test6.action and checks that dynamic attributes (foo, placeholder, break)
     * appear on the generated inputs. Attribute order differs between JDK releases, so both
     * orderings are accepted.
     */
    public void testDynamicAttributesSupport() throws Exception {
        File file = new File(FreeMarkerResultTest.class.getResource("dynaAttributes.ftl").toURI());
        EasyMock.expect(servletContext.getRealPath("/tutorial/org/apache/struts2/views/freemarker/dynaAttributes.ftl")).andReturn(file.getAbsolutePath());
        // Simple-theme templates are resolved both under /template/simple and /template/~~~simple.
        file = new File(ClassLoaderUtil.getResource("template/simple/text.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/text.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/css.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/css.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/css.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/scripting-events.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/scripting-events.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/scripting-events.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/common-attributes.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/common-attributes.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/common-attributes.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/dynamic-attributes.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/dynamic-attributes.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/dynamic-attributes.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.replay(servletContext);
        init();
        request.setRequestURI("/tutorial/test6.action");
        ActionMapping mapping = container.getInstance(ActionMapper.class).getMapping(request, configurationManager);
        dispatcher.serviceAction(request, response, mapping);
        // TODO lukaszlenart: remove expectedJDK15 and if() after switching to Java 1.6
        String expectedJDK15 =
                "<input type=\"text\" name=\"test\" value=\"\" id=\"test\" foo=\"bar\" placeholder=\"input\"/>" +
                "<input type=\"text\" name=\"test\" value=\"\" id=\"test\" foo=\"bar\" placeholder=\"input\"/>" +
                "<input type=\"text\" name=\"test\" value=\"\" id=\"test\" break=\"true\"/>";
        String expectedJDK16 =
                "<input type=\"text\" name=\"test\" value=\"\" id=\"test\" placeholder=\"input\" foo=\"bar\"/>" +
                "<input type=\"text\" name=\"test\" value=\"\" id=\"test\" placeholder=\"input\" foo=\"bar\"/>" +
                "<input type=\"text\" name=\"test\" value=\"\" id=\"test\" break=\"true\"/>" +
                "<input type=\"text\" name=\"required\" value=\"\" id=\"required\" required=\"true\"/>";
        String result = stringWriter.toString();
        // Dispatch on the attribute order actually produced, then compare against that variant.
        if (result.contains("foo=\"bar\" placeholder=\"input\"")) {
            assertEquals(expectedJDK15, result);
        } else {
            assertEquals(expectedJDK16, result);
        }
    }

    /** Renders /tutorial/test7.action; a radio map built in the template produces labeled inputs. */
    public void testManualListInTemplate() throws Exception {
        File file = new File(FreeMarkerResultTest.class.getResource("manual-list.ftl").toURI());
        EasyMock.expect(servletContext.getRealPath("/tutorial/org/apache/struts2/views/freemarker/manual-list.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/radiomap.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/radiomap.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/css.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/css.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/css.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/scripting-events.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/scripting-events.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/scripting-events.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/common-attributes.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/common-attributes.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/common-attributes.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/simple/dynamic-attributes.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/simple/dynamic-attributes.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.expect(servletContext.getRealPath("/template/~~~simple/dynamic-attributes.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.replay(servletContext);
        init();
        request.setRequestURI("/tutorial/test7.action");
        ActionMapping mapping = container.getInstance(ActionMapper.class).getMapping(request, configurationManager);
        dispatcher.serviceAction(request, response, mapping);
        String expected =
                "<input type=\"radio\" name=\"client\" id=\"client_foo\" value=\"foo\"/><label for=\"client_foo\">foo</label>\n" +
                "<input type=\"radio\" name=\"client\" id=\"client_bar\" value=\"bar\"/><label for=\"client_bar\">bar</label>\n" +
                "\n" +
                "<input type=\"radio\" name=\"car\" id=\"carford\" value=\"ford\"/><label for=\"carford\">Ford Motor Co</label>\n" +
                "<input type=\"radio\" name=\"car\" id=\"cartoyota\" value=\"toyota\"/><label for=\"cartoyota\">Toyota</label>\n";
        // normalize() flattens platform-specific whitespace before comparing.
        assertEquals(normalize(expected), normalize(stringWriter.toString()));
    }

    /** Renders /tutorial/test8.action using a custom theme template that adds an attribute. */
    public void testDynamicAttributesInTheme() throws Exception {
        File file = new File(FreeMarkerResultTest.class.getResource("customTextField.ftl").toURI());
        EasyMock.expect(servletContext.getRealPath("/tutorial/org/apache/struts2/views/freemarker/customTextField.ftl")).andReturn(file.getAbsolutePath());
        file = new File(ClassLoaderUtil.getResource("template/test/text.ftl", getClass()).toURI());
        EasyMock.expect(servletContext.getRealPath("/template/test/text.ftl")).andReturn(file.getAbsolutePath());
        EasyMock.replay(servletContext);
        init();
        request.setRequestURI("/tutorial/test8.action");
        ActionMapping mapping = container.getInstance(ActionMapper.class).getMapping(request, configurationManager);
        dispatcher.serviceAction(request, response, mapping);
        String expected = "<input type=\"text\"autofocus=\"autofocus\"/>";
        assertEquals(expected, stringWriter.toString());
    }

    /**
     * Builds the request/response/ActionContext plumbing around the (already replayed) mock
     * ServletContext and points the shared FreeMarker configuration at it. Must be called AFTER
     * the test has registered its getRealPath stubs and called EasyMock.replay().
     */
    private void init() throws MalformedURLException, URISyntaxException {
        stringWriter = new StringWriter();
        writer = new PrintWriter(stringWriter);
        response = new StrutsMockHttpServletResponse();
        response.setWriter(writer);
        request = new MockHttpServletRequest();
        stack = ActionContext.getContext().getValueStack();
        context = new ActionContext(stack.getContext());
        context.put(StrutsStatics.HTTP_RESPONSE, response);
        context.put(StrutsStatics.HTTP_REQUEST, request);
        context.put(StrutsStatics.SERVLET_CONTEXT, servletContext);
        ServletActionContext.setServletContext(servletContext);
        ServletActionContext.setRequest(request);
        ServletActionContext.setResponse(response);
        servletContext.setAttribute(FreemarkerManager.CONFIG_SERVLET_CONTEXT_KEY, null);
        invocation = new MockActionInvocation();
        invocation.setStack(stack);
        invocation.setInvocationContext(context);
        //get fm config to use it in mock servlet context
        FreemarkerManager freemarkerManager = container.getInstance(FreemarkerManager.class);
        freemarkerConfig = freemarkerManager.getConfiguration(ServletActionContext.getServletContext());
        // RETHROW so template errors fail the test instead of being rendered into the output.
        freemarkerConfig.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
        freemarkerConfig.setServletContextForTemplateLoading(servletContext, null);
    }

    @Override
    public void setUp() throws Exception {
        super.setUp();
        // Nice mock: unstubbed calls return defaults instead of failing, so each test only has
        // to stub the getRealPath calls it actually expects.
        servletContext = EasyMock.createNiceMock(ServletContext.class);
        EasyMock.expect(servletContext.getInitParameter("TemplatePath")).andReturn(null);
        EasyMock.expect(servletContext.getInitParameter("templatePath")).andReturn(null);
        EasyMock.expect(servletContext.getAttribute(FreemarkerManager.CONFIG_SERVLET_CONTEXT_KEY)).andReturn(freemarkerConfig).anyTimes();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.direct;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;

import org.apache.beam.runners.direct.InMemoryWatermarkManager.FiredTimers;
import org.apache.beam.runners.direct.InMemoryWatermarkManager.TimerUpdate;
import org.apache.beam.runners.direct.InMemoryWatermarkManager.TimerUpdate.TimerUpdateBuilder;
import org.apache.beam.runners.direct.InMemoryWatermarkManager.TransformWatermarks;
import org.apache.beam.runners.direct.InProcessPipelineRunner.CommittedBundle;
import org.apache.beam.runners.direct.InProcessPipelineRunner.UncommittedBundle;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.coders.VarLongCoder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Filter;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.WithKeys;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.TimeDomain;
import org.apache.beam.sdk.util.TimerInternals.TimerData;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.state.StateNamespaces;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionList;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TimestampedValue;

import com.google.common.collect.ImmutableList;

import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.joda.time.Instant;
import org.joda.time.ReadableInstant;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import javax.annotation.Nullable;

/**
 * Tests for {@link InMemoryWatermarkManager}.
 */
@RunWith(JUnit4.class)
public class InMemoryWatermarkManagerTest implements Serializable {
  private transient MockClock clock;

  // Pipeline graph under test:
  //   createdInts -> filtered -> filteredTimesTwo
  //   createdInts -> keyed
  //   {createdInts, intsToFlatten} -> flattened
  private transient PCollection<Integer> createdInts;
  private transient PCollection<Integer> filtered;
  private transient PCollection<Integer> filteredTimesTwo;
  private transient PCollection<KV<String, Integer>> keyed;

  private transient PCollection<Integer> intsToFlatten;
  private transient PCollection<Integer> flattened;

  private transient InMemoryWatermarkManager manager;
  private transient BundleFactory bundleFactory;

  @Before
  public void setup() {
    TestPipeline p = TestPipeline.create();

    createdInts = p.apply("createdInts", Create.of(1, 2, 3));

    filtered = createdInts.apply("filtered", Filter.greaterThan(1));
    filteredTimesTwo = filtered.apply("timesTwo", ParDo.of(new DoFn<Integer, Integer>() {
      @Override
      public void processElement(DoFn<Integer, Integer>.ProcessContext c) throws Exception {
        c.output(c.element() * 2);
      }
    }));

    keyed = createdInts.apply("keyed", WithKeys.<String, Integer>of("MyKey"));

    intsToFlatten = p.apply("intsToFlatten", Create.of(-1, 256, 65535));
    PCollectionList<Integer> preFlatten = PCollectionList.of(createdInts).and(intsToFlatten);
    flattened = preFlatten.apply("flattened", Flatten.<Integer>pCollections());

    // Root transforms produce input from outside the graph; the consumers map tells the
    // watermark manager which transforms read each PCollection.
    Collection<AppliedPTransform<?, ?, ?>> rootTransforms =
        ImmutableList.<AppliedPTransform<?, ?, ?>>of(
            createdInts.getProducingTransformInternal(),
            intsToFlatten.getProducingTransformInternal());
    Map<PValue, Collection<AppliedPTransform<?, ?, ?>>> consumers = new HashMap<>();
    consumers.put(
        createdInts,
        ImmutableList.<AppliedPTransform<?, ?, ?>>of(filtered.getProducingTransformInternal(),
            keyed.getProducingTransformInternal(), flattened.getProducingTransformInternal()));
    consumers.put(
        filtered,
        Collections.<AppliedPTransform<?, ?, ?>>singleton(
            filteredTimesTwo.getProducingTransformInternal()));
    consumers.put(filteredTimesTwo, Collections.<AppliedPTransform<?, ?, ?>>emptyList());
    consumers.put(keyed, Collections.<AppliedPTransform<?, ?, ?>>emptyList());

    consumers.put(
        intsToFlatten,
        Collections.<AppliedPTransform<?, ?, ?>>singleton(
            flattened.getProducingTransformInternal()));
    consumers.put(flattened, Collections.<AppliedPTransform<?, ?, ?>>emptyList());

    clock = MockClock.fromInstant(new Instant(1000));

    manager = InMemoryWatermarkManager.create(clock, rootTransforms, consumers);
    bundleFactory = InProcessBundleFactory.create();
  }

  /**
   * Demonstrates that getWatermark, when called on an {@link AppliedPTransform} that has not
   * processed any elements, returns the {@link BoundedWindow#TIMESTAMP_MIN_VALUE}.
   */
  @Test
  public void getWatermarkForUntouchedTransform() {
    TransformWatermarks watermarks =
        manager.getWatermarks(createdInts.getProducingTransformInternal());

    assertThat(watermarks.getInputWatermark(), equalTo(BoundedWindow.TIMESTAMP_MIN_VALUE));
    assertThat(watermarks.getOutputWatermark(), equalTo(BoundedWindow.TIMESTAMP_MIN_VALUE));
  }

  /**
   * Demonstrates that getWatermark for a transform that consumes no input uses the Watermark
   * Hold value provided to it as the output watermark.
   */
  @Test
  public void getWatermarkForUpdatedSourceTransform() {
    CommittedBundle<Integer> output = multiWindowedBundle(createdInts, 1);
    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(output)),
        new Instant(8000L));
    manager.refreshAll();
    TransformWatermarks updatedSourceWatermark =
        manager.getWatermarks(createdInts.getProducingTransformInternal());

    assertThat(updatedSourceWatermark.getOutputWatermark(), equalTo(new Instant(8000L)));
  }

  /**
   * Demonstrates that getWatermark for a transform that takes multiple inputs is held to the
   * minimum watermark across all of its inputs.
   */
  @Test
  public void getWatermarkForMultiInputTransform() {
    CommittedBundle<Integer> secondPcollectionBundle = multiWindowedBundle(intsToFlatten, -1);

    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(intsToFlatten.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(secondPcollectionBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();

    // We didn't do anything for the first source, so we shouldn't have progressed the watermark
    TransformWatermarks firstSourceWatermark =
        manager.getWatermarks(createdInts.getProducingTransformInternal());
    assertThat(
        firstSourceWatermark.getOutputWatermark(),
        not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE)));

    // the Second Source output all of the elements so it should be done (with a watermark at the
    // end of time).
    TransformWatermarks secondSourceWatermark =
        manager.getWatermarks(intsToFlatten.getProducingTransformInternal());
    assertThat(
        secondSourceWatermark.getOutputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));

    // We haven't consumed anything yet, so our watermark should be at the beginning of time
    TransformWatermarks transformWatermark =
        manager.getWatermarks(flattened.getProducingTransformInternal());
    assertThat(
        transformWatermark.getInputWatermark(), not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE)));
    assertThat(
        transformWatermark.getOutputWatermark(), not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE)));

    CommittedBundle<Integer> flattenedBundleSecondCreate = multiWindowedBundle(flattened, -1);
    // We have finished processing the bundle from the second PCollection, but we haven't consumed
    // anything from the first PCollection yet; so our watermark shouldn't advance
    manager.updateWatermarks(secondPcollectionBundle, TimerUpdate.empty(),
        result(flattened.getProducingTransformInternal(),
            secondPcollectionBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>singleton(flattenedBundleSecondCreate)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    TransformWatermarks transformAfterProcessing =
        manager.getWatermarks(flattened.getProducingTransformInternal());
    // NOTE(review): this update is issued twice with identical arguments; presumably exercising
    // idempotence of updateWatermarks — confirm before simplifying.
    manager.updateWatermarks(secondPcollectionBundle, TimerUpdate.empty(),
        result(flattened.getProducingTransformInternal(),
            secondPcollectionBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>singleton(flattenedBundleSecondCreate)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();
    assertThat(
        transformAfterProcessing.getInputWatermark(),
        not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE)));
    assertThat(
        transformAfterProcessing.getOutputWatermark(),
        not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE)));

    Instant firstCollectionTimestamp = new Instant(10000);
    CommittedBundle<Integer> firstPcollectionBundle =
        timestampedBundle(createdInts, TimestampedValue.<Integer>of(5, firstCollectionTimestamp));
    // the source is done, but elements are still buffered. The source output watermark should be
    // past the end of the global window
    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(firstPcollectionBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();
    TransformWatermarks firstSourceWatermarks =
        manager.getWatermarks(createdInts.getProducingTransformInternal());
    assertThat(
        firstSourceWatermarks.getOutputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));

    // We still haven't consumed any of the first source's input, so the watermark should still not
    // progress
    TransformWatermarks flattenAfterSourcesProduced =
        manager.getWatermarks(flattened.getProducingTransformInternal());
    assertThat(
        flattenAfterSourcesProduced.getInputWatermark(), not(laterThan(firstCollectionTimestamp)));
    assertThat(
        flattenAfterSourcesProduced.getOutputWatermark(), not(laterThan(firstCollectionTimestamp)));

    // We have buffered inputs, but since the PCollection has all of the elements (has a WM past the
    // end of the global window), we should have a watermark equal to the min among buffered
    // elements
    TransformWatermarks withBufferedElements =
        manager.getWatermarks(flattened.getProducingTransformInternal());
    assertThat(withBufferedElements.getInputWatermark(), equalTo(firstCollectionTimestamp));
    assertThat(withBufferedElements.getOutputWatermark(), equalTo(firstCollectionTimestamp));

    CommittedBundle<?> completedFlattenBundle =
        bundleFactory.createRootBundle(flattened).commit(BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.updateWatermarks(firstPcollectionBundle, TimerUpdate.empty(),
        result(flattened.getProducingTransformInternal(),
            firstPcollectionBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>singleton(completedFlattenBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();
    TransformWatermarks afterConsumingAllInput =
        manager.getWatermarks(flattened.getProducingTransformInternal());
    assertThat(
        afterConsumingAllInput.getInputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
    assertThat(
        afterConsumingAllInput.getOutputWatermark(),
        not(laterThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
  }

  /**
   * Demonstrates that pending elements are independent among
   * {@link AppliedPTransform AppliedPTransforms} that consume the same input {@link PCollection}.
   */
  @Test
  public void getWatermarkForMultiConsumedCollection() {
    CommittedBundle<Integer> createdBundle = timestampedBundle(createdInts,
        TimestampedValue.of(1, new Instant(1_000_000L)), TimestampedValue.of(2, new Instant(1234L)),
        TimestampedValue.of(3, new Instant(-1000L)));
    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(createdBundle)),
        new Instant(Long.MAX_VALUE));
    manager.refreshAll();
    TransformWatermarks createdAfterProducing =
        manager.getWatermarks(createdInts.getProducingTransformInternal());
    assertThat(
        createdAfterProducing.getOutputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));

    CommittedBundle<KV<String, Integer>> keyBundle = timestampedBundle(keyed,
        TimestampedValue.of(KV.of("MyKey", 1), new Instant(1_000_000L)),
        TimestampedValue.of(KV.of("MyKey", 2), new Instant(1234L)),
        TimestampedValue.of(KV.of("MyKey", 3), new Instant(-1000L)));
    manager.updateWatermarks(createdBundle, TimerUpdate.empty(),
        result(keyed.getProducingTransformInternal(),
            createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>singleton(keyBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();
    // "keyed" consumed its whole input, so both of its watermarks may reach end-of-time.
    TransformWatermarks keyedWatermarks =
        manager.getWatermarks(keyed.getProducingTransformInternal());
    assertThat(
        keyedWatermarks.getInputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
    assertThat(
        keyedWatermarks.getOutputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));

    // "filtered" consumes the same input but has processed nothing, so it stays at the minimum
    // pending-element timestamp (-1000).
    TransformWatermarks filteredWatermarks =
        manager.getWatermarks(filtered.getProducingTransformInternal());
    assertThat(filteredWatermarks.getInputWatermark(), not(laterThan(new Instant(-1000L))));
    assertThat(filteredWatermarks.getOutputWatermark(), not(laterThan(new Instant(-1000L))));

    CommittedBundle<Integer> filteredBundle =
        timestampedBundle(filtered, TimestampedValue.of(2, new Instant(1234L)));
    manager.updateWatermarks(createdBundle, TimerUpdate.empty(),
        result(filtered.getProducingTransformInternal(),
            createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>singleton(filteredBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();
    TransformWatermarks filteredProcessedWatermarks =
        manager.getWatermarks(filtered.getProducingTransformInternal());
    assertThat(
        filteredProcessedWatermarks.getInputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
    assertThat(
        filteredProcessedWatermarks.getOutputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
  }

  /**
   * Demonstrates that the watermark of an {@link AppliedPTransform} is held to the provided
   * watermark hold.
   */
  @Test
  public void updateWatermarkWithWatermarkHolds() {
    CommittedBundle<Integer> createdBundle = timestampedBundle(createdInts,
        TimestampedValue.of(1, new Instant(1_000_000L)), TimestampedValue.of(2, new Instant(1234L)),
        TimestampedValue.of(3, new Instant(-1000L)));
    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(createdBundle)),
        new Instant(Long.MAX_VALUE));

    CommittedBundle<KV<String, Integer>> keyBundle = timestampedBundle(keyed,
        TimestampedValue.of(KV.of("MyKey", 1), new Instant(1_000_000L)),
        TimestampedValue.of(KV.of("MyKey", 2), new Instant(1234L)),
        TimestampedValue.of(KV.of("MyKey", 3), new Instant(-1000L)));
    // A hold at 500 caps the output watermark even though all input was consumed.
    manager.updateWatermarks(createdBundle, TimerUpdate.empty(),
        result(keyed.getProducingTransformInternal(),
            createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>singleton(keyBundle)),
        new Instant(500L));
    manager.refreshAll();
    TransformWatermarks keyedWatermarks =
        manager.getWatermarks(keyed.getProducingTransformInternal());
    assertThat(
        keyedWatermarks.getInputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
    assertThat(keyedWatermarks.getOutputWatermark(), not(laterThan(new Instant(500L))));
  }

  /**
   * Demonstrates that the watermark of an {@link AppliedPTransform} is held to the provided
   * watermark hold.
   */
  @Test
  public void updateWatermarkWithKeyedWatermarkHolds() {
    CommittedBundle<Integer> firstKeyBundle =
        bundleFactory.createKeyedBundle(null, StructuralKey.of("Odd", StringUtf8Coder.of()),
            createdInts)
            .add(WindowedValue.timestampedValueInGlobalWindow(1, new Instant(1_000_000L)))
            .add(WindowedValue.timestampedValueInGlobalWindow(3, new Instant(-1000L)))
            .commit(clock.now());

    CommittedBundle<Integer> secondKeyBundle =
        bundleFactory.createKeyedBundle(null, StructuralKey.of("Even", StringUtf8Coder.of()),
            createdInts)
            .add(WindowedValue.timestampedValueInGlobalWindow(2, new Instant(1234L)))
            .commit(clock.now());

    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            ImmutableList.of(firstKeyBundle, secondKeyBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);

    // Per-key holds: "Odd" held at -1000, "Even" at 1234; the transform-wide output watermark is
    // the minimum across keys.
    manager.updateWatermarks(firstKeyBundle, TimerUpdate.empty(),
        result(filtered.getProducingTransformInternal(),
            firstKeyBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>emptyList()),
        new Instant(-1000L));
    manager.updateWatermarks(secondKeyBundle, TimerUpdate.empty(),
        result(filtered.getProducingTransformInternal(),
            secondKeyBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>emptyList()),
        new Instant(1234L));
    manager.refreshAll();

    TransformWatermarks filteredWatermarks =
        manager.getWatermarks(filtered.getProducingTransformInternal());
    assertThat(filteredWatermarks.getInputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
    assertThat(filteredWatermarks.getOutputWatermark(), not(laterThan(new Instant(-1000L))));

    // Releasing the "Odd" key's hold leaves only the "Even" hold (1234).
    CommittedBundle<Integer> fauxFirstKeyTimerBundle =
        bundleFactory.createKeyedBundle(null, StructuralKey.of("Odd", StringUtf8Coder.of()),
            createdInts).commit(clock.now());
    manager.updateWatermarks(fauxFirstKeyTimerBundle, TimerUpdate.empty(),
        result(filtered.getProducingTransformInternal(),
            fauxFirstKeyTimerBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>emptyList()),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();

    assertThat(filteredWatermarks.getOutputWatermark(), equalTo(new Instant(1234L)));

    // Moving the "Even" key's hold forward advances the watermark to the new hold (5678) …
    CommittedBundle<Integer> fauxSecondKeyTimerBundle =
        bundleFactory.createKeyedBundle(null, StructuralKey.of("Even", StringUtf8Coder.of()),
            createdInts).commit(clock.now());
    manager.updateWatermarks(fauxSecondKeyTimerBundle, TimerUpdate.empty(),
        result(filtered.getProducingTransformInternal(),
            fauxSecondKeyTimerBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>emptyList()),
        new Instant(5678L));
    manager.refreshAll();
    assertThat(filteredWatermarks.getOutputWatermark(), equalTo(new Instant(5678L)));

    // … and releasing it entirely lets the watermark reach end-of-time.
    manager.updateWatermarks(fauxSecondKeyTimerBundle, TimerUpdate.empty(),
        result(filtered.getProducingTransformInternal(),
            fauxSecondKeyTimerBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>emptyList()),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.refreshAll();
    assertThat(filteredWatermarks.getOutputWatermark(),
        not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
  }

  /**
   * Demonstrates that updated output watermarks are monotonic in the presence of late data, when
   * called on an {@link AppliedPTransform} that consumes no input.
   */
  @Test
  public void updateOutputWatermarkShouldBeMonotonic() {
    CommittedBundle<?> firstInput =
        bundleFactory.createRootBundle(createdInts).commit(BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(firstInput)),
        new Instant(0L));
    manager.refreshAll();
    TransformWatermarks firstWatermarks =
        manager.getWatermarks(createdInts.getProducingTransformInternal());
    assertThat(firstWatermarks.getOutputWatermark(), equalTo(new Instant(0L)));

    // A later update with an EARLIER hold (-250) must not move the watermark backwards.
    CommittedBundle<?> secondInput =
        bundleFactory.createRootBundle(createdInts).commit(BoundedWindow.TIMESTAMP_MAX_VALUE);
    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(secondInput)),
        new Instant(-250L));
    manager.refreshAll();
    TransformWatermarks secondWatermarks =
        manager.getWatermarks(createdInts.getProducingTransformInternal());
    assertThat(secondWatermarks.getOutputWatermark(), not(earlierThan(new Instant(0L))));
  }

  /**
   * Demonstrates that updated output watermarks are monotonic in the presence of watermark holds
   * that become earlier than a previous watermark hold.
   */
  @Test
  public void updateWatermarkWithHoldsShouldBeMonotonic() {
    CommittedBundle<Integer> createdBundle = timestampedBundle(createdInts,
        TimestampedValue.of(1, new Instant(1_000_000L)), TimestampedValue.of(2, new Instant(1234L)),
        TimestampedValue.of(3, new Instant(-1000L)));
    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(createdBundle)),
        new Instant(Long.MAX_VALUE));

    CommittedBundle<KV<String, Integer>> keyBundle = timestampedBundle(keyed,
        TimestampedValue.of(KV.of("MyKey", 1), new Instant(1_000_000L)),
        TimestampedValue.of(KV.of("MyKey", 2), new Instant(1234L)),
        TimestampedValue.of(KV.of("MyKey", 3), new Instant(-1000L)));
    manager.updateWatermarks(createdBundle, TimerUpdate.empty(),
        result(keyed.getProducingTransformInternal(),
            createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()),
            Collections.<CommittedBundle<?>>singleton(keyBundle)),
        new Instant(500L));
    manager.refreshAll();
    TransformWatermarks keyedWatermarks =
        manager.getWatermarks(keyed.getProducingTransformInternal());
    assertThat(
        keyedWatermarks.getInputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
    assertThat(keyedWatermarks.getOutputWatermark(), not(laterThan(new Instant(500L))));
    Instant oldOutputWatermark = keyedWatermarks.getOutputWatermark();

    TransformWatermarks updatedWatermarks =
        manager.getWatermarks(keyed.getProducingTransformInternal());
    assertThat(
        updatedWatermarks.getInputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE)));
    // We added a hold prior to the old watermark; we shouldn't progress (due to the earlier hold)
    // but the watermark is monotonic and should not backslide to the new, earlier hold
    assertThat(updatedWatermarks.getOutputWatermark(), equalTo(oldOutputWatermark));
  }

  /**
   * Demonstrates that elements reported as unprocessed stay pending, holding the consumer's
   * input watermark at the minimum unprocessed timestamp.
   */
  @Test
  public void updateWatermarkWithUnprocessedElements() {
    WindowedValue<Integer> first = WindowedValue.valueInGlobalWindow(1);
    WindowedValue<Integer> second =
        WindowedValue.timestampedValueInGlobalWindow(2, new Instant(-1000L));
    WindowedValue<Integer> third =
        WindowedValue.timestampedValueInGlobalWindow(3, new Instant(1234L));
    CommittedBundle<Integer> createdBundle = bundleFactory.createRootBundle(createdInts)
        .add(first)
        .add(second)
        .add(third)
        .commit(clock.now());

    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(createdBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    CommittedBundle<KV<String, Integer>> keyBundle = timestampedBundle(keyed,
        TimestampedValue.of(KV.of("MyKey", 1), BoundedWindow.TIMESTAMP_MIN_VALUE));
    manager.updateWatermarks(createdBundle, TimerUpdate.empty(),
        result(keyed.getProducingTransformInternal(),
            createdBundle.withElements(ImmutableList.of(second, third)),
            Collections.<CommittedBundle<?>>singleton(keyBundle)),
        BoundedWindow.TIMESTAMP_MAX_VALUE);
    TransformWatermarks keyedWatermarks =
        manager.getWatermarks(keyed.getProducingTransformInternal());
    // the unprocessed second and third are re-added to pending
    assertThat(
        keyedWatermarks.getInputWatermark(), not(laterThan(new Instant(-1000L))));
  }

  /**
   * Demonstrates that updateWatermarks in the presence of late data is monotonic.
   * (Method continues past the end of this chunk.)
   */
  @Test
  public void updateWatermarkWithLateData() {
    Instant sourceWatermark = new Instant(1_000_000L);
    CommittedBundle<Integer> createdBundle = timestampedBundle(createdInts,
        TimestampedValue.of(1, sourceWatermark), TimestampedValue.of(2, new Instant(1234L)));

    manager.updateWatermarks(null, TimerUpdate.empty(),
        result(createdInts.getProducingTransformInternal(), null,
            Collections.<CommittedBundle<?>>singleton(createdBundle)),
        sourceWatermark);

    CommittedBundle<KV<String, Integer>> keyBundle = timestampedBundle(keyed,
        TimestampedValue.of(KV.of("MyKey", 1), sourceWatermark),
        TimestampedValue.of(KV.of("MyKey", 2), new Instant(1234L)));

    // Finish processing the on-time data.
The watermarks should progress to be equal to the source manager.updateWatermarks(createdBundle, TimerUpdate.empty(), result(keyed.getProducingTransformInternal(), createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(keyBundle)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks onTimeWatermarks = manager.getWatermarks(keyed.getProducingTransformInternal()); assertThat(onTimeWatermarks.getInputWatermark(), equalTo(sourceWatermark)); assertThat(onTimeWatermarks.getOutputWatermark(), equalTo(sourceWatermark)); CommittedBundle<Integer> lateDataBundle = timestampedBundle(createdInts, TimestampedValue.of(3, new Instant(-1000L))); // the late data arrives in a downstream PCollection after its watermark has advanced past it; // we don't advance the watermark past the current watermark until we've consumed the late data manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(lateDataBundle)), new Instant(2_000_000L)); manager.refreshAll(); TransformWatermarks bufferedLateWm = manager.getWatermarks(createdInts.getProducingTransformInternal()); assertThat(bufferedLateWm.getOutputWatermark(), equalTo(new Instant(2_000_000L))); // The input watermark should be held to its previous value (not advanced due to late data; not // moved backwards in the presence of watermarks due to monotonicity). 
TransformWatermarks lateDataBufferedWatermark = manager.getWatermarks(keyed.getProducingTransformInternal()); assertThat(lateDataBufferedWatermark.getInputWatermark(), not(earlierThan(sourceWatermark))); assertThat(lateDataBufferedWatermark.getOutputWatermark(), not(earlierThan(sourceWatermark))); CommittedBundle<KV<String, Integer>> lateKeyedBundle = timestampedBundle(keyed, TimestampedValue.of(KV.of("MyKey", 3), new Instant(-1000L))); manager.updateWatermarks(lateDataBundle, TimerUpdate.empty(), result(keyed.getProducingTransformInternal(), lateDataBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(lateKeyedBundle)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); } public void updateWatermarkWithDifferentWindowedValueInstances() { manager.updateWatermarks( null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton( bundleFactory .createRootBundle(createdInts) .add(WindowedValue.valueInGlobalWindow(1)) .commit(Instant.now()))), BoundedWindow.TIMESTAMP_MAX_VALUE); CommittedBundle<Integer> createdBundle = bundleFactory.createRootBundle(createdInts) .add(WindowedValue.valueInGlobalWindow(1)) .commit(Instant.now()); manager.updateWatermarks(createdBundle, TimerUpdate.empty(), result(keyed.getProducingTransformInternal(), createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>emptyList()), null); manager.refreshAll(); TransformWatermarks onTimeWatermarks = manager.getWatermarks(keyed.getProducingTransformInternal()); assertThat(onTimeWatermarks.getInputWatermark(), equalTo(BoundedWindow.TIMESTAMP_MAX_VALUE)); } /** * Demonstrates that after watermarks of an upstream transform are updated, but no output has been * produced, the watermarks of a downstream process are advanced. 
*/ @Test public void getWatermarksAfterOnlyEmptyOutput() { CommittedBundle<Integer> emptyCreateOutput = multiWindowedBundle(createdInts); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(emptyCreateOutput)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks updatedSourceWatermarks = manager.getWatermarks(createdInts.getProducingTransformInternal()); assertThat( updatedSourceWatermarks.getOutputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE))); TransformWatermarks finishedFilterWatermarks = manager.getWatermarks(filtered.getProducingTransformInternal()); assertThat( finishedFilterWatermarks.getInputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE))); assertThat( finishedFilterWatermarks.getOutputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE))); } /** * Demonstrates that after watermarks of an upstream transform are updated, but no output has been * produced, and the downstream transform has a watermark hold, the watermark is held to the hold. 
*/ @Test public void getWatermarksAfterHoldAndEmptyOutput() { CommittedBundle<Integer> firstCreateOutput = multiWindowedBundle(createdInts, 1, 2); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(firstCreateOutput)), new Instant(12_000L)); CommittedBundle<Integer> firstFilterOutput = multiWindowedBundle(filtered); manager.updateWatermarks(firstCreateOutput, TimerUpdate.empty(), result(filtered.getProducingTransformInternal(), firstCreateOutput.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(firstFilterOutput)), new Instant(10_000L)); manager.refreshAll(); TransformWatermarks firstFilterWatermarks = manager.getWatermarks(filtered.getProducingTransformInternal()); assertThat(firstFilterWatermarks.getInputWatermark(), not(earlierThan(new Instant(12_000L)))); assertThat(firstFilterWatermarks.getOutputWatermark(), not(laterThan(new Instant(10_000L)))); CommittedBundle<Integer> emptyCreateOutput = multiWindowedBundle(createdInts); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(emptyCreateOutput)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks updatedSourceWatermarks = manager.getWatermarks(createdInts.getProducingTransformInternal()); assertThat( updatedSourceWatermarks.getOutputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE))); TransformWatermarks finishedFilterWatermarks = manager.getWatermarks(filtered.getProducingTransformInternal()); assertThat( finishedFilterWatermarks.getInputWatermark(), not(earlierThan(BoundedWindow.TIMESTAMP_MAX_VALUE))); assertThat(finishedFilterWatermarks.getOutputWatermark(), not(laterThan(new Instant(10_000L)))); } @Test public void getSynchronizedProcessingTimeInputWatermarksHeldToPendingBundles() { TransformWatermarks watermarks = 
manager.getWatermarks(createdInts.getProducingTransformInternal()); assertThat(watermarks.getSynchronizedProcessingInputTime(), equalTo(clock.now())); assertThat( watermarks.getSynchronizedProcessingOutputTime(), equalTo(BoundedWindow.TIMESTAMP_MIN_VALUE)); TransformWatermarks filteredWatermarks = manager.getWatermarks(filtered.getProducingTransformInternal()); // Non-root processing watermarks don't progress until data has been processed assertThat( filteredWatermarks.getSynchronizedProcessingInputTime(), not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE))); assertThat( filteredWatermarks.getSynchronizedProcessingOutputTime(), not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE))); CommittedBundle<Integer> createOutput = bundleFactory.createRootBundle(createdInts).commit(new Instant(1250L)); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(createOutput)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks createAfterUpdate = manager.getWatermarks(createdInts.getProducingTransformInternal()); assertThat(createAfterUpdate.getSynchronizedProcessingInputTime(), equalTo(clock.now())); assertThat(createAfterUpdate.getSynchronizedProcessingOutputTime(), equalTo(clock.now())); TransformWatermarks filterAfterProduced = manager.getWatermarks(filtered.getProducingTransformInternal()); assertThat( filterAfterProduced.getSynchronizedProcessingInputTime(), not(laterThan(clock.now()))); assertThat( filterAfterProduced.getSynchronizedProcessingOutputTime(), not(laterThan(clock.now()))); clock.set(new Instant(1500L)); assertThat(createAfterUpdate.getSynchronizedProcessingInputTime(), equalTo(clock.now())); assertThat(createAfterUpdate.getSynchronizedProcessingOutputTime(), equalTo(clock.now())); assertThat( filterAfterProduced.getSynchronizedProcessingInputTime(), not(laterThan(new Instant(1250L)))); assertThat( 
filterAfterProduced.getSynchronizedProcessingOutputTime(), not(laterThan(new Instant(1250L)))); CommittedBundle<?> filterOutputBundle = bundleFactory.createRootBundle(intsToFlatten).commit(new Instant(1250L)); manager.updateWatermarks(createOutput, TimerUpdate.empty(), result(filtered.getProducingTransformInternal(), createOutput.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(filterOutputBundle)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks filterAfterConsumed = manager.getWatermarks(filtered.getProducingTransformInternal()); assertThat( filterAfterConsumed.getSynchronizedProcessingInputTime(), not(laterThan(createAfterUpdate.getSynchronizedProcessingOutputTime()))); assertThat( filterAfterConsumed.getSynchronizedProcessingOutputTime(), not(laterThan(filterAfterConsumed.getSynchronizedProcessingInputTime()))); } /** * Demonstrates that the Synchronized Processing Time output watermark cannot progress past * pending timers in the same set. This propagates to all downstream SynchronizedProcessingTimes. * * <p>Also demonstrate that the result is monotonic. 
*/ // @Test public void getSynchronizedProcessingTimeOutputHeldToPendingTimers() { CommittedBundle<Integer> createdBundle = multiWindowedBundle(createdInts, 1, 2, 4, 8); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(createdBundle)), new Instant(1248L)); manager.refreshAll(); TransformWatermarks filteredWms = manager.getWatermarks(filtered.getProducingTransformInternal()); TransformWatermarks filteredDoubledWms = manager.getWatermarks(filteredTimesTwo.getProducingTransformInternal()); Instant initialFilteredWm = filteredWms.getSynchronizedProcessingOutputTime(); Instant initialFilteredDoubledWm = filteredDoubledWms.getSynchronizedProcessingOutputTime(); StructuralKey<String> key = StructuralKey.of("key", StringUtf8Coder.of()); CommittedBundle<Integer> filteredBundle = multiWindowedBundle(filtered, 2, 8); TimerData pastTimer = TimerData.of(StateNamespaces.global(), new Instant(250L), TimeDomain.PROCESSING_TIME); TimerData futureTimer = TimerData.of(StateNamespaces.global(), new Instant(4096L), TimeDomain.PROCESSING_TIME); TimerUpdate timers = TimerUpdate.builder(key).setTimer(pastTimer).setTimer(futureTimer).build(); manager.updateWatermarks(createdBundle, timers, result(filtered.getProducingTransformInternal(), createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(filteredBundle)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); Instant startTime = clock.now(); clock.set(startTime.plus(250L)); // We're held based on the past timer assertThat(filteredWms.getSynchronizedProcessingOutputTime(), not(laterThan(startTime))); assertThat(filteredDoubledWms.getSynchronizedProcessingOutputTime(), not(laterThan(startTime))); // And we're monotonic assertThat( filteredWms.getSynchronizedProcessingOutputTime(), not(earlierThan(initialFilteredWm))); assertThat( 
filteredDoubledWms.getSynchronizedProcessingOutputTime(), not(earlierThan(initialFilteredDoubledWm))); Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> firedTimers = manager.extractFiredTimers(); assertThat( firedTimers.get(filtered.getProducingTransformInternal()) .get(key) .getTimers(TimeDomain.PROCESSING_TIME), contains(pastTimer)); // Our timer has fired, but has not been completed, so it holds our synchronized processing WM assertThat(filteredWms.getSynchronizedProcessingOutputTime(), not(laterThan(startTime))); assertThat(filteredDoubledWms.getSynchronizedProcessingOutputTime(), not(laterThan(startTime))); CommittedBundle<Integer> filteredTimerBundle = bundleFactory .createKeyedBundle(null, key, filtered) .commit(BoundedWindow.TIMESTAMP_MAX_VALUE); CommittedBundle<Integer> filteredTimerResult = bundleFactory.createKeyedBundle(null, key, filteredTimesTwo) .commit(filteredWms.getSynchronizedProcessingOutputTime()); // Complete the processing time timer manager.updateWatermarks(filteredTimerBundle, TimerUpdate.builder(key) .withCompletedTimers(Collections.<TimerData>singleton(pastTimer)).build(), result(filtered.getProducingTransformInternal(), filteredTimerBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(filteredTimerResult)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); clock.set(startTime.plus(500L)); assertThat(filteredWms.getSynchronizedProcessingOutputTime(), not(laterThan(clock.now()))); // filtered should be held to the time at which the filteredTimerResult fired assertThat( filteredDoubledWms.getSynchronizedProcessingOutputTime(), not(earlierThan(filteredTimerResult.getSynchronizedProcessingOutputWatermark()))); manager.updateWatermarks(filteredTimerResult, TimerUpdate.empty(), result(filteredTimesTwo.getProducingTransformInternal(), filteredTimerResult.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>emptyList()), 
BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); assertThat(filteredDoubledWms.getSynchronizedProcessingOutputTime(), equalTo(clock.now())); clock.set(new Instant(Long.MAX_VALUE)); assertThat(filteredWms.getSynchronizedProcessingOutputTime(), equalTo(new Instant(4096))); assertThat( filteredDoubledWms.getSynchronizedProcessingOutputTime(), equalTo(new Instant(4096))); } /** * Demonstrates that if any earlier processing holds appear in the synchronized processing time * output hold the result is monotonic. */ @Test public void getSynchronizedProcessingTimeOutputTimeIsMonotonic() { Instant startTime = clock.now(); TransformWatermarks watermarks = manager.getWatermarks(createdInts.getProducingTransformInternal()); assertThat(watermarks.getSynchronizedProcessingInputTime(), equalTo(startTime)); TransformWatermarks filteredWatermarks = manager.getWatermarks(filtered.getProducingTransformInternal()); // Non-root processing watermarks don't progress until data has been processed assertThat( filteredWatermarks.getSynchronizedProcessingInputTime(), not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE))); assertThat( filteredWatermarks.getSynchronizedProcessingOutputTime(), not(laterThan(BoundedWindow.TIMESTAMP_MIN_VALUE))); CommittedBundle<Integer> createOutput = bundleFactory.createRootBundle(createdInts).commit(new Instant(1250L)); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(createOutput)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks createAfterUpdate = manager.getWatermarks(createdInts.getProducingTransformInternal()); assertThat(createAfterUpdate.getSynchronizedProcessingInputTime(), not(laterThan(clock.now()))); assertThat(createAfterUpdate.getSynchronizedProcessingOutputTime(), not(laterThan(clock.now()))); CommittedBundle<Integer> createSecondOutput = bundleFactory.createRootBundle(createdInts).commit(new Instant(750L)); 
manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(createSecondOutput)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); assertThat(createAfterUpdate.getSynchronizedProcessingOutputTime(), equalTo(clock.now())); } @Test public void synchronizedProcessingInputTimeIsHeldToUpstreamProcessingTimeTimers() { CommittedBundle<Integer> created = multiWindowedBundle(createdInts, 1, 2, 3); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(created)), new Instant(40_900L)); manager.refreshAll(); CommittedBundle<Integer> filteredBundle = multiWindowedBundle(filtered, 2, 4); Instant upstreamHold = new Instant(2048L); TimerData upstreamProcessingTimer = TimerData.of(StateNamespaces.global(), upstreamHold, TimeDomain.PROCESSING_TIME); manager.updateWatermarks(created, TimerUpdate.builder(StructuralKey.of("key", StringUtf8Coder.of())) .setTimer(upstreamProcessingTimer) .build(), result(filtered.getProducingTransformInternal(), created.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(filteredBundle)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks downstreamWms = manager.getWatermarks(filteredTimesTwo.getProducingTransformInternal()); assertThat(downstreamWms.getSynchronizedProcessingInputTime(), equalTo(clock.now())); clock.set(BoundedWindow.TIMESTAMP_MAX_VALUE); assertThat(downstreamWms.getSynchronizedProcessingInputTime(), equalTo(upstreamHold)); manager.extractFiredTimers(); // Pending processing time timers that have been fired but aren't completed hold the // synchronized processing time assertThat(downstreamWms.getSynchronizedProcessingInputTime(), equalTo(upstreamHold)); CommittedBundle<Integer> otherCreated = multiWindowedBundle(createdInts, 4, 8, 12); 
manager.updateWatermarks(otherCreated, TimerUpdate.builder(StructuralKey.of("key", StringUtf8Coder.of())) .withCompletedTimers(Collections.singleton(upstreamProcessingTimer)).build(), result(filtered.getProducingTransformInternal(), otherCreated.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>emptyList()), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); assertThat(downstreamWms.getSynchronizedProcessingInputTime(), not(earlierThan(clock.now()))); } @Test public void synchronizedProcessingInputTimeIsHeldToPendingBundleTimes() { CommittedBundle<Integer> created = multiWindowedBundle(createdInts, 1, 2, 3); manager.updateWatermarks( null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>singleton(created)), new Instant(29_919_235L)); Instant upstreamHold = new Instant(2048L); CommittedBundle<Integer> filteredBundle = bundleFactory.createKeyedBundle(created, StructuralKey.of("key", StringUtf8Coder.of()), filtered).commit(upstreamHold); manager.updateWatermarks( created, TimerUpdate.empty(), result(filtered.getProducingTransformInternal(), created.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(filteredBundle)), BoundedWindow.TIMESTAMP_MAX_VALUE); manager.refreshAll(); TransformWatermarks downstreamWms = manager.getWatermarks(filteredTimesTwo.getProducingTransformInternal()); assertThat(downstreamWms.getSynchronizedProcessingInputTime(), equalTo(clock.now())); clock.set(BoundedWindow.TIMESTAMP_MAX_VALUE); assertThat(downstreamWms.getSynchronizedProcessingInputTime(), equalTo(upstreamHold)); } @Test public void extractFiredTimersReturnsFiredEventTimeTimers() { Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> initialTimers = manager.extractFiredTimers(); // Watermarks haven't advanced assertThat(initialTimers.entrySet(), emptyIterable()); // Advance WM of keyed past the first 
timer, but ahead of the second and third CommittedBundle<Integer> createdBundle = multiWindowedBundle(filtered); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.singleton(createdBundle)), new Instant(1500L)); manager.refreshAll(); TimerData earliestTimer = TimerData.of(StateNamespaces.global(), new Instant(1000), TimeDomain.EVENT_TIME); TimerData middleTimer = TimerData.of(StateNamespaces.global(), new Instant(5000L), TimeDomain.EVENT_TIME); TimerData lastTimer = TimerData.of(StateNamespaces.global(), new Instant(10000L), TimeDomain.EVENT_TIME); StructuralKey<byte[]> key = StructuralKey.of(new byte[] {1, 4, 9}, ByteArrayCoder.of()); TimerUpdate update = TimerUpdate.builder(key) .setTimer(earliestTimer) .setTimer(middleTimer) .setTimer(lastTimer) .build(); manager.updateWatermarks(createdBundle, update, result(filtered.getProducingTransformInternal(), createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(multiWindowedBundle(intsToFlatten))), new Instant(1000L)); manager.refreshAll(); Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> firstTransformFiredTimers = manager.extractFiredTimers(); assertThat( firstTransformFiredTimers.get(filtered.getProducingTransformInternal()), not(nullValue())); Map<StructuralKey<?>, FiredTimers> firstFilteredTimers = firstTransformFiredTimers.get(filtered.getProducingTransformInternal()); assertThat(firstFilteredTimers.get(key), not(nullValue())); FiredTimers firstFired = firstFilteredTimers.get(key); assertThat(firstFired.getTimers(TimeDomain.EVENT_TIME), contains(earliestTimer)); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>emptyList()), new Instant(50_000L)); manager.refreshAll(); Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> secondTransformFiredTimers = 
manager.extractFiredTimers(); assertThat( secondTransformFiredTimers.get(filtered.getProducingTransformInternal()), not(nullValue())); Map<StructuralKey<?>, FiredTimers> secondFilteredTimers = secondTransformFiredTimers.get(filtered.getProducingTransformInternal()); assertThat(secondFilteredTimers.get(key), not(nullValue())); FiredTimers secondFired = secondFilteredTimers.get(key); // Contains, in order, middleTimer and then lastTimer assertThat(secondFired.getTimers(TimeDomain.EVENT_TIME), contains(middleTimer, lastTimer)); } @Test public void extractFiredTimersReturnsFiredProcessingTimeTimers() { Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> initialTimers = manager.extractFiredTimers(); // Watermarks haven't advanced assertThat(initialTimers.entrySet(), emptyIterable()); // Advance WM of keyed past the first timer, but ahead of the second and third CommittedBundle<Integer> createdBundle = multiWindowedBundle(filtered); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.singleton(createdBundle)), new Instant(1500L)); TimerData earliestTimer = TimerData.of(StateNamespaces.global(), new Instant(999L), TimeDomain.PROCESSING_TIME); TimerData middleTimer = TimerData.of(StateNamespaces.global(), new Instant(5000L), TimeDomain.PROCESSING_TIME); TimerData lastTimer = TimerData.of(StateNamespaces.global(), new Instant(10000L), TimeDomain.PROCESSING_TIME); StructuralKey<?> key = StructuralKey.of(-12L, VarLongCoder.of()); TimerUpdate update = TimerUpdate.builder(key) .setTimer(lastTimer) .setTimer(earliestTimer) .setTimer(middleTimer) .build(); manager.updateWatermarks( createdBundle, update, result(filtered.getProducingTransformInternal(), createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(multiWindowedBundle(intsToFlatten))), new Instant(1000L)); manager.refreshAll(); Map<AppliedPTransform<?, ?, ?>, 
Map<StructuralKey<?>, FiredTimers>> firstTransformFiredTimers = manager.extractFiredTimers(); assertThat( firstTransformFiredTimers.get(filtered.getProducingTransformInternal()), not(nullValue())); Map<StructuralKey<?>, FiredTimers> firstFilteredTimers = firstTransformFiredTimers.get(filtered.getProducingTransformInternal()); assertThat(firstFilteredTimers.get(key), not(nullValue())); FiredTimers firstFired = firstFilteredTimers.get(key); assertThat(firstFired.getTimers(TimeDomain.PROCESSING_TIME), contains(earliestTimer)); clock.set(new Instant(50_000L)); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>emptyList()), new Instant(50_000L)); manager.refreshAll(); Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> secondTransformFiredTimers = manager.extractFiredTimers(); assertThat( secondTransformFiredTimers.get(filtered.getProducingTransformInternal()), not(nullValue())); Map<StructuralKey<?>, FiredTimers> secondFilteredTimers = secondTransformFiredTimers.get(filtered.getProducingTransformInternal()); assertThat(secondFilteredTimers.get(key), not(nullValue())); FiredTimers secondFired = secondFilteredTimers.get(key); // Contains, in order, middleTimer and then lastTimer assertThat(secondFired.getTimers(TimeDomain.PROCESSING_TIME), contains(middleTimer, lastTimer)); } @Test public void extractFiredTimersReturnsFiredSynchronizedProcessingTimeTimers() { Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> initialTimers = manager.extractFiredTimers(); // Watermarks haven't advanced assertThat(initialTimers.entrySet(), emptyIterable()); // Advance WM of keyed past the first timer, but ahead of the second and third CommittedBundle<Integer> createdBundle = multiWindowedBundle(filtered); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.singleton(createdBundle)), new 
Instant(1500L)); TimerData earliestTimer = TimerData.of( StateNamespaces.global(), new Instant(999L), TimeDomain.SYNCHRONIZED_PROCESSING_TIME); TimerData middleTimer = TimerData.of( StateNamespaces.global(), new Instant(5000L), TimeDomain.SYNCHRONIZED_PROCESSING_TIME); TimerData lastTimer = TimerData.of( StateNamespaces.global(), new Instant(10000L), TimeDomain.SYNCHRONIZED_PROCESSING_TIME); StructuralKey<byte[]> key = StructuralKey.of(new byte[] {2, -2, 22}, ByteArrayCoder.of()); TimerUpdate update = TimerUpdate.builder(key) .setTimer(lastTimer) .setTimer(earliestTimer) .setTimer(middleTimer) .build(); manager.updateWatermarks( createdBundle, update, result(filtered.getProducingTransformInternal(), createdBundle.withElements(Collections.<WindowedValue<Integer>>emptyList()), Collections.<CommittedBundle<?>>singleton(multiWindowedBundle(intsToFlatten))), new Instant(1000L)); manager.refreshAll(); Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> firstTransformFiredTimers = manager.extractFiredTimers(); assertThat( firstTransformFiredTimers.get(filtered.getProducingTransformInternal()), not(nullValue())); Map<StructuralKey<?>, FiredTimers> firstFilteredTimers = firstTransformFiredTimers.get(filtered.getProducingTransformInternal()); assertThat(firstFilteredTimers.get(key), not(nullValue())); FiredTimers firstFired = firstFilteredTimers.get(key); assertThat( firstFired.getTimers(TimeDomain.SYNCHRONIZED_PROCESSING_TIME), contains(earliestTimer)); clock.set(new Instant(50_000L)); manager.updateWatermarks(null, TimerUpdate.empty(), result(createdInts.getProducingTransformInternal(), null, Collections.<CommittedBundle<?>>emptyList()), new Instant(50_000L)); manager.refreshAll(); Map<AppliedPTransform<?, ?, ?>, Map<StructuralKey<?>, FiredTimers>> secondTransformFiredTimers = manager.extractFiredTimers(); assertThat( secondTransformFiredTimers.get(filtered.getProducingTransformInternal()), not(nullValue())); Map<StructuralKey<?>, FiredTimers> 
secondFilteredTimers = secondTransformFiredTimers.get(filtered.getProducingTransformInternal()); assertThat(secondFilteredTimers.get(key), not(nullValue())); FiredTimers secondFired = secondFilteredTimers.get(key); // Contains, in order, middleTimer and then lastTimer assertThat( secondFired.getTimers(TimeDomain.SYNCHRONIZED_PROCESSING_TIME), contains(middleTimer, lastTimer)); } @Test public void timerUpdateBuilderBuildAddsAllAddedTimers() { TimerData set = TimerData.of(StateNamespaces.global(), new Instant(10L), TimeDomain.EVENT_TIME); TimerData deleted = TimerData.of(StateNamespaces.global(), new Instant(24L), TimeDomain.PROCESSING_TIME); TimerData completedOne = TimerData.of( StateNamespaces.global(), new Instant(1024L), TimeDomain.SYNCHRONIZED_PROCESSING_TIME); TimerData completedTwo = TimerData.of(StateNamespaces.global(), new Instant(2048L), TimeDomain.EVENT_TIME); TimerUpdate update = TimerUpdate.builder(StructuralKey.of("foo", StringUtf8Coder.of())) .withCompletedTimers(ImmutableList.of(completedOne, completedTwo)) .setTimer(set) .deletedTimer(deleted) .build(); assertThat(update.getCompletedTimers(), containsInAnyOrder(completedOne, completedTwo)); assertThat(update.getSetTimers(), contains(set)); assertThat(update.getDeletedTimers(), contains(deleted)); } @Test public void timerUpdateBuilderWithSetThenDeleteHasOnlyDeleted() { TimerUpdateBuilder builder = TimerUpdate.builder(null); TimerData timer = TimerData.of(StateNamespaces.global(), Instant.now(), TimeDomain.EVENT_TIME); TimerUpdate built = builder.setTimer(timer).deletedTimer(timer).build(); assertThat(built.getSetTimers(), emptyIterable()); assertThat(built.getDeletedTimers(), contains(timer)); } @Test public void timerUpdateBuilderWithDeleteThenSetHasOnlySet() { TimerUpdateBuilder builder = TimerUpdate.builder(null); TimerData timer = TimerData.of(StateNamespaces.global(), Instant.now(), TimeDomain.EVENT_TIME); TimerUpdate built = builder.deletedTimer(timer).setTimer(timer).build(); 
assertThat(built.getSetTimers(), contains(timer)); assertThat(built.getDeletedTimers(), emptyIterable()); } @Test public void timerUpdateBuilderWithSetAfterBuildNotAddedToBuilt() { TimerUpdateBuilder builder = TimerUpdate.builder(null); TimerData timer = TimerData.of(StateNamespaces.global(), Instant.now(), TimeDomain.EVENT_TIME); TimerUpdate built = builder.build(); builder.setTimer(timer); assertThat(built.getSetTimers(), emptyIterable()); builder.build(); assertThat(built.getSetTimers(), emptyIterable()); } @Test public void timerUpdateBuilderWithDeleteAfterBuildNotAddedToBuilt() { TimerUpdateBuilder builder = TimerUpdate.builder(null); TimerData timer = TimerData.of(StateNamespaces.global(), Instant.now(), TimeDomain.EVENT_TIME); TimerUpdate built = builder.build(); builder.deletedTimer(timer); assertThat(built.getDeletedTimers(), emptyIterable()); builder.build(); assertThat(built.getDeletedTimers(), emptyIterable()); } @Test public void timerUpdateBuilderWithCompletedAfterBuildNotAddedToBuilt() { TimerUpdateBuilder builder = TimerUpdate.builder(null); TimerData timer = TimerData.of(StateNamespaces.global(), Instant.now(), TimeDomain.EVENT_TIME); TimerUpdate built = builder.build(); builder.withCompletedTimers(ImmutableList.of(timer)); assertThat(built.getCompletedTimers(), emptyIterable()); builder.build(); assertThat(built.getCompletedTimers(), emptyIterable()); } @Test public void timerUpdateWithCompletedTimersNotAddedToExisting() { TimerUpdateBuilder builder = TimerUpdate.builder(null); TimerData timer = TimerData.of(StateNamespaces.global(), Instant.now(), TimeDomain.EVENT_TIME); TimerUpdate built = builder.build(); assertThat(built.getCompletedTimers(), emptyIterable()); assertThat( built.withCompletedTimers(ImmutableList.of(timer)).getCompletedTimers(), contains(timer)); assertThat(built.getCompletedTimers(), emptyIterable()); } private static Matcher<Instant> earlierThan(final Instant laterInstant) { return new BaseMatcher<Instant>() { @Override public 
boolean matches(Object item) { ReadableInstant instant = (ReadableInstant) item; return instant.isBefore(laterInstant); } @Override public void describeTo(Description description) { description.appendText("earlier than ").appendValue(laterInstant); } }; } private static Matcher<Instant> laterThan(final Instant shouldBeEarlier) { return new BaseMatcher<Instant>() { @Override public boolean matches(Object item) { ReadableInstant instant = (ReadableInstant) item; return instant.isAfter(shouldBeEarlier); } @Override public void describeTo(Description description) { description.appendText("later than ").appendValue(shouldBeEarlier); } }; } @SafeVarargs private final <T> CommittedBundle<T> timestampedBundle( PCollection<T> pc, TimestampedValue<T>... values) { UncommittedBundle<T> bundle = bundleFactory.createRootBundle(pc); for (TimestampedValue<T> value : values) { bundle.add( WindowedValue.timestampedValueInGlobalWindow(value.getValue(), value.getTimestamp())); } return bundle.commit(BoundedWindow.TIMESTAMP_MAX_VALUE); } @SafeVarargs private final <T> CommittedBundle<T> multiWindowedBundle(PCollection<T> pc, T... values) { UncommittedBundle<T> bundle = bundleFactory.createRootBundle(pc); Collection<BoundedWindow> windows = ImmutableList.of( GlobalWindow.INSTANCE, new IntervalWindow(BoundedWindow.TIMESTAMP_MIN_VALUE, new Instant(0))); for (T value : values) { bundle.add( WindowedValue.of(value, BoundedWindow.TIMESTAMP_MIN_VALUE, windows, PaneInfo.NO_FIRING)); } return bundle.commit(BoundedWindow.TIMESTAMP_MAX_VALUE); } private final CommittedResult result( AppliedPTransform<?, ?, ?> transform, @Nullable CommittedBundle<?> unprocessedBundle, Iterable<? extends CommittedBundle<?>> bundles) { return CommittedResult.create(StepTransformResult.withoutHold(transform).build(), unprocessedBundle, bundles); } }
// Copyright 2000-2017 JetBrains s.r.o.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.intellij.ui;

import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ui.ThreeStateCheckBox;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreeNode;
import java.awt.*;

import static com.intellij.util.ui.ThreeStateCheckBox.State;

/**
 * A {@link Tree} whose nodes carry a tri-state checkbox. Checkbox toggling,
 * state propagation between parents and children (per {@link CheckPolicy}),
 * and change notification are delegated to {@link CheckboxTreeHelper};
 * listeners are fanned out through an {@link EventDispatcher} of
 * {@link CheckboxTreeListener}.
 */
public class CheckboxTreeBase extends Tree {
  // Owns the check/uncheck propagation logic; configured once in the constructor.
  private final CheckboxTreeHelper myHelper;
  // Multiplexes CheckboxTreeListener callbacks to all registered listeners.
  private final EventDispatcher<CheckboxTreeListener> myEventDispatcher = EventDispatcher.create(CheckboxTreeListener.class);

  /** Creates a tree with a default renderer and no initial model override. */
  public CheckboxTreeBase() {
    this(new CheckboxTreeCellRendererBase(), null);
  }

  /** Creates a tree with the given renderer and root, using the helper's default check policy. */
  public CheckboxTreeBase(final CheckboxTreeCellRendererBase cellRenderer, CheckedTreeNode root) {
    this(cellRenderer, root, CheckboxTreeHelper.DEFAULT_POLICY);
  }

  /**
   * Full constructor.
   *
   * @param cellRenderer renderer used for every cell (checkbox + text part)
   * @param root         optional root; when non-null the model is replaced immediately
   * @param checkPolicy  how checking a node affects its parents/children
   */
  public CheckboxTreeBase(CheckboxTreeCellRendererBase cellRenderer, @Nullable CheckedTreeNode root, CheckPolicy checkPolicy) {
    myHelper = new CheckboxTreeHelper(checkPolicy, myEventDispatcher);
    if (root != null) {
      // override default model ("colors", etc.) ASAP to avoid CCE in renderers
      setModel(new DefaultTreeModel(root));
      setSelectionRow(0);
    }
    // Bridge dispatcher callbacks to the protected template methods so
    // subclasses can override behavior without registering a listener.
    myEventDispatcher.addListener(new CheckboxTreeListener() {
      @Override
      public void mouseDoubleClicked(@NotNull CheckedTreeNode node) {
        onDoubleClick(node);
      }

      @Override
      public void nodeStateChanged(@NotNull CheckedTreeNode node) {
        CheckboxTreeBase.this.onNodeStateChanged(node);
      }

      @Override
      public void beforeNodeStateChanged(@NotNull CheckedTreeNode node) {
        CheckboxTreeBase.this.nodeStateWillChange(node);
      }
    });
    // Installs mouse/keyboard handlers and the renderer on this tree.
    myHelper.initTree(this, this, cellRenderer);
  }

  /** @deprecated the renderer is installed by the constructor; this simply re-sets it. */
  @Deprecated
  public void installRenderer(final CheckboxTreeCellRendererBase cellRenderer) {
    setCellRenderer(cellRenderer);
  }

  /**
   * @deprecated use {@link #setNodeState} to change node state or subscribe to {@link #addCheckboxTreeListener} to get notifications about state changes
   */
  @Deprecated
  protected boolean toggleNode(CheckedTreeNode node) {
    setNodeState(node, !node.isChecked());
    // Returns the state after toggling (and after any policy-driven adjustments).
    return node.isChecked();
  }

  /**
   * @deprecated use {@link #setNodeState} to change node state or subscribe to {@link #addCheckboxTreeListener} to get notifications about state changes
   */
  @Deprecated
  protected void checkNode(CheckedTreeNode node, boolean checked) {
    setNodeState(node, checked);
  }

  /** Sets the node's checked state, applying the configured {@link CheckPolicy} and firing events. */
  public void setNodeState(@NotNull CheckedTreeNode node, boolean checked) {
    myHelper.setNodeState(this, node, checked);
  }

  public void addCheckboxTreeListener(@NotNull CheckboxTreeListener listener) {
    myEventDispatcher.addListener(listener);
  }

  /** Template method invoked on double click of a node; default does nothing. */
  protected void onDoubleClick(final CheckedTreeNode node) {
  }

  /**
   * Collect checked leaf nodes of the type {@code nodeType} and that are accepted by
   * {@code filter}
   *
   * @param nodeType the type of userobject to consider
   * @param filter the filter (if null all nodes are accepted)
   * @param <T> the type of the node
   * @return an array of collected nodes
   */
  public <T> T[] getCheckedNodes(final Class<T> nodeType, @Nullable final NodeFilter<T> filter) {
    return CheckboxTreeHelper.getCheckedNodes(nodeType, filter, getModel());
  }

  public int getToggleClickCount() {
    // to prevent node expanding/collapsing on checkbox toggling
    return -1;
  }

  /** Template method invoked after a node's checked state changed; default does nothing. */
  protected void onNodeStateChanged(CheckedTreeNode node) {
  }

  /** Template method invoked just before a node's checked state changes; default does nothing. */
  protected void nodeStateWillChange(CheckedTreeNode node) {
  }

  /** @deprecated no longer called; parent adjustment is handled by {@link CheckboxTreeHelper}. */
  @SuppressWarnings("unused")
  @Deprecated
  protected void adjustParents(final CheckedTreeNode node, final boolean checked) {}

  /**
   * Default cell renderer: a panel with a tri-state checkbox on the left and a
   * colored text renderer in the center. Subclasses customize the text part via
   * {@link #customizeRenderer}.
   */
  public static class CheckboxTreeCellRendererBase extends JPanel implements TreeCellRenderer {
    private final ColoredTreeCellRenderer myTextRenderer;
    public final ThreeStateCheckBox myCheckbox;
    // When true, a parent whose children disagree is shown as DONT_CARE (partial).
    private final boolean myUsePartialStatusForParentNodes;
    // When true, a node's own checked flag is used directly, ignoring children.
    protected boolean myIgnoreInheritance;

    public CheckboxTreeCellRendererBase(boolean opaque) {
      this(opaque, true);
    }

    public CheckboxTreeCellRendererBase(boolean opaque, final boolean usePartialStatusForParentNodes) {
      super(new BorderLayout());
      myUsePartialStatusForParentNodes = usePartialStatusForParentNodes;
      myCheckbox = new ThreeStateCheckBox();
      myCheckbox.setSelected(false);
      // The third (DONT_CARE) state is set programmatically, never by user clicks.
      myCheckbox.setThirdStateEnabled(false);
      myTextRenderer = new ColoredTreeCellRenderer() {
        public void customizeCellRenderer(@NotNull JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
        }
      };
      myTextRenderer.setOpaque(opaque);
      add(myCheckbox, BorderLayout.WEST);
      add(myTextRenderer, BorderLayout.CENTER);
    }

    public CheckboxTreeCellRendererBase() {
      this(true);
    }

    /**
     * Configures the checkbox + text panel for the given cell. Only
     * {@link CheckedTreeNode} values get a visible checkbox; everything else
     * renders text only.
     */
    public final Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
      invalidate();
      if (value instanceof CheckedTreeNode) {
        CheckedTreeNode node = (CheckedTreeNode)value;
        State state = getNodeStatus(node);
        myCheckbox.setVisible(true);
        myCheckbox.setEnabled(node.isEnabled());
        myCheckbox.setSelected(state != State.NOT_SELECTED);
        myCheckbox.setState(state);
        myCheckbox.setOpaque(false);
        myCheckbox.setBackground(null);
        setBackground(null);

        if (UIUtil.isUnderWin10LookAndFeel()) {
          // Win10 L&F paints rollover/pressed feedback on the checkbox itself;
          // the tree stores the hovered/pressed node in client properties.
          Object hoverValue = getClientProperty(UIUtil.CHECKBOX_ROLLOVER_PROPERTY);
          myCheckbox.getModel().setRollover(hoverValue == value);
          Object pressedValue = getClientProperty(UIUtil.CHECKBOX_PRESSED_PROPERTY);
          myCheckbox.getModel().setPressed(pressedValue == value);
        }
      }
      else {
        myCheckbox.setVisible(false);
      }
      myTextRenderer.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
      if (UIUtil.isUnderGTKLookAndFeel()) {
        final Color background = selected ? UIUtil.getTreeSelectionBackground() : UIUtil.getTreeTextBackground();
        UIUtil.changeBackGround(this, background);
      }
      customizeRenderer(tree, value, selected, expanded, leaf, row, hasFocus);
      revalidate();
      return this;
    }

    /**
     * Computes the tri-state status of a node: leaves map directly from
     * isChecked(); parents recursively aggregate children, returning DONT_CARE
     * as soon as two children disagree (when partial status is enabled).
     */
    private State getNodeStatus(final CheckedTreeNode node) {
      if (myIgnoreInheritance) return node.isChecked() ? State.SELECTED : State.NOT_SELECTED;
      final boolean checked = node.isChecked();
      if (node.getChildCount() == 0 || !myUsePartialStatusForParentNodes) return checked ? State.SELECTED : State.NOT_SELECTED;

      State result = null;
      for (int i = 0; i < node.getChildCount(); i++) {
        TreeNode child = node.getChildAt(i);
        // Non-CheckedTreeNode children inherit this node's own checked flag.
        State childStatus = child instanceof CheckedTreeNode? getNodeStatus((CheckedTreeNode)child) :
                checked? State.SELECTED : State.NOT_SELECTED;
        if (childStatus == State.DONT_CARE) return State.DONT_CARE;
        if (result == null) {
          result = childStatus;
        }
        else if (result != childStatus) {
          return State.DONT_CARE;
        }
      }
      return result == null ? State.NOT_SELECTED : result;
    }

    /**
     * Should be implemented by concrete implementations.
     * This method is invoked only for customization of component.
     * All component attributes are cleared when this method is being invoked.
     * Note that in general case {@code value} is not an instance of CheckedTreeNode.
     */
    public void customizeRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
      if (value instanceof CheckedTreeNode) {
        customizeCellRenderer(tree, value, selected, expanded, leaf, row, hasFocus);
      }
    }

    /**
     * @see CheckboxTreeCellRendererBase#customizeRenderer(JTree, Object, boolean, boolean, boolean, int, boolean)
     * @deprecated
     */
    @Deprecated
    public void customizeCellRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
    }

    public ColoredTreeCellRenderer getTextRenderer() {
      return myTextRenderer;
    }

    public JCheckBox getCheckbox() {
      return myCheckbox;
    }
  }

  /**
   * Don't use this enum. Left for API compatibility.
   * @see ThreeStateCheckBox.State
   * @deprecated
   */
  @Deprecated
  public enum NodeState {
    FULL, CLEAR, PARTIAL
  }

  /**
   * Describes how a check/uncheck on one node propagates to its relatives.
   * All four flags are read by {@link CheckboxTreeHelper}.
   */
  public static class CheckPolicy {
    final boolean checkChildrenWithCheckedParent;
    final boolean uncheckChildrenWithUncheckedParent;
    final boolean checkParentWithCheckedChild;
    final boolean uncheckParentWithUncheckedChild;

    public CheckPolicy(final boolean checkChildrenWithCheckedParent,
                       final boolean uncheckChildrenWithUncheckedParent,
                       final boolean checkParentWithCheckedChild,
                       final boolean uncheckParentWithUncheckedChild) {
      this.checkChildrenWithCheckedParent = checkChildrenWithCheckedParent;
      this.uncheckChildrenWithUncheckedParent = uncheckChildrenWithUncheckedParent;
      this.checkParentWithCheckedChild = checkParentWithCheckedChild;
      this.uncheckParentWithUncheckedChild = uncheckParentWithUncheckedChild;
    }
  }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.source;

import com.intellij.codeInsight.daemon.JavaErrorMessages;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.filters.ClassFilter;
import com.intellij.psi.impl.source.resolve.ResolveCache;
import com.intellij.psi.impl.source.resolve.StaticImportResolveProcessor;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.scope.processor.FilterScopeProcessor;
import com.intellij.psi.scope.util.PsiScopesUtil;
import com.intellij.psi.tree.ChildRoleBase;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;

/**
 * PSI tree element for the reference part of a static import, i.e. the
 * {@code Qualifier.memberName} in {@code import static Qualifier.memberName;}.
 * The element is both a PSI node and its own {@link PsiReference}; resolution
 * goes through {@link ResolveCache} and may yield multiple results (a field and
 * overloaded methods can share a name).
 *
 * @author dsl
 */
public class PsiImportStaticReferenceElementImpl extends CompositePsiElement implements PsiImportStaticReferenceElement {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.PsiImportStaticReferenceElementImpl");
  // Lazily computed qualified-name text; volatile because clearCaches() may run
  // on another thread. Reset to null whenever the underlying tree changes.
  private volatile String myCanonicalText;

  public PsiImportStaticReferenceElementImpl() {
    super(JavaElementType.IMPORT_STATIC_REFERENCE);
  }

  /** Text offset of the referenced member name, falling back to the element start when absent. */
  @Override
  public int getTextOffset() {
    ASTNode refName = findChildByRole(ChildRole.REFERENCE_NAME);
    if (refName != null) {
      return refName.getStartOffset();
    }
    else {
      return super.getTextOffset();
    }
  }

  @Override
  public void clearCaches() {
    super.clearCaches();
    // Invalidate the cached canonical text; it is rebuilt on next access.
    myCanonicalText = null;
  }

  /**
   * Maps child roles to AST nodes: the identifier after the last dot is the
   * reference name, a leading code reference (if any) is the qualifier.
   */
  @Override
  public final ASTNode findChildByRole(int role) {
    LOG.assertTrue(ChildRole.isUnique(role));
    switch (role) {
      default:
        return null;

      case ChildRole.REFERENCE_NAME:
        return findChildByType(JavaTokenType.IDENTIFIER);

      case ChildRole.QUALIFIER:
        // The qualifier, when present, is always the first child node.
        final TreeElement node = getFirstChildNode();
        return node.getElementType() == JavaElementType.JAVA_CODE_REFERENCE ? node : null;

      case ChildRole.DOT:
        return findChildByType(JavaTokenType.DOT);
    }
  }

  /** Inverse of {@link #findChildByRole}: classifies a direct child by its element type. */
  @Override
  public final int getChildRole(@NotNull ASTNode child) {
    LOG.assertTrue(child.getTreeParent() == this);
    IElementType i = child.getElementType();
    if (i == JavaElementType.JAVA_CODE_REFERENCE) {
      return ChildRole.QUALIFIER;
    }
    else if (i == JavaTokenType.DOT) {
      return ChildRole.DOT;
    }
    else if (i == JavaTokenType.IDENTIFIER) {
      return ChildRole.REFERENCE_NAME;
    }
    else {
      return ChildRoleBase.NONE;
    }
  }

  @Override
  public PsiElement getReferenceNameElement() {
    return findChildByRoleAsPsiElement(ChildRole.REFERENCE_NAME);
  }

  /** Static import references never carry type arguments. */
  @Override
  public PsiReferenceParameterList getParameterList() {
    return null;
  }

  @Override
  @NotNull
  public PsiType[] getTypeParameters() {
    return PsiType.EMPTY_ARRAY;
  }

  @Override
  public PsiElement getQualifier() {
    return findChildByRoleAsPsiElement(ChildRole.QUALIFIER);
  }

  @Override
  public PsiJavaCodeReferenceElement getClassReference() {
    // NOTE(review): unchecked cast assumes the qualifier node (when present) is
    // always a JAVA_CODE_REFERENCE, which findChildByRole enforces.
    return (PsiJavaCodeReferenceElement)findChildByRoleAsPsiElement(ChildRole.QUALIFIER);
  }

  /**
   * Rebinds this static import to another class, rewriting the qualifier in
   * place. When there is no qualifier the element is rebuilt inside its parent
   * import statement: {@code newRef}, a dot, and an error placeholder for the
   * missing member name replace this element.
   *
   * @throws IncorrectOperationException if the class has no qualified name
   */
  @Override
  public PsiImportStaticStatement bindToTargetClass(final PsiClass aClass) throws IncorrectOperationException {
    final String qualifiedName = aClass.getQualifiedName();
    if (qualifiedName == null) throw new IncorrectOperationException();
    final PsiJavaParserFacade parserFacade = JavaPsiFacade.getInstance(getProject()).getParserFacade();
    final CompositeElement newRef = (CompositeElement)parserFacade.createReferenceFromText(qualifiedName, null).getNode();
    if (getQualifier() != null) {
      replaceChildInternal(findChildByRole(ChildRole.QUALIFIER), newRef);
      return (PsiImportStaticStatement)getParent();
    }
    else {
      // No qualifier: splice "newRef . <error>" into the parent and remove this node.
      // Raw tree operations — the insertion order (ref, dot, error element) is significant.
      final LeafElement dot = Factory.createSingleLeafElement(JavaTokenType.DOT, ".", 0, 1, SharedImplUtil.findCharTableByTree(newRef), getManager());
      newRef.rawInsertAfterMe(dot);
      final CompositeElement errorElement = Factory.createErrorElement(JavaErrorMessages.message("import.statement.identifier.or.asterisk.expected."));
      dot.rawInsertAfterMe(errorElement);
      final CompositeElement parentComposite = (CompositeElement)SourceTreeToPsiMap.psiElementToTree(getParent());
      parentComposite.addInternal(newRef, errorElement, this, Boolean.TRUE);
      parentComposite.deleteChildInternal(this);
      return (PsiImportStaticStatement)SourceTreeToPsiMap.treeElementToPsi(parentComposite);
    }
  }

  @Override
  public boolean isQualified() {
    return findChildByRole(ChildRole.QUALIFIER) != null;
  }

  @Override
  public String getQualifiedName() {
    return getCanonicalText();
  }

  @Override
  public boolean isSoft() {
    return false;
  }

  /** Returns the member name after the last dot, or "" when the identifier is missing. */
  @Override
  public String getReferenceName() {
    final ASTNode childByRole = findChildByRole(ChildRole.REFERENCE_NAME);
    if (childByRole == null) return "";
    return childByRole.getText();
  }

  // This element acts as its own PsiReference.
  @NotNull
  @Override
  public PsiElement getElement() {
    return this;
  }

  /** Range of the member name within this element; the whole element when the name is missing. */
  @NotNull
  @Override
  public TextRange getRangeInElement() {
    TreeElement nameChild = (TreeElement)findChildByRole(ChildRole.REFERENCE_NAME);
    if (nameChild == null) return new TextRange(0, getTextLength());
    final int startOffset = nameChild.getStartOffsetInParent();
    return new TextRange(startOffset, startOffset + nameChild.getTextLength());
  }

  /** Cached "qualifier.name" text; recomputed after {@link #clearCaches()}. */
  @Override
  @NotNull
  public String getCanonicalText() {
    String canonicalText = myCanonicalText;
    if (canonicalText == null) {
      // Benign race: concurrent callers may both compute, result is identical.
      myCanonicalText = canonicalText = calcCanonicalText();
    }
    return canonicalText;
  }

  private String calcCanonicalText() {
    final PsiJavaCodeReferenceElement referenceElement = (PsiJavaCodeReferenceElement)getQualifier();
    if (referenceElement == null) {
      return getReferenceName();
    }
    else {
      return referenceElement.getCanonicalText() + "." + getReferenceName();
    }
  }

  public String toString() {
    return "PsiImportStaticReferenceElement:" + getText();
  }

  /** Single-result view over {@link #multiResolve}; EMPTY when ambiguous or unresolved. */
  @Override
  @NotNull
  public JavaResolveResult advancedResolve(boolean incompleteCode) {
    final JavaResolveResult[] results = multiResolve(incompleteCode);
    if (results.length == 1) return results[0];
    return JavaResolveResult.EMPTY;
  }

  @Override
  @NotNull
  public JavaResolveResult[] multiResolve(boolean incompleteCode) {
    PsiFile file = getContainingFile();
    final ResolveCache resolveCache = ResolveCache.getInstance(file.getProject());
    final ResolveResult[] results = resolveCache.resolveWithCaching(this, OurGenericsResolver.INSTANCE, true, incompleteCode, file);
    // The cache is untyped; fall back to an empty array if it handed back a plain ResolveResult[].
    return results instanceof JavaResolveResult[] ? (JavaResolveResult[])results : JavaResolveResult.EMPTY_ARRAY;
  }

  /**
   * Stateless resolver used with {@link ResolveCache}: resolves the qualifier
   * to a class and collects matching static members via
   * {@link StaticImportResolveProcessor}.
   */
  private static final class OurGenericsResolver implements ResolveCache.PolyVariantResolver<PsiImportStaticReferenceElementImpl> {
    private static final OurGenericsResolver INSTANCE = new OurGenericsResolver();

    @NotNull
    @Override
    public JavaResolveResult[] resolve(@NotNull final PsiImportStaticReferenceElementImpl referenceElement, final boolean incompleteCode) {
      final PsiElement qualifier = referenceElement.getQualifier();
      if (!(qualifier instanceof PsiJavaCodeReferenceElement)) return JavaResolveResult.EMPTY_ARRAY;
      final PsiElement target = ((PsiJavaCodeReferenceElement)qualifier).resolve();
      if (!(target instanceof PsiClass)) return JavaResolveResult.EMPTY_ARRAY;
      final StaticImportResolveProcessor processor = new StaticImportResolveProcessor(referenceElement);
      target.processDeclarations(processor, ResolveState.initial(), referenceElement, referenceElement);
      return processor.getResults();
    }
  }

  @Override
  public PsiReference getReference() {
    return this;
  }

  @Override
  public PsiElement resolve() {
    return advancedResolve(false).getElement();
  }

  /** True when {@code element} is a named element this reference resolves to (name checked first as a fast path). */
  @Override
  public boolean isReferenceTo(@NotNull PsiElement element) {
    final String name = getReferenceName();
    if (name == null || !(element instanceof PsiNamedElement) || !name.equals(((PsiNamedElement)element).getName())) {
      return false;
    }
    for (JavaResolveResult result : multiResolve(false)) {
      if (getManager().areElementsEquivalent(result.getElement(), element)) {
        return true;
      }
    }
    return false;
  }

  /** Replaces only the member-name identifier; the qualifier is untouched. */
  @Override
  public PsiElement handleElementRename(@NotNull String newElementName) throws IncorrectOperationException {
    PsiElement oldIdentifier = findChildByRoleAsPsiElement(ChildRole.REFERENCE_NAME);
    if (oldIdentifier == null) {
      throw new IncorrectOperationException();
    }
    PsiIdentifier identifier = JavaPsiFacade.getInstance(getProject()).getElementFactory().createIdentifier(newElementName);
    oldIdentifier.replace(identifier);
    return this;
  }

  /**
   * Rebinds this reference to a concrete static member: the qualifier is bound
   * to the member's containing class and the identifier replaced with the
   * member's name. A non-static top-level class (e.g. after "move inner to
   * upper level") degrades to a regular import.
   *
   * @throws IncorrectOperationException for unnamed/non-member elements, a
   *         non-static member, or a member without a containing class
   */
  @Override
  public PsiElement bindToElement(@NotNull PsiElement element) throws IncorrectOperationException {
    if (!(element instanceof PsiMember) ||
        !(element instanceof PsiNamedElement) ||
        ((PsiNamedElement)element).getName() == null) {
      throw new IncorrectOperationException();
    }
    if (!((PsiModifierListOwner)element).hasModifierProperty(PsiModifier.STATIC)) {
      if (element instanceof PsiClass && ((PsiClass)element).getContainingClass() == null) {
        // "move inner to upper level" of a statically imported inner class => replace with regular import
        return replaceWithRegularImport((PsiClass)element);
      }
      throw new IncorrectOperationException();
    }
    PsiClass containingClass = ((PsiMember)element).getContainingClass();
    if (containingClass == null) throw new IncorrectOperationException();
    PsiElement qualifier = getQualifier();
    if (qualifier == null) {
      throw new IncorrectOperationException();
    }
    ((PsiReference)qualifier).bindToElement(containingClass);

    PsiElement oldIdentifier = findChildByRoleAsPsiElement(ChildRole.REFERENCE_NAME);
    if (oldIdentifier == null) {
      throw new IncorrectOperationException();
    }
    PsiIdentifier identifier = JavaPsiFacade.getInstance(getProject()).getElementFactory().createIdentifier(((PsiNamedElement)element).getName());
    oldIdentifier.replace(identifier);
    return this;
  }

  /** Replaces the whole enclosing static import statement with a regular class import. */
  private PsiElement replaceWithRegularImport(final PsiClass psiClass) throws IncorrectOperationException {
    PsiImportStaticStatement baseStatement = PsiTreeUtil.getParentOfType(getElement(), PsiImportStaticStatement.class);
    PsiImportStatement statement = JavaPsiFacade.getInstance(getProject()).getElementFactory().createImportStatement(psiClass);
    statement = (PsiImportStatement)baseStatement.replace(statement);
    final PsiJavaCodeReferenceElement reference = statement.getImportReference();
    assert reference != null;
    return reference;
  }

  /** Feeds completion variants (modifier-list owners only) through the given processor. */
  @Override
  public void processVariants(@NotNull PsiScopeProcessor processor) {
    FilterScopeProcessor proc = new FilterScopeProcessor(new ClassFilter(PsiModifierListOwner.class), processor);
    PsiScopesUtil.resolveAndWalk(proc, this, null, true);
  }

  @Override
  @NotNull
  public Object[] getVariants() {
    // IMPLEMENT[dsl]
    return ArrayUtil.EMPTY_OBJECT_ARRAY;
  }

  @Override
  public void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof JavaElementVisitor) {
      ((JavaElementVisitor)visitor).visitImportStaticReferenceElement(this);
    }
    else {
      visitor.visitElement(this);
    }
  }
}
/* * $Header: /home/cvs/jakarta-slide/webdavclient/clientlib/src/java/org/apache/webdav/lib/methods/OptionsMethod.java,v 1.1.2.2 2004/02/05 15:51:22 mholz Exp $ * $Revision: 1.1.2.2 $ * $Date: 2004/02/05 15:51:22 $ * * ==================================================================== * * Copyright 1999-2002 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.webdav.lib.methods; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import java.util.StringTokenizer; import java.util.Vector; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.commons.httpclient.Header; import org.apache.commons.httpclient.HttpConnection; import org.apache.commons.httpclient.HttpException; import org.apache.commons.httpclient.HttpState; import org.apache.util.WebdavStatus; import org.apache.util.XMLPrinter; import org.w3c.dom.Document; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * OPTIONS Method. * * @author <a href="mailto:remm@apache.org">Remy Maucherat</a> */ public class OptionsMethod extends XMLResponseMethodBase { // -------------------------------------------------------------- Constants /** * DAV level 1. Mandatory. */ public static final String DAV_LEVEL1 = "1"; /** * DAV level 2. 
*/ public static final String DAV_LEVEL2 = "2"; /** * Advanced collections. */ public static final String ADVANCED_COLLECTIONS = "3"; /** * Delta V. */ public static final String DELTAV = "4"; /** * ACL. */ public static final String ACL = "5"; /** * DASL. */ public static final String DASL = "6"; /** * */ public static final int OPTIONS_WORKSPACE = 8; /** * */ public static final int OPTIONS_VERSION_HISTORY = 9; // ----------------------------------------------------------- Constructors /** * Method constructor. */ public OptionsMethod() { } /** * Method constructor. */ public OptionsMethod(String path) { super(path); } /** * Method constructor. */ public OptionsMethod(String path, int type) { super(path); this.type = type; } // ----------------------------------------------------- Instance Variables /** * DAV Capabilities. */ private Vector davCapabilities = new Vector(); /** * Methods allowed. */ private Vector methodsAllowed = new Vector(); private int type = 0; private boolean hasXMLBody = false; // --------------------------------------------------------- Public Methods /** * Is the specified method allowed ? */ public boolean isAllowed(String method) { checkUsed(); return methodsAllowed.contains(method); } /** * Get a list of allowed methods. */ public Enumeration getAllowedMethods() { checkUsed(); return methodsAllowed.elements(); } /** * Is DAV capability supported ? */ public boolean isSupported(String capability) { checkUsed(); return davCapabilities.contains(capability); } /** * Get a list of supported DAV capabilities. */ public Enumeration getDavCapabilities() { checkUsed(); return davCapabilities.elements(); } /** * Parse response. 
* * @param input Input stream */ public void parseResponse(InputStream input, HttpState state, HttpConnection conn) throws IOException, HttpException { try { if (getStatusLine().getStatusCode() == WebdavStatus.SC_OK && hasXMLBody) { parseXMLResponse(input); } } catch (IOException e) { // FIX ME: provide a way to deliver non xml data } } // --------------------------------------------------- WebdavMethod Methods /** * Process response headers. The contract of this method is that it only * parses the response headers. * * @param state the state * @param conn the connection */ public void processResponseHeaders(HttpState state, HttpConnection conn) { Header davHeader = getResponseHeader("dav"); if (davHeader != null) { String davHeaderValue = davHeader.getValue(); StringTokenizer tokenizer = new StringTokenizer(davHeaderValue, ","); while (tokenizer.hasMoreElements()) { String davCapability = tokenizer.nextToken().trim(); davCapabilities.addElement(davCapability); } } Header allowHeader = getResponseHeader("allow"); if (allowHeader != null) { String allowHeaderValue = allowHeader.getValue(); StringTokenizer tokenizer = new StringTokenizer(allowHeaderValue, ","); while (tokenizer.hasMoreElements()) { String methodAllowed = tokenizer.nextToken().trim().toUpperCase(); methodsAllowed.addElement(methodAllowed); } } Header lengthHeader = getResponseHeader("content-length"); Header typeHeader = getResponseHeader("content-type"); if( (lengthHeader != null && Integer.parseInt(lengthHeader.getValue()) > 0) || (typeHeader != null && typeHeader.getValue().startsWith("text/xml"))) hasXMLBody = true; super.processResponseHeaders(state, conn); } /** * DAV requests that contain a body must override this function to * generate that body. 
* * <p>The default behavior simply returns an empty body.</p> */ protected String generateRequestBody() { if (type != 0){ XMLPrinter printer = new XMLPrinter(); printer.writeXMLHeader(); //System.out.println(printer.toString()); printer.writeElement("D", "DAV:", "options", XMLPrinter.OPENING); if (type == OPTIONS_VERSION_HISTORY) printer.writeElement("D", "version-history-collection-set", XMLPrinter.NO_CONTENT); if (type == OPTIONS_WORKSPACE) printer.writeElement("D", "workspace-collection-set", XMLPrinter.NO_CONTENT); printer.writeElement("D", "options", XMLPrinter.CLOSING); return printer.toString(); } return null; } public String getName() { return "OPTIONS"; } //get and set header public void addRequestHeaders(HttpState state, HttpConnection conn) throws IOException, HttpException { if (type!= 0){ // set the default utf-8 encoding, if not already present if (getRequestHeader("Content-Type") == null ) super.setRequestHeader("Content-Type", "text/xml; charset=utf-8"); } super.addRequestHeaders(state, conn); } /** * This method returns an enumeration of URL paths. If the PropFindMethod * was sent to the URL of a collection, then there will be multiple URLs. * The URLs are picked out of the <code>&lt;D:href&gt;</code> elements * of the response. 
* * @return an enumeration of URL paths as Strings */ public Enumeration getAllResponseURLs() { checkUsed(); return getResponseURLs().elements(); } public Enumeration getResponseProperties(){ Vector result = new Vector(); return (Enumeration) result; } protected Document parseResponseContent(InputStream is) throws ParserConfigurationException, SAXException, IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); DocumentBuilder builder = factory.newDocumentBuilder(); byte[] chunk; byte[] all; int chunkLen; int allLen; List chunks; int i; int max; int ofs; allLen = 0; chunk = new byte[1024*4]; chunkLen = is.read(chunk); chunks = new ArrayList(); while (chunkLen != -1) { chunks.add(new Integer(chunkLen)); chunks.add(chunk); allLen += chunkLen; chunk = new byte[1024*4]; chunkLen = is.read(chunk); } all = new byte[allLen]; ofs = 0; max = chunks.size(); for (i = 0; i < max; i += 2) { chunkLen = ((Integer) chunks.get(i)).intValue(); chunk = (byte[]) chunks.get(i + 1); System.arraycopy(chunk, 0, all, ofs, chunkLen); ofs += chunkLen; } if (all.length == 0) return null; return builder.parse(new InputSource(new ByteArrayInputStream(all))); } }
/*
 * Copyright 2016 SCSK Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jp.primecloud.auto.sdk.model.instance;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * JSON-mapped model of a PrimeCloud instance.
 *
 * <p>Plain mutable POJO deserialized with Jackson; each field is bound to a
 * PascalCase JSON property via {@code @JsonProperty}. Exactly one of the
 * platform-specific sub-objects (Aws, Nifty, Vmware, Cloudstack, Vcloud,
 * Openstack, Azure) is presumably populated depending on the platform —
 * TODO confirm against the API response.</p>
 */
public class Instance {

    // Identity and ownership.
    @JsonProperty("InstanceNo")
    private Long instanceNo;

    @JsonProperty("InstanceName")
    private String instanceName;

    @JsonProperty("FarmNo")
    private Long farmNo;

    @JsonProperty("PlatformNo")
    private Long platformNo;

    @JsonProperty("ImageNo")
    private Long imageNo;

    @JsonProperty("Comment")
    private String comment;

    // Network addressing.
    @JsonProperty("Fqdn")
    private String fqdn;

    @JsonProperty("PublicIp")
    private String publicIp;

    @JsonProperty("PrivateIp")
    private String privateIp;

    // Lifecycle state as reported by the service (opaque string here).
    @JsonProperty("Status")
    private String status;

    // Platform-specific detail blocks.
    @JsonProperty("Aws")
    private InstanceAws aws;

    @JsonProperty("Nifty")
    private InstanceNifty nifty;

    @JsonProperty("Vmware")
    private InstanceVmware vmware;

    @JsonProperty("Cloudstack")
    private InstanceCloudstack cloudstack;

    @JsonProperty("Vcloud")
    private InstanceVcloud vcloud;

    @JsonProperty("Openstack")
    private InstanceOpenstack openstack;

    @JsonProperty("Azure")
    private InstanceAzure azure;

    // Plain accessors below; no validation or side effects.

    public Long getInstanceNo() {
        return instanceNo;
    }

    public void setInstanceNo(Long instanceNo) {
        this.instanceNo = instanceNo;
    }

    public String getInstanceName() {
        return instanceName;
    }

    public void setInstanceName(String instanceName) {
        this.instanceName = instanceName;
    }

    public Long getFarmNo() {
        return farmNo;
    }

    public void setFarmNo(Long farmNo) {
        this.farmNo = farmNo;
    }

    public Long getPlatformNo() {
        return platformNo;
    }

    public void setPlatformNo(Long platformNo) {
        this.platformNo = platformNo;
    }

    public Long getImageNo() {
        return imageNo;
    }

    public void setImageNo(Long imageNo) {
        this.imageNo = imageNo;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public String getFqdn() {
        return fqdn;
    }

    public void setFqdn(String fqdn) {
        this.fqdn = fqdn;
    }

    public String getPublicIp() {
        return publicIp;
    }

    public void setPublicIp(String publicIp) {
        this.publicIp = publicIp;
    }

    public String getPrivateIp() {
        return privateIp;
    }

    public void setPrivateIp(String privateIp) {
        this.privateIp = privateIp;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public InstanceAws getAws() {
        return aws;
    }

    public void setAws(InstanceAws aws) {
        this.aws = aws;
    }

    public InstanceNifty getNifty() {
        return nifty;
    }

    public void setNifty(InstanceNifty nifty) {
        this.nifty = nifty;
    }

    public InstanceVmware getVmware() {
        return vmware;
    }

    public void setVmware(InstanceVmware vmware) {
        this.vmware = vmware;
    }

    public InstanceCloudstack getCloudstack() {
        return cloudstack;
    }

    public void setCloudstack(InstanceCloudstack cloudstack) {
        this.cloudstack = cloudstack;
    }

    public InstanceVcloud getVcloud() {
        return vcloud;
    }

    public void setVcloud(InstanceVcloud vcloud) {
        this.vcloud = vcloud;
    }

    public InstanceOpenstack getOpenstack() {
        return openstack;
    }

    public void setOpenstack(InstanceOpenstack openstack) {
        this.openstack = openstack;
    }

    public InstanceAzure getAzure() {
        return azure;
    }

    public void setAzure(InstanceAzure azure) {
        this.azure = azure;
    }

}
/*
 Copyright 2014 OPM.gov

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
package gov.opm.scrd.entities.application;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;

import gov.opm.scrd.TestsHelper;
import gov.opm.scrd.entities.lookup.AccountStatus;

import java.math.BigDecimal;
import java.util.Date;

import junit.framework.JUnit4TestAdapter;

import org.junit.Before;
import org.junit.Test;

/**
 * <p>
 * Unit tests for {@link BatchDailyPayments} class. Each accessor pair is
 * exercised with a get-test (set via setter, read via getter) and a set-test
 * (set via setter, read reflectively via {@code TestsHelper.getField}).
 * </p>
 *
 * <p>
 * NOTE(review): the audit-batch tests mix two field names — {@code testCtor}
 * inspects a field called "auditBatchLogId", while the accessor tests call
 * {@code setAuditBatchId}/{@code getAuditBatchId} and inspect "auditBatchId".
 * Presumably the entity field was renamed at some point; verify against
 * {@code BatchDailyPayments} and align the names used here.
 * </p>
 *
 * @author sparemax
 * @version 1.0
 * @since OPM - Data Migration - Entities Update Module Assembly 1.0
 */
public class BatchDailyPaymentsUnitTests {
    /** The <code>BatchDailyPayments</code> instance used in tests; recreated before every test. */
    private BatchDailyPayments instance;

    /**
     * Adapter for earlier versions of JUnit.
     *
     * @return a test suite.
     */
    public static junit.framework.Test suite() {
        return new JUnit4TestAdapter(BatchDailyPaymentsUnitTests.class);
    }

    /**
     * Sets up the unit tests.
     *
     * @throws Exception to JUnit.
     */
    @Before
    public void setUp() throws Exception {
        instance = new BatchDailyPayments();
    }

    /**
     * Accuracy test for the constructor <code>BatchDailyPayments()</code>:
     * every field of a freshly constructed instance should be null.
     */
    @Test
    public void testCtor() {
        instance = new BatchDailyPayments();

        // NOTE(review): checks "auditBatchLogId" while the setter tests below
        // use the field name "auditBatchId" — confirm which name the entity
        // actually declares.
        assertNull("'auditBatchLogId' should be correct.", TestsHelper.getField(instance, "auditBatchLogId"));
        assertNull("'payTransactionKey' should be correct.", TestsHelper.getField(instance, "payTransactionKey"));
        assertNull("'numberPaymentToday' should be correct.", TestsHelper.getField(instance, "numberPaymentToday"));
        assertNull("'batchTime' should be correct.", TestsHelper.getField(instance, "batchTime"));
        assertNull("'accountStatus' should be correct.", TestsHelper.getField(instance, "accountStatus"));
        assertNull("'payTransStatusCode' should be correct.", TestsHelper.getField(instance, "payTransStatusCode"));
        assertNull("'claimNumber' should be correct.", TestsHelper.getField(instance, "claimNumber"));
        assertNull("'accountBalance' should be correct.", TestsHelper.getField(instance, "accountBalance"));
        assertNull("'overPaymentAmount' should be correct.", TestsHelper.getField(instance, "overPaymentAmount"));
        assertNull("'achPayment' should be correct.", TestsHelper.getField(instance, "achPayment"));
        assertNull("'achStopLetter' should be correct.", TestsHelper.getField(instance, "achStopLetter"));
        assertNull("'printInvoice' should be correct.", TestsHelper.getField(instance, "printInvoice"));
        assertNull("'refundRequired' should be correct.", TestsHelper.getField(instance, "refundRequired"));
        assertNull("'reversedPayment' should be correct.", TestsHelper.getField(instance, "reversedPayment"));
        assertNull("'updateToCompleted' should be correct.", TestsHelper.getField(instance, "updateToCompleted"));
        assertNull("'printInitialBill' should be correct.", TestsHelper.getField(instance, "printInitialBill"));
        assertNull("'latestBatch' should be correct.", TestsHelper.getField(instance, "latestBatch"));
        assertNull("'errorProcessing' should be correct.", TestsHelper.getField(instance, "errorProcessing"));
    }

    /** Accuracy test for <code>getAuditBatchLogId()</code>: the value should be properly retrieved. */
    @Test
    public void test_getAuditBatchLogId() {
        Long value = 1L;
        instance.setAuditBatchId(value);

        assertEquals("'getAuditBatchLogId' should be correct.", value, instance.getAuditBatchId());
    }

    /** Accuracy test for <code>setAuditBatchLogId(Long auditBatchLogId)</code>: the value should be properly set. */
    @Test
    public void test_setAuditBatchLogId() {
        Long value = 1L;
        instance.setAuditBatchId(value);

        assertEquals("'setAuditBatchLogId' should be correct.", value, TestsHelper.getField(instance, "auditBatchId"));
    }

    /** Accuracy test for <code>getPayTransactionKey()</code>: the value should be properly retrieved. */
    @Test
    public void test_getPayTransactionKey() {
        Integer value = 1;
        instance.setPayTransactionKey(value);

        assertEquals("'getPayTransactionKey' should be correct.", value, instance.getPayTransactionKey());
    }

    /** Accuracy test for <code>setPayTransactionKey(Integer payTransactionKey)</code>: the value should be properly set. */
    @Test
    public void test_setPayTransactionKey() {
        Integer value = 1;
        instance.setPayTransactionKey(value);

        assertEquals("'setPayTransactionKey' should be correct.", value,
            TestsHelper.getField(instance, "payTransactionKey"));
    }

    /** Accuracy test for <code>getNumberPaymentToday()</code>: the value should be properly retrieved. */
    @Test
    public void test_getNumberPaymentToday() {
        Integer value = 1;
        instance.setNumberPaymentToday(value);

        assertEquals("'getNumberPaymentToday' should be correct.", value, instance.getNumberPaymentToday());
    }

    /** Accuracy test for <code>setNumberPaymentToday(Integer numberPaymentToday)</code>: the value should be properly set. */
    @Test
    public void test_setNumberPaymentToday() {
        Integer value = 1;
        instance.setNumberPaymentToday(value);

        assertEquals("'setNumberPaymentToday' should be correct.", value,
            TestsHelper.getField(instance, "numberPaymentToday"));
    }

    /** Accuracy test for <code>getBatchTime()</code>: the value should be properly retrieved. */
    @Test
    public void test_getBatchTime() {
        Date value = new Date();
        instance.setBatchTime(value);

        assertSame("'getBatchTime' should be correct.", value, instance.getBatchTime());
    }

    /** Accuracy test for <code>setBatchTime(Date batchTime)</code>: the value should be properly set. */
    @Test
    public void test_setBatchTime() {
        Date value = new Date();
        instance.setBatchTime(value);

        assertSame("'setBatchTime' should be correct.", value, TestsHelper.getField(instance, "batchTime"));
    }

    /** Accuracy test for <code>getAccountStatus()</code>: the value should be properly retrieved. */
    @Test
    public void test_getAccountStatus() {
        AccountStatus value = new AccountStatus();
        instance.setAccountStatus(value);

        assertSame("'getAccountStatus' should be correct.", value, instance.getAccountStatus());
    }

    /** Accuracy test for <code>setAccountStatus(AccountStatus accountStatus)</code>: the value should be properly set. */
    @Test
    public void test_setAccountStatus() {
        AccountStatus value = new AccountStatus();
        instance.setAccountStatus(value);

        assertSame("'setAccountStatus' should be correct.", value, TestsHelper.getField(instance, "accountStatus"));
    }

    /** Accuracy test for <code>getPayTransStatusCode()</code>: the value should be properly retrieved. */
    @Test
    public void test_getPayTransStatusCode() {
        Integer value = 1;
        instance.setPayTransStatusCode(value);

        assertEquals("'getPayTransStatusCode' should be correct.", value, instance.getPayTransStatusCode());
    }

    /** Accuracy test for <code>setPayTransStatusCode(Integer payTransStatusCode)</code>: the value should be properly set. */
    @Test
    public void test_setPayTransStatusCode() {
        Integer value = 1;
        instance.setPayTransStatusCode(value);

        assertEquals("'setPayTransStatusCode' should be correct.", value,
            TestsHelper.getField(instance, "payTransStatusCode"));
    }

    /** Accuracy test for <code>getClaimNumber()</code>: the value should be properly retrieved. */
    @Test
    public void test_getClaimNumber() {
        String value = "new_value";
        instance.setClaimNumber(value);

        assertEquals("'getClaimNumber' should be correct.", value, instance.getClaimNumber());
    }

    /** Accuracy test for <code>setClaimNumber(String claimNumber)</code>: the value should be properly set. */
    @Test
    public void test_setClaimNumber() {
        String value = "new_value";
        instance.setClaimNumber(value);

        assertEquals("'setClaimNumber' should be correct.", value, TestsHelper.getField(instance, "claimNumber"));
    }

    /** Accuracy test for <code>getAccountBalance()</code>: the value should be properly retrieved. */
    @Test
    public void test_getAccountBalance() {
        BigDecimal value = new BigDecimal(1);
        instance.setAccountBalance(value);

        assertSame("'getAccountBalance' should be correct.", value, instance.getAccountBalance());
    }

    /** Accuracy test for <code>setAccountBalance(BigDecimal accountBalance)</code>: the value should be properly set. */
    @Test
    public void test_setAccountBalance() {
        BigDecimal value = new BigDecimal(1);
        instance.setAccountBalance(value);

        assertSame("'setAccountBalance' should be correct.", value, TestsHelper.getField(instance, "accountBalance"));
    }

    /** Accuracy test for <code>getOverPaymentAmount()</code>: the value should be properly retrieved. */
    @Test
    public void test_getOverPaymentAmount() {
        BigDecimal value = new BigDecimal(1);
        instance.setOverPaymentAmount(value);

        assertSame("'getOverPaymentAmount' should be correct.", value, instance.getOverPaymentAmount());
    }

    /** Accuracy test for <code>setOverPaymentAmount(BigDecimal overPaymentAmount)</code>: the value should be properly set. */
    @Test
    public void test_setOverPaymentAmount() {
        BigDecimal value = new BigDecimal(1);
        instance.setOverPaymentAmount(value);

        assertSame("'setOverPaymentAmount' should be correct.", value,
            TestsHelper.getField(instance, "overPaymentAmount"));
    }

    /** Accuracy test for <code>getAchPayment()</code>: the value should be properly retrieved. */
    @Test
    public void test_getAchPayment() {
        Boolean value = true;
        instance.setAchPayment(value);

        assertEquals("'getAchPayment' should be correct.", value, instance.getAchPayment());
    }

    /** Accuracy test for <code>setAchPayment(Boolean achPayment)</code>: the value should be properly set. */
    @Test
    public void test_setAchPayment() {
        Boolean value = true;
        instance.setAchPayment(value);

        assertEquals("'setAchPayment' should be correct.", value, TestsHelper.getField(instance, "achPayment"));
    }

    /** Accuracy test for <code>getAchStopLetter()</code>: the value should be properly retrieved. */
    @Test
    public void test_getAchStopLetter() {
        Boolean value = true;
        instance.setAchStopLetter(value);

        assertEquals("'getAchStopLetter' should be correct.", value, instance.getAchStopLetter());
    }

    /** Accuracy test for <code>setAchStopLetter(Boolean achStopLetter)</code>: the value should be properly set. */
    @Test
    public void test_setAchStopLetter() {
        Boolean value = true;
        instance.setAchStopLetter(value);

        assertEquals("'setAchStopLetter' should be correct.", value, TestsHelper.getField(instance, "achStopLetter"));
    }

    /** Accuracy test for <code>getPrintInvoice()</code>: the value should be properly retrieved. */
    @Test
    public void test_getPrintInvoice() {
        Boolean value = true;
        instance.setPrintInvoice(value);

        assertEquals("'getPrintInvoice' should be correct.", value, instance.getPrintInvoice());
    }

    /** Accuracy test for <code>setPrintInvoice(Boolean printInvoice)</code>: the value should be properly set. */
    @Test
    public void test_setPrintInvoice() {
        Boolean value = true;
        instance.setPrintInvoice(value);

        assertEquals("'setPrintInvoice' should be correct.", value, TestsHelper.getField(instance, "printInvoice"));
    }

    /** Accuracy test for <code>getRefundRequired()</code>: the value should be properly retrieved. */
    @Test
    public void test_getRefundRequired() {
        Boolean value = true;
        instance.setRefundRequired(value);

        assertEquals("'getRefundRequired' should be correct.", value, instance.getRefundRequired());
    }

    /** Accuracy test for <code>setRefundRequired(Boolean refundRequired)</code>: the value should be properly set. */
    @Test
    public void test_setRefundRequired() {
        Boolean value = true;
        instance.setRefundRequired(value);

        assertEquals("'setRefundRequired' should be correct.", value,
            TestsHelper.getField(instance, "refundRequired"));
    }

    /** Accuracy test for <code>getReversedPayment()</code>: the value should be properly retrieved. */
    @Test
    public void test_getReversedPayment() {
        Boolean value = true;
        instance.setReversedPayment(value);

        assertEquals("'getReversedPayment' should be correct.", value, instance.getReversedPayment());
    }

    /** Accuracy test for <code>setReversedPayment(Boolean reversedPayment)</code>: the value should be properly set. */
    @Test
    public void test_setReversedPayment() {
        Boolean value = true;
        instance.setReversedPayment(value);

        assertEquals("'setReversedPayment' should be correct.", value,
            TestsHelper.getField(instance, "reversedPayment"));
    }

    /** Accuracy test for <code>getUpdateToCompleted()</code>: the value should be properly retrieved. */
    @Test
    public void test_getUpdateToCompleted() {
        Boolean value = true;
        instance.setUpdateToCompleted(value);

        assertEquals("'getUpdateToCompleted' should be correct.", value, instance.getUpdateToCompleted());
    }

    /** Accuracy test for <code>setUpdateToCompleted(Boolean updateToCompleted)</code>: the value should be properly set. */
    @Test
    public void test_setUpdateToCompleted() {
        Boolean value = true;
        instance.setUpdateToCompleted(value);

        assertEquals("'setUpdateToCompleted' should be correct.", value,
            TestsHelper.getField(instance, "updateToCompleted"));
    }

    /** Accuracy test for <code>getPrintInitialBill()</code>: the value should be properly retrieved. */
    @Test
    public void test_getPrintInitialBill() {
        Boolean value = true;
        instance.setPrintInitialBill(value);

        assertEquals("'getPrintInitialBill' should be correct.", value, instance.getPrintInitialBill());
    }

    /** Accuracy test for <code>setPrintInitialBill(Boolean printInitialBill)</code>: the value should be properly set. */
    @Test
    public void test_setPrintInitialBill() {
        Boolean value = true;
        instance.setPrintInitialBill(value);

        assertEquals("'setPrintInitialBill' should be correct.", value,
            TestsHelper.getField(instance, "printInitialBill"));
    }

    /** Accuracy test for <code>getLatestBatch()</code>: the value should be properly retrieved. */
    @Test
    public void test_getLatestBatch() {
        Boolean value = true;
        instance.setLatestBatch(value);

        assertEquals("'getLatestBatch' should be correct.", value, instance.getLatestBatch());
    }

    /** Accuracy test for <code>setLatestBatch(Boolean latestBatch)</code>: the value should be properly set. */
    @Test
    public void test_setLatestBatch() {
        Boolean value = true;
        instance.setLatestBatch(value);

        assertEquals("'setLatestBatch' should be correct.", value, TestsHelper.getField(instance, "latestBatch"));
    }

    /** Accuracy test for <code>getErrorProcessing()</code>: the value should be properly retrieved. */
    @Test
    public void test_getErrorProcessing() {
        Boolean value = true;
        instance.setErrorProcessing(value);

        assertEquals("'getErrorProcessing' should be correct.", value, instance.getErrorProcessing());
    }

    /** Accuracy test for <code>setErrorProcessing(Boolean errorProcessing)</code>: the value should be properly set. */
    @Test
    public void test_setErrorProcessing() {
        Boolean value = true;
        instance.setErrorProcessing(value);

        assertEquals("'setErrorProcessing' should be correct.", value,
            TestsHelper.getField(instance, "errorProcessing"));
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler; import java.util.Map.Entry; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; import org.apache.hadoop.yarn.util.resource.Resources; /** * Resource Usage by Labels for following fields by label - AM resource (to * enforce max-am-resource-by-label after YARN-2637) - Used resource (includes * AM resource usage) - Reserved resource - Pending resource - Headroom * * This class can be used to track resource usage in queue/user/app. 
* * And it is thread-safe */ public class ResourceUsage extends AbstractResourceUsage { // short for no-label :) private static final String NL = CommonNodeLabelsManager.NO_LABEL; public ResourceUsage() { super(); } /* * Used */ public Resource getUsed() { return getUsed(NL); } public Resource getUsed(String label) { return _get(label, ResourceType.USED); } public void incUsed(String label, Resource res) { _inc(label, ResourceType.USED, res); } public void incUsed(Resource res) { incUsed(NL, res); } public void decUsed(Resource res) { decUsed(NL, res); } public void decUsed(String label, Resource res) { _dec(label, ResourceType.USED, res); } public void setUsed(Resource res) { setUsed(NL, res); } public void copyAllUsed(AbstractResourceUsage other) { writeLock.lock(); try { for (Entry<String, UsageByLabel> entry : other.usages.entrySet()) { setUsed(entry.getKey(), Resources.clone(entry.getValue().getUsed())); } } finally { writeLock.unlock(); } } public void setUsed(String label, Resource res) { _set(label, ResourceType.USED, res); } /* * Pending */ public Resource getPending() { return getPending(NL); } public Resource getPending(String label) { return _get(label, ResourceType.PENDING); } public void incPending(String label, Resource res) { _inc(label, ResourceType.PENDING, res); } public void incPending(Resource res) { incPending(NL, res); } public void decPending(Resource res) { decPending(NL, res); } public void decPending(String label, Resource res) { _dec(label, ResourceType.PENDING, res); } public void setPending(Resource res) { setPending(NL, res); } public void setPending(String label, Resource res) { _set(label, ResourceType.PENDING, res); } /* * Reserved */ public Resource getReserved() { return getReserved(NL); } public Resource getReserved(String label) { return _get(label, ResourceType.RESERVED); } public void incReserved(String label, Resource res) { _inc(label, ResourceType.RESERVED, res); } public void incReserved(Resource res) { incReserved(NL, 
res); } public void decReserved(Resource res) { decReserved(NL, res); } public void decReserved(String label, Resource res) { _dec(label, ResourceType.RESERVED, res); } public void setReserved(Resource res) { setReserved(NL, res); } public void setReserved(String label, Resource res) { _set(label, ResourceType.RESERVED, res); } /* * AM-Used */ public Resource getAMUsed() { return getAMUsed(NL); } public Resource getAMUsed(String label) { return _get(label, ResourceType.AMUSED); } public void incAMUsed(String label, Resource res) { _inc(label, ResourceType.AMUSED, res); } public void incAMUsed(Resource res) { incAMUsed(NL, res); } public void decAMUsed(Resource res) { decAMUsed(NL, res); } public void decAMUsed(String label, Resource res) { _dec(label, ResourceType.AMUSED, res); } public void setAMUsed(Resource res) { setAMUsed(NL, res); } public void setAMUsed(String label, Resource res) { _set(label, ResourceType.AMUSED, res); } public Resource getAllPending() { return _getAll(ResourceType.PENDING); } public Resource getAllUsed() { return _getAll(ResourceType.USED); } public Resource getAllReserved() { return _getAll(ResourceType.RESERVED); } // Cache Used public Resource getCachedUsed() { return _get(NL, ResourceType.CACHED_USED); } public Resource getCachedUsed(String label) { return _get(label, ResourceType.CACHED_USED); } public Resource getCachedPending() { return _get(NL, ResourceType.CACHED_PENDING); } public Resource getCachedPending(String label) { return _get(label, ResourceType.CACHED_PENDING); } public void setCachedUsed(String label, Resource res) { _set(label, ResourceType.CACHED_USED, res); } public void setCachedUsed(Resource res) { _set(NL, ResourceType.CACHED_USED, res); } public void setCachedPending(String label, Resource res) { _set(label, ResourceType.CACHED_PENDING, res); } public void setCachedPending(Resource res) { _set(NL, ResourceType.CACHED_PENDING, res); } /* * AM-Resource Limit */ public Resource getAMLimit() { return getAMLimit(NL); 
} public Resource getAMLimit(String label) { return _get(label, ResourceType.AMLIMIT); } public void incAMLimit(String label, Resource res) { _inc(label, ResourceType.AMLIMIT, res); } public void incAMLimit(Resource res) { incAMLimit(NL, res); } public void decAMLimit(Resource res) { decAMLimit(NL, res); } public void decAMLimit(String label, Resource res) { _dec(label, ResourceType.AMLIMIT, res); } public void setAMLimit(Resource res) { setAMLimit(NL, res); } public void setAMLimit(String label, Resource res) { _set(label, ResourceType.AMLIMIT, res); } public Resource getUserAMLimit() { return getAMLimit(NL); } public Resource getUserAMLimit(String label) { return _get(label, ResourceType.USERAMLIMIT); } public void setUserAMLimit(Resource res) { setAMLimit(NL, res); } public void setUserAMLimit(String label, Resource res) { _set(label, ResourceType.USERAMLIMIT, res); } public Resource getCachedDemand(String label) { readLock.lock(); try { Resource demand = Resources.createResource(0); Resources.addTo(demand, getCachedUsed(label)); Resources.addTo(demand, getCachedPending(label)); return demand; } finally { readLock.unlock(); } } }
package de.ne0.sonar.plsql.api;

import com.sonar.sslr.api.AstNode;
import com.sonar.sslr.api.TokenType;
import org.sonar.sslr.grammar.GrammarRuleKey;

/**
 * PL/SQL keywords, used both as lexer token types and as grammar rule keys.
 *
 * Keywords according to
 * http://docs.oracle.com/cd/B28359_01/appdev.111/b31231/appb.htm
 *
 * Each constant carries its lower-case source-text form; the CONDITIONAL_*
 * entries are the "$if"/"$then"/... conditional-compilation directives, which
 * share spelling with the plain keywords apart from the leading '$'.
 *
 * @author wessl03
 *
 */
public enum PlSQLKeyword implements TokenType, GrammarRuleKey {
    // Reserved words
    ABORT("abort"), ACCEPT("accept"), ACCESS("access"), ADD("add"), ADMIN("admin"), AFTER("after"),
    ALL("all"), ALLOCATE("allocate"), ALTER("alter"), ANALYZE("analyze"), AND("and"), ANY("any"),
    ARCHIVE("archive"), ARCHIVELOG("archivelog"), ARRAY("array"), ARRAYLEN("arraylen"), AS("as"),
    ASC("asc"), ASSERT("assert"), ASSIGN("assign"), AT("at"), AUTHORIZATION("authorization"),
    AVG("avg"), BACKUP("backup"), BASE_TABLE("base_table"), BECOME("become"), BEFORE("before"),
    BEGIN("begin"), BETWEEN("between"), BINARY_INTEGER("binary_integer"), BLOCK("block"),
    BODY("body"), BOOLEAN("boolean"), BY("by"), CACHE("cache"), CANCEL("cancel"),
    CASCADE("cascade"), CASE("case"), CHANGE("change"), CHAR("char"), CHAR_BASE("char_base"),
    CHARACTER("character"), CHECK("check"), CHECKPOINT("checkpoint"), CLOSE("close"),
    CLUSTER("cluster"), CLUSTERS("clusters"), COBOL("cobol"), COLAUTH("colauth"),
    COLUMNS("columns"), COMMIT("commit"), COMPILE("compile"), COMPRESS("compress"),
    CONNECT("connect"), CONSTANT("constant"), CONSTRAINT("constraint"),
    CONSTRAINTS("constraints"), CONTENTS("contents"), CONTINUE("continue"),
    CONTROLFILE("controlfile"), COUNT("count"), CRASH("crash"), CREATE("create"),
    CURRENT("current"), CURRVAL("currval"), CURSOR("cursor"), CYCLE("cycle"),
    DATA_BASE("data_base"), DATABASE("database"), DATAFILE("datafile"), DATE("date"), DBA("dba"),
    DEBUGOFF("debugoff"), DEBUGON("debugon"), DEC("dec"), DECIMAL("decimal"), DECLARE("declare"),
    DEFAULT("default"), DEFINITION("definition"), DELAY("delay"), DELETE("delete"),
    DELTA("delta"), DESC("desc"), DIGITS("digits"), DISABLE("disable"), DISMOUNT("dismount"),
    DISPOSE("dispose"), DISTINCT("distinct"), DO("do"), DOUBLE("double"), DROP("drop"),
    DUMP("dump"), EACH("each"), ELSE("else"), CONDITIONAL_ELSE("$else"), ELSIF("elsif"),
    CONDITIONAL_ELSIF("$elsif"), ENABLE("enable"), END("end"), CONDITIONAL_END("$end"),
    ENTRY("entry"), ESCAPE("escape"), EVENTS("events"), EXCEPT("except"),
    EXCEPTION("exception"), EXCEPTION_INIT("exception_init"), EXCEPTIONS("exceptions"),
    EXEC("exec"), EXECUTE("execute"), EXISTS("exists"), EXIT("exit"), EXPLAIN("explain"),
    EXTENT("extent"), EXTERNALLY("externally"), FALSE("false"), FETCH("fetch"), FLOAT("float"),
    FLUSH("flush"), FOR("for"), FORCE("force"), FOREIGN("foreign"), FORM("form"),
    FORTRAN("fortran"), FOUND("found"), FREELIST("freelist"), FREELISTS("freelists"),
    FROM("from"), FUNCTION("function"), GENERIC("generic"), GO("go"), GOTO("goto"),
    GRANT("grant"), GROUP("group"), GROUPS("groups"), HAVING("having"),
    IDENTIFIED("identified"), IF("if"), CONDITIONAL_IF("$if"), IN("in"),
    INCLUDING("including"), INDEX("index"), INDEXES("indexes"), INDICATOR("indicator"),
    INITRANS("initrans"), INSERT("insert"), INSTANCE("instance"), INT("int"),
    INTEGER("integer"), INTERSECT("intersect"), INTO("into"), IS("is"), KEY("key"),
    LANGUAGE("language"), LAYER("layer"), LEVEL("level"), LIKE("like"), LIMITED("limited"),
    LINK("link"), LISTS("lists"), LOGFILE("logfile"), LOOP("loop"), MANAGE("manage"),
    MANUAL("manual"), MAX("max"), MAXDATAFILES("maxdatafiles"), MAXINSTANCES("maxinstances"),
    MAXLOGFILES("maxlogfiles"), MAXLOGHISTORY("maxloghistory"), MAXLOGMEMBERS("maxlogmembers"),
    MAXTRANS("maxtrans"), MAXVALUE("maxvalue"), MIN("min"), MINEXTENTS("minextents"),
    MINUS("minus"), MINVALUE("minvalue"), MLSLABEL("mlslabel"), MOD("mod"), MODE("mode"),
    MODULE("module"), MOUNT("mount"), NATURAL("natural"), NEW("new"), NEXT("next"),
    NEXTVAL("nextval"), NOARCHIVELOG("noarchivelog"), NOCACHE("nocache"),
    NOCOMPRESS("nocompress"), NOCYCLE("nocycle"), NOMAXVALUE("nomaxvalue"),
    NOMINVALUE("nominvalue"), NONE("none"), NOORDER("noorder"), NORESETLOGS("noresetlogs"),
    NORMAL("normal"), NOSORT("nosort"), NOT("not"), NULL("null"), NUMBER("number"),
    NUMBER_BASE("number_base"), NUMERIC("numeric"), OF("of"), OFF("off"), OLD("old"), ON("on"),
    ONLY("only"), OPEN("open"), OPTIMAL("optimal"), OPTION("option"), OR("or"), ORDER("order"),
    OTHERS("others"), OUT("out"), OWN("own"), PACKAGE("package"), PARALLEL("parallel"),
    PARTITION("partition"), PCTFREE("pctfree"), PCTINCREASE("pctincrease"), PCTUSED("pctused"),
    PLAN("plan"), PLI("pli"), POSITIVE("positive"), PRAGMA("pragma"), PRECISION("precision"),
    PRIMARY("primary"), PRIOR("prior"), PRIVATE("private"), PROCEDURE("procedure"),
    PROFILE("profile"), PUBLIC("public"), QUOTA("quota"), RAISE("raise"), RANGE("range"),
    READ("read"), REAL("real"), RECORD("record"), RECOVER("recover"),
    REFERENCES("references"), REFERENCING("referencing"), RELEASE("release"), REMR("remr"),
    RENAME("rename"), RESETLOGS("resetlogs"), RESOURCE("resource"), RESTRICTED("restricted"),
    RETURN("return"), REUSE("reuse"), REVERSE("reverse"), REVOKE("revoke"), ROLE("role"),
    ROLES("roles"), ROLLBACK("rollback"), ROWID("rowid"), ROWLABEL("rowlabel"),
    ROWNUM("rownum"), ROWTYPE("rowtype"), RUN("run"), SAVEPOINT("savepoint"),
    SCHEMA("schema"), SCN("scn"), SECTION("section"), SEGMENT("segment"), SELECT("select"),
    SEPARATE("separate"), SEQUENCE("sequence"), SET("set"), SHARED("shared"), SIZE("size"),
    SMALLINT("smallint"), SNAPSHOT("snapshot"), SOME("some"), SORT("sort"), SPACE("space"),
    SQL("sql"), SQLCODE("sqlcode"), SQLERRM("sqlerrm"), SQLERROR("sqlerror"),
    SQLSTATE("sqlstate"), START("start"), STATEMENT("statement"),
    STATEMENT_ID("statement_id"), STATISTICS("statistics"), STDDEV("stddev"), STOP("stop"),
    STORAGE("storage"), SUBTYPE("subtype"), SUM("sum"), SWITCH("switch"), SYSTEM("system"),
    TABAUTH("tabauth"), TABLE("table"), TABLES("tables"), TABLESPACE("tablespace"),
    TASK("task"), TEMPORARY("temporary"), TERMINATE("terminate"), THEN("then"),
    CONDITIONAL_THEN("$then"), THREAD("thread"), TIME("time"), TO("to"), TRACING("tracing"),
    TRANSACTION("transaction"), TRIGGERS("triggers"), TRUE("true"), TRUNCATE("truncate"),
    TYPE("type"), UNDER("under"), UNION("union"), UNIQUE("unique"), UNLIMITED("unlimited"),
    UNTIL("until"), UPDATE("update"), USE("use"), USING("using"), VALUES("values"),
    VARCHAR("varchar"), VARCHAR2("varchar2"), VARIANCE("variance"), VIEW("view"),
    VIEWS("views"), WHEN("when"), WHERE("where"), WHILE("while"), WITH("with"), WORK("work"),
    WRITE("write"), XOR("xor"),
    // Future reserved words
    // manually added words
    // TODO: might be required to move
    REPLACE("replace");

    // The keyword as it appears (lower-cased) in PL/SQL source text.
    private final String value;

    private PlSQLKeyword(String value) {
        this.value = value;
    }

    // TokenType: the symbolic name of the token (the enum constant's name()).
    public String getName() {
        return name();
    }

    // TokenType: the canonical source-text spelling of the keyword.
    public String getValue() {
        return value;
    }

    // TokenType: keyword tokens are always kept in the AST.
    public boolean hasToBeSkippedFromAst(AstNode node) {
        return false;
    }

    /**
     * All keyword spellings, in declaration order — used to configure the
     * lexer's keyword channel.
     *
     * @return a fresh array of every keyword's lower-case value
     */
    public static String[] keywordValues() {
        PlSQLKeyword[] keywordsEnum = PlSQLKeyword.values();
        String[] keywords = new String[keywordsEnum.length];
        for (int i = 0; i < keywords.length; i++) {
            keywords[i] = keywordsEnum[i].getValue();
        }
        return keywords;
    }
}
package com.google.api.ads.dfp.jaxws.v201306;

import java.util.List;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;

/**
 * Provides methods for managing {@link BaseRate} objects.
 * <p>
 * To use this service, you need to have the new sales management solution
 * enabled on your network. If you do not see a "Sales" tab in
 * <a href="https://www.google.com/dfp">DoubleClick for Publishers (DFP)</a>,
 * you will not be able to use this service.
 *
 * This class was generated by the JAX-WS RI.
 * JAX-WS RI 2.2.4-b01
 * Generated source version: 2.1
 * NOTE(review): generated code — do not hand-edit method signatures or
 * annotations; regenerate from the WSDL instead.
 */
@WebService(name = "BaseRateServiceInterface", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
@XmlSeeAlso({
    ObjectFactory.class
})
public interface BaseRateServiceInterface {


    /**
     * Creates a new {@link BaseRate} object.
     *
     * @param baseRate the base rate to be created
     * @return the base rate with its ID filled in
     *
     * @param baseRate
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201306.BaseRate
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
    @RequestWrapper(localName = "createBaseRate", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacecreateBaseRate")
    @ResponseWrapper(localName = "createBaseRateResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacecreateBaseRateResponse")
    public BaseRate createBaseRate(
        @WebParam(name = "baseRate", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        BaseRate baseRate)
        throws ApiException_Exception
    ;

    /**
     * Creates a list of new {@link BaseRate} objects.
     *
     * @param baseRates the base rates to be created
     * @return the base rates with their IDs filled in
     *
     * @param baseRates
     * @return
     *     returns java.util.List<com.google.api.ads.dfp.jaxws.v201306.BaseRate>
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
    @RequestWrapper(localName = "createBaseRates", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacecreateBaseRates")
    @ResponseWrapper(localName = "createBaseRatesResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacecreateBaseRatesResponse")
    public List<BaseRate> createBaseRates(
        @WebParam(name = "baseRates", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        List<BaseRate> baseRates)
        throws ApiException_Exception
    ;

    /**
     * Returns the {@link BaseRate} object uniquely identified by the given ID.
     *
     * @param baseRateId the ID of the base rate, which must already exist
     * @return the base rate uniquely identified by the given ID
     *
     * @param baseRateId
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201306.BaseRate
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
    @RequestWrapper(localName = "getBaseRate", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacegetBaseRate")
    @ResponseWrapper(localName = "getBaseRateResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacegetBaseRateResponse")
    public BaseRate getBaseRate(
        @WebParam(name = "baseRateId", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        Long baseRateId)
        throws ApiException_Exception
    ;

    /**
     * Gets a {@link BaseRatePage} of {@link BaseRate} objects that satisfy the given
     * {@link Statement#query}.
     *
     * The following fields are supported for filtering:
     *
     * <table>
     * <tr>
     * <th scope="col">PQL Property</th>
     * <th scope="col">Object Property</th>
     * </tr>
     * <tr>
     * <td>{@code rateCardId}</td>
     * <td>{@link BaseRate#rateCardId}</td>
     * </tr>
     * <tr>
     * <td>{@code id}</td>
     * <td>{@link BaseRate#id}</td>
     * </tr>
     * <tr>
     * <td>{@code productTemplateId}</td>
     * <td>{@link ProductTemplateBaseRate#id}<br><b>Note:</b>&nbsp;Cannot be
     * combined with {@code productId}.</td>
     * </tr>
     * <tr>
     * <td>{@code productId}</td>
     * <td>{@link ProductBaseRate#id}<br><b>Note:</b>&nbsp;Cannot be combined with
     * {@code productTemplateId}.</td>
     * </tr>
     * </table>
     *
     * @param filterStatement a Publisher Query Language statement used to filter
     * a set of base rates.
     * @return the page of base rates that match the given filter
     *
     * @param filterStatement
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201306.BaseRatePage
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
    @RequestWrapper(localName = "getBaseRatesByStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacegetBaseRatesByStatement")
    @ResponseWrapper(localName = "getBaseRatesByStatementResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfacegetBaseRatesByStatementResponse")
    public BaseRatePage getBaseRatesByStatement(
        @WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        Statement filterStatement)
        throws ApiException_Exception
    ;

    /**
     * Performs actions on {@link BaseRate} objects that satisfy the given
     * {@link Statement#query}.
     *
     * @param baseRateAction the action to perform
     * @param filterStatement a Publisher Query Language statement used to filter
     * a set of base rates.
     * @return the result of the action performed
     *
     * @param baseRateAction
     * @param filterStatement
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201306.UpdateResult
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
    @RequestWrapper(localName = "performBaseRateAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfaceperformBaseRateAction")
    @ResponseWrapper(localName = "performBaseRateActionResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfaceperformBaseRateActionResponse")
    public UpdateResult performBaseRateAction(
        @WebParam(name = "baseRateAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        BaseRateAction baseRateAction,
        @WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        Statement filterStatement)
        throws ApiException_Exception
    ;

    /**
     * Updates the specified {@link BaseRate} object.
     *
     * @param baseRate the base rate to be updated
     * @return the updated base rate
     *
     * @param baseRate
     * @return
     *     returns com.google.api.ads.dfp.jaxws.v201306.BaseRate
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
    @RequestWrapper(localName = "updateBaseRate", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfaceupdateBaseRate")
    @ResponseWrapper(localName = "updateBaseRateResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfaceupdateBaseRateResponse")
    public BaseRate updateBaseRate(
        @WebParam(name = "baseRate", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        BaseRate baseRate)
        throws ApiException_Exception
    ;

    /**
     * Updates the specified {@link BaseRate} objects.
     *
     * @param baseRates the base rates to be updated
     * @return the updated base rates
     *
     * @param baseRates
     * @return
     *     returns java.util.List<com.google.api.ads.dfp.jaxws.v201306.BaseRate>
     * @throws ApiException_Exception
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
    @RequestWrapper(localName = "updateBaseRates", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfaceupdateBaseRates")
    @ResponseWrapper(localName = "updateBaseRatesResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306", className = "com.google.api.ads.dfp.jaxws.v201306.BaseRateServiceInterfaceupdateBaseRatesResponse")
    public List<BaseRate> updateBaseRates(
        @WebParam(name = "baseRates", targetNamespace = "https://www.google.com/apis/ads/publisher/v201306")
        List<BaseRate> baseRates)
        throws ApiException_Exception
    ;

}