// NOTE(review): removed seven dataset-extraction artifact lines ("gt",
// "stringclasses", "1 value", "context", "stringlengths", "2.05k", "161k")
// that preceded the license header; they are not part of the Java source.
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.TreeSet; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key; import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.NodeStore; import 
org.apache.jackrabbit.oak.stats.Clock; import org.jetbrains.annotations.NotNull; import org.junit.Test; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import static com.google.common.collect.ImmutableList.copyOf; import static org.apache.jackrabbit.oak.plugins.document.Collection.NODES; import static org.apache.jackrabbit.oak.plugins.document.MongoBlobGCTest.randomStream; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.DOC_SIZE_THRESHOLD; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.NUM_REVS_THRESHOLD; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.PREV_SPLIT_FACTOR; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.SplitDocType; import static org.apache.jackrabbit.oak.plugins.document.TestUtils.NO_BINARY; import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation.Type.REMOVE_MAP_ENTRY; import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation.Type.SET_MAP_ENTRY; import static org.apache.jackrabbit.oak.plugins.document.util.Utils.isCommitted; import static org.apache.jackrabbit.oak.plugins.memory.BinaryPropertyState.binaryProperty; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Check correct splitting of documents (OAK-926 & OAK-1342). 
*/
public class DocumentSplitTest extends BaseDocumentMKTest {

    // Fills the root document's local _revisions map past NUM_REVS_THRESHOLD,
    // runs background ops to trigger the split, and verifies that all
    // revisions are still resolvable and committed via the previous document.
    @Test
    public void splitRevisions() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        Set<Revision> revisions = Sets.newHashSet();
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/"));
        assertNotNull(doc);
        revisions.addAll(doc.getLocalRevisions().keySet());
        revisions.add(Revision.fromString(mk.commit("/", "+\"foo\":{}+\"bar\":{}", null, null)));
        // create nodes
        while (revisions.size() <= NodeDocument.NUM_REVS_THRESHOLD) {
            revisions.add(Revision.fromString(mk.commit("/",
                    "+\"foo/node-" + revisions.size() + "\":{}" +
                    "+\"bar/node-" + revisions.size() + "\":{}", null, null)));
        }
        mk.runBackgroundOperations();
        String head = mk.getHeadRevision();
        doc = store.find(NODES, Utils.getIdFromPath("/"));
        assertNotNull(doc);
        Map<Revision, String> revs = doc.getLocalRevisions();
        // one remaining in the local revisions map
        assertEquals(1, revs.size());
        for (Revision rev : revisions) {
            assertTrue(doc.containsRevision(rev));
            assertTrue(isCommitted(ns.getCommitValue(rev, doc)));
        }
        // check if document is still there
        assertNotNull(ns.getNode(Path.ROOT, RevisionVector.fromString(head)));
        NodeDocument prevDoc = Iterators.getOnlyElement(doc.getAllPreviousDocs());
        assertThat(prevDoc.getSplitDocType(),
                either(is(SplitDocType.DEFAULT)).or(is(SplitDocType.DEFAULT_NO_BRANCH)));
        mk.commit("/", "+\"baz\":{}", null, null);
        ns.setAsyncDelay(0);
        mk.backgroundWrite();
    }

    // Alternately removes and re-creates /foo until the local _deleted map
    // exceeds the threshold; after the split all revisions must still be
    // found on the document (or via its commit-root path) and be committed.
    @Test
    public void splitDeleted() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        Set<Revision> revisions = Sets.newHashSet();
        mk.commit("/", "+\"foo\":{}", null, null);
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(doc);
        revisions.addAll(doc.getLocalRevisions().keySet());
        boolean create = false;
        while (revisions.size() <= NodeDocument.NUM_REVS_THRESHOLD) {
            if (create) {
                revisions.add(Revision.fromString(mk.commit("/",
                        "+\"foo\":{}", null, null)));
            } else {
                revisions.add(Revision.fromString(mk.commit("/", "-\"foo\"", null, null)));
            }
            create = !create;
        }
        mk.runBackgroundOperations();
        String head = mk.getHeadRevision();
        doc = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(doc);
        Map<Revision, String> deleted = doc.getLocalDeleted();
        // one remaining in the local deleted map
        assertEquals(1, deleted.size());
        for (Revision rev : revisions) {
            assertTrue("document should contain revision (or have revision in commit root path):" + rev,
                    doc.containsRevision(rev) || doc.getCommitRootPath(rev) != null);
            assertTrue(isCommitted(ns.getCommitValue(rev, doc)));
        }
        DocumentNodeState node = ns.getNode(Path.fromString("/foo"), RevisionVector.fromString(head));
        // check status of node
        // 'create' was toggled after the last commit, so it now tells us
        // whether the final operation was a removal (node must be gone)
        // or a creation (node must exist)
        if (create) {
            assertNull(node);
        } else {
            assertNotNull(node);
        }
    }

    // Grows the local _commitRoot map past the threshold and checks that
    // after the split only the expected two entries remain locally while
    // all commit roots still resolve as committed.
    @Test
    public void splitCommitRoot() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        mk.commit("/", "+\"foo\":{}+\"bar\":{}", null, null);
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(doc);
        Set<Revision> commitRoots = Sets.newHashSet();
        commitRoots.addAll(doc.getLocalCommitRoot().keySet());
        // create nodes
        while (commitRoots.size() <= NodeDocument.NUM_REVS_THRESHOLD) {
            commitRoots.add(Revision.fromString(mk.commit("/",
                    "^\"foo/prop\":" + commitRoots.size() +
                    "^\"bar/prop\":" + commitRoots.size(), null, null)));
        }
        mk.runBackgroundOperations();
        doc = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(doc);
        Map<Revision, String> commits = doc.getLocalCommitRoot();
        // two remaining in the local commit root map
        // the first _commitRoot entry for the _deleted when the node was created
        // the second _commitRoot entry for the most recent prop change
        assertEquals(2, commits.size());
        for (Revision rev : commitRoots) {
            assertTrue(isCommitted(ns.getCommitValue(rev, doc)));
        }
    }

    // Repeatedly updates a single property until its revision map splits;
    // the full history must remain visible through getValueMap("prop").
    @Test
    public void splitPropertyRevisions() throws Exception {
        DocumentStore
store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        mk.commit("/", "+\"foo\":{}", null, null);
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(doc);
        Set<Revision> revisions = Sets.newHashSet();
        // create nodes
        while (revisions.size() <= NodeDocument.NUM_REVS_THRESHOLD) {
            revisions.add(Revision.fromString(mk.commit("/",
                    "^\"foo/prop\":" + revisions.size(), null, null)));
        }
        mk.runBackgroundOperations();
        doc = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(doc);
        Map<Revision, String> localRevs = doc.getLocalRevisions();
        // one remaining in the local revisions map
        assertEquals(1, localRevs.size());
        for (Revision rev : revisions) {
            assertTrue(isCommitted(ns.getCommitValue(rev, doc)));
        }
        // all revisions in the prop map
        Map<Revision, String> valueMap = doc.getValueMap("prop");
        assertEquals((long) revisions.size(), valueMap.size());
        // one remaining revision in the local map
        valueMap = doc.getLocalMap("prop");
        assertEquals(1L, valueMap.size());
    }

    // Three cluster nodes (distinct cluster ids) commit concurrently against
    // a shared store; after background ops the merged _revisions value map
    // must contain all entries in descending revision-time order.
    @Test
    public void cluster() {
        MemoryDocumentStore ds = new MemoryDocumentStore();
        MemoryBlobStore bs = new MemoryBlobStore();
        DocumentMK.Builder builder;
        builder = new DocumentMK.Builder();
        builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0);
        DocumentMK mk1 = builder.setClusterId(1).open();
        mk1.commit("/", "+\"test\":{\"prop1\":0}", null, null);
        // make sure the new node is visible to other DocumentMK instances
        mk1.backgroundWrite();
        builder = new DocumentMK.Builder();
        builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0);
        DocumentMK mk2 = builder.setClusterId(2).open();
        builder = new DocumentMK.Builder();
        builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0);
        DocumentMK mk3 = builder.setClusterId(3).open();
        for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) {
            mk1.commit("/", "^\"test/prop1\":" + i, null, null);
            mk2.commit("/", "^\"test/prop2\":" + i, null, null);
            mk3.commit("/", "^\"test/prop3\":" + i, null, null);
        }
        mk1.runBackgroundOperations();
        mk2.runBackgroundOperations();
        mk3.runBackgroundOperations();
        NodeDocument doc = ds.find(NODES, Utils.getIdFromPath("/test"));
        assertNotNull(doc);
        Map<Revision, String> revs = doc.getLocalRevisions();
        // one local entry per cluster node after the split
        assertEquals(3, revs.size());
        revs = doc.getValueMap("_revisions");
        assertEquals(3 * NodeDocument.NUM_REVS_THRESHOLD, revs.size());
        Revision previous = null;
        for (Map.Entry<Revision, String> entry : revs.entrySet()) {
            if (previous != null) {
                // value map iterates most-recent first
                assertTrue(previous.compareRevisionTimeThenClusterId(entry.getKey()) > 0);
            }
            previous = entry.getKey();
        }
        mk1.dispose();
        mk2.dispose();
        mk3.dispose();
    }

    // Stress test: three cluster nodes perform 1000 randomized property
    // updates with periodic syncs; each read-modify-write must observe the
    // latest value despite interleaved splits.
    @Test // OAK-1233
    public void manyRevisions() {
        final int numMKs = 3;
        MemoryDocumentStore ds = new MemoryDocumentStore();
        MemoryBlobStore bs = new MemoryBlobStore();
        List<Set<String>> changes = new ArrayList<Set<String>>();
        List<DocumentMK> mks = new ArrayList<DocumentMK>();
        for (int i = 1; i <= numMKs; i++) {
            DocumentMK.Builder builder = new DocumentMK.Builder();
            builder.setDocumentStore(ds).setBlobStore(bs).setAsyncDelay(0);
            DocumentMK mk = builder.setClusterId(i).open();
            mks.add(mk);
            changes.add(new HashSet<String>());
            if (i == 1) {
                mk.commit("/", "+\"test\":{}", null, null);
                mk.runBackgroundOperations();
            }
        }
        List<String> propNames = Arrays.asList("prop1", "prop2", "prop3");
        // fixed seed keeps the scenario deterministic
        Random random = new Random(0);
        for (int i = 0; i < 1000; i++) {
            int mkIdx = random.nextInt(mks.size());
            // pick mk
            DocumentMK mk = mks.get(mkIdx);
            DocumentNodeStore ns = mk.getNodeStore();
            // pick property name to update
            String name = propNames.get(random.nextInt(propNames.size()));
            // need to sync?
for (int j = 0; j < changes.size(); j++) {
                Set<String> c = changes.get(j);
                if (c.contains(name)) {
                    // another cluster node changed this property:
                    // sync before reading
                    syncMKs(mks, j);
                    c.clear();
                    break;
                }
            }
            // read current value
            NodeDocument doc = ds.find(NODES, Utils.getIdFromPath("/test"));
            assertNotNull(doc);
            RevisionVector head = ns.getHeadRevision();
            Revision lastRev = ns.getPendingModifications().get(Path.fromString("/test"));
            DocumentNodeState n = doc.getNodeAtRevision(mk.getNodeStore(), head, lastRev);
            assertNotNull(n);
            String value = n.getPropertyAsString(name);
            // set or increment
            if (value == null) {
                value = String.valueOf(0);
            } else {
                value = String.valueOf(Integer.parseInt(value) + 1);
            }
            mk.commit("/test", "^\"" + name + "\":" + value, null, null);
            changes.get(mkIdx).add(name);
        }
        for (DocumentMK mk : mks) {
            mk.dispose();
        }
    }

    // The commit root of older changes moves to a previous document; the
    // node must still be readable at a revision whose commit root entry
    // now lives in that previous document.
    @Test
    public void commitRootInPrevious() {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        mk.commit("/", "+\"test\":{\"node\":{}}", null, null);
        mk.commit("/test", "+\"foo\":{}+\"bar\":{}", null, null);
        mk.commit("/test", "^\"foo/prop\":0^\"bar/prop\":0", null, null);
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test/foo"));
        assertNotNull(doc);
        String rev = null;
        for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) {
            rev = mk.commit("/test/foo", "^\"prop\":" + i, null, null);
        }
        ns.runBackgroundOperations();
        doc = store.find(NODES, Utils.getIdFromPath("/test/foo"));
        assertNotNull(doc);
        DocumentNodeState node = doc.getNodeAtRevision(ns, RevisionVector.fromString(rev), null);
        assertNotNull(node);
    }

    // A split of a leaf node (no children) must produce a previous document
    // of type DEFAULT_LEAF.
    @Test
    public void testSplitDocNoChild() throws Exception{
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        mk.commit("/", "+\"test\":{\"node\":{}}", null, null);
        mk.commit("/test", "+\"foo\":{}+\"bar\":{}", null, null);
        for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) {
            mk.commit("/test/foo", "^\"prop\":" + i, null, null);
        }
        ns.runBackgroundOperations();
        NodeDocument doc = store.find(NODES,
                Utils.getIdFromPath("/test/foo"));
        List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs());
        assertEquals(1, prevDocs.size());
        assertEquals(SplitDocType.DEFAULT_LEAF, prevDocs.get(0).getSplitDocType());
    }

    // When only property and commit-root entries are split away (the commit
    // root being the parent), the previous document must be typed
    // COMMIT_ROOT_ONLY.
    @Test
    public void testSplitPropAndCommitOnly() throws Exception{
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder b1 = ns.getRoot().builder();
        b1.child("test").child("foo").child("bar");
        ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        //Commit on a node which has a child and where the commit root
        // is parent
        for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) {
            b1 = ns.getRoot().builder();
            b1.child("test").child("foo").setProperty("prop",i);
            b1.child("test").setProperty("prop",i);
            ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        }
        ns.runBackgroundOperations();
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test/foo"));
        List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs());
        assertEquals(1, prevDocs.size());
        assertEquals(SplitDocType.COMMIT_ROOT_ONLY, prevDocs.get(0).getSplitDocType());
    }

    // The hasBinary flag must be carried over to previous documents created
    // by a split of a document holding a binary property.
    @Test
    public void splitDocWithHasBinary() throws Exception{
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder b1 = ns.getRoot().builder();
        b1.child("test").child("foo").setProperty("binaryProp",ns.createBlob(randomStream(1, 4096)));
        ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        //Commit on a node which has a child and where the commit root
        // is parent
        for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD; i++) {
            b1 = ns.getRoot().builder();
            b1.child("test").child("foo").setProperty("prop",i);
            ns.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        }
        ns.runBackgroundOperations();
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test/foo"));
        List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs());
        assertEquals(1, prevDocs.size());
        //Check for hasBinary
        assertTrue(doc.hasBinary());
assertTrue(prevDocs.get(0).hasBinary());
    }

    @Test
    public void cascadingSplit() {
        cascadingSplit("/test/node");
    }

    // Same as cascadingSplit() but with a path long enough to be stored
    // as a hashed "long path" id.
    @Test
    public void cascadingSplitLongPath() {
        Path p = Path.ROOT;
        while (!Utils.isLongPath(p)) {
            p = new Path(p, "long-path-element");
        }
        cascadingSplit(p.toString());
    }

    // Drives PREV_SPLIT_FACTOR + 1 split rounds so that previous documents
    // are themselves consolidated under an INTERMEDIATE document, then
    // verifies every committed revision is still reachable.
    private void cascadingSplit(String path) {
        // use a store without sync delay
        mk.dispose();
        mk = new DocumentMK.Builder().setAsyncDelay(0).open();
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        String rev = null;
        String p = "/";
        for (String name : PathUtils.elements(path)) {
            rev = mk.commit(p, "+\"" + name + "\":{}", rev, null);
            p = PathUtils.concat(p, name);
        }
        List<String> revs = Lists.newArrayList();
        for (int i = 0; i < NodeDocument.PREV_SPLIT_FACTOR + 1; i++) {
            NodeDocument doc = store.find(NODES, Utils.getIdFromPath(path));
            assertNotNull(doc);
            assertEquals(i, doc.getPreviousRanges().size());
            for (int j = 0; j < NodeDocument.NUM_REVS_THRESHOLD; j++) {
                int value = (i * NodeDocument.NUM_REVS_THRESHOLD + j);
                rev = mk.commit(path, "^\"prop\":" + value, rev, null);
                revs.add(rev);
            }
            ns.runBackgroundOperations();
        }
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath(path));
        assertNotNull(doc);
        assertEquals(2, doc.getPreviousRanges().size());
        List<NodeDocument> prevDocs = ImmutableList.copyOf(doc.getAllPreviousDocs());
        //1 intermediate and 11 previous doc
        assertEquals(1 + 11, prevDocs.size());
        assertTrue(Iterables.any(prevDocs, new Predicate<NodeDocument>() {
            @Override
            public boolean apply(NodeDocument input) {
                return input.getSplitDocType() == SplitDocType.INTERMEDIATE;
            }
        }));
        for (String s : revs) {
            Revision r = Revision.fromString(s);
            if (doc.getLocalRevisions().containsKey(r)) {
                continue;
            }
            // each split-away revision must live in exactly one previous doc
            Iterable<NodeDocument> prev = doc.getPreviousDocs("prop", r);
            assertEquals(1, Iterables.size(prev));
            for (NodeDocument d : prev) {
                assertTrue(d.containsRevision(r));
            }
        }
        int numPrev = 0;
        for (NodeDocument prev : doc.getPreviousDocs("prop", null)) {
            numPrev++;
            assertTrue(!prev.getValueMap("prop").isEmpty());
        }
        assertEquals(2, numPrev);
        Revision previous = null;
        int numValues = 0;
        Map<Revision, String> valueMap = doc.getValueMap("prop");
        for (Map.Entry<Revision, String> entry : valueMap.entrySet()) {
            if (previous != null) {
                assertTrue(previous.compareRevisionTime(entry.getKey()) > 0);
            }
            previous = entry.getKey();
            numValues++;
            assertEquals(entry.getValue(), valueMap.get(entry.getKey()));
        }
        assertEquals(revs.size(), numValues);
        assertEquals(revs.size(), valueMap.size());
        assertNotNull(doc.getNodeAtRevision(ns, RevisionVector.fromString(rev), null));
    }

    // getMainPath() must recover the original node path from a previous
    // document id for root, short and nested paths.
    @Test
    public void mainPath() {
        Revision r = Revision.fromString("r1-0-1");
        for (String p : new String[]{"/", "/test", "/test/path"}) {
            Path path = Path.fromString(p);
            DocumentStore store = mk.getDocumentStore();
            NodeDocument doc = new NodeDocument(store);
            String id = Utils.getPreviousIdFor(path, r, 0);
            doc.put(NodeDocument.ID, id);
            assertEquals(path, doc.getMainPath());
        }
    }

    // OAK-1692
    // A document over DOC_SIZE_THRESHOLD with PREV_SPLIT_FACTOR previous
    // ranges must split into an intermediate doc plus a main-doc update
    // that only rewires _prev entries for this cluster node.
    @Test
    public void cascadingWithSplitRatio() {
        String id = Utils.getIdFromPath("/test");
        mk.commit("/", "+\"test\":{}", null, null);
        DocumentStore store = mk.getDocumentStore();
        int clusterId = mk.getNodeStore().getClusterId();
        UpdateOp op = new UpdateOp(id, false);
        // create some baggage from another cluster node
        for (int i = 0; i < 4000; i++) {
            Revision r = Revision.newRevision(2);
            op.setMapEntry("prop", r, "some long test value with many characters");
            NodeDocument.setRevision(op, r, "c");
        }
        store.findAndUpdate(NODES, op);
        NodeDocument doc = store.find(NODES, id);
        assertNotNull(doc);
        assertTrue(doc.getMemory() > DOC_SIZE_THRESHOLD);
        // some fake previous doc references to trigger UpdateOp
        // for an intermediate document
        TreeSet<Revision> prev = Sets.newTreeSet(StableRevisionComparator.INSTANCE);
        for (int i = 0; i < PREV_SPLIT_FACTOR; i++) {
            Revision low = Revision.newRevision(clusterId);
            Revision high = Revision.newRevision(clusterId);
            prev.add(high);
            NodeDocument.setPrevious(op, new
Range(high, low, 0));
        }
        store.findAndUpdate(NODES, op);
        doc = store.find(NODES, id);
        assertNotNull(doc);
        List<UpdateOp> splitOps = Lists.newArrayList(doc.split(
                mk.getNodeStore(), mk.getNodeStore().getHeadRevision(), NO_BINARY));
        assertEquals(2, splitOps.size());
        // first update op is for the new intermediate doc
        op = splitOps.get(0);
        String newPrevId = Utils.getPreviousIdFor(Path.fromString("/test"), prev.last(), 1);
        assertEquals(newPrevId, op.getId());
        // second update op is for the main document
        op = splitOps.get(1);
        assertEquals(id, op.getId());
        for (Map.Entry<Key, Operation> entry : op.getChanges().entrySet()) {
            Revision r = entry.getKey().getRevision();
            assertNotNull(r);
            // the split must only touch entries of the local cluster node
            assertEquals(clusterId, r.getClusterId());
            if (entry.getKey().getName().equals("_prev")) {
                if (entry.getValue().type == REMOVE_MAP_ENTRY) {
                    assertTrue(prev.contains(r));
                } else if (entry.getValue().type == SET_MAP_ENTRY) {
                    assertEquals(newPrevId, Utils.getPreviousIdFor(Path.fromString("/test"), r, 1));
                } else {
                    fail("unexpected update operation " + entry);
                }
            } else {
                fail("unexpected update operation " + entry);
            }
        }
    }

    // OAK-1770
    // With commits from five cluster nodes on one document, the split must
    // create the previous document for the local cluster node starting at
    // its second most recent revision.
    @Test
    public void splitRevisionsManyClusterNodes() {
        int numClusterNodes = 5;
        String id = Utils.getIdFromPath("/test");
        mk.commit("/", "+\"test\":{}", null, null);
        DocumentStore store = mk.getDocumentStore();
        int clusterId = mk.getNodeStore().getClusterId();
        List<Revision> revs = Lists.newArrayList();
        UpdateOp op = new UpdateOp(id, false);
        for (int i = 0; i < numClusterNodes; i++) {
            // create some commits for each cluster node
            for (int j = 0; j < NUM_REVS_THRESHOLD; j++) {
                Revision r = Revision.newRevision(i + 1);
                if (clusterId == r.getClusterId()) {
                    revs.add(r);
                }
                op.setMapEntry("prop", r, "value");
                NodeDocument.setRevision(op, r, "c");
            }
        }
        store.findAndUpdate(NODES, op);
        NodeDocument doc = store.find(NODES, id);
        assertNotNull(doc);
        // must split document and create a previous document starting at
        // the second most recent revision
        List<UpdateOp> splitOps = Lists.newArrayList(doc.split(
                mk.getNodeStore(), mk.getNodeStore().getHeadRevision(), NO_BINARY));
        assertEquals(2, splitOps.size());
        String prevId = Utils.getPreviousIdFor(Path.fromString("/test"), revs.get(revs.size() - 2), 0);
        assertEquals(prevId, splitOps.get(0).getId());
        assertEquals(id, splitOps.get(1).getId());
    }

    // OAK-1794
    // A revision referenced by the most recent change of another property
    // must be kept in the local _revisions map even after the split.
    @Test
    public void keepRevisionsForMostRecentChanges() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        NodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.setProperty("foo", -1);
        builder.setProperty("bar", -1);
        ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        for (int i = 0; i < NUM_REVS_THRESHOLD; i++) {
            builder = ns.getRoot().builder();
            builder.setProperty("foo", i);
            ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        }
        mk.runBackgroundOperations();
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/"));
        assertNotNull(doc);
        // the local _revisions map must still contain the entry for
        // the initial 'bar' property
        Map<Revision, String> valueMap = doc.getValueMap("bar");
        assertFalse(valueMap.isEmpty());
        Revision r = valueMap.keySet().iterator().next();
        assertTrue(doc.getLocalRevisions().containsKey(r));
        // but also the previous document must contain the revision
        List<NodeDocument> prevDocs = Lists.newArrayList(doc.getAllPreviousDocs());
        assertEquals(1, prevDocs.size());
        NodeDocument prev = prevDocs.get(0);
        assertTrue(prev.getLocalRevisions().containsKey(r));
    }

    // OAK-1794
    // Same guarantee as above, but for the _commitRoot entry of the most
    // recent change of another property.
    @Test
    public void keepCommitRootForMostRecentChanges() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        NodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.setProperty("p", -1);
        NodeBuilder test = builder.child("test");
        test.setProperty("foo", -1);
        test.setProperty("bar", -1);
        ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        for (int i = 0; i < NUM_REVS_THRESHOLD; i++) {
            builder = ns.getRoot().builder();
            builder.setProperty("p", i);
            test =
builder.child("test");
            test.setProperty("foo", i);
            ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        }
        mk.runBackgroundOperations();
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test"));
        assertNotNull(doc);
        // the local _commitRoot map must still contain the entry for
        // the initial 'bar' property
        Map<Revision, String> valueMap = doc.getValueMap("bar");
        assertFalse(valueMap.isEmpty());
        Revision r = valueMap.keySet().iterator().next();
        assertTrue(doc.getLocalCommitRoot().containsKey(r));
        // but also the previous document must contain the commitRoot entry
        List<NodeDocument> prevDocs = Lists.newArrayList(doc.getAllPreviousDocs());
        assertEquals(1, prevDocs.size());
        NodeDocument prev = prevDocs.get(0);
        assertTrue(prev.getLocalCommitRoot().containsKey(r));
    }

    // Attempting to split a document that is itself a split (previous)
    // document must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void splitPreviousDocument() {
        NodeDocument doc = new NodeDocument(mk.getDocumentStore());
        doc.put(NodeDocument.ID, Utils.getIdFromPath("/test"));
        doc.put(NodeDocument.SD_TYPE, NodeDocument.SplitDocType.DEFAULT.type);
        RevisionVector head = mk.getNodeStore().getHeadRevision();
        SplitOperations.forDocument(doc, DummyRevisionContext.INSTANCE, head, NO_BINARY, NUM_REVS_THRESHOLD);
    }

    // Reading a node at head must not touch previous documents of /test;
    // the instrumented store records every document id read to verify this.
    @Test
    public void readLocalCommitInfo() throws Exception {
        final Set<String> readSet = Sets.newHashSet();
        DocumentStore store = new MemoryDocumentStore() {
            @Override
            public <T extends Document> T find(Collection<T> collection, String key, int maxCacheAge) {
                readSet.add(key);
                return super.find(collection, key, maxCacheAge);
            }
        };
        DocumentNodeStore ns = new DocumentMK.Builder()
                .setDocumentStore(store).setAsyncDelay(0).getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.child("test");
        ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        for (int i = 0; i < NUM_REVS_THRESHOLD; i++) {
            builder = ns.getRoot().builder();
            builder.setProperty("p", i);
            builder.child("test").setProperty("p", i);
            builder.child("test").setProperty("q", i);
            ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        }
        builder = ns.getRoot().builder();
        builder.child("test").removeProperty("q");
        ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        ns.runBackgroundOperations();
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test"));
        assertNotNull(doc);
        readSet.clear();
        // must not access previous document of /test
        doc.getNodeAtRevision(ns, ns.getHeadRevision(), null);
        for (String id : Sets.newHashSet(readSet)) {
            doc = store.find(NODES, id);
            assertNotNull(doc);
            if (doc.isSplitDocument() && !doc.getMainPath().equals(Path.ROOT)) {
                fail("must not access previous document: " + id);
            }
        }
        ns.dispose();
    }

    // OAK-2528
    // Repeated child additions with the commit root on the parent must not
    // let the parent's local _commitRoot map grow unbounded.
    @Test
    public void commitRootForChildrenFlag() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        for (int i = 0; i < NUM_REVS_THRESHOLD * 2; i++) {
            NodeBuilder builder = ns.getRoot().builder();
            builder.child("test").child("child-" + i);
            merge(ns, builder);
        }
        ns.runBackgroundOperations();
        NodeDocument doc = store.find(NODES, Utils.getIdFromPath("/test"));
        assertNotNull(doc);
        assertTrue(doc.getLocalCommitRoot().size() < NUM_REVS_THRESHOLD);
    }

    // OAK-3333
    // Even after _commitRoot purging on split, a conflicting merge from a
    // second cluster node must still be detected and fail.
    @Test
    public void purgeAllButMostRecentCommitRoot() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns1 = mk.getNodeStore();
        NodeBuilder builder1 = ns1.getRoot().builder();
        builder1.child("test");
        merge(ns1, builder1);
        ns1.runBackgroundOperations();
        DocumentNodeStore ns2 = new DocumentMK.Builder().setDocumentStore(store)
                .setAsyncDelay(0).setClusterId(ns1.getClusterId() + 1).getNodeStore();
        // prevent merge retries
        ns2.setMaxBackOffMillis(0);
        assertTrue(ns2.getRoot().hasChildNode("test"));
        NodeBuilder builder2 = ns2.getRoot().builder();
        builder2.child("test").remove();
        for (int i = 0; i < NUM_REVS_THRESHOLD * 2; i++) {
            builder1 = ns1.getRoot().builder();
            builder1.child("test").child("child-" + i);
            merge(ns1, builder1);
        }
        ns1.runBackgroundOperations();
        try {
            merge(ns2, builder2);
fail("merge must fail with CommitFailedException");
        } catch (CommitFailedException e) {
            // expected
        }
        ns2.dispose();
    }

    // OAK-4050
    // Like purgeAllButMostRecentCommitRoot, but with an additional
    // uncommitted _commitRoot entry present when the split runs; conflict
    // detection on the second cluster node must still work.
    @Test
    public void purgeAllButMostRecentCommittedCommitRoot() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns1 = mk.getNodeStore();
        NodeBuilder builder1 = ns1.getRoot().builder();
        builder1.child("test");
        merge(ns1, builder1);
        ns1.runBackgroundOperations();
        DocumentNodeStore ns2 = new DocumentMK.Builder().setDocumentStore(store)
                .setAsyncDelay(0).setClusterId(ns1.getClusterId() + 1).getNodeStore();
        // prevent merge retries
        ns2.setMaxBackOffMillis(0);
        assertTrue(ns2.getRoot().hasChildNode("test"));
        NodeBuilder builder2 = ns2.getRoot().builder();
        builder2.child("test").remove();
        for (int i = 0; i < NUM_REVS_THRESHOLD * 2; i++) {
            builder1 = ns1.getRoot().builder();
            builder1.child("test").child("child-" + i);
            merge(ns1, builder1);
        }
        // create a _commitRoot entry for a revision, which is not committed
        UpdateOp op = new UpdateOp(Utils.getIdFromPath("/test"), false);
        NodeDocument.setCommitRoot(op, ns1.newRevision(), 0);
        store.findAndUpdate(NODES, op);
        ns1.runBackgroundOperations();
        try {
            merge(ns2, builder2);
            fail("merge must fail with CommitFailedException");
        } catch (CommitFailedException e) {
            // expected
        }
        ns2.dispose();
    }

    // OAK-3081
    // Runs splits concurrently with a writer thread and asserts that
    // SplitOperations never removes committed _deleted entries that were
    // not moved to a previous document.
    @Test
    public void removeGarbage() throws Exception {
        final DocumentStore store = mk.getDocumentStore();
        final DocumentNodeStore ns = mk.getNodeStore();
        final List<Exception> exceptions = Lists.newArrayList();
        final List<RevisionVector> revisions = Lists.newArrayList();
        Thread t = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    for (int i = 0; i < 200; i++) {
                        NodeBuilder builder = ns.getRoot().builder();
                        builder.child("foo").child("node").child("node").child("node").child("node");
                        builder.child("bar").child("node").child("node").child("node").child("node");
                        merge(ns, builder);
                        revisions.add(ns.getHeadRevision());
                        builder = ns.getRoot().builder();
builder.child("foo").child("node").remove();
                        builder.child("bar").child("node").remove();
                        merge(ns, builder);
                        revisions.add(ns.getHeadRevision());
                    }
                } catch (CommitFailedException e) {
                    exceptions.add(e);
                }
            }
        });
        t.start();
        // Use a revision context, which wraps the DocumentNodeStore and
        // randomly delays calls to get the head revision
        RevisionContext rc = new TestRevisionContext(ns);
        while (t.isAlive()) {
            for (String id : ns.getSplitCandidates()) {
                RevisionVector head = ns.getHeadRevision();
                NodeDocument doc = store.find(NODES, id);
                List<UpdateOp> ops = SplitOperations.forDocument(doc, rc, head, NO_BINARY, NUM_REVS_THRESHOLD);
                Set<Revision> removed = Sets.newHashSet();
                Set<Revision> added = Sets.newHashSet();
                for (UpdateOp op : ops) {
                    for (Map.Entry<Key, Operation> e : op.getChanges().entrySet()) {
                        if (!"_deleted".equals(e.getKey().getName())) {
                            continue;
                        }
                        Revision r = e.getKey().getRevision();
                        if (e.getValue().type == Operation.Type.REMOVE_MAP_ENTRY) {
                            removed.add(r);
                        } else if (e.getValue().type == Operation.Type.SET_MAP_ENTRY) {
                            added.add(r);
                        }
                    }
                }
                // every removed entry must have been re-added somewhere
                removed.removeAll(added);
                assertTrue("SplitOperations must not remove committed changes: " + removed, removed.isEmpty());
            }
            // perform the actual cleanup
            ns.runBackgroundOperations();
        }
        // check documents below /foo and /bar
        // the _deleted map must contain all revisions
        for (NodeDocument doc : Utils.getAllDocuments(store)) {
            if (doc.isSplitDocument() || Utils.getDepthFromId(doc.getId()) < 2) {
                continue;
            }
            Set<Revision> revs = Sets.newHashSet();
            for (RevisionVector rv : revisions) {
                Iterables.addAll(revs, rv);
            }
            revs.removeAll(doc.getValueMap("_deleted").keySet());
            assertTrue("Missing _deleted entries on " + doc.getId() + ": " + revs, revs.isEmpty());
        }
    }

    // Each superseded large binary value must be moved to its own previous
    // document on split.
    @Test
    public void splitDocumentWithBinary() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.child("foo");
        merge(ns, builder);
        // use more than 4k of binary data (OAK-5205)
        PropertyState binary = binaryProperty("p", randomBytes(5 * 1024));
        for (int i = 0; i < 10; i++) {
            builder = ns.getRoot().builder();
            builder.child("foo").setProperty(binary);
            merge(ns, builder);
            ns.runBackgroundOperations();
        }
        NodeDocument foo = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(foo);
        List<NodeDocument> prevDocs = copyOf(foo.getAllPreviousDocs());
        // all but most recent value are moved to individual previous docs
        assertEquals(9, prevDocs.size());
    }

    // Removing a node with a large binary must not trigger a binary split;
    // only re-creating the node moves the old value to a previous doc.
    @Test
    public void noBinarySplitWhenRemoved() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        // use more than 4k of binary data (OAK-5205)
        PropertyState binary = binaryProperty("p", randomBytes(5 * 1024));
        builder.child("foo").setProperty(binary);
        merge(ns, builder);
        builder = ns.getRoot().builder();
        builder.child("foo").remove();
        merge(ns, builder);
        ns.runBackgroundOperations();
        // must not create split document in this case. See OAK-5010
        NodeDocument foo = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(foo);
        assertEquals(0, foo.getPreviousRanges().size());
        // re-create it
        builder = ns.getRoot().builder();
        builder.child("foo");
        merge(ns, builder);
        ns.runBackgroundOperations();
        // now the old binary value must be moved to a previous document
        foo = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(foo);
        List<NodeDocument> prevDocs = copyOf(foo.getAllPreviousDocs());
        assertEquals(1, prevDocs.size());
    }

    // OAK-5205
    // Small binaries (< 4k) must not cause split documents.
    @Test
    public void noSplitForSmallBinary() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.child("foo");
        merge(ns, builder);
        for (int i = 0; i < 10; i++) {
            builder = ns.getRoot().builder();
            builder.child("foo").setProperty(
                    binaryProperty("p", ("value" + i).getBytes()));
            merge(ns, builder);
            ns.runBackgroundOperations();
        }
        NodeDocument foo = store.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(foo);
        List<NodeDocument> prevDocs = copyOf(foo.getAllPreviousDocs());
        // must not create split documents for small binaries less 4k
        assertEquals(0, prevDocs.size());
    }

    // A document that is big only because of many distinct properties
    // (no revision history to move) must yield no split operations.
    @Test
    public void nonSplittableBigDocument() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.child("foo");
        merge(ns, builder);
        String id = Utils.getIdFromPath("/foo");
        int num = 0;
        while (store.find(NODES, id).getMemory() < DOC_SIZE_THRESHOLD) {
            builder = ns.getRoot().builder();
            for (int i = 0; i < 50; i++) {
                builder.child("foo").setProperty("p" + num++,
                        "some value as payload for the document");
            }
            merge(ns, builder);
        }
        Iterable<UpdateOp> splitOps = store.find(NODES, id)
                .split(ns, ns.getHeadRevision(), NO_BINARY);
        assertEquals(0, Iterables.size(splitOps));
    }

    // A split must move local _bc (branch commit) entries to the previous
    // document together with the revisions they belong to.
    @Test
    public void splitWithBranchCommit() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.child("foo");
        merge(ns, builder);
        String branch = mk.branch(null);
        branch = mk.commit("/foo", "^\"p\":\"value\"", branch, null);
        mk.merge(branch, null);
        String id = Utils.getIdFromPath("/foo");
        NodeDocument doc = store.find(NODES, id);
        assertNotNull(doc);
        assertThat(doc.getLocalBranchCommits(), is(not(empty())));
        for (int i = 0; i < 5; i++) {
            builder = ns.getRoot().builder();
            builder.child("foo").setProperty("p", "value-" + i);
            merge(ns, builder);
        }
        ns.runBackgroundOperations();
        doc = store.find(NODES, id);
        for (UpdateOp op : SplitOperations.forDocument(doc, ns, ns.getHeadRevision(), NO_BINARY, 5)) {
            store.createOrUpdate(NODES, op);
        }
        doc = store.find(NODES, id);
        // must have a previous document now
        assertThat(doc.getPreviousRanges().keySet(), hasSize(1));
        // branch commit entry moved to previous document
        assertThat(doc.getLocalBranchCommits(), is(empty()));
        NodeDocument prev = doc.getAllPreviousDocs().next();
        assertThat(prev.getLocalBranchCommits(), is(not(empty())));
    }

    // Without any branch commits, all previous documents created by the
    // split must be typed DEFAULT_NO_BRANCH.
    @Test
    public void splitDefaultNoBranch() throws Exception {
        DocumentStore store = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        NodeBuilder builder = ns.getRoot().builder();
        builder.child("foo").child("bar");
        merge(ns, builder);
        for (int i = 0; i < 5; i++) {
            builder = ns.getRoot().builder();
            builder.child("foo").setProperty("p", "value-" + i);
            merge(ns, builder);
        }
        ns.runBackgroundOperations();
        String id = Utils.getIdFromPath("/foo");
        NodeDocument doc = store.find(NODES, id);
        assertNotNull(doc);
        for (UpdateOp op : SplitOperations.forDocument(doc, ns, ns.getHeadRevision(), NO_BINARY, 5)) {
            store.createOrUpdate(NODES, op);
        }
        doc = store.find(NODES, id);
        // must have a previous document now
        assertThat(doc.getPreviousRanges().keySet(), is(not(empty())));
        Iterator<NodeDocument> it = doc.getAllPreviousDocs();
        while (it.hasNext()) {
            assertEquals(SplitDocType.DEFAULT_NO_BRANCH,
it.next().getSplitDocType()); } } private static class TestRevisionContext implements RevisionContext { private final RevisionContext rc; TestRevisionContext(RevisionContext rc) { this.rc = rc; } @Override public UnmergedBranches getBranches() { return rc.getBranches(); } @Override public UnsavedModifications getPendingModifications() { return rc.getPendingModifications(); } @Override public int getClusterId() { return rc.getClusterId(); } @NotNull @Override public RevisionVector getHeadRevision() { try { Thread.sleep((long) (Math.random() * 100)); } catch (InterruptedException e) { // ignore } return rc.getHeadRevision(); } @NotNull @Override public Revision newRevision() { return rc.newRevision(); } @NotNull @Override public Clock getClock() { return rc.getClock(); } @Override public String getCommitValue(@NotNull Revision changeRevision, @NotNull NodeDocument doc) { return rc.getCommitValue(changeRevision, doc); } } private static NodeState merge(NodeStore store, NodeBuilder root) throws CommitFailedException { return store.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY); } private void syncMKs(List<DocumentMK> mks, int idx) { mks.get(idx).runBackgroundOperations(); for (int i = 0; i < mks.size(); i++) { if (idx != i) { mks.get(i).runBackgroundOperations(); } } } private byte[] randomBytes(int num) { Random random = new Random(42); byte[] data = new byte[num]; random.nextBytes(data); return data; } }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2013.09.16 at 10:18:13 AM CEST // package org.fiteagle.interactors.sfa.rspec.manifest; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlElementDecl; import javax.xml.bind.annotation.XmlRegistry; import javax.xml.namespace.QName; /** * This object contains factory methods for each * Java content interface and Java element interface * generated in the net.geni.resources.rspec._3 package. * <p>An ObjectFactory allows you to programatically * construct new instances of the Java representation * for XML content. The Java representation of XML * content can consist of schema derived interfaces * and classes representing the binding of schema * type definitions, element declarations and model * groups. Factory methods for each of these are * provided in this class. 
 *
 * NOTE(review): JAXB-generated code — any manual change here is lost when the
 * source schema is recompiled. Do not hand-edit; regenerate instead.
 */
@XmlRegistry
public class ObjectFactory {

    // Element QNames for the GENI RSpec v3 namespace, one per global
    // element declaration in the schema.
    private final static QName _ComponentHop_QNAME = new QName("http://www.geni.net/resources/rspec/3", "component_hop");
    private final static QName _Login_QNAME = new QName("http://www.geni.net/resources/rspec/3", "login");
    private final static QName _Rspec_QNAME = new QName("http://www.geni.net/resources/rspec/3", "rspec");
    private final static QName _Node_QNAME = new QName("http://www.geni.net/resources/rspec/3", "node");
    private final static QName _Ip_QNAME = new QName("http://www.geni.net/resources/rspec/3", "ip");
    private final static QName _Interface_QNAME = new QName("http://www.geni.net/resources/rspec/3", "interface");
    private final static QName _Property_QNAME = new QName("http://www.geni.net/resources/rspec/3", "property");
    private final static QName _Relation_QNAME = new QName("http://www.geni.net/resources/rspec/3", "relation");
    private final static QName _Link_QNAME = new QName("http://www.geni.net/resources/rspec/3", "link");
    private final static QName _HardwareType_QNAME = new QName("http://www.geni.net/resources/rspec/3", "hardware_type");
    private final static QName _Location_QNAME = new QName("http://www.geni.net/resources/rspec/3", "location");
    private final static QName _Services_QNAME = new QName("http://www.geni.net/resources/rspec/3", "services");
    private final static QName _Execute_QNAME = new QName("http://www.geni.net/resources/rspec/3", "execute");
    private final static QName _DiskImage_QNAME = new QName("http://www.geni.net/resources/rspec/3", "disk_image");
    private final static QName _Install_QNAME = new QName("http://www.geni.net/resources/rspec/3", "install");
    // shared by the scoped interface_ref declarations below
    private final static QName _ComponentHopContentsInterfaceRef_QNAME = new QName("http://www.geni.net/resources/rspec/3", "interface_ref");
    private final static QName _NodeContentsSliverType_QNAME = new QName("http://www.geni.net/resources/rspec/3", "sliver_type");

    /**
     * Create a new ObjectFactory that can be used to create new instances of
     * schema derived classes for package: net.geni.resources.rspec._3
     */
    public ObjectFactory() {
    }

    /** Create an instance of {@link ComponentHopContents }. */
    public ComponentHopContents createComponentHopContents() {
        return new ComponentHopContents();
    }

    /** Create an instance of {@link NodeContents }. */
    public NodeContents createNodeContents() {
        return new NodeContents();
    }

    /** Create an instance of {@link InstallServiceContents }. */
    public InstallServiceContents createInstallServiceContents() {
        return new InstallServiceContents();
    }

    /** Create an instance of {@link DiskImageContents }. */
    public DiskImageContents createDiskImageContents() {
        return new DiskImageContents();
    }

    /** Create an instance of {@link ServiceContents }. */
    public ServiceContents createServiceContents() {
        return new ServiceContents();
    }

    /** Create an instance of {@link ExecuteServiceContents }. */
    public ExecuteServiceContents createExecuteServiceContents() {
        return new ExecuteServiceContents();
    }

    /** Create an instance of {@link Host }. */
    public Host createHost() {
        return new Host();
    }

    /** Create an instance of {@link HardwareTypeContents }. */
    public HardwareTypeContents createHardwareTypeContents() {
        return new HardwareTypeContents();
    }

    /** Create an instance of {@link LocationContents }. */
    public LocationContents createLocationContents() {
        return new LocationContents();
    }

    /** Create an instance of {@link LinkContents }. */
    public LinkContents createLinkContents() {
        return new LinkContents();
    }

    /** Create an instance of {@link RelationContents }. */
    public RelationContents createRelationContents() {
        return new RelationContents();
    }

    /** Create an instance of {@link LinkPropertyContents }. */
    public LinkPropertyContents createLinkPropertyContents() {
        return new LinkPropertyContents();
    }

    /** Create an instance of {@link InterfaceContents }. */
    public InterfaceContents createInterfaceContents() {
        return new InterfaceContents();
    }

    /** Create an instance of {@link IpContents }. */
    public IpContents createIpContents() {
        return new IpContents();
    }

    /** Create an instance of {@link RSpecContents }. */
    public RSpecContents createRSpecContents() {
        return new RSpecContents();
    }

    /** Create an instance of {@link ComponentManager }. */
    public ComponentManager createComponentManager() {
        return new ComponentManager();
    }

    /** Create an instance of {@link LoginServiceContents }. */
    public LoginServiceContents createLoginServiceContents() {
        return new LoginServiceContents();
    }

    /** Create an instance of {@link LinkClass }. */
    public LinkClass createLinkClass() {
        return new LinkClass();
    }

    /** Create an instance of {@link User }. */
    public User createUser() {
        return new User();
    }

    /** Create an instance of {@link LinkType }. */
    public LinkType createLinkType() {
        return new LinkType();
    }

    /** Create an instance of {@link InterfaceRefContents }. */
    public InterfaceRefContents createInterfaceRefContents() {
        return new InterfaceRefContents();
    }

    /** Create an instance of {@link ComponentHopContents.InterfaceRef }. */
    public ComponentHopContents.InterfaceRef createComponentHopContentsInterfaceRef() {
        return new ComponentHopContents.InterfaceRef();
    }

    /** Create an instance of {@link NodeContents.SliverType }. */
    public NodeContents.SliverType createNodeContentsSliverType() {
        return new NodeContents.SliverType();
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link ComponentHopContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "component_hop")
    public JAXBElement<ComponentHopContents> createComponentHop(ComponentHopContents value) {
        return new JAXBElement<ComponentHopContents>(_ComponentHop_QNAME, ComponentHopContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link LoginServiceContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "login")
    public JAXBElement<LoginServiceContents> createLogin(LoginServiceContents value) {
        return new JAXBElement<LoginServiceContents>(_Login_QNAME, LoginServiceContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link RSpecContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "rspec")
    public JAXBElement<RSpecContents> createRspec(RSpecContents value) {
        return new JAXBElement<RSpecContents>(_Rspec_QNAME, RSpecContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link NodeContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "node")
    public JAXBElement<NodeContents> createNode(NodeContents value) {
        return new JAXBElement<NodeContents>(_Node_QNAME, NodeContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link IpContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "ip")
    public JAXBElement<IpContents> createIp(IpContents value) {
        return new JAXBElement<IpContents>(_Ip_QNAME, IpContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link InterfaceContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "interface")
    public JAXBElement<InterfaceContents> createInterface(InterfaceContents value) {
        return new JAXBElement<InterfaceContents>(_Interface_QNAME, InterfaceContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link LinkPropertyContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "property")
    public JAXBElement<LinkPropertyContents> createProperty(LinkPropertyContents value) {
        return new JAXBElement<LinkPropertyContents>(_Property_QNAME, LinkPropertyContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link RelationContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "relation")
    public JAXBElement<RelationContents> createRelation(RelationContents value) {
        return new JAXBElement<RelationContents>(_Relation_QNAME, RelationContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link LinkContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "link")
    public JAXBElement<LinkContents> createLink(LinkContents value) {
        return new JAXBElement<LinkContents>(_Link_QNAME, LinkContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HardwareTypeContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "hardware_type")
    public JAXBElement<HardwareTypeContents> createHardwareType(HardwareTypeContents value) {
        return new JAXBElement<HardwareTypeContents>(_HardwareType_QNAME, HardwareTypeContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link LocationContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "location")
    public JAXBElement<LocationContents> createLocation(LocationContents value) {
        return new JAXBElement<LocationContents>(_Location_QNAME, LocationContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link ServiceContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "services")
    public JAXBElement<ServiceContents> createServices(ServiceContents value) {
        return new JAXBElement<ServiceContents>(_Services_QNAME, ServiceContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link ExecuteServiceContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "execute")
    public JAXBElement<ExecuteServiceContents> createExecute(ExecuteServiceContents value) {
        return new JAXBElement<ExecuteServiceContents>(_Execute_QNAME, ExecuteServiceContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link DiskImageContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "disk_image")
    public JAXBElement<DiskImageContents> createDiskImage(DiskImageContents value) {
        return new JAXBElement<DiskImageContents>(_DiskImage_QNAME, DiskImageContents.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link InstallServiceContents }{@code >}. */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "install")
    public JAXBElement<InstallServiceContents> createInstall(InstallServiceContents value) {
        return new JAXBElement<InstallServiceContents>(_Install_QNAME, InstallServiceContents.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ComponentHopContents.InterfaceRef }{@code >},
     * scoped to {@link ComponentHopContents }.
     */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "interface_ref", scope = ComponentHopContents.class)
    public JAXBElement<ComponentHopContents.InterfaceRef> createComponentHopContentsInterfaceRef(ComponentHopContents.InterfaceRef value) {
        return new JAXBElement<ComponentHopContents.InterfaceRef>(_ComponentHopContentsInterfaceRef_QNAME, ComponentHopContents.InterfaceRef.class, ComponentHopContents.class, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link InterfaceRefContents }{@code >},
     * scoped to {@link LinkContents } (same element QName as above).
     */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "interface_ref", scope = LinkContents.class)
    public JAXBElement<InterfaceRefContents> createLinkContentsInterfaceRef(InterfaceRefContents value) {
        return new JAXBElement<InterfaceRefContents>(_ComponentHopContentsInterfaceRef_QNAME, InterfaceRefContents.class, LinkContents.class, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link NodeContents.SliverType }{@code >},
     * scoped to {@link NodeContents }.
     */
    @XmlElementDecl(namespace = "http://www.geni.net/resources/rspec/3", name = "sliver_type", scope = NodeContents.class)
    public JAXBElement<NodeContents.SliverType> createNodeContentsSliverType(NodeContents.SliverType value) {
        return new JAXBElement<NodeContents.SliverType>(_NodeContentsSliverType_QNAME, NodeContents.SliverType.class, NodeContents.class, value);
    }

}
/*=========================================================================== * Licensed Materials - Property of IBM * "Restricted Materials of IBM" * * IBM SDK, Java(tm) Technology Edition, v8 * (C) Copyright IBM Corp. 2007, 2013. All Rights Reserved * * US Government Users Restricted Rights - Use, duplication or disclosure * restricted by GSA ADP Schedule Contract with IBM Corp. *=========================================================================== */ /* * Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package javax.xml.bind; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.TimeZone; import javax.xml.namespace.QName; import javax.xml.namespace.NamespaceContext; import javax.xml.datatype.DatatypeFactory; import javax.xml.datatype.DatatypeConfigurationException; /** * This class is the JAXB RI's default implementation of the * {@link DatatypeConverterInterface}. * * <p> * When client applications specify the use of the static print/parse * methods in {@link DatatypeConverter}, it will delegate * to this class. * * <p> * This class is responsible for whitespace normalization. * * @author <ul><li>Ryan Shoemaker, Sun Microsystems, Inc.</li></ul> * @since JAXB2.1 */ final class DatatypeConverterImpl implements DatatypeConverterInterface { /** * To avoid re-creating instances, we cache one instance. 
     */
    public static final DatatypeConverterInterface theInstance = new DatatypeConverterImpl();

    protected DatatypeConverterImpl() {
    }

    public String parseString(String lexicalXSDString) {
        return lexicalXSDString;
    }

    public BigInteger parseInteger(String lexicalXSDInteger) {
        return _parseInteger(lexicalXSDInteger);
    }

    public static BigInteger _parseInteger(CharSequence s) {
        return new BigInteger(removeOptionalPlus(WhiteSpaceProcessor.trim(s)).toString());
    }

    public String printInteger(BigInteger val) {
        return _printInteger(val);
    }

    public static String _printInteger(BigInteger val) {
        return val.toString();
    }

    public int parseInt(String s) {
        return _parseInt(s);
    }

    /**
     * Faster but less robust String->int conversion.
     *
     * Note that:
     * <ol>
     * <li>XML Schema allows '+', but {@link Integer#valueOf(String)} is not.
     * <li>XML Schema allows leading and trailing (but not in-between) whitespaces.
     * {@link Integer#valueOf(String)} doesn't allow any.
     * </ol>
     *
     * NOTE(review): also silently accepts malformed inputs such as "1-2"
     * (signs and digits may interleave) and overflows without error.
     */
    public static int _parseInt(CharSequence s) {
        int len = s.length();
        int sign = 1;
        int r = 0;
        for (int i = 0; i < len; i++) {
            char ch = s.charAt(i);
            if (WhiteSpaceProcessor.isWhiteSpace(ch)) {
                // skip whitespace
            } else if ('0' <= ch && ch <= '9') {
                r = r * 10 + (ch - '0');
            } else if (ch == '-') {
                sign = -1;
            } else if (ch == '+') {
                // noop
            } else {
                throw new NumberFormatException("Not a number: " + s);
            }
        }
        return r * sign;
    }

    public long parseLong(String lexicalXSLong) {
        return _parseLong(lexicalXSLong);
    }

    public static long _parseLong(CharSequence s) {
        return Long.valueOf(removeOptionalPlus(WhiteSpaceProcessor.trim(s)).toString());
    }

    public short parseShort(String lexicalXSDShort) {
        return _parseShort(lexicalXSDShort);
    }

    public static short _parseShort(CharSequence s) {
        // narrowing cast: out-of-range values wrap silently
        return (short) _parseInt(s);
    }

    public String printShort(short val) {
        return _printShort(val);
    }

    public static String _printShort(short val) {
        return String.valueOf(val);
    }

    public BigDecimal parseDecimal(String content) {
        return _parseDecimal(content);
    }

    public static BigDecimal _parseDecimal(CharSequence content) {
        content = WhiteSpaceProcessor.trim(content);
        if (content.length() <= 0) {
            // empty input yields null rather than an exception
            return null;
        }
        return new BigDecimal(content.toString());
        // from purely XML Schema perspective,
        // this implementation has a problem, since
        // in xs:decimal "1.0" and "1" is equal whereas the above
        // code will return different values for those two forms.
        //
        // the code was originally using com.sun.msv.datatype.xsd.NumberType.load,
        // but a profiling showed that the process of normalizing "1.0" into "1"
        // could take non-trivial time.
        //
        // also, from the user's point of view, one might be surprised if
        // 1 (not 1.0) is returned from "1.000"
    }

    public float parseFloat(String lexicalXSDFloat) {
        return _parseFloat(lexicalXSDFloat);
    }

    public static float _parseFloat(CharSequence _val) {
        String s = WhiteSpaceProcessor.trim(_val).toString();
        /* Incompatibilities of XML Schema's float "xfloat" and Java's float "jfloat"
         *
         * jfloat.valueOf ignores leading and trailing whitespaces,
         * whereas this is not allowed in xfloat.
         * jfloat.valueOf allows "float type suffix" (f, F) to be appended after
         * float literal (e.g., 1.52e-2f), whereas this is not the case of xfloat.
         *
         * gray zone ---------
         * jfloat allows ".523". And there is no clear statement that mentions
         * this case in xfloat. Although probably this is allowed.
         */
        if (s.equals("NaN")) {
            return Float.NaN;
        }
        if (s.equals("INF")) {
            return Float.POSITIVE_INFINITY;
        }
        if (s.equals("-INF")) {
            return Float.NEGATIVE_INFINITY;
        }
        if (s.length() == 0
                || !isDigitOrPeriodOrSign(s.charAt(0))
                || !isDigitOrPeriodOrSign(s.charAt(s.length() - 1))) {
            throw new NumberFormatException();
        }
        // this screening process is necessary due to the wobble of Float.valueOf method
        return Float.parseFloat(s);
    }

    public String printFloat(float v) {
        return _printFloat(v);
    }

    public static String _printFloat(float v) {
        if (Float.isNaN(v)) {
            return "NaN";
        }
        if (v == Float.POSITIVE_INFINITY) {
            return "INF";
        }
        if (v == Float.NEGATIVE_INFINITY) {
            return "-INF";
        }
        return String.valueOf(v);
    }

    public double parseDouble(String lexicalXSDDouble) {
        return _parseDouble(lexicalXSDDouble);
    }

    public static double _parseDouble(CharSequence _val) {
        String val = WhiteSpaceProcessor.trim(_val).toString();
        if (val.equals("NaN")) {
            return Double.NaN;
        }
        if (val.equals("INF")) {
            return Double.POSITIVE_INFINITY;
        }
        if (val.equals("-INF")) {
            return Double.NEGATIVE_INFINITY;
        }
        if (val.length() == 0
                || !isDigitOrPeriodOrSign(val.charAt(0))
                || !isDigitOrPeriodOrSign(val.charAt(val.length() - 1))) {
            throw new NumberFormatException(val);
        }
        // this screening process is necessary due to the wobble of Double.valueOf method
        return Double.parseDouble(val);
    }

    public boolean parseBoolean(String lexicalXSDBoolean) {
        Boolean b = _parseBoolean(lexicalXSDBoolean);
        // null (invalid lexical form) is mapped to false here
        return (b == null) ? false : b.booleanValue();
    }

    /**
     * Parses the xs:boolean lexical forms "true"/"false"/"1"/"0" with optional
     * surrounding whitespace.
     *
     * NOTE(review): on a partial match (e.g. "tr") this returns boxed
     * {@code false} rather than {@code null}, while trailing garbage after a
     * full match returns {@code null} — the two failure modes differ.
     */
    public static Boolean _parseBoolean(CharSequence literal) {
        if (literal == null) {
            return null;
        }
        int i = 0;
        int len = literal.length();
        char ch;
        boolean value = false;
        if (literal.length() <= 0) {
            return null;
        }
        // skip leading whitespace
        do {
            ch = literal.charAt(i++);
        } while (WhiteSpaceProcessor.isWhiteSpace(ch) && i < len);
        int strIndex = 0;
        switch (ch) {
            case '1':
                value = true;
                break;
            case '0':
                value = false;
                break;
            case 't':
                // match the remaining "rue"
                String strTrue = "rue";
                do {
                    ch = literal.charAt(i++);
                } while ((strTrue.charAt(strIndex++) == ch) && i < len && strIndex < 3);
                if (strIndex == 3) {
                    value = true;
                } else {
                    return false;
                }
                // throw new IllegalArgumentException("String \"" + literal + "\" is not valid boolean value.");
                break;
            case 'f':
                // match the remaining "alse"
                String strFalse = "alse";
                do {
                    ch = literal.charAt(i++);
                } while ((strFalse.charAt(strIndex++) == ch) && i < len && strIndex < 4);
                if (strIndex == 4) {
                    value = false;
                } else {
                    return false;
                }
                // throw new IllegalArgumentException("String \"" + literal + "\" is not valid boolean value.");
                break;
        }
        // consume trailing whitespace; anything else invalidates the value
        if (i < len) {
            do {
                ch = literal.charAt(i++);
            } while (WhiteSpaceProcessor.isWhiteSpace(ch) && i < len);
        }
        if (i == len) {
            return value;
        } else {
            return null;
        }
        // throw new IllegalArgumentException("String \"" + literal + "\" is not valid boolean value.");
    }

    public String printBoolean(boolean val) {
        return val ? "true" : "false";
    }

    public static String _printBoolean(boolean val) {
        return val ? "true" : "false";
    }

    public byte parseByte(String lexicalXSDByte) {
        return _parseByte(lexicalXSDByte);
    }

    public static byte _parseByte(CharSequence literal) {
        // narrowing cast: out-of-range values wrap silently
        return (byte) _parseInt(literal);
    }

    public String printByte(byte val) {
        return _printByte(val);
    }

    public static String _printByte(byte val) {
        return String.valueOf(val);
    }

    public QName parseQName(String lexicalXSDQName, NamespaceContext nsc) {
        return _parseQName(lexicalXSDQName, nsc);
    }

    /**
     * @return null if fails to convert.
     */
    public static QName _parseQName(CharSequence text, NamespaceContext nsc) {
        int length = text.length();

        // trim whitespace
        int start = 0;
        while (start < length && WhiteSpaceProcessor.isWhiteSpace(text.charAt(start))) {
            start++;
        }
        int end = length;
        while (end > start && WhiteSpaceProcessor.isWhiteSpace(text.charAt(end - 1))) {
            end--;
        }
        if (end == start) {
            throw new IllegalArgumentException("input is empty");
        }

        String uri;
        String localPart;
        String prefix;

        // search ':'
        int idx = start + 1; // no point in searching the first char. that's not valid.
        while (idx < end && text.charAt(idx) != ':') {
            idx++;
        }

        if (idx == end) {
            // no prefix: resolve against the default namespace
            uri = nsc.getNamespaceURI("");
            localPart = text.subSequence(start, end).toString();
            prefix = "";
        } else {
            // Prefix exists, check everything
            prefix = text.subSequence(start, idx).toString();
            localPart = text.subSequence(idx + 1, end).toString();
            uri = nsc.getNamespaceURI(prefix);
            // uri can never be null according to javadoc,
            // but some users reported that there are implementations that return null.
            if (uri == null || uri.length() == 0) // crap. the NamespaceContext interface is broken.
            // error: unbound prefix
            {
                throw new IllegalArgumentException("prefix " + prefix + " is not bound to a namespace");
            }
        }

        return new QName(uri, localPart, prefix);
    }

    public Calendar parseDateTime(String lexicalXSDDateTime) {
        return _parseDateTime(lexicalXSDDateTime);
    }

    public static GregorianCalendar _parseDateTime(CharSequence s) {
        String val = WhiteSpaceProcessor.trim(s).toString();
        return datatypeFactory.newXMLGregorianCalendar(val).toGregorianCalendar();
    }

    public String printDateTime(Calendar val) {
        return _printDateTime(val);
    }

    public static String _printDateTime(Calendar val) {
        return CalendarFormatter.doFormat("%Y-%M-%DT%h:%m:%s%z", val);
    }

    public byte[] parseBase64Binary(String lexicalXSDBase64Binary) {
        return _parseBase64Binary(lexicalXSDBase64Binary);
    }

    public byte[] parseHexBinary(String s) {
        final int len = s.length();

        // "111" is not a valid hex encoding.
        if (len % 2 != 0) {
            throw new IllegalArgumentException("hexBinary needs to be even-length: " + s);
        }

        byte[] out = new byte[len / 2];

        for (int i = 0; i < len; i += 2) {
            int h = hexToBin(s.charAt(i));
            int l = hexToBin(s.charAt(i + 1));
            if (h == -1 || l == -1) {
                throw new IllegalArgumentException("contains illegal character for hexBinary: " + s);
            }

            out[i / 2] = (byte) (h * 16 + l);
        }

        return out;
    }

    /** Maps one hex digit (both cases accepted) to its value, or -1 if invalid. */
    private static int hexToBin(char ch) {
        if ('0' <= ch && ch <= '9') {
            return ch - '0';
        }
        if ('A' <= ch && ch <= 'F') {
            return ch - 'A' + 10;
        }
        if ('a' <= ch && ch <= 'f') {
            return ch - 'a' + 10;
        }
        return -1;
    }

    private static final char[] hexCode = "0123456789ABCDEF".toCharArray();

    public String printHexBinary(byte[] data) {
        StringBuilder r = new StringBuilder(data.length * 2);
        for (byte b : data) {
            r.append(hexCode[(b >> 4) & 0xF]);
            r.append(hexCode[(b & 0xF)]);
        }
        return r.toString();
    }

    public long parseUnsignedInt(String lexicalXSDUnsignedInt) {
        return _parseLong(lexicalXSDUnsignedInt);
    }

    public String printUnsignedInt(long val) {
        return _printLong(val);
    }

    public int parseUnsignedShort(String lexicalXSDUnsignedShort) {
        return _parseInt(lexicalXSDUnsignedShort);
    }

    public Calendar parseTime(String lexicalXSDTime) {
        return datatypeFactory.newXMLGregorianCalendar(lexicalXSDTime).toGregorianCalendar();
    }

    public String printTime(Calendar val) {
        return CalendarFormatter.doFormat("%h:%m:%s%z", val);
    }

    public Calendar parseDate(String lexicalXSDDate) {
        return datatypeFactory.newXMLGregorianCalendar(lexicalXSDDate).toGregorianCalendar();
    }

    public String printDate(Calendar val) {
        return _printDate(val);
    }

    public static String _printDate(Calendar val) {
        return CalendarFormatter.doFormat((new StringBuilder("%Y-%M-%D").append("%z")).toString(), val);
    }

    public String parseAnySimpleType(String lexicalXSDAnySimpleType) {
        return lexicalXSDAnySimpleType;
        // return (String)SimpleURType.theInstance._createValue( lexicalXSDAnySimpleType, null );
    }

    public String printString(String val) {
        // return StringType.theInstance.convertToLexicalValue( val, null );
        return val;
    }

    public String printInt(int val) {
        return _printInt(val);
    }

    public static String _printInt(int val) {
        return String.valueOf(val);
    }

    public String printLong(long val) {
        return _printLong(val);
    }

    public static String _printLong(long val) {
        return String.valueOf(val);
    }

    public String printDecimal(BigDecimal val) {
        return _printDecimal(val);
    }

    public static String _printDecimal(BigDecimal val) {
        // toPlainString avoids scientific notation, as required by xs:decimal
        return val.toPlainString();
    }

    public String printDouble(double v) {
        return _printDouble(v);
    }

    public static String _printDouble(double v) {
        if (Double.isNaN(v)) {
            return "NaN";
        }
        if (v == Double.POSITIVE_INFINITY) {
            return "INF";
        }
        if (v == Double.NEGATIVE_INFINITY) {
            return "-INF";
        }
        return String.valueOf(v);
    }

    public String printQName(QName val, NamespaceContext nsc) {
        return _printQName(val, nsc);
    }

    public static String _printQName(QName val, NamespaceContext nsc) {
        // Double-check
        String qname;
        String prefix = nsc.getPrefix(val.getNamespaceURI());
        String localPart = val.getLocalPart();

        if (prefix == null || prefix.length() == 0) { // be defensive
            qname = localPart;
        } else {
            qname = prefix + ':' + localPart;
        }

        return qname;
    }

    public String printBase64Binary(byte[] val) {
        return _printBase64Binary(val);
    }

    public String printUnsignedShort(int val) {
        return String.valueOf(val);
    }

    public String printAnySimpleType(String val) {
        return val;
    }

    /**
     * Just return the string passed as a parameter but
     * installs an instance of this class as the DatatypeConverter
     * implementation. Used from static fixed value initializers.
     */
    public static String installHook(String s) {
        DatatypeConverter.setDatatypeConverter(theInstance);
        return s;
    }

    // base64 decoder
    // decodeMap maps an ASCII char to its 6-bit base64 value, PADDING for '=',
    // and -1 for characters outside the base64 alphabet
    private static final byte[] decodeMap = initDecodeMap();
    private static final byte PADDING = 127;

    private static byte[] initDecodeMap() {
        byte[] map = new byte[128];
        int i;
        for (i = 0; i < 128; i++) {
            map[i] = -1;
        }

        for (i = 'A'; i <= 'Z'; i++) {
            map[i] = (byte) (i - 'A');
        }
        for (i = 'a'; i <= 'z'; i++) {
            map[i] = (byte) (i - 'a' + 26);
        }
        for (i = '0'; i <= '9'; i++) {
            map[i] = (byte) (i - '0' + 52);
        }
        map['+'] = 62;
        map['/'] = 63;
        map['='] = PADDING;
        return map;
    }

    /**
     * computes the length of binary data speculatively.
     *
     * <p>
     * Our requirement is to create byte[] of the exact length to store the binary data.
     * If we do this in a straight-forward way, it takes two passes over the data.
     * Experiments show that this is a non-trivial overhead (35% or so is spent on
     * the first pass in calculating the length.)
     *
     * <p>
     * So the approach here is that we compute the length speculatively, without looking
     * at the whole contents. The obtained speculative value is never less than the
     * actual length of the binary data, but it may be bigger. So if the speculation
     * goes wrong, we'll pay the cost of reallocation and buffer copying.
     *
     * <p>
     * If the base64 text is tightly packed with no indentation nor illegal char
     * (like what most web services produce), then the speculation of this method
     * will be correct, so we get the performance benefit.
     */
    private static int guessLength(String text) {
        final int len = text.length();

        // compute the tail '=' chars
        int j = len - 1;
        for (; j >= 0; j--) {
            byte code = decodeMap[text.charAt(j)];
            if (code == PADDING) {
                continue;
            }
            if (code == -1) // most likely this base64 text is indented. go with the upper bound
            {
                return text.length() / 4 * 3;
            }
            break;
        }

        j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
        int padSize = len - j;
        if (padSize > 2) // something is wrong with base64. be safe and go with the upper bound
        {
            return text.length() / 4 * 3;
        }

        // so far this base64 looks like it's unindented tightly packed base64.
        // take a chance and create an array with the expected size
        return text.length() / 4 * 3 - padSize;
    }

    /**
     * @param text
     *      base64Binary data is likely to be long, and decoding requires
     *      each character to be accessed twice (once for counting length, another
     *      for decoding.)
     *
     *      A benchmark showed that taking {@link String} is faster, presumably
     *      because JIT can inline a lot of string access (with data of 1K chars,
     *      it was twice as fast)
     */
    public static byte[] _parseBase64Binary(String text) {
        final int buflen = guessLength(text);
        final byte[] out = new byte[buflen];
        int o = 0;

        final int len = text.length();
        int i;

        final byte[] quadruplet = new byte[4];
        int q = 0;

        // convert each quadruplet to three bytes.
        for (i = 0; i < len; i++) {
            char ch = text.charAt(i);
            byte v = decodeMap[ch];

            if (v != -1) {
                quadruplet[q++] = v;
            }

            if (q == 4) {
                // quadruplet is now filled.
out[o++] = (byte) ((quadruplet[0] << 2) | (quadruplet[1] >> 4)); if (quadruplet[2] != PADDING) { out[o++] = (byte) ((quadruplet[1] << 4) | (quadruplet[2] >> 2)); } if (quadruplet[3] != PADDING) { out[o++] = (byte) ((quadruplet[2] << 6) | (quadruplet[3])); } q = 0; } } if (buflen == o) // speculation worked out to be OK { return out; } // we overestimated, so need to create a new buffer byte[] nb = new byte[o]; System.arraycopy(out, 0, nb, 0, o); return nb; } private static final char[] encodeMap = initEncodeMap(); private static char[] initEncodeMap() { char[] map = new char[64]; int i; for (i = 0; i < 26; i++) { map[i] = (char) ('A' + i); } for (i = 26; i < 52; i++) { map[i] = (char) ('a' + (i - 26)); } for (i = 52; i < 62; i++) { map[i] = (char) ('0' + (i - 52)); } map[62] = '+'; map[63] = '/'; return map; } public static char encode(int i) { return encodeMap[i & 0x3F]; } public static byte encodeByte(int i) { return (byte) encodeMap[i & 0x3F]; } public static String _printBase64Binary(byte[] input) { return _printBase64Binary(input, 0, input.length); } public static String _printBase64Binary(byte[] input, int offset, int len) { char[] buf = new char[((len + 2) / 3) * 4]; int ptr = _printBase64Binary(input, offset, len, buf, 0); assert ptr == buf.length; return new String(buf); } /** * Encodes a byte array into a char array by doing base64 encoding. * * The caller must supply a big enough buffer. * * @return * the value of {@code ptr+((len+2)/3)*4}, which is the new offset * in the output buffer where the further bytes should be placed. 
 */
public static int _printBase64Binary(byte[] input, int offset, int len, char[] buf, int ptr) {
    // encode elements until only 1 or 2 elements are left to encode
    int remaining = len;
    int i;
    for (i = offset; remaining >= 3; remaining -= 3, i += 3) {
        // 3 input bytes -> 4 output chars of 6 bits each
        buf[ptr++] = encode(input[i] >> 2);
        buf[ptr++] = encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
        buf[ptr++] = encode(((input[i + 1] & 0xF) << 2) | ((input[i + 2] >> 6) & 0x3));
        buf[ptr++] = encode(input[i + 2] & 0x3F);
    }
    // encode when exactly 1 element (left) to encode: pad with "=="
    if (remaining == 1) {
        buf[ptr++] = encode(input[i] >> 2);
        buf[ptr++] = encode(((input[i]) & 0x3) << 4);
        buf[ptr++] = '=';
        buf[ptr++] = '=';
    }
    // encode when exactly 2 elements (left) to encode: pad with "="
    if (remaining == 2) {
        buf[ptr++] = encode(input[i] >> 2);
        buf[ptr++] = encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
        buf[ptr++] = encode((input[i + 1] & 0xF) << 2);
        buf[ptr++] = '=';
    }
    return ptr;
}

/**
 * Encodes a byte array into another byte array by first doing base64 encoding
 * then encoding the result in ASCII.
 *
 * The caller must supply a big enough buffer.
 *
 * @return
 *      the value of {@code ptr+((len+2)/3)*4}, which is the new offset
 *      in the output buffer where the further bytes should be placed.
 */
public static int _printBase64Binary(byte[] input, int offset, int len, byte[] out, int ptr) {
    byte[] buf = out;
    int remaining = len;
    int i;
    // encode full 3-byte groups into 4 ASCII bytes each
    for (i = offset; remaining >= 3; remaining -= 3, i += 3) {
        buf[ptr++] = encodeByte(input[i] >> 2);
        buf[ptr++] = encodeByte(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
        buf[ptr++] = encodeByte(((input[i + 1] & 0xF) << 2) | ((input[i + 2] >> 6) & 0x3));
        buf[ptr++] = encodeByte(input[i + 2] & 0x3F);
    }
    // encode when exactly 1 element (left) to encode
    if (remaining == 1) {
        buf[ptr++] = encodeByte(input[i] >> 2);
        buf[ptr++] = encodeByte(((input[i]) & 0x3) << 4);
        buf[ptr++] = '=';
        buf[ptr++] = '=';
    }
    // encode when exactly 2 elements (left) to encode
    if (remaining == 2) {
        buf[ptr++] = encodeByte(input[i] >> 2);
        buf[ptr++] = encodeByte(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
        buf[ptr++] = encodeByte((input[i + 1] & 0xF) << 2);
        buf[ptr++] = '=';
    }
    return ptr;
}

/**
 * Strips a single leading '+' sign from a numeric lexical form.
 * Throws NumberFormatException if the '+' is not followed by a digit or '.'.
 */
private static CharSequence removeOptionalPlus(CharSequence s) {
    int len = s.length();
    if (len <= 1 || s.charAt(0) != '+') {
        return s;
    }
    s = s.subSequence(1, len);
    char ch = s.charAt(0);
    if ('0' <= ch && ch <= '9') {
        return s;
    }
    if ('.' == ch) {
        return s;
    }
    throw new NumberFormatException();
}

/** True for characters that may appear in a numeric lexical form. */
private static boolean isDigitOrPeriodOrSign(char ch) {
    if ('0' <= ch && ch <= '9') {
        return true;
    }
    if (ch == '+' || ch == '-' || ch == '.') {
        return true;
    }
    return false;
}

// Shared factory for XMLGregorianCalendar parsing; a missing implementation is
// an unrecoverable deployment problem, hence Error rather than an exception.
private static final DatatypeFactory datatypeFactory;

static {
    try {
        datatypeFactory = DatatypeFactory.newInstance();
    } catch (DatatypeConfigurationException e) {
        throw new Error(e);
    }
}

/**
 * Formats a {@link Calendar} using a private printf-like pattern language:
 * %Y year, %M month, %D day, %h hour, %m minute, %s second, %z time zone.
 */
private static final class CalendarFormatter {

    public static String doFormat(String format, Calendar cal) throws IllegalArgumentException {
        int fidx = 0;
        int flen = format.length();
        StringBuilder buf = new StringBuilder();

        while (fidx < flen) {
            char fch = format.charAt(fidx++);

            if (fch != '%') { // not a meta character
                buf.append(fch);
                continue;
            }

            // seen meta character.
we don't do error check against the format switch (format.charAt(fidx++)) { case 'Y': // year formatYear(cal, buf); break; case 'M': // month formatMonth(cal, buf); break; case 'D': // days formatDays(cal, buf); break; case 'h': // hours formatHours(cal, buf); break; case 'm': // minutes formatMinutes(cal, buf); break; case 's': // parse seconds. formatSeconds(cal, buf); break; case 'z': // time zone formatTimeZone(cal, buf); break; default: // illegal meta character. impossible. throw new InternalError(); } } return buf.toString(); } private static void formatYear(Calendar cal, StringBuilder buf) { int year = cal.get(Calendar.YEAR); String s; if (year <= 0) // negative value { s = Integer.toString(1 - year); } else // positive value { s = Integer.toString(year); } while (s.length() < 4) { s = '0' + s; } if (year <= 0) { s = '-' + s; } buf.append(s); } private static void formatMonth(Calendar cal, StringBuilder buf) { formatTwoDigits(cal.get(Calendar.MONTH) + 1, buf); } private static void formatDays(Calendar cal, StringBuilder buf) { formatTwoDigits(cal.get(Calendar.DAY_OF_MONTH), buf); } private static void formatHours(Calendar cal, StringBuilder buf) { formatTwoDigits(cal.get(Calendar.HOUR_OF_DAY), buf); } private static void formatMinutes(Calendar cal, StringBuilder buf) { formatTwoDigits(cal.get(Calendar.MINUTE), buf); } private static void formatSeconds(Calendar cal, StringBuilder buf) { formatTwoDigits(cal.get(Calendar.SECOND), buf); if (cal.isSet(Calendar.MILLISECOND)) { // milliseconds int n = cal.get(Calendar.MILLISECOND); if (n != 0) { String ms = Integer.toString(n); while (ms.length() < 3) { ms = '0' + ms; // left 0 paddings. } buf.append('.'); buf.append(ms); } } } /** formats time zone specifier. */ private static void formatTimeZone(Calendar cal, StringBuilder buf) { TimeZone tz = cal.getTimeZone(); if (tz == null) { return; } // otherwise print out normally. 
int offset = tz.getOffset(cal.getTime().getTime()); if (offset == 0) { buf.append('Z'); return; } if (offset >= 0) { buf.append('+'); } else { buf.append('-'); offset *= -1; } offset /= 60 * 1000; // offset is in milli-seconds formatTwoDigits(offset / 60, buf); buf.append(':'); formatTwoDigits(offset % 60, buf); } /** formats Integer into two-character-wide string. */ private static void formatTwoDigits(int n, StringBuilder buf) { // n is always non-negative. if (n < 10) { buf.append('0'); } buf.append(n); } } }
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.workbench.screens.solver.backend.server; import java.util.List; import javax.annotation.PostConstruct; import javax.enterprise.context.ApplicationScoped; import javax.enterprise.event.Event; import javax.inject.Inject; import javax.inject.Named; import org.guvnor.common.services.backend.exceptions.ExceptionUtilities; import org.guvnor.common.services.backend.util.CommentedOptionFactory; import org.guvnor.common.services.shared.metadata.model.Metadata; import org.guvnor.common.services.shared.metadata.model.Overview; import org.guvnor.common.services.shared.validation.model.ValidationMessage; import org.jboss.errai.bus.server.annotations.Service; import org.kie.workbench.common.services.backend.service.KieService; import org.optaplanner.workbench.screens.solver.model.SolverConfigModel; import org.optaplanner.workbench.screens.solver.model.SolverModelContent; import org.optaplanner.workbench.screens.solver.service.SolverEditorService; import org.optaplanner.workbench.screens.solver.type.SolverResourceTypeDefinition; import org.uberfire.backend.server.util.Paths; import org.uberfire.backend.vfs.Path; import org.uberfire.ext.editor.commons.backend.service.SaveAndRenameServiceImpl; import org.uberfire.ext.editor.commons.service.CopyService; import org.uberfire.ext.editor.commons.service.DeleteService; import 
org.uberfire.ext.editor.commons.service.RenameService;
import org.uberfire.io.IOService;
import org.uberfire.java.nio.file.FileAlreadyExistsException;
import org.uberfire.rpc.SessionInfo;
import org.uberfire.workbench.events.ResourceOpenedEvent;

/**
 * Errai-exposed backend service for OptaPlanner solver configuration files.
 * Handles create/load/save plus copy, rename, delete, validation and
 * save-and-rename, delegating persistence to {@link IOService} and
 * XML (de)serialization to {@code ConfigPersistence}.
 */
@Service
@ApplicationScoped
public class SolverEditorServiceImpl
        extends KieService<SolverModelContent>
        implements SolverEditorService {

    @Inject
    @Named("ioStrategy")
    private IOService ioService;

    @Inject
    private CopyService copyService;

    @Inject
    private DeleteService deleteService;

    @Inject
    private RenameService renameService;

    @Inject
    private Event<ResourceOpenedEvent> resourceOpenedEvent;

    @Inject
    private SolverResourceTypeDefinition solverResourceType;

    @Inject
    private ConfigPersistence configPersistence;

    @Inject
    private SolverValidator solverValidator;

    @Inject
    private SessionInfo sessionInfo;

    @Inject
    private CommentedOptionFactory commentedOptionFactory;

    @Inject
    private SaveAndRenameServiceImpl<SolverConfigModel, Metadata> saveAndRenameService;

    @PostConstruct
    public void init() {
        // Register this service as the save delegate for save-and-rename operations.
        saveAndRenameService.init(this);
    }

    /**
     * Creates a new solver configuration file under {@code context}.
     * Fails with {@link FileAlreadyExistsException} if the target already exists.
     */
    @Override
    public Path create(final Path context,
                       final String fileName,
                       final SolverConfigModel config,
                       final String comment) {
        try {
            final org.uberfire.java.nio.file.Path nioPath = Paths.convert(context).resolve(fileName);
            final Path newPath = Paths.convert(nioPath);

            if (ioService.exists(nioPath)) {
                throw new FileAlreadyExistsException(nioPath.toString());
            }

            ioService.write(nioPath,
                            configPersistence.toXML(config),
                            commentedOptionFactory.makeCommentedOption(comment));

            return newPath;
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    /** Reads the file at {@code path} and deserializes it into a model. */
    @Override
    public SolverConfigModel load(final Path path) {
        String xml = ioService.readAllString(Paths.convert(path));
        return configPersistence.toConfig(xml);
    }

    @Override
    public SolverModelContent loadContent(final Path path) {
        // Delegates to KieService, which calls back into constructContent below.
        return super.loadContent(path);
    }

    @Override
    protected SolverModelContent constructContent(Path path,
                                                  Overview overview) {
        // Signal opening to interested parties
        resourceOpenedEvent.fire(new ResourceOpenedEvent(path,
                                                         sessionInfo));

        return new SolverModelContent(load(path),
                                      overview);
    }

    /** Serializes the model to its XML source representation. */
    @Override
    public String toSource(final Path path,
                           final SolverConfigModel model) {
        return configPersistence.toXML(model);
    }

    /** Persists the model and metadata, then fires metadata social events. */
    @Override
    public Path save(final Path resource,
                     final SolverConfigModel config,
                     final Metadata metadata,
                     final String comment) {
        try {
            Metadata currentMetadata = metadataService.getMetadata(resource);
            ioService.write(Paths.convert(resource),
                            configPersistence.toXML(config),
                            metadataService.setUpAttributes(resource,
                                                            metadata),
                            commentedOptionFactory.makeCommentedOption(comment));

            fireMetadataSocialEvents(resource,
                                     currentMetadata,
                                     metadata);

            return resource;
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    @Override
    public void delete(final Path path,
                       final String comment) {
        try {
            deleteService.delete(path,
                                 comment);
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    @Override
    public Path rename(final Path path,
                       final String newName,
                       final String comment) {
        try {
            return renameService.rename(path,
                                        newName,
                                        comment);
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    @Override
    public Path copy(final Path path,
                     final String newName,
                     final String comment) {
        try {
            return copyService.copy(path,
                                    newName,
                                    comment);
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    @Override
    public Path copy(final Path path,
                     final String newName,
                     final Path targetDirectory,
                     final String comment) {
        try {
            return copyService.copy(path,
                                    newName,
                                    targetDirectory,
                                    comment);
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    /** Static validation of the configuration XML. */
    @Override
    public List<ValidationMessage> validate(final Path path,
                                            final SolverConfigModel config) {
        try {
            return solverValidator.validate(path,
                                            toSource(path,
                                                     config));
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    /** Validation plus a short solver run to smoke-test the configuration. */
    @Override
    public List<ValidationMessage> smokeTest(final Path path,
                                             final SolverConfigModel config) {
        try {
            return solverValidator.validateAndRun(path,
                                                  toSource(path,
                                                           config));
        } catch (Exception e) {
            throw ExceptionUtilities.handleException(e);
        }
    }

    @Override
    public Path saveAndRename(final Path path,
                              final String newFileName,
                              final Metadata metadata,
                              final SolverConfigModel content,
                              final String comment) {
        return saveAndRenameService.saveAndRename(path,
                                                  newFileName,
                                                  metadata,
                                                  content,
                                                  comment);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode.checker; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.server.datanode.StorageLocation; import org.apache.hadoop.util.FakeTimer; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DISK_CHECK_TIMEOUT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY; import static org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult.*; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.*; /** * Unit tests for the {@link StorageLocationChecker} class. 
 */
public class TestStorageLocationChecker {
  public static final Logger LOG = LoggerFactory.getLogger(
      TestStorageLocationChecker.class);

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  /**
   * Verify that all healthy locations are correctly handled and that the
   * check routine is invoked as expected.
   * @throws Exception
   */
  @Test(timeout=30000)
  public void testAllLocationsHealthy() throws Exception {
    final List<StorageLocation> locations =
        makeMockLocations(HEALTHY, HEALTHY, HEALTHY);
    final Configuration conf = new HdfsConfiguration();
    conf.setInt(DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY, 0);

    StorageLocationChecker checker =
        new StorageLocationChecker(conf, new FakeTimer());
    List<StorageLocation> filteredLocations = checker.check(conf, locations);

    // All locations should be healthy.
    assertThat(filteredLocations.size(), is(3));

    // Ensure that the check method was invoked for each location.
    for (StorageLocation location : locations) {
      verify(location).check(any(StorageLocation.CheckContext.class));
    }
  }

  /**
   * Test handling when the number of failed locations is below the
   * max volume failure threshold.
   *
   * @throws Exception
   */
  @Test(timeout=30000)
  public void testFailedLocationsBelowThreshold() throws Exception {
    final List<StorageLocation> locations =
        makeMockLocations(HEALTHY, HEALTHY, FAILED); // 2 healthy, 1 failed.
    final Configuration conf = new HdfsConfiguration();
    conf.setInt(DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY, 1);
    StorageLocationChecker checker =
        new StorageLocationChecker(conf, new FakeTimer());
    List<StorageLocation> filteredLocations = checker.check(conf, locations);
    // The failed location is filtered out; the check itself succeeds.
    assertThat(filteredLocations.size(), is(2));
  }

  /**
   * Test handling when the number of volume failures tolerated is the
   * same as the number of volumes.
   *
   * @throws Exception
   */
  @Test(timeout=30000)
  public void testFailedLocationsAboveThreshold() throws Exception {
    final List<StorageLocation> locations =
        makeMockLocations(HEALTHY, FAILED, FAILED); // 1 healthy, 2 failed.
    final Configuration conf = new HdfsConfiguration();
    conf.setInt(DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY, 1);

    thrown.expect(IOException.class);
    thrown.expectMessage("Too many failed volumes - current valid volumes: 1,"
        + " volumes configured: 3, volumes failed: 2, volume failures"
        + " tolerated: 1");
    StorageLocationChecker checker =
        new StorageLocationChecker(conf, new FakeTimer());
    checker.check(conf, locations);
  }

  /**
   * Test that a tolerated-failures setting equal to (or exceeding) the total
   * number of volumes is rejected as an invalid configuration, even though
   * every location is healthy.
   * (NOTE(review): earlier javadoc here claimed "all storage locations are
   * failed", which contradicted the all-HEALTHY setup below.)
   *
   * @throws Exception
   */
  @Test(timeout=30000)
  public void testBadConfiguration() throws Exception {
    final List<StorageLocation> locations =
        makeMockLocations(HEALTHY, HEALTHY, HEALTHY);
    final Configuration conf = new HdfsConfiguration();
    conf.setInt(DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY, 3);

    thrown.expect(IOException.class);
    thrown.expectMessage("Invalid value configured");
    StorageLocationChecker checker =
        new StorageLocationChecker(conf, new FakeTimer());
    checker.check(conf, locations);
  }

  /**
   * Verify that a {@link StorageLocation#check} timeout is correctly detected
   * as a failure.
   *
   * This is hard to test without a {@link Thread#sleep} call.
   *
   * @throws Exception
   */
  @Test (timeout=300000)
  public void testTimeoutInCheck() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    conf.setTimeDuration(DFS_DATANODE_DISK_CHECK_TIMEOUT_KEY,
        1, TimeUnit.SECONDS);
    conf.setInt(DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY, 1);
    final FakeTimer timer = new FakeTimer();

    // Generate a list of storage locations the first of which sleeps
    // for 2 seconds in its check() routine.
    final List<StorageLocation> locations = makeSlowLocations(2000, 1);
    StorageLocationChecker checker =
        new StorageLocationChecker(conf, timer);

    try {
      // Check the two locations and ensure that only one of them
      // was filtered out.
      List<StorageLocation> filteredList = checker.check(conf, locations);
      assertThat(filteredList.size(), is(1));
    } finally {
      checker.shutdownAndWait(10, TimeUnit.SECONDS);
    }
  }

  /**
   * Return a list of storage locations - one per argument - which return
   * health check results corresponding to the supplied arguments.
   */
  private List<StorageLocation> makeMockLocations(VolumeCheckResult... args)
      throws IOException {
    final List<StorageLocation> locations = new ArrayList<>(args.length);
    final AtomicInteger index = new AtomicInteger(0);

    for (VolumeCheckResult result : args) {
      final StorageLocation location = mock(StorageLocation.class);
      // Distinct toString per mock so failure messages identify the volume.
      when(location.toString()).thenReturn("/" + index.incrementAndGet());
      when(location.check(any(StorageLocation.CheckContext.class)))
          .thenReturn(result);
      locations.add(location);
    }
    return locations;
  }

  /**
   * Return a list of storage locations - one per argument - whose check()
   * method takes at least the specified number of milliseconds to complete.
   */
  private List<StorageLocation> makeSlowLocations(long... args)
      throws IOException {
    final List<StorageLocation> locations = new ArrayList<>(args.length);
    final AtomicInteger index = new AtomicInteger(0);

    for (final long checkDelayMs: args) {
      final StorageLocation location = mock(StorageLocation.class);
      when(location.toString()).thenReturn("/" + index.incrementAndGet());
      when(location.check(any(StorageLocation.CheckContext.class)))
          .thenAnswer(new Answer<VolumeCheckResult>() {
            @Override
            public VolumeCheckResult answer(InvocationOnMock invocation)
                throws Throwable {
              Thread.sleep(checkDelayMs);
              return VolumeCheckResult.HEALTHY;
            }
          });
      locations.add(location);
    }
    return locations;
  }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.01.06 at 03:51:15 PM EAT // package org.akomantoso.schema.v3.csd07; import java.util.HashMap; import java.util.Map; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAnyAttribute; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import javax.xml.namespace.QName; /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;type xmlns="http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;Complex&lt;/type&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;name xmlns="http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;srcType&lt;/name&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;comment xmlns="http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt; * the complex type srcType defines the empty content model and the list of attributes for manifestation-level references to external resources&lt;/comment&gt; * </pre> * * * <p>Java class for srcType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
 *
 * <p>Java class for the {@code srcType} complex type: an empty content model
 * carrying the {@code show}, {@code idopt}, {@code src} and {@code core}
 * attribute groups, plus a lax wildcard ({@code anyAttribute
 * processContents='lax' namespace='##other'}) for foreign attributes.
 *
 * <p>NOTE: JAXB-generated code. Regenerate from the schema instead of
 * editing this class by hand; manual changes will be lost.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "srcType")
public class SrcType {

    @XmlAttribute(name = "showAs", required = true)
    protected String showAs;
    @XmlAttribute(name = "shortForm")
    protected String shortForm;
    @XmlAttribute(name = "currentId")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String currentId;
    @XmlAttribute(name = "originalId")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String originalId;
    @XmlAttribute(name = "GUID")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String guid;
    @XmlAttribute(name = "src", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String src;
    @XmlAttribute(name = "alt")
    protected String alt;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /** Gets the {@code showAs} attribute value; may be {@code null}. */
    public String getShowAs() {
        return showAs;
    }

    /** Sets the {@code showAs} attribute value. */
    public void setShowAs(String value) {
        this.showAs = value;
    }

    /** Gets the {@code shortForm} attribute value; may be {@code null}. */
    public String getShortForm() {
        return shortForm;
    }

    /** Sets the {@code shortForm} attribute value. */
    public void setShortForm(String value) {
        this.shortForm = value;
    }

    /** Gets the {@code currentId} attribute value; may be {@code null}. */
    public String getCurrentId() {
        return currentId;
    }

    /** Sets the {@code currentId} attribute value. */
    public void setCurrentId(String value) {
        this.currentId = value;
    }

    /** Gets the {@code originalId} attribute value; may be {@code null}. */
    public String getOriginalId() {
        return originalId;
    }

    /** Sets the {@code originalId} attribute value. */
    public void setOriginalId(String value) {
        this.originalId = value;
    }

    /** Gets the {@code GUID} attribute value; may be {@code null}. */
    public String getGUID() {
        return guid;
    }

    /** Sets the {@code GUID} attribute value. */
    public void setGUID(String value) {
        this.guid = value;
    }

    /** Gets the required {@code src} attribute value (an anyURI). */
    public String getSrc() {
        return src;
    }

    /** Sets the {@code src} attribute value. */
    public void setSrc(String value) {
        this.src = value;
    }

    /** Gets the {@code alt} attribute value; may be {@code null}. */
    public String getAlt() {
        return alt;
    }

    /** Sets the {@code alt} attribute value. */
    public void setAlt(String value) {
        this.alt = value;
    }

    /**
     * Gets a map that contains attributes that aren't bound to any typed
     * property on this class.
     *
     * <p>The map is keyed by attribute QName with string values. It is live:
     * new attributes may be added by mutating the map directly, which is why
     * there is no setter.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
package cz.metacentrum.perun.webgui.tabs.facilitiestabs; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.json.client.JSONString; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.safehtml.shared.SafeHtmlUtils; import com.google.gwt.user.client.ui.*; import cz.metacentrum.perun.webgui.client.PerunWebSession; import cz.metacentrum.perun.webgui.client.localization.ButtonTranslation; import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu; import cz.metacentrum.perun.webgui.client.resources.SmallIcons; import cz.metacentrum.perun.webgui.client.resources.TableSorter; import cz.metacentrum.perun.webgui.json.JsonCallbackEvents; import cz.metacentrum.perun.webgui.json.JsonUtils; import cz.metacentrum.perun.webgui.json.tasksManager.GetFacilityState; import cz.metacentrum.perun.webgui.model.FacilityState; import cz.metacentrum.perun.webgui.model.PerunError; import cz.metacentrum.perun.webgui.tabs.FacilitiesTabs; import cz.metacentrum.perun.webgui.tabs.TabItem; import cz.metacentrum.perun.webgui.tabs.TabItemWithUrl; import cz.metacentrum.perun.webgui.tabs.UrlMapper; import cz.metacentrum.perun.webgui.widgets.AjaxLoaderImage; import cz.metacentrum.perun.webgui.widgets.CustomButton; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Map; import java.util.Set; /** * Tab with propagation status of all facilities (or where user is admin). 
 *
 * Renders one FlexTable with a color legend, a refresh button, and one anchor
 * per destination; anchors are colored by propagation state (green/red/gray)
 * and open a DestinationResultsTabItem on click.
 *
 * @author Pavel Zlamal <256627@mail.muni.cz>
 */
public class FacilitiesPropagationsTabItem implements TabItem, TabItemWithUrl {

	/**
	 * Perun web session
	 */
	private PerunWebSession session = PerunWebSession.getInstance();

	/**
	 * Content widget - should be simple panel
	 */
	private SimplePanel contentWidget = new SimplePanel();

	/**
	 * Title widget
	 */
	private Label titleWidget = new Label("All facilities states");

	// FlexTable row cursor used while building the result table; reset on each draw()
	private int mainrow = 0;
	// Per-state counters shown in the legend; reset on each draw()
	// (note: field name "procesingCounter" [sic] kept as-is; only used in commented-out code)
	private int okCounter = 0;
	private int errorCounter = 0;
	private int notDeterminedCounter = 0;
	private int procesingCounter = 0;

	/**
	 * Creates a tab instance
	 */
	public FacilitiesPropagationsTabItem(){}

	// No async precondition: this tab can always be drawn immediately.
	public boolean isPrepared(){
		return true;
	}

	@Override
	public boolean isRefreshParentOnClose() {
		return false;
	}

	@Override
	public void onClose() {
	}

	/**
	 * Builds the tab UI: legend + refresh button on top, then a scrollable
	 * table filled asynchronously by GetFacilityState. Counters and the row
	 * cursor are reset here because draw() is re-invoked on refresh.
	 *
	 * @return the tab's content widget
	 */
	public Widget draw() {

		mainrow = 0;
		okCounter = 0;
		errorCounter = 0;
		notDeterminedCounter = 0;
		procesingCounter = 0;

		VerticalPanel mainTab = new VerticalPanel();
		mainTab.setWidth("100%");

		final TabItem tab = this;

		// MAIN PANEL
		final ScrollPanel firstTabPanel = new ScrollPanel();
		firstTabPanel.setSize("100%", "100%");
		firstTabPanel.setStyleName("perun-tableScrollPanel");

		// Legend row: colored cells explaining the state colors.
		final FlexTable help = new FlexTable();
		help.setCellPadding(4);
		help.setWidth("100%");
		help.setHTML(0, 0, "<strong>Color&nbsp;notation:</strong>");
		help.getFlexCellFormatter().setWidth(0, 0, "100px");
		help.setHTML(0, 1, "<strong>OK</strong>");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 1, HasHorizontalAlignment.ALIGN_CENTER);
		help.getFlexCellFormatter().setWidth(0, 1, "50px");
		help.getFlexCellFormatter().setStyleName(0, 1, "green");
		help.setHTML(0, 2, "<strong>Error</strong>");
		help.getFlexCellFormatter().setWidth(0, 2, "50px");
		help.getFlexCellFormatter().setStyleName(0, 2, "red");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 2, HasHorizontalAlignment.ALIGN_CENTER);
		help.setHTML(0, 3, "<strong>Not&nbsp;determined</strong>");
		help.getFlexCellFormatter().setWidth(0, 3, "50px");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 3, HasHorizontalAlignment.ALIGN_CENTER);
		help.getFlexCellFormatter().setStyleName(0, 3, "notdetermined");

		/*
		help.setHTML(0, 4, "<strong>Processing</strong>");
		help.getFlexCellFormatter().setWidth(0, 4, "50px");
		help.getFlexCellFormatter().setStyleName(0, 4, "yellow");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 4, HasHorizontalAlignment.ALIGN_CENTER);
		*/

		// Refresh button simply redraws the whole tab.
		final CustomButton cb = new CustomButton(ButtonTranslation.INSTANCE.refreshButton(), ButtonTranslation.INSTANCE.refreshPropagationResults(),SmallIcons.INSTANCE.updateIcon(), new ClickHandler() {
			public void onClick(ClickEvent clickEvent) {
				session.getTabManager().reloadTab(tab);
			}
		});
		help.setWidget(0, 5, cb);
		help.getFlexCellFormatter().setWidth(0, 5, "200px");
		help.setHTML(0, 6, "&nbsp;");
		help.getFlexCellFormatter().setWidth(0, 6, "50%");

		mainTab.add(help);
		mainTab.add(new HTML("<hr size=\"2\" />"));
		mainTab.add(firstTabPanel);

		final FlexTable content = new FlexTable();
		content.setWidth("100%");
		content.setBorderWidth(0);
		firstTabPanel.add(content);
		content.setStyleName("propagationTable", true);

		// Loader image shown until the async call finishes.
		final AjaxLoaderImage im = new AjaxLoaderImage();
		content.setWidget(0, 0, im);
		content.getFlexCellFormatter().setHorizontalAlignment(0, 0, HasHorizontalAlignment.ALIGN_CENTER);

		final GetFacilityState callback = new GetFacilityState(0, 0, new JsonCallbackEvents(){
			public void onLoadingStart(){
				im.loadingStart();
				cb.setProcessing(true);
			}
			public void onError(PerunError error){
				im.loadingError(error);
				cb.setProcessing(false);
			}
			public void onFinished(JavaScriptObject jso) {
				im.loadingFinished();
				cb.setProcessing(false);
				content.clear();
				content.getFlexCellFormatter().setHorizontalAlignment(0, 0, HasHorizontalAlignment.ALIGN_LEFT);
				ArrayList<FacilityState> list = JsonUtils.jsoAsList(jso);
				if (list != null && !list.isEmpty()){

					// Split facilities: >1 destination = "cluster" (own section),
					// otherwise listed together under "Single hosts".
					list = new TableSorter<FacilityState>().sortByNumberOfDestinations(list);
					ArrayList<FacilityState> clusters = new ArrayList<FacilityState>();
					ArrayList<FacilityState> hosts = new ArrayList<FacilityState>();
					for (final FacilityState state : list) {
						if (state.getDestinations().size() > 1) {
							clusters.add(state);
						} else {
							hosts.add(state);
						}
					}
					clusters = new TableSorter<FacilityState>().sortByFacilityName(clusters);
					hosts = new TableSorter<FacilityState>().sortByFacilityName(hosts);

					// PROCESS CLUSTERS (with more than one destinations)
					for (final FacilityState state : clusters) {

						content.setHTML(mainrow, 0, "<strong>" + SafeHtmlUtils.fromString(state.getFacility().getName()).asString() + "</strong>");
						final FlowPanel inner = new FlowPanel();
						content.setWidget(mainrow+1, 0, inner);
						content.getFlexCellFormatter().setStyleName(mainrow + 1, 0, "propagationTablePadding");

						Set<String> destinations = state.getDestinations().keySet();
						ArrayList<String> destList = new ArrayList<String>();
						// Anchor width (px) sized to the longest hostname prefix (8 px per char).
						int width = 0;
						for (String dest : destinations) {
							destList.add(dest);
							if (dest.indexOf(".")*8 > width) {
								width = dest.indexOf(".")*8;
							}
						}
						Collections.sort(destList, new Comparator<String>() {
							@Override
							public int compare(String o1, String o2) {
								return TableSorter.smartCompare(o1, o2);
							}
						});

						for (final String dest : destList) {
							// Show only the part before the first dot; fall back to the
							// whole destination when there is no dot prefix.
							String show = dest.substring(0, dest.indexOf("."));
							if (show.length() == 0) {
								show = dest;
								width = dest.length()*8;
							}
							Anchor hyp = new Anchor();
							hyp.setHTML("<span style=\"display: inline-block; width: "+width+"px; text-align: center;\">"+SafeHtmlUtils.fromString((show != null) ? show : "").asString()+"</span>");
							hyp.addClickHandler(new ClickHandler() {
								public void onClick(ClickEvent clickEvent) {
									session.getTabManager().addTab(new DestinationResultsTabItem(state.getFacility(), null, dest, false));
								}
							});
							inner.add(hyp);
							// style
							if (state.getDestinations().get(dest).equals(new JSONString("ERROR"))) {
								hyp.addStyleName("red");
								errorCounter++;
							} else if (state.getDestinations().get(dest).equals(new JSONString("OK"))) {
								hyp.addStyleName("green");
								okCounter++;
							} else {
								hyp.addStyleName("notdetermined");
								notDeterminedCounter++;
							}
						}
						if (destList.isEmpty()) {
							notDeterminedCounter++;
						}
						// Two rows consumed per cluster: name row + destinations row.
						mainrow++;
						mainrow++;
					}

					// PROCESS HOSTS (with one or less destination)

					// FIX WIDTH
					int width = 0;
					for (FacilityState state : hosts) {
						if (state.getDestinations().size() < 2) {
							if (state.getFacility().getName().length()*8 > width) {
								width = state.getFacility().getName().length()*8;
							}
						}
					}

					FlowPanel inner = new FlowPanel();
					for (final FacilityState state : hosts) {

						Set<String> destinations = state.getDestinations().keySet();
						ArrayList<String> destList = new ArrayList<String>();
						for (String dest : destinations) {
							destList.add(dest);
						}
						Collections.sort(destList, new Comparator<String>() {
							@Override
							public int compare(String o1, String o2) {
								return TableSorter.smartCompare(o1, o2);
							}
						});

						for (final String dest : destList) {
							Anchor hyp = new Anchor();
							hyp.setHTML("<span style=\"display: inline-block; width: "+width+"px; text-align: center;\">"+SafeHtmlUtils.fromString((dest != null) ? dest : "").asString()+"</span>");
							inner.add(hyp);
							hyp.addClickHandler(new ClickHandler() {
								public void onClick(ClickEvent clickEvent) {
									session.getTabManager().addTab(new DestinationResultsTabItem(state.getFacility(), null, dest, false));
								}
							});
							// style
							if (state.getDestinations().get(dest).equals(new JSONString("ERROR"))) {
								hyp.addStyleName("red");
								errorCounter++;
							} else if (state.getDestinations().get(dest).equals(new JSONString("OK"))) {
								hyp.addStyleName("green");
								okCounter++;
							} else {
								hyp.addStyleName("notdetermined");
								notDeterminedCounter++;
							}
						}
						// Host with no destinations at all: show its facility name, gray.
						if (destList.isEmpty()) {
							Anchor hyp = new Anchor();
							hyp.setHTML("<span style=\"display: inline-block; width: "+width+"px; text-align: center;\">"+SafeHtmlUtils.fromString((state.getFacility().getName() != null) ? state.getFacility().getName() : "").asString()+"</span>");
							inner.add(hyp);
							hyp.addStyleName("notdetermined");
							notDeterminedCounter++;
						}
					}

					if (!hosts.isEmpty()) {
						content.setHTML(mainrow, 0, "<strong>Single hosts</strong>");
						mainrow++;
					}
					content.setWidget(mainrow, 0, inner);
					content.getFlexCellFormatter().setStyleName(mainrow, 0, "propagationTablePadding");
					mainrow++;

				}
				// set counters
				help.setHTML(0, 1, "<strong>Ok&nbsp;("+okCounter+")</strong>");
				help.setHTML(0, 2, "<strong>Error&nbsp;("+errorCounter+")</strong>");
				help.setHTML(0, 3, "<strong>Not&nbsp;determined&nbsp;("+notDeterminedCounter+")</strong>");
				//help.setHTML(0, 4, "<strong>Processing&nbsp;(" + procesingCounter + ")</strong>");
			}
		});

		// get for all facilities
		callback.retrieveData();

		// resize perun table to correct size on screen
		session.getUiElements().resizePerunTable(firstTabPanel, 400, this);

		this.contentWidget.setWidget(mainTab);

		return getWidget();
	}

	public Widget getWidget() {
		return this.contentWidget;
	}

	public Widget getTitle() {
		return this.titleWidget;
	}

	public ImageResource getIcon() {
		return SmallIcons.INSTANCE.arrowRightIcon();
	}

	// All instances of this tab compare equal: hashCode is a class-wide constant
	// and equals() only checks the runtime class (single-instance tab).
	@Override
	public int hashCode() {
		final int prime = 701;
		int result = 1;
		result = prime * result + 122341;
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		return true;
	}

	public boolean multipleInstancesEnabled() {
		return false;
	}

	public void open() {
		session.getUiElements().getMenu().openMenu(MainMenu.FACILITY_ADMIN, true);
		session.getUiElements().getBreadcrumbs().setLocation(MainMenu.FACILITY_ADMIN, "All facilities states", getUrlWithParameters());
	}

	public boolean isAuthorized() {
		if (session.isFacilityAdmin()) {
			return true;
		} else {
			return false;
		}
	}

	public final static String URL = "propags";

	public String getUrl() {
		return URL;
	}

	public String getUrlWithParameters() {
		return FacilitiesTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl();
	}

	static public FacilitiesPropagationsTabItem load(Map<String, String> parameters) {
		return new FacilitiesPropagationsTabItem();
	}

}
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.segment.realtime.firehose; import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import io.druid.data.input.Firehose; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.guava.Yielder; import io.druid.java.util.common.guava.Yielders; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.filter.DimFilter; import io.druid.query.select.EventHolder; import io.druid.segment.BaseLongColumnValueSelector; import io.druid.segment.BaseObjectColumnValueSelector; import io.druid.segment.Cursor; import io.druid.segment.DimensionSelector; import io.druid.segment.VirtualColumns; import io.druid.segment.column.Column; import io.druid.segment.data.IndexedInts; import io.druid.segment.filter.Filters; import io.druid.segment.transform.TransformSpec; import io.druid.segment.transform.Transformer; import io.druid.utils.Runnables; import 
javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * A {@link Firehose} that replays rows out of already-ingested segments.
 * It concatenates cursors over the given adapters into one lazy sequence of
 * {@link InputRow}s, reading the requested dims and metrics from each cursor,
 * and pushes every row through the {@link Transformer} built from the
 * supplied {@link TransformSpec}.
 */
public class IngestSegmentFirehose implements Firehose
{
  private final Transformer transformer;
  // Lazily advanced cursor over the concatenated row sequence; replaced on each nextRow().
  private Yielder<InputRow> rowYielder;

  /**
   * @param adapters      windowed storage adapters to read rows from, in order
   * @param transformSpec transform applied to every emitted row
   * @param dims          dimension names to copy into each row (missing dims are skipped)
   * @param metrics       metric names to copy into each row (null values are skipped)
   * @param dimFilter     optional filter applied when creating cursors; may be null
   */
  public IngestSegmentFirehose(
      final List<WindowedStorageAdapter> adapters,
      final TransformSpec transformSpec,
      final List<String> dims,
      final List<String> metrics,
      final DimFilter dimFilter
  )
  {
    this.transformer = transformSpec.toTransformer();
    Sequence<InputRow> rows = Sequences.concat(
        Iterables.transform(
            adapters,
            new Function<WindowedStorageAdapter, Sequence<InputRow>>() {
              @Nullable
              @Override
              public Sequence<InputRow> apply(WindowedStorageAdapter adapter)
              {
                return Sequences.concat(
                    Sequences.map(
                        adapter.getAdapter().makeCursors(
                            Filters.toFilter(dimFilter),
                            adapter.getInterval(),
                            VirtualColumns.EMPTY,
                            Granularities.ALL,
                            false,
                            null
                        ),
                        new Function<Cursor, Sequence<InputRow>>() {
                          @Nullable
                          @Override
                          public Sequence<InputRow> apply(final Cursor cursor)
                          {
                            // Selectors are created once per cursor and reused for every row.
                            final BaseLongColumnValueSelector timestampColumnSelector = cursor.getColumnSelectorFactory().makeColumnValueSelector(Column.TIME_COLUMN_NAME);

                            final Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
                            for (String dim : dims) {
                              final DimensionSelector dimSelector = cursor
                                  .getColumnSelectorFactory()
                                  .makeDimensionSelector(new DefaultDimensionSpec(dim, dim));
                              // dimSelector is null if the dimension is not present
                              if (dimSelector != null) {
                                dimSelectors.put(dim, dimSelector);
                              }
                            }

                            final Map<String, BaseObjectColumnValueSelector> metSelectors = Maps.newHashMap();
                            for (String metric : metrics) {
                              final BaseObjectColumnValueSelector metricSelector = cursor.getColumnSelectorFactory().makeColumnValueSelector(metric);
                              metSelectors.put(metric, metricSelector);
                            }

                            // Wrap the cursor in a one-shot Iterator; next() both reads the
                            // current row and advances the cursor.
                            return Sequences.simple(
                                new Iterable<InputRow>() {
                                  @Override
                                  public Iterator<InputRow> iterator()
                                  {
                                    return new Iterator<InputRow>() {
                                      @Override
                                      public boolean hasNext()
                                      {
                                        return !cursor.isDone();
                                      }

                                      @Override
                                      public InputRow next()
                                      {
                                        final Map<String, Object> theEvent = Maps.newLinkedHashMap();
                                        final long timestamp = timestampColumnSelector.getLong();
                                        theEvent.put(EventHolder.timestampKey, DateTimes.utc(timestamp));

                                        for (Map.Entry<String, DimensionSelector> dimSelector : dimSelectors.entrySet()) {
                                          final String dim = dimSelector.getKey();
                                          final DimensionSelector selector = dimSelector.getValue();
                                          final IndexedInts vals = selector.getRow();

                                          // Single-valued dims are stored as a plain String,
                                          // multi-valued dims as a List<String>; empty dims are omitted.
                                          int valsSize = vals.size();
                                          if (valsSize == 1) {
                                            final String dimVal = selector.lookupName(vals.get(0));
                                            theEvent.put(dim, dimVal);
                                          } else if (valsSize > 1) {
                                            List<String> dimVals = new ArrayList<>(valsSize);
                                            for (int i = 0; i < valsSize; ++i) {
                                              dimVals.add(selector.lookupName(vals.get(i)));
                                            }
                                            theEvent.put(dim, dimVals);
                                          }
                                        }

                                        for (Map.Entry<String, BaseObjectColumnValueSelector> metSelector : metSelectors.entrySet()) {
                                          final String metric = metSelector.getKey();
                                          final BaseObjectColumnValueSelector selector = metSelector.getValue();
                                          Object value = selector.getObject();
                                          if (value != null) {
                                            theEvent.put(metric, value);
                                          }
                                        }

                                        cursor.advance();
                                        return new MapBasedInputRow(timestamp, dims, theEvent);
                                      }

                                      @Override
                                      public void remove()
                                      {
                                        throw new UnsupportedOperationException("Remove Not Supported");
                                      }
                                    };
                                  }
                                }
                            );
                          }
                        }
                    )
                );
              }
            }
        )
    );
    rowYielder = Yielders.each(rows);
  }

  @Override
  public boolean hasMore()
  {
    return !rowYielder.isDone();
  }

  @Nullable
  @Override
  public InputRow nextRow()
  {
    final InputRow inputRow = rowYielder.get();
    rowYielder = rowYielder.next(null);
    // May return null when the transformer filters the row out.
    return transformer.transform(inputRow);
  }

  @Override
  public Runnable commit()
  {
    // Nothing to persist: source segments are already committed.
    return Runnables.getNoopRunnable();
  }

  @Override
  public void close() throws IOException
  {
    rowYielder.close();
  }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.erhannis.mathnstuff.symbolic;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Map;

/**
 * Symbolic product of subexpressions: eval() multiplies the values of all
 * {@link #terms}. An ExMult with no terms behaves as the constant 1.
 *
 * @author erhannis
 */
public class ExMult extends Expression {
    // Factors of the product. Public and mutable; reduce()/collapse() build new lists.
    public ArrayList<Expression> terms = new ArrayList<Expression>();

    public ExMult(Expression... terms) {
        Collections.addAll(this.terms, terms);
    }

    // Internal: adopt an already-built term list without copying.
    private ExMult(ArrayList<Expression> terms) {
        this.terms = terms;
    }

    /**
     * Evaluates the product of all terms under the given variable bindings.
     */
    @Override
    public double eval(Map<String, Double> varValues) {
        double result = 1;
        for (Expression term : terms) {
            result *= term.eval(varValues);
        }
        return result;
    }

    /**
     * Returns a new ExMult whose terms are recursively sorted: constants first,
     * then everything else ordered by toString(). Does not modify this instance.
     */
    @Override
    public Expression sort() {
        ArrayList<Expression> newTerms = new ArrayList<Expression>();
        for (Expression term : terms) {
            newTerms.add(term.sort());
        }
        Collections.sort(newTerms, new Comparator<Expression>() {
            @Override
            public int compare(Expression o1, Expression o2) {
                // I want constants in front
                if (o1 instanceof ExConstant) {
                    if (o2 instanceof ExConstant) {
                        return o1.toString().compareTo(o2.toString());
                    } else {
                        return -1;
                    }
                } else if (o2 instanceof ExConstant) {
                    return 1;
                } else {
                    return o1.toString().compareTo(o2.toString());
                }
            }
        });
        return new ExMult(newTerms);
    }

    /**
     * Flattens nested products: (a*(b*c)) becomes (a*b*c).
     * Terms are recursively collapsed; non-ExMult terms are kept as-is.
     */
    @Override
    public Expression collapse() {
        ExMult newEx = new ExMult();
        for (Expression term : terms) {
            term = term.collapse();
            if (term instanceof ExMult) {
                newEx.terms.addAll(((ExMult)term).terms);
            } else {
                newEx.terms.add(term);
            }
        }
        return newEx;
    }

    /**
     * Combines scalars, expands products of sums.
     * If only one term, returns that.
     * //TODO Should maybe turn (b^a)(b^c) into (b^(a+c))?
     * //TODO Should maybe turn a*a into (a^2)?
     * //TODO Could maybe reduce multiplied fractions
     * @return
     */
    @Override
    public Expression reduce() {
        ExMult collapsed = (ExMult)this.collapse();
        ExMult subSimplified = new ExMult();
        // Simplify sub expressions
        // All constant factors are folded into a single leading ExConstant.
        ExConstant factor = new ExConstant(1);
        subSimplified.terms.add(factor);
        for (Expression term : collapsed.terms) {
            term = term.collapse(); //TODO May be very redundant
            term = term.reduce();
            term = term.sort();
            if (term instanceof ExMult) {
                subSimplified.terms.addAll(((ExMult)term).terms);
            } else if (term instanceof ExConstant) {
                factor.value *= ((ExConstant)term).value;
            } else {
                subSimplified.terms.add(term);
            }
        }
        // Drop a factor of exactly 1.0 (exact double comparison is intentional here).
        if (factor.value == 1) {
            subSimplified.terms.remove(factor);
        }
        if (subSimplified.terms.size() == 1) {
            return subSimplified.terms.get(0);
        }
        // Expand
        // Distribute the first ExAdd term over the remaining factors, then
        // re-reduce the resulting sum; recursion handles any further ExAdds.
        for (int i = 0; i < subSimplified.terms.size(); i++) {
            Expression term = subSimplified.terms.get(i);
            if (term instanceof ExAdd) {
                ExAdd curTerm = (ExAdd)term;
                ExMult others = new ExMult();
                for (int j = 0; j < subSimplified.terms.size(); j++) {
                    if (j == i) continue;
                    others.terms.add(subSimplified.terms.get(j));
                }
                ExAdd newAdd = new ExAdd();
                for (int j = 0; j < curTerm.terms.size(); j++) {
                    ExMult newMult = new ExMult();
                    newMult.terms.add(curTerm.terms.get(j));
                    newMult.terms.addAll(others.terms);
                    newAdd.terms.add(newMult);
                }
                return newAdd.reduce();
            }
        }
        // No expansion happened.
        //TODO Anything else?
        return subSimplified;
    }

    /**
     * Renders the product without its leading constant factor.
     * Assumes that this Expression has already been reduced (and therefore that the
     * factor, if there is one, is out front).
     * @return
     */
    public String toStringNoFactor() {
        if (terms.size() > 0) {
            if (terms.get(0) instanceof ExConstant) {
                // Has factor
                if (terms.size() == 2) {
                    // Is factor times expression
                    return terms.get(1).toString();
                } else {
                    // Is factor times several things
                    StringBuilder sb = new StringBuilder();
                    sb.append("(" + terms.get(1).toString() + ")");
                    for (int i = 2; i < terms.size(); i++) {
                        sb.append("*(" + terms.get(i).toString() + ")");
                    }
                    return sb.toString();
                }
            } else {
                // Does not have factor
                if (terms.size() == 1) {
                    // Is single inner expression (which shouldn't actually happen...error, for information)
                    throw new IllegalArgumentException("Found an illegal (though harmless) state! Reduced ExMult had single subexpression!");
                } else {
                    // Has several expressions
                    return this.toString();
                }
            }
        } else {
            // Kinda weird, but technically right, I think?
            return "1";
        }
    }

    /**
     * Returns the leading constant factor, or 1 if there is none.
     * Assumes that this Expression has already been reduced (and therefore that the
     * factor, if there is one, is out front).
     * @return
     */
    public double getFactor() {
        if (terms.size() > 0 && terms.get(0) instanceof ExConstant) {
            return ((ExConstant)terms.get(0)).value;
        } else {
            return 1;
        }
    }

    /**
     * Returns the product minus its leading constant factor.
     * Assumes that this Expression has already been reduced (and therefore that the
     * factor, if there is one, is out front).
     * @return
     */
    public Expression getWithoutFactor() {
        if (terms.size() > 0 && terms.get(0) instanceof ExConstant) {
            if (terms.size() == 2) {
                return terms.get(1);
            } else {
                ExMult newMult = new ExMult();
                for (int i = 1; i < terms.size(); i++) {
                    newMult.terms.add(terms.get(i));
                }
                return newMult;
            }
        } else {
            //TODO Hmm, like, I like the idea of consistently returning copies, but this may be excessive.
            //     Also, has the side effect of sorting it.
            return this.sort();
        }
    }

    @Override
    public String toString() {
        if (terms.size() > 0) {
            StringBuilder sb = new StringBuilder();
            sb.append("(" + terms.get(0).toString() + ")");
            for (int i = 1; i < terms.size(); i++) {
                sb.append("*(" + terms.get(i).toString() + ")");
            }
            return sb.toString();
        } else {
            // Kinda weird, but technically right, I think?
            return "1";
        }
    }

    /**
     * Like toString(), but parenthesizes only where Expression.addParentheses says so.
     */
    @Override
    public String toStringSimple() {
        if (terms.size() > 0) {
            StringBuilder sb = new StringBuilder();
            if (Expression.addParentheses(terms.get(0))) {
                sb.append("(" + terms.get(0).toStringSimple() + ")");
            } else {
                sb.append(terms.get(0).toStringSimple());
            }
            for (int i = 1; i < terms.size(); i++) {
                if (Expression.addParentheses(terms.get(i))) {
                    sb.append(" * (" + terms.get(i).toStringSimple() + ")");
                } else {
                    sb.append(" * " + terms.get(i).toStringSimple());
                }
            }
            return sb.toString();
        } else {
            // Kinda weird, but technically right, I think?
            return "1";
        }
    }
}
/* * Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Neither the name of Oracle nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package com.tech11.spg;

import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_DELETE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;
import static java.nio.file.StandardWatchEventKinds.OVERFLOW;

import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.WatchEvent;
import java.nio.file.WatchEvent.Kind;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

/**
 * Example to watch a directory (or tree) for changes to files.
 */
public class WatchDir {

	private final WatchService watcher;
	// Maps each registration key back to the directory it watches.
	private final Map<WatchKey, Path> keys;
	private final boolean recursive;
	// When true, register()/update events are logged to stdout.
	// FIX: start disabled so the initial (re-)registration pass is silent,
	// matching the "enable trace after initial registration" step below.
	private boolean trace = false;

	/**
	 * Creates a WatchService and registers the given directory.
	 *
	 * @param dir       directory to watch
	 * @param recursive whether to also register the whole subtree
	 * @throws IOException if the watch service or a registration fails
	 */
	WatchDir(Path dir, boolean recursive) throws IOException {
		this.watcher = FileSystems.getDefault().newWatchService();
		this.keys = new HashMap<>();
		this.recursive = recursive;

		if (recursive) {
			System.out.format("Scanning %s ...%n", dir);
			registerAll(dir);
			System.out.println("Done.");
		} else {
			register(dir);
		}

		// enable trace after initial registration
		this.trace = true;
	}

	@SuppressWarnings("unchecked")
	static <T> WatchEvent<T> cast(WatchEvent<?> event) {
		return (WatchEvent<T>) event;
	}

	/**
	 * Register the given directory with the WatchService.
	 */
	private void register(Path dir) throws IOException {
		WatchKey key = dir.register(watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
		if (trace) {
			Path prev = keys.get(key);
			if (prev == null) {
				System.out.format("register: %s%n", dir);
			} else {
				if (!dir.equals(prev)) {
					System.out.format("update: %s -> %s%n", prev, dir);
				}
			}
		}
		keys.put(key, dir);
	}

	/**
	 * Register the given directory, and all its sub-directories, with the
	 * WatchService.
	 */
	private void registerAll(final Path start) throws IOException {
		// register directory and sub-directories
		Files.walkFileTree(start, new SimpleFileVisitor<Path>() {
			@Override
			public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
				register(dir);
				return FileVisitResult.CONTINUE;
			}
		});
	}

	/**
	 * Process all events for keys queued to the watcher. Blocks until the
	 * watcher is interrupted or no watched directory remains accessible.
	 *
	 * @param callable optional; when non-null, {@code callable.get().run()} is
	 *                 invoked once per received (non-overflow) event
	 */
	void processEvents(Supplier<Runnable> callable) {
		for (;;) {

			// wait for key to be signalled
			WatchKey key;
			try {
				key = watcher.take();
			} catch (InterruptedException x) {
				return;
			}

			Path dir = keys.get(key);
			if (dir == null) {
				System.err.println("WatchKey not recognized!!");
				continue;
			}

			// see:
			// http://stackoverflow.com/questions/16777869/java-7-watchservice-ignoring-multiple-occurrences-of-the-same-event
			// Prevent receiving two separate ENTRY_MODIFY events: file modified
			// and timestamp updated. Instead, receive one ENTRY_MODIFY event
			// with two counts.
			try {
				Thread.sleep(50);
			} catch (InterruptedException e) {
				// FIX: don't swallow the interruption — restore the interrupt
				// flag and stop processing, consistent with the take() handler.
				Thread.currentThread().interrupt();
				return;
			}

			for (WatchEvent<?> event : key.pollEvents()) {
				Kind<?> kind = event.kind();

				// TBD - provide example of how OVERFLOW event is handled
				if (kind == OVERFLOW) {
					continue;
				}

				// Context for directory entry event is the file name of entry
				WatchEvent<Path> ev = cast(event);
				Path name = ev.context();
				Path child = dir.resolve(name);

				// print out event
				System.out.format("%s: %s%n", event.kind().name(), child);

				if (callable != null)
					callable.get().run();

				// if directory is created, and watching recursively, then
				// register it and its sub-directories
				if (recursive && (kind == ENTRY_CREATE)) {
					try {
						if (Files.isDirectory(child, NOFOLLOW_LINKS)) {
							registerAll(child);
						}
					} catch (IOException x) {
						// ignore to keep sample readable
					}
				}
			}

			// reset key and remove from set if directory no longer accessible
			boolean valid = key.reset();
			if (!valid) {
				keys.remove(key);

				// all directories are inaccessible
				if (keys.isEmpty()) {
					break;
				}
			}
		}
	}

	static void usage() {
		System.err.println("usage: java WatchDir [-r] dir");
		System.exit(-1);
	}

	public static void main(String[] args) throws IOException {
		// parse arguments
		if (args.length == 0 || args.length > 2)
			usage();
		boolean recursive = false;
		int dirArg = 0;
		if ("-r".equals(args[0])) {
			if (args.length < 2)
				usage();
			recursive = true;
			dirArg++;
		}

		// register directory and process its events
		Path dir = Paths.get(args[dirArg]);
		new WatchDir(dir, recursive).processEvents(null);
	}
}
/*
 * Copyright 2015 Aberdeen Broadcast Services.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.abercap.odoo;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;

/**
 * Wraps the attribute dictionary Odoo returns for a single model field and
 * exposes typed accessors for the common properties (type, required, size, ...).
 */
public class Field {

	/** Odoo field datatypes this client understands. */
	public enum FieldType {
		INTEGER, CHAR, TEXT, BINARY, BOOLEAN, FLOAT, DATETIME, DATE, MANY2ONE, ONE2MANY, MANY2MANY, SELECTION
	}

	private final String name;
	// Raw property map as received from the server; keys are property names.
	private final HashMap<String, Object> odooFieldData;

	public Field(String fieldName, HashMap<String, Object> odooFieldData){
		this.odooFieldData = odooFieldData;
		this.name = fieldName;
	}

	/***
	 * Any property not covered by a get function can be fetched using this function
	 * @param propertyName Name of property to fetch, for example 'name'.
	 * @return The value associated with the property if any, or null.
	 */
	public Object getFieldProperty(String propertyName){
		Object value = null;

		if (odooFieldData.containsKey(propertyName))
			value = odooFieldData.get(propertyName);

		return value;
	}

	/**
	 * Gets field property values for every object state.
	 * @param propertyName Name of the per-state property to collect.
	 * @return An array of values for all states in the format [state, propvalue]
	 */
	public Object [][] getStateProperties(String propertyName){
		ArrayList<Object[]> stateValues = new ArrayList<Object[]>();

		@SuppressWarnings("unchecked")
		HashMap<String, Object> states = (HashMap<String, Object>) getFieldProperty("states");
		if (states != null){
			for (Object stateKey : states.keySet()){
				for (Object stateProperty : (Object[]) states.get(stateKey)){
					Object[] statePropertyArr = (Object[]) stateProperty;
					if (statePropertyArr[0].toString().equals(propertyName)){
						// BUGFIX: allocate a fresh pair for every match. The original
						// reused one Object[2] instance for all matches, so every row
						// in the returned array aliased the same object and ended up
						// holding only the last state/value written.
						Object[] stateValue = new Object[2];
						stateValue[0] = stateKey.toString();
						stateValue[1] = statePropertyArr[1];
						stateValues.add(stateValue);
					}
				}
			}
		}

		return stateValues.toArray(new Object[0][]);
	}

	/**
	 * Get the field name
	 * @return The field name
	 */
	public String getName() {
		return name;
	}

	/***
	 * Get the field description or label
	 * @return The field description or label
	 */
	public String getDescription() {
		return (String) getFieldProperty("string");
	}

	/**
	 * Get the datatype of the field. If you want the original Odoo type, use getFieldProperty("type")
	 * @return The datatype of the field, defaulting to CHAR for unknown or missing types
	 */
	public FieldType getType() {
		String fieldType = (String) getFieldProperty("type");
		if (fieldType == null)
			// Robustness: the original NPE'd when the server omitted "type";
			// treat a missing type like an unknown one.
			return FieldType.CHAR;

		switch (fieldType.toLowerCase(Locale.ROOT)) {
			case "char":      return FieldType.CHAR;
			case "text":      return FieldType.TEXT;
			case "integer":   return FieldType.INTEGER;
			case "binary":    return FieldType.BINARY;
			case "boolean":   return FieldType.BOOLEAN;
			case "float":     return FieldType.FLOAT;
			case "datetime":  return FieldType.DATETIME;
			case "date":      return FieldType.DATE;
			case "many2one":  return FieldType.MANY2ONE;
			case "one2many":  return FieldType.ONE2MANY;
			case "many2many": return FieldType.MANY2MANY;
			case "selection": return FieldType.SELECTION;
			default:          return FieldType.CHAR;
		}
	}

	// Shared lookup for the simple boolean field properties.
	private boolean getBooleanProperty(String propertyName, boolean defaultValue) {
		Object value = getFieldProperty(propertyName);
		if (value == null)
			return defaultValue;
		return (Boolean) value;
	}

	/**
	 * Get the required property
	 * @return The required property
	 */
	public boolean getRequired() {
		return getBooleanProperty("required", false);
	}

	/**
	 * Get the selectable property
	 * @return The selectable property
	 */
	public boolean getSelectable() {
		return getBooleanProperty("selectable", true);
	}

	/**
	 * If a field is a selection field, the list of selection options are returned.
	 * @return The list of selection options, or null if this is not a selection field
	 */
	public ArrayList<SelectionOption> getSelectionOptions(){
		if (this.getType() != FieldType.SELECTION)
			return null;

		ArrayList<SelectionOption> options = new ArrayList<SelectionOption>();
		Object values = getFieldProperty("selection");
		if (values instanceof Object[])
			for(Object val : (Object []) values){
				Object [] multiVal = (Object[]) val;
				options.add(new SelectionOption(multiVal[0].toString(), multiVal[1].toString()));
			}
		return options;
	}

	/**
	 * Get the size property
	 * @return The size property (Odoo's default of 64 when absent)
	 */
	public int getSize() {
		Object value = getFieldProperty("size");
		if (value == null)
			return 64;
		else
			return (Integer) value;
	}

	/**
	 * Get the help property
	 * @return The help property
	 */
	public String getHelp() {
		return (String) getFieldProperty("help");
	}

	/**
	 * Get the store property
	 * @return The store property
	 */
	public boolean getStore() {
		return getBooleanProperty("store", true);
	}

	/**
	 * Get the func_method property
	 * @return The func_method property
	 */
	public boolean getFunc_method() {
		return getBooleanProperty("func_method", false);
	}

	/**
	 * Get the relation property
	 * @return The relation property, or "" when absent
	 */
	public String getRelation() {
		Object value = getFieldProperty("relation");
		if (value == null)
			return "";
		return (String) value;
	}

	/**
	 * Get the readonly property
	 * @return The read only property
	 */
	public boolean getReadonly() {
		Object value = getFieldProperty("readonly");
		if (value == null)
			return false;
		else
			// The server sometimes reports readonly as 0/1 instead of a boolean.
			return (Boolean) (value instanceof Integer ? (Integer) value == 1 : value);
	}
}
/** * Copyright (C) 2006-2013 phloc systems * http://www.phloc.com * office[at]phloc[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.phloc.commons.microdom.serialize; import java.io.File; import java.io.OutputStream; import java.io.Writer; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.WillClose; import javax.annotation.concurrent.Immutable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.phloc.commons.CGlobal; import com.phloc.commons.annotations.PresentForCodeCoverage; import com.phloc.commons.io.file.FileUtils; import com.phloc.commons.io.streams.NonBlockingStringWriter; import com.phloc.commons.io.streams.StreamUtils; import com.phloc.commons.microdom.IMicroNode; import com.phloc.commons.state.ESuccess; import com.phloc.commons.stats.IStatisticsHandlerSize; import com.phloc.commons.stats.StatisticsManager; import com.phloc.commons.xml.serialize.IXMLSerializer; import com.phloc.commons.xml.serialize.IXMLWriterSettings; import com.phloc.commons.xml.serialize.XMLWriterSettings; /** * Utility class for serializing micro document objects. 
 * @author Philip Helger
 */
@Immutable
public final class MicroWriter
{
  private static final Logger s_aLogger = LoggerFactory.getLogger (MicroWriter.class);
  // Records the size of every successfully serialized document for statistics.
  private static final IStatisticsHandlerSize s_aSizeHdl = StatisticsManager.getSizeHandler (MicroWriter.class);

  @PresentForCodeCoverage
  @SuppressWarnings ("unused")
  private static final MicroWriter s_aInstance = new MicroWriter ();

  // Utility class - no instances allowed
  private MicroWriter ()
  {}

  /**
   * Write a Micro Node to a file using the default settings.
   *
   * @param aNode
   *        The node to be serialized. May be any kind of node (incl.
   *        documents). May not be <code>null</code>.
   * @param aFile
   *        The file to write to. May not be <code>null</code>.
   * @return {@link ESuccess}
   */
  @Nonnull
  public static ESuccess writeToFile (@Nonnull final IMicroNode aNode, @Nonnull final File aFile)
  {
    return writeToFile (aNode, aFile, XMLWriterSettings.DEFAULT_XML_SETTINGS);
  }

  /**
   * Write a Micro Node to a file.
   *
   * @param aNode
   *        The node to be serialized. May be any kind of node (incl.
   *        documents). May not be <code>null</code>.
   * @param aFile
   *        The file to write to. May not be <code>null</code>.
   * @param aSettings
   *        The settings to be used for the creation. May not be
   *        <code>null</code>.
   * @return {@link ESuccess} - FAILURE if the file could not be opened for
   *         writing.
   */
  @Nonnull
  public static ESuccess writeToFile (@Nonnull final IMicroNode aNode,
                                      @Nonnull final File aFile,
                                      @Nonnull final IXMLWriterSettings aSettings)
  {
    if (aFile == null)
      throw new NullPointerException ("file");

    final OutputStream aOS = FileUtils.getOutputStream (aFile);
    if (aOS == null)
      return ESuccess.FAILURE;

    // No need to wrap the OS in a BufferedOutputStream as inside, it is later
    // on wrapped in a BufferedWriter
    return writeToStream (aNode, aOS, aSettings);
  }

  /**
   * Write a Micro Node to an output stream using the default settings.
   *
   * @param aNode
   *        The node to be serialized. May be any kind of node (incl.
   *        documents). May not be <code>null</code>.
   * @param aOS
   *        The output stream to write to. May not be <code>null</code>. The
   *        output stream is closed anyway directly after the operation finishes
   *        (on success and on error).
   * @return {@link ESuccess}
   */
  @Nonnull
  public static ESuccess writeToStream (@Nonnull final IMicroNode aNode, @Nonnull @WillClose final OutputStream aOS)
  {
    return writeToStream (aNode, aOS, XMLWriterSettings.DEFAULT_XML_SETTINGS);
  }

  /**
   * Write a Micro Node to an {@link OutputStream}.
   *
   * @param aNode
   *        The node to be serialized. May be any kind of node (incl.
   *        documents). May not be <code>null</code>.
   * @param aOS
   *        The output stream to write to. May not be <code>null</code>. The
   *        output stream is closed anyway directly after the operation finishes
   *        (on success and on error).
   * @param aSettings
   *        The settings to be used for the creation. May not be
   *        <code>null</code>.
   * @return {@link ESuccess}
   */
  @Nonnull
  public static ESuccess writeToStream (@Nonnull final IMicroNode aNode,
                                        @Nonnull @WillClose final OutputStream aOS,
                                        @Nonnull final IXMLWriterSettings aSettings)
  {
    if (aNode == null)
      throw new NullPointerException ("node");
    if (aOS == null)
      throw new NullPointerException ("outputStream");
    if (aSettings == null)
      throw new NullPointerException ("settings");

    try
    {
      final IXMLSerializer <IMicroNode> aSerializer = new MicroSerializer (aSettings);
      aSerializer.write (aNode, aOS);
      return ESuccess.SUCCESS;
    }
    finally
    {
      // Close the stream in any case, as promised by the @WillClose contract
      StreamUtils.close (aOS);
    }
  }

  /**
   * Write a Micro Node to a {@link Writer}.
   *
   * @param aNode
   *        The node to be serialized. May be any kind of node (incl.
   *        documents). May not be <code>null</code>.
   * @param aWriter
   *        The writer to write to. May not be <code>null</code>. The writer is
   *        closed anyway directly after the operation finishes (on success and
   *        on error).
   * @param aSettings
   *        The settings to be used for the creation. May not be
   *        <code>null</code>.
   * @return {@link ESuccess}
   */
  @Nonnull
  public static ESuccess writeToWriter (@Nonnull final IMicroNode aNode,
                                        @Nonnull @WillClose final Writer aWriter,
                                        @Nonnull final IXMLWriterSettings aSettings)
  {
    if (aNode == null)
      throw new NullPointerException ("node");
    if (aWriter == null)
      throw new NullPointerException ("writer");
    if (aSettings == null)
      throw new NullPointerException ("settings");

    try
    {
      final IXMLSerializer <IMicroNode> aSerializer = new MicroSerializer (aSettings);
      aSerializer.write (aNode, aWriter);
      return ESuccess.SUCCESS;
    }
    finally
    {
      // Close the writer in any case, as promised by the @WillClose contract
      StreamUtils.close (aWriter);
    }
  }

  /**
   * Serialize the passed node to a String using the provided settings.
   *
   * @param aNode
   *        The node to be serialized. May be any kind of node (incl.
   *        documents). May not be <code>null</code>.
   * @param aSettings
   *        The settings to be used for the creation. May not be
   *        <code>null</code>.
   * @return The string representation of the passed node, or
   *         <code>null</code> if serialization failed (the error is logged).
   */
  @Nullable
  public static String getNodeAsString (@Nonnull final IMicroNode aNode, @Nonnull final IXMLWriterSettings aSettings)
  {
    if (aNode == null)
      throw new NullPointerException ("node");
    if (aSettings == null)
      throw new NullPointerException ("settings");

    NonBlockingStringWriter aWriter = null;
    try
    {
      // start serializing; pre-size the buffer to 50 KB to avoid repeated growth
      aWriter = new NonBlockingStringWriter (50 * CGlobal.BYTES_PER_KILOBYTE);
      if (writeToWriter (aNode, aWriter, aSettings).isSuccess ())
      {
        s_aSizeHdl.addSize (aWriter.size ());
        return aWriter.getAsString ();
      }
    }
    catch (final Throwable t)
    {
      // Deliberately broad: any serialization failure is logged and mapped to null
      s_aLogger.error ("Error serializing MicroDOM with settings " + aSettings.toString (), t);
    }
    finally
    {
      StreamUtils.close (aWriter);
    }
    return null;
  }

  /**
   * Convert the passed micro node to an XML string using
   * {@link XMLWriterSettings#DEFAULT_XML_SETTINGS}. This is a specialized
   * version of {@link #getNodeAsString(IMicroNode, IXMLWriterSettings)}.
   *
   * @param aNode
   *        The node to be converted to a string. May not be <code>null</code> .
   * @return The string representation of the passed node.
   */
  @Nullable
  public static String getXMLString (@Nonnull final IMicroNode aNode)
  {
    return getNodeAsString (aNode, XMLWriterSettings.DEFAULT_XML_SETTINGS);
  }
}
/** * Copyright 2011-2021 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.integration.distribution; import static com.asakusafw.integration.distribution.Util.*; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import com.asakusafw.integration.AsakusaConfigurator; import com.asakusafw.integration.AsakusaConstants; import com.asakusafw.integration.AsakusaProject; import com.asakusafw.integration.AsakusaProjectProvider; import com.asakusafw.utils.gradle.Bundle; import com.asakusafw.utils.gradle.ContentsConfigurator; /** * Test for Asakusa distribution. */ @RunWith(Parameterized.class) public class DistributionTest { /** * Return the test parameters. * @return the test parameters */ @Parameters(name = "use-hadoop:{0}") public static Object[][] getTestParameters() { return new Object[][] { { false }, { true }, }; } /** * project provider. 
 */
@Rule
public final AsakusaProjectProvider provider = new AsakusaProjectProvider()
        .withProject(ContentsConfigurator.copy(data("distribution")))
        .withProject(ContentsConfigurator.copy(data("ksv")))
        .withProject(ContentsConfigurator.copy(data("logback-test")))
        .withProject(AsakusaConfigurator.projectHome());

/**
 * Creates a new instance.
 * @param useHadoop whether or not the test uses hadoop command
 */
public DistributionTest(boolean useHadoop) {
    if (useHadoop) {
        // skip the whole test when the hadoop command is not available
        provider.withProject(AsakusaConfigurator.hadoop(AsakusaConfigurator.Action.SKIP_IF_UNDEFINED));
    } else {
        // force-remove hadoop from the environment so the non-hadoop path is exercised
        provider.withProject(AsakusaConfigurator.hadoop(AsakusaConfigurator.Action.UNSET_ALWAYS));
    }
}

/**
 * help.
 */
@Test
public void help() {
    AsakusaProject project = provider.newInstance("prj");
    project.gradle("help");
}

/**
 * version.
 */
@Test
public void version() {
    AsakusaProject project = provider.newInstance("prj");
    project.gradle("asakusaVersions");
}

/**
 * upgrade - verifies the Gradle wrapper is (re-)generated.
 */
@Test
public void upgrade() {
    AsakusaProject project = provider.newInstance("prj");
    project.gradle("asakusaUpgrade");
    Bundle contents = project.getContents();
    assertThat(contents.find("gradlew"), is(not(Optional.empty())));
    assertThat(contents.find("gradlew.bat"), is(not(Optional.empty())));
}

/**
 * {@code assemble} - verifies the distribution archive is produced.
 */
@Test
public void assemble() {
    AsakusaProject project = provider.newInstance("prj");
    project.gradle("assemble");
    Bundle contents = project.getContents();
    assertThat(contents.find("build/asakusafw-prj.tar.gz"), is(not(Optional.empty())));
}

/**
 * {@code installAsakusafw} - verifies all runtime engines are installed.
 */
@Test
public void installAsakusafw() {
    AsakusaProject project = provider.newInstance("prj");
    project.gradle("installAsakusafw");
    Bundle framework = project.getFramework();
    assertThat(framework.find("vanilla"), is(not(Optional.empty())));
    assertThat(framework.find("spark"), is(not(Optional.empty())));
    assertThat(framework.find("m3bp"), is(not(Optional.empty())));
}

/**
 * {@code test}.
 */
@Test
public void test() {
    AsakusaProject project = provider.newInstance("prj");
    project.gradle("installAsakusafw", "test");
}

/**
 * YAESS w/ vanilla.
 */
@Test
public void yaess_vanilla() {
    AsakusaProject project = provider.newInstance("prj");
    doYaess(project, "attachVanillaBatchapps", "vanilla.perf.average.sort");
}

/**
 * YAESS w/ spark.
 */
@Test
public void yaess_spark() {
    AsakusaProject project = provider.newInstance("prj")
            .with(AsakusaConfigurator.spark(AsakusaConfigurator.Action.SKIP_IF_UNDEFINED));
    doYaess(project, "attachSparkBatchapps", "spark.perf.average.sort");
}

/**
 * YAESS w/ m3bp.
 */
@Test
public void yaess_m3bp() {
    AsakusaProject project = provider.newInstance("prj");
    doYaess(project, "attachM3bpBatchapps", "m3bp.perf.average.sort");
}

/**
 * YAESS w/ mapreduce.
 */
@Test
public void yaess_mapreduce() {
    AsakusaProject project = provider.newInstance("prj");
    doYaess(project, "attachMapreduceBatchapps", "perf.average.sort");
}

/**
 * run w/ vanilla.
 */
@Test
public void workflow_vanilla() {
    AsakusaProject project = provider.newInstance("prj")
            .with(AsakusaConfigurator.hadoop(AsakusaConfigurator.Action.UNSET_IF_UNDEFINED));
    runWorkflow(project, "attachVanillaBatchapps", "vanilla.perf.average.sort");
}

/**
 * run w/ spark.
 */
@Test
public void workflow_spark() {
    AsakusaProject project = provider.newInstance("prj")
            .with(AsakusaConfigurator.spark(AsakusaConfigurator.Action.SKIP_IF_UNDEFINED));
    runWorkflow(project, "attachSparkBatchapps", "spark.perf.average.sort");
}

/**
 * run w/ m3bp.
 */
@Test
public void workflow_m3bp() {
    AsakusaProject project = provider.newInstance("prj")
            .with(AsakusaConfigurator.hadoop(AsakusaConfigurator.Action.UNSET_IF_UNDEFINED));
    runWorkflow(project, "attachM3bpBatchapps", "m3bp.perf.average.sort");
}

/**
 * run w/ mapreduce.
 */
@Test
public void workflow_mapreduce() {
    AsakusaProject project = provider.newInstance("prj")
            .with(AsakusaConfigurator.hadoop(AsakusaConfigurator.Action.SKIP_IF_UNDEFINED));
    runWorkflow(project, "attachMapreduceBatchapps", "perf.average.sort");
}

// Runs the batch via the YAESS command and verifies the CSV round-trips
// through the input -> batch -> output pipeline unchanged (order-insensitive).
private static void doYaess(AsakusaProject project, String taskName, String batchId) {
    project.gradle(taskName, "installAsakusafw");

    String[] csv = new String[] {
            "1,1.0,A",
            "2,2.0,B",
            "3,3.0,C",
    };
    project.getContents().put("var/data/input/file.csv", f -> {
        Files.write(f, Arrays.asList(csv), StandardCharsets.UTF_8);
    });

    project.getFramework().withLaunch(
            AsakusaConstants.CMD_YAESS, batchId,
            "-A", "input=input", "-A", "output=output");

    project.getContents().get("var/data/output", dir -> {
        List<String> results = Files.list(dir)
                .flatMap(Util::lines)
                .sorted()
                .collect(Collectors.toList());
        assertThat(results, containsInAnyOrder(csv));
    });
}

// Same round-trip check as doYaess, but launched through the portal
// ("run") command instead of YAESS.
private static void runWorkflow(AsakusaProject project, String taskName, String batchId) {
    project.gradle(taskName, "installAsakusafw");

    String[] csv = new String[] {
            "1,1.0,A",
            "2,2.0,B",
            "3,3.0,C",
    };
    project.getContents().put("var/data/input/file.csv", f -> {
        Files.write(f, Arrays.asList(csv), StandardCharsets.UTF_8);
    });

    project.getFramework().withLaunch(
            AsakusaConstants.CMD_PORTAL, "run", batchId,
            "-Ainput=input", "-Aoutput=output");

    project.getContents().get("var/data/output", dir -> {
        List<String> results = Files.list(dir)
                .flatMap(Util::lines)
                .sorted()
                .collect(Collectors.toList());
        assertThat(results, containsInAnyOrder(csv));
    });
}
}
/*
 * Copyright (c) 2017 Gowtham Parimalazhagan
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.gm.grecyclerview.decoration;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.widget.LinearLayout;

import com.gm.grecyclerview.SimpleAdapter;
import com.gm.grecyclerview.GmRecyclerView;

import java.util.List;

/**
 * RecyclerView item decoration that draws the platform {@code listDivider}
 * between items, for both linear and grid layout managers.  Supports
 * suppressing the trailing divider and skipping dividers for configured
 * cell types (see {@link GmRecyclerView#getNoDividerCellTypes()}).
 */
public class DividerItemDecoration extends RecyclerView.ItemDecoration {
    public static final int HORIZONTAL = LinearLayout.HORIZONTAL;
    public static final int VERTICAL = LinearLayout.VERTICAL;
    private static final int[] ATTRS = new int[]{android.R.attr.listDivider};
    private Drawable mDivider;
    private int mOrientation;
    // when false (default), the divider after the last row/column is not drawn
    private boolean isShowLastDivider;
    private final Rect mBounds = new Rect();
    // lazily fetched from the hosting GmRecyclerView; cell types listed here get no divider
    private List<String> noDividerCellTypes;
    // cached layout manager handles (resolved on first use)
    private RecyclerView.LayoutManager layoutManager;
    private LinearLayoutManager linearLayoutManager;
    private GridLayoutManager gridLayoutManager;

    /**
     * Creates the decoration using the theme's {@code android:listDivider} drawable.
     *
     * @param context     context used to resolve the divider drawable
     * @param orientation {@link #HORIZONTAL} or {@link #VERTICAL}
     */
    public DividerItemDecoration(Context context, int orientation) {
        final TypedArray a = context.obtainStyledAttributes(ATTRS);
        mDivider = a.getDrawable(0);
        a.recycle();
        setOrientation(orientation);
    }

    /**
     * Sets the list orientation.
     *
     * @param orientation {@link #HORIZONTAL} or {@link #VERTICAL}
     * @throws IllegalArgumentException for any other value
     */
    public void setOrientation(int orientation) {
        if (orientation != HORIZONTAL && orientation != VERTICAL) {
            throw new IllegalArgumentException(
                    "Invalid orientation. It should be either HORIZONTAL or VERTICAL");
        }
        mOrientation = orientation;
    }

    /** Whether to draw the divider after the final row/column as well. */
    public void setShowLastDivider(boolean isShowLastDivider) {
        this.isShowLastDivider = isShowLastDivider;
    }

    /**
     * Replaces the divider drawable.
     *
     * @param drawable the new divider; must not be null
     */
    public void setDrawable(@NonNull Drawable drawable) {
        if (drawable == null) {
            throw new IllegalArgumentException("Drawable cannot be null.");
        }
        mDivider = drawable;
    }

    @Override
    public void onDraw(Canvas c, RecyclerView parent, RecyclerView.State state) {
        if (parent.getLayoutManager() == null) {
            return;
        }
        // VERTICAL lists get horizontal divider lines below each item, and vice versa
        if (mOrientation == VERTICAL) {
            drawHorizontalDivider(c, parent);
        } else {
            drawVerticalDivider(c, parent);
        }
    }

    // Draws a horizontal line below each visible child (used for VERTICAL lists).
    @SuppressLint("NewApi")
    private void drawHorizontalDivider(Canvas canvas, RecyclerView parent) {
        canvas.save();
        final int left;
        final int right;
        if (parent.getClipToPadding()) {
            left = parent.getPaddingLeft();
            right = parent.getWidth() - parent.getPaddingRight();
            canvas.clipRect(left, parent.getPaddingTop(), right,
                    parent.getHeight() - parent.getPaddingBottom());
        } else {
            left = 0;
            right = parent.getWidth();
        }
        final int childCount = parent.getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = parent.getChildAt(i);
            if (ignoreDrawDividerForCellTypes(parent, i)) {
                continue;
            }
            if (isLastRow(parent, child) && !isShowLastDivider) {
                continue;
            }
            parent.getDecoratedBoundsWithMargins(child, mBounds);
            // divider occupies the bottom strip of the decorated bounds
            final int bottom = mBounds.bottom;
            final int top = bottom - mDivider.getIntrinsicHeight();
            mDivider.setBounds(left, top, right, bottom);
            mDivider.draw(canvas);
        }
        canvas.restore();
    }

    // Draws a vertical line to the right of each visible child (used for HORIZONTAL lists).
    @SuppressLint("NewApi")
    private void drawVerticalDivider(Canvas canvas, RecyclerView parent) {
        canvas.save();
        final int childCount = parent.getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = parent.getChildAt(i);
            if (ignoreDrawDividerForCellTypes(parent, i)) {
                continue;
            }
            if (isLastColumn(parent, child) && !isShowLastDivider) {
                continue;
            }
            // in grid mode the last column never gets a trailing divider
            if (isLastColumn(parent, child) && isGridMode(parent)) {
                continue;
            }
            parent.getLayoutManager().getDecoratedBoundsWithMargins(child, mBounds);
            final int right = mBounds.right;
            final int left = right - mDivider.getIntrinsicWidth();
            final int bottom = child.getBottom();
            final int top = child.getTop();
            mDivider.setBounds(left, top, right, bottom);
            mDivider.draw(canvas);
        }
        canvas.restore();
    }

    @Override
    public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
        // Reserve space for the divider except where it would be suppressed in onDraw.
        if (mOrientation == VERTICAL) {
            if (isLastRow(parent, view) && !isShowLastDivider) {
                outRect.set(0, 0, 0, 0);
            } else {
                outRect.set(0, 0, 0, mDivider.getIntrinsicHeight());
            }
        } else {
            if ((isLastColumn(parent, view) && isGridMode(parent))
                    || (isLastColumn(parent, view) && !isShowLastDivider)) {
                outRect.set(0, 0, 0, 0);
            } else {
                outRect.set(0, 0, mDivider.getIntrinsicWidth(), 0);
            }
        }
    }

    // Returns true when the cell at the given visible index is of a type for
    // which dividers are disabled.  Only applies in linear mode.
    // NOTE(review): assumes parent is a GmRecyclerView with a SimpleAdapter
    // and that getNoDividerCellTypes() never returns null - verify at call sites.
    private boolean ignoreDrawDividerForCellTypes(RecyclerView parent, int position) {
        if (noDividerCellTypes == null) {
            noDividerCellTypes = ((GmRecyclerView) parent).getNoDividerCellTypes();
        }
        if (noDividerCellTypes.size() <= 0) {
            return false;
        }
        if (isLinearMode(parent)) {
            // translate the visible child index to an adapter position
            int pos = getLinearLayoutManager().findFirstVisibleItemPosition();
            if (pos == -1) {
                return false;
            }
            String type = ((SimpleAdapter) parent.getAdapter()).getCell(pos + position).getClass().getSimpleName();
            return noDividerCellTypes.contains(type);
        }
        return false;
    }

    // True when the view sits in the last column (grid) or is the last item (linear).
    private boolean isLastColumn(RecyclerView parent, View view) {
        int position = parent.getChildAdapterPosition(view);
        int totalChildCount = parent.getAdapter().getItemCount();
        boolean isLastColumn = false;
        if (isGridMode(parent)) {
            int spanCount = getGridLayoutManager().getSpanCount();
            int spanIndex = getGridLayoutManager().getSpanSizeLookup().getSpanIndex(position, spanCount);
            int spanSize = getGridLayoutManager().getSpanSizeLookup().getSpanSize(position);
            // NOTE(review): this treats a cell whose span ends at the grid edge as
            // "last column"; confirm this holds for span sizes > 1.
            isLastColumn = spanIndex == spanCount - spanSize;
        } else if (isLinearMode(parent)) {
            isLastColumn = position == totalChildCount - 1;
        }
        return isLastColumn;
    }

    // True when the view sits in the last row (grid) or is the last item (linear).
    private boolean isLastRow(RecyclerView parent, View view) {
        int position = parent.getChildAdapterPosition(view);
        int totalChildCount = parent.getAdapter().getItemCount();
        boolean isLastRow = false;
        if (isGridMode(parent)) {
            int spanCount = getGridLayoutManager().getSpanCount();
            int spanIndex = getGridLayoutManager().getSpanSizeLookup().getSpanIndex(position, spanCount);
            int spanSize = getGridLayoutManager().getSpanSizeLookup().getSpanSize(position);
            int column = (spanIndex + spanSize) / spanSize - 1;
            // check if next row first item's index is the last index
            if (spanSize == 1) {
                isLastRow = position + spanCount - column > totalChildCount - 1;
            } else {
                // with multi-span cells, fewer columns fit per row
                int maxColumns = totalChildCount - position + column;
                int columns = spanCount / spanSize > maxColumns ? maxColumns : spanCount / spanSize;
                isLastRow = position + columns - column > totalChildCount - 1;
            }
        } else if (isLinearMode(parent)) {
            isLastRow = position == totalChildCount - 1;
        }
        return isLastRow;
    }

    private boolean isGridMode(RecyclerView parent) {
        if (layoutManager == null) {
            layoutManager = parent.getLayoutManager();
        }
        return layoutManager instanceof GridLayoutManager;
    }

    // NOTE: GridLayoutManager extends LinearLayoutManager, so this is only
    // reached for plain linear lists when isGridMode() was checked first.
    private boolean isLinearMode(RecyclerView parent) {
        if (layoutManager == null) {
            layoutManager = parent.getLayoutManager();
        }
        return layoutManager instanceof LinearLayoutManager;
    }

    public GridLayoutManager getGridLayoutManager() {
        if (gridLayoutManager == null) {
            gridLayoutManager = (GridLayoutManager) layoutManager;
        }
        return gridLayoutManager;
    }

    public LinearLayoutManager getLinearLayoutManager() {
        if (linearLayoutManager == null) {
            linearLayoutManager = (LinearLayoutManager) layoutManager;
        }
        return linearLayoutManager;
    }
}
/* * Copyright 2020 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.pmml.compiler.commons.utils; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; import com.github.javaparser.StaticJavaParser; import com.github.javaparser.ast.NodeList; import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; import com.github.javaparser.ast.body.MethodDeclaration; import com.github.javaparser.ast.body.Parameter; import com.github.javaparser.ast.body.VariableDeclarator; import com.github.javaparser.ast.expr.Expression; import com.github.javaparser.ast.expr.LambdaExpr; import com.github.javaparser.ast.expr.MethodCallExpr; import com.github.javaparser.ast.expr.MethodReferenceExpr; import com.github.javaparser.ast.expr.NameExpr; import com.github.javaparser.ast.expr.StringLiteralExpr; import com.github.javaparser.ast.expr.ThisExpr; import com.github.javaparser.ast.expr.VariableDeclarationExpr; import com.github.javaparser.ast.stmt.BlockStmt; import com.github.javaparser.ast.stmt.ExpressionStmt; import com.github.javaparser.ast.stmt.ReturnStmt; import com.github.javaparser.ast.type.ClassOrInterfaceType; import com.github.javaparser.ast.type.Type; import org.kie.pmml.commons.model.tuples.KiePMMLNameValue; import static com.github.javaparser.StaticJavaParser.parseClassOrInterfaceType; /** * Class 
meant to provide <i>helper</i> methods to all <i>code-generating</i> classes */ public class CommonCodegenUtils { public static String OPTIONAL_FILTERED_KIEPMMLNAMEVALUE_NAME ="kiePMMLNameValue"; static final String LAMBDA_PARAMETER_NAME = "lmbdParam"; static final String METHOD_NAME_TEMPLATE = "%s%s"; static final String PARAMETER_NAME_TEMPLATE = "param%s"; private CommonCodegenUtils() { // Avoid instantiation } /** * Populate the <code>ClassOrInterfaceDeclaration</code> with the provided <code>MethodDeclaration</code>s * * @param toPopulate * @param methodDeclarations */ public static void populateMethodDeclarations(final ClassOrInterfaceDeclaration toPopulate, final Collection<MethodDeclaration> methodDeclarations) { methodDeclarations.forEach(toPopulate::addMember); } /** * Returns * <pre> * Optional<KiePMMLNameValue> kiePMMLNameValue = (<i>kiePMMLNameValueListParam</i>) * .stream() * .filter((KiePMMLNameValue kpmmlnv) -> Objects.equals("(<i>fieldNameToRef</i>)", kpmmlnv.getName())) * .findFirst(); * </pre> * * expression, where <b>kiePMMLNameValueListParam</b> is the name of the * <code>List&lt;KiePMMLNameValue&gt;</code> parameter, and * <b>fieldNameToRef</b> is the name of the field to find, in the containing method * * @param kiePMMLNameValueListParam * @param fieldNameToRef * @param stringLiteralComparison if <code>true</code>, equals comparison is made on the String, e.g Objects.equals("(<i>fieldNameToRef</i>)", kpmmlnv.getName())), * otherwise, is done on object reference, e.g Objects.equals((<i>fieldNameToRef</i>), kpmmlnv.getName())). 
In this latter case, a <i>fieldNameToRef</i> variable is * expected to exists * * @return */ public static ExpressionStmt getFilteredKiePMMLNameValueExpression(final String kiePMMLNameValueListParam, final String fieldNameToRef, boolean stringLiteralComparison) { // kpmmlnv.getName() MethodCallExpr argumentBodyExpressionArgument2 = new MethodCallExpr("getName"); argumentBodyExpressionArgument2.setScope(new NameExpr(LAMBDA_PARAMETER_NAME)); // Objects.equals(fieldNameToRef, kpmmlnv.getName()) MethodCallExpr argumentBodyExpression = new MethodCallExpr("equals"); Expression equalsComparisonExpression; if (stringLiteralComparison) { equalsComparisonExpression = new StringLiteralExpr(fieldNameToRef); } else { equalsComparisonExpression = new NameExpr(fieldNameToRef); } argumentBodyExpression.setArguments(NodeList.nodeList(equalsComparisonExpression, argumentBodyExpressionArgument2)); argumentBodyExpression.setScope(new NameExpr(Objects.class.getName())); ExpressionStmt argumentBody = new ExpressionStmt(argumentBodyExpression); // (KiePMMLNameValue kpmmlnv) -> Objects.equals(fieldNameToRef, kpmmlnv.getName()) Parameter argumentParameter = new Parameter(parseClassOrInterfaceType(KiePMMLNameValue.class.getName()), LAMBDA_PARAMETER_NAME); LambdaExpr argument = new LambdaExpr(); argument.setEnclosingParameters(true).setParameters(NodeList.nodeList(argumentParameter)); // (KiePMMLNameValue kpmmlnv) -> argument.setBody(argumentBody); // Objects.equals(fieldNameToRef, kpmmlnv.getName()) // kiePMMLNameValueListParam.stream() MethodCallExpr initializerScopeScope = new MethodCallExpr("stream"); initializerScopeScope.setScope(new NameExpr(kiePMMLNameValueListParam)); // kiePMMLNameValueListParam.stream().filter((KiePMMLNameValue kpmmlnv) -> Objects.equals(fieldNameToRef, kpmmlnv.getName())) MethodCallExpr initializerScope = new MethodCallExpr("filter"); initializerScope.setScope(initializerScopeScope); initializerScope.setArguments(NodeList.nodeList(argument)); // 
kiePMMLNameValueListParam.stream().filter((KiePMMLNameValue kpmmlnv) -> Objects.equals(fieldNameToRef, kpmmlnv.getName())).findFirst() MethodCallExpr initializer = new MethodCallExpr( "findFirst"); initializer.setScope(initializerScope); // Optional<KiePMMLNameValue> kiePMMLNameValue VariableDeclarator variableDeclarator = new VariableDeclarator(getTypedClassOrInterfaceType(Optional.class.getName(), Collections.singletonList(KiePMMLNameValue.class.getName())), OPTIONAL_FILTERED_KIEPMMLNAMEVALUE_NAME); // Optional<KiePMMLNameValue> kiePMMLNameValue = kiePMMLNameValueListParam.stream().filter((KiePMMLNameValue kpmmlnv) -> Objects.equals(fieldNameToRef, kpmmlnv.getName())).findFirst() variableDeclarator.setInitializer(initializer); // VariableDeclarationExpr variableDeclarationExpr = new VariableDeclarationExpr(NodeList.nodeList(variableDeclarator)); ExpressionStmt toReturn = new ExpressionStmt(); toReturn.setExpression(variableDeclarationExpr); return toReturn; } /** * For every entry in the given map, add * <pre> * (<i>mapName</i>).put(<i>entry_key<i/>, this::<i>entry_value_ref</i>>); * </pre> * e.g. 
* <pre> * MAP_NAME.put("KEY_0", this::METHOD_015); * MAP_NAME.put("KEY_3", this::METHOD_33); * MAP_NAME.put("KEY_2", this::METHOD_219); * MAP_NAME.put("KEY_4", this::METHOD_46); * </pre> * inside the given <code>BlockStmt</code> * * @param toAdd * @param body * @param mapName */ public static void addMapPopulation(final Map<String, MethodDeclaration> toAdd, final BlockStmt body, final String mapName) { toAdd.forEach((s, methodDeclaration) -> { MethodReferenceExpr methodReferenceExpr = new MethodReferenceExpr(); methodReferenceExpr.setScope(new ThisExpr()); methodReferenceExpr.setIdentifier(methodDeclaration.getNameAsString()); NodeList<Expression> expressions = NodeList.nodeList(new StringLiteralExpr(s), methodReferenceExpr); body.addStatement(new MethodCallExpr(new NameExpr(mapName), "put", expressions)); }); } /** * Returns * <pre> * empty (<i>methodName</i>)((list of <i>parameterType</i> <i>parameter name</i>)) { * } * </pre> * * * a <b>multi-parameters</b> <code>MethodDeclaration</code> whose names are the <b>key</b>s of the given <code>Map</code> * and <b>methodArity</b>, and whose parameters types are the <b>value</b>s * * <b>The </b> * @param methodName * @param parameterNameTypeMap expecting an <b>ordered</b> map here, since parameters order matter for <i>caller</i> code * @return */ public static MethodDeclaration getMethodDeclaration(final String methodName, final Map<String, ClassOrInterfaceType> parameterNameTypeMap) { MethodDeclaration toReturn = getMethodDeclaration(methodName); NodeList<Parameter> typeParameters = new NodeList<>(); parameterNameTypeMap.forEach((parameterName, classOrInterfaceType) -> { Parameter toAdd = new Parameter(); toAdd.setName(parameterName); toAdd.setType(classOrInterfaceType); typeParameters.add(toAdd); }); toReturn.setParameters(typeParameters); return toReturn; } /** * Returns * <pre> * empty (<i>methodName</i>)() { * } * </pre> * * A <b>no-parameter</b> <code>MethodDeclaration</code> whose name is derived from given 
<b>methodName</b> * and <b>methodArity</b> * @param methodName * @return */ public static MethodDeclaration getMethodDeclaration(final String methodName) { MethodDeclaration toReturn = new MethodDeclaration(); toReturn.setName(methodName); return toReturn; } /** * Returns * <pre> * return (<i>returnedVariableName</i>); * </pre> * * e.g * <pre> * return varOne; * </pre> * @param returnedVariableName * @return */ public static ReturnStmt getReturnStmt(final String returnedVariableName) { ReturnStmt toReturn = new ReturnStmt(); toReturn.setExpression(new NameExpr(returnedVariableName)); return toReturn; } /** * Returns * <pre> * (<i>className</i>)<(<i>comma-separated list of types</i>)> * </pre> * * e.g * <pre> * CLASS_NAME<TypeA, TypeB> * </pre> * a <b>typed</b> <code>ClassOrInterfaceType</code> * @param className * @param typesName * @return */ public static ClassOrInterfaceType getTypedClassOrInterfaceType(final String className, final List<String> typesName ) { ClassOrInterfaceType toReturn = parseClassOrInterfaceType(className); List<Type> types = typesName.stream() .map(StaticJavaParser::parseClassOrInterfaceType).collect(Collectors.toList()); toReturn.setTypeArguments(NodeList.nodeList(types)); return toReturn; } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/security/privateca/v1beta1/service.proto package com.google.cloud.security.privateca.v1beta1; /** * * * <pre> * Request message for * [CertificateAuthorityService.ListReusableConfigs][google.cloud.security.privateca.v1beta1.CertificateAuthorityService.ListReusableConfigs]. * </pre> * * Protobuf type {@code google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest} */ public final class ListReusableConfigsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) ListReusableConfigsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListReusableConfigsRequest.newBuilder() to construct. 
  // Builder-based constructor, invoked from Builder.buildPartial().
  private ListReusableConfigsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: proto3 string fields default to "".
  private ListReusableConfigsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
    orderBy_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListReusableConfigsRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until end of
  // input (tag 0); fields it does not recognize are preserved in
  // unknownFields rather than dropped.
  private ListReusableConfigsRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // tag = (field_number << 3) | wire_type
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (parent), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();

              parent_ = s;
              break;
            }
          case 16: // field 2 (page_size), wire type 0 (varint)
            {
              pageSize_ = input.readInt32();
              break;
            }
          case 26: // field 3 (page_token), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();

              pageToken_ = s;
              break;
            }
          case 34: // field 4 (filter), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();

              filter_ = s;
              break;
            }
          case 42: // field 5 (order_by), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();

              orderBy_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even on a parse failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.security.privateca.v1beta1.PrivateCaProto
        .internal_static_google_cloud_security_privateca_v1beta1_ListReusableConfigsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.security.privateca.v1beta1.PrivateCaProto
        .internal_static_google_cloud_security_privateca_v1beta1_ListReusableConfigsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest.class,
            com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily to a String on
  // first access and cached (standard generated-code pattern).
  private volatile java.lang.Object parent_;
  /**
   * <pre>
   * Required. The resource name of the location associated with the
   * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig], in the format
   * `projects/&#42;&#47;locations/&#42;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 decode.
      parent_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the location associated with the
   * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig], in the format
   * `projects/&#42;&#47;locations/&#42;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString (mirror of getParent's caching).
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_;
  /**
   * <pre>
   * Optional. Limit on the number of
   * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] to include in the response.
   * Further [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] can subsequently be
   * obtained by including the
   * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token] in a subsequent request. If
   * unspecified, the server will pick an appropriate default.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  // String/ByteString dual representation, same lazy-decode pattern as parent_.
  private volatile java.lang.Object pageToken_;
  /**
   * <pre>
   * Optional. Pagination token, returned earlier via
   * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token].
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Optional. Pagination token, returned earlier via
   * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token].
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;
  // String/ByteString dual representation, same lazy-decode pattern as parent_.
  private volatile java.lang.Object filter_;
  /**
   * <pre>
   * Optional. Only include resources that match the filter in the response.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Optional. Only include resources that match the filter in the response.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ORDER_BY_FIELD_NUMBER = 5;
  // String/ByteString dual representation, same lazy-decode pattern as parent_.
  private volatile java.lang.Object orderBy_;
  /**
   * <pre>
   * Optional. Specify how the results should be sorted.
   * </pre>
   *
   * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The orderBy.
   */
  @java.lang.Override
  public java.lang.String getOrderBy() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      orderBy_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Optional. Specify how the results should be sorted.
   * </pre>
   *
   * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for orderBy.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOrderByBytes() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      orderBy_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes in field-number order; fields at their proto3 default
  // value ("" / 0) are omitted from the wire format.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
    }
    unknownFields.writeTo(output);
  }

  // Mirrors writeTo: sums only non-default fields, memoizes the result.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over all five fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest other =
        (com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getOrderBy().equals(other.getOrderBy())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  // Memoized hash consistent with equals (folds in every compared field).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [CertificateAuthorityService.ListReusableConfigs][google.cloud.security.privateca.v1beta1.CertificateAuthorityService.ListReusableConfigs]. * </pre> * * Protobuf type {@code google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.security.privateca.v1beta1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1beta1_ListReusableConfigsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.security.privateca.v1beta1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1beta1_ListReusableConfigsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest.class, com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest.Builder.class); } // Construct using // com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; orderBy_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.security.privateca.v1beta1.PrivateCaProto .internal_static_google_cloud_security_privateca_v1beta1_ListReusableConfigsRequest_descriptor; } @java.lang.Override public com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest getDefaultInstanceForType() { return com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest build() { com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest buildPartial() { com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest result = new com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest(this); result.parent_ = parent_; result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; result.filter_ = filter_; result.orderBy_ = orderBy_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public 
Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) { return mergeFrom( (com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest other) { if (other == com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; onChanged(); } if (!other.getOrderBy().isEmpty()) { orderBy_ = other.orderBy_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } 
finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the location associated with the * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig], in the format * `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the location associated with the * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig], in the format * `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the location associated with the * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig], in the format * `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the location associated with the * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig], in the format * `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the location associated with the * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig], in the format * `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Limit on the number of * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] to include in the response. * Further [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] can subsequently be * obtained by including the * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token] in a subsequent request. If * unspecified, the server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Limit on the number of * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] to include in the response. * Further [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] can subsequently be * obtained by including the * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token] in a subsequent request. If * unspecified, the server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * * * <pre> * Optional. Limit on the number of * [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] to include in the response. * Further [ReusableConfigs][google.cloud.security.privateca.v1beta1.ReusableConfig] can subsequently be * obtained by including the * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token] in a subsequent request. If * unspecified, the server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Pagination token, returned earlier via * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token]. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Pagination token, returned earlier via * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token]. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Pagination token, returned earlier via * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token]. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * * * <pre> * Optional. Pagination token, returned earlier via * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token]. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * * * <pre> * Optional. 
Pagination token, returned earlier via * [ListReusableConfigsResponse.next_page_token][google.cloud.security.privateca.v1beta1.ListReusableConfigsResponse.next_page_token]. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Only include resources that match the filter in the response. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Only include resources that match the filter in the response. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Only include resources that match the filter in the response. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. 
*/ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); return this; } /** * * * <pre> * Optional. Only include resources that match the filter in the response. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); onChanged(); return this; } /** * * * <pre> * Optional. Only include resources that match the filter in the response. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; onChanged(); return this; } private java.lang.Object orderBy_ = ""; /** * * * <pre> * Optional. Specify how the results should be sorted. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The orderBy. */ public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Specify how the results should be sorted. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for orderBy. */ public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. 
Specify how the results should be sorted. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The orderBy to set. * @return This builder for chaining. */ public Builder setOrderBy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } orderBy_ = value; onChanged(); return this; } /** * * * <pre> * Optional. Specify how the results should be sorted. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearOrderBy() { orderBy_ = getDefaultInstance().getOrderBy(); onChanged(); return this; } /** * * * <pre> * Optional. Specify how the results should be sorted. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for orderBy to set. * @return This builder for chaining. */ public Builder setOrderByBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); orderBy_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest) private static final com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest(); } public static com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest getDefaultInstance() 
{ return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListReusableConfigsRequest> PARSER = new com.google.protobuf.AbstractParser<ListReusableConfigsRequest>() { @java.lang.Override public ListReusableConfigsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListReusableConfigsRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListReusableConfigsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListReusableConfigsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.security.privateca.v1beta1.ListReusableConfigsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/** * (c) Copyright 2012 WibiData, Inc. * * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kiji.schema; import java.io.Closeable; import java.io.Flushable; import java.io.IOException; import java.util.Collections; import java.util.Map; import com.google.common.base.Objects; import org.apache.avro.Schema; import org.apache.avro.util.WeakIdentityHashMap; import org.apache.commons.lang.builder.HashCodeBuilder; import org.kiji.annotations.ApiAudience; import org.kiji.annotations.ApiStability; import org.kiji.annotations.Inheritance; import org.kiji.schema.avro.SchemaTableBackup; import org.kiji.schema.util.BytesKey; import org.kiji.schema.util.Hasher; /** * The Kiji schema table, which contains the lookup table between schema IDs, hashes, and full * schemas. * * @see KijiMetaTable * @see KijiSystemTable */ @ApiAudience.Framework @ApiStability.Stable @Inheritance.Sealed public interface KijiSchemaTable extends Flushable, Closeable { /** * Looks up a schema ID given an Avro schema object. * * If the schema is unknown, allocates a new ID and stores the new schema mapping. * * @param schema The full schema to store in the table. * @return The schema ID. * @throws IOException on I/O error. */ long getOrCreateSchemaId(Schema schema) throws IOException; /** * Looks up a schema hash given an Avro schema object. 
* * If the schema is unknown, allocates a new ID and stores the new schema mapping. * * @param schema Avro schema to look up. * @return The schema hash. * @throws IOException on I/O error. */ BytesKey getOrCreateSchemaHash(Schema schema) throws IOException; /** * Computes a schema hash. * * @param schema The Avro schema to hash. * @return The hash of the schema */ BytesKey getSchemaHash(Schema schema); /** * Looks up a schema given an ID. * * @param schemaId Schema ID to look up. * @return Avro schema, or null if the schema ID is unknown. * @throws IOException on I/O error. */ Schema getSchema(long schemaId) throws IOException; /** * Looks up a schema given a hash. * * @param schemaHash Schema hash to look up. * @return Avro schema, or null if the schema hash is unknown. * @throws IOException on I/O error. */ Schema getSchema(BytesKey schemaHash) throws IOException; /** * Looks up a schema entry from a given ID. * * @param schemaId Schema ID to look up. * @return SchemaEntry matching the given ID or null if the ID is unknown. * @throws IOException on I/O error. */ SchemaEntry getSchemaEntry(long schemaId) throws IOException; /** * Looks up a schema entry from a given hash. * * @param schemaHash Schema hash to look up. * @return SchemaEntry matching the given hash or null if the hash is unknown. * @throws IOException on I/O error. */ SchemaEntry getSchemaEntry(BytesKey schemaHash) throws IOException; /** * Looks up a schema entry from a given Schema definition. * * @param schema Schema definition to look up. * @return SchemaEntry matching the given Schema or null if the schema is unknown. * @throws IOException on I/O error. */ SchemaEntry getSchemaEntry(Schema schema) throws IOException; /** Association between a schema and its ID. */ public static class SchemaEntry { private final long mId; private final BytesKey mHash; private final Schema mSchema; /** * Creates a new schema entry. 
* * @param id the schema ID * @param hash the schema hash * @param schema the Avro schema object */ @ApiAudience.Private public SchemaEntry(long id, BytesKey hash, Schema schema) { this.mId = id; this.mHash = hash; this.mSchema = schema; } /** @return the schema ID */ public long getId() { return this.mId; } /** @return the schema hash */ public BytesKey getHash() { return this.mHash; } /** @return the Avro schema object */ public Schema getSchema() { return this.mSchema; } /** {@inheritDoc} */ @Override public String toString() { return Objects.toStringHelper(SchemaEntry.class) .add("id", this.mId) .add("hash", this.mHash) .add("schema", this.mSchema) .toString(); } /** {@inheritDoc} */ @Override public boolean equals(Object other) { if (!(other instanceof SchemaEntry)) { return false; } final SchemaEntry entry = (SchemaEntry) other; return (this.mId == entry.mId) && (this.mHash.equals(entry.mHash)) && (this.mSchema.equals(entry.mSchema)); } /** {@inheritDoc} */ @Override public int hashCode() { return new HashCodeBuilder() .append(mId) .append(mHash) .append(mSchema) .toHashCode(); } } /** * Cache providing an efficient mapping from Avro schema object to the schema hash. * * Computing the hash code of a schema is expensive as it serializes the Avro schema object * into JSON. */ @ApiAudience.Private static final class SchemaHashCache { /** * Underlying cache is a weak identity hash map: * <li> We must use object IDs since Schema.hashCode() and Schema.equals() implement a * comparison that ignores doc fields or default values. * <li> We must use a weak map to ensure the cache gets garbage collected properly. */ private final Map<Schema, BytesKey> mCache = Collections.synchronizedMap(new WeakIdentityHashMap<Schema, BytesKey>()); /** * Computes a hash of the specified Avro schema. * * Kiji currently uses MD5 sums (128 bits) of the schema JSON representation. * * @param schema Avro schema to compute a hash of. * @return Hash code as an array of bytes (16 bytes). 
*/ public static byte[] hashSchema(Schema schema) { return Hasher.hash(schema.toString()); } /** * Hashes an Avro schema. * * @param schema Avro schema to hash. * @return the schema hash. */ public BytesKey getHash(Schema schema) { final BytesKey hash = mCache.get(schema); if (null != hash) { return hash; } final BytesKey newHash = new BytesKey(hashSchema(schema)); mCache.put(schema, newHash); return newHash; } } /** * Returns schema backup information in a form that can be directly written to a MetadataBackup * record. To read more about the avro type that has been specified to store this info, see * Layout.avdl * * @throws IOException on I/O error. * @return A list of schema table entries. */ SchemaTableBackup toBackup() throws IOException; /** * Restores the schema entries from the specified backup record. * * @param backup The schema entries from a MetadataBackup record. This consist of the schema * definition, schema id, and schema hash. * @throws IOException on I/O error. */ void fromBackup(SchemaTableBackup backup) throws IOException; }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.jdbc;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.concurrent.Callable;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.ConnectorConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Connection test.
 *
 * Note: all successfully opened connections are closed via try-with-resources
 * so tests do not leak client connections to the grid.
 */
public class JdbcConnectionSelfTest extends GridCommonAbstractTest {
    /** IP finder. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Custom cache name. */
    private static final String CUSTOM_CACHE_NAME = "custom-cache";

    /** Custom REST TCP port. */
    private static final int CUSTOM_PORT = 11212;

    /** URL prefix. */
    private static final String URL_PREFIX = "jdbc:ignite://";

    /** Host. */
    private static final String HOST = "127.0.0.1";

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setCacheConfiguration(cacheConfiguration(DEFAULT_CACHE_NAME),
            cacheConfiguration(CUSTOM_CACHE_NAME));

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(IP_FINDER);

        cfg.setDiscoverySpi(disco);

        assert cfg.getConnectorConfiguration() == null;

        ConnectorConfiguration clientCfg = new ConnectorConfiguration();

        // Only the second node listens on the custom port.
        if (!igniteInstanceName.endsWith("0"))
            clientCfg.setPort(CUSTOM_PORT);

        cfg.setConnectorConfiguration(clientCfg);

        return cfg;
    }

    /**
     * @param name Cache name.
     * @return Cache configuration.
     * @throws Exception In case of error.
     */
    private CacheConfiguration cacheConfiguration(@NotNull String name) throws Exception {
        CacheConfiguration cfg = defaultCacheConfiguration();

        cfg.setName(name);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGridsMultiThreaded(2);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /**
     * Checks that a connection can be established with and without a trailing slash.
     *
     * @throws Exception If failed.
     */
    public void testDefaults() throws Exception {
        String url = URL_PREFIX + HOST;

        // Was: connections were obtained and never closed (resource leak).
        try (Connection conn = DriverManager.getConnection(url)) {
            assert conn != null;
        }

        try (Connection conn = DriverManager.getConnection(url + "/")) {
            assert conn != null;
        }
    }

    /**
     * Checks connecting to a specific node by its ID, with default and custom caches.
     *
     * @throws Exception If failed.
     */
    public void testNodeId() throws Exception {
        String url = URL_PREFIX + HOST + "/?nodeId=" + grid(0).localNode().id();

        try (Connection conn = DriverManager.getConnection(url)) {
            assert conn != null;
        }

        url = URL_PREFIX + HOST + "/" + CUSTOM_CACHE_NAME + "?nodeId=" + grid(0).localNode().id();

        try (Connection conn = DriverManager.getConnection(url)) {
            assert conn != null;
        }
    }

    /**
     * Checks connecting to a non-default cache.
     *
     * @throws Exception If failed.
     */
    public void testCustomCache() throws Exception {
        String url = URL_PREFIX + HOST + "/" + CUSTOM_CACHE_NAME;

        try (Connection conn = DriverManager.getConnection(url)) {
            assert conn != null;
        }
    }

    /**
     * Checks connecting on a non-default connector port.
     *
     * @throws Exception If failed.
     */
    public void testCustomPort() throws Exception {
        String url = URL_PREFIX + HOST + ":" + CUSTOM_PORT;

        try (Connection conn = DriverManager.getConnection(url)) {
            assert conn != null;
        }

        try (Connection conn = DriverManager.getConnection(url + "/")) {
            assert conn != null;
        }
    }

    /**
     * Checks connecting to a non-default cache on a non-default port.
     *
     * @throws Exception If failed.
     */
    public void testCustomCacheNameAndPort() throws Exception {
        String url = URL_PREFIX + HOST + ":" + CUSTOM_PORT + "/" + CUSTOM_CACHE_NAME;

        try (Connection conn = DriverManager.getConnection(url)) {
            assert conn != null;
        }
    }

    /**
     * Checks that connecting to an unknown cache fails with a descriptive error.
     *
     * @throws Exception If failed.
     */
    public void testWrongCache() throws Exception {
        final String url = URL_PREFIX + HOST + "/wrongCacheName";

        GridTestUtils.assertThrows(
            log,
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    DriverManager.getConnection(url);

                    return null;
                }
            },
            SQLException.class,
            "Client is invalid. Probably cache name is wrong."
        );
    }

    /**
     * Checks that connecting to a port nobody listens on fails.
     *
     * @throws Exception If failed.
     */
    public void testWrongPort() throws Exception {
        final String url = URL_PREFIX + HOST + ":33333";

        GridTestUtils.assertThrows(
            log,
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    DriverManager.getConnection(url);

                    return null;
                }
            },
            SQLException.class,
            "Failed to establish connection."
        );
    }

    /**
     * Checks that a closed connection reports itself closed and rejects further calls.
     *
     * @throws Exception If failed.
     */
    public void testClose() throws Exception {
        String url = URL_PREFIX + HOST;

        final Connection conn = DriverManager.getConnection(url);

        assert conn != null;
        assert !conn.isClosed();

        conn.close();

        assert conn.isClosed();

        GridTestUtils.assertThrows(
            log,
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    conn.isValid(2);

                    return null;
                }
            },
            SQLException.class,
            "Connection is closed."
        );
    }
}
/*
 * Copyright 2013-2015 Aleksandr Mashchenko.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amashchenko.struts2.actionflow;

import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import com.amashchenko.struts2.actionflow.entities.ActionFlowStepConfig;
import com.amashchenko.struts2.actionflow.entities.ActionFlowStepsData;
import com.opensymphony.xwork2.Action;
import com.opensymphony.xwork2.ActionInvocation;
import com.opensymphony.xwork2.inject.Inject;
import com.opensymphony.xwork2.interceptor.AbstractInterceptor;
import com.opensymphony.xwork2.interceptor.PreResultListener;
import com.opensymphony.xwork2.util.logging.Logger;
import com.opensymphony.xwork2.util.logging.LoggerFactory;

/**
 * <!-- START SNIPPET: description -->
 * <p/>
 * An interceptor that controls flow actions.
 * <p/>
 * <!-- END SNIPPET: description -->
 * <p/>
 * <p/>
 * <u>Interceptor parameters:</u>
 * <p/>
 * <!-- START SNIPPET: parameters -->
 * <p/>
 * <ul>
 * <p/>
 * <li>nextActionName (optional) - Name of the 'next' action. If none is
 * specified the default name <code>next</code> will be used.</li>
 * <p/>
 * <li>prevActionName (optional) - Name of the 'previous' action. If none is
 * specified the default name <code>prev</code> will be used.</li>
 * <p/>
 * <li>forceFlowStepsOrder (optional) - To force the order of flow action
 * executions. The default is <code>true</code>.</li>
 * <p/>
 * <li>viewActionPostfix (optional) - String to append to generated view action
 * name. The default is <code>View</code>.</li>
 * <p/>
 * <li>viewActionMethod (optional) - Action method to execute in generated view
 * actions. The default is <code>execute</code>.</li>
 * <p/>
 * <li>stepParameterName (optional) - Name of the form parameter holding
 * previous action value. The default is <code>step</code>.</li>
 * <p/>
 * </ul>
 * <p/>
 * <p/>
 * <!-- END SNIPPET: parameters -->
 * <p/>
 * <p/>
 * <p/>
 * <u>Extending the interceptor:</u>
 * <p/>
 * <p/>
 * <!-- START SNIPPET: extending -->
 * <p/>
 * There are no known extensions points for this interceptor.
 * <p/>
 * <!-- END SNIPPET: extending -->
 * <p/>
 * <u>Example code:</u>
 *
 * <pre>
 * <!-- START SNIPPET: example-configuration -->
 * &lt;action name="saveName" method="saveName" class="com.example.FlowAction"&gt;
 *     &lt;param name="actionFlowStep"&gt;1&lt;/param&gt;
 *
 *     &lt;result name="input"&gt;input_result.jsp&lt;/result&gt;
 *     &lt;result name="success"&gt;success_result.jsp&lt;/result&gt;
 * &lt;/action&gt;
 * <!-- END SNIPPET: example-configuration -->
 * </pre>
 * <p/>
 * You must use {@link #nextActionName} and {@link #prevActionName} in the form.
 * <p/>
 *
 * <pre>
 * <!-- START SNIPPET: example-form -->
 * &lt;s:form action="next"&gt;
 *     &lt;s:hidden name="step" value="%{#session['actionFlowPreviousAction']}" /&gt;
 *
 *     &lt;s:textfield name="name" label="Name" /&gt;
 *     &lt;s:submit value="previous" action="prev" /&gt;
 *     &lt;s:submit value="next" action="next" /&gt;
 * &lt;/s:form&gt;
 * <!-- END SNIPPET: example-form -->
 * </pre>
 * <p/>
 *
 * @author Aleksandr Mashchenko
 *
 */
public class ActionFlowInterceptor extends AbstractInterceptor {

    /** Serial version uid. */
    private static final long serialVersionUID = -8931708101962468929L;

    /** Logger. */
    public static final Logger LOG = LoggerFactory
            .getLogger(ActionFlowInterceptor.class);

    /** Key for holding in session the name of the previous flow action. */
    private static final String PREVIOUS_FLOW_ACTION = "actionFlowPreviousAction";

    /** Key for holding in session current highest action index. */
    private static final String HIGHEST_CURRENT_ACTION_INDEX = "actionFlowHighestCurrentActionIndex";

    /** Key for holding in session map of skip actions. */
    private static final String SKIP_ACTIONS = "actionFlowSkipActionsMap";

    /** Key for holding in session overridden action name. */
    private static final String OVERRIDE_ACTION_NAME = "actionFlowOverrideActionName";

    /** Default next action name. */
    private static final String DEFAULT_NEXT_ACTION_NAME = "next";

    /** Default previous action name. */
    private static final String DEFAULT_PREV_ACTION_NAME = "prev";

    /** Name of the first flow action. */
    protected static final String FIRST_FLOW_ACTION_NAME = "firstFlowAction";

    /** Result name returned by this interceptor to trigger the global view result. */
    protected static final String GLOBAL_VIEW_RESULT = "actionFlowViewResult";

    /** Default postfix for view actions. */
    private static final String DEFAULT_VIEW_ACTION_POSTFIX = "View";

    /** Default method executed by generated view actions. */
    private static final String DEFAULT_VIEW_ACTION_METHOD = "execute";

    /** Default name of the form parameter carrying the previous step. */
    private static final String DEFAULT_STEP_PARAM_NAME = "step";

    /** Value-stack key that receives the name of the next flow action. */
    protected static final String NEXT_ACTION_PARAM = "nextAction";

    /** Value-stack key that receives the name of the previous view action. */
    protected static final String PREV_ACTION_PARAM = "prevAction";

    /** Value-stack key that receives the name of the view action to render. */
    protected static final String VIEW_ACTION_PARAM = "viewAction";

    /** Key for holding in session the action flow scope map. */
    private static final String FLOW_SCOPE_KEY = "actionFlowScope";

    /** Flow-scope property descriptors keyed by action class name. */
    private Map<String, List<PropertyDescriptor>> flowScopeFields;

    /** Previous not special nor flow action. */
    // NOTE(review): interceptor instances are typically shared between
    // requests; this mutable state is not synchronized — verify thread-safety.
    private String prevSimpleAction;

    /** Action before first next. */
    // NOTE(review): same thread-safety concern as prevSimpleAction above.
    private String startAction;

    // interceptor parameters
    private String nextActionName = DEFAULT_NEXT_ACTION_NAME;
    private String prevActionName = DEFAULT_PREV_ACTION_NAME;
    private boolean forceFlowStepsOrder = true;
    private String viewActionPostfix = DEFAULT_VIEW_ACTION_POSTFIX;
    private String viewActionMethod = DEFAULT_VIEW_ACTION_METHOD;
    private String stepParameterName = DEFAULT_STEP_PARAM_NAME;

    /** Holds action flow. */
    private Map<String, ActionFlowStepConfig> flowMap;

    /** Holds action flow steps data. */
    private TreeMap<Integer, String> steps;

    /** Action flow configuration builder. */
    @Inject
    private ActionFlowConfigBuilder flowConfigBuilder;

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override
    public String intercept(ActionInvocation invocation) throws Exception {
        final String actionName = invocation.getInvocationContext().getName();

        // initialize action flow configuration (lazily, on first invocation)
        if (flowMap == null) {
            initFlowConfiguration(invocation.getProxy().getConfig()
                    .getPackageName());
        }

        Integer stepCount = 1;

        final boolean flowAction;
        final boolean lastFlowAction;
        if (flowMap.containsKey(actionName)) {
            flowAction = true;
            // this is needed when input result is returned
            stepCount = flowMap.get(actionName).getIndex();
            lastFlowAction = flowMap.get(actionName).getNextAction() == null;
        } else {
            flowAction = false;
            lastFlowAction = false;
        }

        boolean flowViewAction = false;
        // action name w/o view postfix
        String plainActionName = null;
        if (actionName.endsWith(viewActionPostfix)) {
            plainActionName = actionName.substring(0,
                    actionName.indexOf(viewActionPostfix));
            if (flowMap.containsKey(plainActionName)) {
                flowViewAction = true;
                stepCount = flowMap.get(plainActionName).getIndex();
            }
        }

        Map<String, Object> session = invocation.getInvocationContext()
                .getSession();

        // start: re-entering the action seen before the first 'next' resets the flow
        if (startAction != null && startAction.equals(actionName)) {
            clearSession(session);
        }

        // action flow steps aware
        if (invocation.getAction() instanceof ActionFlowStepsAware) {
            ((ActionFlowStepsAware) invocation.getAction())
                    .setActionFlowSteps(new ActionFlowStepsData(steps,
                            stepCount));
        }

        // scope: push flow-scope values from session into the action
        if (flowAction || flowViewAction) {
            handleFlowScope(invocation.getAction(), session, true);
        }

        // not a flow nor next nor previous action, just invoke
        if (!flowAction && !prevActionName.equals(actionName)
                && !nextActionName.equals(actionName)) {
            prevSimpleAction = actionName;
            return invocation.invoke();
        }

        String previousFlowAction = (String) session.get(PREVIOUS_FLOW_ACTION);

        if (previousFlowAction == null) {
            previousFlowAction = FIRST_FLOW_ACTION_NAME;
        }

        // handling of back/forward buttons: the form's step parameter wins
        // over the session value when they disagree
        Object[] stepParam = (Object[]) invocation.getInvocationContext()
                .getParameters().get(stepParameterName);
        boolean overriddenWithStep = false;
        if (stepParam != null && stepParam.length > 0) {
            String step = "" + stepParam[0];

            if (step.isEmpty()) {
                step = FIRST_FLOW_ACTION_NAME;
            }

            if (step != null && !step.equals(previousFlowAction)
                    && flowMap.containsKey(step)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("The 'previousFlowAction' value from session is '"
                            + previousFlowAction
                            + "', but '"
                            + stepParameterName
                            + "' parameter value is '"
                            + step
                            + "' The '"
                            + stepParameterName
                            + "' parameter value will be used for 'previousFlowAction'.");
                }

                previousFlowAction = step;
                overriddenWithStep = true;
            }
        }

        String nextAction = null;
        String prevAction = null;

        if (flowMap.containsKey(previousFlowAction)) {
            nextAction = flowMap.get(previousFlowAction).getNextAction();
            prevAction = flowMap.get(previousFlowAction).getPrevAction();
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug(actionName + "-> previousFlowAction: "
                    + previousFlowAction + ", nextAction: " + nextAction
                    + ", prevAction: " + prevAction);
        }

        final Integer highestCurrentIndex;
        if (session.containsKey(HIGHEST_CURRENT_ACTION_INDEX)
                && session.get(HIGHEST_CURRENT_ACTION_INDEX) != null) {
            highestCurrentIndex = (Integer) session
                    .get(HIGHEST_CURRENT_ACTION_INDEX);
        } else {
            highestCurrentIndex = 0;
        }

        // force order of flow actions: reject a step past the highest reached
        // one and redirect to the appropriate view action instead
        if (forceFlowStepsOrder && flowAction
                && (highestCurrentIndex.intValue() + 1) < flowMap.get(
                        actionName).getIndex()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("The forceFlowStepsOrder parameter is set to true. The '"
                        + actionName
                        + "' action will not be executed because it is called in the wrong order.");
            }

            if (overriddenWithStep) {
                invocation
                        .getInvocationContext()
                        .getValueStack()
                        .set(VIEW_ACTION_PARAM,
                                previousFlowAction + viewActionPostfix);
            } else {
                invocation.getInvocationContext().getValueStack()
                        .set(VIEW_ACTION_PARAM, nextAction + viewActionPostfix);
            }
            return GLOBAL_VIEW_RESULT;
        }

        Map<String, String> skipMap = null;
        if (session.containsKey(SKIP_ACTIONS)
                && session.get(SKIP_ACTIONS) instanceof Map) {
            skipMap = (Map<String, String>) session.get(SKIP_ACTIONS);
        }

        if (nextActionName.equals(actionName)) {
            // set start action
            if (startAction == null) {
                startAction = prevSimpleAction;
            }

            invocation.getInvocationContext().getValueStack()
                    .set(NEXT_ACTION_PARAM, nextAction);
        } else if (prevActionName.equals(actionName)) {
            String prevView = null;
            if (FIRST_FLOW_ACTION_NAME.equals(previousFlowAction)) {
                prevView = nextAction;
            } else {
                if (skipMap != null && skipMap.containsKey(nextAction)) {
                    prevView = skipMap.get(nextAction);
                    // override prevAction
                    prevAction = flowMap.get(prevView).getPrevAction();
                } else {
                    prevView = previousFlowAction;
                }
            }
            invocation.getInvocationContext().getValueStack()
                    .set(PREV_ACTION_PARAM, prevView + viewActionPostfix);
            session.put(PREVIOUS_FLOW_ACTION, prevAction);
        }

        // add pre-result listener on correct flow action
        // NOTE(review): nextAction may still be null here when
        // previousFlowAction was not found in flowMap — nextAction.equals(...)
        // would then NPE; confirm the flow guarantees it is non-null for flow
        // actions (consider actionName.equals(nextAction) instead).
        if (flowAction && nextAction.equals(actionName)) {
            invocation.addPreResultListener(new PreResultListener() {
                public void beforeResult(ActionInvocation invocation,
                        String resultCode) {
                    if (Action.SUCCESS.equals(resultCode)) {
                        Map<String, Object> session = invocation
                                .getInvocationContext().getSession();

                        // action flow aware
                        String nextFromAction = null;
                        if (invocation.getAction() instanceof ActionFlowAware) {
                            nextFromAction = ((ActionFlowAware) invocation
                                    .getAction())
                                    .nextActionFlowAction(actionName);
                            // if null just ignore otherwise check if returned
                            // action is a flow action
                            if (nextFromAction != null
                                    && !flowMap.containsKey(nextFromAction)) {
                                nextFromAction = null;
                            }
                        }

                        Map<String, String> skipMap = null;
                        if (session.containsKey(SKIP_ACTIONS)
                                && session.get(SKIP_ACTIONS) instanceof Map) {
                            skipMap = (Map<String, String>) session
                                    .get(SKIP_ACTIONS);
                        } else {
                            skipMap = new HashMap<String, String>();
                        }

                        if (nextFromAction != null) {
                            skipMap.put(nextFromAction, actionName);

                            String overriddenName = flowMap.get(nextFromAction)
                                    .getPrevAction();
                            // override actionName
                            session.put(OVERRIDE_ACTION_NAME, overriddenName);
                        } else if (!lastFlowAction) {
                            // get next action if it isn't the last flow action
                            // and not overridden from ActionFlowAware
                            nextFromAction = flowMap.get(actionName)
                                    .getNextAction();
                            skipMap.remove(nextFromAction);
                        }
                        session.put(SKIP_ACTIONS, skipMap);

                        // execute global view result on not last flow action or
                        // if next action is overridden from ActionFlowAware
                        if (!lastFlowAction || nextFromAction != null) {
                            invocation
                                    .getInvocationContext()
                                    .getValueStack()
                                    .set(VIEW_ACTION_PARAM,
                                            nextFromAction + viewActionPostfix);
                            invocation.setResultCode(GLOBAL_VIEW_RESULT);
                        }
                    }
                }
            });
        }

        final String result = invocation.invoke();

        // scope: pull flow-scope values from the action back into the session
        if (flowAction) {
            handleFlowScope(invocation.getAction(), session, false);
        }

        if (GLOBAL_VIEW_RESULT.equals(result) && flowAction) {
            // check if action name is overridden in ActionFlowAware
            // remove override action name from session
            String actionNameOverridden = (String) session
                    .remove(OVERRIDE_ACTION_NAME);
            if (actionNameOverridden == null) {
                actionNameOverridden = actionName;
            }
            session.put(PREVIOUS_FLOW_ACTION, actionNameOverridden);

            // set highest current action index on a view result
            if (flowMap.get(actionNameOverridden).getIndex() > highestCurrentIndex) {
                session.put(HIGHEST_CURRENT_ACTION_INDEX,
                        flowMap.get(actionNameOverridden).getIndex());
            }
        }

        // last flow action
        if (Action.SUCCESS.equals(result) && flowAction && lastFlowAction) {
            clearSession(session);
        }

        return result;
    }

    /**
     * Lazily builds the action flow configuration for the given package: the
     * flow action map, the flow scope fields and the step index data.
     *
     * @param packageName
     *            name of the package holding the flow actions.
     */
    private void initFlowConfiguration(final String packageName) {
        flowMap = flowConfigBuilder.createFlowMap(packageName, nextActionName,
                prevActionName, viewActionPostfix, viewActionMethod);

        flowScopeFields = flowConfigBuilder.createFlowScopeFields(packageName);

        // create action flow steps data
        if (flowMap != null) {
            steps = new TreeMap<Integer, String>();
            for (ActionFlowStepConfig cfg : flowMap.values()) {
                if (cfg.getIndex() < flowMap.size() - 1) {
                    steps.put(cfg.getIndex() + 1, cfg.getNextAction());
                }
            }
        }
    }

    /**
     * Resets all action-flow related session keys by setting them to
     * <code>null</code>.
     *
     * @param session
     *            session map.
     */
    void clearSession(final Map<String, Object> session) {
        session.put(PREVIOUS_FLOW_ACTION, null);
        session.put(FLOW_SCOPE_KEY, null);
        session.put(HIGHEST_CURRENT_ACTION_INDEX, null);
        session.put(SKIP_ACTIONS, null);
        session.put(OVERRIDE_ACTION_NAME, null);
    }

    /**
     * Handles action flow scope fields.
     *
     * @param action
     *            action object.
     * @param session
     *            session map.
     * @param fromFlowScope
     *            whether to store value into the session or retrieve it. On
     *            <code>true</code> sets value from session into the action
     *            field, on <code>false</code> puts value from action field to
     *            session.
     */
    @SuppressWarnings("unchecked")
    void handleFlowScope(final Object action,
            final Map<String, Object> session, final boolean fromFlowScope) {
        if (action != null && flowScopeFields != null && session != null) {
            final String actionClassName = action.getClass().getName();

            Map<String, Object> scopeMap = null;
            if (session.containsKey(FLOW_SCOPE_KEY)
                    && session.get(FLOW_SCOPE_KEY) instanceof Map) {
                scopeMap = (Map<String, Object>) session.get(FLOW_SCOPE_KEY);
            }
            if (scopeMap == null) {
                scopeMap = new HashMap<String, Object>();
            }

            if (flowScopeFields.containsKey(actionClassName)
                    && flowScopeFields.get(actionClassName) != null) {
                for (PropertyDescriptor pd : flowScopeFields
                        .get(actionClassName)) {
                    try {
                        final Method getter = pd.getReadMethod();
                        if (getter != null) {
                            final Object val = getter.invoke(action);
                            // the getter's string form is used as the scope key
                            final String scopeFieldKey = getter.toString();

                            if (fromFlowScope) {
                                // session -> action, only when the action
                                // field is still unset
                                if (val == null
                                        && scopeMap.containsKey(scopeFieldKey)) {
                                    final Method setter = pd.getWriteMethod();
                                    if (setter != null) {
                                        if (LOG.isDebugEnabled()) {
                                            LOG.debug("Setting the value: '"
                                                    + scopeMap
                                                            .get(scopeFieldKey)
                                                    + "' for key: '"
                                                    + scopeFieldKey
                                                    + "' from the action flow scope into the action.");
                                        }
                                        setter.invoke(action,
                                                scopeMap.get(scopeFieldKey));
                                    }
                                }
                            } else {
                                // action -> session, only non-null values
                                if (val != null) {
                                    if (LOG.isDebugEnabled()) {
                                        LOG.debug("Storing the value: '"
                                                + val
                                                + "' for key: '"
                                                + scopeFieldKey
                                                + "' from the action into the action flow scope.");
                                    }
                                    scopeMap.put(scopeFieldKey, val);
                                    session.put(FLOW_SCOPE_KEY, scopeMap);
                                }
                            }
                        }
                    } catch (Exception e) {
                        LOG.warn("In handleFlowScope", e);
                    }
                }
            }
        }
    }

    /**
     * @param nextActionName
     *            the nextActionName to set
     */
    public void setNextActionName(String nextActionName) {
        this.nextActionName = nextActionName;
    }

    /**
     * @param prevActionName
     *            the prevActionName to set
     */
    public void setPrevActionName(String prevActionName) {
        this.prevActionName = prevActionName;
    }

    /**
     * @param value
     *            the forceFlowStepsOrder to set
     */
    public void setForceFlowStepsOrder(String value) {
        this.forceFlowStepsOrder = Boolean.valueOf(value).booleanValue();
    }

    /**
     * @param viewActionPostfix
     *            the viewActionPostfix to set
     */
    public void setViewActionPostfix(String viewActionPostfix) {
        this.viewActionPostfix = viewActionPostfix;
    }

    /**
     * @param viewActionMethod
     *            the viewActionMethod to set
     */
    public void setViewActionMethod(String viewActionMethod) {
        this.viewActionMethod = viewActionMethod;
    }

    /**
     * @param stepParameterName
     *            the stepParameterName to set
     */
    public void setStepParameterName(String stepParameterName) {
        this.stepParameterName = stepParameterName;
    }
}
/*
 * Copyright (c) 2016, De Novo Group
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the copyright holder nor the names of its
 * contributors may be used to endorse or promote products derived from this
 * software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
*/ package org.denovogroup.murmur.ui; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.database.Cursor; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v7.app.ActionBar; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.text.Editable; import android.text.TextWatcher; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.view.inputmethod.InputMethodManager; import android.widget.AdapterView; import android.widget.Button; import android.widget.CursorAdapter; import android.widget.EditText; import android.widget.ListView; import android.widget.Spinner; import android.widget.TextView; import org.denovogroup.murmur.R; import org.denovogroup.murmur.backend.*; import org.denovogroup.murmur.backend.SecurityManager; import java.util.ArrayList; import java.util.List; /** * Created by Liran on 12/27/2015. 
 *
 * The fragment which displays the message feed overview.
 */
public class FeedFragment extends Fragment implements View.OnClickListener, TextWatcher, FragmentBackHandler{

    // Request codes for startActivityForResult.
    public static final int REQ_CODE_MESSAGE = 100;
    public static final int REQ_CODE_SEARCH = 101;
    // Argument key: a hashtag to search for immediately on creation.
    public static final String HASHTAG = "hashtag";
    // Cap on the unread-count shown in the notification bar.
    private static final int MAX_NEW_MESSAGES_DISPLAY = 1000;

    // UI mode flags: the fragment is either browsing, searching, or multi-selecting.
    private boolean inSearchMode = false;
    private boolean inSelectionMode = false;
    private boolean selectAll = false;

    private ListView feedListView;
    private Button newPostButton;
    private ViewGroup newMessagesNotification;
    private TextView newMessagesNotification_text;
    private Button newMessagesNotification_button;
    private Spinner sortSpinner;
    private TextView leftText;
    private sortOption currentSort = sortOption.NEWEST;
    private EditText searchView;
    Menu menu;
    // Current free-text / hashtag search query ("" = no filtering).
    private String query = "";

    @Override
    public void beforeTextChanged(CharSequence s, int start, int count, int after) {}

    /** Live-search: rebuild the adapter for every keystroke in the search box. */
    @Override
    public void onTextChanged(CharSequence s, int start, int before, int count) {
        query = s.toString();
        FeedAdapter adapter = new FeedAdapter(getActivity(), getCursor(), false, feedAdapterCallbacks);
        // Keyword highlighting only applies to plain-text searches (no SQL translation).
        if(SearchHelper.searchToSQL(query) == null) {
            adapter.setHighlight(Utils.getKeywords(query));
        }
        feedListView.setAdapter(adapter);
    }

    @Override
    public void afterTextChanged(Editable s) {}

    private enum sortOption{
        NEWEST, OLDEST, MOST_ENDORSED, LEAST_ENDORSED, MOST_CONNECTED, LEAST_CONNECTED
    }

    // Spinner rows: {icon drawable id, label string id, sortOption value}.
    private List<Object[]> sortOptions = new ArrayList<Object[]>(){{
        add(new Object[]{R.drawable.sort_spinner_newest, R.string.sort_opt_newest, sortOption.NEWEST});
        add(new Object[]{R.drawable.sort_spinner_oldest,R.string.sort_opt_oldest, sortOption.OLDEST});
        add(new Object[]{R.drawable.sort_spinner_most_endorsed,R.string.sort_opt_mostendorsed, sortOption.MOST_ENDORSED});
        add(new Object[]{R.drawable.sort_spinner_least_endorsed,R.string.sort_opt_leastendorsed, sortOption.LEAST_ENDORSED});
        add(new Object[]{R.drawable.sort_spinner_most_connected,R.string.sort_opt_mostconnected, sortOption.MOST_CONNECTED});
        add(new Object[]{R.drawable.sort_spinner_least_connected,R.string.sort_opt_leastconnected, sortOption.LEAST_CONNECTED});
    }};

    // Receiver registered in onResume / unregistered in onPause.
    private BroadcastReceiver receiver;
    // Fires whenever the MessageStore reports a new message.
    private IntentFilter filter = new IntentFilter(MessageStore.NEW_MESSAGE);

    /**
     * This is the broadcast receiver object that I am registering. I created a
     * new class in order to override onReceive functionality.
     *
     * @author jesus
     */
    public class MessageEventReceiver extends BroadcastReceiver {

        /**
         * When the receiver is activated then that means a message has been
         * added to the message store, (either by the user or by the active
         * services). The reason that the instanceof check is necessary is
         * because there are two possible routes of activity:
         *
         * 1) The previous/current fragment viewed could have been the about
         * fragment, if it was then the focused fragment is not a
         * ListFragmentOrganizer and when the user returns to the feed then the
         * feed will check its own data set and not crash.
         *
         * 2) The previous/current fragment is the feed, it needs to be notified
         * immediately that there was a change in the underlying dataset.
         */
        @Override
        public void onReceive(Context context, Intent intent) {
            setPendingUnreadMessagesDisplay();
        }
    }

    /** Implemented by the host activity to open a message's expanded/comment view. */
    public interface FeedFragmentCallbacks{
        void onFeedItemExpand(String messageId);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHasOptionsMenu(true);
    }

    /** Inflates the feed, wires toolbar widgets and handles an optional HASHTAG argument. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        ((AppCompatActivity) getActivity()).getSupportActionBar().setTitle(R.string.drawer_menu_feed);
        // Search box and "select all" text live in the host activity's toolbar, not in this layout.
        searchView = (EditText) ((MainActivity)getActivity()).getToolbar().findViewById(R.id.searchView);
        leftText = (TextView) ((MainActivity) getActivity()).getToolbar().findViewById(R.id.leftText);
        initSortSpinner();
        View v = inflater.inflate(R.layout.feed_fragment, container, false);
        feedListView = (ListView) v.findViewById(R.id.feed_listView);
        newPostButton = (Button) v.findViewById(R.id.new_post_button);
        newPostButton.setOnClickListener(this);
        newMessagesNotification = (ViewGroup) v.findViewById(R.id.new_message_notification);
        newMessagesNotification_text = (TextView) v.findViewById(R.id.new_message_notification_desc);
        newMessagesNotification_button = (Button) v.findViewById(R.id.new_message_notification_btn);
        newMessagesNotification_button.setOnClickListener(this);
        MessageStore.getInstance(getActivity()).setAllAsRead();
        setListView();
        // A hashtag passed as an argument (e.g. from a clicked hashtag) triggers an immediate search.
        Bundle args = getArguments();
        if(args != null && args.containsKey(HASHTAG)){
            searchHashTagFromClick(args.getString(HASHTAG));
            args.remove(HASHTAG);
        }
        return v;
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()){
            case R.id.new_post_button:
                Intent intent = new Intent(getActivity(), org.denovogroup.murmur.ui.PostActivity.class);
                startActivityForResult(intent, REQ_CODE_MESSAGE);
                break;
            case R.id.new_message_notification_btn:
                // NOTE(review): intentionally unhandled here; the real click handler is
                // installed in setPendingUnreadMessagesDisplay(). Confirm this TODO is stale.
                //TODO
                break;
        }
    }

    /** Populates the toolbar sort spinner and applies the chosen sort to the MessageStore. */
    private void initSortSpinner(){
        if(getActivity() instanceof MainActivity) {
            sortSpinner = (Spinner) ((MainActivity) getActivity()).getToolbar().findViewById(R.id.sortSpinner);
            sortSpinner.setAdapter(new FeedSortSpinnerAdapter(getActivity(), sortOptions, inSearchMode));
            // Restore the spinner position matching the currently active sort.
            for(int i=0; i<sortOptions.size();i++){
                if(sortOptions.get(i)[2] == currentSort){
                    sortSpinner.setSelection(i);
                    break;
                }
            }
            sortSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
                @Override
                public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                    currentSort = (sortOption) sortOptions.get(position)[2];
                    MessageStore store = MessageStore.getInstance(getActivity());
                    // Map the UI sort option to a MessageStore sort column + ascending flag.
                    switch(currentSort){
                        case LEAST_CONNECTED:
                            store.setSortOption(new String[]{MessageStore.COL_TRUST}, true);
                            break;
                        case MOST_CONNECTED:
                            store.setSortOption(new String[]{MessageStore.COL_TRUST}, false);
                            break;
                        case LEAST_ENDORSED:
                            store.setSortOption(new String[]{MessageStore.COL_LIKES}, true);
                            break;
                        case MOST_ENDORSED:
                            store.setSortOption(new String[]{MessageStore.COL_LIKES}, false);
                            break;
                        case NEWEST:
                            store.setSortOption(new String[]{MessageStore.COL_ROWID}, false);
                            break;
                        case OLDEST:
                            store.setSortOption(new String[]{MessageStore.COL_ROWID}, true);
                            break;
                    }
                    feedListView.setAdapter(new FeedAdapter(getActivity(), getCursor(), inSelectionMode, feedAdapterCallbacks));
                }

                @Override
                public void onNothingSelected(AdapterView<?> parent) {
                    // do nothing
                }
            });
        }
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.feed_fragment_menu, menu);
        this.menu = menu;
        //Setup the search view
        /*MenuItem searchItem = menu.findItem(R.id.search);
        searchView = (SearchView) searchItem.getActionView();
        setSearchView(searchView);*/
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public void onPrepareOptionsMenu(Menu menu) {
        super.onPrepareOptionsMenu(menu);
    }

    /** callback handler from clicks on buttons inside the feed list view */
    FeedAdapter.FeedAdapterCallbacks feedAdapterCallbacks = new FeedAdapter.FeedAdapterCallbacks() {
        @Override
        public void onUpvote(String message, int oldPriority) {
            MessageStore.getInstance(getActivity()).likeMessage(message, true);
            swapCursor();
        }

        @Override
        public void onDownvote(String message, int oldPriority) {
            MessageStore.getInstance(getActivity()).likeMessage(message, false);
            swapCursor();
        }

        @Override
        public void onFavorite(String message, boolean isFavoriteBefore) {
            MessageStore.getInstance(getActivity()).favoriteMessage(message, !isFavoriteBefore);
            swapCursor();
        }

        /** Opens Google Maps at the message's location. Assumes latxLon is "lat lon"
         *  separated by a single space — TODO confirm against the producer of this value. */
        @Override
        public void onNavigate(String message, String latxLon) {
            double lat = Double.parseDouble(latxLon.substring(0, latxLon.indexOf(" ")));
            double lon = Double.parseDouble(latxLon.substring(latxLon.indexOf(" ") + 1));
            Uri gmmIntentUri = Uri.parse("geo:"+lat+","+lon);
            Intent mapIntent = new Intent(Intent.ACTION_VIEW, gmmIntentUri);
            mapIntent.setPackage("com.google.android.apps.maps");
            if (mapIntent.resolveActivity(getActivity().getPackageManager()) != null) {
                getActivity().startActivity(mapIntent);
            }
        }

        /** First reply opens the compose screen pre-addressed to the sender;
         *  later replies expand the existing comment thread instead. */
        @Override
        public void onReply(String parentId, String sender) {
            if (MessageStore.getInstance(getActivity()).getCommentCount(parentId) <= 0) {
                Intent intent = new Intent(getActivity(), PostActivity.class);
                intent.putExtra(PostActivity.MESSAGE_PARENT, parentId);
                if(sender != null && sender.length() > 0) intent.putExtra(PostActivity.MESSAGE_BODY, "@"+sender+" ");
                startActivityForResult(intent, REQ_CODE_MESSAGE);
            } else if (getActivity() instanceof FeedFragmentCallbacks) {
                ((FeedFragmentCallbacks) getActivity()).onFeedItemExpand(parentId);
            }
        }
    };

    /** Returns a cursor over the feed for the current query: a translated SQL query
     *  when the query parses as one, otherwise a plain "contains" text search. */
    private Cursor getCursor(){
        String sqlQuery = SearchHelper.searchToSQL(query);
        return (sqlQuery != null) ?
                MessageStore.getInstance(getActivity()).getMessagesByQuery(sqlQuery) :
                MessageStore.getInstance(getActivity()).getMessagesContainingCursor(query, false, false, -1);
    }

    /** Reloads the list data while preserving the current scroll position. */
    private void swapCursor(){
        int offsetFromTop = 0;
        int firstVisiblePosition = Math.max(0, feedListView.getFirstVisiblePosition());
        if(feedListView.getChildCount() > 0) {
            offsetFromTop = feedListView.getChildAt(0).getTop();
        }
        CursorAdapter newAdapter = ((CursorAdapter) feedListView.getAdapter());
        newAdapter.swapCursor(getCursor());
        if(SearchHelper.searchToSQL(query) == null) {
            ((FeedAdapter) newAdapter).setHighlight(Utils.getKeywords(query));
        }
        // NOTE(review): re-setting the same adapter presumably forces a full rebind;
        // setSelectionFromTop below restores the scroll position afterwards — confirm.
        feedListView.setAdapter(newAdapter);
        feedListView.setSelectionFromTop(firstVisiblePosition, offsetFromTop);
    }

    /** Creates a fresh adapter and puts the list into the mode matching inSelectionMode. */
    private void setListView() {
        feedListView.setAdapter(new FeedAdapter(getActivity(), getCursor(), inSelectionMode, feedAdapterCallbacks));
        if(inSelectionMode) {
            setListInSelectionMode();
        } else {
            setListInDisplayMode();
        }
    }

    // Long-press on a row enters multi-selection mode.
    AdapterView.OnItemLongClickListener longClickListener = new AdapterView.OnItemLongClickListener() {
        @Override
        public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
            setListInSelectionMode();
            return false;
        }
    };

    /** Switches the list into multi-selection mode: taps toggle a row's checked state
     *  and the menu items are enabled/disabled based on what is selected. */
    private void setListInSelectionMode(){
        inSelectionMode = true;
        ((FeedAdapter) feedListView.getAdapter()).setSelectionMode(true);
        swapCursor();
        feedListView.setOnItemLongClickListener(null);
        feedListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Toggle the checked state of the tapped message.
                Cursor c = ((CursorAdapter) feedListView.getAdapter()).getCursor();
                c.moveToPosition(position);
                boolean isChecked = c.getInt(c.getColumnIndex(MessageStore.COL_CHECKED)) == MessageStore.TRUE;
                String message = c.getString(c.getColumnIndex(MessageStore.COL_MESSAGE));
                MessageStore.getInstance(getActivity()).checkMessage(message, !isChecked);
                swapCursor();
                Cursor checkedCursor = MessageStore.getInstance(getActivity()).getCheckedMessages();
                int checkedCount = checkedCursor.getCount();
                updateSelectAll();
                // Show the selection count (capped at 99) in the action bar title.
                ((AppCompatActivity) getActivity()).getSupportActionBar().setTitle(checkedCount <= 99 ? String.valueOf(checkedCount) : "+99");
                if (menu != null) {
                    menu.findItem(R.id.action_delete).setEnabled(checkedCount > 0);
                    // Bulk-delete variants only make sense with exactly one message selected.
                    boolean canDeleteTrust = false;
                    boolean canDeleteLikes = false;
                    boolean canDeleteSender = false;
                    boolean canDeleteExchange = false;
                    boolean canDeleteTree = false;
                    if (checkedCount == 1) {
                        checkedCursor.moveToFirst();
                        String sender = checkedCursor.getString(checkedCursor.getColumnIndex(MessageStore.COL_PSEUDONYM));
                        if (sender != null) canDeleteSender = MessageStore.getInstance(getActivity()).getMessagesBySenderCount(sender) > 0;
                        String exchange = checkedCursor.getString(checkedCursor.getColumnIndex(MessageStore.COL_EXCHANGE));
                        if (exchange != null) canDeleteExchange = MessageStore.getInstance(getActivity()).getMessagesByExchangeCount(exchange) > 0;
                        float trust = checkedCursor.getFloat(checkedCursor.getColumnIndex(MessageStore.COL_TRUST));
                        canDeleteTrust = SecurityManager.getCurrentProfile(getActivity()).isUseTrust() && MessageStore.getInstance(getActivity()).getMessagesByTrustCount(trust) > 0;
                        int likes = checkedCursor.getInt(checkedCursor.getColumnIndex(MessageStore.COL_LIKES));
                        // NOTE(review): canDeleteLikes is computed but never used to enable a menu item.
                        canDeleteLikes = MessageStore.getInstance(getActivity()).getMessagesByLikeCount(likes) > 0;
                        String treeId = checkedCursor.getString(checkedCursor.getColumnIndex(MessageStore.COL_MESSAGE_ID));
                        canDeleteTree = treeId != null && MessageStore.getInstance(getActivity()).getCommentCount(treeId) > 0;
                    }
                    menu.findItem(R.id.action_delete_by_connection).setEnabled(checkedCount == 1 && canDeleteTrust);
                    menu.findItem(R.id.action_delete_by_exchange).setEnabled(checkedCount == 1 && canDeleteExchange);
                    menu.findItem(R.id.action_delete_from_sender).setEnabled(checkedCount == 1 && canDeleteSender);
                    menu.findItem(R.id.action_delete_tree).setEnabled(checkedCount == 1 && canDeleteTree);
                    menu.findItem(R.id.action_retweet).setEnabled(checkedCount == 1);
                    menu.findItem(R.id.action_share).setEnabled(checkedCount == 1);
                }
                checkedCursor.close();
            }
        });
        setActionbar();
        newPostButton.setVisibility(View.INVISIBLE);
    }

    /** Leaves multi-selection mode: unchecks everything and restores normal browsing. */
    private void setListInDisplayMode(){
        inSelectionMode = false;
        MessageStore.getInstance(getActivity()).checkAllMessages(false, true);
        ((FeedAdapter) feedListView.getAdapter()).setSelectionMode(false);
        feedListView.setOnItemLongClickListener(longClickListener);
        feedListView.setOnItemClickListener(null/*new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                //go to expanded view
                if (getActivity() instanceof FeedFragmentCallbacks) {
                    Cursor c = ((CursorAdapter) parent.getAdapter()).getCursor();
                    c.moveToPosition(position);
                    String messageId = c.getString(c.getColumnIndex(MessageStore.COL_MESSAGE_ID));
                    //if (MessageStore.getInstance(getActivity()).getCommentCount(messageId) > 0) {
                        ((FeedFragmentCallbacks) getActivity()).onFeedItemExpand(messageId);
                    //}
                }
            }
        }*/);
        swapCursor();
        setActionbar();
        newPostButton.setVisibility(View.VISIBLE);
    }

    /** Synchronizes the action bar, options menu, toolbar search box, sort spinner and
     *  drawer toggle with the current inSelectionMode / inSearchMode state. */
    private void setActionbar(){
        ActionBar actionBar = ((AppCompatActivity) getActivity()).getSupportActionBar();
        if(actionBar != null) {
            // Pick the action bar background for the current mode.
            Drawable actionbarBg;
            if(inSelectionMode){
                actionbarBg = new ColorDrawable(getActivity().getResources().getColor(R.color.toolbar_grey));
            } else if(inSearchMode) {
                actionbarBg = new ColorDrawable(getActivity().getResources().getColor(android.R.color.white));
            } else {
                if(Build.VERSION.SDK_INT >= 21){
                    actionbarBg = getResources().getDrawable(R.drawable.actionbar_default_bg, null);
                } else {
                    actionbarBg = getResources().getDrawable(R.drawable.actionbar_default_bg);
                }
            }
            actionBar.setBackgroundDrawable(actionbarBg);
            actionBar.setTitle(inSelectionMode ? R.string.empty_string : (inSearchMode ? R.string.empty_string : R.string.drawer_menu_feed));
        }
        if(menu != null) {
            menu.setGroupVisible(R.id.checked_only_actions, inSelectionMode);
            menu.findItem(R.id.search).setVisible(!inSearchMode && !inSelectionMode);
            Cursor checkedCursor = MessageStore.getInstance(getActivity()).getCheckedMessages();
            int checkedCount = checkedCursor.getCount();
            if(inSelectionMode) ((AppCompatActivity) getActivity()).getSupportActionBar().setTitle(checkedCount <= 99 ? String.valueOf(checkedCount) : "+99");
            menu.findItem(R.id.action_delete).setEnabled(checkedCount > 0);
            // Same single-selection menu gating as in the selection-mode click listener.
            boolean canDeleteTrust = false;
            boolean canDeleteLikes = false;
            boolean canDeleteSender = false;
            boolean canDeleteExchange = false;
            boolean canDeleteTree = false;
            if(checkedCount == 1){
                checkedCursor.moveToFirst();
                String sender = checkedCursor.getString(checkedCursor.getColumnIndex(MessageStore.COL_PSEUDONYM));
                if(sender != null) canDeleteSender = MessageStore.getInstance(getActivity()).getMessagesBySenderCount(sender) > 0;
                String exchange = checkedCursor.getString(checkedCursor.getColumnIndex(MessageStore.COL_EXCHANGE));
                if(exchange != null) canDeleteExchange = MessageStore.getInstance(getActivity()).getMessagesByExchangeCount(exchange) > 0;
                float trust = checkedCursor.getFloat(checkedCursor.getColumnIndex(MessageStore.COL_TRUST));
                canDeleteTrust = SecurityManager.getCurrentProfile(getActivity()).isUseTrust() && MessageStore.getInstance(getActivity()).getMessagesByTrustCount(trust) > 0;
                int likes = checkedCursor.getInt(checkedCursor.getColumnIndex(MessageStore.COL_LIKES));
                // NOTE(review): canDeleteLikes is computed but never used to enable a menu item.
                canDeleteLikes = MessageStore.getInstance(getActivity()).getMessagesByLikeCount(likes) > 0;
                String treeId = checkedCursor.getString(checkedCursor.getColumnIndex(MessageStore.COL_MESSAGE_ID));
                canDeleteTree = treeId != null && MessageStore.getInstance(getActivity()).getCommentCount(treeId) > 0;
            }
            checkedCursor.close();
            menu.findItem(R.id.action_delete_by_connection).setEnabled(checkedCount == 1 && canDeleteTrust);
            menu.findItem(R.id.action_delete_by_exchange).setEnabled(checkedCount == 1 && canDeleteExchange);
            menu.findItem(R.id.action_delete_from_sender).setEnabled(checkedCount == 1 && canDeleteSender);
            menu.findItem(R.id.action_delete_tree).setEnabled(checkedCount == 1 && canDeleteTree);
            menu.findItem(R.id.action_retweet).setEnabled(checkedCount == 1);
            menu.findItem(R.id.action_share).setEnabled(checkedCount == 1);
        }
        if(searchView != null){
            searchView.setVisibility(inSearchMode && !inSelectionMode ? View.VISIBLE : View.GONE);
            searchView.removeTextChangedListener(this);
            searchView.setText(query);
            if(inSearchMode && !inSelectionMode){
                searchView.addTextChangedListener(this);
                searchView.requestFocus();
                InputMethodManager imm = (InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
                imm.showSoftInput(searchView, InputMethodManager.SHOW_IMPLICIT);
            } else if(!inSelectionMode){
                query = "";
                searchView.removeTextChangedListener(this);
                searchView.setText("");
                //reset the list to its normal state
                swapCursor();
            }
        }
        ((DrawerActivityHelper) getActivity()).getDrawerToggle().setDrawerIndicatorEnabled(!(inSearchMode || inSelectionMode));
        ((DrawerActivityHelper) getActivity()).getDrawerToggle().syncState();
        if(actionBar != null) {
            if(inSelectionMode && inSearchMode){
                // NOTE(review): enable/sync then disable/sync — presumably a workaround to force
                // the toggle to redraw its indicator; confirm before simplifying.
                ((DrawerActivityHelper) getActivity()).getDrawerToggle().setDrawerIndicatorEnabled(true);
                ((DrawerActivityHelper) getActivity()).getDrawerToggle().syncState();
                ((DrawerActivityHelper) getActivity()).getDrawerToggle().setDrawerIndicatorEnabled(false);
                ((DrawerActivityHelper) getActivity()).getDrawerToggle().syncState();
            } else if (inSearchMode && !inSelectionMode) {
                actionBar.setHomeAsUpIndicator(R.drawable.ic_close_dark);
            }
        }
        if(sortSpinner != null) {
            sortSpinner.setVisibility(inSelectionMode ? View.GONE : View.VISIBLE);
            initSortSpinner();
        }
        if(leftText != null){
            updateSelectAll();
            leftText.setVisibility(inSelectionMode ? View.VISIBLE : View.GONE);
            // "Select all" / "Deselect all" toggle, active only in selection mode.
            leftText.setOnClickListener(inSelectionMode ?
                    new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            String sqlQuery = SearchHelper.searchToSQL(query);
                            if (sqlQuery != null) {
                                MessageStore.getInstance(getActivity()).checkAllQueriedMessages(!selectAll, sqlQuery);
                            } else {
                                MessageStore.getInstance(getActivity()).checkAllMessagesContaining(!selectAll, query);
                            }
                            selectAll = !selectAll;
                            swapCursor();
                            setActionbar();
                        }
                    } : null);
        }
    }

    /** Recomputes the selectAll flag and the "select/deselect all" label text. */
    private void updateSelectAll() {
        //TODO: Danielk Should this contain replies as well?
        int checkedCount = MessageStore.getInstance(getActivity()).getCheckedMessages().getCount();
        long totalCount = getCursor().getCount();
        selectAll = checkedCount == totalCount;
        if(leftText != null) {
            if (inSelectionMode) leftText.setText(!selectAll ? R.string.select_all : R.string.deselect_all);
            else leftText.setText(R.string.empty_string);
        }
    }

    /** Handles toolbar actions: home/back, search, retweet, share and the delete variants
     *  (each delete shows a confirmation dialog before touching the MessageStore). */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        final Cursor checkedMessages = MessageStore.getInstance(getActivity()).getCheckedMessages();
        checkedMessages.moveToFirst();
        AlertDialog.Builder dialog = null;
        switch (item.getItemId()){
            case android.R.id.home:
                if(inSelectionMode){
                    setListInDisplayMode();
                } else if(inSearchMode){
                    // Close the search box and hide the keyboard.
                    InputMethodManager imm = (InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
                    if(searchView != null) imm.hideSoftInputFromWindow(searchView.getWindowToken(), 0);
                    inSearchMode = false;
                    setActionbar();
                    break;
                } else if(!inSelectionMode){
                    // NOTE(review): unreachable in practice — the first branch already covers
                    // inSelectionMode, so this always runs when neither mode is active; confirm intent.
                    inSearchMode = false;
                    setActionbar();
                    break;
                }
                ActionBarDrawerToggle toogle = ((DrawerActivityHelper) getActivity()).getDrawerToggle();
                if(!toogle.isDrawerIndicatorEnabled()){
                    setListInDisplayMode();
                }
                break;
            case R.id.search:
                inSearchMode = true;
                setActionbar();
                break;
            case R.id.action_retweet:
                // Re-post the selected message's body as a new message.
                Intent intent = new Intent(getActivity(), PostActivity.class);
                intent.putExtra(PostActivity.MESSAGE_BODY, checkedMessages.getString(checkedMessages.getColumnIndex(MessageStore.COL_MESSAGE)));
                setListInDisplayMode();
                getActivity().startActivityForResult(intent, REQ_CODE_MESSAGE);
                break;
            case R.id.action_share:
                Intent shareIntent = new Intent(Intent.ACTION_SEND);
                shareIntent.setType("text/plain");
                shareIntent.putExtra(android.content.Intent.EXTRA_SUBJECT, getString(R.string.share_prefix));
                shareIntent.putExtra(Intent.EXTRA_TEXT, checkedMessages.getString(checkedMessages.getColumnIndex(MessageStore.COL_MESSAGE)));
                getActivity().startActivity(Intent.createChooser(shareIntent, getString(R.string.share_using)));
                break;
            case R.id.action_delete:
                dialog = new AlertDialog.Builder(getActivity());
                dialog.setTitle(R.string.delete_dialog_title);
                dialog.setMessage(getString(R.string.delete_dialog_message1) + " " + checkedMessages.getCount() + " " + getString(R.string.delete_dialog_message2));
                dialog.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                });
                dialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        MessageStore.getInstance(getActivity()).removeCheckedMessage();
                        setListInDisplayMode();
                        dialog.dismiss();
                    }
                });
                break;
            case R.id.action_delete_by_connection:
                // Delete every message sharing the selected message's trust score.
                final float trust = checkedMessages.getFloat(checkedMessages.getColumnIndex(MessageStore.COL_TRUST));
                dialog = new AlertDialog.Builder(getActivity());
                dialog.setTitle(R.string.delete_dialog_title);
                dialog.setMessage(getString(R.string.delete_dialog_message1) + " " + MessageStore.getInstance(getActivity()).getMessagesByTrustCount(trust) + " " + getString(R.string.delete_dialog_message2));
                dialog.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                });
                dialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        MessageStore.getInstance(getActivity()).deleteByTrust(trust);
                        setListInDisplayMode();
                        dialog.dismiss();
                    }
                });
                break;
            case R.id.action_delete_by_exchange:
                // Delete every message received in the same exchange.
                final String exchange = checkedMessages.getString(checkedMessages.getColumnIndex(MessageStore.COL_EXCHANGE));
                dialog = new AlertDialog.Builder(getActivity());
                dialog.setTitle(R.string.delete_dialog_title);
                dialog.setMessage(getString(R.string.delete_dialog_message1) + " " + MessageStore.getInstance(getActivity()).getMessagesByExchangeCount(exchange) + " " + getString(R.string.delete_dialog_message2));
                dialog.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                });
                dialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        MessageStore.getInstance(getActivity()).deleteByExchange(exchange);
                        setListInDisplayMode();
                        dialog.dismiss();
                    }
                });
                break;
            case R.id.action_delete_from_sender:
                // Delete every message from the same pseudonymous sender.
                final String senderName = checkedMessages.getString(checkedMessages.getColumnIndex(MessageStore.COL_PSEUDONYM));
                dialog = new AlertDialog.Builder(getActivity());
                dialog.setTitle(R.string.delete_dialog_title);
                dialog.setMessage(getString(R.string.delete_dialog_message1) + " " + MessageStore.getInstance(getActivity()).getMessagesBySenderCount(senderName) + " " + getString(R.string.delete_dialog_message2));
                dialog.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                });
                dialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        MessageStore.getInstance(getActivity()).deleteBySender(senderName);
                        setListInDisplayMode();
                        dialog.dismiss();
                    }
                });
                break;
            case R.id.action_delete_tree:
                // Delete the selected message together with its whole comment tree (+1 = the root).
                final String treeId = checkedMessages.getString(checkedMessages.getColumnIndex(MessageStore.COL_MESSAGE_ID));
                dialog = new AlertDialog.Builder(getActivity());
                dialog.setTitle(R.string.delete_dialog_title);
                dialog.setMessage(getString(R.string.delete_dialog_message1) + " " + (MessageStore.getInstance(getActivity()).getCommentCount(treeId)+1) + " " + getString(R.string.delete_dialog_message2));
                dialog.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                });
                dialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        MessageStore.getInstance(getActivity()).deleteTree(treeId);
                        setListInDisplayMode();
                        dialog.dismiss();
                    }
                });
                break;
        }
        if(dialog != null){
            AlertDialog alertdialog = dialog.create();
            DialogStyler.styleAndShow(getActivity(), alertdialog);
        }
        checkedMessages.close();
        return super.onOptionsItemSelected(item);
    }

    /** display a notification bar showing how many unread messages there are in the store */
    private void setPendingUnreadMessagesDisplay(){
        long unreadCount = MessageStore.getInstance(getActivity()).getUnreadCount();
        if(newMessagesNotification != null){
            if(unreadCount > 0) {
                // "<n> new message(s)\n(<m> exchange(s))", with the count capped at +1000.
                String countString = ((unreadCount <= MAX_NEW_MESSAGES_DISPLAY) ? unreadCount +" "+getString(unreadCount > 1 ? R.string.new_messages_notification_desc : R.string.new_message_notification_desc) : "+"+MAX_NEW_MESSAGES_DISPLAY) +"\n("+ ExchangeHistoryTracker.getInstance().getExchangeHistory()+" "+getString(ExchangeHistoryTracker.getInstance().getExchangeHistory() > 1 ? R.string.exchanges : R.string.exchange)+")";
                ((TextView)newMessagesNotification.findViewById(R.id.new_message_notification_desc)).setText(countString);
                newMessagesNotification.setVisibility(View.VISIBLE);
                newMessagesNotification.findViewById(R.id.new_message_notification_btn).setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        //mark all the messages as read
                        MessageStore.getInstance(getActivity()).setAllAsRead();
                        setPendingUnreadMessagesDisplay();
                        swapCursor();
                        // NOTE(review): setPendingUnreadMessagesDisplay() is called twice here —
                        // the second call looks redundant; confirm before removing.
                        setPendingUnreadMessagesDisplay();
                    }
                });
            } else {
                newMessagesNotification.setVisibility(View.GONE);
            }
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        MessageStore.getInstance(getActivity()).setAllAsRead();
        setPendingUnreadMessagesDisplay();
        swapCursor();
        // Listen for new messages only while the fragment is in the foreground.
        receiver = new MessageEventReceiver();
        getActivity().registerReceiver(receiver, filter);
    }

    @Override
    public void onPause() {
        super.onPause();
        if(receiver != null){
            getActivity().unregisterReceiver(receiver);
            receiver = null;
        }
        //mark all the messages as read
        MessageStore.getInstance(getActivity()).setAllAsRead();
        setPendingUnreadMessagesDisplay();
        swapCursor();
    }

    @Override
    public void onActivityResult(int reqCode, int resCode, Intent data) {
        super.onActivityResult(reqCode, resCode, data);
        if(resCode == Activity.RESULT_OK) {
            switch (reqCode){
                case REQ_CODE_MESSAGE:
                    //mark all the messages as read
                    MessageStore.getInstance(getActivity()).setAllAsRead();
                    setPendingUnreadMessagesDisplay();
                    swapCursor();
                    // NOTE(review): duplicate call, mirrors the pattern in the notification
                    // button handler — presumably redundant; confirm.
                    setPendingUnreadMessagesDisplay();
                    break;
            }
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if(searchView != null) searchView.removeTextChangedListener(this);
        MessageStore.getInstance(getActivity()).setAllAsRead();
    }

    /** Back button: leave selection mode first, then search mode; otherwise unhandled. */
    @Override
    public boolean onBackPressed() {
        if(inSelectionMode){
            setListInDisplayMode();
            return true;
        } else if(inSearchMode){
            InputMethodManager imm = (InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
            if(searchView != null) imm.hideSoftInputFromWindow(searchView.getWindowToken(), 0);
            inSearchMode = false;
            setActionbar();
            return true;
        }
        return false;
    }

    /** Set the Actionbar search view with the hashtag supplied and run the
     * default search method.
     *
     * @param hashtag The hashtag to search for
     */
    private void searchHashTagFromClick(String hashtag){
        query = hashtag;
        inSearchMode = true;
        setActionbar();
        // Keep the soft keyboard hidden: the query came from a click, not from typing.
        searchView.clearFocus();
        InputMethodManager imm = (InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
        imm.hideSoftInputFromWindow(searchView.getWindowToken(), 0);
        getActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
    }
}
/* Copyright 2015 Samsung Electronics Co., LTD * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gearvrf; import static org.gearvrf.utility.Assert.*; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.gearvrf.utility.Exceptions; import org.gearvrf.utility.Log; /** * Describes an indexed triangle mesh as a set of shared vertices with integer * indices for each triangle. * * Usually each mesh vertex may have a positions, normal and texture coordinate. * Skinned mesh vertices will also have bone weights and indices. * If the mesh uses a normal map for lighting, it will have tangents * and bitangents as well. These vertex components correspond to vertex * attributes in the OpenGL vertex shader. */ public class GVRMesh extends GVRHybridObject implements PrettyPrint { private static final String TAG = GVRMesh.class.getSimpleName(); public GVRMesh(GVRContext gvrContext) { this(gvrContext, NativeMesh.ctor()); mAttributeKeys = new HashSet<String>(); } GVRMesh(GVRContext gvrContext, long ptr) { super(gvrContext, ptr); setBones(new ArrayList<GVRBone>()); mVertexBoneData = new GVRVertexBoneData(gvrContext, this); mAttributeKeys = new HashSet<String>(); } /** * Get the 3D vertices of the mesh. Each vertex is represented as a packed * {@code float} triplet: * <p> * <code> * { x0, y0, z0, x1, y1, z1, x2, y2, z2, ... } * </code> * * @return Array with the packed vertex data. 
*/ public float[] getVertices() { return NativeMesh.getVertices(getNative()); } /** * Sets the 3D vertices of the mesh. Each vertex is represented as a packed * {@code float} triplet: * <p> * <code>{ x0, y0, z0, x1, y1, z1, x2, y2, z2, ...}</code> * * @param vertices * Array containing the packed vertex data. */ public void setVertices(float[] vertices) { checkValidFloatArray("vertices", vertices, 3); mAttributeKeys.add("a_position"); NativeMesh.setVertices(getNative(), vertices); } /** * Get the normal vectors of the mesh. Each normal vector is represented as * a packed {@code float} triplet: * <p> * <code>{ x0, y0, z0, x1, y1, z1, x2, y2, z2, ...}</code> * * @return Array with the packed normal data. */ public float[] getNormals() { return NativeMesh.getNormals(getNative()); } /** * Sets the normal vectors of the mesh. Each normal vector is represented as * a packed {@code float} triplet: * <p> * <code>{ x0, y0, z0, x1, y1, z1, x2, y2, z2, ...}</code> * * @param normals * Array containing the packed normal data. */ public void setNormals(float[] normals) { checkValidFloatArray("normals", normals, 3); mAttributeKeys.add("a_normal"); NativeMesh.setNormals(getNative(), normals); } /** * Get the u,v texture coordinates for the mesh. Each texture coordinate is * represented as a packed {@code float} pair: * <p> * <code>{ u0, v0, u1, v1, u2, v2, ...}</code> * * @return Array with the packed texture coordinate data. */ public float[] getTexCoords() { return NativeMesh.getTexCoords(getNative()); } /** * Sets the texture coordinates for the mesh. Each texture coordinate is * represented as a packed {@code float} pair: * <p> * <code>{ u0, v0, u1, v1, u2, v2, ...}</code> * * @param texCoords * Array containing the packed texture coordinate data. */ public void setTexCoords(float[] texCoords) { setTexCoords(texCoords, 0); } public void setTexCoords(float [] texCoords, int index){ String key = (index > 0) ? 
("a_texcoord" +index) : "a_texcoord"; checkValidFloatArray(key, texCoords, 2); mAttributeKeys.add(key); NativeMesh.setVec2Vector(getNative(),key,texCoords); } /** * Get the triangle vertex indices of the mesh. The indices for each * triangle are represented as a packed {@code char} triplet, where * {@code t0} is the first triangle, {@code t1} is the second, etc.: * <p> * <code> * { t0[0], t0[1], t0[2], t1[0], t1[1], t1[2], ...} * </code> * * @return Array with the packed triangle index data. * * @deprecated use {@link #getIndices()} instead. */ public char[] getTriangles() { return NativeMesh.getTriangles(getNative()); } /** * Sets the triangle vertex indices of the mesh. The indices for each * triangle are represented as a packed {@code int} triplet, where * {@code t0} is the first triangle, {@code t1} is the second, etc.: * <p> * <code> * { t0[0], t0[1], t0[2], t1[0], t1[1], t1[2], ...} * </code> * * @param triangles * Array containing the packed triangle index data. * @deprecated use {@link #setIndices(char[])} instead. */ public void setTriangles(char[] triangles) { checkDivisibleDataLength("triangles", triangles, 3); NativeMesh.setTriangles(getNative(), triangles); } /** * Get the vertex indices of the mesh. The indices for each * vertex to be referenced. * * @return Array with the packed index data. */ public char[] getIndices() { return NativeMesh.getIndices(getNative()); } /** * Sets the vertex indices of the mesh. The indices for each * vertex. * * @param indices * Array containing the packed index data. */ public void setIndices(char[] indices) { NativeMesh.setIndices(getNative(), indices); } /** * Get the array of {@code float} scalars bound to the shader attribute * {@code key}. * * @param key * Name of the shader attribute * @return Array of {@code float} scalars. */ public float[] getFloatVector(String key) { return NativeMesh.getFloatVector(getNative(), key); } /** * Bind an array of {@code float} scalars to the shader attribute * {@code key}. 
* * @param key * Name of the shader attribute * @param floatVector * Data to bind to the shader attribute. */ public void setFloatVector(String key, float[] floatVector) { checkValidFloatVector("key", key, "floatVector", floatVector, 1); mAttributeKeys.add(key); NativeMesh.setFloatVector(getNative(), key, floatVector); } /** * Get the array of two-component {@code float} vectors bound to the shader * attribute {@code key}. * * @param key * Name of the shader attribute * @return Array of two-component {@code float} vectors. */ public float[] getVec2Vector(String key) { return NativeMesh.getVec2Vector(getNative(), key); } /** * Bind an array of two-component {@code float} vectors to the shader * attribute {@code key}. * * @param key * Name of the shader attribute * @param vec2Vector * Two-component {@code float} vector data to bind to the shader * attribute. */ public void setVec2Vector(String key, float[] vec2Vector) { checkValidFloatVector("key", key, "vec2Vector", vec2Vector, 2); mAttributeKeys.add(key); NativeMesh.setVec2Vector(getNative(), key, vec2Vector); } /** * Get the array of three-component {@code float} vectors bound to the * shader attribute {@code key}. * * @param key * Name of the shader attribute * @return Array of three-component {@code float} vectors. */ public float[] getVec3Vector(String key) { return NativeMesh.getVec3Vector(getNative(), key); } /** * Bind an array of three-component {@code float} vectors to the shader * attribute {@code key}. * * @param key * Name of the shader attribute * @param vec3Vector * Three-component {@code float} vector data to bind to the * shader attribute. */ public void setVec3Vector(String key, float[] vec3Vector) { checkValidFloatVector("key", key, "vec3Vector", vec3Vector, 3); mAttributeKeys.add(key); NativeMesh.setVec3Vector(getNative(), key, vec3Vector); } /** * Get the array of four-component {@code float} vectors bound to the shader * attribute {@code key}. 
* * @param key * Name of the shader attribute * @return Array of four-component {@code float} vectors. */ public float[] getVec4Vector(String key) { return NativeMesh.getVec4Vector(getNative(), key); } /** * Bind an array of four-component {@code float} vectors to the shader * attribute {@code key}. * * @param key * Name of the shader attribute * @param vec4Vector * Four-component {@code float} vector data to bind to the shader * attribute. */ public void setVec4Vector(String key, float[] vec4Vector) { checkValidFloatVector("key", key, "vec4Vector", vec4Vector, 4); mAttributeKeys.add(key); NativeMesh.setVec4Vector(getNative(), key, vec4Vector); } /** * Get the names of all the vertex attributes on this mesh. * @return array of string names */ public Set<String> getAttributeNames() { if(mAttributeKeys.size() > 0) return mAttributeKeys; String[] attribKeys = NativeMesh.getAttribNames(getNative()); for(String i : attribKeys){ mAttributeKeys.add(i); } return mAttributeKeys; } /** * Calculate a bounding sphere from the mesh vertices. * @param sphere float[4] array to get center of sphere and radius; * sphere[0] = center.x, sphere[1] = center.y, sphere[2] = center.z, sphere[3] = radius */ public void getSphereBound(float[] sphere) { NativeMesh.getSphereBound(getNative(), sphere); } /** * Determine if a named attribute exists in this mesh. * @param key Name of the shader attribute * @return true if attribute exists, false if not */ public boolean hasAttribute(String key) { return NativeMesh.hasAttribute(getNative(), key); } /** * Constructs a {@link GVRMesh mesh} that contains this mesh. * * <p> * This is primarily useful with the {@link GVRPicker}, which does * "ray casting" to detect which scene object you're pointing to. 
Ray * casting is computationally expensive, and you generally want to limit the * number of {@linkplain GVRCollider triangles to check.} A simple * {@linkplain GVRContext#createQuad(float, float) quad} is cheap enough, * but with complex meshes you will probably want to cut search time by * registering the object's bounding box, not the whole mesh. * * @return A {@link GVRMesh} of the bounding box. */ public GVRMesh getBoundingBox() { return new GVRMesh(getGVRContext(), NativeMesh.getBoundingBox(getNative())); } /** * Returns the bones of this mesh. * * @return a list of bones */ public List<GVRBone> getBones() { return mBones; } /** * Sets bones of this mesh. * * @param bones a list of bones */ public void setBones(List<GVRBone> bones) { mBones.clear(); mBones.addAll(bones); NativeMesh.setBones(getNative(), GVRHybridObject.getNativePtrArray(mBones)); // Process bones int boneId = -1; for (GVRBone bone : mBones) { boneId++; List<GVRBoneWeight> boneWeights = bone.getBoneWeights(); for (GVRBoneWeight weight : boneWeights) { int vid = weight.getVertexId(); int boneSlot = getVertexBoneData().getFreeBoneSlot(vid); if (boneSlot >= 0) { getVertexBoneData().setVertexBoneWeight(vid, boneSlot, boneId, weight.getWeight()); } else { Log.w(TAG, "Vertex %d (total %d) has too many bones", vid, getVertices().length / 3); } } } if (getVertexBoneData() != null) { mAttributeKeys.add("a_bone_indices"); mAttributeKeys.add("a_bone_weights"); getVertexBoneData().normalizeWeights(); } } /** * Gets the vertex bone data. * * @return the vertex bone data. */ public GVRVertexBoneData getVertexBoneData() { return mVertexBoneData; } @Override public void prettyPrint(StringBuffer sb, int indent) { sb.append(getVertices() == null ? 0 : Integer.toString(getVertices().length / 3)); sb.append(" vertices, "); sb.append(getIndices() == null ? 0 : Integer.toString(getIndices().length / 3)); sb.append(" triangles, "); sb.append(getTexCoords() == null ? 
0 : Integer.toString(getTexCoords().length / 2)); sb.append(" tex-coords, "); sb.append(getNormals() == null ? 0 : Integer.toString(getNormals().length / 3)); sb.append(" normals, "); sb.append(getBones() == null ? 0 : Integer.toString(getBones().size())); sb.append(" bones"); sb.append(System.lineSeparator()); // Bones List<GVRBone> bones = getBones(); if (!bones.isEmpty()) { sb.append(Log.getSpaces(indent)); sb.append("Bones:"); sb.append(System.lineSeparator()); for (GVRBone bone : bones) { bone.prettyPrint(sb, indent + 2); } } } @Override public String toString() { StringBuffer sb = new StringBuffer(); prettyPrint(sb, 0); return sb.toString(); } private void checkValidFloatVector(String keyName, String key, String vectorName, float[] vector, int expectedComponents) { checkStringNotNullOrEmpty(keyName, key); checkDivisibleDataLength(vectorName, vector, expectedComponents); checkVectorLengthWithVertices(vectorName, vector.length, expectedComponents); } private void checkValidFloatArray(String parameterName, float[] data, int expectedComponents) { checkDivisibleDataLength(parameterName, data, expectedComponents); } private void checkVectorLengthWithVertices(String parameterName, int dataLength, int expectedComponents) { int verticesNumber = getVertices().length / 3; int numberOfElements = dataLength / expectedComponents; if (dataLength / expectedComponents != verticesNumber) { throw Exceptions .IllegalArgument( "The input array %s should be an array of %d-component elements and the number of elements should match the number of vertices. 
The current number of elements is %d, but the current number of vertices is %d.", parameterName, expectedComponents, numberOfElements, verticesNumber); } } private List<GVRBone> mBones = new ArrayList<GVRBone>(); private GVRVertexBoneData mVertexBoneData; private Set<String> mAttributeKeys; } class NativeMesh { static native long ctor(); static native String[] getAttribNames(long mesh); static native float[] getVertices(long mesh); static native void setVertices(long mesh, float[] vertices); static native float[] getNormals(long mesh); static native void setNormals(long mesh, float[] normals); static native float[] getTexCoords(long mesh); static native char[] getTriangles(long mesh); static native void setTriangles(long mesh, char[] triangles); static native char[] getIndices(long mesh); static native void setIndices(long mesh, char[] indices); static native float[] getFloatVector(long mesh, String key); static native void setFloatVector(long mesh, String key, float[] floatVector); static native float[] getVec2Vector(long mesh, String key); static native void setVec2Vector(long mesh, String key, float[] vec2Vector); static native float[] getVec3Vector(long mesh, String key); static native void setVec3Vector(long mesh, String key, float[] vec3Vector); static native float[] getVec4Vector(long mesh, String key); static native void setVec4Vector(long mesh, String key, float[] vec4Vector); static native long getBoundingBox(long mesh); static native void setBones(long mesh, long[] bonePtrs); static native void getSphereBound(long mesh, float[] sphere); static native boolean hasAttribute(long mesh, String key); }
/** * Copyright (c) 2015 TwoDucks Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the <organization> nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package ca.twoducks.vor.ossindex.report; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URI; import java.util.HashSet; import java.util.Set; import org.apache.commons.codec.digest.DigestUtils; /** Information for an individual file. 
* * @author Ken Duck * */ public class FileConfig { private String name; private String digest; private String path; private String license; private String comment; private String state; /** * Known artifact extensions. * * FIXME: This should be loaded from a configuration file. */ private static Set<String> artifactExtensions = new HashSet<String>(); static { artifactExtensions.add("jar"); artifactExtensions.add("deb"); } /** * Known image extensions. * * FIXME: This should be loaded from a configuration file. */ private static Set<String> imageExtensions = new HashSet<String>(); static { imageExtensions.add("jpg"); imageExtensions.add("jpeg"); imageExtensions.add("png"); imageExtensions.add("gif"); imageExtensions.add("bmp"); imageExtensions.add("xbm"); imageExtensions.add("xpm"); imageExtensions.add("ico"); imageExtensions.add("svg"); imageExtensions.add("pspimage"); } /** * Indicate whether or not this file was ignored for analysis purposes. This is * often done if the file is too small, since identifying a file origin in this * circumstance is much more error prone. */ private boolean ignored; /** * List of dependencies found in the file. */ private Set<DependencyConfig> dependencies = null; /** * * @param file * @throws IOException */ public FileConfig(File file) throws IOException { // Get the SHA1 sum for a file, then check if the MD5 is listed in the // OSS Index (indicating it is third party code). FileInputStream is = null; try { is = new FileInputStream(file); digest = DigestUtils.shaHex(is); path = file.getPath(); } finally { if(is != null) { is.close(); } } } /** When converting from CSV we only have a digest * * @param digest */ public FileConfig(String digest) { this.digest = digest; } /** SHA1 digest for the file. Note that this is a platform dependent value. 
* * @return */ public String getDigest() { return digest; } /** Best known name for the file * * @return */ public String getName() { return name; } /** Set the name for the file * * @param name */ public void setName(String name) { this.name = name; } /** Local path to the file. * * @return */ public String getPath() { return path; } /** Set the path to the file * * @param path */ public void setPath(String path) { this.path = path; } /** Text name of a license found within the file itself, often in a header comment. * * @return */ public String getLicense() { return license; } /** Set the file license name * * @param license */ public void setLicense(String license) { this.license = license; } /** The comment provides a location for user-formatted information * about the file. * * @return */ public String getComment() { return comment; } /** Set the file comment * * @param comment */ public void setComment(String comment) { this.comment = comment; } /** Merge the data from the given file into the fields that are not currently * filled by anything else. * * @param file */ public void merge(FileConfig file) { if(name == null) name = file.name; if(path == null) path = file.path; if(license == null) license = file.license; if(comment == null) comment = file.comment; if(state == null) state = file.state; if(file.ignored == true) ignored = true; // Use the path to get an optimal name if(path != null) { File f = new File(path); name = f.getName(); } } /** Get the analysis state of the file. 
* * @return */ public String getState() { return state; } /** Set the analysis state for the file * * @param state */ public void setState(String state) { this.state = state; } /* * (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object o) { if(o instanceof FileConfig) { return ((FileConfig)o).digest.equals(digest); } return false; } /* * (non-Javadoc) * @see java.lang.Object#hashCode() */ @Override public int hashCode() { return digest.hashCode(); } /** Returns true if the file was ignored for project identification purposes. * * @return */ public boolean isIgnored() { return ignored; } /** Add an HTML dependency to the file. This could be an external JavaScript or CSS file. * * @param type Type of dependency (HTML, Maven, Node, Ruby, Java, etc.) * @param uri */ public void addDependency(String type, String artifactId, URI uri, String version, String comment) { DependencyConfig dep = new DependencyConfig(type, artifactId, uri, version); dep.setComment(comment); if(dependencies == null) dependencies = new HashSet<DependencyConfig>(); dependencies.add(dep); } /** Add a package/version to the dependency list. * * @param type Type of dependency (HTML, Maven, Node, Ruby, Java, etc.) * @param pkgName * @param version */ public void addDependency(String type, String pkgName, String artifactId, String version, String comment) { DependencyConfig dep = new DependencyConfig(type, pkgName, artifactId, version); dep.setComment(comment); if(dependencies == null) dependencies = new HashSet<DependencyConfig>(); dependencies.add(dep); } /** Indicates whether the file is a known artifact type. Currently done through file * extensions. * * @return */ public boolean isArtifact() { if(name != null) { int index = name.lastIndexOf('.'); if(index >= 0) { String ext = name.substring(index + 1); return artifactExtensions.contains(ext); } } return false; } /** Indicates whether the file is a known artifact type. 
Currently done through file * extensions. * * @return */ public boolean isImage() { if(name != null) { int index = name.lastIndexOf('.'); if(index >= 0) { String ext = name.substring(index + 1); return imageExtensions.contains(ext); } } return false; } }
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.cas.util.http; import com.google.common.collect.ImmutableList; import com.google.common.primitives.Ints; import org.apache.http.ConnectionReuseStrategy; import org.apache.http.Header; import org.apache.http.HttpHost; import org.apache.http.client.AuthenticationStrategy; import org.apache.http.client.ConnectionBackoffStrategy; import org.apache.http.client.CookieStore; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.RedirectStrategy; import org.apache.http.client.ServiceUnavailableRetryStrategy; import org.apache.http.client.config.RequestConfig; import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.routing.HttpRoute; import org.apache.http.conn.socket.ConnectionSocketFactory; import org.apache.http.conn.socket.LayeredConnectionSocketFactory; import org.apache.http.conn.socket.PlainConnectionSocketFactory; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.conn.ssl.X509HostnameVerifier; import org.apache.http.impl.DefaultConnectionReuseStrategy; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.DefaultBackoffStrategy; 
import org.apache.http.impl.client.DefaultRedirectStrategy; import org.apache.http.impl.client.DefaultServiceUnavailableRetryStrategy; import org.apache.http.impl.client.FutureRequestExecutionService; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.client.ProxyAuthenticationStrategy; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.FactoryBean; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import java.net.HttpURLConnection; import java.net.InetAddress; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** * The factory to build a {@link SimpleHttpClient}. * * @author Jerome Leleu * @since 4.1.0 */ public final class SimpleHttpClientFactoryBean implements FactoryBean<SimpleHttpClient> { /** Max connections per route. */ public static final int MAX_CONNECTIONS_PER_ROUTE = 50; private static final Logger LOGGER = LoggerFactory.getLogger(SimpleHttpClientFactoryBean.class); private static final int MAX_POOLED_CONNECTIONS = 100; private static final int DEFAULT_THREADS_NUMBER = 200; private static final int DEFAULT_TIMEOUT = 5000; /** The default status codes we accept. */ private static final int[] DEFAULT_ACCEPTABLE_CODES = new int[] { HttpURLConnection.HTTP_OK, HttpURLConnection.HTTP_NOT_MODIFIED, HttpURLConnection.HTTP_MOVED_TEMP, HttpURLConnection.HTTP_MOVED_PERM, HttpURLConnection.HTTP_ACCEPTED}; /** 20% of the total of threads in the pool to handle overhead. 
*/ private static final int DEFAULT_QUEUE_SIZE = (int) (DEFAULT_THREADS_NUMBER * 0.2); /** The number of threads used to build the pool of threads (if no executorService provided). */ private int threadsNumber = DEFAULT_THREADS_NUMBER; /** The queue size to absorb additional tasks when the threads pool is saturated (if no executorService provided). */ private int queueSize = DEFAULT_QUEUE_SIZE; /** The Max pooled connections. */ private int maxPooledConnections = MAX_POOLED_CONNECTIONS; /** The Max connections per each route connections. */ private int maxConnectionsPerRoute = MAX_CONNECTIONS_PER_ROUTE; /** List of HTTP status codes considered valid by the caller. */ @NotNull @Size(min = 1) private List<Integer> acceptableCodes = Ints.asList(DEFAULT_ACCEPTABLE_CODES); @Min(0) private int connectionTimeout = DEFAULT_TIMEOUT; @Min(0) private int readTimeout = DEFAULT_TIMEOUT; private RedirectStrategy redirectionStrategy = new DefaultRedirectStrategy(); /** * The socket factory to be used when verifying the validity of the endpoint. */ private SSLConnectionSocketFactory sslSocketFactory = SSLConnectionSocketFactory.getSocketFactory(); /** * The hostname verifier to be used when verifying the validity of the endpoint. */ private X509HostnameVerifier hostnameVerifier = SSLConnectionSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER; /** The credentials provider for endpoints that require authentication. */ private CredentialsProvider credentialsProvider; /** The cookie store for authentication. */ private CookieStore cookieStore; /** Interface for deciding whether a connection can be re-used for subsequent requests and should be kept alive. 
**/ private ConnectionReuseStrategy connectionReuseStrategy = new DefaultConnectionReuseStrategy(); /** * When managing a dynamic number of connections for a given route, this strategy assesses whether a * given request execution outcome should result in a backoff * signal or not, based on either examining the Throwable that resulted or by examining * the resulting response (e.g. for its status code). */ private ConnectionBackoffStrategy connectionBackoffStrategy = new DefaultBackoffStrategy(); /** Strategy interface that allows API users to plug in their own logic to control whether or not a retry * should automatically be done, how many times it should be retried and so on. */ private ServiceUnavailableRetryStrategy serviceUnavailableRetryStrategy = new DefaultServiceUnavailableRetryStrategy(); /** Default headers to be sent. **/ private Collection<? extends Header> defaultHeaders = Collections.emptyList(); /** Default strategy implementation for proxy host authentication.**/ private AuthenticationStrategy proxyAuthenticationStrategy = new ProxyAuthenticationStrategy(); /** Determines whether circular redirects (redirects to the same location) should be allowed. **/ private boolean circularRedirectsAllowed = true; /** Determines whether authentication should be handled automatically. **/ private boolean authenticationEnabled; /** Determines whether redirects should be handled automatically. **/ private boolean redirectsEnabled = true; /** * The executor service used to create a {@link #buildRequestExecutorService}. 
*/
private ExecutorService executorService;

/**
 * Builds a brand-new {@link SimpleHttpClient} wrapping a freshly constructed
 * HTTP client and request-executor service.
 *
 * @return a new {@link SimpleHttpClient}
 * @throws Exception if the underlying HTTP client cannot be built
 */
@Override
public SimpleHttpClient getObject() throws Exception {
    final CloseableHttpClient httpClient = buildHttpClient();
    final FutureRequestExecutionService requestExecutorService = buildRequestExecutorService(httpClient);
    return new SimpleHttpClient(this.acceptableCodes, httpClient, requestExecutorService);
}

@Override
public Class<?> getObjectType() {
    return SimpleHttpClient.class;
}

/** Not a singleton: every {@link #getObject()} call produces a new client. */
@Override
public boolean isSingleton() {
    return false;
}

/**
 * Build a HTTP client based on the current properties.
 *
 * @return the built HTTP client
 */
private CloseableHttpClient buildHttpClient() {
    try {
        final ConnectionSocketFactory plainsf = PlainConnectionSocketFactory.getSocketFactory();
        final LayeredConnectionSocketFactory sslsf = this.sslSocketFactory;
        final Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
                .register("http", plainsf)
                .register("https", sslsf)
                .build();

        final PoolingHttpClientConnectionManager connMgmr = new PoolingHttpClientConnectionManager(registry);
        connMgmr.setMaxTotal(this.maxPooledConnections);
        connMgmr.setDefaultMaxPerRoute(this.maxConnectionsPerRoute);

        final HttpHost httpHost = new HttpHost(InetAddress.getLocalHost());
        final HttpRoute httpRoute = new HttpRoute(httpHost);
        // FIX: previously passed the hard-coded MAX_CONNECTIONS_PER_ROUTE constant here,
        // silently ignoring the configurable maxConnectionsPerRoute property that
        // setDefaultMaxPerRoute() above honors. Use the property consistently.
        connMgmr.setMaxPerRoute(httpRoute, this.maxConnectionsPerRoute);

        final RequestConfig requestConfig = RequestConfig.custom()
                .setSocketTimeout(this.readTimeout)
                .setConnectTimeout(this.connectionTimeout)
                .setConnectionRequestTimeout(this.connectionTimeout)
                .setStaleConnectionCheckEnabled(true)
                .setCircularRedirectsAllowed(this.circularRedirectsAllowed)
                .setRedirectsEnabled(this.redirectsEnabled)
                .setAuthenticationEnabled(this.authenticationEnabled)
                .build();

        final HttpClientBuilder builder = HttpClients.custom()
                .setConnectionManager(connMgmr)
                .setDefaultRequestConfig(requestConfig)
                .setSSLSocketFactory(sslsf)
                .setHostnameVerifier(this.hostnameVerifier)
                .setRedirectStrategy(this.redirectionStrategy)
                .setDefaultCredentialsProvider(this.credentialsProvider)
                .setDefaultCookieStore(this.cookieStore)
                .setConnectionReuseStrategy(this.connectionReuseStrategy)
                .setConnectionBackoffStrategy(this.connectionBackoffStrategy)
                .setServiceUnavailableRetryStrategy(this.serviceUnavailableRetryStrategy)
                .setProxyAuthenticationStrategy(this.proxyAuthenticationStrategy)
                .setDefaultHeaders(this.defaultHeaders)
                .useSystemProperties();
        return builder.build();
    } catch (final Exception e) {
        LOGGER.error(e.getMessage(), e);
        throw new RuntimeException(e);
    }
}

/**
 * Build a {@link FutureRequestExecutionService} from the current properties and a HTTP client.
 *
 * @param httpClient the provided HTTP client
 * @return the built request executor service
 */
private FutureRequestExecutionService buildRequestExecutorService(final CloseableHttpClient httpClient) {
    final ExecutorService definedExecutorService;
    // no executor service provided -> create a default one
    if (this.executorService == null) {
        definedExecutorService = new ThreadPoolExecutor(this.threadsNumber, this.threadsNumber,
                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(this.queueSize));
    } else {
        definedExecutorService = this.executorService;
    }
    return new FutureRequestExecutionService(httpClient, definedExecutorService);
}

public ExecutorService getExecutorService() { return this.executorService; }

public void setExecutorService(final ExecutorService executorService) { this.executorService = executorService; }

public int getThreadsNumber() { return this.threadsNumber; }

public void setThreadsNumber(final int threadsNumber) { this.threadsNumber = threadsNumber; }

public int getQueueSize() { return this.queueSize; }

public void setQueueSize(final int queueSize) { this.queueSize = queueSize; }

public int getMaxPooledConnections() { return this.maxPooledConnections; }

public void setMaxPooledConnections(final int maxPooledConnections) { this.maxPooledConnections = maxPooledConnections; }

public int getMaxConnectionsPerRoute() { return this.maxConnectionsPerRoute; }

public void setMaxConnectionsPerRoute(final int maxConnectionsPerRoute) { this.maxConnectionsPerRoute = maxConnectionsPerRoute; }

/** @return an immutable snapshot of the acceptable HTTP status codes. */
public List<Integer> getAcceptableCodes() { return ImmutableList.copyOf(this.acceptableCodes); }

public void setAcceptableCodes(final int[] acceptableCodes) { this.acceptableCodes = Ints.asList(acceptableCodes); }

public int getConnectionTimeout() { return this.connectionTimeout; }

public void setConnectionTimeout(final int connectionTimeout) { this.connectionTimeout = connectionTimeout; }

public int getReadTimeout() { return this.readTimeout; }

public void setReadTimeout(final int readTimeout) { this.readTimeout = readTimeout; }

public RedirectStrategy getRedirectionStrategy() { return this.redirectionStrategy; }

public void setRedirectionStrategy(final RedirectStrategy redirectionStrategy) { this.redirectionStrategy = redirectionStrategy; }

public SSLConnectionSocketFactory getSslSocketFactory() { return this.sslSocketFactory; }

public void setSslSocketFactory(final SSLConnectionSocketFactory sslSocketFactory) { this.sslSocketFactory = sslSocketFactory; }

public X509HostnameVerifier getHostnameVerifier() { return this.hostnameVerifier; }

public void setHostnameVerifier(final X509HostnameVerifier hostnameVerifier) { this.hostnameVerifier = hostnameVerifier; }

public CredentialsProvider getCredentialsProvider() { return this.credentialsProvider; }

public void setCredentialsProvider(final CredentialsProvider credentialsProvider) { this.credentialsProvider = credentialsProvider; }

public CookieStore getCookieStore() { return this.cookieStore; }

public void setCookieStore(final CookieStore cookieStore) { this.cookieStore = cookieStore; }

public ConnectionReuseStrategy getConnectionReuseStrategy() { return this.connectionReuseStrategy; }

public void setConnectionReuseStrategy(final ConnectionReuseStrategy connectionReuseStrategy) { this.connectionReuseStrategy = connectionReuseStrategy; }

public ConnectionBackoffStrategy getConnectionBackoffStrategy() { return this.connectionBackoffStrategy; }

public void setConnectionBackoffStrategy(final ConnectionBackoffStrategy connectionBackoffStrategy) { this.connectionBackoffStrategy = connectionBackoffStrategy; }

public ServiceUnavailableRetryStrategy getServiceUnavailableRetryStrategy() { return this.serviceUnavailableRetryStrategy; }

public void setServiceUnavailableRetryStrategy(final ServiceUnavailableRetryStrategy serviceUnavailableRetryStrategy) { this.serviceUnavailableRetryStrategy = serviceUnavailableRetryStrategy; }

public Collection<? extends Header> getDefaultHeaders() { return this.defaultHeaders; }

public void setDefaultHeaders(final Collection<? extends Header> defaultHeaders) { this.defaultHeaders = defaultHeaders; }

public AuthenticationStrategy getProxyAuthenticationStrategy() { return this.proxyAuthenticationStrategy; }

public void setProxyAuthenticationStrategy(final AuthenticationStrategy proxyAuthenticationStrategy) { this.proxyAuthenticationStrategy = proxyAuthenticationStrategy; }

public boolean isCircularRedirectsAllowed() { return this.circularRedirectsAllowed; }

public void setCircularRedirectsAllowed(final boolean circularRedirectsAllowed) { this.circularRedirectsAllowed = circularRedirectsAllowed; }

public boolean isAuthenticationEnabled() { return this.authenticationEnabled; }

public void setAuthenticationEnabled(final boolean authenticationEnabled) { this.authenticationEnabled = authenticationEnabled; }

public boolean isRedirectsEnabled() { return this.redirectsEnabled; }

public void setRedirectsEnabled(final boolean redirectsEnabled) { this.redirectsEnabled = redirectsEnabled; }
}
/* * eXist Open Source Native XML Database * Copyright (C) 2006 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; er version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * * $Id: XmldbURI.java 10315 2009-11-01 20:48:23Z wolfgang_m $ */ package org.exist.xmldb; import imc.disxmldb.config.SysConfig; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLDecoder; import java.util.regex.Pattern; import org.apache.log4j.Logger; import org.exist.util.UTF8; import org.exist.xquery.Constants; import org.exist.xquery.util.URIUtils; /** A utility class for xmldb URis. * Since, java.net.URI is <strong>final</strong> this class acts as a wrapper. * @author Pierrick Brihaye <pierrick.brihaye@free.fr> * modified by xiafan <xiafan68@gmail.com> */ /* * This base class implementation only provides a path. FullXmldbURI provides * full uri support. The create method creates a minimal object to keep memory * usage low. */ public class XmldbURI implements Comparable { protected final static Logger LOG = Logger.getLogger(XmldbURI.class); public static final int NO_PORT = -1; //Should be provided by org.xmldb.api package !!! 
public static final String XMLDB_SCHEME = "xmldb"; public static final String XMLDB_URI_PREFIX = "xmldb:"; public static final String DEFAULT_INSTANCE_NAME = "exist"; public static final String EMBEDDED_SERVER_AUTHORITY = "embedded-eXist-server"; public static final String EMBEDDED_SERVER_URI_PREFIX = XMLDB_URI_PREFIX + DEFAULT_INSTANCE_NAME + "://"; public final static XmldbURI ROOT_COLLECTION_URI = create(SysConfig.ROOT_COLLECTION); public final static XmldbURI RELATIVE_ROOT_COLLECTION_URI = create(SysConfig.ROOT_COLLECTION_NAME); public final static XmldbURI SYSTEM_COLLECTION_URI = create(SysConfig.SYSTEM_COLLECTION); public final static XmldbURI CONFIG_COLLECTION_URI = create(SysConfig.CONFIG_COLLECTION); //TODO : create using resolve() public final static XmldbURI ROOT_COLLECTION_CONFIG_URI = create(SysConfig.CONFIG_COLLECTION + "/" + SysConfig.ROOT_COLLECTION_NAME); public final static XmldbURI METADATA_COLLECTION_URI = create(SysConfig.SYSTEM_COLLECTION + "/metadata"); public final static XmldbURI TEMP_COLLECTION_URI = create(SysConfig.TEMP_COLLECTION); public final static XmldbURI EMPTY_URI = createInternal(""); public static final XmldbURI EMBEDDED_SERVER_URI = XmldbURI.create(EMBEDDED_SERVER_URI_PREFIX + EMBEDDED_SERVER_AUTHORITY); private String encodedCollectionPath; //TODO : deprecate when we split at root collection public final static String API_XMLRPC = "xmlrpc"; public final static String API_WEBDAV = "webdav"; public final static String API_REST = "rest-style"; public final static String API_LOCAL = "local"; public static XmldbURI xmldbUriFor(URI uri) throws URISyntaxException { return getXmldbURI(uri); } public static XmldbURI xmldbUriFor(String xmldbURI) throws URISyntaxException { return xmldbUriFor(xmldbURI, true); } public static XmldbURI xmldbUriFor(String xmldbURI, boolean escape) throws URISyntaxException { if (xmldbURI==null) return null; URI uri = new URI(escape ? 
escape(xmldbURI) : xmldbURI); return getXmldbURI(uri); } private static String escape(String xmldbURI) { return escape(xmldbURI, false); } /** * Does the actual escaping. This method is copied from Michael Kay's * saxon (see http://saxon.sf.net). * * @param s * @param escapeReserved */ public static String escape(CharSequence s, boolean escapeReserved) { //TODO : use dedidated URIUtils... -pb StringBuilder sb = new StringBuilder(s.length()); for (int i=0; i<s.length(); i++) { char c = s.charAt(i); if ((c>='a' && c<='z') || (c>='A' && c<='Z') || (c>='0' && c<='9')) { sb.append(c); } else if (c<=0x20 || c>=0x7f) { escapeChar(c, ((i+1)<s.length() ? s.charAt(i+1) : ' '), sb); } else if (escapeReserved) { if ("-_.!~*'()%".indexOf(c)>=0) { sb.append(c); } else { escapeChar(c, ' ', sb); } } else { if ("-_.!~*'()%;/?:@&=+$,#[]".indexOf(c)>=0) { sb.append(c); } else { escapeChar(c, ' ', sb); } } } return sb.toString(); } private static final String hex = "0123456789ABCDEF"; private static void escapeChar(char c, char c2, StringBuilder sb) { byte[] array = new byte[4]; int used = UTF8.getUTF8Encoding(c, c2, array); for (int b=0; b<used; b++) { int v = (array[b]>=0 ? 
array[b] : 256 + array[b]); sb.append('%'); sb.append(hex.charAt(v/16)); sb.append(hex.charAt(v%16)); } } public static XmldbURI xmldbUriFor(String accessURI, String collectionPath) throws URISyntaxException { if (collectionPath==null) return null; URI uri = new URI(accessURI + URIUtils.iriToURI(collectionPath)); return getXmldbURI(uri); } public static XmldbURI create(URI uri){ try { return xmldbUriFor(uri); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid URI: "+e.getMessage()); } } public static XmldbURI create(String uri) { try { return xmldbUriFor(uri); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid URI: "+e.getMessage()); } } public static XmldbURI create(String accessURI, String collectionPath) { try { return xmldbUriFor(accessURI,collectionPath); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid URI: "+e.getMessage()); } } public static XmldbURI createInternal(String collectionPath) { return new XmldbURI(collectionPath); } private static XmldbURI getXmldbURI(URI uri) throws URISyntaxException{ if(uri.getScheme()!=null || uri.getFragment()!=null || uri.getQuery()!=null) { return new FullXmldbURI(uri); } return new XmldbURI(uri); /* //TODO : get rid of this and use a more robust approach (dedicated constructor ?) -pb //TODO : use named constants index = path.lastIndexOf("/xmlrpc"); if (index > lastIndex) { return false; } //TODO : use named constants index = path.lastIndexOf("/webdav"); if (index > lastIndex) { return false; } */ } /** * Contructs an XmldbURI from given URI. * The provided URI must have the XMLDB_SCHEME ("xmldb") * @param xmldbURI A string * @throws URISyntaxException If the given string is not a valid xmldb URI. 
*/ protected XmldbURI(URI xmldbURI) throws URISyntaxException { boolean hadXmldbPrefix=false; if(xmldbURI.getScheme()!=null) { if (!XMLDB_SCHEME.equals(xmldbURI.getScheme())) { throw new URISyntaxException(xmldbURI.toString(), "xmldb URI scheme does not start with " + XMLDB_URI_PREFIX); } xmldbURI = new URI(xmldbURI.toString().substring(XMLDB_URI_PREFIX.length())); hadXmldbPrefix=true; } parseURI(xmldbURI,hadXmldbPrefix); } protected XmldbURI(String collectionPath) { this.encodedCollectionPath = collectionPath; } /** Feeds private members. Receives a URI with the xmldb: scheme already stripped * @throws URISyntaxException */ protected void parseURI(URI xmldbURI, boolean hadXmldbPrefix) throws URISyntaxException { splitPath(xmldbURI.getRawPath()); } /** Given a java.net.URI.getPath(), <strong>tries</strong> to dispatch the host's context * from the collection path as smartly as possible. * One would probably prefer a split policy based on the presence of a well-known root collection. * @param path The java.net.URI.getPath() provided. * @throws URISyntaxException */ protected void splitPath(String path) throws URISyntaxException { encodedCollectionPath = path; if (encodedCollectionPath != null && encodedCollectionPath.length() > 1 && encodedCollectionPath.endsWith("/")) encodedCollectionPath = encodedCollectionPath.substring(0, encodedCollectionPath.length() - 1); //TODO : check that collectionPath starts with DBBroker.ROOT_COLLECTION ? } /** To be called before a context operation with another XmldbURI. 
* @param uri * @throws IllegalArgumentException */ protected void checkCompatibilityForContextOperation(XmldbURI uri) throws IllegalArgumentException { if (this.getInstanceName() != null && uri.getInstanceName() != null && !this.getInstanceName().equals(uri.getInstanceName())) throw new IllegalArgumentException(this.getInstanceName() + " instance differs from " + uri.getInstanceName()); //case insensitive comparison if (this.getHost() != null && uri.getHost() != null && !this.getHost().equalsIgnoreCase(uri.getHost())) throw new IllegalArgumentException(this.getHost() + " host differs from " + uri.getHost()); if (this.getPort() != NO_PORT && uri.getPort() != NO_PORT && this.getPort() != uri.getPort()) throw new IllegalArgumentException(this.getPort() + " port differs from " + uri.getPort()); if (this.getCollectionPath() != null && uri.getCollectionPath() != null && !this.getCollectionPath().equals(uri.getCollectionPath())) throw new IllegalArgumentException(this.getCollectionPath() + " collection differs from " + uri.getCollectionPath()); } /** To be called before a collection path operation with another XmldbURI. 
* @param uri * @throws IllegalArgumentException */ protected void checkCompatibilityForCollectionOperation(XmldbURI uri) throws IllegalArgumentException { if (this.getInstanceName() != null && uri.getInstanceName() != null && !this.getInstanceName().equals(uri.getInstanceName())) throw new IllegalArgumentException(this.getInstanceName() + " instance differs from " + uri.getInstanceName()); //case insensitive comparison if (this.getHost() != null && uri.getHost() != null && !this.getHost().equalsIgnoreCase(uri.getHost())) throw new IllegalArgumentException(this.getHost() + " host differs from " + uri.getHost()); if (this.getPort() != NO_PORT && uri.getPort() != NO_PORT && this.getPort() != uri.getPort()) throw new IllegalArgumentException(this.getPort() + " port differs from " + uri.getPort()); if (this.getContext() != null && uri.getContext() != null && !this.getContext().equals(uri.getContext())) throw new IllegalArgumentException(this.getContext() + " context differs from " + uri.getContext()); } /* * It is an error for any of the following private members to throw an exception. */ /* private void setInstanceName(String instanceName) { String oldInstanceName = this.instanceName; try { this.instanceName = instanceName; recomputeURI(); } catch (URISyntaxException e) { this.instanceName = oldInstanceName; throw new IllegalArgumentException("Invalid URI: "+e.getMessage()); } } private void setContext(String context) throws URISyntaxException { String oldContext = this.context; try { //trims any trailing slash if (context != null && context.endsWith("/")) { //include root slash if we have a host if (this.getHost() != null) context = context.substring(0, context.length() - 1); } this.context = "".equals(context) ? 
null : context; recomputeURI(); } catch (URISyntaxException e) { this.context = oldContext; throw e; } } private void setCollectionPath(String collectionPath) throws URISyntaxException { String oldCollectionPath = collectionPath; try { if (collectionPath == null) this.encodedCollectionPath = null; else { String escaped = URIUtils.escapeHtmlURI(collectionPath); this.encodedCollectionPath = escaped; } recomputeURI(); } catch (URISyntaxException e) { this.encodedCollectionPath = oldCollectionPath; throw e; } catch (UnsupportedEncodingException e) { wrappedURI = null; throw new URISyntaxException(this.toString(), e.getMessage()); } } */ /** * This returns a proper heirarchical URI - the xmldb scheme is trimmed * from the beginning. The scheme will be the instance name, and all * other fields will be populated as would be expected from a heirarchical * URI * * @see #getXmldbURI */ public URI getURI() { return URI.create(encodedCollectionPath); } /** * This returns an xmldb uri. This is the most generic sort of uri - the * only fields set in the uri are scheme and schemeSpecificPart */ public URI getXmldbURI() { return URI.create(encodedCollectionPath); } public String getInstanceName() { return null; } /** * Method to return the collection path with reserved characters * percent encoded * * @return Returns the encoded collection path */ public String getRawCollectionPath() { return encodedCollectionPath; } public String getCollectionPath() { if (encodedCollectionPath == null) return null; try { //TODO: we might want to cache this value return URLDecoder.decode(encodedCollectionPath, "UTF-8"); } catch (UnsupportedEncodingException e) { //Should never happen throw new IllegalArgumentException(encodedCollectionPath + " can not be properly escaped"); } } public XmldbURI toCollectionPathURI() { return (this instanceof FullXmldbURI)?XmldbURI.create(getRawCollectionPath()):this; } /** To be called each time a private member that interacts with the wrapped URI is modified. 
* @throws URISyntaxException */ protected void recomputeURI() throws URISyntaxException { } /** To be called each time a private member that interacts with the wrapped URI is modified. * @throws URISyntaxException */ protected void safeRecomputeURI() { try { recomputeURI(); } catch(URISyntaxException e) {} } /* * Must be encoded! */ private void setCollectionPath(String collectionPath) { String oldCollectionPath = encodedCollectionPath; try { encodedCollectionPath = "".equals(collectionPath) ? null : collectionPath; //include root slash if we have a context if (encodedCollectionPath!=null && getContext() != null & encodedCollectionPath.charAt(0)!='/') { encodedCollectionPath = "/"+encodedCollectionPath; } recomputeURI(); } catch (URISyntaxException e) { encodedCollectionPath = oldCollectionPath; throw new IllegalArgumentException("Invalid URI: "+e.getMessage()); } } public String getApiName() { return null; } public String getContext() { return null; } public int compareTo(Object ob) throws ClassCastException { if (!(ob instanceof XmldbURI)) throw new ClassCastException("The provided Object is not an XmldbURI"); return getXmldbURI().compareTo(((XmldbURI)ob).getXmldbURI()); } /** * This function returns a relative XmldbURI with the value after the last * / in the collection path of the URI * * @return A relative XmldbURI containing the value after the last / * in the collection path */ public XmldbURI lastSegment() { String name = getRawCollectionPath(); int last; // No slash - give them the whole thing! if((last=name.lastIndexOf('/'))==Constants.STRING_NOT_FOUND) { return this; } // Checks against a trailing slash // is this appropriate? 
if(last==name.length()-1) { name = name.substring(0,last); last = name.lastIndexOf('/'); } return XmldbURI.create(name.substring(last+1)); } /** * This function returns a relative XmldbURI with the value after the last * / in the collection path of the URI * * @return A relative XmldbURI containing the value after the last / * in the collection path */ public int numSegments() { String name = getRawCollectionPath(); if(name==null || "".equals(name)) { return 0; } String[] split = name.split("/"); return split.length; } /** * This function returns a relative XmldbURI with the value after the last * / in the collection path of the URI * * @return A relative XmldbURI containing the value after the last / * in the collection path */ public XmldbURI[] getPathSegments() { String name = getRawCollectionPath(); if(name==null || "".equals(name)) { return new XmldbURI[0]; } String[] split = name.split("/"); int fix = ("".equals(split[0]))?1:0; XmldbURI[] segments = new XmldbURI[split.length-fix]; for(int i=fix;i<split.length;i++) { segments[i-fix] = XmldbURI.create(split[i]); } return segments; } /** * This function returns a string with everything after the last / removed * * @return A relative XmldbURI containing the value after the last / * in the collection path */ public XmldbURI removeLastSegment() { String uri = toString(); int last; // No slash - return null! if((last=uri.lastIndexOf('/'))==Constants.STRING_NOT_FOUND) { return XmldbURI.EMPTY_URI; } // Checks against a trailing slash // is this appropriate? 
if(last==uri.length()-1) { uri = uri.substring(0,last); last = uri.lastIndexOf('/'); } return last<=0?XmldbURI.EMPTY_URI:XmldbURI.create(uri.substring(0,last)); } public XmldbURI append(String uri) { return append(XmldbURI.create(uri)); } public XmldbURI append(XmldbURI uri) { String toAppend = uri.getRawCollectionPath(); String prepend = toString(); if("".equals(toAppend)) return this; if("".equals(prepend)) { return uri; } if(!(prepend.charAt(prepend.length()-1)=='/') && !(toAppend.charAt(0)=='/')) { return XmldbURI.create(prepend+"/"+toAppend); } else { return XmldbURI.create(prepend+toAppend); } } public XmldbURI appendInternal(XmldbURI uri) { return XmldbURI.createInternal(getRawCollectionPath() + '/' + uri.getRawCollectionPath()); } /** Ugly workaround for non-URI compliant pathes * @param pseudoURI What is supposed to be a URI * @return an supposedly correctly escaped URI <strong>string representation</string> * @deprecated By definition, using this method is strongly discouraged */ public static String recoverPseudoURIs(String pseudoURI) throws URISyntaxException { Pattern p = Pattern.compile("/"); String[] parts = p.split(pseudoURI); StringBuilder newURIString = new StringBuilder(parts[0]); for (int i = 1 ; i <parts.length; i ++) { newURIString.append("/"); if (!"".equals(parts[i])) { try { //Try to instantiate the parst as a URI new URI(newURIString + parts[i]); newURIString.append(parts[i]); } catch (URISyntaxException e) { LOG.info("Trying to escape : ''" + parts[i] + "' in '" + pseudoURI + "' !"); newURIString.append(URIUtils.encodeForURI(parts[i])); } } } return newURIString.toString(); } public boolean equals(Object ob) { if (ob instanceof XmldbURI) { return getXmldbURI().equals(((XmldbURI)ob).getXmldbURI()); } if (ob instanceof URI) { return getXmldbURI().equals(ob); } if (ob instanceof String) { try { return getXmldbURI().equals(new URI((String)ob)); } catch(URISyntaxException e) { return false; } } return false; } public boolean 
equalsInternal(XmldbURI other) { if (this == other) return true; return encodedCollectionPath.equals(other.encodedCollectionPath); } public boolean isAbsolute() { return isCollectionPathAbsolute(); } public boolean isContextAbsolute() { return false; } public XmldbURI normalizeContext() { return this; } public URI relativizeContext(URI uri) { return null; } public URI resolveContext(String str) throws NullPointerException, IllegalArgumentException { return null; } public URI resolveContext(URI uri) throws NullPointerException { return null; } public boolean isCollectionPathAbsolute() { return encodedCollectionPath !=null && encodedCollectionPath.length() > 0 && encodedCollectionPath.charAt(0)=='/'; } public XmldbURI normalizeCollectionPath() { String collectionPath = this.encodedCollectionPath; if (collectionPath == null) return this; URI collectionPathURI = URI.create(collectionPath).normalize(); if(collectionPathURI.getPath().equals(collectionPath)) { return this; } XmldbURI uri = XmldbURI.create(getXmldbURI()); uri.setCollectionPath(collectionPathURI.toString()); return uri; } public URI relativizeCollectionPath(URI uri) { if (uri == null) throw new NullPointerException("The provided URI is null"); String collectionPath = this.encodedCollectionPath; if (collectionPath == null) throw new NullPointerException("The current collection path is null"); URI collectionPathURI; //Adds a final slash if necessary if (!collectionPath.endsWith("/")) { LOG.info("Added a final '/' to '" + collectionPath + "'"); collectionPathURI = URI.create(collectionPath + "/"); } else collectionPathURI = URI.create(collectionPath); return collectionPathURI.relativize(uri); } //TODO: unit test! 
public XmldbURI resolveCollectionPath(XmldbURI child) throws NullPointerException, IllegalArgumentException {
    if (child == null)
        throw new NullPointerException("The provided child URI is null");
    // if (child.isAbsolute())
    //     return child;
    //Old method:
    /*
    String collectionPath = this.encodedCollectionPath;
    if (collectionPath == null)
        throw new NullPointerException("The current collection path is null");
    URI collectionPathURI;
    //Adds a final slash if necessary
    if (!collectionPath.endsWith("/")) {
        LOG.info("Added a final '/' to '" + collectionPath + "'");
        collectionPathURI = URI.create(collectionPath + "/");
    } else
        collectionPathURI = URI.create(collectionPath);
    */
    String collectionPath = toCollectionPathURI().toString();
    URI newCollectionURI = null;
    // resolution requires a trailing slash on the base path
    if (!collectionPath.endsWith("/")) {
        newCollectionURI = URI.create(collectionPath + "/").resolve(child.toCollectionPathURI().getURI());
    } else {
        newCollectionURI = getURI().resolve(child.toCollectionPathURI().getURI());
    }
    XmldbURI newURI = XmldbURI.create(getXmldbURI());
    String newCollectionPath = newCollectionURI.getRawPath();
    if (newCollectionPath.endsWith("/")) {
        newCollectionPath = newCollectionPath.substring(0, newCollectionPath.length() - 1);
    }
    newURI.encodedCollectionPath = newCollectionPath;
    newURI.safeRecomputeURI();
    return newURI;
}

public URI resolveCollectionPath(URI uri) throws NullPointerException {
    if (uri == null)
        throw new NullPointerException("The provided URI is null");
    String collectionPath = this.encodedCollectionPath;
    if (collectionPath == null)
        throw new NullPointerException("The current collection path is null");
    URI collectionPathURI;
    //Adds a final slash if necessary
    if (!collectionPath.endsWith("/")) {
        LOG.info("Added a final '/' to '" + collectionPath + "'");
        collectionPathURI = URI.create(collectionPath + "/");
    } else
        collectionPathURI = URI.create(collectionPath);
    return collectionPathURI.resolve(uri);
}

public String toASCIIString() {
    //TODO : trim trailing slash if necessary
    return getXmldbURI().toASCIIString();
}

public URL toURL() throws IllegalArgumentException, MalformedURLException {
    return getXmldbURI().toURL();
}

//TODO: add unit test for this
//TODO : come on ! use a URI method name. resolve() is a must here
public boolean startsWith(XmldbURI xmldbUri) {
    return (xmldbUri == null) ? false : toString().startsWith(xmldbUri.toString());
}

//TODO : come on ! use a URI method name. resolve() is a must here
public boolean startsWith(String string) throws URISyntaxException {
    return startsWith(XmldbURI.xmldbUriFor(string));
}

//TODO: add unit test for this
public boolean endsWith(XmldbURI xmldbUri) {
    return (xmldbUri == null) ? false : toString().endsWith(xmldbUri.toString());
}

public boolean endsWith(String string) throws URISyntaxException {
    return endsWith(XmldbURI.xmldbUriFor(string));
}

//TODO: add unit test for this
public XmldbURI prepend(XmldbURI xmldbUri) {
    if (xmldbUri == null) {
        throw new NullPointerException(toString() + " cannot start with null!");
    }
    // FIX: "xmldbUri.resolve(this)" had leaked out of the TODO comment below and
    // into the code as a statement with no semicolon (a compile error); it is
    // comment text, restored as such.
    //TODO : resolve URIs !!! xmldbUri.resolve(this)
    return xmldbUri.append(this);
}

//TODO: add unit test for this
public XmldbURI trimFromBeginning(XmldbURI xmldbUri) {
    if (xmldbUri == null) {
        throw new NullPointerException(toString() + " cannot start with null!");
    }
    if (!startsWith(xmldbUri)) {
        throw new IllegalArgumentException(toString() + " does not start with " + xmldbUri.toString());
    }
    return XmldbURI.create(toString().substring(xmldbUri.toString().length()));
}

public XmldbURI trimFromBeginning(String string) throws URISyntaxException {
    return trimFromBeginning(XmldbURI.xmldbUriFor(string));
}

public String toString() {
    return encodedCollectionPath;
}

/** Splits an (assumed absolute) collection path, dropping the empty leading component. */
public static String[] getPathComponents(String collectionPath) {
    Pattern p = Pattern.compile("/");
    String[] split = p.split(collectionPath);
    String[] result = new String[split.length - 1];
    System.arraycopy(split, 1, result, 0, split.length - 1);
    return result;
}

/* @deprecated Legacy method used here and there in the code
 * if the currentPath is null return the parentPath else
 * if the currentPath doesnt not start with "/db/" and is not equal to "/db" then adjust the path to start with the parentPath
 *
 * Fix to Jens collection/resource name problem by deliriumsky
 *
 * @deprecated Use {@link #resolveCollectionPath(String) resolveCollectionPath} instead
 */
public static String checkPath(String currentPath, String parentPath) {
    if (currentPath == null)
        return parentPath;
    //Absolute path
    if (SysConfig.ROOT_COLLECTION.equals(currentPath))
        return currentPath;
    //Absolute path
    if (currentPath.startsWith(SysConfig.ROOT_COLLECTION + "/"))
        return currentPath;
    // FIX: the reviewer initials "-pb" below had fallen out of this comment and
    // into the code (a compile error); restored as comment text.
    //Kind of relative path : against all conventions ! -pb
    if (currentPath.startsWith("/"))
        LOG.warn("Initial '/' for relative path '" + currentPath + "'");
    //OK : let's process this so-called relative path
    if (currentPath.startsWith("/")) {
        if (parentPath.endsWith("/"))
            return parentPath + currentPath.substring(1);
        return parentPath + currentPath;
    }
    //True relative pathes
    if (parentPath.endsWith("/"))
        return parentPath + currentPath;
    return parentPath + "/" + currentPath;
}

/** @deprecated Legacy method used here and there in the code
 * @param fileName
 * @param parentPath
 */
public static String checkPath2(String fileName, String parentPath) {
    //if (!fileName.startsWith("/"))
    //    fileName = "/" + fileName;
    /*if (!fileName.startsWith(ROOT_COLLECTION))
        fileName = ROOT_COLLECTION + fileName;*/
    return checkPath(fileName, parentPath);
}

/** @deprecated Legacy method used here and there in the code and copied as such
 * @param name
 */
//TODO : changes // into /
public String makeAbsolute(String name) {
    StringBuilder out = new StringBuilder();
    for (int i = 0; i < name.length(); i++)
        //TODO : use dedicated function in XmldbURI
        if (name.charAt(i) == '/' && name.length() > i + 1 && name.charAt(i + 1) == '/')
            i++;
        else
            out.append(name.charAt(i));
    String name2 = out.toString();
    if (name2.length() > 0 && name2.charAt(0) != '/')
        name2 = "/" + name2;
    if (!name2.startsWith(SysConfig.ROOT_COLLECTION))
        name2 = SysConfig.ROOT_COLLECTION + name2;
    if (name2.endsWith("/") && name2.length() > 1)
        name2 = name2.substring(0, name2.length() - 1);
    return name2;
}

/** @deprecated Legacy method used here and there in the code and copied as such
 * @param name
 */
//TODO : changes // into /
public final static String normalizeCollectionName(String name) {
    StringBuilder out = new StringBuilder();
    for (int i = 0; i < name.length(); i++)
        //TODO : use dedicated function in XmldbURI
        if (name.charAt(i) == '/' && name.length() > i + 1 && name.charAt(i + 1) == '/')
            i++;
        else
            out.append(name.charAt(i));
    String name2 = out.toString();
    if (name2.length() > 0 && name2.charAt(0) != '/')
        name2 = "/" + name2;
    if (!name2.startsWith(SysConfig.ROOT_COLLECTION))
        name2 = SysConfig.ROOT_COLLECTION + name2;
    if (name2.endsWith("/") && name2.length() > 1)
        name2 = name2.substring(0, name2.length() - 1);
    return name2;
}

/* (non-Javadoc)
 * @see java.net.URI#getAuthority()
 */
public String getAuthority() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getFragment()
 */
public String getFragment() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getPort()
 */
public int getPort() {
    return NO_PORT;
}

/* (non-Javadoc)
 * @see java.net.URI#getQuery()
 */
public String getQuery() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getRawAuthority()
 */
public String getRawAuthority() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getHost()
 */
public String getHost() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getUserInfo()
 */
public String getUserInfo() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getRawFragment()
 */
public String getRawFragment() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getRawQuery()
 */
public String getRawQuery() {
    return null;
}

/* (non-Javadoc)
 * @see java.net.URI#getRawUserInfo()
 */
public String getRawUserInfo() {
    return null;
}

/* (non-Javadoc)
 * @see java.lang.Object#hashCode()
 */
public int hashCode() {
    return getXmldbURI().hashCode();
}

// TODO : predefined URIs as static classes...
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.store.easy.json.loader;

import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.MetadataUtils;
import org.apache.drill.exec.store.easy.json.parser.ElementParser;
import org.apache.drill.exec.store.easy.json.parser.ValueDef;
import org.apache.drill.exec.store.easy.json.parser.ValueDef.JsonType;
import org.apache.drill.exec.store.easy.json.values.VarCharListener;
import org.apache.drill.exec.store.easy.json.parser.ValueParser;
import org.apache.drill.exec.vector.accessor.ScalarWriter;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Create Drill field listeners based on the observed look-ahead
 * tokens in JSON.
 */
public class InferredFieldFactory extends BaseFieldFactory {
  protected static final Logger logger = LoggerFactory.getLogger(InferredFieldFactory.class);

  public InferredFieldFactory(JsonLoaderImpl loader) {
    super(loader);
  }

  /**
   * Build a column and its listener based on a look-ahead hint.
   * Unknown-typed values (null / empty array) are deferred via
   * {@link #parserForUnknown(FieldDefn)}; everything else resolves
   * immediately to a concrete parser.
   */
  @Override
  public ElementParser fieldParser(FieldDefn fieldDefn) {
    ValueDef valueDef = fieldDefn.lookahead();
    if (valueDef.type().isUnknown()) {
      return parserForUnknown(fieldDefn);
    } else {
      return resolveField(fieldDefn);
    }
  }

  /**
   * Create a listener when we don't have type information. For the case
   * {@code null} appears before other values.
   */
  private ElementParser parserForUnknown(FieldDefn fieldDefn) {
    ValueDef valueDef = fieldDefn.lookahead();
    if (!valueDef.isArray()) {
      // For the case null appears before other values.
      return new NullFieldParser(fieldDefn.tupleParser(), fieldDefn.key());
    } else if (valueDef.dimensions() > 1) {
      // An unknown nested array: [[]], etc. Must guess a type.
      return forceRepeatedListResolution(fieldDefn);
    } else if (valueDef.type() == JsonType.NULL) {
      // For the case of [null], must force resolution
      return forceArrayResolution(fieldDefn);
    } else {
      // For the case [] appears before other values.
      return new EmptyArrayFieldParser(fieldDefn.tupleParser(), fieldDefn.key());
    }
  }

  // Guess a type for an unknown nested array ([[]], [[null]], ...):
  // build a repeated-list column around the "unknown" scalar schema.
  private ElementParser forceRepeatedListResolution(FieldDefn fieldDefn) {
    ColumnMetadata innerSchema = schemaForUnknown(fieldDefn, true);
    int dims = fieldDefn.lookahead().dimensions();
    ColumnMetadata fieldSchema = repeatedListSchemaFor(fieldDefn.key(), dims, innerSchema);
    return buildOuterArrays(
        fieldDefn.fieldWriterFor(fieldSchema), dims,
        innerWriter -> scalarArrayParserFor(
            unknownParserFor(innerWriter.array().scalar())));
  }

  @Override
  public ElementParser forceNullResolution(FieldDefn fieldDefn) {
    logger.warn("Ambiguous type! JSON field {}" +
        " contains all nulls. Assuming JSON text.", fieldDefn.key());
    return forceResolution(fieldDefn, false);
  }

  @Override
  public ElementParser forceArrayResolution(FieldDefn fieldDefn) {
    logger.warn("Ambiguous type! JSON field {}" +
        " contains all empty arrays. Assuming array of JSON text.", fieldDefn.key());
    return scalarArrayParserFor(forceResolution(fieldDefn, true));
  }

  // Shared tail of the two "force" paths above: build a writer for the
  // guessed schema and wrap it in an unknown-value parser.
  private ValueParser forceResolution(FieldDefn fieldDefn, boolean isArray) {
    return unknownParserFor(
        fieldDefn.scalarWriterFor(
            schemaForUnknown(fieldDefn, isArray)));
  }

  // Schema to use when the type was never observed: VARCHAR when unknowns
  // are captured as JSON text, otherwise the configured null type.
  private ColumnMetadata schemaForUnknown(FieldDefn fieldDefn, boolean isArray) {
    if (loader.options().unknownsAsJson) {
      return fieldDefn.schemaFor(MinorType.VARCHAR, isArray);
    } else {
      return fieldDefn.schemaFor(loader.options().nullType, isArray);
    }
  }

  // Parser companion to schemaForUnknown(): JSON-text parser vs. plain
  // scalar parser, keyed off the same unknownsAsJson option.
  private ValueParser unknownParserFor(ScalarWriter writer) {
    if (loader.options().unknownsAsJson) {
      return parserFactory().jsonTextParser(new VarCharListener(loader, writer));
    } else {
      return parserFactory().simpleValueParser(scalarListenerFor(writer));
    }
  }

  // Dispatch on the (known) look-ahead type: scalar vs. object, crossed
  // with 0 / 1 / 2+ array dimensions.
  private ElementParser resolveField(FieldDefn fieldDefn) {
    ValueDef valueDef = fieldDefn.lookahead();
    Preconditions.checkArgument(!valueDef.type().isUnknown());
    if (!valueDef.isArray()) {
      if (valueDef.type().isObject()) {
        return objectParserFor(fieldDefn);
      } else {
        return scalarParserFor(fieldDefn, false);
      }
    } else if (valueDef.dimensions() == 1) {
      if (valueDef.type().isObject()) {
        return objectArrayParserFor(fieldDefn);
      } else {
        return scalarArrayParserFor(scalarParserFor(fieldDefn, true));
      }
    } else { // 2+ dimensions
      if (valueDef.type().isObject()) {
        return multiDimObjectArrayParserFor(fieldDefn);
      } else {
        return multiDimScalarArrayParserFor(fieldDefn);
      }
    }
  }

  /**
   * Create a scalar parser for the field. In all-text mode every scalar
   * becomes VARCHAR; otherwise the type is inferred from the look-ahead.
   */
  public ValueParser scalarParserFor(FieldDefn fieldDefn, boolean isArray) {
    if (loader.options().allTextMode) {
      return parserFactory().textValueParser(
          new VarCharListener(loader, fieldDefn.scalarWriterFor(MinorType.VARCHAR, isArray)));
    } else {
      return scalarParserFor(fieldDefn, fieldDefn.schemaFor(scalarTypeFor(fieldDefn), isArray));
    }
  }

  /**
   * Create a multi- (2+) dimensional scalar array from a JSON value description.
   */
  private ElementParser multiDimScalarArrayParserFor(FieldDefn fieldDefn) {
    ColumnMetadata innerSchema = fieldDefn.schemaFor(scalarTypeFor(fieldDefn), true);
    int dims = fieldDefn.lookahead().dimensions();
    ColumnMetadata fieldSchema = repeatedListSchemaFor(fieldDefn.key(), dims, innerSchema);
    return multiDimScalarArrayFor(
        fieldDefn.fieldWriterFor(fieldSchema), dims);
  }

  /**
   * Create a map array column and its associated object array listener
   * for the given key.
   */
  public ElementParser objectArrayParserFor(FieldDefn fieldDefn) {
    return objectArrayParserFor(fieldDefn, MetadataUtils.newMapArray(fieldDefn.key()), null);
  }

  /**
   * Create a RepeatedList which contains (empty) Map objects using the provided
   * schema. That is, create a multi-dimensional array of maps.
   * The map fields are created on the fly, optionally using the provided schema.
   */
  private ElementParser multiDimObjectArrayParserFor(FieldDefn fieldDefn) {
    ColumnMetadata innerSchema = MetadataUtils.newMapArray(fieldDefn.key());
    int dims = fieldDefn.lookahead().dimensions();
    ColumnMetadata fieldSchema = repeatedListSchemaFor(fieldDefn.key(), dims, innerSchema);
    return multiDimObjectArrayFor(fieldDefn.fieldWriterFor(fieldSchema), dims, null);
  }

  /**
   * Create a RepeatedList which contains Unions. (Actually, this is an
   * array of List objects internally.) The variant is variable, it makes no
   * sense to specify a schema for the variant. Also, omitting the schema
   * save a large amount of complexity that will likely never be needed.
   */
  @SuppressWarnings("unused")
  private ElementParser repeatedListOfVariantListenerFor(FieldDefn fieldDefn) {
    ColumnMetadata innerSchema = MetadataUtils.newVariant(fieldDefn.key(), DataMode.REPEATED);
    int dims = fieldDefn.lookahead().dimensions();
    ColumnMetadata fieldSchema = repeatedListSchemaFor(fieldDefn.key(), dims, innerSchema);
    return multiDimVariantArrayParserFor(fieldDefn.fieldWriterFor(fieldSchema), dims);
  }

  /**
   * Convert the JSON type, obtained by looking ahead one token, to a Drill
   * scalar type. Report an error if the JSON type does not map to a Drill
   * type (which can occur in a context where we expect a scalar, but got
   * an object or array.)
   */
  private MinorType scalarTypeFor(FieldDefn fieldDefn) {
    MinorType colType = drillTypeFor(fieldDefn.lookahead().type());
    if (colType == null) {
      throw loader().unsupportedJsonTypeException(
          fieldDefn.key(), fieldDefn.lookahead().type());
    }
    return colType;
  }

  /**
   * Map a JSON token type to a Drill minor type. All-text mode maps
   * everything to VARCHAR; INTEGER optionally widens to FLOAT8 when
   * numbers are read as doubles.
   */
  public MinorType drillTypeFor(JsonType type) {
    if (loader().options().allTextMode) {
      return MinorType.VARCHAR;
    }
    switch (type) {
      case BOOLEAN:
        return MinorType.BIT;
      case FLOAT:
        return MinorType.FLOAT8;
      case INTEGER:
        if (loader().options().readNumbersAsDouble) {
          return MinorType.FLOAT8;
        } else {
          return MinorType.BIGINT;
        }
      case STRING:
        return MinorType.VARCHAR;
      default:
        // Object/array tokens have no scalar mapping; caller treats this
        // as unsupported via scalarTypeFor().
        throw new IllegalStateException(type.name());
    }
  }
}
/*
 * This code is subject to the HIEOS License, Version 1.0
 *
 * Copyright(c) 2011 Vangent, Inc.  All rights reserved.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.vangent.hieos.services.xds.bridge.transactions;

import com.vangent.hieos.hl7v3util.model.exception.ModelBuilderException;
import com.vangent.hieos.services.xds.bridge.activity.AddPatientIdActivity;
import com.vangent.hieos.services.xds.bridge.activity.CDAToXDSMapperActivity;
import com.vangent.hieos.services.xds.bridge.activity.DocumentIdValidationActivity;
import com.vangent.hieos.services.xds.bridge.activity.ISubmitDocumentRequestActivity;
import com.vangent.hieos.services.xds.bridge.activity.RetrieveReplaceExtrinsicIdActivity;
import com.vangent.hieos.services.xds.bridge.activity.SDRActivityContext;
import com.vangent.hieos.services.xds.bridge.activity.SubmitPnRActivity;
import com.vangent.hieos.services.xds.bridge.message.SubmitDocumentRequestBuilder;
import com.vangent.hieos.services.xds.bridge.message.SubmitDocumentResponseBuilder;
import com.vangent.hieos.services.xds.bridge.message.SubmitDocumentResponseMessage;
import com.vangent.hieos.services.xds.bridge.model.Document;
import com.vangent.hieos.services.xds.bridge.model.ResponseType;
import com.vangent.hieos.services.xds.bridge.model.ResponseType.ResponseTypeStatus;
import com.vangent.hieos.services.xds.bridge.model.SubmitDocumentRequest;
import com.vangent.hieos.services.xds.bridge.model.SubmitDocumentResponse;
import com.vangent.hieos.services.xds.bridge.model.SubmitDocumentResponse.Status;
import com.vangent.hieos.services.xds.bridge.support.XDSBridgeServiceContext;
import com.vangent.hieos.xutil.services.framework.XBaseTransaction;
import com.vangent.hieos.xutil.xlog.client.XLogMessage;
import org.apache.axiom.om.OMElement;
import org.apache.axis2.AxisFault;
import org.apache.log4j.Logger;

import java.util.ArrayList;
import java.util.List;

/**
 * Handles XDS-bridge "Submit Document Request" transactions: unmarshals the
 * incoming request, registers the patient id, runs the per-document activity
 * pipeline, and marshals a success/partial/failure response.
 *
 * @version v1.0, 2011-06-09
 * @author Vangent
 */
public class SubmitDocumentRequestHandler extends XBaseTransaction {

    /** This class' logger. */
    private static final Logger logger =
        Logger.getLogger(SubmitDocumentRequestHandler.class);

    /** Registers the patient id with the registry before any submission. */
    private final AddPatientIdActivity addPatientIdActivity;

    /** Per-document activities, executed in order; first failure aborts. */
    private final List<ISubmitDocumentRequestActivity> processActivities;

    /** Unmarshals incoming request payloads. */
    private final SubmitDocumentRequestBuilder requestBuilder;

    /** Marshals outgoing response payloads. */
    private final SubmitDocumentResponseBuilder responseBuilder;

    /**
     * Creates a handler wired to the builders, clients and activity
     * dependencies supplied by the service context.
     *
     * @param logMessage transaction log sink (stored in inherited field)
     * @param context source of builders and registry/repository clients
     */
    public SubmitDocumentRequestHandler(XLogMessage logMessage,
            XDSBridgeServiceContext context) {

        super();

        // NOTE(review): XBaseTransaction may expose a constructor accepting
        // the log message (the original left "super(logMessage); ??");
        // confirm and prefer it if available.
        this.log_message = logMessage;

        this.requestBuilder = context.getSubmitDocumentRequestBuilder();
        this.responseBuilder = context.getSubmitDocumentResponseBuilder();

        this.addPatientIdActivity =
            new AddPatientIdActivity(context.getRegistryClient());

        this.processActivities =
            new ArrayList<ISubmitDocumentRequestActivity>();
        this.processActivities.add(
            new CDAToXDSMapperActivity(context.getMapperFactory()));
        this.processActivities.add(
            new DocumentIdValidationActivity(context.getRegistryClient()));
        this.processActivities.add(
            new RetrieveReplaceExtrinsicIdActivity(
                context.getRegistryClient()));
        this.processActivities.add(
            new SubmitPnRActivity(context.getRepositoryClient()));
    }

    /**
     * Registers the request's patient id with the registry.
     *
     * @param sdrRequest parsed request (non-null)
     * @param sdrResponse response to record errors into
     * @return true when the patient id was added successfully
     */
    private boolean addPatientIdToRegistry(SubmitDocumentRequest sdrRequest,
            SubmitDocumentResponse sdrResponse) {

        SDRActivityContext context =
            new SDRActivityContext(sdrRequest, null, sdrResponse);

        return this.addPatientIdActivity.execute(context);
    }

    /**
     * @return the transaction log message held by the base class
     */
    protected XLogMessage getLogMessage() {
        return this.log_message;
    }

    /**
     * @return true when the transaction log records a pass
     */
    @Override
    public boolean getStatus() {
        return getLogMessage().isPass();
    }

    /**
     * @return true when a log message exists and logging is enabled
     */
    private boolean isLogMessageEnabled() {

        boolean result = false;

        XLogMessage logmsg = getLogMessage();

        if ((logmsg != null) && logmsg.isLogEnabled()) {
            result = true;
        }

        return result;
    }

    /**
     * Marshals the response model into its XML element form.
     *
     * @param sdrResponse response to marshal
     * @return the marshaled response element
     */
    private OMElement marshalResponse(SubmitDocumentResponse sdrResponse) {

        // marshal response
        SubmitDocumentResponseMessage result =
            this.responseBuilder.buildMessage(sdrResponse);

        return result.getElement();
    }

    /**
     * Entry point: processes one submit-document request end to end and
     * returns the marshaled response. Never returns null; failures are
     * reported inside the response element.
     *
     * @param request incoming request element
     * @return marshaled response element
     * @throws AxisFault on transport-level failure
     */
    public OMElement run(OMElement request) throws AxisFault {

        SubmitDocumentResponse sdrResponse =
            new SubmitDocumentResponse(Status.Failure);

        SubmitDocumentRequest sdrRequest =
            unmarshalRequest(request, sdrResponse);

        if (sdrRequest != null) {

            boolean pidAdded = addPatientIdToRegistry(sdrRequest, sdrResponse);

            if (pidAdded) {
                runActivities(sdrRequest, sdrResponse);
            }
        }

        OMElement result = marshalResponse(sdrResponse);

        if (isLogMessageEnabled()) {

            // let's push stats to logbrowser
            XLogMessage xlogger = getLogMessage();

            if (Status.Failure.equals(sdrResponse.getStatus())) {
                xlogger.setPass(false);
            }

            xlogger.addOtherParam("Response", result);

            // BUG FIX: sdrRequest is null when unmarshalRequest() fails; the
            // original dereferenced it unconditionally here, throwing an NPE
            // instead of returning the failure response.
            if (sdrRequest != null) {
                xlogger.addOtherParam("PatientID",
                                      sdrRequest.getPatientId().getCXFormatted());
            }

            xlogger.addOtherParam("ResponseStatus", sdrResponse.getStatus());

            for (ResponseType docResponse : sdrResponse.getResponses()) {

                if (ResponseTypeStatus.Failure.equals(
                        docResponse.getStatus())) {

                    String param = String.format("Document[%s]",
                                       docResponse.getDocumentId());

                    xlogger.addOtherParam(param,
                                          docResponse.getErrorMessage());
                }
            }
        }

        return result;
    }

    /**
     * Runs the activity pipeline for each document, tallying failures and
     * setting the aggregate status (Success / PartialSuccess / Failure).
     *
     * @param sdrRequest parsed request holding the documents
     * @param sdrResponse response accumulating per-document results
     */
    private void runActivities(SubmitDocumentRequest sdrRequest,
                               SubmitDocumentResponse sdrResponse) {

        // from here we need to start tracking exceptions per document
        // to send back a proper response of success, partial, failure
        int failureCount = 0;
        int documentCount = 0;

        for (Document document : sdrRequest.getDocuments()) {

            // each activity will return success/failure
            // each activity will update the response w/ error
            SDRActivityContext context =
                new SDRActivityContext(sdrRequest, document, sdrResponse);

            boolean success = true;

            for (ISubmitDocumentRequestActivity activity :
                    this.processActivities) {

                logger.debug(String.format("Executing %s",
                                           activity.getName()));

                success = activity.execute(context);

                if (!success) {
                    logger.info(String.format("Activity %s failed.",
                                              activity.getName()));
                    ++failureCount;
                    break;
                }
            }

            if (success) {
                sdrResponse.addSuccess(document);
            }

            ++documentCount;
        }

        // set the final status
        if (failureCount == 0) {
            sdrResponse.setStatus(Status.Success);
        } else if (failureCount == documentCount) {
            sdrResponse.setStatus(Status.Failure);
        } else {
            sdrResponse.setStatus(Status.PartialSuccess);
        }
    }

    /**
     * Unmarshals the raw request element into the request model. On parse
     * failure the response is marked Failure with an explanatory message
     * and null is returned.
     *
     * @param request incoming request element
     * @param sdrResponse response to record parse errors into
     * @return the parsed request, or null when parsing failed
     */
    private SubmitDocumentRequest unmarshalRequest(OMElement request,
            SubmitDocumentResponse sdrResponse) {

        SubmitDocumentRequest result = null;

        try {

            // unmarshal request
            result = this.requestBuilder.buildSubmitDocumentRequest(request);

        } catch (ModelBuilderException e) {

            // this request failed validation (most likely)
            String errmsg = String.format(
                "Request could not be parsed. Failure(s) to follow. %s",
                e.getMessage());

            sdrResponse.setStatus(Status.Failure);
            sdrResponse.addResponse(ResponseTypeStatus.Failure, errmsg);
        }

        return result;
    }
}
package org.saucistophe.thud.model.boards;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.saucistophe.thud.model.Coordinate;
import org.saucistophe.thud.model.Piece;
import static org.saucistophe.thud.model.Piece.DWARF;
import static org.saucistophe.thud.model.Piece.EMPTY;
import static org.saucistophe.thud.model.Piece.OUT;
import static org.saucistophe.thud.model.Piece.TROLL;
import org.saucistophe.utils.Constants;

/**
 The board corresponds to a state of the game, and contains a 2D array of
 pieces. This class contains AI methods and everything pertaining to game
 moves. Kind of a god object, but hey, what the heck.
 */
public abstract class Board implements Cloneable {
	/** The value considered as a maximum value for score evaluation. */
	public static int INFINITY = Integer.MAX_VALUE - 5;

	/** This class' logger. */
	public static Logger LOGGER = Logger.getLogger(Board.class.getName());

	/** The grid of pieces, indexed [x][y]. */
	public Piece squares[][];

	/** The current playing side, true if the dwarves are playing. */
	public boolean dwarvesTurn;

	// Cache of the playable squares, filled by readFromStream().
	// NOTE(review): static, so it is shared by ALL boards and reflects the
	// last board read from a file — confirm this single-board assumption.
	private static List<Coordinate> piecesCache = null;

	/**
	 Returns a set containing each possible move for the specified piece.

	 @param x The X location of the piece to move.
	 @param y The Y location of the piece to move.
	 @param trollShovings A list of the troll shoving moves, that will be
	 filled by this method.
	 @return a set of the possible moves. This set contains each move only once.
	 */
	public abstract List<Coordinate> validMoves(int x, int y, List<Coordinate> trollShovings);

	/**
	 Clones the current board (deep-copies the squares array).

	 @return A board, identical to this one.
	 */
	public Board cloneBoard() {
		Board result = null;
		try {
			result = (Board) this.clone();
			result.set(this);
		} catch (CloneNotSupportedException ex) {
			Logger.getLogger(Board.class.getName()).log(Level.SEVERE, null, ex);
		}
		return result;
	}

	/**
	 Copies the state (squares and turn) of another board into this one.

	 @param thatBoard The board to copy from.
	 */
	public void set(Board thatBoard) {
		// Deep copy: clone each column so the two boards share no arrays.
		this.squares = Arrays.stream(thatBoard.squares).map(x -> x.clone()).toArray(Piece[][]::new);
		this.dwarvesTurn = thatBoard.dwarvesTurn;
	}

	/**
	 @return The width of the board, as inferred from the arrays lengths.
	 */
	public int getWidth() {
		return squares.length;
	}

	/**
	 @return The height of the board, as inferred from the arrays lengths.
	 */
	public int getHeight() {
		return squares[0].length;
	}

	/**
	 Returns true if the specified square is near a dwarf.

	 @param x The X location of the square to check.
	 @param y The Y location of the square to check.
	 @return <b>true</b> if the specified place is near a dwarf.
	 */
	public boolean isNearADwarf(int x, int y) {
		return !getNearby(DWARF, x, y).isEmpty();
	}

	/**
	 @param x The X location of the square to check.
	 @param y The Y location of the square to check.
	 @return True if the specified square is inside the board.
	 */
	public boolean isInsideBounds(int x, int y) {
		// If the square is out of the physical board's dimensions:
		if (x < 0 || y < 0 || x >= getWidth() || y >= getHeight()) {
			return false;
		} else {
			// If the square is marked as out.
			return squares[x][y] != OUT;
		}
	}

	/**
	 @param coordinate The location of the square to check.
	 @return True if the specified square is inside the board.
	 */
	public boolean isInsideBounds(Coordinate coordinate) {
		return isInsideBounds(coordinate.width, coordinate.height);
	}

	/**
	 Moves a piece to a square and toggles the playing side.

	 @param x1 The X location of the piece to move.
	 @param y1 The Y location of the piece to move.
	 @param x2 The X location of the destination square.
	 @param y2 The Y location of the destination square.
	 @return The list of potential targets for troll players, or null for
	 dwarf moves.
	 */
	public List<Coordinate> move(int x1, int y1, int x2, int y2) {
		boolean isTroll = squares[x1][y1] == TROLL;
		squares[x2][y2] = squares[x1][y1];
		squares[x1][y1] = EMPTY;

		// Change the turn.
		dwarvesTurn = !dwarvesTurn;

		// We check if a (or several) dwarf is captured.
		if (isTroll) {
			return getNearby(DWARF, x2, y2);
		}
		return null;
	}

	/**
	 @param pieceType The piece type to look for.
	 @param x The X location of the square to check.
	 @param y The Y location of the square to check.
	 @return The list of pieces of the given type adjacent to the square.
	 */
	public List<Coordinate> getNearby(Piece pieceType, int x, int y) {
		return Arrays.stream(Coordinate.directions)
			.map(direction -> new Coordinate(x + direction.width, y + direction.height))
			// Must be on the board
			.filter(this::isInsideBounds)
			// Must be the requested piece type
			.filter(square -> getPiece(square) == pieceType)
			.collect(Collectors.toList());
	}

	/**
	 Counts the number of the chosen piece on the board.

	 @param piece The chosen type of piece
	 @return The number of occurrences of this piece on the board.
	 */
	public int numberOf(Piece piece) {
		return (int) getPiecesStream(piece).count();
	}

	/**
	 @return A list of the possible boards after moving.
	 */
	public List<Board> childrenBoards() {
		// TODO : for NegaScout, order moves to get the more interesting first.
		List<Board> result = new ArrayList<>();
		Stream<Coordinate> piecesToMove = getPiecesStream(dwarvesTurn ? DWARF : TROLL);

		// Iterate over pieces
		piecesToMove.forEach(pieceToMove -> {
			// Iterate over the piece's possible destinations
			List<Coordinate> trollShovings = null;
			if (!dwarvesTurn) {
				trollShovings = new ArrayList<>();
			}
			List<Coordinate> possibleMoves = validMoves(pieceToMove.width, pieceToMove.height, trollShovings);

			for (Coordinate destination : possibleMoves) {
				// Create an imaginary board from the move.
				Board temporaryBoard = this.cloneBoard();
				List<Coordinate> dwarvesVictim = temporaryBoard.move(pieceToMove.width, pieceToMove.height,
					destination.width, destination.height);

				// If there is no dwarf victim, simply effect the move.
				if (dwarvesVictim == null || dwarvesVictim.isEmpty()) {
					result.add(temporaryBoard);
				} else // If it's a troll shoving, for simplicity, kill all the dwarves.
				{
					if (trollShovings != null && trollShovings.contains(destination)) {
						for (Coordinate victim : dwarvesVictim) {
							temporaryBoard.squares[victim.width][victim.height] = EMPTY;
						}
						result.add(temporaryBoard);
					} // If not, only one victim can be made, create a board for each one.
					else {
						for (Coordinate victim : dwarvesVictim) {
							Board victimTemporaryBoard = temporaryBoard.cloneBoard();
							victimTemporaryBoard.squares[victim.width][victim.height] = EMPTY;
							result.add(victimTemporaryBoard);
						}
					}
				}
			}
		});

		return result;
	}

	/**
	 Returns a stream of the pieces of the given type.

	 @param type The type to get, DWARF or TROLL.
	 @return A stream of the coordinates of the pieces.
	 */
	public Stream<Coordinate> getPiecesStream(Piece type) {
		return piecesCache.stream()
			.filter(coordinate -> squares[coordinate.width][coordinate.height] == type);
	}

	/**
	 Writes the board quickly to a new file.

	 @return The file where it is saved.
	 */
	public File writeQuick() {
		File tempFile = new File(this.hashCode() + ".thud");
		writeToFile(tempFile);
		return tempFile;
	}

	/**
	 Writes the board to a file, in a quite readable format.

	 @param outputFile The file to write to.
	 */
	public void writeToFile(File outputFile) {
		// BUG FIX: the writer was only closed on the happy path; use
		// try-with-resources so it is closed (and flushed) on all paths.
		try (PrintWriter writer = new PrintWriter(outputFile, "UTF-8")) {
			// Iterate on squares
			for (int j = 0; j < getHeight(); j++) {
				// Skip the first line feed
				if (j != 0) {
					writer.println();
				}

				for (int i = 0; i < getWidth(); i++) {
					// Write the playing side on the top left corner.
					if (i == 0 && j == 0) {
						writer.print(dwarvesTurn ? "D" : "T");
					} else {
						// Otherwise, print the piece's code.
						writer.print(squares[i][j].text);
					}
				}
			}
		} catch (IOException ex) {
			Logger.getLogger(Board.class.getName()).log(Level.SEVERE, null, ex);
		}
	}

	/**
	 Reads a board from a stream to a thud! file.

	 @param inputStream The stream of the file to read.
	 @return The stored board.
	 @throws java.io.IOException When there's a problem reading the file,
	 or when the stream is empty.
	 */
	public static Board readFromStream(InputStream inputStream) throws IOException {
		List<String> lines;

		// BUG FIX: the reader (and underlying stream) was never closed.
		try (BufferedReader reader = new BufferedReader(
			new InputStreamReader(inputStream, Constants.ENCODING))) {
			// Turn the file to an array of strings.
			lines = reader.lines().collect(Collectors.toList());
		}

		// BUG FIX: an empty file used to surface as an unchecked
		// NoSuchElementException from getAsInt(); report it as an IOException.
		if (lines.isEmpty()) {
			throw new IOException("Empty board file.");
		}

		// First check the longest line in the file.
		int longestLine = lines.stream().mapToInt(String::length).max().getAsInt();

		// Also get the number of lines.
		int numberOfLines = lines.size();

		// TODO add something to decide which class.
		Board board = new RegularBoard();

		// Create the relevant square board.
		board.squares = new Piece[longestLine][numberOfLines];

		int lineNumber = 0;
		for (String line : lines) {
			// For each character:
			int charNumber = 0;
			for (char c : line.toCharArray()) {
				// Turn the character to a piece.
				board.squares[charNumber][lineNumber] = Piece.fromText("" + c);
				charNumber++;
			}
			lineNumber++;
		}

		// Change the top-left corner to the playing side.
		Piece playingSide = board.squares[0][0];
		board.squares[0][0] = OUT;

		// Set the board's attributes.
		board.dwarvesTurn = playingSide != TROLL;

		// Fill the cache of playable pieces.
		piecesCache = new ArrayList<>();
		for (int i = 0; i < board.getWidth(); i++) {
			for (int j = 0; j < board.getHeight(); j++) {
				if (board.squares[i][j] != OUT) {
					piecesCache.add(new Coordinate(i, j));
				}
			}
		}

		return board;
	}

	/**
	 Reads a board from a thud! file.

	 @param inputFile The file to read.
	 @return The stored board.
	 @throws IOException In case of problems when accessing or reading the file.
	 */
	public static Board readFromFile(File inputFile) throws IOException {
		// BUG FIX: the FileInputStream leaked if readFromStream threw;
		// try-with-resources guarantees it is closed.
		try (InputStream inputStream = new FileInputStream(inputFile)) {
			return readFromStream(inputStream);
		}
	}

	/**
	 Convenience method for getting a square's piece from its coordinate.

	 @param coordinate The square's coordinates
	 @return The corresponding piece.
	 */
	public Piece getPiece(Coordinate coordinate) {
		return squares[coordinate.width][coordinate.height];
	}

	@Override
	public boolean equals(Object obj) {
		if (obj == this) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (getClass() != obj.getClass()) {
			return false;
		}
		final Board other = (Board) obj;
		if (!Arrays.deepEquals(this.squares, other.squares)) {
			return false;
		}
		return this.dwarvesTurn == other.dwarvesTurn;
	}

	@Override
	public int hashCode() {
		int hash = 7;
		hash = 73 * hash + Arrays.deepHashCode(this.squares);
		hash = 73 * hash + (this.dwarvesTurn ? 1 : 0);
		return hash;
	}
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui; import com.intellij.icons.AllIcons; import com.intellij.openapi.actionSystem.ActionManager; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.CommonShortcuts; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.ui.JBMenuItem; import com.intellij.openapi.ui.JBPopupMenu; import com.intellij.openapi.ui.popup.JBPopup; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.components.JBList; import com.intellij.util.ReflectionUtil; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.UIUtil; import javax.swing.*; import javax.swing.border.Border; import javax.swing.border.CompoundBorder; import javax.swing.event.DocumentListener; import javax.swing.plaf.TextUI; import java.awt.*; import java.awt.event.*; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; /** * @author max */ public class SearchTextField extends JPanel { private int myHistorySize = 5; private final MyModel myModel; private final TextFieldWithProcessing myTextField; private JBPopup myPopup; private JLabel myClearFieldLabel; private JLabel myToggleHistoryLabel; private JPopupMenu myNativeSearchPopup; private JMenuItem 
myNoItems; public SearchTextField() { this(true); } public SearchTextField(boolean historyEnabled) { super(new BorderLayout()); myModel = new MyModel(); myTextField = new TextFieldWithProcessing() { @Override public void processKeyEvent(final KeyEvent e) { if (preprocessEventForTextField(e)) return; super.processKeyEvent(e); } @Override public void setBackground(final Color bg) { super.setBackground(bg); if (!hasIconsOutsideOfTextField()) { if (myClearFieldLabel != null) { myClearFieldLabel.setBackground(bg); } } if (myToggleHistoryLabel != null) { myToggleHistoryLabel.setBackground(bg); } } @Override public void setUI(TextUI ui) { if (SystemInfo.isMac) { try { Class<?> uiClass = UIUtil.isUnderIntelliJLaF() ? Class.forName("com.intellij.ide.ui.laf.intellij.MacIntelliJTextFieldUI") : Class.forName("com.intellij.ide.ui.laf.darcula.ui.DarculaTextFieldUI"); Method method = ReflectionUtil.getMethod(uiClass, "createUI", JComponent.class); if (method != null) { super.setUI((TextUI)method.invoke(uiClass, this)); Class<?> borderClass = UIUtil.isUnderIntelliJLaF() ? 
Class.forName("com.intellij.ide.ui.laf.intellij.MacIntelliJTextBorder") : Class.forName("com.intellij.ide.ui.laf.darcula.ui.DarculaTextBorder"); setBorder((Border)ReflectionUtil.newInstance(borderClass)); setOpaque(false); } return; } catch (Exception ignored) { } } super.setUI(ui); } }; myTextField.setColumns(15); myTextField.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent e) { onFocusLost(); super.focusLost(e); } @Override public void focusGained(FocusEvent e) { onFocusGained(); super.focusGained(e); } }); add(myTextField, BorderLayout.CENTER); myTextField.addKeyListener(new KeyAdapter() { @Override public void keyPressed(KeyEvent e) { if (e.getKeyCode() == KeyEvent.VK_DOWN) { if (isSearchControlUISupported() && myNativeSearchPopup != null) { myNativeSearchPopup.show(myTextField, 5, myTextField.getHeight()); } else if (myPopup == null || !myPopup.isVisible()) { showPopup(); } } } }); if (isSearchControlUISupported()) { myTextField.putClientProperty("JTextField.variant", "search"); myTextField.putClientProperty("JTextField.Search.CancelAction", new ActionListener() { @Override public void actionPerformed(ActionEvent e) { myTextField.setText(""); onFieldCleared(); } }); if (historyEnabled) { myNativeSearchPopup = new JBPopupMenu(); myNoItems = new JBMenuItem("No recent searches"); myNoItems.setEnabled(false); updateMenu(); myTextField.putClientProperty("JTextField.Search.FindPopup", myNativeSearchPopup); } } else { myToggleHistoryLabel = new JLabel(AllIcons.Actions.Search); myToggleHistoryLabel.setOpaque(true); myToggleHistoryLabel.addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent e) { togglePopup(); } }); if (historyEnabled) { add(myToggleHistoryLabel, BorderLayout.WEST); } myClearFieldLabel = new JLabel(UIUtil.isUnderDarcula() ? 
AllIcons.Actions.Clean : AllIcons.Actions.CleanLight); myClearFieldLabel.setOpaque(true); add(myClearFieldLabel, BorderLayout.EAST); myClearFieldLabel.addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent e) { myTextField.setText(""); onFieldCleared(); } }); if (!hasIconsOutsideOfTextField()) { final Border originalBorder; if (SystemInfo.isMac) { originalBorder = BorderFactory.createLoweredBevelBorder(); } else { originalBorder = myTextField.getBorder(); } myToggleHistoryLabel.setBackground(myTextField.getBackground()); myClearFieldLabel.setBackground(myTextField.getBackground()); setBorder(new CompoundBorder(IdeBorderFactory.createEmptyBorder(2, 0, 2, 0), originalBorder)); myTextField.setOpaque(true); myTextField.setBorder(IdeBorderFactory.createEmptyBorder(0, 5, 0, 5)); } else { setBorder(IdeBorderFactory.createEmptyBorder(2, 0, 2, 0)); } } if (ApplicationManager.getApplication() != null) { //tests final ActionManager actionManager = ActionManager.getInstance(); if (actionManager != null) { final AnAction clearTextAction = actionManager.getAction(IdeActions.ACTION_CLEAR_TEXT); if (clearTextAction.getShortcutSet().getShortcuts().length == 0) { clearTextAction.registerCustomShortcutSet(CommonShortcuts.ESCAPE, this); } } } } protected void onFieldCleared() { } protected void onFocusLost() { } protected void onFocusGained() { } private void updateMenu() { if (myNativeSearchPopup != null) { myNativeSearchPopup.removeAll(); final int itemsCount = myModel.getSize(); if (itemsCount == 0) { myNativeSearchPopup.add(myNoItems); } else { for (int i = 0; i < itemsCount; i++) { final String item = myModel.getElementAt(i); addMenuItem(item); } } } } protected boolean isSearchControlUISupported() { return (SystemInfo.isMacOSLeopard && UIUtil.isUnderAquaLookAndFeel()) || UIUtil.isUnderDarcula() || UIUtil.isUnderIntelliJLaF(); } protected boolean hasIconsOutsideOfTextField() { return UIUtil.isUnderGTKLookAndFeel() || UIUtil.isUnderNimbusLookAndFeel(); } public 
void addDocumentListener(DocumentListener listener) { getTextEditor().getDocument().addDocumentListener(listener); } public void removeDocumentListener(DocumentListener listener) { getTextEditor().getDocument().removeDocumentListener(listener); } public void addKeyboardListener(final KeyListener listener) { getTextEditor().addKeyListener(listener); } public void setEnabled(boolean enabled) { super.setEnabled(enabled); if (myToggleHistoryLabel != null) { final Color bg = enabled ? UIUtil.getTextFieldBackground() : UIUtil.getPanelBackground(); myToggleHistoryLabel.setBackground(bg); myClearFieldLabel.setBackground(bg); } } public void setHistorySize(int historySize) { if (historySize <= 0) throw new IllegalArgumentException("history size must be a positive number"); myHistorySize = historySize; } public void setHistory(List<String> aHistory) { myModel.setItems(aHistory); } public List<String> getHistory() { final int itemsCount = myModel.getSize(); final List<String> history = new ArrayList<String>(itemsCount); for (int i = 0; i < itemsCount; i++) { history.add(myModel.getElementAt(i)); } return history; } public void setText(String aText) { getTextEditor().setText(aText); } public String getText() { return getTextEditor().getText(); } public void removeNotify() { super.removeNotify(); hidePopup(); } public void addCurrentTextToHistory() { if ((myNativeSearchPopup != null && myNativeSearchPopup.isVisible()) || (myPopup != null && myPopup.isVisible())) { return; } final String item = getText(); myModel.addElement(item); } private void addMenuItem(final String item) { if (myNativeSearchPopup != null) { myNativeSearchPopup.remove(myNoItems); final JMenuItem menuItem = new JBMenuItem(item); myNativeSearchPopup.add(menuItem); menuItem.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { myTextField.setText(item); addCurrentTextToHistory(); } }); } } public void selectText() { getTextEditor().selectAll(); } public JTextField 
getTextEditor() { return myTextField; } public boolean requestFocusInWindow() { return myTextField.requestFocusInWindow(); } public void requestFocus() { getTextEditor().requestFocus(); } public class MyModel extends AbstractListModel { private List<String> myFullList = new ArrayList<String>(); private String mySelectedItem; public String getElementAt(int index) { return myFullList.get(index); } public int getSize() { return Math.min(myHistorySize, myFullList.size()); } public void addElement(String item) { final String newItem = item.trim(); if (newItem.isEmpty()) { return; } final int length = myFullList.size(); int index = -1; for (int i = 0; i < length; i++) { if (StringUtil.equalsIgnoreCase(myFullList.get(i), newItem)) { index = i; break; } } if (index == 0) { // item is already at the top of the list return; } else if (index > 0) { // move item to top of the list myFullList.remove(index); } else if (myFullList.size() >= myHistorySize && myFullList.size() > 0) { // trim list myFullList.remove(myFullList.size() - 1); } insertElementAt(newItem, 0); } public void insertElementAt(String item, int index) { myFullList.add(index, item); fireContentsChanged(); } public String getSelectedItem() { return mySelectedItem; } public void setSelectedItem(String anItem) { mySelectedItem = anItem; } public void fireContentsChanged() { fireContentsChanged(this, -1, -1); updateMenu(); } public void setItems(List<String> aList) { myFullList = new ArrayList<String>(aList); fireContentsChanged(); } } private void hidePopup() { if (myPopup != null) { myPopup.cancel(); myPopup = null; } } @Override public Dimension getPreferredSize() { Dimension size = super.getPreferredSize(); Border border = super.getBorder(); if (border != null && UIUtil.isUnderAquaLookAndFeel()) { JBInsets.addTo(size, border.getBorderInsets(this)); } return size; } protected Runnable createItemChosenCallback(final JList list) { return new Runnable() { public void run() { final String value = 
(String)list.getSelectedValue(); getTextEditor().setText(value != null ? value : ""); addCurrentTextToHistory(); if (myPopup != null) { myPopup.cancel(); myPopup = null; } } }; } protected void showPopup() { if (myPopup == null || !myPopup.isVisible()) { final JList list = new JBList(myModel); final Runnable chooseRunnable = createItemChosenCallback(list); myPopup = JBPopupFactory.getInstance().createListPopupBuilder(list) .setMovable(false) .setRequestFocus(true) .setItemChoosenCallback(chooseRunnable).createPopup(); if (isShowing()) { myPopup.showUnderneathOf(getPopupLocationComponent()); } } } protected Component getPopupLocationComponent() { return hasIconsOutsideOfTextField() ? myToggleHistoryLabel : this; } private void togglePopup() { if (myPopup == null) { showPopup(); } else { hidePopup(); } } public void setSelectedItem(final String s) { getTextEditor().setText(s); } public int getSelectedIndex() { return myModel.myFullList.indexOf(getText()); } protected static class TextFieldWithProcessing extends JTextField { public void processKeyEvent(KeyEvent e) { super.processKeyEvent(e); } } public final void keyEventToTextField(KeyEvent e) { myTextField.processKeyEvent(e); } protected boolean preprocessEventForTextField(KeyEvent e) { return false; } public void setSearchIcon(final Icon icon) { if (! isSearchControlUISupported()) { myToggleHistoryLabel.setIcon(icon); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.infra.federation.optimizer;

import org.apache.shardingsphere.infra.database.type.DatabaseTypeRegistry;
import org.apache.shardingsphere.infra.database.type.dialect.H2DatabaseType;
import org.apache.shardingsphere.infra.federation.optimizer.context.OptimizerContextFactory;
import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.infra.metadata.resource.ShardingSphereResource;
import org.apache.shardingsphere.infra.metadata.rule.ShardingSphereRuleMetaData;
import org.apache.shardingsphere.infra.metadata.schema.ShardingSphereSchema;
import org.apache.shardingsphere.infra.metadata.schema.model.ColumnMetaData;
import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
import org.apache.shardingsphere.infra.parser.ShardingSphereSQLParserEngine;
import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
import org.apache.shardingsphere.parser.config.SQLParserRuleConfiguration;
import org.apache.shardingsphere.parser.rule.SQLParserRule;
import org.apache.shardingsphere.parser.rule.builder.DefaultSQLParserRuleConfigurationBuilder;
import org.apache.shardingsphere.sql.parser.api.CacheOption;
import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
import org.junit.Before;
import org.junit.Test;

import java.sql.Types;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for {@code ShardingSphereOptimizer}: each case parses one SQL text with the H2
 * dialect parser, runs it through the optimizer for the {@code federate_jdbc} schema and
 * compares the explained physical plan (whitespace-insensitively) against an expected
 * Calcite Enumerable/Bindable operator tree.
 */
public final class ShardingSphereOptimizerTest {
    
    private static final String SELECT_CROSS_JOIN_CONDITION = "SELECT t_order_federate.order_id, t_order_federate.user_id, t_user_info.user_id "
            + "FROM t_order_federate JOIN t_user_info ON t_order_federate.user_id = t_user_info.user_id "
            + "WHERE t_user_info.user_id = 13";
    
    private static final String SELECT_WHERE_ALL_FIELDS = "SELECT user_id, information FROM t_user_info WHERE user_id = 12";
    
    private static final String SELECT_WHERE_SINGLE_FIELD = "SELECT user_id FROM t_user_info WHERE user_id = 12";
    
    private static final String SELECT_CROSS_WHERE = "SELECT t_order_federate.order_id, t_order_federate.user_id, t_user_info.user_id "
            + "FROM t_order_federate , t_user_info "
            + "WHERE t_order_federate.user_id = t_user_info.user_id";
    
    private static final String SELECT_CROSS_JOIN = "SELECT t_order_federate.order_id, t_order_federate.user_id, t_user_info.user_id "
            + "FROM t_order_federate JOIN t_user_info "
            + "ON t_order_federate.user_id = t_user_info.user_id";
    
    private static final String SELECT_CROSS_WHERE_CONDITION = "SELECT t_order_federate.order_id, t_order_federate.user_id, t_user_info.user_id "
            + "FROM t_order_federate ,t_user_info "
            + "WHERE t_order_federate.user_id = t_user_info.user_id AND t_user_info.user_id = 13";
    
    private static final String SELECT_SUBQUERY_FROM = "SELECT user.user_id, user.information "
            + "FROM (SELECT * FROM t_user_info WHERE user_id > 1) as user ";
    
    private static final String SELECT_SUBQUERY_WHERE_EXIST = "SELECT t_order_federate.order_id, t_order_federate.user_id FROM t_order_federate "
            + "WHERE EXISTS (SELECT * FROM t_user_info WHERE t_order_federate.user_id = t_user_info.user_id)";
    
    private static final String SELECT_SUBQUERY_WHERE_IN = "SELECT t_order_federate.order_id, t_order_federate.user_id FROM t_order_federate "
            + "WHERE t_order_federate.user_id IN (SELECT t_user_info.user_id FROM t_user_info)";
    
    private static final String SELECT_SUBQUERY_WHERE_BETWEEN = "SELECT t_order_federate.order_id, t_order_federate.user_id FROM t_order_federate "
            + "WHERE user_id BETWEEN (SELECT user_id FROM t_user_info WHERE information = 'before') "
            + "AND (SELECT user_id FROM t_user_info WHERE information = 'after')";
    
    private final String schemaName = "federate_jdbc";
    
    private final SQLParserRule sqlParserRule = new SQLParserRule(new DefaultSQLParserRuleConfigurationBuilder().build());
    
    private ShardingSphereOptimizer optimizer;
    
    /**
     * Build an optimizer over an in-memory schema containing the two test tables
     * ({@code t_order_federate}, {@code t_user_info}) backed by a mocked H2 resource.
     */
    @Before
    public void init() throws Exception {
        Map<String, TableMetaData> tableMetaDataMap = new HashMap<>(2, 1);
        tableMetaDataMap.put("t_order_federate", createOrderTableMetaData());
        tableMetaDataMap.put("t_user_info", createUserInfoTableMetaData());
        ShardingSphereMetaData metaData = new ShardingSphereMetaData(schemaName, mockResource(), null, new ShardingSphereSchema(tableMetaDataMap));
        optimizer = new ShardingSphereOptimizer(OptimizerContextFactory.create(Collections.singletonMap(schemaName, metaData), createGlobalRuleMetaData()));
    }
    
    private ShardingSphereRuleMetaData createGlobalRuleMetaData() {
        Collection<ShardingSphereRule> rules = new LinkedList<>();
        CacheOption cacheOption = new CacheOption(128, 1024L, 4);
        rules.add(new SQLParserRule(new SQLParserRuleConfiguration(false, cacheOption, cacheOption)));
        return new ShardingSphereRuleMetaData(Collections.emptyList(), rules);
    }
    
    private ShardingSphereResource mockResource() {
        ShardingSphereResource result = mock(ShardingSphereResource.class);
        when(result.getDatabaseType()).thenReturn(new H2DatabaseType());
        return result;
    }
    
    private TableMetaData createOrderTableMetaData() {
        ColumnMetaData orderIdColumn = new ColumnMetaData("order_id", Types.VARCHAR, true, false, false);
        ColumnMetaData userIdColumn = new ColumnMetaData("user_id", Types.VARCHAR, false, false, false);
        ColumnMetaData statusColumn = new ColumnMetaData("status", Types.VARCHAR, false, false, false);
        return new TableMetaData("t_order_federate", Arrays.asList(orderIdColumn, userIdColumn, statusColumn), Collections.emptyList());
    }
    
    private TableMetaData createUserInfoTableMetaData() {
        ColumnMetaData userIdColumn = new ColumnMetaData("user_id", Types.VARCHAR, true, false, false);
        ColumnMetaData informationColumn = new ColumnMetaData("information", Types.VARCHAR, false, false, false);
        return new TableMetaData("t_user_info", Arrays.asList(userIdColumn, informationColumn), Collections.emptyList());
    }
    
    /**
     * Parse the SQL with the H2 dialect parser and return the optimizer's explained plan.
     *
     * @param sql SQL text to parse and optimize
     * @return textual explain output of the optimized plan
     */
    private String optimize(final String sql) {
        ShardingSphereSQLParserEngine sqlParserEngine = new ShardingSphereSQLParserEngine(
                DatabaseTypeRegistry.getTrunkDatabaseTypeName(new H2DatabaseType()), sqlParserRule);
        SQLStatement sqlStatement = sqlParserEngine.parse(sql, false);
        return optimizer.optimize(schemaName, sqlStatement).explain();
    }
    
    /**
     * Assert that optimizing {@code sql} yields {@code expectedPlan}, comparing with all
     * whitespace stripped so the expected tree can be written with free-form indentation.
     *
     * @param sql SQL text to optimize
     * @param expectedPlan expected explain output, whitespace-insensitive
     */
    private void assertOptimizedPlan(final String sql, final String expectedPlan) {
        assertThat(optimize(sql).replaceAll("\\s*", ""), is(expectedPlan.replaceAll("\\s*", "")));
    }
    
    @Test
    public void assertSelectCrossJoinCondition() {
        String expected = "EnumerableCalc(expr#0..6=[{inputs}],order_id=[$t3],user_id=[$t4],user_id0=[$t0])"
                + "  EnumerableHashJoin(condition=[=($2,$6)],joinType=[inner])"
                + "    EnumerableCalc(expr#0..1=[{inputs}],expr#2=[CAST($t0):VARCHAR],proj#0..2=[{exprs}])"
                + "      EnumerableInterpreter"
                + "        BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[=(CAST($0):INTEGER,13)]])"
                + "    EnumerableCalc(expr#0..2=[{inputs}],expr#3=[CAST($t1):VARCHAR],proj#0..3=[{exprs}])"
                + "      EnumerableTableScan(table=[[federate_jdbc,t_order_federate]])";
        assertOptimizedPlan(SELECT_CROSS_JOIN_CONDITION, expected);
    }
    
    @Test
    public void assertSelectWhereAllFields() {
        String expected = "EnumerableInterpreter"
                + "  BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[=(CAST($0):INTEGER,12)]])";
        assertOptimizedPlan(SELECT_WHERE_ALL_FIELDS, expected);
    }
    
    @Test
    public void assertSelectWhereSingleField() {
        String expected = "EnumerableInterpreter"
                + "  BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[=(CAST($0):INTEGER,12)]],projects=[[0]])";
        assertOptimizedPlan(SELECT_WHERE_SINGLE_FIELD, expected);
    }
    
    @Test
    public void assertSelectCrossWhere() {
        String expected = "EnumerableCalc(expr#0..6=[{inputs}],order_id=[$t3],user_id=[$t4],user_id0=[$t0])"
                + "  EnumerableMergeJoin(condition=[=($2,$6)],joinType=[inner])"
                + "    EnumerableSort(sort0=[$2],dir0=[ASC])"
                + "      EnumerableCalc(expr#0..1=[{inputs}],expr#2=[CAST($t0):VARCHAR],proj#0..2=[{exprs}])"
                + "        EnumerableTableScan(table=[[federate_jdbc,t_user_info]])"
                + "    EnumerableSort(sort0=[$3],dir0=[ASC])"
                + "      EnumerableCalc(expr#0..2=[{inputs}],expr#3=[CAST($t1):VARCHAR],proj#0..3=[{exprs}])"
                + "        EnumerableTableScan(table=[[federate_jdbc,t_order_federate]])";
        assertOptimizedPlan(SELECT_CROSS_WHERE, expected);
    }
    
    @Test
    public void assertSelectCrossJoin() {
        String expected = "EnumerableCalc(expr#0..6=[{inputs}],proj#0..1=[{exprs}],user_id0=[$t4])"
                + "  EnumerableMergeJoin(condition=[=($3,$6)],joinType=[inner])"
                + "    EnumerableSort(sort0=[$3],dir0=[ASC])"
                + "      EnumerableCalc(expr#0..2=[{inputs}],expr#3=[CAST($t1):VARCHAR],proj#0..3=[{exprs}])"
                + "        EnumerableTableScan(table=[[federate_jdbc,t_order_federate]])"
                + "    EnumerableSort(sort0=[$2],dir0=[ASC])"
                + "      EnumerableCalc(expr#0..1=[{inputs}],expr#2=[CAST($t0):VARCHAR],proj#0..2=[{exprs}])"
                + "        EnumerableTableScan(table=[[federate_jdbc,t_user_info]])";
        assertOptimizedPlan(SELECT_CROSS_JOIN, expected);
    }
    
    @Test
    public void assertSelectJoinWhere() {
        String expected = "EnumerableCalc(expr#0..4=[{inputs}],proj#0..1=[{exprs}],user_id0=[$t3])"
                + "  EnumerableInterpreter"
                + "    BindableJoin(condition=[=(CAST($1):VARCHAR,CAST($3):VARCHAR)],joinType=[inner])"
                + "      BindableTableScan(table=[[federate_jdbc,t_order_federate]])"
                + "      BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[=(CAST($0):INTEGER,13)]])";
        assertOptimizedPlan(SELECT_CROSS_WHERE_CONDITION, expected);
    }
    
    @Test
    public void assertSelectSubQueryFrom() {
        String expected = "EnumerableInterpreter"
                + "  BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[>(CAST($0):INTEGER,1)]])";
        assertOptimizedPlan(SELECT_SUBQUERY_FROM, expected);
    }
    
    @Test
    public void assertSelectSubQueryWhereExist() {
        String expected = "EnumerableCalc(expr#0..3=[{inputs}],expr#4=[ISNOTNULL($t3)],proj#0..1=[{exprs}],$condition=[$t4])"
                + "  EnumerableCorrelate(correlation=[$cor0],joinType=[left],requiredColumns=[{1}]) "
                + "    EnumerableTableScan(table=[[federate_jdbc,t_order_federate]]) "
                + "    EnumerableInterpreter"
                + "      BindableAggregate(group=[{}],agg#0=[MIN($0)]) "
                + "        BindableProject($f0=[true]) "
                + "          BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[=(CAST($cor0.user_id):VARCHAR,CAST($0):VARCHAR)]],projects=[[0]]) ";
        assertOptimizedPlan(SELECT_SUBQUERY_WHERE_EXIST, expected);
    }
    
    @Test
    public void assertSelectSubQueryWhereIn() {
        String expected = "EnumerableInterpreter"
                + "  BindableProject(order_id=[$0],user_id=[$1])"
                + "    BindableJoin(condition=[=($1,$3)],joinType=[semi])"
                + "      BindableTableScan(table=[[federate_jdbc,t_order_federate]])"
                + "      BindableTableScan(table=[[federate_jdbc,t_user_info]],projects=[[0]])";
        assertOptimizedPlan(SELECT_SUBQUERY_WHERE_IN, expected);
    }
    
    @Test
    public void assertSelectSubQueryWhereBetween() {
        String expected = "EnumerableCalc(expr#0..4=[{inputs}],proj#0..1=[{exprs}])"
                + "  EnumerableInterpreter"
                + "    BindableFilter(condition=[AND(>=($1,$3),<=($1,$4))])"
                + "      BindableJoin(condition=[true],joinType=[left])"
                + "        BindableJoin(condition=[true],joinType=[left])"
                + "          BindableTableScan(table=[[federate_jdbc,t_order_federate]])"
                + "          BindableAggregate(group=[{}],agg#0=[SINGLE_VALUE($0)])"
                + "            BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[=(CAST($1):VARCHAR,'before')]],projects=[[0]])"
                + "        BindableAggregate(group=[{}],agg#0=[SINGLE_VALUE($0)])"
                + "          BindableTableScan(table=[[federate_jdbc,t_user_info]],filters=[[=(CAST($1):VARCHAR,'after')]],projects=[[0]])";
        assertOptimizedPlan(SELECT_SUBQUERY_WHERE_BETWEEN, expected);
    }
}
/* * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.metadata.schema; import java.lang.reflect.Array; import java.math.BigDecimal; import java.text.ParseException; import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.types.OBinary; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.record.ORecord; import com.orientechnologies.orient.core.serialization.OSerializableStream; import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper; /** * Generic representation of a type.<br/> * allowAssignmentFrom accepts any class, but Array.class means that the type accepts generic Arrays. 
* * @author Luca Garulli * */ public enum OType { BOOLEAN("Boolean", 0, new Class<?>[] { Boolean.class, Boolean.TYPE }, new Class<?>[] { Boolean.class, Number.class }) { }, INTEGER("Integer", 1, new Class<?>[] { Integer.class, Integer.TYPE }, new Class<?>[] { Integer.class, Number.class }) { }, SHORT("Short", 2, new Class<?>[] { Short.class, Short.TYPE }, new Class<?>[] { Short.class, Number.class }) { }, LONG("Long", 3, new Class<?>[] { Long.class, Long.TYPE }, new Class<?>[] { Long.class, Number.class, }) { }, FLOAT("Float", 4, new Class<?>[] { Float.class, Float.TYPE }, new Class<?>[] { Float.class, Number.class }) { }, DOUBLE("Double", 5, new Class<?>[] { Double.class, Double.TYPE }, new Class<?>[] { Double.class, Number.class }) { }, DATETIME("Datetime", 6, new Class<?>[] { Date.class }, new Class<?>[] { Date.class, Number.class }) { }, STRING("String", 7, new Class<?>[] { String.class }, new Class<?>[] { String.class }) { }, BINARY("Binary", 8, new Class<?>[] { byte[].class }, new Class<?>[] { byte[].class }) { }, EMBEDDED("Embedded", 9, new Class<?>[] { Object.class }, new Class<?>[] { OSerializableStream.class }) { }, EMBEDDEDLIST("EmbeddedList", 10, new Class<?>[] { List.class }, new Class<?>[] { List.class }) { }, EMBEDDEDSET("EmbeddedSet", 11, new Class<?>[] { Set.class }, new Class<?>[] { Set.class }) { }, EMBEDDEDMAP("EmbeddedMap", 12, new Class<?>[] { Map.class }, new Class<?>[] { Map.class }) { }, LINK("Link", 13, new Class<?>[] { Object.class, ORecordId.class }, new Class<?>[] { ORecord.class, ORID.class }) { }, LINKLIST("LinkList", 14, new Class<?>[] { List.class }, new Class<?>[] { List.class }) { }, LINKSET("LinkSet", 15, new Class<?>[] { Set.class }, new Class<?>[] { Set.class }) { }, LINKMAP("LinkMap", 16, new Class<?>[] { Map.class }, new Class<?>[] { Map.class }) { }, BYTE("Byte", 17, new Class<?>[] { Byte.class, Byte.TYPE }, new Class<?>[] { Byte.class, Number.class }) { }, TRANSIENT("Transient", 18, new Class<?>[] {}, new Class<?>[] {}) { 
}, DATE("Date", 19, new Class<?>[] { Date.class }, new Class<?>[] { Date.class, Number.class }) { }, CUSTOM("Custom", 20, new Class<?>[] { OSerializableStream.class }, new Class<?>[] { OSerializableStream.class }) { }, DECIMAL("Decimal", 21, new Class<?>[] { BigDecimal.class }, new Class<?>[] { BigDecimal.class, Number.class }) { }; protected static final OType[] TYPES = new OType[] { STRING, BOOLEAN, BYTE, INTEGER, SHORT, LONG, FLOAT, DOUBLE, DATE, DATETIME, BINARY, EMBEDDEDLIST, EMBEDDEDSET, EMBEDDEDMAP, LINK, LINKLIST, LINKSET, LINKMAP, EMBEDDED, CUSTOM, TRANSIENT, DECIMAL }; protected String name; protected int id; protected Class<?>[] javaTypes; protected Class<?>[] allowAssignmentFrom; private OType(final String iName, final int iId, final Class<?>[] iJavaTypes, final Class<?>[] iAllowAssignmentBy) { name = iName; id = iId; javaTypes = iJavaTypes; allowAssignmentFrom = iAllowAssignmentBy; } /** * Return the type by ID. * * @param iId * The id to search * @return The type if any, otherwise null */ public static OType getById(final byte iId) { for (OType t : TYPES) { if (iId == t.id) return t; } return null; } /** * Return the correspondent type by checking the "assignability" of the class received as parameter. 
* * @param iClass * Class to check * @return OType instance if found, otherwise null */ public static OType getTypeByClass(final Class<?> iClass) { if (iClass == null) return null; for (final OType type : TYPES) for (int i = 0; i < type.javaTypes.length; ++i) { if (type.javaTypes[i] == iClass) return type; if (type.javaTypes[i] == Array.class && iClass.isArray()) return type; } int priority = 0; boolean comparedAtLeastOnce; do { comparedAtLeastOnce = false; for (final OType type : TYPES) { if (type.allowAssignmentFrom.length > priority) { if (type.allowAssignmentFrom[priority].isAssignableFrom(iClass)) return type; if (type.allowAssignmentFrom[priority].isArray() && iClass.isArray()) return type; comparedAtLeastOnce = true; } } priority++; } while (comparedAtLeastOnce); return null; } /** * Convert the input object to an integer. * * @param iValue * Any type supported * @return The integer value if the conversion succeed, otherwise the IllegalArgumentException exception */ public int asInt(final Object iValue) { if (iValue instanceof Number) return ((Number) iValue).intValue(); else if (iValue instanceof String) return Integer.valueOf((String) iValue); else if (iValue instanceof Boolean) return ((Boolean) iValue) ? 1 : 0; throw new IllegalArgumentException("Cannot convert value " + iValue + " to int for type: " + name); } /** * Convert the input object to a long. * * @param iValue * Any type supported * @return The long value if the conversion succeed, otherwise the IllegalArgumentException exception */ public long asLong(final Object iValue) { if (iValue instanceof Number) return ((Number) iValue).longValue(); else if (iValue instanceof String) return Long.valueOf((String) iValue); else if (iValue instanceof Boolean) return ((Boolean) iValue) ? 1 : 0; throw new IllegalArgumentException("Cannot convert value " + iValue + " to long for type: " + name); } /** * Convert the input object to a float. 
* * @param iValue * Any type supported * @return The float value if the conversion succeed, otherwise the IllegalArgumentException exception */ public float asFloat(final Object iValue) { if (iValue instanceof Number) return ((Number) iValue).floatValue(); else if (iValue instanceof String) return Float.valueOf((String) iValue); throw new IllegalArgumentException("Cannot convert value " + iValue + " to float for type: " + name); } /** * Convert the input object to a double. * * @param iValue * Any type supported * @return The double value if the conversion succeed, otherwise the IllegalArgumentException exception */ public double asDouble(final Object iValue) { if (iValue instanceof Number) return ((Number) iValue).doubleValue(); else if (iValue instanceof String) return Double.valueOf((String) iValue); throw new IllegalArgumentException("Cannot convert value " + iValue + " to double for type: " + name); } /** * Convert the input object to a string. * * @param iValue * Any type supported * @return The string if the conversion succeed, otherwise the IllegalArgumentException exception */ public String asString(final Object iValue) { return iValue.toString(); } public static boolean isSimpleType(final Object iObject) { if (iObject == null) return false; final Class<? 
extends Object> iType = iObject.getClass(); if (iType.isPrimitive() || Number.class.isAssignableFrom(iType) || String.class.isAssignableFrom(iType) || Boolean.class.isAssignableFrom(iType) || Date.class.isAssignableFrom(iType) || (iType.isArray() && (iType.equals(byte[].class) || iType.equals(char[].class) || iType.equals(int[].class) || iType.equals(long[].class) || iType.equals(double[].class) || iType.equals(float[].class) || iType.equals(short[].class) || iType.equals(Integer[].class) || iType.equals(String[].class) || iType.equals(Long[].class) || iType.equals(Short[].class) || iType.equals(Double[].class)))) return true; return false; } /** * Convert types between numbers based on the iTargetClass parameter. * * @param iValue * Value to convert * @param iTargetClass * Expected class * @return The converted value or the original if no conversion was applied */ @SuppressWarnings({ "unchecked", "rawtypes" }) public static Object convert(final Object iValue, final Class<?> iTargetClass) { if (iValue == null) return null; if (iValue.getClass().equals(iTargetClass)) // SAME TYPE: DON'T CONVERT IT return iValue; if (iTargetClass.isAssignableFrom(iValue.getClass())) // COMPATIBLE TYPES: DON'T CONVERT IT return iValue; try { if (iValue instanceof OBinary && iTargetClass.isAssignableFrom(byte[].class)) return ((OBinary) iValue).toByteArray(); else if (byte[].class.isAssignableFrom(iTargetClass)) { return OStringSerializerHelper.getBinaryContent(iValue); } else if (byte[].class.isAssignableFrom(iValue.getClass())) { return iValue; } else if (iTargetClass.isEnum()) { if (iValue instanceof Number) return ((Class<Enum>) iTargetClass).getEnumConstants()[((Number) iValue).intValue()]; return Enum.valueOf((Class<Enum>) iTargetClass, iValue.toString()); } else if (iTargetClass.equals(Byte.TYPE) || iTargetClass.equals(Byte.class)) { if (iValue instanceof Byte) return iValue; else if (iValue instanceof String) return Byte.parseByte((String) iValue); else return ((Number) 
iValue).byteValue(); } else if (iTargetClass.equals(Short.TYPE) || iTargetClass.equals(Short.class)) { if (iValue instanceof Short) return iValue; else if (iValue instanceof String) return Short.parseShort((String) iValue); else return ((Number) iValue).shortValue(); } else if (iTargetClass.equals(Integer.TYPE) || iTargetClass.equals(Integer.class)) { if (iValue instanceof Integer) return iValue; else if (iValue instanceof String) return Integer.parseInt((String) iValue); else return ((Number) iValue).intValue(); } else if (iTargetClass.equals(Long.TYPE) || iTargetClass.equals(Long.class)) { if (iValue instanceof Long) return iValue; else if (iValue instanceof String) return Long.parseLong((String) iValue); else return ((Number) iValue).longValue(); } else if (iTargetClass.equals(Float.TYPE) || iTargetClass.equals(Float.class)) { if (iValue instanceof Float) return iValue; else if (iValue instanceof String) return Float.parseFloat((String) iValue); else return ((Number) iValue).floatValue(); } else if (iTargetClass.equals(BigDecimal.class)) { if (iValue instanceof BigDecimal) return iValue; else if (iValue instanceof String) return new BigDecimal((String) iValue); else if (iValue instanceof Number) return new BigDecimal(iValue.toString()); } else if (iTargetClass.equals(Double.TYPE) || iTargetClass.equals(Double.class)) { if (iValue instanceof Double) return iValue; else if (iValue instanceof String) return Double.parseDouble((String) iValue); else return ((Number) iValue).doubleValue(); } else if (iTargetClass.equals(Boolean.TYPE) || iTargetClass.equals(Boolean.class)) { if (iValue instanceof Boolean) return ((Boolean) iValue).booleanValue(); else if (iValue instanceof String) { if (((String) iValue).equalsIgnoreCase("true")) return Boolean.TRUE; else if (((String) iValue).equalsIgnoreCase("false")) return Boolean.FALSE; return null; } else if (iValue instanceof Number) return ((Number) iValue).intValue() != 0; } else if (iValue instanceof Collection<?> && 
Set.class.isAssignableFrom(iTargetClass)) { final Set<Object> set = new HashSet<Object>(); set.addAll((Collection<? extends Object>) iValue); return set; } else if (iTargetClass.equals(Date.class)) { if (iValue instanceof Number) return new Date(((Number) iValue).longValue()); if (iValue instanceof String) { try { return ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().getConfiguration().getDateTimeFormatInstance() .parse((String) iValue); } catch (ParseException e) { return ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().getConfiguration().getDateFormatInstance() .parse((String) iValue); } } } else if (iTargetClass.equals(String.class)) return iValue.toString(); } catch (Exception e) { OLogManager.instance().debug(OType.class, "Error in conversion of value '%s' to type '%s'", iValue, iTargetClass); return null; } return iValue; } public Class<?> getDefaultJavaType() { return javaTypes.length > 0 ? javaTypes[0] : null; } public Class<?>[] getJavaTypes() { return javaTypes; } public static Number increment(Number a, Number b) { if (a == null || b == null) throw new IllegalArgumentException("Cannot increment a null value"); if (a instanceof Integer) { if (b instanceof Integer) return new Integer(a.intValue() + b.intValue()); else if (b instanceof Long) return new Long(a.intValue() + b.longValue()); else if (b instanceof Short) return new Integer(a.intValue() + b.shortValue()); else if (b instanceof Float) return new Float(a.intValue() + b.floatValue()); else if (b instanceof Double) return new Double(a.intValue() + b.doubleValue()); else if (b instanceof BigDecimal) return new BigDecimal(a.intValue()).add((BigDecimal) b); } else if (a instanceof Long) { if (b instanceof Integer) return new Long(a.longValue() + b.intValue()); else if (b instanceof Long) return new Long(a.longValue() + b.longValue()); else if (b instanceof Short) return new Long(a.longValue() + b.shortValue()); else if (b instanceof Float) return new Float(a.longValue() + b.floatValue()); 
else if (b instanceof Double) return new Double(a.longValue() + b.doubleValue()); else if (b instanceof BigDecimal) return new BigDecimal(a.longValue()).add((BigDecimal) b); } else if (a instanceof Short) { if (b instanceof Integer) return new Integer(a.shortValue() + b.intValue()); else if (b instanceof Long) return new Long(a.shortValue() + b.longValue()); else if (b instanceof Short) return new Short((short) (a.shortValue() + b.shortValue())); else if (b instanceof Float) return new Float(a.shortValue() + b.floatValue()); else if (b instanceof Double) return new Double(a.shortValue() + b.doubleValue()); else if (b instanceof BigDecimal) return new BigDecimal(a.shortValue()).add((BigDecimal) b); } else if (a instanceof Float) { if (b instanceof Integer) return new Float(a.floatValue() + b.intValue()); else if (b instanceof Long) return new Float(a.floatValue() + b.longValue()); else if (b instanceof Short) return new Float(a.floatValue() + b.shortValue()); else if (b instanceof Float) return new Float(a.floatValue() + b.floatValue()); else if (b instanceof Double) return new Double(a.floatValue() + b.doubleValue()); else if (b instanceof BigDecimal) return new BigDecimal(a.floatValue()).add((BigDecimal) b); } else if (a instanceof Double) { if (b instanceof Integer) return new Double(a.doubleValue() + b.intValue()); else if (b instanceof Long) return new Double(a.doubleValue() + b.longValue()); else if (b instanceof Short) return new Double(a.doubleValue() + b.shortValue()); else if (b instanceof Float) return new Double(a.doubleValue() + b.floatValue()); else if (b instanceof Double) return new Double(a.doubleValue() + b.doubleValue()); else if (b instanceof BigDecimal) return new BigDecimal(a.doubleValue()).add((BigDecimal) b); } else if (a instanceof BigDecimal) { if (b instanceof Integer) return ((BigDecimal) a).add(new BigDecimal(b.intValue())); else if (b instanceof Long) return ((BigDecimal) a).add(new BigDecimal(b.longValue())); else if (b instanceof 
Short) return ((BigDecimal) a).add(new BigDecimal(b.shortValue())); else if (b instanceof Float) return ((BigDecimal) a).add(new BigDecimal(b.floatValue())); else if (b instanceof Double) return ((BigDecimal) a).add(new BigDecimal(b.doubleValue())); else if (b instanceof BigDecimal) return ((BigDecimal) a).add((BigDecimal) b); } throw new IllegalArgumentException("Cannot increment value '" + a + "' (" + a.getClass() + ") with '" + b + "' (" + b.getClass() + ")"); } public boolean isMultiValue() { return this == EMBEDDEDLIST || this == EMBEDDEDMAP || this == EMBEDDEDSET || this == LINKLIST || this == LINKMAP || this == LINKSET; } public boolean isLink() { return this == LINK || this == LINKSET || this == LINKLIST || this == LINKMAP; } }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.trans.steps.rowgenerator; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; 
import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransPreviewFactory; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.steps.rowgenerator.RowGeneratorMeta; import org.pentaho.di.ui.core.dialog.EnterNumberDialog; import org.pentaho.di.ui.core.dialog.EnterTextDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.dialog.PreviewRowsDialog; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.dialog.TransPreviewProgressDialog; import org.pentaho.di.ui.trans.step.BaseStepDialog; public class RowGeneratorDialog extends BaseStepDialog implements StepDialogInterface { private static Class<?> PKG = RowGeneratorMeta.class; // for i18n purposes, needed by Translator2!! private Label wlLimit; private TextVar wLimit; private Label wlNeverEnding; private Button wNeverEnding; private Label wlInterval; private TextVar wInterval; private Label wlRowTimeField; private TextVar wRowTimeField; private Label wlLastTimeField; private TextVar wLastTimeField; private Label wlFields; private TableView wFields; private RowGeneratorMeta input; public RowGeneratorDialog( Shell parent, Object in, TransMeta transMeta, String sname ) { super( parent, (BaseStepMeta) in, transMeta, sname ); input = (RowGeneratorMeta) in; } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN ); props.setLook( shell ); setShellImage( shell, input ); ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { input.setChanged(); } }; changed = input.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); 
shell.setText( BaseMessages.getString( PKG, "RowGeneratorDialog.DialogTitle" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Filename line wlStepname = new Label( shell, SWT.RIGHT ); wlStepname.setText( BaseMessages.getString( PKG, "System.Label.StepName" ) ); props.setLook( wlStepname ); fdlStepname = new FormData(); fdlStepname.left = new FormAttachment( 0, 0 ); fdlStepname.right = new FormAttachment( middle, -margin ); fdlStepname.top = new FormAttachment( 0, margin ); wlStepname.setLayoutData( fdlStepname ); wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wStepname.setText( stepname ); props.setLook( wStepname ); wStepname.addModifyListener( lsMod ); fdStepname = new FormData(); fdStepname.left = new FormAttachment( middle, 0 ); fdStepname.top = new FormAttachment( 0, margin ); fdStepname.right = new FormAttachment( 100, 0 ); wStepname.setLayoutData( fdStepname ); Control lastControl = wStepname; wlLimit = new Label( shell, SWT.RIGHT ); wlLimit.setText( BaseMessages.getString( PKG, "RowGeneratorDialog.Limit.Label" ) ); props.setLook( wlLimit ); FormData fdlLimit = new FormData(); fdlLimit.left = new FormAttachment( 0, 0 ); fdlLimit.right = new FormAttachment( middle, -margin ); fdlLimit.top = new FormAttachment( lastControl, margin ); wlLimit.setLayoutData( fdlLimit ); wLimit = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wLimit ); wLimit.addModifyListener( lsMod ); FormData fdLimit = new FormData(); fdLimit.left = new FormAttachment( middle, 0 ); fdLimit.top = new FormAttachment( lastControl, margin ); fdLimit.right = new FormAttachment( 100, 0 ); wLimit.setLayoutData( fdLimit ); lastControl = wLimit; wlNeverEnding = new Label( shell, SWT.RIGHT ); wlNeverEnding.setText( BaseMessages.getString( PKG, "RowGeneratorDialog.NeverEnding.Label" ) ); props.setLook( wlNeverEnding ); FormData fdlNeverEnding = new FormData(); fdlNeverEnding.left = new FormAttachment( 0, 0 ); 
fdlNeverEnding.right = new FormAttachment( middle, -margin ); fdlNeverEnding.top = new FormAttachment( lastControl, margin ); wlNeverEnding.setLayoutData( fdlNeverEnding ); wNeverEnding = new Button( shell, SWT.CHECK ); props.setLook( wNeverEnding ); wNeverEnding.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setActive(); } } ); FormData fdNeverEnding = new FormData(); fdNeverEnding.left = new FormAttachment( middle, 0 ); fdNeverEnding.top = new FormAttachment( lastControl, margin ); fdNeverEnding.right = new FormAttachment( 100, 0 ); wNeverEnding.setLayoutData( fdNeverEnding ); lastControl = wNeverEnding; wlInterval = new Label( shell, SWT.RIGHT ); wlInterval.setText( BaseMessages.getString( PKG, "RowGeneratorDialog.Interval.Label" ) ); props.setLook( wlInterval ); FormData fdlInterval = new FormData(); fdlInterval.left = new FormAttachment( 0, 0 ); fdlInterval.right = new FormAttachment( middle, -margin ); fdlInterval.top = new FormAttachment( lastControl, margin ); wlInterval.setLayoutData( fdlInterval ); wInterval = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wInterval ); wInterval.addModifyListener( lsMod ); FormData fdInterval = new FormData(); fdInterval.left = new FormAttachment( middle, 0 ); fdInterval.top = new FormAttachment( lastControl, margin ); fdInterval.right = new FormAttachment( 100, 0 ); wInterval.setLayoutData( fdInterval ); lastControl = wInterval; wlRowTimeField = new Label( shell, SWT.RIGHT ); wlRowTimeField.setText( BaseMessages.getString( PKG, "RowGeneratorDialog.RowTimeField.Label" ) ); props.setLook( wlRowTimeField ); FormData fdlRowTimeField = new FormData(); fdlRowTimeField.left = new FormAttachment( 0, 0 ); fdlRowTimeField.right = new FormAttachment( middle, -margin ); fdlRowTimeField.top = new FormAttachment( lastControl, margin ); wlRowTimeField.setLayoutData( fdlRowTimeField ); wRowTimeField = new TextVar( transMeta, shell, SWT.SINGLE 
| SWT.LEFT | SWT.BORDER ); props.setLook( wRowTimeField ); wRowTimeField.addModifyListener( lsMod ); FormData fdRowTimeField = new FormData(); fdRowTimeField.left = new FormAttachment( middle, 0 ); fdRowTimeField.top = new FormAttachment( lastControl, margin ); fdRowTimeField.right = new FormAttachment( 100, 0 ); wRowTimeField.setLayoutData( fdRowTimeField ); lastControl = wRowTimeField; wlLastTimeField = new Label( shell, SWT.RIGHT ); wlLastTimeField.setText( BaseMessages.getString( PKG, "RowGeneratorDialog.LastTimeField.Label" ) ); props.setLook( wlLastTimeField ); FormData fdlLastTimeField = new FormData(); fdlLastTimeField.left = new FormAttachment( 0, 0 ); fdlLastTimeField.right = new FormAttachment( middle, -margin ); fdlLastTimeField.top = new FormAttachment( lastControl, margin ); wlLastTimeField.setLayoutData( fdlLastTimeField ); wLastTimeField = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wLastTimeField ); wLastTimeField.addModifyListener( lsMod ); FormData fdLastTimeField = new FormData(); fdLastTimeField.left = new FormAttachment( middle, 0 ); fdLastTimeField.top = new FormAttachment( lastControl, margin ); fdLastTimeField.right = new FormAttachment( 100, 0 ); wLastTimeField.setLayoutData( fdLastTimeField ); lastControl = wLastTimeField; wlFields = new Label( shell, SWT.NONE ); wlFields.setText( BaseMessages.getString( PKG, "RowGeneratorDialog.Fields.Label" ) ); props.setLook( wlFields ); FormData fdlFields = new FormData(); fdlFields.left = new FormAttachment( 0, 0 ); fdlFields.top = new FormAttachment( lastControl, margin ); wlFields.setLayoutData( fdlFields ); lastControl = wlFields; final int FieldsRows = input.getFieldName().length; ColumnInfo[] colinf = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Name" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Type" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMeta .getTypes() ), new 
ColumnInfo( BaseMessages.getString( PKG, "System.Column.Format" ), ColumnInfo.COLUMN_TYPE_FORMAT, 2 ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Length" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Precision" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Currency" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Decimal" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Group" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.Value" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "System.Column.SetEmptyString" ), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { BaseMessages.getString( PKG, "System.Combo.Yes" ), BaseMessages.getString( PKG, "System.Combo.No" ) } ) }; wFields = new TableView( transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI, colinf, FieldsRows, lsMod, props ); FormData fdFields = new FormData(); fdFields.left = new FormAttachment( 0, 0 ); fdFields.top = new FormAttachment( lastControl, margin ); fdFields.right = new FormAttachment( 100, 0 ); fdFields.bottom = new FormAttachment( 100, -50 ); wFields.setLayoutData( fdFields ); wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) ); wPreview = new Button( shell, SWT.PUSH ); wPreview.setText( BaseMessages.getString( PKG, "System.Button.Preview" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wPreview, wCancel }, margin, null ); // Add listeners lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; lsPreview = new Listener() { public void handleEvent( Event e ) { preview(); } }; lsCancel = new Listener() { 
public void handleEvent( Event e ) { cancel(); } }; wOK.addListener( SWT.Selection, lsOK ); wPreview.addListener( SWT.Selection, lsPreview ); wCancel.addListener( SWT.Selection, lsCancel ); lsDef = new SelectionAdapter() { public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; wStepname.addSelectionListener( lsDef ); wLimit.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { public void shellClosed( ShellEvent e ) { cancel(); } } ); lsResize = new Listener() { public void handleEvent( Event event ) { Point size = shell.getSize(); wFields.setSize( size.x - 10, size.y - 50 ); wFields.table.setSize( size.x - 10, size.y - 50 ); wFields.redraw(); } }; shell.addListener( SWT.Resize, lsResize ); // Set the shell size, based upon previous time... setSize(); getData(); input.setChanged( changed ); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected void setActive() { boolean neverEnding = wNeverEnding.getSelection(); wlLimit.setEnabled( !neverEnding ); wLimit.setEnabled( !neverEnding ); wlInterval.setEnabled( neverEnding ); wInterval.setEnabled( neverEnding ); wlRowTimeField.setEnabled( neverEnding ); wRowTimeField.setEnabled( neverEnding ); wlLastTimeField.setEnabled( neverEnding ); wLastTimeField.setEnabled( neverEnding ); } /** * Copy information from the meta-data input to the dialog fields. */ public void getData() { if ( isDebug() ) { logDebug( "getting fields info..." 
); } wLimit.setText( input.getRowLimit() ); wNeverEnding.setSelection( input.isNeverEnding() ); wInterval.setText( Const.NVL( input.getIntervalInMs(), "" ) ); wRowTimeField.setText( Const.NVL( input.getRowTimeField(), "" ) ); wLastTimeField.setText( Const.NVL( input.getLastTimeField(), "" ) ); for ( int i = 0; i < input.getFieldName().length; i++ ) { if ( input.getFieldName()[i] != null ) { TableItem item = wFields.table.getItem( i ); int col = 1; item.setText( col++, input.getFieldName()[i] ); String type = input.getFieldType()[i]; String format = input.getFieldFormat()[i]; String length = input.getFieldLength()[i] < 0 ? "" : ( "" + input.getFieldLength()[i] ); String prec = input.getFieldPrecision()[i] < 0 ? "" : ( "" + input.getFieldPrecision()[i] ); String curr = input.getCurrency()[i]; String group = input.getGroup()[i]; String decim = input.getDecimal()[i]; String def = input.getValue()[i]; item.setText( col++, Const.NVL( type, "" ) ); item.setText( col++, Const.NVL( format, "" ) ); item.setText( col++, Const.NVL( length, "" ) ); item.setText( col++, Const.NVL( prec, "" ) ); item.setText( col++, Const.NVL( curr, "" ) ); item.setText( col++, Const.NVL( decim, "" ) ); item.setText( col++, Const.NVL( group, "" ) ); item.setText( col++, Const.NVL( def, "" ) ); item .setText( col++, input.isSetEmptyString()[i] ? BaseMessages.getString( PKG, "System.Combo.Yes" ) : BaseMessages.getString( PKG, "System.Combo.No" ) ); } } wFields.setRowNums(); wFields.optWidth( true ); setActive(); wStepname.selectAll(); wStepname.setFocus(); } private void cancel() { stepname = null; input.setChanged( changed ); dispose(); } private void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } stepname = wStepname.getText(); // return value try { getInfo( new RowGeneratorMeta() ); // to see if there is an exception getInfo( input ); // to put the content on the input structure for real if all is well. 
dispose(); } catch ( KettleException e ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "RowGeneratorDialog.Illegal.Dialog.Settings.Title" ), BaseMessages .getString( PKG, "RowGeneratorDialog.Illegal.Dialog.Settings.Message" ), e ); } } private void getInfo( RowGeneratorMeta meta ) throws KettleException { meta.setRowLimit( wLimit.getText() ); meta.setNeverEnding( wNeverEnding.getSelection() ); meta.setIntervalInMs( wInterval.getText() ); meta.setRowTimeField( wRowTimeField.getText() ); meta.setLastTimeField( wLastTimeField.getText() ); int nrfields = wFields.nrNonEmpty(); meta.allocate( nrfields ); //CHECKSTYLE:Indentation:OFF for ( int i = 0; i < nrfields; i++ ) { TableItem item = wFields.getNonEmpty( i ); meta.getFieldName()[i] = item.getText( 1 ); meta.getFieldFormat()[i] = item.getText( 3 ); String slength = item.getText( 4 ); String sprec = item.getText( 5 ); meta.getCurrency()[i] = item.getText( 6 ); meta.getDecimal()[i] = item.getText( 7 ); meta.getGroup()[i] = item.getText( 8 ); meta.isSetEmptyString()[i] = BaseMessages.getString( PKG, "System.Combo.Yes" ).equalsIgnoreCase( item.getText( 10 ) ); meta.getValue()[i] = meta.isSetEmptyString()[i] ? "" : item.getText( 9 ); meta.getFieldType()[i] = meta.isSetEmptyString()[i] ? "String" : item.getText( 2 ); meta.getFieldLength()[i] = Const.toInt( slength, -1 ); meta.getFieldPrecision()[i] = Const.toInt( sprec, -1 ); } // Performs checks... /* * Commented out verification : if variables are used, this check is a pain! * * long longLimit = Const.toLong(transMeta.environmentSubstitute( wLimit.getText()), -1L ); if (longLimit<0) { throw * new KettleException( BaseMessages.getString(PKG, "RowGeneratorDialog.Wrong.RowLimit.Number") ); } */ } /** * Preview the data generated by this step. This generates a transformation using this step & a dummy and previews it. 
* */ private void preview() { RowGeneratorMeta oneMeta = new RowGeneratorMeta(); try { getInfo( oneMeta ); } catch ( KettleException e ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "RowGeneratorDialog.Illegal.Dialog.Settings.Title" ), BaseMessages .getString( PKG, "RowGeneratorDialog.Illegal.Dialog.Settings.Message" ), e ); return; } TransMeta previewMeta = TransPreviewFactory.generatePreviewTransformation( transMeta, oneMeta, wStepname.getText() ); EnterNumberDialog numberDialog = new EnterNumberDialog( shell, props.getDefaultPreviewSize(), BaseMessages.getString( PKG, "System.Dialog.EnterPreviewSize.Title" ), BaseMessages.getString( PKG, "System.Dialog.EnterPreviewSize.Message" ) ); int previewSize = numberDialog.open(); if ( previewSize > 0 ) { TransPreviewProgressDialog progressDialog = new TransPreviewProgressDialog( shell, previewMeta, new String[] { wStepname.getText() }, new int[] { previewSize } ); progressDialog.open(); Trans trans = progressDialog.getTrans(); String loggingText = progressDialog.getLoggingText(); if ( !progressDialog.isCancelled() ) { if ( trans.getResult() != null && trans.getResult().getNrErrors() > 0 ) { EnterTextDialog etd = new EnterTextDialog( shell, BaseMessages.getString( PKG, "System.Dialog.PreviewError.Title" ), BaseMessages .getString( PKG, "System.Dialog.PreviewError.Message" ), loggingText, true ); etd.setReadOnly(); etd.open(); } } PreviewRowsDialog prd = new PreviewRowsDialog( shell, transMeta, SWT.NONE, wStepname.getText(), progressDialog.getPreviewRowsMeta( wStepname .getText() ), progressDialog.getPreviewRows( wStepname.getText() ), loggingText ); prd.open(); } } }
package com.ittianyu.mvvm.application.h_rxjava2.common.bean; import android.arch.persistence.room.Entity; import android.arch.persistence.room.PrimaryKey; import java.io.Serializable; /** * Created by 86839 on 2017/10/4. */ @Entity(tableName = "user") public class User implements Serializable { /** * login : ittianyu * id : 16226068 * avatar_url : https://avatars0.githubusercontent.com/u/16226068?v=4 * gravatar_id : * url : https://api.github.com/users/ittianyu * html_url : https://github.com/ittianyu * followers_url : https://api.github.com/users/ittianyu/followers * following_url : https://api.github.com/users/ittianyu/following{/other_user} * gists_url : https://api.github.com/users/ittianyu/gists{/gist_id} * starred_url : https://api.github.com/users/ittianyu/starred{/owner}{/repo} * subscriptions_url : https://api.github.com/users/ittianyu/subscriptions * organizations_url : https://api.github.com/users/ittianyu/orgs * repos_url : https://api.github.com/users/ittianyu/repos * events_url : https://api.github.com/users/ittianyu/events{/privacy} * received_events_url : https://api.github.com/users/ittianyu/received_events * type : User * site_admin : false * name : ittianyu * company : null * blog : www.ittianyu.com * location : null * email : null * hireable : true * bio : null * public_repos : 4 * public_gists : 0 * followers : 56 * following : 2 * created_at : 2015-12-09T14:37:30Z * updated_at : 2017-09-14T10:08:07Z */ @PrimaryKey private int id; private String login; private String avatar_url; private String gravatar_id; private String url; private String html_url; private String followers_url; private String following_url; private String gists_url; private String starred_url; private String subscriptions_url; private String organizations_url; private String repos_url; private String events_url; private String received_events_url; private String type; private boolean site_admin; private String name; private String company; private String blog; private String 
location; private String email; private boolean hireable; private String bio; private int public_repos; private int public_gists; private int followers; private int following; private String created_at; private String updated_at; public String getLogin() { return login; } public void setLogin(String login) { this.login = login; } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getAvatar_url() { return avatar_url; } public void setAvatar_url(String avatar_url) { this.avatar_url = avatar_url; } public String getGravatar_id() { return gravatar_id; } public void setGravatar_id(String gravatar_id) { this.gravatar_id = gravatar_id; } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public String getHtml_url() { return html_url; } public void setHtml_url(String html_url) { this.html_url = html_url; } public String getFollowers_url() { return followers_url; } public void setFollowers_url(String followers_url) { this.followers_url = followers_url; } public String getFollowing_url() { return following_url; } public void setFollowing_url(String following_url) { this.following_url = following_url; } public String getGists_url() { return gists_url; } public void setGists_url(String gists_url) { this.gists_url = gists_url; } public String getStarred_url() { return starred_url; } public void setStarred_url(String starred_url) { this.starred_url = starred_url; } public String getSubscriptions_url() { return subscriptions_url; } public void setSubscriptions_url(String subscriptions_url) { this.subscriptions_url = subscriptions_url; } public String getOrganizations_url() { return organizations_url; } public void setOrganizations_url(String organizations_url) { this.organizations_url = organizations_url; } public String getRepos_url() { return repos_url; } public void setRepos_url(String repos_url) { this.repos_url = repos_url; } public String getEvents_url() { return events_url; } public void 
setEvents_url(String events_url) { this.events_url = events_url; } public String getReceived_events_url() { return received_events_url; } public void setReceived_events_url(String received_events_url) { this.received_events_url = received_events_url; } public String getType() { return type; } public void setType(String type) { this.type = type; } public boolean isSite_admin() { return site_admin; } public void setSite_admin(boolean site_admin) { this.site_admin = site_admin; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getCompany() { return company; } public void setCompany(String company) { this.company = company; } public String getBlog() { return blog; } public void setBlog(String blog) { this.blog = blog; } public String getLocation() { return location; } public void setLocation(String location) { this.location = location; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public boolean isHireable() { return hireable; } public void setHireable(boolean hireable) { this.hireable = hireable; } public String getBio() { return bio; } public void setBio(String bio) { this.bio = bio; } public int getPublic_repos() { return public_repos; } public void setPublic_repos(int public_repos) { this.public_repos = public_repos; } public int getPublic_gists() { return public_gists; } public void setPublic_gists(int public_gists) { this.public_gists = public_gists; } public int getFollowers() { return followers; } public void setFollowers(int followers) { this.followers = followers; } public int getFollowing() { return following; } public void setFollowing(int following) { this.following = following; } public String getCreated_at() { return created_at; } public void setCreated_at(String created_at) { this.created_at = created_at; } public String getUpdated_at() { return updated_at; } public void setUpdated_at(String updated_at) { this.updated_at = updated_at; 
} @Override public String toString() { return "User{" + "login='" + login + '\'' + ", id=" + id + ", avatar_url='" + avatar_url + '\'' + ", gravatar_id='" + gravatar_id + '\'' + ", url='" + url + '\'' + ", html_url='" + html_url + '\'' + ", followers_url='" + followers_url + '\'' + ", following_url='" + following_url + '\'' + ", gists_url='" + gists_url + '\'' + ", starred_url='" + starred_url + '\'' + ", subscriptions_url='" + subscriptions_url + '\'' + ", organizations_url='" + organizations_url + '\'' + ", repos_url='" + repos_url + '\'' + ", events_url='" + events_url + '\'' + ", received_events_url='" + received_events_url + '\'' + ", type='" + type + '\'' + ", site_admin=" + site_admin + ", name='" + name + '\'' + ", company=" + company + ", blog='" + blog + '\'' + ", location=" + location + ", email=" + email + ", hireable=" + hireable + ", bio=" + bio + ", public_repos=" + public_repos + ", public_gists=" + public_gists + ", followers=" + followers + ", following=" + following + ", created_at='" + created_at + '\'' + ", updated_at='" + updated_at + '\'' + '}'; } }
/* * Thumbnailator - a thumbnail generation library * * Copyright (c) 2008-2020 Chris Kroells * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package net.coobird.thumbnailator.tasks.io;

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import javax.imageio.ImageIO;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.spi.IIORegistry;
import javax.imageio.spi.ImageWriterSpi;

import net.coobird.thumbnailator.TestUtils;
import net.coobird.thumbnailator.ThumbnailParameter;
import net.coobird.thumbnailator.test.BufferedImageComparer;

import org.junit.Test;

import static org.mockito.Matchers.anyFloat;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;

/**
 * Tests for {@link OutputStreamImageSink}: constructor argument validation,
 * output-format handling, propagation of {@link ThumbnailParameter} compression
 * settings to the underlying {@link ImageWriter}, and the guarantee that the
 * sink never closes the caller-supplied stream.
 */
public class OutputStreamImageSinkTest {

    /**
     * Creates and registers a mock {@link ImageWriterSpi} for the fictional
     * "foo" format, whose writer hands out the given {@code iwParam}.
     * The caller MUST deregister the returned SPI (use try/finally) so the
     * mock does not leak into the global {@link IIORegistry} for other tests.
     */
    private static ImageWriterSpi registerMockWriterSpi(ImageWriteParam iwParam) throws IOException {
        ImageWriter writer = mock(ImageWriter.class);
        ImageWriterSpi spi = mock(ImageWriterSpi.class);

        when(writer.getDefaultWriteParam()).thenReturn(iwParam);
        when(writer.getOriginatingProvider()).thenReturn(spi);

        when(spi.getFormatNames()).thenReturn(new String[] {"foo", "FOO"});
        when(spi.getFileSuffixes()).thenReturn(new String[] {"foo", "FOO"});
        when(spi.createWriterInstance()).thenReturn(writer);
        when(spi.createWriterInstance(anyObject())).thenReturn(writer);

        IIORegistry.getDefaultInstance().registerServiceProvider(spi);
        return spi;
    }

    @Test
    public void validOutputStream() {
        // given
        OutputStream os = mock(OutputStream.class);

        // when
        OutputStreamImageSink sink = new OutputStreamImageSink(os);

        // then
        assertEquals(os, sink.getSink());
    }

    @Test(expected=NullPointerException.class)
    public void nullOutputStream() {
        // given
        OutputStream f = null;

        try {
            // when
            new OutputStreamImageSink(f);
        } catch (NullPointerException e) {
            // then
            assertEquals("OutputStream cannot be null.", e.getMessage());
            throw e;
        }
    }

    @Test(expected=NullPointerException.class)
    public void write_NullImage() throws IOException {
        // given
        OutputStream os = mock(OutputStream.class);
        BufferedImage img = null;

        OutputStreamImageSink sink = new OutputStreamImageSink(os);
        sink.setOutputFormatName("png");

        try {
            // when
            sink.write(img);
        } catch (NullPointerException e) {
            // then
            assertEquals("Cannot write a null image.", e.getMessage());
            throw e;
        }
    }

    @Test(expected=IllegalStateException.class)
    public void write_ValidImage_SetOutputFormat_NotSet() throws IOException {
        // given
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        BufferedImage imgToWrite =
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_ARGB);

        OutputStreamImageSink sink = new OutputStreamImageSink(os);

        try {
            // when
            sink.write(imgToWrite);
        } catch (IllegalStateException e) {
            // then
            assertEquals("Output format has not been set.", e.getMessage());
            throw e;
        }
    }

    @Test(expected=IllegalStateException.class)
    public void write_ValidImage_SetOutputFormat_OriginalFormat() throws IOException {
        // given
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        BufferedImage imgToWrite =
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_ARGB);

        OutputStreamImageSink sink = new OutputStreamImageSink(os);
        // ORIGINAL_FORMAT is a sentinel, not a concrete format name; writing
        // must fail the same way as when no format was set at all.
        sink.setOutputFormatName(ThumbnailParameter.ORIGINAL_FORMAT);

        try {
            // when
            sink.write(imgToWrite);
        } catch (IllegalStateException e) {
            // then
            assertEquals("Output format has not been set.", e.getMessage());
            throw e;
        }
    }

    @Test
    public void write_ValidImage_SetOutputFormat() throws IOException {
        // given
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        BufferedImage imgToWrite =
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_ARGB);

        OutputStreamImageSink sink = new OutputStreamImageSink(os);

        // when
        sink.setOutputFormatName("png");
        sink.write(imgToWrite);

        // then
        assertEquals(os, sink.getSink());

        byte[] imageData = os.toByteArray();
        BufferedImage writtenImg = ImageIO.read(new ByteArrayInputStream(imageData));
        assertTrue(BufferedImageComparer.isRGBSimilar(imgToWrite, writtenImg));

        String formatName = TestUtils.getFormatName(new ByteArrayInputStream(imageData));
        assertEquals("png", formatName);
    }

    @Test
    public void write_ValidImage_SetThumbnailParameter_BMP_QualityAndOutputFormatType_BothDefault() throws IOException {
        // given
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        BufferedImage imgToWrite =
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_ARGB);

        ThumbnailParameter param = mock(ThumbnailParameter.class);
        when(param.getOutputQuality()).thenReturn(ThumbnailParameter.DEFAULT_QUALITY);
        when(param.getOutputFormatType()).thenReturn(ThumbnailParameter.DEFAULT_FORMAT_TYPE);

        OutputStreamImageSink sink = new OutputStreamImageSink(os);
        sink.setThumbnailParameter(param);
        sink.setOutputFormatName("bmp");

        // when
        sink.write(imgToWrite);

        // then
        assertEquals(os, sink.getSink());

        byte[] imageData = os.toByteArray();
        BufferedImage writtenImg = ImageIO.read(new ByteArrayInputStream(imageData));
        assertTrue(BufferedImageComparer.isRGBSimilar(imgToWrite, writtenImg));

        String formatName = TestUtils.getFormatName(new ByteArrayInputStream(imageData));
        assertEquals("bmp", formatName);

        verify(param, atLeastOnce()).getOutputQuality();
        verify(param, atLeastOnce()).getOutputFormatType();
    }

    @Test
    public void write_ValidImage_SetThumbnailParameter_BMP_QualityAndOutputFormatType_BothNonDefault() throws IOException {
        // given
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        BufferedImage imgToWrite =
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_ARGB);

        ThumbnailParameter param = mock(ThumbnailParameter.class);
        when(param.getOutputQuality()).thenReturn(0.5f);
        when(param.getOutputFormatType()).thenReturn("BI_BITFIELDS");

        OutputStreamImageSink sink = new OutputStreamImageSink(os);
        sink.setThumbnailParameter(param);
        sink.setOutputFormatName("bmp");

        // when
        sink.write(imgToWrite);

        // then
        assertEquals(os, sink.getSink());

        byte[] imageData = os.toByteArray();
        BufferedImage writtenImg = ImageIO.read(new ByteArrayInputStream(imageData));
        assertTrue(BufferedImageComparer.isRGBSimilar(imgToWrite, writtenImg));

        String formatName = TestUtils.getFormatName(new ByteArrayInputStream(imageData));
        assertEquals("bmp", formatName);

        verify(param, atLeastOnce()).getOutputQuality();
        verify(param, atLeastOnce()).getOutputFormatType();
    }

    @Test
    public void write_ValidImage_SetThumbnailParameter_BMP_OutputFormatType() throws IOException {
        // given
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        BufferedImage imgToWrite =
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_ARGB);

        ThumbnailParameter param = mock(ThumbnailParameter.class);
        when(param.getOutputQuality()).thenReturn(ThumbnailParameter.DEFAULT_QUALITY);
        when(param.getOutputFormatType()).thenReturn("BI_BITFIELDS");

        OutputStreamImageSink sink = new OutputStreamImageSink(os);
        sink.setThumbnailParameter(param);
        sink.setOutputFormatName("bmp");

        // when
        sink.write(imgToWrite);

        // then
        assertEquals(os, sink.getSink());

        byte[] imageData = os.toByteArray();
        BufferedImage writtenImg = ImageIO.read(new ByteArrayInputStream(imageData));
        assertTrue(BufferedImageComparer.isRGBSimilar(imgToWrite, writtenImg));

        String formatName = TestUtils.getFormatName(new ByteArrayInputStream(imageData));
        assertEquals("bmp", formatName);

        verify(param, atLeastOnce()).getOutputQuality();
    }

    @Test
    public void write_ValidImage_WriterCantCompress() throws IOException {
        // given
        ImageWriteParam iwParam = mock(ImageWriteParam.class);
        when(iwParam.canWriteCompressed()).thenReturn(false);

        ImageWriterSpi spi = registerMockWriterSpi(iwParam);
        try {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            BufferedImage imgToWrite =
                    new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);

            ThumbnailParameter param = mock(ThumbnailParameter.class);
            when(param.getOutputQuality()).thenReturn(0.8f);
            when(param.getOutputFormatType()).thenReturn(ThumbnailParameter.DEFAULT_FORMAT_TYPE);

            OutputStreamImageSink sink = new OutputStreamImageSink(os);
            sink.setThumbnailParameter(param);
            sink.setOutputFormatName("foo");

            // when
            sink.write(imgToWrite);

            // then
            verify(iwParam, never()).setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            verify(iwParam, never()).setCompressionType(anyString());
            verify(iwParam, never()).setCompressionQuality(anyFloat());

            // - check to see that parameters were not read, as this format doesn't
            // support compression.
            verify(param, never()).getOutputQuality();
            verify(param, never()).getOutputFormatType();
        } finally {
            // Always deregister, even on assertion failure, so the mock SPI
            // does not remain in the global ImageIO registry for other tests.
            IIORegistry.getDefaultInstance().deregisterServiceProvider(spi);
        }
    }

    @Test
    public void write_ValidImage_WriterCanCompress_NoCompressionTypeFromWriter() throws IOException {
        // given
        ImageWriteParam iwParam = mock(ImageWriteParam.class);
        when(iwParam.canWriteCompressed()).thenReturn(true);
        when(iwParam.getCompressionTypes()).thenReturn(null);

        ImageWriterSpi spi = registerMockWriterSpi(iwParam);
        try {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            BufferedImage imgToWrite =
                    new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);

            ThumbnailParameter param = mock(ThumbnailParameter.class);
            when(param.getOutputQuality()).thenReturn(0.8f);
            when(param.getOutputFormatType()).thenReturn(ThumbnailParameter.DEFAULT_FORMAT_TYPE);

            OutputStreamImageSink sink = new OutputStreamImageSink(os);
            sink.setThumbnailParameter(param);
            sink.setOutputFormatName("foo");

            // when
            sink.write(imgToWrite);

            // then
            verify(iwParam, atLeastOnce()).setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            verify(iwParam, never()).setCompressionType(anyString());
            verify(iwParam, atLeastOnce()).setCompressionQuality(0.8f);

            // - check to see that parameters was read
            verify(param, atLeastOnce()).getOutputQuality();
            verify(param, atLeastOnce()).getOutputFormatType();
        } finally {
            // Always deregister, even on assertion failure, so the mock SPI
            // does not remain in the global ImageIO registry for other tests.
            IIORegistry.getDefaultInstance().deregisterServiceProvider(spi);
        }
    }

    @Test
    public void write_ValidImage_WriterCanCompress_HasCompressionTypeFromWriter() throws IOException {
        // given
        ImageWriteParam iwParam = mock(ImageWriteParam.class);
        when(iwParam.canWriteCompressed()).thenReturn(true);
        when(iwParam.getCompressionTypes()).thenReturn(new String[] {"FOOBAR"});

        ImageWriterSpi spi = registerMockWriterSpi(iwParam);
        try {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            BufferedImage imgToWrite =
                    new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);

            ThumbnailParameter param = mock(ThumbnailParameter.class);
            when(param.getOutputQuality()).thenReturn(0.8f);
            when(param.getOutputFormatType()).thenReturn(ThumbnailParameter.DEFAULT_FORMAT_TYPE);

            OutputStreamImageSink sink = new OutputStreamImageSink(os);
            sink.setThumbnailParameter(param);
            sink.setOutputFormatName("foo");

            // when
            sink.write(imgToWrite);

            // then
            verify(iwParam, atLeastOnce()).setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            verify(iwParam, atLeastOnce()).setCompressionType("FOOBAR");
            verify(iwParam, atLeastOnce()).setCompressionQuality(0.8f);

            // - check to see that parameters was read
            verify(param, atLeastOnce()).getOutputQuality();
            verify(param, atLeastOnce()).getOutputFormatType();
        } finally {
            // Always deregister, even on assertion failure, so the mock SPI
            // does not remain in the global ImageIO registry for other tests.
            IIORegistry.getDefaultInstance().deregisterServiceProvider(spi);
        }
    }

    @Test
    public void write_DoesNotCloseOutputStream() throws IOException {
        // given
        OutputStream os = mock(OutputStream.class);
        BufferedImage imgToWrite =
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);

        OutputStreamImageSink sink = new OutputStreamImageSink(os);
        sink.setOutputFormatName("jpeg");

        // when
        sink.write(imgToWrite);

        // then
        verify(os, never()).close();
    }
}
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.worker.netty;

import alluxio.RpcUtils;
import alluxio.StorageTierAssoc;
import alluxio.WorkerStorageTierAssoc;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.InvalidWorkerStateException;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.network.protocol.RPCProtoMessage;
import alluxio.proto.dataserver.Protocol;
import alluxio.util.IdUtils;
import alluxio.util.proto.ProtoMessage;
import alluxio.worker.block.BlockWorker;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.ExecutorService;

import javax.annotation.concurrent.NotThreadSafe;

/**
 * Netty handler that handles short circuit write requests
 * ({@link Protocol.LocalBlockCreateRequest} and {@link Protocol.LocalBlockCompleteRequest}).
 * Any other message type is passed on to the next handler in the pipeline.
 *
 * <p>The handler tracks at most one open write session per channel via {@link #mSessionId};
 * the session is cleaned up when the channel is unregistered.
 */
@NotThreadSafe
class DataServerShortCircuitWriteHandler extends ChannelInboundHandlerAdapter {
  private static final Logger LOG =
      LoggerFactory.getLogger(DataServerShortCircuitWriteHandler.class);
  /** Sentinel meaning "no write session is currently open on this channel". */
  private static final long INVALID_SESSION_ID = -1;

  /** Executor service for execute the RPCs. */
  private final ExecutorService mRpcExecutor;
  /** The block worker. */
  private final BlockWorker mBlockWorker;
  /** An object storing the mapping of tier aliases to ordinals. */
  private final StorageTierAssoc mStorageTierAssoc = new WorkerStorageTierAssoc();
  // NOTE(review): read and written both from netty event-loop callbacks
  // (channelUnregistered) and from tasks on mRpcExecutor; the class is marked
  // @NotThreadSafe -- confirm the intended threading model before changing this.
  private long mSessionId = INVALID_SESSION_ID;

  /**
   * Creates an instance of {@link DataServerShortCircuitWriteHandler}.
   *
   * @param service the executor to execute the RPCs
   * @param blockWorker the block worker
   */
  DataServerShortCircuitWriteHandler(ExecutorService service, BlockWorker blockWorker) {
    mRpcExecutor = service;
    mBlockWorker = blockWorker;
  }

  @Override
  public void channelRead(ChannelHandlerContext ctx, Object msg) {
    // Only proto messages for block create/complete are handled here;
    // everything else continues down the pipeline untouched.
    if (!(msg instanceof RPCProtoMessage)) {
      ctx.fireChannelRead(msg);
      return;
    }

    ProtoMessage message = ((RPCProtoMessage) msg).getMessage();
    if (message.isLocalBlockCreateRequest()) {
      handleBlockCreateRequest(ctx, message.asLocalBlockCreateRequest());
    } else if (message.isLocalBlockCompleteRequest()) {
      handleBlockCompleteRequest(ctx, message.asLocalBlockCompleteRequest());
    } else {
      ctx.fireChannelRead(msg);
    }
  }

  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable throwable) {
    // The RPC handlers do not throw exceptions. All the exception seen here is either
    // network exception or some runtime exception (e.g. NullPointerException).
    LOG.error("Failed to handle RPCs.", throwable);
    ctx.close();
  }

  @Override
  public void channelUnregistered(ChannelHandlerContext ctx) {
    // Release any block resources still held by an unfinished write session.
    if (mSessionId != INVALID_SESSION_ID) {
      mBlockWorker.cleanupSession(mSessionId);
      mSessionId = INVALID_SESSION_ID;
    }
    ctx.fireChannelUnregistered();
  }

  /**
   * Handles {@link Protocol.LocalBlockCreateRequest} to create block. No exceptions should be
   * thrown.
   *
   * @param ctx the channel handler context
   * @param request the local block create request
   */
  private void handleBlockCreateRequest(final ChannelHandlerContext ctx,
      final Protocol.LocalBlockCreateRequest request) {
    mRpcExecutor.submit(new Runnable() {
      @Override
      public void run() {
        RpcUtils.nettyRPCAndLog(LOG, new RpcUtils.NettyRPCCallable<Void>() {

          @Override
          public Void call() throws Exception {
            if (request.getOnlyReserveSpace()) {
              // Reserve additional space in an already-open session; replies with
              // an empty OK response on success.
              // NOTE(review): if no session is open, mSessionId is still
              // INVALID_SESSION_ID here -- confirm requestSpace tolerates that.
              mBlockWorker
                  .requestSpace(mSessionId, request.getBlockId(), request.getSpaceToReserve());
              ctx.writeAndFlush(RPCProtoMessage.createOkResponse(null));
            } else {
              if (mSessionId == INVALID_SESSION_ID) {
                mSessionId = IdUtils.createSessionId();
                // The response carries the local filesystem path the client will
                // write the block data to directly (the "short circuit").
                String path = mBlockWorker.createBlock(mSessionId, request.getBlockId(),
                    mStorageTierAssoc.getAlias(request.getTier()), request.getSpaceToReserve());
                Protocol.LocalBlockCreateResponse response =
                    Protocol.LocalBlockCreateResponse.newBuilder().setPath(path).build();
                ctx.writeAndFlush(new RPCProtoMessage(new ProtoMessage(response)));
              } else {
                // Only one write session may be open per channel at a time.
                LOG.warn("Create block {} without closing the previous session {}.",
                    request.getBlockId(), mSessionId);
                throw new InvalidWorkerStateException(
                    ExceptionMessage.SESSION_NOT_CLOSED.getMessage(mSessionId));
              }
            }
            return null;
          }

          @Override
          public void exceptionCaught(Throwable throwable) {
            // On failure, abandon the session entirely and report the error back
            // to the client as a status response.
            if (mSessionId != INVALID_SESSION_ID) {
              mBlockWorker.cleanupSession(mSessionId);
              mSessionId = INVALID_SESSION_ID;
            }
            ctx.writeAndFlush(
                RPCProtoMessage.createResponse(AlluxioStatusException.fromThrowable(throwable)));
          }

          @Override
          public String toString() {
            if (request.getOnlyReserveSpace()) {
              return String.format("Session %d: reserve space: %s", mSessionId,
                  request.toString());
            } else {
              return String.format("Session %d: create block: %s", mSessionId, request.toString());
            }
          }
        });
      }
    });
  }

  /**
   * Handles {@link Protocol.LocalBlockCompleteRequest}. No exceptions should be thrown.
   *
   * @param ctx the channel handler context
   * @param request the local block close request
   */
  private void handleBlockCompleteRequest(final ChannelHandlerContext ctx,
      final Protocol.LocalBlockCompleteRequest request) {
    mRpcExecutor.submit(new Runnable() {
      @Override
      public void run() {
        RpcUtils.nettyRPCAndLog(LOG, new RpcUtils.NettyRPCCallable<Void>() {

          @Override
          public Void call() throws Exception {
            // Either commit (make the block visible) or abort (discard it),
            // then close the session regardless of which path was taken.
            if (request.getCancel()) {
              mBlockWorker.abortBlock(mSessionId, request.getBlockId());
            } else {
              mBlockWorker.commitBlock(mSessionId, request.getBlockId());
            }
            mSessionId = INVALID_SESSION_ID;
            ctx.writeAndFlush(RPCProtoMessage.createOkResponse(null));
            return null;
          }

          @Override
          public void exceptionCaught(Throwable throwable) {
            // The session is considered closed even on failure; report the error.
            ctx.writeAndFlush(
                RPCProtoMessage.createResponse(AlluxioStatusException.fromThrowable(throwable)));
            mSessionId = INVALID_SESSION_ID;
          }

          @Override
          public String toString() {
            if (request.getCancel()) {
              return String.format("Session %d: abort block: %s", mSessionId, request.toString());
            } else {
              return String.format("Session %d: commit block: %s", mSessionId, request.toString());
            }
          }
        });
      }
    });
  }
}
/* * Copyright (C) 2013-2019 The Project Lombok Authors. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package lombok.javac.handlers; import static lombok.core.handlers.HandlerUtil.handleFlagUsage; import static lombok.javac.Javac.*; import static lombok.javac.JavacTreeMaker.TreeTag.treeTag; import static lombok.javac.JavacTreeMaker.TypeTag.typeTag; import static lombok.javac.handlers.JavacHandlerUtil.*; import org.mangosdk.spi.ProviderFor; import com.sun.tools.javac.tree.JCTree.JCAnnotation; import com.sun.tools.javac.tree.JCTree.JCAssert; import com.sun.tools.javac.tree.JCTree.JCAssign; import com.sun.tools.javac.tree.JCTree.JCBinary; import com.sun.tools.javac.tree.JCTree.JCBlock; import com.sun.tools.javac.tree.JCTree.JCExpression; import com.sun.tools.javac.tree.JCTree.JCExpressionStatement; import com.sun.tools.javac.tree.JCTree.JCFieldAccess; import com.sun.tools.javac.tree.JCTree.JCIdent; import com.sun.tools.javac.tree.JCTree.JCIf; import com.sun.tools.javac.tree.JCTree.JCLiteral; import com.sun.tools.javac.tree.JCTree.JCMethodDecl; import com.sun.tools.javac.tree.JCTree.JCMethodInvocation; import com.sun.tools.javac.tree.JCTree.JCParens; import com.sun.tools.javac.tree.JCTree.JCStatement; import com.sun.tools.javac.tree.JCTree.JCSynchronized; import com.sun.tools.javac.tree.JCTree.JCThrow; import com.sun.tools.javac.tree.JCTree.JCTry; import com.sun.tools.javac.tree.JCTree.JCVariableDecl; import com.sun.tools.javac.util.List; import com.sun.tools.javac.util.Name; import lombok.ConfigurationKeys; import lombok.NonNull; import lombok.core.AST.Kind; import lombok.core.AnnotationValues; import lombok.core.HandlerPriority; import lombok.javac.JavacAnnotationHandler; import lombok.javac.JavacNode; @ProviderFor(JavacAnnotationHandler.class) @HandlerPriority(value = 512) // 2^9; onParameter=@__(@NonNull) has to run first. 
public class HandleNonNull extends JavacAnnotationHandler<NonNull> {
	/**
	 * Handles a {@code @NonNull} annotation on a method/constructor parameter (or on the
	 * parameter's type, for TYPE_USE placement) by prepending a generated null-check statement
	 * to the method body. On fields it only warns when applied to a primitive; the actual
	 * field null-checks are generated by the handlers that create methods from fields
	 * (e.g. {@code @Setter}, {@code @RequiredArgsConstructor}).
	 */
	@Override public void handle(AnnotationValues<NonNull> annotation, JCAnnotation ast, JavacNode annotationNode) {
		handleFlagUsage(annotationNode, ConfigurationKeys.NON_NULL_FLAG_USAGE, "@NonNull");
		
		if (annotationNode.up().getKind() == Kind.FIELD) {
			// This is meaningless unless the field is used to generate a method (@Setter, @RequiredArgsConstructor, etc),
			// but in that case those handlers will take care of it. However, we DO check if the annotation is applied to
			// a primitive, because those handlers trigger on any annotation named @NonNull and we only want the warning
			// behaviour on _OUR_ 'lombok.NonNull'.
			try {
				if (isPrimitive(((JCVariableDecl) annotationNode.up().get()).vartype)) {
					annotationNode.addWarning("@NonNull is meaningless on a primitive.");
				}
			} catch (Exception ignore) {}
			return;
		}
		
		JCMethodDecl declaration;
		JavacNode paramNode;
		
		// Resolve the parameter node whether the annotation sits on the parameter itself
		// (ARGUMENT) or on the parameter's type (TYPE_USE, two levels down in the AST).
		switch (annotationNode.up().getKind()) {
		case ARGUMENT:
			paramNode = annotationNode.up();
			break;
		case TYPE_USE:
			JavacNode typeNode = annotationNode.directUp();
			paramNode = typeNode.directUp();
			break;
		default:
			return;
		}
		
		if (paramNode.getKind() != Kind.ARGUMENT) return;
		
		try {
			declaration = (JCMethodDecl) paramNode.up().get();
		} catch (Exception e) {
			return;
		}
		
		if (declaration.body == null) {
			// This used to be a warning, but as @NonNull also has a documentary purpose, better to not warn about this. Since 1.16.7
			return;
		}
		
		// Possibly, if 'declaration instanceof ConstructorDeclaration', fetch declaration.constructorCall, search it for any references to our parameter,
		// and if they exist, create a new method in the class: 'private static <T> T lombok$nullCheck(T expr, String msg) {if (expr == null) throw NPE; return expr;}' and
		// wrap all references to it in the super/this to a call to this method.
		
		JCStatement nullCheck = recursiveSetGeneratedBy(generateNullCheck(annotationNode.getTreeMaker(), paramNode, annotationNode), ast, annotationNode.getContext());
		
		if (nullCheck == null) {
			// @NonNull applied to a primitive. Kinda pointless. Let's generate a warning.
			annotationNode.addWarning("@NonNull is meaningless on a primitive.");
			return;
		}
		
		List<JCStatement> statements = declaration.body.stats;
		
		String expectedName = paramNode.getName();
		
		/* Abort if the null check is already there, delving into try and synchronized statements */ {
			List<JCStatement> stats = statements;
			int idx = 0;
			while (stats.size() > idx) {
				JCStatement stat = stats.get(idx++);
				if (JavacHandlerUtil.isConstructorCall(stat)) continue;
				// Descend into the bodies of try/synchronized wrappers: a null check
				// placed inside them still counts as "already present".
				if (stat instanceof JCTry) {
					stats = ((JCTry) stat).body.stats;
					idx = 0;
					continue;
				}
				if (stat instanceof JCSynchronized) {
					stats = ((JCSynchronized) stat).body.stats;
					idx = 0;
					continue;
				}
				String varNameOfNullCheck = returnVarNameIfNullCheck(stat);
				if (varNameOfNullCheck == null) break;
				if (varNameOfNullCheck.equals(expectedName)) return;
			}
		}
		
		// Split the body into 'head' (the leading super()/this() call plus any
		// previously generated null checks, which must stay first) and 'tail'
		// (everything else); the new check is inserted between the two.
		List<JCStatement> tail = statements;
		List<JCStatement> head = List.nil();
		for (JCStatement stat : statements) {
			if (JavacHandlerUtil.isConstructorCall(stat) || (JavacHandlerUtil.isGenerated(stat) && isNullCheck(stat))) {
				tail = tail.tail;
				head = head.prepend(stat);
				continue;
			}
			break;
		}
		List<JCStatement> newList = tail.prepend(nullCheck);
		// 'head' was built by prepending, so it is in reverse order; prepending each
		// element back onto newList restores the original order.
		for (JCStatement stat : head) newList = newList.prepend(stat);
		declaration.body.stats = newList;
		annotationNode.getAst().setChanged();
	}
	
	/** Returns true if the statement is recognizable as some form of null check (see {@link #returnVarNameIfNullCheck}). */
	public boolean isNullCheck(JCStatement stat) {
		return returnVarNameIfNullCheck(stat) != null;
	}
	
	/**
	 * Checks if the statement is of the form 'if (x == null) {throw WHATEVER;}' or 'assert x != null;',
	 * where the block braces are optional. If it is of this form, returns "x".
	 * If it is not of this form, returns null.
	 */
	public String returnVarNameIfNullCheck(JCStatement stat) {
		boolean isIf = stat instanceof JCIf;
		boolean isExpression = stat instanceof JCExpressionStatement;
		if (!isIf && !(stat instanceof JCAssert) && !isExpression) return null;
		
		if (isExpression) {
			/* Check if the statements contains a call to checkNotNull or requireNonNull */
			JCExpression expression = ((JCExpressionStatement) stat).expr;
			// Also accept 'y = checkNotNull(x, ...)' by looking at the right-hand side.
			if (expression instanceof JCAssign) expression = ((JCAssign) expression).rhs;
			if (!(expression instanceof JCMethodInvocation)) return null;
			
			JCMethodInvocation invocation = (JCMethodInvocation) expression;
			JCExpression method = invocation.meth;
			Name name = null;
			if (method instanceof JCFieldAccess) {
				name = ((JCFieldAccess) method).name;
			} else if (method instanceof JCIdent) {
				name = ((JCIdent) method).name;
			}
			// Matches Guava's Preconditions.checkNotNull and Objects.requireNonNull
			// by simple name, regardless of qualifier.
			if (name == null || (!name.contentEquals("checkNotNull") && !name.contentEquals("requireNonNull"))) return null;
			
			if (invocation.args.isEmpty()) return null;
			JCExpression firstArgument = invocation.args.head;
			if (!(firstArgument instanceof JCIdent)) return null;
			return ((JCIdent) firstArgument).toString();
		}
		
		if (isIf) {
			/* Check that the if's statement is a throw statement, possibly in a block. */
			JCStatement then = ((JCIf) stat).thenpart;
			if (then instanceof JCBlock) {
				List<JCStatement> stats = ((JCBlock) then).stats;
				if (stats.length() == 0) return null;
				then = stats.get(0);
			}
			if (!(then instanceof JCThrow)) return null;
		}
		
		/* Check that the if's conditional is like 'x == null'. Return from this method (don't generate
		   a nullcheck) if 'x' is equal to our own variable's name: There's already a nullcheck here. */ {
			JCExpression cond = isIf ? ((JCIf) stat).cond : ((JCAssert) stat).cond;
			// Strip any number of wrapping parentheses: 'if ((x == null))' etc.
			while (cond instanceof JCParens) cond = ((JCParens) cond).expr;
			if (!(cond instanceof JCBinary)) return null;
			JCBinary bin = (JCBinary) cond;
			// An if-based check tests 'x == null'; an assert-based check asserts 'x != null'.
			if (isIf) {
				if (!CTC_EQUAL.equals(treeTag(bin))) return null;
			} else {
				if (!CTC_NOT_EQUAL.equals(treeTag(bin))) return null;
			}
			if (!(bin.lhs instanceof JCIdent)) return null;
			if (!(bin.rhs instanceof JCLiteral)) return null;
			// The right-hand side must be the 'null' literal (BOT type tag).
			if (!CTC_BOT.equals(typeTag(bin.rhs))) return null;
			return ((JCIdent) bin.lhs).name.toString();
		}
	}
}
package liquibase.serializer;

import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.parser.core.ParsedNode;
import liquibase.parser.core.ParsedNodeException;
import liquibase.resource.ResourceAccessor;
import liquibase.util.ISODateFormat;
import liquibase.util.ObjectUtil;

import java.lang.reflect.*;
import java.util.*;
import java.util.regex.*;

/**
 * Base class for objects that are serialized to / deserialized from a {@link ParsedNode} tree
 * by reflecting over their "serializable fields" (see {@link ReflectionSerializer}).
 */
public abstract class AbstractLiquibaseSerializable implements LiquibaseSerializable {

    /**
     * Matches escaped values of the form {@code "text!{fully.qualified.ClassName}"} which
     * encode a typed value as a string. Compiled once; {@link Pattern} is thread-safe.
     */
    private static final Pattern ESCAPED_VALUE_PATTERN = Pattern.compile("(.*)!\\{(.*)\\}");

    // NOTE(review): never read or written in this class; kept for source compatibility.
    private Set<String> serializableFields;

    /**
     * Populates this object's serializable fields from the given parsed node tree.
     *
     * <p>For each child node: if it names a field directly, the value is set (recursing for
     * nested {@link LiquibaseSerializable} types and unescaping scalar values); otherwise the
     * node may still contain elements of a collection-typed field. Finally, a direct value on
     * {@code parsedNode} itself is applied to any {@code DIRECT_VALUE} fields.
     *
     * @throws ParsedNodeException if setting any property fails
     */
    public void load(ParsedNode parsedNode, ResourceAccessor resourceAccessor) throws ParsedNodeException {
        for (ParsedNode childNode : parsedNode.getChildren()) {
            if (!shouldAutoLoad(childNode)) {
                continue;
            }
            try {
                if (this.getSerializableFields().contains(childNode.getName())) {
                    String field = childNode.getName();
                    Class dataTypeClass = this.getSerializableFieldDataTypeClass(field);
                    if (Collection.class.isAssignableFrom(dataTypeClass)) {
                        loadCollectionField(field, childNode, resourceAccessor);
                    }
                    if (LiquibaseSerializable.class.isAssignableFrom(dataTypeClass)) {
                        // Only concrete types can be instantiated and loaded recursively.
                        if (!dataTypeClass.isInterface()
                                && !Modifier.isAbstract(dataTypeClass.getModifiers())) {
                            LiquibaseSerializable childObject =
                                    (LiquibaseSerializable) dataTypeClass.newInstance();
                            childObject.load(childNode, resourceAccessor);
                            setSerializableFieldValue(field, childObject);
                        }
                    } else if (childNode.getValue() != null) {
                        ObjectUtil.setProperty(this, field,
                                convertEscaped(childNode.getValue().toString()));
                    }
                } else {
                    // The node does not name a field directly; it may still carry elements
                    // for one of the collection-typed fields.
                    for (String field : this.getSerializableFields()) {
                        loadCollectionField(field, childNode, resourceAccessor);
                    }
                }
            } catch (Exception e) {
                throw new ParsedNodeException("Error setting property", e);
            }
        }

        if (parsedNode.getValue() != null) {
            for (String field : this.getSerializableFields()) {
                if (this.getSerializableFieldType(field) == SerializationType.DIRECT_VALUE) {
                    Object value = parsedNode.getValue(String.class);
                    value = convertEscaped(value);
                    ObjectUtil.setProperty(this, field, value);
                }
            }
        }
    }

    /**
     * If {@code field} is a collection of a concrete {@link LiquibaseSerializable} element type,
     * loads any matching elements found at or under {@code childNode} and adds them to the
     * field's (already instantiated) collection. No-op otherwise.
     *
     * <p>Extracted from two formerly duplicated branches of {@link #load}; when the node itself
     * names the field, {@code childNode.getName().equals(field)} is trivially true, matching the
     * original behavior.
     */
    private void loadCollectionField(String field, ParsedNode childNode,
                                     ResourceAccessor resourceAccessor) throws Exception {
        Class dataTypeClass = this.getSerializableFieldDataTypeClass(field);
        if (!Collection.class.isAssignableFrom(dataTypeClass)) {
            return;
        }
        Type[] dataTypeClassParameters = getSerializableFieldDataTypeClassParameters(field);
        if (dataTypeClassParameters.length != 1) {
            return;
        }
        // Resolve the collection's element type, unwrapping one level of generics if needed.
        Class collectionType = null;
        if (dataTypeClassParameters[0] instanceof Class) {
            collectionType = (Class) dataTypeClassParameters[0];
        } else if (dataTypeClassParameters[0] instanceof ParameterizedType) {
            collectionType = (Class) ((ParameterizedType) dataTypeClassParameters[0]).getRawType();
        }
        if (collectionType == null
                || !LiquibaseSerializable.class.isAssignableFrom(collectionType)
                || collectionType.isInterface()
                || Modifier.isAbstract(collectionType.getModifiers())) {
            return;
        }

        String elementName =
                ((LiquibaseSerializable) collectionType.newInstance()).getSerializedObjectName();
        List<ParsedNode> elementNodes = Collections.emptyList();
        if (childNode.getName().equals(elementName)) {
            // The node itself is a single element of the collection.
            elementNodes = Collections.singletonList(childNode);
        } else if (childNode.getName().equals(field)) {
            // The node is the collection container; its children are the elements.
            elementNodes = childNode.getChildren(null, elementName);
        }
        if (elementNodes.isEmpty()) {
            return;
        }

        Collection collection = ((Collection) getSerializableFieldValue(field));
        for (ParsedNode node : elementNodes) {
            LiquibaseSerializable childObject =
                    (LiquibaseSerializable) collectionType.newInstance();
            childObject.load(node, resourceAccessor);
            collection.add(childObject);
        }
    }

    /**
     * Converts an escaped value of the form {@code "text!{ClassName}"} back into an instance of
     * {@code ClassName} (via a long-valued constructor for {@link Date} subclasses,
     * {@link Enum#valueOf} for enums, or a String constructor otherwise). Any other value,
     * including {@code null}, is returned unchanged.
     *
     * @throws UnexpectedLiquibaseException if the target class cannot be loaded or instantiated
     */
    protected Object convertEscaped(Object value) {
        if (!(value instanceof String)) {
            // Nothing to unescape; also guards against null, which the previous
            // implementation would have thrown a NullPointerException on.
            return value;
        }
        Matcher matcher = ESCAPED_VALUE_PATTERN.matcher((String) value);
        if (matcher.matches()) {
            String stringValue = matcher.group(1);
            try {
                Class<?> aClass = Class.forName(matcher.group(2));
                if (Date.class.isAssignableFrom(aClass)) {
                    Date date = new ISODateFormat().parse(stringValue);
                    value = aClass.getConstructor(long.class).newInstance(date.getTime());
                } else if (Enum.class.isAssignableFrom(aClass)) {
                    value = Enum.valueOf((Class<? extends Enum>) aClass, stringValue);
                } else {
                    value = aClass.getConstructor(String.class).newInstance(stringValue);
                }
            } catch (Exception e) {
                throw new UnexpectedLiquibaseException(e);
            }
        }
        return value;
    }

    /**
     * Hook for subclasses to exclude particular child nodes from {@link #load}.
     * Default: load everything.
     */
    protected boolean shouldAutoLoad(ParsedNode node) {
        return true;
    }

    /**
     * Serializes this object to a {@link ParsedNode} tree: DIRECT_VALUE fields become the node's
     * own value, NAMED_FIELD/NESTED_OBJECT fields become child nodes. Fields whose serialized
     * value is null are skipped.
     */
    @Override
    public ParsedNode serialize() throws ParsedNodeException {
        ParsedNode node = new ParsedNode(null, getSerializedObjectName());
        for (String field : getSerializableFields()) {
            Object fieldValue = getSerializableFieldValue(field);
            fieldValue = serializeValue(fieldValue);
            if (fieldValue == null) {
                continue;
            }

            SerializationType type = getSerializableFieldType(field);
            if (type == SerializationType.DIRECT_VALUE) {
                node.setValue(fieldValue);
            } else if (type == SerializationType.NAMED_FIELD || type == SerializationType.NESTED_OBJECT) {
                if (fieldValue instanceof ParsedNode) {
                    node.addChild((ParsedNode) fieldValue);
                } else {
                    node.addChild(new ParsedNode(null, field).setValue(fieldValue));
                }
            } else {
                throw new UnexpectedLiquibaseException("Unknown type: "+type);
            }
        }
        return node;
    }

    /** Returns the serializable field names, discovered via reflection. */
    @Override
    public Set<String> getSerializableFields() {
        return ReflectionSerializer.getInstance().getFields(this);
    }

    /** Returns the current value of the given serializable field. */
    @Override
    public Object getSerializableFieldValue(String field) {
        return ReflectionSerializer.getInstance().getValue(this, field);
    }

    /** Default serialization type for all fields; subclasses override per field as needed. */
    @Override
    public SerializationType getSerializableFieldType(String field) {
        return SerializationType.NAMED_FIELD;
    }

    /** Returns the declared type of the given serializable field. */
    protected Class getSerializableFieldDataTypeClass(String field) {
        return ReflectionSerializer.getInstance().getDataTypeClass(this, field);
    }

    /** Returns the generic type arguments of the given serializable field's declared type. */
    protected Type[] getSerializableFieldDataTypeClassParameters(String field) {
        return ReflectionSerializer.getInstance().getDataTypeClassParameters(this, field);
    }

    /** Sets the given serializable field's value via reflection. */
    protected void setSerializableFieldValue(String field, Object value) {
        ReflectionSerializer.getInstance().setValue(this, field, value);
    }

    /**
     * Recursively serializes a field value: collections become lists of serialized elements
     * (an originally empty collection serializes to null, matching historical behavior),
     * {@link LiquibaseSerializable}s become their {@link #serialize()} node, and anything else
     * is returned as-is.
     */
    protected Object serializeValue(Object value) throws ParsedNodeException {
        if (value instanceof Collection) {
            List returnList = new ArrayList();
            for (Object obj : (Collection) value) {
                Object objValue = serializeValue(obj);
                if (objValue != null) {
                    returnList.add(objValue);
                }
            }
            // Note: emptiness is judged on the SOURCE collection, not returnList --
            // a non-empty collection whose elements all serialize to null yields
            // an empty list, not null. Preserved as-is.
            if (((Collection) value).size() == 0) {
                return null;
            } else {
                return returnList;
            }
        } else if (value instanceof LiquibaseSerializable) {
            return ((LiquibaseSerializable) value).serialize();
        } else {
            return value;
        }
    }

    /** By default every field lives in the object's own namespace. */
    @Override
    public String getSerializableFieldNamespace(String field) {
        return getSerializedObjectNamespace();
    }
}
package org.zstack.header.host;

import org.zstack.header.allocator.HostCapacityInventory;
import org.zstack.header.cluster.ClusterInventory;
import org.zstack.header.configuration.PythonClassInventory;
import org.zstack.header.query.ExpandedQueries;
import org.zstack.header.query.ExpandedQuery;
import org.zstack.header.query.Queryable;
import org.zstack.header.search.Inventory;
import org.zstack.header.vm.VmInstanceInventory;
import org.zstack.header.zone.ZoneInventory;

import javax.persistence.JoinColumn;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * @inventory
 * inventory for host. Depending on hypervisor, the inventory may have extra fields
 *
 * @example {
"inventory": {
"zoneUuid": "2893ce85c43d4a3a8d78f414da39966e",
"name": "host1-192.168.0.203",
"uuid": "43673938584447b2a29ab3d53f9d88d3",
"clusterUuid": "8524072a4274403892bcc5b1972c2576",
"description": "Test",
"managementIp": "192.168.0.203",
"hypervisorType": "KVM",
"state": "Enabled",
"status": "Connected",
"createDate": "Feb 28, 2014 6:49:24 PM",
"lastOpDate": "Feb 28, 2014 6:49:24 PM"
}
}
 * @since 0.1.0
 */
@Inventory(mappingVOClass = HostVO.class)
@PythonClassInventory
@ExpandedQueries({
        @ExpandedQuery(expandedField = "zone", inventoryClass = ZoneInventory.class,
                foreignKey = "zoneUuid", expandedInventoryKey = "uuid"),
        @ExpandedQuery(expandedField = "cluster", inventoryClass = ClusterInventory.class,
                foreignKey = "clusterUuid", expandedInventoryKey = "uuid"),
        @ExpandedQuery(expandedField = "vmInstance", inventoryClass = VmInstanceInventory.class,
                foreignKey = "uuid", expandedInventoryKey = "hostUuid")
})
public class HostInventory implements Serializable {

    /**
     * @desc uuid of zone this host belongs to
     */
    private String zoneUuid;
    /**
     * @desc max length of 255 characters
     */
    private String name;
    /**
     * @desc host uuid
     */
    private String uuid;
    /**
     * @desc uuid of cluster this host belongs to
     */
    private String clusterUuid;
    /**
     * @desc max length of 2048 characters
     * @nullable
     */
    private String description;
    /**
     * @desc IPv4 address of host's management nic
     *
     * .. note:: This field could be DNS name
     */
    private String managementIp;
    /**
     * @desc type of hypervisor installed on the host
     */
    private String hypervisorType;
    /**
     * @desc
     * - Disabled: no vm can be created on this host
     * - PreMaintenance: host is in middle way of entering state Maintenance
     * - Maintenance: host is ready for maintenance work, for example, upgrading CPU/memory. No vm can be created on this host
     *
     * .. note:: PreMaintenance is an ephemeral state after admin switches host state to Maintenance. During entering
     * Maintenance, zstack will try to live migrate all running vm to other hosts, vm failed to migrate will be stopped.
     * In maintenance mode, host will not receive any commands from zstack, admin can safely shut it off and do whatever upgrade
     * work
     *
     * @choices
     * - Enabled
     * - Disabled
     * - PreMaintenance
     * - Maintenance
     */
    private String state;
    /**
     * @desc
     * - Connecting: zstack management server is trying to establish connection to hypervisor agent
     * - Connected: connection to hypervisor agent has been established
     * - Disconnected: connection to hypervisor agent is broken, no commands can be sent to hypervisor and no vm can be created
     * on this host
     *
     * @choices
     * - Connecting
     * - Connected
     * - Disconnected
     */
    private String status;

    // Capacity figures are expanded from HostCapacityVO via the query layer.
    @Queryable(mappingClass = HostCapacityInventory.class,
            joinColumn = @JoinColumn(name="uuid", referencedColumnName = "totalCpu"))
    private Long totalCpuCapacity;
    @Queryable(mappingClass = HostCapacityInventory.class,
            joinColumn = @JoinColumn(name="uuid", referencedColumnName = "availableCpu"))
    private Long availableCpuCapacity;
    @Queryable(mappingClass = HostCapacityInventory.class,
            joinColumn = @JoinColumn(name="uuid", referencedColumnName = "totalMemory"))
    private Long totalMemoryCapacity;
    @Queryable(mappingClass = HostCapacityInventory.class,
            joinColumn = @JoinColumn(name="uuid", referencedColumnName = "availableMemory"))
    private Long availableMemoryCapacity;

    /**
     * @desc the time this resource gets created
     */
    private Timestamp createDate;
    /**
     * @desc last time this resource gets operated
     */
    private Timestamp lastOpDate;

    /**
     * Copies every field from the database VO into this inventory; capacity
     * fields are filled only when the VO actually carries capacity data.
     */
    protected HostInventory(HostVO vo) {
        this.setUuid(vo.getUuid());
        this.setName(vo.getName());
        this.setDescription(vo.getDescription());
        this.setZoneUuid(vo.getZoneUuid());
        this.setClusterUuid(vo.getClusterUuid());
        this.setManagementIp(vo.getManagementIp());
        this.setHypervisorType(vo.getHypervisorType());
        this.setState(vo.getState().toString());
        this.setStatus(vo.getStatus().toString());
        this.setCreateDate(vo.getCreateDate());
        this.setLastOpDate(vo.getLastOpDate());
        if (vo.getCapacity() != null) {
            this.setTotalCpuCapacity(vo.getCapacity().getTotalCpu());
            this.setAvailableCpuCapacity(vo.getCapacity().getAvailableCpu());
            this.setTotalMemoryCapacity(vo.getCapacity().getTotalMemory());
            this.setAvailableMemoryCapacity(vo.getCapacity().getAvailableMemory());
        }
    }

    public HostInventory() {
    }

    /** Builds an inventory from a single VO. */
    public static HostInventory valueOf(HostVO vo) {
        return new HostInventory(vo);
    }

    /** Builds inventories for a whole collection of VOs, preserving iteration order. */
    public static List<HostInventory> valueOf(Collection<HostVO> vos) {
        List<HostInventory> result = new ArrayList<HostInventory>(vos.size());
        for (HostVO each : vos) {
            result.add(HostInventory.valueOf(each));
        }
        return result;
    }

    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getZoneUuid() {
        return zoneUuid;
    }

    public void setZoneUuid(String zoneUuid) {
        this.zoneUuid = zoneUuid;
    }

    public String getClusterUuid() {
        return clusterUuid;
    }

    public void setClusterUuid(String clusterUuid) {
        this.clusterUuid = clusterUuid;
    }

    public String getManagementIp() {
        return managementIp;
    }

    public void setManagementIp(String managementIp) {
        this.managementIp = managementIp;
    }

    public String getHypervisorType() {
        return hypervisorType;
    }

    public void setHypervisorType(String hypervisorType) {
        this.hypervisorType = hypervisorType;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public Long getTotalCpuCapacity() {
        return totalCpuCapacity;
    }

    public void setTotalCpuCapacity(Long totalCpuCapacity) {
        this.totalCpuCapacity = totalCpuCapacity;
    }

    public Long getAvailableCpuCapacity() {
        return availableCpuCapacity;
    }

    public void setAvailableCpuCapacity(Long availableCpuCapacity) {
        this.availableCpuCapacity = availableCpuCapacity;
    }

    public Long getTotalMemoryCapacity() {
        return totalMemoryCapacity;
    }

    public void setTotalMemoryCapacity(Long totalMemoryCapacity) {
        this.totalMemoryCapacity = totalMemoryCapacity;
    }

    public Long getAvailableMemoryCapacity() {
        return availableMemoryCapacity;
    }

    public void setAvailableMemoryCapacity(Long availableMemoryCapacity) {
        this.availableMemoryCapacity = availableMemoryCapacity;
    }

    public Timestamp getCreateDate() {
        return createDate;
    }

    public void setCreateDate(Timestamp createDate) {
        this.createDate = createDate;
    }

    public Timestamp getLastOpDate() {
        return lastOpDate;
    }

    public void setLastOpDate(Timestamp lastOpDate) {
        this.lastOpDate = lastOpDate;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.attribute.expression.language;

import org.antlr.runtime.tree.Tree;
import org.apache.nifi.attribute.expression.language.compile.ExpressionCompiler;
import org.apache.nifi.attribute.expression.language.evaluation.Evaluator;
import org.apache.nifi.attribute.expression.language.evaluation.EvaluatorState;
import org.apache.nifi.attribute.expression.language.evaluation.QueryResult;
import org.apache.nifi.attribute.expression.language.evaluation.selection.AttributeEvaluator;
import org.apache.nifi.attribute.expression.language.exception.AttributeExpressionLanguageParsingException;
import org.apache.nifi.expression.AttributeExpression.ResultType;
import org.apache.nifi.expression.AttributeValueDecorator;
import org.apache.nifi.parameter.ExpressionLanguageAwareParameterParser;
import org.apache.nifi.parameter.ParameterLookup;
import org.apache.nifi.parameter.ParameterParser;
import org.apache.nifi.parameter.ParameterReference;
import org.apache.nifi.parameter.ParameterToken;
import org.apache.nifi.parameter.ParameterTokenList;
import org.apache.nifi.processor.exception.ProcessException;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Class used for creating and evaluating NiFi Expression Language. Once a Query
 * has been created, it may be evaluated using the evaluate methods exactly
 * once.
 */
public class Query {

    // Original expression text, retained for toString() diagnostics.
    private final String query;
    // ANTLR parse tree the evaluator was compiled from.
    private final Tree tree;
    // Root of the compiled evaluator chain that produces the query's value.
    private final Evaluator<?> evaluator;
    // Enforces the evaluate-exactly-once contract.
    private final AtomicBoolean evaluated = new AtomicBoolean(false);
    // Per-query evaluator state. NOTE(review): not referenced by any method in
    // this file; presumably consumed elsewhere or vestigial — verify before removing.
    private final EvaluatorState context = new EvaluatorState();

    private Query(final String query, final Tree tree, final Evaluator<?> evaluator) {
        this.query = query;
        this.tree = tree;
        this.evaluator = evaluator;
    }

    /**
     * @return true if the given value compiles as exactly one Expression with no
     *         surrounding characters; false on any parsing or processing failure
     */
    public static boolean isValidExpression(final String value) {
        try {
            validateExpression(value, false);
            return true;
        } catch (final AttributeExpressionLanguageParsingException | ProcessException e) {
            return false;
        }
    }

    public static ResultType getResultType(final String value) throws AttributeExpressionLanguageParsingException {
        return Query.compile(value).getResultType();
    }

    /**
     * Compiles each embedded Expression found in the value and returns its result type,
     * in order of appearance.
     */
    public static List<ResultType> extractResultTypes(final String value) throws AttributeExpressionLanguageParsingException {
        final List<ResultType> types = new ArrayList<>();
        for (final Range range : extractExpressionRanges(value)) {
            final String text = value.substring(range.getStart(), range.getEnd() + 1);
            types.add(getResultType(text));
        }
        return types;
    }

    /** Returns the raw text of each embedded ${...} Expression in the value. */
    public static List<String> extractExpressions(final String value) throws AttributeExpressionLanguageParsingException {
        final List<String> expressions = new ArrayList<>();
        for (final Range range : extractExpressionRanges(value)) {
            expressions.add(value.substring(range.getStart(), range.getEnd() + 1));
        }
        return expressions;
    }

    /** Returns the character ranges of un-escaped ${...} Expressions in the value. */
    public static List<Range> extractExpressionRanges(final String value) throws AttributeExpressionLanguageParsingException {
        return extractExpressionRanges(value, false);
    }

    /** Returns the character ranges of escaped sequences (e.g. $${...}) in the value. */
    public static List<Range> extractEscapedRanges(final String value) throws AttributeExpressionLanguageParsingException {
        return extractExpressionRanges(value, true);
    }

    /**
     * Single-pass scanner that locates either Expression ranges (extractEscapeSequences == false)
     * or escape-sequence ranges (== true). An even number of $'s before '{' means the sequence is
     * escaped; quoted sections inside an Expression are skipped wholesale so braces within
     * string literals are not counted.
     */
    private static List<Range> extractExpressionRanges(final String value, final boolean extractEscapeSequences) throws AttributeExpressionLanguageParsingException {
        final List<Range> ranges = new ArrayList<>();
        char lastChar = 0;
        int embeddedCount = 0;      // depth of '{' nesting while inside an Expression
        int expressionStart = -1;   // start index of the range being built, or -1 if outside
        int dollarCount = 0;        // consecutive '$' run length preceding the current char
        int backslashCount = 0;     // total '\' seen, used to detect escaped quote chars

        charLoop:
        for (int i = 0; i < value.length(); i++) {
            final char c = value.charAt(i);

            // Inside an Expression, jump over quoted literals entirely; an unterminated
            // quote aborts the scan (nothing further can be a well-formed range).
            if (expressionStart > -1 && (c == '\'' || c == '"') && (lastChar != '\\' || backslashCount % 2 == 0)) {
                final int endQuoteIndex = findEndQuoteChar(value, i);
                if (endQuoteIndex < 0) {
                    break charLoop;
                }
                i = endQuoteIndex;
                continue;
            }

            if (c == '{') {
                // Even $-count == escaped; we open a range only when the escaping parity
                // matches the kind of range we were asked to extract.
                final boolean evenDollarCount = dollarCount % 2 == 0;
                if ((evenDollarCount == extractEscapeSequences) && lastChar == '$') {
                    if (embeddedCount == 0) {
                        // Escaped ranges start at the first '$' of the run; plain
                        // Expressions start at the single '$' just before '{'.
                        expressionStart = i - (extractEscapeSequences ? dollarCount : 1);
                    }
                }

                // Keep track of the number of opening curly braces that we are embedded within,
                // if we are within an Expression. If we are outside of an Expression, we can just ignore
                // curly braces. This allows us to ignore the first character if the value is something
                // like: { ${abc} }
                // However, we will count the curly braces if we have something like: ${ $${abc} }
                if (expressionStart > -1) {
                    embeddedCount++;
                }
            } else if (c == '}') {
                if (embeddedCount <= 0) {
                    continue;
                }

                if (--embeddedCount == 0) {
                    if (expressionStart > -1) {
                        // ended expression. Add a new range.
                        final Range range = new Range(expressionStart, i);
                        ranges.add(range);
                    }
                    expressionStart = -1;
                }
            } else if (c == '$') {
                dollarCount++;
            } else if (c == '\\') {
                backslashCount++;
            } else {
                dollarCount = 0;
            }

            lastChar = c;
        }

        return ranges;
    }

    /**
     * @param value expression to validate
     * @param allowSurroundingCharacters whether to allow surrounding chars
     * @throws AttributeExpressionLanguageParsingException if problems parsing given expression
     */
    public static void validateExpression(final String value, final boolean allowSurroundingCharacters) throws AttributeExpressionLanguageParsingException {
        if (!allowSurroundingCharacters) {
            final List<Range> ranges = extractExpressionRanges(value);
            if (ranges.size() > 1) {
                throw new AttributeExpressionLanguageParsingException("Found multiple Expressions but expected only 1");
            }

            if (ranges.isEmpty()) {
                throw new AttributeExpressionLanguageParsingException("No Expressions found");
            }

            final Range range = ranges.get(0);
            final String expression = value.substring(range.getStart(), range.getEnd() + 1);
            Query.compile(expression);

            if (range.getStart() > 0 || range.getEnd() < value.length() - 1) {
                throw new AttributeExpressionLanguageParsingException("Found characters outside of Expression");
            }
        } else {
            // Surrounding characters allowed: just compile every embedded Expression.
            for (final Range range : extractExpressionRanges(value)) {
                final String expression = value.substring(range.getStart(), range.getEnd() + 1);
                Query.compile(expression);
            }
        }
    }

    /**
     * Finds the index of the matching close quote for the quote char at quoteStart,
     * honoring backslash escaping; returns -1 if the quote is never closed.
     */
    static int findEndQuoteChar(final String value, final int quoteStart) {
        final char quoteChar = value.charAt(quoteStart);

        int backslashCount = 0;
        char lastChar = 0;
        for (int i = quoteStart + 1; i < value.length(); i++) {
            final char c = value.charAt(i);

            if (c == '\\') {
                backslashCount++;
            } else if (c == quoteChar && (backslashCount % 2 == 0 || lastChar != '\\')) {
                return i;
            }

            lastChar = c;
        }

        return -1;
    }

    /**
     * Evaluates a single already-compiled expression; a null evaluation result yields null
     * (undecorated), otherwise the string form is run through the decorator when present.
     */
    static String evaluateExpression(final Tree tree, final Evaluator<?> rootEvaluator, final String queryText, final EvaluationContext evaluationContext,
                                     final AttributeValueDecorator decorator) throws ProcessException {
        Query query = new Query(queryText, tree, rootEvaluator);
        final Object evaluated = query.evaluate(evaluationContext).getValue();
        if (evaluated == null) {
            return null;
        }

        final String value = evaluated.toString();
        return decorator == null ? value : decorator.decorate(value);
    }

    static String evaluateExpressions(final String rawValue, Map<String, String> expressionMap, final AttributeValueDecorator decorator, final Map<String, String> stateVariables,
                                      final ParameterLookup parameterLookup) throws ProcessException {
        return Query.prepare(rawValue).evaluateExpressions(new StandardEvaluationContext(expressionMap, stateVariables, parameterLookup), decorator);
    }

    static String evaluateExpressions(final String rawValue, final Map<String, String> valueLookup, final ParameterLookup parameterLookup) throws ProcessException {
        return evaluateExpressions(rawValue, valueLookup, null, parameterLookup);
    }

    static String evaluateExpressions(final String rawValue, final Map<String, String> valueLookup, final AttributeValueDecorator decorator, final ParameterLookup parameterLookup)
            throws ProcessException {
        return Query.prepare(rawValue).evaluateExpressions(new StandardEvaluationContext(valueLookup, Collections.emptyMap(), parameterLookup), decorator);
    }

    /**
     * Un-escapes ${...} patterns that were escaped
     *
     * @param value to un-escape
     * @return un-escaped value
     */
    public static String unescape(final String value) {
        return value.replaceAll("\\$\\$(?=\\$*\\{.*?\\})", "\\$");
    }

    /** Builds a Query directly from an already-parsed tree, compiling a fresh evaluator for it. */
    public static Query fromTree(final Tree tree, final String text) {
        final ExpressionCompiler compiler = new ExpressionCompiler();
        return new Query(text, tree, compiler.buildEvaluator(tree));
    }

    /**
     * Collapses $$ pairs that occur before the first '{' (or throughout, if there is no '{'):
     * only leading dollar signs can escape an Expression start.
     */
    private static String unescapeLeadingDollarSigns(final String value) {
        final int index = value.indexOf("{");
        if (index < 0) {
            return value.replace("$$", "$");
        } else {
            final String prefix = value.substring(0, index);
            return prefix.replace("$$", "$") + value.substring(index);
        }
    }

    /**
     * Collapses $$ pairs at the very end of the value (each pair becomes one '$').
     * When the entire value is nothing but '$' characters, it is left untouched unless
     * escapeIfAllDollars is set, since such a run was already handled by
     * {@link #unescapeLeadingDollarSigns(String)}.
     */
    private static String unescapeTrailingDollarSigns(final String value, final boolean escapeIfAllDollars) {
        if (!value.endsWith("$")) {
            return value;
        }

        // count number of $$ at end of string
        int dollars = 0;
        for (int i=value.length()-1; i >= 0; i--) {
            final char c = value.charAt(i);
            if (c == '$') {
                dollars++;
            } else {
                break;
            }
        }

        // If the given argument consists solely of $ characters, then we
        // leave it as-is unless the caller explicitly asked for escaping.
        if (dollars == value.length() && !escapeIfAllDollars) {
            return value;
        }

        final int charsToRemove = dollars / 2;
        final int newLength = value.length() - charsToRemove;
        return value.substring(0, newLength);
    }

    /** Like {@link #prepare(String)} but escapes parameter references up front. */
    public static PreparedQuery prepareWithParametersPreEvaluated(final String query) throws AttributeExpressionLanguageParsingException {
        return prepare(query, true);
    }

    public static PreparedQuery prepare(final String query) throws AttributeExpressionLanguageParsingException {
        return prepare(query, false);
    }

    /**
     * Splits the raw query into literal / parameter / compiled-expression pieces and wraps them
     * in a PreparedQuery. Two paths: (1) no Expressions at all — only escaped sequences and
     * parameter references are handled (sensitive parameters allowed); (2) at least one
     * Expression — each is compiled and interleaved with the surrounding literals
     * (sensitive parameters disallowed inside).
     */
    private static PreparedQuery prepare(final String rawQuery, final boolean escapeParameterReferences) throws AttributeExpressionLanguageParsingException {
        if (rawQuery == null) {
            return new EmptyPreparedQuery(null);
        }

        final ParameterParser parameterParser = new ExpressionLanguageAwareParameterParser();
        final String query;
        if (escapeParameterReferences) {
            query = parameterParser.parseTokens(rawQuery).escape();
        } else {
            query = rawQuery;
        }

        final List<Range> ranges = extractExpressionRanges(query);
        if (ranges.isEmpty()) {
            // No Expressions: only escape sequences and parameter references to process.
            final List<Expression> expressions = new ArrayList<>();

            final List<Range> escapedRanges = extractEscapedRanges(query);
            int lastIndex = 0;
            // NOTE(review): lastIndex is never advanced inside this loop, so the guard
            // below only fires for text before the FIRST escaped range — verify whether
            // text between escaped ranges is handled elsewhere or silently dropped.
            for (final Range range : escapedRanges) {
                final String treeText = unescapeLeadingDollarSigns(query.substring(range.getStart(), range.getEnd() + 1));
                if (range.getStart() > lastIndex) {
                    String substring = unescapeLeadingDollarSigns(query.substring(lastIndex, range.getStart()));
                    addLiteralsAndParameters(parameterParser, substring, expressions, true);
                }
                addLiteralsAndParameters(parameterParser, treeText, expressions, true);
            }

            if (escapedRanges.isEmpty()) {
                addLiteralsAndParameters(parameterParser, query, expressions, true);
            } else {
                // Trailing text after the last escaped range.
                final Range lastRange = escapedRanges.get(escapedRanges.size() - 1);
                if (lastRange.getEnd() + 1 < query.length()) {
                    final String treeText = unescapeLeadingDollarSigns(query.substring(lastRange.getEnd() + 1));
                    addLiteralsAndParameters(parameterParser, treeText, expressions, true);
                }
            }

            if (expressions.isEmpty()) {
                return new EmptyPreparedQuery(query);
            }

            return new StandardPreparedQuery(expressions);
        }

        final ExpressionCompiler compiler = new ExpressionCompiler();
        try {
            final List<Expression> expressions = new ArrayList<>();

            int lastIndex = 0;
            for (final Range range : ranges) {
                final String treeText = unescapeLeadingDollarSigns(query.substring(range.getStart(), range.getEnd() + 1));
                final CompiledExpression compiledExpression = compiler.compile(treeText);

                if (range.getStart() > lastIndex) {
                    String substring = unescapeLeadingDollarSigns(query.substring(lastIndex, range.getStart()));

                    // If this string literal evaluator immediately precedes an Attribute Reference, then we need to consider the String Literal to be
                    // Escaping if it ends with $$'s, otherwise not. We also want to avoid un-escaping if the expression consists solely of $$, because
                    // those would have been addressed by the previous #unescapeLeadingDollarSigns() call already.
                    if (compiledExpression.getRootEvaluator() instanceof AttributeEvaluator) {
                        substring = unescapeTrailingDollarSigns(substring, false);
                    }

                    // Do not allow sensitive parameters to be referenced because this is within an actual Expression.
                    // For example, ${#{sensitiveParam}} is not allowed. However, we do support referencing sensitive parameters
                    // for the use case of simply #{sensitiveParam} outside of an Expression. In such a case, the PreparedQuery will
                    // still be used to evaluate this, since all Property Values are evaluated through PreparedQueries.
                    addLiteralsAndParameters(parameterParser, substring, expressions, false);
                }

                expressions.add(compiledExpression);

                lastIndex = range.getEnd() + 1;
            }

            // Trailing text after the last Expression.
            final Range lastRange = ranges.get(ranges.size() - 1);
            if (lastRange.getEnd() + 1 < query.length()) {
                final String treeText = unescapeLeadingDollarSigns(query.substring(lastRange.getEnd() + 1));
                addLiteralsAndParameters(parameterParser, treeText, expressions, false);
            }

            return new StandardPreparedQuery(expressions);
        } catch (final AttributeExpressionLanguageParsingException e) {
            return new InvalidPreparedQuery(query, e.getMessage());
        }
    }

    /**
     * Tokenizes the input into parameter references, escape sequences, and plain literals,
     * appending an Expression for each piece. allowSensitiveParameterReference is propagated
     * into each ParameterExpression.
     */
    private static void addLiteralsAndParameters(final ParameterParser parser, final String input, final List<Expression> expressions, final boolean allowSensitiveParameterReference) {
        final ParameterTokenList references = parser.parseTokens(input);
        int index = 0;
        ParameterToken lastReference = null;

        for (final ParameterToken token : references) {
            if (token.isEscapeSequence()) {
                // Escape sequences resolve to their literal value regardless of any lookup.
                expressions.add(new StringLiteralExpression(token.getValue(ParameterLookup.EMPTY)));
                index = token.getEndOffset() + 1;
                lastReference = token;
                continue;
            }

            // Literal text between the previous token and this one.
            final int start = token.getStartOffset();
            if (start > index) {
                expressions.add(new StringLiteralExpression(input.substring(index, start)));
            }

            if (token.isParameterReference()) {
                final ParameterReference parameterReference = (ParameterReference) token;
                expressions.add(new ParameterExpression(parameterReference.getParameterName(), allowSensitiveParameterReference));
            } else {
                expressions.add(new StringLiteralExpression(token.getValue(ParameterLookup.EMPTY)));
            }

            index = token.getEndOffset() + 1;
            lastReference = token;
        }

        if (lastReference == null) {
            // No tokens at all: the entire input is one literal.
            expressions.add(new StringLiteralExpression(input));
        } else if (input.length() > lastReference.getEndOffset() + 1) {
            // Literal tail after the last token.
            expressions.add(new StringLiteralExpression(input.substring(lastReference.getEndOffset() + 1)));
        }
    }

    /**
     * Compiles a single Expression into a Query; parsing exceptions propagate as-is while
     * any other failure is wrapped in AttributeExpressionLanguageParsingException.
     */
    public static Query compile(final String query) throws AttributeExpressionLanguageParsingException {
        try {
            final ExpressionCompiler compiler = new ExpressionCompiler();
            final CompiledExpression compiledExpression = compiler.compile(query);
            return new Query(compiledExpression.getExpression(), compiledExpression.getTree(), compiledExpression.getRootEvaluator());
        } catch (final AttributeExpressionLanguageParsingException e) {
            throw e;
        } catch (final Exception e) {
            throw new AttributeExpressionLanguageParsingException(e);
        }
    }

    public ResultType getResultType() {
        return evaluator.getResultType();
    }

    /**
     * Evaluates this Query. May be called at most once; a second call throws
     * IllegalStateException (enforced via the {@code evaluated} flag).
     */
    QueryResult<?> evaluate(final EvaluationContext evaluationContext) {
        if (evaluated.getAndSet(true)) {
            throw new IllegalStateException("A Query cannot be evaluated more than once");
        }

        return evaluator.evaluate(evaluationContext);
    }

    Tree getTree() {
        return this.tree;
    }

    @Override
    public String toString() {
        return "Query [" + query + "]";
    }

    /** Inclusive [start, end] character span within a query string. */
    public static class Range {

        private final int start;
        private final int end;

        public Range(final int start, final int end) {
            this.start = start;
            this.end = end;
        }

        public int getStart() {
            return start;
        }

        public int getEnd() {
            return end;
        }

        @Override
        public String toString() {
            return start + " - " + end;
        }
    }
}
/* * Copyright (c) 2008, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package sun.nio.cs; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.BufferedReader; import java.io.IOException; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.*; import java.security.*; public class CharsetMapping { public static final char UNMAPPABLE_DECODING = '\uFFFD'; public static final int UNMAPPABLE_ENCODING = 0xFFFD; char[] b2cSB; //singlebyte b->c char[] b2cDB1; //dobulebyte b->c /db1 char[] b2cDB2; //dobulebyte b->c /db2 int b2Min, b2Max; //min/max(start/end) value of 2nd byte int b1MinDB1, b1MaxDB1; //min/Max(start/end) value of 1st byte/db1 int b1MinDB2, b1MaxDB2; //min/Max(start/end) value of 1st byte/db2 int dbSegSize; char[] c2b; char[] c2bIndex; // Supplementary char[] b2cSupp; char[] c2bSupp; // Composite Entry[] b2cComp; Entry[] c2bComp; public char decodeSingle(int b) { return b2cSB[b]; } public char decodeDouble(int b1, int b2) { if (b2 >= b2Min && b2 < b2Max) { b2 -= b2Min; if (b1 >= b1MinDB1 && b1 <= b1MaxDB1) { b1 -= b1MinDB1; return b2cDB1[b1 * dbSegSize + b2]; } if (b1 >= b1MinDB2 && b1 <= b1MaxDB2) { b1 -= b1MinDB2; return b2cDB2[b1 * dbSegSize + b2]; } } return UNMAPPABLE_DECODING; } // for jis0213 all supplementary characters are in 0x2xxxx range, // so only the xxxx part is now stored, should actually store the // codepoint value instead. 
public char[] decodeSurrogate(int db, char[] cc) { int end = b2cSupp.length / 2; int i = Arrays.binarySearch(b2cSupp, 0, end, (char)db); if (i >= 0) { Character.toChars(b2cSupp[end + i] + 0x20000, cc, 0); return cc; } return null; } public char[] decodeComposite(Entry comp, char[] cc) { int i = findBytes(b2cComp, comp); if (i >= 0) { cc[0] = (char)b2cComp[i].cp; cc[1] = (char)b2cComp[i].cp2; return cc; } return null; } public int encodeChar(char ch) { int index = c2bIndex[ch >> 8]; if (index == 0xffff) return UNMAPPABLE_ENCODING; return c2b[index + (ch & 0xff)]; } public int encodeSurrogate(char hi, char lo) { int cp = Character.toCodePoint(hi, lo); if (cp < 0x20000 || cp >= 0x30000) return UNMAPPABLE_ENCODING; int end = c2bSupp.length / 2; int i = Arrays.binarySearch(c2bSupp, 0, end, (char)cp); if (i >= 0) return c2bSupp[end + i]; return UNMAPPABLE_ENCODING; } public boolean isCompositeBase(Entry comp) { if (comp.cp <= 0x31f7 && comp.cp >= 0xe6) { return (findCP(c2bComp, comp) >= 0); } return false; } public int encodeComposite(Entry comp) { int i = findComp(c2bComp, comp); if (i >= 0) return c2bComp[i].bs; return UNMAPPABLE_ENCODING; } // init the CharsetMapping object from the .dat binary file @SuppressWarnings("removal") public static CharsetMapping get(final InputStream is) { return AccessController.doPrivileged(new PrivilegedAction<>() { public CharsetMapping run() { return new CharsetMapping().load(is); } }); } public static class Entry { public int bs; //byte sequence reps public int cp; //Unicode codepoint public int cp2; //CC of composite } static Comparator<Entry> comparatorBytes = new Comparator<Entry>() { public int compare(Entry m1, Entry m2) { return m1.bs - m2.bs; } public boolean equals(Object obj) { return this == obj; } }; static Comparator<Entry> comparatorCP = new Comparator<Entry>() { public int compare(Entry m1, Entry m2) { return m1.cp - m2.cp; } public boolean equals(Object obj) { return this == obj; } }; static Comparator<Entry> 
comparatorComp = new Comparator<Entry>() { public int compare(Entry m1, Entry m2) { int v = m1.cp - m2.cp; if (v == 0) v = m1.cp2 - m2.cp2; return v; } public boolean equals(Object obj) { return this == obj; } }; static int findBytes(Entry[] a, Entry k) { return Arrays.binarySearch(a, 0, a.length, k, comparatorBytes); } static int findCP(Entry[] a, Entry k) { return Arrays.binarySearch(a, 0, a.length, k, comparatorCP); } static int findComp(Entry[] a, Entry k) { return Arrays.binarySearch(a, 0, a.length, k, comparatorComp); } /*****************************************************************************/ // tags of different charset mapping tables private static final int MAP_SINGLEBYTE = 0x1; // 0..256 : c private static final int MAP_DOUBLEBYTE1 = 0x2; // min..max: c private static final int MAP_DOUBLEBYTE2 = 0x3; // min..max: c [DB2] private static final int MAP_SUPPLEMENT = 0x5; // db,c private static final int MAP_SUPPLEMENT_C2B = 0x6; // c,db private static final int MAP_COMPOSITE = 0x7; // db,base,cc private static final int MAP_INDEXC2B = 0x8; // index table of c->bb private static final boolean readNBytes(InputStream in, byte[] bb, int N) throws IOException { int off = 0; while (N > 0) { int n = in.read(bb, off, N); if (n == -1) return false; N = N - n; off += n; } return true; } int off = 0; byte[] bb; private char[] readCharArray() { // first 2 bytes are the number of "chars" stored in this table int size = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); char [] cc = new char[size]; for (int i = 0; i < size; i++) { cc[i] = (char)(((bb[off++]&0xff)<<8) | (bb[off++]&0xff)); } return cc; } void readSINGLEBYTE() { char[] map = readCharArray(); for (int i = 0; i < map.length; i++) { char c = map[i]; if (c != UNMAPPABLE_DECODING) { c2b[c2bIndex[c >> 8] + (c&0xff)] = (char)i; } } b2cSB = map; } void readINDEXC2B() { char[] map = readCharArray(); for (int i = map.length - 1; i >= 0; i--) { if (c2b == null && map[i] != -1) { c2b = new char[map[i] + 256]; 
Arrays.fill(c2b, (char)UNMAPPABLE_ENCODING); break; } } c2bIndex = map; } char[] readDB(int b1Min, int b2Min, int segSize) { char[] map = readCharArray(); for (int i = 0; i < map.length; i++) { char c = map[i]; if (c != UNMAPPABLE_DECODING) { int b1 = i / segSize; int b2 = i % segSize; int b = (b1 + b1Min)* 256 + (b2 + b2Min); //System.out.printf(" DB %x\t%x%n", b, c & 0xffff); c2b[c2bIndex[c >> 8] + (c&0xff)] = (char)(b); } } return map; } void readDOUBLEBYTE1() { b1MinDB1 = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); b1MaxDB1 = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); b2Min = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); b2Max = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); dbSegSize = b2Max - b2Min + 1; b2cDB1 = readDB(b1MinDB1, b2Min, dbSegSize); } void readDOUBLEBYTE2() { b1MinDB2 = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); b1MaxDB2 = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); b2Min = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); b2Max = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff); dbSegSize = b2Max - b2Min + 1; b2cDB2 = readDB(b1MinDB2, b2Min, dbSegSize); } void readCOMPOSITE() { char[] map = readCharArray(); int mLen = map.length/3; b2cComp = new Entry[mLen]; c2bComp = new Entry[mLen]; for (int i = 0, j= 0; i < mLen; i++) { Entry m = new Entry(); m.bs = map[j++]; m.cp = map[j++]; m.cp2 = map[j++]; b2cComp[i] = m; c2bComp[i] = m; } Arrays.sort(c2bComp, 0, c2bComp.length, comparatorComp); } CharsetMapping load(InputStream in) { try { // The first 4 bytes are the size of the total data followed in // this .dat file. 
        // Big-endian 32-bit total length. NOTE(review): in.read() returns -1
        // at EOF, which (&0xff) turns into 0xff here — a truncated header is
        // not detected until the later readNBytes check.
        int len = ((in.read()&0xff) << 24) |
                  ((in.read()&0xff) << 16) |
                  ((in.read()&0xff) << 8) |
                  (in.read()&0xff);
        bb = new byte[len];
        off = 0;
        //System.out.printf("In : Total=%d%n", len);
        // Read in all bytes
        if (!readNBytes(in, bb, len))
            throw new RuntimeException("Corrupted data file");
        in.close();

        // Each table starts with a 2-byte type tag; dispatch per tag until
        // the whole buffer has been consumed.
        while (off < len) {
            int type = ((bb[off++]&0xff)<<8) | (bb[off++]&0xff);
            switch(type) {
            case MAP_INDEXC2B:       readINDEXC2B(); break;
            case MAP_SINGLEBYTE:     readSINGLEBYTE(); break;
            case MAP_DOUBLEBYTE1:    readDOUBLEBYTE1(); break;
            case MAP_DOUBLEBYTE2:    readDOUBLEBYTE2(); break;
            case MAP_SUPPLEMENT:     b2cSupp = readCharArray(); break;
            case MAP_SUPPLEMENT_C2B: c2bSupp = readCharArray(); break;
            case MAP_COMPOSITE:      readCOMPOSITE(); break;
            default:                 throw new RuntimeException("Corrupted data file");
            }
        }
        bb = null;  // release the parse buffer; tables hold the data now
        return this;
    } catch (IOException x) {
        x.printStackTrace();
        return null;
    }
}
}
package com.datdo.mobilib.util;

import android.app.ActivityManager;
import android.content.Context;
import android.graphics.Bitmap;
import android.support.v4.util.LruCache;
import android.text.TextUtils;
import android.util.Log;
import android.util.Pair;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import android.widget.ImageView;
import android.widget.ListView;

import com.nineoldandroids.animation.ObjectAnimator;

import junit.framework.Assert;

import java.util.Vector;

/**
 * <pre>
 * DEPRECATED. Should use {@link com.datdo.mobilib.util.MblSimpleImageLoader} instead.
 *
 * Smart loader to display images for child views in a {@link ViewGroup}.
 * Features of this loader:
 *   1. Loads images sequentially, one at a time.
 *   2. Automatically scales images to match the size of the target {@link ImageView}.
 *   3. Caches images using {@link LruCache}.
 *   4. Prioritizes loading by last-recently-displayed: an {@link ImageView} currently displayed
 *      has higher priority than one that is no longer displayed.
 *      This is very useful when the user scrolls a {@link ListView}.
 * Override the abstract methods to customize this loader.
 *
 * Here is sample usage of this loader:
 *
 * <code>
 * public class MyAdapter extends BaseAdapter {
 *
 *     private MblImageLoader{@literal <}Item> mItemImageLoader = new MblImageLoader{@literal <}Item>() {
 *         // override all abstract methods
 *         // ...
 *     };
 *
 *     {@literal @}Override
 *     public View getView(int pos, View convertView, ViewGroup parent) {
 *         // create or update view
 *         // ...
 *
 *         mItemImageLoader.loadImage(view);
 *
 *         return view;
 *     }
 * }
 * </code>
 * </pre>
 * @param <T> class of the data object bound with each child view of the {@link ViewGroup}
 */
@Deprecated
public abstract class MblImageLoader<T> {

    /**
     * <pre>
     * Check condition to load image for an item.
     * </pre>
     * @return true if an image should be loaded for this item
     */
    protected abstract boolean shouldLoadImageForItem(T item);

    /**
     * <pre>
     * Get resource id of the default image for items that do not need image loading.
     * </pre>
     * @see #shouldLoadImageForItem(Object)
     */
    protected abstract int getDefaultImageResource(T item);

    /**
     * <pre>
     * Get resource id of the image shown for items whose image failed to load.
     * </pre>
     */
    protected abstract int getErrorImageResource(T item);

    /**
     * <pre>
     * Get resource id of the placeholder image shown while an item's image is being loaded.
     * </pre>
     */
    protected abstract int getLoadingIndicatorImageResource(T item);

    /**
     * <pre>
     * Get data object bound with each child view.
     * </pre>
     */
    protected abstract T getItemBoundWithView(View view);

    /**
     * <pre>
     * Extract the {@link ImageView} used to display the image from each child view.
     * </pre>
     */
    protected abstract ImageView getImageViewFromView(View view);

    /**
     * <pre>
     * Specify an ID for each data object. The ID is used for caching, so please make it unique throughout the app.
     * </pre>
     */
    protected abstract String getItemId(T item);

    /**
     * <pre>
     * Do your own image loading here (from HTTP/HTTPS, from file, etc...).
     * This method is always invoked on the main thread; it is strongly recommended to do the loading asynchronously.
     * </pre>
     * @param item the item whose image should be loaded
     * @param cb call a method of this callback when the loading has finished
     */
    protected abstract void retrieveImage(T item, MblRetrieveImageCallback cb);

    /**
     * <pre>
     * Callback class for {@link MblImageLoader#retrieveImage(Object, MblRetrieveImageCallback)}
     * When loading finishes, invoke whichever method matches the returned data.
     * If loading failed, invoke any method with a NULL argument.
     * </pre>
     */
    public static interface MblRetrieveImageCallback {
        public void onRetrievedByteArray(byte[] bmData);
        public void onRetrievedBitmap(Bitmap bm);
        public void onRetrievedFile(String path);
    }

    private static final String TAG = MblUtils.getTag(MblImageLoader.class);
    private static final int DEFAULT_CACHE_SIZE = 2 * 1024 * 1024; // 2MB
    private static final String CACHE_KEY_SEPARATOR = "#";

    // Cache entry: exactly one of resId (> 0) or bitmap is set, never both —
    // enforced by the constructor asserts.
    private static final class MblCachedImageData {
        public int resId = 0;
        public Bitmap bitmap;
        protected MblCachedImageData(int resId, Bitmap bitmap) {
            Assert.assertTrue(resId > 0 || bitmap != null);
            Assert.assertFalse(resId > 0 && bitmap != null);
            this.resId = resId;
            this.bitmap = bitmap;
        }
    }

    // Pending (item, view) load requests, processed sequentially by loadNextImage().
    private final Vector<Pair<T, View>> mQueue = new Vector<Pair<T,View>>();
    // True while one load is in flight; guards against concurrent loads.
    private boolean mLoadingImage = false;

    // Process-wide image cache shared by all loader instances.
    private static LruCache<String, MblCachedImageData> sStringPictureLruCache;
    private static boolean sDoubleCacheSize = false;

    // Lazily creates the shared cache, sized to 1/8 of the app's memory class
    // (falling back to DEFAULT_CACHE_SIZE when no context is available).
    private static void initCacheIfNeeded() {
        if (sStringPictureLruCache == null) {
            Context context = MblUtils.getCurrentContext();
            int cacheSize = DEFAULT_CACHE_SIZE;
            if (context != null) {
                ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
                int memoryClassBytes = am.getMemoryClass() * 1024 * 1024;
                cacheSize = memoryClassBytes / 8;
            }
            if (sDoubleCacheSize) {
                cacheSize = cacheSize * 2;
            }
            sStringPictureLruCache = new LruCache<String, MblCachedImageData>(cacheSize) {
                @Override
                protected void entryRemoved(boolean evicted, String key, MblCachedImageData oldValue, MblCachedImageData newValue) {
                    Log.v(TAG, "Image cache size: " + size());
                }
                @Override
                protected int sizeOf(String key, MblCachedImageData value) {
                    // bitmaps cost their pixel size; resource-id entries a nominal 4 bytes
                    if (value.bitmap != null) {
                        Bitmap bm = value.bitmap;
                        return bm.getRowBytes() * bm.getHeight();
                    } else if (value.resId > 0) {
                        return 4;
                    }
                    return 0;
                }
            };
        }
    }

    // Removes one cache entry; synchronized like put() for compound-op safety.
    private static MblCachedImageData remove(String key) {
        synchronized (sStringPictureLruCache) {
            return sStringPictureLruCache.remove(key);
        }
    }

    private static void
    put(String key, MblCachedImageData val) {
        synchronized (sStringPictureLruCache) {
            sStringPictureLruCache.put(key, val);
            Log.v(TAG, "Image cache size: " + sStringPictureLruCache.size());
        }
    }

    // NOTE(review): get() is not synchronized while put()/remove() are —
    // presumably relying on LruCache's own internal locking; verify.
    private static MblCachedImageData get(String key) {
        return sStringPictureLruCache.get(key);
    }

    public MblImageLoader() {
        initCacheIfNeeded();
    }

    /**
     * <pre>
     * Double memory-cache 's size to increase number of bitmap being kept in memory.
     * Call this method before creating any instance.
     * </pre>
     */
    public static void doubleCacheSize() {
        if (sStringPictureLruCache != null) {
            throw new RuntimeException("doubleCacheSize() must be called before first instance of this class being created");
        }
        sDoubleCacheSize = true;
    }

    /**
     * <pre>
     * Stop loading. This methods should be called when the view did disappear.
     * </pre>
     */
    public void stop() {
        synchronized (mQueue) {
            mQueue.clear();
        }
    }

    /**
     * <pre>
     * Request loading for a child view.
     * The loading request is put into a queue and executed sequentially.
     * </pre>
     * @param view the child view for which you want to load image
     */
    public void loadImage(final View view) {
        MblUtils.executeOnMainThread(new Runnable() {
            @Override
            public void run() {

                T item = getItemBoundWithView(view);
                final ImageView imageView = getImageViewFromView(view);
                if (item == null || imageView == null) return;

                if (!shouldLoadImageForItem(item)) {
                    setImageViewResource(imageView, getDefaultImageResource(item));
                    return;
                }

                int w = getImageViewWidth(imageView);
                int h = getImageViewHeight(imageView);
                if (w == 0 && h == 0) {
                    // view not laid out yet: retry after the next global layout,
                    // with a 500ms timeout fallback so we never stall forever
                    final Runnable[] timeoutAction = new Runnable[] { null };
                    final OnGlobalLayoutListener globalLayoutListener = new OnGlobalLayoutListener() {
                        @Override
                        public void onGlobalLayout() {
                            MblUtils.removeOnGlobalLayoutListener(imageView, this);
                            MblUtils.getMainThreadHandler().removeCallbacks(timeoutAction[0]);
                            loadImage(view);
                        }
                    };
                    timeoutAction[0] = new Runnable() {
                        @Override
                        public void run() {
                            MblUtils.removeOnGlobalLayoutListener(imageView, globalLayoutListener);
                            loadImage(view);
                        }
                    };
                    imageView.getViewTreeObserver().addOnGlobalLayoutListener(globalLayoutListener);
                    MblUtils.getMainThreadHandler().postDelayed(timeoutAction[0], 500l);
                    return;
                }

                // serve from cache when possible; otherwise enqueue a real load
                String fullCacheKey = getFullCacheKey(item, w, h);
                MblCachedImageData pic = get(fullCacheKey);
                if(pic != null) {
                    if (pic.bitmap != null) {
                        Bitmap bm = pic.bitmap;
                        if (!bm.isRecycled()) {
                            imageView.setImageBitmap(bm);
                        } else {
                            // stale entry whose bitmap was recycled: evict and reload
                            remove(fullCacheKey);
                            handleBitmapUnavailable(view, imageView, item);
                        }
                    } else if (pic.resId > 0) {
                        setImageViewResource(imageView, pic.resId);
                    }
                } else {
                    handleBitmapUnavailable(view, imageView, item);
                }
            }
        });
    }

    // Shows the loading placeholder, queues the request, and kicks the queue.
    private void handleBitmapUnavailable(View view, ImageView imageView, T item) {
        setImageViewResource(imageView, getLoadingIndicatorImageResource(item));
        synchronized (mQueue) {
            mQueue.add(new Pair<T, View>(item, view));
        }
        loadNextImage();
    }

    // Pops the oldest pending request, or null when the queue is empty.
    private Pair<T, View> getNextPair() {
        synchronized (mQueue) {
            if (mQueue.isEmpty()) {
                return null;
            } else {
                return mQueue.remove(0);
            }
        }
    }

    // True when 'view' is still bound to 'item' (same object, or same item id) —
    // guards against recycled list views whose binding changed while queued.
    private boolean isItemBoundWithView(T item, View view) {

        // if item and view 's item are same object, just return TRUE
        T viewItem = getItemBoundWithView(view);
        if (item != null && item == viewItem) {
            return true;
        }

        // otherwise, compare id
        String id1 = item != null ? getItemId(item) : null;
        String id2 = viewItem != null ?
                getItemId(viewItem) : null;
        return id1 != null && id2 != null && TextUtils.equals(id1, id2);
    }

    // Drains the queue one request at a time. Invariant: at most one retrieve
    // is in flight (mLoadingImage); continuation is always re-posted to the
    // main thread to avoid deep recursion.
    private void loadNextImage() {

        if (mLoadingImage) return;

        Pair<T, View> pair = getNextPair();
        if (pair == null) return;

        final T item = pair.first;
        final View view = pair.second;
        final ImageView imageView = getImageViewFromView(view);

        // view was recycled and rebound to another item: skip this request
        if (!isItemBoundWithView(item, view)) {
            MblUtils.getMainThreadHandler().post(new Runnable() {
                @Override
                public void run() {
                    loadNextImage();
                }
            });
            return;
        }

        if (!shouldLoadImageForItem(item)) {
            setImageViewResource(imageView, getDefaultImageResource(item));
            MblUtils.getMainThreadHandler().post(new Runnable() {
                @Override
                public void run() {
                    loadNextImage();
                }
            });
            return;
        }

        final String fullCacheKey = getFullCacheKey(
                item,
                getImageViewWidth(imageView),
                getImageViewHeight(imageView));
        MblCachedImageData pic = get(fullCacheKey);
        if(pic != null) {
            boolean isSet = false;
            if (pic.bitmap != null) {
                Bitmap bm = pic.bitmap;
                if (!bm.isRecycled()) {
                    imageView.setImageBitmap(bm);
                    isSet = true;
                } else {
                    remove(fullCacheKey);
                }
            } else if (pic.resId > 0) {
                setImageViewResource(imageView, pic.resId);
                isSet = true;
            }
            if (isSet) {
                MblUtils.getMainThreadHandler().post(new Runnable() {
                    @Override
                    public void run() {
                        loadNextImage();
                    }
                });
                return;
            }
        }

        mLoadingImage = true;
        // snapshot connectivity now: a failure while offline is retried later
        final boolean isNetworkConnected = MblUtils.isNetworkConnected();
        retrieveImage(item, new MblRetrieveImageCallback() {

            @Override
            public void onRetrievedByteArray(final byte[] bmData) {
                if (MblUtils.isEmpty(bmData)) {
                    handleBadReturnedBitmap(item, view, fullCacheKey, !isNetworkConnected);
                } else {
                    // decode + scale off the main thread
                    MblUtils.executeOnAsyncThread(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                int w = getImageViewWidth(imageView);
                                int h = getImageViewHeight(imageView);
                                Bitmap bm = MblUtils.loadBitmapMatchSpecifiedSize(w, h, bmData);
                                if (bm == null) {
                                    handleBadReturnedBitmap(item, view, fullCacheKey, !isNetworkConnected);
                                } else {
                                    Log.d(TAG, "Scale bitmap: w=" + w + ", h=" + h + ", bm.w=" + bm.getWidth() + ", bm.h=" + bm.getHeight());
                                    handleGoodReturnedBitmap(item, view, fullCacheKey, bm);
                                }
                            } catch (OutOfMemoryError e) {
                                Log.e(TAG, "OutOfMemoryError", e);
                                handleOutOfMemory(item, view, fullCacheKey);
                            }
                        }
                    });
                }
            }

            @Override
            public void onRetrievedFile(final String path) {
                if (MblUtils.isEmpty(path)) {
                    handleBadReturnedBitmap(item, view, fullCacheKey, !isNetworkConnected);
                } else {
                    MblUtils.executeOnAsyncThread(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                int w = getImageViewWidth(imageView);
                                int h = getImageViewHeight(imageView);
                                Bitmap bm = MblUtils.loadBitmapMatchSpecifiedSize(w, h, path);
                                if (bm == null) {
                                    handleBadReturnedBitmap(item, view, fullCacheKey, !isNetworkConnected);
                                } else {
                                    Log.d(TAG, "Scale bitmap: w=" + w + ", h=" + h + ", bm.w=" + bm.getWidth() + ", bm.h=" + bm.getHeight());
                                    handleGoodReturnedBitmap(item, view, fullCacheKey, bm);
                                }
                            } catch (OutOfMemoryError e) {
                                Log.e(TAG, "OutOfMemoryError", e);
                                handleOutOfMemory(item, view, fullCacheKey);
                            }
                        }
                    });
                }
            }

            @Override
            public void onRetrievedBitmap(Bitmap bm) {
                if (bm == null) {
                    handleBadReturnedBitmap(item, view, fullCacheKey, !isNetworkConnected);
                } else {
                    handleGoodReturnedBitmap(item, view, fullCacheKey, bm);
                }
            }
        });
    }

    // Caches the decoded bitmap and refreshes the view (on the main thread).
    private void handleGoodReturnedBitmap(final T item, final View view, final String fullCacheKey, final Bitmap bm) {
        MblUtils.executeOnMainThread(new Runnable() {
            @Override
            public void run() {
                put(fullCacheKey, new MblCachedImageData(0, bm));
                postLoadImageForItem(item, view);
            }
        });
    }

    // Caches the error image (if any) so the failure is not retried immediately;
    // when the failure was due to no connectivity, the entry is evicted again so
    // a later loadImage() call can retry.
    private void handleBadReturnedBitmap(final T item, final View view, final String fullCacheKey, final boolean shouldRetry) {
        MblUtils.executeOnMainThread(new Runnable() {
            @Override
            public void run() {
                int errorImageRes = getErrorImageResource(item);
                if (errorImageRes > 0) {
                    put(fullCacheKey, new MblCachedImageData(errorImageRes, null));
                }
                postLoadImageForItem(item, view);

                // failed due to network disconnect -> should try to load later
                if (shouldRetry) {
                    MblUtils.getMainThreadHandler().post(new Runnable() {
                        @Override
                        public void run() {
                            remove(fullCacheKey);
                        }
                    });
                }
            }
        });
    }

    // OOM recovery: shrink the cache by half, hint GC, then fall through to
    // the error path (with retry enabled).
    private void handleOutOfMemory(final T item, final View view, final String fullCacheKey) {
        MblUtils.executeOnMainThread(new Runnable() {
            @Override
            public void run() {
                // release 1/2 of cache size for memory
                synchronized (sStringPictureLruCache) {
                    sStringPictureLruCache.trimToSize(sStringPictureLruCache.size()/2);
                }
                System.gc();
                handleBadReturnedBitmap(item, view, fullCacheKey, true);
            }
        });
    }

    // After a load finishes: refresh the view if it is still bound to the item,
    // then release the in-flight flag and continue with the queue.
    private void postLoadImageForItem(final T item, final View view) {
        if (isItemBoundWithView(item, view)) {
            loadImage(view);
            animateImageView(getImageViewFromView(view));
        }

        // run loadNextImage() using "post" to prevent deep recursion
        MblUtils.getMainThreadHandler().post(new Runnable() {
            @Override
            public void run() {
                mLoadingImage = false;
                loadNextImage();
            }
        });
    }

    // resId <= 0 clears the ImageView instead of setting a resource.
    private void setImageViewResource(ImageView imageView, int resId) {
        if (resId <= 0) {
            imageView.setImageBitmap(null);
        } else {
            imageView.setImageResource(resId);
        }
    }

    // Cache key: <class>#<md5(itemId)>#<w>#<h> — size-dependent so the same
    // item cached at different ImageView sizes gets separate entries.
    private String getFullCacheKey(T item, int w, int h) {
        String key = TextUtils.join(CACHE_KEY_SEPARATOR, new Object[] {
                item.getClass().getSimpleName(),
                MblUtils.md5(getItemId(item)),
                w,
                h
        });
        return key;
    }

    private int getImageViewWidth(ImageView imageView) {
        LayoutParams lp = imageView.getLayoutParams();
        if (lp.width == LayoutParams.WRAP_CONTENT) {
            return -1; // do not care
        } else if (lp.width == LayoutParams.MATCH_PARENT){
            return imageView.getWidth(); // 0 or parent 's width
        } else {
            return lp.width; // specified width
        }
    }

    private int getImageViewHeight(ImageView imageView) {
        LayoutParams lp = imageView.getLayoutParams();
        if (lp.height == LayoutParams.WRAP_CONTENT) {
            return -1; // do not care
        } else if (lp.height == LayoutParams.MATCH_PARENT){
            return imageView.getHeight(); // 0 or parent 's height
        } else {
            return lp.height; // specified height
        }
    }

    protected void animateImageView(ImageView imageView) {
        // animation alpha 0 -> 1
        ObjectAnimator.ofFloat(imageView, "alpha", 0, 1)
        .setDuration(250)
        .start();
    }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.client.security.user.privileges; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Represents privileges over indices. There is a canonical set of privilege * names (eg. {@code IndicesPrivileges#READ_PRIVILEGE_NAME}) but there is * flexibility in the definition of finer grained, more specialized, privileges. * This also encapsulates field and document level security privileges. These * allow to control what fields or documents are readable or queryable. 
 */
public final class IndicesPrivileges extends AbstractIndicesPrivileges implements ToXContentObject {

    // Parses the five constructor arguments positionally, in the order they
    // are declared in the static block below.
    @SuppressWarnings("unchecked")
    static final ConstructingObjectParser<IndicesPrivileges, Void> PARSER =
        new ConstructingObjectParser<>("indices_privileges", false, constructorObjects -> {
                int i = 0;
                final List<String> indices = (List<String>) constructorObjects[i++];
                final List<String> privileges = (List<String>) constructorObjects[i++];
                final boolean allowRestrictedIndices = (Boolean) constructorObjects[i++];
                final FieldSecurity fields = (FieldSecurity) constructorObjects[i++];
                final String query = (String) constructorObjects[i];
                return new IndicesPrivileges(indices, privileges, allowRestrictedIndices, fields, query);
            });

    static {
        PARSER.declareStringArray(constructorArg(), NAMES);
        PARSER.declareStringArray(constructorArg(), PRIVILEGES);
        PARSER.declareBoolean(constructorArg(), ALLOW_RESTRICTED_INDICES);
        PARSER.declareObject(optionalConstructorArg(), FieldSecurity::parse, FIELD_PERMISSIONS);
        PARSER.declareStringOrNull(optionalConstructorArg(), QUERY);
    }

    // null means no field level security (all fields visible)
    private final FieldSecurity fieldSecurity;
    // missing query means all documents, i.e. no restrictions
    private final @Nullable String query;

    private IndicesPrivileges(List<String> indices, List<String> privileges, boolean allowRestrictedIndices,
                              @Nullable FieldSecurity fieldSecurity, @Nullable String query) {
        super(indices, privileges, allowRestrictedIndices);
        this.fieldSecurity = fieldSecurity;
        this.query = query;
    }

    /**
     * The combination of the {@link FieldSecurity#getGrantedFields() granted} and
     * {@link FieldSecurity#getDeniedFields() denied} document fields.
     * May be null, in which case no field level security is applicable, and all the document's fields are granted access to.
     */
    public FieldSecurity getFieldSecurity() {
        return fieldSecurity;
    }

    /**
     * A query limiting the visible documents in the indices. Can be null, in which
     * case all documents are visible.
     */
    public @Nullable String getQuery() {
        return this.query;
    }

    /**
     * If {@code true} some documents might not be visible. Only the documents
     * matching {@code query} will be readable.
     */
    @Override
    public boolean isUsingDocumentLevelSecurity() {
        return query != null;
    }

    /**
     * If {@code true} some document fields might not be visible.
     */
    @Override
    public boolean isUsingFieldLevelSecurity() {
        return fieldSecurity != null && fieldSecurity.isUsingFieldLevelSecurity();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        IndicesPrivileges that = (IndicesPrivileges) o;
        return indices.equals(that.indices)
            && privileges.equals(that.privileges)
            && allowRestrictedIndices == that.allowRestrictedIndices
            && Objects.equals(this.fieldSecurity, that.fieldSecurity)
            && Objects.equals(query, that.query);
    }

    @Override
    public int hashCode() {
        return Objects.hash(indices, privileges, allowRestrictedIndices, fieldSecurity, query);
    }

    @Override
    public String toString() {
        try {
            return XContentHelper.toXContent(this, XContentType.JSON, true).utf8ToString();
        } catch (IOException e) {
            throw new RuntimeException("Unexpected", e);
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(NAMES.getPreferredName(), indices);
        builder.field(PRIVILEGES.getPreferredName(), privileges);
        builder.field(ALLOW_RESTRICTED_INDICES.getPreferredName(), allowRestrictedIndices);
        if (fieldSecurity != null) {
            builder.field(FIELD_PERMISSIONS.getPreferredName(), fieldSecurity, params);
        }
        // the query is only emitted when document level security is in use
        if (isUsingDocumentLevelSecurity()) {
            builder.field("query", query);
        }
        return builder.endObject();
    }

    public static IndicesPrivileges fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    public static Builder builder() {
        return new Builder();
    }

    public static final class Builder {

        private @Nullable List<String> indices = null;
        private @Nullable
List<String> privileges = null;
        private @Nullable List<String> grantedFields = null;
        private @Nullable List<String> deniedFields = null;
        private @Nullable String query = null;
        boolean allowRestrictedIndices = false;

        /** Creates a builder with every setting unset. */
        public Builder() {
        }

        /** Sets the index name patterns this privilege applies to. */
        public Builder indices(String... indices) {
            Objects.requireNonNull(indices, "indices required");
            return indices(Arrays.asList(indices));
        }

        /** Sets the index name patterns this privilege applies to. */
        public Builder indices(List<String> indices) {
            this.indices = Objects.requireNonNull(indices, "indices required");
            return this;
        }

        /** Sets the privilege names granted over the indices. */
        public Builder privileges(String... privileges) {
            Objects.requireNonNull(privileges, "privileges required");
            return privileges(Arrays.asList(privileges));
        }

        /** Sets the privilege names granted over the indices. */
        public Builder privileges(List<String> privileges) {
            this.privileges = Objects.requireNonNull(privileges, "privileges required");
            return this;
        }

        /** Sets the readable fields; null clears any field grant. */
        public Builder grantedFields(@Nullable String... grantedFields) {
            this.grantedFields = grantedFields == null ? null : Arrays.asList(grantedFields);
            return this;
        }

        /** Sets the readable fields; null clears any field grant. */
        public Builder grantedFields(@Nullable List<String> grantedFields) {
            this.grantedFields = grantedFields;
            return this;
        }

        /** Sets the denied fields; null clears any field denial. */
        public Builder deniedFields(@Nullable String... deniedFields) {
            this.deniedFields = deniedFields == null ? null : Arrays.asList(deniedFields);
            return this;
        }

        /** Sets the denied fields; null clears any field denial. */
        public Builder deniedFields(@Nullable List<String> deniedFields) {
            this.deniedFields = deniedFields;
            return this;
        }

        /** Sets the document level security query; null means no restriction. */
        public Builder query(@Nullable String query) {
            this.query = query;
            return this;
        }

        /** Controls whether restricted indices may be covered by the patterns. */
        public Builder allowRestrictedIndices(boolean allow) {
            this.allowRestrictedIndices = allow;
            return this;
        }

        /**
         * Builds the privilege. Field level security is attached only when
         * granted or denied fields were configured.
         */
        public IndicesPrivileges build() {
            final FieldSecurity fieldSecurity = (grantedFields == null && deniedFields == null)
                    ? null
                    : new FieldSecurity(grantedFields, deniedFields);
            return new IndicesPrivileges(indices, privileges, allowRestrictedIndices, fieldSecurity, query);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file * to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.apache.kafka.clients.producer; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.internals.DefaultPartitioner; import org.apache.kafka.common.config.AbstractConfig; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; import org.apache.kafka.common.serialization.Serializer; import java.util.HashMap; import java.util.Map; import java.util.Properties; import static org.apache.kafka.common.config.ConfigDef.Range.atLeast; import static org.apache.kafka.common.config.ConfigDef.Range.between; import static org.apache.kafka.common.config.ConfigDef.ValidString.in; /** * Configuration for the Kafka Producer. Documentation for these configurations can be found in the <a * href="http://kafka.apache.org/documentation.html#producerconfigs">Kafka documentation</a> */ public class ProducerConfig extends AbstractConfig { /* * NOTE: DO NOT CHANGE EITHER CONFIG STRINGS OR THEIR JAVA VARIABLE NAMES AS THESE ARE PART OF THE PUBLIC API AND * CHANGE WILL BREAK USER CODE. 
*/ private static final ConfigDef CONFIG; /** <code>bootstrap.servers</code> */ public static final String BOOTSTRAP_SERVERS_CONFIG = CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG; /** <code>metadata.fetch.timeout.ms</code> */ /** * @deprecated This config will be removed in a future release. Please use {@link #MAX_BLOCK_MS_CONFIG} */ @Deprecated public static final String METADATA_FETCH_TIMEOUT_CONFIG = "metadata.fetch.timeout.ms"; private static final String METADATA_FETCH_TIMEOUT_DOC = "The first time data is sent to a topic we must fetch metadata about that topic to know which servers host the topic's partitions. This " + "fetch to succeed before throwing an exception back to the client."; /** <code>metadata.max.age.ms</code> */ public static final String METADATA_MAX_AGE_CONFIG = CommonClientConfigs.METADATA_MAX_AGE_CONFIG; private static final String METADATA_MAX_AGE_DOC = CommonClientConfigs.METADATA_MAX_AGE_DOC; /** <code>batch.size</code> */ public static final String BATCH_SIZE_CONFIG = "batch.size"; private static final String BATCH_SIZE_DOC = "The producer will attempt to batch records together into fewer requests whenever multiple records are being sent" + " to the same partition. This helps performance on both the client and the server. This configuration controls the " + "default batch size in bytes. " + "<p>" + "No attempt will be made to batch records larger than this size. " + "<p>" + "Requests sent to brokers will contain multiple batches, one for each partition with data available to be sent. " + "<p>" + "A small batch size will make batching less common and may reduce throughput (a batch size of zero will disable " + "batching entirely). 
A very large batch size may use memory a bit more wastefully as we will always allocate a " + "buffer of the specified batch size in anticipation of additional records."; /** <code>buffer.memory</code> */ public static final String BUFFER_MEMORY_CONFIG = "buffer.memory"; private static final String BUFFER_MEMORY_DOC = "The total bytes of memory the producer can use to buffer records waiting to be sent to the server. If records are " + "sent faster than they can be delivered to the server the producer will either block or throw an exception based " + "on the preference specified by <code>block.on.buffer.full</code>. " + "<p>" + "This setting should correspond roughly to the total memory the producer will use, but is not a hard bound since " + "not all memory the producer uses is used for buffering. Some additional memory will be used for compression (if " + "compression is enabled) as well as for maintaining in-flight requests."; /** <code>acks</code> */ public static final String ACKS_CONFIG = "acks"; private static final String ACKS_DOC = "The number of acknowledgments the producer requires the leader to have received before considering a request complete. This controls the " + " durability of records that are sent. The following settings are common: " + " <ul>" + " <li><code>acks=0</code> If set to zero then the producer will not wait for any acknowledgment from the" + " server at all. The record will be immediately added to the socket buffer and considered sent. No guarantee can be" + " made that the server has received the record in this case, and the <code>retries</code> configuration will not" + " take effect (as the client won't generally know of any failures). The offset given back for each record will" + " always be set to -1." + " <li><code>acks=1</code> This will mean the leader will write the record to its local log but will respond" + " without awaiting full acknowledgement from all followers. 
In this case should the leader fail immediately after" + " acknowledging the record but before the followers have replicated it then the record will be lost." + " <li><code>acks=all</code> This means the leader will wait for the full set of in-sync replicas to" + " acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica" + " remains alive. This is the strongest available guarantee."; /** <code>timeout.ms</code> */ /** * @deprecated This config will be removed in a future release. Please use {@link #REQUEST_TIMEOUT_MS_CONFIG} */ @Deprecated public static final String TIMEOUT_CONFIG = "timeout.ms"; private static final String TIMEOUT_DOC = "The configuration controls the maximum amount of time the server will wait for acknowledgments from followers to " + "meet the acknowledgment requirements the producer has specified with the <code>acks</code> configuration. If the " + "requested number of acknowledgments are not met when the timeout elapses an error will be returned. This timeout " + "is measured on the server side and does not include the network latency of the request."; /** <code>linger.ms</code> */ public static final String LINGER_MS_CONFIG = "linger.ms"; private static final String LINGER_MS_DOC = "The producer groups together any records that arrive in between request transmissions into a single batched request. " + "Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to " + "reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount " + "of artificial delay&mdash;that is, rather than immediately sending out a record the producer will wait for up to " + "the given delay to allow other records to be sent so that the sends can be batched together. This can be thought " + "of as analogous to Nagle's algorithm in TCP. 
This setting gives the upper bound on the delay for batching: once " + "we get <code>batch.size</code> worth of records for a partition it will be sent immediately regardless of this " + "setting, however if we have fewer than this many bytes accumulated for this partition we will 'linger' for the " + "specified time waiting for more records to show up. This setting defaults to 0 (i.e. no delay). Setting <code>linger.ms=5</code>, " + "for example, would have the effect of reducing the number of requests sent but would add up to 5ms of latency to records sent in the absense of load."; /** <code>client.id</code> */ public static final String CLIENT_ID_CONFIG = CommonClientConfigs.CLIENT_ID_CONFIG; /** <code>send.buffer.bytes</code> */ public static final String SEND_BUFFER_CONFIG = CommonClientConfigs.SEND_BUFFER_CONFIG; /** <code>receive.buffer.bytes</code> */ public static final String RECEIVE_BUFFER_CONFIG = CommonClientConfigs.RECEIVE_BUFFER_CONFIG; /** <code>max.request.size</code> */ public static final String MAX_REQUEST_SIZE_CONFIG = "max.request.size"; private static final String MAX_REQUEST_SIZE_DOC = "The maximum size of a request. This is also effectively a cap on the maximum record size. Note that the server " + "has its own cap on record size which may be different from this. This setting will limit the number of record " + "batches the producer will send in a single request to avoid sending huge requests."; /** <code>reconnect.backoff.ms</code> */ public static final String RECONNECT_BACKOFF_MS_CONFIG = CommonClientConfigs.RECONNECT_BACKOFF_MS_CONFIG; /** <code>block.on.buffer.full</code> */ /** * @deprecated This config will be removed in a future release. Also, the {@link #METADATA_FETCH_TIMEOUT_CONFIG} is no longer honored when this property is set to true. 
*/ @Deprecated public static final String BLOCK_ON_BUFFER_FULL_CONFIG = "block.on.buffer.full"; private static final String BLOCK_ON_BUFFER_FULL_DOC = "When our memory buffer is exhausted we must either stop accepting new records (block) or throw errors. By default " + "this setting is true and we block, however in some scenarios blocking is not desirable and it is better to " + "immediately give an error. Setting this to <code>false</code> will accomplish that: the producer will throw a BufferExhaustedException if a recrord is sent and the buffer space is full."; /** <code>retries</code> */ public static final String RETRIES_CONFIG = "retries"; private static final String RETRIES_DOC = "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error." + " Note that this retry is no different than if the client resent the record upon receiving the " + "error. Allowing retries will potentially change the ordering of records because if two records are " + "sent to a single partition, and the first fails and is retried but the second succeeds, then the second record " + "may appear first."; /** <code>retry.backoff.ms</code> */ public static final String RETRY_BACKOFF_MS_CONFIG = CommonClientConfigs.RETRY_BACKOFF_MS_CONFIG; /** <code>compression.type</code> */ public static final String COMPRESSION_TYPE_CONFIG = "compression.type"; private static final String COMPRESSION_TYPE_DOC = "The compression type for all data generated by the producer. The default is none (i.e. no compression). Valid " + " values are <code>none</code>, <code>gzip</code>, <code>snappy</code>, or <code>lz4</code>. 
" + "Compression is of full batches of data, so the efficacy of batching will also impact the compression ratio (more batching means better compression)."; /** <code>metrics.sample.window.ms</code> */ public static final String METRICS_SAMPLE_WINDOW_MS_CONFIG = CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_CONFIG; /** <code>metrics.num.samples</code> */ public static final String METRICS_NUM_SAMPLES_CONFIG = CommonClientConfigs.METRICS_NUM_SAMPLES_CONFIG; /** <code>metric.reporters</code> */ public static final String METRIC_REPORTER_CLASSES_CONFIG = CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG; /** <code>max.in.flight.requests.per.connection</code> */ public static final String MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION = "max.in.flight.requests.per.connection"; private static final String MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DOC = "The maximum number of unacknowledged requests the client will send on a single connection before blocking." + " Note that if this setting is set to be greater than 1 and there are failed sends, there is a risk of" + " message re-ordering due to retries (i.e., if retries are enabled)."; /** <code>key.serializer</code> */ public static final String KEY_SERIALIZER_CLASS_CONFIG = "key.serializer"; public static final String KEY_SERIALIZER_CLASS_DOC = "Serializer class for key that implements the <code>Serializer</code> interface."; /** <code>value.serializer</code> */ public static final String VALUE_SERIALIZER_CLASS_CONFIG = "value.serializer"; public static final String VALUE_SERIALIZER_CLASS_DOC = "Serializer class for value that implements the <code>Serializer</code> interface."; /** <code>connections.max.idle.ms</code> */ public static final String CONNECTIONS_MAX_IDLE_MS_CONFIG = CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_CONFIG; /** <code>partitioner.class</code> */ public static final String PARTITIONER_CLASS_CONFIG = "partitioner.class"; private static final String PARTITIONER_CLASS_DOC = "Partitioner class that implements the 
<code>Partitioner</code> interface."; /** <code>max.block.ms</code> */ public static final String MAX_BLOCK_MS_CONFIG = "max.block.ms"; private static final String MAX_BLOCK_MS_DOC = "The configuration controls how long {@link KafkaProducer#send()} and {@link KafkaProducer#partitionsFor} will block." + "These methods can be blocked either because the buffer is full or metadata unavailable." + "Blocking in the user-supplied serializers or partitioner will not be counted against this timeout."; /** <code>request.timeout.ms</code> */ public static final String REQUEST_TIMEOUT_MS_CONFIG = CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG; private static final String REQUEST_TIMEOUT_MS_DOC = CommonClientConfigs.REQUEST_TIMEOUT_MS_DOC; static { CONFIG = new ConfigDef().define(BOOTSTRAP_SERVERS_CONFIG, Type.LIST, Importance.HIGH, CommonClientConfigs.BOOSTRAP_SERVERS_DOC) .define(BUFFER_MEMORY_CONFIG, Type.LONG, 32 * 1024 * 1024L, atLeast(0L), Importance.HIGH, BUFFER_MEMORY_DOC) .define(RETRIES_CONFIG, Type.INT, 0, between(0, Integer.MAX_VALUE), Importance.HIGH, RETRIES_DOC) .define(ACKS_CONFIG, Type.STRING, "1", in("all", "-1", "0", "1"), Importance.HIGH, ACKS_DOC) .define(COMPRESSION_TYPE_CONFIG, Type.STRING, "none", Importance.HIGH, COMPRESSION_TYPE_DOC) .define(BATCH_SIZE_CONFIG, Type.INT, 16384, atLeast(0), Importance.MEDIUM, BATCH_SIZE_DOC) .define(TIMEOUT_CONFIG, Type.INT, 30 * 1000, atLeast(0), Importance.MEDIUM, TIMEOUT_DOC) .define(LINGER_MS_CONFIG, Type.LONG, 0, atLeast(0L), Importance.MEDIUM, LINGER_MS_DOC) .define(CLIENT_ID_CONFIG, Type.STRING, "", Importance.MEDIUM, CommonClientConfigs.CLIENT_ID_DOC) .define(SEND_BUFFER_CONFIG, Type.INT, 128 * 1024, atLeast(0), Importance.MEDIUM, CommonClientConfigs.SEND_BUFFER_DOC) .define(RECEIVE_BUFFER_CONFIG, Type.INT, 32 * 1024, atLeast(0), Importance.MEDIUM, CommonClientConfigs.RECEIVE_BUFFER_DOC) .define(MAX_REQUEST_SIZE_CONFIG, Type.INT, 1 * 1024 * 1024, atLeast(0), Importance.MEDIUM, MAX_REQUEST_SIZE_DOC) 
.define(BLOCK_ON_BUFFER_FULL_CONFIG, Type.BOOLEAN, false, Importance.LOW, BLOCK_ON_BUFFER_FULL_DOC) .define(RECONNECT_BACKOFF_MS_CONFIG, Type.LONG, 50L, atLeast(0L), Importance.LOW, CommonClientConfigs.RECONNECT_BACKOFF_MS_DOC) .define(METRIC_REPORTER_CLASSES_CONFIG, Type.LIST, "", Importance.LOW, CommonClientConfigs.METRIC_REPORTER_CLASSES_DOC) .define(RETRY_BACKOFF_MS_CONFIG, Type.LONG, 100L, atLeast(0L), Importance.LOW, CommonClientConfigs.RETRY_BACKOFF_MS_DOC) .define(METADATA_FETCH_TIMEOUT_CONFIG, Type.LONG, 60 * 1000, atLeast(0), Importance.LOW, METADATA_FETCH_TIMEOUT_DOC) .define(MAX_BLOCK_MS_CONFIG, Type.LONG, 60 * 1000, atLeast(0), Importance.MEDIUM, MAX_BLOCK_MS_DOC) .define(REQUEST_TIMEOUT_MS_CONFIG, Type.INT, 30 * 1000, atLeast(0), Importance.MEDIUM, REQUEST_TIMEOUT_MS_DOC) .define(METADATA_MAX_AGE_CONFIG, Type.LONG, 5 * 60 * 1000, atLeast(0), Importance.LOW, METADATA_MAX_AGE_DOC) .define(METRICS_SAMPLE_WINDOW_MS_CONFIG, Type.LONG, 30000, atLeast(0), Importance.LOW, CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_DOC) .define(METRICS_NUM_SAMPLES_CONFIG, Type.INT, 2, atLeast(1), Importance.LOW, CommonClientConfigs.METRICS_NUM_SAMPLES_DOC) .define(MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, Type.INT, 5, atLeast(1), Importance.LOW, MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DOC) .define(KEY_SERIALIZER_CLASS_CONFIG, Type.CLASS, Importance.HIGH, KEY_SERIALIZER_CLASS_DOC) .define(VALUE_SERIALIZER_CLASS_CONFIG, Type.CLASS, Importance.HIGH, VALUE_SERIALIZER_CLASS_DOC) /* default is set to be a bit lower than the server default (10 min), to avoid both client and server closing connection at same time */ .define(CONNECTIONS_MAX_IDLE_MS_CONFIG, Type.LONG, 9 * 60 * 1000, Importance.MEDIUM, CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_DOC) .define(PARTITIONER_CLASS_CONFIG, Type.CLASS, DefaultPartitioner.class.getName(), Importance.MEDIUM, PARTITIONER_CLASS_DOC) // security support .define(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, Type.STRING, 
CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL, Importance.MEDIUM, CommonClientConfigs.SECURITY_PROTOCOL_DOC) .withClientSslSupport() .withClientSaslSupport(); } public static Map<String, Object> addSerializerToConfig(Map<String, Object> configs, Serializer<?> keySerializer, Serializer<?> valueSerializer) { Map<String, Object> newConfigs = new HashMap<String, Object>(); newConfigs.putAll(configs); if (keySerializer != null) newConfigs.put(KEY_SERIALIZER_CLASS_CONFIG, keySerializer.getClass()); if (valueSerializer != null) newConfigs.put(VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer.getClass()); return newConfigs; } public static Properties addSerializerToConfig(Properties properties, Serializer<?> keySerializer, Serializer<?> valueSerializer) { Properties newProperties = new Properties(); newProperties.putAll(properties); if (keySerializer != null) newProperties.put(KEY_SERIALIZER_CLASS_CONFIG, keySerializer.getClass().getName()); if (valueSerializer != null) newProperties.put(VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer.getClass().getName()); return newProperties; } ProducerConfig(Map<?, ?> props) { super(CONFIG, props); } public static void main(String[] args) { System.out.println(CONFIG.toHtmlTable()); } }
/*
 * Copyright 2005 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.base.Preconditions;
import com.google.javascript.rhino.Node;

import java.util.*;

/**
 * Tests for {@link RenameVars}.
 *
 * <p>Each test compiles a JS source string through a {@link RenameVars} pass
 * (configured by the mutable fields below) and asserts the exact renamed
 * output. The "stable rename" tests additionally feed the {@link VariableMap}
 * produced by one run into the next run via {@code previouslyUsedMap} to
 * verify that previously assigned names are reused consistently.
 */
public class RenameVarsTest extends CompilerTestCase {
  private static final String DEFAULT_PREFIX = "";

  // Prefix prepended to generated global names; reset in setUp().
  private String prefix = DEFAULT_PREFIX;

  // Rename map from a previous run, used by the "stable rename" tests to
  // check that earlier name assignments are honored. Reset in setUp().
  private VariableMap previouslyUsedMap =
      new VariableMap(new HashMap<String, String>());

  // The pass instance created by getProcessor(); tests read its VariableMap
  // after a run via renameVars.getVariableMap().
  private RenameVars renameVars;

  // Pass-configuration flags consumed by getProcessor(); reset in setUp().
  private boolean withClosurePass = false;
  private boolean localRenamingOnly = false;
  private boolean preserveFunctionExpressionNames = false;
  private boolean useGoogleCodingConvention = true;
  private boolean generatePseudoNames = false;

  /**
   * Chooses the coding convention per test: the Google convention treats
   * some names (e.g. {@code $super}) as exported and thus unrenamable.
   */
  @Override
  protected CodingConvention getCodingConvention() {
    if (useGoogleCodingConvention) {
      return new GoogleCodingConvention();
    } else {
      return new DefaultCodingConvention();
    }
  }

  /**
   * Builds the pass under test from the current flag settings. The created
   * RenameVars instance is captured in {@code renameVars} so tests can
   * inspect the rename map it produced.
   */
  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    if (withClosurePass) {
      return new ClosurePassAndRenameVars(compiler);
    } else {
      return renameVars = new RenameVars(compiler, prefix,
          localRenamingOnly, preserveFunctionExpressionNames,
          generatePseudoNames, previouslyUsedMap, null, null);
    }
  }

  // Renaming is not idempotent, so run the pass exactly once per test.
  @Override
  protected int getNumRepetitions() {
    return 1;
  }

  /** Resets all pass-configuration state so tests are order-independent. */
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    previouslyUsedMap = new VariableMap(new HashMap<String, String>());
    prefix = DEFAULT_PREFIX;
    withClosurePass = false;
    localRenamingOnly = false;
    preserveFunctionExpressionNames = false;
    generatePseudoNames = false;

    // TODO(johnlenz): Enable Normalize during these tests.
  }

  public void testRenameSimple() {
    test("function Foo(v1, v2) {return v1;} Foo();",
         "function a(b, c) {return b;} a();");
  }

  public void testRenameGlobals() {
    test("var Foo; var Bar, y; function x() { Bar++; }",
         "var a; var b, c; function d() { b++; }");
  }

  public void testRenameLocals() {
    // Locals in separate scopes may reuse the same short names.
    test("(function (v1, v2) {}); (function (v3, v4) {});",
         "(function (a, b) {}); (function (a, b) {});");
    test("function f1(v1, v2) {}; function f2(v3, v4) {};",
         "function c(a, b) {}; function d(a, b) {};");
  }

  public void testRenameRedeclaredGlobals() {
    test("function f1(v1, v2) {f1()};" +
         "/** @suppress {duplicate} */" +
         "function f1(v3, v4) {f1()};",
         "function a(b, c) {a()};" +
         "function a(b, c) {a()};");

    localRenamingOnly = true;

    // With localRenamingOnly, the global f1 keeps its name.
    test("function f1(v1, v2) {f1()};" +
         "/** @suppress {duplicate} */" +
         "function f1(v3, v4) {f1()};",
         "function f1(a, b) {f1()};" +
         "function f1(a, b) {f1()};");
  }

  public void testRecursiveFunctions1() {
    test("var walk = function walk(node, aFunction) {" +
         " walk(node, aFunction);" +
         "};",
         "var d = function a(b, c) {" +
         " a(b, c);" +
         "};");

    localRenamingOnly = true;

    test("var walk = function walk(node, aFunction) {" +
         " walk(node, aFunction);" +
         "};",
         "var walk = function a(b, c) {" +
         " a(b, c);" +
         "};");
  }

  public void testRecursiveFunctions2() {
    // Function-expression name "walk" must survive when preservation is on.
    preserveFunctionExpressionNames = true;

    test("var walk = function walk(node, aFunction) {" +
         " walk(node, aFunction);" +
         "};",
         "var c = function walk(a, b) {" +
         " walk(a, b);" +
         "};");

    localRenamingOnly = true;

    test("var walk = function walk(node, aFunction) {" +
         " walk(node, aFunction);" +
         "};",
         "var walk = function walk(a, b) {" +
         " walk(a, b);" +
         "};");
  }

  public void testRenameLocalsClashingWithGlobals() {
    test("function a(v1, v2) {return v1;} a();",
         "function a(b, c) {return b;} a();");
  }

  public void testRenameNested() {
    test("function f1(v1, v2) { (function(v3, v4) {}) }",
         "function a(b, c) { (function(d, e) {}) }");
    test("function f1(v1, v2) { function f2(v3, v4) {} }",
         "function a(b, c) { function d(e, f) {} }");
  }

  public void testRenameWithExterns1() {
    // Names declared in externs (foo) must not be renamed or shadowed.
    String externs = "var foo;";
    test(externs, "var bar; foo(bar);", "var a; foo(a);", null, null);
  }

  public void testRenameWithExterns2() {
    String externs = "var a;";
    test(externs, "var b = 5", "var b = 5", null, null);
  }

  public void testDoNotRenameExportedName() {
    // Underscore-prefixed names are exported under the Google convention.
    test("_foo()", "_foo()");
  }

  public void testRenameWithNameOverlap() {
    test("var a = 1; var b = 2; b + b;",
         "var a = 1; var b = 2; b + b;");
  }

  public void testRenameWithPrefix1() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {return v1} Foo();",
         "function PRE_(a, b) {return a} PRE_();");
    prefix = DEFAULT_PREFIX;
  }

  public void testRenameWithPrefix2() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {var v3 = v1 + v2; return v3;} Foo();",
         "function PRE_(a, b) {var c = a + b; return c;} PRE_();");
    prefix = DEFAULT_PREFIX;
  }

  public void testRenameWithPrefix3() {
    // Single-letter prefix: globals must not collide with the prefix "a".
    prefix = "a";
    test("function Foo() {return 1;}" +
         "function Bar() {" +
         " var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
         " A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
         " Foo();" +
         "} Bar();",
         "function a() {return 1;}" +
         "function aa() {" +
         " var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
         " B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
         " a();" +
         "} aa();");
    prefix = DEFAULT_PREFIX;
  }

  public void testNamingBasedOnOrderOfOccurrence() {
    test("var q,p,m,n,l,k; " +
         "(function (r) {}); try { } catch(s) {}; var t = q + q;",
         "var a,b,c,d,e,f; " +
         "(function(g) {}); try { } catch(h) {}; var i = a + a;"
         );
    test("function(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z," +
         "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,$){};" +
         "var a4,a3,a2,a1,b4,b3,b2,b1,ab,ac,ad,fg;function foo(){};",
         "function(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
         "A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$){};" +
         "var aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la;function ma(){};");
  }

  public void testStableRenameSimple() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c");
    testRenameMap("function Foo(v1, v2) {return v1;} Foo();",
                  "function a(b, c) {return b;} a();", expectedVariableMap);

    expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c", "L 2", "d");
    testRenameMapUsingOldMap("function Foo(v1, v2, v3) {return v1;} Foo();",
                             "function a(b, c, d) {return b;} a();",
                             expectedVariableMap);
  }

  public void testStableRenameGlobals() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "Bar", "b", "y", "c", "x", "d");
    testRenameMap("var Foo; var Bar, y; function x() { Bar++; }",
                  "var a; var b, c; function d() { b++; }",
                  expectedVariableMap);

    expectedVariableMap = makeVariableMap(
        "Foo", "a", "Bar", "b", "y", "c", "x", "d", "Baz", "f", "L 0" , "e");
    testRenameMapUsingOldMap(
        "var Foo, Baz; var Bar, y; function x(R) { return R + Bar++; }",
        "var a, f; var b, c; function d(e) { return e + b++; }",
        expectedVariableMap);
  }

  public void testStableRenameWithPointlesslyAnonymousFunctions() {
    VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b");
    testRenameMap("function (v1, v2) {}; function (v3, v4) {};",
                  "function (a, b) {}; function (a, b) {};",
                  expectedVariableMap);

    expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b", "L 2", "c");
    testRenameMapUsingOldMap("function (v0, v1, v2) {}; function (v3, v4) {};",
                             "function (a, b, c) {}; function (a, b) {};",
                             expectedVariableMap);
  }

  public void testStableRenameLocalsClashingWithGlobals() {
    test("function a(v1, v2) {return v1;} a();",
         "function a(b, c) {return b;} a();");
    previouslyUsedMap = renameVars.getVariableMap();
    test("function bar(){return;}function a(v1, v2) {return v1;} a();",
         "function d(){return;}function a(b, c) {return b;} a();");
  }

  public void testStableRenameNested() {
    VariableMap expectedVariableMap = makeVariableMap(
        "f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e");
    testRenameMap("function f1(v1, v2) { (function(v3, v4) {}) }",
                  "function a(b, c) { (function(d, e) {}) }",
                  expectedVariableMap);

    expectedVariableMap = makeVariableMap(
        "f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e", "L 4", "f");
    testRenameMapUsingOldMap("function f1(v1, v2) { (function(v3, v4, v5) {}) }",
                             "function a(b, c) { (function(d, e, f) {}) }",
                             expectedVariableMap);
  }

  public void testStableRenameWithExterns1() {
    String externs = "var foo;";
    test(externs, "var bar; foo(bar);", "var a; foo(a);", null, null);
    previouslyUsedMap = renameVars.getVariableMap();
    test(externs, "var bar, baz; foo(bar, baz);",
         "var a, b; foo(a, b);", null, null);
  }

  public void testStableRenameWithExterns2() {
    String externs = "var a;";
    test(externs, "var b = 5", "var b = 5", null, null);
    previouslyUsedMap = renameVars.getVariableMap();
    test(externs, "var b = 5, catty = 9;", "var b = 5, c=9;", null, null);
  }

  public void testStableRenameWithNameOverlap() {
    test("var a = 1; var b = 2; b + b;",
         "var a = 1; var b = 2; b + b;");
    previouslyUsedMap = renameVars.getVariableMap();
    test("var a = 1; var c, b = 2; b + b;",
         "var a = 1; var c, b = 2; b + b;");
  }

  public void testStableRenameWithAnonymousFunctions() {
    VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "foo", "b");
    testRenameMap("function foo(bar){return bar;}foo(function(h){return h;});",
                  "function b(a){return a}b(function(a){return a;})",
                  expectedVariableMap);

    expectedVariableMap = makeVariableMap("foo", "b", "L 0", "a", "L 1", "c");
    testRenameMapUsingOldMap(
        "function foo(bar) {return bar;}foo(function(g,h) {return g+h;});",
        "function b(a){return a}b(function(a,c){return a+c;})",
        expectedVariableMap);
  }

  public void testStableRenameSimpleExternsChanges() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c");
    testRenameMap("function Foo(v1, v2) {return v1;} Foo();",
                  "function a(b, c) {return b;} a();", expectedVariableMap);

    // Foo becomes an extern in the second run, so it keeps its name.
    expectedVariableMap = makeVariableMap("L 0", "b", "L 1", "c", "L 2", "a");
    String externs = "var Foo;";
    testRenameMapUsingOldMap(externs,
                             "function Foo(v1, v2, v0) {return v1;} Foo();",
                             "function Foo(b, c, a) {return b;} Foo();",
                             expectedVariableMap);
  }

  public void testStableRenameSimpleLocalNameExterned() {
    test("function Foo(v1, v2) {return v1;} Foo();",
         "function a(b, c) {return b;} a();");

    previouslyUsedMap = renameVars.getVariableMap();

    // "b" is now an extern, so the local previously named "b" moves to "d".
    String externs = "var b;";
    test(externs, "function Foo(v1, v2) {return v1;} Foo(b);",
         "function a(d, c) {return d;} a(b);", null, null);
  }

  public void testStableRenameSimpleGlobalNameExterned() {
    test("function Foo(v1, v2) {return v1;} Foo();",
         "function a(b, c) {return b;} a();");

    previouslyUsedMap = renameVars.getVariableMap();

    String externs = "var Foo;";
    test(externs, "function Foo(v1, v2, v0) {return v1;} Foo();",
         "function Foo(b, c, a) {return b;} Foo();", null, null);
  }

  public void testStableRenameWithPrefix1AndUnstableLocalNames() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {return v1} Foo();",
         "function PRE_(a, b) {return a} PRE_();");

    previouslyUsedMap = renameVars.getVariableMap();

    prefix = "PRE_";
    test("function Foo(v0, v1, v2) {return v1} Foo();",
         "function PRE_(a, b, c) {return b} PRE_();");
  }

  public void testStableRenameWithPrefix2() {
    prefix = "a";
    test("function Foo() {return 1;}" +
         "function Bar() {" +
         " var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
         " A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
         " Foo();" +
         "} Bar();",
         "function a() {return 1;}" +
         "function aa() {" +
         " var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
         " B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
         " a();" +
         "} aa();");

    previouslyUsedMap = renameVars.getVariableMap();

    prefix = "a";
    test("function Foo() {return 1;}" +
         "function Baz() {return 1;}" +
         "function Bar() {" +
         " var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
         " A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
         " Foo();" +
         "} Bar();",
         "function a() {return 1;}" +
         "function ab() {return 1;}" +
         "function aa() {" +
         " var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
         " B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
         " a();" +
         "} aa();");
  }

  public void testContrivedExampleWhereConsistentRenamingIsWorse() {
    previouslyUsedMap = makeVariableMap(
        "Foo", "LongString", "L 0", "b", "L 1", "c");

    test("function Foo(v1, v2) {return v1;} Foo();",
         "function LongString(b, c) {return b;} LongString();");

    previouslyUsedMap = renameVars.getVariableMap();
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "LongString", "L 0", "b", "L 1", "c");
    assertVariableMapsEqual(expectedVariableMap, previouslyUsedMap);
  }

  public void testExportSimpleSymbolReservesName() {
    test("var goog, x; goog.exportSymbol('a', x);",
         "var a, b; a.exportSymbol('a', b);");

    // With the Closure pass active, the exported name 'a' is reserved.
    withClosurePass = true;
    test("var goog, x; goog.exportSymbol('a', x);",
         "var b, c; b.exportSymbol('a', c);");
  }

  public void testExportComplexSymbolReservesName() {
    test("var goog, x; goog.exportSymbol('a.b', x);",
         "var a, b; a.exportSymbol('a.b', b);");

    withClosurePass = true;
    test("var goog, x; goog.exportSymbol('a.b', x);",
         "var b, c; b.exportSymbol('a.b', c);");
  }

  public void testExportToNonStringDoesntExplode() {
    withClosurePass = true;
    test("var goog, a, b; goog.exportSymbol(a, b);",
         "var a, b, c; a.exportSymbol(b, c);");
  }

  public void testDollarSignSuperExport1() {
    useGoogleCodingConvention = false;
    // See http://code.google.com/p/closure-compiler/issues/detail?id=32
    test("var x = function($super,duper,$fantastic){}",
         "var c = function($super, a, b){}");

    localRenamingOnly = false;
    test("var $super = 1", "var a = 1");

    useGoogleCodingConvention = true;
    test("var x = function($super,duper,$fantastic){}",
         "var c = function($super,a,b){}");
  }

  public void testDollarSignSuperExport2() {
    boolean normalizedExpectedJs = false;
    super.enableNormalize(false);

    useGoogleCodingConvention = false;
    // See http://code.google.com/p/closure-compiler/issues/detail?id=32
    test("var x = function($super,duper,$fantastic){};" +
         "var y = function($super,duper){};",
         "var c = function($super, a, b){};" +
         "var d = function($super, a){};");

    localRenamingOnly = false;
    test("var $super = 1", "var a = 1");

    useGoogleCodingConvention = true;
    test("var x = function($super,duper,$fantastic){};" +
         "var y = function($super,duper){};",
         "var c = function($super, a, b ){};" +
         "var d = function($super,a){};");

    super.disableNormalize();
  }

  public void testPseudoNames() {
    generatePseudoNames = false;
    // See http://code.google.com/p/closure-compiler/issues/detail?id=32
    test("var foo = function(a, b, c){}",
         "var d = function(a, b, c){}");

    // Pseudo-name mode wraps originals as $name$$ for debuggability.
    generatePseudoNames = true;
    test("var foo = function(a, b, c){}",
         "var $foo$$ = function($a$$, $b$$, $c$$){}");

    test("var a = function(a, b, c){}",
         "var $a$$ = function($a$$, $b$$, $c$$){}");
  }

  /** Runs testRenameMap with no externs, seeding from the previous run's map. */
  private void testRenameMapUsingOldMap(String input, String expected,
                                        VariableMap expectedMap) {
    previouslyUsedMap = renameVars.getVariableMap();
    testRenameMap("", input, expected, expectedMap);
  }

  /** Runs testRenameMap with externs, seeding from the previous run's map. */
  private void testRenameMapUsingOldMap(String externs, String input,
                                        String expected,
                                        VariableMap expectedMap) {
    previouslyUsedMap = renameVars.getVariableMap();
    testRenameMap(externs, input, expected, expectedMap);
  }

  private void testRenameMap(String input, String expected,
                             VariableMap expectedRenameMap) {
    testRenameMap("", input, expected, expectedRenameMap);
  }

  /** Compiles and asserts both the renamed output and the produced map. */
  private void testRenameMap(String externs, String input, String expected,
                             VariableMap expectedRenameMap) {
    test(externs, input, expected, null, null);
    VariableMap renameMap = renameVars.getVariableMap();
    assertVariableMapsEqual(expectedRenameMap, renameMap);
  }

  /** Builds a VariableMap from alternating original-name/new-name pairs. */
  private VariableMap makeVariableMap(String... keyValPairs) {
    Preconditions.checkArgument(keyValPairs.length % 2 == 0);

    Map<String, String> renameMap = new HashMap<String, String>();
    for (int i = 0; i < keyValPairs.length; i += 2) {
      renameMap.put(keyValPairs[i], keyValPairs[i + 1]);
    }

    return new VariableMap(renameMap);
  }

  /** Asserts two VariableMaps contain identical original-to-new mappings. */
  private static void assertVariableMapsEqual(VariableMap a, VariableMap b) {
    Map<String, String> ma = a.getOriginalNameToNewNameMap();
    Map<String, String> mb = b.getOriginalNameToNewNameMap();
    assertEquals("VariableMaps not equal", ma, mb);
  }

  /**
   * Composite pass: runs ProcessClosurePrimitives first so exported names
   * are collected, then a RenameVars that reserves those exported names.
   */
  private class ClosurePassAndRenameVars implements CompilerPass {
    private final Compiler compiler;

    private ClosurePassAndRenameVars(Compiler compiler) {
      this.compiler = compiler;
    }

    public void process(Node externs, Node root) {
      ProcessClosurePrimitives closurePass =
          new ProcessClosurePrimitives(compiler, CheckLevel.WARNING, true);
      closurePass.process(externs, root);
      renameVars = new RenameVars(compiler, prefix,
          false, false, false, previouslyUsedMap, null,
          closurePass.getExportedVariableNames());
      renameVars.process(externs, root);
    }
  }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1beta/data.proto

package com.google.analytics.data.v1beta;

/**
 * A contiguous set of minutes: startMinutesAgo, startMinutesAgo + 1, ...,
 * endMinutesAgo. Requests are allowed up to 2 minute ranges.
 *
 * <p>Protobuf type {@code google.analytics.data.v1beta.MinuteRange}
 */
public final class MinuteRange extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.analytics.data.v1beta.MinuteRange)
    MinuteRangeOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use MinuteRange.newBuilder() to construct.
  private MinuteRange(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; initializes reference
  // fields to their proto3 defaults.
  private MinuteRange() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MinuteRange();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor. Reads tags until EOF (tag 0): field 1
  // (varint, tag 8) -> startMinutesAgo_, field 2 (varint, tag 16) ->
  // endMinutesAgo_, field 3 (length-delimited, tag 26) -> name_; anything
  // else is preserved in unknownFields. The presence bits in bitField0_ are
  // set as each optional field is seen.
  // NOTE(review): this constructor style is emitted by older protobuf
  // codegen; do not hand-modify — regenerate from the .proto instead.
  private MinuteRange(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              bitField0_ |= 0x00000001;
              startMinutesAgo_ = input.readInt32();
              break;
            }
          case 16:
            {
              bitField0_ |= 0x00000002;
              endMinutesAgo_ = input.readInt32();
              break;
            }
          case 26:
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even on a parse failure, so the
      // partially-built message attached to the exception is consistent.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1beta.ReportingApiProto
        .internal_static_google_analytics_data_v1beta_MinuteRange_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1beta.ReportingApiProto
        .internal_static_google_analytics_data_v1beta_MinuteRange_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1beta.MinuteRange.class,
            com.google.analytics.data.v1beta.MinuteRange.Builder.class);
  }

  // Presence bits: 0x1 = startMinutesAgo, 0x2 = endMinutesAgo.
  private int bitField0_;

  public static final int START_MINUTES_AGO_FIELD_NUMBER = 1;
  private int startMinutesAgo_;
  /**
   * The inclusive start minute for the query as a number of minutes before
   * now. Cannot be after {@code endMinutesAgo}. Defaults to 29 when unset.
   * Standard Analytics properties may request up to the last 30 minutes
   * ({@code startMinutesAgo <= 29}); 360 properties up to the last 60
   * ({@code startMinutesAgo <= 59}).
   *
   * <p><code>optional int32 start_minutes_ago = 1;</code>
   *
   * @return Whether the startMinutesAgo field is set.
   */
  @java.lang.Override
  public boolean hasStartMinutesAgo() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * See {@code start_minutes_ago = 1} field documentation on
   * {@link #hasStartMinutesAgo()}.
   *
   * @return The startMinutesAgo.
   */
  @java.lang.Override
  public int getStartMinutesAgo() {
    return startMinutesAgo_;
  }

  public static final int END_MINUTES_AGO_FIELD_NUMBER = 2;
  private int endMinutesAgo_;
  /**
   * The inclusive end minute for the query as a number of minutes before
   * now. Cannot be before {@code startMinutesAgo}. Defaults to 0 when unset.
   * Standard Analytics properties may request any minute in the last 30
   * minutes ({@code endMinutesAgo <= 29}); 360 properties any minute in the
   * last 60 ({@code endMinutesAgo <= 59}).
   *
   * <p><code>optional int32 end_minutes_ago = 2;</code>
   *
   * @return Whether the endMinutesAgo field is set.
   */
  @java.lang.Override
  public boolean hasEndMinutesAgo() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * See {@code end_minutes_ago = 2} field documentation on
   * {@link #hasEndMinutesAgo()}.
   *
   * @return The endMinutesAgo.
   */
  @java.lang.Override
  public int getEndMinutesAgo() {
    return endMinutesAgo_;
  }

  public static final int NAME_FIELD_NUMBER = 3;
  // Holds either a String or a ByteString; lazily converted (and cached) to
  // String on first getName() call. volatile for safe cross-thread caching.
  private volatile java.lang.Object name_;
  /**
   * Assigns a name to this minute range. The dimension {@code dateRange} is
   * valued to this name in a report response. If set, cannot begin with
   * {@code date_range_} or {@code RESERVED_}. If not set, minute ranges are
   * named by their zero based index in the request: {@code date_range_0},
   * {@code date_range_1}, etc.
   *
   * <p><code>string name = 3;</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip UTF-8 decoding.
      name_ = s;
      return s;
    }
  }
  /**
   * See {@code name = 3} field documentation on {@link #getName()}.
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent byte-level access.
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized isInitialized(): -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only the fields that are set (or, for name, non-empty),
  // followed by any unknown fields that were preserved at parse time.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeInt32(1, startMinutesAgo_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeInt32(2, endMinutesAgo_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, startMinutesAgo_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, endMinutesAgo_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-wise equality: optional int fields compare presence first, then
  // value; unknown fields must also match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1beta.MinuteRange)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1beta.MinuteRange other =
        (com.google.analytics.data.v1beta.MinuteRange) obj;

    if (hasStartMinutesAgo() != other.hasStartMinutesAgo()) return false;
    if (hasStartMinutesAgo()) {
      if (getStartMinutesAgo() != other.getStartMinutesAgo()) return false;
    }
    if (hasEndMinutesAgo() != other.hasEndMinutesAgo()) return false;
    if (hasEndMinutesAgo()) {
      if (getEndMinutesAgo() != other.getEndMinutesAgo()) return false;
    }
    if (!getName().equals(other.getName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  // Hash folds in only the fields that are set, mirroring equals(); result
  // is memoized (0 is reserved to mean "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasStartMinutesAgo()) {
      hash = (37 * hash) + START_MINUTES_AGO_FIELD_NUMBER;
      hash = (53 * hash) + getStartMinutesAgo();
    }
    if (hasEndMinutesAgo()) {
      hash = (37 * hash) + END_MINUTES_AGO_FIELD_NUMBER;
      hash = (53 * hash) + getEndMinutesAgo();
    }
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Static parse entry points (standard generated-message surface) ----

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1beta.MinuteRange parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.analytics.data.v1beta.MinuteRange prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; any other instance
    // seeds the Builder with its current field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for {@code google.analytics.data.v1beta.MinuteRange}: a
   * contiguous set of minutes: startMinutesAgo, startMinutesAgo + 1, ...,
   * endMinutesAgo. Requests are allowed up to 2 minute ranges.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.data.v1beta.MinuteRange)
      com.google.analytics.data.v1beta.MinuteRangeOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_MinuteRange_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_MinuteRange_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.data.v1beta.MinuteRange.class,
              com.google.analytics.data.v1beta.MinuteRange.Builder.class);
    }

    // Construct using com.google.analytics.data.v1beta.MinuteRange.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No nested message fields, so nothing to eagerly initialize.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    // Resets all fields to defaults and clears the presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      startMinutesAgo_ = 0;
      bitField0_ = (bitField0_ & ~0x00000001);
      endMinutesAgo_ = 0;
      bitField0_ = (bitField0_ & ~0x00000002);
      name_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_MinuteRange_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.data.v1beta.MinuteRange getDefaultInstanceForType() {
      return com.google.analytics.data.v1beta.MinuteRange.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.data.v1beta.MinuteRange build() {
      com.google.analytics.data.v1beta.MinuteRange result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies the builder state (values + presence bits) into a new message
    // without checking isInitialized().
    @java.lang.Override
    public com.google.analytics.data.v1beta.MinuteRange buildPartial() {
      com.google.analytics.data.v1beta.MinuteRange result =
          new com.google.analytics.data.v1beta.MinuteRange(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.startMinutesAgo_ = startMinutesAgo_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.endMinutesAgo_ = endMinutesAgo_;
        to_bitField0_ |= 0x00000002;
      }
      result.name_ = name_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed mergeFrom when possible, otherwise falls back
    // to the reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.data.v1beta.MinuteRange) {
        return mergeFrom((com.google.analytics.data.v1beta.MinuteRange) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges set fields from `other` into this builder; unset fields in
    // `other` leave this builder's values untouched.
    public Builder mergeFrom(com.google.analytics.data.v1beta.MinuteRange other) {
      if (other == com.google.analytics.data.v1beta.MinuteRange.getDefaultInstance()) return this;
      if (other.hasStartMinutesAgo()) {
        setStartMinutesAgo(other.getStartMinutesAgo());
      }
      if (other.hasEndMinutesAgo()) {
        setEndMinutesAgo(other.getEndMinutesAgo());
      }
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Parses from a stream by delegating to PARSER; on failure, merges the
    // partially-parsed message before rethrowing the wrapped IOException.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.analytics.data.v1beta.MinuteRange parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.analytics.data.v1beta.MinuteRange) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Presence bits: 0x1 = startMinutesAgo, 0x2 = endMinutesAgo.
    private int bitField0_;

    private int startMinutesAgo_;
    /**
     * The inclusive start minute for the query as a number of minutes
     * before now. See the message accessor {@code hasStartMinutesAgo()} for
     * the full field semantics.
     *
     * <p><code>optional int32 start_minutes_ago = 1;</code>
     *
     * @return Whether the startMinutesAgo field is set.
     */
    @java.lang.Override
    public boolean hasStartMinutesAgo() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 start_minutes_ago = 1;</code>
     *
     * @return The startMinutesAgo.
     */
    @java.lang.Override
    public int getStartMinutesAgo() {
      return startMinutesAgo_;
    }
    /**
     * <code>optional int32 start_minutes_ago = 1;</code>
     *
     * @param value The startMinutesAgo to set.
     * @return This builder for chaining.
     */
    public Builder setStartMinutesAgo(int value) {
      bitField0_ |= 0x00000001;
      startMinutesAgo_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional int32 start_minutes_ago = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearStartMinutesAgo() {
      bitField0_ = (bitField0_ & ~0x00000001);
      startMinutesAgo_ = 0;
      onChanged();
      return this;
    }

    private int endMinutesAgo_;
    /**
     * The inclusive end minute for the query as a number of minutes before
     * now. See the message accessor {@code hasEndMinutesAgo()} for the full
     * field semantics.
     *
     * <p><code>optional int32 end_minutes_ago = 2;</code>
     *
     * @return Whether the endMinutesAgo field is set.
     */
    @java.lang.Override
    public boolean hasEndMinutesAgo() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 end_minutes_ago = 2;</code>
     *
     * @return The endMinutesAgo.
     */
    @java.lang.Override
    public int getEndMinutesAgo() {
      return endMinutesAgo_;
    }
    /**
     * <code>optional int32 end_minutes_ago = 2;</code>
     *
     * @param value The endMinutesAgo to set.
     * @return This builder for chaining.
     */
    public Builder setEndMinutesAgo(int value) {
      bitField0_ |= 0x00000002;
      endMinutesAgo_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional int32 end_minutes_ago = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEndMinutesAgo() {
      bitField0_ = (bitField0_ & ~0x00000002);
      endMinutesAgo_ = 0;
      onChanged();
      return this;
    }

    // Same String/ByteString dual-representation scheme as the message.
    private java.lang.Object name_ = "";
    /**
     * Assigns a name to this minute range. See the message accessor
     * {@code getName()} for the full field semantics.
     *
     * <p><code>string name = 3;</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>string name = 3;</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>string name = 3;</code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>string name = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     * <code>string name = 3;</code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject bytes that are not valid UTF-8 before storing them.
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.analytics.data.v1beta.MinuteRange)
  }

  // @@protoc_insertion_point(class_scope:google.analytics.data.v1beta.MinuteRange)
  private static final com.google.analytics.data.v1beta.MinuteRange DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.data.v1beta.MinuteRange();
  }

  public static com.google.analytics.data.v1beta.MinuteRange getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to the wire-format parsing constructor above.
  private static final com.google.protobuf.Parser<MinuteRange> PARSER =
      new com.google.protobuf.AbstractParser<MinuteRange>() {
        @java.lang.Override
        public MinuteRange parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new MinuteRange(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<MinuteRange> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MinuteRange> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.data.v1beta.MinuteRange getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* Copyright (c) 2012-2014 Boundless and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Distribution License v1.0
 * which accompanies this distribution, and is available at
 * https://www.eclipse.org/org/documents/edl-v10.html
 *
 * Contributors:
 * Johnathan Garrett (LMN Solutions) - initial implementation
 */
package org.locationtech.geogig.api.porcelain;

import java.util.ArrayList;
import java.util.List;

import org.locationtech.geogig.api.AbstractGeoGigOp;
import org.locationtech.geogig.api.ObjectId;
import org.locationtech.geogig.api.ProgressListener;
import org.locationtech.geogig.api.Ref;
import org.locationtech.geogig.api.Remote;
import org.locationtech.geogig.api.SymRef;
import org.locationtech.geogig.api.plumbing.LsRemote;
import org.locationtech.geogig.api.plumbing.RefParse;
import org.locationtech.geogig.api.plumbing.UpdateRef;
import org.locationtech.geogig.api.plumbing.UpdateSymRef;
import org.locationtech.geogig.api.porcelain.ConfigOp.ConfigAction;
import org.locationtech.geogig.api.porcelain.ConfigOp.ConfigScope;
import org.locationtech.geogig.api.porcelain.TransferSummary.ChangedRef;
import org.locationtech.geogig.api.porcelain.TransferSummary.ChangedRef.ChangeTypes;
import org.locationtech.geogig.remote.IRemoteRepo;
import org.locationtech.geogig.remote.RemoteUtils;
import org.locationtech.geogig.repository.Hints;
import org.locationtech.geogig.repository.Repository;
import org.locationtech.geogig.repository.RepositoryConnectionException;

import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;

/**
 * Fetches named heads or tags from one or more other repositories, along with the objects necessary
 * to complete them.
 * <p>
 * The operation is configured through the fluent setters ({@link #setAll}, {@link #setPrune},
 * {@link #setDepth}, {@link #setFullDepth}, {@link #addRemote}) and executed via {@code call()},
 * which returns a {@link TransferSummary} describing every local ref that was added, changed,
 * deepened or removed, grouped by remote fetch URL.
 */
public class FetchOp extends AbstractGeoGigOp<TransferSummary> {

    // when true, fetch from every configured remote instead of just the ones added explicitly
    private boolean all;

    // when true, delete local remote-tracking branches that no longer exist on the remote
    private boolean prune;

    // when true on a shallow clone, fetch the entire missing history
    private boolean fullDepth = false;

    // remotes to fetch from; defaults to "origin" if left empty and 'all' is false
    private List<Remote> remotes = new ArrayList<Remote>();

    // maximum commit depth to fetch; absent means "no limit requested"
    private Optional<Integer> depth = Optional.absent();

    /**
     * @param all if {@code true}, fetch from all remotes.
     * @return {@code this}
     */
    public FetchOp setAll(final boolean all) {
        this.all = all;
        return this;
    }

    /**
     * @return whether this fetch is configured to pull from all remotes
     */
    public boolean isAll() {
        return all;
    }

    /**
     * @param prune if {@code true}, remote tracking branches that no longer exist will be removed
     *        locally.
     * @return {@code this}
     */
    public FetchOp setPrune(final boolean prune) {
        this.prune = prune;
        return this;
    }

    /**
     * @return whether stale remote-tracking branches will be removed locally
     */
    public boolean isPrune() {
        return prune;
    }

    /**
     * If no depth is specified, fetch will pull all history from the specified ref(s). If the
     * repository is shallow, it will maintain the existing depth.
     *
     * @param depth maximum commit depth to fetch; values {@code <= 0} are silently ignored
     * @return {@code this}
     */
    public FetchOp setDepth(final int depth) {
        if (depth > 0) {
            this.depth = Optional.of(depth);
        }
        return this;
    }

    /**
     * @return the configured maximum commit depth, or {@code null} if none was set
     */
    public Integer getDepth() {
        return this.depth.orNull();
    }

    /**
     * If full depth is set on a shallow clone, then the full history will be fetched.
     *
     * @param fullDepth whether or not to fetch the full history
     * @return {@code this}
     */
    public FetchOp setFullDepth(boolean fullDepth) {
        this.fullDepth = fullDepth;
        return this;
    }

    /**
     * @return whether the full history will be fetched on a shallow clone
     */
    public boolean isFullDepth() {
        return fullDepth;
    }

    /**
     * @param remoteName the name or URL of a remote repository to fetch from
     * @return {@code this}
     * @throws IllegalStateException (from the supplier overload) if the name cannot be resolved
     */
    public FetchOp addRemote(final String remoteName) {
        Preconditions.checkNotNull(remoteName);
        return addRemote(command(RemoteResolve.class).setName(remoteName));
    }

    /**
     * @return a live view of the names of the remotes this operation will fetch from
     */
    public List<String> getRemoteNames() {
        return Lists.transform(this.remotes, new Function<Remote, String>() {
            @Override
            public String apply(Remote remote) {
                return remote.getName();
            }
        });
    }

    /**
     * @param remoteSupplier the remote repository to fetch from
     * @return {@code this}
     * @throws IllegalStateException if the supplied remote is absent
     */
    public FetchOp addRemote(Supplier<Optional<Remote>> remoteSupplier) {
        Preconditions.checkNotNull(remoteSupplier);
        Optional<Remote> remote = remoteSupplier.get();
        Preconditions.checkState(remote.isPresent(), "Remote could not be resolved.");
        remotes.add(remote.get());

        return this;
    }

    /**
     * @return an immutable snapshot of the remotes this operation will fetch from
     */
    public List<Remote> getRemotes() {
        return ImmutableList.copyOf(remotes);
    }

    /**
     * Executes the fetch operation.
     *
     * @return a {@link TransferSummary} of all ref changes, keyed by remote fetch URL
     * @see org.locationtech.geogig.api.AbstractGeoGigOp#call()
     */
    @Override
    protected TransferSummary _call() {
        if (all) {
            // Add all remotes to list.
            ImmutableList<Remote> localRemotes = command(RemoteListOp.class).call();
            for (Remote remote : localRemotes) {
                if (!remotes.contains(remote)) {
                    remotes.add(remote);
                }
            }
        } else if (remotes.size() == 0) {
            // If no remotes are specified, default to the origin remote
            addRemote("origin");
        }

        final ProgressListener progressListener = getProgressListener();
        progressListener.started();

        // Reconcile the requested depth with the repository's current (shallow-clone) depth.
        Optional<Integer> repoDepth = repository().getDepth();
        if (repoDepth.isPresent()) {
            if (fullDepth) {
                // "full depth" on a shallow repo is modeled as an effectively unbounded depth
                depth = Optional.of(Integer.MAX_VALUE);
            }
            if (depth.isPresent()) {
                if (depth.get() > repoDepth.get()) {
                    // the fetch will deepen the clone, so persist the new depth in local config
                    command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET)
                            .setScope(ConfigScope.LOCAL).setName(Repository.DEPTH_CONFIG_KEY)
                            .setValue(depth.get().toString()).call();
                    repoDepth = depth;
                }
            }
        } else if (depth.isPresent() || fullDepth) {
            // Ignore depth, this is a full repository
            depth = Optional.absent();
            fullDepth = false;
        }

        TransferSummary result = new TransferSummary();

        for (Remote remote : remotes) {
            // refs as the remote reports them; tags are only requested for unmapped,
            // non-shallow (or full-depth) fetches
            final ImmutableSet<Ref> remoteRemoteRefs = command(LsRemote.class)
                    .setRemote(Suppliers.ofInstance(Optional.of(remote)))
                    .retrieveTags(!remote.getMapped() && (!repoDepth.isPresent() || fullDepth))
                    .call();
            // this remote's refs as currently known locally (refs/remotes/<name>/...)
            final ImmutableSet<Ref> localRemoteRefs = command(LsRemote.class)
                    .retrieveLocalRefs(true).setRemote(Suppliers.ofInstance(Optional.of(remote)))
                    .call();

            // If we have specified a depth to pull, we may have more history to pull from existing
            // refs.
            List<ChangedRef> needUpdate = findOutdatedRefs(remote, remoteRemoteRefs,
                    localRemoteRefs, depth);

            if (prune) {
                // Delete local refs that aren't in the remote
                List<Ref> locals = new ArrayList<Ref>();
                // only branches, not tags, appear in the remoteRemoteRefs list so we will not catch
                // any tags in this check. However, we do not track which remote originally
                // provided a tag so it makes sense not to prune them anyway.
                for (Ref remoteRef : remoteRemoteRefs) {
                    Optional<Ref> localRef = findLocal(remoteRef, localRemoteRefs);
                    if (localRef.isPresent()) {
                        locals.add(localRef.get());
                    }
                }
                for (Ref localRef : localRemoteRefs) {
                    if (!locals.contains(localRef)) {
                        // Delete the ref
                        ChangedRef changedRef = new ChangedRef(localRef, null,
                                ChangeTypes.REMOVED_REF);
                        needUpdate.add(changedRef);
                        command(UpdateRef.class).setDelete(true).setName(localRef.getName()).call();
                    }
                }
            }

            Optional<IRemoteRepo> remoteRepo = getRemoteRepo(remote);

            Preconditions.checkState(remoteRepo.isPresent(), "Failed to connect to the remote.");
            IRemoteRepo remoteRepoInstance = remoteRepo.get();
            try {
                remoteRepoInstance.open();
            } catch (RepositoryConnectionException e) {
                // rethrows the checked exception wrapped as an unchecked one
                Throwables.propagate(e);
            }
            try {
                // NOTE(review): refCount is incremented but never read — candidate for removal
                int refCount = 0;
                for (ChangedRef ref : needUpdate) {
                    if (ref.getType() != ChangeTypes.REMOVED_REF) {
                        refCount++;

                        Optional<Integer> newFetchLimit = depth;
                        // If we haven't specified a depth, but this is a shallow repository, set
                        // the fetch limit to the current repository depth for newly added refs.
                        if (!newFetchLimit.isPresent() && repoDepth.isPresent()
                                && ref.getType() == ChangeTypes.ADDED_REF) {
                            newFetchLimit = repoDepth;
                        }
                        // Fetch updated data from this ref
                        Ref newRef = ref.getNewRef();
                        remoteRepoInstance.fetchNewData(newRef, newFetchLimit, progressListener);

                        if (repoDepth.isPresent() && !fullDepth) {
                            // Update the repository depth if it is deeper than before.
                            int newDepth;
                            try {
                                newDepth = repository().graphDatabase().getDepth(
                                        newRef.getObjectId());
                            } catch (IllegalStateException e) {
                                // surface which ref the depth lookup failed on
                                throw new RuntimeException(ref.toString(), e);
                            }

                            if (newDepth > repoDepth.get()) {
                                command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET)
                                        .setScope(ConfigScope.LOCAL)
                                        .setName(Repository.DEPTH_CONFIG_KEY)
                                        .setValue(Integer.toString(newDepth)).call();
                                repoDepth = Optional.of(newDepth);
                            }
                        }

                        // Update the ref
                        Ref updatedRef = updateLocalRef(newRef, remote, localRemoteRefs);
                        ref.setNewRef(updatedRef);
                    }
                }

                if (needUpdate.size() > 0) {
                    result.addAll(remote.getFetchURL(), needUpdate);
                }

                // Update HEAD ref
                if (!remote.getMapped()) {
                    Ref remoteHead = remoteRepoInstance.headRef();
                    if (remoteHead != null) {
                        updateLocalRef(remoteHead, remote, localRemoteRefs);
                    }
                }
            } finally {
                // always release the connection, even if fetching a ref failed
                remoteRepoInstance.close();
            }
        }

        if (fullDepth) {
            // The full history was fetched, this is no longer a shallow clone
            command(ConfigOp.class).setAction(ConfigAction.CONFIG_UNSET)
                    .setScope(ConfigScope.LOCAL).setName(Repository.DEPTH_CONFIG_KEY).call();
        }

        progressListener.complete();

        return result;
    }

    /**
     * @param remote the remote to get
     * @return an interface for the remote repository
     */
    public Optional<IRemoteRepo> getRemoteRepo(Remote remote) {
        return RemoteUtils.newRemote(repository(), remote, Hints.readOnly());
    }

    /**
     * Writes (or rewrites) the local tracking ref for {@code remoteRef} under
     * {@code refs/remotes/<remote>/...} (tags keep their {@code refs/tags/...} name).
     *
     * @param remoteRef the ref as reported by the remote
     * @param remote the remote being fetched from
     * @param localRemoteRefs locally known refs of this remote (currently unused here)
     * @return the ref that was written locally; for a mapped remote whose commit is absent
     *         locally this may point at the mapped commit instead of the original one
     */
    private Ref updateLocalRef(Ref remoteRef, Remote remote, ImmutableSet<Ref> localRemoteRefs) {
        final String refName;
        if (remoteRef.getName().startsWith(Ref.TAGS_PREFIX)) {
            refName = remoteRef.getName();
        } else {
            refName = Ref.REMOTES_PREFIX + remote.getName() + "/" + remoteRef.localName();
        }
        Ref updatedRef = remoteRef;
        if (remoteRef instanceof SymRef) {
            // symbolic refs (e.g. HEAD) are re-targeted to the local remote-tracking branch
            String targetBranch = Ref.localName(((SymRef) remoteRef).getTarget());
            String newTarget = Ref.REMOTES_PREFIX + remote.getName() + "/" + targetBranch;
            command(UpdateSymRef.class).setName(refName).setNewValue(newTarget).call();
        } else {
            ObjectId effectiveId = remoteRef.getObjectId();
            if (remote.getMapped() && !repository().commitExists(remoteRef.getObjectId())) {
                // mapped remotes may reference commits we only know through a mapping
                effectiveId = graphDatabase().getMapping(effectiveId);
                updatedRef = new Ref(remoteRef.getName(), effectiveId);
            }
            command(UpdateRef.class).setName(refName).setNewValue(effectiveId).call();
        }
        return updatedRef;
    }

    /**
     * Filters the remote references for the given remote that are not present or outdated in the
     * local repository.
     *
     * @param remote the remote being fetched from
     * @param remoteRefs refs as reported by the remote
     * @param localRemoteRefs locally known refs of this remote
     * @param depth requested fetch depth, used to detect refs whose history must be deepened
     * @return the refs that need to be added, changed, or deepened locally
     */
    private List<ChangedRef> findOutdatedRefs(Remote remote, ImmutableSet<Ref> remoteRefs,
            ImmutableSet<Ref> localRemoteRefs, Optional<Integer> depth) {

        List<ChangedRef> changedRefs = Lists.newLinkedList();

        for (Ref remoteRef : remoteRefs) {// refs/heads/xxx or refs/tags/yyy, though we don't handle
                                          // tags yet
            if (remote.getMapped()
                    && !remoteRef.localName().equals(Ref.localName(remote.getMappedBranch()))) {
                // for a mapped remote, we are only interested in the branch we are mapped to
                continue;
            }
            Optional<Ref> local = findLocal(remoteRef, localRemoteRefs);
            if (local.isPresent()) {
                if (!local.get().getObjectId().equals(remoteRef.getObjectId())) {
                    // tip moved on the remote
                    ChangedRef changedRef = new ChangedRef(local.get(), remoteRef,
                            ChangeTypes.CHANGED_REF);
                    changedRefs.add(changedRef);
                } else if (depth.isPresent()) {
                    // same tip, but a deeper fetch was requested than what we have
                    int commitDepth = graphDatabase().getDepth(local.get().getObjectId());
                    if (depth.get() > commitDepth) {
                        ChangedRef changedRef = new ChangedRef(local.get(), remoteRef,
                                ChangeTypes.DEEPENED_REF);
                        changedRefs.add(changedRef);
                    }
                }
            } else {
                // ref exists on the remote but not locally
                ChangedRef changedRef = new ChangedRef(null, remoteRef, ChangeTypes.ADDED_REF);
                changedRefs.add(changedRef);
            }
        }
        return changedRefs;
    }

    /**
     * Finds the corresponding local reference in {@code localRemoteRefs} for the given remote ref
     *
     * @param remoteRef a ref in the {@code refs/heads} or {@code refs/tags} namespace as given by
     *        {@link LsRemote} when querying a remote repository
     * @param localRemoteRefs the list of locally known references of the given remote in the
     *        {@code refs/remotes/<remote name>/} namespace
     * @return the matching local ref, or absent if none is known; tags are resolved by full name
     *         through {@link RefParse}, branches are matched by local (short) name
     */
    private Optional<Ref> findLocal(Ref remoteRef, ImmutableSet<Ref> localRemoteRefs) {
        if (remoteRef.getName().startsWith(Ref.TAGS_PREFIX)) {
            return command(RefParse.class).setName(remoteRef.getName()).call();
        } else {
            for (Ref localRef : localRemoteRefs) {
                if (localRef.localName().equals(remoteRef.localName())) {
                    return Optional.of(localRef);
                }
            }
            return Optional.absent();
        }
    }
}
/*
 * Copyright (c) 2011-2014, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.alg.segmentation.watershed;

import boofcv.alg.InputSanityCheck;
import boofcv.alg.misc.ImageMiscOps;
import boofcv.struct.image.ImageSInt32;
import boofcv.struct.image.ImageUInt8;
import org.ddogleg.struct.CircularQueue_I32;
import org.ddogleg.struct.GrowQueue_I32;

/**
 * <p>
 * Fast watershed based upon Vincient and Soille's 1991 paper [1]. Watershed segments an image using the idea
 * of immersion simulation. For example, the image is treated as a topological map and if you let a droplet
 * of water flow down from each pixel the location the droplets cluster in defines a region. Two different
 * methods are provided for processing the image, a new region is created at each local minima or the user
 * provides an initial seed for each region for it to grow from. The output will be a segmented image
 * with watersheds being assign a value of 0 and each region a value &gt; 0. Watersheds are assigned to pixels
 * which are exactly the same distance from multiple regions, thus it is ambiguous which one it is a member of.
 * </p>
 *
 * <p>
 * If the image is processed with {@link #process(boofcv.struct.image.ImageUInt8)} then a new region is
 * created at each local minima and assigned a unique ID &gt; 0. The total number of regions found is returned
 * by {@link #getTotalRegions()}. This technique will lead to over segmentation on many images.
 * </p>
 *
 * <p>
 * Initial seeds are provided with a call to {@link #process(boofcv.struct.image.ImageUInt8, boofcv.struct.image.ImageSInt32)}.
 * No new regions will be created. By providing an initial set of seeds over segmentation can be avoided, but
 * prior knowledge of the image is typically needed to create the seeds.
 * </p>
 *
 * <p>
 * NOTES:<br>
 * <ul>
 * <li>For faster processing, the internal labeled image has a 1 pixel border around it. If you call
 * {@link #getOutput()} this border is removed automatically by creating a sub-image.</li>
 * <li>Connectivity is handled by child sub-classes. An index of neighbors could have been used, but the
 * additional additional array access/loop slows things down a little bit.</li>
 * <li>Watersheds are included. To remove them using {@link RemoveWatersheds}</li>
 * <li>Pixel values are assumed to range from 0 to 255, inclusive.</li>
 * </ul>
 * </p>
 *
 * <p>
 * [1] Vincent, Luc, and Pierre Soille. "Watersheds in digital spaces: an efficient algorithm based on
 * immersion simulations." IEEE transactions on pattern analysis and machine intelligence 13.6 (1991): 583-598.
 * </p>
 *
 * @author Peter Abeles
 */
public abstract class WatershedVincentSoille1991 {

    // values of pixels belonging to the watershed
    public static final int WSHED = 0;
    // initial value of the labeled output image
    public static final int INIT = -1;
    // Initial value of a threshold level
    public static final int MASK = -2;

    // index of the marker pixel. Fictitious
    public static final int MARKER_PIXEL = -1;

    // histogram for sorting the image. 8-bits so 256 possible values
    // each element refers to a pixel in the input image
    protected GrowQueue_I32 histogram[] = new GrowQueue_I32[256];

    // Output image. This is im_o in the paper.
    // The output image has a 1-pixel wide border which means that bound checks don't need
    // to happen when examining a pixel's neighbor.
    protected ImageSInt32 output = new ImageSInt32(1,1);
    // storage for sub-image output
    protected ImageSInt32 outputSub = new ImageSInt32();

    // work image of distances. im_d in the paper
    // also has a 1 pixel border
    protected ImageSInt32 distance = new ImageSInt32(1,1);
    // current flooding distance within the level being processed (d in the paper)
    protected int currentDistance;

    // label of the region being marked
    protected int currentLabel;

    // FIFO circular queue
    protected CircularQueue_I32 fifo = new CircularQueue_I32();

    // used to remove watersheds
    protected RemoveWatersheds removeWatersheds = new RemoveWatersheds();
    // set true once removeWatersheds() has been invoked; affects getTotalRegions()
    boolean removedWatersheds;

    /**
     * Allocates the per-intensity-level pixel lists (one list per possible 8-bit value).
     */
    public WatershedVincentSoille1991() {
        for( int i = 0; i < histogram.length; i++ ) {
            histogram[i] = new GrowQueue_I32();
        }
    }

    /**
     * Perform watershed segmentation on the provided input image. New basins are created at each local minima.
     *
     * @param input Input gray-scale image.
     */
    public void process( ImageUInt8 input ) {

        // input = im_0

        removedWatersheds = false;

        // +2 in each dimension for the 1-pixel border that avoids bounds checks
        output.reshape(input.width+2,input.height+2);
        distance.reshape(input.width+2,input.height+2);

        ImageMiscOps.fill(output, INIT);
        ImageMiscOps.fill(distance, 0);
        fifo.reset();

        // sort pixels
        sortPixels(input);

        currentLabel = 0;

        // flood one intensity level at a time, from darkest to brightest
        for( int i = 0; i < histogram.length; i++ ) {
            GrowQueue_I32 level = histogram[i];
            if( level.size == 0 )
                continue;

            // Go through each pixel at this level and mark them according to their neighbors
            for( int j = 0; j < level.size; j++ ) {
                int index = level.data[j];
                output.data[index] = MASK;

                // see if its neighbors has been labeled, if so set its distance and add to queue
                assignNewToNeighbors(index);
            }

            currentDistance = 1;
            fifo.add(MARKER_PIXEL);

            while( true ) {
                int p = fifo.popHead();

                // end of a cycle. Exit the loop if it is done or increase the distance and continue processing
                if( p == MARKER_PIXEL) {
                    if( fifo.isEmpty() )
                        break;
                    else {
                        fifo.add(MARKER_PIXEL);
                        currentDistance++;
                        p = fifo.popHead();
                    }
                }

                // look at its neighbors and see if they have been labeled or belong to a watershed
                // and update its distance
                checkNeighborsAssign(p);
            }

            // see if new minima have been discovered
            for( int j = 0; j < level.size; j++ ) {
                int index = level.get(j);
                // distance associated with p is reset to 0
                distance.data[index] = 0;

                // any pixel still MASK at this point belongs to a new basin (local minima)
                if( output.data[index] == MASK ) {
                    currentLabel++;
                    fifo.add(index);
                    output.data[index] = currentLabel;

                    // grow the new region into the surrounding connected pixels
                    while( !fifo.isEmpty() ) {
                        checkNeighborsMasks(fifo.popHead());
                    }
                }
            }
        }
    }

    /**
     * <p>
     * Segments the image using initial seeds for each region. This is often done to avoid
     * over segmentation but requires additional preprocessing and/or knowledge on the image structure. Initial
     * seeds are specified in the input image 'seeds'. A seed is any pixel with a value &gt; 0. New new regions
     * will be created beyond those seeds. The final segmented image is provided by {@link #getOutput()}.
     * </p>
     *
     * <p>
     * NOTE: If seeds are used then {@link #getTotalRegions()} will not return a correct solution.
     * </p>
     *
     * @param input (Input) Input image
     * @param seeds (Output) Segmented image containing seeds. Note that all seeds should have a value &gt; 0 and have a
     *              value &lt;= numRegions.
     */
    public void process( ImageUInt8 input , ImageSInt32 seeds ) {
        InputSanityCheck.checkSameShape(input,seeds);

        removedWatersheds = false;

        output.reshape(input.width+2,input.height+2);
        distance.reshape(input.width+2,input.height+2);

        ImageMiscOps.fill(output, INIT);
        ImageMiscOps.fill(distance, 0);
        fifo.reset();

        // copy the seeds into the output image (offset by 1 to account for the border)
        for( int y = 0; y < seeds.height; y++ ) {
            int indexSeeds = seeds.startIndex + y*seeds.stride;
            int indexOut = (y+1)*output.stride + 1;
            for( int x = 0; x < seeds.width; x++ , indexSeeds++, indexOut++ ) {
                int v = seeds.data[indexSeeds];
                if( v > 0 ) {
                    output.data[indexOut] = v;
                }
            }
        }

        // sort pixels
        sortPixels(input);

        // perform watershed
        for( int i = 0; i < histogram.length; i++ ) {
            GrowQueue_I32 level = histogram[i];
            if( level.size == 0 )
                continue;

            // Go through each pixel at this level and mark them according to their neighbors
            for( int j = 0; j < level.size; j++ ) {
                int index = level.data[j];

                // If not has not already been labeled by a seed then try assigning it values
                // from its neighbors
                if( output.data[index] == INIT ) {
                    output.data[index] = MASK;

                    assignNewToNeighbors(index);
                }
            }

            currentDistance = 1;
            fifo.add(MARKER_PIXEL);

            while( true ) {
                int p = fifo.popHead();

                // end of a cycle. Exit the loop if it is done or increase the distance and continue processing
                if( p == MARKER_PIXEL) {
                    if( fifo.isEmpty() )
                        break;
                    else {
                        fifo.add(MARKER_PIXEL);
                        currentDistance++;
                        p = fifo.popHead();
                    }
                }

                // look at its neighbors and see if they have been labeled or belong to a watershed
                // and update its distance
                checkNeighborsAssign(p);
            }

            // Ensure that all pixels have a distance of zero
            // Could probably do this a bit more intelligently...
            ImageMiscOps.fill(distance, 0);
        }
    }

    /**
     * See if a neighbor has a label ( &gt; 0 ) or has been assigned WSHED ( == 0 ). If so
     * set distance of pixel index to 1 and add it to fifo.
     *
     * @param index Pixel whose neighbors are being examined
     */
    protected abstract void assignNewToNeighbors(int index);

    /**
     * Check the neighbors to see if it should become a member or a watershed
     * @param index Index of the target pixel
     */
    protected abstract void checkNeighborsAssign(int index);

    /**
     * Core of the flooding step: decides whether the target pixel joins the neighbor's
     * region, becomes a watershed, or whether the neighbor (still MASK) gets queued
     * for processing at the next distance.
     *
     * @param indexTarget pixel currently being flooded
     * @param indexNeighbor one of its connected neighbors
     */
    protected void handleNeighborAssign(int indexTarget, int indexNeighbor) {
        int regionNeighbor = output.data[indexNeighbor];
        int distanceNeighbor = distance.data[indexNeighbor];

        // if neighbor has been assigned a region or is WSHED
        if( regionNeighbor >= 0 && distanceNeighbor < currentDistance ) {
            int regionTarget = output.data[indexTarget];

            // see if the target belongs to an already labeled basin or watershed
            if( regionNeighbor > 0 ) {
                if( regionTarget < 0 ) {// if is MASK
                    output.data[indexTarget] = regionNeighbor;
                } else if( regionTarget == 0 ) {
                    // if it is a watershed only assign to the neighbor value if it would be closer
                    // this is a deviation from what's in the paper. There might be a type-o there or I miss read it
                    if( distanceNeighbor+1 < currentDistance ) {
                        output.data[indexTarget] = regionNeighbor;
                    }
                } else if( regionTarget != regionNeighbor ) {
                    // target is reachable from two different regions -> it is a watershed pixel
                    output.data[indexTarget] = WSHED;
                }
            } else if( regionTarget == MASK ) {
                output.data[indexTarget] = WSHED;
            }
        } else if( regionNeighbor == MASK && distanceNeighbor == 0) {
            // unvisited MASK neighbor: schedule it for the next flooding distance
            distance.data[indexNeighbor] = currentDistance + 1;
            fifo.add(indexNeighbor);
        }
    }

    /**
     * Checks neighbors of pixel 'index' to see if their region is MASK, if so they are assigned the
     * currentLabel and added to fifo.
     *
     * @param index Pixel whose neighbors are being examined.
     */
    protected abstract void checkNeighborsMasks(int index);

    /**
     * If the pixel is still MASK, claim it for the region currently being grown and queue it.
     *
     * @param index Pixel being examined.
     */
    protected void checkMask(int index) {
        if( output.data[index] == MASK ) {
            output.data[index] = currentLabel;
            fifo.add(index);
        }
    }

    /**
     * Very fast histogram based sorting. Index of each pixel is placed inside a list for its intensity level.
     *
     * @param input Input gray-scale image; values are treated as unsigned 8-bit (0 to 255).
     */
    protected void sortPixels(ImageUInt8 input) {
        // initialize histogram
        for( int i = 0; i < histogram.length; i++ ) {
            histogram[i].reset();
        }
        // sort by creating a histogram
        for( int y = 0; y < input.height; y++ ) {
            int index = input.startIndex + y*input.stride;
            // indices stored are into the bordered output image, hence the +1 offsets
            int indexOut = (y+1)*output.stride + 1;
            for (int x = 0; x < input.width; x++ , index++ , indexOut++) {
                int value = input.data[index] & 0xFF;
                histogram[value].add(indexOut);
            }
        }
    }

    /**
     * Segmented output image with watersheds. This is a sub-image of {@link #getOutputBorder()} to remove
     * the outside border of -1 valued pixels.
     */
    public ImageSInt32 getOutput() {
        output.subimage(1,1,output.width-1,output.height-1,outputSub);
        return outputSub;
    }

    /**
     * The entire segmented image used internally. This contains a 1-pixel border around the entire
     * image filled with pixels of value -1.
     */
    public ImageSInt32 getOutputBorder() {
        return output;
    }

    /**
     * Removes watershed pixels from the output image by merging them into an arbitrary neighbor.
     */
    public void removeWatersheds() {
        removedWatersheds = true;
        removeWatersheds.remove(output);
    }

    /**
     * Returns the total number of regions labeled. If watersheds have not
     * been removed then this will including the watershed.
     *
     * <p>THIS IS NOT VALID IF SEEDS ARE USED!!!</p>
     *
     * @return number of regions.
     */
    public int getTotalRegions() {
        // +1 accounts for the watershed "region" (label 0) when it has not been removed
        return removedWatersheds ? currentLabel : currentLabel + 1;
    }

    /**
     * Implementation which uses a 4-connect rule
     */
    public static class Connect4 extends WatershedVincentSoille1991 {

        @Override
        protected void assignNewToNeighbors(int index) {
            if( output.data[index+1] >= 0 ) {             // (x+1,y)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index-1] >= 0 ) {      // (x-1,y)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index+output.stride] >= 0 ) { // (x,y+1)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index-output.stride] >= 0 ) { // (x,y-1)
                distance.data[index] = 1;
                fifo.add(index);
            }
        }

        @Override
        protected void checkNeighborsAssign(int index) {
            handleNeighborAssign(index, index + 1);
            handleNeighborAssign(index, index - 1);
            handleNeighborAssign(index, index + output.stride);
            handleNeighborAssign(index, index - output.stride);
        }

        @Override
        protected void checkNeighborsMasks(int index) {
            checkMask(index + 1);
            checkMask(index - 1);
            checkMask(index + output.stride);
            checkMask(index - output.stride);
        }
    }

    /**
     * Implementation which uses a 8-connect rule
     */
    public static class Connect8 extends WatershedVincentSoille1991 {

        @Override
        protected void assignNewToNeighbors(int index) {
            if( output.data[index+1] >= 0 ) {             // (x+1,y)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index-1] >= 0 ) {      // (x-1,y)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index+output.stride] >= 0 ) { // (x,y+1)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index-output.stride] >= 0 ) { // (x,y-1)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index+1+output.stride] >= 0 ) { // (x+1,y+1)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index-1+output.stride] >= 0 ) { // (x-1,y+1)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index+1-output.stride] >= 0 ) { // (x+1,y-1)
                distance.data[index] = 1;
                fifo.add(index);
            } else if( output.data[index-1-output.stride] >= 0 ) { // (x-1,y-1)
                distance.data[index] = 1;
                fifo.add(index);
            }
        }

        @Override
        protected void checkNeighborsAssign(int index) {
            handleNeighborAssign(index, index + 1);
            handleNeighborAssign(index, index - 1);
            handleNeighborAssign(index, index + output.stride);
            handleNeighborAssign(index, index - output.stride);

            handleNeighborAssign(index, index + 1 + output.stride);
            handleNeighborAssign(index, index - 1 + output.stride);
            handleNeighborAssign(index, index + 1 - output.stride);
            handleNeighborAssign(index, index - 1 - output.stride);
        }

        @Override
        protected void checkNeighborsMasks(int index) {
            checkMask(index + 1);
            checkMask(index - 1);
            checkMask(index + output.stride);
            checkMask(index - output.stride);

            checkMask(index + 1 + output.stride);
            checkMask(index - 1 + output.stride);
            checkMask(index + 1 - output.stride);
            checkMask(index - 1 - output.stride);
        }
    }
}
/*
 * (c) Copyright 2019 EntIT Software LLC, a Micro Focus company, L.P.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License v2.0 which accompany this distribution.
 *
 * The Apache License is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.cloudslang.content.excel.services;

import io.cloudslang.content.excel.entities.GetCellInputs;
import org.apache.poi.ss.usermodel.*;
import org.jetbrains.annotations.NotNull;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
import java.util.Map;

import static io.cloudslang.content.excel.services.ExcelServiceImpl.*;
import static io.cloudslang.content.excel.utils.Constants.YES;
import static io.cloudslang.content.excel.utils.Outputs.GetCellOutputs.COLUMNS_COUNT;
import static io.cloudslang.content.excel.utils.Outputs.GetCellOutputs.HEADER;
import static io.cloudslang.content.excel.utils.Outputs.GetRowIndexByCondition.ROWS_COUNT;
import static io.cloudslang.content.utils.OutputUtilities.getFailureResultsMap;
import static io.cloudslang.content.utils.OutputUtilities.getSuccessResultsMap;
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.apache.commons.lang3.StringUtils.defaultIfEmpty;

/**
 * Reads a rectangular selection of cells from an Excel worksheet and returns the data
 * (plus optional header row, row count and column count) as delimited strings.
 */
public class GetCellService {

    /**
     * Reads the requested cells from the worksheet named in {@code getCellInputs}.
     *
     * @param getCellInputs holds the file name, worksheet name, row/column index selections,
     *                      delimiters, header flag and rounding flag
     * @return a success result map containing the delimited cell data (plus HEADER,
     *         ROWS_COUNT and COLUMNS_COUNT entries), or a failure result map with the
     *         exception message if anything goes wrong
     */
    @NotNull
    public static Map<String, String> getCell(@NotNull final GetCellInputs getCellInputs) {
        try {
            final Workbook excelDoc = getExcelDoc(getCellInputs.getCommonInputs().getExcelFileName());
            final Sheet worksheet = getWorksheet(excelDoc, getCellInputs.getCommonInputs().getWorksheetName());

            int firstRowIndex = Integer.parseInt(getCellInputs.getFirstRowIndex());
            final int lastRowIndex = worksheet.getLastRowNum();
            final int firstColumnIndex = 0;
            final int lastColumnIndex = getLastColumnIndex(worksheet, firstRowIndex, lastRowIndex);
            final String rowDelimiter = getCellInputs.getRowDelimiter();
            final String columnDelimiter = getCellInputs.getColumnDelimiter();
            final String hasHeader = getCellInputs.getHasHeader();
            final String enablingRoundingFunction = getCellInputs.getEnablingRoundingFunction();

            // when a header row is present, the data starts one row below it
            if (hasHeader.equals(YES))
                firstRowIndex++;

            // when the caller gives no explicit selection, default to the full data range
            final String rowIndexDefault = firstRowIndex + ":" + lastRowIndex;
            final String columnIndexDefault = firstColumnIndex + ":" + lastColumnIndex;
            final String rowIndex = defaultIfEmpty(getCellInputs.getRowIndex(), rowIndexDefault);
            final String columnIndex = defaultIfEmpty(getCellInputs.getColumnIndex(), columnIndexDefault);

            final List<Integer> rowIndexList = validateIndex(processIndex(rowIndex), firstRowIndex, lastRowIndex, true);
            final List<Integer> columnIndexList = validateIndex(processIndex(columnIndex), firstColumnIndex, lastColumnIndex, false);

            final String resultString = getCellFromWorksheet(excelDoc, worksheet, columnIndexList, rowIndexList,
                    rowDelimiter, columnDelimiter, enablingRoundingFunction);
            final Map<String, String> results = getSuccessResultsMap(resultString);

            if (hasHeader.equals(YES)) {
                final String headerString = getHeader(worksheet, firstRowIndex, columnIndexList, columnDelimiter);
                results.put(HEADER, headerString);
            }
            results.put(ROWS_COUNT, String.valueOf(rowIndexList.size()));
            results.put(COLUMNS_COUNT, String.valueOf(columnIndexList.size()));

            return results;
        } catch (Exception e) {
            return getFailureResultsMap(e.getMessage());
        }
    }

    /**
     * Collects the selected cells into a single string, columns joined by
     * {@code columnDelimiter} and rows joined by {@code rowDelimiter}.
     *
     * @param excelDoc                 the workbook (needed to create the formula evaluator)
     * @param worksheet                the worksheet to read from
     * @param columnIndex              zero-based column indexes to read
     * @param rowIndex                 zero-based row indexes to read
     * @param rowDelimiter             separator between rows in the result
     * @param columnDelimiter          separator between cells within a row
     * @param enablingRoundingFunction "true" (case-insensitive) to round numeric cells to 2 decimals
     * @return the delimited cell data
     */
    private static String getCellFromWorksheet(final Workbook excelDoc,
                                               final Sheet worksheet,
                                               final List<Integer> columnIndex,
                                               final List<Integer> rowIndex,
                                               final String rowDelimiter,
                                               final String columnDelimiter,
                                               final String enablingRoundingFunction) {
        StringBuilder result = new StringBuilder();
        final DataFormatter formatter = new DataFormatter();
        // FIX: the evaluator is loop-invariant; the original created one per cell, which is
        // needlessly expensive on large selections
        final FormulaEvaluator evaluator = excelDoc.getCreationHelper().createFormulaEvaluator();
        // FIX: equalsIgnoreCase instead of toLowerCase().equals("true") — the latter is
        // locale-sensitive (e.g. breaks under the Turkish default locale)
        final boolean roundingEnabled = enablingRoundingFunction.equalsIgnoreCase("true");

        for (int rIndex : rowIndex) {
            Row row = worksheet.getRow(rIndex);
            if (row == null) {
                // materialize missing rows so every requested index yields a (blank) value
                row = worksheet.createRow(rIndex);
            }
            // FIX: removed the dead 'if (row != null)' guard — Sheet.createRow never returns null
            for (int cIndex : columnIndex) {
                Cell cell = row.getCell(cIndex);
                if (cell == null) {
                    cell = row.createCell(cIndex);
                }
                // FIX: removed the dead 'if (cell != null)' guard — Row.createCell never returns null
                String cellString = formatter.formatCellValue(cell);
                if (cellString.indexOf("?/?") > 1 && cell.getCellType() == CellType.NUMERIC) {
                    // fraction-formatted numeric cell: emit the raw numeric value
                    result.append(cell.getNumericCellValue());
                } else if (cell.getCellType() == CellType.FORMULA) {
                    // formula cell: evaluate and emit the computed value
                    CellValue cellValue = evaluator.evaluate(cell);
                    switch (cellValue.getCellType()) {
                        case BOOLEAN:
                            result.append(cellValue.getBooleanValue());
                            break;
                        case NUMERIC:
                            result.append(cellValue.getNumberValue());
                            break;
                        case STRING:
                            result.append(cellValue.getStringValue());
                            break;
                        case BLANK:
                            break;
                        case ERROR:
                            break;
                        // CellType.FORMULA will never happen: evaluate() always resolves to a value
                        case FORMULA:
                            break;
                    }
                } else {
                    if (roundingEnabled) {
                        // Fix for QCIM1D248808 and Fix for QCIM1293510
                        if (!cell.toString().isEmpty() && isNumericCell(cell) && !DateUtil.isCellDateFormatted(cell)) {
                            double aCellValue = cell.getNumericCellValue();
                            cellString = round(Double.toString(aCellValue));
                        }
                    }
                    result.append(cellString);
                }
                result.append(columnDelimiter);
            }
            // drop the trailing column delimiter of this row
            int colEnd = result.lastIndexOf(columnDelimiter);
            if (colEnd > -1)
                result.setLength(colEnd);
            result.append(rowDelimiter);
        }
        // drop the trailing row delimiter
        int rowEnd = result.lastIndexOf(rowDelimiter);
        if (rowEnd > -1)
            result.setLength(rowEnd);
        return result.toString();
    }

    /**
     * retrieves data from header row
     *
     * @param worksheet     an Excel worksheet
     * @param firstRowIndex index of the first data row; the header sits one row above it
     * @param columnIndex   a list of column indexes
     * @param colDelimiter  a column delimiter
     * @return a string of delimited header data, or the empty string if there is no header row
     */
    private static String getHeader(final Sheet worksheet,
                                    final int firstRowIndex,
                                    final List<Integer> columnIndex,
                                    final String colDelimiter) {
        StringBuilder result = new StringBuilder();
        final int headerIndex = firstRowIndex - 1;
        final Row headerRow = worksheet.getRow(headerIndex);
        if (headerRow == null)
            return EMPTY;
        for (int cIndex : columnIndex) {
            final Cell cell = headerRow.getCell(cIndex);
            if (cell != null) {
                // FIX: reuse the already-fetched cell instead of calling getCell(cIndex) twice
                result.append(cell.toString());
            }
            result.append(colDelimiter);
        }
        // drop the trailing column delimiter
        final int index = result.lastIndexOf(colDelimiter);
        if (index > -1)
            result.setLength(index);
        return result.toString();
    }

    /**
     * @return true if the cell holds a numeric value; POI throws IllegalStateException
     *         when asking a non-numeric cell for its numeric value
     */
    private static boolean isNumericCell(final Cell cell) {
        try {
            cell.getNumericCellValue();
            return true;
        } catch (IllegalStateException e) {
            return false;
        }
    }

    /**
     * Rounds a decimal string to 2 fractional digits (HALF_UP) and strips trailing zeros.
     *
     * @param value a decimal number in string form (e.g. "12.345")
     * @return the rounded value in plain decimal notation
     */
    private static String round(final String value) {
        BigDecimal bd = new BigDecimal(value);
        bd = bd.setScale(2, RoundingMode.HALF_UP).stripTrailingZeros();
        // FIX: toPlainString() — stripTrailingZeros() can otherwise produce scientific
        // notation (e.g. 100.00 -> "1E+2" via toString())
        return bd.toPlainString();
    }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery;

import static com.google.common.base.Preconditions.checkArgument;

import com.google.cloud.FieldSelector;
import com.google.cloud.FieldSelector.Helper;
import com.google.cloud.Page;
import com.google.cloud.Service;
import com.google.cloud.bigquery.spi.BigQueryRpc;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;

import java.util.List;

/**
 * An interface for Google Cloud BigQuery.
 *
 * @see <a href="https://cloud.google.com/bigquery/what-is-bigquery">Google Cloud BigQuery</a>
 */
public interface BigQuery extends Service<BigQueryOptions> {

  /**
   * Fields of a BigQuery Dataset resource.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets#resource">Dataset
   *     Resource</a>
   */
  enum DatasetField implements FieldSelector {
    ACCESS("access"),
    CREATION_TIME("creationTime"),
    DATASET_REFERENCE("datasetReference"),
    // Fixed selector string: was "defaultTableExpirationMsS", which does not
    // match the REST field name, so the field was silently omitted from
    // partial responses requested via DatasetOption.fields(...).
    DEFAULT_TABLE_EXPIRATION_MS("defaultTableExpirationMs"),
    DESCRIPTION("description"),
    ETAG("etag"),
    FRIENDLY_NAME("friendlyName"),
    ID("id"),
    LAST_MODIFIED_TIME("lastModifiedTime"),
    LOCATION("location"),
    SELF_LINK("selfLink");

    static final List<?
extends FieldSelector> REQUIRED_FIELDS = ImmutableList.of(DATASET_REFERENCE);

    // REST field name used in the "fields" URL parameter of partial requests.
    private final String selector;

    DatasetField(String selector) {
      this.selector = selector;
    }

    @Override
    @Deprecated
    public String selector() {
      return getSelector();
    }

    @Override
    public String getSelector() {
      return selector;
    }
  }

  /**
   * Fields of a BigQuery Table resource.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables#resource">Table
   *     Resource</a>
   */
  enum TableField implements FieldSelector {
    CREATION_TIME("creationTime"),
    DESCRIPTION("description"),
    ETAG("etag"),
    EXPIRATION_TIME("expirationTime"),
    EXTERNAL_DATA_CONFIGURATION("externalDataConfiguration"),
    FRIENDLY_NAME("friendlyName"),
    ID("id"),
    LAST_MODIFIED_TIME("lastModifiedTime"),
    LOCATION("location"),
    NUM_BYTES("numBytes"),
    NUM_ROWS("numRows"),
    SCHEMA("schema"),
    SELF_LINK("selfLink"),
    STREAMING_BUFFER("streamingBuffer"),
    TABLE_REFERENCE("tableReference"),
    TIME_PARTITIONING("timePartitioning"),
    TYPE("type"),
    VIEW("view");

    // Fields the service must always return so Table objects can be built.
    static final List<? extends FieldSelector> REQUIRED_FIELDS =
        ImmutableList.of(TABLE_REFERENCE, TYPE);

    // REST field name used in the "fields" URL parameter of partial requests.
    private final String selector;

    TableField(String selector) {
      this.selector = selector;
    }

    @Override
    @Deprecated
    public String selector() {
      return getSelector();
    }

    @Override
    public String getSelector() {
      return selector;
    }
  }

  /**
   * Fields of a BigQuery Job resource.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#resource">Job Resource
   *     </a>
   */
  enum JobField implements FieldSelector {
    CONFIGURATION("configuration"),
    ETAG("etag"),
    ID("id"),
    JOB_REFERENCE("jobReference"),
    SELF_LINK("selfLink"),
    STATISTICS("statistics"),
    STATUS("status"),
    USER_EMAIL("user_email");

    static final List<?
extends FieldSelector> REQUIRED_FIELDS = ImmutableList.of(JOB_REFERENCE, CONFIGURATION); private final String selector; JobField(String selector) { this.selector = selector; } @Override @Deprecated public String selector() { return getSelector(); } @Override public String getSelector() { return selector; } } /** * Class for specifying dataset list options. */ class DatasetListOption extends Option { private static final long serialVersionUID = 8660294969063340498L; private DatasetListOption(BigQueryRpc.Option option, Object value) { super(option, value); } /** * Returns an option to specify the maximum number of datasets returned per page. */ public static DatasetListOption pageSize(long pageSize) { return new DatasetListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing datasets. */ public static DatasetListOption pageToken(String pageToken) { return new DatasetListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } /** * Returns an options to list all datasets, even hidden ones. */ public static DatasetListOption all() { return new DatasetListOption(BigQueryRpc.Option.ALL_DATASETS, true); } } /** * Class for specifying dataset get, create and update options. */ class DatasetOption extends Option { private static final long serialVersionUID = 1674133909259913250L; private DatasetOption(BigQueryRpc.Option option, Object value) { super(option, value); } /** * Returns an option to specify the dataset's fields to be returned by the RPC call. If this * option is not provided all dataset's fields are returned. {@code DatasetOption.fields} can * be used to specify only the fields of interest. {@link Dataset#datasetId()} is always * returned, even if not specified. */ public static DatasetOption fields(DatasetField... fields) { return new DatasetOption(BigQueryRpc.Option.FIELDS, Helper.selector(DatasetField.REQUIRED_FIELDS, fields)); } } /** * Class for specifying dataset delete options. 
*/
  class DatasetDeleteOption extends Option {

    private static final long serialVersionUID = -7166083569900951337L;

    private DatasetDeleteOption(BigQueryRpc.Option option, Object value) {
      super(option, value);
    }

    /**
     * Returns an option to delete a dataset even if non-empty. If not provided, attempting to
     * delete a non-empty dataset will result in a {@link BigQueryException} being thrown.
     */
    public static DatasetDeleteOption deleteContents() {
      return new DatasetDeleteOption(BigQueryRpc.Option.DELETE_CONTENTS, true);
    }
  }

  /**
   * Class for specifying table list options.
   */
  class TableListOption extends Option {

    private static final long serialVersionUID = 8660294969063340498L;

    private TableListOption(BigQueryRpc.Option option, Object value) {
      super(option, value);
    }

    /**
     * Returns an option to specify the maximum number of tables returned per page.
     */
    public static TableListOption pageSize(long pageSize) {
      // Reject negative page sizes before they reach the service.
      checkArgument(pageSize >= 0);
      return new TableListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize);
    }

    /**
     * Returns an option to specify the page token from which to start listing tables.
     */
    public static TableListOption pageToken(String pageToken) {
      return new TableListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
    }
  }

  /**
   * Class for specifying table get, create and update options.
   */
  class TableOption extends Option {

    private static final long serialVersionUID = -1723870134095936772L;

    private TableOption(BigQueryRpc.Option option, Object value) {
      super(option, value);
    }

    /**
     * Returns an option to specify the table's fields to be returned by the RPC call. If this
     * option is not provided all table's fields are returned. {@code TableOption.fields} can be
     * used to specify only the fields of interest. {@link Table#tableId()} and type (which is part
     * of {@link Table#definition()}) are always returned, even if not specified.
     */
    public static TableOption fields(TableField...
fields) { return new TableOption(BigQueryRpc.Option.FIELDS, Helper.selector(TableField.REQUIRED_FIELDS, fields)); } } /** * Class for specifying table data list options. */ class TableDataListOption extends Option { private static final long serialVersionUID = 8488823381738864434L; private TableDataListOption(BigQueryRpc.Option option, Object value) { super(option, value); } /** * Returns an option to specify the maximum number of rows returned per page. */ public static TableDataListOption pageSize(long pageSize) { checkArgument(pageSize >= 0); return new TableDataListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing table data. */ public static TableDataListOption pageToken(String pageToken) { return new TableDataListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } /** * Returns an option that sets the zero-based index of the row from which to start listing table * data. */ public static TableDataListOption startIndex(long index) { checkArgument(index >= 0); return new TableDataListOption(BigQueryRpc.Option.START_INDEX, index); } } /** * Class for specifying job list options. */ class JobListOption extends Option { private static final long serialVersionUID = -8207122131226481423L; private JobListOption(BigQueryRpc.Option option, Object value) { super(option, value); } /** * Returns an option to list all jobs, even the ones issued by other users. */ public static JobListOption allUsers() { return new JobListOption(BigQueryRpc.Option.ALL_USERS, true); } /** * Returns an option to list only jobs that match the provided state filters. */ public static JobListOption stateFilter(JobStatus.State... 
stateFilters) { List<String> stringFilters = Lists.transform(ImmutableList.copyOf(stateFilters), new Function<JobStatus.State, String>() { @Override public String apply(JobStatus.State state) { return state.name().toLowerCase(); } }); return new JobListOption(BigQueryRpc.Option.STATE_FILTER, stringFilters); } /** * Returns an option to specify the maximum number of jobs returned per page. */ public static JobListOption pageSize(long pageSize) { checkArgument(pageSize >= 0); return new JobListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing jobs. */ public static JobListOption pageToken(String pageToken) { return new JobListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } /** * Returns an option to specify the job's fields to be returned by the RPC call. If this option * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to * specify only the fields of interest. {@link Job#jobId()}, {@link JobStatus#state()}, * {@link JobStatus#error()} as well as type-specific configuration (e.g. * {@link QueryJobConfiguration#query()} for Query Jobs) are always returned, even if not * specified. {@link JobField#SELF_LINK} and {@link JobField#ETAG} can not be selected when * listing jobs. */ public static JobListOption fields(JobField... fields) { return new JobListOption(BigQueryRpc.Option.FIELDS, Helper.listSelector("jobs", JobField.REQUIRED_FIELDS, fields, "state", "errorResult")); } } /** * Class for specifying table get and create options. */ class JobOption extends Option { private static final long serialVersionUID = -3111736712316353665L; private JobOption(BigQueryRpc.Option option, Object value) { super(option, value); } /** * Returns an option to specify the job's fields to be returned by the RPC call. If this option * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to * specify only the fields of interest. 
{@link Job#jobId()} as well as type-specific * configuration (e.g. {@link QueryJobConfiguration#query()} for Query Jobs) are always * returned, even if not specified. */ public static JobOption fields(JobField... fields) { return new JobOption(BigQueryRpc.Option.FIELDS, Helper.selector(JobField.REQUIRED_FIELDS, fields)); } } /** * Class for specifying query results options. */ class QueryResultsOption extends Option { private static final long serialVersionUID = 3788898503226985525L; private QueryResultsOption(BigQueryRpc.Option option, Object value) { super(option, value); } /** * Returns an option to specify the maximum number of rows returned per page. */ public static QueryResultsOption pageSize(long pageSize) { checkArgument(pageSize >= 0); return new QueryResultsOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start getting query results. */ public static QueryResultsOption pageToken(String pageToken) { return new QueryResultsOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } /** * Returns an option that sets the zero-based index of the row from which to start getting query * results. */ public static QueryResultsOption startIndex(long startIndex) { checkArgument(startIndex >= 0); return new QueryResultsOption(BigQueryRpc.Option.START_INDEX, startIndex); } /** * Returns an option that sets how long to wait for the query to complete, in milliseconds, * before returning. Default is 10 seconds. If the timeout passes before the job completes, * {@link QueryResponse#jobCompleted()} will be {@code false}. */ public static QueryResultsOption maxWaitTime(long maxWaitTime) { checkArgument(maxWaitTime >= 0); return new QueryResultsOption(BigQueryRpc.Option.TIMEOUT, maxWaitTime); } } /** * Creates a new dataset. * * <p>Example of creating a dataset. 
* <pre> {@code * String datasetName = "my_dataset_name"; * Dataset dataset = null; * DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build(); * try { * // the dataset was created * dataset = bigquery.create(datasetInfo); * } catch (BigQueryException e) { * // the dataset was not created * } * }</pre> * * @throws BigQueryException upon failure */ Dataset create(DatasetInfo datasetInfo, DatasetOption... options); /** * Creates a new table. * * <p>Example of creating a table. * <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * String fieldName = "string_field"; * TableId tableId = TableId.of(datasetName, tableName); * // Table field definition * Field field = Field.of(fieldName, Field.Type.string()); * // Table schema definition * Schema schema = Schema.of(field); * TableDefinition tableDefinition = StandardTableDefinition.of(schema); * TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); * Table table = bigquery.create(tableInfo); * }</pre> * * @throws BigQueryException upon failure */ Table create(TableInfo tableInfo, TableOption... options); /** * Creates a new job. * * <p>Example of creating a query job. * <pre> {@code * String query = "SELECT field FROM my_dataset_name.my_table_name"; * Job job = null; * JobConfiguration jobConfiguration = QueryJobConfiguration.of(query); * JobInfo jobInfo = JobInfo.of(jobConfiguration); * try { * job = bigquery.create(jobInfo); * } catch (BigQueryException e) { * // the job was not created * } * }</pre> * * @throws BigQueryException upon failure */ Job create(JobInfo jobInfo, JobOption... options); /** * Returns the requested dataset or {@code null} if not found. * * <p>Example of getting a dataset. * <pre> {@code * String datasetName = "my_dataset"; * Dataset dataset = bigquery.getDataset(datasetName); * }</pre> * * @throws BigQueryException upon failure */ Dataset getDataset(String datasetId, DatasetOption... 
options); /** * Returns the requested dataset or {@code null} if not found. * * <p>Example of getting a dataset. * <pre> {@code * String projectId = "my_project_id"; * String datasetName = "my_dataset_name"; * DatasetId datasetId = DatasetId.of(projectId, datasetName); * Dataset dataset = bigquery.getDataset(datasetId); * }</pre> * * @throws BigQueryException upon failure */ Dataset getDataset(DatasetId datasetId, DatasetOption... options); /** * Lists the project's datasets. This method returns partial information on each dataset: * ({@link Dataset#datasetId()}, {@link Dataset#friendlyName()} and * {@link Dataset#generatedId()}). To get complete information use either * {@link #getDataset(String, DatasetOption...)} or * {@link #getDataset(DatasetId, DatasetOption...)}. * * <p>Example of listing datasets, specifying the page size. * <pre> {@code * Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100)); * Iterator<Dataset> datasetIterator = datasets.iterateAll(); * while (datasetIterator.hasNext()) { * Dataset dataset = datasetIterator.next(); * // do something with the dataset * } * }</pre> * * @throws BigQueryException upon failure */ Page<Dataset> listDatasets(DatasetListOption... options); /** * Lists the datasets in the provided project. This method returns partial information on each * dataset: ({@link Dataset#datasetId()}, {@link Dataset#friendlyName()} and * {@link Dataset#generatedId()}). To get complete information use either * {@link #getDataset(String, DatasetOption...)} or * {@link #getDataset(DatasetId, DatasetOption...)}. * * <p>Example of listing datasets in a project, specifying the page size. 
* <pre> {@code * String projectId = "my_project_id"; * Page<Dataset> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100)); * Iterator<Dataset> datasetIterator = datasets.iterateAll(); * while (datasetIterator.hasNext()) { * Dataset dataset = datasetIterator.next(); * // do something with the dataset * } * }</pre> * * @throws BigQueryException upon failure */ Page<Dataset> listDatasets(String projectId, DatasetListOption... options); /** * Deletes the requested dataset. * * <p>Example of deleting a dataset from its id, even if non-empty. * <pre> {@code * String datasetName = "my_dataset_name"; * Boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents()); * if (deleted) { * // the dataset was deleted * } else { * // the dataset was not found * } * }</pre> * * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ boolean delete(String datasetId, DatasetDeleteOption... options); /** * Deletes the requested dataset. * * <p>Example of deleting a dataset, even if non-empty. * <pre> {@code * String projectId = "my_project_id"; * String datasetName = "my_dataset_name"; * DatasetId datasetId = DatasetId.of(projectId, datasetName); * Boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents()); * if (deleted) { * // the dataset was deleted * } else { * // the dataset was not found * } * }</pre> * * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ boolean delete(DatasetId datasetId, DatasetDeleteOption... options); /** * Deletes the requested table. * * <p>Example of deleting a table. 
* <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * Boolean deleted = bigquery.delete(datasetName, tableName); * if (deleted) { * // the table was deleted * } else { * // the table was not found * } * }</pre> * * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ boolean delete(String datasetId, String tableId); /** * Deletes the requested table. * * <p>Example of deleting a table. * <pre> {@code * String projectId = "my_project_id"; * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * TableId tableId = TableId.of(projectId, datasetName, tableName); * Boolean deleted = bigquery.delete(tableId); * if (deleted) { * // the table was deleted * } else { * // the table was not found * } * }</pre> * * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ boolean delete(TableId tableId); /** * Updates dataset information. * * <p>Example of updating a dataset by changing its friendly name. * <pre> {@code * String datasetName = "my_dataset_name"; * String newFriendlyName = "some_new_friendly_name"; * Dataset oldDataset = bigquery.getDataset(datasetName); * DatasetInfo datasetInfo = oldDataset.toBuilder().setFriendlyName(newFriendlyName).build(); * Dataset newDataset = bigquery.update(datasetInfo); * }</pre> * * @throws BigQueryException upon failure */ Dataset update(DatasetInfo datasetInfo, DatasetOption... options); /** * Updates table information. * * <p>Example of updating a table by changing its friendly name. 
* <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * String newFriendlyName = "new_friendly_name"; * Table oldTable = bigquery.getTable(datasetName, tableName); * TableInfo tableInfo = oldTable.toBuilder().setFriendlyName(newFriendlyName).build(); * Table newTable = bigquery.update(tableInfo); * }</pre> * * @throws BigQueryException upon failure */ Table update(TableInfo tableInfo, TableOption... options); /** * Returns the requested table or {@code null} if not found. * * <p>Example of getting a table. * <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * Table table = bigquery.getTable(datasetName, tableName); * }</pre> * * @throws BigQueryException upon failure */ Table getTable(String datasetId, String tableId, TableOption... options); /** * Returns the requested table or {@code null} if not found. * * <p>Example of getting a table. * <pre> {@code * String projectId = "my_project_id"; * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * TableId tableId = TableId.of(projectId, datasetName, tableName); * Table table = bigquery.getTable(tableId); * }</pre> * * @throws BigQueryException upon failure */ Table getTable(TableId tableId, TableOption... options); /** * Lists the tables in the dataset. This method returns partial information on each table: * ({@link Table#tableId()}, {@link Table#friendlyName()}, {@link Table#generatedId()} and type, * which is part of {@link Table#definition()}). To get complete information use either * {@link #getTable(TableId, TableOption...)} or * {@link #getTable(String, String, TableOption...)}. * * <p>Example of listing the tables in a dataset, specifying the page size. 
* <pre> {@code * String datasetName = "my_dataset_name"; * Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100)); * Iterator<Table> tableIterator = tables.iterateAll(); * while (tableIterator.hasNext()) { * Table table = tableIterator.next(); * // do something with the table * } * }</pre> * * @throws BigQueryException upon failure */ Page<Table> listTables(String datasetId, TableListOption... options); /** * Lists the tables in the dataset. This method returns partial information on each table: * ({@link Table#tableId()}, {@link Table#friendlyName()}, {@link Table#generatedId()} and type, * which is part of {@link Table#definition()}). To get complete information use either * {@link #getTable(TableId, TableOption...)} or * {@link #getTable(String, String, TableOption...)}. * * <p>Example of listing the tables in a dataset. * <pre> {@code * String projectId = "my_project_id"; * String datasetName = "my_dataset_name"; * DatasetId datasetId = DatasetId.of(projectId, datasetName); * Page<Table> tables = bigquery.listTables(datasetId, TableListOption.pageSize(100)); * Iterator<Table> tableIterator = tables.iterateAll(); * while (tableIterator.hasNext()) { * Table table = tableIterator.next(); * // do something with the table * } * }</pre> * * @throws BigQueryException upon failure */ Page<Table> listTables(DatasetId datasetId, TableListOption... options); /** * Sends an insert all request. * * <p>Example of inserting rows into a table without running a load job. 
* <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * TableId tableId = TableId.of(datasetName, tableName); * // Values of the row to insert * Map<String, Object> rowContent = new HashMap<>(); * rowContent.put("booleanField", true); * // Bytes are passed in base64 * rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64 * // Records are passed as a map * Map<String, Object> recordsContent = new HashMap<>(); * recordsContent.put("stringField", "Hello, World!"); * rowContent.put("recordField", recordsContent); * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId) * .addRow("rowId", rowContent) * // More rows can be added in the same RPC by invoking .addRow() on the builder * .build()); * if (response.hasErrors()) { * // If any of the insertions failed, this lets you inspect the errors * for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) { * // inspect row error * } * } * }</pre> * * @throws BigQueryException upon failure */ InsertAllResponse insertAll(InsertAllRequest request); /** * Lists the table's rows. * * <p>Example of listing table rows, specifying the page size. * <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * Page<List<FieldValue>> tableData = * bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100)); * Iterator<List<FieldValue>> rowIterator = tableData.iterateAll(); * while (rowIterator.hasNext()) { * List<FieldValue> row = rowIterator.next(); * // do something with the row * } * }</pre> * * @throws BigQueryException upon failure */ Page<List<FieldValue>> listTableData(String datasetId, String tableId, TableDataListOption... options); /** * Lists the table's rows. * * <p>Example of listing table rows, specifying the page size. 
* <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * TableId tableIdObject = TableId.of(datasetName, tableName); * Page<List<FieldValue>> tableData = * bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100)); * Iterator<List<FieldValue>> rowIterator = tableData.iterateAll(); * while (rowIterator.hasNext()) { * List<FieldValue> row = rowIterator.next(); * // do something with the row * } * }</pre> * * @throws BigQueryException upon failure */ Page<List<FieldValue>> listTableData(TableId tableId, TableDataListOption... options); /** * Returns the requested job or {@code null} if not found. * * <p>Example of getting a job. * <pre> {@code * String jobName = "my_job_name"; * Job job = bigquery.getJob(jobName); * if (job == null) { * // job was not found * } * }</pre> * * @throws BigQueryException upon failure */ Job getJob(String jobId, JobOption... options); /** * Returns the requested job or {@code null} if not found. * * <p>Example of getting a job. * <pre> {@code * String jobName = "my_job_name"; * JobId jobIdObject = JobId.of(jobName); * Job job = bigquery.getJob(jobIdObject); * if (job == null) { * // job was not found * } * }</pre> * * @throws BigQueryException upon failure */ Job getJob(JobId jobId, JobOption... options); /** * Lists the jobs. * * <p>Example of listing jobs, specifying the page size. * <pre> {@code * Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100)); * Iterator<Job> jobIterator = jobs.iterateAll(); * while (jobIterator.hasNext()) { * Job job = jobIterator.next(); * // do something with the job * } * }</pre> * * @throws BigQueryException upon failure */ Page<Job> listJobs(JobListOption... options); /** * Sends a job cancel request. This call will return immediately. The job status can then be * checked using either {@link #getJob(JobId, JobOption...)} or * {@link #getJob(String, JobOption...)}). * * <p>Example of cancelling a job. 
* <pre> {@code * String jobName = "my_job_name"; * boolean success = bigquery.cancel(jobName); * if (success) { * // job was cancelled * } else { * // job was not found * } * }</pre> * * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found * @throws BigQueryException upon failure */ boolean cancel(String jobId); /** * Sends a job cancel request. This call will return immediately. The job status can then be * checked using either {@link #getJob(JobId, JobOption...)} or * {@link #getJob(String, JobOption...)}). * * <p>Example of cancelling a job. * <pre> {@code * String jobName = "my_job_name"; * JobId jobId = JobId.of(jobName); * boolean success = bigquery.cancel(jobId); * if (success) { * // job was cancelled * } else { * // job was not found * } * }</pre> * * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found * @throws BigQueryException upon failure */ boolean cancel(JobId jobId); /** * Runs the query associated with the request. * * <p>Example of running a query. * <pre> {@code * String query = "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]"; * QueryRequest request = QueryRequest.of(query); * QueryResponse response = bigquery.query(request); * // Wait for things to finish * while (!response.jobCompleted()) { * Thread.sleep(1000); * response = bigquery.getQueryResults(response.getJobId()); * } * if (response.hasErrors()) { * // handle errors * } * QueryResult result = response.getResult(); * Iterator<List<FieldValue>> rowIterator = result.iterateAll(); * while (rowIterator.hasNext()) { * List<FieldValue> row = rowIterator.next(); * // do something with the data * } * }</pre> * * <p>Example of running a query with query parameters. 
* <pre> {@code * String query = "SELECT distinct(corpus) FROM `bigquery-public-data.samples.shakespeare` where word_count > ?"; * QueryRequest request = QueryRequest.newBuilder(query) * .setUseLegacySql(false) // standard SQL is required to use query parameters * .addPositionalParameter(QueryParameterValue.int64(5)) * .build(); * QueryResponse response = bigquery.query(request); * // Wait for things to finish * while (!response.jobCompleted()) { * Thread.sleep(1000); * response = bigquery.getQueryResults(response.getJobId()); * } * if (response.hasErrors()) { * // handle errors * } * QueryResult result = response.getResult(); * Iterator<List<FieldValue>> rowIterator = result.iterateAll(); * while (rowIterator.hasNext()) { * List<FieldValue> row = rowIterator.next(); * // do something with the data * } * }</pre> * * @throws BigQueryException upon failure */ QueryResponse query(QueryRequest request); /** * Returns results of the query associated with the provided job. * * <p>Example of getting the results of query. * <pre> {@code * String query = "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]"; * QueryRequest request = QueryRequest.of(query); * QueryResponse response = bigquery.query(request); * // Wait for things to finish * while (!response.jobCompleted()) { * Thread.sleep(1000); * response = bigquery.getQueryResults(response.getJobId()); * } * if (response.hasErrors()) { * // handle errors * } * QueryResult result = response.getResult(); * Iterator<List<FieldValue>> rowIterator = result.iterateAll(); * while (rowIterator.hasNext()) { * List<FieldValue> row = rowIterator.next(); * // do something with the data * } * }</pre> * * @throws BigQueryException upon failure */ QueryResponse getQueryResults(JobId jobId, QueryResultsOption... options); /** * Returns a channel to write data to be inserted into a BigQuery table. Data format and other * options can be configured using the {@link WriteChannelConfiguration} parameter. 
* * <p>Example of creating a channel with which to write to a table. * <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * String csvData = "StringValue1\nStringValue2\n"; * TableId tableId = TableId.of(datasetName, tableName); * WriteChannelConfiguration writeChannelConfiguration = * WriteChannelConfiguration.newBuilder(tableId) * .setFormatOptions(FormatOptions.csv()) * .build(); * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration); * // Write data to writer * try { * writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8))); * } finally { * writer.close(); * } * // Get load job * Job job = writer.getJob(); * job = job.waitFor(); * LoadStatistics stats = job.getStatistics(); * return stats.getOutputRows(); * }</pre> * * <p>Example of writing a local file to a table. * <pre> {@code * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv"); * TableId tableId = TableId.of(datasetName, tableName); * WriteChannelConfiguration writeChannelConfiguration = * WriteChannelConfiguration.newBuilder(tableId) * .setFormatOptions(FormatOptions.csv()) * .build(); * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration); * // Write data to writer * try (OutputStream stream = Channels.newOutputStream(writer)) { * Files.copy(csvPath, stream); * } * // Get load job * Job job = writer.getJob(); * job = job.waitFor(); * LoadStatistics stats = job.getStatistics(); * return stats.getOutputRows(); * }</pre> * * @throws BigQueryException upon failure */ TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration); }
package crazypants.enderzoo.config; import java.io.File; import java.util.ArrayList; import java.util.List; import crazypants.enderzoo.EnderZoo; import crazypants.enderzoo.Log; import net.minecraft.enchantment.Enchantment.Rarity; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.common.config.Configuration; import net.minecraftforge.fml.client.event.ConfigChangedEvent.OnConfigChangedEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; public final class Config { public static class Section { public final String name; public final String lang; public Section(String name, String lang) { this.name = name; this.lang = lang; register(); } private void register() { sections.add(this); } public String lc() { return name.toLowerCase(); } } public static final List<Section> sections; static { sections = new ArrayList<Section>(); } public static Configuration config; public static File configDirectory; public static final String CONFIG_RESOURCE_PATH = "/assets/enderzoo/config/"; public static final Section sectionDifficulty = new Section("Difficulty", "difficulty"); public static boolean enderZooDifficultyModifierEnabled = true; public static double enderZooEasyHealthModifier = 0.9; public static double enderZooEasyAttackModifier = 0.9; public static double enderZooNormalHealthModifier = 1; public static double enderZooNormalAttackModifier = 1; public static double enderZooHardHealthModifier = 1.1; public static double enderZooHardAttackModifier = 1.1; public static boolean globalDifficultyModifierEnabled = true; public static double globalEasyHealthModifier = 0.9; public static double globalEasyAttackModifier = 0.9; public static double globalNormalHealthModifier = 1; public static double globalNormalAttackModifier = 1; public static double globalHardHealthModifier = 1.1; public static double globalHardAttackModifier = 1.1; public static final Section 
sectionEnderminy = new Section("Enderminy", "enderminy"); public static int enderminyId = 689990; public static boolean enderminyEnabled = true; public static boolean enderminyAttacksPlayerOnSight = false; public static boolean enderminyAttacksCreepers = true; public static int enderminyAttackDamage = 10; public static int enderminyHealth = 20; public static boolean enderminyGroupAgro = true; public static int enderminyMaxGroupSize = 3; public static boolean enderminySpawnInLitAreas = false; public static boolean enderminySpawnOnlyOnGrass = true; public static int enderminyMinSpawnY = 0; public static boolean enderminyOldTexture = true; public static final Section sectionConCreeper = new Section("Concussion Creeper", "concussionCreeper"); public static int concussionCreeperId = 689991; public static boolean concussionCreeperEnabled = true; public static int concussionCreeperMaxTeleportRange = 32; public static int concussionCreeperConfusionDuration = 100; public static int concussionCreeperExplosionRange = 5; public static double concussionCreeperHealth = 20; public static boolean concussionCreeperOldTexture = true; public static final Section sectionFallenKnight = new Section("Fallen Knight", "fallenKnight"); public static int fallenKnightId = 689992; public static boolean fallenKnightEnabled = true; public static double fallenKnightBaseDamage = 4.0; public static double fallenKnightHealth = 20; public static double fallenKnightFollowRange = 40.0; public static double fallenKnightChargeSpeed = 1.2; public static int fallenKnightRangedMinAttackPause = 20; public static int fallenKnightRangedMaxAttackPause = 60; public static float fallenKnightRangedMaxRange = 15; public static float fallenKnightChancePerArmorPiece = 0.7f; public static float fallenKnightChancePerArmorPieceHard = 0.9f; public static float fallenKnightRangedRatio = 0.25f; public static float fallenKnightChanceMounted = 0.75f; public static float fallenKnightChanceArmorUpgradeHard = 0.4f; public 
static float fallenKnightChanceArmorUpgrade = 0.2f; public static double fallenKnightChanceShield = 0.3; public static boolean fallKnightMountedArchesMaintainDistance = true; public static boolean fallenKnightArchersSwitchToMelee = true; public static final Section sectionFallenMount = new Section("Fallen Mount", "fallenMount"); public static int fallenMountId = 689993; public static boolean fallenMountEnabled = true; public static double fallenMountChargeSpeed = 2.5f; public static double fallenMountBaseAttackDamage = 4; public static boolean fallenMountShadedByRider = true; public static float fallenMountChanceArmored = 0.5f; public static float fallenMountChanceArmoredHard = 0.9f; public static float fallenMountChanceArmorUpgrade = 0.01f; public static float fallenMountChanceArmorUpgradeHard = 0.05f; public static double fallenMountHealth = 30; public static final Section sectionWitherWitch = new Section("Wither Witch", "witherWitch"); public static int witherWitchId = 689994; public static boolean witherWitchEnabled = true; public static double witherWitchHealth = 30; public static int witherWitchMinCats = 1; public static int witherWitchMaxCats = 2; public static final Section sectionWitherCat = new Section("Wither Cat", "witherCat"); public static int witherCatId = 689995; public static boolean witherCatEnabled = true; public static double witherCatHealth = 12; public static double witherCatAttackDamage = 3; public static double witherCatAngryHealth = 30; public static double witherCatAngryAttackDamage = 9; public static double witherCatAngryAttackDamageHardModifier = 2; public static final Section sectionDireWolf = new Section("Dire Wolf", "direWolf"); public static int direWolfId = 689996; public static boolean direWolfEnabled = true; public static boolean direWolfPackAttackEnabled = true; public static double direWolfHealth = 20; public static double direWolfAttackDamage = 10; public static double direWolfHardAttackModifier = 1; public static double 
direWolfAggresiveRange = 4; public static double direWolfHowlVolumeMult = 8; public static double direWolfHowlChance = 0.05; public static double direWolfPackHowlChance = 0.5; public static int direWolfPackHowlAmount = 8; public static final Section sectionDireSlime = new Section("Dire Slime", "direSlime"); public static int direSlimeId = 689997; public static boolean direSlimeEnabled = true; public static double direSlimeHealth = 4; public static double direSlimeHealthMedium = 8; public static double direSlimeHealthLarge = 20; public static double direSlimeAttackDamage = 3; public static double direSlimeAttackDamageMedium = 5; public static double direSlimeAttackDamageLarge = 8; public static double direSlimeChance = 0.2; public static double direSlimeChanceLarge = 0.2; public static double direSlimeChanceMedium = 0.4; public static final Section sectionOwl = new Section("Owl", "owl"); public static boolean owlEnabled = true; public static int owlId = 689998; public static int owlHealth = 10; public static int owlAttachDamage = 4; public static float owlSpiderDamageMultiplier = 2; public static float owlHootVolumeMult = 0.8f; public static int owlHootInterval = 1000; public static int owlTimeBetweenEggsMin = 12000; public static int owlTimeBetweenEggsMax = 24000; public static int entityOwlEggId = 679991; public static final Section sectionEnchants = new Section("Enchantments", "enchantments"); public static Rarity enchantmentWitherArrowRarity = Rarity.UNCOMMON; public static int enchantmentWitherArrowDuration = 200; public static int enchantmentWitherArrowMinEnchantability = 20; public static int enchantmentWitherArrowMaxEnchantability = 50; public static Rarity enchantmentWitherWeaponRarity = Rarity.UNCOMMON; public static int enchantmentWitherWeaponDuration = 200; public static int enchantmentWitherWeaponMinEnchantability = 20; public static int enchantmentWitherWeaponMaxEnchantability = 50; public static final Section sectionCharges = new Section("Charges", 
"charges"); public static int entityPrimedChargeId = 699998; public static boolean confusingChargeEnabled = true; public static double confusingChargeRange = 6; public static int confusingChargeEffectDuration = 300; public static boolean enderChargeEnabled = true; public static double enderChargeRange = 6; public static int enderChargeMaxTeleportRange = 64; public static boolean concussionChargeEnabled = true; public static final Section sectionDebug = new Section("Debug", "debug"); public static boolean spawnConfigPrintDetailedOutput = false; public static final Section sectionGuardian = new Section("Guardian", "guardian"); public static boolean guardiansBowEnabled = true; public static int guardiansBowDrawTime = 14; public static float guardiansBowDamageBonus = 0f; public static float guardiansBowForceMultiplier = 1.5f; public static float guardiansBowFovMultiplier = 0.35F; public static final Section sectionPotions = new Section("Potions", "potions"); public static int witherPotionID = 71400; public static int witherPotionLongID = 71401; public static int confusingPotionID = 71410; public static int confusingPotionLongID = 71411; public static int floatingPotionID = 71420; public static int floatingPotionLongID = 71421; public static int floatingPotionTwoID = 71422; public static boolean floatingPotionEnabled = true; public static double floatingPotionSpeed = 0.15; public static double floatingPotionAcceleration = 0.085; public static int floatingPotionDuration = 70; public static int floatingPotionDurationSplash = 50; public static int floatingPotionDurationLong = 120; public static int floatingPotionDurationLongSplash = 100; public static double floatingPotionTwoSpeed = 1.2; public static double floatingPotionTwoAcceleration = 0.3; public static int floatingPotionTwoDuration = 12; public static int floatingPotionTwoDurationSplash = 8; public static void load(FMLPreInitializationEvent event) { MinecraftForge.EVENT_BUS.register(new Config()); configDirectory = 
new File(event.getModConfigurationDirectory(), EnderZoo.MODID.toLowerCase()); if (!configDirectory.exists()) { configDirectory.mkdir(); } File configFile = new File(configDirectory, "EnderZoo.cfg"); config = new Configuration(configFile); syncConfig(); } public static void syncConfig() { try { Config.processConfig(config); } catch (Exception e) { Log.error("EnderZoo has a problem loading it's configuration"); e.printStackTrace(); } finally { if (config.hasChanged()) { config.save(); } } } @SubscribeEvent public void onConfigChanged(OnConfigChangedEvent event) { if (event.getModID().equals(EnderZoo.MODID)) { Log.info("Updating config..."); syncConfig(); } } public static void processConfig(Configuration config) { enderminyId = config.get(sectionEnderminy.name, "enderminyId", enderminyId, "Mob ID").getInt(enderminyId); enderminyEnabled = config.getBoolean("enderminyEnabled", sectionEnderminy.name, enderminyEnabled, "Wether Enderminies are enabled"); enderminyAttacksPlayerOnSight = config.getBoolean("enderminyAttacksPlayerOnSight", sectionEnderminy.name, enderminyAttacksPlayerOnSight, "When true an Enderminy will attack a player if it looks at them, otherwise they are neutral mobs."); enderminyAttacksCreepers = config.getBoolean("enderminyAttacksCreepers", sectionEnderminy.name, enderminyAttacksCreepers, "When true Enderminies will attack creepers"); enderminyAttackDamage = config .get(sectionEnderminy.name, "enderminyAttackDamage", enderminyAttackDamage, "Attack damage of Enderminies. 7=Enderman damage, 3=Zombie damage") .getInt(enderminyAttackDamage); enderminyHealth = config.get(sectionEnderminy.name, "enderminyHealth", enderminyHealth, "Health of Enderminies. 
40=Enderman health, 20=Zombie health") .getInt(enderminyHealth); enderminyGroupAgro = config.getBoolean("enderminyGroupAgro", sectionEnderminy.name, enderminyGroupAgro, "When true attacking one Enderminy will cause other Enderminies who witness the attack to attack the player as well"); enderminyMaxGroupSize = config .get(sectionEnderminy.name, "enderminyMaxGroupSize", enderminyMaxGroupSize, "Maximum number of Enderminies that will spawn in a single group") .getInt(enderminyMaxGroupSize); enderminySpawnInLitAreas = config.getBoolean("enderminySpawnInLitAreas", sectionEnderminy.name, enderminySpawnInLitAreas, "When true enderminies will spawn in well lit areas, when false they will only spawn in dark areas."); enderminySpawnOnlyOnGrass = config.getBoolean("enderminySpawnOnlyOnGrass", sectionEnderminy.name, enderminySpawnOnlyOnGrass, "When true enderminies will spawn only on grass blocks."); enderminyMinSpawnY = config.get(sectionEnderminy.name, "enderminyMinSpawnY", enderminyMinSpawnY, "The minimum Y level at which enderminies will spawn") .getInt(enderminyMinSpawnY); enderminyOldTexture = config.get(sectionEnderminy.name, "enderminyOldTexture", enderminyOldTexture, "If true, uses the old texture for the Enderminy.") .getBoolean(); concussionCreeperId = config.get(sectionConCreeper.name, "concussionCreeperId", concussionCreeperId, "Mob ID").getInt(concussionCreeperId); concussionCreeperEnabled = config.getBoolean("concussionCreeperEnabled", sectionConCreeper.name, concussionCreeperEnabled, "Wether ConcussionCreepers are enabled"); concussionCreeperMaxTeleportRange = config.get(sectionConCreeper.name, "concussionCreeperMaxTeleportRange", concussionCreeperMaxTeleportRange, "Sets the max range entites can be telported when the creeper explodes").getInt(concussionCreeperMaxTeleportRange); concussionCreeperConfusionDuration = config.get(sectionConCreeper.name, "concussionCreeperConfusionDuration", concussionCreeperConfusionDuration, "Sets the durtaion in ticks of the 
confusion effect applied on explosion").getInt(concussionCreeperConfusionDuration); concussionCreeperExplosionRange = config .get(sectionConCreeper.name, "concussionCreeperExplosionRange", concussionCreeperExplosionRange, "The range of the 'teleport explosion'") .getInt(concussionCreeperExplosionRange); concussionCreeperHealth = config .get(sectionConCreeper.name, "concussionCreeperHealth", concussionCreeperHealth, "Health of Concussion Creeper. 40=Enderman health, 20=Zombie health") .getDouble(concussionCreeperHealth); concussionCreeperOldTexture = config .get(sectionConCreeper.name, "concussionCreeperOldTexture", concussionCreeperOldTexture, "If true, uses the old texture for the Concussion Creeper.") .getBoolean(); fallenKnightId = config.get(sectionFallenKnight.name, "fallenKnightId", fallenKnightId, "Mob ID").getInt(fallenKnightId); fallenKnightEnabled = config.getBoolean("fallenKnightEnabled", sectionFallenKnight.name, fallenKnightEnabled, "Wether Fallen Knights are enabled"); fallenKnightBaseDamage = config.get(sectionFallenKnight.name, "fallenKnightBaseDamage", fallenKnightBaseDamage, "Base damage of a knight") .getDouble(fallenKnightBaseDamage); fallenKnightHealth = config.get(sectionFallenKnight.name, "fallenKnightHealth", fallenKnightHealth, "Health of a knight").getDouble(fallenKnightHealth); fallenKnightFollowRange = config.get(sectionFallenKnight.name, "fallenKnightFollowRange", fallenKnightFollowRange, "Follow range of a knight") .getDouble(fallenKnightFollowRange); fallenKnightChargeSpeed = config .get(sectionFallenKnight.name, "fallenKnightChargeSpeed", fallenKnightChargeSpeed, "The speed at which a knight will charge its target") .getDouble(fallenKnightChargeSpeed); fallenKnightRangedMinAttackPause = config .get(sectionFallenKnight.name, "fallenKnightRangedMinAttackPause", fallenKnightRangedMinAttackPause, "The min number of ticks between ranged attacks") .getInt(fallenKnightRangedMinAttackPause); fallenKnightRangedMaxAttackPause = config 
.get(sectionFallenKnight.name, "fallenKnightRangedMaxAttackPause", fallenKnightRangedMaxAttackPause, "The max number of ticks between ranged attacks") .getInt(fallenKnightRangedMaxAttackPause); fallenKnightRangedMaxRange = (float) config .get(sectionFallenKnight.name, "fallenKnightRangedMaxRange", fallenKnightRangedMaxRange, "The max attack range when using a bow") .getDouble(fallenKnightRangedMaxRange); fallenKnightChancePerArmorPiece = (float) config.get(sectionFallenKnight.name, "fallenKnightChancePerArmorPiece", fallenKnightChancePerArmorPiece, "The chance each armor piece has of being added to a spawned knight").getDouble(fallenKnightChancePerArmorPiece); fallenKnightChancePerArmorPieceHard = (float) config.get(sectionFallenKnight.name, "fallenKnightChancePerArmorPieceHard", fallenKnightChancePerArmorPieceHard, "The chance each armor piece has of being added to a spawned knight when difficulty is set to hard") .getDouble(fallenKnightChancePerArmorPieceHard); fallenKnightRangedRatio = (float) config .get(sectionFallenKnight.name, "fallenKnightRangedRatio", fallenKnightRangedRatio, "The precentage of spawned knoghts equipped with bows") .getDouble(fallenKnightRangedRatio); fallenKnightChanceMounted = (float) config .get(sectionFallenKnight.name, "fallenKnightChanceMounted", fallenKnightChanceMounted, "The chance a spawned knight will be mounted") .getDouble(fallenKnightChanceMounted); fallenKnightChanceArmorUpgradeHard = (float) config.get(sectionFallenKnight.name, "fallenKnightChanceArmorUpgradeHard", fallenKnightChanceArmorUpgradeHard, "The chance the type of armor equipped will be improved when dificult is hard").getDouble(fallenKnightChanceArmorUpgradeHard); fallenKnightChanceArmorUpgrade = (float) config.get(sectionFallenKnight.name, "fallenKnightChanceArmorUpgrade", fallenKnightChanceArmorUpgrade, "The chance the type of armor equipped will be improved").getDouble(fallenKnightChanceArmorUpgrade); fallenKnightChanceShield= (float) 
config.get(sectionFallenKnight.name, "fallenKnightChanceShield", fallenKnightChanceShield, "The chance ta shield will be equipped").getDouble(fallenKnightChanceShield); fallKnightMountedArchesMaintainDistance = config.getBoolean("fallKnightMountedArchesMaintainDistance", sectionFallenKnight.name, fallKnightMountedArchesMaintainDistance, "When true mounted archer knigts will attempt to keep distance between themselves and their target"); fallenKnightArchersSwitchToMelee = config.getBoolean("fallenKnightArchersSwitchToMelee", sectionFallenKnight.name, fallenKnightArchersSwitchToMelee, "When true archer knigts will switch to a sword when target is within melee range. " + "Doesn't apply to mounted archers if fallKnightMountedArchesMaintainDistance is true"); fallenMountId = config.get(sectionFallenMount.name, "fallenMountId", fallenMountId, "Mob ID").getInt(fallenMountId); fallenMountEnabled = config.getBoolean("fallenMountEnabled", sectionFallenMount.name, fallenMountEnabled, "If false fallen mounts will be disabled"); fallenMountChargeSpeed = config .get(sectionFallenMount.name, "fallenMountChargeSpeed", fallenMountChargeSpeed, "he speed at which a mount will charge its target") .getDouble(fallenMountChargeSpeed); fallenMountBaseAttackDamage = config .get(sectionFallenMount.name, "fallenMountBaseAttackDamage", fallenMountBaseAttackDamage, "Base attack damage of the mount") .getDouble(fallenMountBaseAttackDamage); fallenMountHealth = config.get(sectionFallenMount.name, "fallenMountHealth", fallenMountHealth, "Base attack health of the mount") .getDouble(fallenMountHealth); fallenMountShadedByRider = config.getBoolean("fallenMountShadedByRider", sectionFallenMount.name, fallenMountShadedByRider, "When true a mount will not burn in the sun unless its rider is"); fallenMountChanceArmored = (float) config .get(sectionFallenMount.name, "fallenMountChanceArmored", fallenMountChanceArmored, "The chance a spawned mount will be armored") .getDouble(fallenMountChanceArmored); 
fallenMountChanceArmoredHard = (float) config.get(sectionFallenMount.name, "fallenMountChanceArmoredHard", fallenMountChanceArmoredHard, "The chance a spawned mount will be armored when difficult is hard").getDouble(fallenMountChanceArmoredHard); fallenMountChanceArmorUpgrade = (float) config .get(sectionFallenMount.name, "fallenMountChanceArmorUpgrade", fallenMountChanceArmorUpgrade, "The chance a mount's armor will be upgraded") .getDouble(fallenMountChanceArmorUpgrade); fallenMountChanceArmorUpgradeHard = (float) config.get(sectionFallenMount.name, "fallenMountChanceArmorUpgradeHard", fallenMountChanceArmorUpgradeHard, "The chance a mount's armor will be upgraded when difficulty is hard").getDouble(fallenMountChanceArmorUpgradeHard); witherWitchId = config.get(sectionWitherWitch.name, "witherWitchId", witherWitchId, "Mob ID").getInt(witherWitchId); witherWitchEnabled = config.getBoolean("witherWitchEnabled", sectionWitherWitch.name, witherWitchEnabled, "If false Wither Witches will be disabled"); witherWitchHealth = config.get(sectionWitherWitch.name, "witherWitchHealth", witherWitchHealth, "Base attack damage of the mount") .getDouble(witherWitchHealth); witherWitchMinCats = config.get(sectionWitherWitch.name, "witherWitchMinCats", witherWitchMinCats, "The minimum number of cats spawned with a Wither Witch") .getInt(witherWitchMinCats); witherWitchMaxCats = config.get(sectionWitherWitch.name, "witherWitchMaxCats", witherWitchMaxCats, "The maximum number of cats spawned with a Wither Witch") .getInt(witherWitchMaxCats); witherCatId = config.get(sectionWitherCat.name, "witherCatId", witherCatId, "Mob ID").getInt(witherCatId); witherCatEnabled = config.getBoolean("witherCatEnabled", sectionWitherCat.name, witherCatEnabled, "If false Wither Cats will be disabled"); witherCatHealth = config.get(sectionWitherCat.name, "witherCatHealth", witherCatHealth, "Base health of the wither cat").getDouble(witherCatHealth); witherCatAttackDamage = 
config.get(sectionWitherCat.name, "witherCatAttackDamage", witherCatAttackDamage, "Base attack damage of the wither cat") .getDouble(witherCatAttackDamage); witherCatAngryAttackDamageHardModifier = config.get(sectionWitherCat.name, "witherCatAngryAttackDamageHardModifier", witherCatAngryAttackDamageHardModifier, "The increase to damage when playing on hard").getDouble(witherCatAngryAttackDamageHardModifier); direWolfId = config.get(sectionDireWolf.name, "direWolfId", direWolfId, "Mob ID").getInt(direWolfId); direWolfEnabled = config.getBoolean("direWolfEnabled", sectionDireWolf.name, direWolfEnabled, "If false Dire Wolves will be disabled"); direWolfPackAttackEnabled = config.getBoolean("direWolfPackAttackEnabled", sectionDireWolf.name, direWolfPackAttackEnabled, "When true all nearby dire wolves will join an attack"); direWolfHealth = config.get(sectionDireWolf.name, "direWolfHealth", direWolfHealth, "Base health of the Dire Wolf").getDouble(direWolfHealth); direWolfAttackDamage = config.get(sectionDireWolf.name, "direWolfAttackDamage", direWolfAttackDamage, "Base attack damage of the dire wolf") .getDouble(direWolfAttackDamage); direWolfHardAttackModifier = config .get(sectionDireWolf.name, "direWolfHardAttackModifier", direWolfHardAttackModifier, "The increase to damage when playing on hard") .getDouble(direWolfHardAttackModifier); direWolfAggresiveRange = config .get(sectionDireWolf.name, "direWolfAggresiveRange", direWolfAggresiveRange, "If a player gets within this range they will be attacked") .getDouble(direWolfAggresiveRange); direWolfHowlVolumeMult = config .get(sectionDireWolf.name, "direWolfHowlVolumeMult", direWolfHowlVolumeMult, "The volume multiplier for the dire wolf's howl. 12 is default.") .getDouble(); direWolfHowlChance = config.get(sectionDireWolf.name, "direWolfHowlChance", direWolfHowlChance, "The chance a dire wolf will howl when it is asked to play a sound. 
Defaults to 0.1 (10%)").getDouble(); direWolfPackHowlChance = config.get(sectionDireWolf.name, "direWolfPackHowlChance", direWolfPackHowlChance, "The chance that when a dire wolf howls, nearby dire wolves will \"join in\" to a pack howl. Defaults to 0.6 (60%)").getDouble(); direWolfPackHowlAmount = config.get(sectionDireWolf.name, "direWolfPackHowlAmount", direWolfPackHowlAmount, "The amount of other dire wolves that will \"join in\" with the initial howl, per pack howl.").getInt(); direSlimeId = config.get(sectionDireSlime.name, "direSlimeId", direSlimeId, "Mob ID").getInt(direSlimeId); direSlimeEnabled = config.getBoolean("direSlimeEnabled", sectionDireSlime.name, direSlimeEnabled, "If false Dire Slime will be disabled"); direSlimeAttackDamage = config.get(sectionDireSlime.name, "direSlimeAttackDamage", direSlimeAttackDamage, "Base attack damage of the dire slime.") .getDouble(direSlimeAttackDamage); direSlimeAttackDamageMedium = config .get(sectionDireSlime.name, "direSlimeAttackDamageMedium", direSlimeAttackDamageMedium, "Base attack damage of the medium dire slime.") .getDouble(direSlimeAttackDamageMedium); direSlimeAttackDamageLarge = config .get(sectionDireSlime.name, "direSlimeAttackDamageLarge", direSlimeAttackDamageLarge, "Base attack damage of the large dire slime.") .getDouble(direSlimeAttackDamageLarge); direSlimeHealth = config.get(sectionDireSlime.name, "direSlimeHealth", direSlimeHealth, "Base health of the Dire Slime. ").getDouble(direSlimeHealth); direSlimeHealthMedium = config.get(sectionDireSlime.name, "direSlimeHealthMedium", direSlimeHealthMedium, "Base health of the medium Dire Slime. ") .getDouble(direSlimeHealthMedium); direSlimeHealthLarge = config.get(sectionDireSlime.name, "direSlimeHealthLarge", direSlimeHealthLarge, "Base health of the medium Dire Slime. 
") .getDouble(direSlimeHealthLarge); direSlimeChance = config .get(sectionDireSlime.name, "direSlimeChance", direSlimeChance, "The chance that a Dire Slime will be spawned (0 = never, 1 = always).") .getDouble(direSlimeChance); direSlimeChanceMedium = config.get(sectionDireSlime.name, "direSlimeChanceMedium", direSlimeChanceMedium, "The chance a medium will spawn when a small Dire Slimes is killed (eg 0.12 for a 12% chance).").getDouble(direSlimeChanceMedium); direSlimeChanceLarge = config.get(sectionDireSlime.name, "direSlimeChanceLarge", direSlimeChanceLarge, "The chance a large will spawn when a medium Dire Slimes is killed (eg 0.02 for a 2% chance)").getDouble(direSlimeChanceLarge); owlEnabled = config.getBoolean("owlEnabled", sectionOwl.name, owlEnabled, "If false Owl will be disabled"); owlId = config.get(sectionOwl.name, "owlId", owlId, "Mob ID").getInt(owlId); owlHealth = config.get(sectionOwl.name, "owlHealth", owlHealth, "Owl Health").getInt(owlHealth); owlAttachDamage = config.get(sectionOwl.name, "owlAttachDamage", owlAttachDamage, "Owl Attack Damage").getInt(owlAttachDamage); owlSpiderDamageMultiplier = (float) config.get(sectionOwl.name, "owlSpiderDamageMultiplier", owlSpiderDamageMultiplier, "Damage multiplier against spiders") .getDouble(owlSpiderDamageMultiplier); owlHootVolumeMult = (float) config .get(sectionOwl.name, "owlHootVolumeMult", owlHootVolumeMult, "Adjusts the owls hoot volume. Higher value is loader") .getDouble(owlHootVolumeMult); owlHootInterval = config.get(sectionOwl.name, "owlHootInterval", owlHootInterval, "Aprox. 
number of ticks between hoots").getInt(owlHootInterval); owlTimeBetweenEggsMin = config.get(sectionOwl.name, "owlTimeBetweenEggsMin", owlTimeBetweenEggsMin, "Min ticks between egg laying") .getInt(owlTimeBetweenEggsMin); owlTimeBetweenEggsMax = config.get(sectionOwl.name, "owlTimeBetweenEggsMax", owlTimeBetweenEggsMax, "Max ticks between egg laying") .getInt(owlTimeBetweenEggsMax); entityOwlEggId = config.get(sectionOwl.name, "entityOwlEggId", entityOwlEggId, "ID for thrown owl egg Entity").getInt(entityOwlEggId); String rareStr = config.get(sectionEnchants.name, "enchantmentWitherArrowWeight", enchantmentWitherArrowRarity.toString(), "The rarity of the enchantment. COMMON, UNCOMMON, RARE, VERY_RARE ").getString(); try { enchantmentWitherArrowRarity = Rarity.valueOf(rareStr); } catch (Exception e) { Log.warn("Could not set value config entry enchantmentWitherArrowRarity Specified value " + rareStr); e.printStackTrace(); } enchantmentWitherArrowDuration = config .get(sectionEnchants.name, "enchantmentWitherArrowDuration", enchantmentWitherArrowDuration, "Duration of the wither effect in ticks") .getInt(enchantmentWitherArrowDuration); enchantmentWitherArrowMinEnchantability = config.get(sectionEnchants.name, "enchantmentWitherArrowMinEnchantability", enchantmentWitherArrowMinEnchantability, "The minimum required enchantability level").getInt(enchantmentWitherArrowMinEnchantability); enchantmentWitherArrowMaxEnchantability = config .get(sectionEnchants.name, "enchantmentWitherArrowMaxEnchantability", enchantmentWitherArrowMaxEnchantability, "The maximum required level") .getInt(enchantmentWitherArrowMaxEnchantability); rareStr = config.get(sectionEnchants.name, "enchantmentWitherWeaponWeight", enchantmentWitherWeaponRarity.toString(), "The rarity of the enchantment. 
COMMON, UNCOMMON, RARE, VERY_RARE ").getString(); try { enchantmentWitherWeaponRarity = Rarity.valueOf(rareStr); } catch (Exception e) { Log.warn("Could not set value config entry enchantmentWitherArrowRarity Specified value " + rareStr); e.printStackTrace(); } enchantmentWitherWeaponDuration = config .get(sectionEnchants.name, "enchantmentWitherWeaponDuration", enchantmentWitherWeaponDuration, "Duration of the wither effect in ticks") .getInt(enchantmentWitherWeaponDuration); enchantmentWitherWeaponMinEnchantability = config.get(sectionEnchants.name, "enchantmentWitherWeaponMinEnchantability", enchantmentWitherWeaponMinEnchantability, "The minimum required enchantability level").getInt(enchantmentWitherWeaponMinEnchantability); enchantmentWitherWeaponMaxEnchantability = config .get(sectionEnchants.name, "enchantmentWitherWeaponMaxEnchantability", enchantmentWitherWeaponMaxEnchantability, "The maximum required level") .getInt(enchantmentWitherWeaponMaxEnchantability); entityPrimedChargeId = config.get(sectionCharges.name, "entityPrimedChargeId", entityPrimedChargeId, "ID for charge entities").getInt(entityPrimedChargeId); confusingChargeEnabled = config.getBoolean("confusingChargeEnabled", sectionCharges.name, confusingChargeEnabled, "If false Confusing Charges will be disabled"); confusingChargeRange = config.get(sectionCharges.name, "confusingChargeRange", confusingChargeRange, "The range of the confusion charges effect") .getDouble(confusingChargeRange); confusingChargeEffectDuration = config.get(sectionCharges.name, "confusingChargeEffectDuration", confusingChargeEffectDuration, "Numer of ticks the confusion effect active. 
Scales with distance from the expolosion").getInt(confusingChargeEffectDuration); enderChargeEnabled = config.getBoolean("enderChargeEnabled", sectionCharges.name, enderChargeEnabled, "If false Ender Charges will be disabled"); enderChargeRange = config.get(sectionCharges.name, "enderChargeRange", enderChargeRange, "The range of the ender charges effect") .getDouble(enderChargeRange); enderChargeMaxTeleportRange = config.get(sectionCharges.name, "enderChargeMaxTeleportRange", enderChargeMaxTeleportRange, "The max range effected entities will be teleported. Distance is randomised").getInt(enderChargeMaxTeleportRange); concussionChargeEnabled = config.getBoolean("concussionChargeEnabled", sectionCharges.name, concussionChargeEnabled, "If false Concussion Charges will be disabled"); enderZooDifficultyModifierEnabled = config.getBoolean("enderZooDifficultyModifierEnabled", sectionDifficulty.name, enderZooDifficultyModifierEnabled, "When enabled health and base damage for all Ender Zoo mobs will be modified based on difficulty"); enderZooEasyHealthModifier = config.get(sectionDifficulty.name, "enderZooEasyHealthModifier", enderZooEasyHealthModifier, "When in easy difficulty base health is multiplied by this value, rounded to the nearest whole 'heart'").getDouble(enderZooEasyHealthModifier); enderZooNormalHealthModifier = config.get(sectionDifficulty.name, "enderZooNormalHealthModifier", enderZooNormalHealthModifier, "When in normal difficultry base health is multiplied by this value, rounded to the nearest whole 'heart'").getDouble(enderZooNormalHealthModifier); enderZooHardHealthModifier = config.get(sectionDifficulty.name, "enderZooHardHealthModifier", enderZooHardHealthModifier, "When in hard mode base health is multiplied by this value, rounded to the nearest whole 'heart'").getDouble(enderZooHardHealthModifier); enderZooEasyAttackModifier = config.get(sectionDifficulty.name, "enderZooEasyAttackModifier", enderZooEasyAttackModifier, "When in easy difficulty base 
attack damage is multiplied by this value").getDouble(enderZooEasyAttackModifier); enderZooNormalAttackModifier = config.get(sectionDifficulty.name, "enderZooNormalAttackModifier", enderZooNormalAttackModifier, "When in easy difficulty base attack damage is multiplied by this value").getDouble(enderZooNormalAttackModifier); enderZooHardAttackModifier = config.get(sectionDifficulty.name, "enderZooHardAttackModifier", enderZooHardAttackModifier, "When in easy difficulty base attack damage is multiplied by this value").getDouble(enderZooHardAttackModifier); globalDifficultyModifierEnabled = config.getBoolean("globalDifficultyModifierEnabled", sectionDifficulty.name, globalDifficultyModifierEnabled, "When enabled health and base damage for all non Ender Zoo mobs will be modified based on difficulty"); globalEasyHealthModifier = config.get(sectionDifficulty.name, "globalEasyHealthModifier", globalEasyHealthModifier, "When in easy difficulty base health is multiplied by this value, rounded to the nearest whole 'heart'").getDouble(globalEasyHealthModifier); globalNormalHealthModifier = config.get(sectionDifficulty.name, "globalNormalHealthModifier", globalNormalHealthModifier, "When in normal difficultry base health is multiplied by this value, rounded to the nearest whole 'heart'").getDouble(globalNormalHealthModifier); globalHardHealthModifier = config.get(sectionDifficulty.name, "globalHardHealthModifier", globalHardHealthModifier, "When in hard mode base health is multiplied by this value, rounded to the nearest whole 'heart'").getDouble(globalHardHealthModifier); globalEasyAttackModifier = config.get(sectionDifficulty.name, "globalEasyAttackModifier", globalEasyAttackModifier, "When in easy difficulty base attack damage is multiplied by this value").getDouble(globalEasyAttackModifier); globalNormalAttackModifier = config.get(sectionDifficulty.name, "globalNormalAttackModifier", globalNormalAttackModifier, "When in easy difficulty base attack damage is multiplied by 
this value").getDouble(globalNormalAttackModifier); globalHardAttackModifier = config.get(sectionDifficulty.name, "globalHardAttackModifier", globalHardAttackModifier, "When in easy difficulty base attack damage is multiplied by this value").getDouble(globalHardAttackModifier); spawnConfigPrintDetailedOutput = config.getBoolean("spawnConfigPrintDetailedOutput", sectionDebug.name, spawnConfigPrintDetailedOutput, "When enabled detailed information about spawn config will be printed to the log."); guardiansBowEnabled = config.getBoolean("guardiansBowEnabled", sectionGuardian.name, guardiansBowEnabled, "If false the Guardians Bow will be disabled"); guardiansBowDrawTime = config.get(sectionGuardian.name, "guardiansBowDrawTime", guardiansBowDrawTime, "The number of ticks it takes to fully draw the guardians bow. A 'vanilla' bow takes 20 ticks.").getInt(guardiansBowDrawTime); guardiansBowDamageBonus = (float) config .get(sectionGuardian.name, "guardiansBowDamageBonus", guardiansBowDamageBonus, "The damage bonus applied to arrows fire from the bow.") .getDouble(guardiansBowDamageBonus); guardiansBowForceMultiplier = (float) config.get(sectionGuardian.name, "guardiansBowForceMultiplier", guardiansBowForceMultiplier, "Effects the speed with which arrows leave the bow. A 'vanilla' bow has a multiplier of 2.").getDouble(guardiansBowForceMultiplier); guardiansBowFovMultiplier = (float) config.get(sectionGuardian.name, "guardiansBowFovMultiplier", guardiansBowFovMultiplier, "The reduction in FOV when the bow is fullen drawn (the zoom level). 
A 'vanilla' bow has a value of 0.15").getDouble(guardiansBowFovMultiplier); witherPotionID = config.get(sectionPotions.name, "witherPotionID", witherPotionID, "Potion ID").getInt(witherPotionID); witherPotionLongID = config.get(sectionPotions.name, "witherPotionLongID", witherPotionLongID, "Potion ID").getInt(witherPotionLongID); confusingPotionID = config.get(sectionPotions.name, "confusingPotionID", confusingPotionID, "Potion ID").getInt(confusingPotionID); confusingPotionLongID = config.get(sectionPotions.name, "confusingPotionLongID", confusingPotionLongID, "Potion ID").getInt(confusingPotionLongID); floatingPotionID = config.get(sectionPotions.name, "floatingPotionID", floatingPotionID, "Potion ID").getInt(floatingPotionID); floatingPotionLongID = config.get(sectionPotions.name, "floatingPotionLongID", floatingPotionLongID, "Potion ID").getInt(floatingPotionLongID); floatingPotionTwoID = config.get(sectionPotions.name, "floatingPotionTwoID", floatingPotionTwoID, "Potion ID").getInt(floatingPotionTwoID); floatingPotionEnabled = config.getBoolean("floatingPotionEnabled", sectionPotions.name, floatingPotionEnabled, "If false floating potions will be disabled"); floatingPotionSpeed = config.get(sectionPotions.name, "floatingPotionSpeed", floatingPotionSpeed, "Max rising speed.").getDouble(floatingPotionSpeed); floatingPotionAcceleration = config.get(sectionPotions.name, "floatingPotionAcceleration", floatingPotionAcceleration, "Vertical acceleration rate") .getDouble(floatingPotionAcceleration); floatingPotionDuration = config.get(sectionPotions.name, "floatingPotionDuration", floatingPotionDuration, "Effect duration (ticks)") .getInt(floatingPotionDuration); floatingPotionDurationSplash = config.get(sectionPotions.name, "floatingPotionDurationSplash", floatingPotionDurationSplash, "Effect duration (ticks)") .getInt(floatingPotionDurationSplash); floatingPotionDurationLong = config.get(sectionPotions.name, "floatingPotionDurationLong", floatingPotionDurationLong, 
"Effect duration (ticks)") .getInt(floatingPotionDurationLong); floatingPotionDurationLongSplash = config .get(sectionPotions.name, "floatingPotionDurationLongSplash", floatingPotionDurationLongSplash, "Effect duration (ticks)") .getInt(floatingPotionDurationLongSplash); floatingPotionTwoSpeed = config.get(sectionPotions.name, "floatingPotionTwoSpeed", floatingPotionTwoSpeed, "Max rising speed.") .getDouble(floatingPotionTwoSpeed); floatingPotionTwoAcceleration = config .get(sectionPotions.name, "floatingPotionTwoAcceleration", floatingPotionTwoAcceleration, "Vertical acceleration rate") .getDouble(floatingPotionTwoAcceleration); floatingPotionTwoDuration = config.get(sectionPotions.name, "floatingPotionTwoDuration", floatingPotionTwoDuration, "Effect duration (ticks)") .getInt(floatingPotionTwoDuration); floatingPotionTwoDurationSplash = config .get(sectionPotions.name, "floatingPotionTwoDurationSplash", floatingPotionTwoDurationSplash, "Effect duration (ticks)") .getInt(floatingPotionTwoDurationSplash); } private Config() { } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a.commit; import java.io.IOException; import org.junit.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3a.commit.magic.MagicS3GuardCommitter; import org.apache.hadoop.fs.s3a.commit.staging.DirectoryStagingCommitter; import org.apache.hadoop.fs.s3a.commit.staging.PartitionedStagingCommitter; import org.apache.hadoop.fs.s3a.commit.staging.StagingCommitter; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.PathOutputCommitter; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.hadoop.test.LambdaTestUtils; import static org.apache.hadoop.fs.s3a.commit.CommitConstants.*; /** * Tests for some aspects of the committer factory. * All tests are grouped into one single test so that only one * S3A FS client is set up and used for the entire run. * Saves time and money. 
*/ public class ITestS3ACommitterFactory extends AbstractCommitITest { protected static final String INVALID_NAME = "invalid-name"; /** * Counter to guarantee that even in parallel test runs, no job has the same * ID. */ private String jobId; // A random task attempt id for testing. private String attempt0; private TaskAttemptID taskAttempt0; private Path outDir; private S3ACommitterFactory factory; private TaskAttemptContext tContext; /** * Parameterized list of bindings of committer name in config file to * expected class instantiated. */ private static final Object[][] bindings = { {COMMITTER_NAME_FILE, FileOutputCommitter.class}, {COMMITTER_NAME_DIRECTORY, DirectoryStagingCommitter.class}, {COMMITTER_NAME_PARTITIONED, PartitionedStagingCommitter.class}, {InternalCommitterConstants.COMMITTER_NAME_STAGING, StagingCommitter.class}, {COMMITTER_NAME_MAGIC, MagicS3GuardCommitter.class} }; /** * This is a ref to the FS conf, so changes here are visible * to callers querying the FS config. */ private Configuration filesystemConfRef; private Configuration taskConfRef; @Override public void setup() throws Exception { super.setup(); jobId = randomJobId(); attempt0 = "attempt_" + jobId + "_m_000000_0"; taskAttempt0 = TaskAttemptID.forName(attempt0); outDir = path(getMethodName()); factory = new S3ACommitterFactory(); Configuration conf = new Configuration(); conf.set(FileOutputFormat.OUTDIR, outDir.toUri().toString()); conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt0); conf.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 1); filesystemConfRef = getFileSystem().getConf(); tContext = new TaskAttemptContextImpl(conf, taskAttempt0); taskConfRef = tContext.getConfiguration(); } @Override public boolean useInconsistentClient() { return false; } @Test public void testEverything() throws Throwable { testImplicitFileBinding(); testBindingsInTask(); testBindingsInFSConfig(); testInvalidFileBinding(); testInvalidTaskBinding(); } /** * Verify that if all config options are unset, the 
FileOutputCommitter * * is returned. */ public void testImplicitFileBinding() throws Throwable { taskConfRef.unset(FS_S3A_COMMITTER_NAME); filesystemConfRef.unset(FS_S3A_COMMITTER_NAME); assertFactoryCreatesExpectedCommitter(FileOutputCommitter.class); } /** * Verify that task bindings are picked up. */ public void testBindingsInTask() throws Throwable { // set this to an invalid value to be confident it is not // being checked. filesystemConfRef.set(FS_S3A_COMMITTER_NAME, "INVALID"); taskConfRef.set(FS_S3A_COMMITTER_NAME, COMMITTER_NAME_FILE); assertFactoryCreatesExpectedCommitter(FileOutputCommitter.class); for (Object[] binding : bindings) { taskConfRef.set(FS_S3A_COMMITTER_NAME, (String) binding[0]); assertFactoryCreatesExpectedCommitter((Class) binding[1]); } } /** * Verify that FS bindings are picked up. */ public void testBindingsInFSConfig() throws Throwable { taskConfRef.unset(FS_S3A_COMMITTER_NAME); filesystemConfRef.set(FS_S3A_COMMITTER_NAME, COMMITTER_NAME_FILE); assertFactoryCreatesExpectedCommitter(FileOutputCommitter.class); for (Object[] binding : bindings) { taskConfRef.set(FS_S3A_COMMITTER_NAME, (String) binding[0]); assertFactoryCreatesExpectedCommitter((Class) binding[1]); } } /** * Create an invalid committer via the FS binding, */ public void testInvalidFileBinding() throws Throwable { taskConfRef.unset(FS_S3A_COMMITTER_NAME); filesystemConfRef.set(FS_S3A_COMMITTER_NAME, INVALID_NAME); LambdaTestUtils.intercept(PathCommitException.class, INVALID_NAME, () -> createCommitter()); } /** * Create an invalid committer via the task attempt. */ public void testInvalidTaskBinding() throws Throwable { filesystemConfRef.unset(FS_S3A_COMMITTER_NAME); taskConfRef.set(FS_S3A_COMMITTER_NAME, INVALID_NAME); LambdaTestUtils.intercept(PathCommitException.class, INVALID_NAME, () -> createCommitter()); } /** * Assert that the factory creates the expected committer. * @param expected expected committer class. * @throws IOException IO failure. 
*/ protected void assertFactoryCreatesExpectedCommitter( final Class expected) throws IOException { assertEquals("Wrong Committer from factory", expected, createCommitter().getClass()); } /** * Create a committer. * @return the committer * @throws IOException IO failure. */ private PathOutputCommitter createCommitter() throws IOException { return factory.createOutputCommitter(outDir, tContext); } }
package xyz.elmot.oscill;

import fi.iki.elonen.NanoHTTPD;
import fi.iki.elonen.NanoHTTPD.Response.Status;
import org.apache.commons.io.IOUtils;
import xyz.elmot.oscill.web.Resource;

import javax.swing.*;
import javax.swing.border.BevelBorder;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import java.awt.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.TreeMap;

import static xyz.elmot.oscill.Main.PORT_NAME;

/**
 * Local HTTP server for the oscilloscope web UI plus a small Swing window
 * for choosing the serial port and opening the browser.
 * <p>
 * GET requests serve the static UI resources registered below; POST requests
 * carry oscilloscope traffic ({@code /frame} pulls a binary data frame,
 * {@code /cmd} sends a command) bridged through {@link CommFacility}.
 * <p>
 * (c) elmot on 9.3.2017.
 */
public class WebMain extends NanoHTTPD {

    // TCP port the embedded HTTP server listens on.
    private static final int PORT = 1515;

    // URI path -> static resource; populated once in the static initializer.
    private final static Map<String, Resource> staticResources = new TreeMap<>();

    // Bridge to the oscilloscope hardware; identity transform on raw frames.
    private CommFacility<byte[]> commFacility = new CommFacility<>(b -> b);

    // Swing label mirroring the connection status; created in createAndShowGUI().
    private JLabel connectStatus;

    // Latest status text/flag, written from the CommFacility status callback and
    // read from HTTP handler threads.
    // NOTE(review): fields are not volatile; cross-thread visibility is assumed
    // to be acceptable here — confirm if strict status reporting matters.
    private String connectionStatusText = "Not Started";
    private boolean connectionStatusError = true;

    // Register a resource whose URI name equals its classpath resource name.
    private static void registerResource(String name, String mimeType) {
        registerResource(name, name, mimeType);
    }

    // Register a resource served at "/<name>" but loaded from "realName"
    // relative to this class on the classpath.
    private static void registerResource(String name, String realName, String mimeType) {
        staticResources.put("/" + name,
                new Resource(mimeType, () -> WebMain.class.getResourceAsStream(realName))
        );
    }

    static {
        // Static UI assets served by serveStaticResources().
        registerResource("content.html", "text/html;encoding=utf-8");
        registerResource("ui.js", "application/javascript");
        registerResource("hw.js", "hw_web.js", "application/javascript");
        registerResource("oscilloscope.css", "text/css");
        registerResource("icon.png", "image/png");
    }

    private WebMain() {
        super(PORT);
    }

    /**
     * Dispatch by HTTP method: GET -> static resources, POST -> device
     * commands, anything else -> 405. IOExceptions become a 500 response.
     */
    @Override
    public Response serve(IHTTPSession session) {
        try {
            switch (session.getMethod()) {
                case GET:
                    return serveStaticResources(session);
                case POST:
                    return serveCommand((HTTPSession) session);
                default:
                    return responseStatus(Status.METHOD_NOT_ALLOWED);
            }
        } catch (IOException e) {
            return newFixedLengthResponse(Status.INTERNAL_ERROR, "text/plain", e.getMessage());
        }
    }

    /**
     * Handle POSTs to the two device endpoints.
     * <p>
     * {@code /frame}: returns the next binary data frame, or 204 (with an
     * {@code X-Comm-Error} header when the connection is in error) if none
     * is available.
     * <p>
     * {@code /cmd}: forwards the ASCII request body as a command and returns
     * its textual response, or 204 when the response is empty.
     * Bodies of 1 MB or more are rejected with 413.
     * Any other URI yields 405.
     */
    private Response serveCommand(HTTPSession session) throws IOException {
        if ("/frame".equals(session.getUri())) {
            byte[] frame = commFacility.getDataResponse();
            if (frame == null) {
                // No frame ready: empty 204, with the comm error surfaced in a header.
                Response response = newFixedLengthResponse(Status.NO_CONTENT, NanoHTTPD.MIME_PLAINTEXT, "");
                if (connectionStatusError) {
                    response.addHeader("X-Comm-Error", connectionStatusText);
                }
                return response;
            } else {
                return newFixedLengthResponse(Status.OK, "application/binary",
                        new ByteArrayInputStream(frame), frame.length);
            }
        } else if ("/cmd".equals(session.getUri())) {
            long bodySize = session.getBodySize();
            if (bodySize >= 1000000) {
                return responseStatus(Status.PAYLOAD_TOO_LARGE);
            }
            byte[] bytes = new byte[(int) bodySize];
            // NOTE(review): the number of bytes actually read is not checked;
            // assumes IOUtils.read fills the buffer for well-formed requests.
            IOUtils.read(session.getInputStream(), bytes);
            String result = commFacility.getCommandResponse(new String(bytes, StandardCharsets.US_ASCII));
            if (result.isEmpty()) {
                Response response = responseStatus(Status.NO_CONTENT);
                if (connectionStatusError) {
                    response.addHeader("X-Comm-Error", connectionStatusText);
                }
                return response;
            } else {
                return newFixedLengthResponse(Status.OK, "text/plain", result);
            }
        } else return responseStatus(Status.METHOD_NOT_ALLOWED);
    }

    /**
     * Serve a registered static resource, redirect "/" (or empty URI) to the
     * main page, and return 404 for anything unknown.
     */
    private Response serveStaticResources(IHTTPSession session) {
        String uri = session.getUri();
        if (uri == null || "".equals(uri) || "/".equals(uri)) {
            Response response = responseStatus(Status.REDIRECT);
            response.addHeader("Location", "/content.html");
            return response;
        }
        Resource resource = staticResources.get(uri);
        if (resource != null) {
            // Chunked: the resource supplier opens a fresh stream per request.
            return newChunkedResponse(Status.OK, resource.mimeType, resource.data.get());
        }
        return responseStatus(Status.NOT_FOUND);
    }

    // Plain-text response whose body is the status description itself.
    private Response responseStatus(Status status) {
        return newFixedLengthResponse(status, NanoHTTPD.MIME_PLAINTEXT, status.getDescription());
    }

    /**
     * Create the GUI and show it. For thread safety,
     * this method should be invoked from the
     * event-dispatching thread.
     */
    private void createAndShowGUI() {
        //Create and set up the window.
        JFrame frame = new JFrame("Oscilloscope server");
        frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        frame.setIconImage(Toolkit.getDefaultToolkit().getImage(WebMain.class.getResource("icon_128.png")));

        // Port selector, initially filled with the currently known serial ports.
        JComboBox<String> portNames = new JComboBox<>(CommFacility.ports());
        Container contentPane = frame.getContentPane();
        contentPane.setLayout(new GridBagLayout());
        contentPane.add(new JLabel("Port:"),
                new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.NORTHWEST,
                        GridBagConstraints.BOTH, new Insets(10, 10, 10, 10), 5, 5));
        contentPane.add(portNames,
                new GridBagConstraints(1, 0, 1, 1, 1, 0, GridBagConstraints.NORTHWEST,
                        GridBagConstraints.BOTH, new Insets(10, 10, 10, 10), 5, 5));

        // Convenience button opening the served UI in the default browser.
        JButton button = new JButton("Open Browser");
        button.addActionListener(e -> {
            try {
                Desktop.getDesktop().browse(new URI("http://localhost:" + PORT + "/"));
            } catch (Exception e1) {
                e1.printStackTrace();
            }
        });
        contentPane.add(button,
                new GridBagConstraints(0, 1, 2, 1, 1, 0, GridBagConstraints.NORTHWEST,
                        GridBagConstraints.HORIZONTAL, new Insets(10, 10, 10, 10), 5, 5));

        // Status line; updated from the CommFacility callback below.
        connectStatus = new JLabel(connectionStatusText);
        connectStatus.setBorder(BorderFactory.createBevelBorder(BevelBorder.LOWERED));
        commFacility.setPortStatusConsumer((text, q) -> {
            // Record the status for HTTP handlers, then update the label on the EDT.
            // 'q' is the success flag: green text when true, red when false.
            connectionStatusText = text;
            connectionStatusError = !q;
            SwingUtilities.invokeLater(() -> {
                        connectStatus.setText(connectionStatusText);
                        connectStatus.setForeground(q ? Color.GREEN.darker() : Color.RED.darker());
                    }
            );
        }
        );
        contentPane.add(connectStatus,
                new GridBagConstraints(0, 2, 2, 1, 1, 0, GridBagConstraints.SOUTHWEST,
                        GridBagConstraints.HORIZONTAL, new Insets(10, 10, 10, 10), 5, 5));

        // Selecting a port closes the old connection and switches to the new one.
        portNames.addActionListener(e -> {
                    commFacility.close();
                    commFacility.setPortName((String) portNames.getSelectedItem());
                }
        );
        // Refresh the port list each time the dropdown opens, keeping the selection.
        portNames.addPopupMenuListener(new PopupMenuListener() {
            @Override
            public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
                DefaultComboBoxModel<String> model = new DefaultComboBoxModel<>(CommFacility.ports());
                model.setSelectedItem(portNames.getSelectedItem());
                portNames.setModel(model);
            }

            @Override
            public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
            }

            @Override
            public void popupMenuCanceled(PopupMenuEvent e) {
            }
        });
        // Preselect the configured default port (triggers the action listener above).
        portNames.setSelectedItem(PORT_NAME);

        //Display the window.
        frame.pack();
        frame.setLocationByPlatform(true);
        frame.setVisible(true);
    }

    public static void main(String[] args){
        //Schedule a job for the event-dispatching thread:
        //creating and showing this application's GUI.
        WebMain webMain = new WebMain();
        try {
            webMain.start();
        } catch (IOException e) {
            // Server failed to bind; the GUI is still shown so the user sees status.
            e.printStackTrace();
        }
        javax.swing.SwingUtilities.invokeLater(webMain::createAndShowGUI);
    }

    // Frames are raw binary and commands are tiny; gzip buys nothing here.
    @Override
    protected boolean useGzipWhenAccepted(Response r) {
        return false;
    }
}
package org.keycloak.models.utils; import org.keycloak.models.AuthenticationExecutionModel; import org.keycloak.models.AuthenticationFlowModel; import org.keycloak.models.RealmModel; import org.keycloak.models.RequiredCredentialModel; /** * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @version $Revision: 1 $ */ public class DefaultAuthenticationFlows { public static final String REGISTRATION_FLOW = "registration"; public static final String REGISTRATION_FORM_FLOW = "registration form"; public static final String BROWSER_FLOW = "browser"; public static final String DIRECT_GRANT_FLOW = "direct grant"; public static final String RESET_CREDENTIALS_FLOW = "reset credentials"; public static final String LOGIN_FORMS_FLOW = "forms"; public static final String CLIENT_AUTHENTICATION_FLOW = "clients"; public static void addFlows(RealmModel realm) { if (realm.getFlowByAlias(BROWSER_FLOW) == null) browserFlow(realm); if (realm.getFlowByAlias(DIRECT_GRANT_FLOW) == null) directGrantFlow(realm, false); if (realm.getFlowByAlias(REGISTRATION_FLOW) == null) registrationFlow(realm); if (realm.getFlowByAlias(RESET_CREDENTIALS_FLOW) == null) resetCredentialsFlow(realm); if (realm.getFlowByAlias(CLIENT_AUTHENTICATION_FLOW) == null) clientAuthFlow(realm); } public static void migrateFlows(RealmModel realm) { if (realm.getFlowByAlias(BROWSER_FLOW) == null) browserFlow(realm, true); if (realm.getFlowByAlias(DIRECT_GRANT_FLOW) == null) directGrantFlow(realm, true); if (realm.getFlowByAlias(REGISTRATION_FLOW) == null) registrationFlow(realm); if (realm.getFlowByAlias(RESET_CREDENTIALS_FLOW) == null) resetCredentialsFlow(realm); if (realm.getFlowByAlias(CLIENT_AUTHENTICATION_FLOW) == null) clientAuthFlow(realm); } public static void registrationFlow(RealmModel realm) { AuthenticationFlowModel registrationFlow = new AuthenticationFlowModel(); registrationFlow.setAlias(REGISTRATION_FLOW); registrationFlow.setDescription("registration flow"); 
registrationFlow.setProviderId("basic-flow"); registrationFlow.setTopLevel(true); registrationFlow.setBuiltIn(true); registrationFlow = realm.addAuthenticationFlow(registrationFlow); realm.setRegistrationFlow(registrationFlow); AuthenticationFlowModel registrationFormFlow = new AuthenticationFlowModel(); registrationFormFlow.setAlias(REGISTRATION_FORM_FLOW); registrationFormFlow.setDescription("registration form"); registrationFormFlow.setProviderId("form-flow"); registrationFormFlow.setTopLevel(false); registrationFormFlow.setBuiltIn(true); registrationFormFlow = realm.addAuthenticationFlow(registrationFormFlow); AuthenticationExecutionModel execution; execution = new AuthenticationExecutionModel(); execution.setParentFlow(registrationFlow.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("registration-page-form"); execution.setPriority(10); execution.setAuthenticatorFlow(true); execution.setFlowId(registrationFormFlow.getId()); realm.addAuthenticatorExecution(execution); execution = new AuthenticationExecutionModel(); execution.setParentFlow(registrationFormFlow.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("registration-user-creation"); execution.setPriority(20); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); execution = new AuthenticationExecutionModel(); execution.setParentFlow(registrationFormFlow.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("registration-profile-action"); execution.setPriority(40); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); execution = new AuthenticationExecutionModel(); execution.setParentFlow(registrationFormFlow.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("registration-password-action"); 
execution.setPriority(50); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); //AuthenticatorConfigModel captchaConfig = new AuthenticatorConfigModel(); //captchaConfig.setAlias("Recaptcha Config"); //Map<String, String> config = new HashMap<>(); //config.put("site.key", "6LcFEAkTAAAAAOaY-5RJk3zIYw4AalNtqfac27Bn"); //config.put("secret", "6LcFEAkTAAAAAM0SErEs9NlfhYpOTRj_vOVJSAMI"); //captchaConfig.setConfig(config); //captchaConfig = realm.addAuthenticatorConfig(captchaConfig); execution = new AuthenticationExecutionModel(); execution.setParentFlow(registrationFormFlow.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.DISABLED); execution.setAuthenticator("registration-recaptcha-action"); execution.setPriority(60); execution.setAuthenticatorFlow(false); //execution.setAuthenticatorConfig(captchaConfig.getId()); realm.addAuthenticatorExecution(execution); } public static void browserFlow(RealmModel realm) { browserFlow(realm, false); } private static boolean hasCredentialType(RealmModel realm, String type) { for (RequiredCredentialModel requiredCredentialModel : realm.getRequiredCredentials()) { if (type.equals(requiredCredentialModel.getType())) { return true; } } return false; } public static void resetCredentialsFlow(RealmModel realm) { AuthenticationFlowModel grant = new AuthenticationFlowModel(); grant.setAlias(RESET_CREDENTIALS_FLOW); grant.setDescription("Reset credentials for a user if they forgot their password or something"); grant.setProviderId("basic-flow"); grant.setTopLevel(true); grant.setBuiltIn(true); grant = realm.addAuthenticationFlow(grant); realm.setResetCredentialsFlow(grant); // username AuthenticationExecutionModel execution = new AuthenticationExecutionModel(); execution.setParentFlow(grant.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("reset-credentials-choose-user"); execution.setPriority(10); 
execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); // send email execution = new AuthenticationExecutionModel(); execution.setParentFlow(grant.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("reset-credential-email"); execution.setPriority(20); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); // password execution = new AuthenticationExecutionModel(); execution.setParentFlow(grant.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("reset-password"); execution.setPriority(30); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); // otp execution = new AuthenticationExecutionModel(); execution.setParentFlow(grant.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.OPTIONAL); execution.setAuthenticator("reset-otp"); execution.setPriority(40); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); } public static void directGrantFlow(RealmModel realm, boolean migrate) { AuthenticationFlowModel grant = new AuthenticationFlowModel(); grant.setAlias(DIRECT_GRANT_FLOW); grant.setDescription("OpenID Connect Resource Owner Grant"); grant.setProviderId("basic-flow"); grant.setTopLevel(true); grant.setBuiltIn(true); grant = realm.addAuthenticationFlow(grant); realm.setDirectGrantFlow(grant); // username AuthenticationExecutionModel execution = new AuthenticationExecutionModel(); execution.setParentFlow(grant.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("direct-grant-validate-username"); execution.setPriority(10); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); // password execution = new AuthenticationExecutionModel(); execution.setParentFlow(grant.getId()); 
execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); if (migrate && !hasCredentialType(realm, RequiredCredentialModel.PASSWORD.getType())) { execution.setRequirement(AuthenticationExecutionModel.Requirement.DISABLED); } execution.setAuthenticator("direct-grant-validate-password"); execution.setPriority(20); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); // otp execution = new AuthenticationExecutionModel(); execution.setParentFlow(grant.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.OPTIONAL); if (migrate && hasCredentialType(realm, RequiredCredentialModel.TOTP.getType())) { execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); } execution.setAuthenticator("direct-grant-validate-otp"); execution.setPriority(30); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); } public static void browserFlow(RealmModel realm, boolean migrate) { AuthenticationFlowModel browser = new AuthenticationFlowModel(); browser.setAlias(BROWSER_FLOW); browser.setDescription("browser based authentication"); browser.setProviderId("basic-flow"); browser.setTopLevel(true); browser.setBuiltIn(true); browser = realm.addAuthenticationFlow(browser); realm.setBrowserFlow(browser); AuthenticationExecutionModel execution = new AuthenticationExecutionModel(); execution.setParentFlow(browser.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.ALTERNATIVE); execution.setAuthenticator("auth-cookie"); execution.setPriority(10); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); execution = new AuthenticationExecutionModel(); execution.setParentFlow(browser.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.DISABLED); if (migrate && hasCredentialType(realm, RequiredCredentialModel.KERBEROS.getType())) { execution.setRequirement(AuthenticationExecutionModel.Requirement.ALTERNATIVE); } 
execution.setAuthenticator("auth-spnego"); execution.setPriority(20); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); AuthenticationFlowModel forms = new AuthenticationFlowModel(); forms.setTopLevel(false); forms.setBuiltIn(true); forms.setAlias(LOGIN_FORMS_FLOW); forms.setDescription("Username, password, otp and other auth forms."); forms.setProviderId("basic-flow"); forms = realm.addAuthenticationFlow(forms); execution = new AuthenticationExecutionModel(); execution.setParentFlow(browser.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.ALTERNATIVE); execution.setFlowId(forms.getId()); execution.setPriority(30); execution.setAuthenticatorFlow(true); realm.addAuthenticatorExecution(execution); // forms // Username Password processing execution = new AuthenticationExecutionModel(); execution.setParentFlow(forms.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); execution.setAuthenticator("auth-username-password-form"); execution.setPriority(10); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); // otp processing execution = new AuthenticationExecutionModel(); execution.setParentFlow(forms.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.OPTIONAL); if (migrate && hasCredentialType(realm, RequiredCredentialModel.TOTP.getType())) { execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED); } execution.setAuthenticator("auth-otp-form"); execution.setPriority(20); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); } public static void clientAuthFlow(RealmModel realm) { AuthenticationFlowModel clients = new AuthenticationFlowModel(); clients.setAlias(CLIENT_AUTHENTICATION_FLOW); clients.setDescription("Base authentication for clients"); clients.setProviderId("client-flow"); clients.setTopLevel(true); clients.setBuiltIn(true); clients = 
realm.addAuthenticationFlow(clients); realm.setClientAuthenticationFlow(clients); AuthenticationExecutionModel execution = new AuthenticationExecutionModel(); execution.setParentFlow(clients.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.ALTERNATIVE); execution.setAuthenticator("client-secret"); execution.setPriority(10); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); execution = new AuthenticationExecutionModel(); execution.setParentFlow(clients.getId()); execution.setRequirement(AuthenticationExecutionModel.Requirement.ALTERNATIVE); execution.setAuthenticator("client-jwt"); execution.setPriority(20); execution.setAuthenticatorFlow(false); realm.addAuthenticatorExecution(execution); } }
/* * Copyright (C) 2014-2022 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.schematron.pure.validation; import javax.annotation.Nonnegative; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import com.helger.commons.ValueEnforcer; import com.helger.commons.error.level.EErrorLevel; import com.helger.commons.error.level.IErrorLevel; import com.helger.commons.log.LogHelper; import com.helger.commons.state.EContinue; import com.helger.commons.string.StringHelper; import com.helger.schematron.pure.model.PSAssertReport; import com.helger.schematron.pure.model.PSPattern; import com.helger.schematron.pure.model.PSPhase; import com.helger.schematron.pure.model.PSRule; import com.helger.schematron.pure.model.PSSchema; import com.helger.xml.XMLDebug; /** * A logging implementation of {@link IPSValidationHandler} * * @author Philip Helger * @since 5.3.0 */ public class LoggingPSValidationHandler implements IPSValidationHandler { private final Logger m_aLogger; private IErrorLevel m_aLogLevel = EErrorLevel.INFO; private String m_sLogPrefix; /** * Default constructor. */ public LoggingPSValidationHandler () { this (LoggerFactory.getLogger (LoggingPSValidationHandler.class)); } /** * Constructor with a custom logger * * @param aLogger * The logger to use. May not be <code>null</code>. 
*/ public LoggingPSValidationHandler (@Nonnull final Logger aLogger) { ValueEnforcer.notNull (aLogger, "Logger"); m_aLogger = aLogger; } @Nonnull public final Logger getLogger () { return m_aLogger; } @Nonnull public final IErrorLevel getLogLevel () { return m_aLogLevel; } @Nonnull public final LoggingPSValidationHandler setLogLevel (@Nonnull final IErrorLevel aLogLevel) { ValueEnforcer.notNull (aLogLevel, "LogLevel"); m_aLogLevel = aLogLevel; return this; } @Nullable public final String getLogPrefix () { return m_sLogPrefix; } @Nonnull public final LoggingPSValidationHandler setLogPrefix (@Nullable final String sLogPrefix) { m_sLogPrefix = sLogPrefix; return this; } private void _log (@Nonnull final String sMsg) { LogHelper.log (m_aLogger, m_aLogLevel, StringHelper.getConcatenatedOnDemand (m_sLogPrefix, sMsg)); } @Nonnull public static String getAsString (@Nonnull final Node aNode) { return XMLDebug.getNodeTypeAsString (aNode.getNodeType ()) + ": " + aNode.toString (); } @Nonnull public static String getAsString (@Nonnull final NodeList aNL) { final int nLen = aNL.getLength (); final StringBuilder aSB = new StringBuilder (); aSB.append ("NodeList[").append (nLen).append ("]("); for (int i = 0; i < nLen; ++i) { if (i > 0) aSB.append (", "); aSB.append (getAsString (aNL.item (i))); } aSB.append (')'); return aSB.toString (); } @Override public void onStart (@Nonnull final PSSchema aSchema, @Nullable final PSPhase aActivePhase, @Nullable final String sBaseURI) throws SchematronValidationException { _log ("onStart (" + aSchema + ", " + aActivePhase + ", " + sBaseURI + ")"); } @Override public void onPattern (@Nonnull final PSPattern aPattern) throws SchematronValidationException { _log ("onPattern (" + aPattern + ")"); } @Override public void onRuleStart (@Nonnull final PSRule aRule, @Nonnull final NodeList aContextList) throws SchematronValidationException { _log ("onRuleStart (" + aRule + ", " + getAsString (aContextList) + ")"); } @Override public void onFiredRule 
(@Nonnull final PSRule aRule, @Nonnull final String sContext, @Nonnegative final int nNodeIndex, @Nonnegative final int nNodeCount) throws SchematronValidationException { _log ("onFiredRule (" + aRule + ", " + sContext + ", " + nNodeIndex + ", " + nNodeCount + ")"); } @Nonnull @Override public EContinue onFailedAssert (@Nonnull final PSAssertReport aAssertReport, @Nonnull final String sTestExpression, @Nonnull final Node aRuleMatchingNode, final int nNodeIndex, @Nullable final Object aContext) throws SchematronValidationException { _log ("onFailedAssert (" + aAssertReport + ", " + sTestExpression + ", " + getAsString (aRuleMatchingNode) + ", " + nNodeIndex + ", " + aContext + ")"); return EContinue.CONTINUE; } @Nonnull @Override public EContinue onSuccessfulReport (@Nonnull final PSAssertReport aAssertReport, @Nonnull final String sTestExpression, @Nonnull final Node aRuleMatchingNode, final int nNodeIndex, @Nullable final Object aContext) throws SchematronValidationException { _log ("onSuccessfulReport (" + aAssertReport + ", " + sTestExpression + ", " + getAsString (aRuleMatchingNode) + ", " + nNodeIndex + ", " + aContext + ")"); return EContinue.CONTINUE; } @Override public void onEnd (@Nonnull final PSSchema aSchema, @Nullable final PSPhase aActivePhase) throws SchematronValidationException { _log ("onEnd (" + aSchema + ", " + aActivePhase + ")"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security.authentication.external.impl; import java.security.Principal; import java.util.HashMap; import java.util.Map; import java.util.Set; import javax.jcr.Credentials; import javax.jcr.RepositoryException; import javax.security.auth.Subject; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.login.LoginException; import org.apache.jackrabbit.api.security.user.UserManager; import org.apache.jackrabbit.oak.api.AuthInfo; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.commons.DebugTimer; import org.apache.jackrabbit.oak.namepath.NamePathMapper; import org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl; import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters; import org.apache.jackrabbit.oak.spi.security.authentication.AbstractLoginModule; import org.apache.jackrabbit.oak.spi.security.authentication.AuthInfoImpl; import org.apache.jackrabbit.oak.spi.security.authentication.ImpersonationCredentials; import org.apache.jackrabbit.oak.spi.security.authentication.PreAuthenticatedLogin; import 
org.apache.jackrabbit.oak.spi.security.authentication.credentials.SimpleCredentialsSupport;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityException;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProviderManager;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityRef;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalUser;
import org.apache.jackrabbit.oak.spi.security.authentication.credentials.CredentialsSupport;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncContext;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncException;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncHandler;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncManager;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncResult;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncedIdentity;
import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@code ExternalLoginModule} implements a {@code LoginModule} that uses an
 * {@link ExternalIdentityProvider} for authentication.
 */
public class ExternalLoginModule extends AbstractLoginModule {

    private static final Logger log = LoggerFactory.getLogger(ExternalLoginModule.class);

    // Upper bound on commit retries in syncUser() when concurrent commits collide.
    private static final int MAX_SYNC_ATTEMPTS = 50;

    /**
     * Name of the parameter that configures the name of the external identity provider.
     */
    public static final String PARAM_IDP_NAME = SyncHandlerMapping.PARAM_IDP_NAME;

    /**
     * Name of the parameter that configures the name of the synchronization handler.
     */
    public static final String PARAM_SYNC_HANDLER_NAME = SyncHandlerMapping.PARAM_SYNC_HANDLER_NAME;

    // Resolved lazily from the whiteboard in initialize() unless injected via setter.
    private ExternalIdentityProviderManager idpManager;

    // Resolved lazily from the whiteboard in initialize() unless injected via setter.
    private SyncManager syncManager;

    // Defaults to SimpleCredentials support; replaced in initialize() when the
    // IDP itself implements CredentialsSupport.
    private CredentialsSupport credentialsSupport = SimpleCredentialsSupport.getInstance();

    /**
     * internal configuration when invoked from a factory rather than jaas
     */
    private ConfigurationParameters osgiConfig;

    /**
     * The external identity provider as specified by the {@link #PARAM_IDP_NAME}
     */
    private ExternalIdentityProvider idp;

    /**
     * The configured sync handler as specified by the {@link #PARAM_SYNC_HANDLER_NAME}
     */
    private SyncHandler syncHandler;

    /**
     * The external user as resolved in the login call.
     */
    private ExternalUser externalUser;

    /**
     * Login credentials
     */
    private Credentials credentials;

    /**
     * Default constructor for the OSGIi LoginModuleFactory case and the default non-OSGi JAAS case.
     */
    @SuppressWarnings("UnusedDeclaration")
    public ExternalLoginModule() {
    }

    /**
     * Creates a new ExternalLoginModule with the given OSGi config.
     * @param osgiConfig the config
     */
    public ExternalLoginModule(ConfigurationParameters osgiConfig) {
        this.osgiConfig = osgiConfig;
    }

    //--------------------------------------------------------< LoginModule >---

    /**
     * Resolves the configured {@link ExternalIdentityProvider} and
     * {@link SyncHandler} from the whiteboard using the {@link #PARAM_IDP_NAME}
     * and {@link #PARAM_SYNC_HANDLER_NAME} options. If any piece is missing an
     * error is logged and the corresponding field stays {@code null}, which
     * makes {@link #login()} return {@code false} later on.
     */
    @Override
    public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState, Map<String, ?> opts) {
        super.initialize(subject, callbackHandler, sharedState, opts);

        // merge options with osgi options if needed
        if (osgiConfig != null) {
            options = ConfigurationParameters.of(osgiConfig, options);
        }

        Whiteboard whiteboard = getWhiteboard();
        if (whiteboard == null) {
            log.error("External login module needs whiteboard. Will not be used for login.");
            return;
        }

        // NOTE(review): "IPD" in the message below looks like a typo for "IDP";
        // kept as-is because it is a runtime log string.
        String idpName = options.getConfigValue(PARAM_IDP_NAME, "");
        if (idpName.isEmpty()) {
            log.error("External login module needs IPD name. Will not be used for login.");
        } else {
            if (idpManager == null) {
                idpManager = WhiteboardUtils.getService(whiteboard, ExternalIdentityProviderManager.class);
            }
            if (idpManager == null) {
                log.error("External login module needs IDPManager. Will not be used for login.");
            } else {
                idp = idpManager.getProvider(idpName);
                if (idp == null) {
                    log.error("No IDP found with name {}. Will not be used for login.", idpName);
                }
            }
        }

        String syncHandlerName = options.getConfigValue(PARAM_SYNC_HANDLER_NAME, "");
        if (syncHandlerName.isEmpty()) {
            log.error("External login module needs SyncHandler name. Will not be used for login.");
        } else {
            if (syncManager == null) {
                syncManager = WhiteboardUtils.getService(whiteboard, SyncManager.class);
            }
            if (syncManager == null) {
                log.error("External login module needs SyncManager. Will not be used for login.");
            } else {
                syncHandler = syncManager.getSyncHandler(syncHandlerName);
                if (syncHandler == null) {
                    log.error("No SyncHandler found with name {}. Will not be used for login.", syncHandlerName);
                }
            }
        }

        // Prefer the IDP's own credentials support when it provides one.
        if (idp instanceof CredentialsSupport) {
            credentialsSupport = (CredentialsSupport) idp;
        } else {
            log.debug("No 'SupportedCredentials' configured. Using default implementation supporting 'SimpleCredentials'.");
        }
    }

    /**
     * Attempts to authenticate against the external IDP (or accepts a
     * pre-authenticated login from a previous module) and, on success, syncs
     * the external user into the repository and populates the shared state.
     *
     * @return {@code true} when the IDP resolved a user; {@code false} when
     *         this module is not applicable (missing config, no credentials,
     *         local/foreign identity, or IDP returned {@code null}).
     * @throws LoginException when the IDP rejects the credentials or the sync fails
     */
    @Override
    public boolean login() throws LoginException {
        if (idp == null || syncHandler == null) {
            return false;
        }

        credentials = getCredentials();

        // check if we have a pre authenticated login from a previous login module
        final PreAuthenticatedLogin preAuthLogin = getSharedPreAuthLogin();
        final String userId = getUserId(preAuthLogin, credentials);

        if (userId == null && credentials == null) {
            log.debug("No credentials|userId found for external login module. ignoring.");
            return false;
        }

        // remember identification for log-output
        Object logId = (userId != null) ? userId : credentials;
        try {
            // check if there exists a user with the given ID that has been synchronized
            // before into the repository.
            SyncedIdentity sId = getSyncedIdentity(userId);

            // if there exists an authorizable with the given userid (syncedIdentity != null),
            // ignore it if any of the following conditions is met:
            // - identity is local (i.e. not an external identity)
            // - identity belongs to another IDP
            // - identity is valid but we have a preAuthLogin and the user doesn't need an updating sync (OAK-3508)
            if (ignore(sId, preAuthLogin)) {
                return false;
            }

            if (preAuthLogin != null) {
                externalUser = idp.getUser(preAuthLogin.getUserId());
            } else {
                externalUser = idp.authenticate(credentials);
            }

            if (externalUser != null) {
                log.debug("IDP {} returned valid user {}", idp.getName(), externalUser);

                if (credentials != null) {
                    //noinspection unchecked
                    sharedState.put(SHARED_KEY_CREDENTIALS, credentials);
                }

                //noinspection unchecked
                sharedState.put(SHARED_KEY_LOGIN_NAME, externalUser.getId());

                syncUser(externalUser);
                return true;
            } else {
                debug("IDP {} returned null for {}", idp.getName(), logId.toString());

                if (sId != null) {
                    // invalidate the user if it exists as synced variant
                    log.debug("local user exists for '{}'. re-validating.", sId.getId());
                    validateUser(sId.getId());
                }
                return false;
            }
        } catch (ExternalIdentityException e) {
            log.error("Error while authenticating '{}' with {}", logId, idp.getName(), e);
            return false;
        } catch (LoginException e) {
            log.debug("IDP {} throws login exception for '{}': {}", idp.getName(), logId, e.getMessage());
            throw e;
        } catch (Exception e) {
            log.debug("SyncHandler {} throws sync exception for '{}'", syncHandler.getName(), logId, e);
            // preserve the original cause on the rethrown LoginException
            LoginException le = new LoginException("Error while syncing user.");
            le.initCause(e);
            throw le;
        }
    }

    /**
     * Adds the resolved principals (and the supplied credentials) to the
     * subject when {@link #login()} succeeded; otherwise clears state.
     *
     * @return {@code true} iff principals were committed to the subject
     */
    @Override
    public boolean commit() {
        if (externalUser == null) {
            // login attempt in this login module was not successful
            clearState();
            return false;
        }
        Set<? extends Principal> principals = getPrincipals(externalUser.getId());
        if (!principals.isEmpty()) {
            if (!subject.isReadOnly()) {
                subject.getPrincipals().addAll(principals);
                if (credentials != null) {
                    subject.getPublicCredentials().add(credentials);
                }
                setAuthInfo(createAuthInfo(externalUser.getId(), principals), subject);
            } else {
                log.debug("Could not add information to read only subject {}", subject);
            }
            return true;
        }
        clearState();
        return false;
    }

    /**
     * Clears the module state. Always returns {@code true}.
     */
    @Override
    public boolean abort() {
        clearState();
        // do we need to remove the user again, in case we created it during login() ?
        return true;
    }

    //------------------------------------------------------------< private >---

    /**
     * Resolves the user id, preferring the pre-authenticated login over the
     * credentials. Returns {@code null} when neither yields an id.
     */
    @Nullable
    private String getUserId(@Nullable PreAuthenticatedLogin preAuthLogin, @Nullable Credentials credentials) {
        if (preAuthLogin != null) {
            return preAuthLogin.getUserId();
        } else if (credentials != null){
            return credentialsSupport.getUserId(credentials);
        } else {
            return null;
        }
    }

    /**
     * Looks up a previously synced identity for the given user id via the
     * sync handler; {@code null} when the id or the user manager is missing.
     */
    @Nullable
    private SyncedIdentity getSyncedIdentity(@Nullable String userId) throws RepositoryException {
        UserManager userMgr = getUserManager();
        if (userId != null && userMgr != null) {
            return syncHandler.findIdentity(userMgr, userId);
        } else {
            return null;
        }
    }

    /**
     * @return {@code true} when this module must not handle the identity:
     *         it is local, belongs to another IDP, or is pre-authenticated
     *         and does not require a sync update.
     */
    private boolean ignore(@Nullable SyncedIdentity syncedIdentity, @Nullable PreAuthenticatedLogin preAuthLogin) {
        if (syncedIdentity != null) {
            ExternalIdentityRef externalIdRef = syncedIdentity.getExternalIdRef();
            if (externalIdRef == null) {
                debug("ignoring local user: {}", syncedIdentity.getId());
                return true;
            } else if (!idp.getName().equals(externalIdRef.getProviderName())) {
                debug("ignoring foreign identity: {} (idp={})", externalIdRef.getString(), idp.getName());
                return true;
            }
            if (preAuthLogin != null && !syncHandler.requiresSync(syncedIdentity)) {
                debug("pre-authenticated external user {} does not require syncing.", syncedIdentity.toString());
                return true;
            }
        }
        return false;
    }

    /**
     * Initiates synchronization of the external user.
     * Retries up to {@link #MAX_SYNC_ATTEMPTS} times when the commit fails
     * (e.g. due to a concurrent sync), refreshing the root between attempts.
     * @param user the external user
     * @throws SyncException if an error occurs
     */
    private void syncUser(@NotNull ExternalUser user) throws SyncException {
        Root root = getRoot();
        if (root == null) {
            throw new SyncException("Cannot synchronize user. root == null");
        }
        UserManager userManager = getUserManager();
        if (userManager == null) {
            throw new SyncException("Cannot synchronize user. userManager == null");
        }

        int numAttempt = 0;
        while (numAttempt++ < MAX_SYNC_ATTEMPTS) {
            SyncContext context = null;
            try {
                DebugTimer timer = new DebugTimer();
                context = syncHandler.createContext(idp, userManager, new ValueFactoryImpl(root, NamePathMapper.DEFAULT));
                SyncResult syncResult = context.sync(user);
                timer.mark("sync");
                if (root.hasPendingChanges()) {
                    root.commit();
                    timer.mark("commit");
                }
                debug("syncUser({}) {}, status: {}", user.getId(), timer.getString(), syncResult.getStatus().toString());
                return;
            } catch (CommitFailedException e) {
                log.warn("User synchronization failed during commit: {}. (attempt {}/{})", e.toString(), numAttempt, MAX_SYNC_ATTEMPTS);
                // discard the failed transient changes before retrying
                root.refresh();
            } finally {
                if (context != null) {
                    context.close();
                }
            }
        }
        throw new SyncException("User synchronization failed during commit after " + MAX_SYNC_ATTEMPTS + " attempts");
    }

    /**
     * Initiates synchronization of a possible remove user
     * @param id the user id
     */
    private void validateUser(@NotNull String id) throws SyncException {
        SyncContext context = null;
        try {
            Root root = getRoot();
            if (root == null) {
                throw new SyncException("Cannot synchronize user. root == null");
            }
            UserManager userManager = getUserManager();
            if (userManager == null) {
                throw new SyncException("Cannot synchronize user. userManager == null");
            }
            DebugTimer timer = new DebugTimer();
            context = syncHandler.createContext(idp, userManager, new ValueFactoryImpl(root, NamePathMapper.DEFAULT));
            context.sync(id);
            timer.mark("sync");
            root.commit();
            timer.mark("commit");
            debug("validateUser({}) {}", id, timer.getString());
        } catch (CommitFailedException e) {
            throw new SyncException("User synchronization failed during commit.", e);
        } finally {
            if (context != null) {
                context.close();
            }
        }
    }

    /**
     * Builds the {@link AuthInfo} for the committed subject. Attributes come
     * from the shared state map when present, otherwise from the credentials
     * (unwrapping {@link ImpersonationCredentials} first).
     */
    @NotNull
    private AuthInfo createAuthInfo(@NotNull String userId, @NotNull Set<? extends Principal> principals) {
        Credentials creds;
        if (credentials instanceof ImpersonationCredentials) {
            creds = ((ImpersonationCredentials) credentials).getBaseCredentials();
        } else {
            creds = credentials;
        }
        Map<String, Object> attributes = new HashMap<>();
        Object shared = sharedState.get(SHARED_KEY_ATTRIBUTES);
        if (shared instanceof Map) {
            for (Map.Entry<?,?> entry : ((Map<?,?>) shared).entrySet()) {
                attributes.put(entry.getKey().toString(), entry.getValue());
            }
        } else if (creds != null) {
            attributes.putAll(credentialsSupport.getAttributes(creds));
        }
        return new AuthInfoImpl(userId, attributes, principals);
    }

    // Debug-log helper that skips argument evaluation cost when debug is off.
    private static void debug(@NotNull String msg, String... args) {
        if (log.isDebugEnabled()) {
            log.debug(msg, args);
        }
    }

    //------------------------------------------------< AbstractLoginModule >---

    /**
     * Clears the base-class state plus this module's resolved user and credentials.
     */
    @Override
    protected void clearState() {
        super.clearState();
        externalUser = null;
        credentials = null;
    }

    /**
     * @return the set of credentials classes as exposed by the configured
     * {@link CredentialsSupport} implementation.
     */
    @NotNull
    @Override
    protected Set<Class> getSupportedCredentials() {
        return credentialsSupport.getCredentialClasses();
    }

    //----------------------------------------------< public setters (JAAS) >---

    // JAAS-style injection; overrides whiteboard lookup in initialize().
    public void setSyncManager(@NotNull SyncManager syncManager) {
        this.syncManager = syncManager;
    }

    // JAAS-style injection; overrides whiteboard lookup in initialize().
    public void setIdpManager(@NotNull ExternalIdentityProviderManager idpManager) {
        this.idpManager = idpManager;
    }
}
package org.imaginea.botbot;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.net.HttpURLConnection;
import java.net.Socket;
import java.net.URL;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import android.os.AsyncTask;
import android.util.Log;

/**
 * Queues recorded commands and ships them to a bot-bot record server over
 * HTTP/JSON. A background {@link CreateSessionTask} drains the queue, lazily
 * creating/refreshing a record session on the server as needed.
 */
public class CommandTransmitter {
	// NOTE(review): soc/ps appear unused in this class — confirm before removing.
	Socket soc = null;
	PrintStream ps = null;
	// Server connection settings; ServerProperties is a project type — assumed
	// to carry static-config strings (TODO confirm).
	static ServerProperties sp = new ServerProperties();
	// Id of the current server-side record session; null/"" means "not created yet".
	static String sessionID = null;
	static String tempSession=null;
	String serverIP = sp.serverIP;
	String port=sp.port;
	String sessionName=sp.sessionName;
	String serverName=sp.serverName;
	// Commands waiting to be posted; take() in the worker blocks until data arrives.
	BlockingQueue<String> queue = new LinkedBlockingQueue<String>();
	// Base URL, e.g. http://host:port[/serverName]; filled in by the constructor.
	static String sUrl="";

	/**
	 * Builds the base URL from the configured server properties and starts the
	 * background task that creates the session and drains the queue.
	 */
	public CommandTransmitter() {
		if(!serverName.equalsIgnoreCase("")){
			sUrl="http://" + serverIP +":"+port+"/"+serverName;
		}else{
			sUrl="http://" + serverIP +":"+port;
		}
		System.out.println("Creating session....");
		new CreateSessionTask().execute();
	}

	/**
	 * Enqueues a command's payload for asynchronous transmission.
	 * Non-blocking; offer() on an unbounded LinkedBlockingQueue always succeeds.
	 */
	public void publish(Command command) {
		queue.offer(command.getData());
	}

	/**
	 * Probes the server for the given session id.
	 *
	 * @return true when the GET succeeded end-to-end, false on any exception.
	 *         NOTE(review): the catch swallows all exceptions by design —
	 *         callers only learn reachable/unreachable, not why.
	 */
	public boolean checkSession(String id) {
		try {
			URL url = null;
			url = new URL(sUrl+"/api/recordsessions/" + id);
			HttpURLConnection connection = (HttpURLConnection) url
					.openConnection();
			connection.setDoInput(true);
			connection.setRequestProperty("Accept", "application/json");
			connection.setRequestProperty("Content-Type",
					"application/json; charset=UTF-8");
			BufferedReader in = new BufferedReader(new InputStreamReader(
					connection.getInputStream()));
			String inputLine;
			while ((inputLine = in.readLine()) != null)
				System.out.println(inputLine);
			in.close();
			return true;
		} catch (Exception e) {
			return false;
		}
	}

	/**
	 * @author moiz
	 *
	 *         CreateSessionTask is a sub class to create an AsyncTask AsyncTask
	 *         is a thread which runs parallel to the main UI thread.
	 *
	 */
	class CreateSessionTask extends AsyncTask<Void, Void, Void> {
		// Monotonically increasing entry counter for the current session.
		int recordID=1;
		// Entry number of the previously posted record (0 = none yet).
		int prevRecord=1;

		/**
		 * This method is called when CreateSessionTask.execute() is invoked.
		 * This method performs two actions, first it creates a session and then it waits for population of
		 * queue in a while loop, when queue is not empty it removes the head and writes the data to the server.
		 */
		@Override
		protected Void doInBackground(Void... nothing) {
			// creating session
			System.out.println("In background");
			recordID=1;
			prevRecord=0;
			// Infinite drain loop: blocks on queue.take() until a command arrives.
			while (true) {
				try {
					String data=queue.take();
					String session=getSession();
					//it waits until queue is populated
					postRecord(data,session);
				} catch (InterruptedException e) {
					// TODO Auto-generated catch block
					// NOTE(review): interrupt is swallowed and the loop continues;
					// consider restoring the interrupt flag and exiting.
					e.printStackTrace();
				}
			}
		}

		//Performs writing data to the server
		// POSTs one record entry as hand-built JSON; no-op (logged) when the
		// session id is empty. recordID/prevRecord advance only on a send attempt.
		void postRecord(String data,String session) {
			// TODO Auto-generated method stub
			if (!session.equalsIgnoreCase("")) {
				try {
					DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
					String currentDateTime = df.format(new Date());
					Log.i("bot-bot", "In create record Async task:- " + session
							+ data);
					// NOTE(review): JSON is concatenated by hand; payload values are
					// not escaped — confirm the payload can never contain quotes.
					String postData = "{\"entryNo\":\"" + recordID + "\","
							+ "\"prevEntryNo\":\"" + prevRecord
							+ "\",\"recordSession\":{\"id\":\"" + session
							+ "\"},\"entryTime\":\"" + currentDateTime
							+ "\",\"payload\":\"" + data + "\"}";
					Log.i("bot-bot", postData);
					URL url = null;
					url = new URL(sUrl + "/api/recordentries");
					HttpURLConnection connection = (HttpURLConnection) url
							.openConnection();
					connection.setDoOutput(true);
					connection.setRequestProperty("Accept", "application/json");
					connection.setRequestProperty("Content-Type",
							"application/json; charset=UTF-8");
					OutputStreamWriter out = new OutputStreamWriter(
							connection.getOutputStream());
					out.write(postData);
					out.close();
					prevRecord = recordID;
					recordID++;
					Map<String, List<String>> respHeaders = connection
							.getHeaderFields();
					// 201 Created: log the Location header (case varies by server).
					if (connection.getResponseCode() == 201) {
						if (respHeaders.containsKey("Location")) {
							Log.i("bot-bot", respHeaders.get("Location")
									.toString());
						} else {
							Log.i("bot-bot", respHeaders.get("location")
									.toString());
						}
					} else {
						Log.i("bot-bot", "Invalid Response");
					}
					BufferedReader in = new BufferedReader(
							new InputStreamReader(connection.getInputStream()));
					String inputLine;
					while ((inputLine = in.readLine()) != null)
						System.out.println(inputLine);
					in.close();
				} catch (Exception e) {
					Log.i("bot-bot",
							"Exception occured in postMethod: "+e.getMessage());
				}
			} else {
				Log.i("bot-bot",
						"Inside post record. Data not posted as session was empty.");
			}
		}

		// Returns a usable session id: creates a new session when none exists or
		// the server reports the current one as "stopped"; returns "" when no
		// session could be obtained.
		String getSession(){
			String session = "";
			if (CommandTransmitter.sessionID == null
					|| CommandTransmitter.sessionID.contentEquals("")) {
				CommandTransmitter.sessionID=createNewSession();
				session = CommandTransmitter.sessionID;
			} else {
				try {
					URL url = new URL(sUrl + "/api/recordsessions/"+CommandTransmitter.sessionID);
					HttpURLConnection connection = (HttpURLConnection) url
							.openConnection();
					BufferedReader rd = new BufferedReader(
							new InputStreamReader(connection.getInputStream()));
					StringBuffer sb = new StringBuffer();
					String line;
					// NOTE(review): ready() can be false while more data is still in
					// flight, so this may read a truncated body; a readLine()!=null
					// loop would be more reliable — confirm before changing.
					while (rd.ready()) {
						line = rd.readLine();
						sb.append(line);
					}
					rd.close();
					String status="";
					String result = sb.toString();
					// Scrapes <status>...</status> out of the (XML-ish) response body.
					//if(result.indexOf("<status>")>0 && result.indexOf("</status>")>0){
					status = result.substring(
							result.indexOf("<status>") + 8,
							result.indexOf("</status>"));
					//}
					if (status.contentEquals("stopped")) {
						// Server closed the old session: reset counters and start fresh.
						recordID=1;
						prevRecord=0;
						CommandTransmitter.sessionID = createNewSession();
						session = CommandTransmitter.sessionID;
					} else if (status.contentEquals("started")) {
						session = CommandTransmitter.sessionID;
					}
				} catch (IOException e) {
					Log.i("bot-bot",
							"Unable to get status of the record. Application will continue without recording");
					Log.i("bot-bot","Url is : "+ sUrl + "/api/recordsessions/"+CommandTransmitter.sessionID);
					Log.i("bot-bot", "Error is: "+e.toString());
				}catch (Exception e) {
					Log.i("bot-bot",
							"Unable to get status of the record. Application will continue without recording");
					Log.i("bot-bot","Url is : "+ sUrl + "/api/recordsessions/"+CommandTransmitter.sessionID);
					Log.i("bot-bot", "Error is: "+e.toString());
					// Non-IO failure (e.g. substring on an unexpected body): fall back
					// to the existing session id.
					session = CommandTransmitter.sessionID;
				}
			}
			return session;
		}

		// POSTs a new record session and extracts its id from the 201 response's
		// Location header; returns "" on any failure (recording is then skipped).
		String createNewSession() {
			String genSessionId = "";
			try{
				URL url = null;
				url = new URL(sUrl + "/api/recordsessions");
				HttpURLConnection connection = (HttpURLConnection) url
						.openConnection();
				connection.setDoOutput(true);
				connection.setRequestProperty("Accept", "application/json");
				connection.setRequestProperty("Content-Type",
						"application/json; charset=UTF-8");
				OutputStreamWriter out = new OutputStreamWriter(
						connection.getOutputStream());
				out.write("{\n\"name\":\"" + sessionName + "\",\"status\":\"started\"\n}");
				out.close();
				Map<String, List<String>> respHeaders = connection
						.getHeaderFields();
				if (connection.getResponseCode() == 201) {
					String temp;
					if (respHeaders.containsKey("Location")) {
						temp = respHeaders.get("Location").get(0);
					} else {
						temp = respHeaders.get("location").get(0);
					}
					// Session id is the last path segment of the Location URL.
					genSessionId = temp.substring(temp.lastIndexOf("/") + 1);
					Log.i("TASK", "Session ID received: " + genSessionId);
				} else {
					throw new Exception("Invalid responce");
				}
			}catch(Exception e){
				Log.i("bot-bot","Unable to create record because of :" + e);
				Log.i("bot-bot","System will continue to work without recording.");
			}
			return genSessionId;
		}
	}
}
/* * Copyright 2006 The Apache Software Foundation or its licensors, as applicable * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Igor A. Pyankov * @version $Revision: 1.3 $ */ package org.apache.harmony.test.reliability.api.kernel.thread.Synchronization; import org.apache.harmony.test.reliability.share.Test; /** * Goal: check thread synchronization * The test does: * 1. Reads parameter, which is: * param[0] - number of iterations to run each thread * 2. Create 7 threads which work with the same object * - The object has several variables that are changed by various methods * with "synchronized" block. * 3. Checks that each thread PASSed. * 4. Each thread, being started: * a. Runs param[0] iterations in a cycle, on each iteration: * b. call method with "synchronized" block. * c. stops if variable changed in "synchronized" block. 
*/
public class SynchroTest extends Test {

    /** Number of iterations each worker thread runs (overridable via param[0]). */
    static int iteration = 50;

    /** Count of threads still running; decremented by each worker on exit. */
    static volatile int finish;

    SynchroThread[] synchroThread = new SynchroThread[7];

    public static void main(String[] args) {
        System.exit(new SynchroTest().test(args));
    }

    /**
     * Starts seven threads that hammer the same {@link WorkingClass} instance
     * through different synchronized methods, waits for all of them, and fails
     * if any thread observed a torn/inconsistent update.
     *
     * @param params optional; params[0] overrides the per-thread iteration count
     * @return pass/fail status code from the harness
     */
    public int test(String[] params) {
        parseParams(params);
        WorkingClass workingObj = new WorkingClass();

        // Each selector picks which synchronized method its thread exercises.
        int[] selectors = { 1, 2, 3, 4, 12, 34, 1234 };
        for (int i = 0; i < synchroThread.length; i++) {
            synchroThread[i] = new SynchroThread(workingObj, selectors[i]);
        }

        finish = synchroThread.length;
        for (int i = 0; i < synchroThread.length; i++) {
            synchroThread[i].start();
        }

        // FIX: wait via join() instead of the original busy-wait
        // "while (finish > 0) {}", which burned a full core while waiting.
        for (int i = 0; i < synchroThread.length; i++) {
            try {
                synchroThread[i].join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return fail("Interrupted while waiting for worker threads");
            }
        }

        for (int i = 0; i < synchroThread.length; i++) {
            if (synchroThread[i].status == SynStatus.FAIL) {
                log.add("Status of thread " + i + " is FAIL");
                return fail("Synchronization is broken");
            }
        }
        return pass("OK");
    }

    /** Reads the optional iteration-count parameter. */
    public void parseParams(String[] params) {
        if (params.length >= 1) {
            iteration = Integer.parseInt(params[0]);
        }
    }
}

/**
 * Worker thread: repeatedly invokes the synchronized method selected by
 * {@code selector} and records FAIL if the method ever observes an
 * inconsistent value.
 */
class SynchroThread extends Thread {

    WorkingClass workingObj;
    int selector;
    int iteration;
    int status;

    public SynchroThread(WorkingClass wc, int sel) {
        workingObj = wc;
        selector = sel;
        iteration = SynchroTest.iteration;
        status = SynStatus.PASS;
    }

    public void run() {
        boolean res = true;
        while (res && iteration-- > 0) {
            // BUG FIX: the original switch had no break statements, so every
            // case fell through and each thread executed ALL subsequent do_*
            // methods instead of only the one its selector designates.
            switch (selector) {
            case 1:
                res = workingObj.do_1();
                break;
            case 2:
                res = workingObj.do_2();
                break;
            case 12:
                res = workingObj.do_12();
                break;
            case 3:
                res = workingObj.do_3();
                break;
            case 4:
                res = workingObj.do_4();
                break;
            case 34:
                res = workingObj.do_34();
                break;
            case 1234:
                res = workingObj.do_1234();
                break;
            }
            if (!res) {
                status = SynStatus.FAIL;
            }
        }
        // Signal completion; the shared object serves as the lock so the
        // read-modify-write on the volatile counter is atomic across threads.
        synchronized (workingObj) {
            SynchroTest.finish--;
        }
    }
}

/**
 * Shared object under test. Writers to var1/var2 synchronize on lock12,
 * writers to var3/var4 on lock34; do_1234 nests both locks (always in the
 * same lock12-then-lock34 order, so no deadlock is possible).
 */
class WorkingClass {

    public int var1;
    public int var2;
    public int var3;
    public int var4;

    public Object lock12;
    public Object lock34;
    public Object lock1234;  // NOTE(review): declared but never used as a monitor

    WorkingClass() {
        var1 = 0;
        var2 = 0;
        var3 = 0;
        var4 = 0;
        lock12 = new Object();
        lock34 = new Object();
        lock1234 = new Object();
    }

    /** Writes var1 under lock12 and verifies it was not clobbered mid-section. */
    public boolean do_1() {
        boolean result;
        synchronized (lock12) {
            var1 = 1;
            Thread.yield();  // invite a context switch inside the critical section
            result = (var1 == 1);
        }
        return result;
    }

    /** Writes var2 under lock12 and verifies it was not clobbered mid-section. */
    public boolean do_2() {
        boolean result;
        synchronized (lock12) {
            var2 = 2;
            Thread.yield();
            result = (var2 == 2);
        }
        return result;
    }

    /** Writes var1 and var2 together under lock12 and verifies both. */
    public boolean do_12() {
        boolean result;
        synchronized (lock12) {
            var1 = 12;
            var2 = 12;
            Thread.yield();
            result = ((var1 == 12) && (var2 == 12));
        }
        return result;
    }

    /** Writes var3 under lock34 and verifies it was not clobbered mid-section. */
    public boolean do_3() {
        boolean result;
        synchronized (lock34) {
            var3 = 3;
            Thread.yield();
            result = (var3 == 3);
        }
        return result;
    }

    /** Writes var4 under lock34 and verifies it was not clobbered mid-section. */
    public boolean do_4() {
        boolean result;
        synchronized (lock34) {
            var4 = 4;
            Thread.yield();
            result = (var4 == 4);
        }
        return result;
    }

    /** Writes var3 and var4 together under lock34 and verifies both. */
    public boolean do_34() {
        boolean result;
        synchronized (lock34) {
            var3 = 34;
            var4 = 34;
            Thread.yield();
            // BUG FIX: the original tested (var3 == 34) && (var3 == 34),
            // so a lost update to var4 could never be detected.
            result = ((var3 == 34) && (var4 == 34));
        }
        return result;
    }

    /** Writes all four variables under both locks (lock12 outer, lock34 inner). */
    public boolean do_1234() {
        boolean result;
        synchronized (lock12) {
            synchronized (lock34) {
                var1 = 1234;
                var2 = 1234;
                var3 = 1234;
                var4 = 1234;
                Thread.yield();
                result = ((var1 == 1234) && (var2 == 1234)
                          && (var3 == 1234) && (var4 == 1234));
            }
        }
        return result;
    }
}

/** Per-thread result codes. */
class SynStatus {
    public static final int FAIL = -1;
    public static final int PASS = 1;
}
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.maodian.flyingcat.xmpp.codec; import java.io.ByteArrayInputStream; import java.io.InputStream; /** * A bogus key store which provides all the required information to * create an example SSL connection. * * To generate a bogus key store: * <pre> * keytool -genkey -alias securechat -keysize 2048 -validity 36500 * -keyalg RSA -dname "CN=securechat" * -keypass secret -storepass secret * -keystore cert.jks * </pre> */ public final class SecureKeyStore { private static final short[] DATA = { 0xfe, 0xed, 0xfe, 0xed, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x00, 0x00, 0x01, 0x1a, 0x9f, 0x57, 0xa5, 0x27, 0x00, 0x00, 0x01, 0x9a, 0x30, 0x82, 0x01, 0x96, 0x30, 0x0e, 0x06, 0x0a, 0x2b, 0x06, 0x01, 0x04, 0x01, 0x2a, 0x02, 0x11, 0x01, 0x01, 0x05, 0x00, 0x04, 0x82, 0x01, 0x82, 0x48, 0x6d, 0xcf, 0x16, 0xb5, 0x50, 0x95, 0x36, 0xbf, 0x47, 0x27, 0x50, 0x58, 0x0d, 0xa2, 0x52, 0x7e, 0x25, 0xab, 0x14, 0x1a, 0x26, 0x5e, 0x2d, 0x8a, 0x23, 0x90, 0x60, 0x7f, 0x12, 0x20, 0x56, 0xd1, 0x43, 0xa2, 0x6b, 0x47, 0x5d, 0xed, 0x9d, 0xd4, 0xe5, 0x83, 0x28, 0x89, 0xc2, 0x16, 0x4c, 0x76, 0x06, 0xad, 0x8e, 0x8c, 0x29, 0x1a, 0x9b, 0x0f, 0xdd, 0x60, 0x4b, 0xb4, 0x62, 0x82, 0x9e, 0x4a, 0x63, 0x83, 0x2e, 0xd2, 0x43, 0x78, 0xc2, 0x32, 0x1f, 0x60, 0xa9, 0x8a, 0x7f, 0x0f, 0x7c, 0xa6, 0x1d, 0xe6, 0x92, 0x9e, 
0x52, 0xc7, 0x7d, 0xbb, 0x35, 0x3b, 0xaa, 0x89, 0x73, 0x4c, 0xfb, 0x99, 0x54, 0x97, 0x99, 0x28, 0x6e, 0x66, 0x5b, 0xf7, 0x9b, 0x7e, 0x6d, 0x8a, 0x2f, 0xfa, 0xc3, 0x1e, 0x71, 0xb9, 0xbd, 0x8f, 0xc5, 0x63, 0x25, 0x31, 0x20, 0x02, 0xff, 0x02, 0xf0, 0xc9, 0x2c, 0xdd, 0x3a, 0x10, 0x30, 0xab, 0xe5, 0xad, 0x3d, 0x1a, 0x82, 0x77, 0x46, 0xed, 0x03, 0x38, 0xa4, 0x73, 0x6d, 0x36, 0x36, 0x33, 0x70, 0xb2, 0x63, 0x20, 0xca, 0x03, 0xbf, 0x5a, 0xf4, 0x7c, 0x35, 0xf0, 0x63, 0x1a, 0x12, 0x33, 0x12, 0x58, 0xd9, 0xa2, 0x63, 0x6b, 0x63, 0x82, 0x41, 0x65, 0x70, 0x37, 0x4b, 0x99, 0x04, 0x9f, 0xdd, 0x5e, 0x07, 0x01, 0x95, 0x9f, 0x36, 0xe8, 0xc3, 0x66, 0x2a, 0x21, 0x69, 0x68, 0x40, 0xe6, 0xbc, 0xbb, 0x85, 0x81, 0x21, 0x13, 0xe6, 0xa4, 0xcf, 0xd3, 0x67, 0xe3, 0xfd, 0x75, 0xf0, 0xdf, 0x83, 0xe0, 0xc5, 0x36, 0x09, 0xac, 0x1b, 0xd4, 0xf7, 0x2a, 0x23, 0x57, 0x1c, 0x5c, 0x0f, 0xf4, 0xcf, 0xa2, 0xcf, 0xf5, 0xbd, 0x9c, 0x69, 0x98, 0x78, 0x3a, 0x25, 0xe4, 0xfd, 0x85, 0x11, 0xcc, 0x7d, 0xef, 0xeb, 0x74, 0x60, 0xb1, 0xb7, 0xfb, 0x1f, 0x0e, 0x62, 0xff, 0xfe, 0x09, 0x0a, 0xc3, 0x80, 0x2f, 0x10, 0x49, 0x89, 0x78, 0xd2, 0x08, 0xfa, 0x89, 0x22, 0x45, 0x91, 0x21, 0xbc, 0x90, 0x3e, 0xad, 0xb3, 0x0a, 0xb4, 0x0e, 0x1c, 0xa1, 0x93, 0x92, 0xd8, 0x72, 0x07, 0x54, 0x60, 0xe7, 0x91, 0xfc, 0xd9, 0x3c, 0xe1, 0x6f, 0x08, 0xe4, 0x56, 0xf6, 0x0b, 0xb0, 0x3c, 0x39, 0x8a, 0x2d, 0x48, 0x44, 0x28, 0x13, 0xca, 0xe9, 0xf7, 0xa3, 0xb6, 0x8a, 0x5f, 0x31, 0xa9, 0x72, 0xf2, 0xde, 0x96, 0xf2, 0xb1, 0x53, 0xb1, 0x3e, 0x24, 0x57, 0xfd, 0x18, 0x45, 0x1f, 0xc5, 0x33, 0x1b, 0xa4, 0xe8, 0x21, 0xfa, 0x0e, 0xb2, 0xb9, 0xcb, 0xc7, 0x07, 0x41, 0xdd, 0x2f, 0xb6, 0x6a, 0x23, 0x18, 0xed, 0xc1, 0xef, 0xe2, 0x4b, 0xec, 0xc9, 0xba, 0xfb, 0x46, 0x43, 0x90, 0xd7, 0xb5, 0x68, 0x28, 0x31, 0x2b, 0x8d, 0xa8, 0x51, 0x63, 0xf7, 0x53, 0x99, 0x19, 0x68, 0x85, 0x66, 0x00, 0x00, 0x00, 0x01, 0x00, 0x05, 0x58, 0x2e, 0x35, 0x30, 0x39, 0x00, 0x00, 0x02, 0x3a, 0x30, 0x82, 0x02, 0x36, 0x30, 0x82, 0x01, 0xe0, 0xa0, 0x03, 0x02, 0x01, 0x02, 0x02, 0x04, 0x48, 0x59, 
0xf1, 0x92, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x05, 0x05, 0x00, 0x30, 0x81, 0xa0, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x4b, 0x52, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x13, 0x0a, 0x4b, 0x79, 0x75, 0x6e, 0x67, 0x67, 0x69, 0x2d, 0x64, 0x6f, 0x31, 0x14, 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x0b, 0x53, 0x65, 0x6f, 0x6e, 0x67, 0x6e, 0x61, 0x6d, 0x2d, 0x73, 0x69, 0x31, 0x1a, 0x30, 0x18, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x13, 0x11, 0x54, 0x68, 0x65, 0x20, 0x4e, 0x65, 0x74, 0x74, 0x79, 0x20, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x31, 0x18, 0x30, 0x16, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x13, 0x0f, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x73, 0x31, 0x30, 0x30, 0x2e, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x27, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x63, 0x68, 0x61, 0x74, 0x2e, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x74, 0x79, 0x2e, 0x67, 0x6c, 0x65, 0x61, 0x6d, 0x79, 0x6e, 0x6f, 0x64, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x30, 0x20, 0x17, 0x0d, 0x30, 0x38, 0x30, 0x36, 0x31, 0x39, 0x30, 0x35, 0x34, 0x31, 0x33, 0x38, 0x5a, 0x18, 0x0f, 0x32, 0x31, 0x38, 0x37, 0x31, 0x31, 0x32, 0x34, 0x30, 0x35, 0x34, 0x31, 0x33, 0x38, 0x5a, 0x30, 0x81, 0xa0, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x4b, 0x52, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x13, 0x0a, 0x4b, 0x79, 0x75, 0x6e, 0x67, 0x67, 0x69, 0x2d, 0x64, 0x6f, 0x31, 0x14, 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x0b, 0x53, 0x65, 0x6f, 0x6e, 0x67, 0x6e, 0x61, 0x6d, 0x2d, 0x73, 0x69, 0x31, 0x1a, 0x30, 0x18, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x13, 0x11, 0x54, 0x68, 0x65, 0x20, 0x4e, 0x65, 0x74, 0x74, 0x79, 0x20, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x31, 0x18, 0x30, 0x16, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x13, 0x0f, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x73, 0x31, 0x30, 0x30, 0x2e, 0x06, 0x03, 
0x55, 0x04, 0x03, 0x13, 0x27, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x63, 0x68, 0x61, 0x74, 0x2e, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x74, 0x79, 0x2e, 0x67, 0x6c, 0x65, 0x61, 0x6d, 0x79, 0x6e, 0x6f, 0x64, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x30, 0x5c, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x4b, 0x00, 0x30, 0x48, 0x02, 0x41, 0x00, 0xc3, 0xe3, 0x5e, 0x41, 0xa7, 0x87, 0x11, 0x00, 0x42, 0x2a, 0xb0, 0x4b, 0xed, 0xb2, 0xe0, 0x23, 0xdb, 0xb1, 0x3d, 0x58, 0x97, 0x35, 0x60, 0x0b, 0x82, 0x59, 0xd3, 0x00, 0xea, 0xd4, 0x61, 0xb8, 0x79, 0x3f, 0xb6, 0x3c, 0x12, 0x05, 0x93, 0x2e, 0x9a, 0x59, 0x68, 0x14, 0x77, 0x3a, 0xc8, 0x50, 0x25, 0x57, 0xa4, 0x49, 0x18, 0x63, 0x41, 0xf0, 0x2d, 0x28, 0xec, 0x06, 0xfb, 0xb4, 0x9f, 0xbf, 0x02, 0x03, 0x01, 0x00, 0x01, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x05, 0x05, 0x00, 0x03, 0x41, 0x00, 0x65, 0x6c, 0x30, 0x01, 0xc2, 0x8e, 0x3e, 0xcb, 0xb3, 0x77, 0x48, 0xe9, 0x66, 0x61, 0x9a, 0x40, 0x86, 0xaf, 0xf6, 0x03, 0xeb, 0xba, 0x6a, 0xf2, 0xfd, 0xe2, 0xaf, 0x36, 0x5e, 0x7b, 0xaa, 0x22, 0x04, 0xdd, 0x2c, 0x20, 0xc4, 0xfc, 0xdd, 0xd0, 0x82, 0x20, 0x1c, 0x3d, 0xd7, 0x9e, 0x5e, 0x5c, 0x92, 0x5a, 0x76, 0x71, 0x28, 0xf5, 0x07, 0x7d, 0xa2, 0x81, 0xba, 0x77, 0x9f, 0x2a, 0xd9, 0x44, 0x00, 0x00, 0x00, 0x01, 0x00, 0x05, 0x6d, 0x79, 0x6b, 0x65, 0x79, 0x00, 0x00, 0x01, 0x1a, 0x9f, 0x5b, 0x56, 0xa0, 0x00, 0x00, 0x01, 0x99, 0x30, 0x82, 0x01, 0x95, 0x30, 0x0e, 0x06, 0x0a, 0x2b, 0x06, 0x01, 0x04, 0x01, 0x2a, 0x02, 0x11, 0x01, 0x01, 0x05, 0x00, 0x04, 0x82, 0x01, 0x81, 0x29, 0xa8, 0xb6, 0x08, 0x0c, 0x85, 0x75, 0x3e, 0xdd, 0xb5, 0xe5, 0x1a, 0x87, 0x68, 0xd1, 0x90, 0x4b, 0x29, 0x31, 0xee, 0x90, 0xbc, 0x9d, 0x73, 0xa0, 0x3f, 0xe9, 0x0b, 0xa4, 0xef, 0x30, 0x9b, 0x36, 0x9a, 0xb2, 0x54, 0x77, 0x81, 0x07, 0x4b, 0xaa, 0xa5, 0x77, 0x98, 0xe1, 0xeb, 0xb5, 0x7c, 0x4e, 0x48, 0xd5, 0x08, 0xfc, 0x2c, 0x36, 0xe2, 0x65, 0x03, 0xac, 0xe5, 0xf3, 0x96, 0xb7, 0xd0, 0xb5, 0x3b, 
0x92, 0xe4, 0x14, 0x05, 0x7a, 0x6a, 0x92, 0x56, 0xfe, 0x4e, 0xab, 0xd3, 0x0e, 0x32, 0x04, 0x22, 0x22, 0x74, 0x47, 0x7d, 0xec, 0x21, 0x99, 0x30, 0x31, 0x64, 0x46, 0x64, 0x9b, 0xc7, 0x13, 0xbf, 0xbe, 0xd0, 0x31, 0x49, 0xe7, 0x3c, 0xbf, 0xba, 0xb1, 0x20, 0xf9, 0x42, 0xf4, 0xa9, 0xa9, 0xe5, 0x13, 0x65, 0x32, 0xbf, 0x7c, 0xcc, 0x91, 0xd3, 0xfd, 0x24, 0x47, 0x0b, 0xe5, 0x53, 0xad, 0x50, 0x30, 0x56, 0xd1, 0xfa, 0x9c, 0x37, 0xa8, 0xc1, 0xce, 0xf6, 0x0b, 0x18, 0xaa, 0x7c, 0xab, 0xbd, 0x1f, 0xdf, 0xe4, 0x80, 0xb8, 0xa7, 0xe0, 0xad, 0x7d, 0x50, 0x74, 0xf1, 0x98, 0x78, 0xbc, 0x58, 0xb9, 0xc2, 0x52, 0xbe, 0xd2, 0x5b, 0x81, 0x94, 0x83, 0x8f, 0xb9, 0x4c, 0xee, 0x01, 0x2b, 0x5e, 0xc9, 0x6e, 0x9b, 0xf5, 0x63, 0x69, 0xe4, 0xd8, 0x0b, 0x47, 0xd8, 0xfd, 0xd8, 0xe0, 0xed, 0xa8, 0x27, 0x03, 0x74, 0x1e, 0x5d, 0x32, 0xe6, 0x5c, 0x63, 0xc2, 0xfb, 0x3f, 0xee, 0xb4, 0x13, 0xc6, 0x0e, 0x6e, 0x74, 0xe0, 0x22, 0xac, 0xce, 0x79, 0xf9, 0x43, 0x68, 0xc1, 0x03, 0x74, 0x2b, 0xe1, 0x18, 0xf8, 0x7f, 0x76, 0x9a, 0xea, 0x82, 0x3f, 0xc2, 0xa6, 0xa7, 0x4c, 0xfe, 0xae, 0x29, 0x3b, 0xc1, 0x10, 0x7c, 0xd5, 0x77, 0x17, 0x79, 0x5f, 0xcb, 0xad, 0x1f, 0xd8, 0xa1, 0xfd, 0x90, 0xe1, 0x6b, 0xb2, 0xef, 0xb9, 0x41, 0x26, 0xa4, 0x0b, 0x4f, 0xc6, 0x83, 0x05, 0x6f, 0xf0, 0x64, 0x40, 0xe1, 0x44, 0xc4, 0xf9, 0x40, 0x2b, 0x3b, 0x40, 0xdb, 0xaf, 0x35, 0xa4, 0x9b, 0x9f, 0xc4, 0x74, 0x07, 0xe5, 0x18, 0x60, 0xc5, 0xfe, 0x15, 0x0e, 0x3a, 0x25, 0x2a, 0x11, 0xee, 0x78, 0x2f, 0xb8, 0xd1, 0x6e, 0x4e, 0x3c, 0x0a, 0xb5, 0xb9, 0x40, 0x86, 0x27, 0x6d, 0x8f, 0x53, 0xb7, 0x77, 0x36, 0xec, 0x5d, 0xed, 0x32, 0x40, 0x43, 0x82, 0xc3, 0x52, 0x58, 0xc4, 0x26, 0x39, 0xf3, 0xb3, 0xad, 0x58, 0xab, 0xb7, 0xf7, 0x8e, 0x0e, 0xba, 0x8e, 0x78, 0x9d, 0xbf, 0x58, 0x34, 0xbd, 0x77, 0x73, 0xa6, 0x50, 0x55, 0x00, 0x60, 0x26, 0xbf, 0x6d, 0xb4, 0x98, 0x8a, 0x18, 0x83, 0x89, 0xf8, 0xcd, 0x0d, 0x49, 0x06, 0xae, 0x51, 0x6e, 0xaf, 0xbd, 0xe2, 0x07, 0x13, 0xd8, 0x64, 0xcc, 0xbf, 0x00, 0x00, 0x00, 0x01, 0x00, 0x05, 0x58, 0x2e, 0x35, 0x30, 0x39, 0x00, 0x00, 0x02, 
0x34, 0x30, 0x82, 0x02, 0x30, 0x30, 0x82, 0x01, 0xda, 0xa0, 0x03, 0x02, 0x01, 0x02, 0x02, 0x04, 0x48, 0x59, 0xf2, 0x84, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x05, 0x05, 0x00, 0x30, 0x81, 0x9d, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x4b, 0x52, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x13, 0x0a, 0x4b, 0x79, 0x75, 0x6e, 0x67, 0x67, 0x69, 0x2d, 0x64, 0x6f, 0x31, 0x14, 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x0b, 0x53, 0x65, 0x6f, 0x6e, 0x67, 0x6e, 0x61, 0x6d, 0x2d, 0x73, 0x69, 0x31, 0x1a, 0x30, 0x18, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x13, 0x11, 0x54, 0x68, 0x65, 0x20, 0x4e, 0x65, 0x74, 0x74, 0x79, 0x20, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x31, 0x15, 0x30, 0x13, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x13, 0x0c, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x6f, 0x72, 0x73, 0x31, 0x30, 0x30, 0x2e, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x27, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x63, 0x68, 0x61, 0x74, 0x2e, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x74, 0x79, 0x2e, 0x67, 0x6c, 0x65, 0x61, 0x6d, 0x79, 0x6e, 0x6f, 0x64, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x30, 0x20, 0x17, 0x0d, 0x30, 0x38, 0x30, 0x36, 0x31, 0x39, 0x30, 0x35, 0x34, 0x35, 0x34, 0x30, 0x5a, 0x18, 0x0f, 0x32, 0x31, 0x38, 0x37, 0x31, 0x31, 0x32, 0x33, 0x30, 0x35, 0x34, 0x35, 0x34, 0x30, 0x5a, 0x30, 0x81, 0x9d, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x4b, 0x52, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x13, 0x0a, 0x4b, 0x79, 0x75, 0x6e, 0x67, 0x67, 0x69, 0x2d, 0x64, 0x6f, 0x31, 0x14, 0x30, 0x12, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x0b, 0x53, 0x65, 0x6f, 0x6e, 0x67, 0x6e, 0x61, 0x6d, 0x2d, 0x73, 0x69, 0x31, 0x1a, 0x30, 0x18, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x13, 0x11, 0x54, 0x68, 0x65, 0x20, 0x4e, 0x65, 0x74, 0x74, 0x79, 0x20, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x31, 0x15, 0x30, 0x13, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x13, 0x0c, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x69, 
0x62, 0x75, 0x74, 0x6f, 0x72, 0x73, 0x31, 0x30, 0x30, 0x2e, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x27, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x63, 0x68, 0x61, 0x74, 0x2e, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x74, 0x79, 0x2e, 0x67, 0x6c, 0x65, 0x61, 0x6d, 0x79, 0x6e, 0x6f, 0x64, 0x65, 0x2e, 0x6e, 0x65, 0x74, 0x30, 0x5c, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x4b, 0x00, 0x30, 0x48, 0x02, 0x41, 0x00, 0x95, 0xb3, 0x47, 0x17, 0x95, 0x0f, 0x57, 0xcf, 0x66, 0x72, 0x0a, 0x7e, 0x5b, 0x54, 0xea, 0x8c, 0x6f, 0x79, 0xde, 0x94, 0xac, 0x0b, 0x5a, 0xd4, 0xd6, 0x1b, 0x58, 0x12, 0x1a, 0x16, 0x3d, 0xfe, 0xdf, 0xa5, 0x2b, 0x86, 0xbc, 0x64, 0xd4, 0x80, 0x1e, 0x3f, 0xf9, 0xe2, 0x04, 0x03, 0x79, 0x9b, 0xc1, 0x5c, 0xf0, 0xf1, 0xf3, 0xf1, 0xe3, 0xbf, 0x3f, 0xc0, 0x1f, 0xdd, 0xdb, 0xc0, 0x5b, 0x21, 0x02, 0x03, 0x01, 0x00, 0x01, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x05, 0x05, 0x00, 0x03, 0x41, 0x00, 0x02, 0xd7, 0xdd, 0xbd, 0x0c, 0x8e, 0x21, 0x20, 0xef, 0x9e, 0x4f, 0x1f, 0xf5, 0x49, 0xf1, 0xae, 0x58, 0x9b, 0x94, 0x3a, 0x1f, 0x70, 0x33, 0xf0, 0x9b, 0xbb, 0xe9, 0xc0, 0xf3, 0x72, 0xcb, 0xde, 0xb6, 0x56, 0x72, 0xcc, 0x1c, 0xf0, 0xd6, 0x5a, 0x2a, 0xbc, 0xa1, 0x7e, 0x23, 0x83, 0xe9, 0xe7, 0xcf, 0x9e, 0xa5, 0xf9, 0xcc, 0xc2, 0x61, 0xf4, 0xdb, 0x40, 0x93, 0x1d, 0x63, 0x8a, 0x50, 0x4c, 0x11, 0x39, 0xb1, 0x91, 0xc1, 0xe6, 0x9d, 0xd9, 0x1a, 0x62, 0x1b, 0xb8, 0xd3, 0xd6, 0x9a, 0x6d, 0xb9, 0x8e, 0x15, 0x51 }; public static InputStream asInputStream() { byte[] data = new byte[DATA.length]; for (int i = 0; i < data.length; i ++) { data[i] = (byte) DATA[i]; } return new ByteArrayInputStream(data); } public static char[] getCertificatePassword() { return "secret".toCharArray(); } public static char[] getKeyStorePassword() { return "secret".toCharArray(); } private SecureKeyStore() { // Unused } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandra.service; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.UUID; import com.google.common.collect.Lists; import org.apache.cassandra.io.util.FileInputStreamPlus; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.apache.cassandra.AbstractSerializationsTester; import org.apache.cassandra.Util; import org.apache.cassandra.Util.PartitionerSwitcher; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.dht.RandomPartitioner; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import org.apache.cassandra.io.IVersionedSerializer; import org.apache.cassandra.io.util.DataOutputStreamPlus; import org.apache.cassandra.locator.InetAddressAndPort; import org.apache.cassandra.repair.SyncNodePair; import org.apache.cassandra.repair.RepairJobDesc; import org.apache.cassandra.repair.Validator; import org.apache.cassandra.repair.messages.*; import org.apache.cassandra.schema.TableId; import org.apache.cassandra.streaming.PreviewKind; import 
org.apache.cassandra.streaming.SessionSummary;
import org.apache.cassandra.streaming.StreamSummary;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.MerkleTrees;
import org.apache.cassandra.utils.UUIDGen;

/**
 * Round-trip serialization tests for repair messages. Each test optionally
 * regenerates its fixture file (when EXECUTE_WRITES is set) and then reads it
 * back, asserting the deserialized message matches what was written.
 */
public class SerializationsTest extends AbstractSerializationsTester
{
    private static PartitionerSwitcher partitionerSwitcher;
    private static UUID RANDOM_UUID;
    private static Range<Token> FULL_RANGE;
    private static RepairJobDesc DESC;

    private static final int PORT = 7010;

    @BeforeClass
    public static void defineSchema() throws Exception
    {
        DatabaseDescriptor.daemonInitialization();
        partitionerSwitcher = Util.switchPartitioner(RandomPartitioner.instance);
        RANDOM_UUID = UUID.fromString("b5c3d033-75aa-4c2f-a819-947aac7a0c54");
        // min-token -> min-token wraps the entire ring, i.e. a "full" range.
        FULL_RANGE = new Range<>(Util.testPartitioner().getMinimumToken(), Util.testPartitioner().getMinimumToken());
        DESC = new RepairJobDesc(RANDOM_UUID, RANDOM_UUID, "Keyspace1", "Standard1", Arrays.asList(FULL_RANGE));
    }

    @AfterClass
    public static void tearDown()
    {
        partitionerSwitcher.close();
    }

    /**
     * Serializes each message to the named fixture file, verifying the
     * serializer's reported size for every message along the way.
     *
     * @param fileName   fixture file to (over)write
     * @param serializer versioned serializer for the message type
     * @param messages   messages to write, in order
     */
    private <T extends RepairMessage> void testRepairMessageWrite(String fileName, IVersionedSerializer<T> serializer, T... messages) throws IOException
    {
        try (DataOutputStreamPlus out = getOutput(fileName))
        {
            for (T message : messages)
            {
                testSerializedSize(message, serializer);
                serializer.serialize(message, out, getVersion());
            }
        }
    }

    private void testValidationRequestWrite() throws IOException
    {
        ValidationRequest message = new ValidationRequest(DESC, 1234);
        testRepairMessageWrite("service.ValidationRequest.bin", ValidationRequest.serializer, message);
    }

    @Test
    public void testValidationRequestRead() throws IOException
    {
        if (EXECUTE_WRITES)
            testValidationRequestWrite();

        try (FileInputStreamPlus in = getInput("service.ValidationRequest.bin"))
        {
            ValidationRequest message = ValidationRequest.serializer.deserialize(in, getVersion());
            assert DESC.equals(message.desc);
            assert message.nowInSec == 1234;
        }
    }

    private void testValidationCompleteWrite() throws IOException
    {
        IPartitioner p = RandomPartitioner.instance;

        // empty validation
        MerkleTrees mts = new MerkleTrees(p);
        mts.addMerkleTree((int) Math.pow(2, 15), FULL_RANGE);
        ValidationResponse c0 = new ValidationResponse(DESC, mts);

        // validation with a tree (split a few times so it is non-trivial)
        mts = new MerkleTrees(p);
        mts.addMerkleTree(Integer.MAX_VALUE, FULL_RANGE);
        for (int i = 0; i < 10; i++)
            mts.split(p.getRandomToken());
        ValidationResponse c1 = new ValidationResponse(DESC, mts);

        // validation failed (no trees)
        ValidationResponse c3 = new ValidationResponse(DESC);

        // NOTE(review): the original also constructed two Validator instances
        // (v0, v1) that were never used; they have been removed.
        testRepairMessageWrite("service.ValidationComplete.bin", ValidationResponse.serializer, c0, c1, c3);
    }

    @Test
    public void testValidationCompleteRead() throws IOException
    {
        if (EXECUTE_WRITES)
            testValidationCompleteWrite();

        try (FileInputStreamPlus in = getInput("service.ValidationComplete.bin"))
        {
            // empty validation
            ValidationResponse message = ValidationResponse.serializer.deserialize(in, getVersion());
            assert DESC.equals(message.desc);
            assert message.success();
            assert message.trees != null;

            // validation with a tree
            message = ValidationResponse.serializer.deserialize(in, getVersion());
            assert DESC.equals(message.desc);
            assert message.success();
            assert message.trees != null;

            // failed validation
            message = ValidationResponse.serializer.deserialize(in, getVersion());
            assert DESC.equals(message.desc);
            assert !message.success();
            assert message.trees == null;
        }
    }

    private void testSyncRequestWrite() throws IOException
    {
        InetAddressAndPort local = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.1", PORT);
        InetAddressAndPort src = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.2", PORT);
        InetAddressAndPort dest = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.3", PORT);

        SyncRequest message = new SyncRequest(DESC, local, src, dest, Collections.singleton(FULL_RANGE), PreviewKind.NONE, false);
        testRepairMessageWrite("service.SyncRequest.bin", SyncRequest.serializer, message);
    }

    @Test
    public void testSyncRequestRead() throws IOException
    {
        if (EXECUTE_WRITES)
            testSyncRequestWrite();

        InetAddressAndPort local = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.1", PORT);
        InetAddressAndPort src = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.2", PORT);
        InetAddressAndPort dest = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.3", PORT);

        try (FileInputStreamPlus in = getInput("service.SyncRequest.bin"))
        {
            SyncRequest message = SyncRequest.serializer.deserialize(in, getVersion());
            assert DESC.equals(message.desc);
            assert local.equals(message.initiator);
            assert src.equals(message.src);
            assert dest.equals(message.dst);
            assert message.ranges.size() == 1 && message.ranges.contains(FULL_RANGE);
            assert !message.asymmetric;
        }
    }

    private void testSyncCompleteWrite() throws IOException
    {
        InetAddressAndPort src = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.2", PORT);
        InetAddressAndPort dest = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.3", PORT);

        // sync success, carrying one summary with streams in both directions
        List<SessionSummary> summaries = new ArrayList<>();
        summaries.add(new SessionSummary(src, dest,
                                         Lists.newArrayList(new StreamSummary(TableId.fromUUID(UUIDGen.getTimeUUID()), 5, 100)),
                                         Lists.newArrayList(new StreamSummary(TableId.fromUUID(UUIDGen.getTimeUUID()), 500, 10))
        ));
        SyncResponse success = new SyncResponse(DESC, src, dest, true, summaries);

        // sync fail
        SyncResponse fail = new SyncResponse(DESC, src, dest, false, Collections.emptyList());

        testRepairMessageWrite("service.SyncComplete.bin", SyncResponse.serializer, success, fail);
    }

    @Test
    public void testSyncCompleteRead() throws IOException
    {
        if (EXECUTE_WRITES)
            testSyncCompleteWrite();

        InetAddressAndPort src = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.2", PORT);
        InetAddressAndPort dest = InetAddressAndPort.getByNameOverrideDefaults("127.0.0.3", PORT);
        SyncNodePair nodes = new SyncNodePair(src, dest);

        try (FileInputStreamPlus in = getInput("service.SyncComplete.bin"))
        {
            // success
            // FIX: removed leftover debug System.out.println calls that dumped
            // the node pairs to stdout on every run.
            SyncResponse message = SyncResponse.serializer.deserialize(in, getVersion());
            assert DESC.equals(message.desc);
            assert nodes.equals(message.nodes);
            assert message.success;

            // fail
            message = SyncResponse.serializer.deserialize(in, getVersion());
            assert DESC.equals(message.desc);
            assert nodes.equals(message.nodes);
            assert !message.success;
        }
    }
}
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.dynamodbv2.model; import java.io.Serializable; /** * <p> * Represents the data for an attribute. You can set one, and only one, of the * elements. * </p> * <p> * Each attribute in an item is a name-value pair. An attribute can be * single-valued or multi-valued set. For example, a book item can have title * and authors attributes. Each book has one title but can have many authors. * The multi-valued attribute is a set; duplicate values are not allowed. * </p> */ public class AttributeValue implements Serializable, Cloneable { /** * <p> * A String data type. * </p> */ private String s; /** * <p> * A Number data type. * </p> */ private String n; /** * <p> * A Binary data type. * </p> */ private java.nio.ByteBuffer b; /** * <p> * A String Set data type. * </p> */ private java.util.List<String> sS; /** * <p> * A Number Set data type. * </p> */ private java.util.List<String> nS; /** * <p> * A Binary Set data type. * </p> */ private java.util.List<java.nio.ByteBuffer> bS; /** * <p> * A Map of attribute values. * </p> */ private java.util.Map<String, AttributeValue> m; /** * <p> * A List of attribute values. * </p> */ private java.util.List<AttributeValue> l; /** * <p> * A Null data type. * </p> */ private Boolean nULLValue; /** * <p> * A Boolean data type. * </p> */ private Boolean bOOL; /** * Default constructor for AttributeValue object. 
Callers should use the * setter or fluent setter (with...) methods to initialize the object after * creating it. */ public AttributeValue() { } /** * Constructs a new AttributeValue object. Callers should use the setter or * fluent setter (with...) methods to initialize any additional object * members. * * @param s * A String data type. */ public AttributeValue(String s) { setS(s); } /** * Constructs a new AttributeValue object. Callers should use the setter or * fluent setter (with...) methods to initialize any additional object * members. * * @param sS * A String Set data type. */ public AttributeValue(java.util.List<String> sS) { setSS(sS); } /** * <p> * A String data type. * </p> * * @param s * A String data type. */ public void setS(String s) { this.s = s; } /** * <p> * A String data type. * </p> * * @return A String data type. */ public String getS() { return this.s; } /** * <p> * A String data type. * </p> * * @param s * A String data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withS(String s) { setS(s); return this; } /** * <p> * A Number data type. * </p> * * @param n * A Number data type. */ public void setN(String n) { this.n = n; } /** * <p> * A Number data type. * </p> * * @return A Number data type. */ public String getN() { return this.n; } /** * <p> * A Number data type. * </p> * * @param n * A Number data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withN(String n) { setN(n); return this; } /** * <p> * A Binary data type. * </p> * <p> * AWS SDK for Java performs a Base64 encoding on this field before sending * this request to AWS service by default. Users of the SDK should not * perform Base64 encoding on this field. * </p> * * @param b * A Binary data type. */ public void setB(java.nio.ByteBuffer b) { this.b = b; } /** * <p> * A Binary data type. 
* </p> * <p> * {@code ByteBuffer}s are stateful. Calling their {@code get} methods * changes their {@code position}. We recommend using * {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view * of the buffer with an independent {@code position}, and calling * {@code get} methods on this rather than directly on the returned * {@code ByteBuffer}. Doing so will ensure that anyone else using the * {@code ByteBuffer} will not be affected by changes to the {@code position} * . * </p> * * @return A Binary data type. */ public java.nio.ByteBuffer getB() { return this.b; } /** * <p> * A Binary data type. * </p> * * @param b * A Binary data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withB(java.nio.ByteBuffer b) { setB(b); return this; } /** * <p> * A String Set data type. * </p> * * @return A String Set data type. */ public java.util.List<String> getSS() { return sS; } /** * <p> * A String Set data type. * </p> * * @param sS * A String Set data type. */ public void setSS(java.util.Collection<String> sS) { if (sS == null) { this.sS = null; return; } this.sS = new java.util.ArrayList<String>(sS); } /** * <p> * A String Set data type. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setSS(java.util.Collection)} or * {@link #withSS(java.util.Collection)} if you want to override the * existing values. * </p> * * @param sS * A String Set data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withSS(String... sS) { if (this.sS == null) { setSS(new java.util.ArrayList<String>(sS.length)); } for (String ele : sS) { this.sS.add(ele); } return this; } /** * <p> * A String Set data type. * </p> * * @param sS * A String Set data type. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public AttributeValue withSS(java.util.Collection<String> sS) { setSS(sS); return this; } /** * <p> * A Number Set data type. * </p> * * @return A Number Set data type. */ public java.util.List<String> getNS() { return nS; } /** * <p> * A Number Set data type. * </p> * * @param nS * A Number Set data type. */ public void setNS(java.util.Collection<String> nS) { if (nS == null) { this.nS = null; return; } this.nS = new java.util.ArrayList<String>(nS); } /** * <p> * A Number Set data type. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setNS(java.util.Collection)} or * {@link #withNS(java.util.Collection)} if you want to override the * existing values. * </p> * * @param nS * A Number Set data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withNS(String... nS) { if (this.nS == null) { setNS(new java.util.ArrayList<String>(nS.length)); } for (String ele : nS) { this.nS.add(ele); } return this; } /** * <p> * A Number Set data type. * </p> * * @param nS * A Number Set data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withNS(java.util.Collection<String> nS) { setNS(nS); return this; } /** * <p> * A Binary Set data type. * </p> * * @return A Binary Set data type. */ public java.util.List<java.nio.ByteBuffer> getBS() { return bS; } /** * <p> * A Binary Set data type. * </p> * * @param bS * A Binary Set data type. */ public void setBS(java.util.Collection<java.nio.ByteBuffer> bS) { if (bS == null) { this.bS = null; return; } this.bS = new java.util.ArrayList<java.nio.ByteBuffer>(bS); } /** * <p> * A Binary Set data type. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setBS(java.util.Collection)} or * {@link #withBS(java.util.Collection)} if you want to override the * existing values. 
* </p> * * @param bS * A Binary Set data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withBS(java.nio.ByteBuffer... bS) { if (this.bS == null) { setBS(new java.util.ArrayList<java.nio.ByteBuffer>(bS.length)); } for (java.nio.ByteBuffer ele : bS) { this.bS.add(ele); } return this; } /** * <p> * A Binary Set data type. * </p> * * @param bS * A Binary Set data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withBS(java.util.Collection<java.nio.ByteBuffer> bS) { setBS(bS); return this; } /** * <p> * A Map of attribute values. * </p> * * @return A Map of attribute values. */ public java.util.Map<String, AttributeValue> getM() { return m; } /** * <p> * A Map of attribute values. * </p> * * @param m * A Map of attribute values. */ public void setM(java.util.Map<String, AttributeValue> m) { this.m = m; } /** * <p> * A Map of attribute values. * </p> * * @param m * A Map of attribute values. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withM(java.util.Map<String, AttributeValue> m) { setM(m); return this; } public AttributeValue addMEntry(String key, AttributeValue value) { if (null == this.m) { this.m = new java.util.HashMap<String, AttributeValue>(); } if (this.m.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.m.put(key, value); return this; } /** * Removes all the entries added into M. &lt;p> Returns a reference to this * object so that method calls can be chained together. */ public AttributeValue clearMEntries() { this.m = null; return this; } /** * <p> * A List of attribute values. * </p> * * @return A List of attribute values. */ public java.util.List<AttributeValue> getL() { return l; } /** * <p> * A List of attribute values. * </p> * * @param l * A List of attribute values. 
*/ public void setL(java.util.Collection<AttributeValue> l) { if (l == null) { this.l = null; return; } this.l = new java.util.ArrayList<AttributeValue>(l); } /** * <p> * A List of attribute values. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setL(java.util.Collection)} or * {@link #withL(java.util.Collection)} if you want to override the existing * values. * </p> * * @param l * A List of attribute values. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withL(AttributeValue... l) { if (this.l == null) { setL(new java.util.ArrayList<AttributeValue>(l.length)); } for (AttributeValue ele : l) { this.l.add(ele); } return this; } /** * <p> * A List of attribute values. * </p> * * @param l * A List of attribute values. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withL(java.util.Collection<AttributeValue> l) { setL(l); return this; } /** * <p> * A Null data type. * </p> * * @param nULLValue * A Null data type. */ public void setNULL(Boolean nULLValue) { this.nULLValue = nULLValue; } /** * <p> * A Null data type. * </p> * * @return A Null data type. */ public Boolean getNULL() { return this.nULLValue; } /** * <p> * A Null data type. * </p> * * @param nULLValue * A Null data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withNULL(Boolean nULLValue) { setNULL(nULLValue); return this; } /** * <p> * A Null data type. * </p> * * @return A Null data type. */ public Boolean isNULL() { return this.nULLValue; } /** * <p> * A Boolean data type. * </p> * * @param bOOL * A Boolean data type. */ public void setBOOL(Boolean bOOL) { this.bOOL = bOOL; } /** * <p> * A Boolean data type. * </p> * * @return A Boolean data type. */ public Boolean getBOOL() { return this.bOOL; } /** * <p> * A Boolean data type. 
* </p> * * @param bOOL * A Boolean data type. * @return Returns a reference to this object so that method calls can be * chained together. */ public AttributeValue withBOOL(Boolean bOOL) { setBOOL(bOOL); return this; } /** * <p> * A Boolean data type. * </p> * * @return A Boolean data type. */ public Boolean isBOOL() { return this.bOOL; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getS() != null) sb.append("S: " + getS() + ","); if (getN() != null) sb.append("N: " + getN() + ","); if (getB() != null) sb.append("B: " + getB() + ","); if (getSS() != null) sb.append("SS: " + getSS() + ","); if (getNS() != null) sb.append("NS: " + getNS() + ","); if (getBS() != null) sb.append("BS: " + getBS() + ","); if (getM() != null) sb.append("M: " + getM() + ","); if (getL() != null) sb.append("L: " + getL() + ","); if (getNULL() != null) sb.append("NULL: " + getNULL() + ","); if (getBOOL() != null) sb.append("BOOL: " + getBOOL()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof AttributeValue == false) return false; AttributeValue other = (AttributeValue) obj; if (other.getS() == null ^ this.getS() == null) return false; if (other.getS() != null && other.getS().equals(this.getS()) == false) return false; if (other.getN() == null ^ this.getN() == null) return false; if (other.getN() != null && other.getN().equals(this.getN()) == false) return false; if (other.getB() == null ^ this.getB() == null) return false; if (other.getB() != null && other.getB().equals(this.getB()) == false) return false; if (other.getSS() == null ^ this.getSS() == null) return false; if (other.getSS() != null && other.getSS().equals(this.getSS()) == false) 
return false; if (other.getNS() == null ^ this.getNS() == null) return false; if (other.getNS() != null && other.getNS().equals(this.getNS()) == false) return false; if (other.getBS() == null ^ this.getBS() == null) return false; if (other.getBS() != null && other.getBS().equals(this.getBS()) == false) return false; if (other.getM() == null ^ this.getM() == null) return false; if (other.getM() != null && other.getM().equals(this.getM()) == false) return false; if (other.getL() == null ^ this.getL() == null) return false; if (other.getL() != null && other.getL().equals(this.getL()) == false) return false; if (other.getNULL() == null ^ this.getNULL() == null) return false; if (other.getNULL() != null && other.getNULL().equals(this.getNULL()) == false) return false; if (other.getBOOL() == null ^ this.getBOOL() == null) return false; if (other.getBOOL() != null && other.getBOOL().equals(this.getBOOL()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getS() == null) ? 0 : getS().hashCode()); hashCode = prime * hashCode + ((getN() == null) ? 0 : getN().hashCode()); hashCode = prime * hashCode + ((getB() == null) ? 0 : getB().hashCode()); hashCode = prime * hashCode + ((getSS() == null) ? 0 : getSS().hashCode()); hashCode = prime * hashCode + ((getNS() == null) ? 0 : getNS().hashCode()); hashCode = prime * hashCode + ((getBS() == null) ? 0 : getBS().hashCode()); hashCode = prime * hashCode + ((getM() == null) ? 0 : getM().hashCode()); hashCode = prime * hashCode + ((getL() == null) ? 0 : getL().hashCode()); hashCode = prime * hashCode + ((getNULL() == null) ? 0 : getNULL().hashCode()); hashCode = prime * hashCode + ((getBOOL() == null) ? 
0 : getBOOL().hashCode()); return hashCode; } @Override public AttributeValue clone() { try { return (AttributeValue) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.record.path; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.StandardCharsets; import java.sql.Date; import java.text.DateFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.nifi.record.path.exception.RecordPathException; import org.apache.nifi.serialization.SimpleRecordSchema; import org.apache.nifi.serialization.record.DataType; import org.apache.nifi.serialization.record.MapRecord; import org.apache.nifi.serialization.record.Record; import org.apache.nifi.serialization.record.RecordField; import org.apache.nifi.serialization.record.RecordFieldType; import org.apache.nifi.serialization.record.RecordSchema; import org.apache.nifi.serialization.record.util.DataTypeUtils; import org.junit.Test; public class TestRecordPath { @Test public void testCompile() { 
System.out.println(RecordPath.compile("/person/name/last")); System.out.println(RecordPath.compile("/person[2]")); System.out.println(RecordPath.compile("//person[2]")); System.out.println(RecordPath.compile("/person/child[1]//sibling/name")); // contains is a 'filter function' so can be used as the predicate RecordPath.compile("/name[contains(., 'hello')]"); // substring is not a filter function so cannot be used as a predicate try { RecordPath.compile("/name[substring(., 1, 2)]"); } catch (final RecordPathException e) { // expected } // substring is not a filter function so can be used as *part* of a predicate but not as the entire predicate RecordPath.compile("/name[substring(., 1, 2) = 'e']"); } @Test public void testChildField() { final Map<String, Object> accountValues = new HashMap<>(); accountValues.put("id", 1); accountValues.put("balance", 123.45D); final Record accountRecord = new MapRecord(getAccountSchema(), accountValues); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("mainAccount", accountRecord); final Record record = new MapRecord(schema, values); assertEquals(48, RecordPath.compile("/id").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(record, RecordPath.compile("/id").evaluate(record).getSelectedFields().findFirst().get().getParentRecord().get()); assertEquals("John Doe", RecordPath.compile("/name").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(record, RecordPath.compile("/name").evaluate(record).getSelectedFields().findFirst().get().getParentRecord().get()); assertEquals(accountRecord, RecordPath.compile("/mainAccount").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(record, RecordPath.compile("/mainAccount").evaluate(record).getSelectedFields().findFirst().get().getParentRecord().get()); assertEquals(1, 
RecordPath.compile("/mainAccount/id").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(accountRecord, RecordPath.compile("/mainAccount/id").evaluate(record).getSelectedFields().findFirst().get().getParentRecord().get()); assertEquals(123.45D, RecordPath.compile("/mainAccount/balance").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(accountRecord, RecordPath.compile("/mainAccount/id").evaluate(record).getSelectedFields().findFirst().get().getParentRecord().get()); } @Test public void testRootRecord() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); final FieldValue fieldValue = RecordPath.compile("/").evaluate(record).getSelectedFields().findFirst().get(); assertEquals(Optional.empty(), fieldValue.getParent()); assertEquals(record, fieldValue.getValue()); } @Test public void testWildcardChild() { final Map<String, Object> accountValues = new HashMap<>(); accountValues.put("id", 1); accountValues.put("balance", 123.45D); final Record accountRecord = new MapRecord(getAccountSchema(), accountValues); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("mainAccount", accountRecord); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("/mainAccount/*").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(2, fieldValues.size()); for (final FieldValue fieldValue : fieldValues) { assertEquals(accountRecord, fieldValue.getParentRecord().get()); } assertEquals("id", fieldValues.get(0).getField().getFieldName()); assertEquals(1, fieldValues.get(0).getValue()); assertEquals("balance", 
fieldValues.get(1).getField().getFieldName()); assertEquals(123.45D, fieldValues.get(1).getValue()); RecordPath.compile("/mainAccount/*[. > 100]").evaluate(record).getSelectedFields().forEach(field -> field.updateValue(122.44D)); assertEquals(1, accountValues.get("id")); assertEquals(122.44D, accountValues.get("balance")); } @Test public void testWildcardWithArray() { final Map<String, Object> accountValues = new HashMap<>(); accountValues.put("id", 1); accountValues.put("balance", 123.45D); final Record accountRecord = new MapRecord(getAccountSchema(), accountValues); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("accounts", new Object[] {accountRecord}); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("/*[0]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("accounts", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); assertEquals(accountRecord, fieldValue.getValue()); final Map<String, Object> updatedAccountValues = new HashMap<>(accountValues); updatedAccountValues.put("balance", 122.44D); final Record updatedAccountRecord = new MapRecord(getAccountSchema(), updatedAccountValues); RecordPath.compile("/*[0]").evaluate(record).getSelectedFields().forEach(field -> field.updateValue(updatedAccountRecord)); final Object[] accountRecords = (Object[]) record.getValue("accounts"); assertEquals(1, accountRecords.length); final Record recordToVerify = (Record) accountRecords[0]; assertEquals(122.44D, recordToVerify.getValue("balance")); assertEquals(48, record.getValue("id")); assertEquals("John Doe", record.getValue("name")); } @Test public void testDescendantField() { final Map<String, Object> 
accountValues = new HashMap<>(); accountValues.put("id", 1); accountValues.put("balance", 123.45D); final Record accountRecord = new MapRecord(getAccountSchema(), accountValues); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("mainAccount", accountRecord); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("//id").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(2, fieldValues.size()); final FieldValue first = fieldValues.get(0); final FieldValue second = fieldValues.get(1); assertEquals(RecordFieldType.INT, first.getField().getDataType().getFieldType()); assertEquals(RecordFieldType.INT, second.getField().getDataType().getFieldType()); assertEquals(48, first.getValue()); assertEquals(1, second.getValue()); } @Test public void testParent() { final Map<String, Object> accountValues = new HashMap<>(); accountValues.put("id", 1); accountValues.put("balance", 123.45D); final Record accountRecord = new MapRecord(getAccountSchema(), accountValues); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("mainAccount", accountRecord); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("//id/..").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(2, fieldValues.size()); final FieldValue first = fieldValues.get(0); final FieldValue second = fieldValues.get(1); assertEquals(RecordFieldType.RECORD, first.getField().getDataType().getFieldType()); assertEquals(RecordFieldType.RECORD, second.getField().getDataType().getFieldType()); assertEquals(record, first.getValue()); assertEquals(accountRecord, second.getValue()); } @Test public 
void testMapKey() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, String> attributes = new HashMap<>(); attributes.put("city", "New York"); attributes.put("state", "NY"); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("attributes", attributes); final Record record = new MapRecord(schema, values); final FieldValue fieldValue = RecordPath.compile("/attributes['city']").evaluate(record).getSelectedFields().findFirst().get(); assertTrue(fieldValue.getField().getFieldName().equals("attributes")); assertEquals("New York", fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); } @Test @SuppressWarnings("unchecked") public void testUpdateMap() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, String> attributes = new HashMap<>(); attributes.put("city", "New York"); attributes.put("state", "NY"); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("attributes", attributes); final Record record = new MapRecord(schema, values); RecordPath.compile("/attributes['city']").evaluate(record).getSelectedFields().findFirst().get().updateValue("Boston"); assertEquals("Boston", ((Map<String, Object>) record.getValue("attributes")).get("city")); } @Test public void testMapWildcard() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, String> attributes = new HashMap<>(); attributes.put("city", "New York"); attributes.put("state", "NY"); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("attributes", attributes); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("/attributes[*]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(2, fieldValues.size()); 
assertEquals("New York", fieldValues.get(0).getValue()); assertEquals("NY", fieldValues.get(1).getValue()); for (final FieldValue fieldValue : fieldValues) { assertEquals("attributes", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); } RecordPath.compile("/attributes[*]").evaluate(record).getSelectedFields().forEach(field -> field.updateValue("Unknown")); assertEquals("Unknown", attributes.get("city")); assertEquals("Unknown", attributes.get("state")); RecordPath.compile("/attributes[*][fieldName(.) = 'attributes']").evaluate(record).getSelectedFields().forEach(field -> field.updateValue("Unknown")); assertEquals("Unknown", attributes.get("city")); assertEquals("Unknown", attributes.get("state")); } @Test public void testMapMultiKey() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, String> attributes = new HashMap<>(); attributes.put("city", "New York"); attributes.put("state", "NY"); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("attributes", attributes); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("/attributes['city', 'state']").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(2, fieldValues.size()); assertEquals("New York", fieldValues.get(0).getValue()); assertEquals("NY", fieldValues.get(1).getValue()); for (final FieldValue fieldValue : fieldValues) { assertEquals("attributes", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); } RecordPath.compile("/attributes['city', 'state']").evaluate(record).getSelectedFields().forEach(field -> field.updateValue("Unknown")); assertEquals("Unknown", attributes.get("city")); assertEquals("Unknown", attributes.get("state")); } @Test public void testEscapedFieldName() { final List<RecordField> fields = new ArrayList<>(); 
fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name,date", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name,date", "John Doe"); final Record record = new MapRecord(schema, values); final FieldValue fieldValue = RecordPath.compile("/'name,date'").evaluate(record).getSelectedFields().findFirst().get(); assertEquals("name,date", fieldValue.getField().getFieldName()); assertEquals("John Doe", fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); } @Test public void testSingleArrayIndex() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("numbers", new Object[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); final Record record = new MapRecord(schema, values); final FieldValue fieldValue = RecordPath.compile("/numbers[3]").evaluate(record).getSelectedFields().findFirst().get(); assertEquals("numbers", fieldValue.getField().getFieldName()); assertEquals(3, fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); } @Test public void testSingleArrayRange() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("numbers", new Object[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("/numbers[0..1]").evaluate(record).getSelectedFields().collect(Collectors.toList()); for (final FieldValue fieldValue : fieldValues) { assertEquals("numbers", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); } assertEquals(2, fieldValues.size()); for (int i = 0; i < 1; i++) { assertEquals(i, 
fieldValues.get(0).getValue()); } } @Test public void testMultiArrayIndex() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("numbers", new Object[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("/numbers[3,6, -1, -2]").evaluate(record).getSelectedFields().collect(Collectors.toList()); int i = 0; final int[] expectedValues = new int[] {3, 6, 9, 8}; for (final FieldValue fieldValue : fieldValues) { assertEquals("numbers", fieldValue.getField().getFieldName()); assertEquals(expectedValues[i++], fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); } RecordPath.compile("/numbers[3,6, -1, -2]").evaluate(record).getSelectedFields().forEach(field -> field.updateValue(99)); assertArrayEquals(new Object[] {0, 1, 2, 99, 4, 5, 99, 7, 99, 99}, (Object[]) values.get("numbers")); } @Test public void testMultiArrayIndexWithRanges() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("numbers", new Object[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); final Record record = new MapRecord(schema, values); List<FieldValue> fieldValues = RecordPath.compile("/numbers[0, 2, 4..7, 9]").evaluate(record).getSelectedFields().collect(Collectors.toList()); for (final FieldValue fieldValue : fieldValues) { assertEquals("numbers", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); } int[] expectedValues = new int[] {0, 2, 4, 5, 6, 7, 9}; assertEquals(expectedValues.length, fieldValues.size()); for (int i = 0; i < expectedValues.length; i++) { assertEquals(expectedValues[i], fieldValues.get(i).getValue()); } fieldValues = RecordPath.compile("/numbers[0..-1]").evaluate(record).getSelectedFields().collect(Collectors.toList()); 
for (final FieldValue fieldValue : fieldValues) { assertEquals("numbers", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); } expectedValues = new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; assertEquals(expectedValues.length, fieldValues.size()); for (int i = 0; i < expectedValues.length; i++) { assertEquals(expectedValues[i], fieldValues.get(i).getValue()); } fieldValues = RecordPath.compile("/numbers[-1..-1]").evaluate(record).getSelectedFields().collect(Collectors.toList()); for (final FieldValue fieldValue : fieldValues) { assertEquals("numbers", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); } expectedValues = new int[] {9}; assertEquals(expectedValues.length, fieldValues.size()); for (int i = 0; i < expectedValues.length; i++) { assertEquals(expectedValues[i], fieldValues.get(i).getValue()); } fieldValues = RecordPath.compile("/numbers[*]").evaluate(record).getSelectedFields().collect(Collectors.toList()); for (final FieldValue fieldValue : fieldValues) { assertEquals("numbers", fieldValue.getField().getFieldName()); assertEquals(record, fieldValue.getParentRecord().get()); } expectedValues = new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; assertEquals(expectedValues.length, fieldValues.size()); for (int i = 0; i < expectedValues.length; i++) { assertEquals(expectedValues[i], fieldValues.get(i).getValue()); } fieldValues = RecordPath.compile("/xx[1,2,3]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(0, fieldValues.size()); } @Test public void testEqualsPredicate() { final Map<String, Object> accountValues = new HashMap<>(); accountValues.put("id", 1); accountValues.put("balance", 123.45D); final Record accountRecord = new MapRecord(getAccountSchema(), accountValues); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John 
Doe"); values.put("mainAccount", accountRecord); values.put("numbers", new Object[] {1, 2, 3, 4, 4, 4, 5}); final Record record = new MapRecord(schema, values); List<FieldValue> fieldValues = RecordPath.compile("/numbers[0..-1][. = 4]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(3, fieldValues.size()); for (final FieldValue fieldValue : fieldValues) { final String fieldName = fieldValue.getField().getFieldName(); assertEquals("numbers", fieldName); assertEquals(RecordFieldType.INT, fieldValue.getField().getDataType().getFieldType()); assertEquals(4, fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); } fieldValues = RecordPath.compile("//id[. = 48]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("id", fieldValue.getField().getFieldName()); assertEquals(RecordFieldType.INT.getDataType(), fieldValue.getField().getDataType()); assertEquals(48, fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); } @Test public void testRelativePath() { final Map<String, Object> accountValues = new HashMap<>(); accountValues.put("id", 1); accountValues.put("balance", 123.45D); final Record accountRecord = new MapRecord(getAccountSchema(), accountValues); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("mainAccount", accountRecord); final Record record = new MapRecord(schema, values); final List<FieldValue> fieldValues = RecordPath.compile("/mainAccount/././balance/.").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals(accountRecord, fieldValue.getParentRecord().get()); assertEquals(123.45D, fieldValue.getValue()); 
assertEquals("balance", fieldValue.getField().getFieldName()); RecordPath.compile("/mainAccount/././balance/.").evaluate(record).getSelectedFields().forEach(field -> field.updateValue(123.44D)); assertEquals(123.44D, accountValues.get("balance")); } @Test public void testCompareToLiteral() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("numbers", new Object[] {0, 1, 2}); final Record record = new MapRecord(schema, values); List<FieldValue> fieldValues = RecordPath.compile("/id[. > 42]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); fieldValues = RecordPath.compile("/id[. < 42]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(0, fieldValues.size()); } @Test public void testCompareToAbsolute() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("numbers", new Object[] {0, 1, 2}); final Record record = new MapRecord(schema, values); List<FieldValue> fieldValues = RecordPath.compile("/numbers[0..-1][. < /id]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(3, fieldValues.size()); fieldValues = RecordPath.compile("/id[. 
> /numbers[-1]]").evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); } @Test public void testCompareWithEmbeddedPaths() { final Map<String, Object> accountValues1 = new HashMap<>(); accountValues1.put("id", 1); accountValues1.put("balance", 10_000.00D); final Record accountRecord1 = new MapRecord(getAccountSchema(), accountValues1); final Map<String, Object> accountValues2 = new HashMap<>(); accountValues2.put("id", 2); accountValues2.put("balance", 48.02D); final Record accountRecord2 = new MapRecord(getAccountSchema(), accountValues2); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("accounts", new Object[] {accountRecord1, accountRecord2}); final Record record = new MapRecord(schema, values); final RecordPath recordPath = RecordPath.compile("/accounts[0..-1][./balance > 100]"); List<FieldValue> fieldValues = recordPath.evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("accounts", fieldValue.getField().getFieldName()); assertEquals(0, ((ArrayIndexFieldValue) fieldValue).getArrayIndex()); assertEquals(record, fieldValue.getParentRecord().get()); assertEquals(accountRecord1, fieldValue.getValue()); } @Test public void testPredicateInMiddleOfPath() { final Map<String, Object> accountValues1 = new HashMap<>(); accountValues1.put("id", 1); accountValues1.put("balance", 10_000.00D); final Record accountRecord1 = new MapRecord(getAccountSchema(), accountValues1); final Map<String, Object> accountValues2 = new HashMap<>(); accountValues2.put("id", 2); accountValues2.put("balance", 48.02D); final Record accountRecord2 = new MapRecord(getAccountSchema(), accountValues2); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> 
values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("accounts", new Object[] {accountRecord1, accountRecord2}); final Record record = new MapRecord(schema, values); final RecordPath recordPath = RecordPath.compile("/accounts[0..-1][./balance > 100]/id"); List<FieldValue> fieldValues = recordPath.evaluate(record).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("id", fieldValue.getField().getFieldName()); assertEquals(accountRecord1, fieldValue.getParentRecord().get()); assertEquals(1, fieldValue.getValue()); } @Test public void testUpdateValueOnMatchingFields() { final Map<String, Object> accountValues1 = new HashMap<>(); accountValues1.put("id", 1); accountValues1.put("balance", 10_000.00D); final Record accountRecord1 = new MapRecord(getAccountSchema(), accountValues1); final Map<String, Object> accountValues2 = new HashMap<>(); accountValues2.put("id", 2); accountValues2.put("balance", 48.02D); final Record accountRecord2 = new MapRecord(getAccountSchema(), accountValues2); final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); values.put("accounts", new Object[] {accountRecord1, accountRecord2}); final Record record = new MapRecord(schema, values); final RecordPath recordPath = RecordPath.compile("/accounts[0..-1][./balance > 100]/id"); recordPath.evaluate(record).getSelectedFields().findFirst().get().updateValue(100); assertEquals(48, record.getValue("id")); assertEquals(100, accountRecord1.getValue("id")); assertEquals(2, accountRecord2.getValue("id")); } @Test public void testPredicateDoesNotIncludeFieldsThatDontHaveRelativePath() { final List<RecordField> addressFields = new ArrayList<>(); addressFields.add(new RecordField("city", RecordFieldType.STRING.getDataType())); addressFields.add(new 
// (continuation of testPredicateDoesNotIncludeFieldsThatDontHaveRelativePath from the previous line)
RecordField("state", RecordFieldType.STRING.getDataType())); addressFields.add(new RecordField("zip", RecordFieldType.STRING.getDataType())); final RecordSchema addressSchema = new SimpleRecordSchema(addressFields); final List<RecordField> detailsFields = new ArrayList<>(); detailsFields.add(new RecordField("position", RecordFieldType.STRING.getDataType())); detailsFields.add(new RecordField("managerName", RecordFieldType.STRING.getDataType())); final RecordSchema detailsSchema = new SimpleRecordSchema(detailsFields); final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("address", RecordFieldType.RECORD.getRecordDataType(addressSchema))); fields.add(new RecordField("details", RecordFieldType.RECORD.getRecordDataType(detailsSchema))); final RecordSchema recordSchema = new SimpleRecordSchema(fields); final Record record = new MapRecord(recordSchema, new HashMap<>()); record.setValue("name", "John Doe"); final Record addressRecord = new MapRecord(addressSchema, new HashMap<>()); addressRecord.setValue("city", "San Francisco"); addressRecord.setValue("state", "CA"); addressRecord.setValue("zip", "12345"); record.setValue("address", addressRecord); final Record detailsRecord = new MapRecord(detailsSchema, new HashMap<>()); detailsRecord.setValue("position", "Developer"); detailsRecord.setValue("managerName", "Jane Doe"); record.setValue("details", detailsRecord); final RecordPath recordPath = RecordPath.compile("/*[./state != 'NY']"); final RecordPathResult result = recordPath.evaluate(record); final List<FieldValue> fieldValues = result.getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("address", fieldValue.getField().getFieldName()); assertEquals("12345", RecordPath.compile("/*[./state != 'NY']/zip").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// A predicate may compare a relative path (./state) against an absolute path (/details/preferredState);
// only address2 (state = NY = preferredState) matches.
@Test public void testPredicateWithAbsolutePath() { final List<RecordField> addressFields = new ArrayList<>(); addressFields.add(new RecordField("city", RecordFieldType.STRING.getDataType())); addressFields.add(new RecordField("state", RecordFieldType.STRING.getDataType())); addressFields.add(new RecordField("zip", RecordFieldType.STRING.getDataType())); final RecordSchema addressSchema = new SimpleRecordSchema(addressFields); final List<RecordField> detailsFields = new ArrayList<>(); detailsFields.add(new RecordField("position", RecordFieldType.STRING.getDataType())); detailsFields.add(new RecordField("preferredState", RecordFieldType.STRING.getDataType())); final RecordSchema detailsSchema = new SimpleRecordSchema(detailsFields); final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("address1", RecordFieldType.RECORD.getRecordDataType(addressSchema))); fields.add(new RecordField("address2", RecordFieldType.RECORD.getRecordDataType(addressSchema))); fields.add(new RecordField("details", RecordFieldType.RECORD.getRecordDataType(detailsSchema))); final RecordSchema recordSchema = new SimpleRecordSchema(fields); final Record record = new MapRecord(recordSchema, new HashMap<>()); record.setValue("name", "John Doe"); final Record address1Record = new MapRecord(addressSchema, new HashMap<>()); address1Record.setValue("city", "San Francisco"); address1Record.setValue("state", "CA"); address1Record.setValue("zip", "12345"); record.setValue("address1", address1Record); final Record address2Record = new MapRecord(addressSchema, new HashMap<>()); address2Record.setValue("city", "New York"); address2Record.setValue("state", "NY"); address2Record.setValue("zip", "01234"); record.setValue("address2", address2Record); final Record detailsRecord = new MapRecord(detailsSchema, new HashMap<>()); detailsRecord.setValue("position", "Developer"); detailsRecord.setValue("preferredState", "NY"); record.setValue("details", detailsRecord); final RecordPath recordPath = RecordPath.compile("/*[./state = /details/preferredState]"); final RecordPathResult result = recordPath.evaluate(record); final List<FieldValue> fieldValues = result.getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("address2", fieldValue.getField().getFieldName()); }
// A path starting with "./" is evaluated relative to the supplied context FieldValue, here the record itself.
@Test public void testRelativePathOnly() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); final FieldValue recordFieldValue = new StandardFieldValue(record, new RecordField("record", RecordFieldType.RECORD.getDataType()), null); final List<FieldValue> fieldValues = RecordPath.compile("./name").evaluate(record, recordFieldValue).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("John Doe", fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); assertEquals("name", fieldValue.getField().getFieldName()); }
// The path "." applied to a non-record context field returns that field itself, and the returned
// FieldValue is writable via updateValue(). (Body continues on the next source line.)
@Test public void testRelativePathAgainstNonRecordField() { final RecordSchema schema = new SimpleRecordSchema(getDefaultFields()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); final FieldValue recordFieldValue = new StandardFieldValue(record, new RecordField("root", RecordFieldType.RECORD.getRecordDataType(record.getSchema())), null); final FieldValue nameFieldValue = new StandardFieldValue("John Doe", new RecordField("name", RecordFieldType.STRING.getDataType()), recordFieldValue); final List<FieldValue> fieldValues = RecordPath.compile(".").evaluate(record, 
// (continuation of testRelativePathAgainstNonRecordField from the previous line)
nameFieldValue).getSelectedFields().collect(Collectors.toList()); assertEquals(1, fieldValues.size()); final FieldValue fieldValue = fieldValues.get(0); assertEquals("John Doe", fieldValue.getValue()); assertEquals(record, fieldValue.getParentRecord().get()); assertEquals("name", fieldValue.getField().getFieldName()); fieldValue.updateValue("Jane Doe"); assertEquals("Jane Doe", record.getValue("name")); }
// substring(path, start, end): negative end counts from the string's end; out-of-range or inverted
// indices yield ""; an over-long end is clamped to the full string.
@Test public void testSubstringFunction() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); final FieldValue fieldValue = RecordPath.compile("substring(/name, 0, 4)").evaluate(record).getSelectedFields().findFirst().get(); assertEquals("John", fieldValue.getValue()); assertEquals("John", RecordPath.compile("substring(/name, 0, -5)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("", RecordPath.compile("substring(/name, 1000, 1005)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("", RecordPath.compile("substring(/name, 4, 3)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substring(/name, 0, 10000)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("", RecordPath.compile("substring(/name, -50, -1)").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// substringBefore / substringBeforeLast: an unmatched or empty search string returns the input unchanged.
@Test public void testSubstringBeforeFunction() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("John", RecordPath.compile("substringBefore(/name, ' ')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringBefore(/name, 'XYZ')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringBefore(/name, '')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John D", RecordPath.compile("substringBeforeLast(/name, 'o')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringBeforeLast(/name, 'XYZ')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringBeforeLast(/name, '')").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// substringAfter / substringAfterLast: mirrors the "before" variants, keeping text after the match.
@Test public void testSubstringAfterFunction() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("hn Doe", RecordPath.compile("substringAfter(/name, 'o')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringAfter(/name, 'XYZ')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringAfter(/name, '')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("n Doe", RecordPath.compile("substringAfter(/name, 'oh')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("e", RecordPath.compile("substringAfterLast(/name, 'o')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringAfterLast(/name, 'XYZ')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("substringAfterLast(/name, '')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("n Doe", RecordPath.compile("substringAfterLast(/name, 'oh')").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// contains() in a predicate: no match yields zero selected fields; the second argument may itself
// be a record path (/id) whose value is matched as text.
@Test public void testContains() { final Record record = createSimpleRecord(); assertEquals("John Doe", RecordPath.compile("/name[contains(., 'o')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[contains(., 'x')]").evaluate(record).getSelectedFields().count()); record.setValue("name", "John Doe 48"); assertEquals("John Doe 48", RecordPath.compile("/name[contains(., /id)]").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// startsWith(): an empty prefix matches everything.
@Test public void testStartsWith() { final Record record = createSimpleRecord(); assertEquals("John Doe", RecordPath.compile("/name[startsWith(., 'J')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[startsWith(., 'x')]").evaluate(record).getSelectedFields().count()); assertEquals("John Doe", RecordPath.compile("/name[startsWith(., '')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// endsWith(): an empty suffix matches everything. (Body continues on the next source line.)
@Test public void testEndsWith() { final Record record = createSimpleRecord(); assertEquals("John Doe", RecordPath.compile("/name[endsWith(., 'e')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[endsWith(., 
'x')]").evaluate(record).getSelectedFields().count()); assertEquals("John Doe", RecordPath.compile("/name[endsWith(., '')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// isEmpty(): true for an absent/null field (relative or absolute reference); a whitespace-only
// value is NOT empty.
@Test public void testIsEmpty() { final Record record = createSimpleRecord(); assertEquals("John Doe", RecordPath.compile("/name[isEmpty(../missing)]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("/name[isEmpty(/missing)]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[isEmpty(../id)]").evaluate(record).getSelectedFields().count()); record.setValue("missing", " "); assertEquals(0L, RecordPath.compile("/name[isEmpty(/missing)]").evaluate(record).getSelectedFields().count()); }
// isBlank(): like isEmpty() but a whitespace-only value IS blank.
@Test public void testIsBlank() { final Record record = createSimpleRecord(); assertEquals("John Doe", RecordPath.compile("/name[isBlank(../missing)]").evaluate(record).getSelectedFields().findFirst().get().getValue()); record.setValue("missing", " "); assertEquals("John Doe", RecordPath.compile("/name[isBlank(../missing)]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("/name[isBlank(/missing)]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[isBlank(../id)]").evaluate(record).getSelectedFields().count()); }
// containsRegex(): the second argument is a regular expression; character classes are honored.
// (Body continues on the next source line.)
@Test public void testContainsRegex() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("John Doe", RecordPath.compile("/name[containsRegex(., 'o')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("/name[containsRegex(., '[xo]')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[containsRegex(., 'x')]").evaluate(record).getSelectedFields().count()); }
// not(): inverts a nested filter function or comparison inside a predicate.
@Test public void testNot() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("John Doe", RecordPath.compile("/name[not(contains(., 'x'))]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[not(. = 'John Doe')]").evaluate(record).getSelectedFields().count()); assertEquals("John Doe", RecordPath.compile("/name[not(. = 'Jane Doe')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// Functions compose: the result of one function can be fed to another inside a predicate.
@Test public void testChainingFunctions() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("John Doe", RecordPath.compile("/name[contains(substringAfter(., 'o'), 'h')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// matchesRegex(): requires a full-string match, unlike containsRegex().
@Test public void testMatchesRegex() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals(0L, RecordPath.compile("/name[matchesRegex(., 'John D')]").evaluate(record).getSelectedFields().count()); assertEquals("John Doe", RecordPath.compile("/name[matchesRegex(., '[John Doe]{8}')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// replace(): literal (non-regex) replacement; works in predicates and as a top-level selection, the
// replacement may itself be a record path, and a numeric field is replaced via its string form.
// (Body continues on the next source line.)
@Test public void testReplace() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("John Doe", RecordPath.compile("/name[replace(../id, 48, 18) = 18]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("/name[replace(../id, 48, 18) = 48]").evaluate(record).getSelectedFields().count()); assertEquals("Jane Doe", RecordPath.compile("replace(/name, 'ohn', 'ane')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("replace(/name, 'ohnny', 'ane')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John 48", RecordPath.compile("replace(/name, 'Doe', /id)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("23", RecordPath.compile("replace(/id, 48, 23)").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// replaceRegex(): regex replacement supporting numbered ($1) and named (${name}) capture-group
// back-references, and a record path as the replacement value.
@Test public void testReplaceRegex() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("ohn oe", RecordPath.compile("replaceRegex(/name, '[JD]', '')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("replaceRegex(/name, 'ohnny', 'ane')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("11", RecordPath.compile("replaceRegex(/id, '[0-9]', 1)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("Jxohn Dxoe", RecordPath.compile("replaceRegex(/name, '([JD])', '$1x')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("Jxohn Dxoe", RecordPath.compile("replaceRegex(/name, '(?<hello>[JD])', '${hello}x')").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("48ohn 48oe", 
// (continuation of testReplaceRegex from the previous line)
RecordPath.compile("replaceRegex(/name, '(?<hello>[JD])', /id)").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// replaceNull(): substitutes the fallback (literal or record path) only when the field is null.
@Test public void testReplaceNull() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("missing", RecordFieldType.LONG.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals(48, RecordPath.compile("replaceNull(/missing, /id)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(14, RecordPath.compile("replaceNull(/missing, 14)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(48, RecordPath.compile("replaceNull(/id, 14)").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// concat(): joins any mix of record paths and literals into a single string.
@Test public void testConcat() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("fullName", RecordFieldType.INT.getDataType())); fields.add(new RecordField("lastName", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("firstName", RecordFieldType.LONG.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("lastName", "Doe"); values.put("firstName", "John"); final Record record = new MapRecord(schema, values); assertEquals("John Doe: 48", RecordPath.compile("concat(/firstName, ' ', /lastName, ': ', 48)").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// fieldName(): yields the name of the selected field; usable standalone, over wildcards, and inside
// predicates (including with the descendant // operator).
@Test public void testFieldName() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); assertEquals("name", RecordPath.compile("fieldName(/name)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("name", RecordPath.compile("fieldName(/*)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("//*[startsWith(fieldName(.), 'na')]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("name", RecordPath.compile("fieldName(//*[startsWith(fieldName(.), 'na')])").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("John Doe", RecordPath.compile("//name[not(startsWith(fieldName(.), 'xyz'))]").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(0L, RecordPath.compile("//name[not(startsWith(fieldName(.), 'n'))]").evaluate(record).getSelectedFields().count()); }
// toDate(): parses a String field into a java.util.Date using the given format.
@Test public void testToDateFromString() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("date", RecordFieldType.DATE.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("date", "2017-10-20T11:00:00Z"); final Record record = new MapRecord(schema, values); assertTrue(RecordPath.compile("toDate(/date, \"yyyy-MM-dd'T'HH:mm:ss'Z'\")").evaluate(record).getSelectedFields().findFirst().get().getValue() instanceof Date); }
// toDate() on a long-typed field is a no-op: the value passes through unconverted.
@Test public void testToDateFromLong() throws ParseException { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("date", RecordFieldType.LONG.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final DateFormat dateFormat = DataTypeUtils.getDateFormat("yyyy-MM-dd"); final long dateValue = dateFormat.parse("2017-10-20T11:00:00Z").getTime(); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("date", dateValue); final Record record = new MapRecord(schema, values); // since the field is a long it shouldn't do the conversion and should return the value unchanged assertTrue(RecordPath.compile("toDate(/date, \"yyyy-MM-dd'T'HH:mm:ss'Z'\")").evaluate(record).getSelectedFields().findFirst().get().getValue() instanceof Long); }
// toDate() on an unparseable string is a no-op: the original string is returned.
@Test public void testToDateFromNonDateString() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.DATE.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); // since the field is a string it shouldn't do the conversion and should return the value unchanged final FieldValue fieldValue = RecordPath.compile("toDate(/name, \"yyyy-MM-dd'T'HH:mm:ss'Z'\")").evaluate(record).getSelectedFields().findFirst().get(); assertEquals("John Doe", fieldValue.getValue()); }
// format(toDate(...), pattern): formats a parsed date; an invalid output pattern leaves the Date as-is.
@Test public void testFormatDateFromString() throws ParseException { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("date", RecordFieldType.DATE.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("date", "2017-10-20T11:00:00Z"); final Record record = new MapRecord(schema, values); final FieldValue fieldValue = RecordPath.compile("format( toDate(/date, \"yyyy-MM-dd'T'HH:mm:ss'Z'\"), 'yyyy-MM-dd' )").evaluate(record).getSelectedFields().findFirst().get(); assertEquals("2017-10-20", fieldValue.getValue()); final FieldValue fieldValueUnchanged = RecordPath.compile("format( toDate(/date, \"yyyy-MM-dd'T'HH:mm:ss'Z'\"), 'INVALID' )").evaluate(record).getSelectedFields().findFirst().get(); assertEquals(DataTypeUtils.getDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").parse("2017-10-20T11:00:00Z"), fieldValueUnchanged.getValue()); }
// format() on a long epoch value: formats it; an invalid pattern returns the long unchanged.
@Test public void testFormatDateFromLong() throws ParseException { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("date", RecordFieldType.LONG.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final DateFormat dateFormat = DataTypeUtils.getDateFormat("yyyy-MM-dd"); final long dateValue = dateFormat.parse("2017-10-20").getTime(); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("date", dateValue); final Record record = new MapRecord(schema, values); assertEquals("2017-10-20", RecordPath.compile("format(/date, 'yyyy-MM-dd' )").evaluate(record).getSelectedFields().findFirst().get().getValue()); final FieldValue fieldValueUnchanged = RecordPath.compile("format(/date, 'INVALID' )").evaluate(record).getSelectedFields().findFirst().get(); assertEquals(dateValue, fieldValueUnchanged.getValue()); }
// format() on an actual Date field: formats it; an invalid pattern returns the Date unchanged.
// (Body continues on the next source line.)
@Test public void testFormatDateFromDate() throws ParseException { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("date", RecordFieldType.DATE.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final DateFormat dateFormat = DataTypeUtils.getDateFormat("yyyy-MM-dd"); final java.util.Date utilDate = dateFormat.parse("2017-10-20"); final Date dateValue = new Date(utilDate.getTime()); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("date", dateValue); final Record record = new MapRecord(schema, 
// (continuation of testToStringBadCharset from the previous line)
RecordFieldType.CHOICE.getChoiceDataType(RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType())))); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("bytes", "Hello World!".getBytes(StandardCharsets.UTF_16)); final Record record = new MapRecord(schema, values); RecordPath.compile("toString(/bytes, \"NOT A REAL CHARSET\")").evaluate(record).getSelectedFields().findFirst().get().getValue(); }
// toBytes(): encodes a String field into a byte[] using the named charset.
@Test public void testToBytes() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("s", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("s", "Hello World!"); final Record record = new MapRecord(schema, values); assertArrayEquals("Hello World!".getBytes(StandardCharsets.UTF_16LE), (byte[]) RecordPath.compile("toBytes(/s, \"UTF-16LE\")").evaluate(record).getSelectedFields().findFirst().get().getValue()); }
// toBytes() with an invalid charset name must throw IllegalCharsetNameException (declared via
// the @Test "expected" attribute).
@Test(expected = IllegalCharsetNameException.class) public void testToBytesBadCharset() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("s", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("s", "Hello World!"); final Record record = new MapRecord(schema, values); RecordPath.compile("toBytes(/s, \"NOT A REAL CHARSET\")").evaluate(record).getSelectedFields().findFirst().get().getValue(); }
// base64Encode(): Strings encode to Base64 Strings, byte[] fields to Base64 byte[]; the wildcard
// case checks each field against the matching expected type.
@Test public void testBase64Encode() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("firstName", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("lastName", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("b", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType()))); final RecordSchema schema = new SimpleRecordSchema(fields); final List<Object> expectedValues = Arrays.asList( Base64.getEncoder().encodeToString("John".getBytes(StandardCharsets.UTF_8)), Base64.getEncoder().encodeToString("Doe".getBytes(StandardCharsets.UTF_8)), Base64.getEncoder().encode("xyz".getBytes(StandardCharsets.UTF_8)) ); final Map<String, Object> values = new HashMap<>(); values.put("firstName", "John"); values.put("lastName", "Doe"); values.put("b", "xyz".getBytes(StandardCharsets.UTF_8)); final Record record = new MapRecord(schema, values); assertEquals(Base64.getEncoder().encodeToString("John".getBytes(StandardCharsets.UTF_8)), RecordPath.compile("base64Encode(/firstName)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals(Base64.getEncoder().encodeToString("Doe".getBytes(StandardCharsets.UTF_8)), RecordPath.compile("base64Encode(/lastName)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertTrue(Arrays.equals(Base64.getEncoder().encode("xyz".getBytes(StandardCharsets.UTF_8)), (byte[]) RecordPath.compile("base64Encode(/b)").evaluate(record).getSelectedFields().findFirst().get().getValue())); List<Object> actualValues = RecordPath.compile("base64Encode(/*)").evaluate(record).getSelectedFields().map(FieldValue::getValue).collect(Collectors.toList()); IntStream.range(0, 3).forEach(i -> { Object expectedObject = expectedValues.get(i); Object actualObject = actualValues.get(i); if (actualObject instanceof String) { assertEquals(expectedObject, actualObject); } else if (actualObject instanceof byte[]) { assertTrue(Arrays.equals((byte[]) expectedObject, (byte[]) actualObject)); } }); }
// base64Decode(): the inverse of base64Encode(), preserving the String/byte[] distinction.
@Test public void testBase64Decode() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("firstName", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("lastName", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("b", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType()))); final RecordSchema schema = new SimpleRecordSchema(fields); final List<Object> expectedValues = Arrays.asList("John", "Doe", "xyz".getBytes(StandardCharsets.UTF_8)); final Map<String, Object> values = new HashMap<>(); values.put("firstName", Base64.getEncoder().encodeToString("John".getBytes(StandardCharsets.UTF_8))); values.put("lastName", Base64.getEncoder().encodeToString("Doe".getBytes(StandardCharsets.UTF_8))); values.put("b", Base64.getEncoder().encode("xyz".getBytes(StandardCharsets.UTF_8))); final Record record = new MapRecord(schema, values); assertEquals("John", RecordPath.compile("base64Decode(/firstName)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertEquals("Doe", RecordPath.compile("base64Decode(/lastName)").evaluate(record).getSelectedFields().findFirst().get().getValue()); assertTrue(Arrays.equals("xyz".getBytes(StandardCharsets.UTF_8), (byte[]) RecordPath.compile("base64Decode(/b)").evaluate(record).getSelectedFields().findFirst().get().getValue())); List<Object> actualValues = RecordPath.compile("base64Decode(/*)").evaluate(record).getSelectedFields().map(FieldValue::getValue).collect(Collectors.toList()); IntStream.range(0, 3).forEach(i -> { Object expectedObject = expectedValues.get(i); Object actualObject = actualValues.get(i); if (actualObject instanceof String) { assertEquals(expectedObject, actualObject); } else if (actualObject instanceof byte[]) { assertTrue(Arrays.equals((byte[]) expectedObject, (byte[]) actualObject)); } }); }
// Builds the shared test schema: id, name, attributes map, a nested account record, an int array,
// and an array of account records.
private List<RecordField> getDefaultFields() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("attributes", RecordFieldType.MAP.getMapDataType(RecordFieldType.STRING.getDataType()))); fields.add(new RecordField("mainAccount", RecordFieldType.RECORD.getRecordDataType(getAccountSchema()))); fields.add(new RecordField("numbers", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.INT.getDataType()))); final DataType accountDataType = RecordFieldType.RECORD.getRecordDataType(getAccountSchema()); final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountDataType); final RecordField accountsField = new RecordField("accounts", accountsType); fields.add(accountsField); return fields; }
// Schema for an account child record: int id plus double balance.
private RecordSchema getAccountSchema() { final List<RecordField> accountFields = new ArrayList<>(); accountFields.add(new RecordField("id", RecordFieldType.INT.getDataType())); accountFields.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType())); final RecordSchema accountSchema = new SimpleRecordSchema(accountFields); return accountSchema; }
// Flat record with id=48, name="John Doe", and a "missing" field left null — used by the
// predicate-function tests above.
private Record createSimpleRecord() { final List<RecordField> fields = new ArrayList<>(); fields.add(new RecordField("id", RecordFieldType.INT.getDataType())); fields.add(new RecordField("name", RecordFieldType.STRING.getDataType())); fields.add(new RecordField("missing", RecordFieldType.STRING.getDataType())); final RecordSchema schema = new SimpleRecordSchema(fields); final Map<String, Object> values = new HashMap<>(); values.put("id", 48); values.put("name", "John Doe"); final Record record = new MapRecord(schema, values); return record; } }
package org.apache.lucene.index; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Bits; import java.io.IOException; import java.util.Arrays; /** * Exposes {@link TermsEnum} API, merged from {@link TermsEnum} API of sub-segments. * This does a merge sort, by term text, of the sub-readers. 
* * @lucene.experimental */ public final class MultiTermsEnum extends TermsEnum { private final TermMergeQueue queue; private final TermsEnumWithSlice[] subs; // all of our subs (one per sub-reader) private final TermsEnumWithSlice[] currentSubs; // current subs that have at least one term for this field private final TermsEnumWithSlice[] top; private final MultiDocsEnum.EnumWithSlice[] subDocs; private final MultiDocsAndPositionsEnum.EnumWithSlice[] subDocsAndPositions; private BytesRef lastSeek; private boolean lastSeekExact; private final BytesRefBuilder lastSeekScratch = new BytesRefBuilder(); private int numTop; private int numSubs; private BytesRef current; static class TermsEnumIndex { public final static TermsEnumIndex[] EMPTY_ARRAY = new TermsEnumIndex[0]; final int subIndex; final TermsEnum termsEnum; public TermsEnumIndex(TermsEnum termsEnum, int subIndex) { this.termsEnum = termsEnum; this.subIndex = subIndex; } } /** Returns how many sub-reader slices contain the current * term. @see #getMatchArray */ public int getMatchCount() { return numTop; } /** Returns sub-reader slices positioned to the current term. */ public TermsEnumWithSlice[] getMatchArray() { return top; } /** Sole constructor. * @param slices Which sub-reader slices we should * merge. 
*/ public MultiTermsEnum(ReaderSlice[] slices) { queue = new TermMergeQueue(slices.length); top = new TermsEnumWithSlice[slices.length]; subs = new TermsEnumWithSlice[slices.length]; subDocs = new MultiDocsEnum.EnumWithSlice[slices.length]; subDocsAndPositions = new MultiDocsAndPositionsEnum.EnumWithSlice[slices.length]; for(int i=0;i<slices.length;i++) { subs[i] = new TermsEnumWithSlice(i, slices[i]); subDocs[i] = new MultiDocsEnum.EnumWithSlice(); subDocs[i].slice = slices[i]; subDocsAndPositions[i] = new MultiDocsAndPositionsEnum.EnumWithSlice(); subDocsAndPositions[i].slice = slices[i]; } currentSubs = new TermsEnumWithSlice[slices.length]; } @Override public BytesRef term() { return current; } /** The terms array must be newly created TermsEnum, ie * {@link TermsEnum#next} has not yet been called. */ public TermsEnum reset(TermsEnumIndex[] termsEnumsIndex) throws IOException { assert termsEnumsIndex.length <= top.length; numSubs = 0; numTop = 0; queue.clear(); for(int i=0;i<termsEnumsIndex.length;i++) { final TermsEnumIndex termsEnumIndex = termsEnumsIndex[i]; assert termsEnumIndex != null; final BytesRef term = termsEnumIndex.termsEnum.next(); if (term != null) { final TermsEnumWithSlice entry = subs[termsEnumIndex.subIndex]; entry.reset(termsEnumIndex.termsEnum, term); queue.add(entry); currentSubs[numSubs++] = entry; } else { // field has no terms } } if (queue.size() == 0) { return TermsEnum.EMPTY; } else { return this; } } @Override public boolean seekExact(BytesRef term) throws IOException { queue.clear(); numTop = 0; boolean seekOpt = false; if (lastSeek != null && lastSeek.compareTo(term) <= 0) { seekOpt = true; } lastSeek = null; lastSeekExact = true; for(int i=0;i<numSubs;i++) { final boolean status; // LUCENE-2130: if we had just seek'd already, prior // to this seek, and the new seek term is after the // previous one, don't try to re-seek this sub if its // current term is already beyond this new seek term. 
// Doing so is a waste because this sub will simply // seek to the same spot. if (seekOpt) { final BytesRef curTerm = currentSubs[i].current; if (curTerm != null) { final int cmp = term.compareTo(curTerm); if (cmp == 0) { status = true; } else if (cmp < 0) { status = false; } else { status = currentSubs[i].terms.seekExact(term); } } else { status = false; } } else { status = currentSubs[i].terms.seekExact(term); } if (status) { top[numTop++] = currentSubs[i]; current = currentSubs[i].current = currentSubs[i].terms.term(); assert term.equals(currentSubs[i].current); } } // if at least one sub had exact match to the requested // term then we found match return numTop > 0; } @Override public SeekStatus seekCeil(BytesRef term) throws IOException { queue.clear(); numTop = 0; lastSeekExact = false; boolean seekOpt = false; if (lastSeek != null && lastSeek.compareTo(term) <= 0) { seekOpt = true; } lastSeekScratch.copyBytes(term); lastSeek = lastSeekScratch.get(); for(int i=0;i<numSubs;i++) { final SeekStatus status; // LUCENE-2130: if we had just seek'd already, prior // to this seek, and the new seek term is after the // previous one, don't try to re-seek this sub if its // current term is already beyond this new seek term. // Doing so is a waste because this sub will simply // seek to the same spot. 
if (seekOpt) { final BytesRef curTerm = currentSubs[i].current; if (curTerm != null) { final int cmp = term.compareTo(curTerm); if (cmp == 0) { status = SeekStatus.FOUND; } else if (cmp < 0) { status = SeekStatus.NOT_FOUND; } else { status = currentSubs[i].terms.seekCeil(term); } } else { status = SeekStatus.END; } } else { status = currentSubs[i].terms.seekCeil(term); } if (status == SeekStatus.FOUND) { top[numTop++] = currentSubs[i]; current = currentSubs[i].current = currentSubs[i].terms.term(); } else { if (status == SeekStatus.NOT_FOUND) { currentSubs[i].current = currentSubs[i].terms.term(); assert currentSubs[i].current != null; queue.add(currentSubs[i]); } else { // enum exhausted currentSubs[i].current = null; } } } if (numTop > 0) { // at least one sub had exact match to the requested term return SeekStatus.FOUND; } else if (queue.size() > 0) { // no sub had exact match, but at least one sub found // a term after the requested term -- advance to that // next term: pullTop(); return SeekStatus.NOT_FOUND; } else { return SeekStatus.END; } } @Override public void seekExact(long ord) { throw new UnsupportedOperationException(); } @Override public long ord() { throw new UnsupportedOperationException(); } private void pullTop() { // extract all subs from the queue that have the same // top term assert numTop == 0; while(true) { top[numTop++] = queue.pop(); if (queue.size() == 0 || !(queue.top()).current.bytesEquals(top[0].current)) { break; } } current = top[0].current; } private void pushTop() throws IOException { // call next() on each top, and put back into queue for(int i=0;i<numTop;i++) { top[i].current = top[i].terms.next(); if (top[i].current != null) { queue.add(top[i]); } else { // no more fields in this reader } } numTop = 0; } @Override public BytesRef next() throws IOException { if (lastSeekExact) { // Must seekCeil at this point, so those subs that // didn't have the term can find the following term. 
// NOTE: we could save some CPU by only seekCeil the // subs that didn't match the last exact seek... but // most impls short-circuit if you seekCeil to term // they are already on. final SeekStatus status = seekCeil(current); assert status == SeekStatus.FOUND; lastSeekExact = false; } lastSeek = null; // restore queue pushTop(); // gather equal top fields if (queue.size() > 0) { pullTop(); } else { current = null; } return current; } @Override public int docFreq() throws IOException { int sum = 0; for(int i=0;i<numTop;i++) { sum += top[i].terms.docFreq(); } return sum; } @Override public long totalTermFreq() throws IOException { long sum = 0; for(int i=0;i<numTop;i++) { final long v = top[i].terms.totalTermFreq(); if (v == -1) { return v; } sum += v; } return sum; } @Override public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException { MultiDocsEnum docsEnum; // Can only reuse if incoming enum is also a MultiDocsEnum if (reuse != null && reuse instanceof MultiDocsEnum) { docsEnum = (MultiDocsEnum) reuse; // ... 
and was previously created w/ this MultiTermsEnum: if (!docsEnum.canReuse(this)) { docsEnum = new MultiDocsEnum(this, subs.length); } } else { docsEnum = new MultiDocsEnum(this, subs.length); } final MultiBits multiLiveDocs; if (liveDocs instanceof MultiBits) { multiLiveDocs = (MultiBits) liveDocs; } else { multiLiveDocs = null; } int upto = 0; for(int i=0;i<numTop;i++) { final TermsEnumWithSlice entry = top[i]; final Bits b; if (multiLiveDocs != null) { // optimize for common case: requested skip docs is a // congruent sub-slice of MultiBits: in this case, we // just pull the liveDocs from the sub reader, rather // than making the inefficient // Slice(Multi(sub-readers)): final MultiBits.SubResult sub = multiLiveDocs.getMatchingSub(entry.subSlice); if (sub.matches) { b = sub.result; } else { // custom case: requested skip docs is foreign: // must slice it on every access b = new BitsSlice(liveDocs, entry.subSlice); } } else if (liveDocs != null) { b = new BitsSlice(liveDocs, entry.subSlice); } else { // no deletions b = null; } assert entry.index < docsEnum.subDocsEnum.length: entry.index + " vs " + docsEnum.subDocsEnum.length + "; " + subs.length; final DocsEnum subDocsEnum = entry.terms.docs(b, docsEnum.subDocsEnum[entry.index], flags); if (subDocsEnum != null) { docsEnum.subDocsEnum[entry.index] = subDocsEnum; subDocs[upto].docsEnum = subDocsEnum; subDocs[upto].slice = entry.subSlice; upto++; } else { // should this be an error? assert false : "One of our subs cannot provide a docsenum"; } } if (upto == 0) { return null; } else { return docsEnum.reset(subDocs, upto); } } @Override public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException { MultiDocsAndPositionsEnum docsAndPositionsEnum; // Can only reuse if incoming enum is also a MultiDocsAndPositionsEnum if (reuse != null && reuse instanceof MultiDocsAndPositionsEnum) { docsAndPositionsEnum = (MultiDocsAndPositionsEnum) reuse; // ... 
and was previously created w/ this MultiTermsEnum: if (!docsAndPositionsEnum.canReuse(this)) { docsAndPositionsEnum = new MultiDocsAndPositionsEnum(this, subs.length); } } else { docsAndPositionsEnum = new MultiDocsAndPositionsEnum(this, subs.length); } final MultiBits multiLiveDocs; if (liveDocs instanceof MultiBits) { multiLiveDocs = (MultiBits) liveDocs; } else { multiLiveDocs = null; } int upto = 0; for(int i=0;i<numTop;i++) { final TermsEnumWithSlice entry = top[i]; final Bits b; if (multiLiveDocs != null) { // Optimize for common case: requested skip docs is a // congruent sub-slice of MultiBits: in this case, we // just pull the liveDocs from the sub reader, rather // than making the inefficient // Slice(Multi(sub-readers)): final MultiBits.SubResult sub = multiLiveDocs.getMatchingSub(top[i].subSlice); if (sub.matches) { b = sub.result; } else { // custom case: requested skip docs is foreign: // must slice it on every access (very // inefficient) b = new BitsSlice(liveDocs, top[i].subSlice); } } else if (liveDocs != null) { b = new BitsSlice(liveDocs, top[i].subSlice); } else { // no deletions b = null; } assert entry.index < docsAndPositionsEnum.subDocsAndPositionsEnum.length: entry.index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.length + "; " + subs.length; final DocsAndPositionsEnum subPostings = entry.terms.docsAndPositions(b, docsAndPositionsEnum.subDocsAndPositionsEnum[entry.index], flags); if (subPostings != null) { docsAndPositionsEnum.subDocsAndPositionsEnum[entry.index] = subPostings; subDocsAndPositions[upto].docsAndPositionsEnum = subPostings; subDocsAndPositions[upto].slice = entry.subSlice; upto++; } else { if (entry.terms.docs(b, null, DocsEnum.FLAG_NONE) != null) { // At least one of our subs does not store // offsets or positions -- we can't correctly // produce a MultiDocsAndPositions enum return null; } } } if (upto == 0) { return null; } else { return docsAndPositionsEnum.reset(subDocsAndPositions, upto); } } final static 
class TermsEnumWithSlice { private final ReaderSlice subSlice; TermsEnum terms; public BytesRef current; final int index; public TermsEnumWithSlice(int index, ReaderSlice subSlice) { this.subSlice = subSlice; this.index = index; assert subSlice.length >= 0: "length=" + subSlice.length; } public void reset(TermsEnum terms, BytesRef term) { this.terms = terms; current = term; } @Override public String toString() { return subSlice.toString()+":"+terms; } } private final static class TermMergeQueue extends PriorityQueue<TermsEnumWithSlice> { TermMergeQueue(int size) { super(size); } @Override protected boolean lessThan(TermsEnumWithSlice termsA, TermsEnumWithSlice termsB) { final int cmp = termsA.current.compareTo(termsB.current); if (cmp != 0) { return cmp < 0; } else { return termsA.subSlice.start < termsB.subSlice.start; } } } @Override public String toString() { return "MultiTermsEnum(" + Arrays.toString(subs) + ")"; } }
/* * Copyright 2010 - 2013 Ed Venaglia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.venaglia.nondairy.util; import com.intellij.lang.ASTNode; import com.intellij.lang.Language; import com.intellij.mock.MockDocument; import com.intellij.openapi.editor.Document; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.FileViewProvider; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.PsiReference; import com.intellij.testFramework.LightVirtualFile; import net.venaglia.nondairy.SoyTestUtil; import net.venaglia.nondairy.mocks.MockProjectEnvironment; import net.venaglia.nondairy.mocks.MockSoyFile; import net.venaglia.nondairy.soylang.SoyFileType; import net.venaglia.nondairy.soylang.SoyLanguage; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.nio.charset.Charset; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; /** * User: ed * Date: 3/11/12 * Time: 9:53 PM */ public class SourceTuple { private static final CharSequence SOURCE_AS_RESOURCE = new ImmutableCharSequence(""); public final String name; public final PsiElement root; public final PsiFile psi; public final VirtualFile file; public final String 
fileUrl; public final Document document; public SourceTuple(@NonNls String name) { this(name, SOURCE_AS_RESOURCE); } public SourceTuple(@NonNls @NotNull String name, @NonNls @NotNull CharSequence source) { this.name = name; PsiElement[] children = { null }; psi = new MockSoyFile(children, new MyFileViewProvider()); root = source == SOURCE_AS_RESOURCE ? SoyTestUtil.getPsiTreeFor(psi, name) : SoyTestUtil.getPsiTreeFor(psi, name, source); children[0] = root; String text = source == SOURCE_AS_RESOURCE ? loadSource(name) : source.toString(); file = new LightVirtualFile(name, SoyFileType.INSTANCE, text, Charset.defaultCharset(), System.currentTimeMillis()); fileUrl = file.getUrl(); document = new MockDocument(text); } private String loadSource(String name) { try { return SoyTestUtil.getTestSourceBuffer(name); } catch (IOException e) { throw new RuntimeException(e); } } private class MyFileViewProvider implements FileViewProvider { private Map<Key,Object> userData = new HashMap<Key,Object>(); @NotNull @Override public PsiManager getManager() { return MockProjectEnvironment.getUnitTestPsiManager(); } @Override public Document getDocument() { return document; } @NotNull @Override public CharSequence getContents() { return root.getText(); } @NotNull @Override public VirtualFile getVirtualFile() { return file; } @NotNull @Override public Language getBaseLanguage() { return SoyLanguage.INSTANCE; } @NotNull @Override public Set<Language> getLanguages() { return Collections.<Language>singleton(SoyLanguage.INSTANCE); } @Override public PsiFile getPsi(@NotNull Language target) { return psi; } @NotNull @Override public List<PsiFile> getAllFiles() { return Collections.singletonList(psi); } @Override public boolean isEventSystemEnabled() { return true; } @Override public boolean isPhysical() { return true; } @Override public long getModificationStamp() { return file.getModificationStamp(); } @Override public boolean supportsIncrementalReparse(@NotNull Language rootLanguage) { return 
false; } @Override public void rootChanged(@NotNull PsiFile psiFile) { } @Override public void beforeContentsSynchronized() { } @Override public void contentsSynchronized() { } @SuppressWarnings("CloneDoesntCallSuperClone") @Override public FileViewProvider clone() { return new MyFileViewProvider(); } @Override public PsiElement findElementAt(int offset) { ASTNode node = root.getNode(); if (offset >= node.getTextLength()) { return null; } boolean foundChild = true; while (foundChild) { foundChild = false; for (ASTNode child : node.getChildren(null)) { int startOffset = child.getStartOffset(); if (offset >= startOffset && offset < startOffset + child.getTextLength()) { node = child; foundChild = true; break; } } } return node.getPsi(); } @Override public PsiReference findReferenceAt(int offset) { PsiElement element = findElementAt(offset); return element == null ? null : element.getReference(); } @Override public PsiElement findElementAt(int offset, @NotNull Language language) { if (language == Language.ANY || language.equals(SoyLanguage.INSTANCE)) { return findElementAt(offset); } return null; } @Override public PsiElement findElementAt(int offset, @NotNull Class<? 
extends Language> lang) { if (SoyLanguage.class.isAssignableFrom(lang)) { return findElementAt(offset); } return null; } @Override public PsiReference findReferenceAt(int offsetInElement, @NotNull Language language) { if (language == Language.ANY || language.equals(SoyLanguage.INSTANCE)) { return findReferenceAt(offsetInElement); } return null; } @NotNull @Override public FileViewProvider createCopy(@NotNull VirtualFile copy) { return new MyFileViewProvider(); } @Override public <T> T getUserData(@NotNull Key<T> key) { return key.get(userData); } @Override public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) { userData.put(key, value); } @NotNull @Override public PsiFile getStubBindingRoot() { return root.getContainingFile(); } @NotNull @Override public FileType getFileType() { return file.getFileType(); } } }
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.compiler.impl; import com.intellij.CommonBundle; import com.intellij.build.BuildContentManager; import com.intellij.compiler.*; import com.intellij.compiler.progress.CompilerMessagesService; import com.intellij.compiler.progress.CompilerTask; import com.intellij.compiler.server.BuildManager; import com.intellij.compiler.server.DefaultMessageHandler; import com.intellij.ide.nls.NlsMessages; import com.intellij.notification.Notification; import com.intellij.notification.NotificationListener; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.compiler.*; import com.intellij.openapi.deployment.DeploymentUtil; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.module.LanguageLevelUtil; import com.intellij.openapi.module.Module; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressIndicatorProvider; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.CompilerModuleExtension; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ui.configuration.DefaultModuleConfigurationEditorFactory; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.NlsContexts; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.HtmlChunk; import 
com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.openapi.wm.*; import com.intellij.packaging.artifacts.Artifact; import com.intellij.packaging.impl.compiler.ArtifactCompilerUtil; import com.intellij.packaging.impl.compiler.ArtifactsCompiler; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.PsiDocumentManager; import com.intellij.tracing.Tracer; import com.intellij.util.Chunk; import com.intellij.util.SystemProperties; import com.intellij.util.ThrowableRunnable; import com.intellij.util.containers.CollectionFactory; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.DateFormatUtil; import org.jetbrains.annotations.*; import org.jetbrains.jps.api.*; import org.jetbrains.jps.model.java.JavaSourceRootType; import javax.swing.*; import javax.swing.event.HyperlinkEvent; import java.awt.*; import java.io.IOException; import java.lang.ref.WeakReference; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.*; import java.util.concurrent.TimeUnit; import static org.jetbrains.jps.api.CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.TargetTypeBuildScope; public final class CompileDriver { private static final Logger LOG = Logger.getInstance(CompileDriver.class); private static final Key<Boolean> COMPILATION_STARTED_AUTOMATICALLY = Key.create("compilation_started_automatically"); private static final Key<ExitStatus> COMPILE_SERVER_BUILD_STATUS = Key.create("COMPILE_SERVER_BUILD_STATUS"); private static final long ONE_MINUTE_MS = 60L * 1000L; private final Project myProject; private final Map<Module, String> myModuleOutputPaths = new HashMap<>(); private final Map<Module, String> myModuleTestOutputPaths = new HashMap<>(); public CompileDriver(Project project) { myProject = project; } @SuppressWarnings({"deprecation", "unused"}) public void 
setCompilerFilter(@SuppressWarnings("unused") CompilerFilter compilerFilter) { } public void rebuild(CompileStatusNotification callback) { doRebuild(callback, new ProjectCompileScope(myProject)); } public void make(CompileScope scope, CompileStatusNotification callback) { make(scope, false, callback); } public void make(CompileScope scope, boolean withModalProgress, CompileStatusNotification callback) { if (validateCompilerConfiguration(scope)) { startup(scope, false, false, withModalProgress, callback, null); } else { callback.finished(true, 0, 0, DummyCompileContext.create(myProject)); } } public boolean isUpToDate(@NotNull CompileScope scope) { if (LOG.isDebugEnabled()) { LOG.debug("isUpToDate operation started"); } final CompilerTask task = new CompilerTask(myProject, JavaCompilerBundle.message("classes.up.to.date.check"), true, false, false, isCompilationStartedAutomatically(scope)); final CompileContextImpl compileContext = new CompileContextImpl(myProject, task, scope, true, false); final Ref<ExitStatus> result = new Ref<>(); Runnable compileWork = () -> { ProgressIndicator indicator = compileContext.getProgressIndicator(); if (indicator.isCanceled() || myProject.isDisposed()) { return; } final BuildManager buildManager = BuildManager.getInstance(); try { buildManager.postponeBackgroundTasks(); buildManager.cancelAutoMakeTasks(myProject); TaskFuture<?> future = compileInExternalProcess(compileContext, true); if (future != null) { while (!future.waitFor(200L, TimeUnit.MILLISECONDS)) { if (indicator.isCanceled()) { future.cancel(false); } } } } catch (Throwable e) { LOG.error(e); } finally { ExitStatus exitStatus = COMPILE_SERVER_BUILD_STATUS.get(compileContext); task.setEndCompilationStamp(exitStatus, System.currentTimeMillis()); result.set(exitStatus); buildManager.allowBackgroundTasks(false); if (!myProject.isDisposed()) { CompilerCacheManager.getInstance(myProject).flushCaches(); } } }; ProgressIndicatorProvider indicatorProvider = 
ProgressIndicatorProvider.getInstance(); if (!EventQueue.isDispatchThread() && indicatorProvider.getProgressIndicator() != null) { // if called from background process on pooled thread, run synchronously task.run(compileWork, null, indicatorProvider.getProgressIndicator()); } else { task.start(compileWork, null); } if (LOG.isDebugEnabled()) { LOG.debug("isUpToDate operation finished"); } return ExitStatus.UP_TO_DATE.equals(result.get()); } public void compile(CompileScope scope, CompileStatusNotification callback) { if (validateCompilerConfiguration(scope)) { startup(scope, false, true, callback, null); } else { callback.finished(true, 0, 0, DummyCompileContext.create(myProject)); } } private void doRebuild(CompileStatusNotification callback, final CompileScope compileScope) { if (validateCompilerConfiguration(compileScope)) { startup(compileScope, true, false, callback, null); } else { callback.finished(true, 0, 0, DummyCompileContext.create(myProject)); } } public static void setCompilationStartedAutomatically(CompileScope scope) { //todo[nik] pass this option as a parameter to compile/make methods instead scope.putUserData(COMPILATION_STARTED_AUTOMATICALLY, true); } private static boolean isCompilationStartedAutomatically(CompileScope scope) { return Boolean.TRUE.equals(scope.getUserData(COMPILATION_STARTED_AUTOMATICALLY)); } private List<TargetTypeBuildScope> getBuildScopes(@NotNull CompileContextImpl compileContext, CompileScope scope, Collection<String> paths) { List<TargetTypeBuildScope> scopes = new ArrayList<>(); final boolean forceBuild = !compileContext.isMake(); List<TargetTypeBuildScope> explicitScopes = CompileScopeUtil.getBaseScopeForExternalBuild(scope); if (explicitScopes != null) { scopes.addAll(explicitScopes); } else if (!compileContext.isRebuild() && (!paths.isEmpty() || !CompileScopeUtil.allProjectModulesAffected(compileContext))) { CompileScopeUtil.addScopesForSourceSets(scope.getAffectedSourceSets(), scope.getAffectedUnloadedModules(), 
scopes, forceBuild); } else { final Collection<ModuleSourceSet> sourceSets = scope.getAffectedSourceSets(); boolean includeTests = sourceSets.isEmpty(); for (ModuleSourceSet sourceSet : sourceSets) { if (sourceSet.getType().isTest()) { includeTests = true; break; } } if (includeTests) { scopes.addAll(CmdlineProtoUtil.createAllModulesScopes(forceBuild)); } else { scopes.add(CmdlineProtoUtil.createAllModulesProductionScope(forceBuild)); } } if (paths.isEmpty()) { scopes = mergeScopesFromProviders(scope, scopes, forceBuild); } return scopes; } private List<TargetTypeBuildScope> mergeScopesFromProviders(CompileScope scope, List<TargetTypeBuildScope> scopes, boolean forceBuild) { for (BuildTargetScopeProvider provider : BuildTargetScopeProvider.EP_NAME.getExtensions()) { List<TargetTypeBuildScope> providerScopes = ReadAction.compute( () -> myProject.isDisposed() ? Collections.emptyList() : provider.getBuildTargetScopes(scope, myProject, forceBuild)); scopes = CompileScopeUtil.mergeScopes(scopes, providerScopes); } return scopes; } @Nullable private TaskFuture<?> compileInExternalProcess(@NotNull final CompileContextImpl compileContext, final boolean onlyCheckUpToDate) { final CompileScope scope = compileContext.getCompileScope(); final Collection<String> paths = CompileScopeUtil.fetchFiles(compileContext); List<TargetTypeBuildScope> scopes = getBuildScopes(compileContext, scope, paths); // need to pass scope's user data to server final Map<String, String> builderParams; if (onlyCheckUpToDate) { builderParams = new HashMap<>(); } else { Map<Key<?>, Object> exported = scope.exportUserData(); if (!exported.isEmpty()) { builderParams = new HashMap<>(); for (Map.Entry<Key<?>, Object> entry : exported.entrySet()) { final String _key = entry.getKey().toString(); final String _value = entry.getValue().toString(); builderParams.put(_key, _value); } } else { builderParams = new HashMap<>(); } } if (!scope.getAffectedUnloadedModules().isEmpty()) { 
builderParams.put(BuildParametersKeys.LOAD_UNLOADED_MODULES, Boolean.TRUE.toString()); } Map<String, List<Artifact>> outputToArtifact = ArtifactCompilerUtil.containsArtifacts(scopes) ? ArtifactCompilerUtil.createOutputToArtifactMap(myProject) : null; return BuildManager.getInstance() .scheduleBuild(myProject, compileContext.isRebuild(), compileContext.isMake(), onlyCheckUpToDate, scopes, paths, builderParams, new DefaultMessageHandler(myProject) { @Override public void sessionTerminated(@NotNull UUID sessionId) { if (compileContext.shouldUpdateProblemsView()) { ProblemsView view = myProject.getServiceIfCreated(ProblemsView.class); if (view != null) { view.clearProgress(); view.clearOldMessages(compileContext.getCompileScope(), compileContext.getSessionId()); } } } @Override public void handleFailure(@NotNull UUID sessionId, CmdlineRemoteProto.Message.Failure failure) { //noinspection HardCodedStringLiteral compileContext .addMessage(CompilerMessageCategory.ERROR, failure.hasDescription() ? failure.getDescription() : "", null, -1, -1); final String trace = failure.hasStacktrace() ? 
failure.getStacktrace() : null;
        if (trace != null) {
          LOG.info(trace);
        }
        compileContext.putUserData(COMPILE_SERVER_BUILD_STATUS, ExitStatus.ERRORS);
      }

      // Routes one message received from the external build process.
      // PROGRESS messages only drive the progress indicator; every other kind is
      // mapped to a CompilerMessageCategory and recorded in the compile context.
      @Override
      protected void handleCompileMessage(UUID sessionId, CmdlineRemoteProto.Message.BuilderMessage.CompileMessage message) {
        final CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind kind = message.getKind();
        //System.out.println(compilerMessage.getText());
        //noinspection HardCodedStringLiteral
        final String messageText = message.getText();
        if (kind == CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind.PROGRESS) {
          final ProgressIndicator indicator = compileContext.getProgressIndicator();
          indicator.setText(messageText);
          if (message.hasDone()) {
            indicator.setFraction(message.getDone());
          }
        }
        else {
          final CompilerMessageCategory category = convertToCategory(kind, CompilerMessageCategory.INFORMATION);
          String sourceFilePath = message.hasSourceFilePath() ? message.getSourceFilePath() : null;
          if (sourceFilePath != null) {
            sourceFilePath = FileUtil.toSystemIndependentName(sourceFilePath);
          }
          // line/column default to -1 ("unknown") when the message carries no position
          final long line = message.hasLine() ? message.getLine() : -1;
          final long column = message.hasColumn() ? message.getColumn() : -1;
          final String srcUrl = sourceFilePath != null ?
                                VirtualFileManager.constructUrl(LocalFileSystem.PROTOCOL, sourceFilePath) : null;
          compileContext
            .addMessage(category, messageText, srcUrl, (int)line, (int)column, null, message.getModuleNamesList());
          if (compileContext.shouldUpdateProblemsView() &&
              kind == CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind.JPS_INFO) {
            // treat JPS_INFO messages in a special way: add them as info messages to the problems view
            final Project project = compileContext.getProject();
            ProblemsView.getInstance(project).addMessage(
              new CompilerMessageImpl(project, category, messageText),
              compileContext.getSessionId()
            );
          }
        }
      }

      // Reacts to build lifecycle events sent by the external build process.
      @Override
      protected void handleBuildEvent(UUID sessionId, CmdlineRemoteProto.Message.BuilderMessage.BuildEvent event) {
        final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.Type eventType = event.getEventType();
        switch (eventType) {
          case FILES_GENERATED:
            // Notify listeners about every generated file and collect the written
            // artifact output paths so ArtifactsCompiler can be updated below.
            final List<CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.GeneratedFile> generated = event.getGeneratedFilesList();
            CompilationStatusListener publisher =
              myProject.isDisposed() ? null : myProject.getMessageBus().syncPublisher(CompilerTopics.COMPILATION_STATUS);
            Set<String> writtenArtifactOutputPaths = outputToArtifact != null ?
                                                    CollectionFactory.createFilePathSet() : null;
            for (CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.GeneratedFile generatedFile : generated) {
              final String root = FileUtil.toSystemIndependentName(generatedFile.getOutputRoot());
              final String relativePath = FileUtil.toSystemIndependentName(generatedFile.getRelativePath());
              if (publisher != null) {
                publisher.fileGenerated(root, relativePath);
              }
              if (outputToArtifact != null) {
                Collection<Artifact> artifacts = outputToArtifact.get(root);
                if (artifacts != null && !artifacts.isEmpty()) {
                  writtenArtifactOutputPaths
                    .add(FileUtil.toSystemDependentName(DeploymentUtil.appendToPath(root, relativePath)));
                }
              }
            }
            if (writtenArtifactOutputPaths != null && !writtenArtifactOutputPaths.isEmpty()) {
              ArtifactsCompiler.addWrittenPaths(compileContext, writtenArtifactOutputPaths);
            }
            break;
          case BUILD_COMPLETED:
            // Map the build-process completion status onto the driver's ExitStatus.
            // putUserDataIfAbsent keeps a status recorded earlier (e.g. ERRORS) intact.
            ExitStatus status = ExitStatus.SUCCESS;
            if (event.hasCompletionStatus()) {
              final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.Status completionStatus = event.getCompletionStatus();
              switch (completionStatus) {
                case CANCELED:
                  status = ExitStatus.CANCELLED;
                  break;
                case ERRORS:
                  status = ExitStatus.ERRORS;
                  break;
                case SUCCESS:
                  status = ExitStatus.SUCCESS;
                  break;
                case UP_TO_DATE:
                  status = ExitStatus.UP_TO_DATE;
                  break;
              }
            }
            compileContext.putUserDataIfAbsent(COMPILE_SERVER_BUILD_STATUS, status);
            break;
          case CUSTOM_BUILDER_MESSAGE:
            // Only the JPS system "unprocessed FS changes" message is surfaced to the user.
            if (event.hasCustomBuilderMessage()) {
              final CmdlineRemoteProto.Message.BuilderMessage.BuildEvent.CustomBuilderMessage message = event.getCustomBuilderMessage();
              if (GlobalOptions.JPS_SYSTEM_BUILDER_ID.equals(message.getBuilderId()) &&
                  GlobalOptions.JPS_UNPROCESSED_FS_CHANGES_MESSAGE_ID.equals(message.getMessageType())) {
                //noinspection HardCodedStringLiteral
                final String text = message.getMessageText();
                if (!StringUtil.isEmpty(text)) {
                  compileContext.addMessage(CompilerMessageCategory.INFORMATION, text, null, -1, -1);
                }
              }
            }
            break;
        }
      }

      @Override
      public @NotNull ProgressIndicator getProgressIndicator()
{
        return compileContext.getProgressIndicator();
      }
    });
  }

  // Convenience overload: starts a build without modal progress.
  private void startup(final CompileScope scope,
                       final boolean isRebuild,
                       final boolean forceCompile,
                       final CompileStatusNotification callback,
                       final CompilerMessage message) {
    startup(scope, isRebuild, forceCompile, false, callback, message);
  }

  // Prepares and launches a build session for the given scope.
  // Must be called on the EDT: first saves the project, settings and documents,
  // then schedules the actual build work on a CompilerTask.
  private void startup(final CompileScope scope,
                       final boolean isRebuild,
                       final boolean forceCompile,
                       boolean withModalProgress,
                       final CompileStatusNotification callback,
                       final CompilerMessage message) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    final boolean isUnitTestMode = ApplicationManager.getApplication().isUnitTestMode();
    final String name = JavaCompilerBundle
      .message(
        isRebuild ? "compiler.content.name.rebuild" : forceCompile ? "compiler.content.name.recompile" : "compiler.content.name.make");
    Tracer.Span span = Tracer.start(name + " preparation");
    final CompilerTask compileTask = new CompilerTask(
      myProject, name, isUnitTestMode, !withModalProgress, true, isCompilationStartedAutomatically(scope), withModalProgress
    );
    StatusBar.Info.set("", myProject, "Compiler");
    // ensure the project model seen by build process is up-to-date
    myProject.save();
    if (!isUnitTestMode) {
      ApplicationManager.getApplication().saveSettings();
    }
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();
    FileDocumentManager.getInstance().saveAllDocuments();
    final CompileContextImpl compileContext =
      new CompileContextImpl(myProject, compileTask, scope, !isRebuild && !forceCompile, isRebuild);
    span.complete();
    final Runnable compileWork = () -> {
      Tracer.Span compileWorkSpan = Tracer.start("compileWork");
      final ProgressIndicator indicator = compileContext.getProgressIndicator();
      if (indicator.isCanceled() || myProject.isDisposed()) {
        if (callback != null) {
          callback.finished(true, 0, 0, compileContext);
        }
        return;
      }
      CompilerCacheManager compilerCacheManager = CompilerCacheManager.getInstance(myProject);
      final BuildManager buildManager = BuildManager.getInstance();
      try {
        buildManager.postponeBackgroundTasks();
        buildManager.cancelAutoMakeTasks(myProject);
        LOG.info("COMPILATION STARTED (BUILD PROCESS)");
        if (message != null) {
          compileContext.addMessage(message);
        }
        if (isRebuild) {
          // a rebuild starts from a clean slate: drop all build-system caches first
          CompilerUtil.runInContext(compileContext, JavaCompilerBundle.message("progress.text.clearing.build.system.data"),
                                    (ThrowableRunnable<Throwable>)() -> compilerCacheManager
                                      .clearCaches(compileContext));
        }
        final boolean beforeTasksOk = executeCompileTasks(compileContext, true);
        final int errorCount = compileContext.getMessageCount(CompilerMessageCategory.ERROR);
        if (!beforeTasksOk || errorCount > 0) {
          COMPILE_SERVER_BUILD_STATUS.set(compileContext, errorCount > 0 ? ExitStatus.ERRORS : ExitStatus.CANCELLED);
          return;
        }
        TaskFuture<?> future = compileInExternalProcess(compileContext, false);
        if (future != null) {
          Tracer.Span compileInExternalProcessSpan = Tracer.start("compile in external process");
          // poll the external build, forwarding a user cancellation to the future
          while (!future.waitFor(200L, TimeUnit.MILLISECONDS)) {
            if (indicator.isCanceled()) {
              future.cancel(false);
            }
          }
          compileInExternalProcessSpan.complete();
          if (!executeCompileTasks(compileContext, false)) {
            COMPILE_SERVER_BUILD_STATUS.set(compileContext, ExitStatus.CANCELLED);
          }
          if (compileContext.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
            COMPILE_SERVER_BUILD_STATUS.set(compileContext, ExitStatus.ERRORS);
          }
        }
      }
      catch (ProcessCanceledException ignored) {
        compileContext.putUserDataIfAbsent(COMPILE_SERVER_BUILD_STATUS, ExitStatus.CANCELLED);
      }
      catch (Throwable e) {
        LOG.error(e); // todo
      }
      finally {
        compileWorkSpan.complete();
        buildManager.allowBackgroundTasks(
          true // reset state on explicit build to compensate possibly unbalanced postpone/allow calls (e.g. via BatchFileChangeListener.start/stop)
        );
        Tracer.Span flushCompilerCaches = Tracer.start("flush compiler caches");
        compilerCacheManager.flushCaches();
        flushCompilerCaches.complete();
        final long duration = notifyCompilationCompleted(compileContext, callback, COMPILE_SERVER_BUILD_STATUS.get(compileContext));
        CompilerUtil.logDuration(
          "\tCOMPILATION FINISHED (BUILD PROCESS); Errors: " +
          compileContext.getMessageCount(CompilerMessageCategory.ERROR) +
          "; warnings: " +
          compileContext.getMessageCount(CompilerMessageCategory.WARNING),
          duration
        );
        if (SystemProperties.getBooleanProperty("idea.is.integration.test", false)) {
          // integration-test mode: dump build metrics as JSON into the log directory
          String logPath = PathManager.getLogPath();
          Path perfMetrics = Paths.get(logPath).resolve("performance-metrics").resolve("buildMetrics.json");
          try {
            FileUtil.writeToFile(perfMetrics.toFile(),
                                 "{\n\t\"build_errors\" : " + compileContext.getMessageCount(CompilerMessageCategory.ERROR) +
                                 "," +
                                 "\n\t\"build_warnings\" : " + compileContext.getMessageCount(CompilerMessageCategory.WARNING) +
                                 "," +
                                 "\n\t\"build_compilation_duration\" : " + duration +
                                 "\n}");
          }
          catch (IOException ex) {
            LOG.info("Could not create json file with the build performance metrics.");
          }
        }
      }
    };
    compileTask.start(compileWork, () -> {
      if (isRebuild) {
        // explicit rebuild: offer the user to run an incremental build instead
        final int rv = Messages.showOkCancelDialog(
          myProject, JavaCompilerBundle.message("you.are.about.to.rebuild.the.whole.project"),
          JavaCompilerBundle.message("confirm.project.rebuild"),
          CommonBundle.message("button.build"), JavaCompilerBundle.message("button.rebuild"),
          Messages.getQuestionIcon()
        );
        if (rv == Messages.OK /*yes, please, do run make*/) {
          startup(scope, false, false, callback, null);
          return;
        }
      }
      startup(scope, isRebuild, forceCompile, callback, message);
    });
  }

  @Nullable
  @TestOnly
  public static ExitStatus getExternalBuildExitStatus(CompileContext context) {
    return context.getUserData(COMPILE_SERVER_BUILD_STATUS);
  }

  /**
   * Performs post-build notification work (output-root VFS refresh, callback,
   * balloon and notification on the EDT) and returns the build duration in milliseconds.
   *
   * @noinspection SSBasedInspection
   */
  private long notifyCompilationCompleted(final CompileContextImpl compileContext,
final CompileStatusNotification callback, final ExitStatus _status) {
    long endCompilationStamp = System.currentTimeMillis();
    compileContext.getBuildSession().setEndCompilationStamp(_status, endCompilationStamp);
    final long duration = endCompilationStamp - compileContext.getStartCompilationStamp();
    if (!myProject.isDisposed()) {
      // refresh on output roots is required in order for the order enumerator to see all roots via VFS
      final Module[] affectedModules = compileContext.getCompileScope().getAffectedModules();
      if (_status != ExitStatus.UP_TO_DATE && _status != ExitStatus.CANCELLED) {
        // have to refresh in case of errors too, because run configuration may be set to ignore errors
        Collection<String> affectedRoots = ContainerUtil.newHashSet(CompilerPaths.getOutputPaths(affectedModules));
        if (!affectedRoots.isEmpty()) {
          ProgressIndicator indicator = compileContext.getProgressIndicator();
          indicator.setText(JavaCompilerBundle.message("synchronizing.output.directories"));
          CompilerUtil.refreshOutputRoots(affectedRoots);
          indicator.setText("");
        }
      }
    }
    // the remaining notification work (callback + balloon + notification) runs on the EDT
    SwingUtilities.invokeLater(() -> {
      int errorCount = 0;
      int warningCount = 0;
      try {
        errorCount = compileContext.getMessageCount(CompilerMessageCategory.ERROR);
        warningCount = compileContext.getMessageCount(CompilerMessageCategory.WARNING);
      }
      finally {
        // the callback must fire even if counting the messages fails
        if (callback != null) {
          callback.finished(_status == ExitStatus.CANCELLED, errorCount, warningCount, compileContext);
        }
      }
      if (!myProject.isDisposed()) {
        final String statusMessage = createStatusMessage(_status, warningCount, errorCount, duration);
        final MessageType messageType = errorCount > 0 ? MessageType.ERROR : warningCount > 0 ? MessageType.WARNING : MessageType.INFO;
        if (duration > ONE_MINUTE_MS && CompilerWorkspaceConfiguration.getInstance(myProject).DISPLAY_NOTIFICATION_POPUP) {
          // long builds additionally get a balloon over the build/messages tool window
          String toolWindowId = Registry.is("ide.jps.use.build.tool.window", true) ?
                                BuildContentManager.TOOL_WINDOW_ID : ToolWindowId.MESSAGES_WINDOW;
          ToolWindowManager.getInstance(myProject).notifyByBalloon(toolWindowId, messageType, statusMessage);
        }
        final String wrappedMessage = _status != ExitStatus.UP_TO_DATE ? HtmlChunk.link("#", statusMessage).toString() : statusMessage;
        final Notification notification =
          CompilerManager.NOTIFICATION_GROUP.createNotification(wrappedMessage, messageType.toNotificationType())
            .setListener(new BuildToolWindowActivationListener(compileContext))
            .setImportant(false);
        compileContext.getBuildSession().registerCloseAction(notification::expire);
        notification.notify(myProject);
        if (_status != ExitStatus.UP_TO_DATE && compileContext.getMessageCount(null) > 0) {
          final String msg = DateFormatUtil.formatDateTime(new Date()) + " - " + statusMessage;
          compileContext.addMessage(CompilerMessageCategory.INFORMATION, msg, null, -1, -1);
        }
      }
    });
    return duration;
  }

  // Builds the human-readable status line shown when a build finishes.
  private static @Nls String createStatusMessage(final ExitStatus status, final int warningCount, final int errorCount, long duration) {
    String message;
    if (status == ExitStatus.CANCELLED) {
      message = JavaCompilerBundle.message("status.compilation.aborted");
    }
    else if (status == ExitStatus.UP_TO_DATE) {
      message = JavaCompilerBundle.message("status.all.up.to.date");
    }
    else {
      String durationString = NlsMessages.formatDurationApproximate(duration);
      if (status == ExitStatus.SUCCESS) {
        message = warningCount > 0 ?
                  JavaCompilerBundle.message("status.compilation.completed.successfully.with.warnings", warningCount, durationString) :
                  JavaCompilerBundle.message("status.compilation.completed.successfully", durationString);
      }
      else {
        message = JavaCompilerBundle.message("status.compilation.completed.successfully.with.warnings.and.errors",
                                             errorCount, warningCount, durationString);
      }
    }
    return message;
  }

  // [mike] performance optimization - this method is accessed > 15,000 times in Aurora
  // (results are memoized per module in myModuleOutputPaths / myModuleTestOutputPaths)
  private String getModuleOutputPath(Module module, boolean inTestSourceContent) {
    Map<Module, String> map = inTestSourceContent ? myModuleTestOutputPaths : myModuleOutputPaths;
    return map.computeIfAbsent(module, k -> CompilerPaths.getModuleOutputPath(module, inTestSourceContent));
  }

  // Runs a single CompileTask under a CompilerTask progress, saving all documents first.
  public void executeCompileTask(final CompileTask task,
                                 final CompileScope scope,
                                 final @NlsContexts.TabTitle String contentName,
                                 final Runnable onTaskFinished) {
    final CompilerTask progressManagerTask =
      new CompilerTask(myProject, contentName, false, false, true, isCompilationStartedAutomatically(scope));
    final CompileContextImpl compileContext = new CompileContextImpl(myProject, progressManagerTask, scope, false, false);
    FileDocumentManager.getInstance().saveAllDocuments();
    progressManagerTask.start(() -> {
      try {
        task.execute(compileContext);
      }
      catch (ProcessCanceledException ex) {
        // suppressed
      }
      finally {
        if (onTaskFinished != null) {
          onTaskFinished.run();
        }
      }
    }, null);
  }

  // Executes all registered before-/after-compile tasks.
  // Returns false when the project is disposed or a task returns false.
  private boolean executeCompileTasks(@NotNull final CompileContext context, final boolean beforeTasks) {
    if (myProject.isDisposed()) {
      return false;
    }
    final CompilerManager manager = CompilerManager.getInstance(myProject);
    final ProgressIndicator progressIndicator = context.getProgressIndicator();
    progressIndicator.pushState();
    try {
      List<CompileTask> tasks = beforeTasks ? manager.getBeforeTasks() : manager.getAfterTaskList();
      if (tasks.size() > 0) {
        progressIndicator.setText(
          JavaCompilerBundle.message(beforeTasks ?
"progress.executing.precompile.tasks" : "progress.executing.postcompile.tasks"));
        for (CompileTask task : tasks) {
          try {
            if (!task.execute(context)) {
              return false;
            }
          }
          catch (ProcessCanceledException e) {
            throw e;
          }
          catch (Throwable t) {
            // a crashing task does not abort the build; the failure is logged and reported as a message
            LOG.error("Error executing task", t);
            context
              .addMessage(CompilerMessageCategory.INFORMATION, JavaCompilerBundle.message("error.task.0.execution.failed", task.toString()),
                          null, -1, -1);
          }
        }
      }
    }
    finally {
      progressIndicator.popState();
      StatusBar statusBar = WindowManager.getInstance().getStatusBar(myProject);
      if (statusBar != null) {
        statusBar.setInfo("");
      }
    }
    return true;
  }

  // Validates SDKs, output paths and cyclic dependencies for the modules in scope.
  // Returns false (the build must not start) on any validation failure or cancellation.
  private boolean validateCompilerConfiguration(@NotNull final CompileScope scope) {
    try {
      final Module[] scopeModules = scope.getAffectedModules();
      final CompilerManager compilerManager = CompilerManager.getInstance(myProject);
      List<Module> modulesWithSources = ContainerUtil.filter(scopeModules, module -> {
        if (!compilerManager.isValidationEnabled(module)) return false;
        final boolean hasSources = hasSources(module, JavaSourceRootType.SOURCE);
        final boolean hasTestSources = hasSources(module, JavaSourceRootType.TEST_SOURCE);
        if (!hasSources && !hasTestSources) {
          // If module contains no sources, shouldn't have to select JDK or output directory (SCR #19333)
          // todo still there may be problems with this approach if some generated files are attributed by this module
          return false;
        }
        return true;
      });
      if (!validateJdks(modulesWithSources, true)) return false;
      if (!validateOutputs(modulesWithSources)) return false;
      if (!validateCyclicDependencies(scopeModules)) return false;
      return true;
    }
    catch (ProcessCanceledException e) {
      return false;
    }
    catch (Throwable e) {
      LOG.error(e);
      return false;
    }
  }

  // Checks that every module in scope has an SDK assigned. On the first pass
  // (runUnknownSdkCheck == true) it lets CompilerDriverUnknownSdkTracker try to fix
  // the settings and then re-validates; on the second pass it reports the error.
  private boolean validateJdks(@NotNull List<Module> scopeModules, boolean runUnknownSdkCheck) {
    final List<String> modulesWithoutJdkAssigned = new ArrayList<>();
    boolean projectSdkNotSpecified = false;
    for (final Module module : scopeModules) {
      final Sdk jdk =
        ModuleRootManager.getInstance(module).getSdk();
      if (jdk != null) continue;
      projectSdkNotSpecified |= ModuleRootManager.getInstance(module).isSdkInherited();
      modulesWithoutJdkAssigned.add(module.getName());
    }
    if (runUnknownSdkCheck) {
      var result = CompilerDriverUnknownSdkTracker
        .getInstance(myProject)
        .fixSdkSettings(projectSdkNotSpecified, scopeModules, formatModulesList(modulesWithoutJdkAssigned));
      if (result == CompilerDriverUnknownSdkTracker.Outcome.STOP_COMPILE) {
        return false;
      }
      //we do not trust the CompilerDriverUnknownSdkTracker, to extra check has to be done anyways
      return validateJdks(scopeModules, false);
    }
    else {
      if (modulesWithoutJdkAssigned.isEmpty()) return true;
      showNotSpecifiedError("error.jdk.not.specified", projectSdkNotSpecified, modulesWithoutJdkAssigned,
                            JavaCompilerBundle.message("modules.classpath.title"));
      return false;
    }
  }

  // Checks that every module with (test) sources has the corresponding output path configured.
  private boolean validateOutputs(@NotNull List<Module> scopeModules) {
    final List<String> modulesWithoutOutputPathSpecified = new ArrayList<>();
    boolean projectOutputNotSpecified = false;
    for (final Module module : scopeModules) {
      final String outputPath = getModuleOutputPath(module, false);
      final String testsOutputPath = getModuleOutputPath(module, true);
      if (outputPath == null && testsOutputPath == null) {
        CompilerModuleExtension compilerExtension = CompilerModuleExtension.getInstance(module);
        projectOutputNotSpecified |= compilerExtension != null && compilerExtension.isCompilerOutputPathInherited();
        modulesWithoutOutputPathSpecified.add(module.getName());
      }
      else {
        // only complain about a missing path when the module actually has the matching source type
        if (outputPath == null) {
          if (hasSources(module, JavaSourceRootType.SOURCE)) {
            modulesWithoutOutputPathSpecified.add(module.getName());
          }
        }
        if (testsOutputPath == null) {
          if (hasSources(module, JavaSourceRootType.TEST_SOURCE)) {
            modulesWithoutOutputPathSpecified.add(module.getName());
          }
        }
      }
    }
    if (modulesWithoutOutputPathSpecified.isEmpty()) return true;
    showNotSpecifiedError("error.output.not.specified", projectOutputNotSpecified,
                          modulesWithoutOutputPathSpecified, DefaultModuleConfigurationEditorFactory.getInstance().getOutputEditorDisplayName());
    return false;
  }

  // Modules forming a dependency cycle are required to share the same SDK and language level.
  private boolean validateCyclicDependencies(Module[] scopeModules) {
    final List<Chunk<ModuleSourceSet>> chunks = ModuleCompilerUtil.getCyclicDependencies(myProject, Arrays.asList(scopeModules));
    for (final Chunk<ModuleSourceSet> chunk : chunks) {
      final Set<ModuleSourceSet> sourceSets = chunk.getNodes();
      if (sourceSets.size() <= 1) {
        continue; // no need to check one-module chunks
      }
      Sdk jdk = null;
      LanguageLevel languageLevel = null;
      for (final ModuleSourceSet sourceSet : sourceSets) {
        Module module = sourceSet.getModule();
        final Sdk moduleJdk = ModuleRootManager.getInstance(module).getSdk();
        if (jdk == null) {
          jdk = moduleJdk;
        }
        else {
          if (!jdk.equals(moduleJdk)) {
            showCyclicModulesErrorNotification("error.chunk.modules.must.have.same.jdk", ModuleSourceSet.getModules(sourceSets));
            return false;
          }
        }
        LanguageLevel moduleLanguageLevel = LanguageLevelUtil.getEffectiveLanguageLevel(module);
        if (languageLevel == null) {
          languageLevel = moduleLanguageLevel;
        }
        else {
          if (!languageLevel.equals(moduleLanguageLevel)) {
            showCyclicModulesErrorNotification("error.chunk.modules.must.have.same.language.level", ModuleSourceSet.getModules(sourceSets));
            return false;
          }
        }
      }
    }
    return true;
  }

  private void showCyclicModulesErrorNotification(@PropertyKey(resourceBundle = JavaCompilerBundle.BUNDLE) @NotNull String messageId,
                                                  @NotNull Set<? extends Module> modulesInChunk) {
    Module firstModule = ContainerUtil.getFirstItem(modulesInChunk);
    LOG.assertTrue(firstModule != null);
    CompileDriverNotifications.getInstance(myProject)
      .createCannotStartNotification()
      .withContent(JavaCompilerBundle.message(messageId, getModulesString(modulesInChunk)))
      .withOpenSettingsAction(firstModule.getName(), null)
      .showNotification();
  }

  private static String getModulesString(Collection<?
extends Module> modulesInChunk) {
    return StringUtil.join(modulesInChunk, module -> "\"" + module.getName() + "\"", "\n");
  }

  private static boolean hasSources(Module module, final JavaSourceRootType rootType) {
    return !ModuleRootManager.getInstance(module).getSourceRoots(rootType).isEmpty();
  }

  // Reports a "value not specified" configuration error and offers to open the relevant settings editor.
  private void showNotSpecifiedError(@PropertyKey(resourceBundle = JavaCompilerBundle.BUNDLE) @NonNls String resourceId,
                                     boolean notSpecifiedValueInheritedFromProject,
                                     List<String> modules,
                                     String editorNameToSelect) {
    // when the offending value is inherited from the project, no specific module is pre-selected in the settings
    String nameToSelect = notSpecifiedValueInheritedFromProject ? null : ContainerUtil.getFirstItem(modules);
    final String message = JavaCompilerBundle.message(resourceId, modules.size(), formatModulesList(modules));
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      LOG.error(message);
    }
    CompileDriverNotifications.getInstance(myProject)
      .createCannotStartNotification()
      .withContent(message)
      .withOpenSettingsAction(nameToSelect, editorNameToSelect)
      .showNotification();
  }

  // Formats at most 10 module names for an error message, appending an ellipsis element when truncated.
  @NotNull
  private static String formatModulesList(@NotNull List<String> modules) {
    final int maxModulesToShow = 10;
    List<String> actualNamesToInclude = new ArrayList<>(ContainerUtil.getFirstItems(modules, maxModulesToShow));
    if (modules.size() > maxModulesToShow) {
      actualNamesToInclude.add(JavaCompilerBundle.message("error.jdk.module.names.overflow.element.ellipsis"));
    }
    return NlsMessages.formatNarrowAndList(actualNamesToInclude);
  }

  // Maps a build-process message kind onto the IDE's CompilerMessageCategory;
  // unknown kinds fall back to the supplied default.
  public static CompilerMessageCategory convertToCategory(CmdlineRemoteProto.Message.BuilderMessage.CompileMessage.Kind kind,
                                                          CompilerMessageCategory defaultCategory) {
    switch (kind) {
      case ERROR:
      case INTERNAL_BUILDER_ERROR:
        return CompilerMessageCategory.ERROR;
      case WARNING:
        return CompilerMessageCategory.WARNING;
      case INFO:
      case JPS_INFO:
      case OTHER:
        return CompilerMessageCategory.INFORMATION;
      default:
        return defaultCategory;
    }
  }

  // Notification hyperlink handler: activates the build/messages tool window for the
  // build session the notification belongs to, or expires the notification.
  private static class BuildToolWindowActivationListener extends NotificationListener.Adapter {
    // weak reference: the listener must not keep the Project reachable on its own
    private final WeakReference<Project>
      myProjectRef;
    private final Object myContentId;

    BuildToolWindowActivationListener(CompileContextImpl compileContext) {
      myProjectRef = new WeakReference<>(compileContext.getProject());
      myContentId = compileContext.getBuildSession().getContentId();
    }

    @Override
    protected void hyperlinkActivated(@NotNull Notification notification, @NotNull HyperlinkEvent e) {
      final Project project = myProjectRef.get();
      boolean useBuildToolwindow = Registry.is("ide.jps.use.build.tool.window", true);
      String toolWindowId = useBuildToolwindow ? BuildContentManager.TOOL_WINDOW_ID : ToolWindowId.MESSAGES_WINDOW;
      if (project != null && !project.isDisposed()) {
        if (useBuildToolwindow || CompilerMessagesService.showCompilerContent(project, myContentId)) {
          final ToolWindow tw = ToolWindowManager.getInstance(project).getToolWindow(toolWindowId);
          if (tw != null) {
            tw.activate(null, false);
          }
        }
        else {
          notification.expire();
        }
      }
      else {
        // project already gone (collected or disposed) - nothing to show
        notification.expire();
      }
    }
  }
}
package com.culabs.unicomportal.model.db;

import java.util.Date;

/**
 * Row object for the {@code images} database table.
 *
 * <p>Originally produced by MyBatis Generator (2015-05-26); field and accessor
 * names deliberately mirror the underlying column names, so they do not follow
 * Java camel-case conventions.</p>
 */
public class DBImages {
    private Integer id;               // images.id
    private Date created_at;          // images.created_at
    private Date updated_at;          // images.updated_at
    private String name;              // images.name
    private String image_uuid;        // images.image_uuid
    private String disk_format;       // images.disk_format
    private String container_format;  // images.container_format
    private Long size;                // images.size
    private String status;            // images.status
    private Boolean is_public;        // images.is_public
    private Integer min_disk;         // images.min_disk
    private Integer min_ram;          // images.min_ram
    private String base_image_ref;    // images.base_image_ref
    private String architecture;      // images.architecture
    private String os_distro;         // images.os_distro
    private String os_version;        // images.os_version
    private Integer owner_id;         // images.owner_id
    private Integer creator_id;       // images.creator_id
    private Integer job_id;           // images.job_id
    private String description;       // images.description

    /** @return the value of images.id */
    public Integer getId() {
        return id;
    }

    /** @param id the value for images.id */
    public void setId(Integer id) {
        this.id = id;
    }

    /** @return the value of images.created_at */
    public Date getCreated_at() {
        return created_at;
    }

    /** @param created_at the value for images.created_at */
    public void setCreated_at(Date created_at) {
        this.created_at = created_at;
    }

    /** @return the value of images.updated_at */
    public Date getUpdated_at() {
        return updated_at;
    }

    /** @param updated_at the value for images.updated_at */
    public void setUpdated_at(Date updated_at) {
        this.updated_at = updated_at;
    }

    /** @return the value of images.name */
    public String getName() {
        return name;
    }

    /** @param name the value for images.name */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the value of images.image_uuid */
    public String getImage_uuid() {
        return image_uuid;
    }

    /** @param image_uuid the value for images.image_uuid */
    public void setImage_uuid(String image_uuid) {
        this.image_uuid = image_uuid;
    }

    /** @return the value of images.disk_format */
    public String getDisk_format() {
        return disk_format;
    }

    /** @param disk_format the value for images.disk_format */
    public void setDisk_format(String disk_format) {
        this.disk_format = disk_format;
    }

    /** @return the value of images.container_format */
    public String getContainer_format() {
        return container_format;
    }

    /** @param container_format the value for images.container_format */
    public void setContainer_format(String container_format) {
        this.container_format = container_format;
    }

    /** @return the value of images.size */
    public Long getSize() {
        return size;
    }

    /** @param size the value for images.size */
    public void setSize(Long size) {
        this.size = size;
    }

    /** @return the value of images.status */
    public String getStatus() {
        return status;
    }

    /** @param status the value for images.status */
    public void setStatus(String status) {
        this.status = status;
    }

    /** @return the value of images.is_public */
    public Boolean getIs_public() {
        return is_public;
    }

    /** @param is_public the value for images.is_public */
    public void setIs_public(Boolean is_public) {
        this.is_public = is_public;
    }

    /** @return the value of images.min_disk */
    public Integer getMin_disk() {
        return min_disk;
    }

    /** @param min_disk the value for images.min_disk */
    public void setMin_disk(Integer min_disk) {
        this.min_disk = min_disk;
    }

    /** @return the value of images.min_ram */
    public Integer getMin_ram() {
        return min_ram;
    }

    /** @param min_ram the value for images.min_ram */
    public void setMin_ram(Integer min_ram) {
        this.min_ram = min_ram;
    }

    /** @return the value of images.base_image_ref */
    public String getBase_image_ref() {
        return base_image_ref;
    }

    /** @param base_image_ref the value for images.base_image_ref */
    public void setBase_image_ref(String base_image_ref) {
        this.base_image_ref = base_image_ref;
    }

    /** @return the value of images.architecture */
    public String getArchitecture() {
        return architecture;
    }

    /** @param architecture the value for images.architecture */
    public void setArchitecture(String architecture) {
        this.architecture = architecture;
    }

    /** @return the value of images.os_distro */
    public String getOs_distro() {
        return os_distro;
    }

    /** @param os_distro the value for images.os_distro */
    public void setOs_distro(String os_distro) {
        this.os_distro = os_distro;
    }

    /** @return the value of images.os_version */
    public String getOs_version() {
        return os_version;
    }

    /** @param os_version the value for images.os_version */
    public void setOs_version(String os_version) {
        this.os_version = os_version;
    }

    /** @return the value of images.owner_id */
    public Integer getOwner_id() {
        return owner_id;
    }

    /** @param owner_id the value for images.owner_id */
    public void setOwner_id(Integer owner_id) {
        this.owner_id = owner_id;
    }

    /** @return the value of images.creator_id */
    public Integer getCreator_id() {
        return creator_id;
    }

    /** @param creator_id the value for images.creator_id */
    public void setCreator_id(Integer creator_id) {
        this.creator_id = creator_id;
    }

    /** @return the value of images.job_id */
    public Integer getJob_id() {
        return job_id;
    }

    /** @param job_id the value for images.job_id */
    public void setJob_id(Integer job_id) {
        this.job_id = job_id;
    }

    /** @return the value of images.description */
    public String getDescription() {
        return description;
    }

    /**
     * This method was generated by MyBatis Generator.
* This method sets the value of the database column images.description * * @param description the value for images.description * * @mbggenerated Tue May 26 15:53:09 CST 2015 */ public void setDescription(String description) { this.description = description; } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.httppanel.view.paramtable; import java.awt.BorderLayout; import java.awt.Component; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.LinkedList; import java.util.List; import javax.swing.DefaultCellEditor; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.table.TableCellRenderer; import javax.swing.table.TableColumn; import org.apache.commons.configuration.FileConfiguration; import org.parosproxy.paros.Constant; import org.parosproxy.paros.network.HtmlParameter; import org.parosproxy.paros.network.HttpMessage; import org.zaproxy.zap.extension.httppanel.Message; import org.zaproxy.zap.extension.httppanel.view.HttpPanelView; import org.zaproxy.zap.extension.httppanel.view.HttpPanelViewModel; import org.zaproxy.zap.extension.httppanel.view.HttpPanelViewModelEvent; import org.zaproxy.zap.extension.httppanel.view.HttpPanelViewModelListener; import org.zaproxy.zap.extension.httppanel.view.paramtable.addins.ParamAddinInterface; import org.zaproxy.zap.extension.httppanel.view.paramtable.addins.ParamAddinMagic; import org.zaproxy.zap.extension.httppanel.view.paramtable.addins.ParamAddinUrlencode; import 
org.zaproxy.zap.utils.DisplayUtils;

/**
 * Abstract HTTP panel view that presents request/response parameters in an editable
 * {@link JTable}. Column 0 holds the parameter type (edited through the combo box supplied
 * by {@link #getComboBoxTypes()}); an optional fourth column (index 3) offers "add-in"
 * transformations (magic token, URL-encode) applied to a parameter value.
 *
 * <p>Subclasses provide the concrete parameter-type combo box; the table model supplied at
 * construction time is responsible for (de)serializing parameters from the {@link HttpMessage}.
 */
public abstract class HttpPanelParamTableView implements HttpPanelView, HttpPanelViewModelListener {

    public static final String NAME = "HttpPanelParamTableView";

    private static final String CAPTION_NAME = Constant.messages.getString("http.panel.view.tablev2.name");
    private static final String ADD_INS = Constant.messages.getString("http.panel.view.tableparam.addins");

    private JTable table;
    private JPanel mainPanel;
    private HttpPanelParamTableModel httpPanelTabularModel;
    // View starts read-only; toggled through setEditable(boolean).
    private boolean isEditable = false;
    private List<ParamAddinInterface> addins;
    private JComboBox<Object> comboBoxAddIns;
    private HttpPanelViewModel model;

    /**
     * Creates the view, builds the Swing components, wires the add-ins column and registers
     * this view as a listener of the given model.
     *
     * @param model the view model that supplies the {@link Message} being displayed
     * @param tableModel the table model backing the parameter table
     */
    public HttpPanelParamTableView(HttpPanelViewModel model, HttpPanelParamTableModel tableModel) {
        this.httpPanelTabularModel = tableModel;
        this.model = model;
        init();
        initAddins();
        this.model.addHttpPanelViewModelListener(this);
    }

    /** Builds the table and the surrounding scroll pane / main panel. */
    private void init() {
        // Table
        table = new JTable();
        table.setName("");
        table.setModel(httpPanelTabularModel);
        table.setGridColor(java.awt.Color.gray);
        table.setIntercellSpacing(new java.awt.Dimension(1, 1));
        table.setRowHeight(DisplayUtils.getScaledSize(18));

        // Issue 954: Force the JTable cell to auto-save when the focus changes.
        // Example, edit cell, click OK for a panel dialog box, the data will get saved.
        table.putClientProperty("terminateEditOnFocusLost", Boolean.TRUE);

        // Set standard row width's
        TableColumn column = table.getColumnModel().getColumn(0);
        column.setPreferredWidth(70);
        column.setWidth(70);
        column.setMaxWidth(70);
        // Only size the add-ins column when the model actually exposes it.
        if (table.getColumnCount() == 4) {
            column = table.getColumnModel().getColumn(3);
            column.setPreferredWidth(150);
            column.setWidth(150);
            column.setMaxWidth(150);
        }

        // Main panel
        mainPanel = new JPanel(new BorderLayout());
        mainPanel.add(new JScrollPane(table), BorderLayout.CENTER);
    }

    /**
     * Registers the available add-ins, populates the add-ins combo box (first item is the
     * localized caption acting as a "no-op" placeholder) and installs the column editors.
     */
    private void initAddins() {
        // Get all addins
        addins = new LinkedList<>();
        addins.add(new ParamAddinMagic());
        addins.add(new ParamAddinUrlencode());

        comboBoxAddIns = new JComboBox<>();
        comboBoxAddIns.addItem(ADD_INS);
        for (ParamAddinInterface addin : addins) {
            comboBoxAddIns.addItem(addin);
        }
        comboBoxAddIns.addActionListener(new ComboBoxAddinsActionListener());

        table.getColumnModel().getColumn(0).setCellEditor(new DefaultCellEditor(getComboBoxTypes()));
        table.setAutoResizeMode(JTable.AUTO_RESIZE_NEXT_COLUMN);

        // Models without an add-ins column get no editor/renderer for column 3.
        if (table.getColumnCount() != 4) {
            return;
        }
        table.getColumnModel().getColumn(3).setCellEditor(new DefaultCellEditor(comboBoxAddIns));
        table.getColumnModel().getColumn(3).setCellRenderer(new ComboBoxCellRenderer(comboBoxAddIns));
    }

    /** @return the combo box used to edit the parameter-type column (column 0) */
    public abstract JComboBox<HtmlParameter.Type> getComboBoxTypes();

    @Override
    public void dataChanged(HttpPanelViewModelEvent e) {
        // FIXME(This view should ask for a specific model based on HttpMessage)
        httpPanelTabularModel.setHttpMessage((HttpMessage) model.getMessage());
    }

    /** Writes any pending table edits back into the underlying message. */
    @Override
    public void save() {
        httpPanelTabularModel.save();
    }

    @Override
    public void setSelected(boolean selected) {
        if (selected) {
            table.requestFocusInWindow();
        }
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public String getCaptionName() {
        return CAPTION_NAME;
    }

    @Override
    public int getPosition() {
        return 10;
    }

    /** This view is available for every message. */
    @Override
    public boolean isEnabled(Message msg) {
        return true;
    }

    @Override
    public boolean hasChanged() {
        return httpPanelTabularModel.hasChanged();
    }

    @Override
    public JComponent getPane() {
        return mainPanel;
    }

    @Override
    public boolean isEditable() {
        return isEditable;
    }

    /**
     * Toggles edit mode: the add-ins column (index 3) is removed while read-only and
     * re-added (with its renderer/editor) when editing is enabled.
     *
     * NOTE(review): this assumes the model exposes 4 columns; initAddins() tolerates
     * 3-column models but this method does not — confirm subclasses always use 4 columns.
     */
    @Override
    public void setEditable(boolean editable) {
        if (isEditable != editable) {
            if (isEditable) {
                table.getColumnModel().removeColumn(table.getColumnModel().getColumn(3));
            } else {
                TableColumn column = new TableColumn(3, 150,
                        new ComboBoxCellRenderer(comboBoxAddIns), new DefaultCellEditor(comboBoxAddIns));
                column.setPreferredWidth(150);
                column.setMaxWidth(150);
                table.addColumn(column);
            }
            isEditable = editable;
            httpPanelTabularModel.setEditable(editable);
        }
    }

    @Override
    public HttpPanelViewModel getModel() {
        return model;
    }

    @Override
    public void setParentConfigurationKey(String configurationKey) {
    }

    @Override
    public void loadConfiguration(FileConfiguration configuration) {
    }

    @Override
    public void saveConfiguration(FileConfiguration configuration) {
    }

    /** Resets the add-ins combo box to its caption item after an add-in is chosen. */
    private static final class ComboBoxAddinsActionListener implements ActionListener {

        @Override
        public void actionPerformed(ActionEvent e) {
            ((JComboBox<?>) e.getSource()).setSelectedIndex(0);
        }
    }

    /** Renders the add-ins column as a combo box permanently showing the caption item. */
    private static final class ComboBoxCellRenderer extends JComboBox<Object> implements TableCellRenderer {

        private static final long serialVersionUID = 7945388210094363435L;

        public ComboBoxCellRenderer(JComboBox<Object> comboBox) {
            this.addItem(comboBox.getModel().getElementAt(0));
        }

        @Override
        public Component getTableCellRendererComponent(JTable table, Object value,
                boolean isSelected, boolean hasFocus, int row, int column) {
            return this;
        }
    }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.lang;

import com.intellij.lang.impl.PsiBuilderImpl;
import com.intellij.lexer.Lexer;
import com.intellij.lexer.LexerBase;
import com.intellij.openapi.project.Project;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.TokenType;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.impl.source.tree.ASTStructure;
import com.intellij.psi.tree.*;
import com.intellij.testFramework.LightPlatformTestCase;
import com.intellij.testFramework.PlatformTestCase;
import com.intellij.util.ThreeState;
import com.intellij.util.diff.DiffTree;
import com.intellij.util.diff.DiffTreeChangeBuilder;
import com.intellij.util.diff.FlyweightCapableTreeStructure;
import com.intellij.util.diff.ShallowNodeComparator;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.List;

/**
 * Quick tests for {@link PsiBuilder}: each test parses a small string with a tiny
 * single-character lexer (see {@link MyTestLexer}), builds both the "light" tree and the
 * heavy AST, and compares their debug renderings against an expected string. {@link #doTest}
 * additionally diffs the heavy tree against the light tree and fails on any reported change;
 * {@link #doFailTest} asserts that builder misuse raises the expected assertion message.
 */
public class PsiBuilderQuickTest extends LightPlatformTestCase {
  // Element types used by the toy grammar. The lexer maps each input character to
  // LETTER / DIGIT / WHITE_SPACE / COMMENT ('#') / OTHER.
  private static final IFileElementType ROOT = new IFileElementType("ROOT", Language.ANY);

  private static final IElementType LETTER = new IElementType("LETTER", Language.ANY);
  private static final IElementType DIGIT = new IElementType("DIGIT", Language.ANY);
  private static final IElementType OTHER = new IElementType("OTHER", Language.ANY);
  private static final IElementType COLLAPSED = new IElementType("COLLAPSED", Language.ANY);
  // Left-bound composites attach leading whitespace differently; used by the balancing tests.
  private static final IElementType LEFT_BOUND = new IElementType("LEFT_BOUND", Language.ANY) {
    @Override
    public boolean isLeftBound() {
      return true;
    }
  };
  private static final IElementType COMMENT = new IElementType("COMMENT", Language.ANY);

  private static final TokenSet WHITESPACE_SET = TokenSet.create(TokenType.WHITE_SPACE);
  private static final TokenSet COMMENT_SET = TokenSet.create(COMMENT);

  @SuppressWarnings("JUnitTestCaseWithNonTrivialConstructors")
  public PsiBuilderQuickTest() {
    PlatformTestCase.initPlatformLangPrefix();
  }

  /** Plain token stream with no markers other than the root. */
  public void testPlain() {
    doTest("a<<b", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        while (builder.getTokenType() != null) {
          builder.advanceLexer();
        }
      }
    }, "Element(ROOT)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiElement(OTHER)('<')\n" +
       " PsiElement(OTHER)('<')\n" +
       " PsiElement(LETTER)('b')\n"
    );
  }

  /** Nested and empty composite markers. */
  public void testComposites() {
    doTest("1(a(b)c)2(d)3", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        PsiBuilderUtil.advance(builder, 1);
        final PsiBuilder.Marker marker1 = builder.mark();
        PsiBuilderUtil.advance(builder, 2);
        final PsiBuilder.Marker marker2 = builder.mark();
        PsiBuilderUtil.advance(builder, 3);
        marker2.done(OTHER);
        PsiBuilderUtil.advance(builder, 2);
        marker1.done(OTHER);
        PsiBuilderUtil.advance(builder, 1);
        final PsiBuilder.Marker marker3 = builder.mark();
        PsiBuilderUtil.advance(builder, 1);
        builder.mark().done(OTHER);  // empty composite
        PsiBuilderUtil.advance(builder, 2);
        marker3.done(OTHER);
        PsiBuilderUtil.advance(builder, 1);
      }
    }, "Element(ROOT)\n" +
       " PsiElement(DIGIT)('1')\n" +
       " Element(OTHER)\n" +
       " PsiElement(OTHER)('(')\n" +
       " PsiElement(LETTER)('a')\n" +
       " Element(OTHER)\n" +
       " PsiElement(OTHER)('(')\n" +
       " PsiElement(LETTER)('b')\n" +
       " PsiElement(OTHER)(')')\n" +
       " PsiElement(LETTER)('c')\n" +
       " PsiElement(OTHER)(')')\n" +
       " PsiElement(DIGIT)('2')\n" +
       " Element(OTHER)\n" +
       " PsiElement(OTHER)('(')\n" +
       " Element(OTHER)\n" +
       " <empty list>\n" +
       " PsiElement(LETTER)('d')\n" +
       " PsiElement(OTHER)(')')\n" +
       " PsiElement(DIGIT)('3')\n"
    );
  }

  /** Marker.collapse() folds a token range into a single leaf. */
  public void testCollapse() {
    doTest("a<<>>b", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        PsiBuilderUtil.advance(builder, 1);
        final PsiBuilder.Marker marker1 = builder.mark();
        PsiBuilderUtil.advance(builder, 2);
        marker1.collapse(COLLAPSED);
        final PsiBuilder.Marker marker2 = builder.mark();
        PsiBuilderUtil.advance(builder, 2);
        marker2.collapse(COLLAPSED);
        PsiBuilderUtil.advance(builder, 1);
      }
    }, "Element(ROOT)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiElement(COLLAPSED)('<<')\n" +
       " PsiElement(COLLAPSED)('>>')\n" +
       " PsiElement(LETTER)('b')\n"
    );
  }

  /** Marker.error() produces a PsiErrorElement in place of a composite. */
  public void testDoneAndError() {
    doTest("a2b", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        IElementType tokenType;
        while ((tokenType = builder.getTokenType()) != null) {
          final PsiBuilder.Marker marker = builder.mark();
          builder.advanceLexer();
          if (tokenType == DIGIT) marker.error("no digits allowed");
          else marker.done(tokenType);
        }
      }
    }, "Element(ROOT)\n" +
       " Element(LETTER)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiErrorElement:no digits allowed\n" +
       " PsiElement(DIGIT)('2')\n" +
       " Element(LETTER)\n" +
       " PsiElement(LETTER)('b')\n");
  }

  /** precede() and doneBefore() insert composites before an already-done marker. */
  public void testPrecedeAndDoneBefore() {
    doTest("ab", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        final PsiBuilder.Marker marker1 = builder.mark();
        builder.advanceLexer();
        final PsiBuilder.Marker marker2 = builder.mark();
        builder.advanceLexer();
        marker2.done(OTHER);
        marker2.precede().doneBefore(COLLAPSED, marker2);
        marker1.doneBefore(COLLAPSED, marker2, "with error");
      }
    }, "Element(ROOT)\n" +
       " Element(COLLAPSED)\n" +
       " PsiElement(LETTER)('a')\n" +
       " Element(COLLAPSED)\n" +
       " <empty list>\n" +
       " PsiErrorElement:with error\n" +
       " <empty list>\n" +
       " Element(OTHER)\n" +
       " PsiElement(LETTER)('b')\n");
  }

  /** errorBefore() inserts an error element before a done marker. */
  public void testErrorBefore() {
    doTest("a1", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        final PsiBuilder.Marker letter = builder.mark();
        builder.advanceLexer();
        letter.done(LETTER);
        final PsiBuilder.Marker digit = builder.mark();
        builder.advanceLexer();
        digit.done(DIGIT);
        digit.precede().errorBefore("something lost", digit);
      }
    }, "Element(ROOT)\n" +
       " Element(LETTER)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiErrorElement:something lost\n" +
       " <empty list>\n" +
       " Element(DIGIT)\n" +
       " PsiElement(DIGIT)('1')\n");
  }

  // The following tests verify the builder's debug-mode misuse diagnostics.

  public void testValidityChecksOnDone() {
    doFailTest("a", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        final PsiBuilder.Marker first = builder.mark();
        builder.advanceLexer();
        builder.mark();  // left not done on purpose
        first.done(LETTER);
      }
    }, "Another not done marker added after this one. Must be done before this.");
  }

  public void testValidityChecksOnDoneBefore1() {
    doFailTest("a", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        final PsiBuilder.Marker first = builder.mark();
        builder.advanceLexer();
        final PsiBuilder.Marker second = builder.mark();
        second.precede();  // left not done on purpose
        first.doneBefore(LETTER, second);
      }
    }, "Another not done marker added after this one. Must be done before this.");
  }

  public void testValidityChecksOnDoneBefore2() {
    doFailTest("a", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        final PsiBuilder.Marker first = builder.mark();
        builder.advanceLexer();
        final PsiBuilder.Marker second = builder.mark();
        second.doneBefore(LETTER, first);  // wrong order on purpose
      }
    }, "'Before' marker precedes this one.");
  }

  public void testValidityChecksOnTreeBuild1() {
    doFailTest("aa", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        while (!builder.eof()) builder.advanceLexer();  // no markers at all
      }
    }, "Parser produced no markers. Text:\naa");
  }

  public void testValidityChecksOnTreeBuild2() {
    doFailTest("aa", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        final PsiBuilder.Marker marker = builder.mark();
        builder.advanceLexer();
        marker.done(LETTER);  // second token never consumed into the tree
      }
    }, "Tokens [LETTER] were not inserted into the tree. Text:\naa");
  }

  public void testValidityChecksOnTreeBuild3() {
    doFailTest("a ", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        final PsiBuilder.Marker marker = builder.mark();
        builder.advanceLexer();
        marker.done(LETTER);
        while (!builder.eof()) builder.advanceLexer();  // trailing space outside the root
      }
    }, "Tokens [WHITE_SPACE] are outside of root element \"LETTER\". Text:\na ");
  }

  /** Whitespace between composites stays outside of them. */
  public void testWhitespaceTrimming() {
    doTest(" a b ", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        PsiBuilder.Marker marker = builder.mark();
        builder.advanceLexer();
        marker.done(OTHER);
        marker = builder.mark();
        builder.advanceLexer();
        marker.done(OTHER);
        builder.advanceLexer();
      }
    }, "Element(ROOT)\n" +
       " PsiWhiteSpace(' ')\n" +
       " Element(OTHER)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiWhiteSpace(' ')\n" +
       " Element(OTHER)\n" +
       " PsiElement(LETTER)('b')\n" +
       " PsiWhiteSpace(' ')\n");
  }

  /** Error elements at composite edges don't swallow surrounding whitespace. */
  public void testWhitespaceBalancingByErrors() {
    doTest("a b c", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        PsiBuilder.Marker marker = builder.mark();
        builder.advanceLexer();
        builder.error("error 1");
        marker.done(OTHER);
        marker = builder.mark();
        builder.advanceLexer();
        builder.mark().error("error 2");
        marker.done(OTHER);
        marker = builder.mark();
        builder.advanceLexer();
        marker.error("error 3");
      }
    }, "Element(ROOT)\n" +
       " Element(OTHER)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiErrorElement:error 1\n" +
       " <empty list>\n" +
       " PsiWhiteSpace(' ')\n" +
       " Element(OTHER)\n" +
       " PsiElement(LETTER)('b')\n" +
       " PsiErrorElement:error 2\n" +
       " <empty list>\n" +
       " PsiWhiteSpace(' ')\n" +
       " PsiErrorElement:error 3\n" +
       " PsiElement(LETTER)('c')\n");
  }

  /** Empty composites at edges: left-bound types bind before the whitespace. */
  public void testWhitespaceBalancingByEmptyComposites() {
    doTest("a b c", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        PsiBuilder.Marker marker = builder.mark();
        builder.advanceLexer();
        builder.mark().done(OTHER);
        marker.done(OTHER);
        marker = builder.mark();
        builder.advanceLexer();
        builder.mark().done(LEFT_BOUND);
        marker.done(OTHER);
        builder.advanceLexer();
      }
    }, "Element(ROOT)\n" +
       " Element(OTHER)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiWhiteSpace(' ')\n" +
       " Element(OTHER)\n" +
       " <empty list>\n" +
       " Element(OTHER)\n" +
       " PsiElement(LETTER)('b')\n" +
       " Element(LEFT_BOUND)\n" +
       " <empty list>\n" +
       " PsiWhiteSpace(' ')\n" +
       " PsiElement(LETTER)('c')\n");
  }

  /** Custom edge binders pull surrounding comments into the composite. */
  public void testCustomEdgeProcessors() {
    // Extends the left edge back to the nearest preceding comment.
    final WhitespacesAndCommentsBinder leftEdgeProcessor = new WhitespacesAndCommentsBinder() {
      @Override
      public int getEdgePosition(List<IElementType> tokens, boolean atStreamEdge, TokenTextGetter getter) {
        int pos = tokens.size() - 1;
        while (tokens.get(pos) != COMMENT && pos > 0) pos--;
        return pos;
      }
    };
    // Extends the right edge forward past the nearest following comment.
    final WhitespacesAndCommentsBinder rightEdgeProcessor = new WhitespacesAndCommentsBinder() {
      @Override
      public int getEdgePosition(List<IElementType> tokens, boolean atStreamEdge, TokenTextGetter getter) {
        int pos = 0;
        while (tokens.get(pos) != COMMENT && pos < tokens.size() - 1) pos++;
        return pos + 1;
      }
    };

    doTest("{ # i # }", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        while (builder.getTokenType() != LETTER) builder.advanceLexer();
        final PsiBuilder.Marker marker = builder.mark();
        builder.advanceLexer();
        marker.done(OTHER);
        marker.setCustomEdgeTokenBinders(leftEdgeProcessor, rightEdgeProcessor);
        while (builder.getTokenType() != null) builder.advanceLexer();
      }
    }, "Element(ROOT)\n" +
       " PsiElement(OTHER)('{')\n" +
       " PsiWhiteSpace(' ')\n" +
       " Element(OTHER)\n" +
       " PsiElement(COMMENT)('#')\n" +
       " PsiWhiteSpace(' ')\n" +
       " PsiElement(LETTER)('i')\n" +
       " PsiWhiteSpace(' ')\n" +
       " PsiElement(COMMENT)('#')\n" +
       " PsiWhiteSpace(' ')\n" +
       " PsiElement(OTHER)('}')\n");
  }

  /** Base type for lazy-parseable ("chameleon") elements reparsed by the tests below. */
  private abstract static class MyLazyElementType extends ILazyParseableElementType implements ILightLazyParseableElementType {
    protected MyLazyElementType(@NonNls String debugName) {
      super(debugName, Language.ANY);
    }
  }

  /** Nested chameleons expand identically in the light and heavy trees. */
  public void testLightChameleon() {
    final IElementType CHAMELEON_2 = new MyChameleon2Type();
    final IElementType CHAMELEON_1 = new MyChameleon1Type(CHAMELEON_2);

    doTest("ab{12[.?]}cd{x}", new Parser() {
      @Override
      public void parse(PsiBuilder builder) {
        PsiBuilderUtil.advance(builder, 2);
        PsiBuilder.Marker chameleon = builder.mark();
        PsiBuilderUtil.advance(builder, 8);
        chameleon.collapse(CHAMELEON_1);
        PsiBuilderUtil.advance(builder, 2);
        chameleon = builder.mark();
        PsiBuilderUtil.advance(builder, 3);
        chameleon.collapse(CHAMELEON_1);
      }
    }, "Element(ROOT)\n" +
       " PsiElement(LETTER)('a')\n" +
       " PsiElement(LETTER)('b')\n" +
       " Element(CHAMELEON_1)\n" +
       " PsiElement(OTHER)('{')\n" +
       " PsiElement(DIGIT)('1')\n" +
       " PsiElement(DIGIT)('2')\n" +
       " Element(OTHER)\n" +
       " Element(CHAMELEON_2)\n" +
       " PsiElement(OTHER)('[')\n" +
       " PsiElement(OTHER)('.')\n" +
       " PsiErrorElement:test error 2\n" +
       " PsiElement(OTHER)('?')\n" +
       " PsiElement(OTHER)(']')\n" +
       " PsiErrorElement:test error 1\n" +
       " <empty list>\n" +
       " PsiElement(OTHER)('}')\n" +
       " PsiElement(LETTER)('c')\n" +
       " PsiElement(LETTER)('d')\n" +
       " Element(CHAMELEON_1)\n" +
       " PsiElement(OTHER)('{')\n" +
       " PsiElement(LETTER)('x')\n" +
       " PsiElement(OTHER)('}')\n");
  }

  /**
   * Builds a PsiBuilderImpl over the given text using a stub ParserDefinition whose only
   * meaningful members are the test lexer and the whitespace/comment token sets.
   */
  @SuppressWarnings("ConstantConditions")
  private static PsiBuilderImpl createBuilder(CharSequence text) {
    ParserDefinition parserDefinition = new ParserDefinition() {
      @NotNull
      @Override
      public Lexer createLexer(Project project) {
        return new MyTestLexer();
      }

      @Override
      public PsiParser createParser(Project project) {
        return null;
      }

      @Override
      public IFileElementType getFileNodeType() {
        return null;
      }

      @NotNull
      @Override
      public TokenSet getWhitespaceTokens() {
        return WHITESPACE_SET;
      }

      @NotNull
      @Override
      public TokenSet getCommentTokens() {
        return COMMENT_SET;
      }

      @NotNull
      @Override
      public TokenSet getStringLiteralElements() {
        return null;
      }

      @NotNull
      @Override
      public PsiElement createElement(ASTNode node) {
        return null;
      }

      @Override
      public PsiFile createFile(FileViewProvider viewProvider) {
        return null;
      }

      @Override
      public SpaceRequirements spaceExistanceTypeBetweenTokens(ASTNode left, ASTNode right) {
        return null;
      }
    };
    return new PsiBuilderImpl(getProject(), null, parserDefinition, parserDefinition.createLexer(getProject()), null, text, null, null);
  }

  /** Callback that drives the builder for one test case. */
  private interface Parser {
    void parse(PsiBuilder builder);
  }

  /**
   * Parses {@code text} with {@code parser} and asserts that the light tree (taken twice to
   * check re-entrancy), the heavy tree, and a heavy-vs-light diff all agree with
   * {@code expected}.
   */
  private static void doTest(@NonNls final String text, final Parser parser, @NonNls final String expected) {
    final PsiBuilder builder = createBuilder(text);
    final PsiBuilder.Marker rootMarker = builder.mark();
    parser.parse(builder);
    rootMarker.done(ROOT);

    // check light tree composition
    final FlyweightCapableTreeStructure<LighterASTNode> lightTree = builder.getLightTree();
    assertEquals(expected, DebugUtil.lightTreeToString(lightTree, false));
    // verify that light tree can be taken multiple times
    final FlyweightCapableTreeStructure<LighterASTNode> lightTree2 = builder.getLightTree();
    assertEquals(expected, DebugUtil.lightTreeToString(lightTree2, false));

    // check heavy tree composition
    final ASTNode root = builder.getTreeBuilt();
    assertEquals(expected, DebugUtil.nodeTreeToString(root, false));

    // check heavy vs. light tree merging
    final PsiBuilder builder2 = createBuilder(text);
    final PsiBuilder.Marker rootMarker2 = builder2.mark();
    parser.parse(builder2);
    rootMarker2.done(ROOT);
    DiffTree.diff(
      new ASTStructure(root), builder2.getLightTree(),
      new ShallowNodeComparator<ASTNode, LighterASTNode>() {
        @Override
        public ThreeState deepEqual(ASTNode oldNode, LighterASTNode newNode) {
          return ThreeState.UNSURE;
        }

        @Override
        public boolean typesEqual(ASTNode oldNode, LighterASTNode newNode) {
          return true;
        }

        @Override
        public boolean hashCodesEqual(ASTNode oldNode, LighterASTNode newNode) {
          return true;
        }
      },
      new DiffTreeChangeBuilder<ASTNode, LighterASTNode>() {
        @Override
        public void nodeReplaced(@NotNull ASTNode oldChild, @NotNull LighterASTNode newChild) {
          fail("replaced(" + oldChild + "," + newChild.getTokenType() + ")");
        }

        @Override
        public void nodeDeleted(@NotNull ASTNode oldParent, @NotNull ASTNode oldNode) {
          fail("deleted(" + oldParent + "," + oldNode + ")");
        }

        @Override
        public void nodeInserted(@NotNull ASTNode oldParent, @NotNull LighterASTNode newNode, int pos) {
          fail("inserted(" + oldParent + "," + newNode.getTokenType() + ")");
        }
      }
    );
  }

  /**
   * Runs {@code parser} in debug mode and asserts that tree building fails with exactly
   * {@code expected} as the assertion message. stderr is silenced for the duration since the
   * builder logs the failure before throwing.
   */
  private static void doFailTest(@NonNls final String text, final Parser parser, @NonNls final String expected) {
    final PrintStream std = System.err;
    //noinspection IOResourceOpenedButNotSafelyClosed
    System.setErr(new PrintStream(new NullStream()));
    try {
      try {
        ParserDefinition parserDefinition = new ParserDefinition() {
          @NotNull
          @Override
          public Lexer createLexer(Project project) {
            return null;
          }

          @Override
          public PsiParser createParser(Project project) {
            return null;
          }

          @Override
          public IFileElementType getFileNodeType() {
            return null;
          }

          @NotNull
          @Override
          public TokenSet getWhitespaceTokens() {
            return TokenSet.EMPTY;
          }

          @NotNull
          @Override
          public TokenSet getCommentTokens() {
            return TokenSet.EMPTY;
          }

          @NotNull
          @Override
          public TokenSet getStringLiteralElements() {
            return null;
          }

          @NotNull
          @Override
          public PsiElement createElement(ASTNode node) {
            return null;
          }

          @Override
          public PsiFile createFile(FileViewProvider viewProvider) {
            return null;
          }

          @Override
          public SpaceRequirements spaceExistanceTypeBetweenTokens(ASTNode left, ASTNode right) {
            return null;
          }
        };
        final PsiBuilder builder = PsiBuilderFactory.getInstance().createBuilder(parserDefinition, new MyTestLexer(), text);
        builder.setDebugMode(true);
        parser.parse(builder);
        builder.getLightTree();
        fail("should fail");
      }
      catch (AssertionError e) {
        assertEquals(expected, e.getMessage());
      }
    }
    finally {
      System.setErr(std);
    }
  }

  /** One-character-per-token lexer: LETTER, DIGIT, WHITE_SPACE, COMMENT ('#') or OTHER. */
  private static class MyTestLexer extends LexerBase {
    private CharSequence myBuffer = "";
    private int myIndex = 0;
    private int myBufferEnd = 1;

    @Override
    public void start(CharSequence buffer, int startOffset, int endOffset, int initialState) {
      myBuffer = buffer.subSequence(startOffset, endOffset);
      myIndex = 0;
      myBufferEnd = myBuffer.length();
    }

    @Override
    public int getState() {
      return 0;
    }

    @Override
    public IElementType getTokenType() {
      if (myIndex >= myBufferEnd) return null;
      else if (Character.isLetter(myBuffer.charAt(myIndex))) return LETTER;
      else if (Character.isDigit(myBuffer.charAt(myIndex))) return DIGIT;
      else if (Character.isWhitespace(myBuffer.charAt(myIndex))) return TokenType.WHITE_SPACE;
      else if (myBuffer.charAt(myIndex) == '#') return COMMENT;
      else return OTHER;
    }

    @Override
    public int getTokenStart() {
      return myIndex;
    }

    @Override
    public int getTokenEnd() {
      return myIndex + 1;
    }

    @Override
    public void advance() {
      if (myIndex < myBufferEnd) myIndex++;
    }

    @Override
    public CharSequence getBufferSequence() {
      return myBuffer;
    }

    @Override
    public int getBufferEnd() {
      return myBufferEnd;
    }
  }

  /** Discards everything written to it; used to silence stderr in doFailTest(). */
  private static class NullStream extends OutputStream {
    @Override
    public void write(final int b) throws IOException { }
  }

  /**
   * Outer chameleon: reparses its text, wrapping each bracketed "[...]" section into an
   * OTHER composite whose bracket contents collapse into CHAMELEON_2, and emitting
   * "test error 1" after each closed section.
   */
  private static class MyChameleon1Type extends MyLazyElementType {
    private final IElementType myCHAMELEON_2;

    public MyChameleon1Type(IElementType CHAMELEON_2) {
      super("CHAMELEON_1");
      myCHAMELEON_2 = CHAMELEON_2;
    }

    @Override
    public FlyweightCapableTreeStructure<LighterASTNode> parseContents(LighterLazyParseableNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getLightTree();
    }

    @Override
    public ASTNode parseContents(ASTNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getTreeBuilt().getFirstChildNode();
    }

    public void parse(PsiBuilder builder) {
      final PsiBuilder.Marker root = builder.mark();
      PsiBuilder.Marker nested = null;
      while (!builder.eof()) {
        final String token = builder.getTokenText();
        if ("[".equals(token) && nested == null) {
          nested = builder.mark();
        }
        builder.advanceLexer();
        if ("]".equals(token) && nested != null) {
          nested.collapse(myCHAMELEON_2);
          nested.precede().done(OTHER);
          nested = null;
          builder.error("test error 1");
        }
      }
      if (nested != null) nested.drop();  // unterminated section
      root.done(this);
    }
  }

  /** Inner chameleon: reparses its text, flagging each '?' with "test error 2". */
  private static class MyChameleon2Type extends MyLazyElementType {
    public MyChameleon2Type() {
      super("CHAMELEON_2");
    }

    @Override
    public FlyweightCapableTreeStructure<LighterASTNode> parseContents(LighterLazyParseableNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getLightTree();
    }

    @Override
    public ASTNode parseContents(ASTNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getTreeBuilt().getFirstChildNode();
    }

    public void parse(PsiBuilder builder) {
      final PsiBuilder.Marker root = builder.mark();
      PsiBuilder.Marker error = null;
      while (!builder.eof()) {
        final String token = builder.getTokenText();
        if ("?".equals(token)) error = builder.mark();
        builder.advanceLexer();
        if (error != null) {
          error.error("test error 2");
          error = null;
        }
      }
      root.done(this);
    }
  }
}
/* * Copyright 2012 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.examples.youtubeapidemo; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; import com.google.android.youtube.player.YouTubeInitializationResult; import com.google.android.youtube.player.YouTubePlayer; import com.google.android.youtube.player.YouTubePlayer.PlayerStyle; import com.google.android.youtube.player.YouTubePlayerFragment; import com.google.android.youtube.player.YouTubeThumbnailLoader; import com.google.android.youtube.player.YouTubeThumbnailView; import android.annotation.TargetApi; import android.app.Activity; import android.app.Dialog; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.util.DisplayMetrics; import android.util.Pair; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.Toast; import com.examples.youtubeapidemo.ui.FlippingView; import com.examples.youtubeapidemo.ui.ImageWallView; /** * A demo application aimed at showing the capabilities of the YouTube Player API. It shows a video * wall of flipping YouTube thumbnails. Every 5 flips, one of the thumbnails will be replaced with * a playing YouTube video. 
*/ @TargetApi(11) public class VideoWallDemoActivity extends Activity implements FlippingView.Listener, YouTubePlayer.OnInitializedListener, YouTubeThumbnailView.OnInitializedListener { private static final int RECOVERY_DIALOG_REQUEST = 1; /** The player view cannot be smaller than 110 pixels high. */ private static final float PLAYER_VIEW_MINIMUM_HEIGHT_DP = 110; private static final int MAX_NUMBER_OF_ROWS_WANTED = 4; // Example playlist from which videos are displayed on the video wall private static final String PLAYLIST_ID = "ECAE6B03CA849AD332"; private static final int INTER_IMAGE_PADDING_DP = 5; // YouTube thumbnails have a 16 / 9 aspect ratio private static final double THUMBNAIL_ASPECT_RATIO = 16 / 9d; private static final int INITIAL_FLIP_DURATION_MILLIS = 100; private static final int FLIP_DURATION_MILLIS = 500; private static final int FLIP_PERIOD_MILLIS = 2000; private ImageWallView imageWallView; private Handler flipDelayHandler; private FlippingView flippingView; private YouTubeThumbnailView thumbnailView; private YouTubeThumbnailLoader thumbnailLoader; private YouTubePlayerFragment playerFragment; private View playerView; private YouTubePlayer player; private Dialog errorDialog; private int flippingCol; private int flippingRow; private int videoCol; private int videoRow; private boolean nextThumbnailLoaded; private boolean activityResumed; private State state; private enum State { UNINITIALIZED, LOADING_THUMBNAILS, VIDEO_FLIPPED_OUT, VIDEO_LOADING, VIDEO_CUED, VIDEO_PLAYING, VIDEO_ENDED, VIDEO_BEING_FLIPPED_OUT, } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); state = State.UNINITIALIZED; try { ViewGroup viewFrame = new FrameLayout(this); DisplayMetrics displayMetrics = getResources().getDisplayMetrics(); int maxAllowedNumberOfRows = (int) Math.floor( (displayMetrics.heightPixels / displayMetrics.density) / PLAYER_VIEW_MINIMUM_HEIGHT_DP); int numberOfRows = Math.min(maxAllowedNumberOfRows, 
MAX_NUMBER_OF_ROWS_WANTED); int interImagePaddingPx = (int) displayMetrics.density * INTER_IMAGE_PADDING_DP; int imageHeight = (displayMetrics.heightPixels / numberOfRows) - interImagePaddingPx; int imageWidth = (int) (imageHeight * THUMBNAIL_ASPECT_RATIO); imageWallView = new ImageWallView(this, imageWidth, imageHeight, interImagePaddingPx); viewFrame.addView(imageWallView, MATCH_PARENT, MATCH_PARENT); thumbnailView = new YouTubeThumbnailView(this); thumbnailView.initialize(DeveloperKey.DEVELOPER_KEY, this); flippingView = new FlippingView(this, this, imageWidth, imageHeight); flippingView.setFlipDuration(INITIAL_FLIP_DURATION_MILLIS); viewFrame.addView(flippingView, imageWidth, imageHeight); playerView = new FrameLayout(this); playerView.setId(R.id.player_view); playerView.setVisibility(View.INVISIBLE); viewFrame.addView(playerView, imageWidth, imageHeight); playerFragment = YouTubePlayerFragment.newInstance(); playerFragment.initialize(DeveloperKey.DEVELOPER_KEY, this); getFragmentManager().beginTransaction().add(R.id.player_view, playerFragment).commit(); flipDelayHandler = new FlipDelayHandler(); setContentView(viewFrame); } catch (Exception e) { e.printStackTrace(); } } @Override public void onInitializationSuccess(YouTubeThumbnailView thumbnailView, YouTubeThumbnailLoader thumbnailLoader) { this.thumbnailLoader = thumbnailLoader; thumbnailLoader.setOnThumbnailLoadedListener(new ThumbnailListener()); maybeStartDemo(); } @Override public void onInitializationFailure( YouTubeThumbnailView thumbnailView, YouTubeInitializationResult errorReason) { if (errorReason.isUserRecoverableError()) { if (errorDialog == null || !errorDialog.isShowing()) { errorDialog = errorReason.getErrorDialog(this, RECOVERY_DIALOG_REQUEST); errorDialog.show(); } } else { String errorMessage = String.format(getString(R.string.error_thumbnail_view), errorReason.toString()); Toast.makeText(this, errorMessage, Toast.LENGTH_LONG).show(); } } @Override public void 
onInitializationSuccess(YouTubePlayer.Provider provider, YouTubePlayer player, boolean wasResumed) { VideoWallDemoActivity.this.player = player; player.setPlayerStyle(PlayerStyle.CHROMELESS); player.setPlayerStateChangeListener(new VideoListener()); maybeStartDemo(); } @Override public void onInitializationFailure( YouTubePlayer.Provider provider, YouTubeInitializationResult errorReason) { if (errorReason.isUserRecoverableError()) { if (errorDialog == null || !errorDialog.isShowing()) { errorDialog = errorReason.getErrorDialog(this, RECOVERY_DIALOG_REQUEST); errorDialog.show(); } } else { String errorMessage = String.format(getString(R.string.error_player), errorReason.toString()); Toast.makeText(this, errorMessage, Toast.LENGTH_LONG).show(); } } private void maybeStartDemo() { if (activityResumed && player != null && thumbnailLoader != null && state.equals(State.UNINITIALIZED)) { thumbnailLoader.setPlaylist(PLAYLIST_ID); // loading the first thumbnail will kick off demo state = State.LOADING_THUMBNAILS; } } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == RECOVERY_DIALOG_REQUEST) { // Retry initialization if user performed a recovery action if (errorDialog != null && errorDialog.isShowing()) { errorDialog.dismiss(); } errorDialog = null; playerFragment.initialize(DeveloperKey.DEVELOPER_KEY, this); thumbnailView.initialize(DeveloperKey.DEVELOPER_KEY, this); } } @Override protected void onResume() { super.onResume(); activityResumed = true; if (thumbnailLoader != null && player != null) { if (state.equals(State.UNINITIALIZED)) { maybeStartDemo(); } else if (state.equals(State.LOADING_THUMBNAILS)) { loadNextThumbnail(); } else { if (state.equals(State.VIDEO_PLAYING)) { player.play(); } flipDelayHandler.sendEmptyMessageDelayed(0, FLIP_DURATION_MILLIS); } } } @Override protected void onPause() { flipDelayHandler.removeCallbacksAndMessages(null); activityResumed = false; super.onPause(); } @Override protected 
void onDestroy() { if (thumbnailLoader != null) { thumbnailLoader.release(); } super.onDestroy(); } private void flipNext() { if (!nextThumbnailLoaded || state.equals(State.VIDEO_LOADING)) { return; } if (state.equals(State.VIDEO_ENDED)) { flippingCol = videoCol; flippingRow = videoRow; state = State.VIDEO_BEING_FLIPPED_OUT; } else { Pair<Integer, Integer> nextTarget = imageWallView.getNextLoadTarget(); flippingCol = nextTarget.first; flippingRow = nextTarget.second; } flippingView.setX(imageWallView.getXPosition(flippingCol, flippingRow)); flippingView.setY(imageWallView.getYPosition(flippingCol, flippingRow)); flippingView.setFlipInDrawable(thumbnailView.getDrawable()); flippingView.setFlipOutDrawable(imageWallView.getImageDrawable(flippingCol, flippingRow)); imageWallView.setImageDrawable(flippingCol, flippingRow, thumbnailView.getDrawable()); imageWallView.hideImage(flippingCol, flippingRow); flippingView.setVisibility(View.VISIBLE); flippingView.flip(); } @Override public void onFlipped(FlippingView view) { imageWallView.showImage(flippingCol, flippingRow); flippingView.setVisibility(View.INVISIBLE); if (activityResumed) { loadNextThumbnail(); if (state.equals(State.VIDEO_BEING_FLIPPED_OUT)) { state = State.VIDEO_FLIPPED_OUT; } else if (state.equals(State.VIDEO_CUED)) { videoCol = flippingCol; videoRow = flippingRow; playerView.setX(imageWallView.getXPosition(flippingCol, flippingRow)); playerView.setY(imageWallView.getYPosition(flippingCol, flippingRow)); imageWallView.hideImage(flippingCol, flippingRow); playerView.setVisibility(View.VISIBLE); player.play(); state = State.VIDEO_PLAYING; } else if (state.equals(State.LOADING_THUMBNAILS) && imageWallView.allImagesLoaded()) { state = State.VIDEO_FLIPPED_OUT; // trigger flip in of an initial video flippingView.setFlipDuration(FLIP_DURATION_MILLIS); flipDelayHandler.sendEmptyMessage(0); } } } private void loadNextThumbnail() { nextThumbnailLoaded = false; if (thumbnailLoader.hasNext()) { thumbnailLoader.next(); } 
else { thumbnailLoader.first(); } } /** * A handler that periodically flips an element on the video wall. */ private final class FlipDelayHandler extends Handler { @Override public void handleMessage(Message msg) { flipNext(); sendEmptyMessageDelayed(0, FLIP_PERIOD_MILLIS); } } /** * An internal listener which listens to thumbnail loading events from the * {@link YouTubeThumbnailView}. */ private final class ThumbnailListener implements YouTubeThumbnailLoader.OnThumbnailLoadedListener { @Override public void onThumbnailLoaded(YouTubeThumbnailView thumbnail, String videoId) { nextThumbnailLoaded = true; if (activityResumed) { if (state.equals(State.LOADING_THUMBNAILS)) { flipNext(); } else if (state.equals(State.VIDEO_FLIPPED_OUT)) { // load player with the video of the next thumbnail being flipped in state = State.VIDEO_LOADING; player.cueVideo(videoId); } } } @Override public void onThumbnailError(YouTubeThumbnailView thumbnail, YouTubeThumbnailLoader.ErrorReason reason) { loadNextThumbnail(); } } private final class VideoListener implements YouTubePlayer.PlayerStateChangeListener { @Override public void onLoaded(String videoId) { state = State.VIDEO_CUED; } @Override public void onVideoEnded() { state = State.VIDEO_ENDED; imageWallView.showImage(videoCol, videoRow); playerView.setVisibility(View.INVISIBLE); } @Override public void onError(YouTubePlayer.ErrorReason errorReason) { if (errorReason == YouTubePlayer.ErrorReason.UNEXPECTED_SERVICE_DISCONNECTION) { // player has encountered an unrecoverable error - stop the demo flipDelayHandler.removeCallbacksAndMessages(null); state = State.UNINITIALIZED; thumbnailLoader.release(); thumbnailLoader = null; player = null; } else { state = State.VIDEO_ENDED; } } // ignored callbacks @Override public void onVideoStarted() { } @Override public void onAdStarted() { } @Override public void onLoading() { } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.commitlog; import java.io.*; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.CRC32; import java.util.zip.Checksum; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import org.apache.cassandra.db.*; import org.apache.cassandra.io.sstable.SSTable; import org.apache.cassandra.net.MessagingService; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.concurrent.Stage; import org.apache.cassandra.concurrent.StageManager; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.Config; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.io.DeletionService; import org.apache.cassandra.io.util.BufferedRandomAccessFile; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.WrappedRunnable; /* * Commit Log tracks every write 
 * operation into the system. The aim
 * of the commit log is to be able to successfully recover data that was
 * not stored to disk via the Memtable. Every Commit Log maintains a
 * header represented by the abstraction CommitLogHeader. The header
 * contains a bit array and an array of longs, and both arrays are
 * of size #column-families for the Table the Commit Log represents.
 *
 * Whenever a ColumnFamily is written to for the first time, its bit flag
 * is set to one in the CommitLogHeader. When it is flushed to disk by the
 * Memtable, its corresponding bit in the header is set to zero. This helps
 * track which CommitLogs can be thrown away as a result of Memtable flushes.
 * Additionally, when a ColumnFamily is flushed and written to disk, its
 * entry in the array of longs is updated with the offset in the Commit Log
 * file where it was written. This helps speed up recovery since we can seek
 * to these offsets and start processing the commit log.
 *
 * Every Commit Log is rolled over every time it reaches its threshold in size;
 * the new log inherits the "dirty" bits from the old.
 *
 * Over time a number of commit logs will be generated.
 * To allow cleaning up non-active commit logs, whenever we flush a column family and update its bit flag in
 * the active CL, we take the dirty bit array and bitwise AND it with the headers of the older logs.
 * If the result is 0, then it is safe to remove the older file. (Since the new CL
 * inherited the old's dirty bitflags, getting a zero for any given bit in the ANDing
 * means that either the CF was clean in the old CL or it has been flushed since the
 * switch to the new.)
*/
public class CommitLog
{
    // Throttle: max mutations in flight on the MUTATION stage during replay
    // before we block and drain them.
    private static final int MAX_OUTSTANDING_REPLAY_COUNT = 1024;

    static final Logger logger = LoggerFactory.getLogger(CommitLog.class);

    public static final CommitLog instance = new CommitLog();

    // Active segments, oldest first; the last element is the one being written.
    private final Deque<CommitLogSegment> segments = new ArrayDeque<CommitLogSegment>();

    private final ICommitLogExecutorService executor;

    private volatile int segmentSize = 128*1024*1024; // roll after log gets this big

    /**
     * Creates the commit log directories, reads the configured segment size,
     * opens an initial segment, and picks the executor matching the configured
     * sync mode (batch vs. periodic).
     */
    private CommitLog()
    {
        try
        {
            DatabaseDescriptor.createAllDirectories();
            segmentSize = DatabaseDescriptor.getCommitLogSegmentSize();
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }

        // all old segments are recovered and deleted before CommitLog is instantiated.
        // All we need to do is create a new one.
        segments.add(new CommitLogSegment());

        executor = DatabaseDescriptor.getCommitLogSync() == Config.CommitLogSync.batch
                 ? new BatchCommitLogExecutorService()
                 : new PeriodicCommitLogExecutorService(this);
    }

    // Test hook: drops all segments and starts a fresh one. Not thread-safe.
    public void resetUnsafe()
    {
        segments.clear();
        segments.add(new CommitLogSegment());
    }

    // True if `name` is the filename of one of our live (active) segments.
    private boolean manages(String name)
    {
        for (CommitLogSegment segment : segments)
        {
            if (segment.getPath().endsWith(name))
                return true;
        }
        return false;
    }

    /**
     * Replays all inactive commit log files found in the configured commit log
     * directory, then deletes them.
     *
     * @return the number of replayed mutations
     */
    public static int recover() throws IOException
    {
        String directory = DatabaseDescriptor.getCommitLogLocation();
        File[] files = new File(directory).listFiles(new FilenameFilter()
        {
            public boolean accept(File dir, String name)
            {
                // we used to try to avoid instantiating commitlog (thus creating an empty segment ready for writes)
                // until after recover was finished. this turns out to be fragile; it is less error-prone to go
                // ahead and allow writes before recover(), and just skip active segments when we do.
                return CommitLogSegment.possibleCommitLogFile(name) && !instance.manages(name);
            }
        });
        if (files.length == 0)
        {
            logger.info("No commitlog files found; skipping replay");
            return 0;
        }

        Arrays.sort(files, new FileUtils.FileComparator());
        logger.info("Replaying " + StringUtils.join(files, ", "));
        int replayed = recover(files);
        for (File f : files)
        {
            if (!f.delete())
                logger.error("Unable to remove " + f + "; you should remove it manually or next restart will replay it again (harmless, but time-consuming)");
        }
        logger.info("Log replay complete, " + replayed + " replayed mutations");
        return replayed;
    }

    // returns the number of replayed mutation (useful for tests in particular)
    //
    // Entry wire format per mutation (see the write path in CommitLogSegment):
    //   int   serializedSize
    //   long  checksum of serializedSize (guards against torn size fields)
    //   byte[serializedSize] serialized RowMutation
    //   long  CRC32 over size + payload
    // A failed size checksum means the tail was never fully synced -> stop;
    // a failed payload CRC means this entry wasn't fsynced -> skip it but
    // keep scanning, since we are still aligned on an entry boundary.
    public static int recover(File[] clogs) throws IOException
    {
        final Set<Table> tablesRecovered = new HashSet<Table>();
        List<Future<?>> futures = new ArrayList<Future<?>>();
        byte[] bytes = new byte[4096];
        Map<Integer, AtomicInteger> invalidMutations = new HashMap<Integer, AtomicInteger>();

        // count the number of replayed mutation. We don't really care about atomicity, but we need it to be a reference.
        final AtomicInteger replayedCount = new AtomicInteger();

        // compute per-CF and global replay positions
        final Map<Integer, ReplayPosition> cfPositions = new HashMap<Integer, ReplayPosition>();
        for (ColumnFamilyStore cfs : ColumnFamilyStore.all())
        {
            // it's important to call RP.gRP per-cf, before aggregating all the positions w/ the Ordering.min call
            // below: gRP will return NONE if there are no flushed sstables, which is important to have in the
            // list (otherwise we'll just start replay from the first flush position that we do have, which is not correct).
            ReplayPosition rp = ReplayPosition.getReplayPosition(cfs.getSSTables());
            cfPositions.put(cfs.metadata.cfId, rp);
        }
        final ReplayPosition globalPosition = Ordering.from(ReplayPosition.comparator).min(cfPositions.values());

        Checksum checksum = new CRC32();
        for (final File file : clogs)
        {
            final long segment = CommitLogSegment.idFromFilename(file.getName());

            // Cap the read buffer at 32MB regardless of file size.
            int bufferSize = (int) Math.min(Math.max(file.length(), 1), 32 * 1024 * 1024);
            BufferedRandomAccessFile reader = new BufferedRandomAccessFile(new File(file.getAbsolutePath()), "r", bufferSize, true);
            assert reader.length() <= Integer.MAX_VALUE;

            try
            {
                // Start replay at the global position within this segment:
                // everything before it is known-flushed for every CF.
                int replayPosition;
                if (globalPosition.segment < segment)
                    replayPosition = 0;
                else if (globalPosition.segment == segment)
                    replayPosition = globalPosition.position;
                else
                    replayPosition = (int) reader.length();

                if (replayPosition < 0 || replayPosition >= reader.length())
                {
                    // replayPosition > reader.length() can happen if some data gets flushed before it is written to the commitlog
                    // (see https://issues.apache.org/jira/browse/CASSANDRA-2285)
                    logger.debug("skipping replay of fully-flushed {}", file);
                    continue;
                }

                reader.seek(replayPosition);

                if (logger.isDebugEnabled())
                    logger.debug("Replaying " + file + " starting at " + reader.getFilePointer());

                /* read the logs populate RowMutation and apply */
                while (!reader.isEOF())
                {
                    if (logger.isDebugEnabled())
                        logger.debug("Reading mutation at " + reader.getFilePointer());

                    long claimedCRC32;
                    int serializedSize;
                    try
                    {
                        // any of the reads may hit EOF
                        serializedSize = reader.readInt();
                        // RowMutation must be at LEAST 10 bytes:
                        // 3 each for a non-empty Table and Key (including the 2-byte length from
                        // writeUTF/writeWithShortLength) and 4 bytes for column count.
                        // This prevents CRC by being fooled by special-case garbage in the file; see CASSANDRA-2128
                        if (serializedSize < 10)
                            break;
                        long claimedSizeChecksum = reader.readLong();
                        checksum.reset();
                        checksum.update(serializedSize);
                        if (checksum.getValue() != claimedSizeChecksum)
                            break; // entry wasn't synced correctly/fully. that's ok.

                        // Grow the scratch buffer with 20% headroom to limit reallocations.
                        if (serializedSize > bytes.length)
                            bytes = new byte[(int) (1.2 * serializedSize)];
                        reader.readFully(bytes, 0, serializedSize);
                        claimedCRC32 = reader.readLong();
                    }
                    catch(EOFException eof)
                    {
                        break; // last CL entry didn't get completely written. that's ok.
                    }

                    checksum.update(bytes, 0, serializedSize);
                    if (claimedCRC32 != checksum.getValue())
                    {
                        // this entry must not have been fsynced. probably the rest is bad too,
                        // but just in case there is no harm in trying them (since we still read on an entry boundary)
                        continue;
                    }

                    /* deserialize the commit log entry */
                    ByteArrayInputStream bufIn = new ByteArrayInputStream(bytes, 0, serializedSize);
                    RowMutation rm = null;
                    try
                    {
                        // assuming version here. We've gone to lengths to make sure what gets written to the CL is in
                        // the current version. so do make sure the CL is drained prior to upgrading a node.
                        rm = RowMutation.serializer().deserialize(new DataInputStream(bufIn), MessagingService.version_, false);
                    }
                    catch (UnserializableColumnFamilyException ex)
                    {
                        // CF was dropped (or is otherwise unknown); count it and move on.
                        AtomicInteger i = invalidMutations.get(ex.cfId);
                        if (i == null)
                        {
                            i = new AtomicInteger(1);
                            invalidMutations.put(ex.cfId, i);
                        }
                        else
                            i.incrementAndGet();
                        continue;
                    }

                    if (logger.isDebugEnabled())
                        logger.debug(String.format("replaying mutation for %s.%s: %s",
                                                   rm.getTable(),
                                                   ByteBufferUtil.bytesToHex(rm.key()),
                                                   "{" + StringUtils.join(rm.getColumnFamilies(), ", ") + "}"));

                    final long entryLocation = reader.getFilePointer();
                    final RowMutation frm = rm;
                    // Apply asynchronously on the MUTATION stage; entryLocation is
                    // captured now so the per-CF replay-position check is correct.
                    Runnable runnable = new WrappedRunnable()
                    {
                        public void runMayThrow() throws IOException
                        {
                            // Keyspace no longer exists -> nothing to apply.
                            if (DatabaseDescriptor.getKSMetaData(frm.getTable()) == null)
                                return;

                            final Table table = Table.open(frm.getTable());
                            RowMutation newRm = new RowMutation(frm.getTable(), frm.key());

                            // Rebuild the row mutation, omitting column families that a) have already been flushed,
                            // b) are part of a cf that was dropped. Keep in mind that the cf.name() is suspect. do every
                            // thing based on the cfid instead.
                            for (ColumnFamily columnFamily : frm.getColumnFamilies())
                            {
                                if (CFMetaData.getCF(columnFamily.id()) == null)
                                    // null means the cf has been dropped
                                    continue;

                                ReplayPosition rp = cfPositions.get(columnFamily.id());

                                // replay if current segment is newer than last flushed one or, if it is the last known
                                // segment, if we are after the replay position
                                if (segment > rp.segment || (segment == rp.segment && entryLocation > rp.position))
                                {
                                    newRm.add(columnFamily);
                                    replayedCount.incrementAndGet();
                                }
                            }
                            if (!newRm.isEmpty())
                            {
                                Table.open(newRm.getTable()).apply(newRm, false);
                                tablesRecovered.add(table);
                            }
                        }
                    };
                    futures.add(StageManager.getStage(Stage.MUTATION).submit(runnable));
                    // Bound memory use: drain outstanding replays past the cap.
                    if (futures.size() > MAX_OUTSTANDING_REPLAY_COUNT)
                    {
                        FBUtilities.waitOnFutures(futures);
                        futures.clear();
                    }
                }
            }
            finally
            {
                FileUtils.closeQuietly(reader);
                logger.info("Finished reading " + file);
            }
        }

        for (Map.Entry<Integer, AtomicInteger> entry : invalidMutations.entrySet())
            logger.info(String.format("Skipped %d mutations from unknown (probably removed) CF with id %d", entry.getValue().intValue(), entry.getKey()));

        // wait for all the writes to finish on the mutation stage
        FBUtilities.waitOnFutures(futures);
        logger.debug("Finished waiting on mutations from recovery");

        // flush replayed tables so their commitlog entries become obsolete
        futures.clear();
        for (Table table : tablesRecovered)
            futures.addAll(table.flush());
        FBUtilities.waitOnFutures(futures);
        return replayedCount.get();
    }

    // The segment currently being appended to.
    private CommitLogSegment currentSegment()
    {
        return segments.getLast();
    }

    /**
     * Fetches the current write position, evaluated on the commit log executor
     * so it is consistent with queued writes.
     */
    public ReplayPosition getContext()
    {
        Callable<ReplayPosition> task = new Callable<ReplayPosition>()
        {
            public ReplayPosition call() throws Exception
            {
                return currentSegment().getContext();
            }
        };
        try
        {
            return executor.submit(task).get();
        }
        catch (InterruptedException e)
        {
            throw new RuntimeException(e);
        }
        catch (ExecutionException e)
        {
            throw new RuntimeException(e);
        }
    }

    // for tests mainly
    public int segmentsCount()
    {
        return segments.size();
    }

    /*
     * Adds the specified row to the commit log. This method will reset the
     * file offset to what it is before the start of the operation in case
     * of any problems. This way we can assume that the subsequent commit log
     * entry will override the garbage left over by the previous write.
     */
    public void add(RowMutation rowMutation) throws IOException
    {
        executor.add(new LogRecordAdder(rowMutation));
    }

    /*
     * This is called on Memtable flush to add to the commit log
     * a token indicating that this column family has been flushed.
     * The bit flag associated with this column family is set in the
     * header and this is used to decide if the log file can be deleted.
     */
    public void discardCompletedSegments(final Integer cfId, final ReplayPosition context) throws IOException
    {
        // Run on the commit log executor so discards serialize with writes.
        Callable task = new Callable()
        {
            public Object call() throws IOException
            {
                discardCompletedSegmentsInternal(context, cfId);
                return null;
            }
        };
        try
        {
            executor.submit(task).get();
        }
        catch (InterruptedException e)
        {
            throw new RuntimeException(e);
        }
        catch (ExecutionException e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Delete log segments whose contents have been turned into SSTables. NOT threadsafe.
     *
     * @param context the replay position up to which cf id has been flushed
     * @param id      id of the columnFamily being flushed to disk
     */
    private void discardCompletedSegmentsInternal(ReplayPosition context, Integer id) throws IOException
    {
        if (logger.isDebugEnabled())
            logger.debug("discard completed log segments for " + context + ", column family " + id + ".");

        /*
         * Loop through all the commit log files in the history. Now process
         * all files that are older than the one in the context. For each of
         * these files the header needs to be modified by resetting the dirty
         * bit corresponding to the flushed CF.
         */
        Iterator<CommitLogSegment> iter = segments.iterator();
        while (iter.hasNext())
        {
            CommitLogSegment segment = iter.next();
            if (segment.id == context.segment)
            {
                // Only unmark this segment if there has been no write since the
                // ReplayPosition was grabbed.
                segment.turnOffIfNotWritten(id, context.position);
                maybeDiscardSegment(segment, iter);
                break;
            }

            segment.turnOff(id);
            maybeDiscardSegment(segment, iter);
        }
    }

    // Deletes the segment if every CF bit is clean; never deletes the active
    // (last) segment, which is why iter.hasNext() is part of the condition.
    private void maybeDiscardSegment(CommitLogSegment segment, Iterator<CommitLogSegment> iter)
    {
        if (segment.isSafeToDelete() && iter.hasNext())
        {
            logger.info("Discarding obsolete commit log:" + segment);
            segment.close();
            DeletionService.executeDelete(segment.getPath());
            // usually this will be the first (remaining) segment, but not always, if segment A contains
            // writes to a CF that is unflushed but is followed by segment B whose CFs are all flushed.
            iter.remove();
        }
        else
        {
            if (logger.isDebugEnabled())
                logger.debug("Not safe to delete commit log " + segment + "; dirty is " + segment.dirtyString() + "; hasNext: " + iter.hasNext());
        }
    }

    // Forces an fsync of the active segment.
    void sync() throws IOException
    {
        currentSegment().sync();
    }

    /** Rolls to a new segment, synchronously, on the commit log executor. */
    public void forceNewSegment()
    {
        Callable<?> task = new Callable()
        {
            public Object call() throws IOException
            {
                createNewSegment();
                return null;
            }
        };
        try
        {
            executor.submit(task).get();
        }
        catch (InterruptedException e)
        {
            // NOTE(review): inconsistent with getContext/discardCompletedSegments,
            // which wrap InterruptedException in RuntimeException — confirm intent.
            throw new AssertionError(e);
        }
        catch (ExecutionException e)
        {
            throw new RuntimeException(e);
        }
    }

    // Syncs the current segment, then opens a fresh one. Must run on the
    // commit log executor.
    private void createNewSegment() throws IOException
    {
        sync();
        segments.add(new CommitLogSegment());
    }

    // TODO this should be a Runnable since it doesn't actually return anything, but it's difficult to do that
    // without breaking the fragile CheaterFutureTask in BatchCLES.
    class LogRecordAdder implements Callable, Runnable
    {
        final RowMutation rowMutation;

        LogRecordAdder(RowMutation rm)
        {
            this.rowMutation = rm;
        }

        public void run()
        {
            try
            {
                currentSegment().write(rowMutation);
                // roll log if necessary
                if (currentSegment().length() >= segmentSize)
                    createNewSegment();
            }
            catch (IOException e)
            {
                throw new IOError(e);
            }
        }

        public Object call() throws Exception
        {
            run();
            return null;
        }
    }

    // Stops accepting writes and blocks until queued work has drained.
    public void shutdownBlocking() throws InterruptedException
    {
        executor.shutdown();
        executor.awaitTermination();
    }
}
/* * Copyright 2013 Guidewire Software, Inc. */ package gw.lang.reflect; import gw.internal.gosu.parser.StringCache; import gw.lang.parser.GosuParserTypes; import gw.lang.parser.IExpression; import gw.lang.parser.StandardCoercionManager; import gw.lang.reflect.gs.IGosuClass; import gw.lang.reflect.java.JavaTypes; import gw.lang.reflect.gs.IGenericTypeVariable; import gw.lang.parser.IScriptPartId; import gw.lang.reflect.java.IJavaClassInfo; import gw.lang.parser.IBlockClass; import gw.lang.parser.ScriptPartId; import gw.lang.parser.TypeVarToTypeMap; import gw.util.Pair; import gw.util.concurrent.LockingLazyVar; import java.io.ObjectStreamException; import java.util.Collections; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; public class FunctionType extends AbstractType implements IFunctionType, IGenericMethodInfo { public static final ThreadLocal<IFunctionType> CURRENT = new ThreadLocal<IFunctionType>(); private static final IGenericTypeVariable[] EMPTY_TYPE_VARS = new IGenericTypeVariable[0]; private static final IType[] EMPTY_ARGS = new IType[0]; private IType _retType; private volatile IType[] _paramTypes; private IMethodInfo _mi; private String _strFunctionName; private IScriptPartId _scriptPart; private IGosuClass _owningParameterizedType; private volatile IGenericTypeVariable[] _typeVars; private int _iModifiers; transient private FunctionTypeInfo _typeInfo; transient protected Set<IType> _allTypesInHierarchy; transient private String _signature; volatile transient private Map<String, ParameterizedFunctionType> _parameterizationByParamsName; private LockingLazyVar<FunctionArrayType> _arrType = new LockingLazyVar<FunctionArrayType>() { @Override protected FunctionArrayType init() { return new FunctionArrayType( FunctionType.this, getFunctionClass(), getTypeLoader() ); } }; /** * Construct a FunctionType with the specified return type and parameter types array. 
* * @param strFunctionName The name of the function * @param retType The return type of the function. * @param paramTypes The parameter types. Can be null if no params. */ public FunctionType( String strFunctionName, IType retType, IType[] paramTypes ) { _retType = retType; if (_retType == null) { _retType = JavaTypes.pVOID(); } _paramTypes = paramTypes == null || paramTypes.length == 0 ? EMPTY_ARGS : paramTypes; setName(strFunctionName); _mi = null; _allTypesInHierarchy = Collections.<IType>singleton( this ); _typeVars = EMPTY_TYPE_VARS; } /** * Construct a generic FunctionType with the specified return type, parameter * types, and generic type variables. * * @param strFunctionName The name of the function * @param retType The return type of the function. * @param paramTypes The parameter types. Can be null if no params. * @param typeVars The generic type variables. If null, does not create a * generic function type. */ public FunctionType( String strFunctionName, IType retType, IType[] paramTypes, IGenericTypeVariable[] typeVars ) { this( strFunctionName, retType, paramTypes ); if( typeVars != null ) { _typeVars = typeVars; for( IGenericTypeVariable gtv : typeVars ) { gtv.getTypeVariableDefinition().setEnclosingType( this ); } } } public FunctionType( IMethodInfo mi ) { this(mi, false); } public FunctionType( IMethodInfo mi, boolean lazyTypes ) { _mi = mi; if (!lazyTypes) { initLazyMethodInfoState(); } setName(mi.getDisplayName()); _allTypesInHierarchy = Collections.<IType>singleton( this ); } private void setName(String name) { _strFunctionName = StringCache.get( name ); } private void initLazyMethodInfoState() { TypeSystem.lock(); try { if( _paramTypes == null ) { IParameterInfo[] pd = _mi.getParameters(); int iArgs = pd.length; _paramTypes = new IType[iArgs]; for( int i = 0; i < iArgs; i++ ) { _paramTypes[i] = pd[i].getFeatureType(); } if( _paramTypes.length == 0 ) { _paramTypes = EMPTY_ARGS; } _typeVars = EMPTY_TYPE_VARS; if( _mi instanceof 
IGenericMethodInfo) { _typeVars = ((IGenericMethodInfo)_mi).getTypeVariables(); } clearParamSignature(); } _retType = _mi.getReturnType(); if( _retType == null ) { _retType = JavaTypes.pVOID(); } } finally { TypeSystem.unlock(); } } public FunctionType( FunctionType source, IGosuClass gsClass ) { if( gsClass.isParameterizedType() ) { _owningParameterizedType = gsClass; TypeVarToTypeMap actualParamByVarName = TypeSystem.mapTypeByVarName( gsClass, gsClass, true ); IGenericTypeVariable[] tvs = source.getTypeVariables(); if( tvs != null ) { for( IGenericTypeVariable tv : tvs ) { if( actualParamByVarName.isEmpty() ) { actualParamByVarName = new TypeVarToTypeMap(); } actualParamByVarName.put( tv.getTypeVariableDefinition().getType(), tv.getTypeVariableDefinition().getType() ); } } assignReturnTypeFromTypeParams( source, actualParamByVarName, true ); assignParamTypesFromTypeParams( source, actualParamByVarName, true ); } else { _retType = source.getReturnType(); if (_retType == null) { _retType = JavaTypes.pVOID(); } _paramTypes = source._paramTypes; //clearParamSignature(); TODO - reenable if you dare } copyFields( source ); } protected FunctionType(FunctionType source, IType returnType, IType[] paramTypes) { _retType = returnType; _paramTypes = paramTypes; copyFields( source ); } protected FunctionType( FunctionType source, TypeVarToTypeMap actualParamByVarName, boolean bKeepTypeVars ) { assignReturnTypeFromTypeParams( source, actualParamByVarName, bKeepTypeVars ); assignParamTypesFromTypeParams( source, actualParamByVarName, bKeepTypeVars ); copyFields( source ); clearParamSignature(); } protected void copyFields( FunctionType source ) { _mi = source._mi; _strFunctionName = source._strFunctionName; _scriptPart = source._scriptPart; _typeVars = source.getGenericTypeVariables(); _typeInfo = source._typeInfo; _allTypesInHierarchy = source._allTypesInHierarchy; _signature = source._signature; _parameterizationByParamsName = source._parameterizationByParamsName; } /** * 
Create a new FunctionType based on the type parameters assigned to the type * vars in actualParamByVarName. * <p> * It is important to note that this form of parameterization is different * from calling getParameterizedType(). The latter creates a parameterization * of a generic function e.g., function foo&lt;T&gt;() where T is a type var * on the function. * <p> * This here method is for parameterizing a function type that has references * to external type vars e.g., class Fred&lt;T&gt; { function foo( t : T ){} } * where T is a type var on the containing class. * * @param source The raw form of a FunctionType having possible references to * type vars defined in its containing class. * @param actualParamByVarName A map of concrete types by type var name * e.g., "T" -> String * @param bKeepTypeVars Indicates whether or not a type var referenced in the * source FunctionType that cannot be resolved via the actualParamByVarName * map should remain as a TypeVariableType or should be converted to its * bounding type. 
* @return A parameterized version of the source FunctionType */ public FunctionType parameterize( FunctionType source, TypeVarToTypeMap actualParamByVarName, boolean bKeepTypeVars ) { return new FunctionType( source, actualParamByVarName, bKeepTypeVars ); } private void assignParamTypesFromTypeParams( FunctionType source, TypeVarToTypeMap actualParamByVarName, boolean bKeepTypeVars ) { IType[] genParamTypes = source.getParameterTypes(); if( genParamTypes != null ) { _paramTypes = new IType[genParamTypes.length]; for( int j = 0; j < genParamTypes.length; j++ ) { _paramTypes[j] = TypeSystem.getActualType( genParamTypes[j], actualParamByVarName, bKeepTypeVars ); if( _paramTypes[j] == null ) { _paramTypes[j] = genParamTypes[j]; } } if (_paramTypes.length == 0) { _paramTypes = EMPTY_ARGS; } clearParamSignature(); } } private void assignReturnTypeFromTypeParams( FunctionType source, TypeVarToTypeMap actualParamByVarName, boolean bKeepTypeVars ) { _retType = TypeSystem.getActualType( source.getReturnType(), actualParamByVarName, bKeepTypeVars ); if( _retType == null ) { _retType = source.getReturnType(); } if (_retType == null) { _retType = JavaTypes.pVOID(); } } /** * @return The instrinic type of this FunctionType's return type. */ public IType getIntrinsicType() { return getReturnType(); } public IType getReturnType() { if (_retType == null) { initLazyMethodInfoState(); } return _retType; } public void setRetType( IType retType ) { _retType = retType == null ? JavaTypes.pVOID() : retType; } public IType[] getParameterTypes() { if (_paramTypes == null) { initLazyMethodInfoState(); } return _paramTypes; } public void setArgumentTypes( IType[] paramTypes ) { _paramTypes = paramTypes == null || paramTypes.length == 0 ? 
EMPTY_ARGS : paramTypes; clearParamSignature(); } public IMethodInfo getMethodInfo() { if( _mi == null ) { if( getScriptPart() != null ) { IType type = getScriptPart().getContainingType(); if( type instanceof IGosuClass ) { IGosuClass gsClass = (IGosuClass)type; _mi = gsClass.getTypeInfo().getMethod( type, getName(), getParameterTypes() ); } else if (type != null) { _mi = type.getTypeInfo().getMethod(getName(), getParameterTypes()); } } } return _mi; } public IFeatureInfo getMethodOrConstructorInfo() { IMethodInfo mi = getMethodInfo(); if( mi == null ) { if( getScriptPart() != null ) { IType type = getScriptPart().getContainingType(); if( type instanceof IGosuClass && type.getRelativeName().equals( getName() ) ) { IGosuClass gsClass = (IGosuClass)type; return gsClass.getTypeInfo().getConstructor( type, getParameterTypes() ); } } } return mi; } private void clearParamSignature() { _signature = null; } public String getParamSignature() { if( _signature == null ) { IType[] paramTypes = getParameterTypes(); if( paramTypes.length == 0 ) { return _signature = _strFunctionName + "()"; } String strParams = _strFunctionName + "("; for( int i = 0; i < paramTypes.length; i++ ) { strParams += (i == 0 ? "" : ", " ) + (paramTypes[i] == null ? "" : paramTypes[i].getName()); } strParams += ")"; _signature = strParams; } return _signature; } public String getParamSignatureForCurrentModule() { String sig; IType[] paramTypes = getParameterTypes(); if( paramTypes.length == 0 ) { sig = (String)(_strFunctionName + "()"); } else { String strParams = _strFunctionName + "("; for( int i = 0; i < paramTypes.length; i++ ) { strParams += (i == 0 ? "" : ", " ) + (paramTypes[i] == null ? 
"" : getParamTypeNameFromJavaBackedType(paramTypes[i])); } strParams += ")"; sig = (String)strParams; } return sig; } public static String getParamTypeNameFromJavaBackedType(IType paramType) { return TypeSystem.getTypeFromJavaBackedType(paramType).getName(); } public String getName() { return _strFunctionName; } public String getDisplayName() { return getName(); } public String getRelativeName() { return getName(); } public String getNamespace() { IType enclosingType = getEnclosingType(); return enclosingType != null ? enclosingType.getName() : null; } public ITypeLoader getTypeLoader() { return null; } public boolean isInterface() { return false; } public IType[] getInterfaces() { return EMPTY_TYPE_ARRAY; } public boolean isEnum() { return false; } public IType getSupertype() { return null; } public IType getEnclosingType() { if( _scriptPart instanceof IType) { return (IType)_scriptPart; } final IMethodInfo methodInfo = getMethodInfo(); if( methodInfo != null ) { return methodInfo.getOwnersType(); } return null; } public IType getGenericType() { return isGenericType() ? this : null; } public boolean isFinal() { return false; } public boolean isParameterizedType() { return false; } public boolean isGenericType() { return getGenericTypeVariables().length > 0; } public IGenericTypeVariable[] getGenericTypeVariables() { if (_typeVars == null) { initLazyMethodInfoState(); } return _typeVars; } public ParameterizedFunctionType getParameterizedType( IType... typeParams ) { if( typeParams == null || typeParams.length == 0 ) { throw new IllegalArgumentException( "Parameter types required." 
); } if( _parameterizationByParamsName == null ) { TypeSystem.lock(); try { if( _parameterizationByParamsName == null ) { _parameterizationByParamsName = new ConcurrentHashMap<String, ParameterizedFunctionType>( 2 ); } } finally { TypeSystem.unlock(); } } String strNameOfParams = TypeSystem.getNameOfParams( typeParams, false, true ); ParameterizedFunctionType parameterizedType = _parameterizationByParamsName.get( strNameOfParams ); if( parameterizedType == null ) { TypeSystem.lock(); try { parameterizedType = _parameterizationByParamsName.get( strNameOfParams ); if( parameterizedType == null ) { parameterizedType = new ParameterizedFunctionType( this, typeParams ); _parameterizationByParamsName.put( strNameOfParams, parameterizedType ); } } finally { TypeSystem.unlock(); } } return parameterizedType; } public IFunctionType inferParameterizedTypeFromArgTypesAndContextType(IType[] argTypes, IType ctxType) { TypeVarToTypeMap map = TypeVarToTypeMap.EMPTY_MAP; if( argTypes.length > 0 ) { if( getMethodInfo() == null ) { map = inferTypeParametersFromArgumentTypes( argTypes ); } else { map = ((IGenericMethodInfo)getMethodInfo()).inferTypeParametersFromArgumentTypes2( _owningParameterizedType, argTypes ); } } IGenericTypeVariable[] typeVars = getGenericTypeVariables(); if( typeVars.length == 0 ) { return this; } IType[] typeParams = new IType[typeVars.length]; for( int i = 0; i < typeVars.length; i++ ) { IType inferredType = map.get( typeVars[i].getTypeVariableDefinition().getType() ); if( inferredType == null && ctxType != null ) { //try to infer type from context type TypeVarToTypeMap returnTypeVars = new TypeVarToTypeMap(); TypeSystem.inferTypeVariableTypesFromGenParamTypeAndConcreteType( getReturnType(), ctxType, returnTypeVars ); inferredType = returnTypeVars.get( typeVars[i].getTypeVariableDefinition().getType() ); } typeParams[i] = inferredType; if( typeParams[i] == null ) { return this; } } return getParameterizedType( typeParams ); } public IType[] 
getTypeParameters() { return null; } public Set<IType> getAllTypesInHierarchy() { return _allTypesInHierarchy; } public boolean isArray() { return false; } public boolean isPrimitive() { return false; } public IType getArrayType() { return _arrType.get(); } public Object makeArrayInstance( int iLength ) { return TypeSystem.get( getFunctionClass() ).makeArrayInstance( iLength ); } private IJavaClassInfo getFunctionClass() { return TypeSystem.getGosuClassLoader().getFunctionClassForArity( getParameterTypes().length ).getBackingClassInfo(); } public Object getArrayComponent( Object array, int iIndex ) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { return TypeSystem.get( getFunctionClass() ).getArrayComponent( array, iIndex ); } public void setArrayComponent( Object array, int iIndex, Object value ) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { TypeSystem.get( getFunctionClass() ).setArrayComponent( array, iIndex, value ); } public int getArrayLength( Object array ) throws IllegalArgumentException { return TypeSystem.get( getFunctionClass() ).getArrayLength( array ); } public IType getComponentType() { return null; } public boolean isAssignableFrom( IType type ) { if( this == type ) { return true; } if( type instanceof IBlockClass ) { return isAssignableFrom( ((IBlockClass)type).getBlockType() ); } if( type instanceof FunctionType ) { FunctionType otherType = (FunctionType)type; //contravariant arg types if( areParamsCompatible( this, otherType ) ) { //covariant return types return getReturnType().isAssignableFrom( otherType.getReturnType() ) || StandardCoercionManager.arePrimitiveTypesAssignable( getReturnType(), otherType.getReturnType() ) || getReturnType() == GosuParserTypes.NULL_TYPE(); } } return false; } public boolean areParamsCompatible(IFunctionType rhsFunctionType) { return areParamsCompatible(this, rhsFunctionType); } public static boolean areParamsCompatible( IFunctionType lhsType, IFunctionType rhsType ) { IType[] 
lhsParams = lhsType.getParameterTypes(); IType[] rhsParams = rhsType.getParameterTypes(); if( lhsParams.length != rhsParams.length ) { return false; } for( int i = 0; i < rhsParams.length; i++ ) { IType myParamType = lhsParams[i]; IType otherParamType = rhsParams[i]; //## todo: this is a hack; we need to tighten this up if( !StandardCoercionManager.arePrimitiveTypesAssignable( otherParamType, myParamType ) ) { if( !(otherParamType.isAssignableFrom( myParamType ) || myParamType instanceof ITypeVariableType) ) { return false; } } } return true; } public boolean isMutable() { return false; } public ITypeInfo getTypeInfo() { return _typeInfo == null ? _typeInfo = new FunctionTypeInfo( this ) : _typeInfo; } public void unloadTypeInfo() { _typeInfo = null; } public Object readResolve() throws ObjectStreamException { return this; } public boolean isValid() { return true; } public int getModifiers() { if( _iModifiers == 0 ) { return _mi != null ? Modifier.getModifiersFrom( _mi ) : Modifier.PUBLIC; } return _iModifiers; } public void setModifiers( int iModifiers ) { _iModifiers = iModifiers; } public boolean isAbstract() { return false; } public IScriptPartId getScriptPart() { return _scriptPart; } @Override public IType newInstance( IType[] paramTypes, IType returnType ) { FunctionType functionType = new FunctionType(this._strFunctionName, returnType, paramTypes, cloneTypeVars() ); if (getScriptPart() == null && _mi != null) { if (_mi instanceof MethodInfoDelegate) { functionType.setScriptPart(new ScriptPartId(((MethodInfoDelegate)_mi).getSource().getOwnersType(), null)); } else { functionType.setScriptPart(new ScriptPartId(_mi.getOwnersType(), null)); } } else { functionType.setScriptPart(getScriptPart()); } return functionType; } @Override public Pair<Long, String> getRetainedMemory() { return null; } private IGenericTypeVariable[] cloneTypeVars() { IGenericTypeVariable[] typeVars = new IGenericTypeVariable[_typeVars.length]; for (int i = 0; i < typeVars.length; i++) { 
typeVars[i] = _typeVars[i].clone(); } return typeVars; } public void setScriptPart( IScriptPartId scriptPart ) { _scriptPart = scriptPart; } public boolean equals( Object o ) { if( this == o ) { return true; } if( !getClass().isInstance( o ) ) { return false; } final FunctionType funcType = (FunctionType)o; // Name if( !funcType.getDisplayName().equals( getDisplayName() ) && !(o instanceof IBlockType) && !(this instanceof IBlockType)) { return false; } // Enclosing Type if( !areEnclosingTypesEqual( funcType ) ) { return false; } // Parameter Types if( funcType.getParameterTypes().length != getParameterTypes().length ) { return false; } for( int i = 0; i < getParameterTypes().length; i++ ) { if( !areSameTypes( getParameterTypes()[i], funcType.getParameterTypes()[i] ) ) { return false; } } // Return Type return areSameTypes( getReturnType(), funcType.getReturnType() ); } protected boolean areEnclosingTypesEqual( FunctionType funcType ) { if( areSameTypes( getEnclosingType(), funcType.getEnclosingType() ) ) { return true; } // crappy fix for when block types are compared as parameter types for methodinfo lookup return getEnclosingType() == null || funcType.getEnclosingType() == null; } private boolean areSameTypes( IType t1, IType t2 ) { return t1 instanceof INonLoadableType ? 
t1.equals( t2 ) : t1 == t2; } public int hashCode() { int result = getDisplayName().hashCode(); for( int i = 0; i < getParameterTypes().length; i++ ) { if( getParameterTypes()[i] instanceof INonLoadableType ) { result = 31 * result + getParameterTypes()[i].hashCode(); } else { result = 31 * result + getParameterTypes()[i].getName().hashCode(); } } if( getReturnType() instanceof INonLoadableType ) { result = 31 * result + getReturnType().hashCode(); } else { result = 31 * result + getReturnType().getName().hashCode(); } return result; } public String toString() { return getParamSignature().toString() + ":" + getReturnType().getName(); } public TypeVarToTypeMap inferTypeParametersFromArgumentTypes2( IGosuClass owningParameterizedType, IType... argTypes ) { return inferTypeParametersFromArgumentTypes( argTypes ); } public TypeVarToTypeMap inferTypeParametersFromArgumentTypes( IType... argTypes ) { IType[] genParamTypes = getParameterTypes(); TypeVarToTypeMap map = new TypeVarToTypeMap(); for( int i = 0; i < argTypes.length; i++ ) { if( genParamTypes.length > i ) { TypeSystem.inferTypeVariableTypesFromGenParamTypeAndConcreteType( genParamTypes[i], argTypes[i], map ); } } return map; } public IGenericTypeVariable[] getTypeVariables() { return getGenericTypeVariables(); } public IType getParameterizedReturnType( IType... typeParams ) { TypeVarToTypeMap actualParamByVarName = new TypeVarToTypeMap(); int i = 0; for( IGenericTypeVariable tv : getTypeVariables() ) { actualParamByVarName.put( tv.getTypeVariableDefinition().getType(), typeParams[i++] ); } return TypeSystem.getActualType( getReturnType(), actualParamByVarName, false ); } public IType[] getParameterizedParameterTypes( IType... typeParams ) { return getParameterizedParameterTypes2( null, typeParams ); } public IType[] getParameterizedParameterTypes2( IGosuClass ownersType, IType... 
typeParams ) { TypeVarToTypeMap actualParamByVarName = new TypeVarToTypeMap(); int i = 0; for( IGenericTypeVariable tv : getTypeVariables() ) { actualParamByVarName.put( tv.getTypeVariableDefinition().getType(), typeParams[i++] ); } IType[] genParamTypes = getParameterTypes(); IType[] paramTypes = new IType[genParamTypes.length]; for( int j = 0; j < genParamTypes.length; j++ ) { paramTypes[j] = TypeSystem.getActualType( genParamTypes[j], actualParamByVarName, false ); } return paramTypes; } public IType getRuntimeType() { TypeVarToTypeMap actualParamByVarName = new TypeVarToTypeMap(); actualParamByVarName = mapTypes( actualParamByVarName, getParameterTypes() ); actualParamByVarName = mapTypes( actualParamByVarName, getReturnType() ); return actualParamByVarName.size() != 0 ? parameterize( this, actualParamByVarName, false ) : this; } //Move Intrinsic type helper up here private TypeVarToTypeMap mapTypes( TypeVarToTypeMap actualParamByVarName, IType... types ) { for( int i = 0; i < types.length; i++ ) { IType type = types[i]; if( type instanceof ITypeVariableType ) { actualParamByVarName.put( (ITypeVariableType)types[i], types[i] ); } if( type instanceof ITypeVariableArrayType ) { mapTypes( actualParamByVarName, type.getComponentType() ); } if( type.isParameterizedType() ) { IType[] parameters = type.getTypeParameters(); mapTypes( actualParamByVarName, parameters ); } if( type instanceof IFunctionType ) { IFunctionType funType = (IFunctionType)type; mapTypes( actualParamByVarName, funType.getReturnType() ); IType[] paramTypes = funType.getParameterTypes(); for( IType paramType : paramTypes ) { mapTypes( actualParamByVarName, paramType ); } } } return actualParamByVarName; } public boolean isDiscarded() { return false; } public void setDiscarded( boolean bDiscarded ) { } public boolean isCompoundType() { return false; } public Set<IType> getCompoundTypeComponents() { return null; } @Override public IExpression[] getDefaultValueExpressions() { if( getMethodInfo() 
instanceof IOptionalParamCapable ) { return ((IOptionalParamCapable)getMethodInfo()).getDefaultValueExpressions(); } return IExpression.EMPTY_ARRAY; } @Override public boolean hasOptionalParams() { for( IExpression o : getDefaultValueExpressions() ) { if( o != null ) { return true; } } return false; } public String[] getParameterNames() { IFeatureInfo miOrCi = getMethodOrConstructorInfo(); if( miOrCi instanceof IOptionalParamCapable ) { return ((IOptionalParamCapable)miOrCi).getParameterNames(); } return new String[0]; } public IGosuClass getOwningParameterizedType() { return _owningParameterizedType; } }
package io.eol.tinkerforge.mqtt.router.mqttproxy;

import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;

import static io.eol.tinkerforge.mqtt.router.mqttproxy.MqttProxyConstants.ENUMERATE_INTERVAL;
import static io.eol.tinkerforge.mqtt.router.mqttproxy.MqttProxyConstants.QUIESCE_TIMEOUT;

import com.tinkerforge.AlreadyConnectedException;
import com.tinkerforge.IPConnection;
import com.tinkerforge.TinkerforgeException;
import io.eol.tinkerforge.mqtt.router.DeviceProxy;
import io.eol.tinkerforge.mqtt.router.enumerator.EnumerationExecutor;
import io.eol.tinkerforge.mqtt.router.mqttproxy.callback.ProxyMqttConnectListener;
import io.eol.tinkerforge.mqtt.router.mqttproxy.callback.ProxyMqttDisconnectListener;
import io.eol.tinkerforge.mqtt.router.mqttproxy.callback.ProxyMqttMessageCallback;
import io.eol.tinkerforge.mqtt.router.util.JsonSupport;
import org.eclipse.paho.client.mqttv3.IMqttActionListener;
import org.eclipse.paho.client.mqttv3.MqttAsyncClient;
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Bridges a Tinkerforge Brick Daemon to an MQTT broker: enumerates attached
 * bricks/bricklets over an {@link IPConnection}, wraps each known device in a
 * {@link DeviceProxy}, and publishes enumeration state as JSON under the
 * {@code tinkerforge/...} topic prefix.
 *
 * <p>Not thread-safe beyond what the underlying clients guarantee; the
 * enumerate listener mutates {@link #deviceProxies} on the IPConnection
 * callback thread.
 */
public class MqttProxy {

    private static final Logger LOG = LoggerFactory.getLogger(MqttProxy.class);

    private final String brickdHost;
    private final int brickdPort;
    private final String brokerHost;
    private final int brokerPort;
    // Update interval (passed through to each DeviceProxy).
    private final int updateInterval;

    private final IPConnection ipcon;
    private final EnumerationExecutor enumerator;

    private final MqttAsyncClient client;
    private final IMqttActionListener mqttConnectListener;
    private final IMqttActionListener mqttDisconnectListener;

    // Live proxies keyed by device UID.
    private Map<String, DeviceProxy> deviceProxies;
    // Proxy implementation classes keyed by Tinkerforge device identifier.
    // NOTE(review): nothing in this class registers entries yet — confirm the
    // registration mechanism with the callers of getDeviceProxyClasses().
    private final Map<Integer, Class<? extends DeviceProxy>> deviceProxyClasses;

    /**
     * Builds the proxy and its (not yet connected) brickd and MQTT clients.
     *
     * @param brickdHost     host of the Tinkerforge Brick Daemon
     * @param brickdPort     port of the Brick Daemon
     * @param brokerHost     MQTT broker host
     * @param brokerPort     MQTT broker port
     * @param updateInterval update interval handed to each DeviceProxy
     * @throws IllegalStateException if the MQTT client cannot be created
     */
    public MqttProxy(String brickdHost, int brickdPort, String brokerHost, int brokerPort, int updateInterval) {
        this.brickdHost = brickdHost;
        this.brickdPort = brickdPort;
        this.brokerHost = brokerHost;
        this.brokerPort = brokerPort;
        this.updateInterval = updateInterval;

        // Device connection
        this.ipcon = new IPConnection();
        addIpconConnectCallback();
        addIpconEnumerateCallback();
        this.enumerator = new EnumerationExecutor();

        // MQTT connection
        String broker = "tcp://" + this.brokerHost + ":" + this.brokerPort;
        String clientId = "BrickProxy_" + MqttClient.generateClientId();
        MemoryPersistence persistence = new MemoryPersistence();
        this.mqttConnectListener = new ProxyMqttConnectListener();
        this.mqttDisconnectListener = new ProxyMqttDisconnectListener();
        try {
            client = new MqttAsyncClient(broker, clientId, persistence);
        } catch (MqttException e) {
            // Bug fix: this used to be logged and swallowed, leaving 'client'
            // null and causing an NPE on the setCallback() call below.
            LOG.error("Could not create MQTT client for {}", broker, e);
            throw new IllegalStateException("Could not create MQTT client for " + broker, e);
        }
        client.setCallback(new ProxyMqttMessageCallback());

        // Device proxies
        deviceProxies = new HashMap<>();
        // Bug fix: deviceProxyClasses was never initialized, so the enumerate
        // callback threw an NPE on deviceProxyClasses.containsKey(...).
        deviceProxyClasses = new HashMap<>();
    }

    public IPConnection getIpcon() {
        return ipcon;
    }

    public Map<String, DeviceProxy> getDeviceProxies() {
        return deviceProxies;
    }

    public void setDeviceProxies(Map<String, DeviceProxy> deviceProxies) {
        this.deviceProxies = deviceProxies;
    }

    public Map<Integer, Class<? extends DeviceProxy>> getDeviceProxyClasses() {
        return deviceProxyClasses;
    }

    /**
     * Connects to the Brick Daemon; exits the process with code -2 on failure
     * (behavior kept from the original implementation).
     */
    public void connectBrickDaemon() {
        try {
            ipcon.connect(brickdHost, brickdPort);
            LOG.info("Brick Daemon Connect: " + new Date());
        } catch (AlreadyConnectedException | IOException e) {
            LOG.error("Could not connect to Brick Daemon at {}:{}", brickdHost, brickdPort, e);
            System.exit(-2);
        }
    }

    /**
     * Connects to the MQTT broker and starts periodic device enumeration;
     * exits the process with code -1 on failure (behavior kept from the
     * original implementation).
     */
    private void connect() {
        try {
            MqttConnectOptions connOpts = new MqttConnectOptions();
            connOpts.setCleanSession(true);
            client.connect(connOpts, mqttConnectListener);
            LOG.info("MqttClient Connect: " + new Date());
        } catch (MqttException e) {
            LOG.error("Could not connect to MQTT broker", e);
            System.exit(-1);
        }
        enumerator.startDeviceEnumeration(ipcon, ENUMERATE_INTERVAL);
    }

    /**
     * Stops enumeration and disconnects from the MQTT broker (quiesce timeout
     * is configured in seconds, Paho expects milliseconds).
     */
    private void shutdown() {
        enumerator.stopDeviceEnumeration();
        try {
            if (this.client.isConnected()) {
                this.client.disconnect(QUIESCE_TIMEOUT * 1000, mqttDisconnectListener);
            }
        } catch (MqttException e) {
            LOG.error("Error while disconnecting MQTT client", e);
        }
    }

    /**
     * Serializes {@code payload} to JSON and publishes it below the global
     * tinkerforge topic prefix. Publishing is skipped when serialization
     * returns null (JsonSupport.json signals failure that way).
     */
    private void publishAsJson(String topic, Map<String, Object> payload, boolean retained) {
        String jsonPayload = JsonSupport.json(payload);
        if (jsonPayload != null) {
            MqttSupport.publishMqttMessage(this.client,
                    MqttProxyConstants.GLOBAL_TOPIC_PREFIX_TINKERFORGE + topic,
                    jsonPayload.getBytes(), retained);
        }
    }

    /**
     * Publishes the enumerate entry for a single changed device plus the
     * aggregated "available" view for all devices sharing its topic prefix.
     * Port of the Python original quoted in the comments below.
     *
     * @param changedUid UID of the device that (dis)connected; must be present
     *                   in {@link #deviceProxies}
     * @param connected  true if the device appeared, false if it disappeared
     */
    private void publishEnumerate(String changedUid, boolean connected) {
        DeviceProxy deviceProxy = deviceProxies.get(changedUid);
        String topicPrefix = deviceProxy.getTopicPrefix();
        String topic;
        if (connected) {
            topic = "enumerate/connected/" + topicPrefix;
        } else {
            topic = "enumerate/disconnected/" + topicPrefix;
        }
        LOG.info(String.format("UID: %s, target topic: %s", changedUid, topic));

        // self.publish_as_json(topic, device_proxy.get_enumerate_entry())
        this.publishAsJson(topic, deviceProxy.getEnumerateEntry(), false);

        // for uid, device_proxy in self.device_proxies.items():
        //     if not connected and uid == changed_uid or device_proxy.TOPIC_PREFIX != topic_prefix:
        //         continue
        //     enumerate_entries.append(device_proxy.get_enumerate_entry())
        Map<String, Object> enumerateEntries = new LinkedHashMap<>();
        for (Map.Entry<String, DeviceProxy> entry : deviceProxies.entrySet()) {
            deviceProxy = entry.getValue();
            if (!connected && entry.getKey().equals(changedUid)
                    || !deviceProxy.getTopicPrefix().equals(topicPrefix)) {
                continue;
            }
            // TODO no, build a more complex map of maps!
            enumerateEntries.putAll(deviceProxy.getEnumerateEntry());
        }

        // self.publish_as_json('enumerate/available/' + topic_prefix, enumerate_entries, retain=True)
        this.publishAsJson("enumerate/available/" + topicPrefix, enumerateEntries, true);
    }

    /**
     * Adds a ConnectedListener to the Tinkerforge IPConnection that logs the
     * connect reason and triggers a broadcast enumerate.
     */
    private void addIpconConnectCallback() {
        ipcon.addConnectedListener((short connectReason) -> {
            switch (connectReason) {
                case IPConnection.CONNECT_REASON_REQUEST:
                    LOG.info("Connected by request");
                    break;
                case IPConnection.CONNECT_REASON_AUTO_RECONNECT:
                    LOG.info("Auto-Reconnect");
                    break;
            }

            // authenticate
            // try {
            //     ipcon.authenticate(SECRET);
            //     LOG.info("Authentication succeeded");
            // } catch (TinkerforgeException e) {
            //     LOG.info("Could not authenticate: " + e.getMessage());
            //     return;
            // }

            // enumerate — best effort, but don't swallow silently
            try {
                ipcon.enumerate();
            } catch (TinkerforgeException e) {
                LOG.warn("Enumerate request failed: {}", e.getMessage(), e);
            }
        });
    }

    /**
     * Adds an EnumerateListener that keeps {@link #deviceProxies} in sync:
     * a DISCONNECTED enumeration removes and destroys the proxy; any other
     * enumeration type (available/connected) creates a proxy for devices whose
     * identifier is registered in {@link #deviceProxyClasses}.
     */
    private void addIpconEnumerateCallback() {
        ipcon.addEnumerateListener((String uid, String connectedUid, char position,
                                    short[] hardwareVersion, short[] firmwareVersion,
                                    int deviceIdentifier, short enumerationType) -> {
            LOG.info(String.format("UID: %s, Enumeration Type: %s", uid, enumerationType));

            if (enumerationType == IPConnection.ENUMERATION_TYPE_DISCONNECTED) {
                if (deviceProxies.containsKey(uid)) {
                    publishEnumerate(uid, false);
                    deviceProxies.get(uid).destroy();
                    deviceProxies.remove(uid);
                }
            } else if (!deviceProxies.containsKey(uid) && deviceProxyClasses.containsKey(deviceIdentifier)) {
                // Bug fix: proxy creation used to be nested inside the
                // DISCONNECTED branch above, so a proxy could only ever be
                // created in reaction to a disconnect event. Create it on
                // available/connected enumerations instead.
                DeviceProxy deviceProxy = new DeviceProxy(uid, connectedUid, String.valueOf(position),
                        formatVersion(hardwareVersion), formatVersion(firmwareVersion),
                        ipcon, client, updateInterval);
                deviceProxies.put(uid, deviceProxy);
                publishEnumerate(uid, true);
            }
        });
    }

    /**
     * Formats a Tinkerforge version triple as "major.minor.revision".
     * (Bug fix: the original called toString() on a short[], which yields the
     * array's identity hash, e.g. "[S@1a2b3c", not a version string.)
     */
    private static String formatVersion(short[] version) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < version.length; i++) {
            if (i > 0) {
                sb.append('.');
            }
            sb.append(version[i]);
        }
        return sb.toString();
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.history.core.changes;

import org.junit.Test;

import java.util.List;

/**
 * Verifies that the change list collects exactly the change sets relevant to a
 * given path, most recent first — including tricky cases around moves,
 * renames, deletions/restorations and labels.
 */
public class ChangeListCollectingChangesTest extends ChangeListTestCase {
  @Test
  public void testChangesForFile() { // renamed from "tesChangesForFile" (typo)
    applyAndAdd(cs("1", new CreateFileChange(1, "file", null, -1, false)));
    applyAndAdd(cs("2", new ContentChange("file", null, -1)));

    List<Change> result = getChangesFor("file");
    assertEquals(2, result.size());
    assertEquals("2", result.get(0).getName());
    assertEquals("1", result.get(1).getName());
  }

  @Test
  public void testSeveralChangesForSameFileInOneChangeSet() {
    applyAndAdd(cs(new CreateFileChange(1, "file", null, -1, false),
                   new ContentChange("file", null, -1)));
    assertEquals(1, getChangesFor("file").size());
  }

  @Test
  public void testChangeSetsWithChangesForAnotherFile() {
    applyAndAdd(cs(new CreateFileChange(1, "file1", null, -1, false),
                   new CreateFileChange(2, "file2", null, -1, false)));
    assertEquals(1, getChangesFor("file1").size());
  }

  @Test
  public void testDoesNotIncludeNonrelativeChangeSet() {
    applyAndAdd(cs("1", new CreateFileChange(1, "file1", null, -1, false)));
    applyAndAdd(cs("2", new CreateFileChange(2, "file2", null, -1, false)));
    applyAndAdd(cs("3", new ContentChange("file1", null, -1)));

    List<Change> result = getChangesFor("file1");
    assertEquals(2, result.size());
    assertEquals("3", result.get(0).getName());
    assertEquals("1", result.get(1).getName());
  }

  @Test
  public void testChangeSetsForDirectories() {
    applyAndAdd(cs(new CreateDirectoryChange(1, "dir")));
    applyAndAdd(cs(new CreateFileChange(2, "dir/file", null, -1, false)));
    assertEquals(2, getChangesFor("dir").size());
  }

  @Test
  public void testChangeSetsForDirectoriesWithFilesMovedAround() {
    applyAndAdd(cs("1", new CreateDirectoryChange(1, "dir1"), new CreateDirectoryChange(2, "dir2")));
    applyAndAdd(cs("2", new CreateFileChange(3, "dir1/file", null, -1, false)));
    applyAndAdd(cs("3", new MoveChange("dir1/file", "dir2")));

    List<Change> cc1 = getChangesFor("dir1");
    List<Change> cc2 = getChangesFor("dir2");

    assertEquals(3, cc1.size());
    assertEquals("3", cc1.get(0).getName());
    assertEquals("2", cc1.get(1).getName());
    assertEquals("1", cc1.get(2).getName());

    assertEquals(2, cc2.size());
    assertEquals("3", cc2.get(0).getName());
    assertEquals("1", cc2.get(1).getName());
  }

  @Test
  public void testChangeSetsForMovedFiles() {
    applyAndAdd(cs(new CreateDirectoryChange(1, "dir1"), new CreateDirectoryChange(2, "dir2")));
    applyAndAdd(cs(new CreateFileChange(3, "dir1/file", null, -1, false)));
    applyAndAdd(cs(new MoveChange("dir1/file", "dir2")));
    assertEquals(2, getChangesFor("dir2/file").size());
  }

  @Test
  public void testChangingParentChangesItsChildren() {
    applyAndAdd(cs(new CreateDirectoryChange(1, "d")));
    applyAndAdd(cs(new CreateFileChange(2, "d/file", null, -1, false)));
    assertEquals(1, getChangesFor("d/file").size());

    applyAndAdd(cs(new RenameChange("d", "dd")));
    assertEquals(2, getChangesFor("dd/file").size());
  }

  @Test
  public void testChangingPreviousParentDoesNotChangeItsChildren() {
    applyAndAdd(cs(new CreateDirectoryChange(1, "d1")));
    applyAndAdd(cs(new CreateDirectoryChange(2, "d2")));
    applyAndAdd(cs(new CreateFileChange(3, "d1/file", null, -1, false)));
    applyAndAdd(cs(new MoveChange("d1/file", "d2")));
    assertEquals(2, getChangesFor("d2/file").size());

    applyAndAdd(cs(new RenameChange("d1", "d11")));
    assertEquals(2, getChangesFor("d2/file").size());
  }

  @Test
  public void testDoesNotIncludePreviousParentChanges() {
    applyAndAdd(cs(new CreateDirectoryChange(1, "d")));
    applyAndAdd(cs(new RenameChange("d", "dd")));
    applyAndAdd(cs(new CreateFileChange(2, "dd/f", null, -1, false)));
    assertEquals(1, getChangesFor("dd/f").size());
  }

  @Test
  public void testDoesNotIncludePreviousChangesForNewParent() {
    applyAndAdd(cs(new CreateFileChange(1, "file", null, -1, false)));
    applyAndAdd(cs(new CreateDirectoryChange(2, "dir")));
    applyAndAdd(cs(new MoveChange("file", "dir")));
    assertEquals(2, getChangesFor("dir/file").size());
  }

  @Test
  public void testDoesNotIncludePreviousLabels() {
    applyAndAdd(cs(new PutLabelChange(null, -1)));
    applyAndAdd(cs(new CreateFileChange(1, "file", null, -1, false)));
    assertEquals(1, getChangesFor("file").size());
  }

  @Test
  public void testChangesForComplexMovingCase() {
    applyAndAdd(cs(new CreateDirectoryChange(1, "d1"),
                   new CreateFileChange(2, "d1/file", null, -1, false),
                   new CreateDirectoryChange(3, "d1/d11"),
                   new CreateDirectoryChange(4, "d1/d12"),
                   new CreateDirectoryChange(5, "d2")));
    applyAndAdd(cs(new MoveChange("d1/file", "d1/d11")));
    applyAndAdd(cs(new MoveChange("d1/d11/file", "d1/d12")));

    assertEquals(3, getChangesFor("d1").size());
    assertEquals(3, getChangesFor("d1/d12/file").size());
    assertEquals(3, getChangesFor("d1/d11").size());
    assertEquals(2, getChangesFor("d1/d12").size());
    assertEquals(1, getChangesFor("d2").size());

    applyAndAdd(cs(new MoveChange("d1/d12", "d2")));

    assertEquals(4, getChangesFor("d1").size());
    assertEquals(4, getChangesFor("d2/d12/file").size());
    assertEquals(2, getChangesFor("d2").size());
    assertEquals(3, getChangesFor("d2/d12").size());
  }

  @Test
  public void testChangesForFileMovedIntoCreatedDir() {
    Change cs1 = cs(new CreateFileChange(1, "file", null, -1, false));
    Change cs2 = cs(new CreateDirectoryChange(2, "dir"));
    Change cs3 = cs(new MoveChange("file", "dir"));
    applyAndAdd(cs1, cs2, cs3);

    assertEquals(array(cs3, cs1), getChangesFor("dir/file"));
    assertEquals(array(cs3, cs2), getChangesFor("dir"));
  }

  @Test
  public void testChangesForRestoreFile() {
    Change cs1 = cs(new CreateFileChange(1, "file", null, -1, false));
    Change cs2 = cs(new ContentChange("file", null, -1));
    Change cs3 = cs(new DeleteChange("file"));
    Change cs4 = cs(new CreateFileChange(1, "file", null, -1, false));
    Change cs5 = cs(new ContentChange("file", null, -1));
    applyAndAdd(cs1, cs2, cs3, cs4, cs5);

    assertEquals(array(cs5, cs4, cs2, cs1), getChangesFor("file"));
  }

  @Test
  public void testChangesForFileRestoredSeveralTimes() {
    Change cs1 = cs(new CreateFileChange(1, "file", null, -1, false));
    Change cs2 = cs(new DeleteChange("file"));
    Change cs3 = cs(new CreateFileChange(1, "file", null, -1, false));
    Change cs4 = cs(new DeleteChange("file"));
    Change cs5 = cs(new CreateFileChange(1, "file", null, -1, false));
    applyAndAdd(cs1, cs2, cs3, cs4, cs5);

    assertEquals(array(cs5, cs3, cs1), getChangesFor("file"));
  }

  @Test
  public void testChangesForRestoredDirectory() {
    Change cs1 = cs(new CreateDirectoryChange(1, "dir"));
    Change cs2 = cs(new DeleteChange("dir"));
    Change cs3 = cs(new CreateDirectoryChange(1, "dir"));
    applyAndAdd(cs1, cs2, cs3);

    assertEquals(array(cs3, cs1), getChangesFor("dir"));
  }

  @Test
  public void testChangesForRestoredDirectoryWithRestoredChildren() {
    Change cs1 = cs(new CreateDirectoryChange(1, "dir"));
    Change cs2 = cs(new CreateFileChange(2, "dir/file", null, -1, false));
    Change cs3 = cs(new DeleteChange("dir"));
    Change cs4 = cs(new CreateDirectoryChange(1, "dir"));
    Change cs5 = cs(new CreateFileChange(2, "dir/file", null, -1, false));
    applyAndAdd(cs1, cs2, cs3, cs4, cs5);

    assertEquals(array(cs5, cs4, cs2, cs1), getChangesFor("dir"));
    assertEquals(array(cs5, cs2), getChangesFor("dir/file"));
  }

  @Test
  public void testChangesForFileIfThereWereSomeDeletedFilesBeforeItsCreation() {
    Change cs1 = cs(new CreateFileChange(1, "f1", null, -1, false));
    Change cs2 = cs(new DeleteChange("f1"));
    Change cs3 = cs(new CreateFileChange(2, "f2", null, -1, false));
    applyAndAdd(cs1, cs2, cs3);

    assertEquals(array(cs3), getChangesFor("f2"));
  }

  @Test
  public void testDoesNotIncludeChangeSetIfFileWasRestoredAndDeletedInOneChangeSet() {
    Change cs1 = cs(new CreateFileChange(1, "f", null, -1, false));
    Change cs2 = cs(new DeleteChange("f"));
    Change cs3 = cs(new CreateFileChange(1, "f", null, -1, false), new DeleteChange("f"));
    Change cs4 = cs(new CreateFileChange(1, "f", null, -1, false));
    applyAndAdd(cs1, cs2, cs3, cs4);

    assertEquals(array(cs4, cs1), getChangesFor("f"));
  }

  @Test
  public void testIncludingLabelsChanges() {
    Change cs1 = cs(new CreateFileChange(1, "f1", null, -1, false));
    Change cs2 = cs(new CreateFileChange(2, "f2", null, -1, false));
    Change cs3 = new PutEntryLabelChange("f1", "label", -1);
    Change cs4 = new PutLabelChange("label", -1);
    applyAndAdd(cs1, cs2, cs3, cs4);

    assertEquals(array(cs4, cs3, cs1), getChangesFor("f1"));
    assertEquals(array(cs4, cs2), getChangesFor("f2"));
  }

  @Test
  public void testIncludingChangeSetsWithLabelsInside() {
    Change cs1 = cs(new CreateFileChange(1, "f", null, -1, false));
    Change cs2 = cs(new PutLabelChange("label", -1));
    applyAndAdd(cs1, cs2);

    assertEquals(array(cs2, cs1), getChangesFor("f"));
  }

  @Test
  public void testDoesNotSplitChangeSetsWithLabelsInside() {
    Change cs1 = cs(new CreateFileChange(1, "f", null, -1, false));
    Change cs2 = cs(new ContentChange("f", null, -1),
                    new PutLabelChange("label", -1),
                    new ContentChange("f", null, -1));
    applyAndAdd(cs1, cs2);

    assertEquals(array(cs2, cs1), getChangesFor("f"));
  }

  @Test
  public void testDoesNotIncludeChangesMadeBetweenDeletionAndRestore() {
    Change cs1 = cs(new CreateFileChange(1, "file", null, -1, false));
    Change cs2 = cs(new DeleteChange("file"));
    Change cs3 = cs(new PutLabelChange(null, -1));
    Change cs4 = cs(new CreateFileChange(1, "file", null, -1, false));
    applyAndAdd(cs1, cs2, cs3, cs4);

    assertEquals(array(cs4, cs1), getChangesFor("file"));
  }

  @Test
  public void testDoesNotIgnoreDeletionOfChildren() {
    Change cs1 = cs(new CreateDirectoryChange(1, "dir"));
    Change cs2 = cs(new CreateFileChange(2, "dir/file", null, -1, false));
    Change cs3 = cs(new DeleteChange("dir/file"));
    applyAndAdd(cs1, cs2, cs3);

    assertEquals(array(cs3, cs2, cs1), getChangesFor("dir"));
  }

  @Test // renamed: "...WhenTHereWere..." -> "...WhenThereWere..." (typo)
  public void testChangesForRestoredFileWhenThereWereDeletionOfParentAfterDeletionOfTheFile() {
    Change cs1 = cs(new CreateDirectoryChange(1, "dir1"));
    Change cs2 = cs(new CreateDirectoryChange(2, "dir1/dir2"));
    Change cs3 = cs(new CreateFileChange(3, "dir1/dir2/file", null, -1, false));
    Change cs4 = cs(new DeleteChange("dir1/dir2/file"));
    Change cs5 = cs(new DeleteChange("dir1/dir2"));
    Change cs6 = cs(new DeleteChange("dir1"));
    Change cs7 = cs(new CreateDirectoryChange(1, "dir1"),
                    new CreateDirectoryChange(2, "dir1/dir2"),
                    new CreateFileChange(3, "dir1/dir2/file", null, -1, false));
    applyAndAdd(cs1, cs2, cs3, cs4, cs5, cs6, cs7);

    assertEquals(array(cs7, cs3), getChangesFor("dir1/dir2/file"));
    assertEquals(array(cs7, cs4, cs3, cs2), getChangesFor("dir1/dir2"));
    assertEquals(array(cs7, cs5, cs4, cs3, cs2, cs1), getChangesFor("dir1"));
  }

  @Test
  public void testDoesNotIncludeChangesIfFileAndItsParentWasDeletedAndRestoredInOneChangeset() {
    Change cs1 = cs(new CreateDirectoryChange(1, "dir"), new CreateFileChange(2, "dir/file", null, -1, false));
    Change cs2 = cs(new DeleteChange("dir/file"), new DeleteChange("dir"));
    Change cs3 = cs(new CreateDirectoryChange(1, "dir"),
                    new CreateFileChange(2, "dir/file", null, -1, false),
                    new DeleteChange("dir/file"),
                    new DeleteChange("dir"));
    Change cs4 = cs(new CreateDirectoryChange(1, "dir"), new CreateFileChange(2, "dir/file", null, -1, false));
    applyAndAdd(cs1, cs2, cs3, cs4);

    assertEquals(array(cs4, cs1), getChangesFor("dir/file"));
    assertEquals(array(cs4, cs1), getChangesFor("dir"));
  }

  // Shortcut: collect from the change list under test ('cl') the change sets
  // relevant to 'path' under root 'r' (both provided by ChangeListTestCase).
  private List<Change> getChangesFor(String path) {
    return cl.getChangesFor(r, path);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.connectors.kafka.table;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.streaming.connectors.kafka.config.StartupMode;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.connector.format.EncodingFormat;
import org.apache.flink.table.connector.format.Format;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.factories.DeserializationFormatFactory;
import org.apache.flink.table.factories.DynamicTableSinkFactory;
import org.apache.flink.table.factories.DynamicTableSourceFactory;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.table.factories.SerializationFormatFactory;
import org.apache.flink.table.types.DataType;
import org.apache.flink.types.RowKind;

import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;

import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.KEY_FIELDS;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.KEY_FIELDS_PREFIX;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.KEY_FORMAT;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.PROPS_BOOTSTRAP_SERVERS;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.SINK_BUFFER_FLUSH_INTERVAL;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.SINK_BUFFER_FLUSH_MAX_ROWS;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.SINK_PARALLELISM;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.TOPIC;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.TRANSACTIONAL_ID_PREFIX;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.VALUE_FIELDS_INCLUDE;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.VALUE_FORMAT;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.PROPERTIES_PREFIX;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.autoCompleteSchemaRegistrySubject;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.createKeyFormatProjection;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.createValueFormatProjection;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.getKafkaProperties;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.getSourceTopicPattern;
import static org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.getSourceTopics;

/** Upsert-Kafka factory. */
public class UpsertKafkaDynamicTableFactory
        implements DynamicTableSourceFactory, DynamicTableSinkFactory {

    /** Connector identifier used in the {@code 'connector'} option. */
    public static final String IDENTIFIER = "upsert-kafka";

    @Override
    public String factoryIdentifier() {
        return IDENTIFIER;
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        // Every 'upsert-kafka' table must declare bootstrap servers, a topic,
        // and both a key and a value format.
        final Set<ConfigOption<?>> required = new HashSet<>();
        required.add(PROPS_BOOTSTRAP_SERVERS);
        required.add(TOPIC);
        required.add(KEY_FORMAT);
        required.add(VALUE_FORMAT);
        return required;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        final Set<ConfigOption<?>> optional = new HashSet<>();
        optional.add(KEY_FIELDS_PREFIX);
        optional.add(VALUE_FIELDS_INCLUDE);
        optional.add(SINK_PARALLELISM);
        optional.add(SINK_BUFFER_FLUSH_INTERVAL);
        optional.add(SINK_BUFFER_FLUSH_MAX_ROWS);
        return optional;
    }

    @Override
    public DynamicTableSource createDynamicTableSource(Context context) {
        final FactoryUtil.TableFactoryHelper helper =
                FactoryUtil.createTableFactoryHelper(this, context);
        final ReadableConfig tableOptions = helper.getOptions();

        final DecodingFormat<DeserializationSchema<RowData>> keyFormat =
                helper.discoverDecodingFormat(DeserializationFormatFactory.class, KEY_FORMAT);
        final DecodingFormat<DeserializationSchema<RowData>> valueFormat =
                helper.discoverDecodingFormat(DeserializationFormatFactory.class, VALUE_FORMAT);

        // Validate the option data type.
        helper.validateExcept(PROPERTIES_PREFIX);
        validateSource(tableOptions, keyFormat, valueFormat, context.getPrimaryKeyIndexes());

        final Tuple2<int[], int[]> projections =
                createKeyValueProjections(context.getCatalogTable());
        final String keyPrefix = tableOptions.getOptional(KEY_FIELDS_PREFIX).orElse(null);
        final Properties kafkaProperties =
                getKafkaProperties(context.getCatalogTable().getOptions());

        // always use earliest to keep data integrity
        final StartupMode startupMode = StartupMode.EARLIEST;

        return new KafkaDynamicSource(
                context.getPhysicalRowDataType(),
                keyFormat,
                new DecodingFormatWrapper(valueFormat),
                projections.f0,
                projections.f1,
                keyPrefix,
                getSourceTopics(tableOptions),
                getSourceTopicPattern(tableOptions),
                kafkaProperties,
                startupMode,
                Collections.emptyMap(),
                0,
                true,
                context.getObjectIdentifier().asSummaryString());
    }

    @Override
    public DynamicTableSink createDynamicTableSink(Context context) {
        final FactoryUtil.TableFactoryHelper helper =
                FactoryUtil.createTableFactoryHelper(
                        this, autoCompleteSchemaRegistrySubject(context));
        final ReadableConfig tableOptions = helper.getOptions();

        final EncodingFormat<SerializationSchema<RowData>> keyFormat =
                helper.discoverEncodingFormat(SerializationFormatFactory.class, KEY_FORMAT);
        final EncodingFormat<SerializationSchema<RowData>> valueFormat =
                helper.discoverEncodingFormat(SerializationFormatFactory.class, VALUE_FORMAT);

        // Validate the option data type.
        helper.validateExcept(PROPERTIES_PREFIX);
        validateSink(tableOptions, keyFormat, valueFormat, context.getPrimaryKeyIndexes());

        final Tuple2<int[], int[]> projections =
                createKeyValueProjections(context.getCatalogTable());
        final String keyPrefix = tableOptions.getOptional(KEY_FIELDS_PREFIX).orElse(null);
        final Properties kafkaProperties =
                getKafkaProperties(context.getCatalogTable().getOptions());

        final Integer parallelism = tableOptions.get(SINK_PARALLELISM);
        final int flushMaxRows = tableOptions.get(SINK_BUFFER_FLUSH_MAX_ROWS);
        final Duration flushInterval = tableOptions.get(SINK_BUFFER_FLUSH_INTERVAL);
        final SinkBufferFlushMode flushMode =
                new SinkBufferFlushMode(flushMaxRows, flushInterval.toMillis());

        // use {@link org.apache.kafka.clients.producer.internals.DefaultPartitioner}.
        // it will use hash partition if key is set else in round-robin behaviour.
        return new KafkaDynamicSink(
                context.getPhysicalRowDataType(),
                context.getPhysicalRowDataType(),
                keyFormat,
                new EncodingFormatWrapper(valueFormat),
                projections.f0,
                projections.f1,
                keyPrefix,
                tableOptions.get(TOPIC).get(0),
                kafkaProperties,
                null,
                DeliveryGuarantee.AT_LEAST_ONCE,
                true,
                flushMode,
                parallelism,
                tableOptions.get(TRANSACTIONAL_ID_PREFIX));
    }

    /**
     * Derives the key and value field projections from the table's resolved schema. The primary
     * key columns are used as the message key fields.
     */
    private Tuple2<int[], int[]> createKeyValueProjections(ResolvedCatalogTable catalogTable) {
        final ResolvedSchema schema = catalogTable.getResolvedSchema();
        // primary key should validated earlier
        final List<String> primaryKeyColumns = schema.getPrimaryKey().get().getColumns();
        final DataType physicalDataType = schema.toPhysicalRowDataType();

        final Configuration formatOptions = Configuration.fromMap(catalogTable.getOptions());
        // upsert-kafka will set key.fields to primary key fields by default
        formatOptions.set(KEY_FIELDS, primaryKeyColumns);

        return Tuple2.of(
                createKeyFormatProjection(formatOptions, physicalDataType),
                createValueFormatProjection(formatOptions, physicalDataType));
    }

    // --------------------------------------------------------------------------------------------
    // Validation
    // --------------------------------------------------------------------------------------------

    private static void validateSource(
            ReadableConfig tableOptions,
            Format keyFormat,
            Format valueFormat,
            int[] primaryKeyIndexes) {
        validateTopic(tableOptions);
        validateFormat(keyFormat, valueFormat, tableOptions);
        validatePKConstraints(primaryKeyIndexes);
    }

    private static void validateSink(
            ReadableConfig tableOptions,
            Format keyFormat,
            Format valueFormat,
            int[] primaryKeyIndexes) {
        validateTopic(tableOptions);
        validateFormat(keyFormat, valueFormat, tableOptions);
        validatePKConstraints(primaryKeyIndexes);
        validateSinkBufferFlush(tableOptions);
    }

    /** Only a single topic is supported by the 'upsert-kafka' connector. */
    private static void validateTopic(ReadableConfig tableOptions) {
        if (tableOptions.get(TOPIC).size() > 1) {
            throw new ValidationException(
                    "The 'upsert-kafka' connector doesn't support topic list now. "
                            + "Please use single topic as the value of the parameter 'topic'.");
        }
    }

    /** Both key and value formats must be insert-only (e.g. csv, json, avro). */
    private static void validateFormat(
            Format keyFormat, Format valueFormat, ReadableConfig tableOptions) {
        if (!keyFormat.getChangelogMode().containsOnly(RowKind.INSERT)) {
            final String identifier = tableOptions.get(KEY_FORMAT);
            throw new ValidationException(
                    String.format(
                            "'upsert-kafka' connector doesn't support '%s' as key format, "
                                    + "because '%s' is not in insert-only mode.",
                            identifier, identifier));
        }
        if (!valueFormat.getChangelogMode().containsOnly(RowKind.INSERT)) {
            final String identifier = tableOptions.get(VALUE_FORMAT);
            throw new ValidationException(
                    String.format(
                            "'upsert-kafka' connector doesn't support '%s' as value format, "
                                    + "because '%s' is not in insert-only mode.",
                            identifier, identifier));
        }
    }

    /** An 'upsert-kafka' table must declare at least one primary key column. */
    private static void validatePKConstraints(int[] primaryKeyIndexes) {
        if (primaryKeyIndexes.length == 0) {
            throw new ValidationException(
                    "'upsert-kafka' tables require to define a PRIMARY KEY constraint. "
                            + "The PRIMARY KEY specifies which columns should be read from or write to the Kafka message key. "
                            + "The PRIMARY KEY also defines records in the 'upsert-kafka' table should update or delete on which keys.");
        }
    }

    /**
     * Buffer flushing is valid only when both the max-rows and the interval options are enabled
     * together or disabled together; setting exactly one of them is rejected.
     */
    private static void validateSinkBufferFlush(ReadableConfig tableOptions) {
        final boolean rowsEnabled = tableOptions.get(SINK_BUFFER_FLUSH_MAX_ROWS) > 0;
        final boolean intervalEnabled =
                tableOptions.get(SINK_BUFFER_FLUSH_INTERVAL).toMillis() > 0;
        if (rowsEnabled == intervalEnabled) {
            // both enabled (flush on) or both disabled (flush off) -- valid
            return;
        }
        // exactly one of them is set, which is not allowed
        throw new ValidationException(
                String.format(
                        "'%s' and '%s' must be set to be greater than zero together to enable sink buffer flushing.",
                        SINK_BUFFER_FLUSH_MAX_ROWS.key(), SINK_BUFFER_FLUSH_INTERVAL.key()));
    }

    // --------------------------------------------------------------------------------------------
    // Format wrapper
    // --------------------------------------------------------------------------------------------

    /**
     * It is used to wrap the decoding format and expose the desired changelog mode. It's only works
     * for insert-only format.
     */
    protected static class DecodingFormatWrapper
            implements DecodingFormat<DeserializationSchema<RowData>> {

        private final DecodingFormat<DeserializationSchema<RowData>> innerDecodingFormat;

        // The source reports UPDATE_AFTER/DELETE so downstream operators treat the
        // insert-only inner format as an upsert stream.
        private static final ChangelogMode SOURCE_CHANGELOG_MODE =
                ChangelogMode.newBuilder()
                        .addContainedKind(RowKind.UPDATE_AFTER)
                        .addContainedKind(RowKind.DELETE)
                        .build();

        public DecodingFormatWrapper(
                DecodingFormat<DeserializationSchema<RowData>> innerDecodingFormat) {
            this.innerDecodingFormat = innerDecodingFormat;
        }

        @Override
        public DeserializationSchema<RowData> createRuntimeDecoder(
                DynamicTableSource.Context context, DataType producedDataType) {
            return innerDecodingFormat.createRuntimeDecoder(context, producedDataType);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return SOURCE_CHANGELOG_MODE;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || obj.getClass() != getClass()) {
                return false;
            }
            final DecodingFormatWrapper other = (DecodingFormatWrapper) obj;
            return Objects.equals(innerDecodingFormat, other.innerDecodingFormat);
        }

        @Override
        public int hashCode() {
            return Objects.hash(innerDecodingFormat);
        }
    }

    /**
     * It is used to wrap the encoding format and expose the desired changelog mode. It's only works
     * for insert-only format.
     */
    protected static class EncodingFormatWrapper
            implements EncodingFormat<SerializationSchema<RowData>> {

        private final EncodingFormat<SerializationSchema<RowData>> innerEncodingFormat;

        // The sink accepts INSERT/UPDATE_AFTER/DELETE; UPDATE_BEFORE is dropped by
        // the upsert semantics.
        public static final ChangelogMode SINK_CHANGELOG_MODE =
                ChangelogMode.newBuilder()
                        .addContainedKind(RowKind.INSERT)
                        .addContainedKind(RowKind.UPDATE_AFTER)
                        .addContainedKind(RowKind.DELETE)
                        .build();

        public EncodingFormatWrapper(
                EncodingFormat<SerializationSchema<RowData>> innerEncodingFormat) {
            this.innerEncodingFormat = innerEncodingFormat;
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType consumedDataType) {
            return innerEncodingFormat.createRuntimeEncoder(context, consumedDataType);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return SINK_CHANGELOG_MODE;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || obj.getClass() != getClass()) {
                return false;
            }
            final EncodingFormatWrapper other = (EncodingFormatWrapper) obj;
            return Objects.equals(innerEncodingFormat, other.innerEncodingFormat);
        }

        @Override
        public int hashCode() {
            return Objects.hash(innerEncodingFormat);
        }
    }
}
/*
 * Copyright (C) 2013 Jerzy Chalupski
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.getbase.android.forger.tests;

import com.getbase.android.forger.KotlinDataClass;
import com.google.common.collect.ImmutableList;

import org.chalup.microorm.annotations.Column;
import org.chalup.microorm.annotations.Embedded;
import org.chalup.thneed.ModelGraph;
import org.chalup.thneed.PolymorphicType;
import org.chalup.thneed.models.ContentProviderModel;
import org.chalup.thneed.models.PojoModel;

import android.content.ContentResolver;
import android.net.Uri;
import android.provider.BaseColumns;

import java.util.UUID;

/**
 * Fixture model classes and the model graph used by the forger tests.
 *
 * <p>Utility holder: not instantiable.
 */
public final class TestModels {

  private TestModels() {
    // no instances
  }

  /** Common columns shared by every persisted model. */
  public static class BaseModel {
    @Column(value = BaseColumns._ID, readonly = true)
    public long _id;

    @Column("id")
    public long id;

    @Column(value = "updated_at", treatNullAsDefault = true)
    public String updated_at;
  }

  public static class Deal extends BaseModel {
    @Column("contact_id")
    public long contactId;

    @Column("user_id")
    public long userId;

    @Column("name")
    public String name;
  }

  public static class User extends BaseModel {
    @Column("email")
    public String email;

    @Column("is_admin")
    public boolean admin;
  }

  public static class Contact extends BaseModel {
    @Column("contact_id")
    public Long contactId;

    @Column("user_id")
    public long userId;
  }

  public static class Lead extends BaseModel {
  }

  public static class ContactData extends BaseModel {
    @Column("lead_id")
    public long leadId;
  }

  /** Join model linking deals with contacts. */
  public static class DealContact extends BaseModel {
    @Column("contact_id")
    public long contactId;

    @Column("deal_id")
    public long dealId;
  }

  /** Polymorphic reference via the "notable_*" column pair. */
  public static class Note extends BaseModel {
    @Column("notable_type")
    public String notableType;

    @Column("notable_id")
    public long notableId;
  }

  /** Polymorphic reference via the "callable_*" column pair. */
  public static class Call extends BaseModel {
    @Column("callable_type")
    public String callableType;

    @Column("callable_id")
    public long callableId;
  }

  public static class Tag extends BaseModel {
    @Column("value")
    public String value;
  }

  /** Join model linking tags with taggable models, polymorphically. */
  public static class Tagging extends BaseModel {
    @Column("taggable_type")
    public String taggableType;

    @Column("taggable_id")
    public long taggableId;

    @Column("tag_id")
    public long tagId;

    @Column("user_id")
    public long userId;
  }

  public static class SocialInformation {
    @Column("facebook")
    public String facebook;
  }

  /** Model with an @Embedded component. */
  public static class PersonalInfo extends BaseModel {
    @Column("name")
    public String name;

    @Embedded
    public SocialInformation socialInformation;
  }

  /** Subclass adding a column on top of an embedded-bearing parent. */
  public static class ExtendedPersonalInfo extends PersonalInfo {
    @Column("surname")
    public String surname;
  }

  public static class ExtendedSocialInformation extends SocialInformation {
    @Column("linkedin")
    public String linkedin;
  }

  /** Model embedding a subclass of an embeddable type. */
  public static class PersonalInfoV2 extends BaseModel {
    @Column("name")
    public String name;

    @Embedded
    public ExtendedSocialInformation socialInformation;
  }

  public static class PhoneNumber {
    @Column("extension")
    public String extension;

    @Column("country_code")
    public String countryCode;

    @Column("national_number")
    public String nationalNumber;
  }

  /** Embeddable type that itself embeds another type. */
  public static class ContactInfo {
    @Embedded
    public PhoneNumber phoneNumber;

    @Column("email")
    public String email;
  }

  /** Model with nested @Embedded components. */
  public static class PersonalInfoV3 extends BaseModel {
    @Embedded
    public ContactInfo contactInfo;

    @Column("name")
    public String name;

    @Embedded
    public ExtendedSocialInformation socialInformation;
  }

  /** Negative fixture: cannot be instantiated reflectively without args. */
  public static class ClassWithoutDefaultConstructor extends BaseModel {
    public ClassWithoutDefaultConstructor(Object unused) {
    }
  }

  /** Negative fixture: default constructor exists but is not public. */
  public static class ClassWithoutPublicDefaultConstructor extends BaseModel {
    private ClassWithoutPublicDefaultConstructor() {
    }
  }

  /** Negative fixture: field type the forger does not know how to fill. */
  public static class ClassWithNonBasicFieldType extends BaseModel {
    @Column("uuid")
    public UUID uuid;
  }

  /** Marker interface combining the thneed model capabilities used in tests. */
  public interface TestModel extends ContentProviderModel, PojoModel {
  }

  /** Simple {@link TestModel} backed by a model class and a derived content Uri. */
  public static class BaseTestModel implements TestModel {
    private final Class<?> mKlass;

    public BaseTestModel(Class<?> klass) {
      mKlass = klass;
    }

    @Override
    public Uri getUri() {
      return buildUriFor(mKlass);
    }

    @Override
    public Class<?> getModelClass() {
      return mKlass;
    }
  }

  /** {@link TestModel} that additionally participates in polymorphic relationships. */
  public static class PolyModel extends BaseTestModel implements PolymorphicType<PolyModel> {
    private final String mModelName;

    public PolyModel(Class<?> klass, String modelName) {
      super(klass);
      mModelName = modelName;
    }

    @Override
    public PolyModel self() {
      return this;
    }

    @Override
    public String getModelName() {
      return mModelName;
    }
  }

  /** Non-basic date type used to exercise custom type handling. */
  public static class ComplexDate {
    private long mTimestamp;

    public ComplexDate(long timestamp) {
      mTimestamp = timestamp;
    }

    public long getTimestamp() {
      return mTimestamp;
    }
  }

  /** Model with a private, getter-accessed field of a non-basic type. */
  public static class ModelWithComplexDate extends BaseModel {
    @Column("date")
    private ComplexDate mComplexDate;

    @Column("another_field")
    private String mAnotherField;

    public ComplexDate getComplexDate() {
      return mComplexDate;
    }

    public String getAnotherField() {
      return mAnotherField;
    }
  }

  public static PolyModel CONTACT = new PolyModel(Contact.class, "Contact");
  public static PolyModel DEAL = new PolyModel(Deal.class, "Deal");
  public static TestModel USER = new BaseTestModel(User.class);
  public static PolyModel LEAD = new PolyModel(Lead.class, "Lead");
  public static TestModel CONTACT_DATA = new BaseTestModel(ContactData.class);
  public static TestModel DEAL_CONTACT = new BaseTestModel(DealContact.class);
  public static TestModel NOTE = new BaseTestModel(Note.class);
  public static TestModel CALL = new BaseTestModel(Call.class);
  public static TestModel TAG = new BaseTestModel(Tag.class);
  public static TestModel TAGGING = new BaseTestModel(Tagging.class);
  public static TestModel PERSONAL_INFO = new BaseTestModel(PersonalInfo.class);
  public static TestModel EXTENDED_PERSONAL_INFO = new BaseTestModel(ExtendedPersonalInfo.class);
  public static TestModel PERSONAL_INFO_V2 = new BaseTestModel(PersonalInfoV2.class);
  public static TestModel PERSONAL_INFO_V3 = new BaseTestModel(PersonalInfoV3.class);
  public static TestModel MODEL_WITH_COMPLEX_DATE = new BaseTestModel(ModelWithComplexDate.class);

  // NOTE: declared after the model constants above so they are initialized first.
  static ModelGraph<TestModel> MODEL_GRAPH = ModelGraph.of(TestModel.class)
      .identifiedByDefault().by("id")
      .with(new BaseTestModel(KotlinDataClass.class))
      .with(new BaseTestModel(ClassWithoutDefaultConstructor.class))
      .with(new BaseTestModel(ClassWithoutPublicDefaultConstructor.class))
      .with(new BaseTestModel(ClassWithNonBasicFieldType.class))
      .with(USER)
      .with(MODEL_WITH_COMPLEX_DATE)
      .with(PERSONAL_INFO)
      .with(EXTENDED_PERSONAL_INFO)
      .with(PERSONAL_INFO_V2)
      .with(PERSONAL_INFO_V3)
      .where()
      .the(DEAL).references(CONTACT).by("contact_id")
      .the(LEAD).mayHave(CONTACT_DATA).linked().by("lead_id")
      .the(DEAL_CONTACT).links(DEAL).by("deal_id").with(CONTACT).by("contact_id")
      .the(NOTE).references(ImmutableList.of(CONTACT, DEAL, LEAD)).by("notable_type", "notable_id")
      .the(CALL).references(ImmutableList.of(CONTACT, LEAD)).by("callable_type", "callable_id")
      .the(CONTACT).groupsOther().by("contact_id")
      .the(TAGGING).links(TAG).by("tag_id").with(ImmutableList.of(CONTACT, LEAD, DEAL)).by("taggable_type", "taggable_id")
      .the(TAGGING).references(USER).by("user_id")
      .the(DEAL).references(USER).by("user_id")
      .the(CONTACT).references(USER).by("user_id")
      .build();

  /**
   * Builds a content:// Uri for the given model class, using this package as the
   * authority and the lower-cased simple class name as the path.
   */
  private static Uri buildUriFor(Class<?> modelClass) {
    return new Uri.Builder()
        .scheme(ContentResolver.SCHEME_CONTENT)
        .authority(TestModels.class.getPackage().getName())
        .appendPath(modelClass.getSimpleName().toLowerCase())
        .build();
  }
}
/**
 * Copyright 2014 Troy Histed
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.troyhisted.inputfield.util;

import java.util.ArrayList;
import java.util.Collection;

import org.apache.commons.beanutils.DynaBean;
import org.apache.commons.beanutils.DynaClass;
import org.apache.commons.beanutils.DynaProperty;

/**
 * Specialized list designed for use with BeanUtils.
 *
 * <p>
 * This class alters the behavior of a list in a couple important ways. First, calling <em>get</em> with an index
 * greater than the size of the list will result in <em>null</em> being returned instead of an
 * {@link IndexOutOfBoundsException}. Secondly, calling <em>set</em> with an index greater than the size of the
 * list will cause the list to grow to the index size by inserting null values into the list.
 *
 * <p>
 * The list is constructed with the specific class type object that it is designed to hold. This is used when
 * BeanUtils are used to add items to the list. When BeanUtils attempts to add a new item to the list, the
 * correct type is known, so BeanUtils can properly convert the value to the correct type. It is necessary to
 * pass in the type because generics are lost after compilation due to type erasure.
 *
 * <p>
 * Note: the {@link DynaBean}/{@link DynaClass} map- and property-style accessors are unsupported on a list
 * and throw {@link UnsupportedOperationException}; only the indexed accessors are meaningful.
 *
 * @author Troy Histed
 *
 * @param <T>
 *            list type
 */
public class DynaList<T> extends ArrayList<T> implements DynaClass, DynaBean {

	/**
	 * Explicit serialVersionUID: ArrayList is Serializable, so without this the UID would be
	 * compiler-generated and fragile across recompiles.
	 */
	private static final long serialVersionUID = 1L;

	/**
	 * Indication of whether the previous action was successful.
	 */
	private boolean successful = true;

	/** Describes the element type of this list for BeanUtils conversions. */
	private final DynaProperty listDynaProperty;

	/**
	 * Creates a DynaList.
	 *
	 * @param listType
	 *            the type of object the list will hold
	 * @return new DynaList
	 */
	public static <T> DynaList<T> construct(Class<? extends T> listType) {
		return new DynaList<T>(listType);
	}

	/**
	 * Constructor.
	 *
	 * @param listType
	 *            the type of object the list will hold
	 */
	public DynaList(Class<? extends T> listType) {
		// Name is irrelevant for a list; only the element type matters for conversion.
		this.listDynaProperty = new DynaProperty(null, listType);
	}

	/**
	 * Adds the specified item to the list and returns it.
	 *
	 * <p>
	 * Provides a shorthand for times when an item is to be added to a list and assigned to a local variable.
	 *
	 * <p>
	 * An indication on whether the add was successful can be retrieved via {@link #wasSuccessful()}.
	 *
	 * @param aItem
	 *            the item to add
	 * @return the item that was added to the list
	 */
	public T addItem(T aItem) {
		this.successful = super.add(aItem);
		return aItem;
	}

	/**
	 * Adds the specified items to the list and returns them.
	 *
	 * <p>
	 * Provides a shorthand for times when a items are to be added to a list and assigned to a local variable.
	 *
	 * <p>
	 * An indication on whether the add was successful can be retrieved via {@link #wasSuccessful()}.
	 *
	 * @param aItems
	 *            the items to add
	 * @return the items that were added to the list
	 */
	public Collection<? extends T> addAllItems(Collection<? extends T> aItems) {
		this.successful = this.addAll(aItems);
		return aItems;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean add(T e) {
		return this.successful = super.add(e);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean addAll(Collection<? extends T> c) {
		return this.successful = super.addAll(c);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean addAll(int index, Collection<? extends T> c) {
		return this.successful = super.addAll(index, c);
	}

	/**
	 * Sets the value into the array at the specified index.
	 *
	 * Calling <em>set</em> with an index greater than the size of the list will cause the list to grow to the
	 * index size by inserting null values into the list.
	 *
	 * @param index
	 *            the index where the element is to be inserted
	 * @param element
	 *            the object to be inserted
	 * @return the element that was previously at the specified index
	 */
	@Override
	public T set(int index, T element) {
		// Grow the list with null padding until the index is addressable.
		while (index >= super.size()) {
			this.add(null);
		}
		return super.set(index, element);
	}

	/**
	 * Returns the item in the list at the specified index.
	 *
	 * @param index
	 *            the location in the array
	 * @return the element at the specified index or <em>null</em> if the index is greater than the array size
	 */
	@Override
	public T get(int index) {
		if (index >= super.size()) {
			return null;
		}
		return super.get(index);
	}

	/**
	 * Indicates whether the previous action was successful.
	 *
	 * @return <code>true</code> if the previous action defined by {@link DynaList} was successful
	 */
	public boolean wasSuccessful() {
		return this.successful;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean contains(String name, String key) {
		throw new UnsupportedOperationException("Map operations are not supported on a list.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Object get(String name) {
		throw new UnsupportedOperationException("A List has no properties.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Object get(String name, int index) {
		throw new UnsupportedOperationException("A List has no properties.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Object get(String name, String key) {
		throw new UnsupportedOperationException("Map operations are not supported on a list.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public DynaClass getDynaClass() {
		return this;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void remove(String name, String key) {
		throw new UnsupportedOperationException("Map operations are not supported on a list.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void set(String name, Object value) {
		throw new UnsupportedOperationException("Map operations are not supported on a list.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void set(String name, int index, Object value) {
		throw new UnsupportedOperationException("A List has no properties.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void set(String name, String key, Object value) {
		throw new UnsupportedOperationException("Map operations are not supported on a list.");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public String getName() {
		return null;
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>
	 * The {@code name} argument is ignored: every "property" of this list shares the single element
	 * type supplied at construction.
	 */
	@Override
	public DynaProperty getDynaProperty(String name) {
		return this.listDynaProperty;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public DynaProperty[] getDynaProperties() {
		return null;
	}

	/**
	 * @throws IllegalStateException
	 *             DynaList class cannot be dynamically instantiated
	 */
	@Override
	public DynaBean newInstance() throws IllegalAccessException, InstantiationException {
		throw new IllegalStateException("DynaList class cannot be dynamically instantiated.");
	}
}
package com.saucecode.baghchal.logic;

import java.util.Arrays;

import com.saucecode.baghchal.logic.interfaces.BaghChalI;
import com.saucecode.baghchal.logic.interfaces.Player;

/**
 * Implementation of {@link com.saucecode.baghchal.logic.interfaces.BaghChalI}.
 *
 * @author Torben Kr&uuml;ger
 *
 */
public class BaghChal implements BaghChalI {

    /**
     * A matrix for all possible moves. Both board positions are flattened as
     * {@code row * 5 + column}; {@code MOVES[from][to]} encodes how a piece may
     * travel between them: {@code 0} = unreachable, {@code 1} = direct step
     * along a board line, {@code 2} = jump over the midpoint square (a tiger
     * capturing a goat).
     */
    private static final int[][] MOVES = {
            { 0, 1, 2, 0, 0, 1, 1, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
            { 1, 0, 1, 2, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
            { 2, 1, 0, 1, 2, 0, 1, 1, 1, 0, 2, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
            { 0, 2, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
            { 0, 0, 2, 1, 0, 0, 0, 0, 1, 1, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
            { 1, 0, 0, 0, 0, 0, 1, 2, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
            { 1, 1, 1, 0, 0, 1, 0, 1, 2, 0, 1, 1, 1, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0 },
            { 0, 0, 1, 0, 0, 2, 1, 0, 1, 2, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0 },
            { 0, 0, 1, 1, 1, 0, 2, 1, 0, 1, 0, 0, 1, 1, 1, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0 },
            { 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0 },
            { 2, 0, 2, 0, 0, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 1, 1, 0, 0, 0, 2, 0, 2, 0, 0 },
            { 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 2, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0 },
            { 2, 0, 2, 0, 2, 0, 1, 1, 1, 0, 2, 1, 0, 1, 2, 0, 1, 1, 1, 0, 2, 0, 2, 0, 2 },
            { 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 2, 0 },
            { 0, 0, 2, 0, 2, 0, 0, 0, 1, 1, 0, 0, 2, 1, 0, 0, 0, 0, 1, 1, 0, 0, 2, 0, 2 },
            { 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 2, 0, 0, 1, 0, 0, 0, 0 },
            { 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 1, 1, 1, 0, 0, 1, 0, 1, 2, 0, 1, 1, 1, 0, 0 },
            { 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 1, 2, 0, 0, 1, 0, 0 },
            { 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 1, 1, 1, 0, 2, 1, 0, 1, 0, 0, 1, 1, 1 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 0, 0, 0, 0, 1 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 2, 0 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 2, 0, 1, 1, 1, 0, 2, 1, 0, 1, 2 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 1 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 1, 1, 0, 0, 2, 1, 0 } };

    /**
     * The board being used. Indexed as {@code board[row][column]}.
     */
    private Player[][] board;

    /**
     * The number of goats left to set.
     */
    private int goatsLeftToSet;

    /**
     * The number of goats eaten. This value <b>always</b> has to be in range [
     * 0 ; {@link BaghChalI#TIGER_WIN_CONDITION} ].
     */
    private int goatsEaten;

    /**
     * The current selection, or {@code null} if nothing is selected.
     */
    private Selection selected;

    /**
     * The current state of the game state machine.
     */
    private State state;

    /**
     * The previous game. Used for {@link #undo()}.
     */
    private BaghChal previous;

    /**
     * The following game. Used for {@link #redo()}.
     */
    private BaghChal next;

    /**
     * Creates a new instance of {@link BaghChal}: an empty board with the four
     * tigers placed in the corners and all goats still to set.
     */
    public BaghChal() {
        board = new Player[DIM][DIM];
        for (int row = 0; row < DIM; row++) {
            for (int column = 0; column < DIM; column++) {
                board[row][column] = Player.NONE;
            }
        }
        // tigers start in the four corners
        board[0][0] = Player.TIGER;
        board[0][DIM - 1] = Player.TIGER;
        board[DIM - 1][DIM - 1] = Player.TIGER;
        board[DIM - 1][0] = Player.TIGER;
        goatsLeftToSet = GOATS_START_COUNT;
        goatsEaten = 0;
        selected = null;
        state = State.GOAT_SET;
        previous = null;
        next = null;
    }

    /**
     * Creates a clone of an existing BaghChal game.
     *
     * @param orig
     *            original game, which shall be cloned
     */
    private BaghChal(BaghChal orig) {
        board = new Player[DIM][DIM];
        for (int row = 0; row < DIM; row++) {
            // deep copy: each row array is cloned individually
            board[row] = orig.board[row].clone();
        }
        goatsLeftToSet = orig.goatsLeftToSet;
        goatsEaten = orig.goatsEaten;
        // the clone never carries a selection; roll *_MOVE states back to the
        // corresponding *_SELECT state so the snapshot is consistent
        selected = null;
        state = orig.state;
        if (state == State.GOAT_MOVE) {
            state = State.GOAT_SELECT;
        }
        if (state == State.TIGER_MOVE) {
            state = State.TIGER_SELECT;
        }
        previous = orig.previous;
        next = orig.next;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Returns a defensive deep copy: mutating the returned array does not
     * affect the internal game state.</p>
     */
    @Override
    public Player[][] getBoard() {
        // board.clone() alone would be a shallow copy that still shares the
        // inner row arrays, letting callers corrupt the game state; clone
        // each row as well.
        Player[][] copy = new Player[DIM][];
        for (int row = 0; row < DIM; row++) {
            copy[row] = board[row].clone();
        }
        return copy;
    }

    @Override
    public int getGoatsEaten() {
        return goatsEaten;
    }

    @Override
    public int getGoatsLeftToSet() {
        return goatsLeftToSet;
    }

    @Override
    public Selection getSelection() {
        return selected;
    }

    @Override
    public State getState() {
        return state;
    }

    @Override
    public boolean action(int row, int column) {
        checkInput(row);
        checkInput(column);
        switch (state) {
        case TIGER_WON:
            return false;
        case GOAT_WON:
            return false;
        case GOAT_SET:
            // goat placement phase: any empty square is legal
            if (board[row][column] == Player.NONE) {
                // snapshot for undo before mutating
                // NOTE(review): only this branch links previous.next = this;
                // the move branches below do not — undo() compensates by
                // creating its own snapshot for redo. Verify intended.
                previous = clone();
                previous.next = this;
                board[row][column] = Player.GOAT;
                goatsLeftToSet--;
                switch (getWinner()) {
                case GOAT:
                    state = State.GOAT_WON;
                    return true;
                case TIGER:
                    state = State.TIGER_WON;
                    return true;
                case NONE:
                    state = State.TIGER_SELECT;
                    return true;
                default:
                    throw new InternalError("no such enum");
                }
            }
            return false;
        case GOAT_MOVE:
            switch (board[row][column]) {
            case GOAT:
                // clicking the selected goat deselects it; clicking another
                // goat moves the selection
                if ((selected.row == row) && (selected.column == column)) {
                    selected = null;
                    state = State.GOAT_SELECT;
                    return true;
                } else {
                    selected = new Selection(row, column);
                    return true;
                }
            case TIGER:
                return false;
            case NONE:
                // goats may only step (1), never jump
                if ((getMovePossibility(selected.row, selected.column, row, column)) == 1) {
                    previous = clone();
                    board[selected.row][selected.column] = Player.NONE;
                    board[row][column] = Player.GOAT;
                    selected = null;
                    switch (getWinner()) {
                    case GOAT:
                        state = State.GOAT_WON;
                        return true;
                    case TIGER:
                        state = State.TIGER_WON;
                        return true;
                    case NONE:
                        state = State.TIGER_SELECT;
                        return true;
                    default:
                        throw new InternalError("no such enum");
                    }
                }
                return false;
            default:
                throw new InternalError("no such enum");
            }
        case GOAT_SELECT:
            if (board[row][column] == Player.GOAT) {
                selected = new Selection(row, column);
                state = State.GOAT_MOVE;
                return true;
            }
            return false;
        case TIGER_MOVE:
            switch (board[row][column]) {
            case GOAT:
                return false;
            case TIGER:
                // clicking the selected tiger deselects it; clicking another
                // tiger moves the selection
                if ((selected.row == row) && (selected.column == column)) {
                    selected = null;
                    state = State.TIGER_SELECT;
                    return true;
                } else {
                    selected = new Selection(row, column);
                    return true;
                }
            case NONE:
                switch (getMovePossibility(selected.row, selected.column, row, column)) {
                case 0:
                    return false;
                case 1:
                    // plain step
                    previous = clone();
                    board[selected.row][selected.column] = Player.NONE;
                    board[row][column] = Player.TIGER;
                    selected = null;
                    state = (goatsLeftToSet > 0) ? State.GOAT_SET : State.GOAT_SELECT;
                    return true;
                case 2:
                    // jump: only legal if a goat sits on the midpoint square
                    int betweenRow = (selected.row + row) / 2;
                    int betweenColumn = (selected.column + column) / 2;
                    if (board[betweenRow][betweenColumn] != Player.GOAT) {
                        return false;
                    }
                    previous = clone();
                    board[selected.row][selected.column] = Player.NONE;
                    board[betweenRow][betweenColumn] = Player.NONE;
                    goatsEaten++;
                    board[row][column] = Player.TIGER;
                    selected = null;
                    switch (getWinner()) {
                    case GOAT:
                        state = State.GOAT_WON;
                        return true;
                    case TIGER:
                        state = State.TIGER_WON;
                        return true;
                    case NONE:
                        state = (goatsLeftToSet > 0) ? State.GOAT_SET : State.GOAT_SELECT;
                        return true;
                    default:
                        throw new InternalError("no such enum");
                    }
                default:
                    throw new InternalError("no such enum");
                }
            default:
                throw new InternalError("no such enum");
            }
        case TIGER_SELECT:
            if (board[row][column] == Player.TIGER) {
                selected = new Selection(row, column);
                state = State.TIGER_MOVE;
                return true;
            }
            return false;
        default:
            throw new InternalError("no such enum");
        }
    }

    @Override
    public boolean undo() {
        if (isAnyUndoLeft()) {
            // snapshot the current state so it can be restored via redo()
            BaghChal clone = clone();
            // NOTE(review): the snapshot's row arrays are adopted by
            // reference here; later moves mutate them. This appears safe
            // because every action() re-snapshots via clone() first, but the
            // history-chain aliasing deserves a closer look.
            board = previous.board;
            goatsLeftToSet = previous.goatsLeftToSet;
            goatsEaten = previous.goatsEaten;
            selected = previous.selected;
            state = previous.state;
            previous = previous.previous;
            next = clone;
            return true;
        }
        return false;
    }

    @Override
    public boolean redo() {
        if (isAnyRedoLeft()) {
            // snapshot the current state so it can be restored via undo()
            BaghChal clone = clone();
            if (previous != null) {
                previous.next = clone();
            }
            board = next.board;
            goatsLeftToSet = next.goatsLeftToSet;
            goatsEaten = next.goatsEaten;
            selected = next.selected;
            state = next.state;
            previous = clone;
            clone.next = this;
            next = next.next;
            return true;
        }
        return false;
    }

    @Override
    public boolean isAnyUndoLeft() {
        return previous != null;
    }

    @Override
    public boolean isAnyRedoLeft() {
        return next != null;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (int row = 0; row < DIM; row++) {
            for (int column = 0; column < DIM; column++) {
                sb.append(board[row][column].toChar());
                sb.append("---");
            }
            // drop the trailing "---" after the last column
            sb.delete(sb.length() - 3, sb.length());
            sb.append('\n');
            // board lines alternate diagonal directions between rows
            if (row % 2 == 0) {
                sb.append("| \\ | / | \\ | / |\n");
            } else {
                sb.append("| / | \\ | / | \\ |\n");
            }
        }
        // drop the diagonal line printed after the last row (18 chars)
        sb.delete(sb.length() - 18, sb.length());
        sb.append("goats left to set: ");
        sb.append(goatsLeftToSet);
        sb.append('\n');
        sb.append("goats eaten: ");
        sb.append(goatsEaten);
        sb.append('\n');
        sb.append("selected: ");
        sb.append(selected);
        sb.append('\n');
        sb.append("state: ");
        sb.append(state);
        sb.append('\n');
        return sb.toString();
    }

    @Override
    public BaghChal clone() {
        return new BaghChal(this);
    }

    /**
     * Returns a possibility of whether a move is possible or not.
     *
     * @param row
     *            row
     * @param column
     *            column
     * @param targetRow
     *            target row
     * @param targetColumn
     *            target column
     * @return
     *         <ul>
     *         <li>{@code 0}, if there is no possible move</li>
     *         <li>{@code 1}, if a direct move without jump would be
     *         possible</li>
     *         <li>{@code 2}, if a jump would be possible</li>
     *         </ul>
     */
    private int getMovePossibility(int row, int column, int targetRow, int targetColumn) {
        // positions are flattened with the board width (5 == DIM)
        return MOVES[row * 5 + column][targetRow * 5 + targetColumn];
    }

    /**
     * Returns whether at least 1 tiger is able to move.
     *
     * @return {@code true}, if at least 1 tiger is able to move
     */
    private boolean anyTigerCanMove() {
        for (int row = 0; row < DIM; row++) {
            for (int column = 0; column < DIM; column++) {
                if ((board[row][column] == Player.TIGER) && tigerCanMove(row, column)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns whether a specific tiger is able to move. This method expects
     * that the given {@code row} and {@code column} point on a tiger.
     *
     * @param row
     *            row of the specific tiger
     * @param column
     *            column of the specific tiger
     * @return {@code true}, if the specific tiger is able to move
     */
    private boolean tigerCanMove(int row, int column) {
        for (int targetRow = 0; targetRow < DIM; targetRow++) {
            for (int targetColumn = 0; targetColumn < DIM; targetColumn++) {
                int move = getMovePossibility(row, column, targetRow, targetColumn);
                // a jump needs an empty target and a goat on the midpoint
                if (move == 2 && board[targetRow][targetColumn] == Player.NONE
                        && board[(row + targetRow) / 2][(column + targetColumn) / 2] == Player.GOAT) {
                    return true;
                }
                // a step only needs an empty target
                if (move == 1 && board[targetRow][targetColumn] == Player.NONE) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns the winner of this match.
     *
     * @return
     *         <ul>
     *         <li>{@link Player#GOAT}, if no tiger can move anymore</li>
     *         <li>{@link Player#TIGER}, if
     *         {@link BaghChalI#TIGER_WIN_CONDITION} goats are eaten</li>
     *         <li>{@link Player#NONE}, else</li>
     *         </ul>
     */
    private Player getWinner() {
        if (goatsEaten == TIGER_WIN_CONDITION) {
            return Player.TIGER;
        }
        if (!anyTigerCanMove()) {
            return Player.GOAT;
        }
        return Player.NONE;
    }

    /**
     * Throws an exception, if the value is not in range {@code [ 0 ; }
     * {@link BaghChalI#DIM} {@code )}.
     *
     * @param value
     *            value to be checked
     *
     * @throws IllegalArgumentException
     *             if {@code value} is not in range {@code [ 0 ; }
     *             {@link BaghChalI#DIM} {@code )}
     */
    private void checkInput(int value) {
        if (value < 0 || value >= DIM) {
            throw new IllegalArgumentException("value has to be in range [ 0 ; " + DIM + " ), but was " + value);
        }
    }

    @Override
    /*
     * this method IGNORES fields previous and next
     */
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + Arrays.deepHashCode(board);
        result = prime * result + goatsEaten;
        result = prime * result + goatsLeftToSet;
        result = prime * result + ((selected == null) ? 0 : selected.hashCode());
        result = prime * result + ((state == null) ? 0 : state.hashCode());
        return result;
    }

    @Override
    /*
     * this method IGNORES fields previous and next
     */
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        BaghChal other = (BaghChal) obj;
        if (!Arrays.deepEquals(board, other.board))
            return false;
        if (goatsEaten != other.goatsEaten)
            return false;
        if (goatsLeftToSet != other.goatsLeftToSet)
            return false;
        if (selected == null) {
            if (other.selected != null)
                return false;
        } else if (!selected.equals(other.selected))
            return false;
        if (state != other.state)
            return false;
        return true;
    }

}
package org.csource.fastdfs; public class StructGroupStat extends StructBase { protected static final int FIELD_INDEX_GROUP_NAME = 0; protected static final int FIELD_INDEX_TOTAL_MB = 1; protected static final int FIELD_INDEX_FREE_MB = 2; protected static final int FIELD_INDEX_TRUNK_FREE_MB = 3; protected static final int FIELD_INDEX_STORAGE_COUNT = 4; protected static final int FIELD_INDEX_STORAGE_PORT = 5; protected static final int FIELD_INDEX_STORAGE_HTTP_PORT = 6; protected static final int FIELD_INDEX_ACTIVE_COUNT = 7; protected static final int FIELD_INDEX_CURRENT_WRITE_SERVER = 8; protected static final int FIELD_INDEX_STORE_PATH_COUNT = 9; protected static final int FIELD_INDEX_SUBDIR_COUNT_PER_PATH = 10; protected static final int FIELD_INDEX_CURRENT_TRUNK_FILE_ID = 11; protected static int fieldsTotalSize; protected static FieldInfo[] fieldsArray = new FieldInfo[12]; static { int offset = 0; fieldsArray[FIELD_INDEX_GROUP_NAME] = new FieldInfo("groupName", offset, ProtoCommon.FDFS_GROUP_NAME_MAX_LEN + 1); offset += ProtoCommon.FDFS_GROUP_NAME_MAX_LEN + 1; fieldsArray[FIELD_INDEX_TOTAL_MB] = new FieldInfo("totalMB", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_FREE_MB] = new FieldInfo("freeMB", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_TRUNK_FREE_MB] = new FieldInfo("trunkFreeMB", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_STORAGE_COUNT] = new FieldInfo("storageCount", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_STORAGE_PORT] = new FieldInfo("storagePort", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_STORAGE_HTTP_PORT] = new FieldInfo("storageHttpPort", offset, 
ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_ACTIVE_COUNT] = new FieldInfo("activeCount", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_CURRENT_WRITE_SERVER] = new FieldInfo("currentWriteServer", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_STORE_PATH_COUNT] = new FieldInfo("storePathCount", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_SUBDIR_COUNT_PER_PATH] = new FieldInfo("subdirCountPerPath", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsArray[FIELD_INDEX_CURRENT_TRUNK_FILE_ID] = new FieldInfo("currentTrunkFileId", offset, ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE); offset += ProtoCommon.FDFS_PROTO_PKG_LEN_SIZE; fieldsTotalSize = offset; } protected String groupName; // name of this group protected long totalMB; // total disk storage in MB protected long freeMB; // free disk space in MB protected long trunkFreeMB; // trunk free space in MB protected int storageCount; // storage server count protected int storagePort; // storage server port protected int storageHttpPort; // storage server HTTP port protected int activeCount; // active storage server count protected int currentWriteServer; // current storage server index to upload file protected int storePathCount; // store base path count of each storage server protected int subdirCountPerPath; // sub dir count per store path protected int currentTrunkFileId; // current trunk file id /** * get group name * * @return group name */ public String getGroupName() { return this.groupName; } /** * get total disk space in MB * * @return total disk space in MB */ public long getTotalMB() { return this.totalMB; } /** * get free disk space in MB * * @return free disk space in MB */ public long 
getFreeMB() { return this.freeMB; } /** * get trunk free space in MB * * @return trunk free space in MB */ public long getTrunkFreeMB() { return this.trunkFreeMB; } /** * get storage server count in this group * * @return storage server count in this group */ public int getStorageCount() { return this.storageCount; } /** * get active storage server count in this group * * @return active storage server count in this group */ public int getActiveCount() { return this.activeCount; } /** * get storage server port * * @return storage server port */ public int getStoragePort() { return this.storagePort; } /** * get storage server HTTP port * * @return storage server HTTP port */ public int getStorageHttpPort() { return this.storageHttpPort; } /** * get current storage server index to upload file * * @return current storage server index to upload file */ public int getCurrentWriteServer() { return this.currentWriteServer; } /** * get store base path count of each storage server * * @return store base path count of each storage server */ public int getStorePathCount() { return this.storePathCount; } /** * get sub dir count per store path * * @return sub dir count per store path */ public int getSubdirCountPerPath() { return this.subdirCountPerPath; } /** * get current trunk file id * * @return current trunk file id */ public int getCurrentTrunkFileId() { return this.currentTrunkFileId; } /** * set fields * * @param bs byte array * @param offset start offset */ public void setFields(byte[] bs, int offset) { this.groupName = stringValue(bs, offset, fieldsArray[FIELD_INDEX_GROUP_NAME]); this.totalMB = longValue(bs, offset, fieldsArray[FIELD_INDEX_TOTAL_MB]); this.freeMB = longValue(bs, offset, fieldsArray[FIELD_INDEX_FREE_MB]); this.trunkFreeMB = longValue(bs, offset, fieldsArray[FIELD_INDEX_TRUNK_FREE_MB]); this.storageCount = intValue(bs, offset, fieldsArray[FIELD_INDEX_STORAGE_COUNT]); this.storagePort = intValue(bs, offset, fieldsArray[FIELD_INDEX_STORAGE_PORT]); 
this.storageHttpPort = intValue(bs, offset, fieldsArray[FIELD_INDEX_STORAGE_HTTP_PORT]); this.activeCount = intValue(bs, offset, fieldsArray[FIELD_INDEX_ACTIVE_COUNT]); this.currentWriteServer = intValue(bs, offset, fieldsArray[FIELD_INDEX_CURRENT_WRITE_SERVER]); this.storePathCount = intValue(bs, offset, fieldsArray[FIELD_INDEX_STORE_PATH_COUNT]); this.subdirCountPerPath = intValue(bs, offset, fieldsArray[FIELD_INDEX_SUBDIR_COUNT_PER_PATH]); this.currentTrunkFileId = intValue(bs, offset, fieldsArray[FIELD_INDEX_CURRENT_TRUNK_FILE_ID]); } /** * get fields total size * * @return fields total size */ public static int getFieldsTotalSize() { return fieldsTotalSize; } }
package org.nohope.jaxb2.plugin.metadata;

import com.sun.codemodel.*;
import com.sun.tools.xjc.Options;
import com.sun.tools.xjc.model.Aspect;
import com.sun.tools.xjc.model.CClassInfo;
import com.sun.tools.xjc.model.CTypeInfo;
import com.sun.tools.xjc.outline.ClassOutline;
import com.sun.tools.xjc.outline.FieldOutline;
import com.sun.tools.xjc.outline.Outline;
import org.jvnet.jaxb2_commons.plugin.AbstractParameterizablePlugin;
import org.jvnet.jaxb2_commons.util.ClassUtils;
import org.nohope.jaxb2.plugin.Utils;
import org.nohope.reflection.TypeReference;
import org.xml.sax.ErrorHandler;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import java.io.StringWriter;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * XJC plugin ({@code -Xmetadata}) that augments every generated JAXB class
 * with a pair of generated "descriptor" types (class-level and
 * instance-level) mirroring the class's fields, plus getter methods for them.
 *
 * @author <a href="mailto:ketoth.xupack@gmail.com">ketoth xupack</a>
 * @since 9/30/13 3:44 PM
 */
public class MetadataPlugin extends AbstractParameterizablePlugin {

    /** {@inheritDoc} Command-line switch that activates this plugin. */
    @Override
    public String getOptionName() {
        return "Xmetadata";
    }

    /** {@inheritDoc} */
    @Override
    public String getUsage() {
        return "TBD";
    }

    /**
     * Plugin entry point. First pass: annotate collection fields with their
     * element type and make every generated class implement
     * {@link IMetadataHolder}; second pass: generate the descriptor types.
     */
    @Override
    public boolean run(final Outline outline,
                       final Options opt,
                       final ErrorHandler errorHandler) {
        final Collection<? extends ClassOutline> classes = outline.getClasses();
        for (final ClassOutline classOutline : classes) {
            // collect, per property name, the element type of each collection field
            final Map<String, JClass> collectionTypes = new HashMap<>();
            for (final FieldOutline field : classOutline.getDeclaredFields()) {
                if (field.getPropertyInfo().isCollection()) {
                    final Collection<? extends CTypeInfo> references =
                            field.getPropertyInfo().ref();
                    if (references.size() == 1) {
                        // FIXME: more than one?
                        final CTypeInfo ref = references.iterator().next();
                        if (ref instanceof CClassInfo) {
                            final CClassInfo collectionType = (CClassInfo) ref;
                            collectionTypes.put(
                                    field.getPropertyInfo().getName(false),
                                    collectionType.getType().toType(outline, Aspect.IMPLEMENTATION));
                        }
                    }
                }
            }
            final JDefinedClass c = classOutline.implClass;
            // record the element type on each collection field via @CollectionType
            for (final Map.Entry<String, JClass> entry : collectionTypes.entrySet()) {
                c.fields()
                 .get(entry.getKey())
                 .annotate(CollectionType.class)
                 .param("value", entry.getValue());
            }
            final JCodeModel cm = c.owner();
            // the IInstanceDescriptor nested type is generated later by
            // generateDescriptor(); referenced here by name via directClass
            ClassUtils._implements(c, cm.ref(IMetadataHolder.class)
                    .narrow(cm.directClass(c.name() + ".IInstanceDescriptor")));
        }
        // second pass: all classes now implement IMetadataHolder, so the
        // generated descriptor methods can cross-reference each other
        for (final ClassOutline classOutline : classes) {
            processClassOutline(classOutline);
        }
        return true;
    }

    /** Generates the descriptor machinery for one outlined class. */
    protected static void processClassOutline(final ClassOutline classOutline) {
        final JDefinedClass theClass = classOutline.implClass;
        generateDescriptor(theClass);
    }

    /**
     * Generates four nested types inside {@code theClass} —
     * {@code IClassDescriptor}/{@code ClassDescriptor} (static, type-level) and
     * {@code IInstanceDescriptor}/{@code InstanceDescriptor} (bound to a value)
     * — plus one descriptor method per getter-backed field and a string-keyed
     * {@code getChild(String)} dispatcher on each concrete descriptor.
     */
    private static void generateDescriptor(final JDefinedClass theClass) {
        final JCodeModel codeModel = theClass.owner();

        final JDefinedClass classLevelDescriptorInterface;
        final JDefinedClass classLevelDescriptor;
        final JDefinedClass instanceLevelDescriptorInterface;
        final JDefinedClass instanceLevelDescriptor;

        final JClass abstractDescriptor = codeModel.ref(Descriptor.class);
        final JClass abstractValueDescriptor = codeModel.ref(ValueDescriptor.class);
        final JClass descriptorInterface = codeModel.ref(IDescriptor.class);
        final JClass valueDescriptor = codeModel.ref(IValueDescriptor.class);

        try {
            // class-level descriptors
            classLevelDescriptorInterface =
                    theClass._interface("IClassDescriptor")
                            ._extends(descriptorInterface.narrow(theClass))
                            ;
            classLevelDescriptor =
                    theClass._class(JMod.PUBLIC | JMod.STATIC, "ClassDescriptor")
                            ._extends(abstractDescriptor.narrow(theClass))
                            ._implements(classLevelDescriptorInterface)
                            ;

            // instance-level descriptors
            instanceLevelDescriptorInterface =
                    theClass._interface("IInstanceDescriptor")
                            ._extends(classLevelDescriptorInterface)
                            ._implements(valueDescriptor.narrow(theClass))
                            ;
            instanceLevelDescriptor =
                    theClass._class(JMod.PUBLIC | JMod.STATIC, "InstanceDescriptor")
                            ._extends(abstractValueDescriptor.narrow(theClass))
                            ._implements(instanceLevelDescriptorInterface)
                            ;
        } catch (JClassAlreadyExistsException e) {
            // descriptor types are generated exactly once per class
            throw new IllegalStateException(e);
        }

        // public getter methods: static getClassDescriptor() ...
        theClass.method(JMod.PUBLIC | JMod.STATIC, classLevelDescriptorInterface, "getClassDescriptor")
                .body()
                ._return(JExpr._new(classLevelDescriptor)
                        .arg(JExpr._null())
                        .arg(JExpr._null())
                );

        // ... and instance-bound getInstanceDescriptor(), wrapping 'this'
        final JMethod instancedDescriptorGetter =
                theClass.method(JMod.PUBLIC, instanceLevelDescriptorInterface, "getInstanceDescriptor");
        instancedDescriptorGetter
                .body()
                ._return(JExpr._new(instanceLevelDescriptor)
                        .arg(JExpr._null())
                        .arg(JExpr._null())
                        .arg(JExpr._new(codeModel.ref(PasstroughGetter.class).narrow(theClass)).arg(JExpr._this())));
        instancedDescriptorGetter.annotate(Override.class);
        // keep JAXB from treating the generated getter as a mapped property
        instancedDescriptorGetter.annotate(XmlTransient.class);

        final JClass typeReference = codeModel.ref(TypeReference.class);
        // anonymous TypeReference<TheClass>() capturing the described type
        final JInvocation thisTypeRef =
                JExpr._new(codeModel.anonymousClass(typeReference.narrow(theClass)));

        // class-level descriptor constructor: (parent, name)
        {
            final JClass parentDescriptor = abstractDescriptor.narrow(codeModel.wildcard());
            final JMethod classLevelDescriptorConstructor = classLevelDescriptor.constructor(JMod.PROTECTED);
            final JVar parentVar = classLevelDescriptorConstructor.param(JMod.FINAL, parentDescriptor, "parent");
            final JVar nameVar = classLevelDescriptorConstructor.param(JMod.FINAL, String.class, "name");
            classLevelDescriptorConstructor
                    .body()
                    .invoke("super")
                    .arg(parentVar)
                    .arg(nameVar)
                    .arg(thisTypeRef);
        }

        // instance-level descriptor constructor: (parent, name, getter)
        {
            final JClass parentDescriptor = abstractValueDescriptor.narrow(codeModel.directClass("?"));
            final JClass getter = codeModel.ref(IValueGetter.class).narrow(theClass);
            final JMethod instanceLevelDescriptorConstructor = instanceLevelDescriptor.constructor(JMod.PROTECTED);
            final JVar parentVar = instanceLevelDescriptorConstructor.param(JMod.FINAL, parentDescriptor, "parent");
            final JVar nameVar = instanceLevelDescriptorConstructor.param(JMod.FINAL, String.class, "name");
            final JVar getterVar = instanceLevelDescriptorConstructor.param(JMod.FINAL, getter, "getter");
            instanceLevelDescriptorConstructor
                    .body()
                    .invoke("super")
                    .arg(parentVar)
                    .arg(nameVar)
                    .arg(thisTypeRef)
                    .arg(getterVar)
                    ;
        }

        // descriptor methods generated per field, keyed by field meta-name,
        // collected for the getChild(String) dispatchers below
        final Map<String, JMethod> instanceMethods = new HashMap<>();
        final Map<String, JMethod> classMethods = new HashMap<>();

        // FIXME: iterate over fields
        for (final JMethod method : theClass.methods()) {
            final String name = method.name();
            // only getter-style methods backed by a real field take part
            final String fieldMetaName = getAssociatedFieldName(theClass, method);
            if (fieldMetaName == null) {
                continue;
            }

            final JType methodType = method.type();
            final JClass concreteClassLevelReturnType;
            final JClass abstractClassLevelReturnType;
            final JClass abstractInstanceLevelReturnType;
            final JClass concreteInstanceLevelReturnType;
            final JInvocation classLevelMethodExpression;
            final JInvocation instanceLevelMethodExpression;

            // anonymous IValueGetter<T> delegating to getValue().<getter>()
            final JDefinedClass valueGetter =
                    codeModel.anonymousClass(codeModel.ref(IValueGetter.class).narrow(methodType));
            valueGetter.method(JMod.PUBLIC, methodType.boxify(), "get")
                       ._throws(codeModel.ref(Exception.class))
                       .body()
                       ._return(JExpr.invoke("getValue").invoke(name));

            if (Utils.isErasuredAssignable(codeModel.ref(IMetadataHolder.class), methodType)) {
                // field type is itself a metadata holder: chain into its own
                // generated descriptor types (referenced by name, since they
                // may not be generated yet)
                final JClass castedType = (JClass) methodType;
                concreteClassLevelReturnType = codeModel.directClass(castedType.name() + ".ClassDescriptor");
                abstractClassLevelReturnType = codeModel.directClass(castedType.name() + ".IClassDescriptor");
                classLevelMethodExpression =
                        JExpr._new(concreteClassLevelReturnType)
                             .arg(JExpr._this())
                             .arg(fieldMetaName)
                             ;
                abstractInstanceLevelReturnType = codeModel.directClass(castedType.name() + ".IInstanceDescriptor");
                concreteInstanceLevelReturnType = codeModel.directClass(castedType.name() + ".InstanceDescriptor");
                instanceLevelMethodExpression =
                        JExpr._new(concreteInstanceLevelReturnType)
                             .arg(JExpr._this())
                             .arg(fieldMetaName)
                             .arg(JExpr._new(valueGetter))
                             ;
            } else {
                // plain field: wrap it in the generic (I)Descriptor /
                // (I)ValueDescriptor types
                final JClass returnClass = methodType.isPrimitive()
                        ? methodType.boxify()
                        : (JClass) methodType;
                abstractClassLevelReturnType = descriptorInterface.narrow(returnClass);
                concreteClassLevelReturnType = abstractDescriptor.narrow(returnClass);
                final JDefinedClass returnTypeRef =
                        codeModel.anonymousClass(typeReference.narrow(returnClass));
                // diamond-style instantiation (empty narrow) for java7 output
                final JClass java7concreteClass = abstractDescriptor.narrow(new JClass[]{});
                classLevelMethodExpression =
                        JExpr._new(java7concreteClass)
                             .arg(JExpr._this())
                             .arg(fieldMetaName)
                             .arg(JExpr._new(returnTypeRef))
                             ;
                abstractInstanceLevelReturnType = valueDescriptor.narrow(returnClass);
                concreteInstanceLevelReturnType = abstractValueDescriptor.narrow(returnClass);
                instanceLevelMethodExpression =
                        JExpr._new(concreteInstanceLevelReturnType)
                             .arg(JExpr._this())
                             .arg(fieldMetaName)
                             .arg(JExpr._new(returnTypeRef))
                             .arg(JExpr._new(valueGetter))
                             ;
            }

            // abstract declarations on the interfaces...
            classLevelDescriptorInterface.method(JMod.NONE, abstractClassLevelReturnType, name);
            instanceLevelDescriptorInterface.method(JMod.NONE, abstractInstanceLevelReturnType, name)
                                            .annotate(Override.class);

            // ...and concrete implementations on the descriptor classes
            final JMethod classLevelDescriptorMethod =
                    classLevelDescriptor.method(JMod.PUBLIC, concreteClassLevelReturnType, name);
            classLevelDescriptorMethod.javadoc().add("This method reflects '" + fieldMetaName + "' field metadata.\n");
            classLevelDescriptorMethod.javadoc().addReturn().add(concreteClassLevelReturnType.erasure().fullName());
            classLevelDescriptorMethod.javadoc().add("@see");
            classLevelDescriptorMethod.javadoc().add(IDescriptor.class.getCanonicalName());
            classLevelDescriptorMethod.annotate(Override.class);
            classLevelDescriptorMethod.body()._return(classLevelMethodExpression);

            final JMethod instanceDescriptorMethod =
                    instanceLevelDescriptor.method(JMod.PUBLIC, abstractInstanceLevelReturnType, name);
            instanceDescriptorMethod.annotate(Override.class);
            instanceDescriptorMethod.body()._return(instanceLevelMethodExpression);

            instanceMethods.put(fieldMetaName, instanceDescriptorMethod);
            classMethods.put(fieldMetaName, classLevelDescriptorMethod);
        }

        // getChild(String) on InstanceDescriptor: switch over field meta-names
        {
            final JMethod instancedDescriptorChildGetter =
                    instanceLevelDescriptor.method(JMod.PUBLIC,
                            codeModel.ref(IValueDescriptor.class).narrow(codeModel.wildcard()),
                            "getChild");
            //instancedDescriptorChildGetter._throws(CallException.class);
            final JVar name = instancedDescriptorChildGetter.param(String.class, "name");
            final JSwitch _switch = instancedDescriptorChildGetter.body()._switch(name);
            for (final Map.Entry<String, JMethod> e : instanceMethods.entrySet()) {
                _switch._case(JExpr.lit(e.getKey()))
                       .body()
                       ._return(JExpr.invoke(e.getValue()));
            }
            _switch._default()
                   .body()
                   ._throw(JExpr._new(codeModel.ref(IllegalArgumentException.class))
                           .arg(JExpr.lit("No requested child found")));
            instancedDescriptorChildGetter.annotate(Override.class);
        }

        // getChild(String) on ClassDescriptor: same shape, class-level methods
        {
            final JMethod instancedDescriptorChildGetter =
                    classLevelDescriptor.method(JMod.PUBLIC,
                            codeModel.ref(IDescriptor.class).narrow(codeModel.wildcard()),
                            "getChild");
            //instancedDescriptorChildGetter._throws(CallException.class);
            final JVar name = instancedDescriptorChildGetter.param(String.class, "name");
            final JSwitch _switch = instancedDescriptorChildGetter.body()._switch(name);
            for (final Map.Entry<String, JMethod> e : classMethods.entrySet()) {
                _switch._case(JExpr.lit(e.getKey()))
                       .body()
                       ._return(JExpr.invoke(e.getValue()));
            }
            _switch._default()
                   .body()
                   ._throw(JExpr._new(codeModel.ref(IllegalArgumentException.class))
                           .arg(JExpr.lit("No requested child found")));
            instancedDescriptorChildGetter.annotate(Override.class);
        }
    }

    /**
     * Maps a getter-style method ({@code getXxx}/{@code isXxx}) to the name of
     * the field it exposes. Returns the XML element name from the field's
     * {@code @XmlElement(name=...)} annotation if present and not
     * {@code ##default}; otherwise the bean field name; {@code null} if the
     * method is not a getter or no matching field exists.
     */
    private static String getAssociatedFieldName(final JDefinedClass theClass,
                                                 final JMethod method) {
        final JCodeModel codeModel = theClass.owner();
        final String name = method.name();
        final boolean isGetMethod = name.startsWith("get");
        if (!isGetMethod && !name.startsWith("is")) {
            return null;
        }
        // strip the "get"/"is" prefix and decapitalize the remainder
        final String string = name.substring(isGetMethod ? 3 : 2, name.length());
        final String fieldName =
                Character.toLowerCase(string.charAt(0))
                + (string.length() > 1 ? string.substring(1) : "");
        final JFieldVar field = theClass.fields().get(fieldName);
        if (field == null) {
            return null;
        }
        for (final JAnnotationUse annotation : field.annotations()) {
            if (annotation.getAnnotationClass().isAssignableFrom(codeModel.ref(XmlElement.class))) {
                final Map<String, JAnnotationValue> members = annotation.getAnnotationMembers();
                if (!members.containsKey("name")) {
                    continue;
                }
                final JAnnotationValue annotationFieldName = members.get("name");
                // codemodel offers no direct accessor for annotation values;
                // render the expression and strip the surrounding quotes
                final StringWriter writer = new StringWriter();
                final JFormatter f = new JFormatter(writer);
                annotationFieldName.generate(f);
                final String value = writer.toString();
                final String actualName = value.substring(1, value.length() - 1);
                if ("".equals(actualName)) {
                    throw new IllegalStateException();
                }
                if ("##default".equals(actualName)) {
                    return fieldName;
                }
                return actualName;
            }
        }
        return fieldName;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import static com.google.common.base.Preconditions.checkArgument; import java.io.InputStream; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.sql.DataSource; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.Weigher; import com.google.common.collect.Sets; import com.google.common.util.concurrent.MoreExecutors; import com.mongodb.DB; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.cache.CacheLIRS; import org.apache.jackrabbit.oak.cache.CacheValue; import org.apache.jackrabbit.oak.cache.EmpiricalWeigher; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.commons.json.JsopReader; import org.apache.jackrabbit.oak.commons.json.JsopStream; import org.apache.jackrabbit.oak.commons.json.JsopTokenizer; import org.apache.jackrabbit.oak.plugins.document.DocumentNodeState.Children; import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore; import 
org.apache.jackrabbit.oak.plugins.document.mongo.MongoBlobStore;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoVersionGCSupport;
import org.apache.jackrabbit.oak.plugins.document.persistentCache.CacheType;
import org.apache.jackrabbit.oak.plugins.document.persistentCache.PersistentCache;
import org.apache.jackrabbit.oak.plugins.document.rdb.RDBBlobStore;
import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.rdb.RDBOptions;
import org.apache.jackrabbit.oak.plugins.document.util.RevisionsKey;
import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore;
import org.apache.jackrabbit.oak.stats.Clock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A JSON-based wrapper around the NodeStore implementation that stores the
 * data in a {@link DocumentStore}. It is used for testing purpose only.
 */
public class DocumentMK {

    static final Logger LOG = LoggerFactory.getLogger(DocumentMK.class);

    /**
     * The path where the persistent cache is stored.
     */
    static final String DEFAULT_PERSISTENT_CACHE_URI =
            System.getProperty("oak.documentMK.persCache");

    /**
     * The threshold where special handling for many child node starts.
     */
    static final int MANY_CHILDREN_THRESHOLD = Integer.getInteger(
            "oak.documentMK.manyChildren", 50);

    /**
     * Enable or disable the LIRS cache (null to use the default setting for
     * this configuration).
     */
    static final Boolean LIRS_CACHE;

    static {
        // tri-state: null means "not configured", so Boolean.getBoolean()
        // (which defaults to false) cannot be used here
        String s = System.getProperty("oak.documentMK.lirsCache");
        LIRS_CACHE = s == null ? null : Boolean.parseBoolean(s);
    }

    /**
     * Enable fast diff operations.
     */
    static final boolean FAST_DIFF = Boolean.parseBoolean(
            System.getProperty("oak.documentMK.fastDiff", "true"));

    /**
     * The guava cache concurrency level.
     */
    static final int CACHE_CONCURRENCY = Integer.getInteger(
            "oak.documentMK.cacheConcurrency", 16);

    /**
     * The node store.
     */
    protected final DocumentNodeStore nodeStore;

    /**
     * The document store (might be used by multiple DocumentMKs).
     */
    protected final DocumentStore store;

    DocumentMK(Builder builder) {
        this.nodeStore = builder.getNodeStore();
        this.store = nodeStore.getDocumentStore();
    }

    /** Disposes the underlying node store. */
    public void dispose() {
        nodeStore.dispose();
    }

    void backgroundRead() {
        nodeStore.backgroundRead(true);
    }

    void backgroundWrite() {
        nodeStore.backgroundWrite();
    }

    void runBackgroundOperations() {
        nodeStore.runBackgroundOperations();
    }

    public DocumentNodeStore getNodeStore() {
        return nodeStore;
    }

    ClusterNodeInfo getClusterInfo() {
        return nodeStore.getClusterInfo();
    }

    int getPendingWriteCount() {
        return nodeStore.getPendingWriteCount();
    }

    /**
     * @return the current head revision as a string.
     */
    public String getHeadRevision() throws DocumentStoreException {
        return nodeStore.getHeadRevision().toString();
    }

    /**
     * Creates a checkpoint with the given lifetime.
     *
     * @param lifetime the lifetime in milliseconds.
     * @return the checkpoint reference.
     */
    public String checkpoint(long lifetime) throws DocumentStoreException {
        try {
            return nodeStore.checkpoint(lifetime);
        } catch (DocumentStoreException e) {
            throw new DocumentStoreException(e);
        }
    }

    /**
     * Returns the diff between two revisions for the given path.
     *
     * @param depth must be 0; other depths are not supported.
     */
    public String diff(String fromRevisionId,
                       String toRevisionId,
                       String path,
                       int depth) throws DocumentStoreException {
        if (depth != 0) {
            throw new DocumentStoreException("Only depth 0 is supported, depth is " + depth);
        }
        if (path == null || path.equals("")) {
            path = "/";
        }
        try {
            return nodeStore.diff(fromRevisionId, toRevisionId, path);
        } catch (DocumentStoreException e) {
            throw new DocumentStoreException(e);
        }
    }

    /**
     * Checks whether the node at the given absolute path exists in the given
     * revision (or the current head revision when {@code revisionId} is null).
     */
    public boolean nodeExists(String path, String revisionId)
            throws DocumentStoreException {
        if (!PathUtils.isAbsolute(path)) {
            throw new DocumentStoreException("Path is not absolute: " + path);
        }
        revisionId = revisionId != null ? revisionId : nodeStore.getHeadRevision().toString();
        Revision rev = Revision.fromString(revisionId);
        DocumentNodeState n;
        try {
            n = nodeStore.getNode(path, rev);
        } catch (DocumentStoreException e) {
            throw new DocumentStoreException(e);
        }
        return n != null;
    }

    /**
     * Returns a JSON representation of the node at {@code path}, including up
     * to {@code maxChildNodes} child node names starting at {@code offset}.
     *
     * @param depth must be 0; other depths are not supported.
     * @param maxChildNodes -1 for unlimited.
     * @param filter when it contains {@code :id} or {@code :hash}, the node id
     *               is included in the output.
     * @return the JSON string, or {@code null} if the node does not exist.
     */
    public String getNodes(String path, String revisionId, int depth,
                           long offset, int maxChildNodes, String filter)
            throws DocumentStoreException {
        if (depth != 0) {
            throw new DocumentStoreException("Only depth 0 is supported, depth is " + depth);
        }
        revisionId = revisionId != null ? revisionId : nodeStore.getHeadRevision().toString();
        Revision rev = Revision.fromString(revisionId);
        try {
            DocumentNodeState n = nodeStore.getNode(path, rev);
            if (n == null) {
                return null;
            }
            JsopStream json = new JsopStream();
            boolean includeId = filter != null && filter.contains(":id");
            includeId |= filter != null && filter.contains(":hash");
            json.object();
            n.append(json, includeId);
            int max;
            if (maxChildNodes == -1) {
                max = Integer.MAX_VALUE;
                maxChildNodes = Integer.MAX_VALUE;
            } else {
                // use long to avoid overflows
                long m = ((long) maxChildNodes) + offset;
                max = (int) Math.min(m, Integer.MAX_VALUE);
            }
            Children c = nodeStore.getChildren(n, null, max);
            for (long i = offset; i < c.children.size(); i++) {
                if (maxChildNodes-- <= 0) {
                    break;
                }
                String name = c.children.get((int) i);
                json.key(name).object().endObject();
            }
            if (c.hasMore) {
                // TODO use a better way to notify there are more children
                json.key(":childNodeCount").value(Long.MAX_VALUE);
            } else {
                json.key(":childNodeCount").value(c.children.size());
            }
            json.endObject();
            return json.toString();
        } catch (DocumentStoreException e) {
            throw new DocumentStoreException(e);
        }
    }

    /**
     * Applies the given JSON diff below {@code rootPath} on top of
     * {@code baseRevId} (head when null) and returns the resulting revision.
     * The commit is cancelled when applying the diff fails.
     */
    public String commit(String rootPath, String jsonDiff, String baseRevId,
                         String message) throws DocumentStoreException {
        boolean success = false;
        boolean isBranch = false;
        Revision rev;
        Commit commit = nodeStore.newCommit(
                baseRevId != null ? Revision.fromString(baseRevId) : null, null);
        try {
            Revision baseRev = commit.getBaseRevision();
            isBranch = baseRev != null && baseRev.isBranch();
            parseJsonDiff(commit, jsonDiff, rootPath);
            rev = commit.apply();
            success = true;
        } catch (DocumentStoreException e) {
            throw new DocumentStoreException(e);
        } finally {
            if (!success) {
                nodeStore.canceled(commit);
            } else {
                nodeStore.done(commit, isBranch, null);
            }
        }
        return rev.toString();
    }

    /**
     * Creates a branch from the given trunk revision (head when null).
     */
    public String branch(@Nullable String trunkRevisionId) throws DocumentStoreException {
        // nothing is written when the branch is created, the returned
        // revision simply acts as a reference to the branch base revision
        Revision revision = trunkRevisionId != null
                ? Revision.fromString(trunkRevisionId) : nodeStore.getHeadRevision();
        return revision.asBranchRevision().toString();
    }

    /**
     * Merges the given branch revision into trunk and returns the merge
     * revision.
     */
    public String merge(String branchRevisionId, String message)
            throws DocumentStoreException {
        // TODO improve implementation if needed
        Revision revision = Revision.fromString(branchRevisionId);
        if (!revision.isBranch()) {
            throw new DocumentStoreException("Not a branch: " + branchRevisionId);
        }
        try {
            return nodeStore.merge(revision, null).toString();
        } catch (DocumentStoreException e) {
            throw new DocumentStoreException(e);
        } catch (CommitFailedException e) {
            throw new DocumentStoreException(e);
        }
    }

    /**
     * Rebases the given branch revision on top of {@code newBaseRevisionId}
     * (head when null).
     */
    @Nonnull
    public String rebase(@Nonnull String branchRevisionId,
                         @Nullable String newBaseRevisionId)
            throws DocumentStoreException {
        Revision r = Revision.fromString(branchRevisionId);
        Revision base = newBaseRevisionId != null
                ? Revision.fromString(newBaseRevisionId) : nodeStore.getHeadRevision();
        return nodeStore.rebase(r, base).toString();
    }

    /**
     * Resets the given branch to the given ancestor branch revision.
     * Both revisions must be branch revisions.
     */
    @Nonnull
    public String reset(@Nonnull String branchRevisionId,
                        @Nonnull String ancestorRevisionId)
            throws DocumentStoreException {
        Revision branch = Revision.fromString(branchRevisionId);
        if (!branch.isBranch()) {
            throw new DocumentStoreException("Not a branch revision: " + branchRevisionId);
        }
        Revision ancestor = Revision.fromString(ancestorRevisionId);
        if (!ancestor.isBranch()) {
            throw new DocumentStoreException("Not a branch revision: " + ancestorRevisionId);
        }
        try {
            return nodeStore.reset(branch, ancestor, null).toString();
        } catch (DocumentStoreException e) {
            throw new DocumentStoreException(e);
        }
    }

    /**
     * @return the length of the blob with the given id.
     */
    public long getLength(String blobId) throws DocumentStoreException {
        try {
            return nodeStore.getBlobStore().getBlobLength(blobId);
        } catch (Exception e) {
            throw new DocumentStoreException(e);
        }
    }

    /**
     * Reads up to {@code length} bytes of the blob into {@code buff}.
     *
     * @return the number of bytes read, 0 at the end of the blob (a negative
     *         read result from the blob store is normalized to 0).
     */
    public int read(String blobId, long pos, byte[] buff, int off, int length)
            throws DocumentStoreException {
        try {
            int read = nodeStore.getBlobStore().readBlob(blobId, pos, buff, off, length);
            return read < 0 ? 0 : read;
        } catch (Exception e) {
            throw new DocumentStoreException(e);
        }
    }

    /**
     * Writes the stream into the blob store and returns the blob id.
     */
    public String write(InputStream in) throws DocumentStoreException {
        try {
            return nodeStore.getBlobStore().writeBlob(in);
        } catch (Exception e) {
            throw new DocumentStoreException(e);
        }
    }

    //-------------------------< accessors >------------------------------------

    public DocumentStore getDocumentStore() {
        return store;
    }

    //------------------------------< internal >--------------------------------

    /**
     * Parses the JSOP diff and applies each operation (+, -, ^, >, *) to the
     * given commit. Paths in the diff are resolved relative to
     * {@code rootPath}.
     */
    private void parseJsonDiff(Commit commit, String json, String rootPath) {
        Revision baseRev = commit.getBaseRevision();
        String baseRevId = baseRev != null ? baseRev.toString() : null;
        // nodes added by this very diff; their parents do not exist in the
        // base revision yet, so existence checks must consult this set first
        Set<String> added = Sets.newHashSet();
        JsopReader t = new JsopTokenizer(json);
        while (true) {
            int r = t.read();
            if (r == JsopReader.END) {
                break;
            }
            String path = PathUtils.concat(rootPath, t.readString());
            switch (r) {
                case '+':
                    t.read(':');
                    t.read('{');
                    parseAddNode(commit, t, path);
                    added.add(path);
                    break;
                case '-':
                    DocumentNodeState toRemove = nodeStore.getNode(path, commit.getBaseRevision());
                    if (toRemove == null) {
                        throw new DocumentStoreException("Node not found: " + path + " in revision " + baseRevId);
                    }
                    commit.removeNode(path);
                    nodeStore.markAsDeleted(toRemove, commit, true);
                    commit.removeNodeDiff(path);
                    break;
                case '^':
                    t.read(':');
                    String value;
                    if (t.matches(JsopReader.NULL)) {
                        // property removal
                        value = null;
                    } else {
                        value = t.readRawValue().trim();
                    }
                    String p = PathUtils.getParentPath(path);
                    if (!added.contains(p) && nodeStore.getNode(p, commit.getBaseRevision()) == null) {
                        throw new DocumentStoreException("Node not found: " + path + " in revision " + baseRevId);
                    }
                    String propertyName = PathUtils.getName(path);
                    commit.updateProperty(p, propertyName, value);
                    commit.updatePropertyDiff(p, propertyName, value);
                    break;
                case '>': {
                    // TODO support moving nodes that were modified within this commit
                    t.read(':');
                    String targetPath = t.readString();
                    if (!PathUtils.isAbsolute(targetPath)) {
                        targetPath = PathUtils.concat(rootPath, targetPath);
                    }
                    DocumentNodeState source = nodeStore.getNode(path, baseRev);
                    if (source == null) {
                        throw new DocumentStoreException("Node not found: " + path + " in revision " + baseRevId);
                    } else if (nodeExists(targetPath, baseRevId)) {
                        throw new DocumentStoreException("Node already exists: " + targetPath + " in revision " + baseRevId);
                    }
                    commit.moveNode(path, targetPath);
                    nodeStore.moveNode(source, targetPath, commit);
                    break;
                }
                case '*': {
                    // TODO support copying nodes that were modified within this commit
                    t.read(':');
                    String targetPath = t.readString();
                    if (!PathUtils.isAbsolute(targetPath)) {
                        targetPath = PathUtils.concat(rootPath, targetPath);
                    }
                    DocumentNodeState source = nodeStore.getNode(path, baseRev);
                    if (source == null) {
                        throw new DocumentStoreException("Node not found: " + path + " in revision " + baseRevId);
                    } else if (nodeExists(targetPath, baseRevId)) {
                        throw new DocumentStoreException("Node already exists: " + targetPath + " in revision " + baseRevId);
                    }
                    commit.copyNode(path, targetPath);
                    nodeStore.copyNode(source, targetPath, commit);
                    break;
                }
                default:
                    throw new DocumentStoreException("token: " + (char) t.getTokenType());
            }
        }
    }

    /**
     * Recursively parses an added node (the tokenizer is positioned right
     * after the opening brace) and registers it with the commit.
     */
    private void parseAddNode(Commit commit, JsopReader t, String path) {
        DocumentNodeState n = new DocumentNodeState(nodeStore, path,
                commit.getRevision());
        if (!t.matches('}')) {
            do {
                String key = t.readString();
                t.read(':');
                if (t.matches('{')) {
                    // nested child node
                    String childPath = PathUtils.concat(path, key);
                    parseAddNode(commit, t, childPath);
                } else {
                    String value = t.readRawValue().trim();
                    n.setProperty(key, value);
                }
            } while (t.matches(','));
            t.read('}');
        }
        commit.addNode(n);
        commit.addNodeDiff(n);
    }

    //----------------------------< Builder >-----------------------------------

    /**
     * A builder for a DocumentMK instance.
     */
    public static class Builder {
        private static final long DEFAULT_MEMORY_CACHE_SIZE = 256 * 1024 * 1024;
        public static final int DEFAULT_NODE_CACHE_PERCENTAGE = 25;
        public static final int DEFAULT_CHILDREN_CACHE_PERCENTAGE = 10;
        public static final int DEFAULT_DIFF_CACHE_PERCENTAGE = 5;
        public static final int DEFAULT_DOC_CHILDREN_CACHE_PERCENTAGE = 3;
        public static final int DEFAULT_CACHE_SEGMENT_COUNT = 16;
        public static final int DEFAULT_CACHE_STACK_MOVE_DISTANCE = 16;
        private DocumentNodeStore nodeStore;
        private DocumentStore documentStore;
        private DiffCache diffCache;
        private BlobStore blobStore;
        private int clusterId = Integer.getInteger("oak.documentMK.clusterId", 0);
        private int asyncDelay = 1000;
        private boolean timing;
        private boolean logging;
        private boolean leaseCheck = true; // OAK-2739 is enabled by default also for non-osgi
        private Weigher<CacheValue, CacheValue> weigher = new EmpiricalWeigher();
        private long memoryCacheSize = DEFAULT_MEMORY_CACHE_SIZE;
        private int nodeCachePercentage = DEFAULT_NODE_CACHE_PERCENTAGE;
        private int childrenCachePercentage = DEFAULT_CHILDREN_CACHE_PERCENTAGE;
        private int diffCachePercentage = DEFAULT_DIFF_CACHE_PERCENTAGE;
        private int docChildrenCachePercentage = DEFAULT_DOC_CHILDREN_CACHE_PERCENTAGE;
        private int cacheSegmentCount = DEFAULT_CACHE_SEGMENT_COUNT;
        private int cacheStackMoveDistance = DEFAULT_CACHE_STACK_MOVE_DISTANCE;
        private boolean useSimpleRevision;
        private long offHeapCacheSize = -1;
        private long maxReplicationLagMillis = TimeUnit.HOURS.toMillis(6);
        private boolean disableBranches;
        private Clock clock = Clock.SIMPLE;
        private Executor executor;
        private String persistentCacheURI = DEFAULT_PERSISTENT_CACHE_URI;
        private PersistentCache persistentCache;

        public Builder() {
        }

        /**
         * Use the given MongoDB as backend storage for the DocumentNodeStore.
         *
         * @param db the MongoDB connection
         * @param blobCacheSizeMB the blob cache size in MB
         * @return this
         */
        public Builder setMongoDB(DB db, int blobCacheSizeMB) {
            if (db != null) {
                // an explicitly configured document/blob store wins over Mongo
                if (this.documentStore == null) {
                    this.documentStore = new MongoDocumentStore(db, this);
                }
                if (this.blobStore == null) {
                    GarbageCollectableBlobStore s = new MongoBlobStore(db, blobCacheSizeMB * 1024 * 1024L);
                    PersistentCache p = getPersistentCache();
                    if (p != null) {
                        s = p.wrapBlobStore(s);
                    }
                    this.blobStore = s;
                }
            }
            return this;
        }

        /**
         * Set the MongoDB connection to use. By default an in-memory store is used.
         *
         * @param db the MongoDB connection
         * @return this
         */
        public Builder setMongoDB(DB db) {
            return setMongoDB(db, 16);
        }

        /**
         * Sets a {@link DataSource} to use for the RDB document and blob
         * stores.
         *
         * @return this
         */
        public Builder setRDBConnection(DataSource ds) {
            this.documentStore = new RDBDocumentStore(ds, this);
            if (this.blobStore == null) {
                this.blobStore = new RDBBlobStore(ds);
            }
            return this;
        }

        /**
         * Sets a {@link DataSource} to use for the RDB document and blob
         * stores, including {@link RDBOptions}.
         *
         * @return this
         */
        public Builder setRDBConnection(DataSource ds, RDBOptions options) {
            this.documentStore = new RDBDocumentStore(ds, this, options);
            if (this.blobStore == null) {
                this.blobStore = new RDBBlobStore(ds, options);
            }
            return this;
        }

        /**
         * Sets the persistent cache option.
         *
         * @return this
         */
        public Builder setPersistentCache(String persistentCache) {
            this.persistentCacheURI = persistentCache;
            return this;
        }

        /**
         * Sets a {@link DataSource}s to use for the RDB document and blob
         * stores.
         *
         * @return this
         */
        public Builder setRDBConnection(DataSource documentStoreDataSource, DataSource blobStoreDataSource) {
            this.documentStore = new RDBDocumentStore(documentStoreDataSource, this);
            this.blobStore = new RDBBlobStore(blobStoreDataSource);
            return this;
        }

        /**
         * Use the timing document store wrapper.
         *
         * @param timing whether to use the timing wrapper.
         * @return this
         */
        public Builder setTiming(boolean timing) {
            this.timing = timing;
            return this;
        }

        public boolean getTiming() {
            return timing;
        }

        public Builder setLogging(boolean logging) {
            this.logging = logging;
            return this;
        }

        public boolean getLogging() {
            return logging;
        }

        public Builder setLeaseCheck(boolean leaseCheck) {
            this.leaseCheck = leaseCheck;
            return this;
        }

        public boolean getLeaseCheck() {
            return leaseCheck;
        }

        /**
         * Set the document store to use. By default an in-memory store is used.
         *
         * @param documentStore the document store
         * @return this
         */
        public Builder setDocumentStore(DocumentStore documentStore) {
            this.documentStore = documentStore;
            return this;
        }

        public DocumentStore getDocumentStore() {
            if (documentStore == null) {
                documentStore = new MemoryDocumentStore();
            }
            return documentStore;
        }

        public DocumentNodeStore getNodeStore() {
            if (nodeStore == null) {
                nodeStore = new DocumentNodeStore(this);
            }
            return nodeStore;
        }

        public DiffCache getDiffCache() {
            if (diffCache == null) {
                diffCache = new TieredDiffCache(this);
            }
            return diffCache;
        }

        public Builder setDiffCache(DiffCache diffCache) {
            this.diffCache = diffCache;
            return this;
        }

        /**
         * Set the blob store to use. By default an in-memory store is used.
         *
         * @param blobStore the blob store
         * @return this
         */
        public Builder setBlobStore(BlobStore blobStore) {
            this.blobStore = blobStore;
            return this;
        }

        public BlobStore getBlobStore() {
            if (blobStore == null) {
                blobStore = new MemoryBlobStore();
            }
            return blobStore;
        }

        /**
         * Set the cluster id to use. By default, 0 is used, meaning the cluster
         * id is automatically generated.
         *
         * @param clusterId the cluster id
         * @return this
         */
        public Builder setClusterId(int clusterId) {
            this.clusterId = clusterId;
            return this;
        }

        public Builder setCacheSegmentCount(int cacheSegmentCount) {
            this.cacheSegmentCount = cacheSegmentCount;
            return this;
        }

        // FIX: parameter was misleadingly named 'cacheSegmentCount' (copy-paste
        // from setCacheSegmentCount); it configures the stack move distance.
        public Builder setCacheStackMoveDistance(int cacheStackMoveDistance) {
            this.cacheStackMoveDistance = cacheStackMoveDistance;
            return this;
        }

        public int getClusterId() {
            return clusterId;
        }

        /**
         * Set the maximum delay to write the last revision to the root node. By
         * default 1000 (meaning 1 second) is used.
         *
         * @param asyncDelay in milliseconds
         * @return this
         */
        public Builder setAsyncDelay(int asyncDelay) {
            this.asyncDelay = asyncDelay;
            return this;
        }

        public int getAsyncDelay() {
            return asyncDelay;
        }

        public Weigher<CacheValue, CacheValue> getWeigher() {
            return weigher;
        }

        public Builder withWeigher(Weigher<CacheValue, CacheValue> weigher) {
            this.weigher = weigher;
            return this;
        }

        public Builder memoryCacheSize(long memoryCacheSize) {
            this.memoryCacheSize = memoryCacheSize;
            return this;
        }

        /**
         * Distributes the total memory cache size over the individual caches.
         * The percentages must sum to strictly less than 100 so the remainder
         * is left for the document cache (see {@link #getDocumentCacheSize()}).
         */
        public Builder memoryCacheDistribution(int nodeCachePercentage,
                                               int childrenCachePercentage,
                                               int docChildrenCachePercentage,
                                               int diffCachePercentage) {
            checkArgument(nodeCachePercentage >= 0);
            checkArgument(childrenCachePercentage >= 0);
            checkArgument(docChildrenCachePercentage >= 0);
            checkArgument(diffCachePercentage >= 0);
            checkArgument(nodeCachePercentage + childrenCachePercentage
                    + docChildrenCachePercentage + diffCachePercentage < 100);
            this.nodeCachePercentage = nodeCachePercentage;
            this.childrenCachePercentage = childrenCachePercentage;
            this.docChildrenCachePercentage = docChildrenCachePercentage;
            this.diffCachePercentage = diffCachePercentage;
            return this;
        }

        public long getNodeCacheSize() {
            return memoryCacheSize * nodeCachePercentage / 100;
        }

        public long getChildrenCacheSize() {
            return memoryCacheSize * childrenCachePercentage / 100;
        }

        /** The document cache gets whatever the other caches leave over. */
        public long getDocumentCacheSize() {
            return memoryCacheSize - getNodeCacheSize() - getChildrenCacheSize()
                    - getDiffCacheSize() - getDocChildrenCacheSize();
        }

        public long getDocChildrenCacheSize() {
            return memoryCacheSize * docChildrenCachePercentage / 100;
        }

        public long getDiffCacheSize() {
            return memoryCacheSize * diffCachePercentage / 100;
        }

        public long getMemoryDiffCacheSize() {
            return getDiffCacheSize() / 2;
        }

        public long getLocalDiffCacheSize() {
            return getDiffCacheSize() / 2;
        }

        public Builder setUseSimpleRevision(boolean useSimpleRevision) {
            this.useSimpleRevision = useSimpleRevision;
            return this;
        }

        public boolean isUseSimpleRevision() {
            return useSimpleRevision;
        }

        public boolean useOffHeapCache() {
            return this.offHeapCacheSize > 0;
        }

        public long getOffHeapCacheSize() {
            return offHeapCacheSize;
        }

        public Builder offHeapCacheSize(long offHeapCacheSize) {
            this.offHeapCacheSize = offHeapCacheSize;
            return this;
        }

        public Executor getExecutor() {
            if (executor == null) {
                return MoreExecutors.sameThreadExecutor();
            }
            return executor;
        }

        public Builder setExecutor(Executor executor) {
            this.executor = executor;
            return this;
        }

        public Builder clock(Clock clock) {
            this.clock = clock;
            return this;
        }

        public Clock getClock() {
            return clock;
        }

        public Builder setMaxReplicationLag(long duration, TimeUnit unit) {
            maxReplicationLagMillis = unit.toMillis(duration);
            return this;
        }

        public long getMaxReplicationLagMillis() {
            return maxReplicationLagMillis;
        }

        public Builder disableBranches() {
            disableBranches = true;
            return this;
        }

        public boolean isDisableBranches() {
            return disableBranches;
        }

        /** Picks the Mongo-specific GC support when backed by MongoDB. */
        VersionGCSupport createVersionGCSupport() {
            DocumentStore store = getDocumentStore();
            if (store instanceof MongoDocumentStore) {
                return new MongoVersionGCSupport((MongoDocumentStore) store);
            } else {
                return new VersionGCSupport(store);
            }
        }

        /**
         * Open the DocumentMK instance using the configured options.
         *
         * @return the DocumentMK instance
         */
        public DocumentMK open() {
            return new DocumentMK(this);
        }

        public Cache<PathRev, DocumentNodeState> buildNodeCache(DocumentNodeStore store) {
            return buildCache(CacheType.NODE, getNodeCacheSize(), store, null);
        }

        public Cache<PathRev, DocumentNodeState.Children> buildChildrenCache() {
            return buildCache(CacheType.CHILDREN, getChildrenCacheSize(), null, null);
        }

        public Cache<StringValue, NodeDocument.Children> buildDocChildrenCache() {
            return buildCache(CacheType.DOC_CHILDREN, getDocChildrenCacheSize(), null, null);
        }

        public Cache<PathRev, StringValue> buildMemoryDiffCache() {
            return buildCache(CacheType.DIFF, getMemoryDiffCacheSize(), null, null);
        }

        public Cache<RevisionsKey, LocalDiffCache.Diff> buildLocalDiffCache() {
            return buildCache(CacheType.LOCAL_DIFF, getLocalDiffCacheSize(), null, null);
        }

        public Cache<CacheValue, NodeDocument> buildDocumentCache(DocumentStore docStore) {
            return buildCache(CacheType.DOCUMENT, getDocumentCacheSize(), null, docStore);
        }

        /**
         * Builds an in-memory cache and, when a persistent cache is
         * configured, wraps it so that evicted entries spill to disk.
         */
        private <K extends CacheValue, V extends CacheValue> Cache<K, V> buildCache(
                CacheType cacheType,
                long maxWeight,
                DocumentNodeStore docNodeStore,
                DocumentStore docStore) {
            Cache<K, V> cache = buildCache(cacheType.name(), maxWeight);
            PersistentCache p = getPersistentCache();
            if (p != null) {
                if (docNodeStore != null) {
                    docNodeStore.setPersistentCache(p);
                }
                cache = p.wrap(docNodeStore, docStore, cache, cacheType);
            }
            return cache;
        }

        /** Lazily creates the persistent cache; null when not configured. */
        private PersistentCache getPersistentCache() {
            if (persistentCacheURI == null) {
                return null;
            }
            if (persistentCache == null) {
                try {
                    persistentCache = new PersistentCache(persistentCacheURI);
                } catch (Throwable e) {
                    LOG.warn("Persistent cache not available; please disable the configuration", e);
                    throw new IllegalArgumentException(e);
                }
            }
            return persistentCache;
        }

        private <K extends CacheValue, V extends CacheValue> Cache<K, V> buildCache(
                String module,
                long maxWeight) {
            // by default, use the LIRS cache when using the persistent cache,
            // but don't use it otherwise
            boolean useLirs = persistentCacheURI != null;
            // allow to override this by using the system property
            if (LIRS_CACHE != null) {
                useLirs = LIRS_CACHE;
            }
            if (useLirs) {
                return CacheLIRS.<K, V>newBuilder().
                        module(module).
                        weigher(new Weigher<K, V>() {
                            @Override
                            public int weigh(K key, V value) {
                                return weigher.weigh(key, value);
                            }
                        }).
                        averageWeight(2000).
                        maximumWeight(maxWeight).
                        segmentCount(cacheSegmentCount).
                        stackMoveDistance(cacheStackMoveDistance).
                        recordStats().
                        build();
            }
            return CacheBuilder.newBuilder().
                    concurrencyLevel(CACHE_CONCURRENCY).
                    weigher(weigher).
                    maximumWeight(maxWeight).
                    recordStats().
                    build();
        }
    }
}
/**
 * Copyright (C) 2006-2013 phloc systems
 * http://www.phloc.com
 * office[at]phloc[dot]com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.phloc.commons.io.streams;

import java.io.IOException;
import java.io.Reader;

import javax.annotation.CheckForSigned;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.NotThreadSafe;

/**
 * A non-synchronized copy of the class {@link java.io.StringReader}.<br>
 * Note: super class {@link Reader} uses the lock object internally only for
 * <code>long skip(long n)</code> and as this method is overwritten in here, the
 * lock is never used.
 *
 * @author Philip Helger
 * @see java.io.StringReader
 */
@NotThreadSafe
public class NonBlockingStringReader extends Reader {
    // Backing string; set to null by close() to mark the reader as closed.
    private String m_sStr;
    private final int m_nLength;
    // Index of the next character to deliver.
    private int m_nNext = 0;
    // Position remembered by mark(); reset() rewinds to it (0 until marked).
    private int m_nMark = 0;

    public NonBlockingStringReader(@Nonnull final char[] aChars) {
        this(new String(aChars));
    }

    public NonBlockingStringReader(@Nonnull final char[] aChars,
                                   @Nonnegative final int nOfs,
                                   @Nonnegative final int nLen) {
        this(new String(aChars, nOfs, nLen));
    }

    /**
     * Creates a new string reader.
     *
     * @param sStr
     *        String providing the character stream. May not be <code>null</code>.
     */
    public NonBlockingStringReader(@Nonnull final String sStr) {
        if (sStr == null)
            throw new NullPointerException("str");
        m_sStr = sStr;
        m_nLength = sStr.length();
    }

    /**
     * Check to make sure that the stream has not been closed.
     *
     * @throws IOException
     *         When the string is closed
     */
    private void _ensureOpen() throws IOException {
        if (m_sStr == null)
            throw new IOException("Stream closed");
    }

    /**
     * Reads a single character.
     *
     * @return The character read, or -1 if the end of the stream has been reached
     * @exception IOException
     *            If an I/O error occurs
     */
    @Override
    @CheckForSigned
    public int read() throws IOException {
        _ensureOpen();
        return m_nNext < m_nLength ? m_sStr.charAt(m_nNext++) : -1;
    }

    /**
     * Reads characters into a portion of an array.
     *
     * @param aBuf
     *        Destination buffer
     * @param nOfs
     *        Offset at which to start writing characters
     * @param nLen
     *        Maximum number of characters to read
     * @return The number of characters read, or -1 if the end of the stream has
     *         been reached
     * @exception IOException
     *            If an I/O error occurs
     */
    @Override
    @CheckForSigned
    public int read(@Nonnull final char aBuf[],
                    @Nonnegative final int nOfs,
                    @Nonnegative final int nLen) throws IOException {
        _ensureOpen();
        // Deviation from java.io.StringReader: bad bounds raise
        // IllegalArgumentException, not IndexOutOfBoundsException.
        if (nOfs < 0 || nLen < 0 || (nOfs + nLen) > aBuf.length)
            throw new IllegalArgumentException("ofs:" + nOfs + ";len=" + nLen + ";bufLen=" + aBuf.length);
        if (nLen == 0)
            return 0;
        final int nRemaining = m_nLength - m_nNext;
        if (nRemaining <= 0)
            return -1;
        final int nCopied = nRemaining < nLen ? nRemaining : nLen;
        m_sStr.getChars(m_nNext, m_nNext + nCopied, aBuf, nOfs);
        m_nNext += nCopied;
        return nCopied;
    }

    /**
     * Skips the specified number of characters in the stream. Returns the number
     * of characters that were skipped.
     * <p>
     *
     * @param nCharsToSkip
     *        The parameter may be negative, even though the <code>skip</code>
     *        method of the {@link Reader} superclass throws an exception in this
     *        case. Negative values of the parameter cause the stream to skip
     *        backwards. Negative return values indicate a skip backwards. It is
     *        not possible to skip backwards past the beginning of the string.
     * @return If the entire string has been read or skipped, then this method has
     *         no effect and always returns 0.
     * @exception IOException
     *            If an I/O error occurs
     */
    @Override
    public long skip(final long nCharsToSkip) throws IOException {
        _ensureOpen();
        if (m_nNext >= m_nLength)
            return 0;
        // Clamp between "back to the start" and "forward to the end".
        long nEffective = Math.min(m_nLength - m_nNext, nCharsToSkip);
        nEffective = Math.max(-m_nNext, nEffective);
        m_nNext += nEffective;
        return nEffective;
    }

    /**
     * Tells whether this stream is ready to be read.
     *
     * @return <code>true</code> if the next read() is guaranteed not to block for
     *         input
     * @exception IOException
     *            If the stream is closed
     */
    @Override
    public boolean ready() throws IOException {
        _ensureOpen();
        return true;
    }

    /**
     * Tells whether this stream supports the mark() operation, which it does.
     *
     * @return always <code>true</code>
     */
    @Override
    public boolean markSupported() {
        return true;
    }

    /**
     * Marks the present position in the stream. Subsequent calls to reset() will
     * reposition the stream to this point.
     *
     * @param nReadAheadLimit
     *        Limit on the number of characters that may be read while still
     *        preserving the mark. Because the stream's input comes from a string,
     *        there is no actual limit, so this argument must not be negative, but
     *        is otherwise ignored.
     * @exception IllegalArgumentException
     *            If readAheadLimit is &lt; 0
     * @exception IOException
     *            If an I/O error occurs
     */
    @Override
    public void mark(final int nReadAheadLimit) throws IOException {
        // Argument check deliberately precedes the open check, matching the
        // original ordering of failures.
        if (nReadAheadLimit < 0)
            throw new IllegalArgumentException("Read-ahead limit < 0: " + nReadAheadLimit);
        _ensureOpen();
        m_nMark = m_nNext;
    }

    /**
     * Resets the stream to the most recent mark, or to the beginning of the
     * string if it has never been marked.
     *
     * @exception IOException
     *            If an I/O error occurs
     */
    @Override
    public void reset() throws IOException {
        _ensureOpen();
        m_nNext = m_nMark;
    }

    /**
     * Closes the stream and releases any system resources associated with it.
     * Once the stream has been closed, further read(), ready(), mark(), or
     * reset() invocations will throw an IOException. Closing a previously closed
     * stream has no effect.
     */
    @Override
    public void close() {
        m_sStr = null;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.cameltooling.idea.documentation; import java.util.Arrays; import java.util.List; import java.util.Map; import com.github.cameltooling.idea.service.CamelCatalogService; import com.github.cameltooling.idea.service.CamelService; import com.github.cameltooling.idea.util.CamelIdeaUtils; import com.github.cameltooling.idea.util.IdeaUtils; import com.intellij.ide.BrowserUtil; import com.intellij.lang.ASTNode; import com.intellij.lang.Language; import com.intellij.lang.documentation.DocumentationProviderEx; import com.intellij.lang.documentation.ExternalDocumentationHandler; import com.intellij.lang.documentation.ExternalDocumentationProvider; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiClassType; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiExpression; import com.intellij.psi.PsiExpressionList; import 
com.intellij.psi.PsiFile; import com.intellij.psi.PsiLiteralExpression; import com.intellij.psi.PsiManager; import com.intellij.psi.PsiMethod; import com.intellij.psi.PsiMethodCallExpression; import com.intellij.psi.PsiType; import com.intellij.psi.PsiTypeParameterList; import com.intellij.psi.impl.light.LightElement; import com.intellij.psi.impl.source.PsiClassReferenceType; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.XmlToken; import com.intellij.psi.xml.XmlTokenType; import org.apache.camel.catalog.CamelCatalog; import org.apache.camel.tooling.model.ComponentModel; import org.apache.camel.tooling.model.JsonMapper; import org.apache.camel.util.json.DeserializationException; import org.apache.camel.util.json.JsonObject; import org.apache.camel.util.json.Jsoner; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import static com.github.cameltooling.idea.util.StringUtils.asComponentName; import static com.github.cameltooling.idea.util.StringUtils.asLanguageName; import static com.github.cameltooling.idea.util.StringUtils.wrapSeparator; import static com.github.cameltooling.idea.util.StringUtils.wrapWords; /** * Camel documentation provider to hook into IDEA to show Camel endpoint documentation in popups and various other places. 
 */
public class CamelDocumentationProvider extends DocumentationProviderEx implements ExternalDocumentationProvider, ExternalDocumentationHandler {

    private static final Logger LOG = Logger.getInstance(CamelDocumentationProvider.class);

    /** Looks up the shared {@link IdeaUtils} application service. */
    public IdeaUtils getIdeaUtils() {
        return ServiceManager.getService(IdeaUtils.class);
    }

    /** Looks up the shared {@link CamelIdeaUtils} application service. */
    public CamelIdeaUtils getCamelIdeaUtils() {
        return ServiceManager.getService(CamelIdeaUtils.class);
    }

    /**
     * Produces the quick-navigate popup text for a Camel endpoint URI literal.
     * <p>
     * Only returns documentation when Camel is on the project classpath, the element is a string
     * literal argument of a method call, the enclosing class extends {@code RouteBuilder}, and the
     * literal resolves to a known Camel component name; otherwise returns {@code null}.
     */
    @Nullable
    @Override
    public String getQuickNavigateInfo(PsiElement element, PsiElement originalElement) {
        if (ServiceManager.getService(element.getProject(), CamelService.class).isCamelPresent()) {
            PsiExpressionList exps = PsiTreeUtil.getNextSiblingOfType(originalElement, PsiExpressionList.class);
            if (exps != null) {
                if (exps.getExpressions().length >= 1) {
                    // grab the first java.lang.String parameter, as that is the argument which
                    // would contain the camel endpoint uri
                    final PsiClassType stringType = PsiType.getJavaLangString(element.getManager(), element.getResolveScope());
                    PsiExpression exp = Arrays.stream(exps.getExpressions()).filter(
                        e -> e.getType() != null && stringType.isAssignableFrom(e.getType()))
                        .findFirst().orElse(null);
                    if (exp instanceof PsiLiteralExpression) {
                        Object o = ((PsiLiteralExpression) exp).getValue();
                        String val = o != null ? o.toString() : null;
                        // only allow this popup to work when it is used from a RouteBuilder class
                        PsiClass clazz = PsiTreeUtil.getParentOfType(originalElement, PsiClass.class);
                        if (clazz != null) {
                            PsiClassType[] types = clazz.getExtendsListTypes();
                            boolean found = Arrays.stream(types).anyMatch(p -> p.getClassName().equals("RouteBuilder"));
                            if (found) {
                                String componentName = asComponentName(val);
                                if (componentName != null) {
                                    // the quick info popup cannot be very wide, so wrap at 120 chars
                                    return generateCamelComponentDocumentation(componentName, val, 120, element.getProject());
                                }
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    /**
     * Returns the external camel.apache.org documentation URL for the element's endpoint URI,
     * or {@code null} when the element is not a known Camel component.
     */
    @Nullable
    @Override
    public List<String> getUrlFor(PsiElement element, PsiElement originalElement) {
        if (hasDocumentationForCamelComponent(element)) {
            String val = fetchLiteralForCamelDocumentation(element);
            String url = externalUrl(element.getProject(), val);
            if (url != null) {
                return List.of(url);
            }
        }
        return null;
    }

    /**
     * Generates the HTML documentation shown in the quick-doc window.
     * <p>
     * Handles three cases: a {@link DocumentationElement} (endpoint-option docs created by
     * {@link #getDocumentationElementForLookupItem}), a literal that maps to a Camel component
     * (full component docs, unwrapped), or a method call that matches a Camel language name
     * (the catalog's language HTML doc). Returns {@code null} when none apply.
     */
    @Nullable
    @Override
    public String generateDoc(PsiElement element, @Nullable PsiElement originalElement) {
        if (element instanceof DocumentationElement) {
            DocumentationElement documentationElement = (DocumentationElement) element;
            return generateCamelEndpointOptionDocumentation(documentationElement.getComponentName(), documentationElement.getEndpointOption(), element.getProject());
        }
        String val = null;
        if (ServiceManager.getService(element.getProject(), CamelService.class).isCamelPresent()) {
            val = fetchLiteralForCamelDocumentation(element);
            if (val == null) {
                return null;
            }
        }
        String componentName = asComponentName(val);
        if (componentName != null) {
            // wrapLength -1 means: do not wrap the description text (see wrapText)
            return generateCamelComponentDocumentation(componentName, val, -1, element.getProject());
        } else {
            // it may be a method call for a Camel language
            PsiMethodCallExpression call = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
            if (call != null) {
                PsiMethod method = call.resolveMethod();
                if (method != null) {
                    // try to see if we have a Camel language with the method name
                    String name = asLanguageName(method.getName());
                    if (ServiceManager.getService(element.getProject(), CamelCatalogService.class).get().findLanguageNames().contains(name)) {
                        // okay it is a potential Camel language, so check whether the psi method call
                        // is using camel-core types so we know for a fact it really is a Camel language
                        if (isPsiMethodCamelLanguage(method)) {
                            String html = ServiceManager.getService(element.getProject(), CamelCatalogService.class).get().languageHtmlDoc(name);
                            if (html != null) {
                                return html;
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    /**
     * Creates a {@link DocumentationElement} for a completion lookup item so that quick-doc can be
     * shown for the endpoint option the user is completing.
     * <p>
     * Extracts the last {@code ?}/{@code &}-separated option from the lookup string; for a
     * multi-value prefix option (ending with a dot) the real option name is resolved from the
     * component's catalog model. Returns {@code null} for non-String lookups, unknown components,
     * or lookups without options.
     */
    @Nullable
    @Override
    public PsiElement getDocumentationElementForLookupItem(PsiManager psiManager, Object object, PsiElement element) {
        // we only support literal - string types where Camel endpoints can be specified
        if (!(object instanceof String)) {
            return null;
        }
        String lookup = object.toString();
        // must be a Camel component
        String componentName = asComponentName(lookup);
        if (componentName == null) {
            return null;
        }
        // unescape xml &
        lookup = lookup.replaceAll("&amp;", "&");
        // get last option from lookup line
        int pos = Math.max(lookup.lastIndexOf("&"), lookup.lastIndexOf("?"));
        if (pos > 0) {
            String option = lookup.substring(pos + 1);
            // if the option has a value then drop that
            pos = option.indexOf("=");
            if (pos != -1) {
                option = option.substring(0, pos);
            }
            LOG.debug("getDocumentationElementForLookupItem: " + option);
            // if the option ends with a dot then it is a prefixed/multi value option which needs special logic:
            // find its real option name and documentation which we want to show in the quick doc window
            if (option.endsWith(".")) {
                CamelCatalog camelCatalog = ServiceManager.getService(psiManager.getProject(), CamelCatalogService.class).get();
                String json = camelCatalog.componentJSonSchema(componentName);
                if (json == null) {
                    return null;
                }
                ComponentModel component = JsonMapper.generateComponentModel(json);
                final String prefixOption = option;
                // find the option with this prefix which is marked as multivalue
                ComponentModel.EndpointOptionModel endpointOption = component.getEndpointOptions().stream().filter(
                    o -> o.isMultiValue() && prefixOption.equals(o.getPrefix()))
                    .findFirst().orElse(null);
                // use the real option name instead of the prefix
                if (endpointOption != null) {
                    option = endpointOption.getName();
                }
            }
            return new DocumentationElement(psiManager, element.getLanguage(), element, option, componentName);
        }
        return null;
    }

    /** Links inside generated docs are not resolved to PSI elements by this provider. */
    @Nullable
    @Override
    public PsiElement getDocumentationElementForLink(PsiManager psiManager, String link, PsiElement context) {
        return null;
    }

    /**
     * Lets IDEA ask whether we can provide documentation for an element that has no default
     * documentation target (e.g. endpoint URIs inside XML attribute values / properties files).
     */
    @Override
    public @Nullable PsiElement getCustomDocumentationElement(@NotNull Editor editor, @NotNull PsiFile file, @Nullable PsiElement contextElement, int targetOffset) {
        // documentation from properties file will cause IDEA to call this method where we can tell IDEA we can provide
        // documentation for the element if we can detect its a Camel component
        if (contextElement != null) {
            ASTNode node = contextElement.getNode();
            if (node instanceof XmlToken) {
                // there is an &amp; in the route that splits the route into separate PsiElements,
                // so documentation is looked up on the parent element in these cases:
                if (node.getElementType() == XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN
                    // the caret is at the end of the route next to the " character
                    || node.getElementType() == XmlTokenType.XML_ATTRIBUTE_VALUE_END_DELIMITER
                    // the caret is placed on an &amp; element
                    || contextElement.getText().equals("&amp;")) {
                    if (hasDocumentationForCamelComponent(contextElement.getParent())) {
                        return contextElement.getParent();
                    }
                }
            }
            if (hasDocumentationForCamelComponent(contextElement)) {
                return contextElement;
            }
        }
        return null;
    }

    /**
     * F1 (external) documentation, rendered inside IDEA instead of a browser: delegates to
     * {@link #generateDoc} under a read-action, as required for PSI access off the normal flow.
     */
    @Override
    public @Nullable String fetchExternalDocumentation(Project project, PsiElement element, List<String> docUrls, boolean onHover) {
        // F1 documentation which is external but shown inside IDEA
        // need to be run as read-action to avoid IDEA reporting an error
        return ApplicationManager.getApplication().runReadAction((Computable<String>) () -> generateDoc(element, element));
    }

    /** @return whether external (F1) documentation exists, i.e. the element is a known Camel component. */
    @Override
    public boolean hasDocumentationFor(PsiElement element, PsiElement originalElement) {
        return hasDocumentationForCamelComponent(element);
    }

    /** External documentation needs no user configuration, so never prompt. */
    @Override
    public boolean canPromptToConfigureDocumentation(PsiElement element) {
        return false;
    }

    /** No-op: see {@link #canPromptToConfigureDocumentation}. */
    @Override
    public void promptToConfigureDocumentation(PsiElement element) {
        // noop
    }

    /**
     * Opens the component's camel.apache.org page in the external browser (Shift+F1).
     *
     * @return {@code true} when a browser was opened; {@code false} when the element is not a
     *         Camel endpoint literal, Camel is absent, or no URL could be derived
     */
    @Override
    public boolean handleExternal(PsiElement element, PsiElement originalElement) {
        String val = fetchLiteralForCamelDocumentation(element);
        if (val == null || !ServiceManager.getService(element.getProject(), CamelService.class).isCamelPresent()) {
            return false;
        }
        String url = externalUrl(element.getProject(), val);
        if (url != null) {
            BrowserUtil.browse(url);
            return true;
        }
        return false;
    }

    /**
     * Builds the camel.apache.org documentation URL for the component behind the given endpoint
     * URI, or {@code null} when the component is unknown to the catalog.
     * <p>
     * The version segment maps 2.* to "2.x", the 3.4/3.7 LTS lines to "3.4.x"/"3.7.x", and
     * everything else to "latest"; the path shape depends on the catalog kind
     * (other / component / anything else).
     */
    private static String externalUrl(Project project, String val) {
        String url = null;
        String name = asComponentName(val);
        CamelCatalog camelCatalog = ServiceManager.getService(project, CamelCatalogService.class).get();
        if (name != null && camelCatalog.findComponentNames().contains(name)) {
            String json = camelCatalog.componentJSonSchema(name);
            ComponentModel component = JsonMapper.generateComponentModel(json);
            String version = component.getVersion();
            if (version.startsWith("2")) {
                version = "2.x";
            } else if (version.startsWith("3.4")) {
                version = "3.4.x"; // LTS
            } else if (version.startsWith("3.7")) {
                version = "3.7.x"; // LTS
            } else {
                version = "latest";
            }
            if ("other".equals(component.getKind())) {
                url = String.format("https://camel.apache.org/components/%s/others/%s.html", version, component.getName());
            } else if ("component".equals(component.getKind())) {
                url = String.format("https://camel.apache.org/components/%s/%s-component.html", version, component.getScheme());
            } else {
                url = String.format("https://camel.apache.org/components/%s/%ss/%s-%s.html", version, component.getKind(), component.getName(), component.getKind());
            }
        }
        return url;
    }

    /** Links are not handled specially; let the platform deal with them. */
    @Override
    public boolean handleExternalLink(PsiManager psiManager, String link, PsiElement context) {
        return false;
    }

    /** This handler never fetches documentation by link, so {@link #fetchExternalDocumentation(String, PsiElement)} is unreachable. */
    @Override
    public boolean canFetchDocumentationLink(String link) {
        return false;
    }

    // NOTE(review): returns null despite the @NotNull annotation, violating the interface contract.
    // Currently harmless only because canFetchDocumentationLink() above always returns false, so the
    // platform never calls this method — confirm against the ExternalDocumentationHandler javadoc
    // before relying on it.
    @NotNull
    @Override
    public String fetchExternalDocumentation(@NotNull String link, @Nullable PsiElement element) {
        return null;
    }

    /**
     * @return whether the element's literal text resolves to a component name known to the Camel
     *         catalog (requires Camel to be present in the project)
     */
    private boolean hasDocumentationForCamelComponent(PsiElement element) {
        if (ServiceManager.getService(element.getProject(), CamelService.class).isCamelPresent()) {
            String text = fetchLiteralForCamelDocumentation(element);
            if (text != null) {
                // check if its a known Camel component
                String name = asComponentName(text);
                Project project = element.getProject();
                return ServiceManager.getService(project, CamelCatalogService.class).get().findComponentNames().contains(name);
            }
        }
        return false;
    }

    /** Extracts the literal text (e.g. an endpoint URI) from the element; null-safe. */
    private String fetchLiteralForCamelDocumentation(PsiElement element) {
        if (element == null) {
            return null;
        }
        return getIdeaUtils().extractTextFromElement(element);
    }

    /**
     * Generates documentation for the endpoint option.
     *
     * @param componentName the name of the Camel component
     * @param option the name of the Camel component option to generate documentation for;
     *        a trailing dot means it is a multi-value prefix to be matched against option prefixes
     * @param project the current project
     * @return a String representing the HTML documentation, or {@code null} when the component or
     *         option is unknown
     */
    private String generateCamelEndpointOptionDocumentation(String componentName, String option, Project project) {
        CamelCatalog camelCatalog = ServiceManager.getService(project, CamelCatalogService.class).get();
        String json = camelCatalog.componentJSonSchema(componentName);
        if (json == null) {
            return null;
        }
        ComponentModel component = JsonMapper.generateComponentModel(json);
        ComponentModel.EndpointOptionModel endpointOption;
        if (option.endsWith(".")) {
            // find the option with this prefix which is marked as multivalue
            endpointOption = component.getEndpointOptions().stream().filter(
                o -> o.isMultiValue() && option.equals(o.getPrefix()))
                .findFirst().orElse(null);
        } else {
            endpointOption = component.getEndpointOptions().stream().filter(
                o -> option.equals(o.getName()))
                .findFirst().orElse(null);
        }
        if (endpointOption == null) {
            return null;
        }
        // render name (struck through when deprecated), group, type, required flag,
        // possible enum values, default value and the description as simple HTML
        StringBuilder builder = new StringBuilder();
        if (endpointOption.isDeprecated()) {
            builder.append("<strong><s>").append(endpointOption.getName()).append("</s></strong><br/><br/>");
        } else {
            builder.append("<strong>").append(endpointOption.getName()).append("</strong><br/><br/>");
        }
        builder.append("<strong>Group: </strong>").append(endpointOption.getGroup()).append("<br/>");
        builder.append("<strong>Type: </strong>").append("<tt>").append(endpointOption.getJavaType()).append("</tt>").append("<br/>");
        boolean required = endpointOption.isRequired();
        builder.append("<strong>Required: </strong>").append(required).append("<br/>");
        if (endpointOption.getEnums() != null) {
            String values = String.join(", ", endpointOption.getEnums());
            builder.append("<strong>Possible values: </strong>").append(values).append("<br/>");
        }
        if (endpointOption.getDefaultValue() != null) {
            builder.append("<strong>Default value: </strong>").append(endpointOption.getDefaultValue()).append("<br/>");
        }
        builder.append("<br/><div>").append(endpointOption.getDescription()).append("</div>");
        return builder.toString();
    }

    /**
     * Generates the full HTML documentation for a Camel component and the concrete endpoint URI.
     * <p>
     * Produces a header (title, description, versions, Maven coordinates, syntax), the endpoint
     * URI itself, and a section per endpoint property parsed out of {@code val} by the catalog,
     * including lenient (unknown/custom) properties.
     *
     * @param componentName the Camel component name resolved from the URI
     * @param val the endpoint URI literal (may contain XML-escaped {@code &amp;})
     * @param wrapLength wrap width for description text; non-positive disables wrapping
     * @param project the current project
     * @return HTML documentation, or {@code null} when the component is unknown to the catalog
     */
    private String generateCamelComponentDocumentation(String componentName, String val, int wrapLength, Project project) {
        // it is a known Camel component
        CamelCatalog camelCatalog = ServiceManager.getService(project, CamelCatalogService.class).get();
        String json = camelCatalog.componentJSonSchema(componentName);
        if (json == null) {
            return null;
        }
        ComponentModel component = JsonMapper.generateComponentModel(json);
        // camel catalog expects &amp; as & when it parses so replace all &amp; as &
        String camelQuery = val;
        camelQuery = camelQuery.replaceAll("&amp;", "&");
        // strip trailing incomplete parameter (user may still be typing the uri)
        if (camelQuery.endsWith("&") || camelQuery.endsWith("?")) {
            camelQuery = camelQuery.substring(0, camelQuery.length() - 1);
        }
        // best-effort: a malformed uri must not break the documentation popup, so any
        // parse failure is only logged and the options section is simply omitted
        Map<String, String> existing = null;
        try {
            existing = camelCatalog.endpointProperties(camelQuery);
        } catch (Throwable e) {
            LOG.warn("Error parsing Camel endpoint properties with url: " + camelQuery, e);
        }
        StringBuilder options = new StringBuilder();
        if (existing != null && !existing.isEmpty()) {
            JsonObject jsonObject;
            try {
                jsonObject = (JsonObject) Jsoner.deserialize(json);
            } catch (DeserializationException e) {
                // the catalog produced this json itself, so failing to re-parse it is a hard error
                throw new RuntimeException(e);
            }
            Map<String, JsonObject> properties = jsonObject.getMap("properties");
            for (Map.Entry<String, String> entry : existing.entrySet()) {
                String name = entry.getKey();
                String value = entry.getValue();
                JsonObject row = properties.get(name);
                if (row != null) {
                    String kind = row.getString("kind");
                    String deprecated = row.getString("deprecated");
                    String line;
                    if ("path".equals(kind)) {
                        // path options render as the bare value; query options as name=value
                        line = value + "<br/>";
                    } else {
                        if ("true".equals(deprecated)) {
                            line = "<s>" + name + "</s>=" + value + "<br/>";
                        } else {
                            line = name + "=" + value + "<br/>";
                        }
                    }
                    options.append("<br/>");
                    options.append("<b>").append(line).append("</b>");
                    String summary = row.getString("description");
                    // the text looks a bit weird when using single /
                    summary = summary.replace('/', ' ');
                    options.append(wrapText(summary, wrapLength)).append("<br/>");
                }
            }
        }
        // append any lenient options as well
        Map<String, String> extra = null;
        try {
            extra = camelCatalog.endpointLenientProperties(camelQuery);
        } catch (Throwable e) {
            LOG.warn("Error parsing Camel endpoint properties with url: " + camelQuery, e);
        }
        if (extra != null && !extra.isEmpty()) {
            for (Map.Entry<String, String> entry : extra.entrySet()) {
                String name = entry.getKey();
                String value = entry.getValue();
                String line = name + "=" + value + "<br/>";
                options.append("<br/>");
                options.append("<b>").append(line).append("</b>");
                String summary = "This option is a custom option that is not part of the Camel component";
                options.append(wrapText(summary, wrapLength)).append("<br/>");
            }
        }
        StringBuilder sb = new StringBuilder();
        if (component.isDeprecated()) {
            sb.append("<b><s>").append(component.getTitle()).append(" Component (deprecated)</s></b><br/>");
        } else {
            sb.append("<b>").append(component.getTitle()).append(" Component</b><br/>");
        }
        sb.append(wrapText(component.getDescription(), wrapLength)).append("<br/><br/>");
        if (component.getDeprecatedSince() != null) {
            sb.append("<b>Deprecated Since:</b> <tt>").append(component.getDeprecatedSince()).append("</tt><br/>");
        }
        sb.append("<b>Since:</b> <tt>").append(component.getFirstVersionShort()).append("</tt><br/>");
        if (component.getSupportLevel() != null) {
            sb.append("<b>Support Level:</b> <tt>").append(component.getSupportLevel()).append("</tt><br/>");
        }
        String g = component.getGroupId();
        String a = component.getArtifactId();
        String v = component.getVersion();
        if (g != null && a != null && v != null) {
            sb.append("<b>Maven:</b> <tt>").append(g).append(":").append(a).append(":").append(v).append("</tt><br/>");
        }
        sb.append("<b>Syntax:</b> <tt>").append(component.getSyntax()).append("?options</tt><br/>");
        sb.append("<p/>");
        sb.append("<br/>");
        // indent the endpoint url with 5 spaces and wrap it by url separator
        String wrapped = wrapSeparator(val, "&", "<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;", 100);
        sb.append("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<b>").append(wrapped).append("</b><br/>");
        if (options.length() > 0) {
            sb.append(options);
        }
        return sb.toString();
    }

    /**
     * @return whether the method's return type (or its enclosing class, to cope with how IDEA
     *         stubs the route builder AST) is a Camel expression/language type
     */
    private boolean isPsiMethodCamelLanguage(PsiMethod method) {
        PsiType type = method.getReturnType();
        if (type instanceof PsiClassReferenceType) {
            PsiClassReferenceType clazz = (PsiClassReferenceType) type;
            PsiClass resolved = clazz.resolve();
            if (resolved != null) {
                boolean language = getCamelIdeaUtils().isCamelExpressionOrLanguage(resolved);
                // try parent using some weird/nasty stub stuff which is how complex IDEA AST
                // is when its parsing the Camel route builder
                if (!language) {
                    PsiElement elem = resolved.getParent();
                    if (elem instanceof PsiTypeParameterList) {
                        elem = elem.getParent();
                    }
                    if (elem instanceof PsiClass) {
                        language = getCamelIdeaUtils().isCamelExpressionOrLanguage((PsiClass) elem);
                    }
                }
                return language;
            }
        }
        return false;
    }

    /** Word-wraps {@code text} with {@code <br/>} at {@code wrapLength}; non-positive length leaves it unwrapped. */
    private static String wrapText(String text, int wrapLength) {
        if (wrapLength > 0) {
            text = wrapWords(text, "<br/>", wrapLength, true);
        }
        return text;
    }

    /**
     * {@link PsiElement} used only to transfer documentation data.
     * <p>
     * Carries the component name and the (resolved) endpoint option name from
     * {@link #getDocumentationElementForLookupItem} to {@link #generateDoc}.
     */
    static class DocumentationElement extends LightElement {

        // the element the lookup was performed on (used for toString only)
        private PsiElement element;
        // the endpoint option the documentation should describe
        private String endpointOption;
        // the Camel component owning the option
        private String componentName;

        DocumentationElement(@NotNull PsiManager psiManager, @NotNull Language language, PsiElement element, String endpointOption, String componentName) {
            super(psiManager, language);
            this.element = element;
            this.endpointOption = endpointOption;
            this.componentName = componentName;
        }

        @Override
        public String toString() {
            return element.getText();
        }

        @Override
        public String getText() {
            return endpointOption;
        }

        public PsiElement getElement() {
            return element;
        }

        String getEndpointOption() {
            return endpointOption;
        }

        String getComponentName() {
            return componentName;
        }
    }
}
/******************************************************************************* * * Copyright FUJITSU LIMITED 2017 * * Creation Date: Dec 20, 2012 * *******************************************************************************/ package org.oscm.billingservice.business.calculation.revenue.setup; import org.oscm.billingservice.setup.BillingIntegrationTestBase; import org.oscm.billingservice.setup.IntegrationTestSetup; import org.oscm.billingservice.setup.VOPriceModelFactory.TestPriceModel; import org.oscm.billingservice.setup.VOServiceFactory; import org.oscm.billingservice.setup.VOServiceFactory.TestService; import org.oscm.test.DateTimeHandling; import org.oscm.internal.vo.VORoleDefinition; import org.oscm.internal.vo.VOServiceDetails; import org.oscm.internal.vo.VOSubscriptionDetails; /** * @author baumann */ public class ParameterChargeSetup extends IntegrationTestSetup { /** * A customer subscribes to a service and terminates the subscription within * a time unit. The value of a parameter is changed several times in the * time unit. The role of the assigned user is changed too in the time unit. * The parameter fees are charged in a "pro rata" way. 
See example 2 in * requirement http://wwwi.est.fujitsu.com/confluence/x/4I7H */ public void createMonthScenarioParAndRoleChange() throws Exception { BillingIntegrationTestBase .setDateFactoryInstance("2013-02-04 12:00:00"); VOServiceDetails serviceDetails = serviceSetup .createPublishAndActivateMarketableService( basicSetup.getSupplierAdminKey(), "PARCHARGE_PU_MONTH_ROLES", TestService.EXAMPLE, TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES_PARS, technicalService, supplierMarketplace); setCutOffDay(basicSetup.getSupplierAdminKey(), 1); VORoleDefinition role = VOServiceFactory .getRole(serviceDetails, "USER"); container.login(basicSetup.getCustomerAdminKey(), ROLE_ORGANIZATION_ADMIN); VOSubscriptionDetails subDetails = subscrSetup.subscribeToService( "PARCHARGE_PU_MONTH_ROLES", serviceDetails, basicSetup.getCustomerUser1(), role); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-11 12:00:00"), "MAX_FOLDER_NUMBER", "4"); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-15 00:00:00")); subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses() .get(0), VOServiceFactory.getRole(serviceDetails, "GUEST"), subDetails.getSubscriptionId()); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-22 00:00:00"), "MAX_FOLDER_NUMBER", "88"); BillingIntegrationTestBase .setDateFactoryInstance("2013-02-25 12:00:00"); subscrSetup.unsubscribeToService(subDetails.getSubscriptionId()); resetCutOffDay(basicSetup.getSupplierAdminKey()); BillingIntegrationTestBase.updateSubscriptionListForTests( "PARCHARGE_PU_MONTH_ROLES", subDetails); } /** * A customer subscribes to a service and terminates the subscription within * a billing period. The value of a parameter and the user role are changed * several times. The parameter fees are charged in a "pro rata" way if * parameter- or user role changes occur in a time unit. 
Similar as example * 2 in requirement http://wwwi.est.fujitsu.com/confluence/x/4I7H, but there * are several time units charged in the billing period. */ public void createWeekScenarioParAndRoleChange() throws Exception { BillingIntegrationTestBase .setDateFactoryInstance("2013-02-01 00:00:00"); VOServiceDetails serviceDetails = serviceSetup .createPublishAndActivateMarketableService( basicSetup.getSupplierAdminKey(), "PARCHARGE_PU_WEEK_ROLES", TestService.EXAMPLE, TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS, technicalService, supplierMarketplace); setCutOffDay(basicSetup.getSupplierAdminKey(), 1); VORoleDefinition role = VOServiceFactory.getRole(serviceDetails, "ADMIN"); container.login(basicSetup.getCustomerAdminKey(), ROLE_ORGANIZATION_ADMIN); VOSubscriptionDetails subDetails = subscrSetup.subscribeToService( "PARCHARGE_PU_WEEK_ROLES", serviceDetails, basicSetup.getCustomerUser1(), role); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-04 00:00:00")); subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses() .get(0), VOServiceFactory.getRole(serviceDetails, "USER"), subDetails.getSubscriptionId()); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-04 00:00:00"), "MAX_FOLDER_NUMBER", "2"); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-11 00:00:00"), "MAX_FOLDER_NUMBER", "7"); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-14 12:00:00")); subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses() .get(0), VOServiceFactory.getRole(serviceDetails, "GUEST"), subDetails.getSubscriptionId()); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-19 18:00:00"), "MAX_FOLDER_NUMBER", "13"); BillingIntegrationTestBase .setDateFactoryInstance("2013-03-01 00:00:00"); 
subscrSetup.unsubscribeToService(subDetails.getSubscriptionId()); resetCutOffDay(basicSetup.getSupplierAdminKey()); BillingIntegrationTestBase.updateSubscriptionListForTests( "PARCHARGE_PU_WEEK_ROLES", subDetails); } /** * Same as createWeekScenarioParAndRoleChange(), but the subscription is * terminated at the beginning of the overlapping week. */ public void createWeekScenarioParAndRoleChange2() throws Exception { BillingIntegrationTestBase .setDateFactoryInstance("2013-02-01 00:00:00"); VOServiceDetails serviceDetails = serviceSetup .createPublishAndActivateMarketableService( basicSetup.getSupplierAdminKey(), "PARCHARGE_PU_WEEK_ROLES2", TestService.EXAMPLE, TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS, technicalService, supplierMarketplace); setCutOffDay(basicSetup.getSupplierAdminKey(), 1); VORoleDefinition role = VOServiceFactory.getRole(serviceDetails, "ADMIN"); container.login(basicSetup.getCustomerAdminKey(), ROLE_ORGANIZATION_ADMIN); VOSubscriptionDetails subDetails = subscrSetup.subscribeToService( "PARCHARGE_PU_WEEK_ROLES2", serviceDetails, basicSetup.getCustomerUser1(), role); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-04 00:00:00")); subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses() .get(0), VOServiceFactory.getRole(serviceDetails, "USER"), subDetails.getSubscriptionId()); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-04 00:00:00"), "MAX_FOLDER_NUMBER", "2"); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-11 00:00:00"), "MAX_FOLDER_NUMBER", "7"); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-14 12:00:00")); subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses() .get(0), VOServiceFactory.getRole(serviceDetails, "GUEST"), subDetails.getSubscriptionId()); subDetails = 
subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-19 18:00:00"), "MAX_FOLDER_NUMBER", "13"); // Terminate the subscription at the beginning of the overlapping week BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-25 00:00:00")); subscrSetup.unsubscribeToService(subDetails.getSubscriptionId()); resetCutOffDay(basicSetup.getSupplierAdminKey()); BillingIntegrationTestBase.updateSubscriptionListForTests( "PARCHARGE_PU_WEEK_ROLES2", subDetails); } /** * A customer subscribes to a service and terminates the subscription within * a time unit. The value of a parameter is changed several times in the * time unit. In addition the assigned user is deassigned and reassigned * with a different role in the time unit. The parameter fees are charged in * a "pro rata" way. See example 3 in requirement * http://wwwi.est.fujitsu.com/confluence/x/4I7H */ public void createMonthScenarioParAndUserAssignChange() throws Exception { BillingIntegrationTestBase .setDateFactoryInstance("2013-02-04 12:00:00"); VOServiceDetails serviceDetails = serviceSetup .createPublishAndActivateMarketableService( basicSetup.getSupplierAdminKey(), "PARCHARGE_PU_MONTH_ASSIGN", TestService.EXAMPLE, TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES_PARS, technicalService, supplierMarketplace); setCutOffDay(basicSetup.getSupplierAdminKey(), 1); VORoleDefinition role = VOServiceFactory .getRole(serviceDetails, "USER"); container.login(basicSetup.getCustomerAdminKey(), ROLE_ORGANIZATION_ADMIN); VOSubscriptionDetails subDetails = subscrSetup.subscribeToService( "PARCHARGE_PU_MONTH_ASSIGN", serviceDetails, basicSetup.getCustomerUser1(), role); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-11 12:00:00"), "MAX_FOLDER_NUMBER", "4"); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-13 06:00:00")); subDetails = 
subscrSetup.revokeUser(basicSetup.getCustomerUser1(), subDetails.getSubscriptionId()); // Add same user again, but with a different role BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-15 00:00:00")); subDetails = subscrSetup.addUser(basicSetup.getCustomerUser1(), VOServiceFactory.getRole(serviceDetails, "GUEST"), subDetails.getSubscriptionId()); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-22 00:00:00"), "MAX_FOLDER_NUMBER", "88"); BillingIntegrationTestBase .setDateFactoryInstance("2013-02-25 12:00:00"); subscrSetup.unsubscribeToService(subDetails.getSubscriptionId()); resetCutOffDay(basicSetup.getSupplierAdminKey()); BillingIntegrationTestBase.updateSubscriptionListForTests( "PARCHARGE_PU_MONTH_ASSIGN", subDetails); } /** * A customer subscribes to a service and terminates the subscription within * a billing period. The value of a parameter is changed several times. Also * the assigned user is deassigned and reassigned several times using * different roles. The parameter fees are charged in a "pro rata" way if * parameter- or user role changes occur in a time unit. Similar as example * 3 in requirement http://wwwi.est.fujitsu.com/confluence/x/4I7H, but there * are several time units charged in the billing period. 
*/ public void createWeekScenarioParAndUserAssignChange() throws Exception { BillingIntegrationTestBase .setDateFactoryInstance("2013-02-01 00:00:00"); VOServiceDetails serviceDetails = serviceSetup .createPublishAndActivateMarketableService( basicSetup.getSupplierAdminKey(), "PARCHARGE_PU_WEEK_ASSIGN", TestService.EXAMPLE, TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS, technicalService, supplierMarketplace); setCutOffDay(basicSetup.getSupplierAdminKey(), 1); VORoleDefinition role = VOServiceFactory.getRole(serviceDetails, "ADMIN"); container.login(basicSetup.getCustomerAdminKey(), ROLE_ORGANIZATION_ADMIN); VOSubscriptionDetails subDetails = subscrSetup.subscribeToService( "PARCHARGE_PU_WEEK_ASSIGN", serviceDetails, basicSetup.getCustomerUser1(), role); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-04 00:00:00")); subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses() .get(0), VOServiceFactory.getRole(serviceDetails, "USER"), subDetails.getSubscriptionId()); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-04 00:00:00"), "MAX_FOLDER_NUMBER", "2"); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-11 00:00:00"), "MAX_FOLDER_NUMBER", "7"); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-12 06:00:00")); subDetails = subscrSetup.revokeUser(basicSetup.getCustomerUser1(), subDetails.getSubscriptionId()); // Add same user again, but with a different role BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-14 12:00:00")); subDetails = subscrSetup.addUser(basicSetup.getCustomerUser1(), VOServiceFactory.getRole(serviceDetails, "GUEST"), subDetails.getSubscriptionId()); subDetails = subscrSetup.modifyParameterForSubscription(subDetails, DateTimeHandling.calculateMillis("2013-02-19 18:00:00"), 
"MAX_FOLDER_NUMBER", "13"); BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-21 12:00:00")); subDetails = subscrSetup.revokeUser(basicSetup.getCustomerUser1(), subDetails.getSubscriptionId()); // Add same user again with the same role BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling .calculateMillis("2013-02-24 00:00:00")); subDetails = subscrSetup.addUser(basicSetup.getCustomerUser1(), VOServiceFactory.getRole(serviceDetails, "GUEST"), subDetails.getSubscriptionId()); BillingIntegrationTestBase .setDateFactoryInstance("2013-03-01 00:00:00"); subscrSetup.unsubscribeToService(subDetails.getSubscriptionId()); resetCutOffDay(basicSetup.getSupplierAdminKey()); BillingIntegrationTestBase.updateSubscriptionListForTests( "PARCHARGE_PU_WEEK_ASSIGN", subDetails); } /** * A customer subscribes to a service and terminates the subscription within * a time unit. The value of a parameter is changed several times in the * time unit. In addition the assigned user is deassigned and reassigned * with a different role in the time unit. One parameter change is done * while the user is deassigned. The parameter fees are charged in a * "pro rata" way. 
See example 4 in requirement
     * http://wwwi.est.fujitsu.com/confluence/x/4I7H
     */
    public void createMonthScenarioParAndUserAssignChange2() throws Exception {
        // 2013-02-04: create/activate the per-unit/month service; supplier
        // cut-off day 1.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-04 12:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "PARCHARGE_PU_MONTH_ASSIGN2", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);

        // Subscribe customer user 1 with the USER service role.
        VORoleDefinition role = VOServiceFactory
                .getRole(serviceDetails, "USER");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "PARCHARGE_PU_MONTH_ASSIGN2", serviceDetails,
                basicSetup.getCustomerUser1(), role);

        // 2013-02-11: first parameter change while the user is assigned.
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-11 12:00:00"),
                "MAX_FOLDER_NUMBER", "4");

        // 2013-02-13: deassign the user; the next parameter change
        // (2013-02-22) happens while no user is assigned.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-13 06:00:00"));
        subDetails = subscrSetup.revokeUser(basicSetup.getCustomerUser1(),
                subDetails.getSubscriptionId());

        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-22 00:00:00"),
                "MAX_FOLDER_NUMBER", "88");

        // Add same user again, but with a different role
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-23 18:00:00"));
        subDetails = subscrSetup.addUser(basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "GUEST"),
                subDetails.getSubscriptionId());

        // 2013-02-25: terminate within the same time unit and register the
        // subscription for billing-result evaluation.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-25 12:00:00");
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());

        resetCutOffDay(basicSetup.getSupplierAdminKey());

        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "PARCHARGE_PU_MONTH_ASSIGN2", subDetails);
    }

    /**
     * A customer subscribes to a service and terminates the subscription
     * within a billing period. The value of a parameter is changed several
     * times. In addition the assigned user is deassigned in one time unit and
     * reassigned with a different role in another time unit. Two parameter
     * changes are done while the user is deassigned. The parameter fees are
     * charged in a "pro rata" way if parameter- or user role changes occur in
     * a time unit. See examples 5 and 6 in requirement
     * http://wwwi.est.fujitsu.com/confluence/x/4I7H
     */
    public void createWeekScenarioParAndUserAssignChange2() throws Exception {
        // 2013-02-01: create/activate the per-unit/week service; supplier
        // cut-off day 1.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-01 00:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "PARCHARGE_PU_WEEK_ASSIGN2", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);

        // Subscribe customer user 1 with the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "PARCHARGE_PU_WEEK_ASSIGN2", serviceDetails,
                basicSetup.getCustomerUser1(), role);

        // 2013-02-04: switch the user's role ADMIN -> USER and change the
        // MAX_FOLDER_NUMBER parameter in the same time unit.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-04 00:00:00"));
        subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses()
                .get(0), VOServiceFactory.getRole(serviceDetails, "USER"),
                subDetails.getSubscriptionId());
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-04 00:00:00"),
                "MAX_FOLDER_NUMBER", "2");

        // 2013-02-10: deassign the user; the next two parameter changes
        // (2013-02-11 and 2013-02-19) happen while no user is assigned.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-10 00:00:00"));
        subDetails = subscrSetup.revokeUser(basicSetup.getCustomerUser1(),
                subDetails.getSubscriptionId());

        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-11 00:00:00"),
                "MAX_FOLDER_NUMBER", "7");
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-19 18:00:00"),
                "MAX_FOLDER_NUMBER", "10");

        // Add same user again, but with a different role
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-21 12:00:00"));
        subDetails = subscrSetup.addUser(basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "GUEST"),
                subDetails.getSubscriptionId());
        // Parameter change after the GUEST reassignment.
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-23 06:00:00"),
                "MAX_FOLDER_NUMBER", "13");

        // 2013-03-01: terminate the subscription and register it for
        // billing-result evaluation.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-03-01 00:00:00");
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());

        resetCutOffDay(basicSetup.getSupplierAdminKey());

        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "PARCHARGE_PU_WEEK_ASSIGN2", subDetails);
    }

    /**
     * A customer subscribes to a service and terminates the subscription
     * within a time unit. After a while the assigned user is deassigned. The
     * value of a parameter is changed several times in the time unit. One
     * parameter change is done before the user is deassigned. The other
     * parameter change is done after the user has been deassigned. The
     * parameter fees are charged in a "pro rata" way.
See example 5 in requirement
     * http://wwwi.est.fujitsu.com/confluence/x/4I7H
     */
    public void createMonthScenarioParChangeAndUserDeassign() throws Exception {
        // 2013-02-04: create/activate the per-unit/month service; supplier
        // cut-off day 1.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-04 12:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "PARCHARGE_PU_MONTH_DEASSIGN", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES_PARS2,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);

        // Subscribe customer user 1 with the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "PARCHARGE_PU_MONTH_DEASSIGN", serviceDetails,
                basicSetup.getCustomerUser1(), role);

        // 2013-02-11: parameter change while the user is still assigned.
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-11 12:00:00"),
                "LONG_NUMBER", "29");

        // 2013-02-15: deassign the user; the 2013-02-22 parameter change
        // happens while no user is assigned.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-15 00:00:00"));
        subDetails = subscrSetup.revokeUser(basicSetup.getCustomerUser1(),
                subDetails.getSubscriptionId());

        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-22 00:00:00"),
                "LONG_NUMBER", "1588");

        // 2013-02-25: terminate within the same time unit and register the
        // subscription for billing-result evaluation.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-25 12:00:00");
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());

        resetCutOffDay(basicSetup.getSupplierAdminKey());

        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "PARCHARGE_PU_MONTH_DEASSIGN", subDetails);
    }

    /**
     * A customer subscribes to a service and terminates the subscription
     * within a billing period. The assigned user is deassigned in the middle
     * of the subscription. The value of a parameter is changed several times.
     * Two parameter changes are done before the user is deassigned. Another
     * parameter change is done after the user is deassigned. The parameter
     * fees are charged in a "pro rata" way if parameter- or user role changes
     * occur in a time unit. Similar to example 5 in requirement
     * http://wwwi.est.fujitsu.com/confluence/x/4I7H, but several time units
     * are charged.
     */
    public void createWeekScenarioParChangeAndUserDeassign() throws Exception {
        // 2013-02-01: create/activate the per-unit/week service with stepped
        // parameter prices; supplier cut-off day 1.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-01 00:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "PARCHARGE_PU_WEEK_DEASSIGN", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS_STEPPED,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);

        // Subscribe customer user 1 with the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "PARCHARGE_PU_WEEK_DEASSIGN", serviceDetails,
                basicSetup.getCustomerUser1(), role);

        // Two parameter changes (2013-02-04 and 2013-02-12) before the user
        // is deassigned.
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-04 00:00:00"),
                "LONG_NUMBER", "1523");
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-12 18:00:00"),
                "LONG_NUMBER", "400");

        // 2013-02-14: deassign the user; one more parameter change
        // afterwards (2013-02-16).
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-14 12:00:00"));
        subDetails = subscrSetup.revokeUser(basicSetup.getCustomerUser1(),
                subDetails.getSubscriptionId());

        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-16 06:00:00"),
                "LONG_NUMBER", "29");

        // 2013-03-01: terminate the subscription and register it for
        // billing-result evaluation.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-03-01 00:00:00");
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());

        resetCutOffDay(basicSetup.getSupplierAdminKey());

        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "PARCHARGE_PU_WEEK_DEASSIGN", subDetails);
    }

    /**
     * A subscription starts in an overlapping week. A parameter is changed in
     * that week.
     */
    public void createWeekScenarioParChange() throws Exception {
        // 2013-01-28: create/activate the per-unit/week service in the week
        // that overlaps the billing-period start; supplier cut-off day 1.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-01-28 02:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "PARCHANGE_PU_WEEK_SERVICE", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);

        // Subscribe customer user 1 (ADMIN role) via the customer admin.
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                basicSetup.getCustomerAdminKey(), "PARCHANGE_PU_WEEK",
                serviceDetails, basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "ADMIN"));

        // Parameter change inside the overlapping week (2013-01-30) and one
        // more later in the billing period (2013-02-11).
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-01-30 10:00:00"),
                "MAX_FOLDER_NUMBER", "2");
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-11 00:00:00"),
                "MAX_FOLDER_NUMBER", "7");

        // 2013-02-25: terminate the subscription and register it for
        // billing-result evaluation.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-25 00:00:00");
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());

        resetCutOffDay(basicSetup.getSupplierAdminKey());

        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "PARCHANGE_PU_WEEK", subDetails);
    }

    /**
     * A subscription starts in an overlapping week. It is upgraded in that
     * week from a per unit/week service to a pro rata/week service. A
     * parameter is changed before the upgrade.
*/
    public void createWeekScenarioParChangeUpgrade() throws Exception {
        // 2013-01-28: create/activate the per-unit/week base service;
        // supplier cut-off day 1.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-01-28 12:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "PARCHANGE_UPGRADE_PU_WEEK_SERVICE",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS2, 1,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);

        // Subscribe customer user 1 (ADMIN role) via the customer admin.
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                basicSetup.getCustomerAdminKey(), "PARCHANGE_UPGRADE_PU_WEEK",
                serviceDetails, basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "ADMIN"));

        // Parameter change before the upgrade (2013-01-30 00:00).
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-01-30 00:00:00"),
                "MAX_FOLDER_NUMBER", "2");

        // Upgrade the subscription to a pro rata service
        VOServiceDetails proRataService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "PARCHG_UPGRADE_RATA_PU_WEEK_SERVICE",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES_PAR_I,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails,
                proRataService);

        // 2013-01-30 12:00: perform the upgrade, keeping the parameter
        // values of the original subscription.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-01-30 12:00:00"));
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .copyParametersAndUpgradeSubscription(
                        basicSetup.getCustomerAdminKey(), subDetails,
                        proRataService);

        // 2013-02-25: terminate the upgraded subscription and register both
        // the original and the upgraded subscription for evaluation.
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-02-25 08:00:00");
        subscrSetup
                .unsubscribeToService(upgradedSubDetails.getSubscriptionId());

        resetCutOffDay(basicSetup.getSupplierAdminKey());

        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "PARCHANGE_UPGRADE_PU_WEEK", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "PARCHANGE_UPGRADE_PU_WEEK", upgradedSubDetails);
    }

    /**
     * A customer subscribes to a pro rata/week service. Over four weeks a
     * second user is added, the first user is deassigned and later
     * reassigned, and a parameter is changed twice, before the subscription
     * is terminated.
     */
    public void createRataWeekScenarioParUserChange() throws Exception {
        // 2013-04-01: create/activate the pro rata/week service; supplier
        // cut-off day 1.
        setDateFactory("2013-04-01 00:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "RATA_WEEK_PAR_USER_CHANGE_SERVICE",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES_PAR_I,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);

        // Subscribe customer user 1 (ADMIN role) via the customer admin.
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                basicSetup.getCustomerAdminKey(), "RATA_WEEK_PAR_USER_CHANGE",
                serviceDetails, basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "ADMIN"));

        // Week 2 (2013-04-08): add a second user as GUEST and change the
        // parameter.
        setDateFactory("2013-04-08 00:00:00");
        subDetails = subscrSetup.addUser(basicSetup.getCustomerUser2(),
                VOServiceFactory.getRole(serviceDetails, "GUEST"),
                subDetails.getSubscriptionId());
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-04-08 00:00:00"),
                "MAX_FOLDER_NUMBER", "2");

        // Week 3 (2013-04-15): deassign user 1; change the parameter again
        // mid-week (2013-04-18).
        setDateFactory("2013-04-15 00:00:00");
        subDetails = subscrSetup.revokeUser(basicSetup.getCustomerUser1(),
                subDetails.getSubscriptionId());
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-04-18 12:00:00"),
                "MAX_FOLDER_NUMBER", "3");

        // Week 4 (2013-04-22): reassign user 1 as GUEST.
        setDateFactory("2013-04-22 00:00:00");
        subDetails = subscrSetup.addUser(basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "GUEST"),
                subDetails.getSubscriptionId());

        // 2013-04-29: terminate the subscription and register it for
        // billing-result evaluation.
        setDateFactory("2013-04-29 00:00:00");
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());

        resetCutOffDay(basicSetup.getSupplierAdminKey());

        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "RATA_WEEK_PAR_USER_CHANGE", subDetails);
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://code.google.com/p/google-apis-client-generator/
 * (build: 2015-06-30 18:20:40 UTC)
 * on 2015-07-12 at 12:04:51 UTC
 * Modify at your own risk.
 */

package com.example.suvisavi.expenses.backend.registration;

/**
 * Registration request.
 *
 * <p>Generated base class for requests against the Registration endpoint. It carries the query
 * parameters shared by every Google API call (alt, fields, key, oauth_token, prettyPrint,
 * quotaUser, userIp); concrete request classes add their method-specific parameters. Do not edit
 * by hand — regenerate instead (see the generator banner above).
 *
 * @since 1.3
 */
@SuppressWarnings("javadoc")
public abstract class RegistrationRequest<T> extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest<T> {

  /**
   * @param client Google client
   * @param method HTTP Method
   * @param uriTemplate URI template for the path relative to the base URL. If it starts with a "/"
   *        the base path from the base URL will be stripped out. The URI template can also be a
   *        full URL. URI template expansion is done using
   *        {@link com.google.api.client.http.UriTemplate#expand(String, String, Object, boolean)}
   * @param content A POJO that can be serialized into JSON or {@code null} for none
   * @param responseClass response class to parse into
   */
  public RegistrationRequest(
      Registration client, String method, String uriTemplate, Object content, Class<T> responseClass) {
    super(
        client,
        method,
        uriTemplate,
        content,
        responseClass);
  }

  /** Data format for the response. */
  @com.google.api.client.util.Key
  private java.lang.String alt;

  /**
   * Data format for the response. [default: json]
   */
  public java.lang.String getAlt() {
    return alt;
  }

  /** Data format for the response. */
  public RegistrationRequest<T> setAlt(java.lang.String alt) {
    this.alt = alt;
    return this;
  }

  /** Selector specifying which fields to include in a partial response. */
  @com.google.api.client.util.Key
  private java.lang.String fields;

  /**
   * Selector specifying which fields to include in a partial response.
   */
  public java.lang.String getFields() {
    return fields;
  }

  /** Selector specifying which fields to include in a partial response. */
  public RegistrationRequest<T> setFields(java.lang.String fields) {
    this.fields = fields;
    return this;
  }

  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  @com.google.api.client.util.Key
  private java.lang.String key;

  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  public java.lang.String getKey() {
    return key;
  }

  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  public RegistrationRequest<T> setKey(java.lang.String key) {
    this.key = key;
    return this;
  }

  /** OAuth 2.0 token for the current user. */
  @com.google.api.client.util.Key("oauth_token")
  private java.lang.String oauthToken;

  /**
   * OAuth 2.0 token for the current user.
   */
  public java.lang.String getOauthToken() {
    return oauthToken;
  }

  /** OAuth 2.0 token for the current user. */
  public RegistrationRequest<T> setOauthToken(java.lang.String oauthToken) {
    this.oauthToken = oauthToken;
    return this;
  }

  /** Returns response with indentations and line breaks. */
  @com.google.api.client.util.Key
  private java.lang.Boolean prettyPrint;

  /**
   * Returns response with indentations and line breaks. [default: true]
   */
  public java.lang.Boolean getPrettyPrint() {
    return prettyPrint;
  }

  /** Returns response with indentations and line breaks. */
  public RegistrationRequest<T> setPrettyPrint(java.lang.Boolean prettyPrint) {
    this.prettyPrint = prettyPrint;
    return this;
  }

  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
   */
  @com.google.api.client.util.Key
  private java.lang.String quotaUser;

  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
   */
  public java.lang.String getQuotaUser() {
    return quotaUser;
  }

  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
   */
  public RegistrationRequest<T> setQuotaUser(java.lang.String quotaUser) {
    this.quotaUser = quotaUser;
    return this;
  }

  /**
   * IP address of the site where the request originates. Use this if you want to enforce per-user
   * limits.
   */
  @com.google.api.client.util.Key
  private java.lang.String userIp;

  /**
   * IP address of the site where the request originates. Use this if you want to enforce per-user
   * limits.
   */
  public java.lang.String getUserIp() {
    return userIp;
  }

  /**
   * IP address of the site where the request originates. Use this if you want to enforce per-user
   * limits.
   */
  public RegistrationRequest<T> setUserIp(java.lang.String userIp) {
    this.userIp = userIp;
    return this;
  }

  // Covariant-return overrides so fluent chains keep the concrete request type.

  @Override
  public final Registration getAbstractGoogleClient() {
    return (Registration) super.getAbstractGoogleClient();
  }

  @Override
  public RegistrationRequest<T> setDisableGZipContent(boolean disableGZipContent) {
    return (RegistrationRequest<T>) super.setDisableGZipContent(disableGZipContent);
  }

  @Override
  public RegistrationRequest<T> setRequestHeaders(com.google.api.client.http.HttpHeaders headers) {
    return (RegistrationRequest<T>) super.setRequestHeaders(headers);
  }

  @Override
  public RegistrationRequest<T> set(String parameterName, Object value) {
    return (RegistrationRequest<T>) super.set(parameterName, value);
  }
}
import android.annotation.TargetApi; import android.content.Context; import android.graphics.Bitmap; import android.os.Build.VERSION; import android.os.Handler; import android.os.SystemClock; import android.support.rastermill.FrameSequence; import android.util.Log; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.text.DecimalFormat; import java.util.Map; import org.chromium.net.ChunkedWritableByteChannel; import org.chromium.net.HttpUrlRequest; import org.chromium.net.HttpUrlRequestFactory; import org.chromium.net.HttpUrlRequestListener; public abstract class ife extends kaz implements iwy { public static DecimalFormat a; public final ifj b; final ift c; public ifg d; public ifh e; public String f; public String g; public int h; public String i; public String j; public String k; public String l; private long m; private String n; private int v = -1; public ife(ifj paramifj, kba paramkba) { super(paramifj, paramkba); this.b = paramifj; this.c = new ift(this, paramifj); } @TargetApi(19) private static int a(Bitmap paramBitmap) { if (Build.VERSION.SDK_INT >= 19) { return paramBitmap.getAllocationByteCount(); } return paramBitmap.getRowBytes() * paramBitmap.getHeight(); } private final void a(String paramString, ByteBuffer paramByteBuffer, boolean paramBoolean1, boolean paramBoolean2) { if ((paramBoolean1) && (!this.c.b(paramString, paramBoolean2))) { return; } if (paramBoolean2) {} for (iab localiab = this.b.f();; localiab = this.b.e()) { localiab.a(paramString, paramByteBuffer); return; } } private final void a(ByteBuffer paramByteBuffer) { kba localkba = (kba)this.o; if ((this.q != 2) && (this.q != 1)) { if (this.t) { String str7 = String.valueOf(this.o); String str8 = String.valueOf(kaz.c(this.q)); new StringBuilder(53 + String.valueOf(str7).length() + String.valueOf(str8).length()).append("Resource no longer needed, 
not delivering: ").append(str7).append(", status: ").append(str8); } return; } if ((0x2 & localkba.j) != 0) { if (this.t) { String str5 = String.valueOf(this.o); String str6 = String.valueOf(f()); new StringBuilder(48 + String.valueOf(str5).length() + String.valueOf(str6).length()).append("Completing a download-only request: ").append(str5).append(" file name: ").append(str6); } this.b.a(this, 1, f()); return; } if ((0x8 & localkba.j) != 0) { if (this.t) { String str4 = String.valueOf(this.o); new StringBuilder(56 + String.valueOf(str4).length()).append("Image decoding disabled. Delivering bytes to consumers: ").append(str4); } this.b.a(this, 1, efj.a(paramByteBuffer)); return; } int i1; for (;;) { try { if (paramByteBuffer.limit() >= 6) { paramByteBuffer.rewind(); if ((paramByteBuffer.get(0) == 71) && (paramByteBuffer.get(1) == 73) && (paramByteBuffer.get(2) == 70)) { i1 = 1; if (i1 != 0) { this.s = 2; } if (((0x4 & localkba.j) == 0) || ((0x20 & localkba.j) != 0) || (!ifb.a(paramByteBuffer))) { break; } c(new ifb(FrameSequence.a(paramByteBuffer))); return; } } } catch (OutOfMemoryError localOutOfMemoryError) { if (this.t) { String str1 = String.valueOf(this.o); new StringBuilder(36 + String.valueOf(str1).length()).append("Out of memory while decoding image: ").append(str1); } iff localiff = new iff(this); efj.m().post(localiff); this.b.a(this, 6, null); return; } i1 = 0; } if (((0x4 & localkba.j) != 0) && (i1 != 0)) { c(new icm(paramByteBuffer)); return; } if ((0x40 & localkba.j) != 0) {} for (boolean bool = true;; bool = false) { Object localObject1 = this.b.a(this, paramByteBuffer, bool); if (this.d != null) {} for (Object localObject2 = this.d.a((kba)this.o, localObject1);; localObject2 = localObject1) { if (localObject2 != null) { if (this.t) { String str2 = String.valueOf(this.o); new StringBuilder(31 + String.valueOf(str2).length()).append("Delivering image to consumers: ").append(str2); } c(localObject2); return; } if (this.t) { String str3 = 
String.valueOf(this.o); new StringBuilder(26 + String.valueOf(str3).length()).append("Bad image; cannot decode: ").append(str3); } File localFile = f(); if (localFile != null) { localFile.delete(); } this.b.a(this, 5); return; } } } public static boolean a(String paramString) { if ((paramString == null) || (paramString.length() == 0)) {} while (paramString.charAt(-1 + paramString.length()) != '~') { return false; } return true; } public static int b(Object paramObject) { if ((paramObject instanceof Bitmap)) { return ((Bitmap)paramObject).getWidth(); } if ((paramObject instanceof iey)) { return ((iey)paramObject).b; } return 0; } public abstract String a(); public final void a(int paramInt) { this.c.a(); super.a(paramInt); } public final void a(int paramInt, String paramString) { this.c.a(); super.a(paramInt, paramString); } public final void a(long paramLong1, long paramLong2) { if (this.e != null) { this.e.a(paramLong1, paramLong2); } } public final void a(long paramLong, String paramString, int paramInt1, int paramInt2, int paramInt3, Bitmap paramBitmap) { this.m = (SystemClock.currentThreadTimeMillis() - paramLong); long l1 = this.m; this.f = (23 + l1 + " ms"); this.n = paramString; this.h = paramInt1; if (this.t) { if (paramBitmap != null) { break label198; } } label198: int i1; int i2; for (String str1 = "null";; str1 = 25 + "[" + i1 + "x" + i2 + "]") { String str2 = this.f; new StringBuilder(80 + String.valueOf(paramString).length() + String.valueOf(str1).length() + String.valueOf(str2).length()).append("Decoded ").append(paramInt1).append(" byte ").append(paramString).append(" from source [").append(paramInt2).append("x").append(paramInt3).append("] into bitmap ").append(str1).append(" in ").append(str2); this.i = null; this.j = null; this.k = null; this.l = null; return; i1 = paramBitmap.getWidth(); i2 = paramBitmap.getHeight(); } } public void a(Object paramObject) { ift localift2; boolean bool3; File localFile1; if ((0x2 & ((kba)this.o).j) != 0) { 
this.c.a(); localift2 = this.c; if (localift2.l) { File localFile2 = localift2.m; String str10 = localift2.m.getName(); if ((0x400 & ((kba)localift2.a.o).j) != 0) { bool3 = true; localift2.b(str10, bool3); localFile1 = localFile2; label91: if (this.t) { String str8 = String.valueOf(this.o); String str9 = String.valueOf(localFile1); new StringBuilder(48 + String.valueOf(str8).length() + String.valueOf(str9).length()).append("Completing a download-only request: ").append(str8).append(" file name: ").append(str9); } this.b.a(this, 1, localFile1); } } } ByteBuffer localByteBuffer; kba localkba; do { do { return; bool3 = false; break; localFile1 = localift2.a.f(); localift2.m.renameTo(localFile1); break label91; localByteBuffer = (ByteBuffer)paramObject; if (this.t) { int i2 = localByteBuffer.remaining(); String str7 = String.valueOf(this.o); new StringBuilder(47 + String.valueOf(str7).length()).append("Delivering data: ").append(str7).append("; buffer has ").append(i2).append(" bytes"); } } while (localByteBuffer == null); localkba = (kba)this.o; this.c.a(); ift localift1 = this.c; if (localift1.m != null) { localift1.m.delete(); } } while ((this.q != 2) && (localByteBuffer.remaining() == 0)); boolean bool1; if ((this.c.i == -1L) && (this.q != 2)) { bool1 = true; } for (;;) { String str1; label373: boolean bool2; String str4; String str5; if (bool1) { str1 = e(); int i1 = 0x400 & localkba.j; bool2 = false; if (i1 != 0) { bool2 = true; } if ((0x2 & localkba.j) == 0) { break label578; } a(str1, localByteBuffer, bool1, bool2); if (this.t) { str4 = String.valueOf(this.o); str5 = String.valueOf(f()); if (!bool2) { break label570; } } } label570: for (String str6 = "; long-term cache";; str6 = "") { new StringBuilder(48 + String.valueOf(str4).length() + String.valueOf(str5).length() + String.valueOf(str6).length()).append("Completing a download-only request: ").append(str4).append(" file name: ").append(str5).append(str6); this.b.a(this, 1, f()); return; if ((this.c.i <= 0L) 
|| (localByteBuffer.remaining() >= this.c.i)) { break label706; } bool1 = true; break; str1 = d(); break label373; } label578: if (!bool1) { a(localByteBuffer); } if ((0x1 & localkba.j) != 0) { break; } String str2; if (this.t) { str2 = String.valueOf(this.o); if (!bool2) { break label698; } } label698: for (String str3 = "; long-term cache";; str3 = "") { new StringBuilder(40 + String.valueOf(str2).length() + String.valueOf(str1).length() + String.valueOf(str3).length()).append("Saving image in file cache: ").append(str2).append(" file name: ").append(str1).append(str3); a(str1, localByteBuffer, bool1, bool2); return; } label706: bool1 = false; } } protected final void a(StringBuilder paramStringBuilder) { paramStringBuilder.append("\n Size:").append(l()); } public abstract String b(); public void c() { kba localkba = (kba)this.o; if ((0x80 & localkba.j) != 0) { if (this.t) { String str9 = String.valueOf(this.o); String str10 = String.valueOf(f()); new StringBuilder(34 + String.valueOf(str9).length() + String.valueOf(str10).length()).append("Loading disabled for: ").append(str9).append(" file name: ").append(str10); } this.b.a(this, 3, null); return; } if ((0x2 & localkba.j) != 0) { File localFile2 = g(); if (localFile2 != null) { if (this.t) { String str7 = String.valueOf(this.o); String str8 = String.valueOf(localFile2); new StringBuilder(46 + String.valueOf(str7).length() + String.valueOf(str8).length()).append("Returning file name to consumers: ").append(str7).append(" file name: ").append(str8); } this.b.a(this, 1, localFile2); return; } } File localFile1; if ((0x1 & localkba.j) == 0) { localFile1 = g(); if (localFile1 != null) { if (this.t) { String str5 = String.valueOf(this.o); String str6 = String.valueOf(localFile1); new StringBuilder(37 + String.valueOf(str5).length() + String.valueOf(str6).length()).append("Loading image from file: ").append(str5).append(" file name: ").append(str6); } } } for (;;) { try { ByteBuffer localByteBuffer2 = 
efj.a(localFile1, true); localByteBuffer1 = localByteBuffer2; } catch (FileNotFoundException localFileNotFoundException) { localByteBuffer1 = null; continue; } catch (IOException localIOException) { String str4 = String.valueOf(localFile1); Log.e("EsResource", 18 + String.valueOf(str4).length() + "Cannot load file: " + str4, localIOException); } if (localByteBuffer1 == null) { break; } a(localByteBuffer1); return; ByteBuffer localByteBuffer1 = null; } if ((0x40000 & localkba.j) != 0) { if (this.t) { String str2 = String.valueOf(this.o); String str3 = String.valueOf(f()); new StringBuilder(38 + String.valueOf(str2).length() + String.valueOf(str3).length()).append("Downloading disabled for: ").append(str2).append(" file name: ").append(str3); } this.b.a(this, 3, null); return; } if (this.t) { String str1 = String.valueOf(this.o); new StringBuilder(21 + String.valueOf(str1).length()).append("Requesting download: ").append(str1); } h(); } public String d() { return b(); } public final String e() { String str = String.valueOf(d()); return 1 + String.valueOf(str).length() + str + '~'; } public File f() { kba localkba = (kba)this.o; if ((0x1 & localkba.j) != 0) { return null; } if ((0x400 & localkba.j) != 0) {} for (iab localiab = this.b.f();; localiab = this.b.e()) { return new File(localiab.b(d())); } } public File g() { String str = d(); File localFile = this.b.e().a(str); if (localFile != null) { return localFile; } return this.b.f().a(str); } public void h() { ift localift1 = this.c; localift1.a.u = true; localift1.e = System.currentTimeMillis(); localift1.b.a(localift1.a); String str1 = a(); this.c.g = str1; if (str1 == null) { if (Log.isLoggable("ImageResource", 3)) { String str16 = String.valueOf(this.o); new StringBuilder(35 + String.valueOf(str16).length()).append("Unable to download null image url: ").append(str16); } this.c.a(); super.a(5); return; } ift localift3; if ((0x2 & ((kba)this.o).j) != 0) { localift3 = this.c; kba localkba = (kba)localift3.a.o; 
String str10 = localift3.a.e(); if ((0x400 & localkba.j) != 0) {} FileChannel localFileChannel; for (iab localiab = localift3.b.f();; localiab = localift3.b.e()) { String str11 = localiab.b(str10); localift3.m = new File(str11); if (localift3.m.exists()) { localift3.g = localift3.a(str11, false); } if (localift3.a.t) { String str14 = localift3.g; String str15 = String.valueOf(localift3.a.o); new StringBuilder(34 + String.valueOf(str14).length() + String.valueOf(str15).length()).append("Downloading using URL: ").append(str14).append(" resource: ").append(str15); } File localFile = localift3.m.getParentFile(); if (!localFile.exists()) { localFile.mkdirs(); } try { localFileChannel = new FileOutputStream(localift3.m, true).getChannel(); if (localift3.g != null) { break; } String str13 = String.valueOf(localift3); throw new NullPointerException(22 + String.valueOf(str13).length() + "Download URL is null: " + str13); } catch (IOException localIOException2) { Log.e("EsResource", "Cannot open cache file", localIOException2); ife localife = localift3.a; localife.c.a(); localife.a(4); return; } } Context localContext3 = localift3.b.r(); String str12 = localift3.g; HttpUrlRequestListener localHttpUrlRequestListener3 = localift3.n; HttpUrlRequest localHttpUrlRequest2 = ixd.a(localContext3).a(str12, 0, null, localFileChannel, localHttpUrlRequestListener3); localHttpUrlRequest2.a(localift3.m.length()); long l3; if ((0x2000 & localkba.j) != 0) { l3 = localift3.b.k(); if ((0x800 & localkba.j) != 0) { break label601; } } label601: for (boolean bool = true;; bool = false) { localHttpUrlRequest2.a(l3, bool); try { localift3.j = localHttpUrlRequest2; localHttpUrlRequest2.f(); return; } finally {} l3 = localift3.b.l(); break; } } localift2 = this.c; long l1 = localift2.b.l(); localift2.i = 0L; String str2 = localift2.a.e(); localift2.m = localift2.b.e().a(str2); if (localift2.m != null) { localift2.g = localift2.a(localift2.m.getPath(), true); if (!localift2.m.exists()) { localift2.m 
= null; } } if (localift2.a.t) { String str8 = localift2.g; String str9 = String.valueOf(localift2.a.o); new StringBuilder(34 + String.valueOf(str8).length() + String.valueOf(str9).length()).append("Downloading using URL: ").append(str8).append(" resource: ").append(str9); } localift2.k = new ixe(new ChunkedWritableByteChannel(), localift2.a); if (localift2.m != null) {} try { localift2.i = localift2.m.length(); if (localift2.a.t) { long l2 = localift2.i; String str7 = String.valueOf(localift2.a.o); new StringBuilder(68 + String.valueOf(str2).length() + String.valueOf(str7).length()).append("Continuing download to file ").append(str2).append(" (").append(l2).append(" bytes) resource: ").append(str7); } ByteBuffer localByteBuffer = efj.a(localift2.m, true); localift2.k.write(localByteBuffer); } catch (IOException localIOException1) { for (;;) { Map localMap; try { ifd localifd; String str5; String str6; Context localContext2; String str4; ixe localixe; HttpUrlRequestListener localHttpUrlRequestListener2; localift2.j = localHttpUrlRequest1; localHttpUrlRequest1.f(); return; } finally {} localIOException1 = localIOException1; localift2.k = null; localift2.i = 0L; continue; Context localContext1 = localift2.b.r(); String str3 = localift2.g; HttpUrlRequestListener localHttpUrlRequestListener1 = localift2.n; HttpUrlRequest localHttpUrlRequest1 = ixd.a(localContext1).a(str3, 2, localMap, localHttpUrlRequestListener1); } } localifd = (ifd)mbb.b(localift2.b.r(), ifd.class); localMap = null; if (localifd != null) { localMap = localifd.a(localift2.b.r(), ((kba)localift2.a.o).d(), localift2.g); if ((localift2.a.t) && (!localMap.isEmpty())) { str5 = String.valueOf(localMap); str6 = String.valueOf(localift2.a.o); new StringBuilder(33 + String.valueOf(str5).length() + String.valueOf(str6).length()).append("Adding image headers: ").append(str5).append(" resource: ").append(str6); } } if (localift2.k != null) { localContext2 = localift2.b.r(); str4 = localift2.g; localixe = 
localift2.k; localHttpUrlRequestListener2 = localift2.n; localHttpUrlRequest1 = ixd.a(localContext2).a(str4, 2, localMap, localixe, localHttpUrlRequestListener2); localHttpUrlRequest1.a(localift2.i); localHttpUrlRequest1.a(l1, true); } } public final void i() { if ((0x10 & ((kba)this.o).j) == 0) { Bitmap localBitmap = j(); if (localBitmap != null) { if (localBitmap.isMutable()) { localBitmap.eraseColor(0); } this.b.a(localBitmap); } } super.i(); } public final Bitmap j() { if ((this.p instanceof Bitmap)) { return (Bitmap)this.p; } if ((this.p instanceof iey)) { return ((iey)this.p).a; } return null; } public final int k() { if ((this.p instanceof Bitmap)) { return ((Bitmap)this.p).getHeight(); } if ((this.p instanceof iey)) { return ((iey)this.p).c; } return 0; } public final int l() { if (this.v == -1) { if (this.p == null) { break label137; } if (!(this.p instanceof Bitmap)) { break label44; } this.v = a((Bitmap)this.p); } label137: for (;;) { return this.v; label44: if ((this.p instanceof iey)) { this.v = a(((iey)this.p).a); } else if ((this.p instanceof icm)) { icm localicm = (icm)this.p; this.v = (localicm.a.length + (localicm.f.length << 2)); } else if ((this.p instanceof byte[])) { this.v = ((byte[])this.p).length; } else { this.v = 2147483647; } } } public String m() { String str = this.n; if (str != null) { if (str.startsWith("image/")) { str = str.substring(6); } return str; } return null; } } /* Location: F:\apktool\apktool\com.google.android.apps.plus\classes-dex2jar.jar * Qualified Name: ife * JD-Core Version: 0.7.0.1 */
/* * Copyright 2011 Ian D. Bollinger * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cleansvg; import java.util.*; import com.google.common.collect.*; import com.google.inject.Inject; import org.cleansvg.SVGModule.Defaults; import org.w3c.dom.*; /** * @author ian.bollinger@gmail.com (Ian D. Bollinger) */ class Cleaner { // TODO: export all this junk to file private static final Set<String> JUNK_NAMESPACES = ImmutableSet.of( "http://web.resource.org/cc/", "http://purl.org/dc/elements/1.1/", "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "http://www.inkscape.org/namespaces/inkscape", "http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"); private static final Set<String> REMOVABLE_EMPTY_TAGS = ImmutableSet.of( "defs", "g", "metadata"); private final Map<String, String> defaults; private SVGDocument document; @Inject Cleaner(@Defaults final Map<String, String> defaults) { this.defaults = defaults; } public void process(final Document document) { // TODO: this should be injected. this.document = new SVGDocument(document); removeJunk(); firstPass(); pushDownAttributes(); removeEmptyGroups(); makeGroupsPass(); removeEmptyGroups(); } // TODO: rename! 
private void removeJunk() { for (final Element element : document.getElements()) { if (isRemovable(element)) { document.deleteNode(element); } } } private boolean isRemovable(final Attr attr) { return isJunkNamespace(attr) || "id".equals(attr.getName()) || attr.getValue().equals(defaults.get(attr.getName())); } private void firstPass() { for (final Node node : document.getNodes()) { processNode(node); } } private void processNode(final Node node) { switch (node.getNodeType()) { case Node.ELEMENT_NODE: processElement((Element) node); return; case Node.TEXT_NODE: processText((Text) node); return; default: document.deleteNode(node); } } private void processElement(final Element element) { if (isJunkNamespace(element)) { document.deleteNode(element); } else { convertStyleAttributes(element); convertColorAttributes(element); compactPathAttribute(element); processAttributes(element); } } private void pushDownAttributes() { for (final Element element : document.getElements()) { if (document.isGroup(element)) { final NodeList children = document.getAllChildElements(element); if (children.getLength() == 1) { final Element child = (Element) children.item(0); if (document.getAllChildElements(child).getLength() > 0) { continue; } element.getParentNode().insertBefore(child, element); document.pushDownAttributes(element, child); document.deleteNode(element); } } } } private void removeEmptyGroups() { for (final Element element : document.getElements()) { if (document.isGroup(element) && !element.hasAttributes()) { document.pushUpAttributes(element); document.deleteNode(element); } } } private void makeGroupsPass() { for (final Element element : document.getElements()) { makeGroups(element); } } private void processText(final Text text) { if (text.getTextContent().isEmpty()) { document.deleteNode(text); } } private void processAttributes(final Element element) { for (final Attr attr : ImmutableList.copyOf( document.getAttributes(element))) { if (isRemovable(attr)) { 
element.removeAttributeNode(attr); } } } private void compactPathAttribute(final Element element) { if (!element.hasAttribute("d")) { return; } element.setAttribute("d", Paths.compactAndRelativize(element.getAttribute("d"), 2)); } private void convertStyleAttributes(final Element element) { if (!element.hasAttribute("style")) { return; } for (final String property : element.getAttribute("style").split(";")) { if (!property.isEmpty()) { final String[] nameValue = property.split(":"); element.setAttribute(nameValue[0].trim(), nameValue[1].trim()); } } element.removeAttribute("style"); } private void convertColorAttributes(final Element element) { // if (!element.hasAttribute("fill")) { // return; // } } // TODO: This method is too complicated. private void makeGroups(final Element element) { if ("text".equals(element.getTagName())) { return; } final List<Element> children = document.getDirectChildElements(element); final int size = children.size(); if (size <= 1) { return; } Element a = children.get(0); for (final Element b : children.subList(1, size)) { final Map<String, String> attrs = document.getAttributeMap(a); final Map<String, String> attrs1 = document.getAttributeMap(a); final Map<String, String> attrs2 = document.getAttributeMap(b); attrs.entrySet().retainAll(attrs2.entrySet()); attrs.remove("d"); if (attrs.isEmpty()) { a = b; continue; } if (document.isGroup(a) && attrs1.equals(attrs2)) { document.removeAttributes(b, attrs2); a.appendChild(b); } else { final Element group = document.createGroup(); document.setAttributes(group, attrs); element.insertBefore(group, a); document.appendChildren(group, a, b); document.removeAttributes(a, attrs); document.removeAttributes(b, attrs); a = group; } } } private boolean isRemovable(final Element element) { // TODO: this isn't quite right. Empty elements with attributes don't // count. 
return !element.hasChildNodes() && REMOVABLE_EMPTY_TAGS.contains(element.getTagName()); } private boolean isJunkNamespace(final Node node) { final String uri = node.getNamespaceURI(); // String prefix = node.getPrefix(); return JUNK_NAMESPACES.contains(uri); } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.gef.EditPart; import org.eclipse.gef.commands.Command; import org.eclipse.gmf.runtime.diagram.core.util.ViewUtil; import org.eclipse.gmf.runtime.diagram.ui.commands.DeferredLayoutCommand; import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy; import org.eclipse.gmf.runtime.diagram.ui.commands.SetViewMutabilityCommand; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CanonicalEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.requests.CreateViewRequest; import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter; import org.eclipse.gmf.runtime.notation.Node; import org.eclipse.gmf.runtime.notation.View; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ForEachMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.JsonTransformMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyGroupMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PublishEventMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramUpdater;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbNodeDescriptor;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;

/**
 * Canonical edit policy keeping the MediatorFlow compartment (visual id 7045)
 * views in sync with the semantic children of the MediatorFlow model element.
 * NOTE(review): GMF-generated code — regenerate from the model rather than
 * editing by hand; only comments have been added here.
 *
 * @generated
 */
public class MediatorFlowMediatorFlowCompartment18CanonicalEditPolicy extends CanonicalEditPolicy {

    /**
     * @generated
     */
    protected void refreshOnActivate() {
        // Need to activate editpart children before invoking the canonical refresh for EditParts to add event listeners
        List<?> c = getHost().getChildren();
        for (int i = 0; i < c.size(); i++) {
            ((EditPart) c.get(i)).activate();
        }
        super.refreshOnActivate();
    }

    /**
     * @generated
     */
    protected EStructuralFeature getFeatureToSynchronize() {
        return EsbPackage.eINSTANCE.getMediatorFlow_Children();
    }

    /**
     * @generated
     */
    @SuppressWarnings("rawtypes")
    protected List getSemanticChildrenList() {
        View viewObject = (View) getHost().getModel();
        LinkedList<EObject> result = new LinkedList<EObject>();
        List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
                .getMediatorFlowMediatorFlowCompartment_7045SemanticChildren(viewObject);
        for (EsbNodeDescriptor d : childDescriptors) {
            result.add(d.getModelElement());
        }
        return result;
    }

    /**
     * @generated
     */
    protected boolean isOrphaned(Collection<EObject> semanticChildren, final View view) {
        return isMyDiagramElement(view) && !semanticChildren.contains(view.getElement());
    }

    /**
     * Returns true when the view's visual id is one of the mediator/endpoint
     * kinds this compartment manages.
     *
     * @generated
     */
    private boolean isMyDiagramElement(View view) {
        int visualID = EsbVisualIDRegistry.getVisualID(view);
        switch (visualID) {
        case DropMediatorEditPart.VISUAL_ID:
        case PropertyMediatorEditPart.VISUAL_ID:
        case PropertyGroupMediatorEditPart.VISUAL_ID:
        case ThrottleMediatorEditPart.VISUAL_ID:
        case FilterMediatorEditPart.VISUAL_ID:
        case LogMediatorEditPart.VISUAL_ID:
        case EnrichMediatorEditPart.VISUAL_ID:
        case XSLTMediatorEditPart.VISUAL_ID:
        case SwitchMediatorEditPart.VISUAL_ID:
        case SequenceEditPart.VISUAL_ID:
        case EventMediatorEditPart.VISUAL_ID:
        case EntitlementMediatorEditPart.VISUAL_ID:
        case ClassMediatorEditPart.VISUAL_ID:
        case SpringMediatorEditPart.VISUAL_ID:
        case ScriptMediatorEditPart.VISUAL_ID:
        case FaultMediatorEditPart.VISUAL_ID:
        case XQueryMediatorEditPart.VISUAL_ID:
        case CommandMediatorEditPart.VISUAL_ID:
        case DBLookupMediatorEditPart.VISUAL_ID:
        case DBReportMediatorEditPart.VISUAL_ID:
        case SmooksMediatorEditPart.VISUAL_ID:
        case SendMediatorEditPart.VISUAL_ID:
        case HeaderMediatorEditPart.VISUAL_ID:
        case CloneMediatorEditPart.VISUAL_ID:
        case CacheMediatorEditPart.VISUAL_ID:
        case IterateMediatorEditPart.VISUAL_ID:
        case CalloutMediatorEditPart.VISUAL_ID:
        case TransactionMediatorEditPart.VISUAL_ID:
        case RMSequenceMediatorEditPart.VISUAL_ID:
        case RuleMediatorEditPart.VISUAL_ID:
        case OAuthMediatorEditPart.VISUAL_ID:
        case AggregateMediatorEditPart.VISUAL_ID:
        case StoreMediatorEditPart.VISUAL_ID:
        case BuilderMediatorEditPart.VISUAL_ID:
        case CallTemplateMediatorEditPart.VISUAL_ID:
        case PayloadFactoryMediatorEditPart.VISUAL_ID:
        case EnqueueMediatorEditPart.VISUAL_ID:
        case URLRewriteMediatorEditPart.VISUAL_ID:
        case ValidateMediatorEditPart.VISUAL_ID:
        case RouterMediatorEditPart.VISUAL_ID:
        case ConditionalRouterMediatorEditPart.VISUAL_ID:
        case BAMMediatorEditPart.VISUAL_ID:
        case BeanMediatorEditPart.VISUAL_ID:
        case EJBMediatorEditPart.VISUAL_ID:
        case DefaultEndPointEditPart.VISUAL_ID:
        case AddressEndPointEditPart.VISUAL_ID:
        case FailoverEndPointEditPart.VISUAL_ID:
        case RecipientListEndPointEditPart.VISUAL_ID:
        case WSDLEndPointEditPart.VISUAL_ID:
        case NamedEndpointEditPart.VISUAL_ID:
        case LoadBalanceEndPointEditPart.VISUAL_ID:
        case APIResourceEndpointEditPart.VISUAL_ID:
        case AddressingEndpointEditPart.VISUAL_ID:
        case HTTPEndpointEditPart.VISUAL_ID:
        case TemplateEndpointEditPart.VISUAL_ID:
        case CloudConnectorEditPart.VISUAL_ID:
        case CloudConnectorOperationEditPart.VISUAL_ID:
        case LoopBackMediatorEditPart.VISUAL_ID:
        case RespondMediatorEditPart.VISUAL_ID:
        case CallMediatorEditPart.VISUAL_ID:
        case DataMapperMediatorEditPart.VISUAL_ID:
        case FastXSLTMediatorEditPart.VISUAL_ID:
        case ForEachMediatorEditPart.VISUAL_ID:
        case PublishEventMediatorEditPart.VISUAL_ID:
        case JsonTransformMediatorEditPart.VISUAL_ID:
            return true;
        }
        return false;
    }

    /**
     * Reconciles the notation views with the semantic children: matched views
     * are kept, unmatched known views are deleted, and missing views are
     * created (then laid out if more than one was added).
     *
     * @generated
     */
    protected void refreshSemantic() {
        if (resolveSemanticElement() == null) {
            return;
        }
        LinkedList<IAdaptable> createdViews = new LinkedList<IAdaptable>();
        List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
                .getMediatorFlowMediatorFlowCompartment_7045SemanticChildren((View) getHost().getModel());
        LinkedList<View> orphaned = new LinkedList<View>();
        // we care to check only views we recognize as ours
        LinkedList<View> knownViewChildren = new LinkedList<View>();
        for (View v : getViewChildren()) {
            if (isMyDiagramElement(v)) {
                knownViewChildren.add(v);
            }
        }
        // alternative to #cleanCanonicalSemanticChildren(getViewChildren(), semanticChildren)
        //
        // iteration happens over list of desired semantic elements, trying to find best matching View, while original CEP
        // iterates views, potentially losing view (size/bounds) information - i.e. if there are few views to reference same EObject, only last one
        // to answer isOrphaned == true will be used for the domain element representation, see #cleanCanonicalSemanticChildren()
        for (Iterator<EsbNodeDescriptor> descriptorsIterator = childDescriptors.iterator(); descriptorsIterator
                .hasNext();) {
            EsbNodeDescriptor next = descriptorsIterator.next();
            String hint = EsbVisualIDRegistry.getType(next.getVisualID());
            LinkedList<View> perfectMatch = new LinkedList<View>();
            // both semanticElement and hint match that of NodeDescriptor
            for (View childView : getViewChildren()) {
                EObject semanticElement = childView.getElement();
                if (next.getModelElement().equals(semanticElement)) {
                    if (hint.equals(childView.getType())) {
                        perfectMatch.add(childView);
                        // actually, can stop iteration over view children here, but
                        // may want to use not the first view but last one as a 'real' match (the way original CEP does
                        // with its trick with viewToSemanticMap inside #cleanCanonicalSemanticChildren
                    }
                }
            }
            if (perfectMatch.size() > 0) {
                descriptorsIterator.remove(); // precise match found no need to create anything for the NodeDescriptor
                // use only one view (first or last?), keep rest as orphaned for further consideration
                knownViewChildren.remove(perfectMatch.getFirst());
            }
        }
        // those left in knownViewChildren are subject to removal - they are our diagram elements we didn't find match to,
        // or those we have potential matches to, and thus need to be recreated, preserving size/location information.
        orphaned.addAll(knownViewChildren);
        //
        ArrayList<CreateViewRequest.ViewDescriptor> viewDescriptors = new ArrayList<CreateViewRequest.ViewDescriptor>(
                childDescriptors.size());
        for (EsbNodeDescriptor next : childDescriptors) {
            String hint = EsbVisualIDRegistry.getType(next.getVisualID());
            IAdaptable elementAdapter = new CanonicalElementAdapter(next.getModelElement(), hint);
            CreateViewRequest.ViewDescriptor descriptor = new CreateViewRequest.ViewDescriptor(elementAdapter,
                    Node.class, hint, ViewUtil.APPEND, false, host().getDiagramPreferencesHint());
            viewDescriptors.add(descriptor);
        }
        boolean changed = deleteViews(orphaned.iterator());
        //
        CreateViewRequest request = getCreateViewRequest(viewDescriptors);
        Command cmd = getCreateViewCommand(request);
        if (cmd != null && cmd.canExecute()) {
            SetViewMutabilityCommand.makeMutable(new EObjectAdapter(host().getNotationView())).execute();
            executeCommand(cmd);
            @SuppressWarnings("unchecked")
            List<IAdaptable> nl = (List<IAdaptable>) request.getNewObject();
            createdViews.addAll(nl);
        }
        if (changed || createdViews.size() > 0) {
            postProcessRefreshSemantic(createdViews);
        }
        if (createdViews.size() > 1) {
            // perform a layout of the container
            DeferredLayoutCommand layoutCmd = new DeferredLayoutCommand(host().getEditingDomain(), createdViews,
                    host());
            executeCommand(new ICommandProxy(layoutCmd));
        }
        makeViewsImmutable(createdViews);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2.impl; import java.time.Duration; import java.util.HashMap; import java.util.Map; import java.util.Stack; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.vfs2.FileListener; import org.apache.commons.vfs2.FileMonitor; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.provider.AbstractFileSystem; /** * A polling {@link FileMonitor} implementation. * <p> * The DefaultFileMonitor is a Thread based polling file system monitor with a 1 second delay. * </p> * * <h2>Design:</h2> * <p> * There is a Map of monitors known as FileMonitorAgents. With the thread running, each FileMonitorAgent object is asked * to "check" on the file it is responsible for. To do this check, the cache is cleared. 
* </p> * <ul> * <li>If the file existed before the refresh and it no longer exists, a delete event is fired.</li> * <li>If the file existed before the refresh and it still exists, check the last modified timestamp to see if that has * changed.</li> * <li>If it has, fire a change event.</li> * </ul> * <p> * With each file delete, the FileMonitorAgent of the parent is asked to re-build its list of children, so that they can * be accurately checked when there are new children. * </p> * <p> * New files are detected during each "check" as each file does a check for new children. If new children are found, * create events are fired recursively if recursive descent is enabled. * </p> * <p> * For performance reasons, added a delay that increases as the number of files monitored increases. The default is a * delay of 1 second for every 1000 files processed. * </p> * <h2>Example usage:</h2> * * <pre> * FileSystemManager fsManager = VFS.getManager(); * FileObject listendir = fsManager.resolveFile("/home/username/monitored/"); * * DefaultFileMonitor fm = new DefaultFileMonitor(new CustomFileListener()); * fm.setRecursive(true); * fm.addFile(listendir); * fm.start(); * </pre> * * <i>(where CustomFileListener is a class that implements the FileListener interface.)</i> */ // TODO Add a Builder so we can construct and start. public class DefaultFileMonitor implements Runnable, FileMonitor, AutoCloseable { /** * File monitor agent. 
     */
    private static final class FileMonitorAgent {

        /** The file or folder this agent watches. */
        private final FileObject fileObject;

        /** Owning monitor; used for listener access and add/remove queues. */
        private final DefaultFileMonitor defaultFileMonitor;

        /** Whether the file existed at the last check. */
        private boolean exists;

        /** Last observed modification time; -1 when unknown/unavailable. */
        private long timestamp;

        /** Names of known children (folders only); null until first populated. */
        private Map<FileName, Object> children;

        /**
         * Captures the initial existence/timestamp snapshot for {@code file}.
         * Filesystem errors are absorbed: the agent then starts as
         * non-existent with an unknown (-1) timestamp.
         */
        private FileMonitorAgent(final DefaultFileMonitor fm, final FileObject file) {
            this.defaultFileMonitor = fm;
            this.fileObject = file;
            this.refresh();
            this.resetChildrenList();
            try {
                this.exists = this.fileObject.exists();
            } catch (final FileSystemException fse) {
                this.exists = false;
                this.timestamp = -1;
            }
            if (this.exists) {
                try {
                    this.timestamp = this.fileObject.getContent().getLastModifiedTime();
                } catch (final FileSystemException fse) {
                    this.timestamp = -1;
                }
            }
        }

        /**
         * One polling cycle: refreshes the cached file object, then fires
         * delete/change/create events based on the existence and timestamp
         * deltas since the previous check, and finally looks for new children.
         */
        private void check() {
            this.refresh();
            try {
                // If the file existed and now doesn't
                if (this.exists && !this.fileObject.exists()) {
                    this.exists = this.fileObject.exists();
                    this.timestamp = -1;
                    // Fire delete event
                    ((AbstractFileSystem) this.fileObject.getFileSystem()).fireFileDeleted(this.fileObject);
                    // Remove listener in case file is re-created. Don't want to fire twice.
                    if (this.defaultFileMonitor.getFileListener() != null) {
                        this.fileObject.getFileSystem().removeListener(this.fileObject,
                                this.defaultFileMonitor.getFileListener());
                    }
                    // Remove from map
                    this.defaultFileMonitor.queueRemoveFile(this.fileObject);
                } else if (this.exists && this.fileObject.exists()) {
                    // Check the timestamp to see if it has been modified
                    if (this.timestamp != this.fileObject.getContent().getLastModifiedTime()) {
                        this.timestamp = this.fileObject.getContent().getLastModifiedTime();
                        // Fire change event
                        // Don't fire if it's a folder because new file children
                        // and deleted files in a folder have their own event triggered.
                        if (!this.fileObject.getType().hasChildren()) {
                            ((AbstractFileSystem) this.fileObject.getFileSystem()).fireFileChanged(this.fileObject);
                        }
                    }
                } else if (!this.exists && this.fileObject.exists()) {
                    this.exists = this.fileObject.exists();
                    this.timestamp = this.fileObject.getContent().getLastModifiedTime();
                    // Don't fire if it's a folder because new file children
                    // and deleted files in a folder have their own event triggered.
                    if (!this.fileObject.getType().hasChildren()) {
                        ((AbstractFileSystem) this.fileObject.getFileSystem()).fireFileCreated(this.fileObject);
                    }
                }
                this.checkForNewChildren();
            } catch (final FileSystemException fse) {
                LOG.error(fse.getLocalizedMessage(), fse);
            }
        }

        /**
         * Only checks for new children. If children are removed, they'll eventually be checked.
         */
        private void checkForNewChildren() {
            try {
                if (this.fileObject.getType().hasChildren()) {
                    final FileObject[] newChildren = this.fileObject.getChildren();
                    if (this.children != null) {
                        // See which new children are not listed in the current children map.
                        final Map<FileName, Object> newChildrenMap = new HashMap<>();
                        final Stack<FileObject> missingChildren = new Stack<>();
                        for (final FileObject element : newChildren) {
                            newChildrenMap.put(element.getName(), new Object()); // null ?
                            // If the child's not there
                            if (!this.children.containsKey(element.getName())) {
                                missingChildren.push(element);
                            }
                        }
                        this.children = newChildrenMap;
                        // If there were missing children
                        if (!missingChildren.empty()) {
                            while (!missingChildren.empty()) {
                                this.fireAllCreate(missingChildren.pop());
                            }
                        }
                    } else if (newChildren.length > 0) {
                        // First set of children - Break out the cigars
                        this.children = new HashMap<>();
                        for (final FileObject element : newChildren) {
                            this.children.put(element.getName(), new Object()); // null?
                            this.fireAllCreate(element);
                        }
                    }
                }
            } catch (final FileSystemException fse) {
                LOG.error(fse.getLocalizedMessage(), fse);
            }
        }

        /**
         * Recursively fires create events for all children if recursive descent is enabled.
         * Otherwise the create event is only fired for the initial FileObject.
         *
         * @param child The child to add.
         */
        private void fireAllCreate(final FileObject child) {
            // Add listener so that it can be triggered
            if (this.defaultFileMonitor.getFileListener() != null) {
                child.getFileSystem().addListener(child, this.defaultFileMonitor.getFileListener());
            }
            ((AbstractFileSystem) child.getFileSystem()).fireFileCreated(child);
            // Remove it because a listener is added in the queueAddFile
            if (this.defaultFileMonitor.getFileListener() != null) {
                child.getFileSystem().removeListener(child, this.defaultFileMonitor.getFileListener());
            }
            this.defaultFileMonitor.queueAddFile(child); // Add
            try {
                if (this.defaultFileMonitor.isRecursive() && child.getType().hasChildren()) {
                    final FileObject[] newChildren = child.getChildren();
                    for (final FileObject element : newChildren) {
                        fireAllCreate(element);
                    }
                }
            } catch (final FileSystemException fse) {
                LOG.error(fse.getLocalizedMessage(), fse);
            }
        }

        /**
         * Clear the cache and re-request the file object.
         */
        private void refresh() {
            try {
                this.fileObject.refresh();
            } catch (final FileSystemException fse) {
                LOG.error(fse.getLocalizedMessage(), fse);
            }
        }

        /**
         * Rebuilds the children-name map from the current folder contents;
         * on error the map is reset to null (treated as "not yet populated").
         */
        private void resetChildrenList() {
            try {
                if (this.fileObject.getType().hasChildren()) {
                    this.children = new HashMap<>();
                    final FileObject[] childrenList = this.fileObject.getChildren();
                    for (final FileObject element : childrenList) {
                        this.children.put(element.getName(), new Object()); // null?
                    }
                }
            } catch (final FileSystemException fse) {
                this.children = null;
            }
        }
    }

    private static final Log LOG = LogFactory.getLog(DefaultFileMonitor.class);

    /** Default pause between polling passes (1 second). */
    private static final Duration DEFAULT_DELAY = Duration.ofSeconds(1);

    /** Default number of files checked per run before the delay is inserted. */
    private static final int DEFAULT_MAX_FILES = 1000;

    /**
     * Map from FileName to FileObject being monitored.
     */
    private final Map<FileName, FileMonitorAgent> monitorMap = new HashMap<>();

    /**
     * The low priority thread used for checking the files being monitored.
     */
    private Thread monitorThread;

    /**
     * File objects to be removed from the monitor map.
     */
    private final Stack<FileObject> deleteStack = new Stack<>();

    /**
     * File objects to be added to the monitor map.
     */
    private final Stack<FileObject> addStack = new Stack<>();

    /**
     * A flag used to determine if the monitor thread should be running.
     */
    private volatile boolean runFlag = true; // used for inter-thread communication

    /**
     * A flag used to determine if adding files to be monitored should be recursive.
     */
    private boolean recursive;

    /**
     * Set the delay between checks
     */
    private Duration delay = DEFAULT_DELAY;

    /**
     * Set the number of files to check until a delay will be inserted
     */
    private int checksPerRun = DEFAULT_MAX_FILES;

    /**
     * A listener object that if set, is notified on file creation and deletion.
     */
    private final FileListener listener;

    /**
     * Constructs a new instance with the given listener.
     *
     * @param listener the listener.
     */
    public DefaultFileMonitor(final FileListener listener) {
        this.listener = listener;
    }

    /**
     * Adds a file to be monitored.
     *
     * @param file The FileObject to monitor.
*/ @Override public void addFile(final FileObject file) { synchronized (this.monitorMap) { if (this.monitorMap.get(file.getName()) == null) { this.monitorMap.put(file.getName(), new FileMonitorAgent(this, file)); try { if (this.listener != null) { file.getFileSystem().addListener(file, this.listener); } if (file.getType().hasChildren() && this.recursive) { // Traverse the children final FileObject[] children = file.getChildren(); for (final FileObject element : children) { this.addFile(element); // Add depth first } } } catch (final FileSystemException fse) { LOG.error(fse.getLocalizedMessage(), fse); } } } } @Override public void close() { this.runFlag = false; if (this.monitorThread != null) { this.monitorThread.interrupt(); try { this.monitorThread.join(); } catch (final InterruptedException e) { // ignore } this.monitorThread = null; } } /** * Gets the number of files to check per run. * * @return The number of files to check per iteration. */ public int getChecksPerRun() { return checksPerRun; } /** * Gets the delay between runs. * * @return The delay period in milliseconds. * @deprecated Use {@link #getDelayDuration()}. */ @Deprecated public long getDelay() { return delay.toMillis(); } /** * Gets the delay between runs. * * @return The delay period. */ public Duration getDelayDuration() { return delay; } /** * Gets the current FileListener object notified when there are changes with the files added. * * @return The FileListener. */ FileListener getFileListener() { return this.listener; } /** * Tests the recursive setting when adding files for monitoring. * * @return true if monitoring is enabled for children. */ public boolean isRecursive() { return this.recursive; } /** * Queues a file for addition to be monitored. * * @param file The FileObject to add. */ protected void queueAddFile(final FileObject file) { this.addStack.push(file); } /** * Queues a file for removal from being monitored. * * @param file The FileObject to be removed from being monitored. 
*/ protected void queueRemoveFile(final FileObject file) { this.deleteStack.push(file); } /** * Removes a file from being monitored. * * @param file The FileObject to remove from monitoring. */ @Override public void removeFile(final FileObject file) { synchronized (this.monitorMap) { final FileName fn = file.getName(); if (this.monitorMap.get(fn) != null) { FileObject parent; try { parent = file.getParent(); } catch (final FileSystemException fse) { parent = null; } this.monitorMap.remove(fn); if (parent != null) { // Not the root final FileMonitorAgent parentAgent = this.monitorMap.get(parent.getName()); if (parentAgent != null) { parentAgent.resetChildrenList(); } } } } } /** * Asks the agent for each file being monitored to check its file for changes. */ @Override public void run() { mainloop: while (!monitorThread.isInterrupted() && this.runFlag) { // For each entry in the map final Object[] fileNames; synchronized (this.monitorMap) { fileNames = this.monitorMap.keySet().toArray(); } for (int iterFileNames = 0; iterFileNames < fileNames.length; iterFileNames++) { final FileName fileName = (FileName) fileNames[iterFileNames]; final FileMonitorAgent agent; synchronized (this.monitorMap) { agent = this.monitorMap.get(fileName); } if (agent != null) { agent.check(); } if (getChecksPerRun() > 0 && (iterFileNames + 1) % getChecksPerRun() == 0) { try { Thread.sleep(getDelayDuration().toMillis()); } catch (final InterruptedException e) { // Woke up. } } if (monitorThread.isInterrupted() || !this.runFlag) { continue mainloop; } } while (!this.addStack.empty()) { this.addFile(this.addStack.pop()); } while (!this.deleteStack.empty()) { this.removeFile(this.deleteStack.pop()); } try { Thread.sleep(getDelayDuration().toMillis()); } catch (final InterruptedException e) { continue; } } this.runFlag = true; } /** * Sets the number of files to check per run. 
a additional delay will be added if there are more files to check * * @param checksPerRun a value less than 1 will disable this feature */ public void setChecksPerRun(final int checksPerRun) { this.checksPerRun = checksPerRun; } /** * Sets the delay between runs. * * @param delay The delay period. * @since 2.10.0 */ public void setDelay(final Duration delay) { this.delay = delay == null || delay.isNegative() ? DEFAULT_DELAY : delay; } /** * Sets the delay between runs. * * @param delay The delay period in milliseconds. * @deprecated Use {@link #setDelay(Duration)}. */ @Deprecated public void setDelay(final long delay) { setDelay(delay > 0 ? Duration.ofMillis(delay) : DEFAULT_DELAY); } /** * Sets the recursive setting when adding files for monitoring. * * @param newRecursive true if monitoring should be enabled for children. */ public void setRecursive(final boolean newRecursive) { this.recursive = newRecursive; } /** * Starts monitoring the files that have been added. */ public synchronized void start() { if (this.monitorThread == null) { this.monitorThread = new Thread(this); this.monitorThread.setDaemon(true); this.monitorThread.setPriority(Thread.MIN_PRIORITY); } this.monitorThread.start(); } /** * Stops monitoring the files that have been added. */ public synchronized void stop() { close(); } }
package org.motechproject.openmrs19.domain;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ObjectUtils;
import org.joda.time.DateTime;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import static ch.lambdaj.Lambda.having;
import static ch.lambdaj.Lambda.on;
import static ch.lambdaj.Lambda.select;
import static org.hamcrest.Matchers.equalTo;

/**
 * Represents a single person. This class stores personal information about the person. It's used by the
 * {@link OpenMRSPatient}. It's part of the MOTECH model.
 */
public class OpenMRSPerson {

    private String id;
    private String firstName;
    private String middleName;
    private String lastName;
    private String preferredName;
    private String address;
    private DateTime dateOfBirth;
    private Boolean birthDateEstimated;
    private Integer age;
    private String gender;
    private boolean isDead;
    private List<OpenMRSAttribute> attributes = new ArrayList<OpenMRSAttribute>();
    private DateTime deathDate;

    /**
     * Default constructor.
     */
    public OpenMRSPerson() {
        this(null);
    }

    /**
     * Creates a person with the given OpenMRS {@code id}.
     *
     * @param id the OpenMRS ID of the person
     */
    public OpenMRSPerson(String id) {
        this.id = id;
    }

    public void setPreferredName(String preferredName) {
        this.preferredName = preferredName;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    public void setDateOfBirth(DateTime dateOfBirth) {
        this.dateOfBirth = dateOfBirth;
    }

    public void setBirthDateEstimated(Boolean birthDateEstimated) {
        this.birthDateEstimated = birthDateEstimated;
    }

    public String getGender() {
        return gender;
    }

    public void setGender(String gender) {
        this.gender = gender;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public void setMiddleName(String middleName) {
        this.middleName = middleName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    /**
     * Sets the dead flag. A {@code null} argument is treated as {@code false} — previously a
     * null value caused a NullPointerException through auto-unboxing into the primitive field.
     *
     * @param isDead whether the person is dead; null means not dead
     */
    public void setIsDead(Boolean isDead) {
        this.isDead = isDead != null && isDead;
    }

    public String getFirstName() {
        return firstName;
    }

    public String getMiddleName() {
        return middleName;
    }

    public String getLastName() {
        return lastName;
    }

    /**
     * Returns first, middle and last name joined with single spaces. Note: missing name parts
     * appear as the literal string "null" (behavior preserved from the original implementation).
     *
     * @return the concatenated full name
     */
    public String getFullName() {
        return firstName + " " + middleName + " " + lastName;
    }

    public List<OpenMRSAttribute> getAttributes() {
        return attributes;
    }

    public void addAttribute(OpenMRSAttribute attribute) {
        attributes.add(attribute);
    }

    public void setAttributes(List<OpenMRSAttribute> attributes) {
        this.attributes = attributes;
    }

    public void setDeathDate(DateTime deathDate) {
        this.deathDate = deathDate;
    }

    /**
     * Returns the value of the first attribute with the given name, or null when absent.
     *
     * @param key the attribute name to look up
     * @return the attribute value, or null if no attribute matches
     */
    @Deprecated
    public String attrValue(String key) {
        List<OpenMRSAttribute> filteredItems = select(attributes,
                having(on(OpenMRSAttribute.class).getName(), equalTo(key)));
        return CollectionUtils.isNotEmpty(filteredItems) ? filteredItems.get(0).getValue() : null;
    }

    public String getPreferredName() {
        return preferredName;
    }

    /**
     * Returns the date of birth, or null when none is set.
     *
     * Bug fix: the previous implementation returned {@code new DateTime(dateOfBirth)}, and
     * Joda-Time converts a null instant to the CURRENT time, so a person without a recorded
     * birth date appeared to be born "now". DateTime is immutable, so returning the stored
     * reference directly is safe and needs no defensive copy.
     *
     * @return the date of birth, or null
     */
    public DateTime getDateOfBirth() {
        return dateOfBirth;
    }

    public String getAddress() {
        return address;
    }

    public Boolean getBirthDateEstimated() {
        return birthDateEstimated;
    }

    public Boolean isDead() {
        return isDead;
    }

    public Integer getAge() {
        return age;
    }

    public DateTime getDeathDate() {
        return deathDate;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof OpenMRSPerson)) {
            return false;
        }
        OpenMRSPerson other = (OpenMRSPerson) o;
        return equalNameData(other) && equalAgeAndBirthDates(other)
                && Objects.equals(id, other.id) && Objects.equals(address, other.address)
                && Objects.equals(gender, other.gender) && Objects.equals(attributes, other.attributes)
                && Objects.equals(deathDate, other.deathDate) && isDead == other.isDead;
    }

    public boolean equalNameData(OpenMRSPerson other) {
        return Objects.equals(firstName, other.firstName) && Objects.equals(middleName, other.middleName)
                && Objects.equals(lastName, other.lastName) && Objects.equals(preferredName, other.preferredName);
    }

    public boolean equalAgeAndBirthDates(OpenMRSPerson other) {
        return Objects.equals(dateOfBirth, other.dateOfBirth)
                && Objects.equals(birthDateEstimated, other.birthDateEstimated)
                && Objects.equals(age, other.age);
    }

    @Override
    public int hashCode() {
        int hash = 1;
        hash = hash * 31 + ObjectUtils.hashCode(id);
        hash = hash * 31 + ObjectUtils.hashCode(firstName);
        hash = hash * 31 + ObjectUtils.hashCode(middleName);
        hash = hash * 31 + ObjectUtils.hashCode(lastName);
        hash = hash * 31 + ObjectUtils.hashCode(preferredName);
        hash = hash * 31 + ObjectUtils.hashCode(address);
        hash = hash * 31 + ObjectUtils.hashCode(dateOfBirth);
        hash = hash * 31 + ObjectUtils.hashCode(birthDateEstimated);
        hash = hash * 31 + ObjectUtils.hashCode(age);
        hash = hash * 31 + ObjectUtils.hashCode(gender);
        hash = hash * 31 + Boolean.valueOf(isDead).hashCode();
        hash = hash * 31 + ObjectUtils.hashCode(attributes);
        hash = hash * 31 + ObjectUtils.hashCode(deathDate);
        return hash;
    }

    public String getPersonId() {
        return id;
    }

    public void setPersonId(String id) {
        this.id = id;
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.cordova; import java.util.HashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.cordova.api.CordovaInterface; import org.apache.cordova.api.CordovaPlugin; import org.apache.cordova.api.LOG; import org.json.JSONException; import org.json.JSONObject; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.app.ProgressDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.res.Configuration; import android.graphics.Color; import android.media.AudioManager; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.util.Log; import android.view.Display; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.WindowManager; import android.webkit.ValueCallback; import android.webkit.WebViewClient; import android.widget.LinearLayout; /** * This class is the main Android activity that represents the Cordova * application. 
It should be extended by the user to load the specific * html file that contains the application. * * As an example: * * package org.apache.cordova.examples; * import android.app.Activity; * import android.os.Bundle; * import org.apache.cordova.*; * * public class Examples extends DroidGap { * @Override * public void onCreate(Bundle savedInstanceState) { * super.onCreate(savedInstanceState); * * // Set properties for activity * super.setStringProperty("loadingDialog", "Title,Message"); // show loading dialog * super.setStringProperty("errorUrl", "file:///android_asset/www/error.html"); // if error loading file in super.loadUrl(). * * // Initialize activity * super.init(); * * // Clear cache if you want * super.appView.clearCache(true); * * // Load your application * super.setIntegerProperty("splashscreen", R.drawable.splash); // load splash.jpg image from the resource drawable directory * super.loadUrl("file:///android_asset/www/index.html", 3000); // show splash screen 3 sec before loading app * } * } * * Properties: The application can be configured using the following properties: * * // Display a native loading dialog when loading app. Format for value = "Title,Message". * // (String - default=null) * super.setStringProperty("loadingDialog", "Wait,Loading Demo..."); * * // Display a native loading dialog when loading sub-pages. Format for value = "Title,Message". * // (String - default=null) * super.setStringProperty("loadingPageDialog", "Loading page..."); * * // Load a splash screen image from the resource drawable directory. * // (Integer - default=0) * super.setIntegerProperty("splashscreen", R.drawable.splash); * * // Set the background color. * // (Integer - default=0 or BLACK) * super.setIntegerProperty("backgroundColor", Color.WHITE); * * // Time in msec to wait before triggering a timeout error when loading * // with super.loadUrl(). 
(Integer - default=20000) * super.setIntegerProperty("loadUrlTimeoutValue", 60000); * * // URL to load if there's an error loading specified URL with loadUrl(). * // Should be a local URL starting with file://. (String - default=null) * super.setStringProperty("errorUrl", "file:///android_asset/www/error.html"); * * // Enable app to keep running in background. (Boolean - default=true) * super.setBooleanProperty("keepRunning", false); * * Cordova.xml configuration: * Cordova uses a configuration file at res/xml/cordova.xml to specify the following settings. * * Approved list of URLs that can be loaded into DroidGap * <access origin="http://server regexp" subdomains="true" /> * Log level: ERROR, WARN, INFO, DEBUG, VERBOSE (default=ERROR) * <log level="DEBUG" /> * * Cordova plugins: * Cordova uses a file at res/xml/plugins.xml to list all plugins that are installed. * Before using a new plugin, a new element must be added to the file. * name attribute is the service name passed to Cordova.exec() in JavaScript * value attribute is the Java class name to call. * * <plugins> * <plugin name="App" value="org.apache.cordova.App"/> * ... 
* </plugins> */ public class DroidGap extends Activity implements CordovaInterface { public static String TAG = "DroidGap"; // The webview for our app protected CordovaWebView appView; protected CordovaWebViewClient webViewClient; protected LinearLayout root; protected boolean cancelLoadUrl = false; protected ProgressDialog spinnerDialog = null; private final ExecutorService threadPool = Executors.newCachedThreadPool(); // The initial URL for our app // ie http://server/path/index.html#abc?query //private String url = null; private static int ACTIVITY_STARTING = 0; private static int ACTIVITY_RUNNING = 1; private static int ACTIVITY_EXITING = 2; private int activityState = 0; // 0=starting, 1=running (after 1st resume), 2=shutting down // Plugin to call when activity result is received protected CordovaPlugin activityResultCallback = null; protected boolean activityResultKeepRunning; // Default background color for activity // (this is not the color for the webview, which is set in HTML) private int backgroundColor = Color.BLACK; /* * The variables below are used to cache some of the activity properties. */ // Draw a splash screen using an image located in the drawable resource directory. // This is not the same as calling super.loadSplashscreen(url) protected int splashscreen = 0; protected int splashscreenTime = 3000; // LoadUrl timeout value in msec (default of 20 sec) protected int loadUrlTimeoutValue = 20000; // Keep app running when pause is received. (default = true) // If true, then the JavaScript and native code continue to run in the background // when another application (activity) is started. protected boolean keepRunning = true; private int lastRequestCode; private Object responseCode; private Intent lastIntent; private Object lastResponseCode; private String initCallbackClass; private Object LOG_TAG; /** * Sets the authentication token. 
* * @param authenticationToken * @param host * @param realm */ public void setAuthenticationToken(AuthenticationToken authenticationToken, String host, String realm) { if (this.appView != null && this.appView.viewClient != null) { this.appView.viewClient.setAuthenticationToken(authenticationToken, host, realm); } } /** * Removes the authentication token. * * @param host * @param realm * * @return the authentication token or null if did not exist */ public AuthenticationToken removeAuthenticationToken(String host, String realm) { if (this.appView != null && this.appView.viewClient != null) { return this.appView.viewClient.removeAuthenticationToken(host, realm); } return null; } /** * Gets the authentication token. * * In order it tries: * 1- host + realm * 2- host * 3- realm * 4- no host, no realm * * @param host * @param realm * * @return the authentication token */ public AuthenticationToken getAuthenticationToken(String host, String realm) { if (this.appView != null && this.appView.viewClient != null) { return this.appView.viewClient.getAuthenticationToken(host, realm); } return null; } /** * Clear all authentication tokens. */ public void clearAuthenticationTokens() { if (this.appView != null && this.appView.viewClient != null) { this.appView.viewClient.clearAuthenticationTokens(); } } /** * Called when the activity is first created. 
* * @param savedInstanceState */ @SuppressWarnings("deprecation") @Override public void onCreate(Bundle savedInstanceState) { Config.init(this); LOG.d(TAG, "DroidGap.onCreate()"); super.onCreate(savedInstanceState); if(savedInstanceState != null) { initCallbackClass = savedInstanceState.getString("callbackClass"); } if(!this.getBooleanProperty("showTitle", false)) { getWindow().requestFeature(Window.FEATURE_NO_TITLE); } if(this.getBooleanProperty("setFullscreen", false)) { getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); } else { getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN); } // This builds the view. We could probably get away with NOT having a LinearLayout, but I like having a bucket! Display display = getWindowManager().getDefaultDisplay(); int width = display.getWidth(); int height = display.getHeight(); root = new LinearLayoutSoftKeyboardDetect(this, width, height); root.setOrientation(LinearLayout.VERTICAL); root.setBackgroundColor(this.backgroundColor); root.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 0.0F)); // Setup the hardware volume controls to handle volume control setVolumeControlStream(AudioManager.STREAM_MUSIC); } /** * Get the Android activity. * * @return */ public Activity getActivity() { return this; } /** * Create and initialize web container with default web view objects. 
*/ public void init() { CordovaWebView webView = new CordovaWebView(DroidGap.this); CordovaWebViewClient webViewClient; if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB) { webViewClient = new CordovaWebViewClient(this, webView); } else { webViewClient = new IceCreamCordovaWebViewClient(this, webView); } this.init(webView, webViewClient, new CordovaChromeClient(this, webView)); } /** * Initialize web container with web view objects. * * @param webView * @param webViewClient * @param webChromeClient */ public void init(CordovaWebView webView, CordovaWebViewClient webViewClient, CordovaChromeClient webChromeClient) { LOG.d(TAG, "DroidGap.init()"); // Set up web container this.appView = webView; this.appView.setId(100); this.appView.setWebViewClient(webViewClient); this.appView.setWebChromeClient(webChromeClient); webViewClient.setWebView(this.appView); webChromeClient.setWebView(this.appView); this.appView.setLayoutParams(new LinearLayout.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1.0F)); // Add web view but make it invisible while loading URL this.appView.setVisibility(View.INVISIBLE); this.root.addView(this.appView); setContentView(this.root); // Clear cancel flag this.cancelLoadUrl = false; } /** * Load the url into the webview. 
* * @param url */ public void loadUrl(String url) { // Init web view if not already done if (this.appView == null) { this.init(); } // Set backgroundColor this.backgroundColor = this.getIntegerProperty("backgroundColor", Color.BLACK); this.root.setBackgroundColor(this.backgroundColor); // If keepRunning this.keepRunning = this.getBooleanProperty("keepRunning", true); // Then load the spinner this.loadSpinner(); this.appView.loadUrl(url); } /* * Load the spinner */ void loadSpinner() { // If loadingDialog property, then show the App loading dialog for first page of app String loading = null; if ((this.appView == null) || !this.appView.canGoBack()) { loading = this.getStringProperty("loadingDialog", null); } else { loading = this.getStringProperty("loadingPageDialog", null); } if (loading != null) { String title = ""; String message = "Loading Application..."; if (loading.length() > 0) { int comma = loading.indexOf(','); if (comma > 0) { title = loading.substring(0, comma); message = loading.substring(comma + 1); } else { title = ""; message = loading; } } this.spinnerStart(title, message); } } /** * Load the url into the webview after waiting for period of time. * This is used to display the splashscreen for certain amount of time. * * @param url * @param time The number of ms to wait before loading webview */ public void loadUrl(final String url, int time) { // Init web view if not already done if (this.appView == null) { this.init(); } this.splashscreenTime = time; this.splashscreen = this.getIntegerProperty("splashscreen", 0); this.showSplashScreen(this.splashscreenTime); this.appView.loadUrl(url, time); } /** * Cancel loadUrl before it has been loaded. */ // TODO NO-OP @Deprecated public void cancelLoadUrl() { this.cancelLoadUrl = true; } /** * Clear the resource cache. */ public void clearCache() { if (this.appView == null) { this.init(); } this.appView.clearCache(true); } /** * Clear web history in this web view. 
*/ public void clearHistory() { this.appView.clearHistory(); } /** * Go to previous page in history. (We manage our own history) * * @return true if we went back, false if we are already at top */ public boolean backHistory() { if (this.appView != null) { return appView.backHistory(); } return false; } @Override /** * Called by the system when the device configuration changes while your activity is running. * * @param Configuration newConfig */ public void onConfigurationChanged(Configuration newConfig) { //don't reload the current page when the orientation is changed super.onConfigurationChanged(newConfig); } /** * Get boolean property for activity. * * @param name * @param defaultValue * @return */ public boolean getBooleanProperty(String name, boolean defaultValue) { Bundle bundle = this.getIntent().getExtras(); if (bundle == null) { return defaultValue; } Boolean p; try { p = (Boolean) bundle.get(name); } catch (ClassCastException e) { String s = bundle.get(name).toString(); if ("true".equals(s)) { p = true; } else { p = false; } } if (p == null) { return defaultValue; } return p.booleanValue(); } /** * Get int property for activity. * * @param name * @param defaultValue * @return */ public int getIntegerProperty(String name, int defaultValue) { Bundle bundle = this.getIntent().getExtras(); if (bundle == null) { return defaultValue; } Integer p; try { p = (Integer) bundle.get(name); } catch (ClassCastException e) { p = Integer.parseInt(bundle.get(name).toString()); } if (p == null) { return defaultValue; } return p.intValue(); } /** * Get string property for activity. * * @param name * @param defaultValue * @return */ public String getStringProperty(String name, String defaultValue) { Bundle bundle = this.getIntent().getExtras(); if (bundle == null) { return defaultValue; } String p = bundle.getString(name); if (p == null) { return defaultValue; } return p; } /** * Get double property for activity. 
* * @param name * @param defaultValue * @return */ public double getDoubleProperty(String name, double defaultValue) { Bundle bundle = this.getIntent().getExtras(); if (bundle == null) { return defaultValue; } Double p; try { p = (Double) bundle.get(name); } catch (ClassCastException e) { p = Double.parseDouble(bundle.get(name).toString()); } if (p == null) { return defaultValue; } return p.doubleValue(); } /** * Set boolean property on activity. * * @param name * @param value */ public void setBooleanProperty(String name, boolean value) { Log.d(TAG, "Setting boolean properties in DroidGap will be deprecated in 3.0 on July 2013, please use config.xml"); this.getIntent().putExtra(name, value); } /** * Set int property on activity. * * @param name * @param value */ public void setIntegerProperty(String name, int value) { Log.d(TAG, "Setting integer properties in DroidGap will be deprecated in 3.1 on August 2013, please use config.xml"); this.getIntent().putExtra(name, value); } /** * Set string property on activity. * * @param name * @param value */ public void setStringProperty(String name, String value) { Log.d(TAG, "Setting string properties in DroidGap will be deprecated in 3.0 on July 2013, please use config.xml"); this.getIntent().putExtra(name, value); } /** * Set double property on activity. * * @param name * @param value */ public void setDoubleProperty(String name, double value) { Log.d(TAG, "Setting double properties in DroidGap will be deprecated in 3.0 on July 2013, please use config.xml"); this.getIntent().putExtra(name, value); } @Override /** * Called when the system is about to start resuming a previous activity. 
*/ protected void onPause() { super.onPause(); LOG.d(TAG, "Paused the application!"); // Don't process pause if shutting down, since onDestroy() will be called if (this.activityState == ACTIVITY_EXITING) { return; } if (this.appView == null) { return; } else { this.appView.handlePause(this.keepRunning); } // hide the splash screen to avoid leaking a window this.removeSplashScreen(); } @Override /** * Called when the activity receives a new intent **/ protected void onNewIntent(Intent intent) { super.onNewIntent(intent); //Forward to plugins if (this.appView != null) this.appView.onNewIntent(intent); } @Override /** * Called when the activity will start interacting with the user. */ protected void onResume() { super.onResume(); LOG.d(TAG, "Resuming the App"); if (this.activityState == ACTIVITY_STARTING) { this.activityState = ACTIVITY_RUNNING; return; } if (this.appView == null) { return; } this.appView.handleResume(this.keepRunning, this.activityResultKeepRunning); // If app doesn't want to run in background if (!this.keepRunning || this.activityResultKeepRunning) { // Restore multitasking state if (this.activityResultKeepRunning) { this.keepRunning = this.activityResultKeepRunning; this.activityResultKeepRunning = false; } } } @Override /** * The final call you receive before your activity is destroyed. */ public void onDestroy() { LOG.d(TAG, "onDestroy()"); super.onDestroy(); // hide the splash screen to avoid leaking a window this.removeSplashScreen(); if (this.appView != null) { appView.handleDestroy(); } else { this.endActivity(); } } /** * Send a message to all plugins. * * @param id The message id * @param data The message data */ public void postMessage(String id, Object data) { if (this.appView != null) { this.appView.postMessage(id, data); } } /** * @deprecated * Add services to res/xml/plugins.xml instead. * * Add a class that implements a service. 
* * @param serviceType * @param className */ public void addService(String serviceType, String className) { if (this.appView != null && this.appView.pluginManager != null) { this.appView.pluginManager.addService(serviceType, className); } } /** * Send JavaScript statement back to JavaScript. * (This is a convenience method) * * @param message */ public void sendJavascript(String statement) { if (this.appView != null) { this.appView.jsMessageQueue.addJavaScript(statement); } } /** * Show the spinner. Must be called from the UI thread. * * @param title Title of the dialog * @param message The message of the dialog */ public void spinnerStart(final String title, final String message) { if (this.spinnerDialog != null) { this.spinnerDialog.dismiss(); this.spinnerDialog = null; } final DroidGap me = this; this.spinnerDialog = ProgressDialog.show(DroidGap.this, title, message, true, true, new DialogInterface.OnCancelListener() { public void onCancel(DialogInterface dialog) { me.spinnerDialog = null; } }); } /** * Stop spinner - Must be called from UI thread */ public void spinnerStop() { if (this.spinnerDialog != null && this.spinnerDialog.isShowing()) { this.spinnerDialog.dismiss(); this.spinnerDialog = null; } } /** * End this activity by calling finish for activity */ public void endActivity() { this.activityState = ACTIVITY_EXITING; super.finish(); } /** * Launch an activity for which you would like a result when it finished. When this activity exits, * your onActivityResult() method will be called. 
* * @param command The command object * @param intent The intent to start * @param requestCode The request code that is passed to callback to identify the activity */ public void startActivityForResult(CordovaPlugin command, Intent intent, int requestCode) { this.activityResultCallback = command; this.activityResultKeepRunning = this.keepRunning; // If multitasking turned on, then disable it for activities that return results if (command != null) { this.keepRunning = false; } // Start activity super.startActivityForResult(intent, requestCode); } @Override /** * Called when an activity you launched exits, giving you the requestCode you started it with, * the resultCode it returned, and any additional data from it. * * @param requestCode The request code originally supplied to startActivityForResult(), * allowing you to identify who this result came from. * @param resultCode The integer result code returned by the child activity through its setResult(). * @param data An Intent, which can return result data to the caller (various data can be attached to Intent "extras"). */ protected void onActivityResult(int requestCode, int resultCode, Intent intent) { LOG.d(TAG, "Incoming Result"); super.onActivityResult(requestCode, resultCode, intent); Log.d(TAG, "Request code = " + requestCode); ValueCallback<Uri> mUploadMessage = this.appView.getWebChromeClient().getValueCallback(); if (requestCode == CordovaChromeClient.FILECHOOSER_RESULTCODE) { Log.d(TAG, "did we get here?"); if (null == mUploadMessage) return; Uri result = intent == null || resultCode != Activity.RESULT_OK ? 
null : intent.getData(); Log.d(TAG, "result = " + result); // Uri filepath = Uri.parse("file://" + FileUtils.getRealPathFromURI(result, this)); // Log.d(TAG, "result = " + filepath); mUploadMessage.onReceiveValue(result); mUploadMessage = null; } CordovaPlugin callback = this.activityResultCallback; if(callback == null) { if(initCallbackClass != null) { this.activityResultCallback = appView.pluginManager.getPlugin(initCallbackClass); callback = activityResultCallback; LOG.d(TAG, "We have a callback to send this result to"); callback.onActivityResult(requestCode, resultCode, intent); } } else { LOG.d(TAG, "We have a callback to send this result to"); callback.onActivityResult(requestCode, resultCode, intent); } } public void setActivityResultCallback(CordovaPlugin plugin) { this.activityResultCallback = plugin; } /** * Report an error to the host application. These errors are unrecoverable (i.e. the main resource is unavailable). * The errorCode parameter corresponds to one of the ERROR_* constants. * * @param errorCode The error code corresponding to an ERROR_* value. * @param description A String describing the error. * @param failingUrl The url that failed to load. 
*/
    public void onReceivedError(final int errorCode, final String description, final String failingUrl) {
        final DroidGap me = this;

        // If errorUrl specified, then load it
        final String errorUrl = me.getStringProperty("errorUrl", null);
        if ((errorUrl != null) && (errorUrl.startsWith("file://") || Config.isUrlWhiteListed(errorUrl)) && (!failingUrl.equals(errorUrl))) {

            // Load URL on UI thread
            me.runOnUiThread(new Runnable() {
                public void run() {
                    // Stop "app loading" spinner if showing
                    me.spinnerStop();
                    me.appView.showWebPage(errorUrl, false, true, null);
                }
            });
        }
        // If not, then display error dialog
        else {
            // A failed host lookup is not treated as fatal here; only other errors exit the app.
            final boolean exit = !(errorCode == WebViewClient.ERROR_HOST_LOOKUP);
            me.runOnUiThread(new Runnable() {
                public void run() {
                    if (exit) {
                        me.appView.setVisibility(View.GONE);
                        me.displayError("Application Error", description + " (" + failingUrl + ")", "OK", exit);
                    }
                }
            });
        }
    }

    /**
     * Display an error dialog and optionally exit application.
     *
     * @param title dialog title
     * @param message dialog body text
     * @param button label for the confirmation button
     * @param exit when true, the activity is finished after the user dismisses the dialog
     */
    public void displayError(final String title, final String message, final String button, final boolean exit) {
        final DroidGap me = this;
        me.runOnUiThread(new Runnable() {
            public void run() {
                try {
                    AlertDialog.Builder dlg = new AlertDialog.Builder(me);
                    dlg.setMessage(message);
                    dlg.setTitle(title);
                    dlg.setCancelable(false);
                    dlg.setPositiveButton(button, new AlertDialog.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss();
                            if (exit) {
                                me.endActivity();
                            }
                        }
                    });
                    dlg.create();
                    dlg.show();
                } catch (Exception e) {
                    // If the dialog cannot be shown (e.g. the activity is finishing), just finish.
                    finish();
                }
            }
        });
    }

    /**
     * Determine if URL is in approved list of URLs to load.
*
     * @param url the URL to check against the whitelist
     * @return true when the URL may be loaded in the Cordova webview
     */
    public boolean isUrlWhiteListed(String url) {
        return Config.isUrlWhiteListed(url);
    }

    /*
     * Hook in DroidGap for menu plugins
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        this.postMessage("onCreateOptionsMenu", menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        this.postMessage("onPrepareOptionsMenu", menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        this.postMessage("onOptionsItemSelected", item);
        return true;
    }

    /**
     * Get Activity context.
     *
     * @return this activity, which is itself a Context
     */
    public Context getContext() {
        LOG.d(TAG, "This will be deprecated December 2012");
        return this;
    }

    /**
     * Load the specified URL in the Cordova webview or a new browser instance.
     *
     * NOTE: If openExternal is false, only URLs listed in whitelist can be loaded.
     *
     * @param url The url to load.
     * @param openExternal Load url in browser instead of Cordova webview.
     * @param clearHistory Clear the history stack, so new page becomes top of history
     * @param params DroidGap parameters for new app
     */
    public void showWebPage(String url, boolean openExternal, boolean clearHistory, HashMap<String, Object> params) {
        if (this.appView != null) {
            appView.showWebPage(url, openExternal, clearHistory, params);
        }
    }

    // Dialog used to render the splash screen over the activity.
    protected Dialog splashDialog;

    /**
     * Removes the Dialog that displays the splash screen
     */
    public void removeSplashScreen() {
        if (splashDialog != null && splashDialog.isShowing()) {
            splashDialog.dismiss();
            splashDialog = null;
        }
    }

    /**
     * Shows the splash screen over the full Activity
     *
     * @param time how long (ms) before the splash screen is removed automatically
     */
    @SuppressWarnings("deprecation")
    protected void showSplashScreen(final int time) {
        final DroidGap that = this;
        Runnable runnable = new Runnable() {
            public void run() {
                // Get reference to display
                Display display = getWindowManager().getDefaultDisplay();

                // Create the layout for the dialog
                LinearLayout root = new LinearLayout(that.getActivity());
                root.setMinimumHeight(display.getHeight());
                root.setMinimumWidth(display.getWidth());
                root.setOrientation(LinearLayout.VERTICAL);
                root.setBackgroundColor(that.getIntegerProperty("backgroundColor", Color.BLACK));
                root.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT,
                        ViewGroup.LayoutParams.FILL_PARENT, 0.0F));
                root.setBackgroundResource(that.splashscreen);

                // Create and show the dialog
                splashDialog = new Dialog(that, android.R.style.Theme_Translucent_NoTitleBar);
                // check to see if the splash screen should be full screen
                if ((getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_FULLSCREEN)
                        == WindowManager.LayoutParams.FLAG_FULLSCREEN) {
                    splashDialog.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                            WindowManager.LayoutParams.FLAG_FULLSCREEN);
                }
                splashDialog.setContentView(root);
                splashDialog.setCancelable(false);
                splashDialog.show();

                // Set Runnable to remove splash screen just in case
                final Handler handler = new Handler();
                handler.postDelayed(new Runnable() {
                    public void run() {
                        removeSplashScreen();
                    }
                }, time);
            }
        };
        this.runOnUiThread(runnable);
    }

    @Override
    public boolean onKeyUp(int keyCode, KeyEvent event) {
        // Get whatever has focus!
        View childView = appView.getFocusedChild();
        // Hand BACK key presses to the webview while a custom view is showing
        // or a child of the webview has focus.
        if ((appView.isCustomViewShowing() || childView != null) && keyCode == KeyEvent.KEYCODE_BACK) {
            return appView.onKeyUp(keyCode, event);
        } else {
            return super.onKeyUp(keyCode, event);
        }
    }

    /*
     * Android 2.x needs to be able to check where the cursor is. Android 4.x does not
     *
     * (non-Javadoc)
     * @see android.app.Activity#onKeyDown(int, android.view.KeyEvent)
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Get whatever has focus!
        View childView = appView.getFocusedChild();
        // Determine if the focus is on the current view or not
        if (childView != null && keyCode == KeyEvent.KEYCODE_BACK) {
            return appView.onKeyDown(keyCode, event);
        }
        else
            return super.onKeyDown(keyCode, event);
    }

    /**
     * Called when a message is sent to plugin.
*
     * @param id The message id
     * @param data The message data
     * @return Object or null
     */
    public Object onMessage(String id, Object data) {
        LOG.d(TAG, "onMessage(" + id + "," + data + ")");
        if ("splashscreen".equals(id)) {
            if ("hide".equals(data.toString())) {
                this.removeSplashScreen();
            }
            else {
                // If the splash dialog is showing don't try to show it again
                if (this.splashDialog == null || !this.splashDialog.isShowing()) {
                    this.splashscreen = this.getIntegerProperty("splashscreen", 0);
                    this.showSplashScreen(this.splashscreenTime);
                }
            }
        }
        else if ("spinner".equals(id)) {
            if ("stop".equals(data.toString())) {
                this.spinnerStop();
                // Reveal the webview once loading has finished.
                this.appView.setVisibility(View.VISIBLE);
            }
        }
        else if ("onReceivedError".equals(id)) {
            // data is a JSONObject carrying errorCode/description/url fields.
            JSONObject d = (JSONObject) data;
            try {
                this.onReceivedError(d.getInt("errorCode"), d.getString("description"), d.getString("url"));
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        else if ("exit".equals(id)) {
            this.endActivity();
        }
        return null;
    }

    /**
     * @return the ExecutorService plugins use for background work
     */
    public ExecutorService getThreadPool() {
        return threadPool;
    }

    /**
     * Save the class name of the plugin awaiting an activity result, so the
     * result can still be dispatched if the process is killed and restarted
     * while the child activity is in the foreground.
     */
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        if (this.activityResultCallback != null) {
            String cClass = this.activityResultCallback.getClass().getName();
            outState.putString("callbackClass", cClass);
        }
    }
}
package org.hl7.v3; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlEnumValue; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for ArtificialDentition. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="ArtificialDentition"> * &lt;restriction base="{urn:hl7-org:v3}cs"> * &lt;enumeration value="TID10a"/> * &lt;enumeration value="TID10i"/> * &lt;enumeration value="TID10p"/> * &lt;enumeration value="TID10pd"/> * &lt;enumeration value="TID10pm"/> * &lt;enumeration value="TID11a"/> * &lt;enumeration value="TID11i"/> * &lt;enumeration value="TID11p"/> * &lt;enumeration value="TID11pd"/> * &lt;enumeration value="TID11pm"/> * &lt;enumeration value="TID12a"/> * &lt;enumeration value="TID12i"/> * &lt;enumeration value="TID12p"/> * &lt;enumeration value="TID12pd"/> * &lt;enumeration value="TID12pm"/> * &lt;enumeration value="TID13a"/> * &lt;enumeration value="TID13i"/> * &lt;enumeration value="TID13p"/> * &lt;enumeration value="TID13pd"/> * &lt;enumeration value="TID13pm"/> * &lt;enumeration value="TID14a"/> * &lt;enumeration value="TID14i"/> * &lt;enumeration value="TID14p"/> * &lt;enumeration value="TID14pd"/> * &lt;enumeration value="TID14pm"/> * &lt;enumeration value="TID15a"/> * &lt;enumeration value="TID15i"/> * &lt;enumeration value="TID15p"/> * &lt;enumeration value="TID15pd"/> * &lt;enumeration value="TID15pm"/> * &lt;enumeration value="TID16a"/> * &lt;enumeration value="TID16i"/> * &lt;enumeration value="TID16p"/> * &lt;enumeration value="TID16pd"/> * &lt;enumeration value="TID16pm"/> * &lt;enumeration value="TID17a"/> * &lt;enumeration value="TID17ad"/> * &lt;enumeration value="TID17am"/> * &lt;enumeration value="TID17i"/> * &lt;enumeration value="TID17id"/> * &lt;enumeration value="TID17im"/> * &lt;enumeration value="TID17p"/> * &lt;enumeration value="TID17pd"/> * &lt;enumeration value="TID17pm"/> * &lt;enumeration value="TID18a"/> * 
&lt;enumeration value="TID18ad"/> * &lt;enumeration value="TID18am"/> * &lt;enumeration value="TID18i"/> * &lt;enumeration value="TID18id"/> * &lt;enumeration value="TID18im"/> * &lt;enumeration value="TID18p"/> * &lt;enumeration value="TID18pd"/> * &lt;enumeration value="TID18pm"/> * &lt;enumeration value="TID19a"/> * &lt;enumeration value="TID19ad"/> * &lt;enumeration value="TID19am"/> * &lt;enumeration value="TID19i"/> * &lt;enumeration value="TID19id"/> * &lt;enumeration value="TID19im"/> * &lt;enumeration value="TID19p"/> * &lt;enumeration value="TID19pd"/> * &lt;enumeration value="TID19pm"/> * &lt;enumeration value="TID1a"/> * &lt;enumeration value="TID1i"/> * &lt;enumeration value="TID1p"/> * &lt;enumeration value="TID1pd"/> * &lt;enumeration value="TID1pm"/> * &lt;enumeration value="TID20a"/> * &lt;enumeration value="TID20i"/> * &lt;enumeration value="TID20p"/> * &lt;enumeration value="TID20pd"/> * &lt;enumeration value="TID20pm"/> * &lt;enumeration value="TID21a"/> * &lt;enumeration value="TID21i"/> * &lt;enumeration value="TID21p"/> * &lt;enumeration value="TID21pd"/> * &lt;enumeration value="TID21pm"/> * &lt;enumeration value="TID22a"/> * &lt;enumeration value="TID22i"/> * &lt;enumeration value="TID22p"/> * &lt;enumeration value="TID22pd"/> * &lt;enumeration value="TID22pm"/> * &lt;enumeration value="TID23a"/> * &lt;enumeration value="TID23i"/> * &lt;enumeration value="TID23p"/> * &lt;enumeration value="TID23pd"/> * &lt;enumeration value="TID23pm"/> * &lt;enumeration value="TID24a"/> * &lt;enumeration value="TID24i"/> * &lt;enumeration value="TID24p"/> * &lt;enumeration value="TID24pd"/> * &lt;enumeration value="TID24pm"/> * &lt;enumeration value="TID25a"/> * &lt;enumeration value="TID25i"/> * &lt;enumeration value="TID25p"/> * &lt;enumeration value="TID25pd"/> * &lt;enumeration value="TID25pm"/> * &lt;enumeration value="TID26a"/> * &lt;enumeration value="TID26i"/> * &lt;enumeration value="TID26p"/> * &lt;enumeration value="TID26pd"/> * &lt;enumeration 
value="TID26pm"/> * &lt;enumeration value="TID27a"/> * &lt;enumeration value="TID27i"/> * &lt;enumeration value="TID27p"/> * &lt;enumeration value="TID27pd"/> * &lt;enumeration value="TID27pm"/> * &lt;enumeration value="TID28a"/> * &lt;enumeration value="TID28i"/> * &lt;enumeration value="TID28p"/> * &lt;enumeration value="TID28pd"/> * &lt;enumeration value="TID28pm"/> * &lt;enumeration value="TID29a"/> * &lt;enumeration value="TID29i"/> * &lt;enumeration value="TID29p"/> * &lt;enumeration value="TID29pd"/> * &lt;enumeration value="TID29pm"/> * &lt;enumeration value="TID2a"/> * &lt;enumeration value="TID2i"/> * &lt;enumeration value="TID2p"/> * &lt;enumeration value="TID2pd"/> * &lt;enumeration value="TID2pm"/> * &lt;enumeration value="TID30a"/> * &lt;enumeration value="TID30ad"/> * &lt;enumeration value="TID30am"/> * &lt;enumeration value="TID30i"/> * &lt;enumeration value="TID30id"/> * &lt;enumeration value="TID30im"/> * &lt;enumeration value="TID30p"/> * &lt;enumeration value="TID30pd"/> * &lt;enumeration value="TID30pm"/> * &lt;enumeration value="TID31a"/> * &lt;enumeration value="TID31ad"/> * &lt;enumeration value="TID31am"/> * &lt;enumeration value="TID31i"/> * &lt;enumeration value="TID31id"/> * &lt;enumeration value="TID31im"/> * &lt;enumeration value="TID31p"/> * &lt;enumeration value="TID31pd"/> * &lt;enumeration value="TID31pm"/> * &lt;enumeration value="TID32a"/> * &lt;enumeration value="TID32ad"/> * &lt;enumeration value="TID32am"/> * &lt;enumeration value="TID32i"/> * &lt;enumeration value="TID32id"/> * &lt;enumeration value="TID32im"/> * &lt;enumeration value="TID32p"/> * &lt;enumeration value="TID32pd"/> * &lt;enumeration value="TID32pm"/> * &lt;enumeration value="TID3a"/> * &lt;enumeration value="TID3i"/> * &lt;enumeration value="TID3p"/> * &lt;enumeration value="TID3pd"/> * &lt;enumeration value="TID3pm"/> * &lt;enumeration value="TID4a"/> * &lt;enumeration value="TID4i"/> * &lt;enumeration value="TID4p"/> * &lt;enumeration value="TID4pd"/> * 
&lt;enumeration value="TID4pm"/>
 *   (the same a/i/p/pd/pm pattern continues for "TID5" through "TID9")
 * &lt;/restriction>
 * &lt;/simpleType>
 * </pre>
 */
@XmlType(name = "ArtificialDentition")
@XmlEnum
public enum ArtificialDentition {

    // Generated JAXB constants: each pairs the XML code literal (@XmlEnumValue)
    // with a Java name of the form TID_<tooth>_<SUFFIX>, where the suffix is the
    // code's trailing letters upper-cased (A, I, P, PD, PM, plus AD/AM/ID/IM
    // variants on some teeth). Order and literals are part of the XML contract;
    // do not reorder or rename.
    @XmlEnumValue("TID10a") TID_10_A("TID10a"),
    @XmlEnumValue("TID10i") TID_10_I("TID10i"),
    @XmlEnumValue("TID10p") TID_10_P("TID10p"),
    @XmlEnumValue("TID10pd") TID_10_PD("TID10pd"),
    @XmlEnumValue("TID10pm") TID_10_PM("TID10pm"),
    @XmlEnumValue("TID11a") TID_11_A("TID11a"),
    @XmlEnumValue("TID11i") TID_11_I("TID11i"),
    @XmlEnumValue("TID11p") TID_11_P("TID11p"),
    @XmlEnumValue("TID11pd") TID_11_PD("TID11pd"),
    @XmlEnumValue("TID11pm") TID_11_PM("TID11pm"),
    @XmlEnumValue("TID12a") TID_12_A("TID12a"),
    @XmlEnumValue("TID12i") TID_12_I("TID12i"),
    @XmlEnumValue("TID12p") TID_12_P("TID12p"),
    @XmlEnumValue("TID12pd") TID_12_PD("TID12pd"),
    @XmlEnumValue("TID12pm") TID_12_PM("TID12pm"),
    @XmlEnumValue("TID13a") TID_13_A("TID13a"),
    @XmlEnumValue("TID13i") TID_13_I("TID13i"),
    @XmlEnumValue("TID13p") TID_13_P("TID13p"),
    @XmlEnumValue("TID13pd") TID_13_PD("TID13pd"),
    @XmlEnumValue("TID13pm") TID_13_PM("TID13pm"),
    @XmlEnumValue("TID14a") TID_14_A("TID14a"),
    @XmlEnumValue("TID14i") TID_14_I("TID14i"),
    @XmlEnumValue("TID14p") TID_14_P("TID14p"),
    @XmlEnumValue("TID14pd") TID_14_PD("TID14pd"),
    @XmlEnumValue("TID14pm") TID_14_PM("TID14pm"),
    @XmlEnumValue("TID15a") TID_15_A("TID15a"),
    @XmlEnumValue("TID15i") TID_15_I("TID15i"),
    @XmlEnumValue("TID15p") TID_15_P("TID15p"),
    @XmlEnumValue("TID15pd") TID_15_PD("TID15pd"),
    @XmlEnumValue("TID15pm") TID_15_PM("TID15pm"),
    @XmlEnumValue("TID16a") TID_16_A("TID16a"),
    @XmlEnumValue("TID16i") TID_16_I("TID16i"),
    @XmlEnumValue("TID16p") TID_16_P("TID16p"),
    @XmlEnumValue("TID16pd") TID_16_PD("TID16pd"),
    @XmlEnumValue("TID16pm") TID_16_PM("TID16pm"),
    @XmlEnumValue("TID17a") TID_17_A("TID17a"),
    @XmlEnumValue("TID17ad") TID_17_AD("TID17ad"),
    @XmlEnumValue("TID17am") TID_17_AM("TID17am"),
    @XmlEnumValue("TID17i") TID_17_I("TID17i"),
    @XmlEnumValue("TID17id") TID_17_ID("TID17id"),
    @XmlEnumValue("TID17im") TID_17_IM("TID17im"),
    @XmlEnumValue("TID17p") TID_17_P("TID17p"),
    @XmlEnumValue("TID17pd") TID_17_PD("TID17pd"),
    @XmlEnumValue("TID17pm") TID_17_PM("TID17pm"),
    @XmlEnumValue("TID18a") TID_18_A("TID18a"),
    @XmlEnumValue("TID18ad") TID_18_AD("TID18ad"),
    @XmlEnumValue("TID18am") TID_18_AM("TID18am"),
    @XmlEnumValue("TID18i") TID_18_I("TID18i"),
    @XmlEnumValue("TID18id") TID_18_ID("TID18id"),
    @XmlEnumValue("TID18im") TID_18_IM("TID18im"),
    @XmlEnumValue("TID18p") TID_18_P("TID18p"),
    @XmlEnumValue("TID18pd") TID_18_PD("TID18pd"),
    @XmlEnumValue("TID18pm") TID_18_PM("TID18pm"),
    @XmlEnumValue("TID19a") TID_19_A("TID19a"),
    @XmlEnumValue("TID19ad") TID_19_AD("TID19ad"),
    @XmlEnumValue("TID19am") TID_19_AM("TID19am"),
    @XmlEnumValue("TID19i") TID_19_I("TID19i"),
    @XmlEnumValue("TID19id") TID_19_ID("TID19id"),
    @XmlEnumValue("TID19im") TID_19_IM("TID19im"),
    @XmlEnumValue("TID19p") TID_19_P("TID19p"),
    @XmlEnumValue("TID19pd") TID_19_PD("TID19pd"),
    @XmlEnumValue("TID19pm") TID_19_PM("TID19pm"),
    @XmlEnumValue("TID1a") TID_1_A("TID1a"),
    @XmlEnumValue("TID1i") TID_1_I("TID1i"),
    @XmlEnumValue("TID1p") TID_1_P("TID1p"),
    @XmlEnumValue("TID1pd") TID_1_PD("TID1pd"),
    @XmlEnumValue("TID1pm") TID_1_PM("TID1pm"),
    @XmlEnumValue("TID20a") TID_20_A("TID20a"),
    @XmlEnumValue("TID20i") TID_20_I("TID20i"),
    @XmlEnumValue("TID20p") TID_20_P("TID20p"),
    @XmlEnumValue("TID20pd") TID_20_PD("TID20pd"),
    @XmlEnumValue("TID20pm") TID_20_PM("TID20pm"),
    @XmlEnumValue("TID21a") TID_21_A("TID21a"),
    @XmlEnumValue("TID21i") TID_21_I("TID21i"),
    @XmlEnumValue("TID21p") TID_21_P("TID21p"),
    @XmlEnumValue("TID21pd") TID_21_PD("TID21pd"),
    @XmlEnumValue("TID21pm") TID_21_PM("TID21pm"),
    @XmlEnumValue("TID22a") TID_22_A("TID22a"),
    @XmlEnumValue("TID22i") TID_22_I("TID22i"),
    @XmlEnumValue("TID22p") TID_22_P("TID22p"),
    @XmlEnumValue("TID22pd") TID_22_PD("TID22pd"),
    @XmlEnumValue("TID22pm") TID_22_PM("TID22pm"),
    @XmlEnumValue("TID23a") TID_23_A("TID23a"),
    @XmlEnumValue("TID23i") TID_23_I("TID23i"),
    @XmlEnumValue("TID23p") TID_23_P("TID23p"),
    @XmlEnumValue("TID23pd") TID_23_PD("TID23pd"),
    @XmlEnumValue("TID23pm") TID_23_PM("TID23pm"),
    @XmlEnumValue("TID24a") TID_24_A("TID24a"),
    @XmlEnumValue("TID24i") TID_24_I("TID24i"),
    @XmlEnumValue("TID24p") TID_24_P("TID24p"),
    @XmlEnumValue("TID24pd") TID_24_PD("TID24pd"),
    @XmlEnumValue("TID24pm") TID_24_PM("TID24pm"),
    @XmlEnumValue("TID25a") TID_25_A("TID25a"),
    @XmlEnumValue("TID25i") TID_25_I("TID25i"),
    @XmlEnumValue("TID25p") TID_25_P("TID25p"),
    @XmlEnumValue("TID25pd") TID_25_PD("TID25pd"),
    @XmlEnumValue("TID25pm") TID_25_PM("TID25pm"),
    @XmlEnumValue("TID26a") TID_26_A("TID26a"),
    @XmlEnumValue("TID26i") TID_26_I("TID26i"),
    @XmlEnumValue("TID26p") TID_26_P("TID26p"),
    @XmlEnumValue("TID26pd") TID_26_PD("TID26pd"),
    @XmlEnumValue("TID26pm") TID_26_PM("TID26pm"),
    @XmlEnumValue("TID27a") TID_27_A("TID27a"),
    @XmlEnumValue("TID27i") TID_27_I("TID27i"),
    @XmlEnumValue("TID27p") TID_27_P("TID27p"),
    @XmlEnumValue("TID27pd") TID_27_PD("TID27pd"),
    @XmlEnumValue("TID27pm") TID_27_PM("TID27pm"),
    @XmlEnumValue("TID28a") TID_28_A("TID28a"),
    @XmlEnumValue("TID28i") TID_28_I("TID28i"),
    @XmlEnumValue("TID28p") TID_28_P("TID28p"),
    @XmlEnumValue("TID28pd") TID_28_PD("TID28pd"),
    @XmlEnumValue("TID28pm") TID_28_PM("TID28pm"),
    @XmlEnumValue("TID29a") TID_29_A("TID29a"),
    @XmlEnumValue("TID29i") TID_29_I("TID29i"),
    @XmlEnumValue("TID29p") TID_29_P("TID29p"),
    @XmlEnumValue("TID29pd") TID_29_PD("TID29pd"),
    @XmlEnumValue("TID29pm") TID_29_PM("TID29pm"),
    @XmlEnumValue("TID2a") TID_2_A("TID2a"),
    @XmlEnumValue("TID2i") TID_2_I("TID2i"),
    @XmlEnumValue("TID2p") TID_2_P("TID2p"),
    @XmlEnumValue("TID2pd") TID_2_PD("TID2pd"),
    @XmlEnumValue("TID2pm") TID_2_PM("TID2pm"),
    @XmlEnumValue("TID30a") TID_30_A("TID30a"),
    @XmlEnumValue("TID30ad") TID_30_AD("TID30ad"),
    @XmlEnumValue("TID30am") TID_30_AM("TID30am"),
    @XmlEnumValue("TID30i") TID_30_I("TID30i"),
    @XmlEnumValue("TID30id") TID_30_ID("TID30id"),
    @XmlEnumValue("TID30im") TID_30_IM("TID30im"),
    @XmlEnumValue("TID30p") TID_30_P("TID30p"),
    @XmlEnumValue("TID30pd") TID_30_PD("TID30pd"),
    @XmlEnumValue("TID30pm") TID_30_PM("TID30pm"),
    @XmlEnumValue("TID31a") TID_31_A("TID31a"),
    @XmlEnumValue("TID31ad") TID_31_AD("TID31ad"),
    @XmlEnumValue("TID31am") TID_31_AM("TID31am"),
    @XmlEnumValue("TID31i") TID_31_I("TID31i"),
    @XmlEnumValue("TID31id") TID_31_ID("TID31id"),
    @XmlEnumValue("TID31im") TID_31_IM("TID31im"),
    @XmlEnumValue("TID31p") TID_31_P("TID31p"),
    @XmlEnumValue("TID31pd") TID_31_PD("TID31pd"),
    @XmlEnumValue("TID31pm") TID_31_PM("TID31pm"),
    @XmlEnumValue("TID32a") TID_32_A("TID32a"),
    @XmlEnumValue("TID32ad") TID_32_AD("TID32ad"),
    @XmlEnumValue("TID32am") TID_32_AM("TID32am"),
    @XmlEnumValue("TID32i") TID_32_I("TID32i"),
    @XmlEnumValue("TID32id") TID_32_ID("TID32id"),
    @XmlEnumValue("TID32im") TID_32_IM("TID32im"),
    @XmlEnumValue("TID32p") TID_32_P("TID32p"),
    @XmlEnumValue("TID32pd") TID_32_PD("TID32pd"),
    @XmlEnumValue("TID32pm") TID_32_PM("TID32pm"),
    @XmlEnumValue("TID3a") TID_3_A("TID3a"),
    @XmlEnumValue("TID3i") TID_3_I("TID3i"),
    @XmlEnumValue("TID3p") TID_3_P("TID3p"),
    @XmlEnumValue("TID3pd") TID_3_PD("TID3pd"),
    @XmlEnumValue("TID3pm") TID_3_PM("TID3pm"),
    @XmlEnumValue("TID4a") TID_4_A("TID4a"),
    @XmlEnumValue("TID4i") TID_4_I("TID4i"),
    @XmlEnumValue("TID4p") TID_4_P("TID4p"),
    @XmlEnumValue("TID4pd") TID_4_PD("TID4pd"),
    @XmlEnumValue("TID4pm") TID_4_PM("TID4pm"),
    @XmlEnumValue("TID5a") TID_5_A("TID5a"),
    @XmlEnumValue("TID5i") TID_5_I("TID5i"),
    @XmlEnumValue("TID5p") TID_5_P("TID5p"),
    @XmlEnumValue("TID5pd") TID_5_PD("TID5pd"),
    @XmlEnumValue("TID5pm") TID_5_PM("TID5pm"),
    @XmlEnumValue("TID6a") TID_6_A("TID6a"),
    @XmlEnumValue("TID6i") TID_6_I("TID6i"),
    @XmlEnumValue("TID6p") TID_6_P("TID6p"),
    @XmlEnumValue("TID6pd") TID_6_PD("TID6pd"),
    @XmlEnumValue("TID6pm") TID_6_PM("TID6pm"),
    @XmlEnumValue("TID7a") TID_7_A("TID7a"),
    @XmlEnumValue("TID7i") TID_7_I("TID7i"),
    @XmlEnumValue("TID7p") TID_7_P("TID7p"),
    @XmlEnumValue("TID7pd") TID_7_PD("TID7pd"),
    @XmlEnumValue("TID7pm") TID_7_PM("TID7pm"),
    @XmlEnumValue("TID8a") TID_8_A("TID8a"),
    @XmlEnumValue("TID8i") TID_8_I("TID8i"),
    @XmlEnumValue("TID8p") TID_8_P("TID8p"),
    @XmlEnumValue("TID8pd") TID_8_PD("TID8pd"),
    @XmlEnumValue("TID8pm") TID_8_PM("TID8pm"),
    @XmlEnumValue("TID9a") TID_9_A("TID9a"),
    @XmlEnumValue("TID9i") TID_9_I("TID9i"),
    @XmlEnumValue("TID9p") TID_9_P("TID9p"),
    @XmlEnumValue("TID9pd") TID_9_PD("TID9pd"),
    @XmlEnumValue("TID9pm") TID_9_PM("TID9pm");

    // The XML code literal this constant marshals to/from.
    private final String value;

    ArtificialDentition(String v) {
        value = v;
    }

    /**
     * @return the XML code literal (e.g. "TID10a") for this constant
     */
    public String value() {
        return value;
    }

    /**
     * Resolves an XML code literal to its enum constant (linear scan over values()).
     *
     * @param v the code literal to look up
     * @return the matching constant
     * @throws IllegalArgumentException if no constant carries the given literal
     */
    public static ArtificialDentition fromValue(String v) {
        for (ArtificialDentition c: ArtificialDentition.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        throw new IllegalArgumentException(v);
    }

}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 * Changes may cause incorrect behavior and will be lost if the code is
 * regenerated.
 */

package fixtures.azurespecials.implementation;

import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.rest.ServiceCall;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceResponse;
import fixtures.azurespecials.ErrorException;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.POST;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in SubscriptionInMethods.
 */
public class SubscriptionInMethodsInner {
    /** The Retrofit service to perform REST calls. */
    private SubscriptionInMethodsService service;
    /** The service client containing this operation class. */
    private AutoRestAzureSpecialParametersTestClientImpl client;

    /**
     * Initializes an instance of SubscriptionInMethodsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public SubscriptionInMethodsInner(Retrofit retrofit, AutoRestAzureSpecialParametersTestClientImpl client) {
        this.service = retrofit.create(SubscriptionInMethodsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for SubscriptionInMethods to be
     * used by Retrofit to perform actual REST calls.
*/
    interface SubscriptionInMethodsService {
        // Note: each route embeds the expected subscriptionId literal; the test
        // service validates the {subscriptionId} path segment against it.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.SubscriptionInMethods postMethodLocalValid" })
        @POST("azurespecials/subscriptionId/method/string/none/path/local/1234-5678-9012-3456/{subscriptionId}")
        Observable<Response<ResponseBody>> postMethodLocalValid(@Path("subscriptionId") String subscriptionId, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.SubscriptionInMethods postMethodLocalNull" })
        @POST("azurespecials/subscriptionId/method/string/none/path/local/null/{subscriptionId}")
        Observable<Response<ResponseBody>> postMethodLocalNull(@Path("subscriptionId") String subscriptionId, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.SubscriptionInMethods postPathLocalValid" })
        @POST("azurespecials/subscriptionId/path/string/none/path/local/1234-5678-9012-3456/{subscriptionId}")
        Observable<Response<ResponseBody>> postPathLocalValid(@Path("subscriptionId") String subscriptionId, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azurespecials.SubscriptionInMethods postSwaggerLocalValid" })
        @POST("azurespecials/subscriptionId/swagger/string/none/path/local/1234-5678-9012-3456/{subscriptionId}")
        Observable<Response<ResponseBody>> postSwaggerLocalValid(@Path("subscriptionId") String subscriptionId, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed.
*
     * @param subscriptionId This should appear as a method parameter, use value '1234-5678-9012-3456'
     */
    public void postMethodLocalValid(String subscriptionId) {
        // Blocks until the call completes; a non-200 response surfaces as a runtime exception.
        postMethodLocalValidWithServiceResponseAsync(subscriptionId).toBlocking().single().body();
    }

    /**
     * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed.
     *
     * @param subscriptionId This should appear as a method parameter, use value '1234-5678-9012-3456'
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    public ServiceCall<Void> postMethodLocalValidAsync(String subscriptionId, final ServiceCallback<Void> serviceCallback) {
        return ServiceCall.fromResponse(postMethodLocalValidWithServiceResponseAsync(subscriptionId), serviceCallback);
    }

    /**
     * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed.
     *
     * @param subscriptionId This should appear as a method parameter, use value '1234-5678-9012-3456'
     * @return an {@link Observable} emitting the (void) response body when the call succeeds.
     */
    public Observable<Void> postMethodLocalValidAsync(String subscriptionId) {
        return postMethodLocalValidWithServiceResponseAsync(subscriptionId).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed.
     *
     * @param subscriptionId This should appear as a method parameter, use value '1234-5678-9012-3456'
     * @return the {@link ServiceResponse} object if successful.
*/
    public Observable<ServiceResponse<Void>> postMethodLocalValidWithServiceResponseAsync(String subscriptionId) {
        if (subscriptionId == null) {
            throw new IllegalArgumentException("Parameter subscriptionId is required and cannot be null.");
        }
        return service.postMethodLocalValid(subscriptionId, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        // Map the raw Retrofit response onto a typed ServiceResponse (or throw ErrorException).
                        ServiceResponse<Void> clientResponse = postMethodLocalValidDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Translates the HTTP response: 200 -> success, anything else -> ErrorException.
    private ServiceResponse<Void> postMethodLocalValidDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

    /**
     * POST method with subscriptionId modeled in the method. pass in subscription id = null, client-side validation should prevent you from making this call.
     *
     * @param subscriptionId This should appear as a method parameter, use value null, client-side validation should prevent the call
     */
    public void postMethodLocalNull(String subscriptionId) {
        postMethodLocalNullWithServiceResponseAsync(subscriptionId).toBlocking().single().body();
    }

    /**
     * POST method with subscriptionId modeled in the method. pass in subscription id = null, client-side validation should prevent you from making this call.
     *
     * @param subscriptionId This should appear as a method parameter, use value null, client-side validation should prevent the call
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link ServiceCall} object */ public ServiceCall<Void> postMethodLocalNullAsync(String subscriptionId, final ServiceCallback<Void> serviceCallback) { return ServiceCall.fromResponse(postMethodLocalNullWithServiceResponseAsync(subscriptionId), serviceCallback); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = null, client-side validation should prevent you from making this call. * * @param subscriptionId This should appear as a method parameter, use value null, client-side validation should prvenet the call * @return the {@link ServiceResponse} object if successful. */ public Observable<Void> postMethodLocalNullAsync(String subscriptionId) { return postMethodLocalNullWithServiceResponseAsync(subscriptionId).map(new Func1<ServiceResponse<Void>, Void>() { @Override public Void call(ServiceResponse<Void> response) { return response.body(); } }); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = null, client-side validation should prevent you from making this call. * * @param subscriptionId This should appear as a method parameter, use value null, client-side validation should prvenet the call * @return the {@link ServiceResponse} object if successful. 
*/ public Observable<ServiceResponse<Void>> postMethodLocalNullWithServiceResponseAsync(String subscriptionId) { if (subscriptionId == null) { throw new IllegalArgumentException("Parameter subscriptionId is required and cannot be null."); } return service.postMethodLocalNull(subscriptionId, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() { @Override public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) { try { ServiceResponse<Void> clientResponse = postMethodLocalNullDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<Void> postMethodLocalNullDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<Void>() { }.getType()) .registerError(ErrorException.class) .build(response); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId Should appear as a method parameter -use value '1234-5678-9012-3456' */ public void postPathLocalValid(String subscriptionId) { postPathLocalValidWithServiceResponseAsync(subscriptionId).toBlocking().single().body(); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId Should appear as a method parameter -use value '1234-5678-9012-3456' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @return the {@link ServiceCall} object */ public ServiceCall<Void> postPathLocalValidAsync(String subscriptionId, final ServiceCallback<Void> serviceCallback) { return ServiceCall.fromResponse(postPathLocalValidWithServiceResponseAsync(subscriptionId), serviceCallback); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId Should appear as a method parameter -use value '1234-5678-9012-3456' * @return the {@link ServiceResponse} object if successful. */ public Observable<Void> postPathLocalValidAsync(String subscriptionId) { return postPathLocalValidWithServiceResponseAsync(subscriptionId).map(new Func1<ServiceResponse<Void>, Void>() { @Override public Void call(ServiceResponse<Void> response) { return response.body(); } }); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId Should appear as a method parameter -use value '1234-5678-9012-3456' * @return the {@link ServiceResponse} object if successful. 
*/ public Observable<ServiceResponse<Void>> postPathLocalValidWithServiceResponseAsync(String subscriptionId) { if (subscriptionId == null) { throw new IllegalArgumentException("Parameter subscriptionId is required and cannot be null."); } return service.postPathLocalValid(subscriptionId, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() { @Override public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) { try { ServiceResponse<Void> clientResponse = postPathLocalValidDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<Void> postPathLocalValidDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<Void>() { }.getType()) .registerError(ErrorException.class) .build(response); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId The subscriptionId, which appears in the path, the value is always '1234-5678-9012-3456' */ public void postSwaggerLocalValid(String subscriptionId) { postSwaggerLocalValidWithServiceResponseAsync(subscriptionId).toBlocking().single().body(); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId The subscriptionId, which appears in the path, the value is always '1234-5678-9012-3456' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @return the {@link ServiceCall} object */ public ServiceCall<Void> postSwaggerLocalValidAsync(String subscriptionId, final ServiceCallback<Void> serviceCallback) { return ServiceCall.fromResponse(postSwaggerLocalValidWithServiceResponseAsync(subscriptionId), serviceCallback); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId The subscriptionId, which appears in the path, the value is always '1234-5678-9012-3456' * @return the {@link ServiceResponse} object if successful. */ public Observable<Void> postSwaggerLocalValidAsync(String subscriptionId) { return postSwaggerLocalValidWithServiceResponseAsync(subscriptionId).map(new Func1<ServiceResponse<Void>, Void>() { @Override public Void call(ServiceResponse<Void> response) { return response.body(); } }); } /** * POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed. * * @param subscriptionId The subscriptionId, which appears in the path, the value is always '1234-5678-9012-3456' * @return the {@link ServiceResponse} object if successful. 
*/ public Observable<ServiceResponse<Void>> postSwaggerLocalValidWithServiceResponseAsync(String subscriptionId) { if (subscriptionId == null) { throw new IllegalArgumentException("Parameter subscriptionId is required and cannot be null."); } return service.postSwaggerLocalValid(subscriptionId, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() { @Override public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) { try { ServiceResponse<Void> clientResponse = postSwaggerLocalValidDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<Void> postSwaggerLocalValidDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<Void>() { }.getType()) .registerError(ErrorException.class) .build(response); } }
/*
 * Copyright (C) 2014-2022 Philip Helger (www.helger.com)
 * philip[at]helger[dot]com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.helger.jaxb.mock.external;

import java.math.BigDecimal;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;

/**
 * <p>
 * Java class for CA_GEN_Issue complex type.
 * <p>
 * The following schema fragment specifies the expected content contained within
 * this class.
 *
 * <pre>
 * &lt;complexType name="CA_GEN_Issue">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="Title" type="{http://www.w3.org/2001/XMLSchema}decimal"/>
 *         &lt;element name="SubTitle" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="DateDescription" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="FirstPage">
 *           &lt;complexType>
 *             &lt;simpleContent>
 *               &lt;extension base="&lt;http://www.w3.org/2001/XMLSchema>string">
 *                 &lt;attribute name="Width" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
 *                 &lt;attribute name="Height" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
 *               &lt;/extension>
 *             &lt;/simpleContent>
 *           &lt;/complexType>
 *         &lt;/element>
 *       &lt;/sequence>
 *       &lt;attribute name="ID" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
 *       &lt;attribute name="CollectionID" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
 *       &lt;attribute name="ContentLanguage" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="MenuLanguage" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="MenuLayout" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="PageCount" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
 *       &lt;attribute name="ArticleCount" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
 *       &lt;attribute name="Directory" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="DateSort" use="required" type="{}CA_DATETIME" />
 *       &lt;attribute name="DateFrom" use="required" type="{}CA_DATETIME" />
 *       &lt;attribute name="DateTo" use="required" type="{}CA_DATETIME" />
 *       &lt;attribute name="DirAbsolute" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType (XmlAccessType.FIELD)
@XmlType (name = "CA_GEN_Issue", propOrder = { "m_aTitle", "m_sSubTitle", "m_sDateDescription", "m_aFirstPage" })
public final class MockJAXBIssue
{
  // NOTE: propOrder above refers to the field names, so renaming any of the
  // four element-bound fields would break the JAXB binding.
  @XmlElement (name = "Title", required = true)
  private BigDecimal m_aTitle;
  @XmlElement (name = "SubTitle")
  private String m_sSubTitle;
  @XmlElement (name = "DateDescription")
  private String m_sDateDescription;
  @XmlElement (name = "FirstPage")
  private MockJAXBIssue.FirstPage m_aFirstPage;
  @XmlAttribute (name = "ID")
  @XmlSchemaType (name = "unsignedInt")
  private long m_nID;
  @XmlAttribute (name = "CollectionID")
  @XmlSchemaType (name = "unsignedInt")
  private long m_nCollectionID;
  @XmlAttribute (name = "ContentLanguage")
  private String m_sContentLanguage;
  @XmlAttribute (name = "MenuLanguage")
  private String m_sMenuLanguage;
  @XmlAttribute (name = "MenuLayout")
  private String m_sMenuLayout;
  @XmlAttribute (name = "PageCount")
  @XmlSchemaType (name = "unsignedInt")
  private long m_nPageCount;
  @XmlAttribute (name = "ArticleCount")
  @XmlSchemaType (name = "unsignedInt")
  private long m_nArticleCount;
  @XmlAttribute (name = "Directory")
  private String m_sDirectory;
  @XmlAttribute (name = "DateSort")
  private String m_sDateSort;
  @XmlAttribute (name = "DateFrom")
  private String m_sDateFrom;
  @XmlAttribute (name = "DateTo")
  private String m_sDateTo;
  @XmlAttribute (name = "DirAbsolute")
  @XmlSchemaType (name = "unsignedInt")
  private long m_nDirAbsolute;

  /**
   * Gets the value of the title property.
   *
   * @return possible object is {@link BigDecimal }
   */
  public BigDecimal getTitle ()
  {
    return m_aTitle;
  }

  /**
   * Sets the value of the title property.
   *
   * @param value
   *        allowed object is {@link BigDecimal }
   */
  public void setTitle (final BigDecimal value)
  {
    this.m_aTitle = value;
  }

  /**
   * Gets the value of the subTitle property.
   *
   * @return possible object is {@link String }
   */
  public String getSubTitle ()
  {
    return m_sSubTitle;
  }

  /**
   * Sets the value of the subTitle property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setSubTitle (final String value)
  {
    this.m_sSubTitle = value;
  }

  /**
   * Gets the value of the dateDescription property.
   *
   * @return possible object is {@link String }
   */
  public String getDateDescription ()
  {
    return m_sDateDescription;
  }

  /**
   * Sets the value of the dateDescription property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setDateDescription (final String value)
  {
    this.m_sDateDescription = value;
  }

  /**
   * Gets the value of the firstPage property.
   *
   * @return possible object is {@link MockJAXBIssue.FirstPage }
   */
  public MockJAXBIssue.FirstPage getFirstPage ()
  {
    return m_aFirstPage;
  }

  /**
   * Sets the value of the firstPage property.
   *
   * @param value
   *        allowed object is {@link MockJAXBIssue.FirstPage }
   */
  public void setFirstPage (final MockJAXBIssue.FirstPage value)
  {
    this.m_aFirstPage = value;
  }

  /**
   * Gets the value of the id property.
   *
   * @return id
   */
  public long getID ()
  {
    return m_nID;
  }

  /**
   * Sets the value of the id property.
   *
   * @param value
   *        new id
   */
  public void setID (final long value)
  {
    this.m_nID = value;
  }

  /**
   * Gets the value of the collectionID property.
   *
   * @return collection ID
   */
  public long getCollectionID ()
  {
    return m_nCollectionID;
  }

  /**
   * Sets the value of the collectionID property.
   *
   * @param value
   *        new value
   */
  public void setCollectionID (final long value)
  {
    this.m_nCollectionID = value;
  }

  /**
   * Gets the value of the contentLanguage property.
   *
   * @return possible object is {@link String }
   */
  public String getContentLanguage ()
  {
    return m_sContentLanguage;
  }

  /**
   * Sets the value of the contentLanguage property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setContentLanguage (final String value)
  {
    this.m_sContentLanguage = value;
  }

  /**
   * Gets the value of the menuLanguage property.
   *
   * @return possible object is {@link String }
   */
  public String getMenuLanguage ()
  {
    return m_sMenuLanguage;
  }

  /**
   * Sets the value of the menuLanguage property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setMenuLanguage (final String value)
  {
    this.m_sMenuLanguage = value;
  }

  /**
   * Gets the value of the menuLayout property.
   *
   * @return possible object is {@link String }
   */
  public String getMenuLayout ()
  {
    return m_sMenuLayout;
  }

  /**
   * Sets the value of the menuLayout property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setMenuLayout (final String value)
  {
    this.m_sMenuLayout = value;
  }

  /**
   * Gets the value of the pageCount property.
   *
   * @return page count
   */
  public long getPageCount ()
  {
    return m_nPageCount;
  }

  /**
   * Sets the value of the pageCount property.
   *
   * @param value
   *        new value
   */
  public void setPageCount (final long value)
  {
    this.m_nPageCount = value;
  }

  /**
   * Gets the value of the articleCount property.
   *
   * @return article count
   */
  public long getArticleCount ()
  {
    return m_nArticleCount;
  }

  /**
   * Sets the value of the articleCount property.
   *
   * @param value
   *        new value
   */
  public void setArticleCount (final long value)
  {
    this.m_nArticleCount = value;
  }

  /**
   * Gets the value of the directory property.
   *
   * @return possible object is {@link String }
   */
  public String getDirectory ()
  {
    return m_sDirectory;
  }

  /**
   * Sets the value of the directory property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setDirectory (final String value)
  {
    this.m_sDirectory = value;
  }

  /**
   * Gets the value of the dateSort property.
   *
   * @return possible object is {@link String }
   */
  public String getDateSort ()
  {
    return m_sDateSort;
  }

  /**
   * Sets the value of the dateSort property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setDateSort (final String value)
  {
    this.m_sDateSort = value;
  }

  /**
   * Gets the value of the dateFrom property.
   *
   * @return possible object is {@link String }
   */
  public String getDateFrom ()
  {
    return m_sDateFrom;
  }

  /**
   * Sets the value of the dateFrom property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setDateFrom (final String value)
  {
    this.m_sDateFrom = value;
  }

  /**
   * Gets the value of the dateTo property.
   *
   * @return possible object is {@link String }
   */
  public String getDateTo ()
  {
    return m_sDateTo;
  }

  /**
   * Sets the value of the dateTo property.
   *
   * @param value
   *        allowed object is {@link String }
   */
  public void setDateTo (final String value)
  {
    this.m_sDateTo = value;
  }

  /**
   * Gets the value of the dirAbsolute property.
   *
   * @return absolute dir?
   */
  public long getDirAbsolute ()
  {
    return m_nDirAbsolute;
  }

  /**
   * Sets the value of the dirAbsolute property.
   *
   * @param value
   *        new value
   */
  public void setDirAbsolute (final long value)
  {
    this.m_nDirAbsolute = value;
  }

  /**
   * <p>
   * Java class for anonymous complex type.
   * <p>
   * The following schema fragment specifies the expected content contained
   * within this class.
   *
   * <pre>
   * &lt;complexType>
   *   &lt;simpleContent>
   *     &lt;extension base="&lt;http://www.w3.org/2001/XMLSchema>string">
   *       &lt;attribute name="Width" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
   *       &lt;attribute name="Height" use="required" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" />
   *     &lt;/extension>
   *   &lt;/simpleContent>
   * &lt;/complexType>
   * </pre>
   */
  @XmlAccessorType (XmlAccessType.FIELD)
  @XmlType (name = "", propOrder = { "m_sValue" })
  public static class FirstPage
  {
    @XmlValue
    private String m_sValue;
    @XmlAttribute (name = "Width", required = true)
    @XmlSchemaType (name = "unsignedInt")
    private long m_nWidth;
    @XmlAttribute (name = "Height", required = true)
    @XmlSchemaType (name = "unsignedInt")
    private long m_nHeight;

    /**
     * Gets the value of the value property.
     *
     * @return possible object is {@link String }
     */
    public String getValue ()
    {
      return m_sValue;
    }

    /**
     * Sets the value of the value property.
     *
     * @param value
     *        allowed object is {@link String }
     */
    public void setValue (final String value)
    {
      this.m_sValue = value;
    }

    /**
     * Gets the value of the width property.
     *
     * @return width
     */
    public long getWidth ()
    {
      return m_nWidth;
    }

    /**
     * Sets the value of the width property.
     *
     * @param value
     *        new width
     */
    public void setWidth (final long value)
    {
      this.m_nWidth = value;
    }

    /**
     * Gets the value of the height property.
     *
     * @return height
     */
    public long getHeight ()
    {
      return m_nHeight;
    }

    /**
     * Sets the value of the height property.
     *
     * @param value
     *        new height
     */
    public void setHeight (final long value)
    {
      this.m_nHeight = value;
    }
  }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package git4idea;

import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.intellij.openapi.vcs.Executor.overwrite;
import static com.intellij.openapi.vcs.Executor.touch;
import static git4idea.GitCucumberWorld.virtualCommits;
import static git4idea.test.GitExecutor.git;
import static org.junit.Assert.assertTrue;

/**
 * Test fixture describing a commit: parses a textual commit specification
 * (message, data lines such as the author, and file changes) via {@link #parse},
 * and can materialize it as a real git commit via {@link #apply()}.
 *
 * @author Kirill Likhodedov
 */
public class CommitDetails {

  private String myHash;
  private String myMessage;
  private String myAuthor;
  private Collection<Change> myChanges;

  /** A single file change belonging to the commit specification. */
  private static class Change {

    // Performs the change on disk. Only MODIFIED (overwrite file content) and
    // ADDED (create file) are implemented; DELETED and MOVED deliberately throw.
    public void apply() throws IOException {
      switch (myType) {
        case MODIFIED:
          overwrite(myFile, myContent);
          break;
        case ADDED:
          touch(myFile, myContent);
          break;
        case DELETED:
          throw new UnsupportedOperationException("Not implemented yet");
        case MOVED:
          throw new UnsupportedOperationException("Not implemented yet");
      }
    }

    enum Type {
      MODIFIED,
      ADDED,
      DELETED,
      MOVED
    }

    private final Type myType;
    private final String myFile;
    private final String myContent;

    Change(Type type, String filename, String content) {
      myType = type;
      myFile = filename;
      myContent = content;
    }
  }

  // The parser walks through the spec in this order: free-form commit message,
  // then "Key: value" data lines, then change lines ("M file.txt ...").
  private enum ParsingStage {
    MESSAGE,
    DATA,
    CHANGES
  }

  // Recognized "Key: value" data lines. Each constant knows how to store its
  // value into the CommitDetails being built.
  private enum Data {
    AUTHOR("Author") {
      @Override
      void apply(CommitDetails commitDetails, String value) {
        commitDetails.myAuthor = value;
      }
    };

    private final String myKey;

    Data(String key) {
      myKey = key;
    }

    abstract void apply(CommitDetails commitDetails, String value);

    private String getKey() {
      return myKey;
    }
  }

  /**
   * Format:
   * <pre>
   commit subject
   and optional description

   -----
   Author: John Bro
   Changes:
   M file.txt "feature changes"
   * </pre>
   */
  public static CommitDetails parse(String hash, String details) {
    CommitDetails commit = new CommitDetails();
    commit.myHash = hash;
    StringBuilder message = new StringBuilder();
    Collection<Change> changes = new ArrayList<Change>();

    ParsingStage stage = ParsingStage.MESSAGE;
    for (String line : StringUtil.splitByLines(details)) {
      // A data line ("Author: ...") or a change line ("M file.txt ...") advances
      // the stage; the stage never goes backwards.
      Pair<Data, String> data = checkDataLine(line);
      if (data != null) {
        stage = ParsingStage.DATA;
      }
      else if (line.matches("[MADR]\\d* [^ ]+ .+")) {
        stage = ParsingStage.CHANGES;
      }

      if (stage == ParsingStage.MESSAGE) {
        // NOTE(review): lines are concatenated without newline separators, and the
        // "-----" separator line (matching neither data nor change patterns) ends
        // up appended to the message as well — confirm this is intended.
        message.append(line);
      }
      else if (stage == ParsingStage.CHANGES) {
        changes.add(parseChange(line));
      }
      else if (data != null) {
        data.getFirst().apply(commit, data.getSecond());
      }
    }

    commit.myMessage = message.toString();
    commit.myChanges = changes;
    return commit;
  }

  // Returns the matched Data constant and its trimmed value if the line looks
  // like "key: value" (case-insensitive, non-empty value); null otherwise.
  private static Pair<Data, String> checkDataLine(String line) {
    for (Data data : Data.values()) {
      String dataPrefix = data.getKey().toLowerCase() + ": ";
      if (line.toLowerCase().startsWith(dataPrefix) && line.trim().length() != dataPrefix.length()) {
        return Pair.create(data, line.substring(dataPrefix.length()).trim());
      }
    }
    return null;
  }

  // Parses a change line of the form: <type letter> <filename> <quoted content>,
  // e.g. M file.txt "feature changes".
  private static Change parseChange(String change) {
    int firstSpace = change.indexOf(' ');
    int secondSpace = change.indexOf(' ', firstSpace + 1);
    return new Change(parseType(change.substring(0, firstSpace)), change.substring(firstSpace + 1, secondSpace),
                      StringUtil.unescapeStringCharacters(StringUtil.unquoteString(change.substring(secondSpace + 1))));
  }

  // NOTE(review): returns null for an unrecognized type letter, which would NPE
  // later in Change.apply()'s switch — presumably inputs are restricted to
  // M/A/D/R by the regex in parse(); confirm.
  private static Change.Type parseType(String type) {
    if (type.equals("M")) {
      return Change.Type.MODIFIED;
    }
    else if (type.equals("A")) {
      return Change.Type.ADDED;
    }
    else if (type.equals("D")) {
      return Change.Type.DELETED;
    }
    else if (type.equals("R")) {
      return Change.Type.MOVED;
    }
    return null;
  }

  /**
   * Performs all file changes on disk, stages added files, creates the commit via
   * {@code git commit}, and registers the mapping from this virtual hash to the
   * real commit in {@code virtualCommits}.
   *
   * @return real commit details.
   */
  public CommitDetails apply() throws IOException {
    for (Change change : myChanges) {
      change.apply();
    }
    for (Change change : myChanges) {
      if (change.myType == Change.Type.ADDED) {
        git("add %s", change.myFile);
      }
    }
    String authorString = myAuthor == null ? "" : String.format(" --author '%1$s <%1$s@example.com>'", myAuthor);
    String commitOutput = git(String.format("commit -am '%s' %s", myMessage, authorString));
    CommitDetails realCommit = parseHashFromCommitOutput(commitOutput);
    virtualCommits.register(myHash, realCommit);
    return realCommit;
  }

  // Extracts hash and subject from the first line of `git commit` output, which
  // looks like: [branch abc1234] commit subject
  CommitDetails parseHashFromCommitOutput(String commitOutput) {
    String line = commitOutput.split("\n")[0];
    Pattern reg = Pattern.compile("^\\s*\\[.+ ([a-fA-F0-9]+)\\] (.+)$");
    Matcher matcher = reg.matcher(line);
    boolean matches = matcher.matches();
    assertTrue(String.format("The output of the commit command doesn't match the expected pattern: %nLine: [%s]%nWhole output: [%s]",
                             StringUtil.escapeLineBreak(line), StringUtil.escapeLineBreak(commitOutput)),
               matches);
    return new CommitDetails().hash(matcher.group(1)).message(matcher.group(2));
  }

  private CommitDetails hash(String hash) {
    myHash = hash;
    return this;
  }

  private CommitDetails message(String message) {
    myMessage = message;
    return this;
  }

  public String getHash() {
    return myHash;
  }

  public String getMessage() {
    return myMessage;
  }
}
package com.artemis.utils; import java.util.Arrays; import static java.lang.Math.max; /** * Collection type a bit like ArrayList but does not preserve the order of its * entities, speedwise it is very good, especially suited for games. * * * @author original Bag by Arni Arent */ public class IntBag implements ImmutableIntBag { /** The backing array. */ private int[] data; /** The number of values stored by this bag. */ protected int size = 0; /** * Constructs an empty Bag with an initial capacity of 64. */ public IntBag() { this(64); } /** * Constructs an empty Bag with the specified initial capacity. * * @param capacity * the initial capacity of Bag */ public IntBag(int capacity) { data = new int[capacity]; } /** * Removes the first occurrence of the value from this IntBag, if * it is present. * * @param value * the value to be removed * * @return true, if value was removed */ public boolean removeValue(int value) { int index = indexOf(value); if (index > -1) removeIndex(index); return index > -1; } /** * Removes the element at the specified position in this Bag. * <p> * It does this by overwriting it was last element then removing last * element * </p> * * @param index * the index of element to be removed * * @return element that was removed from the Bag * @deprecated Call {@link #removeIndex(int)} instead. {@link #remove(int)} will be removed in 3.0 due to ambiguity. * * @throws ArrayIndexOutOfBoundsException if the index is out of range * ({@code index < 0 || index >= size()}) */ @Deprecated public int remove(int index) { int e = data[index]; // make copy of element to remove so it can be returned data[index] = data[--size]; // overwrite item to remove with last element data[size] = 0; // null last element, so gc can do its work return e; } /** * Removes the element at the specified position in this Bag. 
* <p> * It does this by overwriting it was last element then removing last * element * </p> * * @param index * the index of element to be removed * * @return element that was removed from the Bag * * @throws ArrayIndexOutOfBoundsException if the index is out of range * ({@code index < 0 || index >= size()}) */ public int removeIndex(int index) { int e = data[index]; // make copy of element to remove so it can be returned data[index] = data[--size]; // overwrite item to remove with last element data[size] = 0; // null last element, so gc can do its work return e; } /** * Find index of element. * * @param value * element to check * * @return index of element, or {@code -1} if there is no such index. */ public int indexOf(int value) { for(int i = 0; size > i; i++) { if(value == data[i]) { return i; } } return -1; } /** * Check if bag contains this element. * * @param value * element to check * * @return {@code true} if the bag contains this element */ public boolean contains(int value) { for(int i = 0; size > i; i++) { if(value == data[i]) { return true; } } return false; } /** * Returns the element at the specified position in Bag. * * @param index * index of the element to return * * @return the element at the specified position in bag * * @throws ArrayIndexOutOfBoundsException if the index is out of range * ({@code index < 0 || index >= size()}) */ public int get(int index) { if (index >= size) { String message = "tried accessing element " + index + "/" + size; throw new ArrayIndexOutOfBoundsException(message); } return data[index]; } /** * Returns the number of elements in this bag. * * @return the number of elements in this bag */ public int size() { return size; } /** * Returns the number of elements the bag can hold without growing. * * @return the number of elements the bag can hold without growing */ public int getCapacity() { return data.length; } /** * Checks if the internal storage supports this index. 
* * @param index * index to check * * @return {@code true} if the index is within bounds */ public boolean isIndexWithinBounds(int index) { return index < getCapacity(); } /** * Returns true if this bag contains no elements. * * @return {@code true} if this bag contains no elements */ public boolean isEmpty() { return size == 0; } /** * Adds the specified element to the end of this bag. * <p> * If required, it also increases the capacity of the bag. * </p> * * @param value * element to be added to this list */ public void add(int value) { // is size greater than capacity increase capacity if (size == data.length) grow(2 * data.length); data[size++] = value; } /** * Adds the specified elements to the end of this bag. * <p> * If required, it also increases the capacity of the bag. * </p> * * @param other * elements to be added to this list */ public void addAll(IntBag other) { for (int i = 0; i < other.size(); i++) { add(other.data[i]); } } /** * Set element at specified index in the bag. * * @param index * position of element * @param value * the element */ public void set(int index, int value) { if(index >= data.length) { grow(max((2 * data.length), index + 1)); } size = max(size, index + 1); data[index] = value; } private void grow(int newCapacity) { int[] oldData = data; data = new int[newCapacity]; System.arraycopy(oldData, 0, data, 0, oldData.length); } /** * Check if an item, if added at the given item will fit into the bag. * <p> * If not, the bag capacity will be increased to hold an item at the index. * </p> * * @param index * index to check */ public void ensureCapacity(int index) { if(index >= data.length) { grow(index + 1); } } /** * Removes all of the elements from this bag. * <p> * The bag will be empty after this call returns. * </p> */ public void clear() { Arrays.fill(data, 0, size, 0); size = 0; } /** * Returns this bag's underlying array. * <p> * Use with care. 
* </p> * * @return the underlying array * * @see IntBag#size() */ public int[] getData() { return data; } /** * Set the size. * <p> * This will not resize the bag, nor will it clean up contents beyond the * given size. Use with caution. * </p> * * @param size * the size to set */ public void setSize(int size) { this.size = size; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; IntBag intBag = (IntBag) o; if (size != intBag.size()) return false; for (int i = 0; size > i; i++) { if (data[i] != intBag.data[i]) return false; } return true; } @Override public int hashCode() { int hash = 0; for (int i = 0, s = size; s > i; i++) { hash = (127 * hash) + data[i]; } return hash; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("IntBag("); for (int i = 0; size > i; i++) { if (i > 0) sb.append(", "); sb.append(data[i]); } sb.append(')'); return sb.toString(); } }
/*
 * Copyright 2012 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.workbench.screens.guided.template.client.editor;

import java.util.function.Supplier;
import javax.enterprise.context.Dependent;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;

import com.google.gwt.event.shared.EventBus;
import com.google.gwt.event.shared.SimpleEventBus;
import com.google.gwt.user.client.ui.IsWidget;
import org.drools.workbench.models.guided.template.shared.TemplateModel;
import org.drools.workbench.screens.guided.template.client.resources.i18n.GuidedTemplateEditorConstants;
import org.drools.workbench.screens.guided.template.client.type.GuidedRuleTemplateResourceType;
import org.drools.workbench.screens.guided.template.model.GuidedTemplateEditorContent;
import org.drools.workbench.screens.guided.template.service.GuidedRuleTemplateEditorService;
import org.guvnor.common.services.shared.metadata.model.Metadata;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.kie.workbench.common.services.datamodel.model.PackageDataModelOracleBaselinePayload;
import org.kie.workbench.common.services.shared.rulename.RuleNamesService;
import org.kie.workbench.common.widgets.client.datamodel.AsyncPackageDataModelOracle;
import org.kie.workbench.common.widgets.client.datamodel.AsyncPackageDataModelOracleFactory;
import org.kie.workbench.common.widgets.client.datamodel.ImportAddedEvent;
import org.kie.workbench.common.widgets.client.datamodel.ImportRemovedEvent;
import org.kie.workbench.common.widgets.client.popups.validation.ValidationPopup;
import org.kie.workbench.common.widgets.configresource.client.widget.bound.ImportsWidgetPresenter;
import org.kie.workbench.common.widgets.metadata.client.KieEditor;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.client.annotations.WorkbenchEditor;
import org.uberfire.client.annotations.WorkbenchMenu;
import org.uberfire.client.annotations.WorkbenchPartTitle;
import org.uberfire.client.annotations.WorkbenchPartTitleDecoration;
import org.uberfire.client.annotations.WorkbenchPartView;
import org.uberfire.client.views.pfly.multipage.PageImpl;
import org.uberfire.ext.editor.commons.service.support.SupportsSaveAndRename;
import org.uberfire.ext.widgets.common.client.callbacks.CommandErrorCallback;
import org.uberfire.ext.widgets.common.client.callbacks.HasBusyIndicatorDefaultErrorCallback;
import org.uberfire.lifecycle.OnClose;
import org.uberfire.lifecycle.OnMayClose;
import org.uberfire.lifecycle.OnStartup;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.workbench.events.NotificationEvent;
import org.uberfire.workbench.model.menu.Menus;

/**
 * Workbench editor Presenter for Guided Rule Templates.
 * <p>
 * Loads a {@link TemplateModel} (plus its data-model oracle) asynchronously
 * via {@link GuidedRuleTemplateEditorService}, wires up the main editor view,
 * the template "Data" page and the Imports tab, and delegates the standard
 * save / validate / rename lifecycle to {@link KieEditor}.
 */
@Dependent
@WorkbenchEditor(identifier = "GuidedRuleTemplateEditor", supportedTypes = {GuidedRuleTemplateResourceType.class})
public class GuidedRuleTemplateEditorPresenter
        extends KieEditor<TemplateModel> {

    private GuidedRuleTemplateEditorView view;

    @Inject
    private GuidedRuleTemplateDataView dataView;

    @Inject
    private ImportsWidgetPresenter importsWidget;

    @Inject
    protected Caller<GuidedRuleTemplateEditorService> service;

    // NOTE(review): injected but not referenced in this class; presumably kept
    // for subclasses or historical reasons -- confirm before removing.
    @Inject
    private Event<NotificationEvent> notification;

    @Inject
    protected ValidationPopup validationPopup;

    @Inject
    private GuidedRuleTemplateResourceType type;

    @Inject
    private AsyncPackageDataModelOracleFactory oracleFactory;

    // Local bus shared between the main view and the Data page widgets.
    private EventBus eventBus = new SimpleEventBus();

    private TemplateModel model;
    private AsyncPackageDataModelOracle oracle;

    @Inject
    private Caller<RuleNamesService> ruleNamesService;

    @Inject
    public GuidedRuleTemplateEditorPresenter(final GuidedRuleTemplateEditorView baseView) {
        super(baseView);
        view = baseView;
    }

    /**
     * Standard Workbench start-up hook; initialises the base editor for the
     * given path/place.
     */
    @OnStartup
    public void onStartup(final ObservablePath path,
                          final PlaceRequest place) {
        super.init(path,
                   place,
                   type);
    }

    /**
     * Asynchronously (re)loads the editor content for the current path.
     */
    @Override
    protected void loadContent() {
        view.showLoading();
        getService().call(getModelSuccessCallback(),
                          getNoSuchFileExceptionErrorCallback()).loadContent(versionRecordManager.getCurrentPath());
    }

    @Override
    protected Supplier<TemplateModel> getContentSupplier() {
        return this::getModel;
    }

    @Override
    protected Caller<? extends SupportsSaveAndRename<TemplateModel, Metadata>> getSaveAndRenameServiceCaller() {
        return getService();
    }

    /**
     * Builds the callback that populates the editor pages once the content
     * (model + overview + data-model payload) arrives from the server.
     */
    private RemoteCallback<GuidedTemplateEditorContent> getModelSuccessCallback() {
        return new RemoteCallback<GuidedTemplateEditorContent>() {

            @Override
            public void callback(final GuidedTemplateEditorContent content) {
                //Path is set to null when the Editor is closed (which can happen before async calls complete).
                if (versionRecordManager.getCurrentPath() == null) {
                    return;
                }

                resetEditorPages(content.getOverview());
                addSourcePage();

                // The "Data" page is populated lazily, on first focus.
                addPage(new PageImpl(dataView,
                                     GuidedTemplateEditorConstants.INSTANCE.Data()) {
                    @Override
                    public void onFocus() {
                        dataView.setContent(model,
                                            oracle,
                                            eventBus,
                                            isReadOnly);
                    }

                    @Override
                    public void onLostFocus() {
                        // Nothing to do here
                    }
                });

                addImportsTab(importsWidget);

                model = content.getModel();
                final PackageDataModelOracleBaselinePayload dataModel = content.getDataModel();
                oracle = oracleFactory.makeAsyncPackageDataModelOracle(versionRecordManager.getCurrentPath(),
                                                                       model,
                                                                       dataModel);

                view.setContent(model,
                                oracle,
                                ruleNamesService,
                                eventBus,
                                isReadOnly);
                importsWidget.setContent(oracle,
                                         model.getImports(),
                                         isReadOnly);

                // Baseline hash used by mayClose() to detect unsaved changes.
                createOriginalHash(model);
                view.hideBusyIndicator();
            }
        };
    }

    /**
     * Refreshes the view when an import is added to the oracle backing this editor.
     */
    public void handleImportAddedEvent(@Observes ImportAddedEvent event) {
        if (!event.getDataModelOracle().equals(this.oracle)) {
            return;
        }
        view.refresh();
    }

    /**
     * Refreshes the view when an import is removed from the oracle backing this editor.
     */
    public void handleImportRemovedEvent(@Observes ImportRemovedEvent event) {
        if (!event.getDataModelOracle().equals(this.oracle)) {
            return;
        }
        view.refresh();
    }

    @Override
    protected void onValidate(final Command finished) {
        getService().call(
                validationPopup.getValidationCallback(finished),
                new CommandErrorCallback(finished)).validate(versionRecordManager.getCurrentPath(),
                                                             view.getContent());
    }

    @Override
    protected void save(String commitMessage) {
        getService().call(getSaveSuccessCallback(model.hashCode()),
                          new HasBusyIndicatorDefaultErrorCallback(view)).save(versionRecordManager.getCurrentPath(),
                                                                               view.getContent(),
                                                                               metadata,
                                                                               commitMessage);
    }

    @Override
    public void onSourceTabSelected() {
        getService().call(new RemoteCallback<String>() {
            @Override
            public void callback(String source) {
                updateSource(source);
            }
        }).toSource(versionRecordManager.getCurrentPath(),
                    model);
    }

    /**
     * Workbench close hook: releases version state and the per-editor oracle.
     */
    @OnClose
    public void onClose() {
        this.versionRecordManager.clear();
        this.oracleFactory.destroy(oracle);
    }

    @OnMayClose
    public boolean mayClose() {
        return super.mayClose(view.getContent());
    }

    @WorkbenchPartTitle
    public String getTitleText() {
        return super.getTitleText();
    }

    @WorkbenchPartTitleDecoration
    public IsWidget getTitle() {
        return super.getTitle();
    }

    @WorkbenchPartView
    public IsWidget getWidget() {
        return super.getWidget();
    }

    @WorkbenchMenu
    public Menus getMenus() {
        return menus;
    }

    /*
     * Getter due to test purposes
     */
    Caller<GuidedRuleTemplateEditorService> getService() {
        return service;
    }

    TemplateModel getModel() {
        return model;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.stress.journal;

import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;

import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.tests.util.UnitTestCase;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.JournalType;
import org.apache.activemq.artemis.tests.util.ServiceTestBase;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * A MultiThreadConsumerStressTest
 * <p/>
 * This test validates consuming / sending messages while compacting is working.
 * Producers out-pace consumers so messages accumulate; after a server restart
 * the surplus must still be recoverable from the (compacted) journal.
 */
public class MultiThreadConsumerStressTest extends ServiceTestBase {

   // Constants -----------------------------------------------------

   // Attributes ----------------------------------------------------

   final SimpleString ADDRESS = new SimpleString("SomeAddress");

   final SimpleString QUEUE = new SimpleString("SomeQueue");

   private ActiveMQServer server;

   private ClientSessionFactory sf;

   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();
      setupServer(JournalType.NIO);
   }

   /**
    * Runs N producers and N consumers concurrently against {@code QUEUE}
    * (consumers deliberately consume only 90% of what is produced, so
    * compaction has live data to move), then restarts the server and asserts
    * that exactly the surplus messages are recovered.
    */
   @Test
   public void testProduceAndConsume() throws Throwable {
      int numberOfConsumers = 5;
      // this test assumes numberOfConsumers == numberOfProducers
      int numberOfProducers = numberOfConsumers;
      int produceMessage = 10000;
      int commitIntervalProduce = 100;
      int consumeMessage = (int) (produceMessage * 0.9);
      int commitIntervalConsume = 100;

      // Seed a second durable queue so the journal has unrelated records too.
      ClientSession session = sf.createSession(false, false);
      session.createQueue("compact", "compact-queue", true);

      ClientProducer producer = session.createProducer("compact");

      for (int i = 0; i < 100; i++) {
         producer.send(session.createMessage(true));
      }

      session.commit();

      // Number of messages expected to be received after restart
      int numberOfMessagesExpected = (produceMessage - consumeMessage) * numberOfConsumers;

      CountDownLatch latchReady = new CountDownLatch(numberOfConsumers + numberOfProducers);

      CountDownLatch latchStart = new CountDownLatch(1);

      ArrayList<BaseThread> threads = new ArrayList<BaseThread>();

      ProducerThread[] prod = new ProducerThread[numberOfProducers];
      for (int i = 0; i < numberOfProducers; i++) {
         prod[i] = new ProducerThread(i, latchReady, latchStart, produceMessage, commitIntervalProduce);
         prod[i].start();
         threads.add(prod[i]);
      }

      ConsumerThread[] cons = new ConsumerThread[numberOfConsumers];

      for (int i = 0; i < numberOfConsumers; i++) {
         cons[i] = new ConsumerThread(i, latchReady, latchStart, consumeMessage, commitIntervalConsume);
         cons[i].start();
         threads.add(cons[i]);
      }

      // Release all workers at once, then propagate the first failure (if any).
      UnitTestCase.waitForLatch(latchReady);
      latchStart.countDown();

      for (BaseThread t : threads) {
         t.join();
         if (t.e != null) {
            throw t.e;
         }
      }

      // Restart and verify the un-consumed surplus survived the journal.
      server.stop();

      setupServer(JournalType.NIO);

      ClientSession sess = sf.createSession(true, true);

      ClientConsumer consumer = sess.createConsumer(QUEUE);

      sess.start();

      for (int i = 0; i < numberOfMessagesExpected; i++) {
         ClientMessage msg = consumer.receive(5000);
         Assert.assertNotNull(msg);
         if (i % 1000 == 0) {
            System.out.println("Received #" + i + "  on thread before end");
         }
         msg.acknowledge();
      }

      Assert.assertNull(consumer.receiveImmediate());

      sess.close();
   }

   /**
    * (Re)creates and starts a server with an aggressively-compacting journal,
    * ensures {@code QUEUE} exists, and rebuilds {@code sf} over in-VM transport.
    */
   private void setupServer(final JournalType journalType) throws Exception {
      Configuration config = createDefaultConfig(true)
         .setJournalType(journalType)
         .setJournalFileSize(ActiveMQDefaultConfiguration.getDefaultJournalFileSize())
         .setJournalMinFiles(ActiveMQDefaultConfiguration.getDefaultJournalMinFiles())
         // Low thresholds so compaction actually runs during the test.
         .setJournalCompactMinFiles(2)
         .setJournalCompactPercentage(50);

      server = createServer(true, config);

      server.start();

      ServerLocator locator = createNettyNonHALocator();
      locator.setBlockOnDurableSend(false);
      locator.setBlockOnNonDurableSend(false);
      locator.setBlockOnAcknowledge(false);

      sf = createSessionFactory(locator);

      ClientSession sess = sf.createSession();

      try {
         sess.createQueue(ADDRESS, QUEUE, true);
      } catch (Exception ignored) {
         // Queue already exists after a restart; that is expected.
      }

      sess.close();
      locator.close();
      locator = createInVMNonHALocator();
      sf = createSessionFactory(locator);
   }

   // Static --------------------------------------------------------

   // Constructors --------------------------------------------------

   // Public --------------------------------------------------------

   /**
    * Common state for producer/consumer workers: start-synchronisation
    * latches, the per-thread workload and the first failure ({@code e}).
    */
   class BaseThread extends Thread {

      Throwable e;

      final CountDownLatch latchReady;

      final CountDownLatch latchStart;

      final int numberOfMessages;

      final int commitInterval;

      BaseThread(final String name,
                 final CountDownLatch latchReady,
                 final CountDownLatch latchStart,
                 final int numberOfMessages,
                 final int commitInterval) {
         super(name);
         this.latchReady = latchReady;
         this.latchStart = latchStart;
         this.commitInterval = commitInterval;
         this.numberOfMessages = numberOfMessages;
      }
   }

   /** Sends {@code numberOfMessages} durable messages, committing periodically. */
   class ProducerThread extends BaseThread {

      ProducerThread(final int id,
                     final CountDownLatch latchReady,
                     final CountDownLatch latchStart,
                     final int numberOfMessages,
                     final int commitInterval) {
         super("ClientProducer:" + id, latchReady, latchStart, numberOfMessages, commitInterval);
      }

      @Override
      public void run() {
         ClientSession session = null;
         latchReady.countDown();
         try {
            UnitTestCase.waitForLatch(latchStart);
            session = sf.createSession(false, false);
            ClientProducer prod = session.createProducer(ADDRESS);
            for (int i = 0; i < numberOfMessages; i++) {
               if (i % commitInterval == 0) {
                  session.commit();
               }
               if (i % 1000 == 0) {
                  // System.out.println(Thread.currentThread().getName() + "::received #" + i);
               }
               ClientMessage msg = session.createMessage(true);
               prod.send(msg);
            }

            session.commit();

            System.out.println("Thread " + Thread.currentThread().getName() +
                                  " sent " +
                                  numberOfMessages +
                                  " messages");
         } catch (Throwable e) {
            e.printStackTrace();
            this.e = e;
         } finally {
            // Guard against session never having been created (e.g. interrupted
            // while waiting on the start latch).
            if (session != null) {
               try {
                  session.close();
               } catch (Throwable e) {
                  e.printStackTrace();
               }
            }
         }
      }
   }

   /** Receives and acknowledges {@code numberOfMessages}, committing periodically. */
   class ConsumerThread extends BaseThread {

      ConsumerThread(final int id,
                     final CountDownLatch latchReady,
                     final CountDownLatch latchStart,
                     final int numberOfMessages,
                     final int commitInterval) {
         super("ClientConsumer:" + id, latchReady, latchStart, numberOfMessages, commitInterval);
      }

      @Override
      public void run() {
         ClientSession session = null;
         latchReady.countDown();
         try {
            UnitTestCase.waitForLatch(latchStart);
            session = sf.createSession(false, false);
            session.start();
            ClientConsumer cons = session.createConsumer(QUEUE);
            for (int i = 0; i < numberOfMessages; i++) {
               ClientMessage msg = cons.receive(60 * 1000);
               // receive() returns null on timeout; fail with a clear message
               // instead of an opaque NullPointerException.
               if (msg == null) {
                  throw new IllegalStateException("Did not receive message " + i + " within the timeout");
               }
               msg.acknowledge();
               if (i % commitInterval == 0) {
                  session.commit();
               }
               if (i % 1000 == 0) {
                  // System.out.println(Thread.currentThread().getName() + "::sent #" + i);
               }
            }

            System.out.println("Thread " + Thread.currentThread().getName() +
                                  " received " +
                                  numberOfMessages +
                                  " messages");
            session.commit();
         } catch (Throwable e) {
            e.printStackTrace();
            this.e = e;
         } finally {
            if (session != null) {
               try {
                  session.close();
               } catch (Throwable e) {
                  // Do not clobber the original failure with a close() failure.
                  if (this.e == null) {
                     this.e = e;
                  }
               }
            }
         }
      }
   }

   // Package protected ---------------------------------------------

   // Protected -----------------------------------------------------

   // Private -------------------------------------------------------

   // Inner classes -------------------------------------------------

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package org.apache.geode.redis.internal.data.collections;

import static it.unimi.dsi.fastutil.HashCommon.mix;
import static org.apache.geode.internal.JvmSizeUtils.memoryOverhead;

import java.util.Collection;
import java.util.Random;

import it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet;

import org.apache.geode.annotations.VisibleForTesting;
import org.apache.geode.internal.size.Sizeable;

/**
 * An {@link ObjectOpenCustomHashSet} that additionally (a) tracks its own memory footprint
 * via {@link Sizeable}, and (b) supports a stateless, redis-style SCAN cursor over its
 * open-addressed (linear probing) backing array.
 */
public abstract class SizeableObjectOpenCustomHashSetWithCursor<E>
    extends ObjectOpenCustomHashSet<E> implements Sizeable {
  private static final long serialVersionUID = 9174920505089089517L;
  // Fixed per-instance overhead of this set object itself (excludes the backing
  // array and the elements, which are accounted for separately below).
  private static final int OPEN_HASH_SET_OVERHEAD =
      memoryOverhead(SizeableObjectOpenCustomHashSetWithCursor.class);

  // Running total of the per-element sizes reported by sizeElement();
  // maintained incrementally in add()/remove().
  private int memberOverhead;

  public SizeableObjectOpenCustomHashSetWithCursor(int expected, Strategy<? super E> strategy) {
    super(expected, strategy);
  }

  public SizeableObjectOpenCustomHashSetWithCursor(Strategy<? super E> strategy) {
    super(strategy);
  }

  public SizeableObjectOpenCustomHashSetWithCursor(Collection<? extends E> c,
      Strategy<? super E> strategy) {
    super(c, strategy);
  }

  @Override
  public boolean add(E e) {
    boolean added = super.add(e);
    if (added) {
      // Only count the element's size if it was actually inserted (no duplicate).
      memberOverhead += sizeElement(e);
    }
    return added;
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean remove(Object e) {
    boolean removed = super.remove(e);
    if (removed) {
      memberOverhead -= sizeElement((E) e);
    }
    return removed;
  }

  /*
   * Gets a random member given an index.
   * If member does not exist at that index, then goes to closest member that is right of it.
   */
  public E getRandomMemberFromBackingArray(Random rand) {
    final int backingArrayLength = key.length;
    E member;
    int index = rand.nextInt(backingArrayLength);
    // Linear-probe forward (wrapping at the end of the array) until a non-null
    // slot is found.
    // NOTE(review): this loop never terminates on an empty set -- confirm that
    // callers guarantee size > 0 before calling.
    while ((member = key[index]) == null) {
      ++index;
      if (index >= backingArrayLength) {
        index = 0;
      }
    }
    return member;
  }

  @Override
  public int getSizeInBytes() {
    // The object referenced by the "strategy" field is not sized
    // since it is usually a singleton instance.
    return OPEN_HASH_SET_OVERHEAD + memoryOverhead(key) + memberOverhead;
  }

  /**
   * Scan entries and pass them to the given consumer function, starting at the passed in
   * cursor. This method will scan until at least count entries are returned, or the entire
   * set has been scanned. Once the returned cursor is 0, the entire set is scanned.
   *
   * This method may emit more than *count* number of elements if there are hash collisions.
   *
   * @param cursor The cursor to start from. Should be 0 for the initial scan. Subsequent calls
   *        should use the cursor returned by the previous scan call.
   * @param count The number of elements to scan
   * @param consumer A function to pass the scanned members
   * @param privateData Some data to pass to the function, for example a set to collect elements in.
   *        This allows the function to be stateless.
   * @param <D> The type of the data passed to the function
   * @return The next cursor to scan from, or 0 if the scan has touched all elements.
   */
  public <D> int scan(int cursor, int count,
      SizeableObjectOpenCustomHashSetWithCursor.EntryConsumer<E, D> consumer, D privateData) {
    // Implementation notes
    //
    // This stateless scan cursor algorithm is based on the dictScan cursor
    // implementation from dict.c in redis. Please see the comments in that class for the full
    // details. That iteration algorithm was designed by Pieter Noordhuis.
    //
    // There is one wrinkle due to the fact that we are using a different type of hashtable here.
    // The parent class, ObjectOpenCustomHashSet, uses an open addressing with a linear
    // probe. What that means is that when there is a hash collision, instead of putting
    // a linked list of hash entries into a single hash bucket, this implementation simply
    // moves on to the next element to the right in the array and tries to put the inserted
    // object there, continuing until it finds a null slot.
    //
    // So in order to use the redis cursor algorithm, our scan needs to probe ahead to
    // subsequent positions to find any hash entries that match the position we are scanning.
    // This is logically equivalent to iterating over the linked list in a hashtable bucket
    // for a redis style closed addressing hashtable.
    //
    do {
      // Emit all the entries at the cursor. This means looking forward in the hash
      // table for any non-null entries that might hash to the current cursor and emitting
      // those as well. This may even wrap around to the front of the hashtable.
      int position = cursor;
      while (key[position & mask] != null) {
        E currentElement = key[position & mask];
        if (elementHashesTo(currentElement, position, cursor & mask)) {
          consumer.consume(privateData, currentElement);
          count--;
        }
        position++;
      }

      // Increment the reversed cursor. Iterating in reversed-bit order keeps the
      // cursor valid across table resizes (see the dict.c commentary referenced above).
      cursor |= ~mask;
      cursor = rev(cursor);
      cursor++;
      cursor = rev(cursor);

    } while (count > 0 && cursor != 0);

    return cursor;
  }

  /**
   * reverse the bits in a cursor.
   *
   * Package scope to allow for unit testing to make sure we don't have some silly
   * java signed int issues
   *
   * @param value the value to reverse
   * @return the reversed bits.
   */
  static int rev(int value) {
    // This implementation is also based on dict.c from redis, which was originally from
    // http://graphics.stanford.edu/~seander/bithacks.html#ReverseParallel
    int s = 32;
    int mask = ~0;
    while ((s >>>= 1) > 0) {
      mask ^= (mask << s);
      value = ((value >>> s) & mask) | ((value << s) & ~mask);
    }
    return value;
  }

  /** Callback invoked by {@link #scan} for each emitted element. */
  public interface EntryConsumer<E, D> {
    void consume(D privateData, E element);
  }

  /**
   * Check to see if given element hashes to the expected hash.
   *
   * @param currentElement The element to check
   * @param currentPosition The position of the element in the element[] array
   * @param expectedHash - the expected hash of the element.
   */
  private boolean elementHashesTo(E currentElement, int currentPosition, int expectedHash) {
    // There is a small optimization here. If the previous element
    // is null, we know that the element at position does hash to the expected
    // hash because it is not here as a result of a collision at some previous position.
    E previousElement = key[(currentPosition - 1) & mask];
    return previousElement == null || hash(currentElement) == expectedHash;
  }

  // Masked, mixed hash of an element -- i.e. the slot it would occupy absent collisions.
  @VisibleForTesting
  public int hash(E element) {
    return mix(strategy().hashCode(element)) & mask;
  }

  // Subclasses report the memory footprint of a single element; used to
  // maintain memberOverhead incrementally.
  protected abstract int sizeElement(E element);
}
package com.torch2424.battlequest;

import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.Typeface;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.IBinder;
import android.view.View;
import android.view.WindowManager;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.Animation.AnimationListener;
import android.view.animation.AnimationUtils;
import android.widget.RelativeLayout;
import android.widget.TextView;

import com.torch2424.battlequest.BGMusic.MusicBinder;
import com.torch2424.trustinheartdemo.R;

/**
 * Title screen of the game: plays looping background music through the bound
 * BGMusic service, animates the "tap to begin" prompt, and routes the player
 * either to save-file selection or to the first-time welcome flow.
 * <p>
 * This activity is also reused as the app's exit point: launching it with the
 * "EXIT" intent extra stops the music service and finishes.
 */
public class FirstScreen extends Activity {

    //declaring music out here to be accessed everywhere
    BGMusic bgMusic;
    // NOTE(review): never assigned in this class -- confirm whether it is dead state.
    boolean musicBound;
    Intent playIntent;

    //for onresume boolean to check if music is paused
    boolean musicPaused;

    //to fix double pause requests (set when we pause deliberately before
    //starting another activity, so onPause() doesn't pause a second time)
    boolean noPause;

    //get our textviews
    TextView tapToBegin;
    TextView gameBy;
    TextView noComply;
    TextView year;

    //text animation (animation1 fades in, animation2 fades out; they chain to loop)
    Animation animation1;
    Animation animation2;

    //exit boolean
    boolean exit;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        //check if we called start screen just to close app before anything else
        Intent intent = getIntent();
        exit = intent.getBooleanExtra("EXIT", false);
        if(exit) {
            //stop the music first
            playIntent = new Intent(this, BGMusic.class);
            stopService(playIntent);
            finish();
            System.gc();
        }
        // NOTE(review): no early return after finish() -- the view setup and
        // service binding below still run while the activity is finishing.
        // Confirm this is intended.
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_first_screen);

        //setting fonts
        setFont();

        //set up the music service
        playMusic();

        //acquire wakelock: keep the screen on while on the title screen
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    }

    //connect to the service; onServiceConnected fires once bindService() completes
    ServiceConnection musicConnection = new ServiceConnection() {

        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            MusicBinder binder = (MusicBinder)service;
            //get service
            bgMusic = binder.getService();
            //dont play the song if we are exting
            if(exit == false) {
                bgMusic.playSong(R.raw.rrppgg_theme);
            }
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            // Nothing to do; bgMusic is re-obtained on the next connect.
        }
    };

    /** Starts and binds the background-music service. */
    public void playMusic() {
        //make sure only affect media playback not ringer
        setVolumeControlStream(AudioManager.STREAM_MUSIC);

        //for pausing
        noPause = false;

        // Started AND bound: startService keeps it alive across unbind,
        // bindService gives us the BGMusic instance via musicConnection.
        playIntent = new Intent(this, BGMusic.class);
        getApplicationContext().startService(playIntent);
        bindService(playIntent, musicConnection, Context.BIND_AUTO_CREATE);
    }

    /** Looks up the title-screen TextViews and applies the cached game font. */
    public void setFont() {
        //get our views
        tapToBegin = (TextView) findViewById(R.id.tapToBegin);
        gameBy = (TextView) findViewById(R.id.gameBy);
        year = (TextView) findViewById(R.id.year);
        noComply = (TextView) findViewById(R.id.noComply);

        //get font
        Typeface tf = FontCache.get(getApplicationContext(), "font");

        //set fonts
        tapToBegin.setTypeface(tf);
        gameBy.setTypeface(tf);
        year.setTypeface(tf);
        noComply.setTypeface(tf);
    }

    /**
     * Starts the endless fade-in/fade-out blink of the "tap to begin" prompt.
     * The two animations chain to each other via their end listeners.
     */
    public void startAnimating() {
        //gotten from stack to animate our textview modified for me
        //http://stackoverflow.com/questions/3298330/android-alpha-animation-fadein-fadeout-with-delays
        animation1 = new AlphaAnimation(0.0f, 1.0f);
        animation1.setDuration(250);
        animation1.setStartOffset(500);

        //animation1 AnimationListener
        animation1.setAnimationListener(new AnimationListener(){

            @Override
            public void onAnimationEnd(Animation arg0) {
                // start animation2 when animation1 ends (continue)
                tapToBegin.startAnimation(animation2);
            }

            @Override
            public void onAnimationRepeat(Animation arg0) {
                // Not used.
            }

            @Override
            public void onAnimationStart(Animation arg0) {
                // Not used.
            }
        });

        animation2 = new AlphaAnimation(1.0f, 0.0f);
        animation2.setDuration(250);
        animation2.setStartOffset(1000);

        //animation2 AnimationListener
        animation2.setAnimationListener(new AnimationListener(){

            @Override
            public void onAnimationEnd(Animation arg0) {
                // start animation1 when animation2 ends (repeat)
                tapToBegin.startAnimation(animation1);
            }

            @Override
            public void onAnimationRepeat(Animation arg0) {
                // Not used.
            }

            @Override
            public void onAnimationStart(Animation arg0) {
                // Not used.
            }
        });

        tapToBegin.startAnimation(animation1);
    }

    //function to start the game (wired as the layout's onClick handler)
    public void begin(View view) {
        //if the music is still fading in, dont let the user go to the next activity
        if(bgMusic.fading) {
            // Deliberately ignore taps while the fade is in progress.
        }
        else {
            //however we want to check if there are any save files foe the game yet to determine if
            //this is their first time playing
            String[] files = this.getFilesDir().list();
            if(files.length >= 1) {
                //start choose your character
                bgMusic.pauseSong();
                //dont pause twice
                noPause = true;
                //since overowrld doesnt play song do it for it
                bgMusic.playSong(R.raw.character);
                Intent fight = new Intent(this, SaveFileSelect.class);
                //add this flag to remove all previous activities
                fight.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                startActivity(fight);
                //dont finish because we use this activity to close everything later
                //finish();
            }
            else {
                // First run: no save files yet, show the welcome flow instead.
                bgMusic.pauseSong();
                //dont pause twice
                noPause = true;
                //since overowrld doesnt play song do it for it
                bgMusic.playSong(R.raw.character);
                Intent edit = new Intent(this, WelcomeRRPPGG.class);
                edit.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                startActivity(edit);
            }
        }
    }

    //on pause and on resume to pause and play music
    @Override
    public void onPause() {
        super.onPause();
        //need to check if it is not already pausing from when we request it to pause
        //from switching activities
        if(noPause == false) {
            // bgMusic can still be null if the service has not connected yet.
            if(bgMusic != null) {
                bgMusic.pauseSong();
                musicPaused = true;
            }
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        if(musicPaused) {
            bgMusic.resumeSong();
        }
        //animate the textview
        startAnimating();
        //animate the layout
        getWindow().setWindowAnimations(R.anim.layout_fadein);
        Animation anim = AnimationUtils.loadAnimation(this,R.anim.layout_fadein);
        // 'body' is root layout id which is for MainActivity
        findViewById(R.id.container).startAnimation(anim);
    }

    @Override
    public void onBackPressed() {
        //close the app completely
        bgMusic.stopSong();
        stopService(playIntent);
        finish();
    }

    //need to add this to avoid service connection leaks
    @Override
    public void onDestroy() {
        super.onDestroy();
        unbindService(musicConnection);
        // Release view/drawable references to help the GC reclaim the layout.
        Unbind.unbindDrawables((RelativeLayout) findViewById(R.id.container));
        System.gc();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.examples.pi.math;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.examples.pi.Container;
import org.apache.hadoop.examples.pi.Util;

/**
 * Bellard's BBP-type Pi formula
 * 1/2^6 \sum_{n=0}^\infty (-1)^n/2^{10n}
 * (-2^5/(4n+1) -1/(4n+3) +2^8/(10n+1) -2^6/(10n+3) -2^2/(10n+5)
 *  -2^2/(10n+7) +1/(10n+9))
 *
 * References:
 *
 * [1] David H. Bailey, Peter B. Borwein and Simon Plouffe.  On the Rapid
 *     Computation of Various Polylogarithmic Constants.
 *     Math. Comp., 66:903-913, 1996.
 *
 * [2] Fabrice Bellard.  A new formula to compute the n'th binary digit of pi,
 *     1997.  Available at http://fabrice.bellard.free.fr/pi .
 */
public final class Bellard {
  /**
   * Parameters for the sums.
   * Each constant describes one of the series in Bellard's formula: the sign
   * of the series, its starting denominator term j, the denominator step
   * (deltaN), the exponent step (deltaE) and the exponent offset (offsetE).
   * Constants built from another Parameter (e.g. P8_5(P8_1)) represent the
   * "odd half" of the same series: flipped sign, denominator and exponent
   * shifted by half a step.
   */
  public enum Parameter {
    // \sum_{k=0}^\infty (-1)^{k+1}( 2^{d-10k-1}/(4k+1) + 2^{d-10k-6}/(4k+3) )
    P8_1(false, 1, 8, -1),
    P8_3(false, 3, 8, -6),
    P8_5(P8_1),
    P8_7(P8_3),

    /*
     * 2^d\sum_{k=0}^\infty (-1)^k( 2^{ 2-10k} / (10k + 1)
     *                             -2^{ -10k} / (10k + 3)
     *                             -2^{-4-10k} / (10k + 5)
     *                             -2^{-4-10k} / (10k + 7)
     *                             +2^{-6-10k} / (10k + 9) )
     */
    P20_21(true , 1, 20, 2),
    P20_3(false, 3, 20, 0),
    P20_5(false, 5, 20, -4),
    P20_7(false, 7, 20, -4),
    P20_9(true , 9, 20, -6),

    P20_11(P20_21),
    P20_13(P20_3),
    P20_15(P20_5),
    P20_17(P20_7),
    P20_19(P20_9);

    // Sign of the series' contribution to the overall sum.
    final boolean isplus;
    // Starting denominator term and per-step increments for n and the exponent e.
    final long j;
    final int deltaN;
    final int deltaE;
    final int offsetE;

    private Parameter(boolean isplus, long j, int deltaN, int offsetE) {
      this.isplus = isplus;
      this.j = j;
      this.deltaN = deltaN;
      // deltaE is fixed: the formula's 2^{-10n} factor over a doubled step.
      this.deltaE = -20;
      this.offsetE = offsetE;
    }

    // Derive the complementary (half-step shifted, sign-flipped) parameter.
    private Parameter(Parameter p) {
      this.isplus = !p.isplus;
      this.j = p.j + (p.deltaN >> 1);
      this.deltaN = p.deltaN;
      this.deltaE = p.deltaE;
      this.offsetE = p.offsetE + (p.deltaE >> 1);
    }

    /** Get the Parameter represented by the String */
    public static Parameter get(String s) {
      s = s.trim();
      if (s.charAt(0) == 'P')
        s = s.substring(1);
      // Split on non-digits: accepts forms like "P8_1", "8_1", "8 1", etc.
      final String[] parts = s.split("\\D+");
      if (parts.length >= 2) {
        final String name = "P" + parts[0] + "_" + parts[1];
        for(Parameter p : values())
          if (p.name().equals(name))
            return p;
      }
      throw new IllegalArgumentException("s=" + s
          + ", parts=" + Arrays.asList(parts));
    }
  }

  /**
   * The sums in the Bellard's formula.
   * A Sum is one series evaluated modulo 1: a partitioned main summation
   * (sigma, split into parts for distributed computation) plus an
   * analytically-evaluated tail.
   */
  public static class Sum implements Container<Summation>, Iterable<Summation> {
    // Bits of precision kept when deciding where the tail may be truncated.
    private static final long ACCURACY_BIT = 50;

    private final Parameter parameter;
    private final Summation sigma;
    private final Summation[] parts;
    private final Tail tail;

    /** Constructor */
    private <T extends Container<Summation>> Sum(long b, Parameter p, int nParts, List<T> existing) {
      if (b < 0)
        throw new IllegalArgumentException("b = " + b + " < 0");
      if (nParts < 1)
        throw new IllegalArgumentException("nParts = " + nParts + " < 1");

      // Skip the first term when it would have a non-negative exponent
      // (it is then folded into the starting point instead).
      final long i = p.j == 1 && p.offsetE >= 0? 1 : 0;
      final long e = b + i*p.deltaE + p.offsetE;
      final long n = i*p.deltaN + p.j;

      this.parameter = p;
      this.sigma = new Summation(n, p.deltaN, e, p.deltaE, 0);
      this.parts = partition(sigma, nParts, existing);
      this.tail = new Tail(n, e);
    }

    // Split sigma into nParts pieces, reusing any already-computed pieces in
    // "existing" and partitioning only the remaining (uncomputed) ranges.
    private static <T extends Container<Summation>> Summation[] partition(
        Summation sigma, int nParts, List<T> existing) {
      final List<Summation> parts = new ArrayList<Summation>();
      if (existing == null || existing.isEmpty())
        parts.addAll(Arrays.asList(sigma.partition(nParts)));
      else {
        final long stepsPerPart = sigma.getSteps()/nParts;
        final List<Summation> remaining = sigma.remainingTerms(existing);

        for(Summation s : remaining) {
          // Partition each remaining range proportionally to its step count.
          final int n = (int)((s.getSteps() - 1)/stepsPerPart) + 1;
          parts.addAll(Arrays.asList(s.partition(n)));
        }

        for(Container<Summation> c : existing)
          parts.add(c.getElement());
        Collections.sort(parts);
      }
      return parts.toArray(new Summation[parts.size()]);
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
      int n = 0;
      for(Summation s : parts)
        if (s.getValue() == null)
          n++;
      return getClass().getSimpleName() + "{" + parameter + ": " + sigma
          + ", remaining=" + n + "}";
    }

    /** Set the value of sigma */
    public void setValue(Summation s) {
      if (s.getValue() == null)
        throw new IllegalArgumentException("s.getValue()"
            + "\n  sigma=" + sigma
            + "\n  s    =" + s);
      // s must describe exactly the same summation range as sigma.
      if (!s.contains(sigma) || !sigma.contains(s))
        throw new IllegalArgumentException("!s.contains(sigma) || !sigma.contains(s)"
            + "\n  sigma=" + sigma
            + "\n  s    =" + s);
      sigma.setValue(s.getValue());
    }

    /** get the value of sigma */
    public double getValue() {
      if (sigma.getValue() == null) {
        // Lazily compute by accumulating all parts modulo 1.
        double d = 0;
        for(int i = 0; i < parts.length; i++)
          d = Modular.addMod(d, parts[i].compute());
        sigma.setValue(d);
      }

      final double s = Modular.addMod(sigma.getValue(), tail.compute());
      return parameter.isplus? s: -s;
    }

    /** {@inheritDoc} */
    @Override
    public Summation getElement() {
      if (sigma.getValue() == null) {
        // Fold in the already-computed parts; only set sigma's value if
        // every part has been computed.
        int i = 0;
        double d = 0;
        for(; i < parts.length && parts[i].getValue() != null; i++)
          d = Modular.addMod(d, parts[i].getValue());
        if (i == parts.length)
          sigma.setValue(d);
      }
      return sigma;
    }

    /**
     * The sum tail: the infinite remainder of the series past sigma's range,
     * summed directly in floating point until terms drop below the accuracy
     * threshold.
     */
    private class Tail {
      private long n;
      private long e;

      private Tail(long n, long e) {
        this.n = n;
        this.e = e;
      }

      private double compute() {
        if (e > 0) {
          // Advance (n, e) forward until the exponent becomes non-positive,
          // keeping the term sequence aligned with sigma's step sizes.
          final long edelta = -sigma.E.delta;
          long q = e / edelta;
          long r = e % edelta;
          if (r == 0) {
            e = 0;
            n += q * sigma.N.delta;
          } else {
            e = edelta - r;
            n += (q + 1)*sigma.N.delta;
          }
        } else if (e < 0)
          e = -e;

        double s = 0;
        for(;; e -= sigma.E.delta) {
          // Stop once the term 1/(n*2^e) is below the accuracy threshold.
          if (e > ACCURACY_BIT || (1L << (ACCURACY_BIT - e)) < n)
            return s;

          s += 1.0 / (n << e);
          if (s >= 1) s--;  // keep the partial sum modulo 1

          n += sigma.N.delta;
        }
      }
    }

    /** {@inheritDoc} */
    @Override
    public Iterator<Summation> iterator() {
      return new Iterator<Summation>() {
        private int i = 0;

        /** {@inheritDoc} */
        @Override
        public boolean hasNext() {return i < parts.length;}

        /** {@inheritDoc} */
        @Override
        public Summation next() {return parts[i++];}

        /** Unsupported */
        @Override
        public void remove() {throw new UnsupportedOperationException();}
      };
    }
  }

  /** Get the sums for the Bellard formula. */
  public static <T extends Container<Summation>> Map<Parameter, Sum> getSums(
      long b, int partsPerSum, Map<Parameter, List<T>> existing) {
    final Map<Parameter, Sum> sums = new TreeMap<Parameter, Sum>();
    for(Parameter p : Parameter.values()) {
      final Sum s = new Sum(b, p, partsPerSum, existing.get(p));
      Util.out.println("put " + s);
      sums.put(p, s);
    }
    return sums;
  }

  /** Compute bits of Pi from the results.
*/ public static <T extends Container<Summation>> double computePi( final long b, Map<Parameter, T> results) { if (results.size() != Parameter.values().length) throw new IllegalArgumentException("m.size() != Parameter.values().length" + ", m.size()=" + results.size() + "\n m=" + results); double pi = 0; for(Parameter p : Parameter.values()) { final Summation sigma = results.get(p).getElement(); final Sum s = new Sum(b, p, 1, null); s.setValue(sigma); pi = Modular.addMod(pi, s.getValue()); } return pi; } /** Compute bits of Pi in the local machine. */ public static double computePi(final long b) { double pi = 0; for(Parameter p : Parameter.values()) pi = Modular.addMod(pi, new Sum(b, p, 1, null).getValue()); return pi; } /** Estimate the number of terms. */ public static long bit2terms(long b) { return 7*(b/10); } private static void computePi(Util.Timer t, long b) { t.tick(Util.pi2string(computePi(b), bit2terms(b))); } /** main */ public static void main(String[] args) throws IOException { final Util.Timer t = new Util.Timer(false); computePi(t, 0); computePi(t, 1); computePi(t, 2); computePi(t, 3); computePi(t, 4); Util.printBitSkipped(1008); computePi(t, 1008); computePi(t, 1012); long b = 10; for(int i = 0; i < 7; i++) { Util.printBitSkipped(b); computePi(t, b - 4); computePi(t, b); computePi(t, b + 4); b *= 10; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed; import java.io.Externalizable; import java.nio.ByteBuffer; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.internal.GridDirectCollection; import org.apache.ignite.internal.GridDirectMap; import org.apache.ignite.internal.GridDirectTransient; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxEntry; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxState; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxStateAware; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.UUIDCollectionMessage; 
import org.apache.ignite.internal.util.tostring.GridToStringBuilder;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.Nullable;

/**
 * Transaction prepare request for optimistic and eventually consistent
 * transactions.
 */
public class GridDistributedTxPrepareRequest extends GridDistributedBaseMessage implements IgniteTxStateAware {
    /** */
    private static final long serialVersionUID = 0L;

    /** Flag mask: caller needs the operation's return value. */
    private static final int NEED_RETURN_VALUE_FLAG_MASK = 0x01;

    /** Flag mask: invalidate entries instead of updating. */
    private static final int INVALIDATE_FLAG_MASK = 0x02;

    /** Flag mask: one-phase commit. */
    private static final int ONE_PHASE_COMMIT_FLAG_MASK = 0x04;

    /** Flag mask: last prepare request for the node. */
    private static final int LAST_REQ_FLAG_MASK = 0x08;

    /** Flag mask: system (internal) transaction. */
    private static final int SYSTEM_TX_FLAG_MASK = 0x10;

    /** Flag mask: transaction writes through to the cache store. */
    public static final int STORE_WRITE_THROUGH_FLAG_MASK = 0x20;

    /** Flag mask: request originates from a query update. */
    public static final int QUERY_UPDATE_FLAG_MASK = 0x40;

    /** Collection to message converter. */
    private static final C1<Collection<UUID>, UUIDCollectionMessage> COL_TO_MSG = new C1<Collection<UUID>, UUIDCollectionMessage>() {
        @Override public UUIDCollectionMessage apply(Collection<UUID> uuids) {
            return new UUIDCollectionMessage(uuids);
        }
    };

    /** Message to collection converter. */
    private static final C1<UUIDCollectionMessage, Collection<UUID>> MSG_TO_COL = new C1<UUIDCollectionMessage, Collection<UUID>>() {
        @Override public Collection<UUID> apply(UUIDCollectionMessage msg) {
            return msg.uuids();
        }
    };

    /** Thread ID. */
    @GridToStringInclude
    private long threadId;

    /** Transaction concurrency. */
    @GridToStringInclude
    private TransactionConcurrency concurrency;

    /** Transaction isolation. */
    @GridToStringInclude
    private TransactionIsolation isolation;

    /** Commit version for EC transactions. */
    @GridToStringInclude
    private GridCacheVersion writeVer;

    /** Transaction timeout. */
    @GridToStringInclude
    private long timeout;

    /** Transaction read set. */
    @GridToStringInclude
    @GridDirectCollection(IgniteTxEntry.class)
    private Collection<IgniteTxEntry> reads;

    /** Transaction write entries. */
    @GridToStringInclude
    @GridDirectCollection(IgniteTxEntry.class)
    private Collection<IgniteTxEntry> writes;

    /** DHT versions to verify. Transient: marshalled as parallel key/value collections below. */
    @GridToStringInclude
    @GridDirectTransient
    private Map<IgniteTxKey, GridCacheVersion> dhtVers;

    /** Keys of {@link #dhtVers}, in iteration order matching {@link #dhtVerVals}. */
    @GridDirectCollection(IgniteTxKey.class)
    private Collection<IgniteTxKey> dhtVerKeys;

    /** Values of {@link #dhtVers}, in iteration order matching {@link #dhtVerKeys}. */
    @GridDirectCollection(GridCacheVersion.class)
    private Collection<GridCacheVersion> dhtVerVals;

    /** Expected transaction size. */
    private int txSize;

    /** Transaction nodes mapping (primary node -> related backup nodes). */
    @GridDirectTransient
    private Map<UUID, Collection<UUID>> txNodes;

    /** Tx nodes direct marshallable message. */
    @GridDirectMap(keyType = UUID.class, valueType = UUIDCollectionMessage.class)
    private Map<UUID, UUIDCollectionMessage> txNodesMsg;

    /** IO policy. */
    private byte plc;

    /** Transient TX state. */
    @GridDirectTransient
    private IgniteTxState txState;

    /** Bit set of *_FLAG_MASK values; rendered symbolically in {@link #toString()}. */
    @GridToStringExclude
    private byte flags;

    /**
     * Required by {@link Externalizable}.
     */
    public GridDistributedTxPrepareRequest() {
        /* No-op. */
    }

    /**
     * @param tx Cache transaction.
     * @param timeout Transactions timeout.
     * @param reads Read entries.
     * @param writes Write entries.
     * @param txNodes Transaction nodes mapping.
     * @param retVal Return value flag.
     * @param last Last request flag.
     * @param onePhaseCommit One phase commit flag.
     * @param addDepInfo Deployment info flag.
     */
    public GridDistributedTxPrepareRequest(
        IgniteInternalTx tx,
        long timeout,
        @Nullable Collection<IgniteTxEntry> reads,
        Collection<IgniteTxEntry> writes,
        Map<UUID, Collection<UUID>> txNodes,
        boolean retVal,
        boolean last,
        boolean onePhaseCommit,
        boolean addDepInfo
    ) {
        super(tx.xidVersion(), 0, addDepInfo);

        writeVer = tx.writeVersion();
        threadId = tx.threadId();
        concurrency = tx.concurrency();
        isolation = tx.isolation();
        txSize = tx.size();
        plc = tx.ioPolicy();

        this.timeout = timeout;
        this.reads = reads;
        this.writes = writes;
        this.txNodes = txNodes;

        setFlag(tx.system(), SYSTEM_TX_FLAG_MASK);
        setFlag(retVal, NEED_RETURN_VALUE_FLAG_MASK);
        setFlag(tx.isInvalidate(), INVALIDATE_FLAG_MASK);
        setFlag(onePhaseCommit, ONE_PHASE_COMMIT_FLAG_MASK);
        setFlag(last, LAST_REQ_FLAG_MASK);
    }

    /**
     * @return Flag indicating whether transaction needs return value.
     */
    public final boolean needReturnValue() {
        return isFlag(NEED_RETURN_VALUE_FLAG_MASK);
    }

    /**
     * @param retVal Need return value.
     */
    public final void needReturnValue(boolean retVal) {
        setFlag(retVal, NEED_RETURN_VALUE_FLAG_MASK);
    }

    /**
     * @return Transaction nodes mapping.
     */
    public Map<UUID, Collection<UUID>> transactionNodes() {
        return txNodes;
    }

    /**
     * @return System flag.
     */
    public final boolean system() {
        return isFlag(SYSTEM_TX_FLAG_MASK);
    }

    /**
     * @return Flag indicating whether transaction use cache store.
     */
    public boolean storeWriteThrough() {
        return (flags & STORE_WRITE_THROUGH_FLAG_MASK) != 0;
    }

    /**
     * @param storeWriteThrough Store write through value.
     */
    public void storeWriteThrough(boolean storeWriteThrough) {
        if (storeWriteThrough)
            flags = (byte)(flags | STORE_WRITE_THROUGH_FLAG_MASK);
        else
            flags &= ~STORE_WRITE_THROUGH_FLAG_MASK;
    }

    /**
     *
     * @return Flag indicating whether it is a query update.
     */
    public boolean queryUpdate() {
        return (flags & QUERY_UPDATE_FLAG_MASK) != 0;
    }

    /**
     *
     * @param queryUpdate Query update value.
     */
    public void queryUpdate(boolean queryUpdate) {
        if (queryUpdate)
            flags = (byte)(flags | QUERY_UPDATE_FLAG_MASK);
        else
            flags &= ~QUERY_UPDATE_FLAG_MASK;
    }

    /**
     * @return IO policy.
     */
    public byte policy() {
        return plc;
    }

    /**
     * Adds version to be verified on remote node.
     *
     * @param key Key for which version is verified.
     * @param dhtVer DHT version to check.
     */
    public void addDhtVersion(IgniteTxKey key, @Nullable GridCacheVersion dhtVer) {
        if (dhtVers == null)
            dhtVers = new HashMap<>();

        dhtVers.put(key, dhtVer);
    }

    /**
     * @return Map of versions to be verified.
     */
    public Map<IgniteTxKey, GridCacheVersion> dhtVersions() {
        return dhtVers == null ? Collections.<IgniteTxKey, GridCacheVersion>emptyMap() : dhtVers;
    }

    /**
     * @return Thread ID.
     */
    public long threadId() {
        return threadId;
    }

    /**
     * @return Commit version.
     */
    public GridCacheVersion writeVersion() {
        return writeVer;
    }

    /**
     * @return Invalidate flag.
     */
    public boolean isInvalidate() {
        return isFlag(INVALIDATE_FLAG_MASK);
    }

    /**
     * @return Transaction timeout.
     */
    public long timeout() {
        return timeout;
    }

    /**
     * @return Concurrency.
     */
    public TransactionConcurrency concurrency() {
        return concurrency;
    }

    /**
     * @return Isolation level.
     */
    public TransactionIsolation isolation() {
        return isolation;
    }

    /**
     * @return Read set.
     */
    public Collection<IgniteTxEntry> reads() {
        return reads;
    }

    /**
     * @return Write entries.
     */
    public Collection<IgniteTxEntry> writes() {
        return writes;
    }

    /**
     * @param reads Reads.
     */
    protected void reads(Collection<IgniteTxEntry> reads) {
        this.reads = reads;
    }

    /**
     * @param writes Writes.
     */
    protected void writes(Collection<IgniteTxEntry> writes) {
        this.writes = writes;
    }

    /**
     * @return Expected transaction size.
     */
    public int txSize() {
        return txSize;
    }

    /**
     * @return One phase commit flag.
     */
    public boolean onePhaseCommit() {
        return isFlag(ONE_PHASE_COMMIT_FLAG_MASK);
    }

    /**
     * @return {@code True} if this is last prepare request for node.
     */
    public boolean last() {
        return isFlag(LAST_REQ_FLAG_MASK);
    }

    /** {@inheritDoc} */
    @Override public IgniteTxState txState() {
        return txState;
    }

    /** {@inheritDoc} */
    @Override public void txState(IgniteTxState txState) {
        this.txState = txState;
    }

    /** {@inheritDoc}
     * @param ctx*/
    @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
        super.prepareMarshal(ctx);

        if (writes != null)
            marshalTx(writes, ctx);

        if (reads != null)
            marshalTx(reads, ctx);

        // Flatten the transient dhtVers map into parallel collections the
        // direct-marshalling framework can serialize.
        if (dhtVers != null && dhtVerKeys == null) {
            for (IgniteTxKey key : dhtVers.keySet()) {
                GridCacheContext cctx = ctx.cacheContext(key.cacheId());

                key.prepareMarshal(cctx);
            }

            dhtVerKeys = dhtVers.keySet();
            dhtVerVals = dhtVers.values();
        }

        if (txNodesMsg == null)
            txNodesMsg = F.viewReadOnly(txNodes, COL_TO_MSG);
    }

    /** {@inheritDoc} */
    @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        super.finishUnmarshal(ctx, ldr);

        if (writes != null)
            unmarshalTx(writes, false, ctx, ldr);

        if (reads != null)
            unmarshalTx(reads, false, ctx, ldr);

        // Rebuild the dhtVers map from the parallel key/value collections;
        // both preserve the original iteration order, so zip them together.
        if (dhtVerKeys != null && dhtVers == null) {
            assert dhtVerVals != null;
            assert dhtVerKeys.size() == dhtVerVals.size();

            Iterator<IgniteTxKey> keyIt = dhtVerKeys.iterator();
            Iterator<GridCacheVersion> verIt = dhtVerVals.iterator();

            dhtVers = U.newHashMap(dhtVerKeys.size());

            while (keyIt.hasNext()) {
                IgniteTxKey key = keyIt.next();

                key.finishUnmarshal(ctx.cacheContext(key.cacheId()), ldr);

                dhtVers.put(key, verIt.next());
            }
        }

        if (txNodesMsg != null)
            txNodes = F.viewReadOnly(txNodesMsg, MSG_TO_COL);
    }

    /** {@inheritDoc} */
    @Override public boolean addDeploymentInfo() {
        return addDepInfo || forceAddDepInfo;
    }

    /** {@inheritDoc} */
    @Override public IgniteLogger messageLogger(GridCacheSharedContext ctx) {
        return ctx.txPrepareMessageLogger();
    }

    /**
     * Sets flag mask.
     *
     * @param flag Set or clear.
     * @param mask Mask.
     */
    private void setFlag(boolean flag, int mask) {
        flags = flag ? (byte)(flags | mask) : (byte)(flags & ~mask);
    }

    /**
     * Reads flag mask.
     *
     * @param mask Mask to read.
     * @return Flag value.
     */
    private boolean isFlag(int mask) {
        return (flags & mask) != 0;
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!super.writeTo(buf, writer))
            return false;

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // Intentional fall-through: writing resumes at the saved state and
        // continues through the remaining fields; order is the wire format.
        switch (writer.state()) {
            case 8:
                if (!writer.writeByte("concurrency", concurrency != null ? (byte)concurrency.ordinal() : -1))
                    return false;

                writer.incrementState();

            case 9:
                if (!writer.writeCollection("dhtVerKeys", dhtVerKeys, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 10:
                if (!writer.writeCollection("dhtVerVals", dhtVerVals, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 11:
                if (!writer.writeByte("flags", flags))
                    return false;

                writer.incrementState();

            case 12:
                if (!writer.writeByte("isolation", isolation != null ? (byte)isolation.ordinal() : -1))
                    return false;

                writer.incrementState();

            case 13:
                if (!writer.writeByte("plc", plc))
                    return false;

                writer.incrementState();

            case 14:
                if (!writer.writeCollection("reads", reads, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 15:
                if (!writer.writeLong("threadId", threadId))
                    return false;

                writer.incrementState();

            case 16:
                if (!writer.writeLong("timeout", timeout))
                    return false;

                writer.incrementState();

            case 17:
                if (!writer.writeMap("txNodesMsg", txNodesMsg, MessageCollectionItemType.UUID, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 18:
                if (!writer.writeInt("txSize", txSize))
                    return false;

                writer.incrementState();

            case 19:
                if (!writer.writeMessage("writeVer", writeVer))
                    return false;

                writer.incrementState();

            case 20:
                if (!writer.writeCollection("writes", writes, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        if (!super.readFrom(buf, reader))
            return false;

        // Intentional fall-through, mirroring writeTo(): reading resumes at
        // the saved state; field order must match the writer exactly.
        switch (reader.state()) {
            case 8:
                byte concurrencyOrd;

                concurrencyOrd = reader.readByte("concurrency");

                if (!reader.isLastRead())
                    return false;

                concurrency = TransactionConcurrency.fromOrdinal(concurrencyOrd);

                reader.incrementState();

            case 9:
                dhtVerKeys = reader.readCollection("dhtVerKeys", MessageCollectionItemType.MSG);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 10:
                dhtVerVals = reader.readCollection("dhtVerVals", MessageCollectionItemType.MSG);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 11:
                flags = reader.readByte("flags");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 12:
                byte isolationOrd;

                isolationOrd = reader.readByte("isolation");

                if (!reader.isLastRead())
                    return false;

                isolation = TransactionIsolation.fromOrdinal(isolationOrd);

                reader.incrementState();

            case 13:
                plc = reader.readByte("plc");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 14:
                reads = reader.readCollection("reads", MessageCollectionItemType.MSG);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 15:
                threadId = reader.readLong("threadId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 16:
                timeout = reader.readLong("timeout");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 17:
                txNodesMsg = reader.readMap("txNodesMsg", MessageCollectionItemType.UUID, MessageCollectionItemType.MSG, false);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 18:
                txSize = reader.readInt("txSize");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 19:
                writeVer = reader.readMessage("writeVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 20:
                writes = reader.readCollection("writes", MessageCollectionItemType.MSG);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(GridDistributedTxPrepareRequest.class);
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        return 25;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 21;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        StringBuilder flags = new StringBuilder();

        if (needReturnValue())
            appendFlag(flags, "retVal");
        if (isInvalidate())
            appendFlag(flags, "invalidate");
        if (onePhaseCommit())
            appendFlag(flags, "onePhase");
        if (last())
            appendFlag(flags, "last");
        if (system())
            appendFlag(flags, "sys");

        return GridToStringBuilder.toString(GridDistributedTxPrepareRequest.class, this,
            "flags", flags.toString(),
            "super", super.toString());
    }
}
package ohi.andre.consolelauncher.managers;

import android.content.Context;
import android.graphics.Color;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import ohi.andre.consolelauncher.R;
import ohi.andre.consolelauncher.tuils.Tuils;
import ohi.andre.consolelauncher.tuils.interfaces.Reloadable;

/**
 * Manages user-defined command aliases persisted in {@code alias.txt}
 * (one {@code name=value} pair per line, '=' allowed inside the value).
 * Fixes over the previous revision: streams are now closed via
 * try-with-resources (the old reload/add/remove leaked them on error),
 * the alias map uses a typed HashMap instead of a raw type, and
 * printAliases builds its output with a StringBuilder.
 */
public class AliasManager implements Reloadable {

    /** Alias file name, relative to {@code Tuils.getFolder()}. */
    public static final String PATH = "alias.txt";

    /** alias name -> alias value, loaded by {@link #reload()}. */
    private Map<String, String> aliases;

    /** Quoted marker/separator for parameter substitution; label format template. */
    private String paramMarker, paramSeparator, aliasLabelFormat;

    private Context context;

    /**
     * Loads the alias file and reads formatting preferences.
     *
     * @param c context used to emit warnings about rejected aliases
     */
    public AliasManager(Context c) {
        this.context = c;

        reload();

        // Pattern.quote: the marker/separator are user-configurable literals,
        // not regexes.
        paramMarker = Pattern.quote(XMLPrefsManager.get(String.class, XMLPrefsManager.Behavior.alias_param_marker));
        paramSeparator = Pattern.quote(XMLPrefsManager.get(String.class, XMLPrefsManager.Behavior.alias_param_separator));
        aliasLabelFormat = XMLPrefsManager.get(String.class, XMLPrefsManager.Behavior.alias_content_format);
    }

    /**
     * @return all aliases as "name --> value" lines, trimmed.
     */
    public String printAliases() {
        StringBuilder output = new StringBuilder();
        for (Map.Entry<String, String> entry : aliases.entrySet()) {
            output.append(entry.getKey()).append(" --> ").append(entry.getValue()).append(Tuils.NEWLINE);
        }
        return output.toString().trim();
    }

    /**
     * Resolves an alias at the start of {@code alias}.
     *
     * When {@code supportSpaces} is true the longest leading prefix (split on
     * spaces, trimmed from the right) that names an alias wins, and the rest
     * of the input becomes the residual argument string.
     *
     * @return {@code [0]} alias value (null if none matched),
     *         {@code [1]} matched alias name (null if none matched),
     *         {@code [2]} residual string after the alias name
     */
    public String[] getAlias(String alias, boolean supportSpaces) {
        if(supportSpaces) {
            String args = Tuils.EMPTYSTRING;
            String aliasValue = null;

            while (true) {
                aliasValue = aliases.get(alias);

                if(aliasValue != null) break;
                else {
                    // Drop the last space-separated token from the candidate
                    // name and prepend it to the residual args.
                    int index = alias.lastIndexOf(Tuils.SPACE);
                    if(index == -1) return new String[] {null, null, alias};

                    args = alias.substring(index + 1) + Tuils.SPACE + args;
                    args = args.trim();

                    alias = alias.substring(0,index);
                }
            }

            return new String[] {aliasValue, alias, args};
        } else {
            return new String[] {aliases.get(alias), alias, Tuils.EMPTYSTRING};
        }
    }

    /**
     * Substitutes parameters into an alias value: each occurrence of the
     * parameter marker is replaced, in order, by one element of
     * {@code params} split on the parameter separator.
     */
    public String format(String aliasValue, String params) {
        params = params.trim();
        if(params.length() == 0) return aliasValue;

        String[] split = params.split(paramSeparator);
        for(String s : split) {
            aliasValue = aliasValue.replaceFirst(paramMarker, s);
        }

        return aliasValue;
    }

    // Placeholders understood by the label format (case-insensitive):
    // %n -> newline, %v -> alias value, %a -> alias name.
    private final Pattern pn = Pattern.compile("%n", Pattern.CASE_INSENSITIVE | Pattern.LITERAL);
    private final Pattern pv = Pattern.compile("%v", Pattern.CASE_INSENSITIVE | Pattern.LITERAL);
    private final Pattern pa = Pattern.compile("%a", Pattern.CASE_INSENSITIVE | Pattern.LITERAL);

    /**
     * Renders an alias for display using the configured label format.
     */
    public String formatLabel(String aliasName, String aliasValue) {
        String a = aliasLabelFormat;
        // quoteReplacement: the substituted text may contain '$' or '\'.
        a = pn.matcher(a).replaceAll(Matcher.quoteReplacement(Tuils.NEWLINE));
        a = pv.matcher(a).replaceAll(Matcher.quoteReplacement(aliasValue));
        a = pa.matcher(a).replaceAll(Matcher.quoteReplacement(aliasName));
        return a;
    }

    /**
     * Re-reads the alias file from disk, replacing the in-memory map.
     * Self-referential aliases (name == value, or value starting with
     * "name ") are rejected with a warning to avoid infinite expansion.
     * Best-effort: an unreadable file simply yields an empty map.
     */
    @Override
    public void reload() {
        if(aliases != null) aliases.clear();
        else aliases = new HashMap<>();

        File file = new File(Tuils.getFolder(), PATH);

        try {
            if(!file.exists()) file.createNewFile();

            // try-with-resources: the old code leaked this reader.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file)))) {
                String line;
                while((line = reader.readLine()) != null) {
                    String[] splatted = line.split("=");
                    if(splatted.length < 2) continue;

                    // Re-join everything after the first '=' so values may
                    // themselves contain '='.
                    String name, value = Tuils.EMPTYSTRING;
                    name = splatted[0];
                    for(int c = 1; c < splatted.length; c++) {
                        value += splatted[c];
                        if(c != splatted.length - 1) value += "=";
                    }

                    name = name.trim();
                    value = value.trim();

                    if(name.equalsIgnoreCase(value)) {
                        Tuils.sendOutput(Color.RED, context, context.getString(R.string.output_notaddingalias1) + Tuils.SPACE + name + Tuils.SPACE +
                                context.getString(R.string.output_notaddingalias2));
                    } else if(value.startsWith(name + Tuils.SPACE)) {
                        Tuils.sendOutput(Color.RED, context, context.getString(R.string.output_notaddingalias1) + Tuils.SPACE + name + Tuils.SPACE +
                                context.getString(R.string.output_notaddingalias3));
                    } else {
                        aliases.put(name, value);
                    }
                }
            }
        } catch (Exception ignored) {
            // best-effort: an unreadable alias file leaves the map as-is
        }
    }

    /**
     * Appends a new alias to the file and the in-memory map.
     *
     * @return true on success, false if the file could not be written
     */
    public boolean add(String name, String value) {
        try (FileOutputStream fos = new FileOutputStream(new File(Tuils.getFolder(), PATH), true)) {
            fos.write((Tuils.NEWLINE + name + "=" + value).getBytes());

            aliases.put(name, value);
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * Removes an alias by rewriting the file without its line(s).
     *
     * @return true if the rewritten file replaced the original
     */
    public boolean remove(String name) {
        reload();

        try {
            File inputFile = new File(Tuils.getFolder(), PATH);
            File tempFile = new File(Tuils.getFolder(), PATH + "2");

            String prefix = name + "=";

            // try-with-resources: the old code leaked both streams on error.
            try (BufferedReader reader = new BufferedReader(new FileReader(inputFile));
                 BufferedWriter writer = new BufferedWriter(new FileWriter(tempFile))) {
                String line;
                while((line = reader.readLine()) != null) {
                    if(line.startsWith(prefix)) continue;
                    writer.write(line + Tuils.NEWLINE);
                }
            }

            aliases.remove(name);

            return tempFile.renameTo(inputFile);
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * @return a fresh, mutable list of the alias names (never null).
     */
    public List<String> getAliases() {
        if(aliases == null) return new ArrayList<>(0);
        return new ArrayList<>(aliases.keySet());
    }
}